From 7a394fcf02d19e2ff7369b3d15f23622567526e0 Mon Sep 17 00:00:00 2001
From: pytorchbot
Date: Tue, 11 Jun 2024 03:58:31 +0000
Subject: [PATCH] Generate Python docs from pytorch/pytorch@1cd41997e99ae1722be3fe88e1867af5f6779433

---
 2.4/_images/RReLU.png                         | Bin 34449 -> 34020 bytes
 2.4/_sources/generated/exportdb/index.rst.txt |  12 +-
 .../generated/exportdb/python.assert.rst.txt  |   2 +-
 .../exportdb/python.control-flow.rst.txt      |   4 +-
 .../exportdb/python.data-structure.rst.txt    |   4 +-
 .../exportdb/torch.dynamic-shape.rst.txt      |   4 +-
 .../exportdb/torch.dynamic-value.rst.txt      |   4 +-
 .../exportdb/torch.escape-hatch.rst.txt       |   4 +-
 .../generated/exportdb/torch.operator.rst.txt |   2 +-
 2.4/generated/exportdb/index.html             |  12 +-
 2.4/generated/exportdb/python.assert.html     |   2 +-
 .../exportdb/python.control-flow.html         |   4 +-
 .../exportdb/python.data-structure.html       |   4 +-
 .../exportdb/torch.dynamic-shape.html         |   4 +-
 .../exportdb/torch.dynamic-value.html         |   4 +-
 .../exportdb/torch.escape-hatch.html          |   4 +-
 2.4/generated/exportdb/torch.operator.html    |   2 +-
 2.4/quantization-backend-configuration.html   | 180 +++++++++---------
 2.4/searchindex.js                            |   2 +-
 19 files changed, 127 insertions(+), 127 deletions(-)

diff --git a/2.4/_images/RReLU.png b/2.4/_images/RReLU.png
index 4d994f9f8ce26963b21f6c123d971230339d8d19..e500c6736c3ee64d5ad6040721508a3d33c21aa4 100644
Binary files a/2.4/_images/RReLU.png and b/2.4/_images/RReLU.png differ
zLN0fVD;=NP18I81n0lVG04$mQQ!sFW45pO%rvK~k)&K8m>_s?=ftFFLUl^3Kv!De! z>^f_OGP2ZkoucO);0{F@Sl>W9uH_b=MqLP5tY7;2LTlJx_mf@J)@G5z_HuX%;4jTJ zVk^)xBUw5gv$}Fveu%3qVMLf*Lw+V|diqjoi39(s(r($ZMf&2!i+RH+7-Q$oorpY; zh4C}_pOlwB;pmw8-5UNUq&>b&%e^X>s2KCc-MQDBH#m%2%O(r30sy3MNQaD(DJNcJ zqpvSct!tUcPO<(3#)(DA)=^-z3oy%#A~$)_#nqi!MZ&7hJ_TQFLxF8uWtQ5=sn&gg z!;`-II_b_r2mrhX0TYz47&=0N!#CJ{Am;?I?=5Tbq$d2?AS1{h)4h#aWpEbH?0u+p z_T0G}V4mrMe{Ny*Ek*z)aCCHUQP}U>1#Z=i{WE@ry6jhR?|ODljsfJ09EBVVa&mH% zbp(d_)*KD`Gk8n2jd9Q~U0v9qr)Qe2oDpO3I7EE5^B?$0Yipo&vqoV$o_w=@UXvt) zd_T`A*Nqt$tcvJ2Bg62Dv~Hq2Iau?!+KYxhLFNU6H zQ`TNjNcfqX>{kZ6e4ei5Sl?yxUs@78M(3xpM#`TBa$^Ti03!|C#m!w=&BG3d910b6 z_1mB*X~|)RRVj8LmDAE?2WjAD>(ZDU2^Kc9>aX9@t9c;!8|ZwABI zHWV?9-ZYfWNXVvU5Od}S^MJwFi{=J|AptdL51a{5%<@HkCY0O_J`Ug@gr-33MJ~(` zP?-a?epg$T4Z~5ZT@X!^47s-8Y+?XwEjd*Fk#_#=Uu?m)1=f%U>SqZ+HbjJp!p=@M@YKmzSR{sNndnK9RfZfUf}}YiNZvAsN3aosbzXz- zvCR&rGi_0B^dKmR2=;*x&f?b#v3TH&OgSHyDyO1?GKA6h4}iU^@bBJ2eBm9qr26o+ zK@a5x1g8_C83zCi$03~YGt&A)Eet>>yN)w9PTUyl2RMvI>^~K@99!K>=*b!Gx*fg^ zNr*y4kvee8!zQ9Ni{AfPWHa&`!8Z>!*!;Z5t{VLO!9z%N3z|9fvBBT?FicZXQ_yAF z3^fT*I2&B6&2WZ?_#s}1@uC6#T}#ffP~hP^Pk+TDy-^N4gyQ<53jwCc6o-24r?5In zY@LC(i$bF85^2CB2jE3a0_Cz*z+sdM{s%-H2^iCiyohW@?k~`CnIyOG|ZDBZsjpa@%a%3pFK-U0b1FRts(yup0DO?tW&Lf|=%;90$mOt0W zjA$wFY@|Eb@asc!;;2pE7I>pqbdAA{4vA#(>dAy46a-wPy;Tujf2`_7(!Q~ivq6rNuwa(b3t?XFgQ*Z1gopPs8D_r62bzo za4VFsX^Vf1QI6B*&70{ekm(enal4?On}7gHECN)lyken{uy+r-Mi4U2veNZV zpL0kQoR~uJ|8&SshcB-V8Pp(+26EFvj@i7(du*(RSUnJBj`WA%0Sm41HG}jMSZTku z2`@}cO(odzK3`!zc1`r3gyhVdkPC`wm?#kr zAUkk1_JMUCx(l>2ucnx!r8SEHDWd(Bl+6``*eh^;QlJDOs6Srd1yGNlgkU*?kI&^1 z0mveIa39L$gPELJ?Ia~US?~ni1UbeE@5onfr_=Mp*^<(>W$@ku6`)2GL9Q0njve0M zx!!@=N^sZmG(O@pB1fr;wga^UHOCjYVbSx47-XS=xviUsz>E$to3&uyOf_rX4nYzA zbvt%PDU_f$DS7(z-QpVP^ueQfw>Skd5ODKZktmboo)_%hvWQW#U^paU#KvxQ&s9{L z>pw@PuWQWDWL|%CcT~k)_&astOR!>SiaU(9F=A8<3}hHINBEcFbviD&-7La_%06ZK z@7bLKP-$uDW#kVBleQR~2KC{iAjche8Hq3yNCHpFc;%6rUB`|wcDgU{AoGe}X$G(* z`fI|G@gOiL=w^5KB^TS9w_oa_%fFOt)g_4x@92Ah!P)IJQ(P20KNuG!dy-BL9pqS^ zfV4;y29MSSx&St~|3~u|EYQshap>a}EJq-oiKfw?91i-)??3?#KUfyDo}`, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED @@ -739,7 +739,7 @@ constrain_as_value_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED @@ -999,7 +999,7 @@ dynamic_shape_if_guard .. note:: - Tags: :doc:`torch.dynamic-shape `, :doc:`python.control-flow ` + Tags: :doc:`python.control-flow `, :doc:`torch.dynamic-shape ` Support Level: SUPPORTED @@ -1263,7 +1263,7 @@ list_contains .. note:: - Tags: :doc:`python.assert `, :doc:`torch.dynamic-shape `, :doc:`python.data-structure ` + Tags: :doc:`python.assert `, :doc:`python.data-structure `, :doc:`torch.dynamic-shape ` Support Level: SUPPORTED @@ -1310,7 +1310,7 @@ list_unpack .. note:: - Tags: :doc:`python.data-structure `, :doc:`python.control-flow ` + Tags: :doc:`python.control-flow `, :doc:`python.data-structure ` Support Level: SUPPORTED @@ -2044,6 +2044,6 @@ Result: .. code-block:: - Unsupported: torch.* op returned non-Tensor int call_function + Unsupported: torch.* op returned non-Tensor int call_function diff --git a/2.4/_sources/generated/exportdb/python.assert.rst.txt b/2.4/_sources/generated/exportdb/python.assert.rst.txt index 4f4386dfaa..6d56582748 100644 --- a/2.4/_sources/generated/exportdb/python.assert.rst.txt +++ b/2.4/_sources/generated/exportdb/python.assert.rst.txt @@ -52,7 +52,7 @@ list_contains .. 
note:: - Tags: :doc:`python.assert `, :doc:`torch.dynamic-shape `, :doc:`python.data-structure ` + Tags: :doc:`python.assert `, :doc:`python.data-structure `, :doc:`torch.dynamic-shape ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/python.control-flow.rst.txt b/2.4/_sources/generated/exportdb/python.control-flow.rst.txt index 101951656b..aaea843b95 100644 --- a/2.4/_sources/generated/exportdb/python.control-flow.rst.txt +++ b/2.4/_sources/generated/exportdb/python.control-flow.rst.txt @@ -5,7 +5,7 @@ dynamic_shape_if_guard .. note:: - Tags: :doc:`torch.dynamic-shape `, :doc:`python.control-flow ` + Tags: :doc:`python.control-flow `, :doc:`torch.dynamic-shape ` Support Level: SUPPORTED @@ -52,7 +52,7 @@ list_unpack .. note:: - Tags: :doc:`python.data-structure `, :doc:`python.control-flow ` + Tags: :doc:`python.control-flow `, :doc:`python.data-structure ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/python.data-structure.rst.txt b/2.4/_sources/generated/exportdb/python.data-structure.rst.txt index ba2c206c88..3ec1e2289e 100644 --- a/2.4/_sources/generated/exportdb/python.data-structure.rst.txt +++ b/2.4/_sources/generated/exportdb/python.data-structure.rst.txt @@ -118,7 +118,7 @@ list_contains .. note:: - Tags: :doc:`python.assert `, :doc:`torch.dynamic-shape `, :doc:`python.data-structure ` + Tags: :doc:`python.assert `, :doc:`python.data-structure `, :doc:`torch.dynamic-shape ` Support Level: SUPPORTED @@ -165,7 +165,7 @@ list_unpack .. note:: - Tags: :doc:`python.data-structure `, :doc:`python.control-flow ` + Tags: :doc:`python.control-flow `, :doc:`python.data-structure ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.dynamic-shape.rst.txt b/2.4/_sources/generated/exportdb/torch.dynamic-shape.rst.txt index a20900c1b2..df54a9fede 100644 --- a/2.4/_sources/generated/exportdb/torch.dynamic-shape.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.dynamic-shape.rst.txt @@ -458,7 +458,7 @@ dynamic_shape_if_guard .. note:: - Tags: :doc:`torch.dynamic-shape `, :doc:`python.control-flow ` + Tags: :doc:`python.control-flow `, :doc:`torch.dynamic-shape ` Support Level: SUPPORTED @@ -698,7 +698,7 @@ list_contains .. note:: - Tags: :doc:`python.assert `, :doc:`torch.dynamic-shape `, :doc:`python.data-structure ` + Tags: :doc:`python.assert `, :doc:`python.data-structure `, :doc:`torch.dynamic-shape ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.dynamic-value.rst.txt b/2.4/_sources/generated/exportdb/torch.dynamic-value.rst.txt index c05ca55dc2..2e8ca3dd8c 100644 --- a/2.4/_sources/generated/exportdb/torch.dynamic-value.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.dynamic-value.rst.txt @@ -5,7 +5,7 @@ constrain_as_size_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED @@ -69,7 +69,7 @@ constrain_as_value_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.escape-hatch.rst.txt b/2.4/_sources/generated/exportdb/torch.escape-hatch.rst.txt index 8db8d539b9..13733bb1b1 100644 --- a/2.4/_sources/generated/exportdb/torch.escape-hatch.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.escape-hatch.rst.txt @@ -55,7 +55,7 @@ constrain_as_size_example .. 
note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED @@ -119,7 +119,7 @@ constrain_as_value_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.operator.rst.txt b/2.4/_sources/generated/exportdb/torch.operator.rst.txt index 654b765e4c..7c6a6a8c9e 100644 --- a/2.4/_sources/generated/exportdb/torch.operator.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.operator.rst.txt @@ -31,4 +31,4 @@ Result: .. code-block:: - Unsupported: torch.* op returned non-Tensor int call_function + Unsupported: torch.* op returned non-Tensor int call_function diff --git a/2.4/generated/exportdb/index.html b/2.4/generated/exportdb/index.html index be714d9e2e..222e80b5ef 100644 --- a/2.4/generated/exportdb/index.html +++ b/2.4/generated/exportdb/index.html @@ -1107,7 +1107,7 @@

cond_predicate¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

@@ -1163,7 +1163,7 @@

constrain_as_size_example¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

@@ -1383,7 +1383,7 @@

dynamic_shape_constructor¶

Note

-

Tags: torch.dynamic-shape, python.control-flow

+

Tags: python.control-flow, torch.dynamic-shape

Support Level: SUPPORTED

Original source code:

@@ -1607,7 +1607,7 @@

fn_with_kwargs¶

Original source code:

@@ -1646,7 +1646,7 @@

list_contains¶

Original source code:

@@ -2257,7 +2257,7 @@

torch_sym_min
-
Unsupported: torch.* op returned non-Tensor int call_function <function sym_min at 0x7f4663957ca0>
+
Unsupported: torch.* op returned non-Tensor int call_function <function sym_min at 0x7f4817597ca0>
 
diff --git a/2.4/generated/exportdb/python.assert.html b/2.4/generated/exportdb/python.assert.html index b89db62890..3f51ea151b 100644 --- a/2.4/generated/exportdb/python.assert.html +++ b/2.4/generated/exportdb/python.assert.html @@ -558,7 +558,7 @@

dynamic_shape_assert
list_contains¶

Original source code:

diff --git a/2.4/generated/exportdb/python.control-flow.html b/2.4/generated/exportdb/python.control-flow.html index 15152ad4a3..7783fb1530 100644 --- a/2.4/generated/exportdb/python.control-flow.html +++ b/2.4/generated/exportdb/python.control-flow.html @@ -519,7 +519,7 @@

python.control-flow¶

Note

-

Tags: torch.dynamic-shape, python.control-flow

+

Tags: python.control-flow, torch.dynamic-shape

Support Level: SUPPORTED

Original source code:

@@ -558,7 +558,7 @@

dynamic_shape_if_guard
list_unpack¶

Original source code:

diff --git a/2.4/generated/exportdb/python.data-structure.html b/2.4/generated/exportdb/python.data-structure.html index 8cbe40c54b..3cbb11d623 100644 --- a/2.4/generated/exportdb/python.data-structure.html +++ b/2.4/generated/exportdb/python.data-structure.html @@ -616,7 +616,7 @@

fn_with_kwargs¶

Original source code:

@@ -655,7 +655,7 @@

list_contains¶

Original source code:

diff --git a/2.4/generated/exportdb/torch.dynamic-shape.html b/2.4/generated/exportdb/torch.dynamic-shape.html index 672e6991d8..32f2d74617 100644 --- a/2.4/generated/exportdb/torch.dynamic-shape.html +++ b/2.4/generated/exportdb/torch.dynamic-shape.html @@ -924,7 +924,7 @@

dynamic_shape_constructor¶

Note

-

Tags: torch.dynamic-shape, python.control-flow

+

Tags: python.control-flow, torch.dynamic-shape

Support Level: SUPPORTED

Original source code:

@@ -1125,7 +1125,7 @@

dynamic_shape_view¶

Original source code:

diff --git a/2.4/generated/exportdb/torch.dynamic-value.html b/2.4/generated/exportdb/torch.dynamic-value.html index 8e9d6ca95c..d6cf49a258 100644 --- a/2.4/generated/exportdb/torch.dynamic-value.html +++ b/2.4/generated/exportdb/torch.dynamic-value.html @@ -519,7 +519,7 @@

torch.dynamic-value¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

@@ -575,7 +575,7 @@

constrain_as_size_example¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

diff --git a/2.4/generated/exportdb/torch.escape-hatch.html b/2.4/generated/exportdb/torch.escape-hatch.html index d2322b0223..8524ec3e0e 100644 --- a/2.4/generated/exportdb/torch.escape-hatch.html +++ b/2.4/generated/exportdb/torch.escape-hatch.html @@ -561,7 +561,7 @@

assume_constant_result
constrain_as_size_example¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

@@ -617,7 +617,7 @@

constrain_as_size_example¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

diff --git a/2.4/generated/exportdb/torch.operator.html b/2.4/generated/exportdb/torch.operator.html index 50a3f43b2d..e00b50c93b 100644 --- a/2.4/generated/exportdb/torch.operator.html +++ b/2.4/generated/exportdb/torch.operator.html @@ -538,7 +538,7 @@

torch_sym_min
-
Unsupported: torch.* op returned non-Tensor int call_function <function sym_min at 0x7f4663957ca0>
+
Unsupported: torch.* op returned non-Tensor int call_function <function sym_min at 0x7f4817597ca0>
 
diff --git a/2.4/quantization-backend-configuration.html b/2.4/quantization-backend-configuration.html index e360bed39e..9ad2d7ca29 100644 --- a/2.4/quantization-backend-configuration.html +++ b/2.4/quantization-backend-configuration.html @@ -534,7 +534,7 @@

[Hunk bodies not reproduced: the 2.4/quantization-backend-configuration.html hunks update the "Default values for native configurations" tables, and the accompanying 2.4/searchindex.js change regenerates the single-line Sphinx search index (the mapping of section titles to document numbers and anchors).]
"module-torch.distributed.elastic.multiprocessing.subprocess_handler"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.handlers"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler"], [50, "module-torch.distributed.elastic.timer"], [50, "module-torch.distributed.elastic.timer.debug_info_logging"], [52, "module-torch.export"], [52, "module-torch.export.custom_obj"], [52, "module-torch.export.dynamic_shapes"], [52, "module-torch.export.exported_program"], [52, "module-torch.export.graph_signature"], [52, "module-torch.export.unflatten"], [54, "module-torch.fft"], [55, "module-torch.distributed.fsdp"], [57, "module-torch.func"], [62, "module-torch.__future__"], [63, "module-torch.futures"], [64, "module-torch.fx"], [64, "module-torch.fx.annotate"], [64, "module-torch.fx.config"], [64, "module-torch.fx.experimental"], [64, "module-torch.fx.experimental.accelerator_partitioner"], [64, "module-torch.fx.experimental.const_fold"], [64, "module-torch.fx.experimental.debug"], [64, "module-torch.fx.experimental.graph_gradual_typechecker"], [64, "module-torch.fx.experimental.merge_matmul"], [64, "module-torch.fx.experimental.meta_tracer"], [64, "module-torch.fx.experimental.migrate_gradual_types"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint_generator"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint_transformation"], [64, "module-torch.fx.experimental.migrate_gradual_types.operation"], [64, "module-torch.fx.experimental.migrate_gradual_types.transform_to_z3"], [64, "module-torch.fx.experimental.migrate_gradual_types.util"], [64, "module-torch.fx.experimental.migrate_gradual_types.z3_types"], [64, "module-torch.fx.experimental.normalize"], [64, "module-torch.fx.experimental.optimization"], [64, "module-torch.fx.experimental.partitioner_utils"], [64, "module-torch.fx.experimental.proxy_tensor"], [64, "module-torch.fx.experimental.recording"], [64, "module-torch.fx.experimental.refinement_types"], [64, "module-torch.fx.experimental.rewriter"], [64, "module-torch.fx.experimental.schema_type_annotation"], [64, "module-torch.fx.experimental.sym_node"], [64, "module-torch.fx.experimental.unification"], [64, "module-torch.fx.experimental.unification.core"], [64, "module-torch.fx.experimental.unification.dispatch"], [64, "module-torch.fx.experimental.unification.match"], [64, "module-torch.fx.experimental.unification.more"], [64, "module-torch.fx.experimental.unification.multipledispatch"], [64, "module-torch.fx.experimental.unification.multipledispatch.conflict"], [64, "module-torch.fx.experimental.unification.multipledispatch.core"], [64, "module-torch.fx.experimental.unification.multipledispatch.dispatcher"], [64, "module-torch.fx.experimental.unification.multipledispatch.utils"], [64, "module-torch.fx.experimental.unification.multipledispatch.variadic"], [64, "module-torch.fx.experimental.unification.unification_tools"], [64, "module-torch.fx.experimental.unification.utils"], [64, "module-torch.fx.experimental.unification.variable"], [64, "module-torch.fx.experimental.unify_refinements"], [64, "module-torch.fx.experimental.validator"], [64, "module-torch.fx.graph"], [64, "module-torch.fx.graph_module"], [64, "module-torch.fx.immutable_collections"], [64, "module-torch.fx.interpreter"], [64, "module-torch.fx.node"], [64, "module-torch.fx.operator_schemas"], [64, "module-torch.fx.passes"], [64, 
"module-torch.fx.passes.annotate_getitem_nodes"], [64, "module-torch.fx.passes.backends"], [64, "module-torch.fx.passes.backends.cudagraphs"], [64, "module-torch.fx.passes.dialect"], [64, "module-torch.fx.passes.dialect.common"], [64, "module-torch.fx.passes.dialect.common.cse_pass"], [64, "module-torch.fx.passes.fake_tensor_prop"], [64, "module-torch.fx.passes.graph_drawer"], [64, "module-torch.fx.passes.graph_manipulation"], [64, "module-torch.fx.passes.graph_transform_observer"], [64, "module-torch.fx.passes.infra"], [64, "module-torch.fx.passes.infra.partitioner"], [64, "module-torch.fx.passes.infra.pass_base"], [64, "module-torch.fx.passes.infra.pass_manager"], [64, "module-torch.fx.passes.net_min_base"], [64, "module-torch.fx.passes.operator_support"], [64, "module-torch.fx.passes.param_fetch"], [64, "module-torch.fx.passes.pass_manager"], [64, "module-torch.fx.passes.reinplace"], [64, "module-torch.fx.passes.runtime_assert"], [64, "module-torch.fx.passes.shape_prop"], [64, "module-torch.fx.passes.split_module"], [64, "module-torch.fx.passes.split_utils"], [64, "module-torch.fx.passes.splitter_base"], [64, "module-torch.fx.passes.tests"], [64, "module-torch.fx.passes.tests.test_pass_manager"], [64, "module-torch.fx.passes.tools_common"], [64, "module-torch.fx.passes.utils"], [64, "module-torch.fx.passes.utils.common"], [64, "module-torch.fx.passes.utils.fuser_utils"], [64, "module-torch.fx.passes.utils.matcher_utils"], [64, "module-torch.fx.passes.utils.matcher_with_name_node_map_utils"], [64, "module-torch.fx.passes.utils.source_matcher_utils"], [64, "module-torch.fx.proxy"], [64, "module-torch.fx.subgraph_rewriter"], [64, "module-torch.fx.tensor_type"], [64, "module-torch.fx.traceback"], [65, "module-torch.fx.experimental.symbolic_shapes"], [2011, "module-torch.hub"], [2013, "module-torch.jit"], [2013, "module-torch.jit.annotations"], [2013, "module-torch.jit.frontend"], [2013, "module-torch.jit.generate_bytecode"], [2013, "module-torch.jit.mobile"], [2013, "module-torch.jit.quantized"], [2014, "module-torch.jit.supported_ops"], [2018, "module-torch.jit.unsupported_tensor_ops"], [2019, "module-torch.utils.jit"], [2020, "module-torch.library"], [2021, "module-torch.linalg"], [2022, "module-torch._logging"], [2023, "module-torch.masked"], [2023, "module-torch.masked.maskedtensor"], [2023, "module-torch.masked.maskedtensor.binary"], [2023, "module-torch.masked.maskedtensor.core"], [2023, "module-torch.masked.maskedtensor.creation"], [2023, "module-torch.masked.maskedtensor.passthrough"], [2023, "module-torch.masked.maskedtensor.reductions"], [2023, "module-torch.masked.maskedtensor.unary"], [2027, "module-torch.utils.model_zoo"], [2028, "module-torch.utils.module_tracker"], [2029, "module-torch.monitor"], [2030, "module-torch.mps"], [2030, "module-torch.mps.event"], [2030, "module-torch.mps.profiler"], [2031, "module-torch.mtia"], [2032, "module-torch.multiprocessing"], [2032, "module-torch.multiprocessing.pool"], [2032, "module-torch.multiprocessing.queue"], [2032, "module-torch.multiprocessing.reductions"], [2032, "module-torch.multiprocessing.spawn"], [2035, "module-torch.nested"], [2036, "module-torch.nn"], [2036, "module-torch.nn.backends"], [2036, "module-torch.nn.backends.thnn"], [2036, "module-torch.nn.common_types"], [2036, "module-torch.nn.cpp"], [2036, "module-torch.nn.functional"], [2036, "module-torch.nn.grad"], [2036, "module-torch.nn.init"], [2036, "module-torch.nn.modules"], [2036, "module-torch.nn.modules.activation"], [2036, "module-torch.nn.modules.adaptive"], 
[2036, "module-torch.nn.modules.batchnorm"], [2036, "module-torch.nn.modules.channelshuffle"], [2036, "module-torch.nn.modules.container"], [2036, "module-torch.nn.modules.conv"], [2036, "module-torch.nn.modules.distance"], [2036, "module-torch.nn.modules.dropout"], [2036, "module-torch.nn.modules.flatten"], [2036, "module-torch.nn.modules.fold"], [2036, "module-torch.nn.modules.instancenorm"], [2036, "module-torch.nn.modules.lazy"], [2036, "module-torch.nn.modules.linear"], [2036, "module-torch.nn.modules.loss"], [2036, "module-torch.nn.modules.module"], [2036, "module-torch.nn.modules.normalization"], [2036, "module-torch.nn.modules.padding"], [2036, "module-torch.nn.modules.pixelshuffle"], [2036, "module-torch.nn.modules.pooling"], [2036, "module-torch.nn.modules.rnn"], [2036, "module-torch.nn.modules.sparse"], [2036, "module-torch.nn.modules.transformer"], [2036, "module-torch.nn.modules.upsampling"], [2036, "module-torch.nn.modules.utils"], [2036, "module-torch.nn.parallel"], [2036, "module-torch.nn.parallel.comm"], [2036, "module-torch.nn.parallel.distributed"], [2036, "module-torch.nn.parallel.parallel_apply"], [2036, "module-torch.nn.parallel.replicate"], [2036, "module-torch.nn.parallel.scatter_gather"], [2036, "module-torch.nn.parameter"], [2036, "module-torch.nn.utils"], [2036, "module-torch.nn.utils.clip_grad"], [2036, "module-torch.nn.utils.convert_parameters"], [2036, "module-torch.nn.utils.fusion"], [2036, "module-torch.nn.utils.init"], [2036, "module-torch.nn.utils.memory_format"], [2036, "module-torch.nn.utils.parametrizations"], [2036, "module-torch.nn.utils.parametrize"], [2036, "module-torch.nn.utils.prune"], [2036, "module-torch.nn.utils.rnn"], [2036, "module-torch.nn.utils.stateless"], [2037, "module-torch.nn.attention"], [2038, "module-torch.nn.attention.bias"], [2062, "module-torch.onnx.errors"], [2062, "module-torch.onnx.operators"], [2062, "module-torch.onnx.symbolic_caffe2"], [2062, "module-torch.onnx.symbolic_helper"], [2062, "module-torch.onnx.symbolic_opset10"], [2062, "module-torch.onnx.symbolic_opset11"], [2062, "module-torch.onnx.symbolic_opset12"], [2062, "module-torch.onnx.symbolic_opset13"], [2062, "module-torch.onnx.symbolic_opset14"], [2062, "module-torch.onnx.symbolic_opset15"], [2062, "module-torch.onnx.symbolic_opset16"], [2062, "module-torch.onnx.symbolic_opset17"], [2062, "module-torch.onnx.symbolic_opset18"], [2062, "module-torch.onnx.symbolic_opset19"], [2062, "module-torch.onnx.symbolic_opset20"], [2062, "module-torch.onnx.symbolic_opset7"], [2062, "module-torch.onnx.symbolic_opset8"], [2062, "module-torch.onnx.symbolic_opset9"], [2062, "module-torch.onnx.utils"], [2062, "module-torch.onnx.verification"], [2065, "module-torch.onnx"], [2067, "module-torch.optim"], [2067, "module-torch.optim.adadelta"], [2067, "module-torch.optim.adagrad"], [2067, "module-torch.optim.adam"], [2067, "module-torch.optim.adamax"], [2067, "module-torch.optim.adamw"], [2067, "module-torch.optim.asgd"], [2067, "module-torch.optim.lbfgs"], [2067, "module-torch.optim.lr_scheduler"], [2067, "module-torch.optim.nadam"], [2067, "module-torch.optim.optimizer"], [2067, "module-torch.optim.radam"], [2067, "module-torch.optim.rmsprop"], [2067, "module-torch.optim.rprop"], [2067, "module-torch.optim.sgd"], [2067, "module-torch.optim.sparse_adam"], [2067, "module-torch.optim.swa_utils"], [2068, "module-torch.package"], [2068, "module-torch.package.analyze"], [2068, "module-torch.package.analyze.find_first_use_of_broken_modules"], [2068, 
"module-torch.package.analyze.is_from_package"], [2068, "module-torch.package.analyze.trace_dependencies"], [2068, "module-torch.package.file_structure_representation"], [2068, "module-torch.package.find_file_dependencies"], [2068, "module-torch.package.glob_group"], [2068, "module-torch.package.importer"], [2068, "module-torch.package.package_exporter"], [2068, "module-torch.package.package_importer"], [2069, "module-torch.profiler"], [2069, "module-torch.profiler.itt"], [2069, "module-torch.profiler.profiler"], [2069, "module-torch.profiler.python_tracer"], [2070, "module-torch.ao"], [2070, "module-torch.ao.nn"], [2070, "module-torch.ao.nn.intrinsic.modules.fused"], [2070, "module-torch.ao.nn.intrinsic.qat.modules.conv_fused"], [2070, "module-torch.ao.nn.intrinsic.qat.modules.linear_fused"], [2070, "module-torch.ao.nn.intrinsic.qat.modules.linear_relu"], [2070, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu"], [2070, "module-torch.ao.nn.intrinsic.quantized.modules.bn_relu"], [2070, "module-torch.ao.nn.intrinsic.quantized.modules.conv_add"], [2070, "module-torch.ao.nn.intrinsic.quantized.modules.conv_relu"], [2070, "module-torch.ao.nn.intrinsic.quantized.modules.linear_relu"], [2070, "module-torch.ao.nn.qat.dynamic.modules.linear"], [2070, "module-torch.ao.nn.qat.modules.conv"], [2070, "module-torch.ao.nn.qat.modules.embedding_ops"], [2070, "module-torch.ao.nn.qat.modules.linear"], [2070, "module-torch.ao.nn.quantizable"], [2070, "module-torch.ao.nn.quantizable.modules"], [2070, "module-torch.ao.nn.quantizable.modules.activation"], [2070, "module-torch.ao.nn.quantizable.modules.rnn"], [2070, "module-torch.ao.nn.quantized"], [2070, "module-torch.ao.nn.quantized.dynamic.modules.conv"], [2070, "module-torch.ao.nn.quantized.dynamic.modules.linear"], [2070, "module-torch.ao.nn.quantized.dynamic.modules.rnn"], [2070, "module-torch.ao.nn.quantized.modules.activation"], [2070, "module-torch.ao.nn.quantized.modules.batchnorm"], [2070, "module-torch.ao.nn.quantized.modules.conv"], [2070, "module-torch.ao.nn.quantized.modules.dropout"], [2070, "module-torch.ao.nn.quantized.modules.embedding_ops"], [2070, "module-torch.ao.nn.quantized.modules.functional_modules"], [2070, "module-torch.ao.nn.quantized.modules.linear"], [2070, "module-torch.ao.nn.quantized.modules.normalization"], [2070, "module-torch.ao.nn.quantized.modules.rnn"], [2070, "module-torch.ao.nn.quantized.modules.utils"], [2070, "module-torch.ao.nn.quantized.reference"], [2070, "module-torch.ao.nn.quantized.reference.modules"], [2070, "module-torch.ao.nn.quantized.reference.modules.conv"], [2070, "module-torch.ao.nn.quantized.reference.modules.linear"], [2070, "module-torch.ao.nn.quantized.reference.modules.rnn"], [2070, "module-torch.ao.nn.quantized.reference.modules.sparse"], [2070, "module-torch.ao.nn.quantized.reference.modules.utils"], [2070, "module-torch.ao.nn.sparse"], [2070, "module-torch.ao.nn.sparse.quantized"], [2070, "module-torch.ao.nn.sparse.quantized.dynamic"], [2070, "module-torch.ao.nn.sparse.quantized.dynamic.linear"], [2070, "module-torch.ao.nn.sparse.quantized.linear"], [2070, "module-torch.ao.nn.sparse.quantized.utils"], [2070, "module-torch.ao.ns"], [2070, "module-torch.ao.ns.fx"], [2070, "module-torch.ao.ns.fx.graph_matcher"], [2070, "module-torch.ao.ns.fx.graph_passes"], [2070, "module-torch.ao.ns.fx.mappings"], [2070, "module-torch.ao.ns.fx.n_shadows_utils"], [2070, "module-torch.ao.ns.fx.ns_types"], [2070, "module-torch.ao.ns.fx.pattern_utils"], [2070, 
"module-torch.ao.ns.fx.qconfig_multi_mapping"], [2070, "module-torch.ao.ns.fx.utils"], [2070, "module-torch.ao.ns.fx.weight_utils"], [2070, "module-torch.ao.pruning"], [2070, "module-torch.ao.pruning.scheduler"], [2070, "module-torch.ao.pruning.scheduler.base_scheduler"], [2070, "module-torch.ao.pruning.scheduler.cubic_scheduler"], [2070, "module-torch.ao.pruning.scheduler.lambda_scheduler"], [2070, "module-torch.ao.pruning.sparsifier"], [2070, "module-torch.ao.pruning.sparsifier.base_sparsifier"], [2070, "module-torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier"], [2070, "module-torch.ao.pruning.sparsifier.utils"], [2070, "module-torch.ao.pruning.sparsifier.weight_norm_sparsifier"], [2070, "module-torch.ao.quantization"], [2070, "module-torch.ao.quantization.backend_config"], [2070, "module-torch.ao.quantization.backend_config.backend_config"], [2070, "module-torch.ao.quantization.backend_config.executorch"], [2070, "module-torch.ao.quantization.backend_config.fbgemm"], [2070, "module-torch.ao.quantization.backend_config.native"], [2070, "module-torch.ao.quantization.backend_config.observation_type"], [2070, "module-torch.ao.quantization.backend_config.onednn"], [2070, "module-torch.ao.quantization.backend_config.qnnpack"], [2070, "module-torch.ao.quantization.backend_config.tensorrt"], [2070, "module-torch.ao.quantization.backend_config.utils"], [2070, "module-torch.ao.quantization.backend_config.x86"], [2070, "module-torch.ao.quantization.fake_quantize"], [2070, "module-torch.ao.quantization.fuse_modules"], [2070, "module-torch.ao.quantization.fuser_method_mappings"], [2070, "module-torch.ao.quantization.fx"], [2070, "module-torch.ao.quantization.fx.convert"], [2070, "module-torch.ao.quantization.fx.custom_config"], [2070, "module-torch.ao.quantization.fx.fuse"], [2070, "module-torch.ao.quantization.fx.fuse_handler"], [2070, "module-torch.ao.quantization.fx.graph_module"], [2070, "module-torch.ao.quantization.fx.lower_to_fbgemm"], [2070, "module-torch.ao.quantization.fx.lower_to_qnnpack"], [2070, "module-torch.ao.quantization.fx.lstm_utils"], [2070, "module-torch.ao.quantization.fx.match_utils"], [2070, "module-torch.ao.quantization.fx.pattern_utils"], [2070, "module-torch.ao.quantization.fx.prepare"], [2070, "module-torch.ao.quantization.fx.qconfig_mapping_utils"], [2070, "module-torch.ao.quantization.fx.quantize_handler"], [2070, "module-torch.ao.quantization.fx.tracer"], [2070, "module-torch.ao.quantization.fx.utils"], [2070, "module-torch.ao.quantization.observer"], [2070, "module-torch.ao.quantization.pt2e.duplicate_dq_pass"], [2070, "module-torch.ao.quantization.pt2e.export_utils"], [2070, "module-torch.ao.quantization.pt2e.graph_utils"], [2070, "module-torch.ao.quantization.pt2e.port_metadata_pass"], [2070, "module-torch.ao.quantization.pt2e.prepare"], [2070, "module-torch.ao.quantization.pt2e.qat_utils"], [2070, "module-torch.ao.quantization.pt2e.representation.rewrite"], [2070, "module-torch.ao.quantization.pt2e.utils"], [2070, "module-torch.ao.quantization.qconfig"], [2070, "module-torch.ao.quantization.qconfig_mapping"], [2070, "module-torch.ao.quantization.quant_type"], [2070, "module-torch.ao.quantization.quantization_mappings"], [2070, "module-torch.ao.quantization.quantize_fx"], [2070, "module-torch.ao.quantization.quantize_jit"], [2070, "module-torch.ao.quantization.quantize_pt2e"], [2070, "module-torch.ao.quantization.quantizer.composable_quantizer"], [2070, "module-torch.ao.quantization.quantizer.embedding_quantizer"], [2070, 
"module-torch.ao.quantization.quantizer.quantizer"], [2070, "module-torch.ao.quantization.quantizer.utils"], [2070, "module-torch.ao.quantization.quantizer.x86_inductor_quantizer"], [2070, "module-torch.ao.quantization.quantizer.xnnpack_quantizer"], [2070, "module-torch.ao.quantization.quantizer.xnnpack_quantizer_utils"], [2070, "module-torch.ao.quantization.stubs"], [2070, "module-torch.ao.quantization.utils"], [2070, "module-torch.nn.intrinsic.modules.fused"], [2070, "module-torch.nn.intrinsic.qat.modules.conv_fused"], [2070, "module-torch.nn.intrinsic.qat.modules.linear_fused"], [2070, "module-torch.nn.intrinsic.qat.modules.linear_relu"], [2070, "module-torch.nn.intrinsic.quantized.dynamic.modules.linear_relu"], [2070, "module-torch.nn.intrinsic.quantized.modules.bn_relu"], [2070, "module-torch.nn.intrinsic.quantized.modules.conv_relu"], [2070, "module-torch.nn.intrinsic.quantized.modules.linear_relu"], [2070, "module-torch.nn.qat.dynamic.modules.linear"], [2070, "module-torch.nn.qat.modules.conv"], [2070, "module-torch.nn.qat.modules.embedding_ops"], [2070, "module-torch.nn.qat.modules.linear"], [2070, "module-torch.nn.quantizable.modules.activation"], [2070, "module-torch.nn.quantizable.modules.rnn"], [2070, "module-torch.nn.quantized.dynamic.modules.conv"], [2070, "module-torch.nn.quantized.dynamic.modules.linear"], [2070, "module-torch.nn.quantized.dynamic.modules.rnn"], [2070, "module-torch.nn.quantized.functional"], [2070, "module-torch.nn.quantized.modules.activation"], [2070, "module-torch.nn.quantized.modules.batchnorm"], [2070, "module-torch.nn.quantized.modules.conv"], [2070, "module-torch.nn.quantized.modules.dropout"], [2070, "module-torch.nn.quantized.modules.embedding_ops"], [2070, "module-torch.nn.quantized.modules.functional_modules"], [2070, "module-torch.nn.quantized.modules.linear"], [2070, "module-torch.nn.quantized.modules.normalization"], [2070, "module-torch.nn.quantized.modules.rnn"], [2070, "module-torch.nn.quantized.modules.utils"], [2070, "module-torch.quantization.fake_quantize"], [2070, "module-torch.quantization.fuse_modules"], [2070, "module-torch.quantization.fuser_method_mappings"], [2070, "module-torch.quantization.fx.convert"], [2070, "module-torch.quantization.fx.fuse"], [2070, "module-torch.quantization.fx.fusion_patterns"], [2070, "module-torch.quantization.fx.graph_module"], [2070, "module-torch.quantization.fx.match_utils"], [2070, "module-torch.quantization.fx.pattern_utils"], [2070, "module-torch.quantization.fx.prepare"], [2070, "module-torch.quantization.fx.quantization_patterns"], [2070, "module-torch.quantization.fx.quantization_types"], [2070, "module-torch.quantization.fx.utils"], [2070, "module-torch.quantization.observer"], [2070, "module-torch.quantization.qconfig"], [2070, "module-torch.quantization.quant_type"], [2070, "module-torch.quantization.quantization_mappings"], [2070, "module-torch.quantization.quantize"], [2070, "module-torch.quantization.quantize_fx"], [2070, "module-torch.quantization.quantize_jit"], [2070, "module-torch.quantization.stubs"], [2070, "module-torch.quantization.utils"], [2073, "module-torch.ao.nn.intrinsic"], [2073, "module-torch.ao.nn.intrinsic.modules"], [2073, "module-torch.ao.nn.intrinsic.qat"], [2073, "module-torch.ao.nn.intrinsic.qat.modules"], [2073, "module-torch.ao.nn.intrinsic.quantized"], [2073, "module-torch.ao.nn.intrinsic.quantized.dynamic"], [2073, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules"], [2073, "module-torch.ao.nn.intrinsic.quantized.modules"], [2073, 
"module-torch.ao.nn.qat"], [2073, "module-torch.ao.nn.qat.dynamic"], [2073, "module-torch.ao.nn.qat.dynamic.modules"], [2073, "module-torch.ao.nn.qat.modules"], [2073, "module-torch.ao.nn.quantized.dynamic"], [2073, "module-torch.ao.nn.quantized.dynamic.modules"], [2073, "module-torch.ao.nn.quantized.functional"], [2073, "module-torch.ao.nn.quantized.modules"], [2073, "module-torch.ao.quantization.pt2e"], [2073, "module-torch.ao.quantization.pt2e.generate_numeric_debug_handle"], [2073, "module-torch.ao.quantization.pt2e.representation"], [2073, "module-torch.ao.quantization.quantizer"], [2073, "module-torch.nn.intrinsic"], [2073, "module-torch.nn.intrinsic.modules"], [2073, "module-torch.nn.intrinsic.qat"], [2073, "module-torch.nn.intrinsic.qat.modules"], [2073, "module-torch.nn.intrinsic.quantized"], [2073, "module-torch.nn.intrinsic.quantized.dynamic"], [2073, "module-torch.nn.intrinsic.quantized.dynamic.modules"], [2073, "module-torch.nn.intrinsic.quantized.modules"], [2073, "module-torch.nn.qat"], [2073, "module-torch.nn.qat.dynamic"], [2073, "module-torch.nn.qat.dynamic.modules"], [2073, "module-torch.nn.qat.modules"], [2073, "module-torch.nn.quantizable"], [2073, "module-torch.nn.quantizable.modules"], [2073, "module-torch.nn.quantized"], [2073, "module-torch.nn.quantized.dynamic"], [2073, "module-torch.nn.quantized.dynamic.modules"], [2073, "module-torch.nn.quantized.modules"], [2073, "module-torch.quantization"], [2073, "module-torch.quantization.fx"], [2074, "module-torch.random"], [2075, "module-torch.distributed.autograd"], [2075, "module-torch.distributed.rpc"], [2078, "module-torch.signal"], [2078, "module-torch.signal.windows"], [2080, "module-torch.sparse"], [2081, "module-torch.special"], [2085, "module-torch.utils.tensorboard"], [2087, "module-torch.testing"], [2089, "module-torch"], [2089, "module-torch.contrib"], [2089, "module-torch.functional"], [2089, "module-torch.quasirandom"], [2089, "module-torch.return_types"], [2089, "module-torch.serialization"], [2089, "module-torch.signal.windows.windows"], [2089, "module-torch.sparse.semi_structured"], [2089, "module-torch.storage"], [2089, "module-torch.torch_version"], [2089, "module-torch.types"], [2089, "module-torch.utils.backcompat"], [2089, "module-torch.utils.hipify"], [2089, "module-torch.utils.model_dump"], [2089, "module-torch.utils.viz"], [2089, "module-torch.version"], [2090, "module-torch.ao.ns._numeric_suite"], [2091, "module-torch.ao.ns._numeric_suite_fx"], [2094, "module-torch.compiler"], [2112, "module-torch.overrides"], [2117, "module-torch.utils"], [2117, "module-torch.utils.backend_registration"], [2117, "module-torch.utils.benchmark.examples.blas_compare_setup"], [2117, "module-torch.utils.benchmark.examples.compare"], [2117, "module-torch.utils.benchmark.examples.fuzzer"], [2117, "module-torch.utils.benchmark.examples.op_benchmark"], [2117, "module-torch.utils.benchmark.examples.simple_timeit"], [2117, "module-torch.utils.benchmark.examples.spectral_ops_fuzz_test"], [2117, "module-torch.utils.benchmark.op_fuzzers.binary"], [2117, "module-torch.utils.benchmark.op_fuzzers.sparse_binary"], [2117, "module-torch.utils.benchmark.op_fuzzers.sparse_unary"], [2117, "module-torch.utils.benchmark.op_fuzzers.spectral"], [2117, "module-torch.utils.benchmark.op_fuzzers.unary"], [2117, "module-torch.utils.benchmark.utils.common"], [2117, "module-torch.utils.benchmark.utils.compare"], [2117, "module-torch.utils.benchmark.utils.compile"], [2117, "module-torch.utils.benchmark.utils.cpp_jit"], [2117, 
"module-torch.utils.benchmark.utils.fuzzer"], [2117, "module-torch.utils.benchmark.utils.sparse_fuzzer"], [2117, "module-torch.utils.benchmark.utils.timer"], [2117, "module-torch.utils.benchmark.utils.valgrind_wrapper.timer_interface"], [2117, "module-torch.utils.bundled_inputs"], [2117, "module-torch.utils.checkpoint"], [2117, "module-torch.utils.collect_env"], [2117, "module-torch.utils.cpp_backtrace"], [2117, "module-torch.utils.cpp_extension"], [2117, "module-torch.utils.data.backward_compatibility"], [2117, "module-torch.utils.data.dataloader"], [2117, "module-torch.utils.data.datapipes.dataframe.dataframe_wrapper"], [2117, "module-torch.utils.data.datapipes.dataframe.dataframes"], [2117, "module-torch.utils.data.datapipes.dataframe.datapipes"], [2117, "module-torch.utils.data.datapipes.dataframe.structures"], [2117, "module-torch.utils.data.datapipes.datapipe"], [2117, "module-torch.utils.data.datapipes.gen_pyi"], [2117, "module-torch.utils.data.datapipes.iter.callable"], [2117, "module-torch.utils.data.datapipes.iter.combinatorics"], [2117, "module-torch.utils.data.datapipes.iter.combining"], [2117, "module-torch.utils.data.datapipes.iter.filelister"], [2117, "module-torch.utils.data.datapipes.iter.fileopener"], [2117, "module-torch.utils.data.datapipes.iter.grouping"], [2117, "module-torch.utils.data.datapipes.iter.routeddecoder"], [2117, "module-torch.utils.data.datapipes.iter.selecting"], [2117, "module-torch.utils.data.datapipes.iter.sharding"], [2117, "module-torch.utils.data.datapipes.iter.streamreader"], [2117, "module-torch.utils.data.datapipes.iter.utils"], [2117, "module-torch.utils.data.datapipes.map.callable"], [2117, "module-torch.utils.data.datapipes.map.combinatorics"], [2117, "module-torch.utils.data.datapipes.map.combining"], [2117, "module-torch.utils.data.datapipes.map.grouping"], [2117, "module-torch.utils.data.datapipes.map.utils"], [2117, "module-torch.utils.data.datapipes.utils.common"], [2117, "module-torch.utils.data.datapipes.utils.decoder"], [2117, "module-torch.utils.data.datapipes.utils.snapshot"], [2117, "module-torch.utils.data.dataset"], [2117, "module-torch.utils.data.distributed"], [2117, "module-torch.utils.data.graph"], [2117, "module-torch.utils.data.graph_settings"], [2117, "module-torch.utils.data.sampler"], [2117, "module-torch.utils.dlpack"], [2117, "module-torch.utils.file_baton"], [2117, "module-torch.utils.flop_counter"], [2117, "module-torch.utils.hipify.constants"], [2117, "module-torch.utils.hipify.cuda_to_hip_mappings"], [2117, "module-torch.utils.hipify.hipify_python"], [2117, "module-torch.utils.hipify.version"], [2117, "module-torch.utils.hooks"], [2117, "module-torch.utils.jit.log_extract"], [2117, "module-torch.utils.mkldnn"], [2117, "module-torch.utils.mobile_optimizer"], [2117, "module-torch.utils.show_pickle"], [2117, "module-torch.utils.tensorboard.summary"], [2117, "module-torch.utils.tensorboard.writer"], [2117, "module-torch.utils.throughput_benchmark"], [2117, "module-torch.utils.weak"], [2118, "module-torch.xpu"], [2118, "module-torch.xpu.random"], [2118, "module-torch.xpu.streams"]], "torch.amp": [[0, "module-torch.amp"]], "torch.amp.autocast_mode": [[0, "module-torch.amp.autocast_mode"]], "torch.amp.grad_scaler": [[0, "module-torch.amp.grad_scaler"]], "torch.cpu.amp": [[0, "module-torch.cpu.amp"]], "torch.cpu.amp.autocast_mode": [[0, "module-torch.cpu.amp.autocast_mode"]], "torch.cpu.amp.grad_scaler": [[0, "module-torch.cpu.amp.grad_scaler"]], "torch.cuda.amp": [[0, "module-torch.cuda.amp"]], 
"torch.cuda.amp.autocast_mode": [[0, "module-torch.cuda.amp.autocast_mode"]], "torch.cuda.amp.common": [[0, "module-torch.cuda.amp.common"]], "torch.cuda.amp.grad_scaler": [[0, "module-torch.cuda.amp.grad_scaler"]], "function (class in torch.autograd)": [[1, "torch.autograd.Function"]], "gradientedge (class in torch.autograd.graph)": [[1, "torch.autograd.graph.GradientEdge"]], "allow_mutation_on_saved_tensors (class in torch.autograd.graph)": [[1, "torch.autograd.graph.allow_mutation_on_saved_tensors"]], "detect_anomaly (class in torch.autograd)": [[1, "torch.autograd.detect_anomaly"]], "disable_saved_tensors_hooks (class in torch.autograd.graph)": [[1, "torch.autograd.graph.disable_saved_tensors_hooks"]], "emit_itt (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.emit_itt"]], "emit_nvtx (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.emit_nvtx"]], "get_gradient_edge() (in module torch.autograd.graph)": [[1, "torch.autograd.graph.get_gradient_edge"]], "profile (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.profile"]], "register_multi_grad_hook (class in torch.autograd.graph)": [[1, "torch.autograd.graph.register_multi_grad_hook"]], "save_on_cpu (class in torch.autograd.graph)": [[1, "torch.autograd.graph.save_on_cpu"]], "saved_tensors_hooks (class in torch.autograd.graph)": [[1, "torch.autograd.graph.saved_tensors_hooks"]], "set_detect_anomaly (class in torch.autograd)": [[1, "torch.autograd.set_detect_anomaly"]], "torch.autograd": [[1, "module-torch.autograd"]], "torch.autograd.anomaly_mode": [[1, "module-torch.autograd.anomaly_mode"]], "torch.autograd.forward_ad": [[1, "module-torch.autograd.forward_ad"]], "torch.autograd.function": [[1, "module-torch.autograd.function"]], "torch.autograd.functional": [[1, "module-torch.autograd.functional"]], "torch.autograd.grad_mode": [[1, "module-torch.autograd.grad_mode"]], "torch.autograd.gradcheck": [[1, "module-torch.autograd.gradcheck"]], "torch.autograd.graph": [[1, "module-torch.autograd.graph"]], "torch.autograd.profiler": [[1, "module-torch.autograd.profiler"]], "torch.autograd.profiler_legacy": [[1, "module-torch.autograd.profiler_legacy"]], "torch.autograd.profiler_util": [[1, "module-torch.autograd.profiler_util"]], "torch.autograd.variable": [[1, "module-torch.autograd.variable"]], "sdpaparams (class in torch.backends.cuda)": [[2, "torch.backends.cuda.SDPAParams"]], "allow_bf16_reduced_precision_reduction (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_bf16_reduced_precision_reduction"]], "allow_fp16_reduced_precision_reduction (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction"]], "allow_tf32 (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_tf32"]], "allow_tf32 (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.allow_tf32"]], "benchmark (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.benchmark"]], "benchmark_limit (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.benchmark_limit"]], "can_use_cudnn_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_cudnn_attention"]], "can_use_efficient_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_efficient_attention"]], "can_use_flash_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_flash_attention"]], "clear() (in module torch.backends.cuda.cufft_plan_cache)": [[2, 
"torch.backends.cuda.cufft_plan_cache.clear"]], "cudnn_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.cudnn_sdp_enabled"]], "cufft_plan_cache (in module torch.backends.cuda)": [[2, "torch.backends.cuda.cufft_plan_cache"]], "deterministic (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.deterministic"]], "enable_cudnn_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_cudnn_sdp"]], "enable_flash_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_flash_sdp"]], "enable_math_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_math_sdp"]], "enable_mem_efficient_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_mem_efficient_sdp"]], "enabled (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.enabled"]], "enabled (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.enabled"]], "flags() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.flags"]], "flash_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.flash_sdp_enabled"]], "get_cpu_capability() (in module torch.backends.cpu)": [[2, "torch.backends.cpu.get_cpu_capability"]], "get_fastpath_enabled() (in module torch.backends.mha)": [[2, "torch.backends.mha.get_fastpath_enabled"]], "get_opt_einsum() (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.get_opt_einsum"]], "is_available() (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.is_available"]], "is_available() (in module torch.backends.mkl)": [[2, "torch.backends.mkl.is_available"]], "is_available() (in module torch.backends.mkldnn)": [[2, "torch.backends.mkldnn.is_available"]], "is_available() (in module torch.backends.mps)": [[2, "torch.backends.mps.is_available"]], "is_available() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.is_available"]], "is_available() (in module torch.backends.openmp)": [[2, "torch.backends.openmp.is_available"]], "is_available() (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.is_available"]], "is_built() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.is_built"]], "is_built() (in module torch.backends.mps)": [[2, "torch.backends.mps.is_built"]], "math_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.math_sdp_enabled"]], "max_size (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.max_size"]], "mem_efficient_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.mem_efficient_sdp_enabled"]], "preferred_blas_library() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.preferred_blas_library"]], "preferred_linalg_library() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.preferred_linalg_library"]], "sdp_kernel() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.sdp_kernel"]], "set_fastpath_enabled() (in module torch.backends.mha)": [[2, "torch.backends.mha.set_fastpath_enabled"]], "set_flags() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.set_flags"]], "size (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.size"]], "strategy (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.strategy"]], "torch.backends": [[2, "module-torch.backends"]], "torch.backends.cpu": [[2, "module-torch.backends.cpu"]], "torch.backends.cuda": [[2, "module-torch.backends.cuda"]], "torch.backends.cudnn": [[2, 
"module-torch.backends.cudnn"]], "torch.backends.cudnn.rnn": [[2, "module-torch.backends.cudnn.rnn"]], "torch.backends.mha": [[2, "module-torch.backends.mha"]], "torch.backends.mkl": [[2, "module-torch.backends.mkl"]], "torch.backends.mkldnn": [[2, "module-torch.backends.mkldnn"]], "torch.backends.mps": [[2, "module-torch.backends.mps"]], "torch.backends.nnpack": [[2, "module-torch.backends.nnpack"]], "torch.backends.openmp": [[2, "module-torch.backends.openmp"]], "torch.backends.opt_einsum": [[2, "module-torch.backends.opt_einsum"]], "torch.backends.quantized": [[2, "module-torch.backends.quantized"]], "torch.backends.xeon": [[2, "module-torch.backends.xeon"]], "torch.backends.xeon.run_cpu": [[2, "module-torch.backends.xeon.run_cpu"]], "torch.backends.xnnpack": [[2, "module-torch.backends.xnnpack"]], "verbose (class in torch.backends.mkl)": [[2, "torch.backends.mkl.verbose"]], "verbose (class in torch.backends.mkldnn)": [[2, "torch.backends.mkldnn.verbose"]], "version() (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.version"]], "callgrindstats (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.CallgrindStats"]], "compare (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Compare"]], "functioncounts (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.FunctionCounts"]], "measurement (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Measurement"]], "timer (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Timer"]], "adaptive_autorange() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.adaptive_autorange"]], "as_standardized() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.as_standardized"]], "blocked_autorange() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.blocked_autorange"]], "collect_callgrind() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.collect_callgrind"]], "colorize() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.colorize"]], "counts() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.counts"]], "delta() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.delta"]], "denoise() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.denoise"]], "extend_results() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.extend_results"]], "filter() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.filter"]], "highlight_warnings() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.highlight_warnings"]], "merge() (torch.utils.benchmark.measurement static method)": [[3, "torch.utils.benchmark.Measurement.merge"]], "print() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.print"]], "significant_figures (torch.utils.benchmark.measurement property)": [[3, "torch.utils.benchmark.Measurement.significant_figures"]], "stats() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.stats"]], "timeit() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.timeit"]], "torch.utils.benchmark": [[3, "module-torch.utils.benchmark"]], "torch.utils.benchmark.examples": [[3, "module-torch.utils.benchmark.examples"]], "torch.utils.benchmark.op_fuzzers": [[3, "module-torch.utils.benchmark.op_fuzzers"]], 
"torch.utils.benchmark.utils": [[3, "module-torch.utils.benchmark.utils"]], "torch.utils.benchmark.utils.valgrind_wrapper": [[3, "module-torch.utils.benchmark.utils.valgrind_wrapper"]], "transform() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.transform"]], "trim_significant_figures() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.trim_significant_figures"]], "torch.utils.bottleneck": [[4, "module-torch.utils.bottleneck"]], "checkpoint() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.checkpoint"]], "checkpoint_sequential() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.checkpoint_sequential"]], "set_checkpoint_debug_enabled() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.set_checkpoint_debug_enabled"]], "cond() (in module torch._higher_order_ops.cond)": [[12, "torch._higher_order_ops.cond.cond"]], "parallel_info() (in module torch.__config__)": [[13, "torch.__config__.parallel_info"]], "show() (in module torch.__config__)": [[13, "torch.__config__.show"]], "torch.__config__": [[13, "module-torch.__config__"]], "buildextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.BuildExtension"]], "cudaextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.CUDAExtension"]], "cppextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.CppExtension"]], "get_compiler_abi_compatibility_and_version() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.get_compiler_abi_compatibility_and_version"]], "include_paths() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.include_paths"]], "is_ninja_available() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.is_ninja_available"]], "load() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.load"]], "load_inline() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.load_inline"]], "verify_ninja_availability() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.verify_ninja_availability"]], "torch.cpu": [[16, "module-torch.cpu"]], "torch.cuda": [[17, "module-torch.cuda"]], "torch.cuda.comm": [[17, "module-torch.cuda.comm"]], "torch.cuda.error": [[17, "module-torch.cuda.error"]], "torch.cuda.graphs": [[17, "module-torch.cuda.graphs"]], "torch.cuda.jiterator": [[17, "module-torch.cuda.jiterator"]], "torch.cuda.memory": [[17, "module-torch.cuda.memory"]], "torch.cuda.nccl": [[17, "module-torch.cuda.nccl"]], "torch.cuda.nvtx": [[17, "module-torch.cuda.nvtx"]], "torch.cuda.profiler": [[17, "module-torch.cuda.profiler"]], "torch.cuda.random": [[17, "module-torch.cuda.random"]], "torch.cuda.sparse": [[17, "module-torch.cuda.sparse"]], "torch.cuda.streams": [[17, "module-torch.cuda.streams"]], "enable_cuda_sanitizer() (in module torch.cuda._sanitizer)": [[18, "torch.cuda._sanitizer.enable_cuda_sanitizer"]], "torch.cuda._sanitizer": [[18, "module-torch.cuda._sanitizer"]], "enable() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.enable"]], "get_filename() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_filename"]], "get_max_tuning_duration() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_max_tuning_duration"]], "get_max_tuning_iterations() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_max_tuning_iterations"]], "get_results() (in module torch.cuda.tunable)": [[19, 
"torch.cuda.tunable.get_results"]], "get_validators() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_validators"]], "is_enabled() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.is_enabled"]], "read_file() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.read_file"]], "set_filename() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_filename"]], "set_max_tuning_duration() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_max_tuning_duration"]], "set_max_tuning_iterations() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_max_tuning_iterations"]], "torch.cuda.tunable": [[19, "module-torch.cuda.tunable"]], "tuning_enable() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.tuning_enable"]], "tuning_is_enabled() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.tuning_is_enabled"]], "write_file() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.write_file"]], "write_file_on_exit() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.write_file_on_exit"]], "batchsampler (class in torch.utils.data)": [[23, "torch.utils.data.BatchSampler"]], "chaindataset (class in torch.utils.data)": [[23, "torch.utils.data.ChainDataset"]], "concatdataset (class in torch.utils.data)": [[23, "torch.utils.data.ConcatDataset"]], "dataloader (class in torch.utils.data)": [[23, "torch.utils.data.DataLoader"]], "dataset (class in torch.utils.data)": [[23, "torch.utils.data.Dataset"]], "distributedsampler (class in torch.utils.data.distributed)": [[23, "torch.utils.data.distributed.DistributedSampler"]], "iterabledataset (class in torch.utils.data)": [[23, "torch.utils.data.IterableDataset"]], "randomsampler (class in torch.utils.data)": [[23, "torch.utils.data.RandomSampler"]], "sampler (class in torch.utils.data)": [[23, "torch.utils.data.Sampler"]], "sequentialsampler (class in torch.utils.data)": [[23, "torch.utils.data.SequentialSampler"]], "stackdataset (class in torch.utils.data)": [[23, "torch.utils.data.StackDataset"]], "subset (class in torch.utils.data)": [[23, "torch.utils.data.Subset"]], "subsetrandomsampler (class in torch.utils.data)": [[23, "torch.utils.data.SubsetRandomSampler"]], "tensordataset (class in torch.utils.data)": [[23, "torch.utils.data.TensorDataset"]], "weightedrandomsampler (class in torch.utils.data)": [[23, "torch.utils.data.WeightedRandomSampler"]], "collate() (in module torch.utils.data._utils.collate)": [[23, "torch.utils.data._utils.collate.collate"]], "default_collate() (in module torch.utils.data)": [[23, "torch.utils.data.default_collate"]], "default_convert() (in module torch.utils.data)": [[23, "torch.utils.data.default_convert"]], "get_worker_info() (in module torch.utils.data)": [[23, "torch.utils.data.get_worker_info"]], "random_split() (in module torch.utils.data)": [[23, "torch.utils.data.random_split"]], "torch.utils.data": [[23, "module-torch.utils.data"]], "torch.utils.data.datapipes": [[23, "module-torch.utils.data.datapipes"]], "torch.utils.data.datapipes.dataframe": [[23, "module-torch.utils.data.datapipes.dataframe"]], "torch.utils.data.datapipes.iter": [[23, "module-torch.utils.data.datapipes.iter"]], "torch.utils.data.datapipes.map": [[23, "module-torch.utils.data.datapipes.map"]], "torch.utils.data.datapipes.utils": [[23, "module-torch.utils.data.datapipes.utils"]], "gradbucket (class in torch.distributed)": [[24, "torch.distributed.GradBucket"]], "powersgdstate (class in torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, 
"torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState"]], "__getstate__() (torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook.powersgdstate method)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState.__getstate__"]], "__setstate__() (torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook.powersgdstate method)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState.__setstate__"]], "allreduce_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.allreduce_hook"]], "batched_powersgd_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.batched_powerSGD_hook"]], "bf16_compress_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.bf16_compress_hook"]], "bf16_compress_wrapper() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.bf16_compress_wrapper"]], "buffer() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.buffer"]], "fp16_compress_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_hook"]], "fp16_compress_wrapper() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_wrapper"]], "gradients() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.gradients"]], "index() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.index"]], "is_last() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.is_last"]], "noop_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks.noop_hook"]], "parameters() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.parameters"]], "powersgd_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.powerSGD_hook"]], "set_buffer() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.set_buffer"]], "fill_uninitialized_memory (in module torch.utils.deterministic)": [[27, "torch.utils.deterministic.fill_uninitialized_memory"]], "torch.utils.deterministic": [[27, "module-torch.utils.deterministic"]], "backend (class in torch.distributed)": [[28, "torch.distributed.Backend"]], "devicemesh (class in torch.distributed.device_mesh)": [[28, "torch.distributed.device_mesh.DeviceMesh"]], "distbackenderror (class in torch.distributed)": [[28, "torch.distributed.DistBackendError"]], "disterror (class in torch.distributed)": [[28, "torch.distributed.DistError"]], "distnetworkerror (class in torch.distributed)": [[28, "torch.distributed.DistNetworkError"]], "diststoreerror (class in torch.distributed)": [[28, "torch.distributed.DistStoreError"]], "filestore (class in torch.distributed)": [[28, "torch.distributed.FileStore"]], "hashstore (class in torch.distributed)": [[28, "torch.distributed.HashStore"]], "p2pop (class in torch.distributed)": [[28, "torch.distributed.P2POp"]], "prefixstore (class in 
torch.distributed)": [[28, "torch.distributed.PrefixStore"]], "reduceop (class in torch.distributed)": [[28, "torch.distributed.ReduceOp"]], "store (class in torch.distributed)": [[28, "torch.distributed.Store"]], "tcpstore (class in torch.distributed)": [[28, "torch.distributed.TCPStore"]], "work (class in torch.distributed)": [[28, "torch.distributed.Work"]], "add() (in module torch.distributed.store)": [[28, "torch.distributed.Store.add"]], "all_gather() (in module torch.distributed)": [[28, "torch.distributed.all_gather"]], "all_gather_into_tensor() (in module torch.distributed)": [[28, "torch.distributed.all_gather_into_tensor"]], "all_gather_object() (in module torch.distributed)": [[28, "torch.distributed.all_gather_object"]], "all_reduce() (in module torch.distributed)": [[28, "torch.distributed.all_reduce"]], "all_to_all() (in module torch.distributed)": [[28, "torch.distributed.all_to_all"]], "all_to_all_single() (in module torch.distributed)": [[28, "torch.distributed.all_to_all_single"]], "barrier() (in module torch.distributed)": [[28, "torch.distributed.barrier"]], "batch_isend_irecv() (in module torch.distributed)": [[28, "torch.distributed.batch_isend_irecv"]], "breakpoint() (in module torch.distributed)": [[28, "torch.distributed.breakpoint"]], "broadcast() (in module torch.distributed)": [[28, "torch.distributed.broadcast"]], "broadcast_object_list() (in module torch.distributed)": [[28, "torch.distributed.broadcast_object_list"]], "compare_set() (in module torch.distributed.store)": [[28, "torch.distributed.Store.compare_set"]], "delete_key() (in module torch.distributed.store)": [[28, "torch.distributed.Store.delete_key"]], "gather() (in module torch.distributed)": [[28, "torch.distributed.gather"]], "gather_object() (in module torch.distributed)": [[28, "torch.distributed.gather_object"]], "get() (in module torch.distributed.store)": [[28, "torch.distributed.Store.get"]], "get_backend() (in module torch.distributed)": [[28, "torch.distributed.get_backend"]], "get_global_rank() (in module torch.distributed)": [[28, "torch.distributed.get_global_rank"]], "get_group_rank() (in module torch.distributed)": [[28, "torch.distributed.get_group_rank"]], "get_process_group_ranks() (in module torch.distributed)": [[28, "torch.distributed.get_process_group_ranks"]], "get_rank() (in module torch.distributed)": [[28, "torch.distributed.get_rank"]], "get_world_size() (in module torch.distributed)": [[28, "torch.distributed.get_world_size"]], "init_device_mesh() (in module torch.distributed.device_mesh)": [[28, "torch.distributed.device_mesh.init_device_mesh"]], "init_process_group() (in module torch.distributed)": [[28, "torch.distributed.init_process_group"]], "irecv() (in module torch.distributed)": [[28, "torch.distributed.irecv"]], "is_available() (in module torch.distributed)": [[28, "torch.distributed.is_available"]], "is_gloo_available() (in module torch.distributed)": [[28, "torch.distributed.is_gloo_available"]], "is_initialized() (in module torch.distributed)": [[28, "torch.distributed.is_initialized"]], "is_mpi_available() (in module torch.distributed)": [[28, "torch.distributed.is_mpi_available"]], "is_nccl_available() (in module torch.distributed)": [[28, "torch.distributed.is_nccl_available"]], "is_torchelastic_launched() (in module torch.distributed)": [[28, "torch.distributed.is_torchelastic_launched"]], "isend() (in module torch.distributed)": [[28, "torch.distributed.isend"]], "monitored_barrier() (in module torch.distributed)": [[28, 
"torch.distributed.monitored_barrier"]], "new_group() (in module torch.distributed)": [[28, "torch.distributed.new_group"]], "num_keys() (in module torch.distributed.store)": [[28, "torch.distributed.Store.num_keys"]], "recv() (in module torch.distributed)": [[28, "torch.distributed.recv"]], "recv_object_list() (in module torch.distributed)": [[28, "torch.distributed.recv_object_list"]], "reduce() (in module torch.distributed)": [[28, "torch.distributed.reduce"]], "reduce_op (class in torch.distributed)": [[28, "torch.distributed.reduce_op"]], "reduce_scatter() (in module torch.distributed)": [[28, "torch.distributed.reduce_scatter"]], "reduce_scatter_tensor() (in module torch.distributed)": [[28, "torch.distributed.reduce_scatter_tensor"]], "register_backend() (torch.distributed.backend class method)": [[28, "torch.distributed.Backend.register_backend"]], "scatter() (in module torch.distributed)": [[28, "torch.distributed.scatter"]], "scatter_object_list() (in module torch.distributed)": [[28, "torch.distributed.scatter_object_list"]], "send() (in module torch.distributed)": [[28, "torch.distributed.send"]], "send_object_list() (in module torch.distributed)": [[28, "torch.distributed.send_object_list"]], "set() (in module torch.distributed.store)": [[28, "torch.distributed.Store.set"]], "set_timeout() (in module torch.distributed.store)": [[28, "torch.distributed.Store.set_timeout"]], "torch.distributed": [[28, "module-torch.distributed"]], "torch.distributed.algorithms": [[28, "module-torch.distributed.algorithms"]], "torch.distributed.algorithms.ddp_comm_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.default_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.default_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.post_localsgd_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.post_localSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks"]], "torch.distributed.algorithms.join": [[28, "module-torch.distributed.algorithms.join"]], "torch.distributed.algorithms.model_averaging": [[28, "module-torch.distributed.algorithms.model_averaging"]], "torch.distributed.algorithms.model_averaging.averagers": [[28, "module-torch.distributed.algorithms.model_averaging.averagers"]], "torch.distributed.algorithms.model_averaging.hierarchical_model_averager": [[28, "module-torch.distributed.algorithms.model_averaging.hierarchical_model_averager"]], "torch.distributed.algorithms.model_averaging.utils": [[28, "module-torch.distributed.algorithms.model_averaging.utils"]], "torch.distributed.argparse_util": [[28, "module-torch.distributed.argparse_util"]], "torch.distributed.c10d_logger": [[28, 
"module-torch.distributed.c10d_logger"]], "torch.distributed.checkpoint.api": [[28, "module-torch.distributed.checkpoint.api"]], "torch.distributed.checkpoint.default_planner": [[28, "module-torch.distributed.checkpoint.default_planner"]], "torch.distributed.checkpoint.filesystem": [[28, "module-torch.distributed.checkpoint.filesystem"]], "torch.distributed.checkpoint.metadata": [[28, "module-torch.distributed.checkpoint.metadata"]], "torch.distributed.checkpoint.optimizer": [[28, "module-torch.distributed.checkpoint.optimizer"]], "torch.distributed.checkpoint.planner": [[28, "module-torch.distributed.checkpoint.planner"]], "torch.distributed.checkpoint.planner_helpers": [[28, "module-torch.distributed.checkpoint.planner_helpers"]], "torch.distributed.checkpoint.resharding": [[28, "module-torch.distributed.checkpoint.resharding"]], "torch.distributed.checkpoint.state_dict": [[28, "module-torch.distributed.checkpoint.state_dict"]], "torch.distributed.checkpoint.state_dict_loader": [[28, "module-torch.distributed.checkpoint.state_dict_loader"]], "torch.distributed.checkpoint.state_dict_saver": [[28, "module-torch.distributed.checkpoint.state_dict_saver"]], "torch.distributed.checkpoint.stateful": [[28, "module-torch.distributed.checkpoint.stateful"]], "torch.distributed.checkpoint.storage": [[28, "module-torch.distributed.checkpoint.storage"]], "torch.distributed.checkpoint.utils": [[28, "module-torch.distributed.checkpoint.utils"]], "torch.distributed.collective_utils": [[28, "module-torch.distributed.collective_utils"]], "torch.distributed.constants": [[28, "module-torch.distributed.constants"]], "torch.distributed.device_mesh": [[28, "module-torch.distributed.device_mesh"]], "torch.distributed.distributed_c10d": [[28, "module-torch.distributed.distributed_c10d"]], "torch.distributed.elastic": [[28, "module-torch.distributed.elastic"]], "torch.distributed.elastic.agent.server.api": [[28, "module-torch.distributed.elastic.agent.server.api"]], "torch.distributed.elastic.agent.server.local_elastic_agent": [[28, "module-torch.distributed.elastic.agent.server.local_elastic_agent"]], "torch.distributed.elastic.events.api": [[28, "module-torch.distributed.elastic.events.api"]], "torch.distributed.elastic.events.handlers": [[28, "module-torch.distributed.elastic.events.handlers"]], "torch.distributed.elastic.metrics.api": [[28, "module-torch.distributed.elastic.metrics.api"]], "torch.distributed.elastic.multiprocessing.api": [[28, "module-torch.distributed.elastic.multiprocessing.api"]], "torch.distributed.elastic.multiprocessing.errors.error_handler": [[28, "module-torch.distributed.elastic.multiprocessing.errors.error_handler"]], "torch.distributed.elastic.multiprocessing.errors.handlers": [[28, "module-torch.distributed.elastic.multiprocessing.errors.handlers"]], "torch.distributed.elastic.multiprocessing.redirects": [[28, "module-torch.distributed.elastic.multiprocessing.redirects"]], "torch.distributed.elastic.multiprocessing.tail_log": [[28, "module-torch.distributed.elastic.multiprocessing.tail_log"]], "torch.distributed.elastic.rendezvous.api": [[28, "module-torch.distributed.elastic.rendezvous.api"]], "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend": [[28, "module-torch.distributed.elastic.rendezvous.c10d_rendezvous_backend"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.dynamic_rendezvous"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous": [[28, 
"module-torch.distributed.elastic.rendezvous.etcd_rendezvous"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend": [[28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous_backend"]], "torch.distributed.elastic.rendezvous.etcd_server": [[28, "module-torch.distributed.elastic.rendezvous.etcd_server"]], "torch.distributed.elastic.rendezvous.etcd_store": [[28, "module-torch.distributed.elastic.rendezvous.etcd_store"]], "torch.distributed.elastic.rendezvous.static_tcp_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.static_tcp_rendezvous"]], "torch.distributed.elastic.rendezvous.utils": [[28, "module-torch.distributed.elastic.rendezvous.utils"]], "torch.distributed.elastic.timer.api": [[28, "module-torch.distributed.elastic.timer.api"]], "torch.distributed.elastic.timer.file_based_local_timer": [[28, "module-torch.distributed.elastic.timer.file_based_local_timer"]], "torch.distributed.elastic.timer.local_timer": [[28, "module-torch.distributed.elastic.timer.local_timer"]], "torch.distributed.elastic.utils": [[28, "module-torch.distributed.elastic.utils"]], "torch.distributed.elastic.utils.api": [[28, "module-torch.distributed.elastic.utils.api"]], "torch.distributed.elastic.utils.data": [[28, "module-torch.distributed.elastic.utils.data"]], "torch.distributed.elastic.utils.data.cycling_iterator": [[28, "module-torch.distributed.elastic.utils.data.cycling_iterator"]], "torch.distributed.elastic.utils.data.elastic_distributed_sampler": [[28, "module-torch.distributed.elastic.utils.data.elastic_distributed_sampler"]], "torch.distributed.elastic.utils.distributed": [[28, "module-torch.distributed.elastic.utils.distributed"]], "torch.distributed.elastic.utils.log_level": [[28, "module-torch.distributed.elastic.utils.log_level"]], "torch.distributed.elastic.utils.logging": [[28, "module-torch.distributed.elastic.utils.logging"]], "torch.distributed.elastic.utils.store": [[28, "module-torch.distributed.elastic.utils.store"]], "torch.distributed.fsdp.api": [[28, "module-torch.distributed.fsdp.api"]], "torch.distributed.fsdp.fully_sharded_data_parallel": [[28, "module-torch.distributed.fsdp.fully_sharded_data_parallel"]], "torch.distributed.fsdp.sharded_grad_scaler": [[28, "module-torch.distributed.fsdp.sharded_grad_scaler"]], "torch.distributed.fsdp.wrap": [[28, "module-torch.distributed.fsdp.wrap"]], "torch.distributed.launch": [[28, "module-torch.distributed.launch"]], "torch.distributed.launcher": [[28, "module-torch.distributed.launcher"]], "torch.distributed.launcher.api": [[28, "module-torch.distributed.launcher.api"]], "torch.distributed.logging_handlers": [[28, "module-torch.distributed.logging_handlers"]], "torch.distributed.nn": [[28, "module-torch.distributed.nn"]], "torch.distributed.nn.api": [[28, "module-torch.distributed.nn.api"]], "torch.distributed.nn.api.remote_module": [[28, "module-torch.distributed.nn.api.remote_module"]], "torch.distributed.nn.functional": [[28, "module-torch.distributed.nn.functional"]], "torch.distributed.nn.jit": [[28, "module-torch.distributed.nn.jit"]], "torch.distributed.nn.jit.instantiator": [[28, "module-torch.distributed.nn.jit.instantiator"]], "torch.distributed.nn.jit.templates": [[28, "module-torch.distributed.nn.jit.templates"]], "torch.distributed.nn.jit.templates.remote_module_template": [[28, "module-torch.distributed.nn.jit.templates.remote_module_template"]], "torch.distributed.optim.apply_optimizer_in_backward": [[28, "module-torch.distributed.optim.apply_optimizer_in_backward"]], 
"torch.distributed.optim.functional_adadelta": [[28, "module-torch.distributed.optim.functional_adadelta"]], "torch.distributed.optim.functional_adagrad": [[28, "module-torch.distributed.optim.functional_adagrad"]], "torch.distributed.optim.functional_adam": [[28, "module-torch.distributed.optim.functional_adam"]], "torch.distributed.optim.functional_adamax": [[28, "module-torch.distributed.optim.functional_adamax"]], "torch.distributed.optim.functional_adamw": [[28, "module-torch.distributed.optim.functional_adamw"]], "torch.distributed.optim.functional_rmsprop": [[28, "module-torch.distributed.optim.functional_rmsprop"]], "torch.distributed.optim.functional_rprop": [[28, "module-torch.distributed.optim.functional_rprop"]], "torch.distributed.optim.functional_sgd": [[28, "module-torch.distributed.optim.functional_sgd"]], "torch.distributed.optim.named_optimizer": [[28, "module-torch.distributed.optim.named_optimizer"]], "torch.distributed.optim.optimizer": [[28, "module-torch.distributed.optim.optimizer"]], "torch.distributed.optim.post_localsgd_optimizer": [[28, "module-torch.distributed.optim.post_localSGD_optimizer"]], "torch.distributed.optim.utils": [[28, "module-torch.distributed.optim.utils"]], "torch.distributed.optim.zero_redundancy_optimizer": [[28, "module-torch.distributed.optim.zero_redundancy_optimizer"]], "torch.distributed.remote_device": [[28, "module-torch.distributed.remote_device"]], "torch.distributed.rendezvous": [[28, "module-torch.distributed.rendezvous"]], "torch.distributed.rpc.api": [[28, "module-torch.distributed.rpc.api"]], "torch.distributed.rpc.backend_registry": [[28, "module-torch.distributed.rpc.backend_registry"]], "torch.distributed.rpc.constants": [[28, "module-torch.distributed.rpc.constants"]], "torch.distributed.rpc.functions": [[28, "module-torch.distributed.rpc.functions"]], "torch.distributed.rpc.internal": [[28, "module-torch.distributed.rpc.internal"]], "torch.distributed.rpc.options": [[28, "module-torch.distributed.rpc.options"]], "torch.distributed.rpc.rref_proxy": [[28, "module-torch.distributed.rpc.rref_proxy"]], "torch.distributed.rpc.server_process_global_profiler": [[28, "module-torch.distributed.rpc.server_process_global_profiler"]], "torch.distributed.tensor": [[28, "module-torch.distributed.tensor"]], "torch.distributed.tensor.parallel.api": [[28, "module-torch.distributed.tensor.parallel.api"]], "torch.distributed.tensor.parallel.ddp": [[28, "module-torch.distributed.tensor.parallel.ddp"]], "torch.distributed.tensor.parallel.fsdp": [[28, "module-torch.distributed.tensor.parallel.fsdp"]], "torch.distributed.tensor.parallel.input_reshard": [[28, "module-torch.distributed.tensor.parallel.input_reshard"]], "torch.distributed.tensor.parallel.loss": [[28, "module-torch.distributed.tensor.parallel.loss"]], "torch.distributed.tensor.parallel.style": [[28, "module-torch.distributed.tensor.parallel.style"]], "torch.distributed.utils": [[28, "module-torch.distributed.utils"]], "wait() (in module torch.distributed.store)": [[28, "torch.distributed.Store.wait"]], "join (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.Join"]], "joinhook (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.JoinHook"]], "joinable (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.Joinable"]], "join_device (torch.distributed.algorithms.joinable property)": [[29, "torch.distributed.algorithms.Joinable.join_device"]], "join_hook() (torch.distributed.algorithms.joinable method)": [[29, 
"torch.distributed.algorithms.Joinable.join_hook"]], "join_process_group (torch.distributed.algorithms.joinable property)": [[29, "torch.distributed.algorithms.Joinable.join_process_group"]], "main_hook() (torch.distributed.algorithms.joinhook method)": [[29, "torch.distributed.algorithms.JoinHook.main_hook"]], "notify_join_context() (torch.distributed.algorithms.join static method)": [[29, "torch.distributed.algorithms.Join.notify_join_context"]], "post_hook() (torch.distributed.algorithms.joinhook method)": [[29, "torch.distributed.algorithms.JoinHook.post_hook"]], "asyncstager (class in torch.distributed.checkpoint.staging)": [[30, "torch.distributed.checkpoint.staging.AsyncStager"]], "blockingasyncstager (class in torch.distributed.checkpoint.staging)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager"]], "broadcastingtorchsavereader (class in torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader"]], "defaultloadplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner"]], "defaultsaveplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner"]], "dynamicmetaloadplanner (class in torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner"]], "filesystemreader (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.FileSystemReader"]], "filesystemwriter (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.FileSystemWriter"]], "loadplan (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.LoadPlan"]], "loadplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.LoadPlanner"]], "readitem (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.ReadItem"]], "saveplan (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.SavePlan"]], "saveplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.SavePlanner"]], "statedictoptions (class in torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.StateDictOptions"]], "stateful (class in torch.distributed.checkpoint.stateful)": [[30, "torch.distributed.checkpoint.stateful.Stateful"]], "storagereader (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.StorageReader"]], "storagewriter (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.StorageWriter"]], "writeitem (class in torch.distributed.checkpoint.planner)": [[30, "torch.distributed.checkpoint.planner.WriteItem"]], "async_save() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.async_save"]], "checkpoint_id (torch.distributed.checkpoint.filesystemreader property)": [[30, "torch.distributed.checkpoint.FileSystemReader.checkpoint_id"]], "commit_tensor() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.commit_tensor"]], "create_global_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.create_global_plan"]], "create_global_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.create_global_plan"]], "create_local_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, 
"torch.distributed.checkpoint.LoadPlanner.create_local_plan"]], "create_local_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.create_local_plan"]], "dcp_to_torch_save() (in module torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.dcp_to_torch_save"]], "finish() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.finish"]], "finish_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.finish_plan"]], "finish_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.finish_plan"]], "get_model_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_model_state_dict"]], "get_optimizer_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_optimizer_state_dict"]], "get_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_state_dict"]], "load() (in module torch.distributed.checkpoint.state_dict_loader)": [[30, "torch.distributed.checkpoint.state_dict_loader.load"]], "load_bytes() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.load_bytes"]], "load_state_dict() (in module torch.distributed.checkpoint.state_dict_loader)": [[30, "torch.distributed.checkpoint.state_dict_loader.load_state_dict"]], "load_state_dict() (torch.distributed.checkpoint.stateful.stateful method)": [[30, "torch.distributed.checkpoint.stateful.Stateful.load_state_dict"]], "lookup_object() (torch.distributed.checkpoint.defaultsaveplanner method)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner.lookup_object"]], "lookup_tensor() (torch.distributed.checkpoint.defaultloadplanner method)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner.lookup_tensor"]], "prepare_global_plan() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.prepare_global_plan"]], "prepare_global_plan() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.prepare_global_plan"]], "prepare_global_plan() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.prepare_global_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.prepare_local_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.prepare_local_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.prepare_local_plan"]], "read_data() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.read_data"]], "read_data() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.read_data"]], "read_metadata() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.read_metadata"]], "read_metadata() 
(torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.read_metadata"]], "reset() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.reset"]], "reset() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.reset"]], "reset() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.reset"]], "resolve_bytes() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.resolve_bytes"]], "resolve_data() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.resolve_data"]], "resolve_tensor() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.resolve_tensor"]], "save() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.save"]], "save_state_dict() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.save_state_dict"]], "set_model_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_model_state_dict"]], "set_optimizer_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_optimizer_state_dict"]], "set_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_state_dict"]], "set_up_planner() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.set_up_planner"]], "set_up_planner() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.set_up_planner"]], "set_up_planner() (torch.distributed.checkpoint.format_utils.dynamicmetaloadplanner method)": [[30, "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner.set_up_planner"]], "set_up_storage_reader() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.set_up_storage_reader"]], "set_up_storage_reader() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.set_up_storage_reader"]], "set_up_storage_writer() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.set_up_storage_writer"]], "should_synchronize_after_execute (torch.distributed.checkpoint.staging.asyncstager property)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.should_synchronize_after_execute"]], "stage() (torch.distributed.checkpoint.filesystemwriter method)": [[30, "torch.distributed.checkpoint.FileSystemWriter.stage"]], "stage() (torch.distributed.checkpoint.staging.asyncstager method)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.stage"]], "stage() (torch.distributed.checkpoint.staging.blockingasyncstager method)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager.stage"]], "state_dict() (torch.distributed.checkpoint.stateful.stateful method)": [[30, "torch.distributed.checkpoint.stateful.Stateful.state_dict"]], "storage_meta() (torch.distributed.checkpoint.storagewriter method)": [[30, 
"torch.distributed.checkpoint.StorageWriter.storage_meta"]], "synchronize_staging() (torch.distributed.checkpoint.staging.asyncstager method)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.synchronize_staging"]], "synchronize_staging() (torch.distributed.checkpoint.staging.blockingasyncstager method)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager.synchronize_staging"]], "tensor_storage_size() (torch.distributed.checkpoint.planner.writeitem method)": [[30, "torch.distributed.checkpoint.planner.WriteItem.tensor_storage_size"]], "torch.distributed.checkpoint": [[30, "module-torch.distributed.checkpoint"]], "torch.distributed.checkpoint.format_utils": [[30, "module-torch.distributed.checkpoint.format_utils"]], "torch.distributed.checkpoint.logger": [[30, "module-torch.distributed.checkpoint.logger"]], "torch.distributed.checkpoint.logging_handlers": [[30, "module-torch.distributed.checkpoint.logging_handlers"]], "torch.distributed.checkpoint.staging": [[30, "module-torch.distributed.checkpoint.staging"]], "torch_save_to_dcp() (in module torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.torch_save_to_dcp"]], "transform_object() (torch.distributed.checkpoint.defaultsaveplanner method)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner.transform_object"]], "transform_tensor() (torch.distributed.checkpoint.defaultloadplanner method)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner.transform_tensor"]], "validate_checkpoint_id() (torch.distributed.checkpoint.storagereader class method)": [[30, "torch.distributed.checkpoint.StorageReader.validate_checkpoint_id"]], "validate_checkpoint_id() (torch.distributed.checkpoint.storagewriter class method)": [[30, "torch.distributed.checkpoint.StorageWriter.validate_checkpoint_id"]], "validate_checkpoint_id() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader class method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.validate_checkpoint_id"]], "write_data() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.write_data"]], "distributedoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.DistributedOptimizer"]], "postlocalsgdoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer"]], "zeroredundancyoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer"]], "add_param_group() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.add_param_group"]], "consolidate_state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.consolidate_state_dict"]], "join_device (torch.distributed.optim.zeroredundancyoptimizer property)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_device"]], "join_hook() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_hook"]], "join_process_group (torch.distributed.optim.zeroredundancyoptimizer property)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_process_group"]], "load_state_dict() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.load_state_dict"]], "load_state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, 
"torch.distributed.optim.ZeroRedundancyOptimizer.load_state_dict"]], "state_dict() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.state_dict"]], "state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.state_dict"]], "step() (torch.distributed.optim.distributedoptimizer method)": [[32, "torch.distributed.optim.DistributedOptimizer.step"]], "step() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.step"]], "step() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.step"]], "torch.distributed.optim": [[32, "module-torch.distributed.optim"]], "pipe (class in torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.Pipe"]], "pipelineschedulemulti (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleMulti"]], "pipelineschedulesingle (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleSingle"]], "pipelinestage (class in torch.distributed.pipelining.stage)": [[33, "torch.distributed.pipelining.stage.PipelineStage"]], "schedule1f1b (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.Schedule1F1B"]], "schedulegpipe (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleGPipe"]], "scheduleinterleaved1f1b (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleInterleaved1F1B"]], "scheduleloopedbfs (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleLoopedBFS"]], "splitpoint (class in torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.SplitPoint"]], "tensorchunkspec (class in torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.TensorChunkSpec"]], "build_stage() (in module torch.distributed.pipelining.stage)": [[33, "torch.distributed.pipelining.stage.build_stage"]], "merge_chunks() (in module torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.merge_chunks"]], "pipe_split() (in module torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.pipe_split"]], "pipeline() (in module torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.pipeline"]], "split_args_kwargs_into_chunks() (in module torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.split_args_kwargs_into_chunks"]], "step() (torch.distributed.pipelining.schedules.pipelineschedulemulti method)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleMulti.step"]], "step() (torch.distributed.pipelining.schedules.pipelineschedulesingle method)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleSingle.step"]], "torch.distributed.pipelining": [[33, "module-torch.distributed.pipelining"]], "torch.distributed.pipelining.microbatch": [[33, "module-torch.distributed.pipelining.microbatch"]], "torch.distributed.pipelining.schedules": [[33, "module-torch.distributed.pipelining.schedules"]], "torch.distributed.pipelining.stage": [[33, "module-torch.distributed.pipelining.stage"]], "colwiseparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.ColwiseParallel"]], 
"preparemoduleinput (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.PrepareModuleInput"]], "preparemoduleoutput (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.PrepareModuleOutput"]], "rowwiseparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.RowwiseParallel"]], "sequenceparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.SequenceParallel"]], "loss_parallel() (in module torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.loss_parallel"]], "parallelize_module() (in module torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.parallelize_module"]], "torch.distributed.tensor.parallel": [[34, "module-torch.distributed.tensor.parallel"]], "abstransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.AbsTransform"]], "affinetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.AffineTransform"]], "bernoulli (class in torch.distributions.bernoulli)": [[35, "torch.distributions.bernoulli.Bernoulli"]], "beta (class in torch.distributions.beta)": [[35, "torch.distributions.beta.Beta"]], "binomial (class in torch.distributions.binomial)": [[35, "torch.distributions.binomial.Binomial"]], "cattransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CatTransform"]], "categorical (class in torch.distributions.categorical)": [[35, "torch.distributions.categorical.Categorical"]], "cauchy (class in torch.distributions.cauchy)": [[35, "torch.distributions.cauchy.Cauchy"]], "chi2 (class in torch.distributions.chi2)": [[35, "torch.distributions.chi2.Chi2"]], "composetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ComposeTransform"]], "constraint (class in torch.distributions.constraints)": [[35, "torch.distributions.constraints.Constraint"]], "constraintregistry (class in torch.distributions.constraint_registry)": [[35, "torch.distributions.constraint_registry.ConstraintRegistry"]], "continuousbernoulli (class in torch.distributions.continuous_bernoulli)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli"]], "corrcholeskytransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CorrCholeskyTransform"]], "cumulativedistributiontransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CumulativeDistributionTransform"]], "dirichlet (class in torch.distributions.dirichlet)": [[35, "torch.distributions.dirichlet.Dirichlet"]], "distribution (class in torch.distributions.distribution)": [[35, "torch.distributions.distribution.Distribution"]], "exptransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ExpTransform"]], "exponential (class in torch.distributions.exponential)": [[35, "torch.distributions.exponential.Exponential"]], "exponentialfamily (class in torch.distributions.exp_family)": [[35, "torch.distributions.exp_family.ExponentialFamily"]], "fishersnedecor (class in torch.distributions.fishersnedecor)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor"]], "gamma (class in torch.distributions.gamma)": [[35, "torch.distributions.gamma.Gamma"]], "geometric (class in torch.distributions.geometric)": [[35, "torch.distributions.geometric.Geometric"]], "gumbel (class in torch.distributions.gumbel)": [[35, 
"torch.distributions.gumbel.Gumbel"]], "halfcauchy (class in torch.distributions.half_cauchy)": [[35, "torch.distributions.half_cauchy.HalfCauchy"]], "halfnormal (class in torch.distributions.half_normal)": [[35, "torch.distributions.half_normal.HalfNormal"]], "independent (class in torch.distributions.independent)": [[35, "torch.distributions.independent.Independent"]], "independenttransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.IndependentTransform"]], "inversegamma (class in torch.distributions.inverse_gamma)": [[35, "torch.distributions.inverse_gamma.InverseGamma"]], "kumaraswamy (class in torch.distributions.kumaraswamy)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy"]], "lkjcholesky (class in torch.distributions.lkj_cholesky)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky"]], "laplace (class in torch.distributions.laplace)": [[35, "torch.distributions.laplace.Laplace"]], "lognormal (class in torch.distributions.log_normal)": [[35, "torch.distributions.log_normal.LogNormal"]], "logitrelaxedbernoulli (class in torch.distributions.relaxed_bernoulli)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli"]], "lowrankmultivariatenormal (class in torch.distributions.lowrank_multivariate_normal)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal"]], "lowercholeskytransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.LowerCholeskyTransform"]], "mixturesamefamily (class in torch.distributions.mixture_same_family)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily"]], "multinomial (class in torch.distributions.multinomial)": [[35, "torch.distributions.multinomial.Multinomial"]], "multivariatenormal (class in torch.distributions.multivariate_normal)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal"]], "negativebinomial (class in torch.distributions.negative_binomial)": [[35, "torch.distributions.negative_binomial.NegativeBinomial"]], "normal (class in torch.distributions.normal)": [[35, "torch.distributions.normal.Normal"]], "onehotcategorical (class in torch.distributions.one_hot_categorical)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical"]], "pareto (class in torch.distributions.pareto)": [[35, "torch.distributions.pareto.Pareto"]], "poisson (class in torch.distributions.poisson)": [[35, "torch.distributions.poisson.Poisson"]], "positivedefinitetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.PositiveDefiniteTransform"]], "powertransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.PowerTransform"]], "relaxedbernoulli (class in torch.distributions.relaxed_bernoulli)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli"]], "relaxedonehotcategorical (class in torch.distributions.relaxed_categorical)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical"]], "reshapetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ReshapeTransform"]], "sigmoidtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SigmoidTransform"]], "softmaxtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SoftmaxTransform"]], "softplustransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SoftplusTransform"]], "stacktransform (class in torch.distributions.transforms)": 
[[35, "torch.distributions.transforms.StackTransform"]], "stickbreakingtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.StickBreakingTransform"]], "studentt (class in torch.distributions.studentt)": [[35, "torch.distributions.studentT.StudentT"]], "tanhtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.TanhTransform"]], "transform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.Transform"]], "transformeddistribution (class in torch.distributions.transformed_distribution)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution"]], "uniform (class in torch.distributions.uniform)": [[35, "torch.distributions.uniform.Uniform"]], "vonmises (class in torch.distributions.von_mises)": [[35, "torch.distributions.von_mises.VonMises"]], "weibull (class in torch.distributions.weibull)": [[35, "torch.distributions.weibull.Weibull"]], "wishart (class in torch.distributions.wishart)": [[35, "torch.distributions.wishart.Wishart"]], "arg_constraints (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.arg_constraints"]], "arg_constraints (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.arg_constraints"]], "arg_constraints (torch.distributions.binomial.binomial attribute)": [[35, "torch.distributions.binomial.Binomial.arg_constraints"]], "arg_constraints (torch.distributions.categorical.categorical attribute)": [[35, "torch.distributions.categorical.Categorical.arg_constraints"]], "arg_constraints (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.arg_constraints"]], "arg_constraints (torch.distributions.chi2.chi2 attribute)": [[35, "torch.distributions.chi2.Chi2.arg_constraints"]], "arg_constraints (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.arg_constraints"]], "arg_constraints (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.arg_constraints"]], "arg_constraints (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.arg_constraints"]], "arg_constraints (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.arg_constraints"]], "arg_constraints (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.arg_constraints"]], "arg_constraints (torch.distributions.geometric.geometric attribute)": [[35, "torch.distributions.geometric.Geometric.arg_constraints"]], "arg_constraints (torch.distributions.gumbel.gumbel attribute)": [[35, "torch.distributions.gumbel.Gumbel.arg_constraints"]], "arg_constraints (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.arg_constraints"]], "arg_constraints (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.arg_constraints"]], "arg_constraints (torch.distributions.independent.independent attribute)": [[35, "torch.distributions.independent.Independent.arg_constraints"]], "arg_constraints (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, 
"torch.distributions.inverse_gamma.InverseGamma.arg_constraints"]], "arg_constraints (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.arg_constraints"]], "arg_constraints (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.arg_constraints"]], "arg_constraints (torch.distributions.lkj_cholesky.lkjcholesky attribute)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.arg_constraints"]], "arg_constraints (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.arg_constraints"]], "arg_constraints (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.arg_constraints"]], "arg_constraints (torch.distributions.mixture_same_family.mixturesamefamily attribute)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.arg_constraints"]], "arg_constraints (torch.distributions.multinomial.multinomial attribute)": [[35, "torch.distributions.multinomial.Multinomial.arg_constraints"]], "arg_constraints (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.arg_constraints"]], "arg_constraints (torch.distributions.negative_binomial.negativebinomial attribute)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.arg_constraints"]], "arg_constraints (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.arg_constraints"]], "arg_constraints (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.arg_constraints"]], "arg_constraints (torch.distributions.pareto.pareto attribute)": [[35, "torch.distributions.pareto.Pareto.arg_constraints"]], "arg_constraints (torch.distributions.poisson.poisson attribute)": [[35, "torch.distributions.poisson.Poisson.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.arg_constraints"]], "arg_constraints (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.arg_constraints"]], "arg_constraints (torch.distributions.transformed_distribution.transformeddistribution attribute)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.arg_constraints"]], "arg_constraints (torch.distributions.uniform.uniform attribute)": [[35, "torch.distributions.uniform.Uniform.arg_constraints"]], "arg_constraints (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.arg_constraints"]], "arg_constraints (torch.distributions.weibull.weibull attribute)": [[35, "torch.distributions.weibull.Weibull.arg_constraints"]], "arg_constraints (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.arg_constraints"]], "batch_shape (torch.distributions.distribution.distribution property)": [[35, 
"torch.distributions.distribution.Distribution.batch_shape"]], "cat (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.cat"]], "cdf() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.cdf"]], "cdf() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.cdf"]], "cdf() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.cdf"]], "cdf() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.cdf"]], "cdf() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.cdf"]], "cdf() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.cdf"]], "cdf() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.cdf"]], "cdf() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.cdf"]], "cdf() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.cdf"]], "cdf() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.cdf"]], "cdf() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.cdf"]], "cdf() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.cdf"]], "check() (torch.distributions.constraints.constraint method)": [[35, "torch.distributions.constraints.Constraint.check"]], "component_distribution (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.component_distribution"]], "concentration (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.concentration"]], "concentration0 (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.concentration0"]], "concentration1 (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.concentration1"]], "covariance_matrix (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.covariance_matrix"]], "covariance_matrix (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.covariance_matrix"]], "covariance_matrix (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.covariance_matrix"]], "dependent_property (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.dependent_property"]], "df (torch.distributions.chi2.chi2 property)": [[35, "torch.distributions.chi2.Chi2.df"]], "entropy() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.entropy"]], "entropy() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.entropy"]], "entropy() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.entropy"]], "entropy() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.entropy"]], "entropy() 
(torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.entropy"]], "entropy() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.entropy"]], "entropy() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.entropy"]], "entropy() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.entropy"]], "entropy() (torch.distributions.exp_family.exponentialfamily method)": [[35, "torch.distributions.exp_family.ExponentialFamily.entropy"]], "entropy() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.entropy"]], "entropy() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.entropy"]], "entropy() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.entropy"]], "entropy() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.entropy"]], "entropy() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.entropy"]], "entropy() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.entropy"]], "entropy() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.entropy"]], "entropy() (torch.distributions.inverse_gamma.inversegamma method)": [[35, "torch.distributions.inverse_gamma.InverseGamma.entropy"]], "entropy() (torch.distributions.kumaraswamy.kumaraswamy method)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.entropy"]], "entropy() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.entropy"]], "entropy() (torch.distributions.log_normal.lognormal method)": [[35, "torch.distributions.log_normal.LogNormal.entropy"]], "entropy() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.entropy"]], "entropy() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.entropy"]], "entropy() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.entropy"]], "entropy() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.entropy"]], "entropy() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.entropy"]], "entropy() (torch.distributions.pareto.pareto method)": [[35, "torch.distributions.pareto.Pareto.entropy"]], "entropy() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.entropy"]], "entropy() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.entropy"]], "entropy() (torch.distributions.weibull.weibull method)": [[35, "torch.distributions.weibull.Weibull.entropy"]], "entropy() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.entropy"]], "enumerate_support() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.enumerate_support"]], "enumerate_support() (torch.distributions.binomial.binomial method)": [[35, 
"torch.distributions.binomial.Binomial.enumerate_support"]], "enumerate_support() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.enumerate_support"]], "enumerate_support() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.enumerate_support"]], "enumerate_support() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.enumerate_support"]], "enumerate_support() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.enumerate_support"]], "event_shape (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.event_shape"]], "expand() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.expand"]], "expand() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.expand"]], "expand() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.expand"]], "expand() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.expand"]], "expand() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.expand"]], "expand() (torch.distributions.chi2.chi2 method)": [[35, "torch.distributions.chi2.Chi2.expand"]], "expand() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.expand"]], "expand() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.expand"]], "expand() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.expand"]], "expand() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.expand"]], "expand() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.expand"]], "expand() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.expand"]], "expand() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.expand"]], "expand() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.expand"]], "expand() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.expand"]], "expand() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.expand"]], "expand() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.expand"]], "expand() (torch.distributions.inverse_gamma.inversegamma method)": [[35, "torch.distributions.inverse_gamma.InverseGamma.expand"]], "expand() (torch.distributions.kumaraswamy.kumaraswamy method)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.expand"]], "expand() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.expand"]], "expand() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.expand"]], "expand() (torch.distributions.log_normal.lognormal method)": [[35, "torch.distributions.log_normal.LogNormal.expand"]], "expand() 
(torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.expand"]], "expand() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.expand"]], "expand() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.expand"]], "expand() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.expand"]], "expand() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.expand"]], "expand() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.expand"]], "expand() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.expand"]], "expand() (torch.distributions.pareto.pareto method)": [[35, "torch.distributions.pareto.Pareto.expand"]], "expand() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.expand"]], "expand() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.expand"]], "expand() (torch.distributions.relaxed_bernoulli.relaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.expand"]], "expand() (torch.distributions.relaxed_categorical.relaxedonehotcategorical method)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.expand"]], "expand() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.expand"]], "expand() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.expand"]], "expand() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.expand"]], "expand() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.expand"]], "expand() (torch.distributions.weibull.weibull method)": [[35, "torch.distributions.weibull.Weibull.expand"]], "expand() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.expand"]], "forward_shape() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.forward_shape"]], "greater_than (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.greater_than"]], "greater_than_eq (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.greater_than_eq"]], "half_open_interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.half_open_interval"]], "has_enumerate_support (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.has_enumerate_support"]], "has_enumerate_support (torch.distributions.binomial.binomial attribute)": [[35, "torch.distributions.binomial.Binomial.has_enumerate_support"]], "has_enumerate_support (torch.distributions.categorical.categorical attribute)": [[35, "torch.distributions.categorical.Categorical.has_enumerate_support"]], "has_enumerate_support (torch.distributions.independent.independent property)": [[35, 
"torch.distributions.independent.Independent.has_enumerate_support"]], "has_enumerate_support (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.has_enumerate_support"]], "has_rsample (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.has_rsample"]], "has_rsample (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.has_rsample"]], "has_rsample (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.has_rsample"]], "has_rsample (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.has_rsample"]], "has_rsample (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.has_rsample"]], "has_rsample (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.has_rsample"]], "has_rsample (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.has_rsample"]], "has_rsample (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.has_rsample"]], "has_rsample (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.has_rsample"]], "has_rsample (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.has_rsample"]], "has_rsample (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.has_rsample"]], "has_rsample (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.has_rsample"]], "has_rsample (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.has_rsample"]], "has_rsample (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.has_rsample"]], "has_rsample (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.has_rsample"]], "has_rsample (torch.distributions.mixture_same_family.mixturesamefamily attribute)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.has_rsample"]], "has_rsample (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.has_rsample"]], "has_rsample (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.has_rsample"]], "has_rsample (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.has_rsample"]], "has_rsample (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.has_rsample"]], "has_rsample (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.has_rsample"]], "has_rsample (torch.distributions.transformed_distribution.transformeddistribution property)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.has_rsample"]], "has_rsample (torch.distributions.uniform.uniform attribute)": [[35, 
"torch.distributions.uniform.Uniform.has_rsample"]], "has_rsample (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.has_rsample"]], "has_rsample (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.has_rsample"]], "icdf() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.icdf"]], "icdf() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.icdf"]], "icdf() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.icdf"]], "icdf() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.icdf"]], "icdf() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.icdf"]], "icdf() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.icdf"]], "icdf() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.icdf"]], "icdf() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.icdf"]], "icdf() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.icdf"]], "icdf() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.icdf"]], "independent (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.independent"]], "integer_interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.integer_interval"]], "interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.interval"]], "inv (torch.distributions.transforms.transform property)": [[35, "torch.distributions.transforms.Transform.inv"]], "inverse_shape() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.inverse_shape"]], "kl_divergence() (in module torch.distributions.kl)": [[35, "torch.distributions.kl.kl_divergence"]], "less_than (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.less_than"]], "loc (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.loc"]], "log_abs_det_jacobian() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.log_abs_det_jacobian"]], "log_prob() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.log_prob"]], "log_prob() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.log_prob"]], "log_prob() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.log_prob"]], "log_prob() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.log_prob"]], "log_prob() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.log_prob"]], "log_prob() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.log_prob"]], "log_prob() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.log_prob"]], "log_prob() (torch.distributions.distribution.distribution 
method)": [[35, "torch.distributions.distribution.Distribution.log_prob"]], "log_prob() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.log_prob"]], "log_prob() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.log_prob"]], "log_prob() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.log_prob"]], "log_prob() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.log_prob"]], "log_prob() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.log_prob"]], "log_prob() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.log_prob"]], "log_prob() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.log_prob"]], "log_prob() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.log_prob"]], "log_prob() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.log_prob"]], "log_prob() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.log_prob"]], "log_prob() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.log_prob"]], "log_prob() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.log_prob"]], "log_prob() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.log_prob"]], "log_prob() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.log_prob"]], "log_prob() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.log_prob"]], "log_prob() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.log_prob"]], "log_prob() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.log_prob"]], "log_prob() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.log_prob"]], "log_prob() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.log_prob"]], "log_prob() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.log_prob"]], "log_prob() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.log_prob"]], "log_prob() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.log_prob"]], "log_prob() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.log_prob"]], "log_prob() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.log_prob"]], "logits (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.logits"]], "logits (torch.distributions.binomial.binomial property)": [[35, 
"torch.distributions.binomial.Binomial.logits"]], "logits (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.logits"]], "logits (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.logits"]], "logits (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.logits"]], "logits (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.logits"]], "logits (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.logits"]], "logits (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.logits"]], "logits (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.logits"]], "logits (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.logits"]], "logits (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.logits"]], "mean (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.mean"]], "mean (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.mean"]], "mean (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.mean"]], "mean (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.mean"]], "mean (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.mean"]], "mean (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.mean"]], "mean (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.mean"]], "mean (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.mean"]], "mean (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.mean"]], "mean (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.mean"]], "mean (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.mean"]], "mean (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.mean"]], "mean (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.mean"]], "mean (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.mean"]], "mean (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.mean"]], "mean (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.mean"]], "mean (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.mean"]], "mean (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.mean"]], "mean (torch.distributions.laplace.laplace 
property)": [[35, "torch.distributions.laplace.Laplace.mean"]], "mean (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.mean"]], "mean (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mean"]], "mean (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.mean"]], "mean (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.mean"]], "mean (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.mean"]], "mean (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.mean"]], "mean (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.mean"]], "mean (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.mean"]], "mean (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.mean"]], "mean (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.mean"]], "mean (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.mean"]], "mean (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.mean"]], "mean (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.mean"]], "mean (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.mean"]], "mean (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.mean"]], "mixture_distribution (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.mixture_distribution"]], "mode (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.mode"]], "mode (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.mode"]], "mode (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.mode"]], "mode (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.mode"]], "mode (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.mode"]], "mode (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.mode"]], "mode (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.mode"]], "mode (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.mode"]], "mode (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.mode"]], "mode (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.mode"]], "mode (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.mode"]], "mode (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.mode"]], "mode (torch.distributions.half_cauchy.halfcauchy property)": 
[[35, "torch.distributions.half_cauchy.HalfCauchy.mode"]], "mode (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.mode"]], "mode (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.mode"]], "mode (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.mode"]], "mode (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.mode"]], "mode (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.mode"]], "mode (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.mode"]], "mode (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mode"]], "mode (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.mode"]], "mode (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.mode"]], "mode (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.mode"]], "mode (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.mode"]], "mode (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.mode"]], "mode (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.mode"]], "mode (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.mode"]], "mode (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.mode"]], "mode (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.mode"]], "mode (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.mode"]], "mode (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.mode"]], "multinomial (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.multinomial"]], "param_shape (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.param_shape"]], "param_shape (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.param_shape"]], "param_shape (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.param_shape"]], "param_shape (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.param_shape"]], "param_shape (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.param_shape"]], "param_shape (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.param_shape"]], "param_shape (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.param_shape"]], "param_shape (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, 
"torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.param_shape"]], "perplexity() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.perplexity"]], "precision_matrix (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.precision_matrix"]], "precision_matrix (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.precision_matrix"]], "precision_matrix (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.precision_matrix"]], "probs (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.probs"]], "probs (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.probs"]], "probs (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.probs"]], "probs (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.probs"]], "probs (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.probs"]], "probs (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.probs"]], "probs (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.probs"]], "probs (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.probs"]], "probs (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.probs"]], "probs (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.probs"]], "probs (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.probs"]], "rate (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.rate"]], "register() (torch.distributions.constraint_registry.constraintregistry method)": [[35, "torch.distributions.constraint_registry.ConstraintRegistry.register"]], "register_kl() (in module torch.distributions.kl)": [[35, "torch.distributions.kl.register_kl"]], "rsample() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.rsample"]], "rsample() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.rsample"]], "rsample() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.rsample"]], "rsample() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.rsample"]], "rsample() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.rsample"]], "rsample() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.rsample"]], "rsample() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.rsample"]], "rsample() 
(torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.rsample"]], "rsample() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.rsample"]], "rsample() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.rsample"]], "rsample() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.rsample"]], "rsample() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.rsample"]], "rsample() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.rsample"]], "rsample() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.rsample"]], "rsample() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.rsample"]], "rsample() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.rsample"]], "rsample() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.rsample"]], "rsample() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.rsample"]], "sample() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.sample"]], "sample() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.sample"]], "sample() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.sample"]], "sample() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.sample"]], "sample() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.sample"]], "sample() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.sample"]], "sample() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.sample"]], "sample() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.sample"]], "sample() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.sample"]], "sample() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.sample"]], "sample() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.sample"]], "sample() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.sample"]], "sample() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.sample"]], "sample() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.sample"]], "sample() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.sample"]], "sample() (torch.distributions.von_mises.vonmises method)": [[35, 
"torch.distributions.von_mises.VonMises.sample"]], "sample_n() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.sample_n"]], "scale (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.scale"]], "scale (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.scale"]], "scale (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.scale"]], "scale_tril (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.scale_tril"]], "scale_tril (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.scale_tril"]], "scale_tril (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.scale_tril"]], "set_default_validate_args() (torch.distributions.distribution.distribution static method)": [[35, "torch.distributions.distribution.Distribution.set_default_validate_args"]], "sign (torch.distributions.transforms.transform property)": [[35, "torch.distributions.transforms.Transform.sign"]], "stack (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.stack"]], "stddev (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.stddev"]], "stddev (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.stddev"]], "stddev (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.stddev"]], "stddev (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.stddev"]], "stddev (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.stddev"]], "stddev (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.stddev"]], "stddev (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.stddev"]], "support (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.support"]], "support (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.support"]], "support (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.support"]], "support (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.support"]], "support (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.support"]], "support (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.support"]], "support (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.support"]], "support (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.support"]], "support (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.support"]], "support (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.support"]], 
"support (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.support"]], "support (torch.distributions.geometric.geometric attribute)": [[35, "torch.distributions.geometric.Geometric.support"]], "support (torch.distributions.gumbel.gumbel attribute)": [[35, "torch.distributions.gumbel.Gumbel.support"]], "support (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.support"]], "support (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.support"]], "support (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.support"]], "support (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.support"]], "support (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.support"]], "support (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.support"]], "support (torch.distributions.lkj_cholesky.lkjcholesky attribute)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.support"]], "support (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.support"]], "support (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.support"]], "support (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.support"]], "support (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.support"]], "support (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.support"]], "support (torch.distributions.negative_binomial.negativebinomial attribute)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.support"]], "support (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.support"]], "support (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.support"]], "support (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.support"]], "support (torch.distributions.poisson.poisson attribute)": [[35, "torch.distributions.poisson.Poisson.support"]], "support (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.support"]], "support (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.support"]], "support (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.support"]], "support (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.support"]], "support (torch.distributions.transformed_distribution.transformeddistribution property)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.support"]], "support (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.support"]], "support 
(torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.support"]], "support (torch.distributions.weibull.weibull attribute)": [[35, "torch.distributions.weibull.Weibull.support"]], "support (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.support"]], "temperature (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.temperature"]], "temperature (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.temperature"]], "torch.distributions": [[35, "module-torch.distributions"]], "torch.distributions.bernoulli": [[35, "module-torch.distributions.bernoulli"]], "torch.distributions.beta": [[35, "module-torch.distributions.beta"]], "torch.distributions.binomial": [[35, "module-torch.distributions.binomial"]], "torch.distributions.categorical": [[35, "module-torch.distributions.categorical"]], "torch.distributions.cauchy": [[35, "module-torch.distributions.cauchy"]], "torch.distributions.chi2": [[35, "module-torch.distributions.chi2"]], "torch.distributions.constraint_registry": [[35, "module-torch.distributions.constraint_registry"]], "torch.distributions.constraints": [[35, "module-torch.distributions.constraints"]], "torch.distributions.continuous_bernoulli": [[35, "module-torch.distributions.continuous_bernoulli"]], "torch.distributions.dirichlet": [[35, "module-torch.distributions.dirichlet"]], "torch.distributions.distribution": [[35, "module-torch.distributions.distribution"]], "torch.distributions.exp_family": [[35, "module-torch.distributions.exp_family"]], "torch.distributions.exponential": [[35, "module-torch.distributions.exponential"]], "torch.distributions.fishersnedecor": [[35, "module-torch.distributions.fishersnedecor"]], "torch.distributions.gamma": [[35, "module-torch.distributions.gamma"]], "torch.distributions.geometric": [[35, "module-torch.distributions.geometric"]], "torch.distributions.gumbel": [[35, "module-torch.distributions.gumbel"]], "torch.distributions.half_cauchy": [[35, "module-torch.distributions.half_cauchy"]], "torch.distributions.half_normal": [[35, "module-torch.distributions.half_normal"]], "torch.distributions.independent": [[35, "module-torch.distributions.independent"]], "torch.distributions.inverse_gamma": [[35, "module-torch.distributions.inverse_gamma"]], "torch.distributions.kl": [[35, "module-torch.distributions.kl"]], "torch.distributions.kumaraswamy": [[35, "module-torch.distributions.kumaraswamy"]], "torch.distributions.laplace": [[35, "module-torch.distributions.laplace"]], "torch.distributions.lkj_cholesky": [[35, "module-torch.distributions.lkj_cholesky"]], "torch.distributions.log_normal": [[35, "module-torch.distributions.log_normal"]], "torch.distributions.logistic_normal": [[35, "module-torch.distributions.logistic_normal"]], "torch.distributions.lowrank_multivariate_normal": [[35, "module-torch.distributions.lowrank_multivariate_normal"]], "torch.distributions.mixture_same_family": [[35, "module-torch.distributions.mixture_same_family"]], "torch.distributions.multinomial": [[35, "module-torch.distributions.multinomial"]], "torch.distributions.multivariate_normal": [[35, "module-torch.distributions.multivariate_normal"]], "torch.distributions.negative_binomial": [[35, "module-torch.distributions.negative_binomial"]], "torch.distributions.normal": [[35, 
"module-torch.distributions.normal"]], "torch.distributions.one_hot_categorical": [[35, "module-torch.distributions.one_hot_categorical"]], "torch.distributions.pareto": [[35, "module-torch.distributions.pareto"]], "torch.distributions.poisson": [[35, "module-torch.distributions.poisson"]], "torch.distributions.relaxed_bernoulli": [[35, "module-torch.distributions.relaxed_bernoulli"]], "torch.distributions.relaxed_categorical": [[35, "module-torch.distributions.relaxed_categorical"]], "torch.distributions.studentt": [[35, "module-torch.distributions.studentT"]], "torch.distributions.transformed_distribution": [[35, "module-torch.distributions.transformed_distribution"]], "torch.distributions.transforms": [[35, "module-torch.distributions.transforms"]], "torch.distributions.uniform": [[35, "module-torch.distributions.uniform"]], "torch.distributions.utils": [[35, "module-torch.distributions.utils"]], "torch.distributions.von_mises": [[35, "module-torch.distributions.von_mises"]], "torch.distributions.weibull": [[35, "module-torch.distributions.weibull"]], "torch.distributions.wishart": [[35, "module-torch.distributions.wishart"]], "total_count (torch.distributions.multinomial.multinomial attribute)": [[35, "torch.distributions.multinomial.Multinomial.total_count"]], "variance (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.variance"]], "variance (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.variance"]], "variance (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.variance"]], "variance (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.variance"]], "variance (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.variance"]], "variance (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.variance"]], "variance (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.variance"]], "variance (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.variance"]], "variance (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.variance"]], "variance (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.variance"]], "variance (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.variance"]], "variance (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.variance"]], "variance (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.variance"]], "variance (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.variance"]], "variance (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.variance"]], "variance (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.variance"]], "variance (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.variance"]], "variance (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, 
"torch.distributions.kumaraswamy.Kumaraswamy.variance"]], "variance (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.variance"]], "variance (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.variance"]], "variance (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.variance"]], "variance (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.variance"]], "variance (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.variance"]], "variance (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.variance"]], "variance (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.variance"]], "variance (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.variance"]], "variance (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.variance"]], "variance (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.variance"]], "variance (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.variance"]], "variance (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.variance"]], "variance (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.variance"]], "variance (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.variance"]], "variance (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.variance"]], "variance (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.variance"]], "from_dlpack() (in module torch.utils.dlpack)": [[36, "torch.utils.dlpack.from_dlpack"]], "to_dlpack() (in module torch.utils.dlpack)": [[36, "torch.utils.dlpack.to_dlpack"]], "elasticagent (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent"]], "healthcheckserver (class in torch.distributed.elastic.agent.server.health_check_server)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer"]], "localelasticagent (class in torch.distributed.elastic.agent.server.local_elastic_agent)": [[37, "torch.distributed.elastic.agent.server.local_elastic_agent.LocalElasticAgent"]], "runresult (class in torch.distributed.elastic.agent.server.api)": [[37, "torch.distributed.elastic.agent.server.api.RunResult"]], "simpleelasticagent (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent"]], "worker (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.Worker"]], "workergroup (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerGroup"]], "workerspec (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerSpec"]], "workerstate (class in torch.distributed.elastic.agent.server)": [[37, 
"torch.distributed.elastic.agent.server.WorkerState"]], "_assign_worker_ranks() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._assign_worker_ranks"]], "_exit_barrier() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._exit_barrier"]], "_initialize_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._initialize_workers"]], "_monitor_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._monitor_workers"]], "_rendezvous() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._rendezvous"]], "_restart_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._restart_workers"]], "_shutdown() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._shutdown"]], "_start_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._start_workers"]], "_stop_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._stop_workers"]], "create_healthcheck_server() (in module torch.distributed.elastic.agent.server.health_check_server)": [[37, "torch.distributed.elastic.agent.server.health_check_server.create_healthcheck_server"]], "get_entrypoint_name() (torch.distributed.elastic.agent.server.workerspec method)": [[37, "torch.distributed.elastic.agent.server.WorkerSpec.get_entrypoint_name"]], "get_worker_group() (torch.distributed.elastic.agent.server.elasticagent method)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent.get_worker_group"]], "is_running() (torch.distributed.elastic.agent.server.workerstate static method)": [[37, "torch.distributed.elastic.agent.server.WorkerState.is_running"]], "run() (torch.distributed.elastic.agent.server.elasticagent method)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent.run"]], "start() (torch.distributed.elastic.agent.server.health_check_server.healthcheckserver method)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer.start"]], "stop() (torch.distributed.elastic.agent.server.health_check_server.healthcheckserver method)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer.stop"]], "torch.distributed.elastic.agent": [[37, "module-torch.distributed.elastic.agent"]], "torch.distributed.elastic.agent.server": [[37, "module-torch.distributed.elastic.agent.server"]], "torch.distributed.elastic.agent.server.health_check_server": [[37, "module-torch.distributed.elastic.agent.server.health_check_server"]], "torch.distributed.elastic.control_plane": [[38, "module-torch.distributed.elastic.control_plane"]], "worker_main() (in module torch.distributed.elastic.control_plane)": [[38, "torch.distributed.elastic.control_plane.worker_main"]], "childfailederror (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ChildFailedError"]], "errorhandler (class in 
torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ErrorHandler"]], "processfailure (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ProcessFailure"]], "record() (in module torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.record"]], "torch.distributed.elastic.multiprocessing.errors": [[40, "module-torch.distributed.elastic.multiprocessing.errors"]], "event (class in torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.Event"]], "eventmetadatavalue (in module torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.EventMetadataValue"]], "eventsource (class in torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.EventSource"]], "get_logging_handler() (in module torch.distributed.elastic.events)": [[41, "torch.distributed.elastic.events.get_logging_handler"]], "record() (in module torch.distributed.elastic.events)": [[41, "torch.distributed.elastic.events.record"]], "torch.distributed.elastic.events": [[41, "module-torch.distributed.elastic.events"]], "consolemetrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.ConsoleMetricHandler"]], "metrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.MetricHandler"]], "nullmetrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.NullMetricHandler"]], "configure() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.configure"]], "prof() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.prof"]], "put_metric() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.put_metric"]], "torch.distributed.elastic.metrics": [[44, "module-torch.distributed.elastic.metrics"]], "defaultlogsspecs (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs"]], "logsdest (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsDest"]], "logsspecs (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsSpecs"]], "multiprocesscontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.MultiprocessContext"]], "pcontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.PContext"]], "runprocsresult (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.RunProcsResult"]], "subprocesscontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.SubprocessContext"]], "reify() (torch.distributed.elastic.multiprocessing.api.defaultlogsspecs method)": [[45, "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs.reify"]], "reify() (torch.distributed.elastic.multiprocessing.api.logsspecs method)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsSpecs.reify"]], "start_processes() (in module torch.distributed.elastic.multiprocessing)": [[45, "torch.distributed.elastic.multiprocessing.start_processes"]], 
"torch.distributed.elastic.multiprocessing": [[45, "module-torch.distributed.elastic.multiprocessing"]], "c10drendezvousbackend (class in torch.distributed.elastic.rendezvous.c10d_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend"]], "dynamicrendezvoushandler (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler"]], "etcdrendezvousbackend (class in torch.distributed.elastic.rendezvous.etcd_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend"]], "etcdrendezvoushandler (class in torch.distributed.elastic.rendezvous.etcd_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous.EtcdRendezvousHandler"]], "etcdserver (class in torch.distributed.elastic.rendezvous.etcd_server)": [[47, "torch.distributed.elastic.rendezvous.etcd_server.EtcdServer"]], "etcdstore (class in torch.distributed.elastic.rendezvous.etcd_store)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore"]], "rendezvousbackend (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend"]], "rendezvousclosederror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousClosedError"]], "rendezvousconnectionerror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousConnectionError"]], "rendezvouserror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousError"]], "rendezvousgracefulexiterror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousGracefulExitError"]], "rendezvoushandler (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler"]], "rendezvoushandlerregistry (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandlerRegistry"]], "rendezvousinfo (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousInfo"]], "rendezvousparameters (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters"]], "rendezvousstateerror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStateError"]], "rendezvousstoreinfo (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo"]], "rendezvoustimeout (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout"]], "rendezvoustimeouterror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousTimeoutError"]], "add() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.add"]], "build() (torch.distributed.elastic.rendezvous.api.rendezvousstoreinfo static method)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo.build"]], "check() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, 
"torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.check"]], "close (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.close"]], "create_backend() (in module torch.distributed.elastic.rendezvous.c10d_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.create_backend"]], "create_backend() (in module torch.distributed.elastic.rendezvous.etcd_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.create_backend"]], "create_handler() (in module torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.create_handler"]], "from_backend() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.dynamicrendezvoushandler class method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler.from_backend"]], "get() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get"]], "get() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.get"]], "get_as_bool() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get_as_bool"]], "get_as_int() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get_as_int"]], "get_backend() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.get_backend"]], "get_run_id() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.get_run_id"]], "get_state() (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.get_state"]], "get_state() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.get_state"]], "get_state() (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.get_state"]], "heartbeat (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.heartbeat"]], "is_closed() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.is_closed"]], "join (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.join"]], "last_call (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.last_call"]], "name (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.name"]], "name 
(torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.name"]], "name (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.name"]], "next_rendezvous() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.next_rendezvous"]], "num_nodes_waiting() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.num_nodes_waiting"]], "set() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.set"]], "set_closed() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.set_closed"]], "set_state() (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.set_state"]], "set_state() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.set_state"]], "set_state() (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.set_state"]], "shutdown() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.shutdown"]], "torch.distributed.elastic.rendezvous": [[47, "module-torch.distributed.elastic.rendezvous"]], "torch.distributed.elastic.rendezvous.registry": [[47, "module-torch.distributed.elastic.rendezvous.registry"]], "use_agent_store (torch.distributed.elastic.rendezvous.rendezvoushandler property)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.use_agent_store"]], "wait() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.wait"]], "torch.distributed.run": [[48, "module-torch.distributed.run"]], "subprocesshandler (class in torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler)": [[49, "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler.SubprocessHandler"]], "get_subprocess_handler() (in module torch.distributed.elastic.multiprocessing.subprocess_handler.handlers)": [[49, "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers.get_subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.handlers"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler"]], "filetimerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.FileTimerClient"]], "filetimerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.FileTimerServer"]], 
"localtimerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.LocalTimerClient"]], "localtimerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.LocalTimerServer"]], "timerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerClient"]], "timerrequest (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerRequest"]], "timerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerServer"]], "acquire() (torch.distributed.elastic.timer.timerclient method)": [[50, "torch.distributed.elastic.timer.TimerClient.acquire"]], "clear_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.clear_timers"]], "configure() (in module torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.configure"]], "expires() (in module torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.expires"]], "get_expired_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.get_expired_timers"]], "log_debug_info_for_expired_timers() (in module torch.distributed.elastic.timer.debug_info_logging)": [[50, "torch.distributed.elastic.timer.debug_info_logging.log_debug_info_for_expired_timers"]], "register_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.register_timers"]], "release() (torch.distributed.elastic.timer.timerclient method)": [[50, "torch.distributed.elastic.timer.TimerClient.release"]], "torch.distributed.elastic.timer": [[50, "module-torch.distributed.elastic.timer"]], "torch.distributed.elastic.timer.debug_info_logging": [[50, "module-torch.distributed.elastic.timer.debug_info_logging"]], "constraint (in module torch.export)": [[52, "torch.export.Constraint"]], "customobjargument (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.CustomObjArgument"]], "dim() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.Dim"]], "exportbackwardsignature (class in torch.export)": [[52, "torch.export.ExportBackwardSignature"]], "exportgraphsignature (class in torch.export)": [[52, "torch.export.ExportGraphSignature"]], "exportgraphsignature (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.ExportGraphSignature"]], "exportedprogram (class in torch.export)": [[52, "torch.export.ExportedProgram"]], "flatargsadapter (class in torch.export.unflatten)": [[52, "torch.export.unflatten.FlatArgsAdapter"]], "inputkind (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.InputKind"]], "inputspec (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.InputSpec"]], "interpretermodule (class in torch.export.unflatten)": [[52, "torch.export.unflatten.InterpreterModule"]], "modulecallentry (class in torch.export)": [[52, "torch.export.ModuleCallEntry"]], "modulecallsignature (class in torch.export)": [[52, "torch.export.ModuleCallSignature"]], "outputkind (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.OutputKind"]], "outputspec (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.OutputSpec"]], "shapescollection (class in torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.ShapesCollection"]], "adapt() (torch.export.unflatten.flatargsadapter 
method)": [[52, "torch.export.unflatten.FlatArgsAdapter.adapt"]], "buffers() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.buffers"]], "dims() (in module torch.export)": [[52, "torch.export.dims"]], "dynamic_dim() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.dynamic_dim"]], "dynamic_shapes() (torch.export.dynamic_shapes.shapescollection method)": [[52, "torch.export.dynamic_shapes.ShapesCollection.dynamic_shapes"]], "export() (in module torch.export)": [[52, "torch.export.export"]], "get_replace_hook() (torch.export.graph_signature.exportgraphsignature method)": [[52, "torch.export.graph_signature.ExportGraphSignature.get_replace_hook"]], "load() (in module torch.export)": [[52, "torch.export.load"]], "module() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.module"]], "named_buffers() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.named_buffers"]], "named_parameters() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.named_parameters"]], "parameters() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.parameters"]], "refine_dynamic_shapes_from_suggested_fixes() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.refine_dynamic_shapes_from_suggested_fixes"]], "register_dataclass() (in module torch.export)": [[52, "torch.export.register_dataclass"]], "replace_all_uses() (torch.export.graph_signature.exportgraphsignature method)": [[52, "torch.export.graph_signature.ExportGraphSignature.replace_all_uses"]], "run_decompositions() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.run_decompositions"]], "save() (in module torch.export)": [[52, "torch.export.save"]], "torch.export": [[52, "module-torch.export"]], "torch.export.custom_obj": [[52, "module-torch.export.custom_obj"]], "torch.export.dynamic_shapes": [[52, "module-torch.export.dynamic_shapes"]], "torch.export.exported_program": [[52, "module-torch.export.exported_program"]], "torch.export.graph_signature": [[52, "module-torch.export.graph_signature"]], "torch.export.unflatten": [[52, "module-torch.export.unflatten"]], "unflatten() (in module torch.export.unflatten)": [[52, "torch.export.unflatten.unflatten"]], "torch.fft": [[54, "module-torch.fft"]], "backwardprefetch (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.BackwardPrefetch"]], "cpuoffload (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.CPUOffload"]], "fulloptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullOptimStateDictConfig"]], "fullstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullStateDictConfig"]], "fullyshardeddataparallel (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel"]], "localoptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.LocalOptimStateDictConfig"]], "localstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.LocalStateDictConfig"]], "mixedprecision (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.MixedPrecision"]], "optimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.OptimStateDictConfig"]], "shardedoptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardedOptimStateDictConfig"]], "shardedstatedictconfig (class in torch.distributed.fsdp)": 
[[55, "torch.distributed.fsdp.ShardedStateDictConfig"]], "shardingstrategy (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardingStrategy"]], "statedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.StateDictConfig"]], "statedictsettings (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.StateDictSettings"]], "apply() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.apply"]], "check_is_root() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.check_is_root"]], "clip_grad_norm_() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.clip_grad_norm_"]], "flatten_sharded_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.flatten_sharded_optim_state_dict"]], "forward() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.forward"]], "fsdp_modules() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.fsdp_modules"]], "full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.full_optim_state_dict"]], "get_state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.get_state_dict_type"]], "module (torch.distributed.fsdp.fullyshardeddataparallel property)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.module"]], "named_buffers() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.named_buffers"]], "named_parameters() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.named_parameters"]], "no_sync() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.no_sync"]], "optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.optim_state_dict"]], "optim_state_dict_to_load() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.optim_state_dict_to_load"]], "register_comm_hook() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.register_comm_hook"]], "rekey_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.rekey_optim_state_dict"]], "scatter_full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.scatter_full_optim_state_dict"]], "set_state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.set_state_dict_type"]], "shard_full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.shard_full_optim_state_dict"]], "sharded_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, 
"torch.distributed.fsdp.FullyShardedDataParallel.sharded_optim_state_dict"]], "state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.state_dict_type"]], "summon_full_params() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.summon_full_params"]], "torch.distributed.fsdp": [[55, "module-torch.distributed.fsdp"]], "torch.func": [[57, "module-torch.func"]], "get_overwrite_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.get_overwrite_module_params_on_conversion"]], "get_swap_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.get_swap_module_params_on_conversion"]], "set_overwrite_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.set_overwrite_module_params_on_conversion"]], "set_swap_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.set_swap_module_params_on_conversion"]], "torch.__future__": [[62, "module-torch.__future__"]], "future (class in torch.futures)": [[63, "torch.futures.Future"]], "add_done_callback() (torch.futures.future method)": [[63, "torch.futures.Future.add_done_callback"]], "collect_all() (in module torch.futures)": [[63, "torch.futures.collect_all"]], "done() (torch.futures.future method)": [[63, "torch.futures.Future.done"]], "set_exception() (torch.futures.future method)": [[63, "torch.futures.Future.set_exception"]], "set_result() (torch.futures.future method)": [[63, "torch.futures.Future.set_result"]], "then() (torch.futures.future method)": [[63, "torch.futures.Future.then"]], "torch.futures": [[63, "module-torch.futures"]], "value() (torch.futures.future method)": [[63, "torch.futures.Future.value"]], "wait() (torch.futures.future method)": [[63, "torch.futures.Future.wait"]], "wait_all() (in module torch.futures)": [[63, "torch.futures.wait_all"]], "graph (class in torch.fx)": [[64, "torch.fx.Graph"]], "graphmodule (class in torch.fx)": [[64, "torch.fx.GraphModule"]], "interpreter (class in torch.fx)": [[64, "torch.fx.Interpreter"]], "node (class in torch.fx)": [[64, "torch.fx.Node"]], "proxy (class in torch.fx)": [[64, "torch.fx.Proxy"]], "tracer (class in torch.fx)": [[64, "torch.fx.Tracer"]], "transformer (class in torch.fx)": [[64, "torch.fx.Transformer"]], "__init__() (torch.fx.graph method)": [[64, "torch.fx.Graph.__init__"]], "__init__() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.__init__"]], "add_submodule() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.add_submodule"]], "all_input_nodes (torch.fx.node property)": [[64, "torch.fx.Node.all_input_nodes"]], "append() (torch.fx.node method)": [[64, "torch.fx.Node.append"]], "args (torch.fx.node property)": [[64, "torch.fx.Node.args"]], "boxed_run() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.boxed_run"]], "call_function() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_function"]], "call_function() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_function"]], "call_function() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.call_function"]], "call_method() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_method"]], "call_method() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_method"]], "call_module() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_module"]], "call_module() (torch.fx.interpreter method)": [[64, 
"torch.fx.Interpreter.call_module"]], "call_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.call_module"]], "call_module() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.call_module"]], "code (torch.fx.graphmodule property)": [[64, "torch.fx.GraphModule.code"]], "create_arg() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_arg"]], "create_args_for_root() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_args_for_root"]], "create_node() (torch.fx.graph method)": [[64, "torch.fx.Graph.create_node"]], "create_node() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_node"]], "create_proxy() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_proxy"]], "delete_all_unused_submodules() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.delete_all_unused_submodules"]], "delete_submodule() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.delete_submodule"]], "eliminate_dead_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.eliminate_dead_code"]], "erase_node() (torch.fx.graph method)": [[64, "torch.fx.Graph.erase_node"]], "fetch_args_kwargs_from_env() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.fetch_args_kwargs_from_env"]], "fetch_attr() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.fetch_attr"]], "find_nodes() (torch.fx.graph method)": [[64, "torch.fx.Graph.find_nodes"]], "format_node() (torch.fx.node method)": [[64, "torch.fx.Node.format_node"]], "get_attr() (torch.fx.graph method)": [[64, "torch.fx.Graph.get_attr"]], "get_attr() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.get_attr"]], "get_attr() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.get_attr"]], "get_fresh_qualname() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.get_fresh_qualname"]], "getattr() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.getattr"]], "graph (torch.fx.graphmodule property)": [[64, "torch.fx.GraphModule.graph"]], "graph_copy() (torch.fx.graph method)": [[64, "torch.fx.Graph.graph_copy"]], "insert_arg() (torch.fx.node method)": [[64, "torch.fx.Node.insert_arg"]], "inserting_after() (torch.fx.graph method)": [[64, "torch.fx.Graph.inserting_after"]], "inserting_before() (torch.fx.graph method)": [[64, "torch.fx.Graph.inserting_before"]], "is_impure() (torch.fx.node method)": [[64, "torch.fx.Node.is_impure"]], "is_leaf_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.is_leaf_module"]], "iter() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.iter"]], "keys() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.keys"]], "kwargs (torch.fx.node property)": [[64, "torch.fx.Node.kwargs"]], "lint() (torch.fx.graph method)": [[64, "torch.fx.Graph.lint"]], "map_nodes_to_values() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.map_nodes_to_values"]], "next (torch.fx.node property)": [[64, "torch.fx.Node.next"]], "node_copy() (torch.fx.graph method)": [[64, "torch.fx.Graph.node_copy"]], "nodes (torch.fx.graph property)": [[64, "torch.fx.Graph.nodes"]], "normalized_arguments() (torch.fx.node method)": [[64, "torch.fx.Node.normalized_arguments"]], "on_generate_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.on_generate_code"]], "output() (torch.fx.graph method)": [[64, "torch.fx.Graph.output"]], "output() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.output"]], "path_of_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.path_of_module"]], "placeholder() (torch.fx.graph method)": [[64, "torch.fx.Graph.placeholder"]], "placeholder() 
(torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.placeholder"]], "placeholder() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.placeholder"]], "prepend() (torch.fx.node method)": [[64, "torch.fx.Node.prepend"]], "prev (torch.fx.node property)": [[64, "torch.fx.Node.prev"]], "print_readable() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.print_readable"]], "print_tabular() (torch.fx.graph method)": [[64, "torch.fx.Graph.print_tabular"]], "process_inputs() (torch.fx.graph method)": [[64, "torch.fx.Graph.process_inputs"]], "process_outputs() (torch.fx.graph method)": [[64, "torch.fx.Graph.process_outputs"]], "proxy() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.proxy"]], "python_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.python_code"]], "recompile() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.recompile"]], "replace_all_uses_with() (torch.fx.node method)": [[64, "torch.fx.Node.replace_all_uses_with"]], "replace_input_with() (torch.fx.node method)": [[64, "torch.fx.Node.replace_input_with"]], "replace_pattern() (in module torch.fx)": [[64, "torch.fx.replace_pattern"]], "run() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.run"]], "run_node() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.run_node"]], "set_codegen() (torch.fx.graph method)": [[64, "torch.fx.Graph.set_codegen"]], "stack_trace (torch.fx.node property)": [[64, "torch.fx.Node.stack_trace"]], "symbolic_trace() (in module torch.fx)": [[64, "torch.fx.symbolic_trace"]], "to_bool() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.to_bool"]], "to_folder() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.to_folder"]], "torch.fx": [[64, "module-torch.fx"]], "torch.fx.annotate": [[64, "module-torch.fx.annotate"]], "torch.fx.config": [[64, "module-torch.fx.config"]], "torch.fx.experimental": [[64, "module-torch.fx.experimental"]], "torch.fx.experimental.accelerator_partitioner": [[64, "module-torch.fx.experimental.accelerator_partitioner"]], "torch.fx.experimental.const_fold": [[64, "module-torch.fx.experimental.const_fold"]], "torch.fx.experimental.debug": [[64, "module-torch.fx.experimental.debug"]], "torch.fx.experimental.graph_gradual_typechecker": [[64, "module-torch.fx.experimental.graph_gradual_typechecker"]], "torch.fx.experimental.merge_matmul": [[64, "module-torch.fx.experimental.merge_matmul"]], "torch.fx.experimental.meta_tracer": [[64, "module-torch.fx.experimental.meta_tracer"]], "torch.fx.experimental.migrate_gradual_types": [[64, "module-torch.fx.experimental.migrate_gradual_types"]], "torch.fx.experimental.migrate_gradual_types.constraint": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint"]], "torch.fx.experimental.migrate_gradual_types.constraint_generator": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint_generator"]], "torch.fx.experimental.migrate_gradual_types.constraint_transformation": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint_transformation"]], "torch.fx.experimental.migrate_gradual_types.operation": [[64, "module-torch.fx.experimental.migrate_gradual_types.operation"]], "torch.fx.experimental.migrate_gradual_types.transform_to_z3": [[64, "module-torch.fx.experimental.migrate_gradual_types.transform_to_z3"]], "torch.fx.experimental.migrate_gradual_types.util": [[64, "module-torch.fx.experimental.migrate_gradual_types.util"]], "torch.fx.experimental.migrate_gradual_types.z3_types": [[64, 
"module-torch.fx.experimental.migrate_gradual_types.z3_types"]], "torch.fx.experimental.normalize": [[64, "module-torch.fx.experimental.normalize"]], "torch.fx.experimental.optimization": [[64, "module-torch.fx.experimental.optimization"]], "torch.fx.experimental.partitioner_utils": [[64, "module-torch.fx.experimental.partitioner_utils"]], "torch.fx.experimental.proxy_tensor": [[64, "module-torch.fx.experimental.proxy_tensor"]], "torch.fx.experimental.recording": [[64, "module-torch.fx.experimental.recording"]], "torch.fx.experimental.refinement_types": [[64, "module-torch.fx.experimental.refinement_types"]], "torch.fx.experimental.rewriter": [[64, "module-torch.fx.experimental.rewriter"]], "torch.fx.experimental.schema_type_annotation": [[64, "module-torch.fx.experimental.schema_type_annotation"]], "torch.fx.experimental.sym_node": [[64, "module-torch.fx.experimental.sym_node"]], "torch.fx.experimental.unification": [[64, "module-torch.fx.experimental.unification"]], "torch.fx.experimental.unification.core": [[64, "module-torch.fx.experimental.unification.core"]], "torch.fx.experimental.unification.dispatch": [[64, "module-torch.fx.experimental.unification.dispatch"]], "torch.fx.experimental.unification.match": [[64, "module-torch.fx.experimental.unification.match"]], "torch.fx.experimental.unification.more": [[64, "module-torch.fx.experimental.unification.more"]], "torch.fx.experimental.unification.multipledispatch": [[64, "module-torch.fx.experimental.unification.multipledispatch"]], "torch.fx.experimental.unification.multipledispatch.conflict": [[64, "module-torch.fx.experimental.unification.multipledispatch.conflict"]], "torch.fx.experimental.unification.multipledispatch.core": [[64, "module-torch.fx.experimental.unification.multipledispatch.core"]], "torch.fx.experimental.unification.multipledispatch.dispatcher": [[64, "module-torch.fx.experimental.unification.multipledispatch.dispatcher"]], "torch.fx.experimental.unification.multipledispatch.utils": [[64, "module-torch.fx.experimental.unification.multipledispatch.utils"]], "torch.fx.experimental.unification.multipledispatch.variadic": [[64, "module-torch.fx.experimental.unification.multipledispatch.variadic"]], "torch.fx.experimental.unification.unification_tools": [[64, "module-torch.fx.experimental.unification.unification_tools"]], "torch.fx.experimental.unification.utils": [[64, "module-torch.fx.experimental.unification.utils"]], "torch.fx.experimental.unification.variable": [[64, "module-torch.fx.experimental.unification.variable"]], "torch.fx.experimental.unify_refinements": [[64, "module-torch.fx.experimental.unify_refinements"]], "torch.fx.experimental.validator": [[64, "module-torch.fx.experimental.validator"]], "torch.fx.graph": [[64, "module-torch.fx.graph"]], "torch.fx.graph_module": [[64, "module-torch.fx.graph_module"]], "torch.fx.immutable_collections": [[64, "module-torch.fx.immutable_collections"]], "torch.fx.interpreter": [[64, "module-torch.fx.interpreter"]], "torch.fx.node": [[64, "module-torch.fx.node"]], "torch.fx.operator_schemas": [[64, "module-torch.fx.operator_schemas"]], "torch.fx.passes": [[64, "module-torch.fx.passes"]], "torch.fx.passes.annotate_getitem_nodes": [[64, "module-torch.fx.passes.annotate_getitem_nodes"]], "torch.fx.passes.backends": [[64, "module-torch.fx.passes.backends"]], "torch.fx.passes.backends.cudagraphs": [[64, "module-torch.fx.passes.backends.cudagraphs"]], "torch.fx.passes.dialect": [[64, "module-torch.fx.passes.dialect"]], "torch.fx.passes.dialect.common": [[64, 
"module-torch.fx.passes.dialect.common"]], "torch.fx.passes.dialect.common.cse_pass": [[64, "module-torch.fx.passes.dialect.common.cse_pass"]], "torch.fx.passes.fake_tensor_prop": [[64, "module-torch.fx.passes.fake_tensor_prop"]], "torch.fx.passes.graph_drawer": [[64, "module-torch.fx.passes.graph_drawer"]], "torch.fx.passes.graph_manipulation": [[64, "module-torch.fx.passes.graph_manipulation"]], "torch.fx.passes.graph_transform_observer": [[64, "module-torch.fx.passes.graph_transform_observer"]], "torch.fx.passes.infra": [[64, "module-torch.fx.passes.infra"]], "torch.fx.passes.infra.partitioner": [[64, "module-torch.fx.passes.infra.partitioner"]], "torch.fx.passes.infra.pass_base": [[64, "module-torch.fx.passes.infra.pass_base"]], "torch.fx.passes.infra.pass_manager": [[64, "module-torch.fx.passes.infra.pass_manager"]], "torch.fx.passes.net_min_base": [[64, "module-torch.fx.passes.net_min_base"]], "torch.fx.passes.operator_support": [[64, "module-torch.fx.passes.operator_support"]], "torch.fx.passes.param_fetch": [[64, "module-torch.fx.passes.param_fetch"]], "torch.fx.passes.pass_manager": [[64, "module-torch.fx.passes.pass_manager"]], "torch.fx.passes.reinplace": [[64, "module-torch.fx.passes.reinplace"]], "torch.fx.passes.runtime_assert": [[64, "module-torch.fx.passes.runtime_assert"]], "torch.fx.passes.shape_prop": [[64, "module-torch.fx.passes.shape_prop"]], "torch.fx.passes.split_module": [[64, "module-torch.fx.passes.split_module"]], "torch.fx.passes.split_utils": [[64, "module-torch.fx.passes.split_utils"]], "torch.fx.passes.splitter_base": [[64, "module-torch.fx.passes.splitter_base"]], "torch.fx.passes.tests": [[64, "module-torch.fx.passes.tests"]], "torch.fx.passes.tests.test_pass_manager": [[64, "module-torch.fx.passes.tests.test_pass_manager"]], "torch.fx.passes.tools_common": [[64, "module-torch.fx.passes.tools_common"]], "torch.fx.passes.utils": [[64, "module-torch.fx.passes.utils"]], "torch.fx.passes.utils.common": [[64, "module-torch.fx.passes.utils.common"]], "torch.fx.passes.utils.fuser_utils": [[64, "module-torch.fx.passes.utils.fuser_utils"]], "torch.fx.passes.utils.matcher_utils": [[64, "module-torch.fx.passes.utils.matcher_utils"]], "torch.fx.passes.utils.matcher_with_name_node_map_utils": [[64, "module-torch.fx.passes.utils.matcher_with_name_node_map_utils"]], "torch.fx.passes.utils.source_matcher_utils": [[64, "module-torch.fx.passes.utils.source_matcher_utils"]], "torch.fx.proxy": [[64, "module-torch.fx.proxy"]], "torch.fx.subgraph_rewriter": [[64, "module-torch.fx.subgraph_rewriter"]], "torch.fx.tensor_type": [[64, "module-torch.fx.tensor_type"]], "torch.fx.traceback": [[64, "module-torch.fx.traceback"]], "trace() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.trace"]], "transform() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.transform"]], "update_arg() (torch.fx.node method)": [[64, "torch.fx.Node.update_arg"]], "update_kwarg() (torch.fx.node method)": [[64, "torch.fx.Node.update_kwarg"]], "wrap() (in module torch.fx)": [[64, "torch.fx.wrap"]], "torch.fx.experimental.symbolic_shapes": [[65, "module-torch.fx.experimental.symbolic_shapes"]], "generator (class in torch)": [[90, "torch.Generator"]], "clone_state() (torch.generator method)": [[90, "torch.Generator.clone_state"]], "device (torch.generator attribute)": [[90, "torch.Generator.device"]], "get_state() (torch.generator method)": [[90, "torch.Generator.get_state"]], "graphsafe_get_state() (torch.generator method)": [[90, "torch.Generator.graphsafe_get_state"]], "graphsafe_set_state() 
(torch.generator method)": [[90, "torch.Generator.graphsafe_set_state"]], "initial_seed() (torch.generator method)": [[90, "torch.Generator.initial_seed"]], "manual_seed() (torch.generator method)": [[90, "torch.Generator.manual_seed"]], "seed() (torch.generator method)": [[90, "torch.Generator.seed"]], "set_state() (torch.generator method)": [[90, "torch.Generator.set_state"]], "abs() (torch.tensor method)": [[91, "torch.Tensor.abs"]], "abs_() (torch.tensor method)": [[92, "torch.Tensor.abs_"]], "absolute() (torch.tensor method)": [[93, "torch.Tensor.absolute"]], "absolute_() (torch.tensor method)": [[94, "torch.Tensor.absolute_"]], "acos() (torch.tensor method)": [[95, "torch.Tensor.acos"]], "acos_() (torch.tensor method)": [[96, "torch.Tensor.acos_"]], "acosh() (torch.tensor method)": [[97, "torch.Tensor.acosh"]], "acosh_() (torch.tensor method)": [[98, "torch.Tensor.acosh_"]], "add() (torch.tensor method)": [[99, "torch.Tensor.add"]], "add_() (torch.tensor method)": [[100, "torch.Tensor.add_"]], "addbmm() (torch.tensor method)": [[101, "torch.Tensor.addbmm"]], "addbmm_() (torch.tensor method)": [[102, "torch.Tensor.addbmm_"]], "addcdiv() (torch.tensor method)": [[103, "torch.Tensor.addcdiv"]], "addcdiv_() (torch.tensor method)": [[104, "torch.Tensor.addcdiv_"]], "addcmul() (torch.tensor method)": [[105, "torch.Tensor.addcmul"]], "addcmul_() (torch.tensor method)": [[106, "torch.Tensor.addcmul_"]], "addmm() (torch.tensor method)": [[107, "torch.Tensor.addmm"]], "addmm_() (torch.tensor method)": [[108, "torch.Tensor.addmm_"]], "addmv() (torch.tensor method)": [[109, "torch.Tensor.addmv"]], "addmv_() (torch.tensor method)": [[110, "torch.Tensor.addmv_"]], "addr() (torch.tensor method)": [[111, "torch.Tensor.addr"]], "addr_() (torch.tensor method)": [[112, "torch.Tensor.addr_"]], "adjoint() (torch.tensor method)": [[113, "torch.Tensor.adjoint"]], "all() (torch.tensor method)": [[114, "torch.Tensor.all"]], "allclose() (torch.tensor method)": [[115, "torch.Tensor.allclose"]], "amax() (torch.tensor method)": [[116, "torch.Tensor.amax"]], "amin() (torch.tensor method)": [[117, "torch.Tensor.amin"]], "aminmax() (torch.tensor method)": [[118, "torch.Tensor.aminmax"]], "angle() (torch.tensor method)": [[119, "torch.Tensor.angle"]], "any() (torch.tensor method)": [[120, "torch.Tensor.any"]], "apply_() (torch.tensor method)": [[121, "torch.Tensor.apply_"]], "arccos() (torch.tensor method)": [[122, "torch.Tensor.arccos"]], "arccos_() (torch.tensor method)": [[123, "torch.Tensor.arccos_"]], "arccosh() (torch.tensor method)": [[124, "torch.Tensor.arccosh"]], "arccosh_() (torch.tensor method)": [[125, "torch.Tensor.arccosh_"]], "arcsin() (torch.tensor method)": [[126, "torch.Tensor.arcsin"]], "arcsin_() (torch.tensor method)": [[127, "torch.Tensor.arcsin_"]], "arcsinh() (torch.tensor method)": [[128, "torch.Tensor.arcsinh"]], "arcsinh_() (torch.tensor method)": [[129, "torch.Tensor.arcsinh_"]], "arctan() (torch.tensor method)": [[130, "torch.Tensor.arctan"]], "arctan2() (torch.tensor method)": [[131, "torch.Tensor.arctan2"]], "arctan2_() (torch.tensor method)": [[132, "torch.Tensor.arctan2_"]], "arctan_() (torch.tensor method)": [[133, "torch.Tensor.arctan_"]], "arctanh() (torch.tensor method)": [[134, "torch.Tensor.arctanh"]], "arctanh_() (torch.tensor method)": [[135, "torch.Tensor.arctanh_"]], "argmax() (torch.tensor method)": [[136, "torch.Tensor.argmax"]], "argmin() (torch.tensor method)": [[137, "torch.Tensor.argmin"]], "argsort() (torch.tensor method)": [[138, "torch.Tensor.argsort"]], 
"argwhere() (torch.tensor method)": [[139, "torch.Tensor.argwhere"]], "as_strided() (torch.tensor method)": [[140, "torch.Tensor.as_strided"]], "as_subclass() (torch.tensor method)": [[141, "torch.Tensor.as_subclass"]], "asin() (torch.tensor method)": [[142, "torch.Tensor.asin"]], "asin_() (torch.tensor method)": [[143, "torch.Tensor.asin_"]], "asinh() (torch.tensor method)": [[144, "torch.Tensor.asinh"]], "asinh_() (torch.tensor method)": [[145, "torch.Tensor.asinh_"]], "atan() (torch.tensor method)": [[146, "torch.Tensor.atan"]], "atan2() (torch.tensor method)": [[147, "torch.Tensor.atan2"]], "atan2_() (torch.tensor method)": [[148, "torch.Tensor.atan2_"]], "atan_() (torch.tensor method)": [[149, "torch.Tensor.atan_"]], "atanh() (torch.tensor method)": [[150, "torch.Tensor.atanh"]], "atanh_() (torch.tensor method)": [[151, "torch.Tensor.atanh_"]], "backward() (torch.tensor method)": [[152, "torch.Tensor.backward"]], "baddbmm() (torch.tensor method)": [[153, "torch.Tensor.baddbmm"]], "baddbmm_() (torch.tensor method)": [[154, "torch.Tensor.baddbmm_"]], "bernoulli() (torch.tensor method)": [[155, "torch.Tensor.bernoulli"]], "bernoulli_() (torch.tensor method)": [[156, "torch.Tensor.bernoulli_"]], "bfloat16() (torch.tensor method)": [[157, "torch.Tensor.bfloat16"]], "bincount() (torch.tensor method)": [[158, "torch.Tensor.bincount"]], "bitwise_and() (torch.tensor method)": [[159, "torch.Tensor.bitwise_and"]], "bitwise_and_() (torch.tensor method)": [[160, "torch.Tensor.bitwise_and_"]], "bitwise_left_shift() (torch.tensor method)": [[161, "torch.Tensor.bitwise_left_shift"]], "bitwise_left_shift_() (torch.tensor method)": [[162, "torch.Tensor.bitwise_left_shift_"]], "bitwise_not() (torch.tensor method)": [[163, "torch.Tensor.bitwise_not"]], "bitwise_not_() (torch.tensor method)": [[164, "torch.Tensor.bitwise_not_"]], "bitwise_or() (torch.tensor method)": [[165, "torch.Tensor.bitwise_or"]], "bitwise_or_() (torch.tensor method)": [[166, "torch.Tensor.bitwise_or_"]], "bitwise_right_shift() (torch.tensor method)": [[167, "torch.Tensor.bitwise_right_shift"]], "bitwise_right_shift_() (torch.tensor method)": [[168, "torch.Tensor.bitwise_right_shift_"]], "bitwise_xor() (torch.tensor method)": [[169, "torch.Tensor.bitwise_xor"]], "bitwise_xor_() (torch.tensor method)": [[170, "torch.Tensor.bitwise_xor_"]], "bmm() (torch.tensor method)": [[171, "torch.Tensor.bmm"]], "bool() (torch.tensor method)": [[172, "torch.Tensor.bool"]], "broadcast_to() (torch.tensor method)": [[173, "torch.Tensor.broadcast_to"]], "byte() (torch.tensor method)": [[174, "torch.Tensor.byte"]], "cauchy_() (torch.tensor method)": [[175, "torch.Tensor.cauchy_"]], "ccol_indices() (torch.tensor method)": [[176, "torch.Tensor.ccol_indices"]], "cdouble() (torch.tensor method)": [[177, "torch.Tensor.cdouble"]], "ceil() (torch.tensor method)": [[178, "torch.Tensor.ceil"]], "ceil_() (torch.tensor method)": [[179, "torch.Tensor.ceil_"]], "cfloat() (torch.tensor method)": [[180, "torch.Tensor.cfloat"]], "chalf() (torch.tensor method)": [[181, "torch.Tensor.chalf"]], "char() (torch.tensor method)": [[182, "torch.Tensor.char"]], "cholesky() (torch.tensor method)": [[183, "torch.Tensor.cholesky"]], "cholesky_inverse() (torch.tensor method)": [[184, "torch.Tensor.cholesky_inverse"]], "cholesky_solve() (torch.tensor method)": [[185, "torch.Tensor.cholesky_solve"]], "chunk() (torch.tensor method)": [[186, "torch.Tensor.chunk"]], "clamp() (torch.tensor method)": [[187, "torch.Tensor.clamp"]], "clamp_() (torch.tensor method)": [[188, 
"torch.Tensor.clamp_"]], "clip() (torch.tensor method)": [[189, "torch.Tensor.clip"]], "clip_() (torch.tensor method)": [[190, "torch.Tensor.clip_"]], "clone() (torch.tensor method)": [[191, "torch.Tensor.clone"]], "coalesce() (torch.tensor method)": [[192, "torch.Tensor.coalesce"]], "col_indices() (torch.tensor method)": [[193, "torch.Tensor.col_indices"]], "conj() (torch.tensor method)": [[194, "torch.Tensor.conj"]], "conj_physical() (torch.tensor method)": [[195, "torch.Tensor.conj_physical"]], "conj_physical_() (torch.tensor method)": [[196, "torch.Tensor.conj_physical_"]], "contiguous() (torch.tensor method)": [[197, "torch.Tensor.contiguous"]], "copy_() (torch.tensor method)": [[198, "torch.Tensor.copy_"]], "copysign() (torch.tensor method)": [[199, "torch.Tensor.copysign"]], "copysign_() (torch.tensor method)": [[200, "torch.Tensor.copysign_"]], "corrcoef() (torch.tensor method)": [[201, "torch.Tensor.corrcoef"]], "cos() (torch.tensor method)": [[202, "torch.Tensor.cos"]], "cos_() (torch.tensor method)": [[203, "torch.Tensor.cos_"]], "cosh() (torch.tensor method)": [[204, "torch.Tensor.cosh"]], "cosh_() (torch.tensor method)": [[205, "torch.Tensor.cosh_"]], "count_nonzero() (torch.tensor method)": [[206, "torch.Tensor.count_nonzero"]], "cov() (torch.tensor method)": [[207, "torch.Tensor.cov"]], "cpu() (torch.tensor method)": [[208, "torch.Tensor.cpu"]], "cross() (torch.tensor method)": [[209, "torch.Tensor.cross"]], "crow_indices() (torch.tensor method)": [[210, "torch.Tensor.crow_indices"]], "cuda() (torch.tensor method)": [[211, "torch.Tensor.cuda"]], "cummax() (torch.tensor method)": [[212, "torch.Tensor.cummax"]], "cummin() (torch.tensor method)": [[213, "torch.Tensor.cummin"]], "cumprod() (torch.tensor method)": [[214, "torch.Tensor.cumprod"]], "cumprod_() (torch.tensor method)": [[215, "torch.Tensor.cumprod_"]], "cumsum() (torch.tensor method)": [[216, "torch.Tensor.cumsum"]], "cumsum_() (torch.tensor method)": [[217, "torch.Tensor.cumsum_"]], "data_ptr() (torch.tensor method)": [[218, "torch.Tensor.data_ptr"]], "deg2rad() (torch.tensor method)": [[219, "torch.Tensor.deg2rad"]], "dense_dim() (torch.tensor method)": [[220, "torch.Tensor.dense_dim"]], "dequantize() (torch.tensor method)": [[221, "torch.Tensor.dequantize"]], "det() (torch.tensor method)": [[222, "torch.Tensor.det"]], "detach() (torch.tensor method)": [[223, "torch.Tensor.detach"]], "detach_() (torch.tensor method)": [[224, "torch.Tensor.detach_"]], "device (torch.tensor attribute)": [[225, "torch.Tensor.device"]], "diag() (torch.tensor method)": [[226, "torch.Tensor.diag"]], "diag_embed() (torch.tensor method)": [[227, "torch.Tensor.diag_embed"]], "diagflat() (torch.tensor method)": [[228, "torch.Tensor.diagflat"]], "diagonal() (torch.tensor method)": [[229, "torch.Tensor.diagonal"]], "diagonal_scatter() (torch.tensor method)": [[230, "torch.Tensor.diagonal_scatter"]], "diff() (torch.tensor method)": [[231, "torch.Tensor.diff"]], "digamma() (torch.tensor method)": [[232, "torch.Tensor.digamma"]], "digamma_() (torch.tensor method)": [[233, "torch.Tensor.digamma_"]], "dim() (torch.tensor method)": [[234, "torch.Tensor.dim"]], "dim_order() (torch.tensor method)": [[235, "torch.Tensor.dim_order"]], "dist() (torch.tensor method)": [[236, "torch.Tensor.dist"]], "div() (torch.tensor method)": [[237, "torch.Tensor.div"]], "div_() (torch.tensor method)": [[238, "torch.Tensor.div_"]], "divide() (torch.tensor method)": [[239, "torch.Tensor.divide"]], "divide_() (torch.tensor method)": [[240, "torch.Tensor.divide_"]], "dot() 
(torch.tensor method)": [[241, "torch.Tensor.dot"]], "double() (torch.tensor method)": [[242, "torch.Tensor.double"]], "dsplit() (torch.tensor method)": [[243, "torch.Tensor.dsplit"]], "element_size() (torch.tensor method)": [[244, "torch.Tensor.element_size"]], "eq() (torch.tensor method)": [[245, "torch.Tensor.eq"]], "eq_() (torch.tensor method)": [[246, "torch.Tensor.eq_"]], "equal() (torch.tensor method)": [[247, "torch.Tensor.equal"]], "erf() (torch.tensor method)": [[248, "torch.Tensor.erf"]], "erf_() (torch.tensor method)": [[249, "torch.Tensor.erf_"]], "erfc() (torch.tensor method)": [[250, "torch.Tensor.erfc"]], "erfc_() (torch.tensor method)": [[251, "torch.Tensor.erfc_"]], "erfinv() (torch.tensor method)": [[252, "torch.Tensor.erfinv"]], "erfinv_() (torch.tensor method)": [[253, "torch.Tensor.erfinv_"]], "exp() (torch.tensor method)": [[254, "torch.Tensor.exp"]], "exp_() (torch.tensor method)": [[255, "torch.Tensor.exp_"]], "expand() (torch.tensor method)": [[256, "torch.Tensor.expand"]], "expand_as() (torch.tensor method)": [[257, "torch.Tensor.expand_as"]], "expm1() (torch.tensor method)": [[258, "torch.Tensor.expm1"]], "expm1_() (torch.tensor method)": [[259, "torch.Tensor.expm1_"]], "exponential_() (torch.tensor method)": [[260, "torch.Tensor.exponential_"]], "fill_() (torch.tensor method)": [[261, "torch.Tensor.fill_"]], "fill_diagonal_() (torch.tensor method)": [[262, "torch.Tensor.fill_diagonal_"]], "fix() (torch.tensor method)": [[263, "torch.Tensor.fix"]], "fix_() (torch.tensor method)": [[264, "torch.Tensor.fix_"]], "flatten() (torch.tensor method)": [[265, "torch.Tensor.flatten"]], "flip() (torch.tensor method)": [[266, "torch.Tensor.flip"]], "fliplr() (torch.tensor method)": [[267, "torch.Tensor.fliplr"]], "flipud() (torch.tensor method)": [[268, "torch.Tensor.flipud"]], "float() (torch.tensor method)": [[269, "torch.Tensor.float"]], "float_power() (torch.tensor method)": [[270, "torch.Tensor.float_power"]], "float_power_() (torch.tensor method)": [[271, "torch.Tensor.float_power_"]], "floor() (torch.tensor method)": [[272, "torch.Tensor.floor"]], "floor_() (torch.tensor method)": [[273, "torch.Tensor.floor_"]], "floor_divide() (torch.tensor method)": [[274, "torch.Tensor.floor_divide"]], "floor_divide_() (torch.tensor method)": [[275, "torch.Tensor.floor_divide_"]], "fmax() (torch.tensor method)": [[276, "torch.Tensor.fmax"]], "fmin() (torch.tensor method)": [[277, "torch.Tensor.fmin"]], "fmod() (torch.tensor method)": [[278, "torch.Tensor.fmod"]], "fmod_() (torch.tensor method)": [[279, "torch.Tensor.fmod_"]], "frac() (torch.tensor method)": [[280, "torch.Tensor.frac"]], "frac_() (torch.tensor method)": [[281, "torch.Tensor.frac_"]], "frexp() (torch.tensor method)": [[282, "torch.Tensor.frexp"]], "gather() (torch.tensor method)": [[283, "torch.Tensor.gather"]], "gcd() (torch.tensor method)": [[284, "torch.Tensor.gcd"]], "gcd_() (torch.tensor method)": [[285, "torch.Tensor.gcd_"]], "ge() (torch.tensor method)": [[286, "torch.Tensor.ge"]], "ge_() (torch.tensor method)": [[287, "torch.Tensor.ge_"]], "geometric_() (torch.tensor method)": [[288, "torch.Tensor.geometric_"]], "geqrf() (torch.tensor method)": [[289, "torch.Tensor.geqrf"]], "ger() (torch.tensor method)": [[290, "torch.Tensor.ger"]], "get_device() (torch.tensor method)": [[291, "torch.Tensor.get_device"]], "grad (torch.tensor attribute)": [[292, "torch.Tensor.grad"]], "greater() (torch.tensor method)": [[293, "torch.Tensor.greater"]], "greater_() (torch.tensor method)": [[294, "torch.Tensor.greater_"]], 
"greater_equal() (torch.tensor method)": [[295, "torch.Tensor.greater_equal"]], "greater_equal_() (torch.tensor method)": [[296, "torch.Tensor.greater_equal_"]], "gt() (torch.tensor method)": [[297, "torch.Tensor.gt"]], "gt_() (torch.tensor method)": [[298, "torch.Tensor.gt_"]], "half() (torch.tensor method)": [[299, "torch.Tensor.half"]], "hardshrink() (torch.tensor method)": [[300, "torch.Tensor.hardshrink"]], "heaviside() (torch.tensor method)": [[301, "torch.Tensor.heaviside"]], "histc() (torch.tensor method)": [[302, "torch.Tensor.histc"]], "histogram() (torch.tensor method)": [[303, "torch.Tensor.histogram"]], "hsplit() (torch.tensor method)": [[304, "torch.Tensor.hsplit"]], "hypot() (torch.tensor method)": [[305, "torch.Tensor.hypot"]], "hypot_() (torch.tensor method)": [[306, "torch.Tensor.hypot_"]], "i0() (torch.tensor method)": [[307, "torch.Tensor.i0"]], "i0_() (torch.tensor method)": [[308, "torch.Tensor.i0_"]], "igamma() (torch.tensor method)": [[309, "torch.Tensor.igamma"]], "igamma_() (torch.tensor method)": [[310, "torch.Tensor.igamma_"]], "igammac() (torch.tensor method)": [[311, "torch.Tensor.igammac"]], "igammac_() (torch.tensor method)": [[312, "torch.Tensor.igammac_"]], "imag (torch.tensor attribute)": [[313, "torch.Tensor.imag"]], "index_add() (torch.tensor method)": [[314, "torch.Tensor.index_add"]], "index_add_() (torch.tensor method)": [[315, "torch.Tensor.index_add_"]], "index_copy() (torch.tensor method)": [[316, "torch.Tensor.index_copy"]], "index_copy_() (torch.tensor method)": [[317, "torch.Tensor.index_copy_"]], "index_fill() (torch.tensor method)": [[318, "torch.Tensor.index_fill"]], "index_fill_() (torch.tensor method)": [[319, "torch.Tensor.index_fill_"]], "index_put() (torch.tensor method)": [[320, "torch.Tensor.index_put"]], "index_put_() (torch.tensor method)": [[321, "torch.Tensor.index_put_"]], "index_reduce() (torch.tensor method)": [[322, "torch.Tensor.index_reduce"]], "index_reduce_() (torch.tensor method)": [[323, "torch.Tensor.index_reduce_"]], "index_select() (torch.tensor method)": [[324, "torch.Tensor.index_select"]], "indices() (torch.tensor method)": [[325, "torch.Tensor.indices"]], "inner() (torch.tensor method)": [[326, "torch.Tensor.inner"]], "int() (torch.tensor method)": [[327, "torch.Tensor.int"]], "int_repr() (torch.tensor method)": [[328, "torch.Tensor.int_repr"]], "inverse() (torch.tensor method)": [[329, "torch.Tensor.inverse"]], "is_coalesced() (torch.tensor method)": [[330, "torch.Tensor.is_coalesced"]], "is_complex() (torch.tensor method)": [[331, "torch.Tensor.is_complex"]], "is_conj() (torch.tensor method)": [[332, "torch.Tensor.is_conj"]], "is_contiguous() (torch.tensor method)": [[333, "torch.Tensor.is_contiguous"]], "is_cuda (torch.tensor attribute)": [[334, "torch.Tensor.is_cuda"]], "is_floating_point() (torch.tensor method)": [[335, "torch.Tensor.is_floating_point"]], "is_inference() (torch.tensor method)": [[336, "torch.Tensor.is_inference"]], "is_leaf (torch.tensor attribute)": [[337, "torch.Tensor.is_leaf"]], "is_meta (torch.tensor attribute)": [[338, "torch.Tensor.is_meta"]], "is_pinned() (torch.tensor method)": [[339, "torch.Tensor.is_pinned"]], "is_quantized (torch.tensor attribute)": [[340, "torch.Tensor.is_quantized"]], "is_set_to() (torch.tensor method)": [[341, "torch.Tensor.is_set_to"]], "is_shared() (torch.tensor method)": [[342, "torch.Tensor.is_shared"]], "is_signed() (torch.tensor method)": [[343, "torch.Tensor.is_signed"]], "is_sparse (torch.tensor attribute)": [[344, "torch.Tensor.is_sparse"]], 
"is_sparse_csr (torch.tensor attribute)": [[345, "torch.Tensor.is_sparse_csr"]], "isclose() (torch.tensor method)": [[346, "torch.Tensor.isclose"]], "isfinite() (torch.tensor method)": [[347, "torch.Tensor.isfinite"]], "isinf() (torch.tensor method)": [[348, "torch.Tensor.isinf"]], "isnan() (torch.tensor method)": [[349, "torch.Tensor.isnan"]], "isneginf() (torch.tensor method)": [[350, "torch.Tensor.isneginf"]], "isposinf() (torch.tensor method)": [[351, "torch.Tensor.isposinf"]], "isreal() (torch.tensor method)": [[352, "torch.Tensor.isreal"]], "istft() (torch.tensor method)": [[353, "torch.Tensor.istft"]], "item() (torch.tensor method)": [[354, "torch.Tensor.item"]], "itemsize (torch.tensor attribute)": [[355, "torch.Tensor.itemsize"]], "kthvalue() (torch.tensor method)": [[356, "torch.Tensor.kthvalue"]], "lcm() (torch.tensor method)": [[357, "torch.Tensor.lcm"]], "lcm_() (torch.tensor method)": [[358, "torch.Tensor.lcm_"]], "ldexp() (torch.tensor method)": [[359, "torch.Tensor.ldexp"]], "ldexp_() (torch.tensor method)": [[360, "torch.Tensor.ldexp_"]], "le() (torch.tensor method)": [[361, "torch.Tensor.le"]], "le_() (torch.tensor method)": [[362, "torch.Tensor.le_"]], "lerp() (torch.tensor method)": [[363, "torch.Tensor.lerp"]], "lerp_() (torch.tensor method)": [[364, "torch.Tensor.lerp_"]], "less() (torch.tensor method)": [[365, "torch.Tensor.less"]], "less_() (torch.tensor method)": [[366, "torch.Tensor.less_"]], "less_equal() (torch.tensor method)": [[367, "torch.Tensor.less_equal"]], "less_equal_() (torch.tensor method)": [[368, "torch.Tensor.less_equal_"]], "lgamma() (torch.tensor method)": [[369, "torch.Tensor.lgamma"]], "lgamma_() (torch.tensor method)": [[370, "torch.Tensor.lgamma_"]], "log() (torch.tensor method)": [[371, "torch.Tensor.log"]], "log10() (torch.tensor method)": [[372, "torch.Tensor.log10"]], "log10_() (torch.tensor method)": [[373, "torch.Tensor.log10_"]], "log1p() (torch.tensor method)": [[374, "torch.Tensor.log1p"]], "log1p_() (torch.tensor method)": [[375, "torch.Tensor.log1p_"]], "log2() (torch.tensor method)": [[376, "torch.Tensor.log2"]], "log2_() (torch.tensor method)": [[377, "torch.Tensor.log2_"]], "log_() (torch.tensor method)": [[378, "torch.Tensor.log_"]], "log_normal_() (torch.tensor method)": [[379, "torch.Tensor.log_normal_"]], "logaddexp() (torch.tensor method)": [[380, "torch.Tensor.logaddexp"]], "logaddexp2() (torch.tensor method)": [[381, "torch.Tensor.logaddexp2"]], "logcumsumexp() (torch.tensor method)": [[382, "torch.Tensor.logcumsumexp"]], "logdet() (torch.tensor method)": [[383, "torch.Tensor.logdet"]], "logical_and() (torch.tensor method)": [[384, "torch.Tensor.logical_and"]], "logical_and_() (torch.tensor method)": [[385, "torch.Tensor.logical_and_"]], "logical_not() (torch.tensor method)": [[386, "torch.Tensor.logical_not"]], "logical_not_() (torch.tensor method)": [[387, "torch.Tensor.logical_not_"]], "logical_or() (torch.tensor method)": [[388, "torch.Tensor.logical_or"]], "logical_or_() (torch.tensor method)": [[389, "torch.Tensor.logical_or_"]], "logical_xor() (torch.tensor method)": [[390, "torch.Tensor.logical_xor"]], "logical_xor_() (torch.tensor method)": [[391, "torch.Tensor.logical_xor_"]], "logit() (torch.tensor method)": [[392, "torch.Tensor.logit"]], "logit_() (torch.tensor method)": [[393, "torch.Tensor.logit_"]], "logsumexp() (torch.tensor method)": [[394, "torch.Tensor.logsumexp"]], "long() (torch.tensor method)": [[395, "torch.Tensor.long"]], "lt() (torch.tensor method)": [[396, "torch.Tensor.lt"]], "lt_() (torch.tensor 
method)": [[397, "torch.Tensor.lt_"]], "lu() (torch.tensor method)": [[398, "torch.Tensor.lu"]], "lu_solve() (torch.tensor method)": [[399, "torch.Tensor.lu_solve"]], "map_() (torch.tensor method)": [[400, "torch.Tensor.map_"]], "masked_fill() (torch.tensor method)": [[401, "torch.Tensor.masked_fill"]], "masked_fill_() (torch.tensor method)": [[402, "torch.Tensor.masked_fill_"]], "masked_scatter() (torch.tensor method)": [[403, "torch.Tensor.masked_scatter"]], "masked_scatter_() (torch.tensor method)": [[404, "torch.Tensor.masked_scatter_"]], "masked_select() (torch.tensor method)": [[405, "torch.Tensor.masked_select"]], "matmul() (torch.tensor method)": [[406, "torch.Tensor.matmul"]], "matrix_exp() (torch.tensor method)": [[407, "torch.Tensor.matrix_exp"]], "matrix_power() (torch.tensor method)": [[408, "torch.Tensor.matrix_power"]], "max() (torch.tensor method)": [[409, "torch.Tensor.max"]], "maximum() (torch.tensor method)": [[410, "torch.Tensor.maximum"]], "mean() (torch.tensor method)": [[411, "torch.Tensor.mean"]], "median() (torch.tensor method)": [[412, "torch.Tensor.median"]], "min() (torch.tensor method)": [[413, "torch.Tensor.min"]], "minimum() (torch.tensor method)": [[414, "torch.Tensor.minimum"]], "mm() (torch.tensor method)": [[415, "torch.Tensor.mm"]], "mode() (torch.tensor method)": [[416, "torch.Tensor.mode"]], "module_load() (torch.tensor method)": [[417, "torch.Tensor.module_load"]], "moveaxis() (torch.tensor method)": [[418, "torch.Tensor.moveaxis"]], "movedim() (torch.tensor method)": [[419, "torch.Tensor.movedim"]], "msort() (torch.tensor method)": [[420, "torch.Tensor.msort"]], "mul() (torch.tensor method)": [[421, "torch.Tensor.mul"]], "mul_() (torch.tensor method)": [[422, "torch.Tensor.mul_"]], "multinomial() (torch.tensor method)": [[423, "torch.Tensor.multinomial"]], "multiply() (torch.tensor method)": [[424, "torch.Tensor.multiply"]], "multiply_() (torch.tensor method)": [[425, "torch.Tensor.multiply_"]], "mv() (torch.tensor method)": [[426, "torch.Tensor.mv"]], "mvlgamma() (torch.tensor method)": [[427, "torch.Tensor.mvlgamma"]], "mvlgamma_() (torch.tensor method)": [[428, "torch.Tensor.mvlgamma_"]], "nan_to_num() (torch.tensor method)": [[429, "torch.Tensor.nan_to_num"]], "nan_to_num_() (torch.tensor method)": [[430, "torch.Tensor.nan_to_num_"]], "nanmean() (torch.tensor method)": [[431, "torch.Tensor.nanmean"]], "nanmedian() (torch.tensor method)": [[432, "torch.Tensor.nanmedian"]], "nanquantile() (torch.tensor method)": [[433, "torch.Tensor.nanquantile"]], "nansum() (torch.tensor method)": [[434, "torch.Tensor.nansum"]], "narrow() (torch.tensor method)": [[435, "torch.Tensor.narrow"]], "narrow_copy() (torch.tensor method)": [[436, "torch.Tensor.narrow_copy"]], "nbytes (torch.tensor attribute)": [[437, "torch.Tensor.nbytes"]], "ndim (torch.tensor attribute)": [[438, "torch.Tensor.ndim"]], "ndimension() (torch.tensor method)": [[439, "torch.Tensor.ndimension"]], "ne() (torch.tensor method)": [[440, "torch.Tensor.ne"]], "ne_() (torch.tensor method)": [[441, "torch.Tensor.ne_"]], "neg() (torch.tensor method)": [[442, "torch.Tensor.neg"]], "neg_() (torch.tensor method)": [[443, "torch.Tensor.neg_"]], "negative() (torch.tensor method)": [[444, "torch.Tensor.negative"]], "negative_() (torch.tensor method)": [[445, "torch.Tensor.negative_"]], "nelement() (torch.tensor method)": [[446, "torch.Tensor.nelement"]], "new_empty() (torch.tensor method)": [[447, "torch.Tensor.new_empty"]], "new_full() (torch.tensor method)": [[448, "torch.Tensor.new_full"]], "new_ones() 
(torch.tensor method)": [[449, "torch.Tensor.new_ones"]], "new_tensor() (torch.tensor method)": [[450, "torch.Tensor.new_tensor"]], "new_zeros() (torch.tensor method)": [[451, "torch.Tensor.new_zeros"]], "nextafter() (torch.tensor method)": [[452, "torch.Tensor.nextafter"]], "nextafter_() (torch.tensor method)": [[453, "torch.Tensor.nextafter_"]], "nonzero() (torch.tensor method)": [[454, "torch.Tensor.nonzero"]], "norm() (torch.tensor method)": [[455, "torch.Tensor.norm"]], "normal_() (torch.tensor method)": [[456, "torch.Tensor.normal_"]], "not_equal() (torch.tensor method)": [[457, "torch.Tensor.not_equal"]], "not_equal_() (torch.tensor method)": [[458, "torch.Tensor.not_equal_"]], "numel() (torch.tensor method)": [[459, "torch.Tensor.numel"]], "numpy() (torch.tensor method)": [[460, "torch.Tensor.numpy"]], "orgqr() (torch.tensor method)": [[461, "torch.Tensor.orgqr"]], "ormqr() (torch.tensor method)": [[462, "torch.Tensor.ormqr"]], "outer() (torch.tensor method)": [[463, "torch.Tensor.outer"]], "permute() (torch.tensor method)": [[464, "torch.Tensor.permute"]], "pin_memory() (torch.tensor method)": [[465, "torch.Tensor.pin_memory"]], "pinverse() (torch.tensor method)": [[466, "torch.Tensor.pinverse"]], "polygamma() (torch.tensor method)": [[467, "torch.Tensor.polygamma"]], "polygamma_() (torch.tensor method)": [[468, "torch.Tensor.polygamma_"]], "positive() (torch.tensor method)": [[469, "torch.Tensor.positive"]], "pow() (torch.tensor method)": [[470, "torch.Tensor.pow"]], "pow_() (torch.tensor method)": [[471, "torch.Tensor.pow_"]], "prod() (torch.tensor method)": [[472, "torch.Tensor.prod"]], "put_() (torch.tensor method)": [[473, "torch.Tensor.put_"]], "q_per_channel_axis() (torch.tensor method)": [[474, "torch.Tensor.q_per_channel_axis"]], "q_per_channel_scales() (torch.tensor method)": [[475, "torch.Tensor.q_per_channel_scales"]], "q_per_channel_zero_points() (torch.tensor method)": [[476, "torch.Tensor.q_per_channel_zero_points"]], "q_scale() (torch.tensor method)": [[477, "torch.Tensor.q_scale"]], "q_zero_point() (torch.tensor method)": [[478, "torch.Tensor.q_zero_point"]], "qr() (torch.tensor method)": [[479, "torch.Tensor.qr"]], "qscheme() (torch.tensor method)": [[480, "torch.Tensor.qscheme"]], "quantile() (torch.tensor method)": [[481, "torch.Tensor.quantile"]], "rad2deg() (torch.tensor method)": [[482, "torch.Tensor.rad2deg"]], "random_() (torch.tensor method)": [[483, "torch.Tensor.random_"]], "ravel() (torch.tensor method)": [[484, "torch.Tensor.ravel"]], "real (torch.tensor attribute)": [[485, "torch.Tensor.real"]], "reciprocal() (torch.tensor method)": [[486, "torch.Tensor.reciprocal"]], "reciprocal_() (torch.tensor method)": [[487, "torch.Tensor.reciprocal_"]], "record_stream() (torch.tensor method)": [[488, "torch.Tensor.record_stream"]], "register_hook() (torch.tensor method)": [[489, "torch.Tensor.register_hook"]], "register_post_accumulate_grad_hook() (torch.tensor method)": [[490, "torch.Tensor.register_post_accumulate_grad_hook"]], "remainder() (torch.tensor method)": [[491, "torch.Tensor.remainder"]], "remainder_() (torch.tensor method)": [[492, "torch.Tensor.remainder_"]], "renorm() (torch.tensor method)": [[493, "torch.Tensor.renorm"]], "renorm_() (torch.tensor method)": [[494, "torch.Tensor.renorm_"]], "repeat() (torch.tensor method)": [[495, "torch.Tensor.repeat"]], "repeat_interleave() (torch.tensor method)": [[496, "torch.Tensor.repeat_interleave"]], "requires_grad (torch.tensor attribute)": [[497, "torch.Tensor.requires_grad"]], "requires_grad_() 
(torch.tensor method)": [[498, "torch.Tensor.requires_grad_"]], "reshape() (torch.tensor method)": [[499, "torch.Tensor.reshape"]], "reshape_as() (torch.tensor method)": [[500, "torch.Tensor.reshape_as"]], "resize_() (torch.tensor method)": [[501, "torch.Tensor.resize_"]], "resize_as_() (torch.tensor method)": [[502, "torch.Tensor.resize_as_"]], "resolve_conj() (torch.tensor method)": [[503, "torch.Tensor.resolve_conj"]], "resolve_neg() (torch.tensor method)": [[504, "torch.Tensor.resolve_neg"]], "retain_grad() (torch.tensor method)": [[505, "torch.Tensor.retain_grad"]], "retains_grad (torch.tensor attribute)": [[506, "torch.Tensor.retains_grad"]], "roll() (torch.tensor method)": [[507, "torch.Tensor.roll"]], "rot90() (torch.tensor method)": [[508, "torch.Tensor.rot90"]], "round() (torch.tensor method)": [[509, "torch.Tensor.round"]], "round_() (torch.tensor method)": [[510, "torch.Tensor.round_"]], "row_indices() (torch.tensor method)": [[511, "torch.Tensor.row_indices"]], "rsqrt() (torch.tensor method)": [[512, "torch.Tensor.rsqrt"]], "rsqrt_() (torch.tensor method)": [[513, "torch.Tensor.rsqrt_"]], "scatter() (torch.tensor method)": [[514, "torch.Tensor.scatter"]], "scatter_() (torch.tensor method)": [[515, "torch.Tensor.scatter_"]], "scatter_add() (torch.tensor method)": [[516, "torch.Tensor.scatter_add"]], "scatter_add_() (torch.tensor method)": [[517, "torch.Tensor.scatter_add_"]], "scatter_reduce() (torch.tensor method)": [[518, "torch.Tensor.scatter_reduce"]], "scatter_reduce_() (torch.tensor method)": [[519, "torch.Tensor.scatter_reduce_"]], "select() (torch.tensor method)": [[520, "torch.Tensor.select"]], "select_scatter() (torch.tensor method)": [[521, "torch.Tensor.select_scatter"]], "set_() (torch.tensor method)": [[522, "torch.Tensor.set_"]], "sgn() (torch.tensor method)": [[523, "torch.Tensor.sgn"]], "sgn_() (torch.tensor method)": [[524, "torch.Tensor.sgn_"]], "shape (torch.tensor attribute)": [[525, "torch.Tensor.shape"]], "share_memory_() (torch.tensor method)": [[526, "torch.Tensor.share_memory_"]], "short() (torch.tensor method)": [[527, "torch.Tensor.short"]], "sigmoid() (torch.tensor method)": [[528, "torch.Tensor.sigmoid"]], "sigmoid_() (torch.tensor method)": [[529, "torch.Tensor.sigmoid_"]], "sign() (torch.tensor method)": [[530, "torch.Tensor.sign"]], "sign_() (torch.tensor method)": [[531, "torch.Tensor.sign_"]], "signbit() (torch.tensor method)": [[532, "torch.Tensor.signbit"]], "sin() (torch.tensor method)": [[533, "torch.Tensor.sin"]], "sin_() (torch.tensor method)": [[534, "torch.Tensor.sin_"]], "sinc() (torch.tensor method)": [[535, "torch.Tensor.sinc"]], "sinc_() (torch.tensor method)": [[536, "torch.Tensor.sinc_"]], "sinh() (torch.tensor method)": [[537, "torch.Tensor.sinh"]], "sinh_() (torch.tensor method)": [[538, "torch.Tensor.sinh_"]], "size() (torch.tensor method)": [[539, "torch.Tensor.size"]], "slice_scatter() (torch.tensor method)": [[540, "torch.Tensor.slice_scatter"]], "slogdet() (torch.tensor method)": [[541, "torch.Tensor.slogdet"]], "smm() (torch.tensor method)": [[542, "torch.Tensor.smm"]], "softmax() (torch.tensor method)": [[543, "torch.Tensor.softmax"]], "sort() (torch.tensor method)": [[544, "torch.Tensor.sort"]], "sparse_dim() (torch.tensor method)": [[545, "torch.Tensor.sparse_dim"]], "sparse_mask() (torch.tensor method)": [[546, "torch.Tensor.sparse_mask"]], "sparse_resize_() (torch.tensor method)": [[547, "torch.Tensor.sparse_resize_"]], "sparse_resize_and_clear_() (torch.tensor method)": [[548, 
"torch.Tensor.sparse_resize_and_clear_"]], "split() (torch.tensor method)": [[549, "torch.Tensor.split"]], "sqrt() (torch.tensor method)": [[550, "torch.Tensor.sqrt"]], "sqrt_() (torch.tensor method)": [[551, "torch.Tensor.sqrt_"]], "square() (torch.tensor method)": [[552, "torch.Tensor.square"]], "square_() (torch.tensor method)": [[553, "torch.Tensor.square_"]], "squeeze() (torch.tensor method)": [[554, "torch.Tensor.squeeze"]], "squeeze_() (torch.tensor method)": [[555, "torch.Tensor.squeeze_"]], "sspaddmm() (torch.tensor method)": [[556, "torch.Tensor.sspaddmm"]], "std() (torch.tensor method)": [[557, "torch.Tensor.std"]], "stft() (torch.tensor method)": [[558, "torch.Tensor.stft"]], "storage() (torch.tensor method)": [[559, "torch.Tensor.storage"]], "storage_offset() (torch.tensor method)": [[560, "torch.Tensor.storage_offset"]], "storage_type() (torch.tensor method)": [[561, "torch.Tensor.storage_type"]], "stride() (torch.tensor method)": [[562, "torch.Tensor.stride"]], "sub() (torch.tensor method)": [[563, "torch.Tensor.sub"]], "sub_() (torch.tensor method)": [[564, "torch.Tensor.sub_"]], "subtract() (torch.tensor method)": [[565, "torch.Tensor.subtract"]], "subtract_() (torch.tensor method)": [[566, "torch.Tensor.subtract_"]], "sum() (torch.tensor method)": [[567, "torch.Tensor.sum"]], "sum_to_size() (torch.tensor method)": [[568, "torch.Tensor.sum_to_size"]], "svd() (torch.tensor method)": [[569, "torch.Tensor.svd"]], "swapaxes() (torch.tensor method)": [[570, "torch.Tensor.swapaxes"]], "swapdims() (torch.tensor method)": [[571, "torch.Tensor.swapdims"]], "t() (torch.tensor method)": [[572, "torch.Tensor.t"]], "t_() (torch.tensor method)": [[573, "torch.Tensor.t_"]], "take() (torch.tensor method)": [[574, "torch.Tensor.take"]], "take_along_dim() (torch.tensor method)": [[575, "torch.Tensor.take_along_dim"]], "tan() (torch.tensor method)": [[576, "torch.Tensor.tan"]], "tan_() (torch.tensor method)": [[577, "torch.Tensor.tan_"]], "tanh() (torch.tensor method)": [[578, "torch.Tensor.tanh"]], "tanh_() (torch.tensor method)": [[579, "torch.Tensor.tanh_"]], "tensor_split() (torch.tensor method)": [[580, "torch.Tensor.tensor_split"]], "tile() (torch.tensor method)": [[581, "torch.Tensor.tile"]], "to() (torch.tensor method)": [[582, "torch.Tensor.to"]], "to_dense() (torch.tensor method)": [[583, "torch.Tensor.to_dense"]], "to_mkldnn() (torch.tensor method)": [[584, "torch.Tensor.to_mkldnn"]], "to_sparse() (torch.tensor method)": [[585, "torch.Tensor.to_sparse"]], "to_sparse_bsc() (torch.tensor method)": [[586, "torch.Tensor.to_sparse_bsc"]], "to_sparse_bsr() (torch.tensor method)": [[587, "torch.Tensor.to_sparse_bsr"]], "to_sparse_coo() (torch.tensor method)": [[588, "torch.Tensor.to_sparse_coo"]], "to_sparse_csc() (torch.tensor method)": [[589, "torch.Tensor.to_sparse_csc"]], "to_sparse_csr() (torch.tensor method)": [[590, "torch.Tensor.to_sparse_csr"]], "tolist() (torch.tensor method)": [[591, "torch.Tensor.tolist"]], "topk() (torch.tensor method)": [[592, "torch.Tensor.topk"]], "trace() (torch.tensor method)": [[593, "torch.Tensor.trace"]], "transpose() (torch.tensor method)": [[594, "torch.Tensor.transpose"]], "transpose_() (torch.tensor method)": [[595, "torch.Tensor.transpose_"]], "triangular_solve() (torch.tensor method)": [[596, "torch.Tensor.triangular_solve"]], "tril() (torch.tensor method)": [[597, "torch.Tensor.tril"]], "tril_() (torch.tensor method)": [[598, "torch.Tensor.tril_"]], "triu() (torch.tensor method)": [[599, "torch.Tensor.triu"]], "triu_() (torch.tensor method)": 
[[600, "torch.Tensor.triu_"]], "true_divide() (torch.tensor method)": [[601, "torch.Tensor.true_divide"]], "true_divide_() (torch.tensor method)": [[602, "torch.Tensor.true_divide_"]], "trunc() (torch.tensor method)": [[603, "torch.Tensor.trunc"]], "trunc_() (torch.tensor method)": [[604, "torch.Tensor.trunc_"]], "type() (torch.tensor method)": [[605, "torch.Tensor.type"]], "type_as() (torch.tensor method)": [[606, "torch.Tensor.type_as"]], "unbind() (torch.tensor method)": [[607, "torch.Tensor.unbind"]], "unflatten() (torch.tensor method)": [[608, "torch.Tensor.unflatten"]], "unfold() (torch.tensor method)": [[609, "torch.Tensor.unfold"]], "uniform_() (torch.tensor method)": [[610, "torch.Tensor.uniform_"]], "unique() (torch.tensor method)": [[611, "torch.Tensor.unique"]], "unique_consecutive() (torch.tensor method)": [[612, "torch.Tensor.unique_consecutive"]], "unsqueeze() (torch.tensor method)": [[613, "torch.Tensor.unsqueeze"]], "unsqueeze_() (torch.tensor method)": [[614, "torch.Tensor.unsqueeze_"]], "untyped_storage() (torch.tensor method)": [[615, "torch.Tensor.untyped_storage"]], "values() (torch.tensor method)": [[616, "torch.Tensor.values"]], "var() (torch.tensor method)": [[617, "torch.Tensor.var"]], "vdot() (torch.tensor method)": [[618, "torch.Tensor.vdot"]], "view() (torch.tensor method)": [[619, "torch.Tensor.view"]], "view_as() (torch.tensor method)": [[620, "torch.Tensor.view_as"]], "vsplit() (torch.tensor method)": [[621, "torch.Tensor.vsplit"]], "where() (torch.tensor method)": [[622, "torch.Tensor.where"]], "xlogy() (torch.tensor method)": [[623, "torch.Tensor.xlogy"]], "xlogy_() (torch.tensor method)": [[624, "torch.Tensor.xlogy_"]], "zero_() (torch.tensor method)": [[625, "torch.Tensor.zero_"]], "_assert() (in module torch)": [[626, "torch._assert"]], "_foreach_abs() (in module torch)": [[627, "torch._foreach_abs"]], "_foreach_abs_() (in module torch)": [[628, "torch._foreach_abs_"]], "_foreach_acos() (in module torch)": [[629, "torch._foreach_acos"]], "_foreach_acos_() (in module torch)": [[630, "torch._foreach_acos_"]], "_foreach_asin() (in module torch)": [[631, "torch._foreach_asin"]], "_foreach_asin_() (in module torch)": [[632, "torch._foreach_asin_"]], "_foreach_atan() (in module torch)": [[633, "torch._foreach_atan"]], "_foreach_atan_() (in module torch)": [[634, "torch._foreach_atan_"]], "_foreach_ceil() (in module torch)": [[635, "torch._foreach_ceil"]], "_foreach_ceil_() (in module torch)": [[636, "torch._foreach_ceil_"]], "_foreach_cos() (in module torch)": [[637, "torch._foreach_cos"]], "_foreach_cos_() (in module torch)": [[638, "torch._foreach_cos_"]], "_foreach_cosh() (in module torch)": [[639, "torch._foreach_cosh"]], "_foreach_cosh_() (in module torch)": [[640, "torch._foreach_cosh_"]], "_foreach_erf() (in module torch)": [[641, "torch._foreach_erf"]], "_foreach_erf_() (in module torch)": [[642, "torch._foreach_erf_"]], "_foreach_erfc() (in module torch)": [[643, "torch._foreach_erfc"]], "_foreach_erfc_() (in module torch)": [[644, "torch._foreach_erfc_"]], "_foreach_exp() (in module torch)": [[645, "torch._foreach_exp"]], "_foreach_exp_() (in module torch)": [[646, "torch._foreach_exp_"]], "_foreach_expm1() (in module torch)": [[647, "torch._foreach_expm1"]], "_foreach_expm1_() (in module torch)": [[648, "torch._foreach_expm1_"]], "_foreach_floor() (in module torch)": [[649, "torch._foreach_floor"]], "_foreach_floor_() (in module torch)": [[650, "torch._foreach_floor_"]], "_foreach_frac() (in module torch)": [[651, "torch._foreach_frac"]], 
"_foreach_frac_() (in module torch)": [[652, "torch._foreach_frac_"]], "_foreach_lgamma() (in module torch)": [[653, "torch._foreach_lgamma"]], "_foreach_lgamma_() (in module torch)": [[654, "torch._foreach_lgamma_"]], "_foreach_log() (in module torch)": [[655, "torch._foreach_log"]], "_foreach_log10() (in module torch)": [[656, "torch._foreach_log10"]], "_foreach_log10_() (in module torch)": [[657, "torch._foreach_log10_"]], "_foreach_log1p() (in module torch)": [[658, "torch._foreach_log1p"]], "_foreach_log1p_() (in module torch)": [[659, "torch._foreach_log1p_"]], "_foreach_log2() (in module torch)": [[660, "torch._foreach_log2"]], "_foreach_log2_() (in module torch)": [[661, "torch._foreach_log2_"]], "_foreach_log_() (in module torch)": [[662, "torch._foreach_log_"]], "_foreach_neg() (in module torch)": [[663, "torch._foreach_neg"]], "_foreach_neg_() (in module torch)": [[664, "torch._foreach_neg_"]], "_foreach_reciprocal() (in module torch)": [[665, "torch._foreach_reciprocal"]], "_foreach_reciprocal_() (in module torch)": [[666, "torch._foreach_reciprocal_"]], "_foreach_round() (in module torch)": [[667, "torch._foreach_round"]], "_foreach_round_() (in module torch)": [[668, "torch._foreach_round_"]], "_foreach_sigmoid() (in module torch)": [[669, "torch._foreach_sigmoid"]], "_foreach_sigmoid_() (in module torch)": [[670, "torch._foreach_sigmoid_"]], "_foreach_sin() (in module torch)": [[671, "torch._foreach_sin"]], "_foreach_sin_() (in module torch)": [[672, "torch._foreach_sin_"]], "_foreach_sinh() (in module torch)": [[673, "torch._foreach_sinh"]], "_foreach_sinh_() (in module torch)": [[674, "torch._foreach_sinh_"]], "_foreach_sqrt() (in module torch)": [[675, "torch._foreach_sqrt"]], "_foreach_sqrt_() (in module torch)": [[676, "torch._foreach_sqrt_"]], "_foreach_tan() (in module torch)": [[677, "torch._foreach_tan"]], "_foreach_tan_() (in module torch)": [[678, "torch._foreach_tan_"]], "_foreach_trunc() (in module torch)": [[679, "torch._foreach_trunc"]], "_foreach_trunc_() (in module torch)": [[680, "torch._foreach_trunc_"]], "_foreach_zero_() (in module torch)": [[681, "torch._foreach_zero_"]], "set_logs() (in module torch._logging)": [[682, "torch._logging.set_logs"]], "abs() (in module torch)": [[683, "torch.abs"]], "absolute() (in module torch)": [[684, "torch.absolute"]], "acos() (in module torch)": [[685, "torch.acos"]], "acosh() (in module torch)": [[686, "torch.acosh"]], "add() (in module torch)": [[687, "torch.add"]], "addbmm() (in module torch)": [[688, "torch.addbmm"]], "addcdiv() (in module torch)": [[689, "torch.addcdiv"]], "addcmul() (in module torch)": [[690, "torch.addcmul"]], "addmm() (in module torch)": [[691, "torch.addmm"]], "addmv() (in module torch)": [[692, "torch.addmv"]], "addr() (in module torch)": [[693, "torch.addr"]], "adjoint() (in module torch)": [[694, "torch.adjoint"]], "all() (in module torch)": [[695, "torch.all"]], "allclose() (in module torch)": [[696, "torch.allclose"]], "amax() (in module torch)": [[697, "torch.amax"]], "amin() (in module torch)": [[698, "torch.amin"]], "aminmax() (in module torch)": [[699, "torch.aminmax"]], "angle() (in module torch)": [[700, "torch.angle"]], "any() (in module torch)": [[701, "torch.any"]], "bnrelu2d (class in torch.ao.nn.intrinsic)": [[702, "torch.ao.nn.intrinsic.BNReLU2d"]], "bnrelu3d (class in torch.ao.nn.intrinsic)": [[703, "torch.ao.nn.intrinsic.BNReLU3d"]], "convbn1d (class in torch.ao.nn.intrinsic)": [[704, "torch.ao.nn.intrinsic.ConvBn1d"]], "convbn2d (class in torch.ao.nn.intrinsic)": [[705, 
"torch.ao.nn.intrinsic.ConvBn2d"]], "convbn3d (class in torch.ao.nn.intrinsic)": [[706, "torch.ao.nn.intrinsic.ConvBn3d"]], "convbnrelu1d (class in torch.ao.nn.intrinsic)": [[707, "torch.ao.nn.intrinsic.ConvBnReLU1d"]], "convbnrelu2d (class in torch.ao.nn.intrinsic)": [[708, "torch.ao.nn.intrinsic.ConvBnReLU2d"]], "convbnrelu3d (class in torch.ao.nn.intrinsic)": [[709, "torch.ao.nn.intrinsic.ConvBnReLU3d"]], "convrelu1d (class in torch.ao.nn.intrinsic)": [[710, "torch.ao.nn.intrinsic.ConvReLU1d"]], "convrelu2d (class in torch.ao.nn.intrinsic)": [[711, "torch.ao.nn.intrinsic.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic)": [[712, "torch.ao.nn.intrinsic.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic)": [[713, "torch.ao.nn.intrinsic.LinearReLU"]], "convbn1d (class in torch.ao.nn.intrinsic.qat)": [[714, "torch.ao.nn.intrinsic.qat.ConvBn1d"]], "convbn2d (class in torch.ao.nn.intrinsic.qat)": [[715, "torch.ao.nn.intrinsic.qat.ConvBn2d"]], "convbn3d (class in torch.ao.nn.intrinsic.qat)": [[716, "torch.ao.nn.intrinsic.qat.ConvBn3d"]], "convbnrelu1d (class in torch.ao.nn.intrinsic.qat)": [[717, "torch.ao.nn.intrinsic.qat.ConvBnReLU1d"]], "convbnrelu2d (class in torch.ao.nn.intrinsic.qat)": [[718, "torch.ao.nn.intrinsic.qat.ConvBnReLU2d"]], "convbnrelu3d (class in torch.ao.nn.intrinsic.qat)": [[719, "torch.ao.nn.intrinsic.qat.ConvBnReLU3d"]], "convrelu2d (class in torch.ao.nn.intrinsic.qat)": [[720, "torch.ao.nn.intrinsic.qat.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic.qat)": [[721, "torch.ao.nn.intrinsic.qat.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic.qat)": [[722, "torch.ao.nn.intrinsic.qat.LinearReLU"]], "freeze_bn_stats (class in torch.ao.nn.intrinsic.qat)": [[723, "torch.ao.nn.intrinsic.qat.freeze_bn_stats"]], "update_bn_stats (class in torch.ao.nn.intrinsic.qat)": [[724, "torch.ao.nn.intrinsic.qat.update_bn_stats"]], "bnrelu2d (class in torch.ao.nn.intrinsic.quantized)": [[725, "torch.ao.nn.intrinsic.quantized.BNReLU2d"]], "bnrelu3d (class in torch.ao.nn.intrinsic.quantized)": [[726, "torch.ao.nn.intrinsic.quantized.BNReLU3d"]], "convrelu1d (class in torch.ao.nn.intrinsic.quantized)": [[727, "torch.ao.nn.intrinsic.quantized.ConvReLU1d"]], "convrelu2d (class in torch.ao.nn.intrinsic.quantized)": [[728, "torch.ao.nn.intrinsic.quantized.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic.quantized)": [[729, "torch.ao.nn.intrinsic.quantized.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic.quantized)": [[730, "torch.ao.nn.intrinsic.quantized.LinearReLU"]], "linearrelu (class in torch.ao.nn.intrinsic.quantized.dynamic)": [[731, "torch.ao.nn.intrinsic.quantized.dynamic.LinearReLU"]], "conv2d (class in torch.ao.nn.qat)": [[732, "torch.ao.nn.qat.Conv2d"]], "conv3d (class in torch.ao.nn.qat)": [[733, "torch.ao.nn.qat.Conv3d"]], "linear (class in torch.ao.nn.qat)": [[734, "torch.ao.nn.qat.Linear"]], "from_float() (torch.ao.nn.qat.linear class method)": [[734, "torch.ao.nn.qat.Linear.from_float"]], "linear (class in torch.ao.nn.qat.dynamic)": [[735, "torch.ao.nn.qat.dynamic.Linear"]], "lstm (class in torch.ao.nn.quantizable)": [[736, "torch.ao.nn.quantizable.LSTM"]], "multiheadattention (class in torch.ao.nn.quantizable)": [[737, "torch.ao.nn.quantizable.MultiheadAttention"]], "dequantize() (torch.ao.nn.quantizable.multiheadattention method)": [[737, "torch.ao.nn.quantizable.MultiheadAttention.dequantize"]], "forward() (torch.ao.nn.quantizable.multiheadattention method)": [[737, "torch.ao.nn.quantizable.MultiheadAttention.forward"]], 
"batchnorm2d (class in torch.ao.nn.quantized)": [[738, "torch.ao.nn.quantized.BatchNorm2d"]], "batchnorm3d (class in torch.ao.nn.quantized)": [[739, "torch.ao.nn.quantized.BatchNorm3d"]], "conv1d (class in torch.ao.nn.quantized)": [[740, "torch.ao.nn.quantized.Conv1d"]], "from_float() (torch.ao.nn.quantized.conv1d class method)": [[740, "torch.ao.nn.quantized.Conv1d.from_float"]], "conv2d (class in torch.ao.nn.quantized)": [[741, "torch.ao.nn.quantized.Conv2d"]], "from_float() (torch.ao.nn.quantized.conv2d class method)": [[741, "torch.ao.nn.quantized.Conv2d.from_float"]], "conv3d (class in torch.ao.nn.quantized)": [[742, "torch.ao.nn.quantized.Conv3d"]], "from_float() (torch.ao.nn.quantized.conv3d class method)": [[742, "torch.ao.nn.quantized.Conv3d.from_float"]], "convtranspose1d (class in torch.ao.nn.quantized)": [[743, "torch.ao.nn.quantized.ConvTranspose1d"]], "convtranspose2d (class in torch.ao.nn.quantized)": [[744, "torch.ao.nn.quantized.ConvTranspose2d"]], "convtranspose3d (class in torch.ao.nn.quantized)": [[745, "torch.ao.nn.quantized.ConvTranspose3d"]], "elu (class in torch.ao.nn.quantized)": [[746, "torch.ao.nn.quantized.ELU"]], "embedding (class in torch.ao.nn.quantized)": [[747, "torch.ao.nn.quantized.Embedding"]], "from_float() (torch.ao.nn.quantized.embedding class method)": [[747, "torch.ao.nn.quantized.Embedding.from_float"]], "embeddingbag (class in torch.ao.nn.quantized)": [[748, "torch.ao.nn.quantized.EmbeddingBag"]], "from_float() (torch.ao.nn.quantized.embeddingbag class method)": [[748, "torch.ao.nn.quantized.EmbeddingBag.from_float"]], "fxfloatfunctional (class in torch.ao.nn.quantized)": [[749, "torch.ao.nn.quantized.FXFloatFunctional"]], "floatfunctional (class in torch.ao.nn.quantized)": [[750, "torch.ao.nn.quantized.FloatFunctional"]], "groupnorm (class in torch.ao.nn.quantized)": [[751, "torch.ao.nn.quantized.GroupNorm"]], "hardswish (class in torch.ao.nn.quantized)": [[752, "torch.ao.nn.quantized.Hardswish"]], "instancenorm1d (class in torch.ao.nn.quantized)": [[753, "torch.ao.nn.quantized.InstanceNorm1d"]], "instancenorm2d (class in torch.ao.nn.quantized)": [[754, "torch.ao.nn.quantized.InstanceNorm2d"]], "instancenorm3d (class in torch.ao.nn.quantized)": [[755, "torch.ao.nn.quantized.InstanceNorm3d"]], "layernorm (class in torch.ao.nn.quantized)": [[756, "torch.ao.nn.quantized.LayerNorm"]], "leakyrelu (class in torch.ao.nn.quantized)": [[757, "torch.ao.nn.quantized.LeakyReLU"]], "linear (class in torch.ao.nn.quantized)": [[758, "torch.ao.nn.quantized.Linear"]], "from_float() (torch.ao.nn.quantized.linear class method)": [[758, "torch.ao.nn.quantized.Linear.from_float"]], "from_reference() (torch.ao.nn.quantized.linear class method)": [[758, "torch.ao.nn.quantized.Linear.from_reference"]], "qfunctional (class in torch.ao.nn.quantized)": [[759, "torch.ao.nn.quantized.QFunctional"]], "relu6 (class in torch.ao.nn.quantized)": [[760, "torch.ao.nn.quantized.ReLU6"]], "sigmoid (class in torch.ao.nn.quantized)": [[761, "torch.ao.nn.quantized.Sigmoid"]], "gru (class in torch.ao.nn.quantized.dynamic)": [[762, "torch.ao.nn.quantized.dynamic.GRU"]], "grucell (class in torch.ao.nn.quantized.dynamic)": [[763, "torch.ao.nn.quantized.dynamic.GRUCell"]], "lstm (class in torch.ao.nn.quantized.dynamic)": [[764, "torch.ao.nn.quantized.dynamic.LSTM"]], "lstmcell (class in torch.ao.nn.quantized.dynamic)": [[765, "torch.ao.nn.quantized.dynamic.LSTMCell"]], "linear (class in torch.ao.nn.quantized.dynamic)": [[766, "torch.ao.nn.quantized.dynamic.Linear"]], "from_float() 
(torch.ao.nn.quantized.dynamic.linear class method)": [[766, "torch.ao.nn.quantized.dynamic.Linear.from_float"]], "from_reference() (torch.ao.nn.quantized.dynamic.linear class method)": [[766, "torch.ao.nn.quantized.dynamic.Linear.from_reference"]], "rnncell (class in torch.ao.nn.quantized.dynamic)": [[767, "torch.ao.nn.quantized.dynamic.RNNCell"]], "adaptive_avg_pool2d (class in torch.ao.nn.quantized.functional)": [[768, "torch.ao.nn.quantized.functional.adaptive_avg_pool2d"]], "adaptive_avg_pool3d (class in torch.ao.nn.quantized.functional)": [[769, "torch.ao.nn.quantized.functional.adaptive_avg_pool3d"]], "avg_pool2d (class in torch.ao.nn.quantized.functional)": [[770, "torch.ao.nn.quantized.functional.avg_pool2d"]], "avg_pool3d (class in torch.ao.nn.quantized.functional)": [[771, "torch.ao.nn.quantized.functional.avg_pool3d"]], "celu (class in torch.ao.nn.quantized.functional)": [[772, "torch.ao.nn.quantized.functional.celu"]], "clamp (class in torch.ao.nn.quantized.functional)": [[773, "torch.ao.nn.quantized.functional.clamp"]], "conv1d (class in torch.ao.nn.quantized.functional)": [[774, "torch.ao.nn.quantized.functional.conv1d"]], "conv2d (class in torch.ao.nn.quantized.functional)": [[775, "torch.ao.nn.quantized.functional.conv2d"]], "conv3d (class in torch.ao.nn.quantized.functional)": [[776, "torch.ao.nn.quantized.functional.conv3d"]], "elu (class in torch.ao.nn.quantized.functional)": [[777, "torch.ao.nn.quantized.functional.elu"]], "hardsigmoid (class in torch.ao.nn.quantized.functional)": [[778, "torch.ao.nn.quantized.functional.hardsigmoid"]], "hardswish (class in torch.ao.nn.quantized.functional)": [[779, "torch.ao.nn.quantized.functional.hardswish"]], "hardtanh (class in torch.ao.nn.quantized.functional)": [[780, "torch.ao.nn.quantized.functional.hardtanh"]], "interpolate (class in torch.ao.nn.quantized.functional)": [[781, "torch.ao.nn.quantized.functional.interpolate"]], "leaky_relu (class in torch.ao.nn.quantized.functional)": [[782, "torch.ao.nn.quantized.functional.leaky_relu"]], "linear (class in torch.ao.nn.quantized.functional)": [[783, "torch.ao.nn.quantized.functional.linear"]], "max_pool1d (class in torch.ao.nn.quantized.functional)": [[784, "torch.ao.nn.quantized.functional.max_pool1d"]], "max_pool2d (class in torch.ao.nn.quantized.functional)": [[785, "torch.ao.nn.quantized.functional.max_pool2d"]], "threshold (class in torch.ao.nn.quantized.functional)": [[786, "torch.ao.nn.quantized.functional.threshold"]], "upsample (class in torch.ao.nn.quantized.functional)": [[787, "torch.ao.nn.quantized.functional.upsample"]], "upsample_bilinear (class in torch.ao.nn.quantized.functional)": [[788, "torch.ao.nn.quantized.functional.upsample_bilinear"]], "upsample_nearest (class in torch.ao.nn.quantized.functional)": [[789, "torch.ao.nn.quantized.functional.upsample_nearest"]], "dequantstub (class in torch.ao.quantization)": [[790, "torch.ao.quantization.DeQuantStub"]], "quantstub (class in torch.ao.quantization)": [[791, "torch.ao.quantization.QuantStub"]], "quantwrapper (class in torch.ao.quantization)": [[792, "torch.ao.quantization.QuantWrapper"]], "add_quant_dequant (class in torch.ao.quantization)": [[793, "torch.ao.quantization.add_quant_dequant"]], "backendconfig (class in torch.ao.quantization.backend_config)": [[794, "torch.ao.quantization.backend_config.BackendConfig"]], "configs (torch.ao.quantization.backend_config.backendconfig property)": [[794, "torch.ao.quantization.backend_config.BackendConfig.configs"]], "from_dict() 
(torch.ao.quantization.backend_config.backendconfig class method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.from_dict"]], "set_backend_pattern_config() (torch.ao.quantization.backend_config.backendconfig method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.set_backend_pattern_config"]], "set_backend_pattern_configs() (torch.ao.quantization.backend_config.backendconfig method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.set_backend_pattern_configs"]], "set_name() (torch.ao.quantization.backend_config.backendconfig method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.set_name"]], "to_dict() (torch.ao.quantization.backend_config.backendconfig method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.to_dict"]], "backendpatternconfig (class in torch.ao.quantization.backend_config)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig"]], "add_dtype_config() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.add_dtype_config"]], "from_dict() (torch.ao.quantization.backend_config.backendpatternconfig class method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.from_dict"]], "set_dtype_configs() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_dtype_configs"]], "set_fused_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_fused_module"]], "set_fuser_method() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_fuser_method"]], "set_observation_type() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_observation_type"]], "set_pattern() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_pattern"]], "set_qat_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_qat_module"]], "set_reference_quantized_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_reference_quantized_module"]], "set_root_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_root_module"]], "to_dict() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.to_dict"]], "dtypeconfig (class in torch.ao.quantization.backend_config)": [[796, "torch.ao.quantization.backend_config.DTypeConfig"]], "from_dict() (torch.ao.quantization.backend_config.dtypeconfig class method)": [[796, "torch.ao.quantization.backend_config.DTypeConfig.from_dict"]], "to_dict() (torch.ao.quantization.backend_config.dtypeconfig method)": [[796, "torch.ao.quantization.backend_config.DTypeConfig.to_dict"]], "dtypewithconstraints (class in torch.ao.quantization.backend_config)": [[797, "torch.ao.quantization.backend_config.DTypeWithConstraints"]], "input_output_not_observed (torch.ao.quantization.backend_config.observationtype attribute)": [[798, 
"torch.ao.quantization.backend_config.ObservationType.INPUT_OUTPUT_NOT_OBSERVED"]], "output_share_observer_with_input (torch.ao.quantization.backend_config.observationtype attribute)": [[798, "torch.ao.quantization.backend_config.ObservationType.OUTPUT_SHARE_OBSERVER_WITH_INPUT"]], "output_use_different_observer_as_input (torch.ao.quantization.backend_config.observationtype attribute)": [[798, "torch.ao.quantization.backend_config.ObservationType.OUTPUT_USE_DIFFERENT_OBSERVER_AS_INPUT"]], "observationtype (class in torch.ao.quantization.backend_config)": [[798, "torch.ao.quantization.backend_config.ObservationType"]], "convert (class in torch.ao.quantization)": [[799, "torch.ao.quantization.convert"]], "default_eval_fn (class in torch.ao.quantization)": [[800, "torch.ao.quantization.default_eval_fn"]], "fakequantize (class in torch.ao.quantization.fake_quantize)": [[801, "torch.ao.quantization.fake_quantize.FakeQuantize"]], "fakequantizebase (class in torch.ao.quantization.fake_quantize)": [[802, "torch.ao.quantization.fake_quantize.FakeQuantizeBase"]], "fixedqparamsfakequantize (class in torch.ao.quantization.fake_quantize)": [[803, "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize"]], "extra_repr() (torch.ao.quantization.fake_quantize.fixedqparamsfakequantize method)": [[803, "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize.extra_repr"]], "fusedmovingavgobsfakequantize (class in torch.ao.quantization.fake_quantize)": [[804, "torch.ao.quantization.fake_quantize.FusedMovingAvgObsFakeQuantize"]], "default_fake_quant (in module torch.ao.quantization.fake_quantize)": [[805, "torch.ao.quantization.fake_quantize.default_fake_quant"]], "default_fused_act_fake_quant (in module torch.ao.quantization.fake_quantize)": [[806, "torch.ao.quantization.fake_quantize.default_fused_act_fake_quant"]], "default_fused_per_channel_wt_fake_quant (in module torch.ao.quantization.fake_quantize)": [[807, "torch.ao.quantization.fake_quantize.default_fused_per_channel_wt_fake_quant"]], "default_fused_wt_fake_quant (in module torch.ao.quantization.fake_quantize)": [[808, "torch.ao.quantization.fake_quantize.default_fused_wt_fake_quant"]], "default_histogram_fake_quant (in module torch.ao.quantization.fake_quantize)": [[809, "torch.ao.quantization.fake_quantize.default_histogram_fake_quant"]], "default_per_channel_weight_fake_quant (in module torch.ao.quantization.fake_quantize)": [[810, "torch.ao.quantization.fake_quantize.default_per_channel_weight_fake_quant"]], "default_weight_fake_quant (in module torch.ao.quantization.fake_quantize)": [[811, "torch.ao.quantization.fake_quantize.default_weight_fake_quant"]], "disable_fake_quant (class in torch.ao.quantization.fake_quantize)": [[812, "torch.ao.quantization.fake_quantize.disable_fake_quant"]], "disable_observer (class in torch.ao.quantization.fake_quantize)": [[813, "torch.ao.quantization.fake_quantize.disable_observer"]], "enable_fake_quant (class in torch.ao.quantization.fake_quantize)": [[814, "torch.ao.quantization.fake_quantize.enable_fake_quant"]], "enable_observer (class in torch.ao.quantization.fake_quantize)": [[815, "torch.ao.quantization.fake_quantize.enable_observer"]], "fuse_modules (class in torch.ao.quantization.fuse_modules)": [[816, "torch.ao.quantization.fuse_modules.fuse_modules"]], "convertcustomconfig (class in torch.ao.quantization.fx.custom_config)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.convertcustomconfig class method)": [[817, 
"torch.ao.quantization.fx.custom_config.ConvertCustomConfig.from_dict"]], "set_observed_to_quantized_mapping() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.set_observed_to_quantized_mapping"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.set_preserved_attributes"]], "to_dict() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.to_dict"]], "fusecustomconfig (class in torch.ao.quantization.fx.custom_config)": [[818, "torch.ao.quantization.fx.custom_config.FuseCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.fusecustomconfig class method)": [[818, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.from_dict"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.fusecustomconfig method)": [[818, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.set_preserved_attributes"]], "to_dict() (torch.ao.quantization.fx.custom_config.fusecustomconfig method)": [[818, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.to_dict"]], "preparecustomconfig (class in torch.ao.quantization.fx.custom_config)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.preparecustomconfig class method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.from_dict"]], "set_float_to_observed_mapping() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_float_to_observed_mapping"]], "set_input_quantized_indexes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_input_quantized_indexes"]], "set_non_traceable_module_classes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_non_traceable_module_classes"]], "set_non_traceable_module_names() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_non_traceable_module_names"]], "set_output_quantized_indexes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_output_quantized_indexes"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_preserved_attributes"]], "set_standalone_module_class() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_standalone_module_class"]], "set_standalone_module_name() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_standalone_module_name"]], "to_dict() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.to_dict"]], "standalonemoduleconfigentry (class in torch.ao.quantization.fx.custom_config)": [[820, "torch.ao.quantization.fx.custom_config.StandaloneModuleConfigEntry"]], "histogramobserver 
(class in torch.ao.quantization.observer)": [[821, "torch.ao.quantization.observer.HistogramObserver"]], "minmaxobserver (class in torch.ao.quantization.observer)": [[822, "torch.ao.quantization.observer.MinMaxObserver"]], "calculate_qparams() (torch.ao.quantization.observer.minmaxobserver method)": [[822, "torch.ao.quantization.observer.MinMaxObserver.calculate_qparams"]], "forward() (torch.ao.quantization.observer.minmaxobserver method)": [[822, "torch.ao.quantization.observer.MinMaxObserver.forward"]], "reset_min_max_vals() (torch.ao.quantization.observer.minmaxobserver method)": [[822, "torch.ao.quantization.observer.MinMaxObserver.reset_min_max_vals"]], "movingaverageminmaxobserver (class in torch.ao.quantization.observer)": [[823, "torch.ao.quantization.observer.MovingAverageMinMaxObserver"]], "movingaverageperchannelminmaxobserver (class in torch.ao.quantization.observer)": [[824, "torch.ao.quantization.observer.MovingAveragePerChannelMinMaxObserver"]], "noopobserver (class in torch.ao.quantization.observer)": [[825, "torch.ao.quantization.observer.NoopObserver"]], "observerbase (class in torch.ao.quantization.observer)": [[826, "torch.ao.quantization.observer.ObserverBase"]], "with_args() (torch.ao.quantization.observer.observerbase class method)": [[826, "torch.ao.quantization.observer.ObserverBase.with_args"]], "with_callable_args() (torch.ao.quantization.observer.observerbase class method)": [[826, "torch.ao.quantization.observer.ObserverBase.with_callable_args"]], "perchannelminmaxobserver (class in torch.ao.quantization.observer)": [[827, "torch.ao.quantization.observer.PerChannelMinMaxObserver"]], "reset_min_max_vals() (torch.ao.quantization.observer.perchannelminmaxobserver method)": [[827, "torch.ao.quantization.observer.PerChannelMinMaxObserver.reset_min_max_vals"]], "placeholderobserver (class in torch.ao.quantization.observer)": [[828, "torch.ao.quantization.observer.PlaceholderObserver"]], "recordingobserver (class in torch.ao.quantization.observer)": [[829, "torch.ao.quantization.observer.RecordingObserver"]], "default_debug_observer (in module torch.ao.quantization.observer)": [[830, "torch.ao.quantization.observer.default_debug_observer"]], "default_dynamic_quant_observer (in module torch.ao.quantization.observer)": [[831, "torch.ao.quantization.observer.default_dynamic_quant_observer"]], "default_float_qparams_observer (in module torch.ao.quantization.observer)": [[832, "torch.ao.quantization.observer.default_float_qparams_observer"]], "default_histogram_observer (in module torch.ao.quantization.observer)": [[833, "torch.ao.quantization.observer.default_histogram_observer"]], "default_observer (in module torch.ao.quantization.observer)": [[834, "torch.ao.quantization.observer.default_observer"]], "default_per_channel_weight_observer (in module torch.ao.quantization.observer)": [[835, "torch.ao.quantization.observer.default_per_channel_weight_observer"]], "default_placeholder_observer (in module torch.ao.quantization.observer)": [[836, "torch.ao.quantization.observer.default_placeholder_observer"]], "default_weight_observer (in module torch.ao.quantization.observer)": [[837, "torch.ao.quantization.observer.default_weight_observer"]], "get_observer_state_dict (class in torch.ao.quantization.observer)": [[838, "torch.ao.quantization.observer.get_observer_state_dict"]], "load_observer_state_dict (class in torch.ao.quantization.observer)": [[839, "torch.ao.quantization.observer.load_observer_state_dict"]], "prepare (class in torch.ao.quantization)": [[840, 
"torch.ao.quantization.prepare"]], "prepare_qat (class in torch.ao.quantization)": [[841, "torch.ao.quantization.prepare_qat"]], "propagate_qconfig_ (class in torch.ao.quantization)": [[842, "torch.ao.quantization.propagate_qconfig_"]], "model_is_exported (class in torch.ao.quantization.pt2e.export_utils)": [[843, "torch.ao.quantization.pt2e.export_utils.model_is_exported"]], "qconfig (class in torch.ao.quantization.qconfig)": [[844, "torch.ao.quantization.qconfig.QConfig"]], "default_activation_only_qconfig (in module torch.ao.quantization.qconfig)": [[845, "torch.ao.quantization.qconfig.default_activation_only_qconfig"]], "default_debug_qconfig (in module torch.ao.quantization.qconfig)": [[846, "torch.ao.quantization.qconfig.default_debug_qconfig"]], "default_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[847, "torch.ao.quantization.qconfig.default_dynamic_qconfig"]], "default_per_channel_qconfig (in module torch.ao.quantization.qconfig)": [[848, "torch.ao.quantization.qconfig.default_per_channel_qconfig"]], "default_qat_qconfig (in module torch.ao.quantization.qconfig)": [[849, "torch.ao.quantization.qconfig.default_qat_qconfig"]], "default_qat_qconfig_v2 (in module torch.ao.quantization.qconfig)": [[850, "torch.ao.quantization.qconfig.default_qat_qconfig_v2"]], "default_qconfig (in module torch.ao.quantization.qconfig)": [[851, "torch.ao.quantization.qconfig.default_qconfig"]], "default_weight_only_qconfig (in module torch.ao.quantization.qconfig)": [[852, "torch.ao.quantization.qconfig.default_weight_only_qconfig"]], "float16_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[853, "torch.ao.quantization.qconfig.float16_dynamic_qconfig"]], "float16_static_qconfig (in module torch.ao.quantization.qconfig)": [[854, "torch.ao.quantization.qconfig.float16_static_qconfig"]], "float_qparams_weight_only_qconfig (in module torch.ao.quantization.qconfig)": [[855, "torch.ao.quantization.qconfig.float_qparams_weight_only_qconfig"]], "per_channel_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[856, "torch.ao.quantization.qconfig.per_channel_dynamic_qconfig"]], "qconfigmapping (class in torch.ao.quantization.qconfig_mapping)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping"]], "from_dict() (torch.ao.quantization.qconfig_mapping.qconfigmapping class method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.from_dict"]], "set_global() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_global"]], "set_module_name() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name"]], "set_module_name_object_type_order() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name_object_type_order"]], "set_module_name_regex() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name_regex"]], "set_object_type() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_object_type"]], "to_dict() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.to_dict"]], "get_default_qat_qconfig_mapping (class in torch.ao.quantization.qconfig_mapping)": [[858, 
"torch.ao.quantization.qconfig_mapping.get_default_qat_qconfig_mapping"]], "get_default_qconfig_mapping (class in torch.ao.quantization.qconfig_mapping)": [[859, "torch.ao.quantization.qconfig_mapping.get_default_qconfig_mapping"]], "quantize (class in torch.ao.quantization)": [[860, "torch.ao.quantization.quantize"]], "quantize_dynamic (class in torch.ao.quantization)": [[861, "torch.ao.quantization.quantize_dynamic"]], "convert_fx (class in torch.ao.quantization.quantize_fx)": [[862, "torch.ao.quantization.quantize_fx.convert_fx"]], "fuse_fx (class in torch.ao.quantization.quantize_fx)": [[863, "torch.ao.quantization.quantize_fx.fuse_fx"]], "prepare_fx (class in torch.ao.quantization.quantize_fx)": [[864, "torch.ao.quantization.quantize_fx.prepare_fx"]], "prepare_qat_fx (class in torch.ao.quantization.quantize_fx)": [[865, "torch.ao.quantization.quantize_fx.prepare_qat_fx"]], "quantize_qat (class in torch.ao.quantization)": [[866, "torch.ao.quantization.quantize_qat"]], "swap_module (class in torch.ao.quantization)": [[867, "torch.ao.quantization.swap_module"]], "arange() (in module torch)": [[868, "torch.arange"]], "arccos() (in module torch)": [[869, "torch.arccos"]], "arccosh() (in module torch)": [[870, "torch.arccosh"]], "arcsin() (in module torch)": [[871, "torch.arcsin"]], "arcsinh() (in module torch)": [[872, "torch.arcsinh"]], "arctan() (in module torch)": [[873, "torch.arctan"]], "arctan2() (in module torch)": [[874, "torch.arctan2"]], "arctanh() (in module torch)": [[875, "torch.arctanh"]], "are_deterministic_algorithms_enabled() (in module torch)": [[876, "torch.are_deterministic_algorithms_enabled"]], "argmax() (in module torch)": [[877, "torch.argmax"]], "argmin() (in module torch)": [[878, "torch.argmin"]], "argsort() (in module torch)": [[879, "torch.argsort"]], "argwhere() (in module torch)": [[880, "torch.argwhere"]], "as_strided() (in module torch)": [[881, "torch.as_strided"]], "as_tensor() (in module torch)": [[882, "torch.as_tensor"]], "asarray() (in module torch)": [[883, "torch.asarray"]], "asin() (in module torch)": [[884, "torch.asin"]], "asinh() (in module torch)": [[885, "torch.asinh"]], "atan() (in module torch)": [[886, "torch.atan"]], "atan2() (in module torch)": [[887, "torch.atan2"]], "atanh() (in module torch)": [[888, "torch.atanh"]], "atleast_1d() (in module torch)": [[889, "torch.atleast_1d"]], "atleast_2d() (in module torch)": [[890, "torch.atleast_2d"]], "atleast_3d() (in module torch)": [[891, "torch.atleast_3d"]], "backward() (torch.autograd.function static method)": [[892, "torch.autograd.Function.backward"]], "forward() (torch.autograd.function static method)": [[893, "torch.autograd.Function.forward"]], "jvp() (torch.autograd.function static method)": [[894, "torch.autograd.Function.jvp"]], "vmap() (torch.autograd.function static method)": [[895, "torch.autograd.Function.vmap"]], "backward() (in module torch.autograd)": [[896, "torch.autograd.backward"]], "unpackeddualtensor (class in torch.autograd.forward_ad)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor"]], "count() (torch.autograd.forward_ad.unpackeddualtensor method)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor.count"]], "index() (torch.autograd.forward_ad.unpackeddualtensor method)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor.index"]], "primal (torch.autograd.forward_ad.unpackeddualtensor attribute)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor.primal"]], "tangent (torch.autograd.forward_ad.unpackeddualtensor attribute)": [[897, 
"torch.autograd.forward_ad.UnpackedDualTensor.tangent"]], "dual_level (class in torch.autograd.forward_ad)": [[898, "torch.autograd.forward_ad.dual_level"]], "enter_dual_level() (in module torch.autograd.forward_ad)": [[899, "torch.autograd.forward_ad.enter_dual_level"]], "exit_dual_level() (in module torch.autograd.forward_ad)": [[900, "torch.autograd.forward_ad.exit_dual_level"]], "make_dual() (in module torch.autograd.forward_ad)": [[901, "torch.autograd.forward_ad.make_dual"]], "unpack_dual() (in module torch.autograd.forward_ad)": [[902, "torch.autograd.forward_ad.unpack_dual"]], "backwardcfunction (class in torch.autograd.function)": [[903, "torch.autograd.function.BackwardCFunction"]], "apply() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.apply"]], "apply_jvp() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.apply_jvp"]], "mark_dirty() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.save_for_forward"]], "set_materialize_grads() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.set_materialize_grads"]], "mark_dirty() (torch.autograd.function.functionctx method)": [[904, "torch.autograd.function.FunctionCtx.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.functionctx method)": [[905, "torch.autograd.function.FunctionCtx.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.functionctx method)": [[906, "torch.autograd.function.FunctionCtx.save_for_backward"]], "set_materialize_grads() (torch.autograd.function.functionctx method)": [[907, "torch.autograd.function.FunctionCtx.set_materialize_grads"]], "inplacefunction (class in torch.autograd.function)": [[908, "torch.autograd.function.InplaceFunction"]], "backward() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.backward"]], "forward() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.forward"]], "jvp() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.jvp"]], "mark_dirty() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.save_for_forward"]], "set_materialize_grads() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.set_materialize_grads"]], "setup_context() (torch.autograd.function.inplacefunction static method)": [[908, 
"torch.autograd.function.InplaceFunction.setup_context"]], "vjp() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.vjp"]], "vmap() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.vmap"]], "nestediofunction (class in torch.autograd.function)": [[909, "torch.autograd.function.NestedIOFunction"]], "backward() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.backward"]], "backward_extended() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.backward_extended"]], "forward() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.forward"]], "forward_extended() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.forward_extended"]], "jvp() (torch.autograd.function.nestediofunction static method)": [[909, "torch.autograd.function.NestedIOFunction.jvp"]], "mark_dirty() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.save_for_forward"]], "saved_tensors (torch.autograd.function.nestediofunction property)": [[909, "torch.autograd.function.NestedIOFunction.saved_tensors"]], "set_materialize_grads() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.set_materialize_grads"]], "setup_context() (torch.autograd.function.nestediofunction static method)": [[909, "torch.autograd.function.NestedIOFunction.setup_context"]], "vjp() (torch.autograd.function.nestediofunction static method)": [[909, "torch.autograd.function.NestedIOFunction.vjp"]], "vmap() (torch.autograd.function.nestediofunction static method)": [[909, "torch.autograd.function.NestedIOFunction.vmap"]], "once_differentiable() (in module torch.autograd.function)": [[910, "torch.autograd.function.once_differentiable"]], "hessian() (in module torch.autograd.functional)": [[911, "torch.autograd.functional.hessian"]], "hvp() (in module torch.autograd.functional)": [[912, "torch.autograd.functional.hvp"]], "jacobian() (in module torch.autograd.functional)": [[913, "torch.autograd.functional.jacobian"]], "jvp() (in module torch.autograd.functional)": [[914, "torch.autograd.functional.jvp"]], "vhp() (in module torch.autograd.functional)": [[915, "torch.autograd.functional.vhp"]], "vjp() (in module torch.autograd.functional)": [[916, "torch.autograd.functional.vjp"]], "grad() (in module torch.autograd)": [[917, "torch.autograd.grad"]], "clone() (torch.autograd.grad_mode.inference_mode method)": [[918, "torch.autograd.grad_mode.inference_mode.clone"]], "inference_mode (class in torch.autograd.grad_mode)": [[918, "torch.autograd.grad_mode.inference_mode"]], "clone() (torch.autograd.grad_mode.set_grad_enabled method)": [[919, "torch.autograd.grad_mode.set_grad_enabled.clone"]], "set_grad_enabled (class in torch.autograd.grad_mode)": [[919, "torch.autograd.grad_mode.set_grad_enabled"]], "clone() 
(torch.autograd.grad_mode.set_multithreading_enabled method)": [[920, "torch.autograd.grad_mode.set_multithreading_enabled.clone"]], "set_multithreading_enabled (class in torch.autograd.grad_mode)": [[920, "torch.autograd.grad_mode.set_multithreading_enabled"]], "gradcheckerror": [[921, "torch.autograd.gradcheck.GradcheckError"]], "gradcheck() (in module torch.autograd.gradcheck)": [[922, "torch.autograd.gradcheck.gradcheck"]], "gradgradcheck() (in module torch.autograd.gradcheck)": [[923, "torch.autograd.gradcheck.gradgradcheck"]], "metadata() (torch.autograd.graph.node method)": [[924, "torch.autograd.graph.Node.metadata"]], "name() (torch.autograd.graph.node method)": [[925, "torch.autograd.graph.Node.name"]], "next_functions (torch.autograd.graph.node property)": [[926, "torch.autograd.graph.Node.next_functions"]], "register_hook() (torch.autograd.graph.node method)": [[927, "torch.autograd.graph.Node.register_hook"]], "register_prehook() (torch.autograd.graph.node method)": [[928, "torch.autograd.graph.Node.register_prehook"]], "increment_version() (in module torch.autograd.graph)": [[929, "torch.autograd.graph.increment_version"]], "enforceunique (class in torch.autograd.profiler)": [[930, "torch.autograd.profiler.EnforceUnique"]], "see() (torch.autograd.profiler.enforceunique method)": [[930, "torch.autograd.profiler.EnforceUnique.see"]], "kinetosteptracker (class in torch.autograd.profiler)": [[931, "torch.autograd.profiler.KinetoStepTracker"]], "current_step() (torch.autograd.profiler.kinetosteptracker class method)": [[931, "torch.autograd.profiler.KinetoStepTracker.current_step"]], "erase_step_count() (torch.autograd.profiler.kinetosteptracker class method)": [[931, "torch.autograd.profiler.KinetoStepTracker.erase_step_count"]], "increment_step() (torch.autograd.profiler.kinetosteptracker class method)": [[931, "torch.autograd.profiler.KinetoStepTracker.increment_step"]], "init_step_count() (torch.autograd.profiler.kinetosteptracker class method)": [[931, "torch.autograd.profiler.KinetoStepTracker.init_step_count"]], "load_nvprof() (in module torch.autograd.profiler)": [[932, "torch.autograd.profiler.load_nvprof"]], "parse_nvprof_trace() (in module torch.autograd.profiler)": [[933, "torch.autograd.profiler.parse_nvprof_trace"]], "export_chrome_trace() (torch.autograd.profiler.profile method)": [[934, "torch.autograd.profiler.profile.export_chrome_trace"]], "key_averages() (torch.autograd.profiler.profile method)": [[935, "torch.autograd.profiler.profile.key_averages"]], "self_cpu_time_total (torch.autograd.profiler.profile property)": [[936, "torch.autograd.profiler.profile.self_cpu_time_total"]], "total_average() (torch.autograd.profiler.profile method)": [[937, "torch.autograd.profiler.profile.total_average"]], "record_function (class in torch.autograd.profiler)": [[938, "torch.autograd.profiler.record_function"]], "interval (class in torch.autograd.profiler_util)": [[939, "torch.autograd.profiler_util.Interval"]], "elapsed_us() (torch.autograd.profiler_util.interval method)": [[939, "torch.autograd.profiler_util.Interval.elapsed_us"]], "kernel (class in torch.autograd.profiler_util)": [[940, "torch.autograd.profiler_util.Kernel"]], "count() (torch.autograd.profiler_util.kernel method)": [[940, "torch.autograd.profiler_util.Kernel.count"]], "device (torch.autograd.profiler_util.kernel attribute)": [[940, "torch.autograd.profiler_util.Kernel.device"]], "duration (torch.autograd.profiler_util.kernel attribute)": [[940, "torch.autograd.profiler_util.Kernel.duration"]], "index() 
(torch.autograd.profiler_util.kernel method)": [[940, "torch.autograd.profiler_util.Kernel.index"]], "name (torch.autograd.profiler_util.kernel attribute)": [[940, "torch.autograd.profiler_util.Kernel.name"]], "memrecordsacc (class in torch.autograd.profiler_util)": [[941, "torch.autograd.profiler_util.MemRecordsAcc"]], "in_interval() (torch.autograd.profiler_util.memrecordsacc method)": [[941, "torch.autograd.profiler_util.MemRecordsAcc.in_interval"]], "stringtable (class in torch.autograd.profiler_util)": [[942, "torch.autograd.profiler_util.StringTable"]], "clear() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.clear"]], "copy() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.copy"]], "default_factory (torch.autograd.profiler_util.stringtable attribute)": [[942, "torch.autograd.profiler_util.StringTable.default_factory"]], "fromkeys() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.fromkeys"]], "get() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.get"]], "items() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.items"]], "keys() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.keys"]], "pop() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.pop"]], "popitem() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.popitem"]], "setdefault() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.setdefault"]], "update() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.update"]], "values() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.values"]], "baddbmm() (in module torch)": [[943, "torch.baddbmm"]], "bartlett_window() (in module torch)": [[944, "torch.bartlett_window"]], "bernoulli() (in module torch)": [[945, "torch.bernoulli"]], "bincount() (in module torch)": [[946, "torch.bincount"]], "bitwise_and() (in module torch)": [[947, "torch.bitwise_and"]], "bitwise_left_shift() (in module torch)": [[948, "torch.bitwise_left_shift"]], "bitwise_not() (in module torch)": [[949, "torch.bitwise_not"]], "bitwise_or() (in module torch)": [[950, "torch.bitwise_or"]], "bitwise_right_shift() (in module torch)": [[951, "torch.bitwise_right_shift"]], "bitwise_xor() (in module torch)": [[952, "torch.bitwise_xor"]], "blackman_window() (in module torch)": [[953, "torch.blackman_window"]], "block_diag() (in module torch)": [[954, "torch.block_diag"]], "bmm() (in module torch)": [[955, "torch.bmm"]], "broadcast_shapes() (in module torch)": [[956, "torch.broadcast_shapes"]], "broadcast_tensors() (in module torch)": [[957, "torch.broadcast_tensors"]], "broadcast_to() (in module torch)": [[958, "torch.broadcast_to"]], "bucketize() (in module torch)": [[959, "torch.bucketize"]], "can_cast() (in module torch)": [[960, "torch.can_cast"]], "cartesian_prod() (in module torch)": [[961, "torch.cartesian_prod"]], "cat() (in module torch)": [[962, "torch.cat"]], "cdist() (in module torch)": [[963, "torch.cdist"]], "ceil() (in module torch)": [[964, "torch.ceil"]], "chain_matmul() (in module torch)": [[965, "torch.chain_matmul"]], "cholesky() (in module torch)": 
[[966, "torch.cholesky"]], "cholesky_inverse() (in module torch)": [[967, "torch.cholesky_inverse"]], "cholesky_solve() (in module torch)": [[968, "torch.cholesky_solve"]], "chunk() (in module torch)": [[969, "torch.chunk"]], "clamp() (in module torch)": [[970, "torch.clamp"]], "clip() (in module torch)": [[971, "torch.clip"]], "clone() (in module torch)": [[972, "torch.clone"]], "column_stack() (in module torch)": [[973, "torch.column_stack"]], "combinations() (in module torch)": [[974, "torch.combinations"]], "compile() (in module torch)": [[975, "torch.compile"]], "compiled_with_cxx11_abi() (in module torch)": [[976, "torch.compiled_with_cxx11_abi"]], "allow_in_graph() (in module torch.compiler)": [[977, "torch.compiler.allow_in_graph"]], "assume_constant_result() (in module torch.compiler)": [[978, "torch.compiler.assume_constant_result"]], "compile() (in module torch.compiler)": [[979, "torch.compiler.compile"]], "cudagraph_mark_step_begin() (in module torch.compiler)": [[980, "torch.compiler.cudagraph_mark_step_begin"]], "disable() (in module torch.compiler)": [[981, "torch.compiler.disable"]], "is_compiling() (in module torch.compiler)": [[982, "torch.compiler.is_compiling"]], "is_dynamo_compiling() (in module torch.compiler)": [[983, "torch.compiler.is_dynamo_compiling"]], "list_backends() (in module torch.compiler)": [[984, "torch.compiler.list_backends"]], "reset() (in module torch.compiler)": [[985, "torch.compiler.reset"]], "complex() (in module torch)": [[986, "torch.complex"]], "concat() (in module torch)": [[987, "torch.concat"]], "concatenate() (in module torch)": [[988, "torch.concatenate"]], "cond() (in module torch)": [[989, "torch.cond"]], "conj() (in module torch)": [[990, "torch.conj"]], "conj_physical() (in module torch)": [[991, "torch.conj_physical"]], "copysign() (in module torch)": [[992, "torch.copysign"]], "corrcoef() (in module torch)": [[993, "torch.corrcoef"]], "cos() (in module torch)": [[994, "torch.cos"]], "cosh() (in module torch)": [[995, "torch.cosh"]], "count_nonzero() (in module torch)": [[996, "torch.count_nonzero"]], "cov() (in module torch)": [[997, "torch.cov"]], "stream (class in torch.cpu)": [[998, "torch.cpu.Stream"]], "streamcontext (class in torch.cpu)": [[999, "torch.cpu.StreamContext"]], "current_device() (in module torch.cpu)": [[1000, "torch.cpu.current_device"]], "current_stream() (in module torch.cpu)": [[1001, "torch.cpu.current_stream"]], "device_count() (in module torch.cpu)": [[1002, "torch.cpu.device_count"]], "is_available() (in module torch.cpu)": [[1003, "torch.cpu.is_available"]], "set_device() (in module torch.cpu)": [[1004, "torch.cpu.set_device"]], "stream() (in module torch.cpu)": [[1005, "torch.cpu.stream"]], "synchronize() (in module torch.cpu)": [[1006, "torch.cpu.synchronize"]], "cross() (in module torch)": [[1007, "torch.cross"]], "cudagraph (class in torch.cuda)": [[1008, "torch.cuda.CUDAGraph"]], "capture_begin() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.capture_begin"]], "capture_end() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.capture_end"]], "debug_dump() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.debug_dump"]], "enable_debug_mode() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.enable_debug_mode"]], "pool() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.pool"]], "replay() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.replay"]], "reset() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.reset"]], 
"cudapluggableallocator (class in torch.cuda)": [[1009, "torch.cuda.CUDAPluggableAllocator"]], "event (class in torch.cuda)": [[1010, "torch.cuda.Event"]], "elapsed_time() (torch.cuda.event method)": [[1010, "torch.cuda.Event.elapsed_time"]], "from_ipc_handle() (torch.cuda.event class method)": [[1010, "torch.cuda.Event.from_ipc_handle"]], "ipc_handle() (torch.cuda.event method)": [[1010, "torch.cuda.Event.ipc_handle"]], "query() (torch.cuda.event method)": [[1010, "torch.cuda.Event.query"]], "record() (torch.cuda.event method)": [[1010, "torch.cuda.Event.record"]], "synchronize() (torch.cuda.event method)": [[1010, "torch.cuda.Event.synchronize"]], "wait() (torch.cuda.event method)": [[1010, "torch.cuda.Event.wait"]], "externalstream (class in torch.cuda)": [[1011, "torch.cuda.ExternalStream"]], "query() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.query"]], "record_event() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.record_event"]], "synchronize() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.synchronize"]], "wait_event() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.wait_event"]], "wait_stream() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.wait_stream"]], "outofmemoryerror": [[1012, "torch.cuda.OutOfMemoryError"]], "stream (class in torch.cuda)": [[1013, "torch.cuda.Stream"]], "query() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.query"]], "record_event() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.record_event"]], "synchronize() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.synchronize"]], "wait_event() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.wait_event"]], "wait_stream() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.wait_stream"]], "streamcontext (class in torch.cuda)": [[1014, "torch.cuda.StreamContext"]], "caching_allocator_alloc() (in module torch.cuda)": [[1015, "torch.cuda.caching_allocator_alloc"]], "caching_allocator_delete() (in module torch.cuda)": [[1016, "torch.cuda.caching_allocator_delete"]], "can_device_access_peer() (in module torch.cuda)": [[1017, "torch.cuda.can_device_access_peer"]], "change_current_allocator() (in module torch.cuda)": [[1018, "torch.cuda.change_current_allocator"]], "clock_rate() (in module torch.cuda)": [[1019, "torch.cuda.clock_rate"]], "broadcast() (in module torch.cuda.comm)": [[1020, "torch.cuda.comm.broadcast"]], "broadcast_coalesced() (in module torch.cuda.comm)": [[1021, "torch.cuda.comm.broadcast_coalesced"]], "gather() (in module torch.cuda.comm)": [[1022, "torch.cuda.comm.gather"]], "reduce_add() (in module torch.cuda.comm)": [[1023, "torch.cuda.comm.reduce_add"]], "scatter() (in module torch.cuda.comm)": [[1024, "torch.cuda.comm.scatter"]], "current_blas_handle() (in module torch.cuda)": [[1025, "torch.cuda.current_blas_handle"]], "current_device() (in module torch.cuda)": [[1026, "torch.cuda.current_device"]], "current_stream() (in module torch.cuda)": [[1027, "torch.cuda.current_stream"]], "default_stream() (in module torch.cuda)": [[1028, "torch.cuda.default_stream"]], "device (class in torch.cuda)": [[1029, "torch.cuda.device"]], "device_count() (in module torch.cuda)": [[1030, "torch.cuda.device_count"]], "device_of (class in torch.cuda)": [[1031, "torch.cuda.device_of"]], "empty_cache() (in module torch.cuda)": [[1032, "torch.cuda.empty_cache"]], "get_allocator_backend() (in module torch.cuda)": [[1033, "torch.cuda.get_allocator_backend"]], 
"get_arch_list() (in module torch.cuda)": [[1034, "torch.cuda.get_arch_list"]], "get_device_capability() (in module torch.cuda)": [[1035, "torch.cuda.get_device_capability"]], "get_device_name() (in module torch.cuda)": [[1036, "torch.cuda.get_device_name"]], "get_device_properties() (in module torch.cuda)": [[1037, "torch.cuda.get_device_properties"]], "get_gencode_flags() (in module torch.cuda)": [[1038, "torch.cuda.get_gencode_flags"]], "get_rng_state() (in module torch.cuda)": [[1039, "torch.cuda.get_rng_state"]], "get_rng_state_all() (in module torch.cuda)": [[1040, "torch.cuda.get_rng_state_all"]], "get_sync_debug_mode() (in module torch.cuda)": [[1041, "torch.cuda.get_sync_debug_mode"]], "graph (class in torch.cuda)": [[1042, "torch.cuda.graph"]], "graph_pool_handle() (in module torch.cuda)": [[1043, "torch.cuda.graph_pool_handle"]], "init() (in module torch.cuda)": [[1044, "torch.cuda.init"]], "initial_seed() (in module torch.cuda)": [[1045, "torch.cuda.initial_seed"]], "ipc_collect() (in module torch.cuda)": [[1046, "torch.cuda.ipc_collect"]], "is_available() (in module torch.cuda)": [[1047, "torch.cuda.is_available"]], "is_current_stream_capturing() (in module torch.cuda)": [[1048, "torch.cuda.is_current_stream_capturing"]], "is_initialized() (in module torch.cuda)": [[1049, "torch.cuda.is_initialized"]], "_create_jit_fn() (in module torch.cuda.jiterator)": [[1050, "torch.cuda.jiterator._create_jit_fn"]], "_create_multi_output_jit_fn() (in module torch.cuda.jiterator)": [[1051, "torch.cuda.jiterator._create_multi_output_jit_fn"]], "list_gpu_processes() (in module torch.cuda)": [[1052, "torch.cuda.list_gpu_processes"]], "make_graphed_callables() (in module torch.cuda)": [[1053, "torch.cuda.make_graphed_callables"]], "manual_seed() (in module torch.cuda)": [[1054, "torch.cuda.manual_seed"]], "manual_seed_all() (in module torch.cuda)": [[1055, "torch.cuda.manual_seed_all"]], "max_memory_allocated() (in module torch.cuda)": [[1056, "torch.cuda.max_memory_allocated"]], "max_memory_cached() (in module torch.cuda)": [[1057, "torch.cuda.max_memory_cached"]], "max_memory_reserved() (in module torch.cuda)": [[1058, "torch.cuda.max_memory_reserved"]], "mem_get_info() (in module torch.cuda)": [[1059, "torch.cuda.mem_get_info"]], "memory_allocated() (in module torch.cuda)": [[1060, "torch.cuda.memory_allocated"]], "memory_cached() (in module torch.cuda)": [[1061, "torch.cuda.memory_cached"]], "memory_reserved() (in module torch.cuda)": [[1062, "torch.cuda.memory_reserved"]], "memory_snapshot() (in module torch.cuda)": [[1063, "torch.cuda.memory_snapshot"]], "memory_stats() (in module torch.cuda)": [[1064, "torch.cuda.memory_stats"]], "memory_summary() (in module torch.cuda)": [[1065, "torch.cuda.memory_summary"]], "memory_usage() (in module torch.cuda)": [[1066, "torch.cuda.memory_usage"]], "mark() (in module torch.cuda.nvtx)": [[1067, "torch.cuda.nvtx.mark"]], "range() (in module torch.cuda.nvtx)": [[1068, "torch.cuda.nvtx.range"]], "range_pop() (in module torch.cuda.nvtx)": [[1069, "torch.cuda.nvtx.range_pop"]], "range_push() (in module torch.cuda.nvtx)": [[1070, "torch.cuda.nvtx.range_push"]], "power_draw() (in module torch.cuda)": [[1071, "torch.cuda.power_draw"]], "reset_max_memory_allocated() (in module torch.cuda)": [[1072, "torch.cuda.reset_max_memory_allocated"]], "reset_max_memory_cached() (in module torch.cuda)": [[1073, "torch.cuda.reset_max_memory_cached"]], "reset_peak_memory_stats() (in module torch.cuda)": [[1074, "torch.cuda.reset_peak_memory_stats"]], "seed() (in module 
torch.cuda)": [[1075, "torch.cuda.seed"]], "seed_all() (in module torch.cuda)": [[1076, "torch.cuda.seed_all"]], "set_device() (in module torch.cuda)": [[1077, "torch.cuda.set_device"]], "set_per_process_memory_fraction() (in module torch.cuda)": [[1078, "torch.cuda.set_per_process_memory_fraction"]], "set_rng_state() (in module torch.cuda)": [[1079, "torch.cuda.set_rng_state"]], "set_rng_state_all() (in module torch.cuda)": [[1080, "torch.cuda.set_rng_state_all"]], "set_stream() (in module torch.cuda)": [[1081, "torch.cuda.set_stream"]], "set_sync_debug_mode() (in module torch.cuda)": [[1082, "torch.cuda.set_sync_debug_mode"]], "stream() (in module torch.cuda)": [[1083, "torch.cuda.stream"]], "synchronize() (in module torch.cuda)": [[1084, "torch.cuda.synchronize"]], "temperature() (in module torch.cuda)": [[1085, "torch.cuda.temperature"]], "utilization() (in module torch.cuda)": [[1086, "torch.cuda.utilization"]], "cummax() (in module torch)": [[1087, "torch.cummax"]], "cummin() (in module torch)": [[1088, "torch.cummin"]], "cumprod() (in module torch)": [[1089, "torch.cumprod"]], "cumsum() (in module torch)": [[1090, "torch.cumsum"]], "cumulative_trapezoid() (in module torch)": [[1091, "torch.cumulative_trapezoid"]], "deg2rad() (in module torch)": [[1092, "torch.deg2rad"]], "dequantize() (in module torch)": [[1093, "torch.dequantize"]], "det() (in module torch)": [[1094, "torch.det"]], "diag() (in module torch)": [[1095, "torch.diag"]], "diag_embed() (in module torch)": [[1096, "torch.diag_embed"]], "diagflat() (in module torch)": [[1097, "torch.diagflat"]], "diagonal() (in module torch)": [[1098, "torch.diagonal"]], "diagonal_scatter() (in module torch)": [[1099, "torch.diagonal_scatter"]], "diff() (in module torch)": [[1100, "torch.diff"]], "digamma() (in module torch)": [[1101, "torch.digamma"]], "dist() (in module torch)": [[1102, "torch.dist"]], "div() (in module torch)": [[1103, "torch.div"]], "divide() (in module torch)": [[1104, "torch.divide"]], "dot() (in module torch)": [[1105, "torch.dot"]], "dsplit() (in module torch)": [[1106, "torch.dsplit"]], "dstack() (in module torch)": [[1107, "torch.dstack"]], "einsum() (in module torch)": [[1108, "torch.einsum"]], "empty() (in module torch)": [[1109, "torch.empty"]], "empty_like() (in module torch)": [[1110, "torch.empty_like"]], "empty_strided() (in module torch)": [[1111, "torch.empty_strided"]], "enable_grad (class in torch)": [[1112, "torch.enable_grad"]], "eq() (in module torch)": [[1113, "torch.eq"]], "equal() (in module torch)": [[1114, "torch.equal"]], "erf() (in module torch)": [[1115, "torch.erf"]], "erfc() (in module torch)": [[1116, "torch.erfc"]], "erfinv() (in module torch)": [[1117, "torch.erfinv"]], "exp() (in module torch)": [[1118, "torch.exp"]], "exp2() (in module torch)": [[1119, "torch.exp2"]], "expm1() (in module torch)": [[1120, "torch.expm1"]], "eye() (in module torch)": [[1121, "torch.eye"]], "fake_quantize_per_channel_affine() (in module torch)": [[1122, "torch.fake_quantize_per_channel_affine"]], "fake_quantize_per_tensor_affine() (in module torch)": [[1123, "torch.fake_quantize_per_tensor_affine"]], "fft() (in module torch.fft)": [[1124, "torch.fft.fft"]], "fft2() (in module torch.fft)": [[1125, "torch.fft.fft2"]], "fftfreq() (in module torch.fft)": [[1126, "torch.fft.fftfreq"]], "fftn() (in module torch.fft)": [[1127, "torch.fft.fftn"]], "fftshift() (in module torch.fft)": [[1128, "torch.fft.fftshift"]], "hfft() (in module torch.fft)": [[1129, "torch.fft.hfft"]], "hfft2() (in module torch.fft)": [[1130, 
"torch.fft.hfft2"]], "hfftn() (in module torch.fft)": [[1131, "torch.fft.hfftn"]], "ifft() (in module torch.fft)": [[1132, "torch.fft.ifft"]], "ifft2() (in module torch.fft)": [[1133, "torch.fft.ifft2"]], "ifftn() (in module torch.fft)": [[1134, "torch.fft.ifftn"]], "ifftshift() (in module torch.fft)": [[1135, "torch.fft.ifftshift"]], "ihfft() (in module torch.fft)": [[1136, "torch.fft.ihfft"]], "ihfft2() (in module torch.fft)": [[1137, "torch.fft.ihfft2"]], "ihfftn() (in module torch.fft)": [[1138, "torch.fft.ihfftn"]], "irfft() (in module torch.fft)": [[1139, "torch.fft.irfft"]], "irfft2() (in module torch.fft)": [[1140, "torch.fft.irfft2"]], "irfftn() (in module torch.fft)": [[1141, "torch.fft.irfftn"]], "rfft() (in module torch.fft)": [[1142, "torch.fft.rfft"]], "rfft2() (in module torch.fft)": [[1143, "torch.fft.rfft2"]], "rfftfreq() (in module torch.fft)": [[1144, "torch.fft.rfftfreq"]], "rfftn() (in module torch.fft)": [[1145, "torch.fft.rfftn"]], "fix() (in module torch)": [[1146, "torch.fix"]], "flatten() (in module torch)": [[1147, "torch.flatten"]], "flip() (in module torch)": [[1148, "torch.flip"]], "fliplr() (in module torch)": [[1149, "torch.fliplr"]], "flipud() (in module torch)": [[1150, "torch.flipud"]], "float_power() (in module torch)": [[1151, "torch.float_power"]], "floor() (in module torch)": [[1152, "torch.floor"]], "floor_divide() (in module torch)": [[1153, "torch.floor_divide"]], "fmax() (in module torch)": [[1154, "torch.fmax"]], "fmin() (in module torch)": [[1155, "torch.fmin"]], "fmod() (in module torch)": [[1156, "torch.fmod"]], "frac() (in module torch)": [[1157, "torch.frac"]], "frexp() (in module torch)": [[1158, "torch.frexp"]], "from_dlpack() (in module torch)": [[1159, "torch.from_dlpack"]], "from_file() (in module torch)": [[1160, "torch.from_file"]], "from_numpy() (in module torch)": [[1161, "torch.from_numpy"]], "frombuffer() (in module torch)": [[1162, "torch.frombuffer"]], "full() (in module torch)": [[1163, "torch.full"]], "full_like() (in module torch)": [[1164, "torch.full_like"]], "functional_call() (in module torch.func)": [[1165, "torch.func.functional_call"]], "functionalize() (in module torch.func)": [[1166, "torch.func.functionalize"]], "grad() (in module torch.func)": [[1167, "torch.func.grad"]], "grad_and_value() (in module torch.func)": [[1168, "torch.func.grad_and_value"]], "hessian() (in module torch.func)": [[1169, "torch.func.hessian"]], "jacfwd() (in module torch.func)": [[1170, "torch.func.jacfwd"]], "jacrev() (in module torch.func)": [[1171, "torch.func.jacrev"]], "jvp() (in module torch.func)": [[1172, "torch.func.jvp"]], "linearize() (in module torch.func)": [[1173, "torch.func.linearize"]], "replace_all_batch_norm_modules_() (in module torch.func)": [[1174, "torch.func.replace_all_batch_norm_modules_"]], "stack_module_state() (in module torch.func)": [[1175, "torch.func.stack_module_state"]], "vjp() (in module torch.func)": [[1176, "torch.func.vjp"]], "vmap() (in module torch.func)": [[1177, "torch.func.vmap"]], "callmethodkey (class in torch.fx.experimental.symbolic_shapes)": [[1178, "torch.fx.experimental.symbolic_shapes.CallMethodKey"]], "get() (torch.fx.experimental.symbolic_shapes.callmethodkey method)": [[1178, "torch.fx.experimental.symbolic_shapes.CallMethodKey.get"]], "convertintkey (class in torch.fx.experimental.symbolic_shapes)": [[1179, "torch.fx.experimental.symbolic_shapes.ConvertIntKey"]], "get() (torch.fx.experimental.symbolic_shapes.convertintkey method)": [[1179, 
"torch.fx.experimental.symbolic_shapes.ConvertIntKey.get"]], "dimconstraints (class in torch.fx.experimental.symbolic_shapes)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints"]], "add() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.add"]], "add_equality() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.add_equality"]], "forced_specializations() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.forced_specializations"]], "prettify_results() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.prettify_results"]], "remove_redundant_dynamic_results() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.remove_redundant_dynamic_results"]], "rewrite_with_congruences() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.rewrite_with_congruences"]], "solve() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.solve"]], "dimdynamic (class in torch.fx.experimental.symbolic_shapes)": [[1181, "torch.fx.experimental.symbolic_shapes.DimDynamic"]], "dividebykey (class in torch.fx.experimental.symbolic_shapes)": [[1182, "torch.fx.experimental.symbolic_shapes.DivideByKey"]], "get() (torch.fx.experimental.symbolic_shapes.dividebykey method)": [[1182, "torch.fx.experimental.symbolic_shapes.DivideByKey.get"]], "equalityconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1183, "torch.fx.experimental.symbolic_shapes.EqualityConstraint"]], "innertensorkey (class in torch.fx.experimental.symbolic_shapes)": [[1184, "torch.fx.experimental.symbolic_shapes.InnerTensorKey"]], "get() (torch.fx.experimental.symbolic_shapes.innertensorkey method)": [[1184, "torch.fx.experimental.symbolic_shapes.InnerTensorKey.get"]], "propagateunbackedsymints (class in torch.fx.experimental.symbolic_shapes)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts"]], "boxed_run() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.boxed_run"]], "call_function() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_function"]], "call_method() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_method"]], "call_module() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_module"]], "fetch_args_kwargs_from_env() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.fetch_args_kwargs_from_env"]], "fetch_attr() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.fetch_attr"]], "get_attr() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, 
"torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.get_attr"]], "map_nodes_to_values() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.map_nodes_to_values"]], "output() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.output"]], "placeholder() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.placeholder"]], "run() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.run"]], "run_node() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.run_node"]], "relaxedunspecconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1186, "torch.fx.experimental.symbolic_shapes.RelaxedUnspecConstraint"]], "shapeenv (class in torch.fx.experimental.symbolic_shapes)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv"]], "add_var_to_val() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.add_var_to_val"]], "bind_symbols() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.bind_symbols"]], "bound_sympy() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.bound_sympy"]], "check_equal() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.check_equal"]], "cleanup() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.cleanup"]], "create_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symbol"]], "create_symbolic_sizes_strides_storage_offset() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symbolic_sizes_strides_storage_offset"]], "create_symboolnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symboolnode"]], "create_symfloatnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symfloatnode"]], "create_symintnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symintnode"]], "create_unbacked_symbool() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symbool"]], "create_unbacked_symfloat() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symfloat"]], "create_unbacked_symint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symint"]], "create_unspecified_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unspecified_symbol"]], "create_unspecified_symint_and_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv 
method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unspecified_symint_and_symbol"]], "defer_runtime_assert() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.defer_runtime_assert"]], "evaluate_expr() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_expr"]], "evaluate_guards_expression() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_guards_expression"]], "evaluate_guards_for_args() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_guards_for_args"]], "format_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.format_guards"]], "freeze() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.freeze"]], "freeze_runtime_asserts() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.freeze_runtime_asserts"]], "get_axioms() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_axioms"]], "get_implications() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_implications"]], "get_nontrivial_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_nontrivial_guards"]], "get_pruned_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_pruned_guards"]], "ignore_fresh_unbacked_symbols() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.ignore_fresh_unbacked_symbols"]], "is_unbacked_symint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.is_unbacked_symint"]], "produce_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.produce_guards"]], "produce_guards_expression() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.produce_guards_expression"]], "replace() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.replace"]], "set_unbacked_var_to_val() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.set_unbacked_var_to_val"]], "simplify() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.simplify"]], "size_hint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.size_hint"]], "suppress_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.suppress_guards"]], "shapeenvsettings (class in torch.fx.experimental.symbolic_shapes)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnvSettings"]], "statefulsymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1189, "torch.fx.experimental.symbolic_shapes.StatefulSymbolicContext"]], "statelesssymboliccontext 
(class in torch.fx.experimental.symbolic_shapes)": [[1190, "torch.fx.experimental.symbolic_shapes.StatelessSymbolicContext"]], "strictminmaxconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1191, "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint"]], "render() (torch.fx.experimental.symbolic_shapes.strictminmaxconstraint method)": [[1191, "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint.render"]], "subclasssymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1192, "torch.fx.experimental.symbolic_shapes.SubclassSymbolicContext"]], "symboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1193, "torch.fx.experimental.symbolic_shapes.SymbolicContext"]], "canonicalize_bool_expr() (in module torch.fx.experimental.symbolic_shapes)": [[1194, "torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr"]], "check_consistent() (in module torch.fx.experimental.symbolic_shapes)": [[1195, "torch.fx.experimental.symbolic_shapes.check_consistent"]], "compute_unbacked_bindings() (in module torch.fx.experimental.symbolic_shapes)": [[1196, "torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings"]], "constrain_range() (in module torch.fx.experimental.symbolic_shapes)": [[1197, "torch.fx.experimental.symbolic_shapes.constrain_range"]], "constrain_unify() (in module torch.fx.experimental.symbolic_shapes)": [[1198, "torch.fx.experimental.symbolic_shapes.constrain_unify"]], "definitely_false() (in module torch.fx.experimental.symbolic_shapes)": [[1199, "torch.fx.experimental.symbolic_shapes.definitely_false"]], "definitely_true() (in module torch.fx.experimental.symbolic_shapes)": [[1200, "torch.fx.experimental.symbolic_shapes.definitely_true"]], "guard_size_oblivious() (in module torch.fx.experimental.symbolic_shapes)": [[1201, "torch.fx.experimental.symbolic_shapes.guard_size_oblivious"]], "has_free_symbols() (in module torch.fx.experimental.symbolic_shapes)": [[1202, "torch.fx.experimental.symbolic_shapes.has_free_symbols"]], "hint_int() (in module torch.fx.experimental.symbolic_shapes)": [[1203, "torch.fx.experimental.symbolic_shapes.hint_int"]], "is_concrete_bool() (in module torch.fx.experimental.symbolic_shapes)": [[1204, "torch.fx.experimental.symbolic_shapes.is_concrete_bool"]], "is_concrete_int() (in module torch.fx.experimental.symbolic_shapes)": [[1205, "torch.fx.experimental.symbolic_shapes.is_concrete_int"]], "lru_cache() (in module torch.fx.experimental.symbolic_shapes)": [[1206, "torch.fx.experimental.symbolic_shapes.lru_cache"]], "parallel_and() (in module torch.fx.experimental.symbolic_shapes)": [[1207, "torch.fx.experimental.symbolic_shapes.parallel_and"]], "parallel_or() (in module torch.fx.experimental.symbolic_shapes)": [[1208, "torch.fx.experimental.symbolic_shapes.parallel_or"]], "rebind_unbacked() (in module torch.fx.experimental.symbolic_shapes)": [[1209, "torch.fx.experimental.symbolic_shapes.rebind_unbacked"]], "resolve_unbacked_bindings() (in module torch.fx.experimental.symbolic_shapes)": [[1210, "torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings"]], "statically_known_true() (in module torch.fx.experimental.symbolic_shapes)": [[1211, "torch.fx.experimental.symbolic_shapes.statically_known_true"]], "sym_eq() (in module torch.fx.experimental.symbolic_shapes)": [[1212, "torch.fx.experimental.symbolic_shapes.sym_eq"]], "gather() (in module torch)": [[1213, "torch.gather"]], "gcd() (in module torch)": [[1214, "torch.gcd"]], "ge() (in module torch)": [[1215, "torch.ge"]], "geqrf() (in module 
torch)": [[1216, "torch.geqrf"]], "ger() (in module torch)": [[1217, "torch.ger"]], "get_default_device() (in module torch)": [[1218, "torch.get_default_device"]], "get_default_dtype() (in module torch)": [[1219, "torch.get_default_dtype"]], "get_deterministic_debug_mode() (in module torch)": [[1220, "torch.get_deterministic_debug_mode"]], "get_device_module() (in module torch)": [[1221, "torch.get_device_module"]], "get_float32_matmul_precision() (in module torch)": [[1222, "torch.get_float32_matmul_precision"]], "get_num_interop_threads() (in module torch)": [[1223, "torch.get_num_interop_threads"]], "get_num_threads() (in module torch)": [[1224, "torch.get_num_threads"]], "get_rng_state() (in module torch)": [[1225, "torch.get_rng_state"]], "gradient() (in module torch)": [[1226, "torch.gradient"]], "greater() (in module torch)": [[1227, "torch.greater"]], "greater_equal() (in module torch)": [[1228, "torch.greater_equal"]], "gt() (in module torch)": [[1229, "torch.gt"]], "hamming_window() (in module torch)": [[1230, "torch.hamming_window"]], "hann_window() (in module torch)": [[1231, "torch.hann_window"]], "heaviside() (in module torch)": [[1232, "torch.heaviside"]], "histc() (in module torch)": [[1233, "torch.histc"]], "histogram() (in module torch)": [[1234, "torch.histogram"]], "histogramdd() (in module torch)": [[1235, "torch.histogramdd"]], "hsplit() (in module torch)": [[1236, "torch.hsplit"]], "hspmm() (in module torch)": [[1237, "torch.hspmm"]], "hstack() (in module torch)": [[1238, "torch.hstack"]], "hypot() (in module torch)": [[1239, "torch.hypot"]], "i0() (in module torch)": [[1240, "torch.i0"]], "igamma() (in module torch)": [[1241, "torch.igamma"]], "igammac() (in module torch)": [[1242, "torch.igammac"]], "imag() (in module torch)": [[1243, "torch.imag"]], "index_add() (in module torch)": [[1244, "torch.index_add"]], "index_copy() (in module torch)": [[1245, "torch.index_copy"]], "index_reduce() (in module torch)": [[1246, "torch.index_reduce"]], "index_select() (in module torch)": [[1247, "torch.index_select"]], "initial_seed() (in module torch)": [[1248, "torch.initial_seed"]], "inner() (in module torch)": [[1249, "torch.inner"]], "inverse() (in module torch)": [[1250, "torch.inverse"]], "is_complex() (in module torch)": [[1251, "torch.is_complex"]], "is_conj() (in module torch)": [[1252, "torch.is_conj"]], "is_deterministic_algorithms_warn_only_enabled() (in module torch)": [[1253, "torch.is_deterministic_algorithms_warn_only_enabled"]], "is_floating_point() (in module torch)": [[1254, "torch.is_floating_point"]], "is_grad_enabled() (in module torch)": [[1255, "torch.is_grad_enabled"]], "is_inference_mode_enabled() (in module torch)": [[1256, "torch.is_inference_mode_enabled"]], "is_nonzero() (in module torch)": [[1257, "torch.is_nonzero"]], "is_storage() (in module torch)": [[1258, "torch.is_storage"]], "is_tensor() (in module torch)": [[1259, "torch.is_tensor"]], "is_warn_always_enabled() (in module torch)": [[1260, "torch.is_warn_always_enabled"]], "isclose() (in module torch)": [[1261, "torch.isclose"]], "isfinite() (in module torch)": [[1262, "torch.isfinite"]], "isin() (in module torch)": [[1263, "torch.isin"]], "isinf() (in module torch)": [[1264, "torch.isinf"]], "isnan() (in module torch)": [[1265, "torch.isnan"]], "isneginf() (in module torch)": [[1266, "torch.isneginf"]], "isposinf() (in module torch)": [[1267, "torch.isposinf"]], "isreal() (in module torch)": [[1268, "torch.isreal"]], "istft() (in module torch)": [[1269, "torch.istft"]], "attribute (class 
in torch.jit)": [[1270, "torch.jit.Attribute"]], "count() (torch.jit.attribute method)": [[1270, "torch.jit.Attribute.count"]], "index() (torch.jit.attribute method)": [[1270, "torch.jit.Attribute.index"]], "type (torch.jit.attribute attribute)": [[1270, "torch.jit.Attribute.type"]], "value (torch.jit.attribute attribute)": [[1270, "torch.jit.Attribute.value"]], "scriptfunction (class in torch.jit)": [[1271, "torch.jit.ScriptFunction"]], "get_debug_state() (torch.jit.scriptfunction method)": [[1271, "torch.jit.ScriptFunction.get_debug_state"]], "save() (torch.jit.scriptfunction method)": [[1271, "torch.jit.ScriptFunction.save"]], "save_to_buffer() (torch.jit.scriptfunction method)": [[1271, "torch.jit.ScriptFunction.save_to_buffer"]], "scriptmodule (class in torch.jit)": [[1272, "torch.jit.ScriptModule"]], "add_module() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.add_module"]], "apply() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.apply"]], "bfloat16() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.bfloat16"]], "buffers() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.buffers"]], "children() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.children"]], "code (torch.jit.scriptmodule property)": [[1272, "torch.jit.ScriptModule.code"]], "code_with_constants (torch.jit.scriptmodule property)": [[1272, "torch.jit.ScriptModule.code_with_constants"]], "compile() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.compile"]], "cpu() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.cpu"]], "cuda() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.cuda"]], "double() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.double"]], "eval() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.eval"]], "extra_repr() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.extra_repr"]], "float() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.float"]], "get_buffer() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.get_buffer"]], "get_extra_state() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.get_extra_state"]], "get_parameter() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.get_parameter"]], "get_submodule() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.get_submodule"]], "graph (torch.jit.scriptmodule property)": [[1272, "torch.jit.ScriptModule.graph"]], "half() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.half"]], "inlined_graph (torch.jit.scriptmodule property)": [[1272, "torch.jit.ScriptModule.inlined_graph"]], "ipu() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.ipu"]], "load_state_dict() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.load_state_dict"]], "modules() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.modules"]], "named_buffers() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.named_buffers"]], "named_children() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.named_children"]], "named_modules() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.named_modules"]], "named_parameters() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.named_parameters"]], "parameters() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.parameters"]], "register_backward_hook() 
(torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_backward_hook"]], "register_buffer() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_buffer"]], "register_forward_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_forward_hook"]], "register_forward_pre_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_forward_pre_hook"]], "register_full_backward_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_full_backward_hook"]], "register_full_backward_pre_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_full_backward_pre_hook"]], "register_load_state_dict_post_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_load_state_dict_post_hook"]], "register_module() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_module"]], "register_parameter() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_parameter"]], "register_state_dict_pre_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_state_dict_pre_hook"]], "requires_grad_() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.requires_grad_"]], "save() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.save"]], "set_extra_state() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.set_extra_state"]], "share_memory() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.share_memory"]], "state_dict() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.state_dict"]], "to() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.to"]], "to_empty() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.to_empty"]], "train() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.train"]], "type() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.type"]], "xpu() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.xpu"]], "zero_grad() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.zero_grad"]], "annotate() (in module torch.jit)": [[1273, "torch.jit.annotate"]], "enable_onednn_fusion() (in module torch.jit)": [[1274, "torch.jit.enable_onednn_fusion"]], "fork() (in module torch.jit)": [[1275, "torch.jit.fork"]], "freeze() (in module torch.jit)": [[1276, "torch.jit.freeze"]], "ignore() (in module torch.jit)": [[1277, "torch.jit.ignore"]], "interface() (in module torch.jit)": [[1278, "torch.jit.interface"]], "isinstance() (in module torch.jit)": [[1279, "torch.jit.isinstance"]], "load() (in module torch.jit)": [[1280, "torch.jit.load"]], "onednn_fusion_enabled() (in module torch.jit)": [[1281, "torch.jit.onednn_fusion_enabled"]], "optimize_for_inference() (in module torch.jit)": [[1282, "torch.jit.optimize_for_inference"]], "save() (in module torch.jit)": [[1283, "torch.jit.save"]], "script() (in module torch.jit)": [[1284, "torch.jit.script"]], "script_if_tracing() (in module torch.jit)": [[1285, "torch.jit.script_if_tracing"]], "set_fusion_strategy() (in module torch.jit)": [[1286, "torch.jit.set_fusion_strategy"]], "strict_fusion (class in torch.jit)": [[1287, "torch.jit.strict_fusion"]], "trace() (in module torch.jit)": [[1288, "torch.jit.trace"]], "trace_module() (in module torch.jit)": [[1289, "torch.jit.trace_module"]], "unused() (in module torch.jit)": [[1290, "torch.jit.unused"]], "wait() (in module torch.jit)": [[1291, 
"torch.jit.wait"]], "kaiser_window() (in module torch)": [[1292, "torch.kaiser_window"]], "kron() (in module torch)": [[1293, "torch.kron"]], "kthvalue() (in module torch)": [[1294, "torch.kthvalue"]], "lcm() (in module torch)": [[1295, "torch.lcm"]], "ldexp() (in module torch)": [[1296, "torch.ldexp"]], "le() (in module torch)": [[1297, "torch.le"]], "lerp() (in module torch)": [[1298, "torch.lerp"]], "less() (in module torch)": [[1299, "torch.less"]], "less_equal() (in module torch)": [[1300, "torch.less_equal"]], "lgamma() (in module torch)": [[1301, "torch.lgamma"]], "cholesky() (in module torch.linalg)": [[1302, "torch.linalg.cholesky"]], "cholesky_ex() (in module torch.linalg)": [[1303, "torch.linalg.cholesky_ex"]], "cond() (in module torch.linalg)": [[1304, "torch.linalg.cond"]], "cross() (in module torch.linalg)": [[1305, "torch.linalg.cross"]], "det() (in module torch.linalg)": [[1306, "torch.linalg.det"]], "diagonal() (in module torch.linalg)": [[1307, "torch.linalg.diagonal"]], "eig() (in module torch.linalg)": [[1308, "torch.linalg.eig"]], "eigh() (in module torch.linalg)": [[1309, "torch.linalg.eigh"]], "eigvals() (in module torch.linalg)": [[1310, "torch.linalg.eigvals"]], "eigvalsh() (in module torch.linalg)": [[1311, "torch.linalg.eigvalsh"]], "householder_product() (in module torch.linalg)": [[1312, "torch.linalg.householder_product"]], "inv() (in module torch.linalg)": [[1313, "torch.linalg.inv"]], "inv_ex() (in module torch.linalg)": [[1314, "torch.linalg.inv_ex"]], "ldl_factor() (in module torch.linalg)": [[1315, "torch.linalg.ldl_factor"]], "ldl_factor_ex() (in module torch.linalg)": [[1316, "torch.linalg.ldl_factor_ex"]], "ldl_solve() (in module torch.linalg)": [[1317, "torch.linalg.ldl_solve"]], "lstsq() (in module torch.linalg)": [[1318, "torch.linalg.lstsq"]], "lu() (in module torch.linalg)": [[1319, "torch.linalg.lu"]], "lu_factor() (in module torch.linalg)": [[1320, "torch.linalg.lu_factor"]], "lu_factor_ex() (in module torch.linalg)": [[1321, "torch.linalg.lu_factor_ex"]], "lu_solve() (in module torch.linalg)": [[1322, "torch.linalg.lu_solve"]], "matmul() (in module torch.linalg)": [[1323, "torch.linalg.matmul"]], "matrix_exp() (in module torch.linalg)": [[1324, "torch.linalg.matrix_exp"]], "matrix_norm() (in module torch.linalg)": [[1325, "torch.linalg.matrix_norm"]], "matrix_power() (in module torch.linalg)": [[1326, "torch.linalg.matrix_power"]], "matrix_rank() (in module torch.linalg)": [[1327, "torch.linalg.matrix_rank"]], "multi_dot() (in module torch.linalg)": [[1328, "torch.linalg.multi_dot"]], "norm() (in module torch.linalg)": [[1329, "torch.linalg.norm"]], "pinv() (in module torch.linalg)": [[1330, "torch.linalg.pinv"]], "qr() (in module torch.linalg)": [[1331, "torch.linalg.qr"]], "slogdet() (in module torch.linalg)": [[1332, "torch.linalg.slogdet"]], "solve() (in module torch.linalg)": [[1333, "torch.linalg.solve"]], "solve_ex() (in module torch.linalg)": [[1334, "torch.linalg.solve_ex"]], "solve_triangular() (in module torch.linalg)": [[1335, "torch.linalg.solve_triangular"]], "svd() (in module torch.linalg)": [[1336, "torch.linalg.svd"]], "svdvals() (in module torch.linalg)": [[1337, "torch.linalg.svdvals"]], "tensorinv() (in module torch.linalg)": [[1338, "torch.linalg.tensorinv"]], "tensorsolve() (in module torch.linalg)": [[1339, "torch.linalg.tensorsolve"]], "vander() (in module torch.linalg)": [[1340, "torch.linalg.vander"]], "vecdot() (in module torch.linalg)": [[1341, "torch.linalg.vecdot"]], "vector_norm() (in module torch.linalg)": 
[[1342, "torch.linalg.vector_norm"]], "linspace() (in module torch)": [[1343, "torch.linspace"]], "load() (in module torch)": [[1344, "torch.load"]], "lobpcg() (in module torch)": [[1345, "torch.lobpcg"]], "log() (in module torch)": [[1346, "torch.log"]], "log10() (in module torch)": [[1347, "torch.log10"]], "log1p() (in module torch)": [[1348, "torch.log1p"]], "log2() (in module torch)": [[1349, "torch.log2"]], "logaddexp() (in module torch)": [[1350, "torch.logaddexp"]], "logaddexp2() (in module torch)": [[1351, "torch.logaddexp2"]], "logcumsumexp() (in module torch)": [[1352, "torch.logcumsumexp"]], "logdet() (in module torch)": [[1353, "torch.logdet"]], "logical_and() (in module torch)": [[1354, "torch.logical_and"]], "logical_not() (in module torch)": [[1355, "torch.logical_not"]], "logical_or() (in module torch)": [[1356, "torch.logical_or"]], "logical_xor() (in module torch)": [[1357, "torch.logical_xor"]], "logit() (in module torch)": [[1358, "torch.logit"]], "logspace() (in module torch)": [[1359, "torch.logspace"]], "logsumexp() (in module torch)": [[1360, "torch.logsumexp"]], "lt() (in module torch)": [[1361, "torch.lt"]], "lu() (in module torch)": [[1362, "torch.lu"]], "lu_solve() (in module torch)": [[1363, "torch.lu_solve"]], "lu_unpack() (in module torch)": [[1364, "torch.lu_unpack"]], "manual_seed() (in module torch)": [[1365, "torch.manual_seed"]], "masked_select() (in module torch)": [[1366, "torch.masked_select"]], "matmul() (in module torch)": [[1367, "torch.matmul"]], "matrix_exp() (in module torch)": [[1368, "torch.matrix_exp"]], "matrix_power() (in module torch)": [[1369, "torch.matrix_power"]], "max() (in module torch)": [[1370, "torch.max"]], "maximum() (in module torch)": [[1371, "torch.maximum"]], "mean() (in module torch)": [[1372, "torch.mean"]], "median() (in module torch)": [[1373, "torch.median"]], "meshgrid() (in module torch)": [[1374, "torch.meshgrid"]], "min() (in module torch)": [[1375, "torch.min"]], "minimum() (in module torch)": [[1376, "torch.minimum"]], "mm() (in module torch)": [[1377, "torch.mm"]], "mode() (in module torch)": [[1378, "torch.mode"]], "moveaxis() (in module torch)": [[1379, "torch.moveaxis"]], "movedim() (in module torch)": [[1380, "torch.movedim"]], "current_allocated_memory() (in module torch.mps)": [[1381, "torch.mps.current_allocated_memory"]], "device_count() (in module torch.mps)": [[1382, "torch.mps.device_count"]], "driver_allocated_memory() (in module torch.mps)": [[1383, "torch.mps.driver_allocated_memory"]], "empty_cache() (in module torch.mps)": [[1384, "torch.mps.empty_cache"]], "event (class in torch.mps.event)": [[1385, "torch.mps.event.Event"]], "elapsed_time() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.elapsed_time"]], "query() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.query"]], "record() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.record"]], "synchronize() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.synchronize"]], "wait() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.wait"]], "get_rng_state() (in module torch.mps)": [[1386, "torch.mps.get_rng_state"]], "manual_seed() (in module torch.mps)": [[1387, "torch.mps.manual_seed"]], "profile() (in module torch.mps.profiler)": [[1388, "torch.mps.profiler.profile"]], "start() (in module torch.mps.profiler)": [[1389, "torch.mps.profiler.start"]], "stop() (in module torch.mps.profiler)": [[1390, "torch.mps.profiler.stop"]], "seed() (in module torch.mps)": [[1391, 
"torch.mps.seed"]], "set_per_process_memory_fraction() (in module torch.mps)": [[1392, "torch.mps.set_per_process_memory_fraction"]], "set_rng_state() (in module torch.mps)": [[1393, "torch.mps.set_rng_state"]], "synchronize() (in module torch.mps)": [[1394, "torch.mps.synchronize"]], "msort() (in module torch)": [[1395, "torch.msort"]], "deferredmtiacallerror": [[1396, "torch.mtia.DeferredMtiaCallError"]], "event (class in torch.mtia)": [[1397, "torch.mtia.Event"]], "stream (class in torch.mtia)": [[1398, "torch.mtia.Stream"]], "streamcontext (class in torch.mtia)": [[1399, "torch.mtia.StreamContext"]], "current_device() (in module torch.mtia)": [[1400, "torch.mtia.current_device"]], "current_stream() (in module torch.mtia)": [[1401, "torch.mtia.current_stream"]], "default_stream() (in module torch.mtia)": [[1402, "torch.mtia.default_stream"]], "device (class in torch.mtia)": [[1403, "torch.mtia.device"]], "device_count() (in module torch.mtia)": [[1404, "torch.mtia.device_count"]], "init() (in module torch.mtia)": [[1405, "torch.mtia.init"]], "is_available() (in module torch.mtia)": [[1406, "torch.mtia.is_available"]], "is_initialized() (in module torch.mtia)": [[1407, "torch.mtia.is_initialized"]], "set_stream() (in module torch.mtia)": [[1408, "torch.mtia.set_stream"]], "stream() (in module torch.mtia)": [[1409, "torch.mtia.stream"]], "synchronize() (in module torch.mtia)": [[1410, "torch.mtia.synchronize"]], "mul() (in module torch)": [[1411, "torch.mul"]], "multinomial() (in module torch)": [[1412, "torch.multinomial"]], "multiply() (in module torch)": [[1413, "torch.multiply"]], "mv() (in module torch)": [[1414, "torch.mv"]], "mvlgamma() (in module torch)": [[1415, "torch.mvlgamma"]], "nan_to_num() (in module torch)": [[1416, "torch.nan_to_num"]], "nanmean() (in module torch)": [[1417, "torch.nanmean"]], "nanmedian() (in module torch)": [[1418, "torch.nanmedian"]], "nanquantile() (in module torch)": [[1419, "torch.nanquantile"]], "nansum() (in module torch)": [[1420, "torch.nansum"]], "narrow() (in module torch)": [[1421, "torch.narrow"]], "narrow_copy() (in module torch)": [[1422, "torch.narrow_copy"]], "ne() (in module torch)": [[1423, "torch.ne"]], "neg() (in module torch)": [[1424, "torch.neg"]], "negative() (in module torch)": [[1425, "torch.negative"]], "nextafter() (in module torch)": [[1426, "torch.nextafter"]], "adaptiveavgpool1d (class in torch.nn)": [[1427, "torch.nn.AdaptiveAvgPool1d"]], "adaptiveavgpool2d (class in torch.nn)": [[1428, "torch.nn.AdaptiveAvgPool2d"]], "adaptiveavgpool3d (class in torch.nn)": [[1429, "torch.nn.AdaptiveAvgPool3d"]], "adaptivelogsoftmaxwithloss (class in torch.nn)": [[1430, "torch.nn.AdaptiveLogSoftmaxWithLoss"]], "log_prob() (torch.nn.adaptivelogsoftmaxwithloss method)": [[1430, "torch.nn.AdaptiveLogSoftmaxWithLoss.log_prob"]], "predict() (torch.nn.adaptivelogsoftmaxwithloss method)": [[1430, "torch.nn.AdaptiveLogSoftmaxWithLoss.predict"]], "adaptivemaxpool1d (class in torch.nn)": [[1431, "torch.nn.AdaptiveMaxPool1d"]], "adaptivemaxpool2d (class in torch.nn)": [[1432, "torch.nn.AdaptiveMaxPool2d"]], "adaptivemaxpool3d (class in torch.nn)": [[1433, "torch.nn.AdaptiveMaxPool3d"]], "alphadropout (class in torch.nn)": [[1434, "torch.nn.AlphaDropout"]], "avgpool1d (class in torch.nn)": [[1435, "torch.nn.AvgPool1d"]], "avgpool2d (class in torch.nn)": [[1436, "torch.nn.AvgPool2d"]], "avgpool3d (class in torch.nn)": [[1437, "torch.nn.AvgPool3d"]], "bceloss (class in torch.nn)": [[1438, "torch.nn.BCELoss"]], "bcewithlogitsloss (class in torch.nn)": 
[[1439, "torch.nn.BCEWithLogitsLoss"]], "batchnorm1d (class in torch.nn)": [[1440, "torch.nn.BatchNorm1d"]], "batchnorm2d (class in torch.nn)": [[1441, "torch.nn.BatchNorm2d"]], "batchnorm3d (class in torch.nn)": [[1442, "torch.nn.BatchNorm3d"]], "bilinear (class in torch.nn)": [[1443, "torch.nn.Bilinear"]], "celu (class in torch.nn)": [[1444, "torch.nn.CELU"]], "ctcloss (class in torch.nn)": [[1445, "torch.nn.CTCLoss"]], "channelshuffle (class in torch.nn)": [[1446, "torch.nn.ChannelShuffle"]], "circularpad1d (class in torch.nn)": [[1447, "torch.nn.CircularPad1d"]], "circularpad2d (class in torch.nn)": [[1448, "torch.nn.CircularPad2d"]], "circularpad3d (class in torch.nn)": [[1449, "torch.nn.CircularPad3d"]], "constantpad1d (class in torch.nn)": [[1450, "torch.nn.ConstantPad1d"]], "constantpad2d (class in torch.nn)": [[1451, "torch.nn.ConstantPad2d"]], "constantpad3d (class in torch.nn)": [[1452, "torch.nn.ConstantPad3d"]], "conv1d (class in torch.nn)": [[1453, "torch.nn.Conv1d"]], "conv2d (class in torch.nn)": [[1454, "torch.nn.Conv2d"]], "conv3d (class in torch.nn)": [[1455, "torch.nn.Conv3d"]], "convtranspose1d (class in torch.nn)": [[1456, "torch.nn.ConvTranspose1d"]], "convtranspose2d (class in torch.nn)": [[1457, "torch.nn.ConvTranspose2d"]], "convtranspose3d (class in torch.nn)": [[1458, "torch.nn.ConvTranspose3d"]], "cosineembeddingloss (class in torch.nn)": [[1459, "torch.nn.CosineEmbeddingLoss"]], "cosinesimilarity (class in torch.nn)": [[1460, "torch.nn.CosineSimilarity"]], "crossentropyloss (class in torch.nn)": [[1461, "torch.nn.CrossEntropyLoss"]], "dataparallel (class in torch.nn)": [[1462, "torch.nn.DataParallel"]], "dropout (class in torch.nn)": [[1463, "torch.nn.Dropout"]], "dropout1d (class in torch.nn)": [[1464, "torch.nn.Dropout1d"]], "dropout2d (class in torch.nn)": [[1465, "torch.nn.Dropout2d"]], "dropout3d (class in torch.nn)": [[1466, "torch.nn.Dropout3d"]], "elu (class in torch.nn)": [[1467, "torch.nn.ELU"]], "embedding (class in torch.nn)": [[1468, "torch.nn.Embedding"]], "from_pretrained() (torch.nn.embedding class method)": [[1468, "torch.nn.Embedding.from_pretrained"]], "embeddingbag (class in torch.nn)": [[1469, "torch.nn.EmbeddingBag"]], "forward() (torch.nn.embeddingbag method)": [[1469, "torch.nn.EmbeddingBag.forward"]], "from_pretrained() (torch.nn.embeddingbag class method)": [[1469, "torch.nn.EmbeddingBag.from_pretrained"]], "featurealphadropout (class in torch.nn)": [[1470, "torch.nn.FeatureAlphaDropout"]], "flatten (class in torch.nn)": [[1471, "torch.nn.Flatten"]], "fold (class in torch.nn)": [[1472, "torch.nn.Fold"]], "fractionalmaxpool2d (class in torch.nn)": [[1473, "torch.nn.FractionalMaxPool2d"]], "fractionalmaxpool3d (class in torch.nn)": [[1474, "torch.nn.FractionalMaxPool3d"]], "gelu (class in torch.nn)": [[1475, "torch.nn.GELU"]], "glu (class in torch.nn)": [[1476, "torch.nn.GLU"]], "gru (class in torch.nn)": [[1477, "torch.nn.GRU"]], "grucell (class in torch.nn)": [[1478, "torch.nn.GRUCell"]], "gaussiannllloss (class in torch.nn)": [[1479, "torch.nn.GaussianNLLLoss"]], "groupnorm (class in torch.nn)": [[1480, "torch.nn.GroupNorm"]], "hardshrink (class in torch.nn)": [[1481, "torch.nn.Hardshrink"]], "hardsigmoid (class in torch.nn)": [[1482, "torch.nn.Hardsigmoid"]], "hardswish (class in torch.nn)": [[1483, "torch.nn.Hardswish"]], "hardtanh (class in torch.nn)": [[1484, "torch.nn.Hardtanh"]], "hingeembeddingloss (class in torch.nn)": [[1485, "torch.nn.HingeEmbeddingLoss"]], "huberloss (class in torch.nn)": [[1486, "torch.nn.HuberLoss"]], 
"identity (class in torch.nn)": [[1487, "torch.nn.Identity"]], "instancenorm1d (class in torch.nn)": [[1488, "torch.nn.InstanceNorm1d"]], "instancenorm2d (class in torch.nn)": [[1489, "torch.nn.InstanceNorm2d"]], "instancenorm3d (class in torch.nn)": [[1490, "torch.nn.InstanceNorm3d"]], "kldivloss (class in torch.nn)": [[1491, "torch.nn.KLDivLoss"]], "l1loss (class in torch.nn)": [[1492, "torch.nn.L1Loss"]], "lppool1d (class in torch.nn)": [[1493, "torch.nn.LPPool1d"]], "lppool2d (class in torch.nn)": [[1494, "torch.nn.LPPool2d"]], "lppool3d (class in torch.nn)": [[1495, "torch.nn.LPPool3d"]], "lstm (class in torch.nn)": [[1496, "torch.nn.LSTM"]], "lstmcell (class in torch.nn)": [[1497, "torch.nn.LSTMCell"]], "layernorm (class in torch.nn)": [[1498, "torch.nn.LayerNorm"]], "lazybatchnorm1d (class in torch.nn)": [[1499, "torch.nn.LazyBatchNorm1d"]], "cls_to_become (torch.nn.lazybatchnorm1d attribute)": [[1499, "torch.nn.LazyBatchNorm1d.cls_to_become"]], "lazybatchnorm2d (class in torch.nn)": [[1500, "torch.nn.LazyBatchNorm2d"]], "cls_to_become (torch.nn.lazybatchnorm2d attribute)": [[1500, "torch.nn.LazyBatchNorm2d.cls_to_become"]], "lazybatchnorm3d (class in torch.nn)": [[1501, "torch.nn.LazyBatchNorm3d"]], "cls_to_become (torch.nn.lazybatchnorm3d attribute)": [[1501, "torch.nn.LazyBatchNorm3d.cls_to_become"]], "lazyconv1d (class in torch.nn)": [[1502, "torch.nn.LazyConv1d"]], "cls_to_become (torch.nn.lazyconv1d attribute)": [[1502, "torch.nn.LazyConv1d.cls_to_become"]], "lazyconv2d (class in torch.nn)": [[1503, "torch.nn.LazyConv2d"]], "cls_to_become (torch.nn.lazyconv2d attribute)": [[1503, "torch.nn.LazyConv2d.cls_to_become"]], "lazyconv3d (class in torch.nn)": [[1504, "torch.nn.LazyConv3d"]], "cls_to_become (torch.nn.lazyconv3d attribute)": [[1504, "torch.nn.LazyConv3d.cls_to_become"]], "lazyconvtranspose1d (class in torch.nn)": [[1505, "torch.nn.LazyConvTranspose1d"]], "cls_to_become (torch.nn.lazyconvtranspose1d attribute)": [[1505, "torch.nn.LazyConvTranspose1d.cls_to_become"]], "lazyconvtranspose2d (class in torch.nn)": [[1506, "torch.nn.LazyConvTranspose2d"]], "cls_to_become (torch.nn.lazyconvtranspose2d attribute)": [[1506, "torch.nn.LazyConvTranspose2d.cls_to_become"]], "lazyconvtranspose3d (class in torch.nn)": [[1507, "torch.nn.LazyConvTranspose3d"]], "cls_to_become (torch.nn.lazyconvtranspose3d attribute)": [[1507, "torch.nn.LazyConvTranspose3d.cls_to_become"]], "lazyinstancenorm1d (class in torch.nn)": [[1508, "torch.nn.LazyInstanceNorm1d"]], "cls_to_become (torch.nn.lazyinstancenorm1d attribute)": [[1508, "torch.nn.LazyInstanceNorm1d.cls_to_become"]], "lazyinstancenorm2d (class in torch.nn)": [[1509, "torch.nn.LazyInstanceNorm2d"]], "cls_to_become (torch.nn.lazyinstancenorm2d attribute)": [[1509, "torch.nn.LazyInstanceNorm2d.cls_to_become"]], "lazyinstancenorm3d (class in torch.nn)": [[1510, "torch.nn.LazyInstanceNorm3d"]], "cls_to_become (torch.nn.lazyinstancenorm3d attribute)": [[1510, "torch.nn.LazyInstanceNorm3d.cls_to_become"]], "lazylinear (class in torch.nn)": [[1511, "torch.nn.LazyLinear"]], "cls_to_become (torch.nn.lazylinear attribute)": [[1511, "torch.nn.LazyLinear.cls_to_become"]], "leakyrelu (class in torch.nn)": [[1512, "torch.nn.LeakyReLU"]], "linear (class in torch.nn)": [[1513, "torch.nn.Linear"]], "localresponsenorm (class in torch.nn)": [[1514, "torch.nn.LocalResponseNorm"]], "logsigmoid (class in torch.nn)": [[1515, "torch.nn.LogSigmoid"]], "logsoftmax (class in torch.nn)": [[1516, "torch.nn.LogSoftmax"]], "mseloss (class in torch.nn)": [[1517, 
"torch.nn.MSELoss"]], "marginrankingloss (class in torch.nn)": [[1518, "torch.nn.MarginRankingLoss"]], "maxpool1d (class in torch.nn)": [[1519, "torch.nn.MaxPool1d"]], "maxpool2d (class in torch.nn)": [[1520, "torch.nn.MaxPool2d"]], "maxpool3d (class in torch.nn)": [[1521, "torch.nn.MaxPool3d"]], "maxunpool1d (class in torch.nn)": [[1522, "torch.nn.MaxUnpool1d"]], "maxunpool2d (class in torch.nn)": [[1523, "torch.nn.MaxUnpool2d"]], "maxunpool3d (class in torch.nn)": [[1524, "torch.nn.MaxUnpool3d"]], "mish (class in torch.nn)": [[1525, "torch.nn.Mish"]], "module (class in torch.nn)": [[1526, "torch.nn.Module"]], "add_module() (torch.nn.module method)": [[1526, "torch.nn.Module.add_module"]], "apply() (torch.nn.module method)": [[1526, "torch.nn.Module.apply"]], "bfloat16() (torch.nn.module method)": [[1526, "torch.nn.Module.bfloat16"]], "buffers() (torch.nn.module method)": [[1526, "torch.nn.Module.buffers"]], "children() (torch.nn.module method)": [[1526, "torch.nn.Module.children"]], "compile() (torch.nn.module method)": [[1526, "torch.nn.Module.compile"]], "cpu() (torch.nn.module method)": [[1526, "torch.nn.Module.cpu"]], "cuda() (torch.nn.module method)": [[1526, "torch.nn.Module.cuda"]], "double() (torch.nn.module method)": [[1526, "torch.nn.Module.double"]], "eval() (torch.nn.module method)": [[1526, "torch.nn.Module.eval"]], "extra_repr() (torch.nn.module method)": [[1526, "torch.nn.Module.extra_repr"]], "float() (torch.nn.module method)": [[1526, "torch.nn.Module.float"]], "forward() (torch.nn.module method)": [[1526, "torch.nn.Module.forward"]], "get_buffer() (torch.nn.module method)": [[1526, "torch.nn.Module.get_buffer"]], "get_extra_state() (torch.nn.module method)": [[1526, "torch.nn.Module.get_extra_state"]], "get_parameter() (torch.nn.module method)": [[1526, "torch.nn.Module.get_parameter"]], "get_submodule() (torch.nn.module method)": [[1526, "torch.nn.Module.get_submodule"]], "half() (torch.nn.module method)": [[1526, "torch.nn.Module.half"]], "ipu() (torch.nn.module method)": [[1526, "torch.nn.Module.ipu"]], "load_state_dict() (torch.nn.module method)": [[1526, "torch.nn.Module.load_state_dict"]], "modules() (torch.nn.module method)": [[1526, "torch.nn.Module.modules"]], "named_buffers() (torch.nn.module method)": [[1526, "torch.nn.Module.named_buffers"]], "named_children() (torch.nn.module method)": [[1526, "torch.nn.Module.named_children"]], "named_modules() (torch.nn.module method)": [[1526, "torch.nn.Module.named_modules"]], "named_parameters() (torch.nn.module method)": [[1526, "torch.nn.Module.named_parameters"]], "parameters() (torch.nn.module method)": [[1526, "torch.nn.Module.parameters"]], "register_backward_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_backward_hook"]], "register_buffer() (torch.nn.module method)": [[1526, "torch.nn.Module.register_buffer"]], "register_forward_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_forward_hook"]], "register_forward_pre_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_forward_pre_hook"]], "register_full_backward_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_full_backward_hook"]], "register_full_backward_pre_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_full_backward_pre_hook"]], "register_load_state_dict_post_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_load_state_dict_post_hook"]], "register_module() (torch.nn.module method)": [[1526, "torch.nn.Module.register_module"]], "register_parameter() 
(torch.nn.module method)": [[1526, "torch.nn.Module.register_parameter"]], "register_state_dict_pre_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_state_dict_pre_hook"]], "requires_grad_() (torch.nn.module method)": [[1526, "torch.nn.Module.requires_grad_"]], "set_extra_state() (torch.nn.module method)": [[1526, "torch.nn.Module.set_extra_state"]], "share_memory() (torch.nn.module method)": [[1526, "torch.nn.Module.share_memory"]], "state_dict() (torch.nn.module method)": [[1526, "torch.nn.Module.state_dict"]], "to() (torch.nn.module method)": [[1526, "torch.nn.Module.to"]], "to_empty() (torch.nn.module method)": [[1526, "torch.nn.Module.to_empty"]], "train() (torch.nn.module method)": [[1526, "torch.nn.Module.train"]], "type() (torch.nn.module method)": [[1526, "torch.nn.Module.type"]], "xpu() (torch.nn.module method)": [[1526, "torch.nn.Module.xpu"]], "zero_grad() (torch.nn.module method)": [[1526, "torch.nn.Module.zero_grad"]], "moduledict (class in torch.nn)": [[1527, "torch.nn.ModuleDict"]], "clear() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.clear"]], "items() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.items"]], "keys() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.keys"]], "pop() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.pop"]], "update() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.update"]], "values() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.values"]], "modulelist (class in torch.nn)": [[1528, "torch.nn.ModuleList"]], "append() (torch.nn.modulelist method)": [[1528, "torch.nn.ModuleList.append"]], "extend() (torch.nn.modulelist method)": [[1528, "torch.nn.ModuleList.extend"]], "insert() (torch.nn.modulelist method)": [[1528, "torch.nn.ModuleList.insert"]], "multilabelmarginloss (class in torch.nn)": [[1529, "torch.nn.MultiLabelMarginLoss"]], "multilabelsoftmarginloss (class in torch.nn)": [[1530, "torch.nn.MultiLabelSoftMarginLoss"]], "multimarginloss (class in torch.nn)": [[1531, "torch.nn.MultiMarginLoss"]], "multiheadattention (class in torch.nn)": [[1532, "torch.nn.MultiheadAttention"]], "forward() (torch.nn.multiheadattention method)": [[1532, "torch.nn.MultiheadAttention.forward"]], "merge_masks() (torch.nn.multiheadattention method)": [[1532, "torch.nn.MultiheadAttention.merge_masks"]], "nllloss (class in torch.nn)": [[1533, "torch.nn.NLLLoss"]], "prelu (class in torch.nn)": [[1534, "torch.nn.PReLU"]], "pairwisedistance (class in torch.nn)": [[1535, "torch.nn.PairwiseDistance"]], "parameterdict (class in torch.nn)": [[1536, "torch.nn.ParameterDict"]], "clear() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.clear"]], "copy() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.copy"]], "fromkeys() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.fromkeys"]], "get() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.get"]], "items() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.items"]], "keys() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.keys"]], "pop() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.pop"]], "popitem() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.popitem"]], "setdefault() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.setdefault"]], "update() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.update"]], "values() (torch.nn.parameterdict method)": [[1536, 
"torch.nn.ParameterDict.values"]], "parameterlist (class in torch.nn)": [[1537, "torch.nn.ParameterList"]], "append() (torch.nn.parameterlist method)": [[1537, "torch.nn.ParameterList.append"]], "extend() (torch.nn.parameterlist method)": [[1537, "torch.nn.ParameterList.extend"]], "pixelshuffle (class in torch.nn)": [[1538, "torch.nn.PixelShuffle"]], "pixelunshuffle (class in torch.nn)": [[1539, "torch.nn.PixelUnshuffle"]], "poissonnllloss (class in torch.nn)": [[1540, "torch.nn.PoissonNLLLoss"]], "rmsnorm (class in torch.nn)": [[1541, "torch.nn.RMSNorm"]], "extra_repr() (torch.nn.rmsnorm method)": [[1541, "torch.nn.RMSNorm.extra_repr"]], "forward() (torch.nn.rmsnorm method)": [[1541, "torch.nn.RMSNorm.forward"]], "reset_parameters() (torch.nn.rmsnorm method)": [[1541, "torch.nn.RMSNorm.reset_parameters"]], "rnn (class in torch.nn)": [[1542, "torch.nn.RNN"]], "rnnbase (class in torch.nn)": [[1543, "torch.nn.RNNBase"]], "flatten_parameters() (torch.nn.rnnbase method)": [[1543, "torch.nn.RNNBase.flatten_parameters"]], "rnncell (class in torch.nn)": [[1544, "torch.nn.RNNCell"]], "rrelu (class in torch.nn)": [[1545, "torch.nn.RReLU"]], "relu (class in torch.nn)": [[1546, "torch.nn.ReLU"]], "relu6 (class in torch.nn)": [[1547, "torch.nn.ReLU6"]], "reflectionpad1d (class in torch.nn)": [[1548, "torch.nn.ReflectionPad1d"]], "reflectionpad2d (class in torch.nn)": [[1549, "torch.nn.ReflectionPad2d"]], "reflectionpad3d (class in torch.nn)": [[1550, "torch.nn.ReflectionPad3d"]], "replicationpad1d (class in torch.nn)": [[1551, "torch.nn.ReplicationPad1d"]], "replicationpad2d (class in torch.nn)": [[1552, "torch.nn.ReplicationPad2d"]], "replicationpad3d (class in torch.nn)": [[1553, "torch.nn.ReplicationPad3d"]], "selu (class in torch.nn)": [[1554, "torch.nn.SELU"]], "sequential (class in torch.nn)": [[1555, "torch.nn.Sequential"]], "append() (torch.nn.sequential method)": [[1555, "torch.nn.Sequential.append"]], "silu (class in torch.nn)": [[1556, "torch.nn.SiLU"]], "sigmoid (class in torch.nn)": [[1557, "torch.nn.Sigmoid"]], "smoothl1loss (class in torch.nn)": [[1558, "torch.nn.SmoothL1Loss"]], "softmarginloss (class in torch.nn)": [[1559, "torch.nn.SoftMarginLoss"]], "softmax (class in torch.nn)": [[1560, "torch.nn.Softmax"]], "softmax2d (class in torch.nn)": [[1561, "torch.nn.Softmax2d"]], "softmin (class in torch.nn)": [[1562, "torch.nn.Softmin"]], "softplus (class in torch.nn)": [[1563, "torch.nn.Softplus"]], "softshrink (class in torch.nn)": [[1564, "torch.nn.Softshrink"]], "softsign (class in torch.nn)": [[1565, "torch.nn.Softsign"]], "syncbatchnorm (class in torch.nn)": [[1566, "torch.nn.SyncBatchNorm"]], "convert_sync_batchnorm() (torch.nn.syncbatchnorm class method)": [[1566, "torch.nn.SyncBatchNorm.convert_sync_batchnorm"]], "tanh (class in torch.nn)": [[1567, "torch.nn.Tanh"]], "tanhshrink (class in torch.nn)": [[1568, "torch.nn.Tanhshrink"]], "threshold (class in torch.nn)": [[1569, "torch.nn.Threshold"]], "transformer (class in torch.nn)": [[1570, "torch.nn.Transformer"]], "forward() (torch.nn.transformer method)": [[1570, "torch.nn.Transformer.forward"]], "generate_square_subsequent_mask() (torch.nn.transformer static method)": [[1570, "torch.nn.Transformer.generate_square_subsequent_mask"]], "transformerdecoder (class in torch.nn)": [[1571, "torch.nn.TransformerDecoder"]], "forward() (torch.nn.transformerdecoder method)": [[1571, "torch.nn.TransformerDecoder.forward"]], "transformerdecoderlayer (class in torch.nn)": [[1572, "torch.nn.TransformerDecoderLayer"]], "forward() 
(torch.nn.transformerdecoderlayer method)": [[1572, "torch.nn.TransformerDecoderLayer.forward"]], "transformerencoder (class in torch.nn)": [[1573, "torch.nn.TransformerEncoder"]], "forward() (torch.nn.transformerencoder method)": [[1573, "torch.nn.TransformerEncoder.forward"]], "transformerencoderlayer (class in torch.nn)": [[1574, "torch.nn.TransformerEncoderLayer"]], "forward() (torch.nn.transformerencoderlayer method)": [[1574, "torch.nn.TransformerEncoderLayer.forward"]], "tripletmarginloss (class in torch.nn)": [[1575, "torch.nn.TripletMarginLoss"]], "tripletmarginwithdistanceloss (class in torch.nn)": [[1576, "torch.nn.TripletMarginWithDistanceLoss"]], "unflatten (class in torch.nn)": [[1577, "torch.nn.Unflatten"]], "unfold (class in torch.nn)": [[1578, "torch.nn.Unfold"]], "upsample (class in torch.nn)": [[1579, "torch.nn.Upsample"]], "upsamplingbilinear2d (class in torch.nn)": [[1580, "torch.nn.UpsamplingBilinear2d"]], "upsamplingnearest2d (class in torch.nn)": [[1581, "torch.nn.UpsamplingNearest2d"]], "zeropad1d (class in torch.nn)": [[1582, "torch.nn.ZeroPad1d"]], "zeropad2d (class in torch.nn)": [[1583, "torch.nn.ZeroPad2d"]], "zeropad3d (class in torch.nn)": [[1584, "torch.nn.ZeroPad3d"]], "sdpbackend (class in torch.nn.attention)": [[1585, "torch.nn.attention.SDPBackend"]], "name (torch.nn.attention.sdpbackend property)": [[1585, "torch.nn.attention.SDPBackend.name"]], "causalbias (class in torch.nn.attention.bias)": [[1586, "torch.nn.attention.bias.CausalBias"]], "causalvariant (class in torch.nn.attention.bias)": [[1587, "torch.nn.attention.bias.CausalVariant"]], "causal_lower_right() (in module torch.nn.attention.bias)": [[1588, "torch.nn.attention.bias.causal_lower_right"]], "causal_upper_left() (in module torch.nn.attention.bias)": [[1589, "torch.nn.attention.bias.causal_upper_left"]], "sdpa_kernel() (in module torch.nn.attention)": [[1590, "torch.nn.attention.sdpa_kernel"]], "adaptive_avg_pool1d() (in module torch.nn.functional)": [[1591, "torch.nn.functional.adaptive_avg_pool1d"]], "adaptive_avg_pool2d() (in module torch.nn.functional)": [[1592, "torch.nn.functional.adaptive_avg_pool2d"]], "adaptive_avg_pool3d() (in module torch.nn.functional)": [[1593, "torch.nn.functional.adaptive_avg_pool3d"]], "adaptive_max_pool1d() (in module torch.nn.functional)": [[1594, "torch.nn.functional.adaptive_max_pool1d"]], "adaptive_max_pool2d() (in module torch.nn.functional)": [[1595, "torch.nn.functional.adaptive_max_pool2d"]], "adaptive_max_pool3d() (in module torch.nn.functional)": [[1596, "torch.nn.functional.adaptive_max_pool3d"]], "affine_grid() (in module torch.nn.functional)": [[1597, "torch.nn.functional.affine_grid"]], "alpha_dropout() (in module torch.nn.functional)": [[1598, "torch.nn.functional.alpha_dropout"]], "avg_pool1d() (in module torch.nn.functional)": [[1599, "torch.nn.functional.avg_pool1d"]], "avg_pool2d() (in module torch.nn.functional)": [[1600, "torch.nn.functional.avg_pool2d"]], "avg_pool3d() (in module torch.nn.functional)": [[1601, "torch.nn.functional.avg_pool3d"]], "batch_norm() (in module torch.nn.functional)": [[1602, "torch.nn.functional.batch_norm"]], "bilinear() (in module torch.nn.functional)": [[1603, "torch.nn.functional.bilinear"]], "binary_cross_entropy() (in module torch.nn.functional)": [[1604, "torch.nn.functional.binary_cross_entropy"]], "binary_cross_entropy_with_logits() (in module torch.nn.functional)": [[1605, "torch.nn.functional.binary_cross_entropy_with_logits"]], "celu() (in module torch.nn.functional)": [[1606, 
"torch.nn.functional.celu"]], "conv1d() (in module torch.nn.functional)": [[1607, "torch.nn.functional.conv1d"]], "conv2d() (in module torch.nn.functional)": [[1608, "torch.nn.functional.conv2d"]], "conv3d() (in module torch.nn.functional)": [[1609, "torch.nn.functional.conv3d"]], "conv_transpose1d() (in module torch.nn.functional)": [[1610, "torch.nn.functional.conv_transpose1d"]], "conv_transpose2d() (in module torch.nn.functional)": [[1611, "torch.nn.functional.conv_transpose2d"]], "conv_transpose3d() (in module torch.nn.functional)": [[1612, "torch.nn.functional.conv_transpose3d"]], "cosine_embedding_loss() (in module torch.nn.functional)": [[1613, "torch.nn.functional.cosine_embedding_loss"]], "cosine_similarity() (in module torch.nn.functional)": [[1614, "torch.nn.functional.cosine_similarity"]], "cross_entropy() (in module torch.nn.functional)": [[1615, "torch.nn.functional.cross_entropy"]], "ctc_loss() (in module torch.nn.functional)": [[1616, "torch.nn.functional.ctc_loss"]], "dropout() (in module torch.nn.functional)": [[1617, "torch.nn.functional.dropout"]], "dropout1d() (in module torch.nn.functional)": [[1618, "torch.nn.functional.dropout1d"]], "dropout2d() (in module torch.nn.functional)": [[1619, "torch.nn.functional.dropout2d"]], "dropout3d() (in module torch.nn.functional)": [[1620, "torch.nn.functional.dropout3d"]], "elu() (in module torch.nn.functional)": [[1621, "torch.nn.functional.elu"]], "elu_() (in module torch.nn.functional)": [[1622, "torch.nn.functional.elu_"]], "embedding() (in module torch.nn.functional)": [[1623, "torch.nn.functional.embedding"]], "embedding_bag() (in module torch.nn.functional)": [[1624, "torch.nn.functional.embedding_bag"]], "feature_alpha_dropout() (in module torch.nn.functional)": [[1625, "torch.nn.functional.feature_alpha_dropout"]], "fold() (in module torch.nn.functional)": [[1626, "torch.nn.functional.fold"]], "fractional_max_pool2d() (in module torch.nn.functional)": [[1627, "torch.nn.functional.fractional_max_pool2d"]], "fractional_max_pool3d() (in module torch.nn.functional)": [[1628, "torch.nn.functional.fractional_max_pool3d"]], "gaussian_nll_loss() (in module torch.nn.functional)": [[1629, "torch.nn.functional.gaussian_nll_loss"]], "gelu() (in module torch.nn.functional)": [[1630, "torch.nn.functional.gelu"]], "glu() (in module torch.nn.functional)": [[1631, "torch.nn.functional.glu"]], "grid_sample() (in module torch.nn.functional)": [[1632, "torch.nn.functional.grid_sample"]], "group_norm() (in module torch.nn.functional)": [[1633, "torch.nn.functional.group_norm"]], "gumbel_softmax() (in module torch.nn.functional)": [[1634, "torch.nn.functional.gumbel_softmax"]], "hardshrink() (in module torch.nn.functional)": [[1635, "torch.nn.functional.hardshrink"]], "hardsigmoid() (in module torch.nn.functional)": [[1636, "torch.nn.functional.hardsigmoid"]], "hardswish() (in module torch.nn.functional)": [[1637, "torch.nn.functional.hardswish"]], "hardtanh() (in module torch.nn.functional)": [[1638, "torch.nn.functional.hardtanh"]], "hardtanh_() (in module torch.nn.functional)": [[1639, "torch.nn.functional.hardtanh_"]], "hinge_embedding_loss() (in module torch.nn.functional)": [[1640, "torch.nn.functional.hinge_embedding_loss"]], "huber_loss() (in module torch.nn.functional)": [[1641, "torch.nn.functional.huber_loss"]], "instance_norm() (in module torch.nn.functional)": [[1642, "torch.nn.functional.instance_norm"]], "interpolate() (in module torch.nn.functional)": [[1643, "torch.nn.functional.interpolate"]], "kl_div() (in module 
torch.nn.functional)": [[1644, "torch.nn.functional.kl_div"]], "l1_loss() (in module torch.nn.functional)": [[1645, "torch.nn.functional.l1_loss"]], "layer_norm() (in module torch.nn.functional)": [[1646, "torch.nn.functional.layer_norm"]], "leaky_relu() (in module torch.nn.functional)": [[1647, "torch.nn.functional.leaky_relu"]], "leaky_relu_() (in module torch.nn.functional)": [[1648, "torch.nn.functional.leaky_relu_"]], "linear() (in module torch.nn.functional)": [[1649, "torch.nn.functional.linear"]], "local_response_norm() (in module torch.nn.functional)": [[1650, "torch.nn.functional.local_response_norm"]], "log_softmax() (in module torch.nn.functional)": [[1651, "torch.nn.functional.log_softmax"]], "logsigmoid() (in module torch.nn.functional)": [[1652, "torch.nn.functional.logsigmoid"]], "lp_pool1d() (in module torch.nn.functional)": [[1653, "torch.nn.functional.lp_pool1d"]], "lp_pool2d() (in module torch.nn.functional)": [[1654, "torch.nn.functional.lp_pool2d"]], "lp_pool3d() (in module torch.nn.functional)": [[1655, "torch.nn.functional.lp_pool3d"]], "margin_ranking_loss() (in module torch.nn.functional)": [[1656, "torch.nn.functional.margin_ranking_loss"]], "max_pool1d() (in module torch.nn.functional)": [[1657, "torch.nn.functional.max_pool1d"]], "max_pool2d() (in module torch.nn.functional)": [[1658, "torch.nn.functional.max_pool2d"]], "max_pool3d() (in module torch.nn.functional)": [[1659, "torch.nn.functional.max_pool3d"]], "max_unpool1d() (in module torch.nn.functional)": [[1660, "torch.nn.functional.max_unpool1d"]], "max_unpool2d() (in module torch.nn.functional)": [[1661, "torch.nn.functional.max_unpool2d"]], "max_unpool3d() (in module torch.nn.functional)": [[1662, "torch.nn.functional.max_unpool3d"]], "mish() (in module torch.nn.functional)": [[1663, "torch.nn.functional.mish"]], "mse_loss() (in module torch.nn.functional)": [[1664, "torch.nn.functional.mse_loss"]], "multi_margin_loss() (in module torch.nn.functional)": [[1665, "torch.nn.functional.multi_margin_loss"]], "multilabel_margin_loss() (in module torch.nn.functional)": [[1666, "torch.nn.functional.multilabel_margin_loss"]], "multilabel_soft_margin_loss() (in module torch.nn.functional)": [[1667, "torch.nn.functional.multilabel_soft_margin_loss"]], "nll_loss() (in module torch.nn.functional)": [[1668, "torch.nn.functional.nll_loss"]], "normalize() (in module torch.nn.functional)": [[1669, "torch.nn.functional.normalize"]], "one_hot() (in module torch.nn.functional)": [[1670, "torch.nn.functional.one_hot"]], "pad() (in module torch.nn.functional)": [[1671, "torch.nn.functional.pad"]], "pairwise_distance() (in module torch.nn.functional)": [[1672, "torch.nn.functional.pairwise_distance"]], "pdist() (in module torch.nn.functional)": [[1673, "torch.nn.functional.pdist"]], "pixel_shuffle() (in module torch.nn.functional)": [[1674, "torch.nn.functional.pixel_shuffle"]], "pixel_unshuffle() (in module torch.nn.functional)": [[1675, "torch.nn.functional.pixel_unshuffle"]], "poisson_nll_loss() (in module torch.nn.functional)": [[1676, "torch.nn.functional.poisson_nll_loss"]], "prelu() (in module torch.nn.functional)": [[1677, "torch.nn.functional.prelu"]], "relu() (in module torch.nn.functional)": [[1678, "torch.nn.functional.relu"]], "relu6() (in module torch.nn.functional)": [[1679, "torch.nn.functional.relu6"]], "relu_() (in module torch.nn.functional)": [[1680, "torch.nn.functional.relu_"]], "rms_norm() (in module torch.nn.functional)": [[1681, "torch.nn.functional.rms_norm"]], "rrelu() (in module 
torch.nn.functional)": [[1682, "torch.nn.functional.rrelu"]], "rrelu_() (in module torch.nn.functional)": [[1683, "torch.nn.functional.rrelu_"]], "scaled_dot_product_attention() (in module torch.nn.functional)": [[1684, "torch.nn.functional.scaled_dot_product_attention"]], "selu() (in module torch.nn.functional)": [[1685, "torch.nn.functional.selu"]], "sigmoid() (in module torch.nn.functional)": [[1686, "torch.nn.functional.sigmoid"]], "silu() (in module torch.nn.functional)": [[1687, "torch.nn.functional.silu"]], "smooth_l1_loss() (in module torch.nn.functional)": [[1688, "torch.nn.functional.smooth_l1_loss"]], "soft_margin_loss() (in module torch.nn.functional)": [[1689, "torch.nn.functional.soft_margin_loss"]], "softmax() (in module torch.nn.functional)": [[1690, "torch.nn.functional.softmax"]], "softmin() (in module torch.nn.functional)": [[1691, "torch.nn.functional.softmin"]], "softplus() (in module torch.nn.functional)": [[1692, "torch.nn.functional.softplus"]], "softshrink() (in module torch.nn.functional)": [[1693, "torch.nn.functional.softshrink"]], "softsign() (in module torch.nn.functional)": [[1694, "torch.nn.functional.softsign"]], "tanh() (in module torch.nn.functional)": [[1695, "torch.nn.functional.tanh"]], "tanhshrink() (in module torch.nn.functional)": [[1696, "torch.nn.functional.tanhshrink"]], "threshold() (in module torch.nn.functional)": [[1697, "torch.nn.functional.threshold"]], "threshold_() (in module torch.nn.functional)": [[1698, "torch.nn.functional.threshold_"]], "data_parallel() (in module torch.nn.parallel)": [[1699, "torch.nn.parallel.data_parallel"]], "triplet_margin_loss() (in module torch.nn.functional)": [[1700, "torch.nn.functional.triplet_margin_loss"]], "triplet_margin_with_distance_loss() (in module torch.nn.functional)": [[1701, "torch.nn.functional.triplet_margin_with_distance_loss"]], "unfold() (in module torch.nn.functional)": [[1702, "torch.nn.functional.unfold"]], "upsample() (in module torch.nn.functional)": [[1703, "torch.nn.functional.upsample"]], "upsample_bilinear() (in module torch.nn.functional)": [[1704, "torch.nn.functional.upsample_bilinear"]], "upsample_nearest() (in module torch.nn.functional)": [[1705, "torch.nn.functional.upsample_nearest"]], "lazymodulemixin (class in torch.nn.modules.lazy)": [[1706, "torch.nn.modules.lazy.LazyModuleMixin"]], "has_uninitialized_params() (torch.nn.modules.lazy.lazymodulemixin method)": [[1706, "torch.nn.modules.lazy.LazyModuleMixin.has_uninitialized_params"]], "initialize_parameters() (torch.nn.modules.lazy.lazymodulemixin method)": [[1706, "torch.nn.modules.lazy.LazyModuleMixin.initialize_parameters"]], "register_module_backward_hook() (in module torch.nn.modules.module)": [[1707, "torch.nn.modules.module.register_module_backward_hook"]], "register_module_buffer_registration_hook() (in module torch.nn.modules.module)": [[1708, "torch.nn.modules.module.register_module_buffer_registration_hook"]], "register_module_forward_hook() (in module torch.nn.modules.module)": [[1709, "torch.nn.modules.module.register_module_forward_hook"]], "register_module_forward_pre_hook() (in module torch.nn.modules.module)": [[1710, "torch.nn.modules.module.register_module_forward_pre_hook"]], "register_module_full_backward_hook() (in module torch.nn.modules.module)": [[1711, "torch.nn.modules.module.register_module_full_backward_hook"]], "register_module_full_backward_pre_hook() (in module torch.nn.modules.module)": [[1712, "torch.nn.modules.module.register_module_full_backward_pre_hook"]], 
"register_module_module_registration_hook() (in module torch.nn.modules.module)": [[1713, "torch.nn.modules.module.register_module_module_registration_hook"]], "register_module_parameter_registration_hook() (in module torch.nn.modules.module)": [[1714, "torch.nn.modules.module.register_module_parameter_registration_hook"]], "rmsnorm (class in torch.nn.modules.normalization)": [[1715, "torch.nn.modules.normalization.RMSNorm"]], "extra_repr() (torch.nn.modules.normalization.rmsnorm method)": [[1715, "torch.nn.modules.normalization.RMSNorm.extra_repr"]], "forward() (torch.nn.modules.normalization.rmsnorm method)": [[1715, "torch.nn.modules.normalization.RMSNorm.forward"]], "reset_parameters() (torch.nn.modules.normalization.rmsnorm method)": [[1715, "torch.nn.modules.normalization.RMSNorm.reset_parameters"]], "distributeddataparallel (class in torch.nn.parallel)": [[1716, "torch.nn.parallel.DistributedDataParallel"]], "join() (torch.nn.parallel.distributeddataparallel method)": [[1716, "torch.nn.parallel.DistributedDataParallel.join"]], "join_hook() (torch.nn.parallel.distributeddataparallel method)": [[1716, "torch.nn.parallel.DistributedDataParallel.join_hook"]], "no_sync() (torch.nn.parallel.distributeddataparallel method)": [[1716, "torch.nn.parallel.DistributedDataParallel.no_sync"]], "register_comm_hook() (torch.nn.parallel.distributeddataparallel method)": [[1716, "torch.nn.parallel.DistributedDataParallel.register_comm_hook"]], "parameter (class in torch.nn.parameter)": [[1717, "torch.nn.parameter.Parameter"]], "uninitializedbuffer (class in torch.nn.parameter)": [[1718, "torch.nn.parameter.UninitializedBuffer"]], "uninitializedparameter (class in torch.nn.parameter)": [[1719, "torch.nn.parameter.UninitializedParameter"]], "cls_to_become (torch.nn.parameter.uninitializedparameter attribute)": [[1719, "torch.nn.parameter.UninitializedParameter.cls_to_become"]], "clip_grad_norm() (in module torch.nn.utils)": [[1720, "torch.nn.utils.clip_grad_norm"]], "clip_grad_norm_() (in module torch.nn.utils)": [[1721, "torch.nn.utils.clip_grad_norm_"]], "clip_grad_value_() (in module torch.nn.utils)": [[1722, "torch.nn.utils.clip_grad_value_"]], "convert_conv2d_weight_memory_format() (in module torch.nn.utils)": [[1723, "torch.nn.utils.convert_conv2d_weight_memory_format"]], "convert_conv3d_weight_memory_format() (in module torch.nn.utils)": [[1724, "torch.nn.utils.convert_conv3d_weight_memory_format"]], "fuse_conv_bn_eval() (in module torch.nn.utils)": [[1725, "torch.nn.utils.fuse_conv_bn_eval"]], "fuse_conv_bn_weights() (in module torch.nn.utils)": [[1726, "torch.nn.utils.fuse_conv_bn_weights"]], "fuse_linear_bn_eval() (in module torch.nn.utils)": [[1727, "torch.nn.utils.fuse_linear_bn_eval"]], "fuse_linear_bn_weights() (in module torch.nn.utils)": [[1728, "torch.nn.utils.fuse_linear_bn_weights"]], "parameters_to_vector() (in module torch.nn.utils)": [[1729, "torch.nn.utils.parameters_to_vector"]], "orthogonal() (in module torch.nn.utils.parametrizations)": [[1730, "torch.nn.utils.parametrizations.orthogonal"]], "spectral_norm() (in module torch.nn.utils.parametrizations)": [[1731, "torch.nn.utils.parametrizations.spectral_norm"]], "weight_norm() (in module torch.nn.utils.parametrizations)": [[1732, "torch.nn.utils.parametrizations.weight_norm"]], "parametrizationlist (class in torch.nn.utils.parametrize)": [[1733, "torch.nn.utils.parametrize.ParametrizationList"]], "right_inverse() (torch.nn.utils.parametrize.parametrizationlist method)": [[1733, 
"torch.nn.utils.parametrize.ParametrizationList.right_inverse"]], "cached() (in module torch.nn.utils.parametrize)": [[1734, "torch.nn.utils.parametrize.cached"]], "is_parametrized() (in module torch.nn.utils.parametrize)": [[1735, "torch.nn.utils.parametrize.is_parametrized"]], "register_parametrization() (in module torch.nn.utils.parametrize)": [[1736, "torch.nn.utils.parametrize.register_parametrization"]], "remove_parametrizations() (in module torch.nn.utils.parametrize)": [[1737, "torch.nn.utils.parametrize.remove_parametrizations"]], "basepruningmethod (class in torch.nn.utils.prune)": [[1738, "torch.nn.utils.prune.BasePruningMethod"]], "apply() (torch.nn.utils.prune.basepruningmethod class method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.apply"]], "apply_mask() (torch.nn.utils.prune.basepruningmethod method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.apply_mask"]], "compute_mask() (torch.nn.utils.prune.basepruningmethod method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.compute_mask"]], "prune() (torch.nn.utils.prune.basepruningmethod method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.prune"]], "remove() (torch.nn.utils.prune.basepruningmethod method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.remove"]], "customfrommask (class in torch.nn.utils.prune)": [[1739, "torch.nn.utils.prune.CustomFromMask"]], "apply() (torch.nn.utils.prune.customfrommask class method)": [[1739, "torch.nn.utils.prune.CustomFromMask.apply"]], "apply_mask() (torch.nn.utils.prune.customfrommask method)": [[1739, "torch.nn.utils.prune.CustomFromMask.apply_mask"]], "prune() (torch.nn.utils.prune.customfrommask method)": [[1739, "torch.nn.utils.prune.CustomFromMask.prune"]], "remove() (torch.nn.utils.prune.customfrommask method)": [[1739, "torch.nn.utils.prune.CustomFromMask.remove"]], "identity (class in torch.nn.utils.prune)": [[1740, "torch.nn.utils.prune.Identity"]], "apply() (torch.nn.utils.prune.identity class method)": [[1740, "torch.nn.utils.prune.Identity.apply"]], "apply_mask() (torch.nn.utils.prune.identity method)": [[1740, "torch.nn.utils.prune.Identity.apply_mask"]], "prune() (torch.nn.utils.prune.identity method)": [[1740, "torch.nn.utils.prune.Identity.prune"]], "remove() (torch.nn.utils.prune.identity method)": [[1740, "torch.nn.utils.prune.Identity.remove"]], "l1unstructured (class in torch.nn.utils.prune)": [[1741, "torch.nn.utils.prune.L1Unstructured"]], "apply() (torch.nn.utils.prune.l1unstructured class method)": [[1741, "torch.nn.utils.prune.L1Unstructured.apply"]], "apply_mask() (torch.nn.utils.prune.l1unstructured method)": [[1741, "torch.nn.utils.prune.L1Unstructured.apply_mask"]], "prune() (torch.nn.utils.prune.l1unstructured method)": [[1741, "torch.nn.utils.prune.L1Unstructured.prune"]], "remove() (torch.nn.utils.prune.l1unstructured method)": [[1741, "torch.nn.utils.prune.L1Unstructured.remove"]], "lnstructured (class in torch.nn.utils.prune)": [[1742, "torch.nn.utils.prune.LnStructured"]], "apply() (torch.nn.utils.prune.lnstructured class method)": [[1742, "torch.nn.utils.prune.LnStructured.apply"]], "apply_mask() (torch.nn.utils.prune.lnstructured method)": [[1742, "torch.nn.utils.prune.LnStructured.apply_mask"]], "compute_mask() (torch.nn.utils.prune.lnstructured method)": [[1742, "torch.nn.utils.prune.LnStructured.compute_mask"]], "prune() (torch.nn.utils.prune.lnstructured method)": [[1742, "torch.nn.utils.prune.LnStructured.prune"]], "remove() (torch.nn.utils.prune.lnstructured method)": [[1742, "torch.nn.utils.prune.LnStructured.remove"]], 
"pruningcontainer (class in torch.nn.utils.prune)": [[1743, "torch.nn.utils.prune.PruningContainer"]], "add_pruning_method() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.add_pruning_method"]], "apply() (torch.nn.utils.prune.pruningcontainer class method)": [[1743, "torch.nn.utils.prune.PruningContainer.apply"]], "apply_mask() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.apply_mask"]], "compute_mask() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.compute_mask"]], "prune() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.prune"]], "remove() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.remove"]], "randomstructured (class in torch.nn.utils.prune)": [[1744, "torch.nn.utils.prune.RandomStructured"]], "apply() (torch.nn.utils.prune.randomstructured class method)": [[1744, "torch.nn.utils.prune.RandomStructured.apply"]], "apply_mask() (torch.nn.utils.prune.randomstructured method)": [[1744, "torch.nn.utils.prune.RandomStructured.apply_mask"]], "compute_mask() (torch.nn.utils.prune.randomstructured method)": [[1744, "torch.nn.utils.prune.RandomStructured.compute_mask"]], "prune() (torch.nn.utils.prune.randomstructured method)": [[1744, "torch.nn.utils.prune.RandomStructured.prune"]], "remove() (torch.nn.utils.prune.randomstructured method)": [[1744, "torch.nn.utils.prune.RandomStructured.remove"]], "randomunstructured (class in torch.nn.utils.prune)": [[1745, "torch.nn.utils.prune.RandomUnstructured"]], "apply() (torch.nn.utils.prune.randomunstructured class method)": [[1745, "torch.nn.utils.prune.RandomUnstructured.apply"]], "apply_mask() (torch.nn.utils.prune.randomunstructured method)": [[1745, "torch.nn.utils.prune.RandomUnstructured.apply_mask"]], "prune() (torch.nn.utils.prune.randomunstructured method)": [[1745, "torch.nn.utils.prune.RandomUnstructured.prune"]], "remove() (torch.nn.utils.prune.randomunstructured method)": [[1745, "torch.nn.utils.prune.RandomUnstructured.remove"]], "custom_from_mask() (in module torch.nn.utils.prune)": [[1746, "torch.nn.utils.prune.custom_from_mask"]], "global_unstructured() (in module torch.nn.utils.prune)": [[1747, "torch.nn.utils.prune.global_unstructured"]], "identity() (in module torch.nn.utils.prune)": [[1748, "torch.nn.utils.prune.identity"]], "is_pruned() (in module torch.nn.utils.prune)": [[1749, "torch.nn.utils.prune.is_pruned"]], "l1_unstructured() (in module torch.nn.utils.prune)": [[1750, "torch.nn.utils.prune.l1_unstructured"]], "ln_structured() (in module torch.nn.utils.prune)": [[1751, "torch.nn.utils.prune.ln_structured"]], "random_structured() (in module torch.nn.utils.prune)": [[1752, "torch.nn.utils.prune.random_structured"]], "random_unstructured() (in module torch.nn.utils.prune)": [[1753, "torch.nn.utils.prune.random_unstructured"]], "remove() (in module torch.nn.utils.prune)": [[1754, "torch.nn.utils.prune.remove"]], "remove_spectral_norm() (in module torch.nn.utils)": [[1755, "torch.nn.utils.remove_spectral_norm"]], "remove_weight_norm() (in module torch.nn.utils)": [[1756, "torch.nn.utils.remove_weight_norm"]], "packedsequence (class in torch.nn.utils.rnn)": [[1757, "torch.nn.utils.rnn.PackedSequence"]], "batch_sizes (torch.nn.utils.rnn.packedsequence attribute)": [[1757, "torch.nn.utils.rnn.PackedSequence.batch_sizes"]], "count() (torch.nn.utils.rnn.packedsequence method)": [[1757, 
"torch.nn.utils.rnn.PackedSequence.count"]], "data (torch.nn.utils.rnn.packedsequence attribute)": [[1757, "torch.nn.utils.rnn.PackedSequence.data"]], "index() (torch.nn.utils.rnn.packedsequence method)": [[1757, "torch.nn.utils.rnn.PackedSequence.index"]], "is_cuda (torch.nn.utils.rnn.packedsequence property)": [[1757, "torch.nn.utils.rnn.PackedSequence.is_cuda"]], "is_pinned() (torch.nn.utils.rnn.packedsequence method)": [[1757, "torch.nn.utils.rnn.PackedSequence.is_pinned"]], "sorted_indices (torch.nn.utils.rnn.packedsequence attribute)": [[1757, "torch.nn.utils.rnn.PackedSequence.sorted_indices"]], "to() (torch.nn.utils.rnn.packedsequence method)": [[1757, "torch.nn.utils.rnn.PackedSequence.to"]], "unsorted_indices (torch.nn.utils.rnn.packedsequence attribute)": [[1757, "torch.nn.utils.rnn.PackedSequence.unsorted_indices"]], "pack_padded_sequence() (in module torch.nn.utils.rnn)": [[1758, "torch.nn.utils.rnn.pack_padded_sequence"]], "pack_sequence() (in module torch.nn.utils.rnn)": [[1759, "torch.nn.utils.rnn.pack_sequence"]], "pad_packed_sequence() (in module torch.nn.utils.rnn)": [[1760, "torch.nn.utils.rnn.pad_packed_sequence"]], "pad_sequence() (in module torch.nn.utils.rnn)": [[1761, "torch.nn.utils.rnn.pad_sequence"]], "unpack_sequence() (in module torch.nn.utils.rnn)": [[1762, "torch.nn.utils.rnn.unpack_sequence"]], "unpad_sequence() (in module torch.nn.utils.rnn)": [[1763, "torch.nn.utils.rnn.unpad_sequence"]], "skip_init() (in module torch.nn.utils)": [[1764, "torch.nn.utils.skip_init"]], "spectral_norm() (in module torch.nn.utils)": [[1765, "torch.nn.utils.spectral_norm"]], "functional_call() (in module torch.nn.utils.stateless)": [[1766, "torch.nn.utils.stateless.functional_call"]], "vector_to_parameters() (in module torch.nn.utils)": [[1767, "torch.nn.utils.vector_to_parameters"]], "weight_norm() (in module torch.nn.utils)": [[1768, "torch.nn.utils.weight_norm"]], "no_grad (class in torch)": [[1769, "torch.no_grad"]], "nonzero() (in module torch)": [[1770, "torch.nonzero"]], "norm() (in module torch)": [[1771, "torch.norm"]], "normal() (in module torch)": [[1772, "torch.normal"]], "not_equal() (in module torch)": [[1773, "torch.not_equal"]], "numel() (in module torch)": [[1774, "torch.numel"]], "ones() (in module torch)": [[1775, "torch.ones"]], "ones_like() (in module torch)": [[1776, "torch.ones_like"]], "jitscalartype (class in torch.onnx)": [[1777, "torch.onnx.JitScalarType"]], "dtype() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.dtype"]], "from_dtype() (torch.onnx.jitscalartype class method)": [[1777, "torch.onnx.JitScalarType.from_dtype"]], "from_onnx_type() (torch.onnx.jitscalartype class method)": [[1777, "torch.onnx.JitScalarType.from_onnx_type"]], "from_value() (torch.onnx.jitscalartype class method)": [[1777, "torch.onnx.JitScalarType.from_value"]], "onnx_compatible() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.onnx_compatible"]], "onnx_type() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.onnx_type"]], "scalar_name() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.scalar_name"]], "torch_name() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.torch_name"]], "graphinfo (class in torch.onnx.verification)": [[1778, "torch.onnx.verification.GraphInfo"]], "all_mismatch_leaf_graph_info() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.all_mismatch_leaf_graph_info"]], "clear() (torch.onnx.verification.graphinfo 
method)": [[1778, "torch.onnx.verification.GraphInfo.clear"]], "essential_node_count() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.essential_node_count"]], "essential_node_kinds() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.essential_node_kinds"]], "export_repro() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.export_repro"]], "find_mismatch() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.find_mismatch"]], "find_partition() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.find_partition"]], "has_mismatch() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.has_mismatch"]], "pretty_print_mismatch() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.pretty_print_mismatch"]], "pretty_print_tree() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.pretty_print_tree"]], "verify_export() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.verify_export"]], "verificationoptions (class in torch.onnx.verification)": [[1779, "torch.onnx.verification.VerificationOptions"]], "asgd (class in torch.optim)": [[1780, "torch.optim.ASGD"]], "add_param_group() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.add_param_group"]], "load_state_dict() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_step_pre_hook"]], "state_dict() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.state_dict"]], "step() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.step"]], "zero_grad() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.zero_grad"]], "adadelta (class in torch.optim)": [[1781, "torch.optim.Adadelta"]], "add_param_group() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.add_param_group"]], "load_state_dict() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adadelta method)": [[1781, 
"torch.optim.Adadelta.register_step_pre_hook"]], "state_dict() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.state_dict"]], "step() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.step"]], "zero_grad() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.zero_grad"]], "adagrad (class in torch.optim)": [[1782, "torch.optim.Adagrad"]], "add_param_group() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.add_param_group"]], "load_state_dict() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_step_pre_hook"]], "state_dict() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.state_dict"]], "step() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.step"]], "zero_grad() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.zero_grad"]], "adam (class in torch.optim)": [[1783, "torch.optim.Adam"]], "add_param_group() (torch.optim.adam method)": [[1783, "torch.optim.Adam.add_param_group"]], "load_state_dict() (torch.optim.adam method)": [[1783, "torch.optim.Adam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_step_pre_hook"]], "state_dict() (torch.optim.adam method)": [[1783, "torch.optim.Adam.state_dict"]], "step() (torch.optim.adam method)": [[1783, "torch.optim.Adam.step"]], "zero_grad() (torch.optim.adam method)": [[1783, "torch.optim.Adam.zero_grad"]], "adamw (class in torch.optim)": [[1784, "torch.optim.AdamW"]], "add_param_group() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.add_param_group"]], "load_state_dict() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_state_dict_pre_hook"]], 
"register_step_post_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_step_pre_hook"]], "state_dict() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.state_dict"]], "step() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.step"]], "zero_grad() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.zero_grad"]], "adamax (class in torch.optim)": [[1785, "torch.optim.Adamax"]], "add_param_group() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.add_param_group"]], "load_state_dict() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_step_pre_hook"]], "state_dict() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.state_dict"]], "step() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.step"]], "zero_grad() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.zero_grad"]], "lbfgs (class in torch.optim)": [[1786, "torch.optim.LBFGS"]], "add_param_group() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.add_param_group"]], "load_state_dict() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_step_pre_hook"]], "state_dict() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.state_dict"]], "step() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.step"]], "zero_grad() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.zero_grad"]], "nadam (class in torch.optim)": [[1787, "torch.optim.NAdam"]], "add_param_group() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.add_param_group"]], "load_state_dict() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.nadam method)": [[1787, 
"torch.optim.NAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_step_pre_hook"]], "state_dict() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.state_dict"]], "step() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.step"]], "zero_grad() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.zero_grad"]], "add_param_group() (torch.optim.optimizer method)": [[1788, "torch.optim.Optimizer.add_param_group"]], "load_state_dict() (torch.optim.optimizer method)": [[1789, "torch.optim.Optimizer.load_state_dict"]], "state_dict() (torch.optim.optimizer method)": [[1790, "torch.optim.Optimizer.state_dict"]], "step() (torch.optim.optimizer method)": [[1791, "torch.optim.Optimizer.step"]], "zero_grad() (torch.optim.optimizer method)": [[1792, "torch.optim.Optimizer.zero_grad"]], "radam (class in torch.optim)": [[1793, "torch.optim.RAdam"]], "add_param_group() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.add_param_group"]], "load_state_dict() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_step_pre_hook"]], "state_dict() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.state_dict"]], "step() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.step"]], "zero_grad() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.zero_grad"]], "rmsprop (class in torch.optim)": [[1794, "torch.optim.RMSprop"]], "add_param_group() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.add_param_group"]], "load_state_dict() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_step_pre_hook"]], "state_dict() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.state_dict"]], "step() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.step"]], "zero_grad() (torch.optim.rmsprop method)": 
[[1794, "torch.optim.RMSprop.zero_grad"]], "rprop (class in torch.optim)": [[1795, "torch.optim.Rprop"]], "add_param_group() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.add_param_group"]], "load_state_dict() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_step_pre_hook"]], "state_dict() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.state_dict"]], "step() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.step"]], "zero_grad() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.zero_grad"]], "sgd (class in torch.optim)": [[1796, "torch.optim.SGD"]], "add_param_group() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.add_param_group"]], "load_state_dict() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_step_pre_hook"]], "state_dict() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.state_dict"]], "step() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.step"]], "zero_grad() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.zero_grad"]], "sparseadam (class in torch.optim)": [[1797, "torch.optim.SparseAdam"]], "add_param_group() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.add_param_group"]], "load_state_dict() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_step_pre_hook"]], "state_dict() (torch.optim.sparseadam 
method)": [[1797, "torch.optim.SparseAdam.state_dict"]], "step() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.step"]], "zero_grad() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.zero_grad"]], "chainedscheduler (class in torch.optim.lr_scheduler)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler"]], "get_last_lr() (torch.optim.lr_scheduler.chainedscheduler method)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.chainedscheduler method)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.chainedscheduler method)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler.print_lr"]], "state_dict() (torch.optim.lr_scheduler.chainedscheduler method)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler.state_dict"]], "constantlr (class in torch.optim.lr_scheduler)": [[1799, "torch.optim.lr_scheduler.ConstantLR"]], "get_last_lr() (torch.optim.lr_scheduler.constantlr method)": [[1799, "torch.optim.lr_scheduler.ConstantLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.constantlr method)": [[1799, "torch.optim.lr_scheduler.ConstantLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.constantlr method)": [[1799, "torch.optim.lr_scheduler.ConstantLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.constantlr method)": [[1799, "torch.optim.lr_scheduler.ConstantLR.state_dict"]], "cosineannealinglr (class in torch.optim.lr_scheduler)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR"]], "get_last_lr() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR.state_dict"]], "cosineannealingwarmrestarts (class in torch.optim.lr_scheduler)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts"]], "get_last_lr() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.print_lr"]], "state_dict() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.state_dict"]], "step() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.step"]], "cycliclr (class in torch.optim.lr_scheduler)": [[1802, "torch.optim.lr_scheduler.CyclicLR"]], "get_last_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1802, "torch.optim.lr_scheduler.CyclicLR.get_last_lr"]], "get_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1802, "torch.optim.lr_scheduler.CyclicLR.get_lr"]], "print_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1802, "torch.optim.lr_scheduler.CyclicLR.print_lr"]], "exponentiallr (class in torch.optim.lr_scheduler)": [[1803, 
"torch.optim.lr_scheduler.ExponentialLR"]], "get_last_lr() (torch.optim.lr_scheduler.exponentiallr method)": [[1803, "torch.optim.lr_scheduler.ExponentialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.exponentiallr method)": [[1803, "torch.optim.lr_scheduler.ExponentialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.exponentiallr method)": [[1803, "torch.optim.lr_scheduler.ExponentialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.exponentiallr method)": [[1803, "torch.optim.lr_scheduler.ExponentialLR.state_dict"]], "lambdalr (class in torch.optim.lr_scheduler)": [[1804, "torch.optim.lr_scheduler.LambdaLR"]], "get_last_lr() (torch.optim.lr_scheduler.lambdalr method)": [[1804, "torch.optim.lr_scheduler.LambdaLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.lambdalr method)": [[1804, "torch.optim.lr_scheduler.LambdaLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.lambdalr method)": [[1804, "torch.optim.lr_scheduler.LambdaLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.lambdalr method)": [[1804, "torch.optim.lr_scheduler.LambdaLR.state_dict"]], "linearlr (class in torch.optim.lr_scheduler)": [[1805, "torch.optim.lr_scheduler.LinearLR"]], "get_last_lr() (torch.optim.lr_scheduler.linearlr method)": [[1805, "torch.optim.lr_scheduler.LinearLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.linearlr method)": [[1805, "torch.optim.lr_scheduler.LinearLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.linearlr method)": [[1805, "torch.optim.lr_scheduler.LinearLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.linearlr method)": [[1805, "torch.optim.lr_scheduler.LinearLR.state_dict"]], "multisteplr (class in torch.optim.lr_scheduler)": [[1806, "torch.optim.lr_scheduler.MultiStepLR"]], "get_last_lr() (torch.optim.lr_scheduler.multisteplr method)": [[1806, "torch.optim.lr_scheduler.MultiStepLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.multisteplr method)": [[1806, "torch.optim.lr_scheduler.MultiStepLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.multisteplr method)": [[1806, "torch.optim.lr_scheduler.MultiStepLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.multisteplr method)": [[1806, "torch.optim.lr_scheduler.MultiStepLR.state_dict"]], "multiplicativelr (class in torch.optim.lr_scheduler)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR"]], "get_last_lr() (torch.optim.lr_scheduler.multiplicativelr method)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.multiplicativelr method)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.multiplicativelr method)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.multiplicativelr method)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR.state_dict"]], "onecyclelr (class in torch.optim.lr_scheduler)": [[1808, "torch.optim.lr_scheduler.OneCycleLR"]], "get_last_lr() (torch.optim.lr_scheduler.onecyclelr method)": [[1808, "torch.optim.lr_scheduler.OneCycleLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.onecyclelr method)": [[1808, "torch.optim.lr_scheduler.OneCycleLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.onecyclelr method)": [[1808, "torch.optim.lr_scheduler.OneCycleLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.onecyclelr method)": [[1808, "torch.optim.lr_scheduler.OneCycleLR.state_dict"]], 
"polynomiallr (class in torch.optim.lr_scheduler)": [[1809, "torch.optim.lr_scheduler.PolynomialLR"]], "get_last_lr() (torch.optim.lr_scheduler.polynomiallr method)": [[1809, "torch.optim.lr_scheduler.PolynomialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.polynomiallr method)": [[1809, "torch.optim.lr_scheduler.PolynomialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.polynomiallr method)": [[1809, "torch.optim.lr_scheduler.PolynomialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.polynomiallr method)": [[1809, "torch.optim.lr_scheduler.PolynomialLR.state_dict"]], "reducelronplateau (class in torch.optim.lr_scheduler)": [[1810, "torch.optim.lr_scheduler.ReduceLROnPlateau"]], "get_last_lr() (torch.optim.lr_scheduler.reducelronplateau method)": [[1810, "torch.optim.lr_scheduler.ReduceLROnPlateau.get_last_lr"]], "print_lr() (torch.optim.lr_scheduler.reducelronplateau method)": [[1810, "torch.optim.lr_scheduler.ReduceLROnPlateau.print_lr"]], "sequentiallr (class in torch.optim.lr_scheduler)": [[1811, "torch.optim.lr_scheduler.SequentialLR"]], "get_last_lr() (torch.optim.lr_scheduler.sequentiallr method)": [[1811, "torch.optim.lr_scheduler.SequentialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.sequentiallr method)": [[1811, "torch.optim.lr_scheduler.SequentialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.sequentiallr method)": [[1811, "torch.optim.lr_scheduler.SequentialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.sequentiallr method)": [[1811, "torch.optim.lr_scheduler.SequentialLR.state_dict"]], "steplr (class in torch.optim.lr_scheduler)": [[1812, "torch.optim.lr_scheduler.StepLR"]], "get_last_lr() (torch.optim.lr_scheduler.steplr method)": [[1812, "torch.optim.lr_scheduler.StepLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.steplr method)": [[1812, "torch.optim.lr_scheduler.StepLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.steplr method)": [[1812, "torch.optim.lr_scheduler.StepLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.steplr method)": [[1812, "torch.optim.lr_scheduler.StepLR.state_dict"]], "orgqr() (in module torch)": [[1813, "torch.orgqr"]], "ormqr() (in module torch)": [[1814, "torch.ormqr"]], "outer() (in module torch)": [[1815, "torch.outer"]], "pca_lowrank() (in module torch)": [[1816, "torch.pca_lowrank"]], "permute() (in module torch)": [[1817, "torch.permute"]], "pinverse() (in module torch)": [[1818, "torch.pinverse"]], "poisson() (in module torch)": [[1819, "torch.poisson"]], "polar() (in module torch)": [[1820, "torch.polar"]], "polygamma() (in module torch)": [[1821, "torch.polygamma"]], "positive() (in module torch)": [[1822, "torch.positive"]], "pow() (in module torch)": [[1823, "torch.pow"]], "prod() (in module torch)": [[1824, "torch.prod"]], "promote_types() (in module torch)": [[1825, "torch.promote_types"]], "qr() (in module torch)": [[1826, "torch.qr"]], "quantile() (in module torch)": [[1827, "torch.quantile"]], "quantize_per_channel() (in module torch)": [[1828, "torch.quantize_per_channel"]], "quantize_per_tensor() (in module torch)": [[1829, "torch.quantize_per_tensor"]], "quantized_batch_norm() (in module torch)": [[1830, "torch.quantized_batch_norm"]], "quantized_max_pool1d() (in module torch)": [[1831, "torch.quantized_max_pool1d"]], "quantized_max_pool2d() (in module torch)": [[1832, "torch.quantized_max_pool2d"]], "sobolengine (class in torch.quasirandom)": [[1833, "torch.quasirandom.SobolEngine"]], "draw() 
(torch.quasirandom.sobolengine method)": [[1833, "torch.quasirandom.SobolEngine.draw"]], "draw_base2() (torch.quasirandom.sobolengine method)": [[1833, "torch.quasirandom.SobolEngine.draw_base2"]], "fast_forward() (torch.quasirandom.sobolengine method)": [[1833, "torch.quasirandom.SobolEngine.fast_forward"]], "reset() (torch.quasirandom.sobolengine method)": [[1833, "torch.quasirandom.SobolEngine.reset"]], "rad2deg() (in module torch)": [[1834, "torch.rad2deg"]], "rand() (in module torch)": [[1835, "torch.rand"]], "rand_like() (in module torch)": [[1836, "torch.rand_like"]], "randint() (in module torch)": [[1837, "torch.randint"]], "randint_like() (in module torch)": [[1838, "torch.randint_like"]], "randn() (in module torch)": [[1839, "torch.randn"]], "randn_like() (in module torch)": [[1840, "torch.randn_like"]], "randperm() (in module torch)": [[1841, "torch.randperm"]], "range() (in module torch)": [[1842, "torch.range"]], "ravel() (in module torch)": [[1843, "torch.ravel"]], "real() (in module torch)": [[1844, "torch.real"]], "reciprocal() (in module torch)": [[1845, "torch.reciprocal"]], "remainder() (in module torch)": [[1846, "torch.remainder"]], "renorm() (in module torch)": [[1847, "torch.renorm"]], "repeat_interleave() (in module torch)": [[1848, "torch.repeat_interleave"]], "reshape() (in module torch)": [[1849, "torch.reshape"]], "resolve_conj() (in module torch)": [[1850, "torch.resolve_conj"]], "resolve_neg() (in module torch)": [[1851, "torch.resolve_neg"]], "result_type() (in module torch)": [[1852, "torch.result_type"]], "roll() (in module torch)": [[1853, "torch.roll"]], "rot90() (in module torch)": [[1854, "torch.rot90"]], "round() (in module torch)": [[1855, "torch.round"]], "row_stack() (in module torch)": [[1856, "torch.row_stack"]], "rsqrt() (in module torch)": [[1857, "torch.rsqrt"]], "save() (in module torch)": [[1858, "torch.save"]], "scatter() (in module torch)": [[1859, "torch.scatter"]], "scatter_add() (in module torch)": [[1860, "torch.scatter_add"]], "scatter_reduce() (in module torch)": [[1861, "torch.scatter_reduce"]], "searchsorted() (in module torch)": [[1862, "torch.searchsorted"]], "seed() (in module torch)": [[1863, "torch.seed"]], "select() (in module torch)": [[1864, "torch.select"]], "select_scatter() (in module torch)": [[1865, "torch.select_scatter"]], "set_default_device() (in module torch)": [[1866, "torch.set_default_device"]], "set_default_dtype() (in module torch)": [[1867, "torch.set_default_dtype"]], "set_default_tensor_type() (in module torch)": [[1868, "torch.set_default_tensor_type"]], "set_deterministic_debug_mode() (in module torch)": [[1869, "torch.set_deterministic_debug_mode"]], "set_float32_matmul_precision() (in module torch)": [[1870, "torch.set_float32_matmul_precision"]], "set_flush_denormal() (in module torch)": [[1871, "torch.set_flush_denormal"]], "set_num_interop_threads() (in module torch)": [[1872, "torch.set_num_interop_threads"]], "set_num_threads() (in module torch)": [[1873, "torch.set_num_threads"]], "set_printoptions() (in module torch)": [[1874, "torch.set_printoptions"]], "set_rng_state() (in module torch)": [[1875, "torch.set_rng_state"]], "set_warn_always() (in module torch)": [[1876, "torch.set_warn_always"]], "sgn() (in module torch)": [[1877, "torch.sgn"]], "sigmoid() (in module torch)": [[1878, "torch.sigmoid"]], "sign() (in module torch)": [[1879, "torch.sign"]], "bartlett() (in module torch.signal.windows)": [[1880, "torch.signal.windows.bartlett"]], "blackman() (in module torch.signal.windows)": [[1881, 
"torch.signal.windows.blackman"]], "cosine() (in module torch.signal.windows)": [[1882, "torch.signal.windows.cosine"]], "exponential() (in module torch.signal.windows)": [[1883, "torch.signal.windows.exponential"]], "gaussian() (in module torch.signal.windows)": [[1884, "torch.signal.windows.gaussian"]], "general_cosine() (in module torch.signal.windows)": [[1885, "torch.signal.windows.general_cosine"]], "general_hamming() (in module torch.signal.windows)": [[1886, "torch.signal.windows.general_hamming"]], "hamming() (in module torch.signal.windows)": [[1887, "torch.signal.windows.hamming"]], "hann() (in module torch.signal.windows)": [[1888, "torch.signal.windows.hann"]], "kaiser() (in module torch.signal.windows)": [[1889, "torch.signal.windows.kaiser"]], "nuttall() (in module torch.signal.windows)": [[1890, "torch.signal.windows.nuttall"]], "signbit() (in module torch)": [[1891, "torch.signbit"]], "sin() (in module torch)": [[1892, "torch.sin"]], "sinc() (in module torch)": [[1893, "torch.sinc"]], "sinh() (in module torch)": [[1894, "torch.sinh"]], "slice_scatter() (in module torch)": [[1895, "torch.slice_scatter"]], "slogdet() (in module torch)": [[1896, "torch.slogdet"]], "smm() (in module torch)": [[1897, "torch.smm"]], "softmax() (in module torch)": [[1898, "torch.softmax"]], "sort() (in module torch)": [[1899, "torch.sort"]], "addmm() (in module torch.sparse)": [[1900, "torch.sparse.addmm"]], "as_sparse_gradcheck() (in module torch.sparse)": [[1901, "torch.sparse.as_sparse_gradcheck"]], "check_sparse_tensor_invariants (class in torch.sparse)": [[1902, "torch.sparse.check_sparse_tensor_invariants"]], "disable() (torch.sparse.check_sparse_tensor_invariants static method)": [[1902, "torch.sparse.check_sparse_tensor_invariants.disable"]], "enable() (torch.sparse.check_sparse_tensor_invariants static method)": [[1902, "torch.sparse.check_sparse_tensor_invariants.enable"]], "is_enabled() (torch.sparse.check_sparse_tensor_invariants static method)": [[1902, "torch.sparse.check_sparse_tensor_invariants.is_enabled"]], "log_softmax() (in module torch.sparse)": [[1903, "torch.sparse.log_softmax"]], "mm() (in module torch.sparse)": [[1904, "torch.sparse.mm"]], "sampled_addmm() (in module torch.sparse)": [[1905, "torch.sparse.sampled_addmm"]], "softmax() (in module torch.sparse)": [[1906, "torch.sparse.softmax"]], "spdiags() (in module torch.sparse)": [[1907, "torch.sparse.spdiags"]], "sum() (in module torch.sparse)": [[1908, "torch.sparse.sum"]], "sparse_bsc_tensor() (in module torch)": [[1909, "torch.sparse_bsc_tensor"]], "sparse_bsr_tensor() (in module torch)": [[1910, "torch.sparse_bsr_tensor"]], "sparse_compressed_tensor() (in module torch)": [[1911, "torch.sparse_compressed_tensor"]], "sparse_coo_tensor() (in module torch)": [[1912, "torch.sparse_coo_tensor"]], "sparse_csc_tensor() (in module torch)": [[1913, "torch.sparse_csc_tensor"]], "sparse_csr_tensor() (in module torch)": [[1914, "torch.sparse_csr_tensor"]], "split() (in module torch)": [[1915, "torch.split"]], "sqrt() (in module torch)": [[1916, "torch.sqrt"]], "square() (in module torch)": [[1917, "torch.square"]], "squeeze() (in module torch)": [[1918, "torch.squeeze"]], "sspaddmm() (in module torch)": [[1919, "torch.sspaddmm"]], "stack() (in module torch)": [[1920, "torch.stack"]], "std() (in module torch)": [[1921, "torch.std"]], "std_mean() (in module torch)": [[1922, "torch.std_mean"]], "stft() (in module torch)": [[1923, "torch.stft"]], "sub() (in module torch)": [[1924, "torch.sub"]], "subtract() (in module torch)": 
[[1925, "torch.subtract"]], "sum() (in module torch)": [[1926, "torch.sum"]], "svd() (in module torch)": [[1927, "torch.svd"]], "svd_lowrank() (in module torch)": [[1928, "torch.svd_lowrank"]], "swapaxes() (in module torch)": [[1929, "torch.swapaxes"]], "swapdims() (in module torch)": [[1930, "torch.swapdims"]], "sym_float() (in module torch)": [[1931, "torch.sym_float"]], "sym_int() (in module torch)": [[1932, "torch.sym_int"]], "sym_ite() (in module torch)": [[1933, "torch.sym_ite"]], "sym_max() (in module torch)": [[1934, "torch.sym_max"]], "sym_min() (in module torch)": [[1935, "torch.sym_min"]], "sym_not() (in module torch)": [[1936, "torch.sym_not"]], "t() (in module torch)": [[1937, "torch.t"]], "take() (in module torch)": [[1938, "torch.take"]], "take_along_dim() (in module torch)": [[1939, "torch.take_along_dim"]], "tan() (in module torch)": [[1940, "torch.tan"]], "tanh() (in module torch)": [[1941, "torch.tanh"]], "tensor() (in module torch)": [[1942, "torch.tensor"]], "tensor_split() (in module torch)": [[1943, "torch.tensor_split"]], "tensordot() (in module torch)": [[1944, "torch.tensordot"]], "tile() (in module torch)": [[1945, "torch.tile"]], "topk() (in module torch)": [[1946, "torch.topk"]], "trace() (in module torch)": [[1947, "torch.trace"]], "transpose() (in module torch)": [[1948, "torch.transpose"]], "trapezoid() (in module torch)": [[1949, "torch.trapezoid"]], "trapz() (in module torch)": [[1950, "torch.trapz"]], "triangular_solve() (in module torch)": [[1951, "torch.triangular_solve"]], "tril() (in module torch)": [[1952, "torch.tril"]], "tril_indices() (in module torch)": [[1953, "torch.tril_indices"]], "triu() (in module torch)": [[1954, "torch.triu"]], "triu_indices() (in module torch)": [[1955, "torch.triu_indices"]], "true_divide() (in module torch)": [[1956, "torch.true_divide"]], "trunc() (in module torch)": [[1957, "torch.trunc"]], "unbind() (in module torch)": [[1958, "torch.unbind"]], "unflatten() (in module torch)": [[1959, "torch.unflatten"]], "unique() (in module torch)": [[1960, "torch.unique"]], "unique_consecutive() (in module torch)": [[1961, "torch.unique_consecutive"]], "unravel_index() (in module torch)": [[1962, "torch.unravel_index"]], "unsqueeze() (in module torch)": [[1963, "torch.unsqueeze"]], "use_deterministic_algorithms() (in module torch)": [[1964, "torch.use_deterministic_algorithms"]], "generate_methods_for_privateuse1_backend() (in module torch.utils)": [[1965, "torch.utils.generate_methods_for_privateuse1_backend"]], "get_cpp_backtrace() (in module torch.utils)": [[1966, "torch.utils.get_cpp_backtrace"]], "rename_privateuse1_backend() (in module torch.utils)": [[1967, "torch.utils.rename_privateuse1_backend"]], "set_module() (in module torch.utils)": [[1968, "torch.utils.set_module"]], "swap_tensors() (in module torch.utils)": [[1969, "torch.utils.swap_tensors"]], "vander() (in module torch)": [[1970, "torch.vander"]], "var() (in module torch)": [[1971, "torch.var"]], "var_mean() (in module torch)": [[1972, "torch.var_mean"]], "vdot() (in module torch)": [[1973, "torch.vdot"]], "view_as_complex() (in module torch)": [[1974, "torch.view_as_complex"]], "view_as_real() (in module torch)": [[1975, "torch.view_as_real"]], "vmap() (in module torch)": [[1976, "torch.vmap"]], "vsplit() (in module torch)": [[1977, "torch.vsplit"]], "vstack() (in module torch)": [[1978, "torch.vstack"]], "where() (in module torch)": [[1979, "torch.where"]], "xlogy() (in module torch)": [[1980, "torch.xlogy"]], "event (class in torch.xpu)": [[1981, 
"torch.xpu.Event"]], "elapsed_time() (torch.xpu.event method)": [[1981, "torch.xpu.Event.elapsed_time"]], "query() (torch.xpu.event method)": [[1981, "torch.xpu.Event.query"]], "record() (torch.xpu.event method)": [[1981, "torch.xpu.Event.record"]], "synchronize() (torch.xpu.event method)": [[1981, "torch.xpu.Event.synchronize"]], "wait() (torch.xpu.event method)": [[1981, "torch.xpu.Event.wait"]], "stream (class in torch.xpu)": [[1982, "torch.xpu.Stream"]], "query() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.query"]], "record_event() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.record_event"]], "synchronize() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.synchronize"]], "wait_event() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.wait_event"]], "wait_stream() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.wait_stream"]], "streamcontext (class in torch.xpu)": [[1983, "torch.xpu.StreamContext"]], "current_device() (in module torch.xpu)": [[1984, "torch.xpu.current_device"]], "current_stream() (in module torch.xpu)": [[1985, "torch.xpu.current_stream"]], "device (class in torch.xpu)": [[1986, "torch.xpu.device"]], "device_count() (in module torch.xpu)": [[1987, "torch.xpu.device_count"]], "device_of (class in torch.xpu)": [[1988, "torch.xpu.device_of"]], "empty_cache() (in module torch.xpu)": [[1989, "torch.xpu.empty_cache"]], "get_device_capability() (in module torch.xpu)": [[1990, "torch.xpu.get_device_capability"]], "get_device_name() (in module torch.xpu)": [[1991, "torch.xpu.get_device_name"]], "get_device_properties() (in module torch.xpu)": [[1992, "torch.xpu.get_device_properties"]], "get_rng_state() (in module torch.xpu)": [[1993, "torch.xpu.get_rng_state"]], "get_rng_state_all() (in module torch.xpu)": [[1994, "torch.xpu.get_rng_state_all"]], "init() (in module torch.xpu)": [[1995, "torch.xpu.init"]], "initial_seed() (in module torch.xpu)": [[1996, "torch.xpu.initial_seed"]], "is_available() (in module torch.xpu)": [[1997, "torch.xpu.is_available"]], "is_initialized() (in module torch.xpu)": [[1998, "torch.xpu.is_initialized"]], "manual_seed() (in module torch.xpu)": [[1999, "torch.xpu.manual_seed"]], "manual_seed_all() (in module torch.xpu)": [[2000, "torch.xpu.manual_seed_all"]], "seed() (in module torch.xpu)": [[2001, "torch.xpu.seed"]], "seed_all() (in module torch.xpu)": [[2002, "torch.xpu.seed_all"]], "set_device() (in module torch.xpu)": [[2003, "torch.xpu.set_device"]], "set_rng_state() (in module torch.xpu)": [[2004, "torch.xpu.set_rng_state"]], "set_rng_state_all() (in module torch.xpu)": [[2005, "torch.xpu.set_rng_state_all"]], "set_stream() (in module torch.xpu)": [[2006, "torch.xpu.set_stream"]], "stream() (in module torch.xpu)": [[2007, "torch.xpu.stream"]], "synchronize() (in module torch.xpu)": [[2008, "torch.xpu.synchronize"]], "zeros() (in module torch)": [[2009, "torch.zeros"]], "zeros_like() (in module torch)": [[2010, "torch.zeros_like"]], "download_url_to_file() (in module torch.hub)": [[2011, "torch.hub.download_url_to_file"]], "get_dir() (in module torch.hub)": [[2011, "torch.hub.get_dir"]], "help() (in module torch.hub)": [[2011, "torch.hub.help"]], "list() (in module torch.hub)": [[2011, "torch.hub.list"]], "load() (in module torch.hub)": [[2011, "torch.hub.load"]], "load_state_dict_from_url() (in module torch.hub)": [[2011, "torch.hub.load_state_dict_from_url"]], "set_dir() (in module torch.hub)": [[2011, "torch.hub.set_dir"]], "torch.hub": [[2011, "module-torch.hub"]], "pytorch_jit": [[2013, 
"envvar-PYTORCH_JIT"]], "environment variable": [[2013, "envvar-PYTORCH_JIT"]], "export() (in module torch.jit)": [[2013, "torch.jit.export"]], "torch.jit": [[2013, "module-torch.jit"]], "torch.jit.annotations": [[2013, "module-torch.jit.annotations"]], "torch.jit.frontend": [[2013, "module-torch.jit.frontend"]], "torch.jit.generate_bytecode": [[2013, "module-torch.jit.generate_bytecode"]], "torch.jit.mobile": [[2013, "module-torch.jit.mobile"]], "torch.jit.quantized": [[2013, "module-torch.jit.quantized"]], "torch.jit.supported_ops": [[2014, "module-torch.jit.supported_ops"]], "is_scripting() (in module torch.jit)": [[2015, "torch.jit.is_scripting"]], "is_tracing() (in module torch.jit)": [[2015, "torch.jit.is_tracing"]], "torch.jit.unsupported_tensor_ops": [[2018, "module-torch.jit.unsupported_tensor_ops"]], "torch.utils.jit": [[2019, "module-torch.utils.jit"]], "library (class in torch.library)": [[2020, "torch.library.Library"]], "custom_op() (in module torch.library)": [[2020, "torch.library.custom_op"]], "define() (in module torch.library)": [[2020, "torch.library.define"]], "define() (torch.library.library method)": [[2020, "torch.library.Library.define"]], "fallthrough_kernel() (in module torch.library)": [[2020, "torch.library.fallthrough_kernel"]], "get_ctx() (in module torch.library)": [[2020, "torch.library.get_ctx"]], "impl() (in module torch.library)": [[2020, "torch.library.impl"]], "impl() (torch.library.library method)": [[2020, "torch.library.Library.impl"]], "impl_abstract() (in module torch.library)": [[2020, "torch.library.impl_abstract"]], "opcheck() (in module torch.library)": [[2020, "torch.library.opcheck"]], "register_autograd() (in module torch.library)": [[2020, "torch.library.register_autograd"]], "register_fake() (in module torch.library)": [[2020, "torch.library.register_fake"]], "register_kernel() (in module torch.library)": [[2020, "torch.library.register_kernel"]], "torch.library": [[2020, "module-torch.library"]], "torch.linalg": [[2021, "module-torch.linalg"]], "torch._logging": [[2022, "module-torch._logging"]], "torch.masked": [[2023, "module-torch.masked"]], "torch.masked.maskedtensor": [[2023, "module-torch.masked.maskedtensor"]], "torch.masked.maskedtensor.binary": [[2023, "module-torch.masked.maskedtensor.binary"]], "torch.masked.maskedtensor.core": [[2023, "module-torch.masked.maskedtensor.core"]], "torch.masked.maskedtensor.creation": [[2023, "module-torch.masked.maskedtensor.creation"]], "torch.masked.maskedtensor.passthrough": [[2023, "module-torch.masked.maskedtensor.passthrough"]], "torch.masked.maskedtensor.reductions": [[2023, "module-torch.masked.maskedtensor.reductions"]], "torch.masked.maskedtensor.unary": [[2023, "module-torch.masked.maskedtensor.unary"]], "optimize_for_mobile() (in module torch.utils.mobile_optimizer)": [[2026, "torch.utils.mobile_optimizer.optimize_for_mobile"]], "load_url() (in module torch.utils.model_zoo)": [[2027, "torch.utils.model_zoo.load_url"]], "torch.utils.model_zoo": [[2027, "module-torch.utils.model_zoo"]], "moduletracker (class in torch.utils.module_tracker)": [[2028, "torch.utils.module_tracker.ModuleTracker"]], "torch.utils.module_tracker": [[2028, "module-torch.utils.module_tracker"]], "aggregation (class in torch.monitor)": [[2029, "torch.monitor.Aggregation"]], "event (class in torch.monitor)": [[2029, "torch.monitor.Event"]], "eventhandlerhandle (class in torch.monitor)": [[2029, "torch.monitor.EventHandlerHandle"]], "stat (class in torch.monitor)": [[2029, "torch.monitor.Stat"]], 
"tensorboardeventhandler (class in torch.monitor)": [[2029, "torch.monitor.TensorboardEventHandler"]], "__init__() (torch.monitor.event method)": [[2029, "torch.monitor.Event.__init__"]], "__init__() (torch.monitor.stat method)": [[2029, "torch.monitor.Stat.__init__"]], "__init__() (torch.monitor.tensorboardeventhandler method)": [[2029, "torch.monitor.TensorboardEventHandler.__init__"]], "add() (torch.monitor.stat method)": [[2029, "torch.monitor.Stat.add"]], "count (torch.monitor.stat property)": [[2029, "torch.monitor.Stat.count"]], "data (torch.monitor.event property)": [[2029, "torch.monitor.Event.data"]], "data_value_t (class in torch.monitor)": [[2029, "torch.monitor.data_value_t"]], "get() (torch.monitor.stat method)": [[2029, "torch.monitor.Stat.get"]], "log_event() (in module torch.monitor)": [[2029, "torch.monitor.log_event"]], "name (torch.monitor.aggregation property)": [[2029, "torch.monitor.Aggregation.name"]], "name (torch.monitor.event property)": [[2029, "torch.monitor.Event.name"]], "name (torch.monitor.stat property)": [[2029, "torch.monitor.Stat.name"]], "register_event_handler() (in module torch.monitor)": [[2029, "torch.monitor.register_event_handler"]], "timestamp (torch.monitor.event property)": [[2029, "torch.monitor.Event.timestamp"]], "torch.monitor": [[2029, "module-torch.monitor"]], "unregister_event_handler() (in module torch.monitor)": [[2029, "torch.monitor.unregister_event_handler"]], "torch.mps": [[2030, "module-torch.mps"]], "torch.mps.event": [[2030, "module-torch.mps.event"]], "torch.mps.profiler": [[2030, "module-torch.mps.profiler"]], "torch.mtia": [[2031, "module-torch.mtia"]], "spawncontext (class in torch.multiprocessing)": [[2032, "torch.multiprocessing.SpawnContext"]], "get_all_sharing_strategies() (in module torch.multiprocessing)": [[2032, "torch.multiprocessing.get_all_sharing_strategies"]], "get_sharing_strategy() (in module torch.multiprocessing)": [[2032, "torch.multiprocessing.get_sharing_strategy"]], "join() (torch.multiprocessing.spawncontext method)": [[2032, "torch.multiprocessing.SpawnContext.join"]], "set_sharing_strategy() (in module torch.multiprocessing)": [[2032, "torch.multiprocessing.set_sharing_strategy"]], "spawn() (in module torch.multiprocessing.spawn)": [[2032, "torch.multiprocessing.spawn.spawn"]], "torch.multiprocessing": [[2032, "module-torch.multiprocessing"]], "torch.multiprocessing.pool": [[2032, "module-torch.multiprocessing.pool"]], "torch.multiprocessing.queue": [[2032, "module-torch.multiprocessing.queue"]], "torch.multiprocessing.reductions": [[2032, "module-torch.multiprocessing.reductions"]], "torch.multiprocessing.spawn": [[2032, "module-torch.multiprocessing.spawn"]], "align_as() (torch.tensor method)": [[2034, "torch.Tensor.align_as"]], "align_to() (torch.tensor method)": [[2034, "torch.Tensor.align_to"]], "names (torch.tensor attribute)": [[2034, "torch.Tensor.names"]], "refine_names() (torch.tensor method)": [[2034, "torch.Tensor.refine_names"]], "rename() (torch.tensor method)": [[2034, "torch.Tensor.rename"]], "rename_() (torch.tensor method)": [[2034, "torch.Tensor.rename_"]], "as_nested_tensor() (in module torch.nested)": [[2035, "torch.nested.as_nested_tensor"]], "nested_tensor() (in module torch.nested)": [[2035, "torch.nested.nested_tensor"]], "to_padded_tensor() (in module torch.nested)": [[2035, "torch.nested.to_padded_tensor"]], "torch.nested": [[2035, "module-torch.nested"]], "torch.nn": [[2036, "module-torch.nn"]], "torch.nn.backends": [[2036, "module-torch.nn.backends"]], 
"torch.nn.backends.thnn": [[2036, "module-torch.nn.backends.thnn"]], "torch.nn.common_types": [[2036, "module-torch.nn.common_types"]], "torch.nn.cpp": [[2036, "module-torch.nn.cpp"]], "torch.nn.functional": [[2036, "module-torch.nn.functional"]], "torch.nn.grad": [[2036, "module-torch.nn.grad"]], "torch.nn.init": [[2036, "module-torch.nn.init"]], "torch.nn.modules": [[2036, "module-torch.nn.modules"]], "torch.nn.modules.activation": [[2036, "module-torch.nn.modules.activation"]], "torch.nn.modules.adaptive": [[2036, "module-torch.nn.modules.adaptive"]], "torch.nn.modules.batchnorm": [[2036, "module-torch.nn.modules.batchnorm"]], "torch.nn.modules.channelshuffle": [[2036, "module-torch.nn.modules.channelshuffle"]], "torch.nn.modules.container": [[2036, "module-torch.nn.modules.container"]], "torch.nn.modules.conv": [[2036, "module-torch.nn.modules.conv"]], "torch.nn.modules.distance": [[2036, "module-torch.nn.modules.distance"]], "torch.nn.modules.dropout": [[2036, "module-torch.nn.modules.dropout"]], "torch.nn.modules.flatten": [[2036, "module-torch.nn.modules.flatten"]], "torch.nn.modules.fold": [[2036, "module-torch.nn.modules.fold"]], "torch.nn.modules.instancenorm": [[2036, "module-torch.nn.modules.instancenorm"]], "torch.nn.modules.lazy": [[2036, "module-torch.nn.modules.lazy"]], "torch.nn.modules.linear": [[2036, "module-torch.nn.modules.linear"]], "torch.nn.modules.loss": [[2036, "module-torch.nn.modules.loss"]], "torch.nn.modules.module": [[2036, "module-torch.nn.modules.module"]], "torch.nn.modules.normalization": [[2036, "module-torch.nn.modules.normalization"]], "torch.nn.modules.padding": [[2036, "module-torch.nn.modules.padding"]], "torch.nn.modules.pixelshuffle": [[2036, "module-torch.nn.modules.pixelshuffle"]], "torch.nn.modules.pooling": [[2036, "module-torch.nn.modules.pooling"]], "torch.nn.modules.rnn": [[2036, "module-torch.nn.modules.rnn"]], "torch.nn.modules.sparse": [[2036, "module-torch.nn.modules.sparse"]], "torch.nn.modules.transformer": [[2036, "module-torch.nn.modules.transformer"]], "torch.nn.modules.upsampling": [[2036, "module-torch.nn.modules.upsampling"]], "torch.nn.modules.utils": [[2036, "module-torch.nn.modules.utils"]], "torch.nn.parallel": [[2036, "module-torch.nn.parallel"]], "torch.nn.parallel.comm": [[2036, "module-torch.nn.parallel.comm"]], "torch.nn.parallel.distributed": [[2036, "module-torch.nn.parallel.distributed"]], "torch.nn.parallel.parallel_apply": [[2036, "module-torch.nn.parallel.parallel_apply"]], "torch.nn.parallel.replicate": [[2036, "module-torch.nn.parallel.replicate"]], "torch.nn.parallel.scatter_gather": [[2036, "module-torch.nn.parallel.scatter_gather"]], "torch.nn.parameter": [[2036, "module-torch.nn.parameter"]], "torch.nn.utils": [[2036, "module-torch.nn.utils"]], "torch.nn.utils.clip_grad": [[2036, "module-torch.nn.utils.clip_grad"]], "torch.nn.utils.convert_parameters": [[2036, "module-torch.nn.utils.convert_parameters"]], "torch.nn.utils.fusion": [[2036, "module-torch.nn.utils.fusion"]], "torch.nn.utils.init": [[2036, "module-torch.nn.utils.init"]], "torch.nn.utils.memory_format": [[2036, "module-torch.nn.utils.memory_format"]], "torch.nn.utils.parametrizations": [[2036, "module-torch.nn.utils.parametrizations"]], "torch.nn.utils.parametrize": [[2036, "module-torch.nn.utils.parametrize"]], "torch.nn.utils.prune": [[2036, "module-torch.nn.utils.prune"]], "torch.nn.utils.rnn": [[2036, "module-torch.nn.utils.rnn"]], "torch.nn.utils.stateless": [[2036, "module-torch.nn.utils.stateless"]], "torch.nn.attention": [[2037, 
"module-torch.nn.attention"]], "torch.nn.attention.bias": [[2038, "module-torch.nn.attention.bias"]], "calculate_gain() (in module torch.nn.init)": [[2040, "torch.nn.init.calculate_gain"]], "constant_() (in module torch.nn.init)": [[2040, "torch.nn.init.constant_"]], "dirac_() (in module torch.nn.init)": [[2040, "torch.nn.init.dirac_"]], "eye_() (in module torch.nn.init)": [[2040, "torch.nn.init.eye_"]], "kaiming_normal_() (in module torch.nn.init)": [[2040, "torch.nn.init.kaiming_normal_"]], "kaiming_uniform_() (in module torch.nn.init)": [[2040, "torch.nn.init.kaiming_uniform_"]], "normal_() (in module torch.nn.init)": [[2040, "torch.nn.init.normal_"]], "ones_() (in module torch.nn.init)": [[2040, "torch.nn.init.ones_"]], "orthogonal_() (in module torch.nn.init)": [[2040, "torch.nn.init.orthogonal_"]], "sparse_() (in module torch.nn.init)": [[2040, "torch.nn.init.sparse_"]], "trunc_normal_() (in module torch.nn.init)": [[2040, "torch.nn.init.trunc_normal_"]], "uniform_() (in module torch.nn.init)": [[2040, "torch.nn.init.uniform_"]], "xavier_normal_() (in module torch.nn.init)": [[2040, "torch.nn.init.xavier_normal_"]], "xavier_uniform_() (in module torch.nn.init)": [[2040, "torch.nn.init.xavier_uniform_"]], "zeros_() (in module torch.nn.init)": [[2040, "torch.nn.init.zeros_"]], "add_safe_globals() (in module torch.serialization)": [[2060, "torch.serialization.add_safe_globals"]], "clear_safe_globals() (in module torch.serialization)": [[2060, "torch.serialization.clear_safe_globals"]], "get_default_load_endianness() (in module torch.serialization)": [[2060, "torch.serialization.get_default_load_endianness"]], "get_default_mmap_options() (in module torch.serialization)": [[2060, "torch.serialization.get_default_mmap_options"]], "get_safe_globals() (in module torch.serialization)": [[2060, "torch.serialization.get_safe_globals"]], "register_package() (in module torch.serialization)": [[2060, "torch.serialization.register_package"]], "set_default_load_endianness() (in module torch.serialization)": [[2060, "torch.serialization.set_default_load_endianness"]], "set_default_mmap_options() (in module torch.serialization)": [[2060, "torch.serialization.set_default_mmap_options"]], "torch.onnx.errors": [[2062, "module-torch.onnx.errors"]], "torch.onnx.operators": [[2062, "module-torch.onnx.operators"]], "torch.onnx.symbolic_caffe2": [[2062, "module-torch.onnx.symbolic_caffe2"]], "torch.onnx.symbolic_helper": [[2062, "module-torch.onnx.symbolic_helper"]], "torch.onnx.symbolic_opset10": [[2062, "module-torch.onnx.symbolic_opset10"]], "torch.onnx.symbolic_opset11": [[2062, "module-torch.onnx.symbolic_opset11"]], "torch.onnx.symbolic_opset12": [[2062, "module-torch.onnx.symbolic_opset12"]], "torch.onnx.symbolic_opset13": [[2062, "module-torch.onnx.symbolic_opset13"]], "torch.onnx.symbolic_opset14": [[2062, "module-torch.onnx.symbolic_opset14"]], "torch.onnx.symbolic_opset15": [[2062, "module-torch.onnx.symbolic_opset15"]], "torch.onnx.symbolic_opset16": [[2062, "module-torch.onnx.symbolic_opset16"]], "torch.onnx.symbolic_opset17": [[2062, "module-torch.onnx.symbolic_opset17"]], "torch.onnx.symbolic_opset18": [[2062, "module-torch.onnx.symbolic_opset18"]], "torch.onnx.symbolic_opset19": [[2062, "module-torch.onnx.symbolic_opset19"]], "torch.onnx.symbolic_opset20": [[2062, "module-torch.onnx.symbolic_opset20"]], "torch.onnx.symbolic_opset7": [[2062, "module-torch.onnx.symbolic_opset7"]], "torch.onnx.symbolic_opset8": [[2062, "module-torch.onnx.symbolic_opset8"]], "torch.onnx.symbolic_opset9": [[2062, 
"module-torch.onnx.symbolic_opset9"]], "torch.onnx.utils": [[2062, "module-torch.onnx.utils"]], "torch.onnx.verification": [[2062, "module-torch.onnx.verification"]], "diagnosticoptions (class in torch.onnx)": [[2063, "torch.onnx.DiagnosticOptions"]], "exportoptions (class in torch.onnx)": [[2063, "torch.onnx.ExportOptions"]], "invalidexportoptionserror (class in torch.onnx)": [[2063, "torch.onnx.InvalidExportOptionsError"]], "onnxprogram (class in torch.onnx)": [[2063, "torch.onnx.ONNXProgram"]], "onnxprogramserializer (class in torch.onnx)": [[2063, "torch.onnx.ONNXProgramSerializer"]], "onnxruntimeoptions (class in torch.onnx)": [[2063, "torch.onnx.ONNXRuntimeOptions"]], "onnxexportererror (class in torch.onnx)": [[2063, "torch.onnx.OnnxExporterError"]], "onnxregistry (class in torch.onnx)": [[2063, "torch.onnx.OnnxRegistry"]], "adapt_torch_inputs_to_onnx() (torch.onnx.onnxprogram method)": [[2063, "torch.onnx.ONNXProgram.adapt_torch_inputs_to_onnx"]], "adapt_torch_outputs_to_onnx() (torch.onnx.onnxprogram method)": [[2063, "torch.onnx.ONNXProgram.adapt_torch_outputs_to_onnx"]], "diagnostic_context (torch.onnx.onnxprogram property)": [[2063, "torch.onnx.ONNXProgram.diagnostic_context"]], "dynamo_export() (in module torch.onnx)": [[2063, "torch.onnx.dynamo_export"]], "enable_fake_mode() (in module torch.onnx)": [[2063, "torch.onnx.enable_fake_mode"]], "fake_context (torch.onnx.onnxprogram property)": [[2063, "torch.onnx.ONNXProgram.fake_context"]], "get_op_functions() (torch.onnx.onnxregistry method)": [[2063, "torch.onnx.OnnxRegistry.get_op_functions"]], "is_registered_op() (torch.onnx.onnxregistry method)": [[2063, "torch.onnx.OnnxRegistry.is_registered_op"]], "model_proto (torch.onnx.onnxprogram property)": [[2063, "torch.onnx.ONNXProgram.model_proto"]], "model_signature (torch.onnx.onnxprogram property)": [[2063, "torch.onnx.ONNXProgram.model_signature"]], "opset_version (torch.onnx.onnxregistry property)": [[2063, "torch.onnx.OnnxRegistry.opset_version"]], "register_op() (torch.onnx.onnxregistry method)": [[2063, "torch.onnx.OnnxRegistry.register_op"]], "save() (torch.onnx.onnxprogram method)": [[2063, "torch.onnx.ONNXProgram.save"]], "save_diagnostics() (torch.onnx.onnxprogram method)": [[2063, "torch.onnx.ONNXProgram.save_diagnostics"]], "serialize() (torch.onnx.onnxprogramserializer method)": [[2063, "torch.onnx.ONNXProgramSerializer.serialize"]], "is_onnxrt_backend_supported() (in module torch.onnx)": [[2064, "torch.onnx.is_onnxrt_backend_supported"]], "disable_log() (in module torch.onnx)": [[2065, "torch.onnx.disable_log"]], "enable_log() (in module torch.onnx)": [[2065, "torch.onnx.enable_log"]], "export() (in module torch.onnx)": [[2065, "torch.onnx.export"]], "export_to_pretty_string() (in module torch.onnx)": [[2065, "torch.onnx.export_to_pretty_string"]], "find_mismatch() (in module torch.onnx.verification)": [[2065, "torch.onnx.verification.find_mismatch"]], "is_in_onnx_export() (in module torch.onnx)": [[2065, "torch.onnx.is_in_onnx_export"]], "register_custom_op_symbolic() (in module torch.onnx)": [[2065, "torch.onnx.register_custom_op_symbolic"]], "select_model_mode_for_export() (in module torch.onnx)": [[2065, "torch.onnx.select_model_mode_for_export"]], "torch.onnx": [[2065, "module-torch.onnx"]], "unregister_custom_op_symbolic() (in module torch.onnx)": [[2065, "torch.onnx.unregister_custom_op_symbolic"]], "optimizer (class in torch.optim)": [[2067, "torch.optim.Optimizer"]], "torch.optim": [[2067, "module-torch.optim"]], "torch.optim.adadelta": [[2067, 
"module-torch.optim.adadelta"]], "torch.optim.adagrad": [[2067, "module-torch.optim.adagrad"]], "torch.optim.adam": [[2067, "module-torch.optim.adam"]], "torch.optim.adamax": [[2067, "module-torch.optim.adamax"]], "torch.optim.adamw": [[2067, "module-torch.optim.adamw"]], "torch.optim.asgd": [[2067, "module-torch.optim.asgd"]], "torch.optim.lbfgs": [[2067, "module-torch.optim.lbfgs"]], "torch.optim.lr_scheduler": [[2067, "module-torch.optim.lr_scheduler"]], "torch.optim.nadam": [[2067, "module-torch.optim.nadam"]], "torch.optim.optimizer": [[2067, "module-torch.optim.optimizer"]], "torch.optim.radam": [[2067, "module-torch.optim.radam"]], "torch.optim.rmsprop": [[2067, "module-torch.optim.rmsprop"]], "torch.optim.rprop": [[2067, "module-torch.optim.rprop"]], "torch.optim.sgd": [[2067, "module-torch.optim.sgd"]], "torch.optim.sparse_adam": [[2067, "module-torch.optim.sparse_adam"]], "torch.optim.swa_utils": [[2067, "module-torch.optim.swa_utils"]], "directory (class in torch.package)": [[2068, "torch.package.Directory"]], "emptymatcherror (class in torch.package)": [[2068, "torch.package.EmptyMatchError"]], "packageexporter (class in torch.package)": [[2068, "torch.package.PackageExporter"]], "packageimporter (class in torch.package)": [[2068, "torch.package.PackageImporter"]], "packagingerror (class in torch.package)": [[2068, "torch.package.PackagingError"]], "__init__() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.__init__"]], "__init__() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.__init__"]], "add_dependency() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.add_dependency"]], "all_paths() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.all_paths"]], "close() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.close"]], "denied_modules() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.denied_modules"]], "deny() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.deny"]], "dependency_graph_string() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.dependency_graph_string"]], "extern() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.extern"]], "externed_modules() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.externed_modules"]], "file_structure() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.file_structure"]], "get_rdeps() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.get_rdeps"]], "get_unique_id() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.get_unique_id"]], "has_file() (torch.package.directory method)": [[2068, "torch.package.Directory.has_file"]], "id() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.id"]], "import_module() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.import_module"]], "intern() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.intern"]], "interned_modules() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.interned_modules"]], "load_binary() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.load_binary"]], "load_pickle() (torch.package.packageimporter method)": [[2068, 
"torch.package.PackageImporter.load_pickle"]], "load_text() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.load_text"]], "mock() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.mock"]], "mocked_modules() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.mocked_modules"]], "python_version() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.python_version"]], "register_extern_hook() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.register_extern_hook"]], "register_intern_hook() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.register_intern_hook"]], "register_mock_hook() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.register_mock_hook"]], "save_binary() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_binary"]], "save_module() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_module"]], "save_pickle() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_pickle"]], "save_source_file() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_source_file"]], "save_source_string() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_source_string"]], "save_text() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_text"]], "torch.package": [[2068, "module-torch.package"]], "torch.package.analyze": [[2068, "module-torch.package.analyze"]], "torch.package.analyze.find_first_use_of_broken_modules": [[2068, "module-torch.package.analyze.find_first_use_of_broken_modules"]], "torch.package.analyze.is_from_package": [[2068, "module-torch.package.analyze.is_from_package"]], "torch.package.analyze.trace_dependencies": [[2068, "module-torch.package.analyze.trace_dependencies"]], "torch.package.file_structure_representation": [[2068, "module-torch.package.file_structure_representation"]], "torch.package.find_file_dependencies": [[2068, "module-torch.package.find_file_dependencies"]], "torch.package.glob_group": [[2068, "module-torch.package.glob_group"]], "torch.package.importer": [[2068, "module-torch.package.importer"]], "torch.package.package_exporter": [[2068, "module-torch.package.package_exporter"]], "torch.package.package_importer": [[2068, "module-torch.package.package_importer"]], "profileraction (class in torch.profiler)": [[2069, "torch.profiler.ProfilerAction"]], "profileractivity (class in torch.profiler)": [[2069, "torch.profiler.ProfilerActivity"]], "_kinetoprofile (class in torch.profiler)": [[2069, "torch.profiler._KinetoProfile"]], "add_metadata() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.add_metadata"]], "add_metadata_json() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.add_metadata_json"]], "events() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.events"]], "export_chrome_trace() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.export_chrome_trace"]], "export_memory_timeline() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.export_memory_timeline"]], "export_stacks() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.export_stacks"]], "is_available() (in module torch.profiler.itt)": 
[[2069, "torch.profiler.itt.is_available"]], "key_averages() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.key_averages"]], "mark() (in module torch.profiler.itt)": [[2069, "torch.profiler.itt.mark"]], "name (torch.profiler.profileractivity property)": [[2069, "torch.profiler.ProfilerActivity.name"]], "preset_metadata_json() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.preset_metadata_json"]], "profile (class in torch.profiler)": [[2069, "torch.profiler.profile"]], "range_pop() (in module torch.profiler.itt)": [[2069, "torch.profiler.itt.range_pop"]], "range_push() (in module torch.profiler.itt)": [[2069, "torch.profiler.itt.range_push"]], "schedule() (in module torch.profiler)": [[2069, "torch.profiler.schedule"]], "step() (torch.profiler.profile method)": [[2069, "torch.profiler.profile.step"]], "tensorboard_trace_handler() (in module torch.profiler)": [[2069, "torch.profiler.tensorboard_trace_handler"]], "torch.profiler": [[2069, "module-torch.profiler"]], "torch.profiler.itt": [[2069, "module-torch.profiler.itt"]], "torch.profiler.profiler": [[2069, "module-torch.profiler.profiler"]], "torch.profiler.python_tracer": [[2069, "module-torch.profiler.python_tracer"]], "torch.ao": [[2070, "module-torch.ao"]], "torch.ao.nn": [[2070, "module-torch.ao.nn"]], "torch.ao.nn.intrinsic.modules.fused": [[2070, "module-torch.ao.nn.intrinsic.modules.fused"]], "torch.ao.nn.intrinsic.qat.modules.conv_fused": [[2070, "module-torch.ao.nn.intrinsic.qat.modules.conv_fused"]], "torch.ao.nn.intrinsic.qat.modules.linear_fused": [[2070, "module-torch.ao.nn.intrinsic.qat.modules.linear_fused"]], "torch.ao.nn.intrinsic.qat.modules.linear_relu": [[2070, "module-torch.ao.nn.intrinsic.qat.modules.linear_relu"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu": [[2070, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu"]], "torch.ao.nn.intrinsic.quantized.modules.bn_relu": [[2070, "module-torch.ao.nn.intrinsic.quantized.modules.bn_relu"]], "torch.ao.nn.intrinsic.quantized.modules.conv_add": [[2070, "module-torch.ao.nn.intrinsic.quantized.modules.conv_add"]], "torch.ao.nn.intrinsic.quantized.modules.conv_relu": [[2070, "module-torch.ao.nn.intrinsic.quantized.modules.conv_relu"]], "torch.ao.nn.intrinsic.quantized.modules.linear_relu": [[2070, "module-torch.ao.nn.intrinsic.quantized.modules.linear_relu"]], "torch.ao.nn.qat.dynamic.modules.linear": [[2070, "module-torch.ao.nn.qat.dynamic.modules.linear"]], "torch.ao.nn.qat.modules.conv": [[2070, "module-torch.ao.nn.qat.modules.conv"]], "torch.ao.nn.qat.modules.embedding_ops": [[2070, "module-torch.ao.nn.qat.modules.embedding_ops"]], "torch.ao.nn.qat.modules.linear": [[2070, "module-torch.ao.nn.qat.modules.linear"]], "torch.ao.nn.quantizable": [[2070, "module-torch.ao.nn.quantizable"]], "torch.ao.nn.quantizable.modules": [[2070, "module-torch.ao.nn.quantizable.modules"]], "torch.ao.nn.quantizable.modules.activation": [[2070, "module-torch.ao.nn.quantizable.modules.activation"]], "torch.ao.nn.quantizable.modules.rnn": [[2070, "module-torch.ao.nn.quantizable.modules.rnn"]], "torch.ao.nn.quantized": [[2070, "module-torch.ao.nn.quantized"]], "torch.ao.nn.quantized.dynamic.modules.conv": [[2070, "module-torch.ao.nn.quantized.dynamic.modules.conv"]], "torch.ao.nn.quantized.dynamic.modules.linear": [[2070, "module-torch.ao.nn.quantized.dynamic.modules.linear"]], "torch.ao.nn.quantized.dynamic.modules.rnn": [[2070, "module-torch.ao.nn.quantized.dynamic.modules.rnn"]], 
"torch.ao.nn.quantized.modules.activation": [[2070, "module-torch.ao.nn.quantized.modules.activation"]], "torch.ao.nn.quantized.modules.batchnorm": [[2070, "module-torch.ao.nn.quantized.modules.batchnorm"]], "torch.ao.nn.quantized.modules.conv": [[2070, "module-torch.ao.nn.quantized.modules.conv"]], "torch.ao.nn.quantized.modules.dropout": [[2070, "module-torch.ao.nn.quantized.modules.dropout"]], "torch.ao.nn.quantized.modules.embedding_ops": [[2070, "module-torch.ao.nn.quantized.modules.embedding_ops"]], "torch.ao.nn.quantized.modules.functional_modules": [[2070, "module-torch.ao.nn.quantized.modules.functional_modules"]], "torch.ao.nn.quantized.modules.linear": [[2070, "module-torch.ao.nn.quantized.modules.linear"]], "torch.ao.nn.quantized.modules.normalization": [[2070, "module-torch.ao.nn.quantized.modules.normalization"]], "torch.ao.nn.quantized.modules.rnn": [[2070, "module-torch.ao.nn.quantized.modules.rnn"]], "torch.ao.nn.quantized.modules.utils": [[2070, "module-torch.ao.nn.quantized.modules.utils"]], "torch.ao.nn.quantized.reference": [[2070, "module-torch.ao.nn.quantized.reference"]], "torch.ao.nn.quantized.reference.modules": [[2070, "module-torch.ao.nn.quantized.reference.modules"]], "torch.ao.nn.quantized.reference.modules.conv": [[2070, "module-torch.ao.nn.quantized.reference.modules.conv"]], "torch.ao.nn.quantized.reference.modules.linear": [[2070, "module-torch.ao.nn.quantized.reference.modules.linear"]], "torch.ao.nn.quantized.reference.modules.rnn": [[2070, "module-torch.ao.nn.quantized.reference.modules.rnn"]], "torch.ao.nn.quantized.reference.modules.sparse": [[2070, "module-torch.ao.nn.quantized.reference.modules.sparse"]], "torch.ao.nn.quantized.reference.modules.utils": [[2070, "module-torch.ao.nn.quantized.reference.modules.utils"]], "torch.ao.nn.sparse": [[2070, "module-torch.ao.nn.sparse"]], "torch.ao.nn.sparse.quantized": [[2070, "module-torch.ao.nn.sparse.quantized"]], "torch.ao.nn.sparse.quantized.dynamic": [[2070, "module-torch.ao.nn.sparse.quantized.dynamic"]], "torch.ao.nn.sparse.quantized.dynamic.linear": [[2070, "module-torch.ao.nn.sparse.quantized.dynamic.linear"]], "torch.ao.nn.sparse.quantized.linear": [[2070, "module-torch.ao.nn.sparse.quantized.linear"]], "torch.ao.nn.sparse.quantized.utils": [[2070, "module-torch.ao.nn.sparse.quantized.utils"]], "torch.ao.ns": [[2070, "module-torch.ao.ns"]], "torch.ao.ns.fx": [[2070, "module-torch.ao.ns.fx"]], "torch.ao.ns.fx.graph_matcher": [[2070, "module-torch.ao.ns.fx.graph_matcher"]], "torch.ao.ns.fx.graph_passes": [[2070, "module-torch.ao.ns.fx.graph_passes"]], "torch.ao.ns.fx.mappings": [[2070, "module-torch.ao.ns.fx.mappings"]], "torch.ao.ns.fx.n_shadows_utils": [[2070, "module-torch.ao.ns.fx.n_shadows_utils"]], "torch.ao.ns.fx.ns_types": [[2070, "module-torch.ao.ns.fx.ns_types"]], "torch.ao.ns.fx.pattern_utils": [[2070, "module-torch.ao.ns.fx.pattern_utils"]], "torch.ao.ns.fx.qconfig_multi_mapping": [[2070, "module-torch.ao.ns.fx.qconfig_multi_mapping"]], "torch.ao.ns.fx.utils": [[2070, "module-torch.ao.ns.fx.utils"]], "torch.ao.ns.fx.weight_utils": [[2070, "module-torch.ao.ns.fx.weight_utils"]], "torch.ao.pruning": [[2070, "module-torch.ao.pruning"]], "torch.ao.pruning.scheduler": [[2070, "module-torch.ao.pruning.scheduler"]], "torch.ao.pruning.scheduler.base_scheduler": [[2070, "module-torch.ao.pruning.scheduler.base_scheduler"]], "torch.ao.pruning.scheduler.cubic_scheduler": [[2070, "module-torch.ao.pruning.scheduler.cubic_scheduler"]], "torch.ao.pruning.scheduler.lambda_scheduler": [[2070, 
"module-torch.ao.pruning.scheduler.lambda_scheduler"]], "torch.ao.pruning.sparsifier": [[2070, "module-torch.ao.pruning.sparsifier"]], "torch.ao.pruning.sparsifier.base_sparsifier": [[2070, "module-torch.ao.pruning.sparsifier.base_sparsifier"]], "torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier": [[2070, "module-torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier"]], "torch.ao.pruning.sparsifier.utils": [[2070, "module-torch.ao.pruning.sparsifier.utils"]], "torch.ao.pruning.sparsifier.weight_norm_sparsifier": [[2070, "module-torch.ao.pruning.sparsifier.weight_norm_sparsifier"]], "torch.ao.quantization": [[2070, "module-torch.ao.quantization"]], "torch.ao.quantization.backend_config": [[2070, "module-torch.ao.quantization.backend_config"]], "torch.ao.quantization.backend_config.backend_config": [[2070, "module-torch.ao.quantization.backend_config.backend_config"]], "torch.ao.quantization.backend_config.executorch": [[2070, "module-torch.ao.quantization.backend_config.executorch"]], "torch.ao.quantization.backend_config.fbgemm": [[2070, "module-torch.ao.quantization.backend_config.fbgemm"]], "torch.ao.quantization.backend_config.native": [[2070, "module-torch.ao.quantization.backend_config.native"]], "torch.ao.quantization.backend_config.observation_type": [[2070, "module-torch.ao.quantization.backend_config.observation_type"]], "torch.ao.quantization.backend_config.onednn": [[2070, "module-torch.ao.quantization.backend_config.onednn"]], "torch.ao.quantization.backend_config.qnnpack": [[2070, "module-torch.ao.quantization.backend_config.qnnpack"]], "torch.ao.quantization.backend_config.tensorrt": [[2070, "module-torch.ao.quantization.backend_config.tensorrt"]], "torch.ao.quantization.backend_config.utils": [[2070, "module-torch.ao.quantization.backend_config.utils"]], "torch.ao.quantization.backend_config.x86": [[2070, "module-torch.ao.quantization.backend_config.x86"]], "torch.ao.quantization.fake_quantize": [[2070, "module-torch.ao.quantization.fake_quantize"]], "torch.ao.quantization.fuse_modules": [[2070, "module-torch.ao.quantization.fuse_modules"]], "torch.ao.quantization.fuser_method_mappings": [[2070, "module-torch.ao.quantization.fuser_method_mappings"]], "torch.ao.quantization.fx": [[2070, "module-torch.ao.quantization.fx"]], "torch.ao.quantization.fx.convert": [[2070, "module-torch.ao.quantization.fx.convert"]], "torch.ao.quantization.fx.custom_config": [[2070, "module-torch.ao.quantization.fx.custom_config"]], "torch.ao.quantization.fx.fuse": [[2070, "module-torch.ao.quantization.fx.fuse"]], "torch.ao.quantization.fx.fuse_handler": [[2070, "module-torch.ao.quantization.fx.fuse_handler"]], "torch.ao.quantization.fx.graph_module": [[2070, "module-torch.ao.quantization.fx.graph_module"]], "torch.ao.quantization.fx.lower_to_fbgemm": [[2070, "module-torch.ao.quantization.fx.lower_to_fbgemm"]], "torch.ao.quantization.fx.lower_to_qnnpack": [[2070, "module-torch.ao.quantization.fx.lower_to_qnnpack"]], "torch.ao.quantization.fx.lstm_utils": [[2070, "module-torch.ao.quantization.fx.lstm_utils"]], "torch.ao.quantization.fx.match_utils": [[2070, "module-torch.ao.quantization.fx.match_utils"]], "torch.ao.quantization.fx.pattern_utils": [[2070, "module-torch.ao.quantization.fx.pattern_utils"]], "torch.ao.quantization.fx.prepare": [[2070, "module-torch.ao.quantization.fx.prepare"]], "torch.ao.quantization.fx.qconfig_mapping_utils": [[2070, "module-torch.ao.quantization.fx.qconfig_mapping_utils"]], "torch.ao.quantization.fx.quantize_handler": [[2070, 
"module-torch.ao.quantization.fx.quantize_handler"]], "torch.ao.quantization.fx.tracer": [[2070, "module-torch.ao.quantization.fx.tracer"]], "torch.ao.quantization.fx.utils": [[2070, "module-torch.ao.quantization.fx.utils"]], "torch.ao.quantization.observer": [[2070, "module-torch.ao.quantization.observer"]], "torch.ao.quantization.pt2e.duplicate_dq_pass": [[2070, "module-torch.ao.quantization.pt2e.duplicate_dq_pass"]], "torch.ao.quantization.pt2e.export_utils": [[2070, "module-torch.ao.quantization.pt2e.export_utils"]], "torch.ao.quantization.pt2e.graph_utils": [[2070, "module-torch.ao.quantization.pt2e.graph_utils"]], "torch.ao.quantization.pt2e.port_metadata_pass": [[2070, "module-torch.ao.quantization.pt2e.port_metadata_pass"]], "torch.ao.quantization.pt2e.prepare": [[2070, "module-torch.ao.quantization.pt2e.prepare"]], "torch.ao.quantization.pt2e.qat_utils": [[2070, "module-torch.ao.quantization.pt2e.qat_utils"]], "torch.ao.quantization.pt2e.representation.rewrite": [[2070, "module-torch.ao.quantization.pt2e.representation.rewrite"]], "torch.ao.quantization.pt2e.utils": [[2070, "module-torch.ao.quantization.pt2e.utils"]], "torch.ao.quantization.qconfig": [[2070, "module-torch.ao.quantization.qconfig"]], "torch.ao.quantization.qconfig_mapping": [[2070, "module-torch.ao.quantization.qconfig_mapping"]], "torch.ao.quantization.quant_type": [[2070, "module-torch.ao.quantization.quant_type"]], "torch.ao.quantization.quantization_mappings": [[2070, "module-torch.ao.quantization.quantization_mappings"]], "torch.ao.quantization.quantize_fx": [[2070, "module-torch.ao.quantization.quantize_fx"]], "torch.ao.quantization.quantize_jit": [[2070, "module-torch.ao.quantization.quantize_jit"]], "torch.ao.quantization.quantize_pt2e": [[2070, "module-torch.ao.quantization.quantize_pt2e"]], "torch.ao.quantization.quantizer.composable_quantizer": [[2070, "module-torch.ao.quantization.quantizer.composable_quantizer"]], "torch.ao.quantization.quantizer.embedding_quantizer": [[2070, "module-torch.ao.quantization.quantizer.embedding_quantizer"]], "torch.ao.quantization.quantizer.quantizer": [[2070, "module-torch.ao.quantization.quantizer.quantizer"]], "torch.ao.quantization.quantizer.utils": [[2070, "module-torch.ao.quantization.quantizer.utils"]], "torch.ao.quantization.quantizer.x86_inductor_quantizer": [[2070, "module-torch.ao.quantization.quantizer.x86_inductor_quantizer"]], "torch.ao.quantization.quantizer.xnnpack_quantizer": [[2070, "module-torch.ao.quantization.quantizer.xnnpack_quantizer"]], "torch.ao.quantization.quantizer.xnnpack_quantizer_utils": [[2070, "module-torch.ao.quantization.quantizer.xnnpack_quantizer_utils"]], "torch.ao.quantization.stubs": [[2070, "module-torch.ao.quantization.stubs"]], "torch.ao.quantization.utils": [[2070, "module-torch.ao.quantization.utils"]], "torch.nn.intrinsic.modules.fused": [[2070, "module-torch.nn.intrinsic.modules.fused"]], "torch.nn.intrinsic.qat.modules.conv_fused": [[2070, "module-torch.nn.intrinsic.qat.modules.conv_fused"]], "torch.nn.intrinsic.qat.modules.linear_fused": [[2070, "module-torch.nn.intrinsic.qat.modules.linear_fused"]], "torch.nn.intrinsic.qat.modules.linear_relu": [[2070, "module-torch.nn.intrinsic.qat.modules.linear_relu"]], "torch.nn.intrinsic.quantized.dynamic.modules.linear_relu": [[2070, "module-torch.nn.intrinsic.quantized.dynamic.modules.linear_relu"]], "torch.nn.intrinsic.quantized.modules.bn_relu": [[2070, "module-torch.nn.intrinsic.quantized.modules.bn_relu"]], "torch.nn.intrinsic.quantized.modules.conv_relu": [[2070, 
"module-torch.nn.intrinsic.quantized.modules.conv_relu"]], "torch.nn.intrinsic.quantized.modules.linear_relu": [[2070, "module-torch.nn.intrinsic.quantized.modules.linear_relu"]], "torch.nn.qat.dynamic.modules.linear": [[2070, "module-torch.nn.qat.dynamic.modules.linear"]], "torch.nn.qat.modules.conv": [[2070, "module-torch.nn.qat.modules.conv"]], "torch.nn.qat.modules.embedding_ops": [[2070, "module-torch.nn.qat.modules.embedding_ops"]], "torch.nn.qat.modules.linear": [[2070, "module-torch.nn.qat.modules.linear"]], "torch.nn.quantizable.modules.activation": [[2070, "module-torch.nn.quantizable.modules.activation"]], "torch.nn.quantizable.modules.rnn": [[2070, "module-torch.nn.quantizable.modules.rnn"]], "torch.nn.quantized.dynamic.modules.conv": [[2070, "module-torch.nn.quantized.dynamic.modules.conv"]], "torch.nn.quantized.dynamic.modules.linear": [[2070, "module-torch.nn.quantized.dynamic.modules.linear"]], "torch.nn.quantized.dynamic.modules.rnn": [[2070, "module-torch.nn.quantized.dynamic.modules.rnn"]], "torch.nn.quantized.functional": [[2070, "module-torch.nn.quantized.functional"]], "torch.nn.quantized.modules.activation": [[2070, "module-torch.nn.quantized.modules.activation"]], "torch.nn.quantized.modules.batchnorm": [[2070, "module-torch.nn.quantized.modules.batchnorm"]], "torch.nn.quantized.modules.conv": [[2070, "module-torch.nn.quantized.modules.conv"]], "torch.nn.quantized.modules.dropout": [[2070, "module-torch.nn.quantized.modules.dropout"]], "torch.nn.quantized.modules.embedding_ops": [[2070, "module-torch.nn.quantized.modules.embedding_ops"]], "torch.nn.quantized.modules.functional_modules": [[2070, "module-torch.nn.quantized.modules.functional_modules"]], "torch.nn.quantized.modules.linear": [[2070, "module-torch.nn.quantized.modules.linear"]], "torch.nn.quantized.modules.normalization": [[2070, "module-torch.nn.quantized.modules.normalization"]], "torch.nn.quantized.modules.rnn": [[2070, "module-torch.nn.quantized.modules.rnn"]], "torch.nn.quantized.modules.utils": [[2070, "module-torch.nn.quantized.modules.utils"]], "torch.quantization.fake_quantize": [[2070, "module-torch.quantization.fake_quantize"]], "torch.quantization.fuse_modules": [[2070, "module-torch.quantization.fuse_modules"]], "torch.quantization.fuser_method_mappings": [[2070, "module-torch.quantization.fuser_method_mappings"]], "torch.quantization.fx.convert": [[2070, "module-torch.quantization.fx.convert"]], "torch.quantization.fx.fuse": [[2070, "module-torch.quantization.fx.fuse"]], "torch.quantization.fx.fusion_patterns": [[2070, "module-torch.quantization.fx.fusion_patterns"]], "torch.quantization.fx.graph_module": [[2070, "module-torch.quantization.fx.graph_module"]], "torch.quantization.fx.match_utils": [[2070, "module-torch.quantization.fx.match_utils"]], "torch.quantization.fx.pattern_utils": [[2070, "module-torch.quantization.fx.pattern_utils"]], "torch.quantization.fx.prepare": [[2070, "module-torch.quantization.fx.prepare"]], "torch.quantization.fx.quantization_patterns": [[2070, "module-torch.quantization.fx.quantization_patterns"]], "torch.quantization.fx.quantization_types": [[2070, "module-torch.quantization.fx.quantization_types"]], "torch.quantization.fx.utils": [[2070, "module-torch.quantization.fx.utils"]], "torch.quantization.observer": [[2070, "module-torch.quantization.observer"]], "torch.quantization.qconfig": [[2070, "module-torch.quantization.qconfig"]], "torch.quantization.quant_type": [[2070, "module-torch.quantization.quant_type"]], "torch.quantization.quantization_mappings": 
[[2070, "module-torch.quantization.quantization_mappings"]], "torch.quantization.quantize": [[2070, "module-torch.quantization.quantize"]], "torch.quantization.quantize_fx": [[2070, "module-torch.quantization.quantize_fx"]], "torch.quantization.quantize_jit": [[2070, "module-torch.quantization.quantize_jit"]], "torch.quantization.stubs": [[2070, "module-torch.quantization.stubs"]], "torch.quantization.utils": [[2070, "module-torch.quantization.utils"]], "torch.ao.nn.intrinsic": [[2073, "module-torch.ao.nn.intrinsic"]], "torch.ao.nn.intrinsic.modules": [[2073, "module-torch.ao.nn.intrinsic.modules"]], "torch.ao.nn.intrinsic.qat": [[2073, "module-torch.ao.nn.intrinsic.qat"]], "torch.ao.nn.intrinsic.qat.modules": [[2073, "module-torch.ao.nn.intrinsic.qat.modules"]], "torch.ao.nn.intrinsic.quantized": [[2073, "module-torch.ao.nn.intrinsic.quantized"]], "torch.ao.nn.intrinsic.quantized.dynamic": [[2073, "module-torch.ao.nn.intrinsic.quantized.dynamic"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules": [[2073, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules"]], "torch.ao.nn.intrinsic.quantized.modules": [[2073, "module-torch.ao.nn.intrinsic.quantized.modules"]], "torch.ao.nn.qat": [[2073, "module-torch.ao.nn.qat"]], "torch.ao.nn.qat.dynamic": [[2073, "module-torch.ao.nn.qat.dynamic"]], "torch.ao.nn.qat.dynamic.modules": [[2073, "module-torch.ao.nn.qat.dynamic.modules"]], "torch.ao.nn.qat.modules": [[2073, "module-torch.ao.nn.qat.modules"]], "torch.ao.nn.quantized.dynamic": [[2073, "module-torch.ao.nn.quantized.dynamic"]], "torch.ao.nn.quantized.dynamic.modules": [[2073, "module-torch.ao.nn.quantized.dynamic.modules"]], "torch.ao.nn.quantized.functional": [[2073, "module-torch.ao.nn.quantized.functional"]], "torch.ao.nn.quantized.modules": [[2073, "module-torch.ao.nn.quantized.modules"]], "torch.ao.quantization.pt2e": [[2073, "module-torch.ao.quantization.pt2e"]], "torch.ao.quantization.pt2e.generate_numeric_debug_handle": [[2073, "module-torch.ao.quantization.pt2e.generate_numeric_debug_handle"]], "torch.ao.quantization.pt2e.representation": [[2073, "module-torch.ao.quantization.pt2e.representation"]], "torch.ao.quantization.quantizer": [[2073, "module-torch.ao.quantization.quantizer"]], "torch.nn.intrinsic": [[2073, "module-torch.nn.intrinsic"]], "torch.nn.intrinsic.modules": [[2073, "module-torch.nn.intrinsic.modules"]], "torch.nn.intrinsic.qat": [[2073, "module-torch.nn.intrinsic.qat"]], "torch.nn.intrinsic.qat.modules": [[2073, "module-torch.nn.intrinsic.qat.modules"]], "torch.nn.intrinsic.quantized": [[2073, "module-torch.nn.intrinsic.quantized"]], "torch.nn.intrinsic.quantized.dynamic": [[2073, "module-torch.nn.intrinsic.quantized.dynamic"]], "torch.nn.intrinsic.quantized.dynamic.modules": [[2073, "module-torch.nn.intrinsic.quantized.dynamic.modules"]], "torch.nn.intrinsic.quantized.modules": [[2073, "module-torch.nn.intrinsic.quantized.modules"]], "torch.nn.qat": [[2073, "module-torch.nn.qat"]], "torch.nn.qat.dynamic": [[2073, "module-torch.nn.qat.dynamic"]], "torch.nn.qat.dynamic.modules": [[2073, "module-torch.nn.qat.dynamic.modules"]], "torch.nn.qat.modules": [[2073, "module-torch.nn.qat.modules"]], "torch.nn.quantizable": [[2073, "module-torch.nn.quantizable"]], "torch.nn.quantizable.modules": [[2073, "module-torch.nn.quantizable.modules"]], "torch.nn.quantized": [[2073, "module-torch.nn.quantized"]], "torch.nn.quantized.dynamic": [[2073, "module-torch.nn.quantized.dynamic"]], "torch.nn.quantized.dynamic.modules": [[2073, "module-torch.nn.quantized.dynamic.modules"]], 
"torch.nn.quantized.modules": [[2073, "module-torch.nn.quantized.modules"]], "torch.quantization": [[2073, "module-torch.quantization"]], "torch.quantization.fx": [[2073, "module-torch.quantization.fx"]], "fork_rng() (in module torch.random)": [[2074, "torch.random.fork_rng"]], "get_rng_state() (in module torch.random)": [[2074, "torch.random.get_rng_state"]], "initial_seed() (in module torch.random)": [[2074, "torch.random.initial_seed"]], "manual_seed() (in module torch.random)": [[2074, "torch.random.manual_seed"]], "seed() (in module torch.random)": [[2074, "torch.random.seed"]], "set_rng_state() (in module torch.random)": [[2074, "torch.random.set_rng_state"]], "torch.random": [[2074, "module-torch.random"]], "backendtype (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.BackendType"]], "pyrref (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.PyRRef"]], "remotemodule (class in torch.distributed.nn.api.remote_module)": [[2075, "torch.distributed.nn.api.remote_module.RemoteModule"]], "rpcbackendoptions (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.RpcBackendOptions"]], "tensorpiperpcbackendoptions (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions"]], "workerinfo (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.WorkerInfo"]], "async_execution() (in module torch.distributed.rpc.functions)": [[2075, "torch.distributed.rpc.functions.async_execution"]], "backward() (in module torch.distributed.autograd)": [[2075, "torch.distributed.autograd.backward"]], "backward() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.backward"]], "confirmed_by_owner() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.confirmed_by_owner"]], "context (class in torch.distributed.autograd)": [[2075, "torch.distributed.autograd.context"]], "device_maps (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.device_maps"]], "devices (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.devices"]], "get_gradients() (in module torch.distributed.autograd)": [[2075, "torch.distributed.autograd.get_gradients"]], "get_module_rref() (torch.distributed.nn.api.remote_module.remotemodule method)": [[2075, "torch.distributed.nn.api.remote_module.RemoteModule.get_module_rref"]], "get_worker_info() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.get_worker_info"]], "id (torch.distributed.rpc.workerinfo property)": [[2075, "torch.distributed.rpc.WorkerInfo.id"]], "init_method (torch.distributed.rpc.rpcbackendoptions property)": [[2075, "torch.distributed.rpc.RpcBackendOptions.init_method"]], "init_method (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.init_method"]], "init_rpc() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.init_rpc"]], "is_owner() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.is_owner"]], "local_value() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.local_value"]], "name (torch.distributed.rpc.workerinfo property)": [[2075, "torch.distributed.rpc.WorkerInfo.name"]], "num_worker_threads (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.num_worker_threads"]], "owner() 
(torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.owner"]], "owner_name() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.owner_name"]], "remote() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.remote"]], "remote() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.remote"]], "remote_parameters() (torch.distributed.nn.api.remote_module.remotemodule method)": [[2075, "torch.distributed.nn.api.remote_module.RemoteModule.remote_parameters"]], "rpc_async() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.rpc_async"]], "rpc_async() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.rpc_async"]], "rpc_sync() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.rpc_sync"]], "rpc_sync() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.rpc_sync"]], "rpc_timeout (torch.distributed.rpc.rpcbackendoptions property)": [[2075, "torch.distributed.rpc.RpcBackendOptions.rpc_timeout"]], "rpc_timeout (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.rpc_timeout"]], "set_device_map() (torch.distributed.rpc.tensorpiperpcbackendoptions method)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.set_device_map"]], "set_devices() (torch.distributed.rpc.tensorpiperpcbackendoptions method)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.set_devices"]], "shutdown() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.shutdown"]], "to_here() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.to_here"]], "torch.distributed.autograd": [[2075, "module-torch.distributed.autograd"]], "torch.distributed.rpc": [[2075, "module-torch.distributed.rpc"]], "torch.signal": [[2078, "module-torch.signal"]], "torch.signal.windows": [[2078, "module-torch.signal.windows"]], "size (class in torch)": [[2079, "torch.Size"]], "count() (torch.size method)": [[2079, "torch.Size.count"]], "index() (torch.size method)": [[2079, "torch.Size.index"]], "numel() (torch.size method)": [[2079, "torch.Size.numel"]], "torch.sparse": [[2080, "module-torch.sparse"]], "airy_ai() (in module torch.special)": [[2081, "torch.special.airy_ai"]], "bessel_j0() (in module torch.special)": [[2081, "torch.special.bessel_j0"]], "bessel_j1() (in module torch.special)": [[2081, "torch.special.bessel_j1"]], "digamma() (in module torch.special)": [[2081, "torch.special.digamma"]], "entr() (in module torch.special)": [[2081, "torch.special.entr"]], "erf() (in module torch.special)": [[2081, "torch.special.erf"]], "erfc() (in module torch.special)": [[2081, "torch.special.erfc"]], "erfcx() (in module torch.special)": [[2081, "torch.special.erfcx"]], "erfinv() (in module torch.special)": [[2081, "torch.special.erfinv"]], "exp2() (in module torch.special)": [[2081, "torch.special.exp2"]], "expit() (in module torch.special)": [[2081, "torch.special.expit"]], "expm1() (in module torch.special)": [[2081, "torch.special.expm1"]], "gammainc() (in module torch.special)": [[2081, "torch.special.gammainc"]], "gammaincc() (in module torch.special)": [[2081, "torch.special.gammaincc"]], "gammaln() (in module torch.special)": [[2081, "torch.special.gammaln"]], "i0() (in module torch.special)": [[2081, "torch.special.i0"]], "i0e() (in module torch.special)": [[2081, "torch.special.i0e"]], "i1() (in module torch.special)": [[2081, "torch.special.i1"]], "i1e() (in 
module torch.special)": [[2081, "torch.special.i1e"]], "log1p() (in module torch.special)": [[2081, "torch.special.log1p"]], "log_ndtr() (in module torch.special)": [[2081, "torch.special.log_ndtr"]], "log_softmax() (in module torch.special)": [[2081, "torch.special.log_softmax"]], "logit() (in module torch.special)": [[2081, "torch.special.logit"]], "logsumexp() (in module torch.special)": [[2081, "torch.special.logsumexp"]], "multigammaln() (in module torch.special)": [[2081, "torch.special.multigammaln"]], "ndtr() (in module torch.special)": [[2081, "torch.special.ndtr"]], "ndtri() (in module torch.special)": [[2081, "torch.special.ndtri"]], "polygamma() (in module torch.special)": [[2081, "torch.special.polygamma"]], "psi() (in module torch.special)": [[2081, "torch.special.psi"]], "round() (in module torch.special)": [[2081, "torch.special.round"]], "scaled_modified_bessel_k0() (in module torch.special)": [[2081, "torch.special.scaled_modified_bessel_k0"]], "scaled_modified_bessel_k1() (in module torch.special)": [[2081, "torch.special.scaled_modified_bessel_k1"]], "sinc() (in module torch.special)": [[2081, "torch.special.sinc"]], "softmax() (in module torch.special)": [[2081, "torch.special.softmax"]], "spherical_bessel_j0() (in module torch.special)": [[2081, "torch.special.spherical_bessel_j0"]], "torch.special": [[2081, "module-torch.special"]], "xlog1py() (in module torch.special)": [[2081, "torch.special.xlog1py"]], "xlogy() (in module torch.special)": [[2081, "torch.special.xlogy"]], "zeta() (in module torch.special)": [[2081, "torch.special.zeta"]], "bfloat16storage (class in torch)": [[2082, "torch.BFloat16Storage"]], "boolstorage (class in torch)": [[2082, "torch.BoolStorage"]], "bytestorage (class in torch)": [[2082, "torch.ByteStorage"]], "charstorage (class in torch)": [[2082, "torch.CharStorage"]], "complexdoublestorage (class in torch)": [[2082, "torch.ComplexDoubleStorage"]], "complexfloatstorage (class in torch)": [[2082, "torch.ComplexFloatStorage"]], "doublestorage (class in torch)": [[2082, "torch.DoubleStorage"]], "floatstorage (class in torch)": [[2082, "torch.FloatStorage"]], "halfstorage (class in torch)": [[2082, "torch.HalfStorage"]], "intstorage (class in torch)": [[2082, "torch.IntStorage"]], "longstorage (class in torch)": [[2082, "torch.LongStorage"]], "qint32storage (class in torch)": [[2082, "torch.QInt32Storage"]], "qint8storage (class in torch)": [[2082, "torch.QInt8Storage"]], "quint2x4storage (class in torch)": [[2082, "torch.QUInt2x4Storage"]], "quint4x2storage (class in torch)": [[2082, "torch.QUInt4x2Storage"]], "quint8storage (class in torch)": [[2082, "torch.QUInt8Storage"]], "shortstorage (class in torch)": [[2082, "torch.ShortStorage"]], "typedstorage (class in torch)": [[2082, "torch.TypedStorage"]], "untypedstorage (class in torch)": [[2082, "torch.UntypedStorage"]], "bfloat16() (torch.typedstorage method)": [[2082, "torch.TypedStorage.bfloat16"]], "bfloat16() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.bfloat16"]], "bool() (torch.typedstorage method)": [[2082, "torch.TypedStorage.bool"]], "bool() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.bool"]], "byte() (torch.typedstorage method)": [[2082, "torch.TypedStorage.byte"]], "byte() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.byte"]], "byteswap() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.byteswap"]], "char() (torch.typedstorage method)": [[2082, "torch.TypedStorage.char"]], "char() (torch.untypedstorage method)": 
[[2082, "torch.UntypedStorage.char"]], "clone() (torch.typedstorage method)": [[2082, "torch.TypedStorage.clone"]], "clone() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.clone"]], "complex_double() (torch.typedstorage method)": [[2082, "torch.TypedStorage.complex_double"]], "complex_double() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.complex_double"]], "complex_float() (torch.typedstorage method)": [[2082, "torch.TypedStorage.complex_float"]], "complex_float() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.complex_float"]], "copy_() (torch.typedstorage method)": [[2082, "torch.TypedStorage.copy_"]], "copy_() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.copy_"]], "cpu() (torch.typedstorage method)": [[2082, "torch.TypedStorage.cpu"]], "cpu() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.cpu"]], "cuda() (torch.typedstorage method)": [[2082, "torch.TypedStorage.cuda"]], "cuda() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.cuda"]], "data_ptr() (torch.typedstorage method)": [[2082, "torch.TypedStorage.data_ptr"]], "data_ptr() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.data_ptr"]], "device (torch.typedstorage property)": [[2082, "torch.TypedStorage.device"]], "device (torch.untypedstorage attribute)": [[2082, "torch.UntypedStorage.device"]], "double() (torch.typedstorage method)": [[2082, "torch.TypedStorage.double"]], "double() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.double"]], "dtype (torch.bfloat16storage attribute)": [[2082, "torch.BFloat16Storage.dtype"]], "dtype (torch.boolstorage attribute)": [[2082, "torch.BoolStorage.dtype"]], "dtype (torch.bytestorage attribute)": [[2082, "torch.ByteStorage.dtype"]], "dtype (torch.charstorage attribute)": [[2082, "torch.CharStorage.dtype"]], "dtype (torch.complexdoublestorage attribute)": [[2082, "torch.ComplexDoubleStorage.dtype"]], "dtype (torch.complexfloatstorage attribute)": [[2082, "torch.ComplexFloatStorage.dtype"]], "dtype (torch.doublestorage attribute)": [[2082, "torch.DoubleStorage.dtype"]], "dtype (torch.floatstorage attribute)": [[2082, "torch.FloatStorage.dtype"]], "dtype (torch.halfstorage attribute)": [[2082, "torch.HalfStorage.dtype"]], "dtype (torch.intstorage attribute)": [[2082, "torch.IntStorage.dtype"]], "dtype (torch.longstorage attribute)": [[2082, "torch.LongStorage.dtype"]], "dtype (torch.qint32storage attribute)": [[2082, "torch.QInt32Storage.dtype"]], "dtype (torch.qint8storage attribute)": [[2082, "torch.QInt8Storage.dtype"]], "dtype (torch.quint2x4storage attribute)": [[2082, "torch.QUInt2x4Storage.dtype"]], "dtype (torch.quint4x2storage attribute)": [[2082, "torch.QUInt4x2Storage.dtype"]], "dtype (torch.quint8storage attribute)": [[2082, "torch.QUInt8Storage.dtype"]], "dtype (torch.shortstorage attribute)": [[2082, "torch.ShortStorage.dtype"]], "dtype (torch.typedstorage attribute)": [[2082, "torch.TypedStorage.dtype"]], "element_size() (torch.typedstorage method)": [[2082, "torch.TypedStorage.element_size"]], "element_size() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.element_size"]], "filename (torch.typedstorage property)": [[2082, "torch.TypedStorage.filename"]], "filename (torch.untypedstorage property)": [[2082, "torch.UntypedStorage.filename"]], "fill_() (torch.typedstorage method)": [[2082, "torch.TypedStorage.fill_"]], "fill_() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.fill_"]], "float() (torch.typedstorage method)": [[2082, 
"torch.TypedStorage.float"]], "float() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float"]], "float8_e4m3fn() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float8_e4m3fn"]], "float8_e4m3fn() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float8_e4m3fn"]], "float8_e4m3fnuz() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float8_e4m3fnuz"]], "float8_e4m3fnuz() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float8_e4m3fnuz"]], "float8_e5m2() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float8_e5m2"]], "float8_e5m2() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float8_e5m2"]], "float8_e5m2fnuz() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float8_e5m2fnuz"]], "float8_e5m2fnuz() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float8_e5m2fnuz"]], "from_buffer() (torch.typedstorage class method)": [[2082, "torch.TypedStorage.from_buffer"]], "from_buffer() (torch.untypedstorage static method)": [[2082, "torch.UntypedStorage.from_buffer"]], "from_file() (torch.typedstorage class method)": [[2082, "torch.TypedStorage.from_file"]], "from_file() (torch.untypedstorage static method)": [[2082, "torch.UntypedStorage.from_file"]], "get_device() (torch.typedstorage method)": [[2082, "torch.TypedStorage.get_device"]], "get_device() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.get_device"]], "half() (torch.typedstorage method)": [[2082, "torch.TypedStorage.half"]], "half() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.half"]], "hpu() (torch.typedstorage method)": [[2082, "torch.TypedStorage.hpu"]], "hpu() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.hpu"]], "int() (torch.typedstorage method)": [[2082, "torch.TypedStorage.int"]], "int() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.int"]], "is_cuda (torch.typedstorage property)": [[2082, "torch.TypedStorage.is_cuda"]], "is_cuda (torch.untypedstorage property)": [[2082, "torch.UntypedStorage.is_cuda"]], "is_hpu (torch.typedstorage property)": [[2082, "torch.TypedStorage.is_hpu"]], "is_hpu (torch.untypedstorage property)": [[2082, "torch.UntypedStorage.is_hpu"]], "is_pinned() (torch.typedstorage method)": [[2082, "torch.TypedStorage.is_pinned"]], "is_pinned() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.is_pinned"]], "is_shared() (torch.typedstorage method)": [[2082, "torch.TypedStorage.is_shared"]], "is_shared() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.is_shared"]], "is_sparse (torch.typedstorage attribute)": [[2082, "torch.TypedStorage.is_sparse"]], "is_sparse (torch.untypedstorage attribute)": [[2082, "torch.UntypedStorage.is_sparse"]], "is_sparse_csr (torch.untypedstorage attribute)": [[2082, "torch.UntypedStorage.is_sparse_csr"]], "long() (torch.typedstorage method)": [[2082, "torch.TypedStorage.long"]], "long() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.long"]], "mps() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.mps"]], "nbytes() (torch.typedstorage method)": [[2082, "torch.TypedStorage.nbytes"]], "nbytes() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.nbytes"]], "new() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.new"]], "pickle_storage_type() (torch.typedstorage method)": [[2082, "torch.TypedStorage.pickle_storage_type"]], "pin_memory() (torch.typedstorage method)": [[2082, "torch.TypedStorage.pin_memory"]], "pin_memory() (torch.untypedstorage method)": 
[[2082, "torch.UntypedStorage.pin_memory"]], "resizable() (torch.typedstorage method)": [[2082, "torch.TypedStorage.resizable"]], "resizable() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.resizable"]], "resize_() (torch.typedstorage method)": [[2082, "torch.TypedStorage.resize_"]], "resize_() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.resize_"]], "share_memory_() (torch.typedstorage method)": [[2082, "torch.TypedStorage.share_memory_"]], "share_memory_() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.share_memory_"]], "short() (torch.typedstorage method)": [[2082, "torch.TypedStorage.short"]], "short() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.short"]], "size() (torch.typedstorage method)": [[2082, "torch.TypedStorage.size"]], "size() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.size"]], "to() (torch.typedstorage method)": [[2082, "torch.TypedStorage.to"]], "to() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.to"]], "tolist() (torch.typedstorage method)": [[2082, "torch.TypedStorage.tolist"]], "tolist() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.tolist"]], "type() (torch.typedstorage method)": [[2082, "torch.TypedStorage.type"]], "type() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.type"]], "untyped() (torch.typedstorage method)": [[2082, "torch.TypedStorage.untyped"]], "untyped() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.untyped"]], "device (class in torch)": [[2083, "torch.device"]], "dtype (class in torch)": [[2083, "torch.dtype"]], "layout (class in torch)": [[2083, "torch.layout"]], "memory_format (class in torch)": [[2083, "torch.memory_format"]], "summarywriter (class in torch.utils.tensorboard.writer)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter"]], "__init__() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.__init__"]], "add_audio() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_audio"]], "add_custom_scalars() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_custom_scalars"]], "add_embedding() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_embedding"]], "add_figure() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_figure"]], "add_graph() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_graph"]], "add_histogram() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_histogram"]], "add_hparams() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_hparams"]], "add_image() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_image"]], "add_images() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_images"]], "add_mesh() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_mesh"]], "add_pr_curve() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_pr_curve"]], 
"add_scalar() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_scalar"]], "add_scalars() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_scalars"]], "add_text() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_text"]], "add_video() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_video"]], "close() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.close"]], "flush() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.flush"]], "torch.utils.tensorboard": [[2085, "module-torch.utils.tensorboard"]], "h (torch.tensor attribute)": [[2086, "torch.Tensor.H"]], "t (torch.tensor attribute)": [[2086, "torch.Tensor.T"]], "tensor (class in torch)": [[2086, "torch.Tensor"]], "__init__() (torch.tensor method)": [[2086, "torch.Tensor.__init__"]], "mh (torch.tensor attribute)": [[2086, "torch.Tensor.mH"]], "mt (torch.tensor attribute)": [[2086, "torch.Tensor.mT"]], "assert_allclose() (in module torch.testing)": [[2087, "torch.testing.assert_allclose"]], "assert_close() (in module torch.testing)": [[2087, "torch.testing.assert_close"]], "make_tensor() (in module torch.testing)": [[2087, "torch.testing.make_tensor"]], "torch.testing": [[2087, "module-torch.testing"]], "symbool (class in torch)": [[2089, "torch.SymBool"]], "symfloat (class in torch)": [[2089, "torch.SymFloat"]], "symint (class in torch)": [[2089, "torch.SymInt"]], "tag (class in torch)": [[2089, "torch.Tag"]], "default_generator (torch.torch attribute)": [[2089, "torch.torch.default_generator"]], "is_integer() (torch.symfloat method)": [[2089, "torch.SymFloat.is_integer"]], "name (torch.tag property)": [[2089, "torch.Tag.name"]], "torch": [[2089, "module-torch"]], "torch.contrib": [[2089, "module-torch.contrib"]], "torch.functional": [[2089, "module-torch.functional"]], "torch.quasirandom": [[2089, "module-torch.quasirandom"]], "torch.return_types": [[2089, "module-torch.return_types"]], "torch.serialization": [[2089, "module-torch.serialization"]], "torch.signal.windows.windows": [[2089, "module-torch.signal.windows.windows"]], "torch.sparse.semi_structured": [[2089, "module-torch.sparse.semi_structured"]], "torch.storage": [[2089, "module-torch.storage"]], "torch.torch_version": [[2089, "module-torch.torch_version"]], "torch.types": [[2089, "module-torch.types"]], "torch.utils.backcompat": [[2089, "module-torch.utils.backcompat"]], "torch.utils.hipify": [[2089, "module-torch.utils.hipify"]], "torch.utils.model_dump": [[2089, "module-torch.utils.model_dump"]], "torch.utils.viz": [[2089, "module-torch.utils.viz"]], "torch.version": [[2089, "module-torch.version"]], "logger (class in torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.Logger"]], "outputlogger (class in torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.OutputLogger"]], "shadow (class in torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.Shadow"]], "shadowlogger (class in torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.ShadowLogger"]], "add() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.add"]], "add_relu() (torch.ao.ns._numeric_suite.shadow method)": [[2090, 
"torch.ao.ns._numeric_suite.Shadow.add_relu"]], "add_scalar() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.add_scalar"]], "cat() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.cat"]], "compare_model_outputs() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.compare_model_outputs"]], "compare_model_stub() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.compare_model_stub"]], "compare_weights() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.compare_weights"]], "forward() (torch.ao.ns._numeric_suite.logger method)": [[2090, "torch.ao.ns._numeric_suite.Logger.forward"]], "forward() (torch.ao.ns._numeric_suite.outputlogger method)": [[2090, "torch.ao.ns._numeric_suite.OutputLogger.forward"]], "forward() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.forward"]], "forward() (torch.ao.ns._numeric_suite.shadowlogger method)": [[2090, "torch.ao.ns._numeric_suite.ShadowLogger.forward"]], "get_logger_dict() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.get_logger_dict"]], "get_matching_activations() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.get_matching_activations"]], "mul() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.mul"]], "mul_scalar() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.mul_scalar"]], "prepare_model_outputs() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.prepare_model_outputs"]], "prepare_model_with_stubs() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.prepare_model_with_stubs"]], "torch.ao.ns._numeric_suite": [[2090, "module-torch.ao.ns._numeric_suite"]], "nstracer (class in torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.NSTracer"]], "outputcomparisonlogger (class in torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger"]], "outputlogger (class in torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.OutputLogger"]], "add_loggers() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.add_loggers"]], "add_shadow_loggers() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.add_shadow_loggers"]], "compute_cosine_similarity() (in module torch.ao.ns.fx.utils)": [[2091, "torch.ao.ns.fx.utils.compute_cosine_similarity"]], "compute_normalized_l2_error() (in module torch.ao.ns.fx.utils)": [[2091, "torch.ao.ns.fx.utils.compute_normalized_l2_error"]], "compute_sqnr() (in module torch.ao.ns.fx.utils)": [[2091, "torch.ao.ns.fx.utils.compute_sqnr"]], "convert_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.convert_n_shadows_model"]], "extend_logger_results_with_comparison() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extend_logger_results_with_comparison"]], "extract_logger_info() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extract_logger_info"]], "extract_results_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extract_results_n_shadows_model"]], "extract_shadow_logger_info() (in module torch.ao.ns._numeric_suite_fx)": [[2091, 
"torch.ao.ns._numeric_suite_fx.extract_shadow_logger_info"]], "extract_weights() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extract_weights"]], "forward() (torch.ao.ns._numeric_suite_fx.outputcomparisonlogger method)": [[2091, "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger.forward"]], "forward() (torch.ao.ns._numeric_suite_fx.outputlogger method)": [[2091, "torch.ao.ns._numeric_suite_fx.OutputLogger.forward"]], "is_leaf_module() (torch.ao.ns._numeric_suite_fx.nstracer method)": [[2091, "torch.ao.ns._numeric_suite_fx.NSTracer.is_leaf_module"]], "loggers_set_enabled() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.loggers_set_enabled"]], "loggers_set_save_activations() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.loggers_set_save_activations"]], "prepare_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.prepare_n_shadows_model"]], "print_comparisons_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.print_comparisons_n_shadows_model"]], "torch.ao.ns._numeric_suite_fx": [[2091, "module-torch.ao.ns._numeric_suite_fx"]], "torch.compiler": [[2094, "module-torch.compiler"]], "get_ignored_functions() (in module torch.overrides)": [[2112, "torch.overrides.get_ignored_functions"]], "get_overridable_functions() (in module torch.overrides)": [[2112, "torch.overrides.get_overridable_functions"]], "get_testing_overrides() (in module torch.overrides)": [[2112, "torch.overrides.get_testing_overrides"]], "handle_torch_function() (in module torch.overrides)": [[2112, "torch.overrides.handle_torch_function"]], "has_torch_function() (in module torch.overrides)": [[2112, "torch.overrides.has_torch_function"]], "is_tensor_like() (in module torch.overrides)": [[2112, "torch.overrides.is_tensor_like"]], "is_tensor_method_or_property() (in module torch.overrides)": [[2112, "torch.overrides.is_tensor_method_or_property"]], "resolve_name() (in module torch.overrides)": [[2112, "torch.overrides.resolve_name"]], "torch.overrides": [[2112, "module-torch.overrides"]], "wrap_torch_function() (in module torch.overrides)": [[2112, "torch.overrides.wrap_torch_function"]], "_dump_snapshot() (in module torch.cuda.memory)": [[2113, "torch.cuda.memory._dump_snapshot"]], "_record_memory_history() (in module torch.cuda.memory)": [[2113, "torch.cuda.memory._record_memory_history"]], "_snapshot() (in module torch.cuda.memory)": [[2113, "torch.cuda.memory._snapshot"]], "torch.finfo (class in torch)": [[2116, "torch.torch.finfo"]], "torch.iinfo (class in torch)": [[2116, "torch.torch.iinfo"]], "torch.utils": [[2117, "module-torch.utils"]], "torch.utils.backend_registration": [[2117, "module-torch.utils.backend_registration"]], "torch.utils.benchmark.examples.blas_compare_setup": [[2117, "module-torch.utils.benchmark.examples.blas_compare_setup"]], "torch.utils.benchmark.examples.compare": [[2117, "module-torch.utils.benchmark.examples.compare"]], "torch.utils.benchmark.examples.fuzzer": [[2117, "module-torch.utils.benchmark.examples.fuzzer"]], "torch.utils.benchmark.examples.op_benchmark": [[2117, "module-torch.utils.benchmark.examples.op_benchmark"]], "torch.utils.benchmark.examples.simple_timeit": [[2117, "module-torch.utils.benchmark.examples.simple_timeit"]], "torch.utils.benchmark.examples.spectral_ops_fuzz_test": [[2117, "module-torch.utils.benchmark.examples.spectral_ops_fuzz_test"]], 
"torch.utils.benchmark.op_fuzzers.binary": [[2117, "module-torch.utils.benchmark.op_fuzzers.binary"]], "torch.utils.benchmark.op_fuzzers.sparse_binary": [[2117, "module-torch.utils.benchmark.op_fuzzers.sparse_binary"]], "torch.utils.benchmark.op_fuzzers.sparse_unary": [[2117, "module-torch.utils.benchmark.op_fuzzers.sparse_unary"]], "torch.utils.benchmark.op_fuzzers.spectral": [[2117, "module-torch.utils.benchmark.op_fuzzers.spectral"]], "torch.utils.benchmark.op_fuzzers.unary": [[2117, "module-torch.utils.benchmark.op_fuzzers.unary"]], "torch.utils.benchmark.utils.common": [[2117, "module-torch.utils.benchmark.utils.common"]], "torch.utils.benchmark.utils.compare": [[2117, "module-torch.utils.benchmark.utils.compare"]], "torch.utils.benchmark.utils.compile": [[2117, "module-torch.utils.benchmark.utils.compile"]], "torch.utils.benchmark.utils.cpp_jit": [[2117, "module-torch.utils.benchmark.utils.cpp_jit"]], "torch.utils.benchmark.utils.fuzzer": [[2117, "module-torch.utils.benchmark.utils.fuzzer"]], "torch.utils.benchmark.utils.sparse_fuzzer": [[2117, "module-torch.utils.benchmark.utils.sparse_fuzzer"]], "torch.utils.benchmark.utils.timer": [[2117, "module-torch.utils.benchmark.utils.timer"]], "torch.utils.benchmark.utils.valgrind_wrapper.timer_interface": [[2117, "module-torch.utils.benchmark.utils.valgrind_wrapper.timer_interface"]], "torch.utils.bundled_inputs": [[2117, "module-torch.utils.bundled_inputs"]], "torch.utils.checkpoint": [[2117, "module-torch.utils.checkpoint"]], "torch.utils.collect_env": [[2117, "module-torch.utils.collect_env"]], "torch.utils.cpp_backtrace": [[2117, "module-torch.utils.cpp_backtrace"]], "torch.utils.cpp_extension": [[2117, "module-torch.utils.cpp_extension"]], "torch.utils.data.backward_compatibility": [[2117, "module-torch.utils.data.backward_compatibility"]], "torch.utils.data.dataloader": [[2117, "module-torch.utils.data.dataloader"]], "torch.utils.data.datapipes.dataframe.dataframe_wrapper": [[2117, "module-torch.utils.data.datapipes.dataframe.dataframe_wrapper"]], "torch.utils.data.datapipes.dataframe.dataframes": [[2117, "module-torch.utils.data.datapipes.dataframe.dataframes"]], "torch.utils.data.datapipes.dataframe.datapipes": [[2117, "module-torch.utils.data.datapipes.dataframe.datapipes"]], "torch.utils.data.datapipes.dataframe.structures": [[2117, "module-torch.utils.data.datapipes.dataframe.structures"]], "torch.utils.data.datapipes.datapipe": [[2117, "module-torch.utils.data.datapipes.datapipe"]], "torch.utils.data.datapipes.gen_pyi": [[2117, "module-torch.utils.data.datapipes.gen_pyi"]], "torch.utils.data.datapipes.iter.callable": [[2117, "module-torch.utils.data.datapipes.iter.callable"]], "torch.utils.data.datapipes.iter.combinatorics": [[2117, "module-torch.utils.data.datapipes.iter.combinatorics"]], "torch.utils.data.datapipes.iter.combining": [[2117, "module-torch.utils.data.datapipes.iter.combining"]], "torch.utils.data.datapipes.iter.filelister": [[2117, "module-torch.utils.data.datapipes.iter.filelister"]], "torch.utils.data.datapipes.iter.fileopener": [[2117, "module-torch.utils.data.datapipes.iter.fileopener"]], "torch.utils.data.datapipes.iter.grouping": [[2117, "module-torch.utils.data.datapipes.iter.grouping"]], "torch.utils.data.datapipes.iter.routeddecoder": [[2117, "module-torch.utils.data.datapipes.iter.routeddecoder"]], "torch.utils.data.datapipes.iter.selecting": [[2117, "module-torch.utils.data.datapipes.iter.selecting"]], "torch.utils.data.datapipes.iter.sharding": [[2117, 
"module-torch.utils.data.datapipes.iter.sharding"]], "torch.utils.data.datapipes.iter.streamreader": [[2117, "module-torch.utils.data.datapipes.iter.streamreader"]], "torch.utils.data.datapipes.iter.utils": [[2117, "module-torch.utils.data.datapipes.iter.utils"]], "torch.utils.data.datapipes.map.callable": [[2117, "module-torch.utils.data.datapipes.map.callable"]], "torch.utils.data.datapipes.map.combinatorics": [[2117, "module-torch.utils.data.datapipes.map.combinatorics"]], "torch.utils.data.datapipes.map.combining": [[2117, "module-torch.utils.data.datapipes.map.combining"]], "torch.utils.data.datapipes.map.grouping": [[2117, "module-torch.utils.data.datapipes.map.grouping"]], "torch.utils.data.datapipes.map.utils": [[2117, "module-torch.utils.data.datapipes.map.utils"]], "torch.utils.data.datapipes.utils.common": [[2117, "module-torch.utils.data.datapipes.utils.common"]], "torch.utils.data.datapipes.utils.decoder": [[2117, "module-torch.utils.data.datapipes.utils.decoder"]], "torch.utils.data.datapipes.utils.snapshot": [[2117, "module-torch.utils.data.datapipes.utils.snapshot"]], "torch.utils.data.dataset": [[2117, "module-torch.utils.data.dataset"]], "torch.utils.data.distributed": [[2117, "module-torch.utils.data.distributed"]], "torch.utils.data.graph": [[2117, "module-torch.utils.data.graph"]], "torch.utils.data.graph_settings": [[2117, "module-torch.utils.data.graph_settings"]], "torch.utils.data.sampler": [[2117, "module-torch.utils.data.sampler"]], "torch.utils.dlpack": [[2117, "module-torch.utils.dlpack"]], "torch.utils.file_baton": [[2117, "module-torch.utils.file_baton"]], "torch.utils.flop_counter": [[2117, "module-torch.utils.flop_counter"]], "torch.utils.hipify.constants": [[2117, "module-torch.utils.hipify.constants"]], "torch.utils.hipify.cuda_to_hip_mappings": [[2117, "module-torch.utils.hipify.cuda_to_hip_mappings"]], "torch.utils.hipify.hipify_python": [[2117, "module-torch.utils.hipify.hipify_python"]], "torch.utils.hipify.version": [[2117, "module-torch.utils.hipify.version"]], "torch.utils.hooks": [[2117, "module-torch.utils.hooks"]], "torch.utils.jit.log_extract": [[2117, "module-torch.utils.jit.log_extract"]], "torch.utils.mkldnn": [[2117, "module-torch.utils.mkldnn"]], "torch.utils.mobile_optimizer": [[2117, "module-torch.utils.mobile_optimizer"]], "torch.utils.show_pickle": [[2117, "module-torch.utils.show_pickle"]], "torch.utils.tensorboard.summary": [[2117, "module-torch.utils.tensorboard.summary"]], "torch.utils.tensorboard.writer": [[2117, "module-torch.utils.tensorboard.writer"]], "torch.utils.throughput_benchmark": [[2117, "module-torch.utils.throughput_benchmark"]], "torch.utils.weak": [[2117, "module-torch.utils.weak"]], "torch.xpu": [[2118, "module-torch.xpu"]], "torch.xpu.random": [[2118, "module-torch.xpu.random"]], "torch.xpu.streams": [[2118, "module-torch.xpu.streams"]]}}) \ No newline at end of file +Search.setIndex({"docnames": ["amp", "autograd", "backends", "benchmark_utils", "bottleneck", "checkpoint", "community/build_ci_governance", "community/contribution_guide", "community/design", "community/governance", "community/persons_of_interest", "complex_numbers", "cond", "config_mod", "cpp_extension", "cpp_index", "cpu", "cuda", "cuda._sanitizer", "cuda.tunable", "cuda_environment_variables", "cudnn_persistent_rnn", "cudnn_rnn_determinism", "data", "ddp_comm_hooks", "debugging_environment_variables", "deploy", "deterministic", "distributed", "distributed.algorithms.join", "distributed.checkpoint", "distributed.elastic", "distributed.optim", 
"distributed.pipelining", "distributed.tensor.parallel", "distributions", "dlpack", "elastic/agent", "elastic/control_plane", "elastic/customization", "elastic/errors", "elastic/events", "elastic/examples", "elastic/kubernetes", "elastic/metrics", "elastic/multiprocessing", "elastic/quickstart", "elastic/rendezvous", "elastic/run", "elastic/subprocess_handler", "elastic/timer", "elastic/train_script", "export", "export.ir_spec", "fft", "fsdp", "func", "func.api", "func.batch_norm", "func.migrating", "func.ux_limitations", "func.whirlwind_tour", "future_mod", "futures", "fx", "fx.experimental", "generated/exportdb/index", "generated/exportdb/python.assert", "generated/exportdb/python.builtin", "generated/exportdb/python.closure", "generated/exportdb/python.context-manager", "generated/exportdb/python.control-flow", "generated/exportdb/python.data-structure", "generated/exportdb/python.object-model", "generated/exportdb/torch.cond", "generated/exportdb/torch.dynamic-shape", "generated/exportdb/torch.dynamic-value", "generated/exportdb/torch.escape-hatch", "generated/exportdb/torch.map", "generated/exportdb/torch.mutation", "generated/exportdb/torch.operator", "generated/onnx_dynamo_diagnostics_rules/FXE0007:fx-graph-to-onnx", "generated/onnx_dynamo_diagnostics_rules/FXE0008:fx-node-to-onnx", "generated/onnx_dynamo_diagnostics_rules/FXE0010:fx-pass", "generated/onnx_dynamo_diagnostics_rules/FXE0011:no-symbolic-function-for-call-function", "generated/onnx_dynamo_diagnostics_rules/FXE0012:unsupported-fx-node-analysis", "generated/onnx_dynamo_diagnostics_rules/FXE0013:op-level-debugging", "generated/onnx_dynamo_diagnostics_rules/FXE0014:find-opschema-matched-symbolic-function", "generated/onnx_dynamo_diagnostics_rules/FXE0015:fx-node-insert-type-promotion", "generated/onnx_dynamo_diagnostics_rules/FXE0016:find-operator-overloads-in-onnx-registry", "generated/torch.Generator", "generated/torch.Tensor.abs", "generated/torch.Tensor.abs_", "generated/torch.Tensor.absolute", "generated/torch.Tensor.absolute_", "generated/torch.Tensor.acos", "generated/torch.Tensor.acos_", "generated/torch.Tensor.acosh", "generated/torch.Tensor.acosh_", "generated/torch.Tensor.add", "generated/torch.Tensor.add_", "generated/torch.Tensor.addbmm", "generated/torch.Tensor.addbmm_", "generated/torch.Tensor.addcdiv", "generated/torch.Tensor.addcdiv_", "generated/torch.Tensor.addcmul", "generated/torch.Tensor.addcmul_", "generated/torch.Tensor.addmm", "generated/torch.Tensor.addmm_", "generated/torch.Tensor.addmv", "generated/torch.Tensor.addmv_", "generated/torch.Tensor.addr", "generated/torch.Tensor.addr_", "generated/torch.Tensor.adjoint", "generated/torch.Tensor.all", "generated/torch.Tensor.allclose", "generated/torch.Tensor.amax", "generated/torch.Tensor.amin", "generated/torch.Tensor.aminmax", "generated/torch.Tensor.angle", "generated/torch.Tensor.any", "generated/torch.Tensor.apply_", "generated/torch.Tensor.arccos", "generated/torch.Tensor.arccos_", "generated/torch.Tensor.arccosh", "generated/torch.Tensor.arccosh_", "generated/torch.Tensor.arcsin", "generated/torch.Tensor.arcsin_", "generated/torch.Tensor.arcsinh", "generated/torch.Tensor.arcsinh_", "generated/torch.Tensor.arctan", "generated/torch.Tensor.arctan2", "generated/torch.Tensor.arctan2_", "generated/torch.Tensor.arctan_", "generated/torch.Tensor.arctanh", "generated/torch.Tensor.arctanh_", "generated/torch.Tensor.argmax", "generated/torch.Tensor.argmin", "generated/torch.Tensor.argsort", "generated/torch.Tensor.argwhere", 
"generated/torch.Tensor.as_strided", "generated/torch.Tensor.as_subclass", "generated/torch.Tensor.asin", "generated/torch.Tensor.asin_", "generated/torch.Tensor.asinh", "generated/torch.Tensor.asinh_", "generated/torch.Tensor.atan", "generated/torch.Tensor.atan2", "generated/torch.Tensor.atan2_", "generated/torch.Tensor.atan_", "generated/torch.Tensor.atanh", "generated/torch.Tensor.atanh_", "generated/torch.Tensor.backward", "generated/torch.Tensor.baddbmm", "generated/torch.Tensor.baddbmm_", "generated/torch.Tensor.bernoulli", "generated/torch.Tensor.bernoulli_", "generated/torch.Tensor.bfloat16", "generated/torch.Tensor.bincount", "generated/torch.Tensor.bitwise_and", "generated/torch.Tensor.bitwise_and_", "generated/torch.Tensor.bitwise_left_shift", "generated/torch.Tensor.bitwise_left_shift_", "generated/torch.Tensor.bitwise_not", "generated/torch.Tensor.bitwise_not_", "generated/torch.Tensor.bitwise_or", "generated/torch.Tensor.bitwise_or_", "generated/torch.Tensor.bitwise_right_shift", "generated/torch.Tensor.bitwise_right_shift_", "generated/torch.Tensor.bitwise_xor", "generated/torch.Tensor.bitwise_xor_", "generated/torch.Tensor.bmm", "generated/torch.Tensor.bool", "generated/torch.Tensor.broadcast_to", "generated/torch.Tensor.byte", "generated/torch.Tensor.cauchy_", "generated/torch.Tensor.ccol_indices", "generated/torch.Tensor.cdouble", "generated/torch.Tensor.ceil", "generated/torch.Tensor.ceil_", "generated/torch.Tensor.cfloat", "generated/torch.Tensor.chalf", "generated/torch.Tensor.char", "generated/torch.Tensor.cholesky", "generated/torch.Tensor.cholesky_inverse", "generated/torch.Tensor.cholesky_solve", "generated/torch.Tensor.chunk", "generated/torch.Tensor.clamp", "generated/torch.Tensor.clamp_", "generated/torch.Tensor.clip", "generated/torch.Tensor.clip_", "generated/torch.Tensor.clone", "generated/torch.Tensor.coalesce", "generated/torch.Tensor.col_indices", "generated/torch.Tensor.conj", "generated/torch.Tensor.conj_physical", "generated/torch.Tensor.conj_physical_", "generated/torch.Tensor.contiguous", "generated/torch.Tensor.copy_", "generated/torch.Tensor.copysign", "generated/torch.Tensor.copysign_", "generated/torch.Tensor.corrcoef", "generated/torch.Tensor.cos", "generated/torch.Tensor.cos_", "generated/torch.Tensor.cosh", "generated/torch.Tensor.cosh_", "generated/torch.Tensor.count_nonzero", "generated/torch.Tensor.cov", "generated/torch.Tensor.cpu", "generated/torch.Tensor.cross", "generated/torch.Tensor.crow_indices", "generated/torch.Tensor.cuda", "generated/torch.Tensor.cummax", "generated/torch.Tensor.cummin", "generated/torch.Tensor.cumprod", "generated/torch.Tensor.cumprod_", "generated/torch.Tensor.cumsum", "generated/torch.Tensor.cumsum_", "generated/torch.Tensor.data_ptr", "generated/torch.Tensor.deg2rad", "generated/torch.Tensor.dense_dim", "generated/torch.Tensor.dequantize", "generated/torch.Tensor.det", "generated/torch.Tensor.detach", "generated/torch.Tensor.detach_", "generated/torch.Tensor.device", "generated/torch.Tensor.diag", "generated/torch.Tensor.diag_embed", "generated/torch.Tensor.diagflat", "generated/torch.Tensor.diagonal", "generated/torch.Tensor.diagonal_scatter", "generated/torch.Tensor.diff", "generated/torch.Tensor.digamma", "generated/torch.Tensor.digamma_", "generated/torch.Tensor.dim", "generated/torch.Tensor.dim_order", "generated/torch.Tensor.dist", "generated/torch.Tensor.div", "generated/torch.Tensor.div_", "generated/torch.Tensor.divide", "generated/torch.Tensor.divide_", "generated/torch.Tensor.dot", 
"generated/torch.Tensor.double", "generated/torch.Tensor.dsplit", "generated/torch.Tensor.element_size", "generated/torch.Tensor.eq", "generated/torch.Tensor.eq_", "generated/torch.Tensor.equal", "generated/torch.Tensor.erf", "generated/torch.Tensor.erf_", "generated/torch.Tensor.erfc", "generated/torch.Tensor.erfc_", "generated/torch.Tensor.erfinv", "generated/torch.Tensor.erfinv_", "generated/torch.Tensor.exp", "generated/torch.Tensor.exp_", "generated/torch.Tensor.expand", "generated/torch.Tensor.expand_as", "generated/torch.Tensor.expm1", "generated/torch.Tensor.expm1_", "generated/torch.Tensor.exponential_", "generated/torch.Tensor.fill_", "generated/torch.Tensor.fill_diagonal_", "generated/torch.Tensor.fix", "generated/torch.Tensor.fix_", "generated/torch.Tensor.flatten", "generated/torch.Tensor.flip", "generated/torch.Tensor.fliplr", "generated/torch.Tensor.flipud", "generated/torch.Tensor.float", "generated/torch.Tensor.float_power", "generated/torch.Tensor.float_power_", "generated/torch.Tensor.floor", "generated/torch.Tensor.floor_", "generated/torch.Tensor.floor_divide", "generated/torch.Tensor.floor_divide_", "generated/torch.Tensor.fmax", "generated/torch.Tensor.fmin", "generated/torch.Tensor.fmod", "generated/torch.Tensor.fmod_", "generated/torch.Tensor.frac", "generated/torch.Tensor.frac_", "generated/torch.Tensor.frexp", "generated/torch.Tensor.gather", "generated/torch.Tensor.gcd", "generated/torch.Tensor.gcd_", "generated/torch.Tensor.ge", "generated/torch.Tensor.ge_", "generated/torch.Tensor.geometric_", "generated/torch.Tensor.geqrf", "generated/torch.Tensor.ger", "generated/torch.Tensor.get_device", "generated/torch.Tensor.grad", "generated/torch.Tensor.greater", "generated/torch.Tensor.greater_", "generated/torch.Tensor.greater_equal", "generated/torch.Tensor.greater_equal_", "generated/torch.Tensor.gt", "generated/torch.Tensor.gt_", "generated/torch.Tensor.half", "generated/torch.Tensor.hardshrink", "generated/torch.Tensor.heaviside", "generated/torch.Tensor.histc", "generated/torch.Tensor.histogram", "generated/torch.Tensor.hsplit", "generated/torch.Tensor.hypot", "generated/torch.Tensor.hypot_", "generated/torch.Tensor.i0", "generated/torch.Tensor.i0_", "generated/torch.Tensor.igamma", "generated/torch.Tensor.igamma_", "generated/torch.Tensor.igammac", "generated/torch.Tensor.igammac_", "generated/torch.Tensor.imag", "generated/torch.Tensor.index_add", "generated/torch.Tensor.index_add_", "generated/torch.Tensor.index_copy", "generated/torch.Tensor.index_copy_", "generated/torch.Tensor.index_fill", "generated/torch.Tensor.index_fill_", "generated/torch.Tensor.index_put", "generated/torch.Tensor.index_put_", "generated/torch.Tensor.index_reduce", "generated/torch.Tensor.index_reduce_", "generated/torch.Tensor.index_select", "generated/torch.Tensor.indices", "generated/torch.Tensor.inner", "generated/torch.Tensor.int", "generated/torch.Tensor.int_repr", "generated/torch.Tensor.inverse", "generated/torch.Tensor.is_coalesced", "generated/torch.Tensor.is_complex", "generated/torch.Tensor.is_conj", "generated/torch.Tensor.is_contiguous", "generated/torch.Tensor.is_cuda", "generated/torch.Tensor.is_floating_point", "generated/torch.Tensor.is_inference", "generated/torch.Tensor.is_leaf", "generated/torch.Tensor.is_meta", "generated/torch.Tensor.is_pinned", "generated/torch.Tensor.is_quantized", "generated/torch.Tensor.is_set_to", "generated/torch.Tensor.is_shared", "generated/torch.Tensor.is_signed", "generated/torch.Tensor.is_sparse", 
"generated/torch.Tensor.is_sparse_csr", "generated/torch.Tensor.isclose", "generated/torch.Tensor.isfinite", "generated/torch.Tensor.isinf", "generated/torch.Tensor.isnan", "generated/torch.Tensor.isneginf", "generated/torch.Tensor.isposinf", "generated/torch.Tensor.isreal", "generated/torch.Tensor.istft", "generated/torch.Tensor.item", "generated/torch.Tensor.itemsize", "generated/torch.Tensor.kthvalue", "generated/torch.Tensor.lcm", "generated/torch.Tensor.lcm_", "generated/torch.Tensor.ldexp", "generated/torch.Tensor.ldexp_", "generated/torch.Tensor.le", "generated/torch.Tensor.le_", "generated/torch.Tensor.lerp", "generated/torch.Tensor.lerp_", "generated/torch.Tensor.less", "generated/torch.Tensor.less_", "generated/torch.Tensor.less_equal", "generated/torch.Tensor.less_equal_", "generated/torch.Tensor.lgamma", "generated/torch.Tensor.lgamma_", "generated/torch.Tensor.log", "generated/torch.Tensor.log10", "generated/torch.Tensor.log10_", "generated/torch.Tensor.log1p", "generated/torch.Tensor.log1p_", "generated/torch.Tensor.log2", "generated/torch.Tensor.log2_", "generated/torch.Tensor.log_", "generated/torch.Tensor.log_normal_", "generated/torch.Tensor.logaddexp", "generated/torch.Tensor.logaddexp2", "generated/torch.Tensor.logcumsumexp", "generated/torch.Tensor.logdet", "generated/torch.Tensor.logical_and", "generated/torch.Tensor.logical_and_", "generated/torch.Tensor.logical_not", "generated/torch.Tensor.logical_not_", "generated/torch.Tensor.logical_or", "generated/torch.Tensor.logical_or_", "generated/torch.Tensor.logical_xor", "generated/torch.Tensor.logical_xor_", "generated/torch.Tensor.logit", "generated/torch.Tensor.logit_", "generated/torch.Tensor.logsumexp", "generated/torch.Tensor.long", "generated/torch.Tensor.lt", "generated/torch.Tensor.lt_", "generated/torch.Tensor.lu", "generated/torch.Tensor.lu_solve", "generated/torch.Tensor.map_", "generated/torch.Tensor.masked_fill", "generated/torch.Tensor.masked_fill_", "generated/torch.Tensor.masked_scatter", "generated/torch.Tensor.masked_scatter_", "generated/torch.Tensor.masked_select", "generated/torch.Tensor.matmul", "generated/torch.Tensor.matrix_exp", "generated/torch.Tensor.matrix_power", "generated/torch.Tensor.max", "generated/torch.Tensor.maximum", "generated/torch.Tensor.mean", "generated/torch.Tensor.median", "generated/torch.Tensor.min", "generated/torch.Tensor.minimum", "generated/torch.Tensor.mm", "generated/torch.Tensor.mode", "generated/torch.Tensor.module_load", "generated/torch.Tensor.moveaxis", "generated/torch.Tensor.movedim", "generated/torch.Tensor.msort", "generated/torch.Tensor.mul", "generated/torch.Tensor.mul_", "generated/torch.Tensor.multinomial", "generated/torch.Tensor.multiply", "generated/torch.Tensor.multiply_", "generated/torch.Tensor.mv", "generated/torch.Tensor.mvlgamma", "generated/torch.Tensor.mvlgamma_", "generated/torch.Tensor.nan_to_num", "generated/torch.Tensor.nan_to_num_", "generated/torch.Tensor.nanmean", "generated/torch.Tensor.nanmedian", "generated/torch.Tensor.nanquantile", "generated/torch.Tensor.nansum", "generated/torch.Tensor.narrow", "generated/torch.Tensor.narrow_copy", "generated/torch.Tensor.nbytes", "generated/torch.Tensor.ndim", "generated/torch.Tensor.ndimension", "generated/torch.Tensor.ne", "generated/torch.Tensor.ne_", "generated/torch.Tensor.neg", "generated/torch.Tensor.neg_", "generated/torch.Tensor.negative", "generated/torch.Tensor.negative_", "generated/torch.Tensor.nelement", "generated/torch.Tensor.new_empty", "generated/torch.Tensor.new_full", 
"generated/torch.Tensor.new_ones", "generated/torch.Tensor.new_tensor", "generated/torch.Tensor.new_zeros", "generated/torch.Tensor.nextafter", "generated/torch.Tensor.nextafter_", "generated/torch.Tensor.nonzero", "generated/torch.Tensor.norm", "generated/torch.Tensor.normal_", "generated/torch.Tensor.not_equal", "generated/torch.Tensor.not_equal_", "generated/torch.Tensor.numel", "generated/torch.Tensor.numpy", "generated/torch.Tensor.orgqr", "generated/torch.Tensor.ormqr", "generated/torch.Tensor.outer", "generated/torch.Tensor.permute", "generated/torch.Tensor.pin_memory", "generated/torch.Tensor.pinverse", "generated/torch.Tensor.polygamma", "generated/torch.Tensor.polygamma_", "generated/torch.Tensor.positive", "generated/torch.Tensor.pow", "generated/torch.Tensor.pow_", "generated/torch.Tensor.prod", "generated/torch.Tensor.put_", "generated/torch.Tensor.q_per_channel_axis", "generated/torch.Tensor.q_per_channel_scales", "generated/torch.Tensor.q_per_channel_zero_points", "generated/torch.Tensor.q_scale", "generated/torch.Tensor.q_zero_point", "generated/torch.Tensor.qr", "generated/torch.Tensor.qscheme", "generated/torch.Tensor.quantile", "generated/torch.Tensor.rad2deg", "generated/torch.Tensor.random_", "generated/torch.Tensor.ravel", "generated/torch.Tensor.real", "generated/torch.Tensor.reciprocal", "generated/torch.Tensor.reciprocal_", "generated/torch.Tensor.record_stream", "generated/torch.Tensor.register_hook", "generated/torch.Tensor.register_post_accumulate_grad_hook", "generated/torch.Tensor.remainder", "generated/torch.Tensor.remainder_", "generated/torch.Tensor.renorm", "generated/torch.Tensor.renorm_", "generated/torch.Tensor.repeat", "generated/torch.Tensor.repeat_interleave", "generated/torch.Tensor.requires_grad", "generated/torch.Tensor.requires_grad_", "generated/torch.Tensor.reshape", "generated/torch.Tensor.reshape_as", "generated/torch.Tensor.resize_", "generated/torch.Tensor.resize_as_", "generated/torch.Tensor.resolve_conj", "generated/torch.Tensor.resolve_neg", "generated/torch.Tensor.retain_grad", "generated/torch.Tensor.retains_grad", "generated/torch.Tensor.roll", "generated/torch.Tensor.rot90", "generated/torch.Tensor.round", "generated/torch.Tensor.round_", "generated/torch.Tensor.row_indices", "generated/torch.Tensor.rsqrt", "generated/torch.Tensor.rsqrt_", "generated/torch.Tensor.scatter", "generated/torch.Tensor.scatter_", "generated/torch.Tensor.scatter_add", "generated/torch.Tensor.scatter_add_", "generated/torch.Tensor.scatter_reduce", "generated/torch.Tensor.scatter_reduce_", "generated/torch.Tensor.select", "generated/torch.Tensor.select_scatter", "generated/torch.Tensor.set_", "generated/torch.Tensor.sgn", "generated/torch.Tensor.sgn_", "generated/torch.Tensor.shape", "generated/torch.Tensor.share_memory_", "generated/torch.Tensor.short", "generated/torch.Tensor.sigmoid", "generated/torch.Tensor.sigmoid_", "generated/torch.Tensor.sign", "generated/torch.Tensor.sign_", "generated/torch.Tensor.signbit", "generated/torch.Tensor.sin", "generated/torch.Tensor.sin_", "generated/torch.Tensor.sinc", "generated/torch.Tensor.sinc_", "generated/torch.Tensor.sinh", "generated/torch.Tensor.sinh_", "generated/torch.Tensor.size", "generated/torch.Tensor.slice_scatter", "generated/torch.Tensor.slogdet", "generated/torch.Tensor.smm", "generated/torch.Tensor.softmax", "generated/torch.Tensor.sort", "generated/torch.Tensor.sparse_dim", "generated/torch.Tensor.sparse_mask", "generated/torch.Tensor.sparse_resize_", 
"generated/torch.Tensor.sparse_resize_and_clear_", "generated/torch.Tensor.split", "generated/torch.Tensor.sqrt", "generated/torch.Tensor.sqrt_", "generated/torch.Tensor.square", "generated/torch.Tensor.square_", "generated/torch.Tensor.squeeze", "generated/torch.Tensor.squeeze_", "generated/torch.Tensor.sspaddmm", "generated/torch.Tensor.std", "generated/torch.Tensor.stft", "generated/torch.Tensor.storage", "generated/torch.Tensor.storage_offset", "generated/torch.Tensor.storage_type", "generated/torch.Tensor.stride", "generated/torch.Tensor.sub", "generated/torch.Tensor.sub_", "generated/torch.Tensor.subtract", "generated/torch.Tensor.subtract_", "generated/torch.Tensor.sum", "generated/torch.Tensor.sum_to_size", "generated/torch.Tensor.svd", "generated/torch.Tensor.swapaxes", "generated/torch.Tensor.swapdims", "generated/torch.Tensor.t", "generated/torch.Tensor.t_", "generated/torch.Tensor.take", "generated/torch.Tensor.take_along_dim", "generated/torch.Tensor.tan", "generated/torch.Tensor.tan_", "generated/torch.Tensor.tanh", "generated/torch.Tensor.tanh_", "generated/torch.Tensor.tensor_split", "generated/torch.Tensor.tile", "generated/torch.Tensor.to", "generated/torch.Tensor.to_dense", "generated/torch.Tensor.to_mkldnn", "generated/torch.Tensor.to_sparse", "generated/torch.Tensor.to_sparse_bsc", "generated/torch.Tensor.to_sparse_bsr", "generated/torch.Tensor.to_sparse_coo", "generated/torch.Tensor.to_sparse_csc", "generated/torch.Tensor.to_sparse_csr", "generated/torch.Tensor.tolist", "generated/torch.Tensor.topk", "generated/torch.Tensor.trace", "generated/torch.Tensor.transpose", "generated/torch.Tensor.transpose_", "generated/torch.Tensor.triangular_solve", "generated/torch.Tensor.tril", "generated/torch.Tensor.tril_", "generated/torch.Tensor.triu", "generated/torch.Tensor.triu_", "generated/torch.Tensor.true_divide", "generated/torch.Tensor.true_divide_", "generated/torch.Tensor.trunc", "generated/torch.Tensor.trunc_", "generated/torch.Tensor.type", "generated/torch.Tensor.type_as", "generated/torch.Tensor.unbind", "generated/torch.Tensor.unflatten", "generated/torch.Tensor.unfold", "generated/torch.Tensor.uniform_", "generated/torch.Tensor.unique", "generated/torch.Tensor.unique_consecutive", "generated/torch.Tensor.unsqueeze", "generated/torch.Tensor.unsqueeze_", "generated/torch.Tensor.untyped_storage", "generated/torch.Tensor.values", "generated/torch.Tensor.var", "generated/torch.Tensor.vdot", "generated/torch.Tensor.view", "generated/torch.Tensor.view_as", "generated/torch.Tensor.vsplit", "generated/torch.Tensor.where", "generated/torch.Tensor.xlogy", "generated/torch.Tensor.xlogy_", "generated/torch.Tensor.zero_", "generated/torch._assert", "generated/torch._foreach_abs", "generated/torch._foreach_abs_", "generated/torch._foreach_acos", "generated/torch._foreach_acos_", "generated/torch._foreach_asin", "generated/torch._foreach_asin_", "generated/torch._foreach_atan", "generated/torch._foreach_atan_", "generated/torch._foreach_ceil", "generated/torch._foreach_ceil_", "generated/torch._foreach_cos", "generated/torch._foreach_cos_", "generated/torch._foreach_cosh", "generated/torch._foreach_cosh_", "generated/torch._foreach_erf", "generated/torch._foreach_erf_", "generated/torch._foreach_erfc", "generated/torch._foreach_erfc_", "generated/torch._foreach_exp", "generated/torch._foreach_exp_", "generated/torch._foreach_expm1", "generated/torch._foreach_expm1_", "generated/torch._foreach_floor", "generated/torch._foreach_floor_", "generated/torch._foreach_frac", 
"generated/torch._foreach_frac_", "generated/torch._foreach_lgamma", "generated/torch._foreach_lgamma_", "generated/torch._foreach_log", "generated/torch._foreach_log10", "generated/torch._foreach_log10_", "generated/torch._foreach_log1p", "generated/torch._foreach_log1p_", "generated/torch._foreach_log2", "generated/torch._foreach_log2_", "generated/torch._foreach_log_", "generated/torch._foreach_neg", "generated/torch._foreach_neg_", "generated/torch._foreach_reciprocal", "generated/torch._foreach_reciprocal_", "generated/torch._foreach_round", "generated/torch._foreach_round_", "generated/torch._foreach_sigmoid", "generated/torch._foreach_sigmoid_", "generated/torch._foreach_sin", "generated/torch._foreach_sin_", "generated/torch._foreach_sinh", "generated/torch._foreach_sinh_", "generated/torch._foreach_sqrt", "generated/torch._foreach_sqrt_", "generated/torch._foreach_tan", "generated/torch._foreach_tan_", "generated/torch._foreach_trunc", "generated/torch._foreach_trunc_", "generated/torch._foreach_zero_", "generated/torch._logging.set_logs", "generated/torch.abs", "generated/torch.absolute", "generated/torch.acos", "generated/torch.acosh", "generated/torch.add", "generated/torch.addbmm", "generated/torch.addcdiv", "generated/torch.addcmul", "generated/torch.addmm", "generated/torch.addmv", "generated/torch.addr", "generated/torch.adjoint", "generated/torch.all", "generated/torch.allclose", "generated/torch.amax", "generated/torch.amin", "generated/torch.aminmax", "generated/torch.angle", "generated/torch.any", "generated/torch.ao.nn.intrinsic.BNReLU2d", "generated/torch.ao.nn.intrinsic.BNReLU3d", "generated/torch.ao.nn.intrinsic.ConvBn1d", "generated/torch.ao.nn.intrinsic.ConvBn2d", "generated/torch.ao.nn.intrinsic.ConvBn3d", "generated/torch.ao.nn.intrinsic.ConvBnReLU1d", "generated/torch.ao.nn.intrinsic.ConvBnReLU2d", "generated/torch.ao.nn.intrinsic.ConvBnReLU3d", "generated/torch.ao.nn.intrinsic.ConvReLU1d", "generated/torch.ao.nn.intrinsic.ConvReLU2d", "generated/torch.ao.nn.intrinsic.ConvReLU3d", "generated/torch.ao.nn.intrinsic.LinearReLU", "generated/torch.ao.nn.intrinsic.qat.ConvBn1d", "generated/torch.ao.nn.intrinsic.qat.ConvBn2d", "generated/torch.ao.nn.intrinsic.qat.ConvBn3d", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU1d", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU2d", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU3d", "generated/torch.ao.nn.intrinsic.qat.ConvReLU2d", "generated/torch.ao.nn.intrinsic.qat.ConvReLU3d", "generated/torch.ao.nn.intrinsic.qat.LinearReLU", "generated/torch.ao.nn.intrinsic.qat.freeze_bn_stats", "generated/torch.ao.nn.intrinsic.qat.update_bn_stats", "generated/torch.ao.nn.intrinsic.quantized.BNReLU2d", "generated/torch.ao.nn.intrinsic.quantized.BNReLU3d", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU1d", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU2d", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU3d", "generated/torch.ao.nn.intrinsic.quantized.LinearReLU", "generated/torch.ao.nn.intrinsic.quantized.dynamic.LinearReLU", "generated/torch.ao.nn.qat.Conv2d", "generated/torch.ao.nn.qat.Conv3d", "generated/torch.ao.nn.qat.Linear", "generated/torch.ao.nn.qat.dynamic.Linear", "generated/torch.ao.nn.quantizable.LSTM", "generated/torch.ao.nn.quantizable.MultiheadAttention", "generated/torch.ao.nn.quantized.BatchNorm2d", "generated/torch.ao.nn.quantized.BatchNorm3d", "generated/torch.ao.nn.quantized.Conv1d", "generated/torch.ao.nn.quantized.Conv2d", "generated/torch.ao.nn.quantized.Conv3d", 
"generated/torch.ao.nn.quantized.ConvTranspose1d", "generated/torch.ao.nn.quantized.ConvTranspose2d", "generated/torch.ao.nn.quantized.ConvTranspose3d", "generated/torch.ao.nn.quantized.ELU", "generated/torch.ao.nn.quantized.Embedding", "generated/torch.ao.nn.quantized.EmbeddingBag", "generated/torch.ao.nn.quantized.FXFloatFunctional", "generated/torch.ao.nn.quantized.FloatFunctional", "generated/torch.ao.nn.quantized.GroupNorm", "generated/torch.ao.nn.quantized.Hardswish", "generated/torch.ao.nn.quantized.InstanceNorm1d", "generated/torch.ao.nn.quantized.InstanceNorm2d", "generated/torch.ao.nn.quantized.InstanceNorm3d", "generated/torch.ao.nn.quantized.LayerNorm", "generated/torch.ao.nn.quantized.LeakyReLU", "generated/torch.ao.nn.quantized.Linear", "generated/torch.ao.nn.quantized.QFunctional", "generated/torch.ao.nn.quantized.ReLU6", "generated/torch.ao.nn.quantized.Sigmoid", "generated/torch.ao.nn.quantized.dynamic.GRU", "generated/torch.ao.nn.quantized.dynamic.GRUCell", "generated/torch.ao.nn.quantized.dynamic.LSTM", "generated/torch.ao.nn.quantized.dynamic.LSTMCell", "generated/torch.ao.nn.quantized.dynamic.Linear", "generated/torch.ao.nn.quantized.dynamic.RNNCell", "generated/torch.ao.nn.quantized.functional.adaptive_avg_pool2d", "generated/torch.ao.nn.quantized.functional.adaptive_avg_pool3d", "generated/torch.ao.nn.quantized.functional.avg_pool2d", "generated/torch.ao.nn.quantized.functional.avg_pool3d", "generated/torch.ao.nn.quantized.functional.celu", "generated/torch.ao.nn.quantized.functional.clamp", "generated/torch.ao.nn.quantized.functional.conv1d", "generated/torch.ao.nn.quantized.functional.conv2d", "generated/torch.ao.nn.quantized.functional.conv3d", "generated/torch.ao.nn.quantized.functional.elu", "generated/torch.ao.nn.quantized.functional.hardsigmoid", "generated/torch.ao.nn.quantized.functional.hardswish", "generated/torch.ao.nn.quantized.functional.hardtanh", "generated/torch.ao.nn.quantized.functional.interpolate", "generated/torch.ao.nn.quantized.functional.leaky_relu", "generated/torch.ao.nn.quantized.functional.linear", "generated/torch.ao.nn.quantized.functional.max_pool1d", "generated/torch.ao.nn.quantized.functional.max_pool2d", "generated/torch.ao.nn.quantized.functional.threshold", "generated/torch.ao.nn.quantized.functional.upsample", "generated/torch.ao.nn.quantized.functional.upsample_bilinear", "generated/torch.ao.nn.quantized.functional.upsample_nearest", "generated/torch.ao.quantization.DeQuantStub", "generated/torch.ao.quantization.QuantStub", "generated/torch.ao.quantization.QuantWrapper", "generated/torch.ao.quantization.add_quant_dequant", "generated/torch.ao.quantization.backend_config.BackendConfig", "generated/torch.ao.quantization.backend_config.BackendPatternConfig", "generated/torch.ao.quantization.backend_config.DTypeConfig", "generated/torch.ao.quantization.backend_config.DTypeWithConstraints", "generated/torch.ao.quantization.backend_config.ObservationType", "generated/torch.ao.quantization.convert", "generated/torch.ao.quantization.default_eval_fn", "generated/torch.ao.quantization.fake_quantize.FakeQuantize", "generated/torch.ao.quantization.fake_quantize.FakeQuantizeBase", "generated/torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize", "generated/torch.ao.quantization.fake_quantize.FusedMovingAvgObsFakeQuantize", "generated/torch.ao.quantization.fake_quantize.default_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_fused_act_fake_quant", 
"generated/torch.ao.quantization.fake_quantize.default_fused_per_channel_wt_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_fused_wt_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_histogram_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_per_channel_weight_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_weight_fake_quant", "generated/torch.ao.quantization.fake_quantize.disable_fake_quant", "generated/torch.ao.quantization.fake_quantize.disable_observer", "generated/torch.ao.quantization.fake_quantize.enable_fake_quant", "generated/torch.ao.quantization.fake_quantize.enable_observer", "generated/torch.ao.quantization.fuse_modules.fuse_modules", "generated/torch.ao.quantization.fx.custom_config.ConvertCustomConfig", "generated/torch.ao.quantization.fx.custom_config.FuseCustomConfig", "generated/torch.ao.quantization.fx.custom_config.PrepareCustomConfig", "generated/torch.ao.quantization.fx.custom_config.StandaloneModuleConfigEntry", "generated/torch.ao.quantization.observer.HistogramObserver", "generated/torch.ao.quantization.observer.MinMaxObserver", "generated/torch.ao.quantization.observer.MovingAverageMinMaxObserver", "generated/torch.ao.quantization.observer.MovingAveragePerChannelMinMaxObserver", "generated/torch.ao.quantization.observer.NoopObserver", "generated/torch.ao.quantization.observer.ObserverBase", "generated/torch.ao.quantization.observer.PerChannelMinMaxObserver", "generated/torch.ao.quantization.observer.PlaceholderObserver", "generated/torch.ao.quantization.observer.RecordingObserver", "generated/torch.ao.quantization.observer.default_debug_observer", "generated/torch.ao.quantization.observer.default_dynamic_quant_observer", "generated/torch.ao.quantization.observer.default_float_qparams_observer", "generated/torch.ao.quantization.observer.default_histogram_observer", "generated/torch.ao.quantization.observer.default_observer", "generated/torch.ao.quantization.observer.default_per_channel_weight_observer", "generated/torch.ao.quantization.observer.default_placeholder_observer", "generated/torch.ao.quantization.observer.default_weight_observer", "generated/torch.ao.quantization.observer.get_observer_state_dict", "generated/torch.ao.quantization.observer.load_observer_state_dict", "generated/torch.ao.quantization.prepare", "generated/torch.ao.quantization.prepare_qat", "generated/torch.ao.quantization.propagate_qconfig_", "generated/torch.ao.quantization.pt2e.export_utils.model_is_exported", "generated/torch.ao.quantization.qconfig.QConfig", "generated/torch.ao.quantization.qconfig.default_activation_only_qconfig", "generated/torch.ao.quantization.qconfig.default_debug_qconfig", "generated/torch.ao.quantization.qconfig.default_dynamic_qconfig", "generated/torch.ao.quantization.qconfig.default_per_channel_qconfig", "generated/torch.ao.quantization.qconfig.default_qat_qconfig", "generated/torch.ao.quantization.qconfig.default_qat_qconfig_v2", "generated/torch.ao.quantization.qconfig.default_qconfig", "generated/torch.ao.quantization.qconfig.default_weight_only_qconfig", "generated/torch.ao.quantization.qconfig.float16_dynamic_qconfig", "generated/torch.ao.quantization.qconfig.float16_static_qconfig", "generated/torch.ao.quantization.qconfig.float_qparams_weight_only_qconfig", "generated/torch.ao.quantization.qconfig.per_channel_dynamic_qconfig", "generated/torch.ao.quantization.qconfig_mapping.QConfigMapping", "generated/torch.ao.quantization.qconfig_mapping.get_default_qat_qconfig_mapping", 
"generated/torch.ao.quantization.qconfig_mapping.get_default_qconfig_mapping", "generated/torch.ao.quantization.quantize", "generated/torch.ao.quantization.quantize_dynamic", "generated/torch.ao.quantization.quantize_fx.convert_fx", "generated/torch.ao.quantization.quantize_fx.fuse_fx", "generated/torch.ao.quantization.quantize_fx.prepare_fx", "generated/torch.ao.quantization.quantize_fx.prepare_qat_fx", "generated/torch.ao.quantization.quantize_qat", "generated/torch.ao.quantization.swap_module", "generated/torch.arange", "generated/torch.arccos", "generated/torch.arccosh", "generated/torch.arcsin", "generated/torch.arcsinh", "generated/torch.arctan", "generated/torch.arctan2", "generated/torch.arctanh", "generated/torch.are_deterministic_algorithms_enabled", "generated/torch.argmax", "generated/torch.argmin", "generated/torch.argsort", "generated/torch.argwhere", "generated/torch.as_strided", "generated/torch.as_tensor", "generated/torch.asarray", "generated/torch.asin", "generated/torch.asinh", "generated/torch.atan", "generated/torch.atan2", "generated/torch.atanh", "generated/torch.atleast_1d", "generated/torch.atleast_2d", "generated/torch.atleast_3d", "generated/torch.autograd.Function.backward", "generated/torch.autograd.Function.forward", "generated/torch.autograd.Function.jvp", "generated/torch.autograd.Function.vmap", "generated/torch.autograd.backward", "generated/torch.autograd.forward_ad.UnpackedDualTensor", "generated/torch.autograd.forward_ad.dual_level", "generated/torch.autograd.forward_ad.enter_dual_level", "generated/torch.autograd.forward_ad.exit_dual_level", "generated/torch.autograd.forward_ad.make_dual", "generated/torch.autograd.forward_ad.unpack_dual", "generated/torch.autograd.function.BackwardCFunction", "generated/torch.autograd.function.FunctionCtx.mark_dirty", "generated/torch.autograd.function.FunctionCtx.mark_non_differentiable", "generated/torch.autograd.function.FunctionCtx.save_for_backward", "generated/torch.autograd.function.FunctionCtx.set_materialize_grads", "generated/torch.autograd.function.InplaceFunction", "generated/torch.autograd.function.NestedIOFunction", "generated/torch.autograd.function.once_differentiable", "generated/torch.autograd.functional.hessian", "generated/torch.autograd.functional.hvp", "generated/torch.autograd.functional.jacobian", "generated/torch.autograd.functional.jvp", "generated/torch.autograd.functional.vhp", "generated/torch.autograd.functional.vjp", "generated/torch.autograd.grad", "generated/torch.autograd.grad_mode.inference_mode", "generated/torch.autograd.grad_mode.set_grad_enabled", "generated/torch.autograd.grad_mode.set_multithreading_enabled", "generated/torch.autograd.gradcheck.GradcheckError", "generated/torch.autograd.gradcheck.gradcheck", "generated/torch.autograd.gradcheck.gradgradcheck", "generated/torch.autograd.graph.Node.metadata", "generated/torch.autograd.graph.Node.name", "generated/torch.autograd.graph.Node.next_functions", "generated/torch.autograd.graph.Node.register_hook", "generated/torch.autograd.graph.Node.register_prehook", "generated/torch.autograd.graph.increment_version", "generated/torch.autograd.profiler.EnforceUnique", "generated/torch.autograd.profiler.KinetoStepTracker", "generated/torch.autograd.profiler.load_nvprof", "generated/torch.autograd.profiler.parse_nvprof_trace", "generated/torch.autograd.profiler.profile.export_chrome_trace", "generated/torch.autograd.profiler.profile.key_averages", "generated/torch.autograd.profiler.profile.self_cpu_time_total", 
"generated/torch.autograd.profiler.profile.total_average", "generated/torch.autograd.profiler.record_function", "generated/torch.autograd.profiler_util.Interval", "generated/torch.autograd.profiler_util.Kernel", "generated/torch.autograd.profiler_util.MemRecordsAcc", "generated/torch.autograd.profiler_util.StringTable", "generated/torch.baddbmm", "generated/torch.bartlett_window", "generated/torch.bernoulli", "generated/torch.bincount", "generated/torch.bitwise_and", "generated/torch.bitwise_left_shift", "generated/torch.bitwise_not", "generated/torch.bitwise_or", "generated/torch.bitwise_right_shift", "generated/torch.bitwise_xor", "generated/torch.blackman_window", "generated/torch.block_diag", "generated/torch.bmm", "generated/torch.broadcast_shapes", "generated/torch.broadcast_tensors", "generated/torch.broadcast_to", "generated/torch.bucketize", "generated/torch.can_cast", "generated/torch.cartesian_prod", "generated/torch.cat", "generated/torch.cdist", "generated/torch.ceil", "generated/torch.chain_matmul", "generated/torch.cholesky", "generated/torch.cholesky_inverse", "generated/torch.cholesky_solve", "generated/torch.chunk", "generated/torch.clamp", "generated/torch.clip", "generated/torch.clone", "generated/torch.column_stack", "generated/torch.combinations", "generated/torch.compile", "generated/torch.compiled_with_cxx11_abi", "generated/torch.compiler.allow_in_graph", "generated/torch.compiler.assume_constant_result", "generated/torch.compiler.compile", "generated/torch.compiler.cudagraph_mark_step_begin", "generated/torch.compiler.disable", "generated/torch.compiler.is_compiling", "generated/torch.compiler.is_dynamo_compiling", "generated/torch.compiler.list_backends", "generated/torch.compiler.reset", "generated/torch.complex", "generated/torch.concat", "generated/torch.concatenate", "generated/torch.cond", "generated/torch.conj", "generated/torch.conj_physical", "generated/torch.copysign", "generated/torch.corrcoef", "generated/torch.cos", "generated/torch.cosh", "generated/torch.count_nonzero", "generated/torch.cov", "generated/torch.cpu.Stream", "generated/torch.cpu.StreamContext", "generated/torch.cpu.current_device", "generated/torch.cpu.current_stream", "generated/torch.cpu.device_count", "generated/torch.cpu.is_available", "generated/torch.cpu.set_device", "generated/torch.cpu.stream", "generated/torch.cpu.synchronize", "generated/torch.cross", "generated/torch.cuda.CUDAGraph", "generated/torch.cuda.CUDAPluggableAllocator", "generated/torch.cuda.Event", "generated/torch.cuda.ExternalStream", "generated/torch.cuda.OutOfMemoryError", "generated/torch.cuda.Stream", "generated/torch.cuda.StreamContext", "generated/torch.cuda.caching_allocator_alloc", "generated/torch.cuda.caching_allocator_delete", "generated/torch.cuda.can_device_access_peer", "generated/torch.cuda.change_current_allocator", "generated/torch.cuda.clock_rate", "generated/torch.cuda.comm.broadcast", "generated/torch.cuda.comm.broadcast_coalesced", "generated/torch.cuda.comm.gather", "generated/torch.cuda.comm.reduce_add", "generated/torch.cuda.comm.scatter", "generated/torch.cuda.current_blas_handle", "generated/torch.cuda.current_device", "generated/torch.cuda.current_stream", "generated/torch.cuda.default_stream", "generated/torch.cuda.device", "generated/torch.cuda.device_count", "generated/torch.cuda.device_of", "generated/torch.cuda.empty_cache", "generated/torch.cuda.get_allocator_backend", "generated/torch.cuda.get_arch_list", "generated/torch.cuda.get_device_capability", 
"generated/torch.cuda.get_device_name", "generated/torch.cuda.get_device_properties", "generated/torch.cuda.get_gencode_flags", "generated/torch.cuda.get_rng_state", "generated/torch.cuda.get_rng_state_all", "generated/torch.cuda.get_sync_debug_mode", "generated/torch.cuda.graph", "generated/torch.cuda.graph_pool_handle", "generated/torch.cuda.init", "generated/torch.cuda.initial_seed", "generated/torch.cuda.ipc_collect", "generated/torch.cuda.is_available", "generated/torch.cuda.is_current_stream_capturing", "generated/torch.cuda.is_initialized", "generated/torch.cuda.jiterator._create_jit_fn", "generated/torch.cuda.jiterator._create_multi_output_jit_fn", "generated/torch.cuda.list_gpu_processes", "generated/torch.cuda.make_graphed_callables", "generated/torch.cuda.manual_seed", "generated/torch.cuda.manual_seed_all", "generated/torch.cuda.max_memory_allocated", "generated/torch.cuda.max_memory_cached", "generated/torch.cuda.max_memory_reserved", "generated/torch.cuda.mem_get_info", "generated/torch.cuda.memory_allocated", "generated/torch.cuda.memory_cached", "generated/torch.cuda.memory_reserved", "generated/torch.cuda.memory_snapshot", "generated/torch.cuda.memory_stats", "generated/torch.cuda.memory_summary", "generated/torch.cuda.memory_usage", "generated/torch.cuda.nvtx.mark", "generated/torch.cuda.nvtx.range", "generated/torch.cuda.nvtx.range_pop", "generated/torch.cuda.nvtx.range_push", "generated/torch.cuda.power_draw", "generated/torch.cuda.reset_max_memory_allocated", "generated/torch.cuda.reset_max_memory_cached", "generated/torch.cuda.reset_peak_memory_stats", "generated/torch.cuda.seed", "generated/torch.cuda.seed_all", "generated/torch.cuda.set_device", "generated/torch.cuda.set_per_process_memory_fraction", "generated/torch.cuda.set_rng_state", "generated/torch.cuda.set_rng_state_all", "generated/torch.cuda.set_stream", "generated/torch.cuda.set_sync_debug_mode", "generated/torch.cuda.stream", "generated/torch.cuda.synchronize", "generated/torch.cuda.temperature", "generated/torch.cuda.utilization", "generated/torch.cummax", "generated/torch.cummin", "generated/torch.cumprod", "generated/torch.cumsum", "generated/torch.cumulative_trapezoid", "generated/torch.deg2rad", "generated/torch.dequantize", "generated/torch.det", "generated/torch.diag", "generated/torch.diag_embed", "generated/torch.diagflat", "generated/torch.diagonal", "generated/torch.diagonal_scatter", "generated/torch.diff", "generated/torch.digamma", "generated/torch.dist", "generated/torch.div", "generated/torch.divide", "generated/torch.dot", "generated/torch.dsplit", "generated/torch.dstack", "generated/torch.einsum", "generated/torch.empty", "generated/torch.empty_like", "generated/torch.empty_strided", "generated/torch.enable_grad", "generated/torch.eq", "generated/torch.equal", "generated/torch.erf", "generated/torch.erfc", "generated/torch.erfinv", "generated/torch.exp", "generated/torch.exp2", "generated/torch.expm1", "generated/torch.eye", "generated/torch.fake_quantize_per_channel_affine", "generated/torch.fake_quantize_per_tensor_affine", "generated/torch.fft.fft", "generated/torch.fft.fft2", "generated/torch.fft.fftfreq", "generated/torch.fft.fftn", "generated/torch.fft.fftshift", "generated/torch.fft.hfft", "generated/torch.fft.hfft2", "generated/torch.fft.hfftn", "generated/torch.fft.ifft", "generated/torch.fft.ifft2", "generated/torch.fft.ifftn", "generated/torch.fft.ifftshift", "generated/torch.fft.ihfft", "generated/torch.fft.ihfft2", "generated/torch.fft.ihfftn", "generated/torch.fft.irfft", 
"generated/torch.fft.irfft2", "generated/torch.fft.irfftn", "generated/torch.fft.rfft", "generated/torch.fft.rfft2", "generated/torch.fft.rfftfreq", "generated/torch.fft.rfftn", "generated/torch.fix", "generated/torch.flatten", "generated/torch.flip", "generated/torch.fliplr", "generated/torch.flipud", "generated/torch.float_power", "generated/torch.floor", "generated/torch.floor_divide", "generated/torch.fmax", "generated/torch.fmin", "generated/torch.fmod", "generated/torch.frac", "generated/torch.frexp", "generated/torch.from_dlpack", "generated/torch.from_file", "generated/torch.from_numpy", "generated/torch.frombuffer", "generated/torch.full", "generated/torch.full_like", "generated/torch.func.functional_call", "generated/torch.func.functionalize", "generated/torch.func.grad", "generated/torch.func.grad_and_value", "generated/torch.func.hessian", "generated/torch.func.jacfwd", "generated/torch.func.jacrev", "generated/torch.func.jvp", "generated/torch.func.linearize", "generated/torch.func.replace_all_batch_norm_modules_", "generated/torch.func.stack_module_state", "generated/torch.func.vjp", "generated/torch.func.vmap", "generated/torch.fx.experimental.symbolic_shapes.CallMethodKey", "generated/torch.fx.experimental.symbolic_shapes.ConvertIntKey", "generated/torch.fx.experimental.symbolic_shapes.DimConstraints", "generated/torch.fx.experimental.symbolic_shapes.DimDynamic", "generated/torch.fx.experimental.symbolic_shapes.DivideByKey", "generated/torch.fx.experimental.symbolic_shapes.EqualityConstraint", "generated/torch.fx.experimental.symbolic_shapes.InnerTensorKey", "generated/torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts", "generated/torch.fx.experimental.symbolic_shapes.RelaxedUnspecConstraint", "generated/torch.fx.experimental.symbolic_shapes.ShapeEnv", "generated/torch.fx.experimental.symbolic_shapes.ShapeEnvSettings", "generated/torch.fx.experimental.symbolic_shapes.StatefulSymbolicContext", "generated/torch.fx.experimental.symbolic_shapes.StatelessSymbolicContext", "generated/torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint", "generated/torch.fx.experimental.symbolic_shapes.SubclassSymbolicContext", "generated/torch.fx.experimental.symbolic_shapes.SymbolicContext", "generated/torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr", "generated/torch.fx.experimental.symbolic_shapes.check_consistent", "generated/torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings", "generated/torch.fx.experimental.symbolic_shapes.constrain_range", "generated/torch.fx.experimental.symbolic_shapes.constrain_unify", "generated/torch.fx.experimental.symbolic_shapes.definitely_false", "generated/torch.fx.experimental.symbolic_shapes.definitely_true", "generated/torch.fx.experimental.symbolic_shapes.guard_size_oblivious", "generated/torch.fx.experimental.symbolic_shapes.has_free_symbols", "generated/torch.fx.experimental.symbolic_shapes.hint_int", "generated/torch.fx.experimental.symbolic_shapes.is_concrete_bool", "generated/torch.fx.experimental.symbolic_shapes.is_concrete_int", "generated/torch.fx.experimental.symbolic_shapes.lru_cache", "generated/torch.fx.experimental.symbolic_shapes.parallel_and", "generated/torch.fx.experimental.symbolic_shapes.parallel_or", "generated/torch.fx.experimental.symbolic_shapes.rebind_unbacked", "generated/torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings", "generated/torch.fx.experimental.symbolic_shapes.statically_known_true", "generated/torch.fx.experimental.symbolic_shapes.sym_eq", 
"generated/torch.gather", "generated/torch.gcd", "generated/torch.ge", "generated/torch.geqrf", "generated/torch.ger", "generated/torch.get_default_device", "generated/torch.get_default_dtype", "generated/torch.get_deterministic_debug_mode", "generated/torch.get_device_module", "generated/torch.get_float32_matmul_precision", "generated/torch.get_num_interop_threads", "generated/torch.get_num_threads", "generated/torch.get_rng_state", "generated/torch.gradient", "generated/torch.greater", "generated/torch.greater_equal", "generated/torch.gt", "generated/torch.hamming_window", "generated/torch.hann_window", "generated/torch.heaviside", "generated/torch.histc", "generated/torch.histogram", "generated/torch.histogramdd", "generated/torch.hsplit", "generated/torch.hspmm", "generated/torch.hstack", "generated/torch.hypot", "generated/torch.i0", "generated/torch.igamma", "generated/torch.igammac", "generated/torch.imag", "generated/torch.index_add", "generated/torch.index_copy", "generated/torch.index_reduce", "generated/torch.index_select", "generated/torch.initial_seed", "generated/torch.inner", "generated/torch.inverse", "generated/torch.is_complex", "generated/torch.is_conj", "generated/torch.is_deterministic_algorithms_warn_only_enabled", "generated/torch.is_floating_point", "generated/torch.is_grad_enabled", "generated/torch.is_inference_mode_enabled", "generated/torch.is_nonzero", "generated/torch.is_storage", "generated/torch.is_tensor", "generated/torch.is_warn_always_enabled", "generated/torch.isclose", "generated/torch.isfinite", "generated/torch.isin", "generated/torch.isinf", "generated/torch.isnan", "generated/torch.isneginf", "generated/torch.isposinf", "generated/torch.isreal", "generated/torch.istft", "generated/torch.jit.Attribute", "generated/torch.jit.ScriptFunction", "generated/torch.jit.ScriptModule", "generated/torch.jit.annotate", "generated/torch.jit.enable_onednn_fusion", "generated/torch.jit.fork", "generated/torch.jit.freeze", "generated/torch.jit.ignore", "generated/torch.jit.interface", "generated/torch.jit.isinstance", "generated/torch.jit.load", "generated/torch.jit.onednn_fusion_enabled", "generated/torch.jit.optimize_for_inference", "generated/torch.jit.save", "generated/torch.jit.script", "generated/torch.jit.script_if_tracing", "generated/torch.jit.set_fusion_strategy", "generated/torch.jit.strict_fusion", "generated/torch.jit.trace", "generated/torch.jit.trace_module", "generated/torch.jit.unused", "generated/torch.jit.wait", "generated/torch.kaiser_window", "generated/torch.kron", "generated/torch.kthvalue", "generated/torch.lcm", "generated/torch.ldexp", "generated/torch.le", "generated/torch.lerp", "generated/torch.less", "generated/torch.less_equal", "generated/torch.lgamma", "generated/torch.linalg.cholesky", "generated/torch.linalg.cholesky_ex", "generated/torch.linalg.cond", "generated/torch.linalg.cross", "generated/torch.linalg.det", "generated/torch.linalg.diagonal", "generated/torch.linalg.eig", "generated/torch.linalg.eigh", "generated/torch.linalg.eigvals", "generated/torch.linalg.eigvalsh", "generated/torch.linalg.householder_product", "generated/torch.linalg.inv", "generated/torch.linalg.inv_ex", "generated/torch.linalg.ldl_factor", "generated/torch.linalg.ldl_factor_ex", "generated/torch.linalg.ldl_solve", "generated/torch.linalg.lstsq", "generated/torch.linalg.lu", "generated/torch.linalg.lu_factor", "generated/torch.linalg.lu_factor_ex", "generated/torch.linalg.lu_solve", "generated/torch.linalg.matmul", "generated/torch.linalg.matrix_exp", 
"generated/torch.linalg.matrix_norm", "generated/torch.linalg.matrix_power", "generated/torch.linalg.matrix_rank", "generated/torch.linalg.multi_dot", "generated/torch.linalg.norm", "generated/torch.linalg.pinv", "generated/torch.linalg.qr", "generated/torch.linalg.slogdet", "generated/torch.linalg.solve", "generated/torch.linalg.solve_ex", "generated/torch.linalg.solve_triangular", "generated/torch.linalg.svd", "generated/torch.linalg.svdvals", "generated/torch.linalg.tensorinv", "generated/torch.linalg.tensorsolve", "generated/torch.linalg.vander", "generated/torch.linalg.vecdot", "generated/torch.linalg.vector_norm", "generated/torch.linspace", "generated/torch.load", "generated/torch.lobpcg", "generated/torch.log", "generated/torch.log10", "generated/torch.log1p", "generated/torch.log2", "generated/torch.logaddexp", "generated/torch.logaddexp2", "generated/torch.logcumsumexp", "generated/torch.logdet", "generated/torch.logical_and", "generated/torch.logical_not", "generated/torch.logical_or", "generated/torch.logical_xor", "generated/torch.logit", "generated/torch.logspace", "generated/torch.logsumexp", "generated/torch.lt", "generated/torch.lu", "generated/torch.lu_solve", "generated/torch.lu_unpack", "generated/torch.manual_seed", "generated/torch.masked_select", "generated/torch.matmul", "generated/torch.matrix_exp", "generated/torch.matrix_power", "generated/torch.max", "generated/torch.maximum", "generated/torch.mean", "generated/torch.median", "generated/torch.meshgrid", "generated/torch.min", "generated/torch.minimum", "generated/torch.mm", "generated/torch.mode", "generated/torch.moveaxis", "generated/torch.movedim", "generated/torch.mps.current_allocated_memory", "generated/torch.mps.device_count", "generated/torch.mps.driver_allocated_memory", "generated/torch.mps.empty_cache", "generated/torch.mps.event.Event", "generated/torch.mps.get_rng_state", "generated/torch.mps.manual_seed", "generated/torch.mps.profiler.profile", "generated/torch.mps.profiler.start", "generated/torch.mps.profiler.stop", "generated/torch.mps.seed", "generated/torch.mps.set_per_process_memory_fraction", "generated/torch.mps.set_rng_state", "generated/torch.mps.synchronize", "generated/torch.msort", "generated/torch.mtia.DeferredMtiaCallError", "generated/torch.mtia.Event", "generated/torch.mtia.Stream", "generated/torch.mtia.StreamContext", "generated/torch.mtia.current_device", "generated/torch.mtia.current_stream", "generated/torch.mtia.default_stream", "generated/torch.mtia.device", "generated/torch.mtia.device_count", "generated/torch.mtia.init", "generated/torch.mtia.is_available", "generated/torch.mtia.is_initialized", "generated/torch.mtia.set_stream", "generated/torch.mtia.stream", "generated/torch.mtia.synchronize", "generated/torch.mul", "generated/torch.multinomial", "generated/torch.multiply", "generated/torch.mv", "generated/torch.mvlgamma", "generated/torch.nan_to_num", "generated/torch.nanmean", "generated/torch.nanmedian", "generated/torch.nanquantile", "generated/torch.nansum", "generated/torch.narrow", "generated/torch.narrow_copy", "generated/torch.ne", "generated/torch.neg", "generated/torch.negative", "generated/torch.nextafter", "generated/torch.nn.AdaptiveAvgPool1d", "generated/torch.nn.AdaptiveAvgPool2d", "generated/torch.nn.AdaptiveAvgPool3d", "generated/torch.nn.AdaptiveLogSoftmaxWithLoss", "generated/torch.nn.AdaptiveMaxPool1d", "generated/torch.nn.AdaptiveMaxPool2d", "generated/torch.nn.AdaptiveMaxPool3d", "generated/torch.nn.AlphaDropout", "generated/torch.nn.AvgPool1d", 
"generated/torch.nn.AvgPool2d", "generated/torch.nn.AvgPool3d", "generated/torch.nn.BCELoss", "generated/torch.nn.BCEWithLogitsLoss", "generated/torch.nn.BatchNorm1d", "generated/torch.nn.BatchNorm2d", "generated/torch.nn.BatchNorm3d", "generated/torch.nn.Bilinear", "generated/torch.nn.CELU", "generated/torch.nn.CTCLoss", "generated/torch.nn.ChannelShuffle", "generated/torch.nn.CircularPad1d", "generated/torch.nn.CircularPad2d", "generated/torch.nn.CircularPad3d", "generated/torch.nn.ConstantPad1d", "generated/torch.nn.ConstantPad2d", "generated/torch.nn.ConstantPad3d", "generated/torch.nn.Conv1d", "generated/torch.nn.Conv2d", "generated/torch.nn.Conv3d", "generated/torch.nn.ConvTranspose1d", "generated/torch.nn.ConvTranspose2d", "generated/torch.nn.ConvTranspose3d", "generated/torch.nn.CosineEmbeddingLoss", "generated/torch.nn.CosineSimilarity", "generated/torch.nn.CrossEntropyLoss", "generated/torch.nn.DataParallel", "generated/torch.nn.Dropout", "generated/torch.nn.Dropout1d", "generated/torch.nn.Dropout2d", "generated/torch.nn.Dropout3d", "generated/torch.nn.ELU", "generated/torch.nn.Embedding", "generated/torch.nn.EmbeddingBag", "generated/torch.nn.FeatureAlphaDropout", "generated/torch.nn.Flatten", "generated/torch.nn.Fold", "generated/torch.nn.FractionalMaxPool2d", "generated/torch.nn.FractionalMaxPool3d", "generated/torch.nn.GELU", "generated/torch.nn.GLU", "generated/torch.nn.GRU", "generated/torch.nn.GRUCell", "generated/torch.nn.GaussianNLLLoss", "generated/torch.nn.GroupNorm", "generated/torch.nn.Hardshrink", "generated/torch.nn.Hardsigmoid", "generated/torch.nn.Hardswish", "generated/torch.nn.Hardtanh", "generated/torch.nn.HingeEmbeddingLoss", "generated/torch.nn.HuberLoss", "generated/torch.nn.Identity", "generated/torch.nn.InstanceNorm1d", "generated/torch.nn.InstanceNorm2d", "generated/torch.nn.InstanceNorm3d", "generated/torch.nn.KLDivLoss", "generated/torch.nn.L1Loss", "generated/torch.nn.LPPool1d", "generated/torch.nn.LPPool2d", "generated/torch.nn.LPPool3d", "generated/torch.nn.LSTM", "generated/torch.nn.LSTMCell", "generated/torch.nn.LayerNorm", "generated/torch.nn.LazyBatchNorm1d", "generated/torch.nn.LazyBatchNorm2d", "generated/torch.nn.LazyBatchNorm3d", "generated/torch.nn.LazyConv1d", "generated/torch.nn.LazyConv2d", "generated/torch.nn.LazyConv3d", "generated/torch.nn.LazyConvTranspose1d", "generated/torch.nn.LazyConvTranspose2d", "generated/torch.nn.LazyConvTranspose3d", "generated/torch.nn.LazyInstanceNorm1d", "generated/torch.nn.LazyInstanceNorm2d", "generated/torch.nn.LazyInstanceNorm3d", "generated/torch.nn.LazyLinear", "generated/torch.nn.LeakyReLU", "generated/torch.nn.Linear", "generated/torch.nn.LocalResponseNorm", "generated/torch.nn.LogSigmoid", "generated/torch.nn.LogSoftmax", "generated/torch.nn.MSELoss", "generated/torch.nn.MarginRankingLoss", "generated/torch.nn.MaxPool1d", "generated/torch.nn.MaxPool2d", "generated/torch.nn.MaxPool3d", "generated/torch.nn.MaxUnpool1d", "generated/torch.nn.MaxUnpool2d", "generated/torch.nn.MaxUnpool3d", "generated/torch.nn.Mish", "generated/torch.nn.Module", "generated/torch.nn.ModuleDict", "generated/torch.nn.ModuleList", "generated/torch.nn.MultiLabelMarginLoss", "generated/torch.nn.MultiLabelSoftMarginLoss", "generated/torch.nn.MultiMarginLoss", "generated/torch.nn.MultiheadAttention", "generated/torch.nn.NLLLoss", "generated/torch.nn.PReLU", "generated/torch.nn.PairwiseDistance", "generated/torch.nn.ParameterDict", "generated/torch.nn.ParameterList", "generated/torch.nn.PixelShuffle", 
"generated/torch.nn.PixelUnshuffle", "generated/torch.nn.PoissonNLLLoss", "generated/torch.nn.RMSNorm", "generated/torch.nn.RNN", "generated/torch.nn.RNNBase", "generated/torch.nn.RNNCell", "generated/torch.nn.RReLU", "generated/torch.nn.ReLU", "generated/torch.nn.ReLU6", "generated/torch.nn.ReflectionPad1d", "generated/torch.nn.ReflectionPad2d", "generated/torch.nn.ReflectionPad3d", "generated/torch.nn.ReplicationPad1d", "generated/torch.nn.ReplicationPad2d", "generated/torch.nn.ReplicationPad3d", "generated/torch.nn.SELU", "generated/torch.nn.Sequential", "generated/torch.nn.SiLU", "generated/torch.nn.Sigmoid", "generated/torch.nn.SmoothL1Loss", "generated/torch.nn.SoftMarginLoss", "generated/torch.nn.Softmax", "generated/torch.nn.Softmax2d", "generated/torch.nn.Softmin", "generated/torch.nn.Softplus", "generated/torch.nn.Softshrink", "generated/torch.nn.Softsign", "generated/torch.nn.SyncBatchNorm", "generated/torch.nn.Tanh", "generated/torch.nn.Tanhshrink", "generated/torch.nn.Threshold", "generated/torch.nn.Transformer", "generated/torch.nn.TransformerDecoder", "generated/torch.nn.TransformerDecoderLayer", "generated/torch.nn.TransformerEncoder", "generated/torch.nn.TransformerEncoderLayer", "generated/torch.nn.TripletMarginLoss", "generated/torch.nn.TripletMarginWithDistanceLoss", "generated/torch.nn.Unflatten", "generated/torch.nn.Unfold", "generated/torch.nn.Upsample", "generated/torch.nn.UpsamplingBilinear2d", "generated/torch.nn.UpsamplingNearest2d", "generated/torch.nn.ZeroPad1d", "generated/torch.nn.ZeroPad2d", "generated/torch.nn.ZeroPad3d", "generated/torch.nn.attention.SDPBackend", "generated/torch.nn.attention.bias.CausalBias", "generated/torch.nn.attention.bias.CausalVariant", "generated/torch.nn.attention.bias.causal_lower_right", "generated/torch.nn.attention.bias.causal_upper_left", "generated/torch.nn.attention.sdpa_kernel", "generated/torch.nn.functional.adaptive_avg_pool1d", "generated/torch.nn.functional.adaptive_avg_pool2d", "generated/torch.nn.functional.adaptive_avg_pool3d", "generated/torch.nn.functional.adaptive_max_pool1d", "generated/torch.nn.functional.adaptive_max_pool2d", "generated/torch.nn.functional.adaptive_max_pool3d", "generated/torch.nn.functional.affine_grid", "generated/torch.nn.functional.alpha_dropout", "generated/torch.nn.functional.avg_pool1d", "generated/torch.nn.functional.avg_pool2d", "generated/torch.nn.functional.avg_pool3d", "generated/torch.nn.functional.batch_norm", "generated/torch.nn.functional.bilinear", "generated/torch.nn.functional.binary_cross_entropy", "generated/torch.nn.functional.binary_cross_entropy_with_logits", "generated/torch.nn.functional.celu", "generated/torch.nn.functional.conv1d", "generated/torch.nn.functional.conv2d", "generated/torch.nn.functional.conv3d", "generated/torch.nn.functional.conv_transpose1d", "generated/torch.nn.functional.conv_transpose2d", "generated/torch.nn.functional.conv_transpose3d", "generated/torch.nn.functional.cosine_embedding_loss", "generated/torch.nn.functional.cosine_similarity", "generated/torch.nn.functional.cross_entropy", "generated/torch.nn.functional.ctc_loss", "generated/torch.nn.functional.dropout", "generated/torch.nn.functional.dropout1d", "generated/torch.nn.functional.dropout2d", "generated/torch.nn.functional.dropout3d", "generated/torch.nn.functional.elu", "generated/torch.nn.functional.elu_", "generated/torch.nn.functional.embedding", "generated/torch.nn.functional.embedding_bag", "generated/torch.nn.functional.feature_alpha_dropout", 
"generated/torch.nn.functional.fold", "generated/torch.nn.functional.fractional_max_pool2d", "generated/torch.nn.functional.fractional_max_pool3d", "generated/torch.nn.functional.gaussian_nll_loss", "generated/torch.nn.functional.gelu", "generated/torch.nn.functional.glu", "generated/torch.nn.functional.grid_sample", "generated/torch.nn.functional.group_norm", "generated/torch.nn.functional.gumbel_softmax", "generated/torch.nn.functional.hardshrink", "generated/torch.nn.functional.hardsigmoid", "generated/torch.nn.functional.hardswish", "generated/torch.nn.functional.hardtanh", "generated/torch.nn.functional.hardtanh_", "generated/torch.nn.functional.hinge_embedding_loss", "generated/torch.nn.functional.huber_loss", "generated/torch.nn.functional.instance_norm", "generated/torch.nn.functional.interpolate", "generated/torch.nn.functional.kl_div", "generated/torch.nn.functional.l1_loss", "generated/torch.nn.functional.layer_norm", "generated/torch.nn.functional.leaky_relu", "generated/torch.nn.functional.leaky_relu_", "generated/torch.nn.functional.linear", "generated/torch.nn.functional.local_response_norm", "generated/torch.nn.functional.log_softmax", "generated/torch.nn.functional.logsigmoid", "generated/torch.nn.functional.lp_pool1d", "generated/torch.nn.functional.lp_pool2d", "generated/torch.nn.functional.lp_pool3d", "generated/torch.nn.functional.margin_ranking_loss", "generated/torch.nn.functional.max_pool1d", "generated/torch.nn.functional.max_pool2d", "generated/torch.nn.functional.max_pool3d", "generated/torch.nn.functional.max_unpool1d", "generated/torch.nn.functional.max_unpool2d", "generated/torch.nn.functional.max_unpool3d", "generated/torch.nn.functional.mish", "generated/torch.nn.functional.mse_loss", "generated/torch.nn.functional.multi_margin_loss", "generated/torch.nn.functional.multilabel_margin_loss", "generated/torch.nn.functional.multilabel_soft_margin_loss", "generated/torch.nn.functional.nll_loss", "generated/torch.nn.functional.normalize", "generated/torch.nn.functional.one_hot", "generated/torch.nn.functional.pad", "generated/torch.nn.functional.pairwise_distance", "generated/torch.nn.functional.pdist", "generated/torch.nn.functional.pixel_shuffle", "generated/torch.nn.functional.pixel_unshuffle", "generated/torch.nn.functional.poisson_nll_loss", "generated/torch.nn.functional.prelu", "generated/torch.nn.functional.relu", "generated/torch.nn.functional.relu6", "generated/torch.nn.functional.relu_", "generated/torch.nn.functional.rms_norm", "generated/torch.nn.functional.rrelu", "generated/torch.nn.functional.rrelu_", "generated/torch.nn.functional.scaled_dot_product_attention", "generated/torch.nn.functional.selu", "generated/torch.nn.functional.sigmoid", "generated/torch.nn.functional.silu", "generated/torch.nn.functional.smooth_l1_loss", "generated/torch.nn.functional.soft_margin_loss", "generated/torch.nn.functional.softmax", "generated/torch.nn.functional.softmin", "generated/torch.nn.functional.softplus", "generated/torch.nn.functional.softshrink", "generated/torch.nn.functional.softsign", "generated/torch.nn.functional.tanh", "generated/torch.nn.functional.tanhshrink", "generated/torch.nn.functional.threshold", "generated/torch.nn.functional.threshold_", "generated/torch.nn.functional.torch.nn.parallel.data_parallel", "generated/torch.nn.functional.triplet_margin_loss", "generated/torch.nn.functional.triplet_margin_with_distance_loss", "generated/torch.nn.functional.unfold", "generated/torch.nn.functional.upsample", 
"generated/torch.nn.functional.upsample_bilinear", "generated/torch.nn.functional.upsample_nearest", "generated/torch.nn.modules.lazy.LazyModuleMixin", "generated/torch.nn.modules.module.register_module_backward_hook", "generated/torch.nn.modules.module.register_module_buffer_registration_hook", "generated/torch.nn.modules.module.register_module_forward_hook", "generated/torch.nn.modules.module.register_module_forward_pre_hook", "generated/torch.nn.modules.module.register_module_full_backward_hook", "generated/torch.nn.modules.module.register_module_full_backward_pre_hook", "generated/torch.nn.modules.module.register_module_module_registration_hook", "generated/torch.nn.modules.module.register_module_parameter_registration_hook", "generated/torch.nn.modules.normalization.RMSNorm", "generated/torch.nn.parallel.DistributedDataParallel", "generated/torch.nn.parameter.Parameter", "generated/torch.nn.parameter.UninitializedBuffer", "generated/torch.nn.parameter.UninitializedParameter", "generated/torch.nn.utils.clip_grad_norm", "generated/torch.nn.utils.clip_grad_norm_", "generated/torch.nn.utils.clip_grad_value_", "generated/torch.nn.utils.convert_conv2d_weight_memory_format", "generated/torch.nn.utils.convert_conv3d_weight_memory_format", "generated/torch.nn.utils.fuse_conv_bn_eval", "generated/torch.nn.utils.fuse_conv_bn_weights", "generated/torch.nn.utils.fuse_linear_bn_eval", "generated/torch.nn.utils.fuse_linear_bn_weights", "generated/torch.nn.utils.parameters_to_vector", "generated/torch.nn.utils.parametrizations.orthogonal", "generated/torch.nn.utils.parametrizations.spectral_norm", "generated/torch.nn.utils.parametrizations.weight_norm", "generated/torch.nn.utils.parametrize.ParametrizationList", "generated/torch.nn.utils.parametrize.cached", "generated/torch.nn.utils.parametrize.is_parametrized", "generated/torch.nn.utils.parametrize.register_parametrization", "generated/torch.nn.utils.parametrize.remove_parametrizations", "generated/torch.nn.utils.prune.BasePruningMethod", "generated/torch.nn.utils.prune.CustomFromMask", "generated/torch.nn.utils.prune.Identity", "generated/torch.nn.utils.prune.L1Unstructured", "generated/torch.nn.utils.prune.LnStructured", "generated/torch.nn.utils.prune.PruningContainer", "generated/torch.nn.utils.prune.RandomStructured", "generated/torch.nn.utils.prune.RandomUnstructured", "generated/torch.nn.utils.prune.custom_from_mask", "generated/torch.nn.utils.prune.global_unstructured", "generated/torch.nn.utils.prune.identity", "generated/torch.nn.utils.prune.is_pruned", "generated/torch.nn.utils.prune.l1_unstructured", "generated/torch.nn.utils.prune.ln_structured", "generated/torch.nn.utils.prune.random_structured", "generated/torch.nn.utils.prune.random_unstructured", "generated/torch.nn.utils.prune.remove", "generated/torch.nn.utils.remove_spectral_norm", "generated/torch.nn.utils.remove_weight_norm", "generated/torch.nn.utils.rnn.PackedSequence", "generated/torch.nn.utils.rnn.pack_padded_sequence", "generated/torch.nn.utils.rnn.pack_sequence", "generated/torch.nn.utils.rnn.pad_packed_sequence", "generated/torch.nn.utils.rnn.pad_sequence", "generated/torch.nn.utils.rnn.unpack_sequence", "generated/torch.nn.utils.rnn.unpad_sequence", "generated/torch.nn.utils.skip_init", "generated/torch.nn.utils.spectral_norm", "generated/torch.nn.utils.stateless.functional_call", "generated/torch.nn.utils.vector_to_parameters", "generated/torch.nn.utils.weight_norm", "generated/torch.no_grad", "generated/torch.nonzero", "generated/torch.norm", 
"generated/torch.normal", "generated/torch.not_equal", "generated/torch.numel", "generated/torch.ones", "generated/torch.ones_like", "generated/torch.onnx.JitScalarType", "generated/torch.onnx.verification.GraphInfo", "generated/torch.onnx.verification.VerificationOptions", "generated/torch.optim.ASGD", "generated/torch.optim.Adadelta", "generated/torch.optim.Adagrad", "generated/torch.optim.Adam", "generated/torch.optim.AdamW", "generated/torch.optim.Adamax", "generated/torch.optim.LBFGS", "generated/torch.optim.NAdam", "generated/torch.optim.Optimizer.add_param_group", "generated/torch.optim.Optimizer.load_state_dict", "generated/torch.optim.Optimizer.state_dict", "generated/torch.optim.Optimizer.step", "generated/torch.optim.Optimizer.zero_grad", "generated/torch.optim.RAdam", "generated/torch.optim.RMSprop", "generated/torch.optim.Rprop", "generated/torch.optim.SGD", "generated/torch.optim.SparseAdam", "generated/torch.optim.lr_scheduler.ChainedScheduler", "generated/torch.optim.lr_scheduler.ConstantLR", "generated/torch.optim.lr_scheduler.CosineAnnealingLR", "generated/torch.optim.lr_scheduler.CosineAnnealingWarmRestarts", "generated/torch.optim.lr_scheduler.CyclicLR", "generated/torch.optim.lr_scheduler.ExponentialLR", "generated/torch.optim.lr_scheduler.LambdaLR", "generated/torch.optim.lr_scheduler.LinearLR", "generated/torch.optim.lr_scheduler.MultiStepLR", "generated/torch.optim.lr_scheduler.MultiplicativeLR", "generated/torch.optim.lr_scheduler.OneCycleLR", "generated/torch.optim.lr_scheduler.PolynomialLR", "generated/torch.optim.lr_scheduler.ReduceLROnPlateau", "generated/torch.optim.lr_scheduler.SequentialLR", "generated/torch.optim.lr_scheduler.StepLR", "generated/torch.orgqr", "generated/torch.ormqr", "generated/torch.outer", "generated/torch.pca_lowrank", "generated/torch.permute", "generated/torch.pinverse", "generated/torch.poisson", "generated/torch.polar", "generated/torch.polygamma", "generated/torch.positive", "generated/torch.pow", "generated/torch.prod", "generated/torch.promote_types", "generated/torch.qr", "generated/torch.quantile", "generated/torch.quantize_per_channel", "generated/torch.quantize_per_tensor", "generated/torch.quantized_batch_norm", "generated/torch.quantized_max_pool1d", "generated/torch.quantized_max_pool2d", "generated/torch.quasirandom.SobolEngine", "generated/torch.rad2deg", "generated/torch.rand", "generated/torch.rand_like", "generated/torch.randint", "generated/torch.randint_like", "generated/torch.randn", "generated/torch.randn_like", "generated/torch.randperm", "generated/torch.range", "generated/torch.ravel", "generated/torch.real", "generated/torch.reciprocal", "generated/torch.remainder", "generated/torch.renorm", "generated/torch.repeat_interleave", "generated/torch.reshape", "generated/torch.resolve_conj", "generated/torch.resolve_neg", "generated/torch.result_type", "generated/torch.roll", "generated/torch.rot90", "generated/torch.round", "generated/torch.row_stack", "generated/torch.rsqrt", "generated/torch.save", "generated/torch.scatter", "generated/torch.scatter_add", "generated/torch.scatter_reduce", "generated/torch.searchsorted", "generated/torch.seed", "generated/torch.select", "generated/torch.select_scatter", "generated/torch.set_default_device", "generated/torch.set_default_dtype", "generated/torch.set_default_tensor_type", "generated/torch.set_deterministic_debug_mode", "generated/torch.set_float32_matmul_precision", "generated/torch.set_flush_denormal", "generated/torch.set_num_interop_threads", 
"generated/torch.set_num_threads", "generated/torch.set_printoptions", "generated/torch.set_rng_state", "generated/torch.set_warn_always", "generated/torch.sgn", "generated/torch.sigmoid", "generated/torch.sign", "generated/torch.signal.windows.bartlett", "generated/torch.signal.windows.blackman", "generated/torch.signal.windows.cosine", "generated/torch.signal.windows.exponential", "generated/torch.signal.windows.gaussian", "generated/torch.signal.windows.general_cosine", "generated/torch.signal.windows.general_hamming", "generated/torch.signal.windows.hamming", "generated/torch.signal.windows.hann", "generated/torch.signal.windows.kaiser", "generated/torch.signal.windows.nuttall", "generated/torch.signbit", "generated/torch.sin", "generated/torch.sinc", "generated/torch.sinh", "generated/torch.slice_scatter", "generated/torch.slogdet", "generated/torch.smm", "generated/torch.softmax", "generated/torch.sort", "generated/torch.sparse.addmm", "generated/torch.sparse.as_sparse_gradcheck", "generated/torch.sparse.check_sparse_tensor_invariants", "generated/torch.sparse.log_softmax", "generated/torch.sparse.mm", "generated/torch.sparse.sampled_addmm", "generated/torch.sparse.softmax", "generated/torch.sparse.spdiags", "generated/torch.sparse.sum", "generated/torch.sparse_bsc_tensor", "generated/torch.sparse_bsr_tensor", "generated/torch.sparse_compressed_tensor", "generated/torch.sparse_coo_tensor", "generated/torch.sparse_csc_tensor", "generated/torch.sparse_csr_tensor", "generated/torch.split", "generated/torch.sqrt", "generated/torch.square", "generated/torch.squeeze", "generated/torch.sspaddmm", "generated/torch.stack", "generated/torch.std", "generated/torch.std_mean", "generated/torch.stft", "generated/torch.sub", "generated/torch.subtract", "generated/torch.sum", "generated/torch.svd", "generated/torch.svd_lowrank", "generated/torch.swapaxes", "generated/torch.swapdims", "generated/torch.sym_float", "generated/torch.sym_int", "generated/torch.sym_ite", "generated/torch.sym_max", "generated/torch.sym_min", "generated/torch.sym_not", "generated/torch.t", "generated/torch.take", "generated/torch.take_along_dim", "generated/torch.tan", "generated/torch.tanh", "generated/torch.tensor", "generated/torch.tensor_split", "generated/torch.tensordot", "generated/torch.tile", "generated/torch.topk", "generated/torch.trace", "generated/torch.transpose", "generated/torch.trapezoid", "generated/torch.trapz", "generated/torch.triangular_solve", "generated/torch.tril", "generated/torch.tril_indices", "generated/torch.triu", "generated/torch.triu_indices", "generated/torch.true_divide", "generated/torch.trunc", "generated/torch.unbind", "generated/torch.unflatten", "generated/torch.unique", "generated/torch.unique_consecutive", "generated/torch.unravel_index", "generated/torch.unsqueeze", "generated/torch.use_deterministic_algorithms", "generated/torch.utils.generate_methods_for_privateuse1_backend", "generated/torch.utils.get_cpp_backtrace", "generated/torch.utils.rename_privateuse1_backend", "generated/torch.utils.set_module", "generated/torch.utils.swap_tensors", "generated/torch.vander", "generated/torch.var", "generated/torch.var_mean", "generated/torch.vdot", "generated/torch.view_as_complex", "generated/torch.view_as_real", "generated/torch.vmap", "generated/torch.vsplit", "generated/torch.vstack", "generated/torch.where", "generated/torch.xlogy", "generated/torch.xpu.Event", "generated/torch.xpu.Stream", "generated/torch.xpu.StreamContext", "generated/torch.xpu.current_device", 
"generated/torch.xpu.current_stream", "generated/torch.xpu.device", "generated/torch.xpu.device_count", "generated/torch.xpu.device_of", "generated/torch.xpu.empty_cache", "generated/torch.xpu.get_device_capability", "generated/torch.xpu.get_device_name", "generated/torch.xpu.get_device_properties", "generated/torch.xpu.get_rng_state", "generated/torch.xpu.get_rng_state_all", "generated/torch.xpu.init", "generated/torch.xpu.initial_seed", "generated/torch.xpu.is_available", "generated/torch.xpu.is_initialized", "generated/torch.xpu.manual_seed", "generated/torch.xpu.manual_seed_all", "generated/torch.xpu.seed", "generated/torch.xpu.seed_all", "generated/torch.xpu.set_device", "generated/torch.xpu.set_rng_state", "generated/torch.xpu.set_rng_state_all", "generated/torch.xpu.set_stream", "generated/torch.xpu.stream", "generated/torch.xpu.synchronize", "generated/torch.zeros", "generated/torch.zeros_like", "hub", "index", "jit", "jit_builtin_functions", "jit_language_reference", "jit_language_reference_v2", "jit_python_reference", "jit_unsupported", "jit_utils", "library", "linalg", "logging", "masked", "meta", "miscellaneous_environment_variables", "mobile_optimizer", "model_zoo", "module_tracker", "monitor", "mps", "mtia", "multiprocessing", "name_inference", "named_tensor", "nested", "nn", "nn.attention", "nn.attention.bias", "nn.functional", "nn.init", "notes/amp_examples", "notes/autograd", "notes/broadcasting", "notes/cpu_threading_torchscript_inference", "notes/cuda", "notes/custom_operators", "notes/ddp", "notes/extending", "notes/extending.func", "notes/faq", "notes/fsdp", "notes/gradcheck", "notes/hip", "notes/large_scale_deployments", "notes/modules", "notes/mps", "notes/multiprocessing", "notes/numerical_accuracy", "notes/randomness", "notes/serialization", "notes/windows", "onnx", "onnx_dynamo", "onnx_dynamo_onnxruntime_backend", "onnx_torchscript", "onnx_torchscript_supported_aten_ops", "optim", "package", "profiler", "quantization", "quantization-accuracy-debugging", "quantization-backend-configuration", "quantization-support", "random", "rpc", "rpc/distributed_autograd", "rpc/rref", "signal", "size", "sparse", "special", "storage", "tensor_attributes", "tensor_view", "tensorboard", "tensors", "testing", "threading_environment_variables", "torch", "torch.ao.ns._numeric_suite", "torch.ao.ns._numeric_suite_fx", "torch.compiler", "torch.compiler_aot_inductor", "torch.compiler_api", "torch.compiler_best_practices_for_backends", "torch.compiler_cudagraph_trees", "torch.compiler_custom_backends", "torch.compiler_dynamic_shapes", "torch.compiler_dynamo_deepdive", "torch.compiler_dynamo_overview", "torch.compiler_fake_tensor", "torch.compiler_faq", "torch.compiler_fine_grain_apis", "torch.compiler_get_started", "torch.compiler_inductor_profiling", "torch.compiler_ir", "torch.compiler_nn_module", "torch.compiler_performance_dashboard", "torch.compiler_profiling_torch_compile", "torch.compiler_transformations", "torch.compiler_troubleshooting", "torch.overrides", "torch_cuda_memory", "torch_environment_variables", "torch_nccl_environment_variables", "type_info", "utils", "xpu"], "filenames": ["amp.rst", "autograd.rst", "backends.rst", "benchmark_utils.rst", "bottleneck.rst", "checkpoint.rst", "community/build_ci_governance.rst", "community/contribution_guide.rst", "community/design.rst", "community/governance.rst", "community/persons_of_interest.rst", "complex_numbers.rst", "cond.rst", "config_mod.rst", "cpp_extension.rst", "cpp_index.rst", "cpu.rst", "cuda.rst", "cuda._sanitizer.rst", 
"cuda.tunable.rst", "cuda_environment_variables.rst", "cudnn_persistent_rnn.rst", "cudnn_rnn_determinism.rst", "data.rst", "ddp_comm_hooks.rst", "debugging_environment_variables.rst", "deploy.rst", "deterministic.rst", "distributed.rst", "distributed.algorithms.join.rst", "distributed.checkpoint.rst", "distributed.elastic.rst", "distributed.optim.rst", "distributed.pipelining.rst", "distributed.tensor.parallel.rst", "distributions.rst", "dlpack.rst", "elastic/agent.rst", "elastic/control_plane.rst", "elastic/customization.rst", "elastic/errors.rst", "elastic/events.rst", "elastic/examples.rst", "elastic/kubernetes.rst", "elastic/metrics.rst", "elastic/multiprocessing.rst", "elastic/quickstart.rst", "elastic/rendezvous.rst", "elastic/run.rst", "elastic/subprocess_handler.rst", "elastic/timer.rst", "elastic/train_script.rst", "export.rst", "export.ir_spec.rst", "fft.rst", "fsdp.rst", "func.rst", "func.api.rst", "func.batch_norm.rst", "func.migrating.rst", "func.ux_limitations.rst", "func.whirlwind_tour.rst", "future_mod.rst", "futures.rst", "fx.rst", "fx.experimental.rst", "generated/exportdb/index.rst", "generated/exportdb/python.assert.rst", "generated/exportdb/python.builtin.rst", "generated/exportdb/python.closure.rst", "generated/exportdb/python.context-manager.rst", "generated/exportdb/python.control-flow.rst", "generated/exportdb/python.data-structure.rst", "generated/exportdb/python.object-model.rst", "generated/exportdb/torch.cond.rst", "generated/exportdb/torch.dynamic-shape.rst", "generated/exportdb/torch.dynamic-value.rst", "generated/exportdb/torch.escape-hatch.rst", "generated/exportdb/torch.map.rst", "generated/exportdb/torch.mutation.rst", "generated/exportdb/torch.operator.rst", "generated/onnx_dynamo_diagnostics_rules/FXE0007:fx-graph-to-onnx.md", "generated/onnx_dynamo_diagnostics_rules/FXE0008:fx-node-to-onnx.md", "generated/onnx_dynamo_diagnostics_rules/FXE0010:fx-pass.md", "generated/onnx_dynamo_diagnostics_rules/FXE0011:no-symbolic-function-for-call-function.md", "generated/onnx_dynamo_diagnostics_rules/FXE0012:unsupported-fx-node-analysis.md", "generated/onnx_dynamo_diagnostics_rules/FXE0013:op-level-debugging.md", "generated/onnx_dynamo_diagnostics_rules/FXE0014:find-opschema-matched-symbolic-function.md", "generated/onnx_dynamo_diagnostics_rules/FXE0015:fx-node-insert-type-promotion.md", "generated/onnx_dynamo_diagnostics_rules/FXE0016:find-operator-overloads-in-onnx-registry.md", "generated/torch.Generator.rst", "generated/torch.Tensor.abs.rst", "generated/torch.Tensor.abs_.rst", "generated/torch.Tensor.absolute.rst", "generated/torch.Tensor.absolute_.rst", "generated/torch.Tensor.acos.rst", "generated/torch.Tensor.acos_.rst", "generated/torch.Tensor.acosh.rst", "generated/torch.Tensor.acosh_.rst", "generated/torch.Tensor.add.rst", "generated/torch.Tensor.add_.rst", "generated/torch.Tensor.addbmm.rst", "generated/torch.Tensor.addbmm_.rst", "generated/torch.Tensor.addcdiv.rst", "generated/torch.Tensor.addcdiv_.rst", "generated/torch.Tensor.addcmul.rst", "generated/torch.Tensor.addcmul_.rst", "generated/torch.Tensor.addmm.rst", "generated/torch.Tensor.addmm_.rst", "generated/torch.Tensor.addmv.rst", "generated/torch.Tensor.addmv_.rst", "generated/torch.Tensor.addr.rst", "generated/torch.Tensor.addr_.rst", "generated/torch.Tensor.adjoint.rst", "generated/torch.Tensor.all.rst", "generated/torch.Tensor.allclose.rst", "generated/torch.Tensor.amax.rst", "generated/torch.Tensor.amin.rst", "generated/torch.Tensor.aminmax.rst", "generated/torch.Tensor.angle.rst", 
"generated/torch.Tensor.any.rst", "generated/torch.Tensor.apply_.rst", "generated/torch.Tensor.arccos.rst", "generated/torch.Tensor.arccos_.rst", "generated/torch.Tensor.arccosh.rst", "generated/torch.Tensor.arccosh_.rst", "generated/torch.Tensor.arcsin.rst", "generated/torch.Tensor.arcsin_.rst", "generated/torch.Tensor.arcsinh.rst", "generated/torch.Tensor.arcsinh_.rst", "generated/torch.Tensor.arctan.rst", "generated/torch.Tensor.arctan2.rst", "generated/torch.Tensor.arctan2_.rst", "generated/torch.Tensor.arctan_.rst", "generated/torch.Tensor.arctanh.rst", "generated/torch.Tensor.arctanh_.rst", "generated/torch.Tensor.argmax.rst", "generated/torch.Tensor.argmin.rst", "generated/torch.Tensor.argsort.rst", "generated/torch.Tensor.argwhere.rst", "generated/torch.Tensor.as_strided.rst", "generated/torch.Tensor.as_subclass.rst", "generated/torch.Tensor.asin.rst", "generated/torch.Tensor.asin_.rst", "generated/torch.Tensor.asinh.rst", "generated/torch.Tensor.asinh_.rst", "generated/torch.Tensor.atan.rst", "generated/torch.Tensor.atan2.rst", "generated/torch.Tensor.atan2_.rst", "generated/torch.Tensor.atan_.rst", "generated/torch.Tensor.atanh.rst", "generated/torch.Tensor.atanh_.rst", "generated/torch.Tensor.backward.rst", "generated/torch.Tensor.baddbmm.rst", "generated/torch.Tensor.baddbmm_.rst", "generated/torch.Tensor.bernoulli.rst", "generated/torch.Tensor.bernoulli_.rst", "generated/torch.Tensor.bfloat16.rst", "generated/torch.Tensor.bincount.rst", "generated/torch.Tensor.bitwise_and.rst", "generated/torch.Tensor.bitwise_and_.rst", "generated/torch.Tensor.bitwise_left_shift.rst", "generated/torch.Tensor.bitwise_left_shift_.rst", "generated/torch.Tensor.bitwise_not.rst", "generated/torch.Tensor.bitwise_not_.rst", "generated/torch.Tensor.bitwise_or.rst", "generated/torch.Tensor.bitwise_or_.rst", "generated/torch.Tensor.bitwise_right_shift.rst", "generated/torch.Tensor.bitwise_right_shift_.rst", "generated/torch.Tensor.bitwise_xor.rst", "generated/torch.Tensor.bitwise_xor_.rst", "generated/torch.Tensor.bmm.rst", "generated/torch.Tensor.bool.rst", "generated/torch.Tensor.broadcast_to.rst", "generated/torch.Tensor.byte.rst", "generated/torch.Tensor.cauchy_.rst", "generated/torch.Tensor.ccol_indices.rst", "generated/torch.Tensor.cdouble.rst", "generated/torch.Tensor.ceil.rst", "generated/torch.Tensor.ceil_.rst", "generated/torch.Tensor.cfloat.rst", "generated/torch.Tensor.chalf.rst", "generated/torch.Tensor.char.rst", "generated/torch.Tensor.cholesky.rst", "generated/torch.Tensor.cholesky_inverse.rst", "generated/torch.Tensor.cholesky_solve.rst", "generated/torch.Tensor.chunk.rst", "generated/torch.Tensor.clamp.rst", "generated/torch.Tensor.clamp_.rst", "generated/torch.Tensor.clip.rst", "generated/torch.Tensor.clip_.rst", "generated/torch.Tensor.clone.rst", "generated/torch.Tensor.coalesce.rst", "generated/torch.Tensor.col_indices.rst", "generated/torch.Tensor.conj.rst", "generated/torch.Tensor.conj_physical.rst", "generated/torch.Tensor.conj_physical_.rst", "generated/torch.Tensor.contiguous.rst", "generated/torch.Tensor.copy_.rst", "generated/torch.Tensor.copysign.rst", "generated/torch.Tensor.copysign_.rst", "generated/torch.Tensor.corrcoef.rst", "generated/torch.Tensor.cos.rst", "generated/torch.Tensor.cos_.rst", "generated/torch.Tensor.cosh.rst", "generated/torch.Tensor.cosh_.rst", "generated/torch.Tensor.count_nonzero.rst", "generated/torch.Tensor.cov.rst", "generated/torch.Tensor.cpu.rst", "generated/torch.Tensor.cross.rst", "generated/torch.Tensor.crow_indices.rst", 
"generated/torch.Tensor.cuda.rst", "generated/torch.Tensor.cummax.rst", "generated/torch.Tensor.cummin.rst", "generated/torch.Tensor.cumprod.rst", "generated/torch.Tensor.cumprod_.rst", "generated/torch.Tensor.cumsum.rst", "generated/torch.Tensor.cumsum_.rst", "generated/torch.Tensor.data_ptr.rst", "generated/torch.Tensor.deg2rad.rst", "generated/torch.Tensor.dense_dim.rst", "generated/torch.Tensor.dequantize.rst", "generated/torch.Tensor.det.rst", "generated/torch.Tensor.detach.rst", "generated/torch.Tensor.detach_.rst", "generated/torch.Tensor.device.rst", "generated/torch.Tensor.diag.rst", "generated/torch.Tensor.diag_embed.rst", "generated/torch.Tensor.diagflat.rst", "generated/torch.Tensor.diagonal.rst", "generated/torch.Tensor.diagonal_scatter.rst", "generated/torch.Tensor.diff.rst", "generated/torch.Tensor.digamma.rst", "generated/torch.Tensor.digamma_.rst", "generated/torch.Tensor.dim.rst", "generated/torch.Tensor.dim_order.rst", "generated/torch.Tensor.dist.rst", "generated/torch.Tensor.div.rst", "generated/torch.Tensor.div_.rst", "generated/torch.Tensor.divide.rst", "generated/torch.Tensor.divide_.rst", "generated/torch.Tensor.dot.rst", "generated/torch.Tensor.double.rst", "generated/torch.Tensor.dsplit.rst", "generated/torch.Tensor.element_size.rst", "generated/torch.Tensor.eq.rst", "generated/torch.Tensor.eq_.rst", "generated/torch.Tensor.equal.rst", "generated/torch.Tensor.erf.rst", "generated/torch.Tensor.erf_.rst", "generated/torch.Tensor.erfc.rst", "generated/torch.Tensor.erfc_.rst", "generated/torch.Tensor.erfinv.rst", "generated/torch.Tensor.erfinv_.rst", "generated/torch.Tensor.exp.rst", "generated/torch.Tensor.exp_.rst", "generated/torch.Tensor.expand.rst", "generated/torch.Tensor.expand_as.rst", "generated/torch.Tensor.expm1.rst", "generated/torch.Tensor.expm1_.rst", "generated/torch.Tensor.exponential_.rst", "generated/torch.Tensor.fill_.rst", "generated/torch.Tensor.fill_diagonal_.rst", "generated/torch.Tensor.fix.rst", "generated/torch.Tensor.fix_.rst", "generated/torch.Tensor.flatten.rst", "generated/torch.Tensor.flip.rst", "generated/torch.Tensor.fliplr.rst", "generated/torch.Tensor.flipud.rst", "generated/torch.Tensor.float.rst", "generated/torch.Tensor.float_power.rst", "generated/torch.Tensor.float_power_.rst", "generated/torch.Tensor.floor.rst", "generated/torch.Tensor.floor_.rst", "generated/torch.Tensor.floor_divide.rst", "generated/torch.Tensor.floor_divide_.rst", "generated/torch.Tensor.fmax.rst", "generated/torch.Tensor.fmin.rst", "generated/torch.Tensor.fmod.rst", "generated/torch.Tensor.fmod_.rst", "generated/torch.Tensor.frac.rst", "generated/torch.Tensor.frac_.rst", "generated/torch.Tensor.frexp.rst", "generated/torch.Tensor.gather.rst", "generated/torch.Tensor.gcd.rst", "generated/torch.Tensor.gcd_.rst", "generated/torch.Tensor.ge.rst", "generated/torch.Tensor.ge_.rst", "generated/torch.Tensor.geometric_.rst", "generated/torch.Tensor.geqrf.rst", "generated/torch.Tensor.ger.rst", "generated/torch.Tensor.get_device.rst", "generated/torch.Tensor.grad.rst", "generated/torch.Tensor.greater.rst", "generated/torch.Tensor.greater_.rst", "generated/torch.Tensor.greater_equal.rst", "generated/torch.Tensor.greater_equal_.rst", "generated/torch.Tensor.gt.rst", "generated/torch.Tensor.gt_.rst", "generated/torch.Tensor.half.rst", "generated/torch.Tensor.hardshrink.rst", "generated/torch.Tensor.heaviside.rst", "generated/torch.Tensor.histc.rst", "generated/torch.Tensor.histogram.rst", "generated/torch.Tensor.hsplit.rst", "generated/torch.Tensor.hypot.rst", 
"generated/torch.Tensor.hypot_.rst", "generated/torch.Tensor.i0.rst", "generated/torch.Tensor.i0_.rst", "generated/torch.Tensor.igamma.rst", "generated/torch.Tensor.igamma_.rst", "generated/torch.Tensor.igammac.rst", "generated/torch.Tensor.igammac_.rst", "generated/torch.Tensor.imag.rst", "generated/torch.Tensor.index_add.rst", "generated/torch.Tensor.index_add_.rst", "generated/torch.Tensor.index_copy.rst", "generated/torch.Tensor.index_copy_.rst", "generated/torch.Tensor.index_fill.rst", "generated/torch.Tensor.index_fill_.rst", "generated/torch.Tensor.index_put.rst", "generated/torch.Tensor.index_put_.rst", "generated/torch.Tensor.index_reduce.rst", "generated/torch.Tensor.index_reduce_.rst", "generated/torch.Tensor.index_select.rst", "generated/torch.Tensor.indices.rst", "generated/torch.Tensor.inner.rst", "generated/torch.Tensor.int.rst", "generated/torch.Tensor.int_repr.rst", "generated/torch.Tensor.inverse.rst", "generated/torch.Tensor.is_coalesced.rst", "generated/torch.Tensor.is_complex.rst", "generated/torch.Tensor.is_conj.rst", "generated/torch.Tensor.is_contiguous.rst", "generated/torch.Tensor.is_cuda.rst", "generated/torch.Tensor.is_floating_point.rst", "generated/torch.Tensor.is_inference.rst", "generated/torch.Tensor.is_leaf.rst", "generated/torch.Tensor.is_meta.rst", "generated/torch.Tensor.is_pinned.rst", "generated/torch.Tensor.is_quantized.rst", "generated/torch.Tensor.is_set_to.rst", "generated/torch.Tensor.is_shared.rst", "generated/torch.Tensor.is_signed.rst", "generated/torch.Tensor.is_sparse.rst", "generated/torch.Tensor.is_sparse_csr.rst", "generated/torch.Tensor.isclose.rst", "generated/torch.Tensor.isfinite.rst", "generated/torch.Tensor.isinf.rst", "generated/torch.Tensor.isnan.rst", "generated/torch.Tensor.isneginf.rst", "generated/torch.Tensor.isposinf.rst", "generated/torch.Tensor.isreal.rst", "generated/torch.Tensor.istft.rst", "generated/torch.Tensor.item.rst", "generated/torch.Tensor.itemsize.rst", "generated/torch.Tensor.kthvalue.rst", "generated/torch.Tensor.lcm.rst", "generated/torch.Tensor.lcm_.rst", "generated/torch.Tensor.ldexp.rst", "generated/torch.Tensor.ldexp_.rst", "generated/torch.Tensor.le.rst", "generated/torch.Tensor.le_.rst", "generated/torch.Tensor.lerp.rst", "generated/torch.Tensor.lerp_.rst", "generated/torch.Tensor.less.rst", "generated/torch.Tensor.less_.rst", "generated/torch.Tensor.less_equal.rst", "generated/torch.Tensor.less_equal_.rst", "generated/torch.Tensor.lgamma.rst", "generated/torch.Tensor.lgamma_.rst", "generated/torch.Tensor.log.rst", "generated/torch.Tensor.log10.rst", "generated/torch.Tensor.log10_.rst", "generated/torch.Tensor.log1p.rst", "generated/torch.Tensor.log1p_.rst", "generated/torch.Tensor.log2.rst", "generated/torch.Tensor.log2_.rst", "generated/torch.Tensor.log_.rst", "generated/torch.Tensor.log_normal_.rst", "generated/torch.Tensor.logaddexp.rst", "generated/torch.Tensor.logaddexp2.rst", "generated/torch.Tensor.logcumsumexp.rst", "generated/torch.Tensor.logdet.rst", "generated/torch.Tensor.logical_and.rst", "generated/torch.Tensor.logical_and_.rst", "generated/torch.Tensor.logical_not.rst", "generated/torch.Tensor.logical_not_.rst", "generated/torch.Tensor.logical_or.rst", "generated/torch.Tensor.logical_or_.rst", "generated/torch.Tensor.logical_xor.rst", "generated/torch.Tensor.logical_xor_.rst", "generated/torch.Tensor.logit.rst", "generated/torch.Tensor.logit_.rst", "generated/torch.Tensor.logsumexp.rst", "generated/torch.Tensor.long.rst", "generated/torch.Tensor.lt.rst", 
"generated/torch.Tensor.lt_.rst", "generated/torch.Tensor.lu.rst", "generated/torch.Tensor.lu_solve.rst", "generated/torch.Tensor.map_.rst", "generated/torch.Tensor.masked_fill.rst", "generated/torch.Tensor.masked_fill_.rst", "generated/torch.Tensor.masked_scatter.rst", "generated/torch.Tensor.masked_scatter_.rst", "generated/torch.Tensor.masked_select.rst", "generated/torch.Tensor.matmul.rst", "generated/torch.Tensor.matrix_exp.rst", "generated/torch.Tensor.matrix_power.rst", "generated/torch.Tensor.max.rst", "generated/torch.Tensor.maximum.rst", "generated/torch.Tensor.mean.rst", "generated/torch.Tensor.median.rst", "generated/torch.Tensor.min.rst", "generated/torch.Tensor.minimum.rst", "generated/torch.Tensor.mm.rst", "generated/torch.Tensor.mode.rst", "generated/torch.Tensor.module_load.rst", "generated/torch.Tensor.moveaxis.rst", "generated/torch.Tensor.movedim.rst", "generated/torch.Tensor.msort.rst", "generated/torch.Tensor.mul.rst", "generated/torch.Tensor.mul_.rst", "generated/torch.Tensor.multinomial.rst", "generated/torch.Tensor.multiply.rst", "generated/torch.Tensor.multiply_.rst", "generated/torch.Tensor.mv.rst", "generated/torch.Tensor.mvlgamma.rst", "generated/torch.Tensor.mvlgamma_.rst", "generated/torch.Tensor.nan_to_num.rst", "generated/torch.Tensor.nan_to_num_.rst", "generated/torch.Tensor.nanmean.rst", "generated/torch.Tensor.nanmedian.rst", "generated/torch.Tensor.nanquantile.rst", "generated/torch.Tensor.nansum.rst", "generated/torch.Tensor.narrow.rst", "generated/torch.Tensor.narrow_copy.rst", "generated/torch.Tensor.nbytes.rst", "generated/torch.Tensor.ndim.rst", "generated/torch.Tensor.ndimension.rst", "generated/torch.Tensor.ne.rst", "generated/torch.Tensor.ne_.rst", "generated/torch.Tensor.neg.rst", "generated/torch.Tensor.neg_.rst", "generated/torch.Tensor.negative.rst", "generated/torch.Tensor.negative_.rst", "generated/torch.Tensor.nelement.rst", "generated/torch.Tensor.new_empty.rst", "generated/torch.Tensor.new_full.rst", "generated/torch.Tensor.new_ones.rst", "generated/torch.Tensor.new_tensor.rst", "generated/torch.Tensor.new_zeros.rst", "generated/torch.Tensor.nextafter.rst", "generated/torch.Tensor.nextafter_.rst", "generated/torch.Tensor.nonzero.rst", "generated/torch.Tensor.norm.rst", "generated/torch.Tensor.normal_.rst", "generated/torch.Tensor.not_equal.rst", "generated/torch.Tensor.not_equal_.rst", "generated/torch.Tensor.numel.rst", "generated/torch.Tensor.numpy.rst", "generated/torch.Tensor.orgqr.rst", "generated/torch.Tensor.ormqr.rst", "generated/torch.Tensor.outer.rst", "generated/torch.Tensor.permute.rst", "generated/torch.Tensor.pin_memory.rst", "generated/torch.Tensor.pinverse.rst", "generated/torch.Tensor.polygamma.rst", "generated/torch.Tensor.polygamma_.rst", "generated/torch.Tensor.positive.rst", "generated/torch.Tensor.pow.rst", "generated/torch.Tensor.pow_.rst", "generated/torch.Tensor.prod.rst", "generated/torch.Tensor.put_.rst", "generated/torch.Tensor.q_per_channel_axis.rst", "generated/torch.Tensor.q_per_channel_scales.rst", "generated/torch.Tensor.q_per_channel_zero_points.rst", "generated/torch.Tensor.q_scale.rst", "generated/torch.Tensor.q_zero_point.rst", "generated/torch.Tensor.qr.rst", "generated/torch.Tensor.qscheme.rst", "generated/torch.Tensor.quantile.rst", "generated/torch.Tensor.rad2deg.rst", "generated/torch.Tensor.random_.rst", "generated/torch.Tensor.ravel.rst", "generated/torch.Tensor.real.rst", "generated/torch.Tensor.reciprocal.rst", "generated/torch.Tensor.reciprocal_.rst", 
"generated/torch.Tensor.record_stream.rst", "generated/torch.Tensor.register_hook.rst", "generated/torch.Tensor.register_post_accumulate_grad_hook.rst", "generated/torch.Tensor.remainder.rst", "generated/torch.Tensor.remainder_.rst", "generated/torch.Tensor.renorm.rst", "generated/torch.Tensor.renorm_.rst", "generated/torch.Tensor.repeat.rst", "generated/torch.Tensor.repeat_interleave.rst", "generated/torch.Tensor.requires_grad.rst", "generated/torch.Tensor.requires_grad_.rst", "generated/torch.Tensor.reshape.rst", "generated/torch.Tensor.reshape_as.rst", "generated/torch.Tensor.resize_.rst", "generated/torch.Tensor.resize_as_.rst", "generated/torch.Tensor.resolve_conj.rst", "generated/torch.Tensor.resolve_neg.rst", "generated/torch.Tensor.retain_grad.rst", "generated/torch.Tensor.retains_grad.rst", "generated/torch.Tensor.roll.rst", "generated/torch.Tensor.rot90.rst", "generated/torch.Tensor.round.rst", "generated/torch.Tensor.round_.rst", "generated/torch.Tensor.row_indices.rst", "generated/torch.Tensor.rsqrt.rst", "generated/torch.Tensor.rsqrt_.rst", "generated/torch.Tensor.scatter.rst", "generated/torch.Tensor.scatter_.rst", "generated/torch.Tensor.scatter_add.rst", "generated/torch.Tensor.scatter_add_.rst", "generated/torch.Tensor.scatter_reduce.rst", "generated/torch.Tensor.scatter_reduce_.rst", "generated/torch.Tensor.select.rst", "generated/torch.Tensor.select_scatter.rst", "generated/torch.Tensor.set_.rst", "generated/torch.Tensor.sgn.rst", "generated/torch.Tensor.sgn_.rst", "generated/torch.Tensor.shape.rst", "generated/torch.Tensor.share_memory_.rst", "generated/torch.Tensor.short.rst", "generated/torch.Tensor.sigmoid.rst", "generated/torch.Tensor.sigmoid_.rst", "generated/torch.Tensor.sign.rst", "generated/torch.Tensor.sign_.rst", "generated/torch.Tensor.signbit.rst", "generated/torch.Tensor.sin.rst", "generated/torch.Tensor.sin_.rst", "generated/torch.Tensor.sinc.rst", "generated/torch.Tensor.sinc_.rst", "generated/torch.Tensor.sinh.rst", "generated/torch.Tensor.sinh_.rst", "generated/torch.Tensor.size.rst", "generated/torch.Tensor.slice_scatter.rst", "generated/torch.Tensor.slogdet.rst", "generated/torch.Tensor.smm.rst", "generated/torch.Tensor.softmax.rst", "generated/torch.Tensor.sort.rst", "generated/torch.Tensor.sparse_dim.rst", "generated/torch.Tensor.sparse_mask.rst", "generated/torch.Tensor.sparse_resize_.rst", "generated/torch.Tensor.sparse_resize_and_clear_.rst", "generated/torch.Tensor.split.rst", "generated/torch.Tensor.sqrt.rst", "generated/torch.Tensor.sqrt_.rst", "generated/torch.Tensor.square.rst", "generated/torch.Tensor.square_.rst", "generated/torch.Tensor.squeeze.rst", "generated/torch.Tensor.squeeze_.rst", "generated/torch.Tensor.sspaddmm.rst", "generated/torch.Tensor.std.rst", "generated/torch.Tensor.stft.rst", "generated/torch.Tensor.storage.rst", "generated/torch.Tensor.storage_offset.rst", "generated/torch.Tensor.storage_type.rst", "generated/torch.Tensor.stride.rst", "generated/torch.Tensor.sub.rst", "generated/torch.Tensor.sub_.rst", "generated/torch.Tensor.subtract.rst", "generated/torch.Tensor.subtract_.rst", "generated/torch.Tensor.sum.rst", "generated/torch.Tensor.sum_to_size.rst", "generated/torch.Tensor.svd.rst", "generated/torch.Tensor.swapaxes.rst", "generated/torch.Tensor.swapdims.rst", "generated/torch.Tensor.t.rst", "generated/torch.Tensor.t_.rst", "generated/torch.Tensor.take.rst", "generated/torch.Tensor.take_along_dim.rst", "generated/torch.Tensor.tan.rst", "generated/torch.Tensor.tan_.rst", "generated/torch.Tensor.tanh.rst", 
"generated/torch.Tensor.tanh_.rst", "generated/torch.Tensor.tensor_split.rst", "generated/torch.Tensor.tile.rst", "generated/torch.Tensor.to.rst", "generated/torch.Tensor.to_dense.rst", "generated/torch.Tensor.to_mkldnn.rst", "generated/torch.Tensor.to_sparse.rst", "generated/torch.Tensor.to_sparse_bsc.rst", "generated/torch.Tensor.to_sparse_bsr.rst", "generated/torch.Tensor.to_sparse_coo.rst", "generated/torch.Tensor.to_sparse_csc.rst", "generated/torch.Tensor.to_sparse_csr.rst", "generated/torch.Tensor.tolist.rst", "generated/torch.Tensor.topk.rst", "generated/torch.Tensor.trace.rst", "generated/torch.Tensor.transpose.rst", "generated/torch.Tensor.transpose_.rst", "generated/torch.Tensor.triangular_solve.rst", "generated/torch.Tensor.tril.rst", "generated/torch.Tensor.tril_.rst", "generated/torch.Tensor.triu.rst", "generated/torch.Tensor.triu_.rst", "generated/torch.Tensor.true_divide.rst", "generated/torch.Tensor.true_divide_.rst", "generated/torch.Tensor.trunc.rst", "generated/torch.Tensor.trunc_.rst", "generated/torch.Tensor.type.rst", "generated/torch.Tensor.type_as.rst", "generated/torch.Tensor.unbind.rst", "generated/torch.Tensor.unflatten.rst", "generated/torch.Tensor.unfold.rst", "generated/torch.Tensor.uniform_.rst", "generated/torch.Tensor.unique.rst", "generated/torch.Tensor.unique_consecutive.rst", "generated/torch.Tensor.unsqueeze.rst", "generated/torch.Tensor.unsqueeze_.rst", "generated/torch.Tensor.untyped_storage.rst", "generated/torch.Tensor.values.rst", "generated/torch.Tensor.var.rst", "generated/torch.Tensor.vdot.rst", "generated/torch.Tensor.view.rst", "generated/torch.Tensor.view_as.rst", "generated/torch.Tensor.vsplit.rst", "generated/torch.Tensor.where.rst", "generated/torch.Tensor.xlogy.rst", "generated/torch.Tensor.xlogy_.rst", "generated/torch.Tensor.zero_.rst", "generated/torch._assert.rst", "generated/torch._foreach_abs.rst", "generated/torch._foreach_abs_.rst", "generated/torch._foreach_acos.rst", "generated/torch._foreach_acos_.rst", "generated/torch._foreach_asin.rst", "generated/torch._foreach_asin_.rst", "generated/torch._foreach_atan.rst", "generated/torch._foreach_atan_.rst", "generated/torch._foreach_ceil.rst", "generated/torch._foreach_ceil_.rst", "generated/torch._foreach_cos.rst", "generated/torch._foreach_cos_.rst", "generated/torch._foreach_cosh.rst", "generated/torch._foreach_cosh_.rst", "generated/torch._foreach_erf.rst", "generated/torch._foreach_erf_.rst", "generated/torch._foreach_erfc.rst", "generated/torch._foreach_erfc_.rst", "generated/torch._foreach_exp.rst", "generated/torch._foreach_exp_.rst", "generated/torch._foreach_expm1.rst", "generated/torch._foreach_expm1_.rst", "generated/torch._foreach_floor.rst", "generated/torch._foreach_floor_.rst", "generated/torch._foreach_frac.rst", "generated/torch._foreach_frac_.rst", "generated/torch._foreach_lgamma.rst", "generated/torch._foreach_lgamma_.rst", "generated/torch._foreach_log.rst", "generated/torch._foreach_log10.rst", "generated/torch._foreach_log10_.rst", "generated/torch._foreach_log1p.rst", "generated/torch._foreach_log1p_.rst", "generated/torch._foreach_log2.rst", "generated/torch._foreach_log2_.rst", "generated/torch._foreach_log_.rst", "generated/torch._foreach_neg.rst", "generated/torch._foreach_neg_.rst", "generated/torch._foreach_reciprocal.rst", "generated/torch._foreach_reciprocal_.rst", "generated/torch._foreach_round.rst", "generated/torch._foreach_round_.rst", "generated/torch._foreach_sigmoid.rst", "generated/torch._foreach_sigmoid_.rst", 
"generated/torch._foreach_sin.rst", "generated/torch._foreach_sin_.rst", "generated/torch._foreach_sinh.rst", "generated/torch._foreach_sinh_.rst", "generated/torch._foreach_sqrt.rst", "generated/torch._foreach_sqrt_.rst", "generated/torch._foreach_tan.rst", "generated/torch._foreach_tan_.rst", "generated/torch._foreach_trunc.rst", "generated/torch._foreach_trunc_.rst", "generated/torch._foreach_zero_.rst", "generated/torch._logging.set_logs.rst", "generated/torch.abs.rst", "generated/torch.absolute.rst", "generated/torch.acos.rst", "generated/torch.acosh.rst", "generated/torch.add.rst", "generated/torch.addbmm.rst", "generated/torch.addcdiv.rst", "generated/torch.addcmul.rst", "generated/torch.addmm.rst", "generated/torch.addmv.rst", "generated/torch.addr.rst", "generated/torch.adjoint.rst", "generated/torch.all.rst", "generated/torch.allclose.rst", "generated/torch.amax.rst", "generated/torch.amin.rst", "generated/torch.aminmax.rst", "generated/torch.angle.rst", "generated/torch.any.rst", "generated/torch.ao.nn.intrinsic.BNReLU2d.rst", "generated/torch.ao.nn.intrinsic.BNReLU3d.rst", "generated/torch.ao.nn.intrinsic.ConvBn1d.rst", "generated/torch.ao.nn.intrinsic.ConvBn2d.rst", "generated/torch.ao.nn.intrinsic.ConvBn3d.rst", "generated/torch.ao.nn.intrinsic.ConvBnReLU1d.rst", "generated/torch.ao.nn.intrinsic.ConvBnReLU2d.rst", "generated/torch.ao.nn.intrinsic.ConvBnReLU3d.rst", "generated/torch.ao.nn.intrinsic.ConvReLU1d.rst", "generated/torch.ao.nn.intrinsic.ConvReLU2d.rst", "generated/torch.ao.nn.intrinsic.ConvReLU3d.rst", "generated/torch.ao.nn.intrinsic.LinearReLU.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBn1d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBn2d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBn3d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU1d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU2d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU3d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvReLU2d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvReLU3d.rst", "generated/torch.ao.nn.intrinsic.qat.LinearReLU.rst", "generated/torch.ao.nn.intrinsic.qat.freeze_bn_stats.rst", "generated/torch.ao.nn.intrinsic.qat.update_bn_stats.rst", "generated/torch.ao.nn.intrinsic.quantized.BNReLU2d.rst", "generated/torch.ao.nn.intrinsic.quantized.BNReLU3d.rst", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU1d.rst", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU2d.rst", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU3d.rst", "generated/torch.ao.nn.intrinsic.quantized.LinearReLU.rst", "generated/torch.ao.nn.intrinsic.quantized.dynamic.LinearReLU.rst", "generated/torch.ao.nn.qat.Conv2d.rst", "generated/torch.ao.nn.qat.Conv3d.rst", "generated/torch.ao.nn.qat.Linear.rst", "generated/torch.ao.nn.qat.dynamic.Linear.rst", "generated/torch.ao.nn.quantizable.LSTM.rst", "generated/torch.ao.nn.quantizable.MultiheadAttention.rst", "generated/torch.ao.nn.quantized.BatchNorm2d.rst", "generated/torch.ao.nn.quantized.BatchNorm3d.rst", "generated/torch.ao.nn.quantized.Conv1d.rst", "generated/torch.ao.nn.quantized.Conv2d.rst", "generated/torch.ao.nn.quantized.Conv3d.rst", "generated/torch.ao.nn.quantized.ConvTranspose1d.rst", "generated/torch.ao.nn.quantized.ConvTranspose2d.rst", "generated/torch.ao.nn.quantized.ConvTranspose3d.rst", "generated/torch.ao.nn.quantized.ELU.rst", "generated/torch.ao.nn.quantized.Embedding.rst", "generated/torch.ao.nn.quantized.EmbeddingBag.rst", "generated/torch.ao.nn.quantized.FXFloatFunctional.rst", 
"generated/torch.ao.nn.quantized.FloatFunctional.rst", "generated/torch.ao.nn.quantized.GroupNorm.rst", "generated/torch.ao.nn.quantized.Hardswish.rst", "generated/torch.ao.nn.quantized.InstanceNorm1d.rst", "generated/torch.ao.nn.quantized.InstanceNorm2d.rst", "generated/torch.ao.nn.quantized.InstanceNorm3d.rst", "generated/torch.ao.nn.quantized.LayerNorm.rst", "generated/torch.ao.nn.quantized.LeakyReLU.rst", "generated/torch.ao.nn.quantized.Linear.rst", "generated/torch.ao.nn.quantized.QFunctional.rst", "generated/torch.ao.nn.quantized.ReLU6.rst", "generated/torch.ao.nn.quantized.Sigmoid.rst", "generated/torch.ao.nn.quantized.dynamic.GRU.rst", "generated/torch.ao.nn.quantized.dynamic.GRUCell.rst", "generated/torch.ao.nn.quantized.dynamic.LSTM.rst", "generated/torch.ao.nn.quantized.dynamic.LSTMCell.rst", "generated/torch.ao.nn.quantized.dynamic.Linear.rst", "generated/torch.ao.nn.quantized.dynamic.RNNCell.rst", "generated/torch.ao.nn.quantized.functional.adaptive_avg_pool2d.rst", "generated/torch.ao.nn.quantized.functional.adaptive_avg_pool3d.rst", "generated/torch.ao.nn.quantized.functional.avg_pool2d.rst", "generated/torch.ao.nn.quantized.functional.avg_pool3d.rst", "generated/torch.ao.nn.quantized.functional.celu.rst", "generated/torch.ao.nn.quantized.functional.clamp.rst", "generated/torch.ao.nn.quantized.functional.conv1d.rst", "generated/torch.ao.nn.quantized.functional.conv2d.rst", "generated/torch.ao.nn.quantized.functional.conv3d.rst", "generated/torch.ao.nn.quantized.functional.elu.rst", "generated/torch.ao.nn.quantized.functional.hardsigmoid.rst", "generated/torch.ao.nn.quantized.functional.hardswish.rst", "generated/torch.ao.nn.quantized.functional.hardtanh.rst", "generated/torch.ao.nn.quantized.functional.interpolate.rst", "generated/torch.ao.nn.quantized.functional.leaky_relu.rst", "generated/torch.ao.nn.quantized.functional.linear.rst", "generated/torch.ao.nn.quantized.functional.max_pool1d.rst", "generated/torch.ao.nn.quantized.functional.max_pool2d.rst", "generated/torch.ao.nn.quantized.functional.threshold.rst", "generated/torch.ao.nn.quantized.functional.upsample.rst", "generated/torch.ao.nn.quantized.functional.upsample_bilinear.rst", "generated/torch.ao.nn.quantized.functional.upsample_nearest.rst", "generated/torch.ao.quantization.DeQuantStub.rst", "generated/torch.ao.quantization.QuantStub.rst", "generated/torch.ao.quantization.QuantWrapper.rst", "generated/torch.ao.quantization.add_quant_dequant.rst", "generated/torch.ao.quantization.backend_config.BackendConfig.rst", "generated/torch.ao.quantization.backend_config.BackendPatternConfig.rst", "generated/torch.ao.quantization.backend_config.DTypeConfig.rst", "generated/torch.ao.quantization.backend_config.DTypeWithConstraints.rst", "generated/torch.ao.quantization.backend_config.ObservationType.rst", "generated/torch.ao.quantization.convert.rst", "generated/torch.ao.quantization.default_eval_fn.rst", "generated/torch.ao.quantization.fake_quantize.FakeQuantize.rst", "generated/torch.ao.quantization.fake_quantize.FakeQuantizeBase.rst", "generated/torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize.rst", "generated/torch.ao.quantization.fake_quantize.FusedMovingAvgObsFakeQuantize.rst", "generated/torch.ao.quantization.fake_quantize.default_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_fused_act_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_fused_per_channel_wt_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_fused_wt_fake_quant.rst", 
"generated/torch.ao.quantization.fake_quantize.default_histogram_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_per_channel_weight_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_weight_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.disable_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.disable_observer.rst", "generated/torch.ao.quantization.fake_quantize.enable_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.enable_observer.rst", "generated/torch.ao.quantization.fuse_modules.fuse_modules.rst", "generated/torch.ao.quantization.fx.custom_config.ConvertCustomConfig.rst", "generated/torch.ao.quantization.fx.custom_config.FuseCustomConfig.rst", "generated/torch.ao.quantization.fx.custom_config.PrepareCustomConfig.rst", "generated/torch.ao.quantization.fx.custom_config.StandaloneModuleConfigEntry.rst", "generated/torch.ao.quantization.observer.HistogramObserver.rst", "generated/torch.ao.quantization.observer.MinMaxObserver.rst", "generated/torch.ao.quantization.observer.MovingAverageMinMaxObserver.rst", "generated/torch.ao.quantization.observer.MovingAveragePerChannelMinMaxObserver.rst", "generated/torch.ao.quantization.observer.NoopObserver.rst", "generated/torch.ao.quantization.observer.ObserverBase.rst", "generated/torch.ao.quantization.observer.PerChannelMinMaxObserver.rst", "generated/torch.ao.quantization.observer.PlaceholderObserver.rst", "generated/torch.ao.quantization.observer.RecordingObserver.rst", "generated/torch.ao.quantization.observer.default_debug_observer.rst", "generated/torch.ao.quantization.observer.default_dynamic_quant_observer.rst", "generated/torch.ao.quantization.observer.default_float_qparams_observer.rst", "generated/torch.ao.quantization.observer.default_histogram_observer.rst", "generated/torch.ao.quantization.observer.default_observer.rst", "generated/torch.ao.quantization.observer.default_per_channel_weight_observer.rst", "generated/torch.ao.quantization.observer.default_placeholder_observer.rst", "generated/torch.ao.quantization.observer.default_weight_observer.rst", "generated/torch.ao.quantization.observer.get_observer_state_dict.rst", "generated/torch.ao.quantization.observer.load_observer_state_dict.rst", "generated/torch.ao.quantization.prepare.rst", "generated/torch.ao.quantization.prepare_qat.rst", "generated/torch.ao.quantization.propagate_qconfig_.rst", "generated/torch.ao.quantization.pt2e.export_utils.model_is_exported.rst", "generated/torch.ao.quantization.qconfig.QConfig.rst", "generated/torch.ao.quantization.qconfig.default_activation_only_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_debug_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_dynamic_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_per_channel_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_qat_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_qat_qconfig_v2.rst", "generated/torch.ao.quantization.qconfig.default_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_weight_only_qconfig.rst", "generated/torch.ao.quantization.qconfig.float16_dynamic_qconfig.rst", "generated/torch.ao.quantization.qconfig.float16_static_qconfig.rst", "generated/torch.ao.quantization.qconfig.float_qparams_weight_only_qconfig.rst", "generated/torch.ao.quantization.qconfig.per_channel_dynamic_qconfig.rst", "generated/torch.ao.quantization.qconfig_mapping.QConfigMapping.rst", 
"generated/torch.ao.quantization.qconfig_mapping.get_default_qat_qconfig_mapping.rst", "generated/torch.ao.quantization.qconfig_mapping.get_default_qconfig_mapping.rst", "generated/torch.ao.quantization.quantize.rst", "generated/torch.ao.quantization.quantize_dynamic.rst", "generated/torch.ao.quantization.quantize_fx.convert_fx.rst", "generated/torch.ao.quantization.quantize_fx.fuse_fx.rst", "generated/torch.ao.quantization.quantize_fx.prepare_fx.rst", "generated/torch.ao.quantization.quantize_fx.prepare_qat_fx.rst", "generated/torch.ao.quantization.quantize_qat.rst", "generated/torch.ao.quantization.swap_module.rst", "generated/torch.arange.rst", "generated/torch.arccos.rst", "generated/torch.arccosh.rst", "generated/torch.arcsin.rst", "generated/torch.arcsinh.rst", "generated/torch.arctan.rst", "generated/torch.arctan2.rst", "generated/torch.arctanh.rst", "generated/torch.are_deterministic_algorithms_enabled.rst", "generated/torch.argmax.rst", "generated/torch.argmin.rst", "generated/torch.argsort.rst", "generated/torch.argwhere.rst", "generated/torch.as_strided.rst", "generated/torch.as_tensor.rst", "generated/torch.asarray.rst", "generated/torch.asin.rst", "generated/torch.asinh.rst", "generated/torch.atan.rst", "generated/torch.atan2.rst", "generated/torch.atanh.rst", "generated/torch.atleast_1d.rst", "generated/torch.atleast_2d.rst", "generated/torch.atleast_3d.rst", "generated/torch.autograd.Function.backward.rst", "generated/torch.autograd.Function.forward.rst", "generated/torch.autograd.Function.jvp.rst", "generated/torch.autograd.Function.vmap.rst", "generated/torch.autograd.backward.rst", "generated/torch.autograd.forward_ad.UnpackedDualTensor.rst", "generated/torch.autograd.forward_ad.dual_level.rst", "generated/torch.autograd.forward_ad.enter_dual_level.rst", "generated/torch.autograd.forward_ad.exit_dual_level.rst", "generated/torch.autograd.forward_ad.make_dual.rst", "generated/torch.autograd.forward_ad.unpack_dual.rst", "generated/torch.autograd.function.BackwardCFunction.rst", "generated/torch.autograd.function.FunctionCtx.mark_dirty.rst", "generated/torch.autograd.function.FunctionCtx.mark_non_differentiable.rst", "generated/torch.autograd.function.FunctionCtx.save_for_backward.rst", "generated/torch.autograd.function.FunctionCtx.set_materialize_grads.rst", "generated/torch.autograd.function.InplaceFunction.rst", "generated/torch.autograd.function.NestedIOFunction.rst", "generated/torch.autograd.function.once_differentiable.rst", "generated/torch.autograd.functional.hessian.rst", "generated/torch.autograd.functional.hvp.rst", "generated/torch.autograd.functional.jacobian.rst", "generated/torch.autograd.functional.jvp.rst", "generated/torch.autograd.functional.vhp.rst", "generated/torch.autograd.functional.vjp.rst", "generated/torch.autograd.grad.rst", "generated/torch.autograd.grad_mode.inference_mode.rst", "generated/torch.autograd.grad_mode.set_grad_enabled.rst", "generated/torch.autograd.grad_mode.set_multithreading_enabled.rst", "generated/torch.autograd.gradcheck.GradcheckError.rst", "generated/torch.autograd.gradcheck.gradcheck.rst", "generated/torch.autograd.gradcheck.gradgradcheck.rst", "generated/torch.autograd.graph.Node.metadata.rst", "generated/torch.autograd.graph.Node.name.rst", "generated/torch.autograd.graph.Node.next_functions.rst", "generated/torch.autograd.graph.Node.register_hook.rst", "generated/torch.autograd.graph.Node.register_prehook.rst", "generated/torch.autograd.graph.increment_version.rst", 
"generated/torch.autograd.profiler.EnforceUnique.rst", "generated/torch.autograd.profiler.KinetoStepTracker.rst", "generated/torch.autograd.profiler.load_nvprof.rst", "generated/torch.autograd.profiler.parse_nvprof_trace.rst", "generated/torch.autograd.profiler.profile.export_chrome_trace.rst", "generated/torch.autograd.profiler.profile.key_averages.rst", "generated/torch.autograd.profiler.profile.self_cpu_time_total.rst", "generated/torch.autograd.profiler.profile.total_average.rst", "generated/torch.autograd.profiler.record_function.rst", "generated/torch.autograd.profiler_util.Interval.rst", "generated/torch.autograd.profiler_util.Kernel.rst", "generated/torch.autograd.profiler_util.MemRecordsAcc.rst", "generated/torch.autograd.profiler_util.StringTable.rst", "generated/torch.baddbmm.rst", "generated/torch.bartlett_window.rst", "generated/torch.bernoulli.rst", "generated/torch.bincount.rst", "generated/torch.bitwise_and.rst", "generated/torch.bitwise_left_shift.rst", "generated/torch.bitwise_not.rst", "generated/torch.bitwise_or.rst", "generated/torch.bitwise_right_shift.rst", "generated/torch.bitwise_xor.rst", "generated/torch.blackman_window.rst", "generated/torch.block_diag.rst", "generated/torch.bmm.rst", "generated/torch.broadcast_shapes.rst", "generated/torch.broadcast_tensors.rst", "generated/torch.broadcast_to.rst", "generated/torch.bucketize.rst", "generated/torch.can_cast.rst", "generated/torch.cartesian_prod.rst", "generated/torch.cat.rst", "generated/torch.cdist.rst", "generated/torch.ceil.rst", "generated/torch.chain_matmul.rst", "generated/torch.cholesky.rst", "generated/torch.cholesky_inverse.rst", "generated/torch.cholesky_solve.rst", "generated/torch.chunk.rst", "generated/torch.clamp.rst", "generated/torch.clip.rst", "generated/torch.clone.rst", "generated/torch.column_stack.rst", "generated/torch.combinations.rst", "generated/torch.compile.rst", "generated/torch.compiled_with_cxx11_abi.rst", "generated/torch.compiler.allow_in_graph.rst", "generated/torch.compiler.assume_constant_result.rst", "generated/torch.compiler.compile.rst", "generated/torch.compiler.cudagraph_mark_step_begin.rst", "generated/torch.compiler.disable.rst", "generated/torch.compiler.is_compiling.rst", "generated/torch.compiler.is_dynamo_compiling.rst", "generated/torch.compiler.list_backends.rst", "generated/torch.compiler.reset.rst", "generated/torch.complex.rst", "generated/torch.concat.rst", "generated/torch.concatenate.rst", "generated/torch.cond.rst", "generated/torch.conj.rst", "generated/torch.conj_physical.rst", "generated/torch.copysign.rst", "generated/torch.corrcoef.rst", "generated/torch.cos.rst", "generated/torch.cosh.rst", "generated/torch.count_nonzero.rst", "generated/torch.cov.rst", "generated/torch.cpu.Stream.rst", "generated/torch.cpu.StreamContext.rst", "generated/torch.cpu.current_device.rst", "generated/torch.cpu.current_stream.rst", "generated/torch.cpu.device_count.rst", "generated/torch.cpu.is_available.rst", "generated/torch.cpu.set_device.rst", "generated/torch.cpu.stream.rst", "generated/torch.cpu.synchronize.rst", "generated/torch.cross.rst", "generated/torch.cuda.CUDAGraph.rst", "generated/torch.cuda.CUDAPluggableAllocator.rst", "generated/torch.cuda.Event.rst", "generated/torch.cuda.ExternalStream.rst", "generated/torch.cuda.OutOfMemoryError.rst", "generated/torch.cuda.Stream.rst", "generated/torch.cuda.StreamContext.rst", "generated/torch.cuda.caching_allocator_alloc.rst", "generated/torch.cuda.caching_allocator_delete.rst", 
"generated/torch.cuda.can_device_access_peer.rst", "generated/torch.cuda.change_current_allocator.rst", "generated/torch.cuda.clock_rate.rst", "generated/torch.cuda.comm.broadcast.rst", "generated/torch.cuda.comm.broadcast_coalesced.rst", "generated/torch.cuda.comm.gather.rst", "generated/torch.cuda.comm.reduce_add.rst", "generated/torch.cuda.comm.scatter.rst", "generated/torch.cuda.current_blas_handle.rst", "generated/torch.cuda.current_device.rst", "generated/torch.cuda.current_stream.rst", "generated/torch.cuda.default_stream.rst", "generated/torch.cuda.device.rst", "generated/torch.cuda.device_count.rst", "generated/torch.cuda.device_of.rst", "generated/torch.cuda.empty_cache.rst", "generated/torch.cuda.get_allocator_backend.rst", "generated/torch.cuda.get_arch_list.rst", "generated/torch.cuda.get_device_capability.rst", "generated/torch.cuda.get_device_name.rst", "generated/torch.cuda.get_device_properties.rst", "generated/torch.cuda.get_gencode_flags.rst", "generated/torch.cuda.get_rng_state.rst", "generated/torch.cuda.get_rng_state_all.rst", "generated/torch.cuda.get_sync_debug_mode.rst", "generated/torch.cuda.graph.rst", "generated/torch.cuda.graph_pool_handle.rst", "generated/torch.cuda.init.rst", "generated/torch.cuda.initial_seed.rst", "generated/torch.cuda.ipc_collect.rst", "generated/torch.cuda.is_available.rst", "generated/torch.cuda.is_current_stream_capturing.rst", "generated/torch.cuda.is_initialized.rst", "generated/torch.cuda.jiterator._create_jit_fn.rst", "generated/torch.cuda.jiterator._create_multi_output_jit_fn.rst", "generated/torch.cuda.list_gpu_processes.rst", "generated/torch.cuda.make_graphed_callables.rst", "generated/torch.cuda.manual_seed.rst", "generated/torch.cuda.manual_seed_all.rst", "generated/torch.cuda.max_memory_allocated.rst", "generated/torch.cuda.max_memory_cached.rst", "generated/torch.cuda.max_memory_reserved.rst", "generated/torch.cuda.mem_get_info.rst", "generated/torch.cuda.memory_allocated.rst", "generated/torch.cuda.memory_cached.rst", "generated/torch.cuda.memory_reserved.rst", "generated/torch.cuda.memory_snapshot.rst", "generated/torch.cuda.memory_stats.rst", "generated/torch.cuda.memory_summary.rst", "generated/torch.cuda.memory_usage.rst", "generated/torch.cuda.nvtx.mark.rst", "generated/torch.cuda.nvtx.range.rst", "generated/torch.cuda.nvtx.range_pop.rst", "generated/torch.cuda.nvtx.range_push.rst", "generated/torch.cuda.power_draw.rst", "generated/torch.cuda.reset_max_memory_allocated.rst", "generated/torch.cuda.reset_max_memory_cached.rst", "generated/torch.cuda.reset_peak_memory_stats.rst", "generated/torch.cuda.seed.rst", "generated/torch.cuda.seed_all.rst", "generated/torch.cuda.set_device.rst", "generated/torch.cuda.set_per_process_memory_fraction.rst", "generated/torch.cuda.set_rng_state.rst", "generated/torch.cuda.set_rng_state_all.rst", "generated/torch.cuda.set_stream.rst", "generated/torch.cuda.set_sync_debug_mode.rst", "generated/torch.cuda.stream.rst", "generated/torch.cuda.synchronize.rst", "generated/torch.cuda.temperature.rst", "generated/torch.cuda.utilization.rst", "generated/torch.cummax.rst", "generated/torch.cummin.rst", "generated/torch.cumprod.rst", "generated/torch.cumsum.rst", "generated/torch.cumulative_trapezoid.rst", "generated/torch.deg2rad.rst", "generated/torch.dequantize.rst", "generated/torch.det.rst", "generated/torch.diag.rst", "generated/torch.diag_embed.rst", "generated/torch.diagflat.rst", "generated/torch.diagonal.rst", "generated/torch.diagonal_scatter.rst", "generated/torch.diff.rst", 
"generated/torch.digamma.rst", "generated/torch.dist.rst", "generated/torch.div.rst", "generated/torch.divide.rst", "generated/torch.dot.rst", "generated/torch.dsplit.rst", "generated/torch.dstack.rst", "generated/torch.einsum.rst", "generated/torch.empty.rst", "generated/torch.empty_like.rst", "generated/torch.empty_strided.rst", "generated/torch.enable_grad.rst", "generated/torch.eq.rst", "generated/torch.equal.rst", "generated/torch.erf.rst", "generated/torch.erfc.rst", "generated/torch.erfinv.rst", "generated/torch.exp.rst", "generated/torch.exp2.rst", "generated/torch.expm1.rst", "generated/torch.eye.rst", "generated/torch.fake_quantize_per_channel_affine.rst", "generated/torch.fake_quantize_per_tensor_affine.rst", "generated/torch.fft.fft.rst", "generated/torch.fft.fft2.rst", "generated/torch.fft.fftfreq.rst", "generated/torch.fft.fftn.rst", "generated/torch.fft.fftshift.rst", "generated/torch.fft.hfft.rst", "generated/torch.fft.hfft2.rst", "generated/torch.fft.hfftn.rst", "generated/torch.fft.ifft.rst", "generated/torch.fft.ifft2.rst", "generated/torch.fft.ifftn.rst", "generated/torch.fft.ifftshift.rst", "generated/torch.fft.ihfft.rst", "generated/torch.fft.ihfft2.rst", "generated/torch.fft.ihfftn.rst", "generated/torch.fft.irfft.rst", "generated/torch.fft.irfft2.rst", "generated/torch.fft.irfftn.rst", "generated/torch.fft.rfft.rst", "generated/torch.fft.rfft2.rst", "generated/torch.fft.rfftfreq.rst", "generated/torch.fft.rfftn.rst", "generated/torch.fix.rst", "generated/torch.flatten.rst", "generated/torch.flip.rst", "generated/torch.fliplr.rst", "generated/torch.flipud.rst", "generated/torch.float_power.rst", "generated/torch.floor.rst", "generated/torch.floor_divide.rst", "generated/torch.fmax.rst", "generated/torch.fmin.rst", "generated/torch.fmod.rst", "generated/torch.frac.rst", "generated/torch.frexp.rst", "generated/torch.from_dlpack.rst", "generated/torch.from_file.rst", "generated/torch.from_numpy.rst", "generated/torch.frombuffer.rst", "generated/torch.full.rst", "generated/torch.full_like.rst", "generated/torch.func.functional_call.rst", "generated/torch.func.functionalize.rst", "generated/torch.func.grad.rst", "generated/torch.func.grad_and_value.rst", "generated/torch.func.hessian.rst", "generated/torch.func.jacfwd.rst", "generated/torch.func.jacrev.rst", "generated/torch.func.jvp.rst", "generated/torch.func.linearize.rst", "generated/torch.func.replace_all_batch_norm_modules_.rst", "generated/torch.func.stack_module_state.rst", "generated/torch.func.vjp.rst", "generated/torch.func.vmap.rst", "generated/torch.fx.experimental.symbolic_shapes.CallMethodKey.rst", "generated/torch.fx.experimental.symbolic_shapes.ConvertIntKey.rst", "generated/torch.fx.experimental.symbolic_shapes.DimConstraints.rst", "generated/torch.fx.experimental.symbolic_shapes.DimDynamic.rst", "generated/torch.fx.experimental.symbolic_shapes.DivideByKey.rst", "generated/torch.fx.experimental.symbolic_shapes.EqualityConstraint.rst", "generated/torch.fx.experimental.symbolic_shapes.InnerTensorKey.rst", "generated/torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.rst", "generated/torch.fx.experimental.symbolic_shapes.RelaxedUnspecConstraint.rst", "generated/torch.fx.experimental.symbolic_shapes.ShapeEnv.rst", "generated/torch.fx.experimental.symbolic_shapes.ShapeEnvSettings.rst", "generated/torch.fx.experimental.symbolic_shapes.StatefulSymbolicContext.rst", "generated/torch.fx.experimental.symbolic_shapes.StatelessSymbolicContext.rst", 
"generated/torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint.rst", "generated/torch.fx.experimental.symbolic_shapes.SubclassSymbolicContext.rst", "generated/torch.fx.experimental.symbolic_shapes.SymbolicContext.rst", "generated/torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr.rst", "generated/torch.fx.experimental.symbolic_shapes.check_consistent.rst", "generated/torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings.rst", "generated/torch.fx.experimental.symbolic_shapes.constrain_range.rst", "generated/torch.fx.experimental.symbolic_shapes.constrain_unify.rst", "generated/torch.fx.experimental.symbolic_shapes.definitely_false.rst", "generated/torch.fx.experimental.symbolic_shapes.definitely_true.rst", "generated/torch.fx.experimental.symbolic_shapes.guard_size_oblivious.rst", "generated/torch.fx.experimental.symbolic_shapes.has_free_symbols.rst", "generated/torch.fx.experimental.symbolic_shapes.hint_int.rst", "generated/torch.fx.experimental.symbolic_shapes.is_concrete_bool.rst", "generated/torch.fx.experimental.symbolic_shapes.is_concrete_int.rst", "generated/torch.fx.experimental.symbolic_shapes.lru_cache.rst", "generated/torch.fx.experimental.symbolic_shapes.parallel_and.rst", "generated/torch.fx.experimental.symbolic_shapes.parallel_or.rst", "generated/torch.fx.experimental.symbolic_shapes.rebind_unbacked.rst", "generated/torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings.rst", "generated/torch.fx.experimental.symbolic_shapes.statically_known_true.rst", "generated/torch.fx.experimental.symbolic_shapes.sym_eq.rst", "generated/torch.gather.rst", "generated/torch.gcd.rst", "generated/torch.ge.rst", "generated/torch.geqrf.rst", "generated/torch.ger.rst", "generated/torch.get_default_device.rst", "generated/torch.get_default_dtype.rst", "generated/torch.get_deterministic_debug_mode.rst", "generated/torch.get_device_module.rst", "generated/torch.get_float32_matmul_precision.rst", "generated/torch.get_num_interop_threads.rst", "generated/torch.get_num_threads.rst", "generated/torch.get_rng_state.rst", "generated/torch.gradient.rst", "generated/torch.greater.rst", "generated/torch.greater_equal.rst", "generated/torch.gt.rst", "generated/torch.hamming_window.rst", "generated/torch.hann_window.rst", "generated/torch.heaviside.rst", "generated/torch.histc.rst", "generated/torch.histogram.rst", "generated/torch.histogramdd.rst", "generated/torch.hsplit.rst", "generated/torch.hspmm.rst", "generated/torch.hstack.rst", "generated/torch.hypot.rst", "generated/torch.i0.rst", "generated/torch.igamma.rst", "generated/torch.igammac.rst", "generated/torch.imag.rst", "generated/torch.index_add.rst", "generated/torch.index_copy.rst", "generated/torch.index_reduce.rst", "generated/torch.index_select.rst", "generated/torch.initial_seed.rst", "generated/torch.inner.rst", "generated/torch.inverse.rst", "generated/torch.is_complex.rst", "generated/torch.is_conj.rst", "generated/torch.is_deterministic_algorithms_warn_only_enabled.rst", "generated/torch.is_floating_point.rst", "generated/torch.is_grad_enabled.rst", "generated/torch.is_inference_mode_enabled.rst", "generated/torch.is_nonzero.rst", "generated/torch.is_storage.rst", "generated/torch.is_tensor.rst", "generated/torch.is_warn_always_enabled.rst", "generated/torch.isclose.rst", "generated/torch.isfinite.rst", "generated/torch.isin.rst", "generated/torch.isinf.rst", "generated/torch.isnan.rst", "generated/torch.isneginf.rst", "generated/torch.isposinf.rst", "generated/torch.isreal.rst", 
"generated/torch.istft.rst", "generated/torch.jit.Attribute.rst", "generated/torch.jit.ScriptFunction.rst", "generated/torch.jit.ScriptModule.rst", "generated/torch.jit.annotate.rst", "generated/torch.jit.enable_onednn_fusion.rst", "generated/torch.jit.fork.rst", "generated/torch.jit.freeze.rst", "generated/torch.jit.ignore.rst", "generated/torch.jit.interface.rst", "generated/torch.jit.isinstance.rst", "generated/torch.jit.load.rst", "generated/torch.jit.onednn_fusion_enabled.rst", "generated/torch.jit.optimize_for_inference.rst", "generated/torch.jit.save.rst", "generated/torch.jit.script.rst", "generated/torch.jit.script_if_tracing.rst", "generated/torch.jit.set_fusion_strategy.rst", "generated/torch.jit.strict_fusion.rst", "generated/torch.jit.trace.rst", "generated/torch.jit.trace_module.rst", "generated/torch.jit.unused.rst", "generated/torch.jit.wait.rst", "generated/torch.kaiser_window.rst", "generated/torch.kron.rst", "generated/torch.kthvalue.rst", "generated/torch.lcm.rst", "generated/torch.ldexp.rst", "generated/torch.le.rst", "generated/torch.lerp.rst", "generated/torch.less.rst", "generated/torch.less_equal.rst", "generated/torch.lgamma.rst", "generated/torch.linalg.cholesky.rst", "generated/torch.linalg.cholesky_ex.rst", "generated/torch.linalg.cond.rst", "generated/torch.linalg.cross.rst", "generated/torch.linalg.det.rst", "generated/torch.linalg.diagonal.rst", "generated/torch.linalg.eig.rst", "generated/torch.linalg.eigh.rst", "generated/torch.linalg.eigvals.rst", "generated/torch.linalg.eigvalsh.rst", "generated/torch.linalg.householder_product.rst", "generated/torch.linalg.inv.rst", "generated/torch.linalg.inv_ex.rst", "generated/torch.linalg.ldl_factor.rst", "generated/torch.linalg.ldl_factor_ex.rst", "generated/torch.linalg.ldl_solve.rst", "generated/torch.linalg.lstsq.rst", "generated/torch.linalg.lu.rst", "generated/torch.linalg.lu_factor.rst", "generated/torch.linalg.lu_factor_ex.rst", "generated/torch.linalg.lu_solve.rst", "generated/torch.linalg.matmul.rst", "generated/torch.linalg.matrix_exp.rst", "generated/torch.linalg.matrix_norm.rst", "generated/torch.linalg.matrix_power.rst", "generated/torch.linalg.matrix_rank.rst", "generated/torch.linalg.multi_dot.rst", "generated/torch.linalg.norm.rst", "generated/torch.linalg.pinv.rst", "generated/torch.linalg.qr.rst", "generated/torch.linalg.slogdet.rst", "generated/torch.linalg.solve.rst", "generated/torch.linalg.solve_ex.rst", "generated/torch.linalg.solve_triangular.rst", "generated/torch.linalg.svd.rst", "generated/torch.linalg.svdvals.rst", "generated/torch.linalg.tensorinv.rst", "generated/torch.linalg.tensorsolve.rst", "generated/torch.linalg.vander.rst", "generated/torch.linalg.vecdot.rst", "generated/torch.linalg.vector_norm.rst", "generated/torch.linspace.rst", "generated/torch.load.rst", "generated/torch.lobpcg.rst", "generated/torch.log.rst", "generated/torch.log10.rst", "generated/torch.log1p.rst", "generated/torch.log2.rst", "generated/torch.logaddexp.rst", "generated/torch.logaddexp2.rst", "generated/torch.logcumsumexp.rst", "generated/torch.logdet.rst", "generated/torch.logical_and.rst", "generated/torch.logical_not.rst", "generated/torch.logical_or.rst", "generated/torch.logical_xor.rst", "generated/torch.logit.rst", "generated/torch.logspace.rst", "generated/torch.logsumexp.rst", "generated/torch.lt.rst", "generated/torch.lu.rst", "generated/torch.lu_solve.rst", "generated/torch.lu_unpack.rst", "generated/torch.manual_seed.rst", "generated/torch.masked_select.rst", "generated/torch.matmul.rst", 
"generated/torch.matrix_exp.rst", "generated/torch.matrix_power.rst", "generated/torch.max.rst", "generated/torch.maximum.rst", "generated/torch.mean.rst", "generated/torch.median.rst", "generated/torch.meshgrid.rst", "generated/torch.min.rst", "generated/torch.minimum.rst", "generated/torch.mm.rst", "generated/torch.mode.rst", "generated/torch.moveaxis.rst", "generated/torch.movedim.rst", "generated/torch.mps.current_allocated_memory.rst", "generated/torch.mps.device_count.rst", "generated/torch.mps.driver_allocated_memory.rst", "generated/torch.mps.empty_cache.rst", "generated/torch.mps.event.Event.rst", "generated/torch.mps.get_rng_state.rst", "generated/torch.mps.manual_seed.rst", "generated/torch.mps.profiler.profile.rst", "generated/torch.mps.profiler.start.rst", "generated/torch.mps.profiler.stop.rst", "generated/torch.mps.seed.rst", "generated/torch.mps.set_per_process_memory_fraction.rst", "generated/torch.mps.set_rng_state.rst", "generated/torch.mps.synchronize.rst", "generated/torch.msort.rst", "generated/torch.mtia.DeferredMtiaCallError.rst", "generated/torch.mtia.Event.rst", "generated/torch.mtia.Stream.rst", "generated/torch.mtia.StreamContext.rst", "generated/torch.mtia.current_device.rst", "generated/torch.mtia.current_stream.rst", "generated/torch.mtia.default_stream.rst", "generated/torch.mtia.device.rst", "generated/torch.mtia.device_count.rst", "generated/torch.mtia.init.rst", "generated/torch.mtia.is_available.rst", "generated/torch.mtia.is_initialized.rst", "generated/torch.mtia.set_stream.rst", "generated/torch.mtia.stream.rst", "generated/torch.mtia.synchronize.rst", "generated/torch.mul.rst", "generated/torch.multinomial.rst", "generated/torch.multiply.rst", "generated/torch.mv.rst", "generated/torch.mvlgamma.rst", "generated/torch.nan_to_num.rst", "generated/torch.nanmean.rst", "generated/torch.nanmedian.rst", "generated/torch.nanquantile.rst", "generated/torch.nansum.rst", "generated/torch.narrow.rst", "generated/torch.narrow_copy.rst", "generated/torch.ne.rst", "generated/torch.neg.rst", "generated/torch.negative.rst", "generated/torch.nextafter.rst", "generated/torch.nn.AdaptiveAvgPool1d.rst", "generated/torch.nn.AdaptiveAvgPool2d.rst", "generated/torch.nn.AdaptiveAvgPool3d.rst", "generated/torch.nn.AdaptiveLogSoftmaxWithLoss.rst", "generated/torch.nn.AdaptiveMaxPool1d.rst", "generated/torch.nn.AdaptiveMaxPool2d.rst", "generated/torch.nn.AdaptiveMaxPool3d.rst", "generated/torch.nn.AlphaDropout.rst", "generated/torch.nn.AvgPool1d.rst", "generated/torch.nn.AvgPool2d.rst", "generated/torch.nn.AvgPool3d.rst", "generated/torch.nn.BCELoss.rst", "generated/torch.nn.BCEWithLogitsLoss.rst", "generated/torch.nn.BatchNorm1d.rst", "generated/torch.nn.BatchNorm2d.rst", "generated/torch.nn.BatchNorm3d.rst", "generated/torch.nn.Bilinear.rst", "generated/torch.nn.CELU.rst", "generated/torch.nn.CTCLoss.rst", "generated/torch.nn.ChannelShuffle.rst", "generated/torch.nn.CircularPad1d.rst", "generated/torch.nn.CircularPad2d.rst", "generated/torch.nn.CircularPad3d.rst", "generated/torch.nn.ConstantPad1d.rst", "generated/torch.nn.ConstantPad2d.rst", "generated/torch.nn.ConstantPad3d.rst", "generated/torch.nn.Conv1d.rst", "generated/torch.nn.Conv2d.rst", "generated/torch.nn.Conv3d.rst", "generated/torch.nn.ConvTranspose1d.rst", "generated/torch.nn.ConvTranspose2d.rst", "generated/torch.nn.ConvTranspose3d.rst", "generated/torch.nn.CosineEmbeddingLoss.rst", "generated/torch.nn.CosineSimilarity.rst", "generated/torch.nn.CrossEntropyLoss.rst", "generated/torch.nn.DataParallel.rst", 
"generated/torch.nn.Dropout.rst", "generated/torch.nn.Dropout1d.rst", "generated/torch.nn.Dropout2d.rst", "generated/torch.nn.Dropout3d.rst", "generated/torch.nn.ELU.rst", "generated/torch.nn.Embedding.rst", "generated/torch.nn.EmbeddingBag.rst", "generated/torch.nn.FeatureAlphaDropout.rst", "generated/torch.nn.Flatten.rst", "generated/torch.nn.Fold.rst", "generated/torch.nn.FractionalMaxPool2d.rst", "generated/torch.nn.FractionalMaxPool3d.rst", "generated/torch.nn.GELU.rst", "generated/torch.nn.GLU.rst", "generated/torch.nn.GRU.rst", "generated/torch.nn.GRUCell.rst", "generated/torch.nn.GaussianNLLLoss.rst", "generated/torch.nn.GroupNorm.rst", "generated/torch.nn.Hardshrink.rst", "generated/torch.nn.Hardsigmoid.rst", "generated/torch.nn.Hardswish.rst", "generated/torch.nn.Hardtanh.rst", "generated/torch.nn.HingeEmbeddingLoss.rst", "generated/torch.nn.HuberLoss.rst", "generated/torch.nn.Identity.rst", "generated/torch.nn.InstanceNorm1d.rst", "generated/torch.nn.InstanceNorm2d.rst", "generated/torch.nn.InstanceNorm3d.rst", "generated/torch.nn.KLDivLoss.rst", "generated/torch.nn.L1Loss.rst", "generated/torch.nn.LPPool1d.rst", "generated/torch.nn.LPPool2d.rst", "generated/torch.nn.LPPool3d.rst", "generated/torch.nn.LSTM.rst", "generated/torch.nn.LSTMCell.rst", "generated/torch.nn.LayerNorm.rst", "generated/torch.nn.LazyBatchNorm1d.rst", "generated/torch.nn.LazyBatchNorm2d.rst", "generated/torch.nn.LazyBatchNorm3d.rst", "generated/torch.nn.LazyConv1d.rst", "generated/torch.nn.LazyConv2d.rst", "generated/torch.nn.LazyConv3d.rst", "generated/torch.nn.LazyConvTranspose1d.rst", "generated/torch.nn.LazyConvTranspose2d.rst", "generated/torch.nn.LazyConvTranspose3d.rst", "generated/torch.nn.LazyInstanceNorm1d.rst", "generated/torch.nn.LazyInstanceNorm2d.rst", "generated/torch.nn.LazyInstanceNorm3d.rst", "generated/torch.nn.LazyLinear.rst", "generated/torch.nn.LeakyReLU.rst", "generated/torch.nn.Linear.rst", "generated/torch.nn.LocalResponseNorm.rst", "generated/torch.nn.LogSigmoid.rst", "generated/torch.nn.LogSoftmax.rst", "generated/torch.nn.MSELoss.rst", "generated/torch.nn.MarginRankingLoss.rst", "generated/torch.nn.MaxPool1d.rst", "generated/torch.nn.MaxPool2d.rst", "generated/torch.nn.MaxPool3d.rst", "generated/torch.nn.MaxUnpool1d.rst", "generated/torch.nn.MaxUnpool2d.rst", "generated/torch.nn.MaxUnpool3d.rst", "generated/torch.nn.Mish.rst", "generated/torch.nn.Module.rst", "generated/torch.nn.ModuleDict.rst", "generated/torch.nn.ModuleList.rst", "generated/torch.nn.MultiLabelMarginLoss.rst", "generated/torch.nn.MultiLabelSoftMarginLoss.rst", "generated/torch.nn.MultiMarginLoss.rst", "generated/torch.nn.MultiheadAttention.rst", "generated/torch.nn.NLLLoss.rst", "generated/torch.nn.PReLU.rst", "generated/torch.nn.PairwiseDistance.rst", "generated/torch.nn.ParameterDict.rst", "generated/torch.nn.ParameterList.rst", "generated/torch.nn.PixelShuffle.rst", "generated/torch.nn.PixelUnshuffle.rst", "generated/torch.nn.PoissonNLLLoss.rst", "generated/torch.nn.RMSNorm.rst", "generated/torch.nn.RNN.rst", "generated/torch.nn.RNNBase.rst", "generated/torch.nn.RNNCell.rst", "generated/torch.nn.RReLU.rst", "generated/torch.nn.ReLU.rst", "generated/torch.nn.ReLU6.rst", "generated/torch.nn.ReflectionPad1d.rst", "generated/torch.nn.ReflectionPad2d.rst", "generated/torch.nn.ReflectionPad3d.rst", "generated/torch.nn.ReplicationPad1d.rst", "generated/torch.nn.ReplicationPad2d.rst", "generated/torch.nn.ReplicationPad3d.rst", "generated/torch.nn.SELU.rst", "generated/torch.nn.Sequential.rst", 
"generated/torch.nn.SiLU.rst", "generated/torch.nn.Sigmoid.rst", "generated/torch.nn.SmoothL1Loss.rst", "generated/torch.nn.SoftMarginLoss.rst", "generated/torch.nn.Softmax.rst", "generated/torch.nn.Softmax2d.rst", "generated/torch.nn.Softmin.rst", "generated/torch.nn.Softplus.rst", "generated/torch.nn.Softshrink.rst", "generated/torch.nn.Softsign.rst", "generated/torch.nn.SyncBatchNorm.rst", "generated/torch.nn.Tanh.rst", "generated/torch.nn.Tanhshrink.rst", "generated/torch.nn.Threshold.rst", "generated/torch.nn.Transformer.rst", "generated/torch.nn.TransformerDecoder.rst", "generated/torch.nn.TransformerDecoderLayer.rst", "generated/torch.nn.TransformerEncoder.rst", "generated/torch.nn.TransformerEncoderLayer.rst", "generated/torch.nn.TripletMarginLoss.rst", "generated/torch.nn.TripletMarginWithDistanceLoss.rst", "generated/torch.nn.Unflatten.rst", "generated/torch.nn.Unfold.rst", "generated/torch.nn.Upsample.rst", "generated/torch.nn.UpsamplingBilinear2d.rst", "generated/torch.nn.UpsamplingNearest2d.rst", "generated/torch.nn.ZeroPad1d.rst", "generated/torch.nn.ZeroPad2d.rst", "generated/torch.nn.ZeroPad3d.rst", "generated/torch.nn.attention.SDPBackend.rst", "generated/torch.nn.attention.bias.CausalBias.rst", "generated/torch.nn.attention.bias.CausalVariant.rst", "generated/torch.nn.attention.bias.causal_lower_right.rst", "generated/torch.nn.attention.bias.causal_upper_left.rst", "generated/torch.nn.attention.sdpa_kernel.rst", "generated/torch.nn.functional.adaptive_avg_pool1d.rst", "generated/torch.nn.functional.adaptive_avg_pool2d.rst", "generated/torch.nn.functional.adaptive_avg_pool3d.rst", "generated/torch.nn.functional.adaptive_max_pool1d.rst", "generated/torch.nn.functional.adaptive_max_pool2d.rst", "generated/torch.nn.functional.adaptive_max_pool3d.rst", "generated/torch.nn.functional.affine_grid.rst", "generated/torch.nn.functional.alpha_dropout.rst", "generated/torch.nn.functional.avg_pool1d.rst", "generated/torch.nn.functional.avg_pool2d.rst", "generated/torch.nn.functional.avg_pool3d.rst", "generated/torch.nn.functional.batch_norm.rst", "generated/torch.nn.functional.bilinear.rst", "generated/torch.nn.functional.binary_cross_entropy.rst", "generated/torch.nn.functional.binary_cross_entropy_with_logits.rst", "generated/torch.nn.functional.celu.rst", "generated/torch.nn.functional.conv1d.rst", "generated/torch.nn.functional.conv2d.rst", "generated/torch.nn.functional.conv3d.rst", "generated/torch.nn.functional.conv_transpose1d.rst", "generated/torch.nn.functional.conv_transpose2d.rst", "generated/torch.nn.functional.conv_transpose3d.rst", "generated/torch.nn.functional.cosine_embedding_loss.rst", "generated/torch.nn.functional.cosine_similarity.rst", "generated/torch.nn.functional.cross_entropy.rst", "generated/torch.nn.functional.ctc_loss.rst", "generated/torch.nn.functional.dropout.rst", "generated/torch.nn.functional.dropout1d.rst", "generated/torch.nn.functional.dropout2d.rst", "generated/torch.nn.functional.dropout3d.rst", "generated/torch.nn.functional.elu.rst", "generated/torch.nn.functional.elu_.rst", "generated/torch.nn.functional.embedding.rst", "generated/torch.nn.functional.embedding_bag.rst", "generated/torch.nn.functional.feature_alpha_dropout.rst", "generated/torch.nn.functional.fold.rst", "generated/torch.nn.functional.fractional_max_pool2d.rst", "generated/torch.nn.functional.fractional_max_pool3d.rst", "generated/torch.nn.functional.gaussian_nll_loss.rst", "generated/torch.nn.functional.gelu.rst", "generated/torch.nn.functional.glu.rst", 
"generated/torch.nn.functional.grid_sample.rst", "generated/torch.nn.functional.group_norm.rst", "generated/torch.nn.functional.gumbel_softmax.rst", "generated/torch.nn.functional.hardshrink.rst", "generated/torch.nn.functional.hardsigmoid.rst", "generated/torch.nn.functional.hardswish.rst", "generated/torch.nn.functional.hardtanh.rst", "generated/torch.nn.functional.hardtanh_.rst", "generated/torch.nn.functional.hinge_embedding_loss.rst", "generated/torch.nn.functional.huber_loss.rst", "generated/torch.nn.functional.instance_norm.rst", "generated/torch.nn.functional.interpolate.rst", "generated/torch.nn.functional.kl_div.rst", "generated/torch.nn.functional.l1_loss.rst", "generated/torch.nn.functional.layer_norm.rst", "generated/torch.nn.functional.leaky_relu.rst", "generated/torch.nn.functional.leaky_relu_.rst", "generated/torch.nn.functional.linear.rst", "generated/torch.nn.functional.local_response_norm.rst", "generated/torch.nn.functional.log_softmax.rst", "generated/torch.nn.functional.logsigmoid.rst", "generated/torch.nn.functional.lp_pool1d.rst", "generated/torch.nn.functional.lp_pool2d.rst", "generated/torch.nn.functional.lp_pool3d.rst", "generated/torch.nn.functional.margin_ranking_loss.rst", "generated/torch.nn.functional.max_pool1d.rst", "generated/torch.nn.functional.max_pool2d.rst", "generated/torch.nn.functional.max_pool3d.rst", "generated/torch.nn.functional.max_unpool1d.rst", "generated/torch.nn.functional.max_unpool2d.rst", "generated/torch.nn.functional.max_unpool3d.rst", "generated/torch.nn.functional.mish.rst", "generated/torch.nn.functional.mse_loss.rst", "generated/torch.nn.functional.multi_margin_loss.rst", "generated/torch.nn.functional.multilabel_margin_loss.rst", "generated/torch.nn.functional.multilabel_soft_margin_loss.rst", "generated/torch.nn.functional.nll_loss.rst", "generated/torch.nn.functional.normalize.rst", "generated/torch.nn.functional.one_hot.rst", "generated/torch.nn.functional.pad.rst", "generated/torch.nn.functional.pairwise_distance.rst", "generated/torch.nn.functional.pdist.rst", "generated/torch.nn.functional.pixel_shuffle.rst", "generated/torch.nn.functional.pixel_unshuffle.rst", "generated/torch.nn.functional.poisson_nll_loss.rst", "generated/torch.nn.functional.prelu.rst", "generated/torch.nn.functional.relu.rst", "generated/torch.nn.functional.relu6.rst", "generated/torch.nn.functional.relu_.rst", "generated/torch.nn.functional.rms_norm.rst", "generated/torch.nn.functional.rrelu.rst", "generated/torch.nn.functional.rrelu_.rst", "generated/torch.nn.functional.scaled_dot_product_attention.rst", "generated/torch.nn.functional.selu.rst", "generated/torch.nn.functional.sigmoid.rst", "generated/torch.nn.functional.silu.rst", "generated/torch.nn.functional.smooth_l1_loss.rst", "generated/torch.nn.functional.soft_margin_loss.rst", "generated/torch.nn.functional.softmax.rst", "generated/torch.nn.functional.softmin.rst", "generated/torch.nn.functional.softplus.rst", "generated/torch.nn.functional.softshrink.rst", "generated/torch.nn.functional.softsign.rst", "generated/torch.nn.functional.tanh.rst", "generated/torch.nn.functional.tanhshrink.rst", "generated/torch.nn.functional.threshold.rst", "generated/torch.nn.functional.threshold_.rst", "generated/torch.nn.functional.torch.nn.parallel.data_parallel.rst", "generated/torch.nn.functional.triplet_margin_loss.rst", "generated/torch.nn.functional.triplet_margin_with_distance_loss.rst", "generated/torch.nn.functional.unfold.rst", "generated/torch.nn.functional.upsample.rst", 
"generated/torch.nn.functional.upsample_bilinear.rst", "generated/torch.nn.functional.upsample_nearest.rst", "generated/torch.nn.modules.lazy.LazyModuleMixin.rst", "generated/torch.nn.modules.module.register_module_backward_hook.rst", "generated/torch.nn.modules.module.register_module_buffer_registration_hook.rst", "generated/torch.nn.modules.module.register_module_forward_hook.rst", "generated/torch.nn.modules.module.register_module_forward_pre_hook.rst", "generated/torch.nn.modules.module.register_module_full_backward_hook.rst", "generated/torch.nn.modules.module.register_module_full_backward_pre_hook.rst", "generated/torch.nn.modules.module.register_module_module_registration_hook.rst", "generated/torch.nn.modules.module.register_module_parameter_registration_hook.rst", "generated/torch.nn.modules.normalization.RMSNorm.rst", "generated/torch.nn.parallel.DistributedDataParallel.rst", "generated/torch.nn.parameter.Parameter.rst", "generated/torch.nn.parameter.UninitializedBuffer.rst", "generated/torch.nn.parameter.UninitializedParameter.rst", "generated/torch.nn.utils.clip_grad_norm.rst", "generated/torch.nn.utils.clip_grad_norm_.rst", "generated/torch.nn.utils.clip_grad_value_.rst", "generated/torch.nn.utils.convert_conv2d_weight_memory_format.rst", "generated/torch.nn.utils.convert_conv3d_weight_memory_format.rst", "generated/torch.nn.utils.fuse_conv_bn_eval.rst", "generated/torch.nn.utils.fuse_conv_bn_weights.rst", "generated/torch.nn.utils.fuse_linear_bn_eval.rst", "generated/torch.nn.utils.fuse_linear_bn_weights.rst", "generated/torch.nn.utils.parameters_to_vector.rst", "generated/torch.nn.utils.parametrizations.orthogonal.rst", "generated/torch.nn.utils.parametrizations.spectral_norm.rst", "generated/torch.nn.utils.parametrizations.weight_norm.rst", "generated/torch.nn.utils.parametrize.ParametrizationList.rst", "generated/torch.nn.utils.parametrize.cached.rst", "generated/torch.nn.utils.parametrize.is_parametrized.rst", "generated/torch.nn.utils.parametrize.register_parametrization.rst", "generated/torch.nn.utils.parametrize.remove_parametrizations.rst", "generated/torch.nn.utils.prune.BasePruningMethod.rst", "generated/torch.nn.utils.prune.CustomFromMask.rst", "generated/torch.nn.utils.prune.Identity.rst", "generated/torch.nn.utils.prune.L1Unstructured.rst", "generated/torch.nn.utils.prune.LnStructured.rst", "generated/torch.nn.utils.prune.PruningContainer.rst", "generated/torch.nn.utils.prune.RandomStructured.rst", "generated/torch.nn.utils.prune.RandomUnstructured.rst", "generated/torch.nn.utils.prune.custom_from_mask.rst", "generated/torch.nn.utils.prune.global_unstructured.rst", "generated/torch.nn.utils.prune.identity.rst", "generated/torch.nn.utils.prune.is_pruned.rst", "generated/torch.nn.utils.prune.l1_unstructured.rst", "generated/torch.nn.utils.prune.ln_structured.rst", "generated/torch.nn.utils.prune.random_structured.rst", "generated/torch.nn.utils.prune.random_unstructured.rst", "generated/torch.nn.utils.prune.remove.rst", "generated/torch.nn.utils.remove_spectral_norm.rst", "generated/torch.nn.utils.remove_weight_norm.rst", "generated/torch.nn.utils.rnn.PackedSequence.rst", "generated/torch.nn.utils.rnn.pack_padded_sequence.rst", "generated/torch.nn.utils.rnn.pack_sequence.rst", "generated/torch.nn.utils.rnn.pad_packed_sequence.rst", "generated/torch.nn.utils.rnn.pad_sequence.rst", "generated/torch.nn.utils.rnn.unpack_sequence.rst", "generated/torch.nn.utils.rnn.unpad_sequence.rst", "generated/torch.nn.utils.skip_init.rst", 
"generated/torch.nn.utils.spectral_norm.rst", "generated/torch.nn.utils.stateless.functional_call.rst", "generated/torch.nn.utils.vector_to_parameters.rst", "generated/torch.nn.utils.weight_norm.rst", "generated/torch.no_grad.rst", "generated/torch.nonzero.rst", "generated/torch.norm.rst", "generated/torch.normal.rst", "generated/torch.not_equal.rst", "generated/torch.numel.rst", "generated/torch.ones.rst", "generated/torch.ones_like.rst", "generated/torch.onnx.JitScalarType.rst", "generated/torch.onnx.verification.GraphInfo.rst", "generated/torch.onnx.verification.VerificationOptions.rst", "generated/torch.optim.ASGD.rst", "generated/torch.optim.Adadelta.rst", "generated/torch.optim.Adagrad.rst", "generated/torch.optim.Adam.rst", "generated/torch.optim.AdamW.rst", "generated/torch.optim.Adamax.rst", "generated/torch.optim.LBFGS.rst", "generated/torch.optim.NAdam.rst", "generated/torch.optim.Optimizer.add_param_group.rst", "generated/torch.optim.Optimizer.load_state_dict.rst", "generated/torch.optim.Optimizer.state_dict.rst", "generated/torch.optim.Optimizer.step.rst", "generated/torch.optim.Optimizer.zero_grad.rst", "generated/torch.optim.RAdam.rst", "generated/torch.optim.RMSprop.rst", "generated/torch.optim.Rprop.rst", "generated/torch.optim.SGD.rst", "generated/torch.optim.SparseAdam.rst", "generated/torch.optim.lr_scheduler.ChainedScheduler.rst", "generated/torch.optim.lr_scheduler.ConstantLR.rst", "generated/torch.optim.lr_scheduler.CosineAnnealingLR.rst", "generated/torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.rst", "generated/torch.optim.lr_scheduler.CyclicLR.rst", "generated/torch.optim.lr_scheduler.ExponentialLR.rst", "generated/torch.optim.lr_scheduler.LambdaLR.rst", "generated/torch.optim.lr_scheduler.LinearLR.rst", "generated/torch.optim.lr_scheduler.MultiStepLR.rst", "generated/torch.optim.lr_scheduler.MultiplicativeLR.rst", "generated/torch.optim.lr_scheduler.OneCycleLR.rst", "generated/torch.optim.lr_scheduler.PolynomialLR.rst", "generated/torch.optim.lr_scheduler.ReduceLROnPlateau.rst", "generated/torch.optim.lr_scheduler.SequentialLR.rst", "generated/torch.optim.lr_scheduler.StepLR.rst", "generated/torch.orgqr.rst", "generated/torch.ormqr.rst", "generated/torch.outer.rst", "generated/torch.pca_lowrank.rst", "generated/torch.permute.rst", "generated/torch.pinverse.rst", "generated/torch.poisson.rst", "generated/torch.polar.rst", "generated/torch.polygamma.rst", "generated/torch.positive.rst", "generated/torch.pow.rst", "generated/torch.prod.rst", "generated/torch.promote_types.rst", "generated/torch.qr.rst", "generated/torch.quantile.rst", "generated/torch.quantize_per_channel.rst", "generated/torch.quantize_per_tensor.rst", "generated/torch.quantized_batch_norm.rst", "generated/torch.quantized_max_pool1d.rst", "generated/torch.quantized_max_pool2d.rst", "generated/torch.quasirandom.SobolEngine.rst", "generated/torch.rad2deg.rst", "generated/torch.rand.rst", "generated/torch.rand_like.rst", "generated/torch.randint.rst", "generated/torch.randint_like.rst", "generated/torch.randn.rst", "generated/torch.randn_like.rst", "generated/torch.randperm.rst", "generated/torch.range.rst", "generated/torch.ravel.rst", "generated/torch.real.rst", "generated/torch.reciprocal.rst", "generated/torch.remainder.rst", "generated/torch.renorm.rst", "generated/torch.repeat_interleave.rst", "generated/torch.reshape.rst", "generated/torch.resolve_conj.rst", "generated/torch.resolve_neg.rst", "generated/torch.result_type.rst", "generated/torch.roll.rst", "generated/torch.rot90.rst", 
"generated/torch.round.rst", "generated/torch.row_stack.rst", "generated/torch.rsqrt.rst", "generated/torch.save.rst", "generated/torch.scatter.rst", "generated/torch.scatter_add.rst", "generated/torch.scatter_reduce.rst", "generated/torch.searchsorted.rst", "generated/torch.seed.rst", "generated/torch.select.rst", "generated/torch.select_scatter.rst", "generated/torch.set_default_device.rst", "generated/torch.set_default_dtype.rst", "generated/torch.set_default_tensor_type.rst", "generated/torch.set_deterministic_debug_mode.rst", "generated/torch.set_float32_matmul_precision.rst", "generated/torch.set_flush_denormal.rst", "generated/torch.set_num_interop_threads.rst", "generated/torch.set_num_threads.rst", "generated/torch.set_printoptions.rst", "generated/torch.set_rng_state.rst", "generated/torch.set_warn_always.rst", "generated/torch.sgn.rst", "generated/torch.sigmoid.rst", "generated/torch.sign.rst", "generated/torch.signal.windows.bartlett.rst", "generated/torch.signal.windows.blackman.rst", "generated/torch.signal.windows.cosine.rst", "generated/torch.signal.windows.exponential.rst", "generated/torch.signal.windows.gaussian.rst", "generated/torch.signal.windows.general_cosine.rst", "generated/torch.signal.windows.general_hamming.rst", "generated/torch.signal.windows.hamming.rst", "generated/torch.signal.windows.hann.rst", "generated/torch.signal.windows.kaiser.rst", "generated/torch.signal.windows.nuttall.rst", "generated/torch.signbit.rst", "generated/torch.sin.rst", "generated/torch.sinc.rst", "generated/torch.sinh.rst", "generated/torch.slice_scatter.rst", "generated/torch.slogdet.rst", "generated/torch.smm.rst", "generated/torch.softmax.rst", "generated/torch.sort.rst", "generated/torch.sparse.addmm.rst", "generated/torch.sparse.as_sparse_gradcheck.rst", "generated/torch.sparse.check_sparse_tensor_invariants.rst", "generated/torch.sparse.log_softmax.rst", "generated/torch.sparse.mm.rst", "generated/torch.sparse.sampled_addmm.rst", "generated/torch.sparse.softmax.rst", "generated/torch.sparse.spdiags.rst", "generated/torch.sparse.sum.rst", "generated/torch.sparse_bsc_tensor.rst", "generated/torch.sparse_bsr_tensor.rst", "generated/torch.sparse_compressed_tensor.rst", "generated/torch.sparse_coo_tensor.rst", "generated/torch.sparse_csc_tensor.rst", "generated/torch.sparse_csr_tensor.rst", "generated/torch.split.rst", "generated/torch.sqrt.rst", "generated/torch.square.rst", "generated/torch.squeeze.rst", "generated/torch.sspaddmm.rst", "generated/torch.stack.rst", "generated/torch.std.rst", "generated/torch.std_mean.rst", "generated/torch.stft.rst", "generated/torch.sub.rst", "generated/torch.subtract.rst", "generated/torch.sum.rst", "generated/torch.svd.rst", "generated/torch.svd_lowrank.rst", "generated/torch.swapaxes.rst", "generated/torch.swapdims.rst", "generated/torch.sym_float.rst", "generated/torch.sym_int.rst", "generated/torch.sym_ite.rst", "generated/torch.sym_max.rst", "generated/torch.sym_min.rst", "generated/torch.sym_not.rst", "generated/torch.t.rst", "generated/torch.take.rst", "generated/torch.take_along_dim.rst", "generated/torch.tan.rst", "generated/torch.tanh.rst", "generated/torch.tensor.rst", "generated/torch.tensor_split.rst", "generated/torch.tensordot.rst", "generated/torch.tile.rst", "generated/torch.topk.rst", "generated/torch.trace.rst", "generated/torch.transpose.rst", "generated/torch.trapezoid.rst", "generated/torch.trapz.rst", "generated/torch.triangular_solve.rst", "generated/torch.tril.rst", "generated/torch.tril_indices.rst", 
"generated/torch.triu.rst", "generated/torch.triu_indices.rst", "generated/torch.true_divide.rst", "generated/torch.trunc.rst", "generated/torch.unbind.rst", "generated/torch.unflatten.rst", "generated/torch.unique.rst", "generated/torch.unique_consecutive.rst", "generated/torch.unravel_index.rst", "generated/torch.unsqueeze.rst", "generated/torch.use_deterministic_algorithms.rst", "generated/torch.utils.generate_methods_for_privateuse1_backend.rst", "generated/torch.utils.get_cpp_backtrace.rst", "generated/torch.utils.rename_privateuse1_backend.rst", "generated/torch.utils.set_module.rst", "generated/torch.utils.swap_tensors.rst", "generated/torch.vander.rst", "generated/torch.var.rst", "generated/torch.var_mean.rst", "generated/torch.vdot.rst", "generated/torch.view_as_complex.rst", "generated/torch.view_as_real.rst", "generated/torch.vmap.rst", "generated/torch.vsplit.rst", "generated/torch.vstack.rst", "generated/torch.where.rst", "generated/torch.xlogy.rst", "generated/torch.xpu.Event.rst", "generated/torch.xpu.Stream.rst", "generated/torch.xpu.StreamContext.rst", "generated/torch.xpu.current_device.rst", "generated/torch.xpu.current_stream.rst", "generated/torch.xpu.device.rst", "generated/torch.xpu.device_count.rst", "generated/torch.xpu.device_of.rst", "generated/torch.xpu.empty_cache.rst", "generated/torch.xpu.get_device_capability.rst", "generated/torch.xpu.get_device_name.rst", "generated/torch.xpu.get_device_properties.rst", "generated/torch.xpu.get_rng_state.rst", "generated/torch.xpu.get_rng_state_all.rst", "generated/torch.xpu.init.rst", "generated/torch.xpu.initial_seed.rst", "generated/torch.xpu.is_available.rst", "generated/torch.xpu.is_initialized.rst", "generated/torch.xpu.manual_seed.rst", "generated/torch.xpu.manual_seed_all.rst", "generated/torch.xpu.seed.rst", "generated/torch.xpu.seed_all.rst", "generated/torch.xpu.set_device.rst", "generated/torch.xpu.set_rng_state.rst", "generated/torch.xpu.set_rng_state_all.rst", "generated/torch.xpu.set_stream.rst", "generated/torch.xpu.stream.rst", "generated/torch.xpu.synchronize.rst", "generated/torch.zeros.rst", "generated/torch.zeros_like.rst", "hub.rst", "index.rst", "jit.rst", "jit_builtin_functions.rst", "jit_language_reference.rst", "jit_language_reference_v2.rst", "jit_python_reference.rst", "jit_unsupported.rst", "jit_utils.rst", "library.rst", "linalg.rst", "logging.rst", "masked.rst", "meta.rst", "miscellaneous_environment_variables.rst", "mobile_optimizer.rst", "model_zoo.rst", "module_tracker.rst", "monitor.rst", "mps.rst", "mtia.rst", "multiprocessing.rst", "name_inference.rst", "named_tensor.rst", "nested.rst", "nn.rst", "nn.attention.rst", "nn.attention.bias.rst", "nn.functional.rst", "nn.init.rst", "notes/amp_examples.rst", "notes/autograd.rst", "notes/broadcasting.rst", "notes/cpu_threading_torchscript_inference.rst", "notes/cuda.rst", "notes/custom_operators.rst", "notes/ddp.rst", "notes/extending.rst", "notes/extending.func.rst", "notes/faq.rst", "notes/fsdp.rst", "notes/gradcheck.rst", "notes/hip.rst", "notes/large_scale_deployments.rst", "notes/modules.rst", "notes/mps.rst", "notes/multiprocessing.rst", "notes/numerical_accuracy.rst", "notes/randomness.rst", "notes/serialization.rst", "notes/windows.rst", "onnx.rst", "onnx_dynamo.rst", "onnx_dynamo_onnxruntime_backend.rst", "onnx_torchscript.rst", "onnx_torchscript_supported_aten_ops.rst", "optim.rst", "package.rst", "profiler.rst", "quantization.rst", "quantization-accuracy-debugging.rst", "quantization-backend-configuration.rst", 
"quantization-support.rst", "random.rst", "rpc.rst", "rpc/distributed_autograd.rst", "rpc/rref.rst", "signal.rst", "size.rst", "sparse.rst", "special.rst", "storage.rst", "tensor_attributes.rst", "tensor_view.rst", "tensorboard.rst", "tensors.rst", "testing.rst", "threading_environment_variables.rst", "torch.rst", "torch.ao.ns._numeric_suite.rst", "torch.ao.ns._numeric_suite_fx.rst", "torch.compiler.rst", "torch.compiler_aot_inductor.rst", "torch.compiler_api.rst", "torch.compiler_best_practices_for_backends.rst", "torch.compiler_cudagraph_trees.rst", "torch.compiler_custom_backends.rst", "torch.compiler_dynamic_shapes.rst", "torch.compiler_dynamo_deepdive.rst", "torch.compiler_dynamo_overview.rst", "torch.compiler_fake_tensor.rst", "torch.compiler_faq.rst", "torch.compiler_fine_grain_apis.rst", "torch.compiler_get_started.rst", "torch.compiler_inductor_profiling.rst", "torch.compiler_ir.rst", "torch.compiler_nn_module.rst", "torch.compiler_performance_dashboard.rst", "torch.compiler_profiling_torch_compile.rst", "torch.compiler_transformations.rst", "torch.compiler_troubleshooting.rst", "torch.overrides.rst", "torch_cuda_memory.rst", "torch_environment_variables.rst", "torch_nccl_environment_variables.rst", "type_info.rst", "utils.rst", "xpu.rst"], "titles": ["Automatic Mixed Precision package - torch.amp", "Automatic differentiation package - torch.autograd", "torch.backends", "Benchmark Utils - torch.utils.benchmark", "torch.utils.bottleneck", "torch.utils.checkpoint", "PyTorch Governance | Build + CI", "PyTorch Contribution Guide", "PyTorch Design Philosophy", "PyTorch Governance | Mechanics", "PyTorch Governance | Maintainers", "Complex Numbers", "Control Flow - Cond", "torch.__config__", "torch.utils.cpp_extension", "C++", "torch.cpu", "torch.cuda", "CUDA Stream Sanitizer", "TunableOp", "CUDA Environment Variables", "<no title>", "<no title>", "torch.utils.data", "DDP Communication Hooks", "Debugging Environment Variables", "torch::deploy has been moved to pytorch/multipy", "torch.utils.deterministic", "Distributed communication package - torch.distributed", "Generic Join Context Manager", "Distributed Checkpoint - torch.distributed.checkpoint", "Torch Distributed Elastic", "Distributed Optimizers", "Pipeline Parallelism", "Tensor Parallelism - torch.distributed.tensor.parallel", "Probability distributions - torch.distributions", "torch.utils.dlpack", "Elastic Agent", "Control Plane", "Customization", "Error Propagation", "Events", "Examples", "TorchElastic Kubernetes", "Metrics", "Multiprocessing", "Quickstart", "Rendezvous", "torchrun (Elastic Launch)", "Subprocess Handling", "Expiration Timers", "Train script", "torch.export", "torch.export IR Specification", "torch.fft", "FullyShardedDataParallel", "torch.func", "torch.func API Reference", "Patching Batch Norm", "Migrating from functorch to torch.func", "UX Limitations", "torch.func Whirlwind Tour", "torch.__future__", "torch.futures", "torch.fx", "torch.fx.experimental", "ExportDB", "python.assert", "python.builtin", "python.closure", "python.context-manager", "python.control-flow", "python.data-structure", "python.object-model", "torch.cond", "torch.dynamic-shape", "torch.dynamic-value", "torch.escape-hatch", "torch.map", "torch.mutation", "torch.operator", "FXE0007:fx-graph-to-onnx", "FXE0008:fx-node-to-onnx", "FXE0010:fx-pass", "FXE0011:no-symbolic-function-for-call-function", "FXE0012:unsupported-fx-node-analysis", "FXE0013:op-level-debugging", "FXE0014:find-opschema-matched-symbolic-function", 
"FXE0015:fx-node-insert-type-promotion", "FXE0016:find-operator-overloads-in-onnx-registry", "Generator", "torch.Tensor.abs", "torch.Tensor.abs_", "torch.Tensor.absolute", "torch.Tensor.absolute_", "torch.Tensor.acos", "torch.Tensor.acos_", "torch.Tensor.acosh", "torch.Tensor.acosh_", "torch.Tensor.add", "torch.Tensor.add_", "torch.Tensor.addbmm", "torch.Tensor.addbmm_", "torch.Tensor.addcdiv", "torch.Tensor.addcdiv_", "torch.Tensor.addcmul", "torch.Tensor.addcmul_", "torch.Tensor.addmm", "torch.Tensor.addmm_", "torch.Tensor.addmv", "torch.Tensor.addmv_", "torch.Tensor.addr", "torch.Tensor.addr_", "torch.Tensor.adjoint", "torch.Tensor.all", "torch.Tensor.allclose", "torch.Tensor.amax", "torch.Tensor.amin", "torch.Tensor.aminmax", "torch.Tensor.angle", "torch.Tensor.any", "torch.Tensor.apply_", "torch.Tensor.arccos", "torch.Tensor.arccos_", "torch.Tensor.arccosh", "torch.Tensor.arccosh_", "torch.Tensor.arcsin", "torch.Tensor.arcsin_", "torch.Tensor.arcsinh", "torch.Tensor.arcsinh_", "torch.Tensor.arctan", "torch.Tensor.arctan2", "torch.Tensor.arctan2_", "torch.Tensor.arctan_", "torch.Tensor.arctanh", "torch.Tensor.arctanh_", "torch.Tensor.argmax", "torch.Tensor.argmin", "torch.Tensor.argsort", "torch.Tensor.argwhere", "torch.Tensor.as_strided", "torch.Tensor.as_subclass", "torch.Tensor.asin", "torch.Tensor.asin_", "torch.Tensor.asinh", "torch.Tensor.asinh_", "torch.Tensor.atan", "torch.Tensor.atan2", "torch.Tensor.atan2_", "torch.Tensor.atan_", "torch.Tensor.atanh", "torch.Tensor.atanh_", "torch.Tensor.backward", "torch.Tensor.baddbmm", "torch.Tensor.baddbmm_", "torch.Tensor.bernoulli", "torch.Tensor.bernoulli_", "torch.Tensor.bfloat16", "torch.Tensor.bincount", "torch.Tensor.bitwise_and", "torch.Tensor.bitwise_and_", "torch.Tensor.bitwise_left_shift", "torch.Tensor.bitwise_left_shift_", "torch.Tensor.bitwise_not", "torch.Tensor.bitwise_not_", "torch.Tensor.bitwise_or", "torch.Tensor.bitwise_or_", "torch.Tensor.bitwise_right_shift", "torch.Tensor.bitwise_right_shift_", "torch.Tensor.bitwise_xor", "torch.Tensor.bitwise_xor_", "torch.Tensor.bmm", "torch.Tensor.bool", "torch.Tensor.broadcast_to", "torch.Tensor.byte", "torch.Tensor.cauchy_", "torch.Tensor.ccol_indices", "torch.Tensor.cdouble", "torch.Tensor.ceil", "torch.Tensor.ceil_", "torch.Tensor.cfloat", "torch.Tensor.chalf", "torch.Tensor.char", "torch.Tensor.cholesky", "torch.Tensor.cholesky_inverse", "torch.Tensor.cholesky_solve", "torch.Tensor.chunk", "torch.Tensor.clamp", "torch.Tensor.clamp_", "torch.Tensor.clip", "torch.Tensor.clip_", "torch.Tensor.clone", "torch.Tensor.coalesce", "torch.Tensor.col_indices", "torch.Tensor.conj", "torch.Tensor.conj_physical", "torch.Tensor.conj_physical_", "torch.Tensor.contiguous", "torch.Tensor.copy_", "torch.Tensor.copysign", "torch.Tensor.copysign_", "torch.Tensor.corrcoef", "torch.Tensor.cos", "torch.Tensor.cos_", "torch.Tensor.cosh", "torch.Tensor.cosh_", "torch.Tensor.count_nonzero", "torch.Tensor.cov", "torch.Tensor.cpu", "torch.Tensor.cross", "torch.Tensor.crow_indices", "torch.Tensor.cuda", "torch.Tensor.cummax", "torch.Tensor.cummin", "torch.Tensor.cumprod", "torch.Tensor.cumprod_", "torch.Tensor.cumsum", "torch.Tensor.cumsum_", "torch.Tensor.data_ptr", "torch.Tensor.deg2rad", "torch.Tensor.dense_dim", "torch.Tensor.dequantize", "torch.Tensor.det", "torch.Tensor.detach", "torch.Tensor.detach_", "torch.Tensor.device", "torch.Tensor.diag", "torch.Tensor.diag_embed", "torch.Tensor.diagflat", "torch.Tensor.diagonal", "torch.Tensor.diagonal_scatter", "torch.Tensor.diff", "torch.Tensor.digamma", 
"torch.Tensor.digamma_", "torch.Tensor.dim", "torch.Tensor.dim_order", "torch.Tensor.dist", "torch.Tensor.div", "torch.Tensor.div_", "torch.Tensor.divide", "torch.Tensor.divide_", "torch.Tensor.dot", "torch.Tensor.double", "torch.Tensor.dsplit", "torch.Tensor.element_size", "torch.Tensor.eq", "torch.Tensor.eq_", "torch.Tensor.equal", "torch.Tensor.erf", "torch.Tensor.erf_", "torch.Tensor.erfc", "torch.Tensor.erfc_", "torch.Tensor.erfinv", "torch.Tensor.erfinv_", "torch.Tensor.exp", "torch.Tensor.exp_", "torch.Tensor.expand", "torch.Tensor.expand_as", "torch.Tensor.expm1", "torch.Tensor.expm1_", "torch.Tensor.exponential_", "torch.Tensor.fill_", "torch.Tensor.fill_diagonal_", "torch.Tensor.fix", "torch.Tensor.fix_", "torch.Tensor.flatten", "torch.Tensor.flip", "torch.Tensor.fliplr", "torch.Tensor.flipud", "torch.Tensor.float", "torch.Tensor.float_power", "torch.Tensor.float_power_", "torch.Tensor.floor", "torch.Tensor.floor_", "torch.Tensor.floor_divide", "torch.Tensor.floor_divide_", "torch.Tensor.fmax", "torch.Tensor.fmin", "torch.Tensor.fmod", "torch.Tensor.fmod_", "torch.Tensor.frac", "torch.Tensor.frac_", "torch.Tensor.frexp", "torch.Tensor.gather", "torch.Tensor.gcd", "torch.Tensor.gcd_", "torch.Tensor.ge", "torch.Tensor.ge_", "torch.Tensor.geometric_", "torch.Tensor.geqrf", "torch.Tensor.ger", "torch.Tensor.get_device", "torch.Tensor.grad", "torch.Tensor.greater", "torch.Tensor.greater_", "torch.Tensor.greater_equal", "torch.Tensor.greater_equal_", "torch.Tensor.gt", "torch.Tensor.gt_", "torch.Tensor.half", "torch.Tensor.hardshrink", "torch.Tensor.heaviside", "torch.Tensor.histc", "torch.Tensor.histogram", "torch.Tensor.hsplit", "torch.Tensor.hypot", "torch.Tensor.hypot_", "torch.Tensor.i0", "torch.Tensor.i0_", "torch.Tensor.igamma", "torch.Tensor.igamma_", "torch.Tensor.igammac", "torch.Tensor.igammac_", "torch.Tensor.imag", "torch.Tensor.index_add", "torch.Tensor.index_add_", "torch.Tensor.index_copy", "torch.Tensor.index_copy_", "torch.Tensor.index_fill", "torch.Tensor.index_fill_", "torch.Tensor.index_put", "torch.Tensor.index_put_", "torch.Tensor.index_reduce", "torch.Tensor.index_reduce_", "torch.Tensor.index_select", "torch.Tensor.indices", "torch.Tensor.inner", "torch.Tensor.int", "torch.Tensor.int_repr", "torch.Tensor.inverse", "torch.Tensor.is_coalesced", "torch.Tensor.is_complex", "torch.Tensor.is_conj", "torch.Tensor.is_contiguous", "torch.Tensor.is_cuda", "torch.Tensor.is_floating_point", "torch.Tensor.is_inference", "torch.Tensor.is_leaf", "torch.Tensor.is_meta", "torch.Tensor.is_pinned", "torch.Tensor.is_quantized", "torch.Tensor.is_set_to", "torch.Tensor.is_shared", "torch.Tensor.is_signed", "torch.Tensor.is_sparse", "torch.Tensor.is_sparse_csr", "torch.Tensor.isclose", "torch.Tensor.isfinite", "torch.Tensor.isinf", "torch.Tensor.isnan", "torch.Tensor.isneginf", "torch.Tensor.isposinf", "torch.Tensor.isreal", "torch.Tensor.istft", "torch.Tensor.item", "torch.Tensor.itemsize", "torch.Tensor.kthvalue", "torch.Tensor.lcm", "torch.Tensor.lcm_", "torch.Tensor.ldexp", "torch.Tensor.ldexp_", "torch.Tensor.le", "torch.Tensor.le_", "torch.Tensor.lerp", "torch.Tensor.lerp_", "torch.Tensor.less", "torch.Tensor.less_", "torch.Tensor.less_equal", "torch.Tensor.less_equal_", "torch.Tensor.lgamma", "torch.Tensor.lgamma_", "torch.Tensor.log", "torch.Tensor.log10", "torch.Tensor.log10_", "torch.Tensor.log1p", "torch.Tensor.log1p_", "torch.Tensor.log2", "torch.Tensor.log2_", "torch.Tensor.log_", "torch.Tensor.log_normal_", "torch.Tensor.logaddexp", "torch.Tensor.logaddexp2", 
"torch.Tensor.logcumsumexp", "torch.Tensor.logdet", "torch.Tensor.logical_and", "torch.Tensor.logical_and_", "torch.Tensor.logical_not", "torch.Tensor.logical_not_", "torch.Tensor.logical_or", "torch.Tensor.logical_or_", "torch.Tensor.logical_xor", "torch.Tensor.logical_xor_", "torch.Tensor.logit", "torch.Tensor.logit_", "torch.Tensor.logsumexp", "torch.Tensor.long", "torch.Tensor.lt", "torch.Tensor.lt_", "torch.Tensor.lu", "torch.Tensor.lu_solve", "torch.Tensor.map_", "torch.Tensor.masked_fill", "torch.Tensor.masked_fill_", "torch.Tensor.masked_scatter", "torch.Tensor.masked_scatter_", "torch.Tensor.masked_select", "torch.Tensor.matmul", "torch.Tensor.matrix_exp", "torch.Tensor.matrix_power", "torch.Tensor.max", "torch.Tensor.maximum", "torch.Tensor.mean", "torch.Tensor.median", "torch.Tensor.min", "torch.Tensor.minimum", "torch.Tensor.mm", "torch.Tensor.mode", "torch.Tensor.module_load", "torch.Tensor.moveaxis", "torch.Tensor.movedim", "torch.Tensor.msort", "torch.Tensor.mul", "torch.Tensor.mul_", "torch.Tensor.multinomial", "torch.Tensor.multiply", "torch.Tensor.multiply_", "torch.Tensor.mv", "torch.Tensor.mvlgamma", "torch.Tensor.mvlgamma_", "torch.Tensor.nan_to_num", "torch.Tensor.nan_to_num_", "torch.Tensor.nanmean", "torch.Tensor.nanmedian", "torch.Tensor.nanquantile", "torch.Tensor.nansum", "torch.Tensor.narrow", "torch.Tensor.narrow_copy", "torch.Tensor.nbytes", "torch.Tensor.ndim", "torch.Tensor.ndimension", "torch.Tensor.ne", "torch.Tensor.ne_", "torch.Tensor.neg", "torch.Tensor.neg_", "torch.Tensor.negative", "torch.Tensor.negative_", "torch.Tensor.nelement", "torch.Tensor.new_empty", "torch.Tensor.new_full", "torch.Tensor.new_ones", "torch.Tensor.new_tensor", "torch.Tensor.new_zeros", "torch.Tensor.nextafter", "torch.Tensor.nextafter_", "torch.Tensor.nonzero", "torch.Tensor.norm", "torch.Tensor.normal_", "torch.Tensor.not_equal", "torch.Tensor.not_equal_", "torch.Tensor.numel", "torch.Tensor.numpy", "torch.Tensor.orgqr", "torch.Tensor.ormqr", "torch.Tensor.outer", "torch.Tensor.permute", "torch.Tensor.pin_memory", "torch.Tensor.pinverse", "torch.Tensor.polygamma", "torch.Tensor.polygamma_", "torch.Tensor.positive", "torch.Tensor.pow", "torch.Tensor.pow_", "torch.Tensor.prod", "torch.Tensor.put_", "torch.Tensor.q_per_channel_axis", "torch.Tensor.q_per_channel_scales", "torch.Tensor.q_per_channel_zero_points", "torch.Tensor.q_scale", "torch.Tensor.q_zero_point", "torch.Tensor.qr", "torch.Tensor.qscheme", "torch.Tensor.quantile", "torch.Tensor.rad2deg", "torch.Tensor.random_", "torch.Tensor.ravel", "torch.Tensor.real", "torch.Tensor.reciprocal", "torch.Tensor.reciprocal_", "torch.Tensor.record_stream", "torch.Tensor.register_hook", "torch.Tensor.register_post_accumulate_grad_hook", "torch.Tensor.remainder", "torch.Tensor.remainder_", "torch.Tensor.renorm", "torch.Tensor.renorm_", "torch.Tensor.repeat", "torch.Tensor.repeat_interleave", "torch.Tensor.requires_grad", "torch.Tensor.requires_grad_", "torch.Tensor.reshape", "torch.Tensor.reshape_as", "torch.Tensor.resize_", "torch.Tensor.resize_as_", "torch.Tensor.resolve_conj", "torch.Tensor.resolve_neg", "torch.Tensor.retain_grad", "torch.Tensor.retains_grad", "torch.Tensor.roll", "torch.Tensor.rot90", "torch.Tensor.round", "torch.Tensor.round_", "torch.Tensor.row_indices", "torch.Tensor.rsqrt", "torch.Tensor.rsqrt_", "torch.Tensor.scatter", "torch.Tensor.scatter_", "torch.Tensor.scatter_add", "torch.Tensor.scatter_add_", "torch.Tensor.scatter_reduce", "torch.Tensor.scatter_reduce_", "torch.Tensor.select", 
"torch.Tensor.select_scatter", "torch.Tensor.set_", "torch.Tensor.sgn", "torch.Tensor.sgn_", "torch.Tensor.shape", "torch.Tensor.share_memory_", "torch.Tensor.short", "torch.Tensor.sigmoid", "torch.Tensor.sigmoid_", "torch.Tensor.sign", "torch.Tensor.sign_", "torch.Tensor.signbit", "torch.Tensor.sin", "torch.Tensor.sin_", "torch.Tensor.sinc", "torch.Tensor.sinc_", "torch.Tensor.sinh", "torch.Tensor.sinh_", "torch.Tensor.size", "torch.Tensor.slice_scatter", "torch.Tensor.slogdet", "torch.Tensor.smm", "torch.Tensor.softmax", "torch.Tensor.sort", "torch.Tensor.sparse_dim", "torch.Tensor.sparse_mask", "torch.Tensor.sparse_resize_", "torch.Tensor.sparse_resize_and_clear_", "torch.Tensor.split", "torch.Tensor.sqrt", "torch.Tensor.sqrt_", "torch.Tensor.square", "torch.Tensor.square_", "torch.Tensor.squeeze", "torch.Tensor.squeeze_", "torch.Tensor.sspaddmm", "torch.Tensor.std", "torch.Tensor.stft", "torch.Tensor.storage", "torch.Tensor.storage_offset", "torch.Tensor.storage_type", "torch.Tensor.stride", "torch.Tensor.sub", "torch.Tensor.sub_", "torch.Tensor.subtract", "torch.Tensor.subtract_", "torch.Tensor.sum", "torch.Tensor.sum_to_size", "torch.Tensor.svd", "torch.Tensor.swapaxes", "torch.Tensor.swapdims", "torch.Tensor.t", "torch.Tensor.t_", "torch.Tensor.take", "torch.Tensor.take_along_dim", "torch.Tensor.tan", "torch.Tensor.tan_", "torch.Tensor.tanh", "torch.Tensor.tanh_", "torch.Tensor.tensor_split", "torch.Tensor.tile", "torch.Tensor.to", "torch.Tensor.to_dense", "torch.Tensor.to_mkldnn", "torch.Tensor.to_sparse", "torch.Tensor.to_sparse_bsc", "torch.Tensor.to_sparse_bsr", "torch.Tensor.to_sparse_coo", "torch.Tensor.to_sparse_csc", "torch.Tensor.to_sparse_csr", "torch.Tensor.tolist", "torch.Tensor.topk", "torch.Tensor.trace", "torch.Tensor.transpose", "torch.Tensor.transpose_", "torch.Tensor.triangular_solve", "torch.Tensor.tril", "torch.Tensor.tril_", "torch.Tensor.triu", "torch.Tensor.triu_", "torch.Tensor.true_divide", "torch.Tensor.true_divide_", "torch.Tensor.trunc", "torch.Tensor.trunc_", "torch.Tensor.type", "torch.Tensor.type_as", "torch.Tensor.unbind", "torch.Tensor.unflatten", "torch.Tensor.unfold", "torch.Tensor.uniform_", "torch.Tensor.unique", "torch.Tensor.unique_consecutive", "torch.Tensor.unsqueeze", "torch.Tensor.unsqueeze_", "torch.Tensor.untyped_storage", "torch.Tensor.values", "torch.Tensor.var", "torch.Tensor.vdot", "torch.Tensor.view", "torch.Tensor.view_as", "torch.Tensor.vsplit", "torch.Tensor.where", "torch.Tensor.xlogy", "torch.Tensor.xlogy_", "torch.Tensor.zero_", "torch._assert", "torch._foreach_abs", "torch._foreach_abs_", "torch._foreach_acos", "torch._foreach_acos_", "torch._foreach_asin", "torch._foreach_asin_", "torch._foreach_atan", "torch._foreach_atan_", "torch._foreach_ceil", "torch._foreach_ceil_", "torch._foreach_cos", "torch._foreach_cos_", "torch._foreach_cosh", "torch._foreach_cosh_", "torch._foreach_erf", "torch._foreach_erf_", "torch._foreach_erfc", "torch._foreach_erfc_", "torch._foreach_exp", "torch._foreach_exp_", "torch._foreach_expm1", "torch._foreach_expm1_", "torch._foreach_floor", "torch._foreach_floor_", "torch._foreach_frac", "torch._foreach_frac_", "torch._foreach_lgamma", "torch._foreach_lgamma_", "torch._foreach_log", "torch._foreach_log10", "torch._foreach_log10_", "torch._foreach_log1p", "torch._foreach_log1p_", "torch._foreach_log2", "torch._foreach_log2_", "torch._foreach_log_", "torch._foreach_neg", "torch._foreach_neg_", "torch._foreach_reciprocal", "torch._foreach_reciprocal_", "torch._foreach_round", "torch._foreach_round_", 
"torch._foreach_sigmoid", "torch._foreach_sigmoid_", "torch._foreach_sin", "torch._foreach_sin_", "torch._foreach_sinh", "torch._foreach_sinh_", "torch._foreach_sqrt", "torch._foreach_sqrt_", "torch._foreach_tan", "torch._foreach_tan_", "torch._foreach_trunc", "torch._foreach_trunc_", "torch._foreach_zero_", "torch._logging.set_logs", "torch.abs", "torch.absolute", "torch.acos", "torch.acosh", "torch.add", "torch.addbmm", "torch.addcdiv", "torch.addcmul", "torch.addmm", "torch.addmv", "torch.addr", "torch.adjoint", "torch.all", "torch.allclose", "torch.amax", "torch.amin", "torch.aminmax", "torch.angle", "torch.any", "BNReLU2d", "BNReLU3d", "ConvBn1d", "ConvBn2d", "ConvBn3d", "ConvBnReLU1d", "ConvBnReLU2d", "ConvBnReLU3d", "ConvReLU1d", "ConvReLU2d", "ConvReLU3d", "LinearReLU", "ConvBn1d", "ConvBn2d", "ConvBn3d", "ConvBnReLU1d", "ConvBnReLU2d", "ConvBnReLU3d", "ConvReLU2d", "ConvReLU3d", "LinearReLU", "freeze_bn_stats", "update_bn_stats", "BNReLU2d", "BNReLU3d", "ConvReLU1d", "ConvReLU2d", "ConvReLU3d", "LinearReLU", "LinearReLU", "Conv2d", "Conv3d", "Linear", "Linear", "LSTM", "MultiheadAttention", "BatchNorm2d", "BatchNorm3d", "Conv1d", "Conv2d", "Conv3d", "ConvTranspose1d", "ConvTranspose2d", "ConvTranspose3d", "ELU", "Embedding", "EmbeddingBag", "FXFloatFunctional", "FloatFunctional", "GroupNorm", "Hardswish", "InstanceNorm1d", "InstanceNorm2d", "InstanceNorm3d", "LayerNorm", "LeakyReLU", "Linear", "QFunctional", "ReLU6", "Sigmoid", "GRU", "GRUCell", "LSTM", "LSTMCell", "Linear", "RNNCell", "adaptive_avg_pool2d", "adaptive_avg_pool3d", "avg_pool2d", "avg_pool3d", "celu", "clamp", "conv1d", "conv2d", "conv3d", "elu", "hardsigmoid", "hardswish", "hardtanh", "interpolate", "leaky_relu", "linear", "max_pool1d", "max_pool2d", "threshold", "upsample", "upsample_bilinear", "upsample_nearest", "DeQuantStub", "QuantStub", "QuantWrapper", "add_quant_dequant", "BackendConfig", "BackendPatternConfig", "DTypeConfig", "DTypeWithConstraints", "ObservationType", "convert", "default_eval_fn", "FakeQuantize", "FakeQuantizeBase", "FixedQParamsFakeQuantize", "FusedMovingAvgObsFakeQuantize", "default_fake_quant", "default_fused_act_fake_quant", "default_fused_per_channel_wt_fake_quant", "default_fused_wt_fake_quant", "default_histogram_fake_quant", "default_per_channel_weight_fake_quant", "default_weight_fake_quant", "disable_fake_quant", "disable_observer", "enable_fake_quant", "enable_observer", "fuse_modules", "ConvertCustomConfig", "FuseCustomConfig", "PrepareCustomConfig", "StandaloneModuleConfigEntry", "HistogramObserver", "MinMaxObserver", "MovingAverageMinMaxObserver", "MovingAveragePerChannelMinMaxObserver", "NoopObserver", "ObserverBase", "PerChannelMinMaxObserver", "PlaceholderObserver", "RecordingObserver", "default_debug_observer", "default_dynamic_quant_observer", "default_float_qparams_observer", "default_histogram_observer", "default_observer", "default_per_channel_weight_observer", "default_placeholder_observer", "default_weight_observer", "get_observer_state_dict", "load_observer_state_dict", "prepare", "prepare_qat", "propagate_qconfig", "model_is_exported", "QConfig", "default_activation_only_qconfig", "default_debug_qconfig", "default_dynamic_qconfig", "default_per_channel_qconfig", "default_qat_qconfig", "default_qat_qconfig_v2", "default_qconfig", "default_weight_only_qconfig", "float16_dynamic_qconfig", "float16_static_qconfig", "float_qparams_weight_only_qconfig", "per_channel_dynamic_qconfig", "QConfigMapping", "get_default_qat_qconfig_mapping", "get_default_qconfig_mapping", 
"quantize", "quantize_dynamic", "convert_fx", "fuse_fx", "prepare_fx", "prepare_qat_fx", "quantize_qat", "swap_module", "torch.arange", "torch.arccos", "torch.arccosh", "torch.arcsin", "torch.arcsinh", "torch.arctan", "torch.arctan2", "torch.arctanh", "torch.are_deterministic_algorithms_enabled", "torch.argmax", "torch.argmin", "torch.argsort", "torch.argwhere", "torch.as_strided", "torch.as_tensor", "torch.asarray", "torch.asin", "torch.asinh", "torch.atan", "torch.atan2", "torch.atanh", "torch.atleast_1d", "torch.atleast_2d", "torch.atleast_3d", "torch.autograd.Function.backward", "torch.autograd.Function.forward", "torch.autograd.Function.jvp", "torch.autograd.Function.vmap", "torch.autograd.backward", "UnpackedDualTensor", "dual_level", "torch.autograd.forward_ad.enter_dual_level", "torch.autograd.forward_ad.exit_dual_level", "torch.autograd.forward_ad.make_dual", "torch.autograd.forward_ad.unpack_dual", "BackwardCFunction", "torch.autograd.function.FunctionCtx.mark_dirty", "torch.autograd.function.FunctionCtx.mark_non_differentiable", "torch.autograd.function.FunctionCtx.save_for_backward", "torch.autograd.function.FunctionCtx.set_materialize_grads", "InplaceFunction", "NestedIOFunction", "torch.autograd.function.once_differentiable", "torch.autograd.functional.hessian", "torch.autograd.functional.hvp", "torch.autograd.functional.jacobian", "torch.autograd.functional.jvp", "torch.autograd.functional.vhp", "torch.autograd.functional.vjp", "torch.autograd.grad", "inference_mode", "set_grad_enabled", "set_multithreading_enabled", "torch.autograd.gradcheck.GradcheckError", "torch.autograd.gradcheck.gradcheck", "torch.autograd.gradcheck.gradgradcheck", "torch.autograd.graph.Node.metadata", "torch.autograd.graph.Node.name", "torch.autograd.graph.Node.next_functions", "torch.autograd.graph.Node.register_hook", "torch.autograd.graph.Node.register_prehook", "torch.autograd.graph.increment_version", "EnforceUnique", "KinetoStepTracker", "torch.autograd.profiler.load_nvprof", "torch.autograd.profiler.parse_nvprof_trace", "torch.autograd.profiler.profile.export_chrome_trace", "torch.autograd.profiler.profile.key_averages", "torch.autograd.profiler.profile.self_cpu_time_total", "torch.autograd.profiler.profile.total_average", "record_function", "Interval", "Kernel", "MemRecordsAcc", "StringTable", "torch.baddbmm", "torch.bartlett_window", "torch.bernoulli", "torch.bincount", "torch.bitwise_and", "torch.bitwise_left_shift", "torch.bitwise_not", "torch.bitwise_or", "torch.bitwise_right_shift", "torch.bitwise_xor", "torch.blackman_window", "torch.block_diag", "torch.bmm", "torch.broadcast_shapes", "torch.broadcast_tensors", "torch.broadcast_to", "torch.bucketize", "torch.can_cast", "torch.cartesian_prod", "torch.cat", "torch.cdist", "torch.ceil", "torch.chain_matmul", "torch.cholesky", "torch.cholesky_inverse", "torch.cholesky_solve", "torch.chunk", "torch.clamp", "torch.clip", "torch.clone", "torch.column_stack", "torch.combinations", "torch.compile", "torch.compiled_with_cxx11_abi", "torch.compiler.allow_in_graph", "torch.compiler.assume_constant_result", "torch.compiler.compile", "torch.compiler.cudagraph_mark_step_begin", "torch.compiler.disable", "torch.compiler.is_compiling", "torch.compiler.is_dynamo_compiling", "torch.compiler.list_backends", "torch.compiler.reset", "torch.complex", "torch.concat", "torch.concatenate", "torch.cond", "torch.conj", "torch.conj_physical", "torch.copysign", "torch.corrcoef", "torch.cos", "torch.cosh", "torch.count_nonzero", "torch.cov", "Stream", "StreamContext", 
"torch.cpu.current_device", "torch.cpu.current_stream", "torch.cpu.device_count", "torch.cpu.is_available", "torch.cpu.set_device", "torch.cpu.stream", "torch.cpu.synchronize", "torch.cross", "CUDAGraph", "CUDAPluggableAllocator", "Event", "ExternalStream", "torch.cuda.OutOfMemoryError", "Stream", "StreamContext", "torch.cuda.caching_allocator_alloc", "torch.cuda.caching_allocator_delete", "torch.cuda.can_device_access_peer", "torch.cuda.change_current_allocator", "torch.cuda.clock_rate", "torch.cuda.comm.broadcast", "torch.cuda.comm.broadcast_coalesced", "torch.cuda.comm.gather", "torch.cuda.comm.reduce_add", "torch.cuda.comm.scatter", "torch.cuda.current_blas_handle", "torch.cuda.current_device", "torch.cuda.current_stream", "torch.cuda.default_stream", "device", "torch.cuda.device_count", "device_of", "torch.cuda.empty_cache", "torch.cuda.get_allocator_backend", "torch.cuda.get_arch_list", "torch.cuda.get_device_capability", "torch.cuda.get_device_name", "torch.cuda.get_device_properties", "torch.cuda.get_gencode_flags", "torch.cuda.get_rng_state", "torch.cuda.get_rng_state_all", "torch.cuda.get_sync_debug_mode", "graph", "torch.cuda.graph_pool_handle", "torch.cuda.init", "torch.cuda.initial_seed", "torch.cuda.ipc_collect", "torch.cuda.is_available", "torch.cuda.is_current_stream_capturing", "torch.cuda.is_initialized", "torch.cuda.jiterator._create_jit_fn", "torch.cuda.jiterator._create_multi_output_jit_fn", "torch.cuda.list_gpu_processes", "torch.cuda.make_graphed_callables", "torch.cuda.manual_seed", "torch.cuda.manual_seed_all", "torch.cuda.max_memory_allocated", "torch.cuda.max_memory_cached", "torch.cuda.max_memory_reserved", "torch.cuda.mem_get_info", "torch.cuda.memory_allocated", "torch.cuda.memory_cached", "torch.cuda.memory_reserved", "torch.cuda.memory_snapshot", "torch.cuda.memory_stats", "torch.cuda.memory_summary", "torch.cuda.memory_usage", "torch.cuda.nvtx.mark", "torch.cuda.nvtx.range", "torch.cuda.nvtx.range_pop", "torch.cuda.nvtx.range_push", "torch.cuda.power_draw", "torch.cuda.reset_max_memory_allocated", "torch.cuda.reset_max_memory_cached", "torch.cuda.reset_peak_memory_stats", "torch.cuda.seed", "torch.cuda.seed_all", "torch.cuda.set_device", "torch.cuda.set_per_process_memory_fraction", "torch.cuda.set_rng_state", "torch.cuda.set_rng_state_all", "torch.cuda.set_stream", "torch.cuda.set_sync_debug_mode", "torch.cuda.stream", "torch.cuda.synchronize", "torch.cuda.temperature", "torch.cuda.utilization", "torch.cummax", "torch.cummin", "torch.cumprod", "torch.cumsum", "torch.cumulative_trapezoid", "torch.deg2rad", "torch.dequantize", "torch.det", "torch.diag", "torch.diag_embed", "torch.diagflat", "torch.diagonal", "torch.diagonal_scatter", "torch.diff", "torch.digamma", "torch.dist", "torch.div", "torch.divide", "torch.dot", "torch.dsplit", "torch.dstack", "torch.einsum", "torch.empty", "torch.empty_like", "torch.empty_strided", "enable_grad", "torch.eq", "torch.equal", "torch.erf", "torch.erfc", "torch.erfinv", "torch.exp", "torch.exp2", "torch.expm1", "torch.eye", "torch.fake_quantize_per_channel_affine", "torch.fake_quantize_per_tensor_affine", "torch.fft.fft", "torch.fft.fft2", "torch.fft.fftfreq", "torch.fft.fftn", "torch.fft.fftshift", "torch.fft.hfft", "torch.fft.hfft2", "torch.fft.hfftn", "torch.fft.ifft", "torch.fft.ifft2", "torch.fft.ifftn", "torch.fft.ifftshift", "torch.fft.ihfft", "torch.fft.ihfft2", "torch.fft.ihfftn", "torch.fft.irfft", "torch.fft.irfft2", "torch.fft.irfftn", "torch.fft.rfft", "torch.fft.rfft2", "torch.fft.rfftfreq", 
"torch.fft.rfftn", "torch.fix", "torch.flatten", "torch.flip", "torch.fliplr", "torch.flipud", "torch.float_power", "torch.floor", "torch.floor_divide", "torch.fmax", "torch.fmin", "torch.fmod", "torch.frac", "torch.frexp", "torch.from_dlpack", "torch.from_file", "torch.from_numpy", "torch.frombuffer", "torch.full", "torch.full_like", "torch.func.functional_call", "torch.func.functionalize", "torch.func.grad", "torch.func.grad_and_value", "torch.func.hessian", "torch.func.jacfwd", "torch.func.jacrev", "torch.func.jvp", "torch.func.linearize", "torch.func.replace_all_batch_norm_modules_", "torch.func.stack_module_state", "torch.func.vjp", "torch.func.vmap", "CallMethodKey", "ConvertIntKey", "DimConstraints", "DimDynamic", "DivideByKey", "EqualityConstraint", "InnerTensorKey", "PropagateUnbackedSymInts", "RelaxedUnspecConstraint", "ShapeEnv", "ShapeEnvSettings", "StatefulSymbolicContext", "StatelessSymbolicContext", "StrictMinMaxConstraint", "SubclassSymbolicContext", "SymbolicContext", "torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr", "torch.fx.experimental.symbolic_shapes.check_consistent", "torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings", "torch.fx.experimental.symbolic_shapes.constrain_range", "torch.fx.experimental.symbolic_shapes.constrain_unify", "torch.fx.experimental.symbolic_shapes.definitely_false", "torch.fx.experimental.symbolic_shapes.definitely_true", "torch.fx.experimental.symbolic_shapes.guard_size_oblivious", "torch.fx.experimental.symbolic_shapes.has_free_symbols", "torch.fx.experimental.symbolic_shapes.hint_int", "torch.fx.experimental.symbolic_shapes.is_concrete_bool", "torch.fx.experimental.symbolic_shapes.is_concrete_int", "torch.fx.experimental.symbolic_shapes.lru_cache", "torch.fx.experimental.symbolic_shapes.parallel_and", "torch.fx.experimental.symbolic_shapes.parallel_or", "torch.fx.experimental.symbolic_shapes.rebind_unbacked", "torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings", "torch.fx.experimental.symbolic_shapes.statically_known_true", "torch.fx.experimental.symbolic_shapes.sym_eq", "torch.gather", "torch.gcd", "torch.ge", "torch.geqrf", "torch.ger", "torch.get_default_device", "torch.get_default_dtype", "torch.get_deterministic_debug_mode", "torch.get_device_module", "torch.get_float32_matmul_precision", "torch.get_num_interop_threads", "torch.get_num_threads", "torch.get_rng_state", "torch.gradient", "torch.greater", "torch.greater_equal", "torch.gt", "torch.hamming_window", "torch.hann_window", "torch.heaviside", "torch.histc", "torch.histogram", "torch.histogramdd", "torch.hsplit", "torch.hspmm", "torch.hstack", "torch.hypot", "torch.i0", "torch.igamma", "torch.igammac", "torch.imag", "torch.index_add", "torch.index_copy", "torch.index_reduce", "torch.index_select", "torch.initial_seed", "torch.inner", "torch.inverse", "torch.is_complex", "torch.is_conj", "torch.is_deterministic_algorithms_warn_only_enabled", "torch.is_floating_point", "torch.is_grad_enabled", "torch.is_inference_mode_enabled", "torch.is_nonzero", "torch.is_storage", "torch.is_tensor", "torch.is_warn_always_enabled", "torch.isclose", "torch.isfinite", "torch.isin", "torch.isinf", "torch.isnan", "torch.isneginf", "torch.isposinf", "torch.isreal", "torch.istft", "Attribute", "ScriptFunction", "ScriptModule", "torch.jit.annotate", "torch.jit.enable_onednn_fusion", "torch.jit.fork", "torch.jit.freeze", "torch.jit.ignore", "torch.jit.interface", "torch.jit.isinstance", "torch.jit.load", "torch.jit.onednn_fusion_enabled", 
"torch.jit.optimize_for_inference", "torch.jit.save", "torch.jit.script", "torch.jit.script_if_tracing", "torch.jit.set_fusion_strategy", "strict_fusion", "torch.jit.trace", "torch.jit.trace_module", "torch.jit.unused", "torch.jit.wait", "torch.kaiser_window", "torch.kron", "torch.kthvalue", "torch.lcm", "torch.ldexp", "torch.le", "torch.lerp", "torch.less", "torch.less_equal", "torch.lgamma", "torch.linalg.cholesky", "torch.linalg.cholesky_ex", "torch.linalg.cond", "torch.linalg.cross", "torch.linalg.det", "torch.linalg.diagonal", "torch.linalg.eig", "torch.linalg.eigh", "torch.linalg.eigvals", "torch.linalg.eigvalsh", "torch.linalg.householder_product", "torch.linalg.inv", "torch.linalg.inv_ex", "torch.linalg.ldl_factor", "torch.linalg.ldl_factor_ex", "torch.linalg.ldl_solve", "torch.linalg.lstsq", "torch.linalg.lu", "torch.linalg.lu_factor", "torch.linalg.lu_factor_ex", "torch.linalg.lu_solve", "torch.linalg.matmul", "torch.linalg.matrix_exp", "torch.linalg.matrix_norm", "torch.linalg.matrix_power", "torch.linalg.matrix_rank", "torch.linalg.multi_dot", "torch.linalg.norm", "torch.linalg.pinv", "torch.linalg.qr", "torch.linalg.slogdet", "torch.linalg.solve", "torch.linalg.solve_ex", "torch.linalg.solve_triangular", "torch.linalg.svd", "torch.linalg.svdvals", "torch.linalg.tensorinv", "torch.linalg.tensorsolve", "torch.linalg.vander", "torch.linalg.vecdot", "torch.linalg.vector_norm", "torch.linspace", "torch.load", "torch.lobpcg", "torch.log", "torch.log10", "torch.log1p", "torch.log2", "torch.logaddexp", "torch.logaddexp2", "torch.logcumsumexp", "torch.logdet", "torch.logical_and", "torch.logical_not", "torch.logical_or", "torch.logical_xor", "torch.logit", "torch.logspace", "torch.logsumexp", "torch.lt", "torch.lu", "torch.lu_solve", "torch.lu_unpack", "torch.manual_seed", "torch.masked_select", "torch.matmul", "torch.matrix_exp", "torch.matrix_power", "torch.max", "torch.maximum", "torch.mean", "torch.median", "torch.meshgrid", "torch.min", "torch.minimum", "torch.mm", "torch.mode", "torch.moveaxis", "torch.movedim", "torch.mps.current_allocated_memory", "torch.mps.device_count", "torch.mps.driver_allocated_memory", "torch.mps.empty_cache", "Event", "torch.mps.get_rng_state", "torch.mps.manual_seed", "torch.mps.profiler.profile", "torch.mps.profiler.start", "torch.mps.profiler.stop", "torch.mps.seed", "torch.mps.set_per_process_memory_fraction", "torch.mps.set_rng_state", "torch.mps.synchronize", "torch.msort", "torch.mtia.DeferredMtiaCallError", "Event", "Stream", "StreamContext", "torch.mtia.current_device", "torch.mtia.current_stream", "torch.mtia.default_stream", "device", "torch.mtia.device_count", "torch.mtia.init", "torch.mtia.is_available", "torch.mtia.is_initialized", "torch.mtia.set_stream", "torch.mtia.stream", "torch.mtia.synchronize", "torch.mul", "torch.multinomial", "torch.multiply", "torch.mv", "torch.mvlgamma", "torch.nan_to_num", "torch.nanmean", "torch.nanmedian", "torch.nanquantile", "torch.nansum", "torch.narrow", "torch.narrow_copy", "torch.ne", "torch.neg", "torch.negative", "torch.nextafter", "AdaptiveAvgPool1d", "AdaptiveAvgPool2d", "AdaptiveAvgPool3d", "AdaptiveLogSoftmaxWithLoss", "AdaptiveMaxPool1d", "AdaptiveMaxPool2d", "AdaptiveMaxPool3d", "AlphaDropout", "AvgPool1d", "AvgPool2d", "AvgPool3d", "BCELoss", "BCEWithLogitsLoss", "BatchNorm1d", "BatchNorm2d", "BatchNorm3d", "Bilinear", "CELU", "CTCLoss", "ChannelShuffle", "CircularPad1d", "CircularPad2d", "CircularPad3d", "ConstantPad1d", "ConstantPad2d", "ConstantPad3d", "Conv1d", "Conv2d", "Conv3d", 
"ConvTranspose1d", "ConvTranspose2d", "ConvTranspose3d", "CosineEmbeddingLoss", "CosineSimilarity", "CrossEntropyLoss", "DataParallel", "Dropout", "Dropout1d", "Dropout2d", "Dropout3d", "ELU", "Embedding", "EmbeddingBag", "FeatureAlphaDropout", "Flatten", "Fold", "FractionalMaxPool2d", "FractionalMaxPool3d", "GELU", "GLU", "GRU", "GRUCell", "GaussianNLLLoss", "GroupNorm", "Hardshrink", "Hardsigmoid", "Hardswish", "Hardtanh", "HingeEmbeddingLoss", "HuberLoss", "Identity", "InstanceNorm1d", "InstanceNorm2d", "InstanceNorm3d", "KLDivLoss", "L1Loss", "LPPool1d", "LPPool2d", "LPPool3d", "LSTM", "LSTMCell", "LayerNorm", "LazyBatchNorm1d", "LazyBatchNorm2d", "LazyBatchNorm3d", "LazyConv1d", "LazyConv2d", "LazyConv3d", "LazyConvTranspose1d", "LazyConvTranspose2d", "LazyConvTranspose3d", "LazyInstanceNorm1d", "LazyInstanceNorm2d", "LazyInstanceNorm3d", "LazyLinear", "LeakyReLU", "Linear", "LocalResponseNorm", "LogSigmoid", "LogSoftmax", "MSELoss", "MarginRankingLoss", "MaxPool1d", "MaxPool2d", "MaxPool3d", "MaxUnpool1d", "MaxUnpool2d", "MaxUnpool3d", "Mish", "Module", "ModuleDict", "ModuleList", "MultiLabelMarginLoss", "MultiLabelSoftMarginLoss", "MultiMarginLoss", "MultiheadAttention", "NLLLoss", "PReLU", "PairwiseDistance", "ParameterDict", "ParameterList", "PixelShuffle", "PixelUnshuffle", "PoissonNLLLoss", "RMSNorm", "RNN", "RNNBase", "RNNCell", "RReLU", "ReLU", "ReLU6", "ReflectionPad1d", "ReflectionPad2d", "ReflectionPad3d", "ReplicationPad1d", "ReplicationPad2d", "ReplicationPad3d", "SELU", "Sequential", "SiLU", "Sigmoid", "SmoothL1Loss", "SoftMarginLoss", "Softmax", "Softmax2d", "Softmin", "Softplus", "Softshrink", "Softsign", "SyncBatchNorm", "Tanh", "Tanhshrink", "Threshold", "Transformer", "TransformerDecoder", "TransformerDecoderLayer", "TransformerEncoder", "TransformerEncoderLayer", "TripletMarginLoss", "TripletMarginWithDistanceLoss", "Unflatten", "Unfold", "Upsample", "UpsamplingBilinear2d", "UpsamplingNearest2d", "ZeroPad1d", "ZeroPad2d", "ZeroPad3d", "SDPBackend", "torch.nn.attention.bias.CausalBias", "CausalVariant", "torch.nn.attention.bias.causal_lower_right", "torch.nn.attention.bias.causal_upper_left", "torch.nn.attention.sdpa_kernel", "torch.nn.functional.adaptive_avg_pool1d", "torch.nn.functional.adaptive_avg_pool2d", "torch.nn.functional.adaptive_avg_pool3d", "torch.nn.functional.adaptive_max_pool1d", "torch.nn.functional.adaptive_max_pool2d", "torch.nn.functional.adaptive_max_pool3d", "torch.nn.functional.affine_grid", "torch.nn.functional.alpha_dropout", "torch.nn.functional.avg_pool1d", "torch.nn.functional.avg_pool2d", "torch.nn.functional.avg_pool3d", "torch.nn.functional.batch_norm", "torch.nn.functional.bilinear", "torch.nn.functional.binary_cross_entropy", "torch.nn.functional.binary_cross_entropy_with_logits", "torch.nn.functional.celu", "torch.nn.functional.conv1d", "torch.nn.functional.conv2d", "torch.nn.functional.conv3d", "torch.nn.functional.conv_transpose1d", "torch.nn.functional.conv_transpose2d", "torch.nn.functional.conv_transpose3d", "torch.nn.functional.cosine_embedding_loss", "torch.nn.functional.cosine_similarity", "torch.nn.functional.cross_entropy", "torch.nn.functional.ctc_loss", "torch.nn.functional.dropout", "torch.nn.functional.dropout1d", "torch.nn.functional.dropout2d", "torch.nn.functional.dropout3d", "torch.nn.functional.elu", "torch.nn.functional.elu_", "torch.nn.functional.embedding", "torch.nn.functional.embedding_bag", "torch.nn.functional.feature_alpha_dropout", "torch.nn.functional.fold", "torch.nn.functional.fractional_max_pool2d", 
"torch.nn.functional.fractional_max_pool3d", "torch.nn.functional.gaussian_nll_loss", "torch.nn.functional.gelu", "torch.nn.functional.glu", "torch.nn.functional.grid_sample", "torch.nn.functional.group_norm", "torch.nn.functional.gumbel_softmax", "torch.nn.functional.hardshrink", "torch.nn.functional.hardsigmoid", "torch.nn.functional.hardswish", "torch.nn.functional.hardtanh", "torch.nn.functional.hardtanh_", "torch.nn.functional.hinge_embedding_loss", "torch.nn.functional.huber_loss", "torch.nn.functional.instance_norm", "torch.nn.functional.interpolate", "torch.nn.functional.kl_div", "torch.nn.functional.l1_loss", "torch.nn.functional.layer_norm", "torch.nn.functional.leaky_relu", "torch.nn.functional.leaky_relu_", "torch.nn.functional.linear", "torch.nn.functional.local_response_norm", "torch.nn.functional.log_softmax", "torch.nn.functional.logsigmoid", "torch.nn.functional.lp_pool1d", "torch.nn.functional.lp_pool2d", "torch.nn.functional.lp_pool3d", "torch.nn.functional.margin_ranking_loss", "torch.nn.functional.max_pool1d", "torch.nn.functional.max_pool2d", "torch.nn.functional.max_pool3d", "torch.nn.functional.max_unpool1d", "torch.nn.functional.max_unpool2d", "torch.nn.functional.max_unpool3d", "torch.nn.functional.mish", "torch.nn.functional.mse_loss", "torch.nn.functional.multi_margin_loss", "torch.nn.functional.multilabel_margin_loss", "torch.nn.functional.multilabel_soft_margin_loss", "torch.nn.functional.nll_loss", "torch.nn.functional.normalize", "torch.nn.functional.one_hot", "torch.nn.functional.pad", "torch.nn.functional.pairwise_distance", "torch.nn.functional.pdist", "torch.nn.functional.pixel_shuffle", "torch.nn.functional.pixel_unshuffle", "torch.nn.functional.poisson_nll_loss", "torch.nn.functional.prelu", "torch.nn.functional.relu", "torch.nn.functional.relu6", "torch.nn.functional.relu_", "torch.nn.functional.rms_norm", "torch.nn.functional.rrelu", "torch.nn.functional.rrelu_", "torch.nn.functional.scaled_dot_product_attention", "torch.nn.functional.selu", "torch.nn.functional.sigmoid", "torch.nn.functional.silu", "torch.nn.functional.smooth_l1_loss", "torch.nn.functional.soft_margin_loss", "torch.nn.functional.softmax", "torch.nn.functional.softmin", "torch.nn.functional.softplus", "torch.nn.functional.softshrink", "torch.nn.functional.softsign", "torch.nn.functional.tanh", "torch.nn.functional.tanhshrink", "torch.nn.functional.threshold", "torch.nn.functional.threshold_", "torch.nn.functional.torch.nn.parallel.data_parallel", "torch.nn.functional.triplet_margin_loss", "torch.nn.functional.triplet_margin_with_distance_loss", "torch.nn.functional.unfold", "torch.nn.functional.upsample", "torch.nn.functional.upsample_bilinear", "torch.nn.functional.upsample_nearest", "LazyModuleMixin", "torch.nn.modules.module.register_module_backward_hook", "torch.nn.modules.module.register_module_buffer_registration_hook", "torch.nn.modules.module.register_module_forward_hook", "torch.nn.modules.module.register_module_forward_pre_hook", "torch.nn.modules.module.register_module_full_backward_hook", "torch.nn.modules.module.register_module_full_backward_pre_hook", "torch.nn.modules.module.register_module_module_registration_hook", "torch.nn.modules.module.register_module_parameter_registration_hook", "RMSNorm", "DistributedDataParallel", "Parameter", "UninitializedBuffer", "UninitializedParameter", "torch.nn.utils.clip_grad_norm", "torch.nn.utils.clip_grad_norm_", "torch.nn.utils.clip_grad_value_", "torch.nn.utils.convert_conv2d_weight_memory_format", 
"torch.nn.utils.convert_conv3d_weight_memory_format", "torch.nn.utils.fuse_conv_bn_eval", "torch.nn.utils.fuse_conv_bn_weights", "torch.nn.utils.fuse_linear_bn_eval", "torch.nn.utils.fuse_linear_bn_weights", "torch.nn.utils.parameters_to_vector", "torch.nn.utils.parametrizations.orthogonal", "torch.nn.utils.parametrizations.spectral_norm", "torch.nn.utils.parametrizations.weight_norm", "ParametrizationList", "torch.nn.utils.parametrize.cached", "torch.nn.utils.parametrize.is_parametrized", "torch.nn.utils.parametrize.register_parametrization", "torch.nn.utils.parametrize.remove_parametrizations", "BasePruningMethod", "CustomFromMask", "Identity", "L1Unstructured", "LnStructured", "PruningContainer", "RandomStructured", "RandomUnstructured", "torch.nn.utils.prune.custom_from_mask", "torch.nn.utils.prune.global_unstructured", "torch.nn.utils.prune.identity", "torch.nn.utils.prune.is_pruned", "torch.nn.utils.prune.l1_unstructured", "torch.nn.utils.prune.ln_structured", "torch.nn.utils.prune.random_structured", "torch.nn.utils.prune.random_unstructured", "torch.nn.utils.prune.remove", "torch.nn.utils.remove_spectral_norm", "torch.nn.utils.remove_weight_norm", "PackedSequence", "torch.nn.utils.rnn.pack_padded_sequence", "torch.nn.utils.rnn.pack_sequence", "torch.nn.utils.rnn.pad_packed_sequence", "torch.nn.utils.rnn.pad_sequence", "torch.nn.utils.rnn.unpack_sequence", "torch.nn.utils.rnn.unpad_sequence", "torch.nn.utils.skip_init", "torch.nn.utils.spectral_norm", "torch.nn.utils.stateless.functional_call", "torch.nn.utils.vector_to_parameters", "torch.nn.utils.weight_norm", "no_grad", "torch.nonzero", "torch.norm", "torch.normal", "torch.not_equal", "torch.numel", "torch.ones", "torch.ones_like", "JitScalarType", "GraphInfo", "VerificationOptions", "ASGD", "Adadelta", "Adagrad", "Adam", "AdamW", "Adamax", "LBFGS", "NAdam", "torch.optim.Optimizer.add_param_group", "torch.optim.Optimizer.load_state_dict", "torch.optim.Optimizer.state_dict", "torch.optim.Optimizer.step", "torch.optim.Optimizer.zero_grad", "RAdam", "RMSprop", "Rprop", "SGD", "SparseAdam", "ChainedScheduler", "ConstantLR", "CosineAnnealingLR", "CosineAnnealingWarmRestarts", "CyclicLR", "ExponentialLR", "LambdaLR", "LinearLR", "MultiStepLR", "MultiplicativeLR", "OneCycleLR", "PolynomialLR", "ReduceLROnPlateau", "SequentialLR", "StepLR", "torch.orgqr", "torch.ormqr", "torch.outer", "torch.pca_lowrank", "torch.permute", "torch.pinverse", "torch.poisson", "torch.polar", "torch.polygamma", "torch.positive", "torch.pow", "torch.prod", "torch.promote_types", "torch.qr", "torch.quantile", "torch.quantize_per_channel", "torch.quantize_per_tensor", "torch.quantized_batch_norm", "torch.quantized_max_pool1d", "torch.quantized_max_pool2d", "SobolEngine", "torch.rad2deg", "torch.rand", "torch.rand_like", "torch.randint", "torch.randint_like", "torch.randn", "torch.randn_like", "torch.randperm", "torch.range", "torch.ravel", "torch.real", "torch.reciprocal", "torch.remainder", "torch.renorm", "torch.repeat_interleave", "torch.reshape", "torch.resolve_conj", "torch.resolve_neg", "torch.result_type", "torch.roll", "torch.rot90", "torch.round", "torch.row_stack", "torch.rsqrt", "torch.save", "torch.scatter", "torch.scatter_add", "torch.scatter_reduce", "torch.searchsorted", "torch.seed", "torch.select", "torch.select_scatter", "torch.set_default_device", "torch.set_default_dtype", "torch.set_default_tensor_type", "torch.set_deterministic_debug_mode", "torch.set_float32_matmul_precision", "torch.set_flush_denormal", "torch.set_num_interop_threads", 
"torch.set_num_threads", "torch.set_printoptions", "torch.set_rng_state", "torch.set_warn_always", "torch.sgn", "torch.sigmoid", "torch.sign", "torch.signal.windows.bartlett", "torch.signal.windows.blackman", "torch.signal.windows.cosine", "torch.signal.windows.exponential", "torch.signal.windows.gaussian", "torch.signal.windows.general_cosine", "torch.signal.windows.general_hamming", "torch.signal.windows.hamming", "torch.signal.windows.hann", "torch.signal.windows.kaiser", "torch.signal.windows.nuttall", "torch.signbit", "torch.sin", "torch.sinc", "torch.sinh", "torch.slice_scatter", "torch.slogdet", "torch.smm", "torch.softmax", "torch.sort", "torch.sparse.addmm", "torch.sparse.as_sparse_gradcheck", "check_sparse_tensor_invariants", "torch.sparse.log_softmax", "torch.sparse.mm", "torch.sparse.sampled_addmm", "torch.sparse.softmax", "torch.sparse.spdiags", "torch.sparse.sum", "torch.sparse_bsc_tensor", "torch.sparse_bsr_tensor", "torch.sparse_compressed_tensor", "torch.sparse_coo_tensor", "torch.sparse_csc_tensor", "torch.sparse_csr_tensor", "torch.split", "torch.sqrt", "torch.square", "torch.squeeze", "torch.sspaddmm", "torch.stack", "torch.std", "torch.std_mean", "torch.stft", "torch.sub", "torch.subtract", "torch.sum", "torch.svd", "torch.svd_lowrank", "torch.swapaxes", "torch.swapdims", "torch.sym_float", "torch.sym_int", "torch.sym_ite", "torch.sym_max", "torch.sym_min", "torch.sym_not", "torch.t", "torch.take", "torch.take_along_dim", "torch.tan", "torch.tanh", "torch.tensor", "torch.tensor_split", "torch.tensordot", "torch.tile", "torch.topk", "torch.trace", "torch.transpose", "torch.trapezoid", "torch.trapz", "torch.triangular_solve", "torch.tril", "torch.tril_indices", "torch.triu", "torch.triu_indices", "torch.true_divide", "torch.trunc", "torch.unbind", "torch.unflatten", "torch.unique", "torch.unique_consecutive", "torch.unravel_index", "torch.unsqueeze", "torch.use_deterministic_algorithms", "torch.utils.generate_methods_for_privateuse1_backend", "torch.utils.get_cpp_backtrace", "torch.utils.rename_privateuse1_backend", "torch.utils.set_module", "torch.utils.swap_tensors", "torch.vander", "torch.var", "torch.var_mean", "torch.vdot", "torch.view_as_complex", "torch.view_as_real", "torch.vmap", "torch.vsplit", "torch.vstack", "torch.where", "torch.xlogy", "Event", "Stream", "StreamContext", "torch.xpu.current_device", "torch.xpu.current_stream", "device", "torch.xpu.device_count", "device_of", "torch.xpu.empty_cache", "torch.xpu.get_device_capability", "torch.xpu.get_device_name", "torch.xpu.get_device_properties", "torch.xpu.get_rng_state", "torch.xpu.get_rng_state_all", "torch.xpu.init", "torch.xpu.initial_seed", "torch.xpu.is_available", "torch.xpu.is_initialized", "torch.xpu.manual_seed", "torch.xpu.manual_seed_all", "torch.xpu.seed", "torch.xpu.seed_all", "torch.xpu.set_device", "torch.xpu.set_rng_state", "torch.xpu.set_rng_state_all", "torch.xpu.set_stream", "torch.xpu.stream", "torch.xpu.synchronize", "torch.zeros", "torch.zeros_like", "torch.hub", "PyTorch documentation", "TorchScript", "TorchScript Builtins", "TorchScript Language Reference", "TorchScript Language Reference", "Python Language Reference Coverage", "TorchScript Unsupported PyTorch Constructs", "JIT Utils - torch.utils.jit", "torch.library", "torch.linalg", "torch._logging", "torch.masked", "Meta device", "Miscellaneous Environment Variables", "torch.utils.mobile_optimizer", "torch.utils.model_zoo", "torch.utils.module_tracker", "torch.monitor", "torch.mps", "torch.mtia", "Multiprocessing package - 
torch.multiprocessing", "Named Tensors operator coverage", "Named Tensors", "torch.nested", "torch.nn", "torch.nn.attention", "torch.nn.attention.bias", "torch.nn.functional", "torch.nn.init", "CUDA Automatic Mixed Precision examples", "Autograd mechanics", "Broadcasting semantics", "CPU threading and TorchScript inference", "CUDA semantics", "PyTorch Custom Operators Landing Page", "Distributed Data Parallel", "Extending PyTorch", "Extending torch.func with autograd.Function", "Frequently Asked Questions", "FSDP Notes", "Gradcheck mechanics", "HIP (ROCm) semantics", "Features for large-scale deployments", "Modules", "MPS backend", "Multiprocessing best practices", "Numerical accuracy", "Reproducibility", "Serialization semantics", "Windows FAQ", "torch.onnx", "TorchDynamo-based ONNX Exporter", "ONNX Backend for TorchDynamo", "TorchScript-based ONNX Exporter", "ONNX supported TorchScript operators", "torch.optim", "torch.package", "torch.profiler", "Quantization", "Quantization Accuracy Debugging", "Quantization Backend Configuration", "Quantization API Reference", "torch.random", "Distributed RPC Framework", "Distributed Autograd Design", "Remote Reference Protocol", "torch.signal", "torch.Size", "torch.sparse", "torch.special", "torch.Storage", "Tensor Attributes", "Tensor Views", "torch.utils.tensorboard", "torch.Tensor", "torch.testing", "Threading Environment Variables", "torch", "torch.ao.ns._numeric_suite", "torch.ao.ns._numeric_suite_fx", "torch.compiler", "AOTInductor: Ahead-Of-Time Compilation for Torch.Export-ed Models", "torch.compiler API reference", "Best Practices for Backends", "CUDAGraph Trees", "Custom Backends", "Dynamic shapes", "Dynamo Deep-Dive", "Dynamo Overview", "Fake tensor", "Frequently Asked Questions", "TorchDynamo APIs for fine-grained tracing", "Getting Started", "TorchInductor GPU Profiling", "IRs", "PyTorch 2.0 NNModule Support", "PyTorch 2.0 Performance Dashboard", "Profiling to understand torch.compile performance", "Writing Graph Transformations on ATen IR", "PyTorch 2.0 Troubleshooting", "torch.overrides", "Understanding CUDA Memory Usage", "Torch Environment Variables", "PYTORCH ProcessGroupNCCL Environment Variables", "Type Info", "torch.utils", "torch.xpu"], "terms": {"provid": [0, 1, 3, 7, 8, 9, 11, 14, 15, 17, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 45, 47, 48, 50, 51, 52, 53, 55, 59, 63, 64, 65, 66, 76, 77, 152, 488, 605, 682, 691, 737, 740, 741, 742, 747, 748, 750, 758, 759, 762, 766, 790, 791, 801, 802, 816, 826, 844, 861, 896, 912, 914, 915, 916, 931, 942, 954, 959, 962, 981, 997, 1018, 1064, 1108, 1128, 1165, 1177, 1187, 1197, 1226, 1234, 1235, 1269, 1272, 1279, 1280, 1284, 1286, 1288, 1303, 1320, 1345, 1377, 1456, 1457, 1458, 1461, 1477, 1478, 1486, 1491, 1496, 1497, 1522, 1523, 1524, 1526, 1532, 1533, 1536, 1542, 1544, 1555, 1570, 1571, 1572, 1573, 1574, 1604, 1605, 1684, 1703, 1716, 1723, 1724, 1738, 1758, 1766, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1808, 1811, 1816, 1852, 1862, 1909, 1910, 1911, 1912, 1913, 1914, 1923, 1953, 1955, 1965, 1976, 2011, 2013, 2015, 2016, 2020, 2022, 2023, 2026, 2029, 2032, 2033, 2034, 2035, 2045, 2047, 2048, 2049, 2052, 2055, 2056, 2058, 2063, 2065, 2067, 2068, 2070, 2071, 2073, 2075, 2076, 2077, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2089, 2093, 2095, 2096, 2097, 2098, 2099, 2100, 2102, 2104, 2105, 2109, 2110, 2111, 2113, 2116], "conveni": [0, 3, 7, 14, 28, 44, 47, 48, 49, 55, 61, 1177, 1187, 1330, 1706, 1870, 1965, 1967, 1976, 2011, 2015, 2024, 
2041, 2042, 2045, 2048, 2054, 2055, 2068, 2074, 2096, 2098, 2099, 2100, 2105], "method": [0, 3, 7, 9, 14, 15, 18, 23, 28, 29, 30, 32, 33, 35, 36, 37, 39, 45, 47, 52, 55, 62, 63, 64, 66, 68, 74, 75, 82, 90, 223, 224, 325, 417, 488, 489, 490, 499, 500, 501, 522, 616, 795, 799, 801, 817, 818, 819, 840, 844, 857, 877, 878, 879, 895, 896, 903, 904, 905, 906, 907, 908, 909, 927, 928, 942, 1044, 1159, 1178, 1185, 1226, 1270, 1272, 1273, 1275, 1276, 1277, 1283, 1284, 1288, 1289, 1290, 1318, 1336, 1337, 1344, 1345, 1419, 1422, 1430, 1438, 1440, 1441, 1467, 1468, 1483, 1526, 1527, 1528, 1532, 1536, 1537, 1543, 1545, 1555, 1632, 1706, 1716, 1720, 1731, 1733, 1736, 1738, 1740, 1743, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1754, 1760, 1765, 1781, 1782, 1783, 1785, 1795, 1827, 1908, 1923, 1927, 1928, 1965, 2011, 2012, 2013, 2016, 2017, 2020, 2024, 2026, 2029, 2032, 2033, 2034, 2040, 2041, 2042, 2045, 2048, 2049, 2050, 2052, 2055, 2057, 2060, 2063, 2067, 2068, 2070, 2072, 2075, 2077, 2082, 2083, 2085, 2086, 2089, 2092, 2097, 2099, 2100, 2102, 2109, 2111, 2112], "where": [0, 1, 2, 3, 4, 7, 8, 9, 11, 14, 15, 23, 24, 28, 32, 33, 34, 35, 37, 40, 45, 47, 48, 50, 51, 52, 53, 55, 57, 58, 60, 64, 88, 155, 225, 256, 402, 404, 488, 682, 695, 697, 698, 701, 737, 760, 762, 783, 822, 823, 835, 880, 898, 902, 911, 913, 934, 944, 953, 959, 966, 967, 968, 975, 993, 997, 1011, 1013, 1022, 1024, 1087, 1088, 1091, 1095, 1113, 1124, 1125, 1127, 1130, 1131, 1133, 1134, 1136, 1137, 1138, 1140, 1141, 1143, 1145, 1170, 1171, 1172, 1176, 1177, 1185, 1186, 1215, 1229, 1230, 1231, 1232, 1261, 1262, 1264, 1265, 1268, 1269, 1270, 1273, 1286, 1292, 1293, 1294, 1297, 1302, 1303, 1304, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1340, 1341, 1342, 1344, 1345, 1350, 1353, 1360, 1361, 1362, 1363, 1367, 1370, 1372, 1373, 1374, 1375, 1378, 1412, 1417, 1418, 1420, 1423, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1467, 1468, 1469, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1556, 1557, 1558, 1559, 1560, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1603, 1605, 1615, 1616, 1623, 1628, 1630, 1631, 1649, 1650, 1668, 1670, 1671, 1674, 1675, 1677, 1684, 1687, 1715, 1716, 1718, 1719, 1723, 1724, 1730, 1758, 1759, 1760, 1761, 1770, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1800, 1801, 1802, 1814, 1824, 1826, 1827, 1847, 1848, 1883, 1889, 1890, 1895, 1899, 1904, 1905, 1906, 1909, 1910, 1911, 1912, 1913, 1914, 1921, 1922, 1923, 1926, 1927, 1928, 1942, 1951, 1952, 1953, 1954, 1955, 1960, 1961, 1971, 1972, 1973, 1974, 1975, 1976, 1982, 2013, 2014, 2016, 2017, 2022, 2023, 2024, 2027, 2032, 2035, 2040, 2041, 2042, 2043, 2045, 2048, 2049, 2050, 2051, 2052, 2055, 2057, 2058, 2059, 2060, 2065, 2066, 2067, 2068, 2069, 2070, 2073, 2075, 2077, 2080, 2081, 2083, 2085, 2091, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2106, 
2109, 2111, 2112, 2113], "some": [0, 1, 3, 7, 8, 9, 14, 17, 19, 22, 23, 24, 28, 30, 33, 34, 35, 37, 40, 48, 50, 52, 53, 55, 58, 60, 61, 63, 64, 87, 89, 479, 488, 498, 569, 691, 750, 918, 938, 975, 1008, 1042, 1060, 1064, 1067, 1108, 1129, 1131, 1139, 1140, 1141, 1156, 1166, 1177, 1183, 1187, 1199, 1200, 1269, 1270, 1272, 1273, 1283, 1286, 1288, 1318, 1336, 1342, 1367, 1374, 1377, 1430, 1434, 1438, 1439, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1485, 1488, 1489, 1490, 1491, 1492, 1496, 1517, 1518, 1526, 1529, 1530, 1531, 1533, 1540, 1542, 1543, 1558, 1559, 1575, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1616, 1617, 1644, 1649, 1668, 1671, 1676, 1684, 1706, 1716, 1717, 1718, 1719, 1730, 1764, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1802, 1808, 1826, 1846, 1870, 1876, 1890, 1927, 1967, 1976, 2011, 2013, 2014, 2015, 2016, 2017, 2020, 2021, 2022, 2023, 2024, 2025, 2026, 2032, 2033, 2034, 2041, 2042, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2065, 2067, 2068, 2069, 2070, 2071, 2073, 2075, 2076, 2077, 2080, 2085, 2092, 2098, 2099, 2100, 2101, 2103, 2104, 2107, 2108, 2109, 2110, 2111], "oper": [0, 2, 4, 5, 7, 8, 11, 12, 15, 17, 18, 23, 27, 30, 33, 34, 35, 36, 44, 45, 47, 48, 53, 56, 61, 63, 64, 65, 66, 81, 82, 84, 85, 86, 87, 88, 256, 315, 323, 337, 354, 404, 447, 448, 449, 450, 451, 488, 490, 495, 498, 501, 515, 517, 519, 591, 682, 688, 691, 743, 744, 745, 749, 750, 759, 760, 770, 771, 781, 787, 794, 795, 797, 798, 801, 825, 828, 862, 864, 865, 868, 892, 894, 903, 904, 906, 908, 909, 918, 929, 943, 944, 946, 948, 951, 953, 955, 957, 962, 965, 972, 975, 977, 985, 989, 991, 1011, 1013, 1041, 1050, 1082, 1087, 1088, 1089, 1090, 1103, 1108, 1109, 1110, 1111, 1121, 1126, 1144, 1156, 1159, 1162, 1163, 1164, 1165, 1166, 1167, 1169, 1170, 1171, 1172, 1177, 1180, 1220, 1224, 1230, 1231, 1272, 1283, 1284, 1288, 1289, 1292, 1302, 1315, 1325, 1328, 1329, 1342, 1343, 1352, 1359, 1367, 1372, 1377, 1388, 1389, 1417, 1420, 1434, 1439, 1444, 1445, 1446, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1472, 1473, 1474, 1480, 1482, 1483, 1484, 1485, 1487, 1488, 1489, 1490, 1492, 1498, 1512, 1517, 1522, 1523, 1524, 1526, 1539, 1541, 1545, 1546, 1547, 1554, 1569, 1570, 1572, 1574, 1578, 1600, 1601, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1617, 1618, 1619, 1620, 1624, 1625, 1627, 1628, 1632, 1636, 1643, 1649, 1651, 1658, 1659, 1669, 1671, 1675, 1684, 1690, 1691, 1702, 1703, 1704, 1705, 1715, 1716, 1718, 1719, 1760, 1766, 1771, 1775, 1776, 1794, 1800, 1810, 1814, 1823, 1824, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1846, 1852, 1869, 1870, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1901, 1903, 1904, 1905, 1906, 1909, 1910, 1911, 1912, 1913, 1914, 1918, 1926, 1942, 1960, 1964, 1965, 1967, 1976, 1979, 2009, 2010, 2012, 2013, 2017, 2020, 2024, 2041, 2043, 2044, 2045, 2047, 2049, 2050, 2055, 2056, 2057, 2058, 2059, 2062, 2063, 2067, 2069, 2071, 2073, 2074, 2075, 2076, 2079, 2081, 2083, 2084, 2087, 2096, 2098, 2099, 2100, 2102, 2103, 2104, 2105, 2106, 2110, 2111], "us": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 17, 19, 20, 21, 23, 27, 29, 30, 32, 34, 35, 36, 37, 39, 40, 44, 45, 46, 47, 48, 50, 52, 53, 55, 56, 58, 59, 60, 61, 62, 63, 65, 66, 74, 75, 76, 77, 84, 85, 86, 88, 90, 99, 121, 152, 156, 175, 193, 210, 321, 323, 337, 344, 345, 408, 417, 437, 450, 460, 488, 489, 490, 498, 501, 515, 519, 522, 546, 559, 585, 586, 587, 
589, 590, 619, 682, 688, 691, 714, 715, 716, 717, 718, 719, 722, 731, 732, 733, 734, 735, 737, 750, 759, 762, 770, 771, 774, 775, 776, 781, 783, 787, 788, 789, 792, 795, 796, 797, 798, 799, 801, 804, 809, 817, 818, 819, 821, 822, 823, 824, 825, 826, 827, 828, 829, 833, 834, 835, 836, 839, 857, 861, 862, 864, 865, 868, 881, 882, 883, 892, 893, 894, 896, 898, 899, 900, 901, 902, 903, 905, 906, 908, 909, 911, 912, 913, 914, 917, 918, 919, 920, 922, 923, 927, 928, 931, 935, 938, 943, 944, 945, 953, 955, 956, 963, 965, 975, 977, 978, 983, 985, 989, 990, 993, 995, 1008, 1010, 1011, 1013, 1015, 1016, 1018, 1021, 1031, 1032, 1035, 1036, 1042, 1046, 1050, 1052, 1053, 1054, 1056, 1058, 1059, 1064, 1065, 1075, 1077, 1078, 1084, 1089, 1090, 1091, 1100, 1108, 1109, 1110, 1111, 1121, 1122, 1123, 1126, 1128, 1136, 1138, 1142, 1144, 1151, 1153, 1156, 1160, 1163, 1165, 1166, 1167, 1169, 1170, 1171, 1172, 1173, 1176, 1177, 1180, 1185, 1186, 1187, 1188, 1193, 1196, 1197, 1200, 1201, 1203, 1212, 1216, 1217, 1223, 1224, 1226, 1230, 1231, 1232, 1233, 1247, 1259, 1269, 1270, 1272, 1273, 1276, 1277, 1278, 1279, 1280, 1282, 1283, 1284, 1285, 1286, 1288, 1289, 1290, 1292, 1294, 1296, 1304, 1308, 1309, 1311, 1312, 1313, 1315, 1316, 1317, 1318, 1319, 1320, 1325, 1326, 1327, 1329, 1330, 1336, 1337, 1338, 1342, 1343, 1344, 1345, 1350, 1353, 1359, 1362, 1363, 1366, 1367, 1372, 1373, 1374, 1377, 1384, 1385, 1412, 1417, 1419, 1420, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1464, 1465, 1466, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1477, 1478, 1479, 1480, 1485, 1486, 1488, 1489, 1490, 1491, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1512, 1513, 1514, 1519, 1520, 1521, 1522, 1523, 1526, 1528, 1532, 1533, 1534, 1535, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1558, 1560, 1563, 1566, 1574, 1575, 1576, 1577, 1579, 1582, 1583, 1584, 1585, 1586, 1587, 1590, 1597, 1599, 1600, 1601, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1627, 1628, 1632, 1634, 1641, 1643, 1644, 1651, 1657, 1658, 1659, 1669, 1671, 1677, 1684, 1688, 1690, 1691, 1699, 1701, 1703, 1704, 1705, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1718, 1719, 1721, 1722, 1723, 1724, 1730, 1731, 1732, 1733, 1734, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1758, 1759, 1760, 1764, 1765, 1766, 1768, 1769, 1771, 1772, 1775, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1814, 1824, 1826, 1827, 1828, 1831, 1832, 1833, 1835, 1837, 1839, 1841, 1842, 1848, 1855, 1858, 1863, 1864, 1866, 1867, 1868, 1870, 1872, 1873, 1875, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1894, 1899, 1902, 1903, 1905, 1906, 1909, 1910, 1911, 1912, 1913, 1914, 1923, 1926, 1927, 1928, 1942, 1946, 1949, 1953, 1955, 1960, 1963, 1964, 1965, 1967, 1973, 1976, 1981, 1982, 1988, 1989, 1990, 1991, 1999, 2001, 2008, 2009, 2011, 2012, 2013, 2014, 2016, 2017, 2018, 2020, 2023, 2024, 2025, 2026, 2027, 2028, 2029, 2030, 2032, 2033, 2034, 2035, 2036, 2039, 2040, 2041, 2044, 2046, 2047, 2049, 2050, 2051, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2062, 2069, 2071, 2073, 2074, 2075, 2076, 2077, 2081, 2082, 2083, 2085, 2086, 2087, 2088, 2089, 2090, 
2091, 2092, 2093, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2103, 2105, 2106, 2107, 2108, 2110, 2112, 2115, 2118], "float": [0, 1, 3, 11, 19, 23, 24, 27, 28, 32, 33, 35, 37, 41, 50, 52, 53, 55, 62, 64, 155, 156, 221, 315, 317, 319, 323, 335, 402, 477, 483, 501, 687, 689, 694, 696, 700, 734, 737, 740, 741, 742, 746, 747, 748, 750, 757, 758, 763, 764, 765, 766, 767, 772, 773, 774, 775, 776, 777, 779, 781, 782, 783, 787, 794, 795, 796, 801, 819, 821, 832, 840, 841, 855, 860, 861, 864, 865, 868, 883, 922, 923, 944, 945, 953, 960, 963, 967, 968, 977, 986, 992, 993, 997, 1007, 1078, 1091, 1102, 1103, 1109, 1110, 1111, 1113, 1126, 1144, 1153, 1154, 1155, 1156, 1158, 1185, 1187, 1215, 1219, 1229, 1230, 1231, 1234, 1235, 1254, 1261, 1262, 1264, 1265, 1266, 1267, 1270, 1272, 1288, 1289, 1292, 1296, 1298, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1345, 1350, 1359, 1361, 1363, 1372, 1392, 1411, 1412, 1416, 1418, 1419, 1420, 1423, 1426, 1430, 1434, 1440, 1441, 1442, 1444, 1447, 1448, 1459, 1460, 1461, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1473, 1474, 1479, 1480, 1481, 1484, 1485, 1486, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1512, 1514, 1518, 1526, 1531, 1532, 1534, 1535, 1540, 1541, 1545, 1548, 1549, 1550, 1551, 1552, 1558, 1563, 1564, 1566, 1569, 1570, 1572, 1574, 1575, 1576, 1579, 1580, 1581, 1614, 1615, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1629, 1634, 1643, 1669, 1671, 1676, 1684, 1703, 1715, 1716, 1721, 1722, 1726, 1728, 1731, 1741, 1742, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1760, 1761, 1765, 1771, 1772, 1777, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1814, 1820, 1823, 1827, 1828, 1829, 1830, 1842, 1846, 1847, 1867, 1868, 1871, 1874, 1883, 1884, 1886, 1887, 1889, 1924, 1927, 1931, 1934, 1949, 1951, 1964, 2013, 2014, 2015, 2016, 2017, 2020, 2023, 2029, 2032, 2033, 2035, 2036, 2040, 2041, 2045, 2048, 2050, 2055, 2058, 2063, 2065, 2066, 2068, 2069, 2070, 2071, 2075, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2089, 2090, 2098, 2102, 2104, 2106, 2110, 2116], "datatyp": [0, 19, 64, 883, 1269, 1272, 1526, 1718, 1719, 1870, 2058, 2065, 2080], "other": [0, 1, 2, 3, 4, 5, 7, 8, 9, 14, 15, 18, 19, 23, 24, 29, 30, 32, 33, 35, 36, 37, 40, 44, 45, 47, 48, 52, 53, 55, 56, 58, 60, 61, 63, 64, 99, 100, 115, 131, 132, 135, 147, 148, 151, 152, 161, 162, 167, 168, 198, 199, 200, 209, 236, 241, 245, 246, 247, 257, 276, 277, 284, 285, 286, 287, 293, 294, 295, 296, 297, 298, 305, 306, 309, 310, 311, 312, 315, 317, 323, 326, 346, 354, 357, 358, 359, 360, 361, 362, 365, 366, 367, 368, 380, 381, 396, 397, 410, 414, 417, 440, 441, 452, 453, 457, 458, 489, 490, 500, 522, 563, 564, 565, 566, 582, 585, 618, 619, 620, 623, 624, 682, 687, 696, 740, 741, 742, 743, 744, 745, 762, 816, 828, 864, 868, 874, 881, 882, 887, 893, 896, 898, 899, 900, 908, 917, 918, 919, 920, 922, 923, 927, 928, 947, 948, 950, 951, 952, 954, 959, 975, 982, 992, 1007, 1008, 1011, 1013, 1015, 1022, 1032, 1042, 1053, 1096, 1102, 1103, 1104, 1112, 1113, 1114, 1128, 1135, 1153, 1154, 1155, 1156, 1159, 1166, 1167, 1168, 1169, 1172, 1176, 1186, 1187, 1189, 1213, 1214, 1215, 1227, 1228, 1229, 1238, 1239, 1241, 1242, 1247, 1249, 1261, 1270, 1272, 1276, 1284, 1293, 1295, 1296, 1297, 1299, 1300, 1305, 1309, 1323, 
1325, 1328, 1329, 1342, 1350, 1351, 1352, 1354, 1356, 1357, 1360, 1361, 1367, 1370, 1371, 1375, 1376, 1380, 1384, 1411, 1413, 1423, 1426, 1453, 1454, 1455, 1456, 1457, 1458, 1460, 1462, 1472, 1477, 1479, 1491, 1526, 1527, 1536, 1555, 1570, 1574, 1578, 1587, 1607, 1608, 1609, 1634, 1643, 1684, 1703, 1706, 1716, 1721, 1722, 1723, 1724, 1747, 1769, 1773, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1805, 1806, 1812, 1814, 1842, 1846, 1862, 1867, 1870, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1909, 1910, 1911, 1912, 1913, 1914, 1918, 1924, 1925, 1942, 1948, 1952, 1954, 1960, 1973, 1974, 1979, 1980, 1982, 1989, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2026, 2028, 2032, 2033, 2034, 2035, 2041, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2055, 2056, 2057, 2058, 2060, 2063, 2065, 2067, 2070, 2071, 2075, 2076, 2077, 2081, 2082, 2085, 2086, 2087, 2093, 2096, 2098, 2099, 2100, 2102, 2103, 2104, 2106, 2109, 2110, 2111, 2112], "lower": [0, 1, 8, 12, 24, 28, 35, 52, 53, 795, 797, 862, 954, 959, 966, 967, 968, 970, 1122, 1123, 1180, 1187, 1191, 1233, 1234, 1302, 1309, 1311, 1319, 1320, 1327, 1330, 1335, 1373, 1412, 1419, 1430, 1545, 1586, 1587, 1588, 1607, 1608, 1609, 1682, 1683, 1684, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1802, 1808, 1810, 1825, 1827, 1847, 1862, 1870, 1907, 1951, 1952, 1953, 2013, 2014, 2016, 2024, 2036, 2040, 2042, 2055, 2060, 2066, 2070, 2081, 2087, 2106, 2111], "point": [0, 1, 7, 8, 9, 11, 23, 24, 27, 29, 30, 32, 33, 37, 44, 47, 48, 53, 55, 64, 90, 155, 156, 323, 335, 341, 483, 488, 501, 700, 740, 741, 742, 743, 744, 745, 746, 751, 752, 753, 754, 755, 756, 757, 758, 761, 763, 764, 765, 766, 767, 777, 779, 781, 782, 783, 787, 797, 801, 821, 822, 823, 824, 827, 832, 855, 864, 865, 868, 883, 912, 914, 915, 916, 922, 923, 944, 945, 953, 975, 992, 993, 997, 1056, 1058, 1067, 1072, 1073, 1109, 1110, 1111, 1154, 1155, 1156, 1219, 1226, 1230, 1231, 1235, 1254, 1269, 1272, 1288, 1289, 1296, 1298, 1319, 1320, 1343, 1345, 1350, 1359, 1372, 1419, 1426, 1435, 1436, 1437, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1472, 1519, 1520, 1521, 1526, 1578, 1579, 1597, 1623, 1632, 1643, 1684, 1703, 1716, 1723, 1724, 1771, 1780, 1811, 1827, 1828, 1829, 1833, 1842, 1855, 1867, 1868, 1874, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1964, 1965, 2012, 2013, 2015, 2016, 2017, 2020, 2029, 2036, 2040, 2041, 2042, 2045, 2047, 2051, 2052, 2055, 2058, 2069, 2070, 2071, 2073, 2075, 2076, 2080, 2083, 2085, 2086, 2087, 2090, 2097, 2099, 2100, 2102, 2104, 2107, 2110, 2113, 2116], "lower_precision_fp": 0, "half": [0, 1, 10, 24, 35, 619, 986, 1122, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1272, 1335, 1341, 1453, 1454, 1455, 1456, 1457, 1458, 1473, 1474, 1476, 1526, 1627, 1628, 1631, 1723, 1724, 1777, 1802, 1855, 1923, 2033, 2055, 2058, 2063, 2080, 2082, 2083, 2086], "like": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 19, 23, 24, 28, 30, 33, 34, 35, 45, 47, 48, 52, 53, 55, 56, 57, 58, 59, 60, 61, 64, 66, 68, 338, 488, 591, 619, 762, 795, 844, 863, 881, 903, 906, 908, 922, 923, 942, 944, 953, 985, 989, 1050, 1051, 1060, 1103, 1108, 1109, 1129, 1151, 1154, 1155, 1162, 1166, 1169, 1170, 1171, 1177, 1189, 1197, 1200, 1212, 1230, 1231, 1270, 1272, 1273, 1280, 1283, 1288, 1292, 1319, 1320, 1331, 1344, 1430, 1438, 1469, 1472, 1488, 1489, 1490, 1526, 1527, 1528, 1536, 1537, 1555, 1578, 
1585, 1626, 1644, 1702, 1706, 1716, 1717, 1718, 1719, 1731, 1757, 1775, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1820, 1835, 1839, 1858, 1866, 1867, 1870, 1927, 1939, 1965, 1976, 2009, 2011, 2012, 2013, 2015, 2016, 2020, 2023, 2024, 2032, 2033, 2034, 2041, 2042, 2045, 2046, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2069, 2070, 2073, 2075, 2076, 2079, 2080, 2082, 2083, 2087, 2089, 2091, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2108, 2109, 2110, 2111, 2112, 2113], "linear": [0, 2, 9, 24, 28, 29, 32, 33, 34, 35, 52, 53, 55, 57, 59, 61, 64, 66, 433, 474, 475, 476, 477, 478, 481, 713, 722, 730, 731, 767, 794, 795, 796, 798, 816, 857, 861, 862, 864, 865, 913, 968, 1013, 1108, 1165, 1167, 1175, 1177, 1226, 1272, 1276, 1284, 1298, 1304, 1309, 1315, 1316, 1317, 1318, 1319, 1320, 1322, 1333, 1335, 1363, 1419, 1438, 1444, 1467, 1475, 1476, 1484, 1511, 1526, 1528, 1542, 1544, 1545, 1546, 1554, 1556, 1563, 1566, 1570, 1572, 1574, 1577, 1579, 1621, 1630, 1631, 1643, 1678, 1687, 1692, 1703, 1706, 1716, 1727, 1728, 1730, 1731, 1732, 1736, 1746, 1747, 1748, 1749, 1750, 1752, 1753, 1754, 1755, 1756, 1764, 1765, 1768, 1805, 1808, 1816, 1827, 1964, 1976, 1982, 2012, 2014, 2015, 2021, 2024, 2026, 2028, 2035, 2040, 2045, 2047, 2048, 2050, 2051, 2055, 2060, 2063, 2065, 2066, 2067, 2070, 2071, 2072, 2073, 2075, 2093, 2099, 2109, 2111], "layer": [0, 8, 24, 28, 30, 32, 33, 34, 55, 58, 736, 737, 762, 844, 861, 931, 1435, 1436, 1437, 1439, 1440, 1441, 1442, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1464, 1465, 1466, 1470, 1477, 1478, 1480, 1488, 1489, 1490, 1496, 1497, 1498, 1511, 1513, 1519, 1520, 1521, 1532, 1533, 1541, 1542, 1544, 1555, 1566, 1570, 1571, 1572, 1573, 1574, 1646, 1681, 1715, 1716, 1723, 1724, 1730, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 2012, 2040, 2045, 2048, 2050, 2051, 2054, 2055, 2060, 2063, 2065, 2067, 2070, 2071, 2073, 2080, 2091], "convolut": [0, 1, 2, 52, 740, 741, 742, 743, 744, 745, 774, 775, 776, 975, 1282, 1453, 1454, 1455, 1456, 1457, 1458, 1464, 1465, 1466, 1470, 1502, 1503, 1504, 1505, 1506, 1507, 1538, 1539, 1545, 1575, 1576, 1578, 1607, 1608, 1609, 1610, 1611, 1612, 1631, 1632, 1723, 1724, 1725, 1726, 1870, 2012, 2014, 2026, 2040, 2044, 2045, 2052, 2055, 2066, 2069, 2070, 2073, 2106], "ar": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 29, 30, 32, 33, 34, 35, 37, 39, 40, 41, 44, 45, 47, 48, 50, 52, 53, 55, 57, 58, 59, 60, 62, 63, 64, 65, 66, 68, 69, 71, 72, 75, 83, 86, 87, 88, 89, 90, 99, 152, 235, 256, 321, 323, 337, 338, 341, 379, 404, 450, 473, 483, 488, 501, 515, 519, 546, 582, 591, 619, 682, 691, 692, 693, 699, 737, 758, 762, 763, 765, 766, 767, 781, 784, 785, 787, 788, 789, 794, 795, 796, 797, 798, 801, 816, 817, 818, 819, 821, 822, 823, 824, 827, 842, 857, 861, 862, 863, 864, 865, 868, 877, 878, 889, 890, 891, 893, 896, 903, 906, 908, 909, 911, 912, 913, 914, 915, 916, 918, 922, 923, 929, 931, 942, 944, 953, 954, 956, 957, 959, 962, 975, 977, 982, 986, 989, 992, 993, 996, 997, 1010, 1011, 1013, 1014, 1016, 1021, 1033, 1044, 1050, 1053, 1054, 1064, 1068, 1075, 1082, 1091, 1096, 1100, 1103, 1108, 1109, 1110, 1111, 1124, 1125, 1126, 1127, 1129, 1131, 1136, 1138, 1144, 1147, 1149, 1150, 1154, 1155, 1156, 1160, 1165, 1166, 1171, 1173, 1175, 1176, 1177, 1180, 1183, 1185, 1186, 1187, 1191, 1193, 1196, 1197, 1200, 1209, 1216, 1226, 1230, 1231, 1233, 1235, 1249, 1261, 
1262, 1264, 1265, 1268, 1269, 1270, 1272, 1273, 1276, 1279, 1280, 1283, 1286, 1288, 1289, 1294, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1325, 1327, 1328, 1329, 1330, 1331, 1333, 1334, 1335, 1336, 1337, 1339, 1342, 1343, 1344, 1345, 1354, 1355, 1356, 1357, 1359, 1360, 1362, 1364, 1365, 1367, 1370, 1373, 1374, 1375, 1378, 1380, 1385, 1399, 1412, 1416, 1417, 1418, 1419, 1420, 1430, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1447, 1448, 1449, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1464, 1465, 1466, 1469, 1470, 1472, 1477, 1478, 1479, 1480, 1485, 1488, 1489, 1490, 1491, 1492, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1540, 1542, 1544, 1555, 1558, 1559, 1560, 1566, 1570, 1572, 1574, 1575, 1576, 1578, 1579, 1585, 1587, 1588, 1589, 1597, 1603, 1604, 1605, 1615, 1616, 1623, 1624, 1625, 1626, 1632, 1643, 1644, 1668, 1671, 1673, 1676, 1684, 1702, 1703, 1704, 1705, 1706, 1716, 1717, 1718, 1719, 1721, 1722, 1723, 1724, 1729, 1730, 1731, 1734, 1736, 1743, 1757, 1761, 1764, 1765, 1766, 1767, 1768, 1769, 1771, 1772, 1777, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1804, 1807, 1808, 1820, 1826, 1827, 1833, 1842, 1845, 1846, 1853, 1855, 1866, 1867, 1870, 1899, 1900, 1902, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1919, 1922, 1923, 1926, 1927, 1937, 1939, 1943, 1945, 1946, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1960, 1964, 1965, 1967, 1970, 1971, 1972, 1976, 1981, 1982, 1983, 1999, 2001, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2022, 2023, 2024, 2026, 2028, 2029, 2031, 2032, 2033, 2034, 2035, 2036, 2039, 2040, 2041, 2043, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2066, 2067, 2069, 2070, 2071, 2073, 2074, 2075, 2076, 2077, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2090, 2092, 2093, 2095, 2096, 2097, 2098, 2100, 2101, 2103, 2104, 2105, 2107, 2108, 2109, 2110, 2111, 2112, 2113, 2116], "much": [0, 4, 7, 9, 15, 23, 24, 52, 152, 896, 912, 917, 1064, 1186, 1308, 1309, 1336, 1345, 1469, 1716, 1747, 1808, 2013, 2042, 2045, 2051, 2052, 2055, 2060, 2068, 2075, 2076, 2080, 2098, 2099, 2102, 2115], "faster": [0, 2, 8, 11, 23, 24, 28, 912, 922, 923, 977, 1165, 1202, 1302, 1303, 1308, 1309, 1313, 1318, 1326, 1328, 1330, 1333, 1336, 1338, 1342, 1345, 1543, 1560, 1673, 1684, 1690, 1716, 1721, 1722, 1730, 1783, 1784, 1795, 1796, 1870, 1909, 1910, 1911, 1913, 1914, 2024, 2026, 2042, 2045, 2052, 2067, 2070, 2080, 2085, 2092, 2100, 2102, 2107], "reduct": [0, 2, 11, 28, 34, 55, 323, 515, 519, 975, 1318, 1350, 1419, 1438, 1439, 1445, 1459, 1461, 1469, 1479, 1485, 1486, 1491, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1576, 1604, 1605, 1613, 1615, 1616, 1624, 1629, 1640, 1641, 1644, 1645, 1656, 1664, 1665, 1666, 1667, 1668, 1676, 1688, 1689, 1700, 1701, 1716, 1827, 1904, 2014, 2033, 2047, 2052, 2068, 2070, 2105, 2111], "often": [0, 2, 4, 7, 8, 14, 23, 28, 33, 35, 48, 55, 58, 64, 152, 896, 917, 1191, 1200, 1288, 1319, 1320, 1329, 1378, 1468, 1488, 1489, 1490, 1597, 1605, 1623, 1632, 1706, 1730, 1810, 2016, 2024, 2042, 2045, 2050, 2054, 2055, 2057, 2058, 2059, 2063, 2068, 2075, 2085, 2099, 2101, 2102, 2109, 2111], "requir": [0, 1, 5, 8, 9, 11, 
14, 15, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 46, 47, 52, 53, 55, 58, 60, 63, 64, 121, 152, 223, 337, 460, 488, 490, 498, 515, 517, 519, 562, 795, 797, 825, 828, 883, 892, 896, 903, 905, 908, 909, 911, 912, 913, 914, 915, 916, 917, 975, 977, 1008, 1063, 1108, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1149, 1150, 1173, 1177, 1213, 1269, 1286, 1312, 1343, 1345, 1359, 1461, 1468, 1469, 1570, 1571, 1572, 1573, 1574, 1576, 1624, 1684, 1706, 1716, 1717, 1734, 1736, 1738, 1786, 1814, 1911, 1923, 1964, 1976, 2011, 2013, 2016, 2018, 2020, 2023, 2026, 2032, 2034, 2035, 2041, 2042, 2044, 2045, 2047, 2048, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2059, 2060, 2063, 2064, 2065, 2068, 2070, 2071, 2075, 2076, 2077, 2080, 2083, 2085, 2087, 2091, 2092, 2093, 2096, 2097, 2099, 2100, 2101, 2102, 2107, 2109, 2112], "dynam": [0, 12, 14, 15, 23, 53, 65, 66, 67, 68, 71, 72, 74, 77, 78, 682, 731, 735, 762, 763, 764, 765, 766, 767, 796, 801, 817, 826, 828, 831, 847, 853, 854, 855, 856, 861, 862, 975, 1180, 1181, 1183, 1186, 1193, 1280, 1286, 1288, 1344, 1730, 2015, 2016, 2017, 2040, 2045, 2055, 2058, 2062, 2063, 2065, 2067, 2068, 2071, 2092, 2093, 2099, 2100, 2102, 2108, 2111], "rang": [0, 1, 3, 11, 23, 28, 29, 32, 33, 35, 37, 47, 48, 51, 52, 53, 59, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 90, 303, 483, 686, 763, 765, 767, 797, 821, 822, 823, 824, 825, 827, 828, 829, 888, 945, 970, 980, 1069, 1070, 1078, 1108, 1158, 1175, 1185, 1191, 1197, 1233, 1234, 1235, 1339, 1350, 1365, 1374, 1392, 1419, 1430, 1461, 1471, 1473, 1474, 1478, 1484, 1497, 1516, 1528, 1533, 1537, 1542, 1544, 1560, 1561, 1562, 1566, 1627, 1628, 1632, 1690, 1716, 1722, 1798, 1799, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1827, 1906, 1962, 1963, 2013, 2014, 2016, 2042, 2044, 2045, 2050, 2055, 2057, 2058, 2065, 2066, 2067, 2069, 2070, 2073, 2074, 2081, 2083, 2085, 2086, 2087, 2089, 2097, 2098, 2100, 2109, 2111], "tri": [0, 2, 3, 7, 23, 35, 40, 64, 89, 582, 1272, 1526, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 2015, 2016, 2045, 2050, 2065, 2068, 2099, 2101, 2102, 2104], "match": [0, 1, 3, 12, 28, 30, 32, 33, 35, 47, 52, 55, 64, 66, 74, 75, 86, 152, 315, 317, 323, 475, 476, 501, 502, 582, 585, 695, 701, 781, 795, 796, 797, 857, 881, 896, 917, 922, 923, 989, 1007, 1010, 1022, 1023, 1024, 1053, 1100, 1108, 1165, 1177, 1187, 1249, 1272, 1286, 1288, 1305, 1344, 1366, 1460, 1469, 1526, 1532, 1579, 1604, 1605, 1643, 1670, 1677, 1684, 1703, 1716, 1736, 1766, 1772, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1828, 1862, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1944, 1960, 1976, 1981, 2013, 2016, 2020, 2023, 2033, 2035, 2042, 2043, 2045, 2048, 2055, 2060, 2063, 2065, 2067, 2068, 2070, 2071, 2072, 2075, 2083, 2087, 2090, 2099, 2102, 2110, 2111], "each": [0, 1, 2, 5, 9, 14, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 45, 48, 50, 51, 52, 53, 55, 56, 58, 60, 61, 62, 64, 82, 83, 85, 88, 99, 121, 155, 156, 400, 404, 495, 515, 517, 519, 547, 609, 619, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 685, 688, 695, 697, 698, 701, 737, 762, 794, 842, 844, 862, 864, 865, 868, 880, 889, 890, 891, 892, 894, 895, 896, 903, 905, 908, 909, 917, 943, 
946, 955, 959, 963, 964, 966, 969, 973, 975, 977, 980, 997, 1019, 1020, 1024, 1053, 1056, 1058, 1064, 1066, 1071, 1074, 1080, 1085, 1086, 1087, 1088, 1091, 1092, 1103, 1106, 1108, 1125, 1127, 1128, 1129, 1130, 1131, 1133, 1134, 1137, 1138, 1139, 1140, 1141, 1143, 1145, 1149, 1150, 1152, 1157, 1162, 1167, 1168, 1172, 1176, 1177, 1185, 1187, 1213, 1226, 1232, 1234, 1235, 1236, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1272, 1288, 1289, 1294, 1326, 1344, 1345, 1360, 1362, 1370, 1372, 1373, 1374, 1375, 1378, 1380, 1388, 1389, 1412, 1418, 1420, 1430, 1435, 1438, 1439, 1443, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1472, 1477, 1478, 1480, 1485, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1498, 1506, 1507, 1511, 1513, 1517, 1518, 1526, 1529, 1530, 1531, 1532, 1533, 1534, 1540, 1542, 1544, 1555, 1558, 1559, 1561, 1566, 1569, 1575, 1576, 1578, 1587, 1602, 1604, 1605, 1610, 1611, 1612, 1615, 1616, 1618, 1619, 1620, 1623, 1624, 1625, 1629, 1632, 1642, 1644, 1668, 1669, 1671, 1673, 1676, 1684, 1697, 1706, 1716, 1723, 1724, 1757, 1758, 1760, 1770, 1771, 1772, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1819, 1823, 1824, 1827, 1834, 1842, 1847, 1848, 1853, 1857, 1862, 1870, 1874, 1885, 1891, 1908, 1909, 1910, 1911, 1913, 1914, 1915, 1926, 1927, 1943, 1945, 1946, 1949, 1960, 1961, 1962, 1970, 1976, 1977, 2005, 2011, 2015, 2016, 2020, 2022, 2033, 2034, 2035, 2040, 2041, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2057, 2058, 2059, 2060, 2063, 2065, 2067, 2069, 2070, 2075, 2076, 2077, 2080, 2081, 2083, 2085, 2086, 2087, 2090, 2091, 2092, 2095, 2096, 2098, 2100, 2102, 2103, 2104, 2105, 2107, 2108, 2109, 2110, 2111, 2113], "its": [0, 1, 4, 5, 7, 8, 9, 12, 14, 15, 18, 19, 23, 24, 26, 28, 29, 30, 32, 33, 34, 35, 37, 39, 47, 48, 50, 53, 55, 62, 63, 64, 66, 68, 83, 84, 85, 152, 260, 460, 489, 490, 506, 515, 517, 519, 585, 586, 587, 619, 682, 825, 828, 877, 878, 879, 881, 882, 883, 896, 901, 902, 919, 920, 927, 928, 967, 968, 975, 985, 986, 993, 997, 1014, 1042, 1044, 1053, 1068, 1091, 1095, 1098, 1108, 1111, 1156, 1162, 1166, 1167, 1168, 1171, 1176, 1187, 1234, 1235, 1239, 1252, 1272, 1283, 1308, 1309, 1313, 1318, 1325, 1327, 1330, 1331, 1336, 1342, 1367, 1377, 1395, 1399, 1438, 1440, 1441, 1442, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1488, 1489, 1490, 1526, 1527, 1536, 1558, 1566, 1574, 1632, 1643, 1671, 1677, 1706, 1711, 1712, 1717, 1718, 1719, 1723, 1724, 1725, 1727, 1731, 1732, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1758, 1764, 1768, 1772, 1778, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1817, 1842, 1846, 1850, 1851, 1867, 1868, 1891, 1901, 1928, 1948, 1949, 1951, 1959, 1969, 1974, 1983, 2013, 2015, 2016, 2020, 2026, 2028, 2029, 2032, 2041, 2042, 2043, 2045, 2047, 2048, 2050, 2051, 2052, 2055, 2057, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 2075, 2076, 2077, 2080, 2083, 2084, 2086, 2090, 2091, 2092, 2093, 2096, 2098, 2099, 2100, 2101, 2102, 2105, 2107, 2113, 2114], "appropri": [0, 7, 8, 9, 28, 29, 30, 35, 55, 63, 64, 65, 488, 898, 929, 997, 1053, 1200, 1288, 1870, 2016, 2017, 2034, 2045, 2052, 2057, 2060, 2063, 2070, 2073, 2075, 2076, 2077, 2082, 2099, 2102, 2107, 2112], "ordinarili": [0, 1197, 2041], "train": [0, 1, 12, 15, 23, 24, 28, 29, 30, 31, 32, 33, 37, 45, 46, 47, 48, 55, 58, 59, 64, 714, 715, 716, 717, 
718, 719, 720, 721, 722, 732, 733, 734, 735, 801, 803, 840, 841, 858, 859, 860, 862, 864, 865, 866, 918, 980, 989, 1052, 1053, 1056, 1058, 1065, 1175, 1272, 1277, 1284, 1287, 1288, 1430, 1434, 1440, 1441, 1442, 1461, 1462, 1463, 1468, 1469, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1526, 1532, 1533, 1545, 1566, 1574, 1598, 1602, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1682, 1683, 1684, 1716, 1731, 1734, 1736, 1765, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 2011, 2013, 2014, 2020, 2026, 2029, 2040, 2042, 2045, 2047, 2050, 2056, 2058, 2060, 2062, 2065, 2067, 2068, 2069, 2073, 2075, 2077, 2085, 2092, 2095, 2096, 2097, 2099, 2100, 2104, 2105, 2106, 2108, 2111], "gradscal": [0, 2041, 2045], "togeth": [0, 3, 9, 23, 28, 33, 34, 35, 56, 61, 64, 762, 1050, 1108, 1165, 1167, 1171, 1175, 1176, 1212, 1216, 1312, 1477, 1496, 1542, 1643, 1716, 1721, 1949, 2026, 2041, 2048, 2049, 2050, 2051, 2054, 2055, 2070, 2075, 2076, 2077, 2080, 2085, 2097, 2099, 2101, 2102, 2107, 2115], "shown": [0, 12, 24, 39, 48, 52, 1050, 1060, 1498, 1736, 1778, 1808, 1874, 2013, 2016, 2041, 2045, 2048, 2050, 2055, 2063, 2070, 2080, 2093, 2097, 2099, 2100, 2103], "exampl": [0, 1, 2, 3, 5, 7, 8, 9, 14, 17, 18, 19, 20, 23, 24, 28, 29, 30, 31, 32, 34, 35, 36, 37, 38, 40, 41, 44, 45, 46, 47, 48, 50, 51, 53, 55, 56, 57, 59, 60, 61, 63, 66, 68, 74, 75, 90, 193, 210, 235, 244, 256, 262, 291, 313, 315, 317, 319, 323, 337, 354, 403, 404, 417, 447, 448, 449, 450, 451, 473, 483, 485, 488, 489, 490, 495, 498, 501, 515, 517, 519, 525, 539, 546, 560, 562, 582, 583, 585, 586, 587, 588, 589, 590, 591, 609, 619, 682, 683, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 722, 730, 731, 736, 740, 741, 742, 743, 744, 745, 747, 748, 750, 758, 759, 760, 762, 763, 764, 765, 766, 767, 774, 775, 776, 787, 794, 795, 796, 798, 799, 812, 813, 814, 815, 816, 817, 818, 819, 826, 840, 857, 862, 863, 864, 865, 868, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 898, 901, 902, 903, 904, 905, 906, 907, 908, 909, 911, 912, 913, 914, 915, 916, 918, 919, 925, 927, 928, 929, 931, 938, 943, 945, 946, 947, 948, 949, 950, 951, 952, 954, 955, 956, 957, 958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 973, 974, 975, 977, 980, 982, 983, 986, 989, 990, 991, 992, 993, 994, 995, 996, 997, 1007, 1050, 1051, 1056, 1058, 1087, 1088, 1089, 1090, 1091, 1092, 1095, 1096, 1097, 1098, 1099, 1100, 1102, 1103, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, 1118, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1165, 1166, 1167, 1168, 1173, 1175, 1177, 1185, 1186, 1187, 1197, 1200, 1213, 1214, 1215, 1219, 1226, 1229, 1232, 1233, 1234, 1235, 1236, 1238, 1239, 1243, 1247, 1249, 1257, 1259, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1272, 1273, 1275, 1276, 1277, 1278, 1279, 1280, 1282, 1283, 1284, 1286, 1287, 1288, 1289, 1290, 1291, 1293, 1294, 1295, 1296, 1297, 1298, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1346, 1347, 1348, 1349, 1350, 1352, 
1353, 1354, 1355, 1356, 1357, 1359, 1360, 1361, 1362, 1363, 1364, 1366, 1367, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1380, 1395, 1411, 1412, 1414, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1426, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1586, 1590, 1597, 1599, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1616, 1618, 1619, 1620, 1623, 1624, 1625, 1627, 1628, 1632, 1634, 1668, 1670, 1671, 1674, 1675, 1684, 1703, 1706, 1715, 1716, 1723, 1724, 1730, 1731, 1732, 1734, 1736, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1768, 1769, 1770, 1771, 1772, 1774, 1775, 1776, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1815, 1817, 1819, 1820, 1822, 1823, 1824, 1825, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1835, 1837, 1839, 1841, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1857, 1858, 1862, 1864, 1865, 1866, 1867, 1868, 1871, 1874, 1877, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1894, 1895, 1899, 1901, 1902, 1904, 1905, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1920, 1921, 1922, 1924, 1926, 1927, 1929, 1930, 1937, 1938, 1939, 1940, 1941, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1965, 1967, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 2009, 2010, 2011, 2012, 2013, 2015, 2016, 2017, 2020, 2022, 2023, 2027, 2028, 2029, 2033, 2034, 2035, 2040, 2042, 2043, 2044, 2045, 2050, 2051, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2062, 2064, 2067, 2068, 2069, 2070, 2071, 2075, 2077, 2079, 2080, 2081, 2083, 2084, 2085, 2086, 2087, 2089, 2090, 2091, 2093, 2096, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2108, 2109, 2110, 2111, 2112, 2115], "recip": [0, 3, 32, 55, 1526, 1716, 1738, 2041, 2055, 2101], "howev": [0, 2, 3, 4, 5, 7, 9, 14, 15, 23, 24, 28, 32, 35, 37, 44, 47, 52, 53, 55, 56, 57, 59, 60, 63, 64, 66, 71, 75, 83, 86, 87, 88, 260, 483, 547, 898, 911, 1010, 1032, 1053, 1096, 1098, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1165, 1166, 1170, 1171, 1173, 1176, 1177, 1186, 1197, 1272, 1275, 1276, 1283, 1285, 1312, 1327, 1328, 1330, 1344, 1345, 1362, 1438, 1440, 1441, 1442, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1469, 1526, 1607, 1608, 1609, 1632, 1706, 1716, 1757, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 
1888, 1889, 1890, 1909, 1910, 1911, 1912, 1913, 1914, 1976, 1981, 1989, 2012, 2016, 2035, 2041, 2042, 2045, 2046, 2048, 2053, 2055, 2057, 2059, 2060, 2061, 2062, 2065, 2068, 2075, 2077, 2080, 2086, 2096, 2098, 2099, 2100, 2101, 2102, 2103], "modular": [0, 2041, 2063], "mai": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 14, 19, 22, 23, 24, 28, 30, 32, 33, 35, 36, 37, 40, 44, 46, 47, 48, 52, 55, 56, 60, 63, 64, 65, 86, 198, 223, 256, 315, 323, 460, 488, 517, 519, 558, 605, 619, 682, 691, 817, 818, 819, 864, 895, 903, 906, 908, 909, 911, 913, 917, 922, 935, 946, 957, 969, 975, 990, 991, 993, 995, 1008, 1010, 1011, 1019, 1032, 1042, 1043, 1044, 1050, 1051, 1053, 1064, 1066, 1071, 1085, 1086, 1108, 1147, 1151, 1156, 1159, 1162, 1169, 1170, 1172, 1186, 1187, 1192, 1197, 1199, 1200, 1201, 1211, 1235, 1269, 1272, 1275, 1276, 1282, 1283, 1285, 1286, 1288, 1294, 1303, 1304, 1308, 1309, 1310, 1312, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1328, 1330, 1331, 1334, 1335, 1336, 1342, 1344, 1345, 1350, 1367, 1377, 1392, 1430, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1491, 1496, 1522, 1523, 1524, 1526, 1533, 1542, 1572, 1574, 1579, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1624, 1632, 1634, 1643, 1649, 1671, 1684, 1702, 1703, 1704, 1705, 1706, 1716, 1730, 1733, 1734, 1736, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1802, 1826, 1846, 1849, 1870, 1876, 1894, 1907, 1923, 1927, 1928, 1951, 1964, 1981, 1989, 2011, 2012, 2013, 2015, 2016, 2020, 2022, 2023, 2024, 2026, 2029, 2034, 2041, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2050, 2055, 2058, 2059, 2060, 2061, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2073, 2075, 2077, 2080, 2082, 2089, 2093, 2096, 2098, 2099, 2101, 2102, 2105, 2109, 2111, 2112, 2113, 2115], "separ": [0, 1, 3, 9, 11, 14, 20, 23, 28, 30, 32, 47, 48, 55, 56, 58, 61, 63, 64, 737, 762, 893, 908, 1108, 1125, 1127, 1133, 1134, 1137, 1138, 1143, 1145, 1165, 1180, 1283, 1318, 1333, 1480, 1488, 1489, 1490, 1532, 1534, 1651, 1772, 1839, 2011, 2014, 2016, 2020, 2022, 2041, 2042, 2044, 2045, 2049, 2051, 2060, 2063, 2067, 2068, 2076, 2080, 2081, 2085, 2096, 2109, 2111], "desir": [0, 1, 3, 23, 28, 34, 35, 55, 64, 90, 157, 172, 174, 177, 180, 181, 182, 197, 208, 211, 242, 256, 269, 299, 327, 395, 447, 448, 449, 450, 451, 499, 501, 502, 522, 527, 546, 547, 548, 562, 582, 585, 605, 606, 619, 795, 868, 882, 944, 953, 972, 1054, 1055, 1079, 1080, 1089, 1090, 1109, 1110, 1111, 1121, 1126, 1144, 1160, 1162, 1163, 1164, 1230, 1231, 1272, 1292, 1343, 1359, 1365, 1372, 1387, 1393, 1417, 1419, 1420, 1438, 1439, 1526, 1577, 1605, 1651, 1690, 1691, 1706, 1757, 1771, 1775, 1776, 1817, 1824, 1827, 1828, 1829, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1875, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1903, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1926, 1942, 1953, 1955, 1999, 2000, 2004, 2005, 2009, 2010, 2033, 2034, 2035, 2041, 2042, 2045, 2049, 2053, 2055, 2060, 2074, 2075, 2081, 2082, 2083, 2086, 2111], "As": [0, 1, 8, 20, 24, 28, 30, 35, 52, 53, 55, 58, 59, 64, 256, 488, 957, 1108, 1276, 1304, 1308, 1309, 1319, 1320, 1331, 1336, 1430, 1464, 1465, 1466, 1470, 1491, 1526, 1558, 1702, 1723, 1724, 1770, 1776, 2010, 2013, 2015, 2016, 2023, 2035, 2042, 2045, 2048, 2050, 2051, 2055, 2058, 2060, 2061, 2062, 2063, 2065, 2068, 2074, 2075, 2076, 2077, 2079, 2080, 2092, 2095, 2097, 2099, 2102, 2103, 2104, 2111], "section": [0, 1, 7, 23, 35, 37, 39, 53, 59, 64, 88, 121, 1064, 1108, 1226, 1462, 1477, 1496, 1524, 1542, 1615, 1760, 1943, 
2013, 2014, 2015, 2016, 2017, 2032, 2034, 2035, 2041, 2042, 2045, 2047, 2048, 2049, 2052, 2053, 2055, 2057, 2069, 2076, 2085, 2097, 2099, 2100, 2102, 2103, 2104, 2105, 2109, 2111], "infer": [0, 1, 2, 3, 5, 11, 12, 15, 30, 33, 35, 47, 55, 582, 619, 864, 865, 868, 882, 883, 918, 980, 989, 1024, 1162, 1163, 1186, 1235, 1256, 1270, 1273, 1282, 1287, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1532, 1579, 1643, 1670, 1706, 1769, 1797, 1808, 1842, 1849, 1867, 1868, 1909, 1910, 1911, 1912, 1913, 1914, 1942, 1959, 2012, 2013, 2015, 2016, 2020, 2033, 2035, 2045, 2055, 2065, 2069, 2070, 2071, 2073, 2080, 2092, 2095, 2096, 2098, 2099, 2102, 2104, 2108], "onli": [0, 1, 2, 3, 4, 5, 7, 8, 9, 12, 14, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 45, 47, 48, 52, 53, 55, 59, 60, 63, 64, 84, 85, 86, 121, 256, 313, 323, 325, 337, 354, 447, 448, 449, 450, 451, 460, 483, 488, 490, 515, 517, 519, 559, 585, 586, 587, 589, 590, 616, 619, 682, 697, 698, 737, 740, 741, 742, 743, 745, 774, 775, 776, 781, 787, 788, 789, 793, 795, 803, 816, 819, 822, 823, 826, 830, 845, 852, 861, 881, 898, 900, 903, 906, 907, 908, 909, 911, 913, 922, 929, 931, 944, 945, 953, 975, 983, 989, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1008, 1010, 1011, 1013, 1020, 1022, 1024, 1042, 1050, 1051, 1053, 1075, 1105, 1109, 1111, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1147, 1154, 1155, 1159, 1160, 1161, 1166, 1177, 1185, 1187, 1199, 1200, 1225, 1226, 1230, 1231, 1235, 1243, 1248, 1253, 1261, 1270, 1272, 1275, 1276, 1284, 1288, 1289, 1292, 1303, 1308, 1309, 1311, 1312, 1313, 1314, 1316, 1318, 1319, 1320, 1321, 1331, 1333, 1334, 1336, 1337, 1344, 1345, 1362, 1367, 1430, 1456, 1457, 1458, 1461, 1462, 1468, 1469, 1472, 1496, 1498, 1526, 1529, 1531, 1532, 1534, 1543, 1566, 1574, 1578, 1579, 1590, 1615, 1624, 1626, 1632, 1643, 1671, 1684, 1702, 1703, 1706, 1709, 1710, 1711, 1712, 1716, 1718, 1719, 1723, 1724, 1737, 1757, 1758, 1759, 1771, 1779, 1782, 1786, 1797, 1800, 1804, 1807, 1808, 1810, 1814, 1826, 1835, 1839, 1841, 1843, 1866, 1870, 1872, 1875, 1876, 1901, 1904, 1908, 1918, 1923, 1927, 1934, 1942, 1953, 1955, 1960, 1961, 1964, 1965, 1967, 1973, 1974, 1975, 1976, 1981, 2001, 2011, 2014, 2015, 2016, 2020, 2022, 2023, 2024, 2026, 2027, 2029, 2031, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2063, 2065, 2067, 2068, 2069, 2070, 2073, 2074, 2075, 2076, 2077, 2080, 2081, 2083, 2084, 2085, 2086, 2087, 2092, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2107, 2109, 2110, 2111, 2113], "arg": [0, 1, 3, 4, 5, 14, 23, 24, 28, 30, 32, 33, 35, 37, 39, 45, 46, 48, 49, 50, 51, 52, 53, 55, 63, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 582, 605, 682, 734, 749, 751, 753, 754, 755, 756, 762, 764, 765, 826, 864, 893, 895, 903, 904, 905, 908, 909, 938, 979, 1053, 1068, 1165, 1169, 1170, 1171, 1177, 1185, 1187, 1207, 1208, 1272, 1275, 1362, 1438, 1439, 1459, 1461, 1485, 1487, 1492, 1515, 1517, 1518, 1526, 1529, 1530, 1531, 1533, 1540, 1555, 1557, 1558, 1559, 1561, 1565, 1566, 1567, 1568, 1575, 1604, 1605, 1615, 1644, 1668, 1676, 1706, 1716, 1738, 1743, 1757, 1764, 1766, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1928, 1961, 1973, 1976, 2011, 2014, 2016, 2017, 2020, 2032, 2034, 2042, 2045, 2047, 2048, 2049, 2057, 2063, 2065, 2068, 2075, 2076, 2077, 2082, 2086, 2091, 2097, 2099, 2100, 2101, 
2102, 2110, 2111, 2112], "deprec": [0, 7, 28, 30, 37, 47, 48, 52, 55, 59, 408, 515, 559, 605, 787, 788, 789, 795, 828, 917, 965, 966, 982, 1007, 1057, 1061, 1217, 1272, 1327, 1330, 1362, 1363, 1438, 1439, 1459, 1461, 1484, 1485, 1491, 1492, 1517, 1518, 1526, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1580, 1581, 1604, 1605, 1615, 1644, 1668, 1676, 1703, 1704, 1705, 1707, 1716, 1720, 1765, 1766, 1768, 1771, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1826, 1842, 1868, 1923, 1927, 1951, 2012, 2013, 2018, 2032, 2043, 2069, 2073, 2082, 2086, 2087, 2102], "pleas": [0, 1, 4, 5, 6, 7, 8, 9, 11, 12, 15, 24, 28, 30, 35, 39, 42, 43, 47, 48, 52, 55, 56, 57, 58, 59, 60, 61, 64, 66, 76, 77, 83, 88, 256, 257, 500, 515, 620, 691, 732, 733, 734, 735, 736, 737, 743, 744, 745, 747, 748, 758, 763, 764, 765, 766, 767, 864, 865, 895, 898, 901, 902, 908, 909, 911, 913, 914, 917, 931, 957, 977, 989, 1091, 1165, 1169, 1170, 1171, 1172, 1173, 1177, 1269, 1272, 1336, 1367, 1377, 1445, 1456, 1491, 1526, 1632, 1643, 1644, 1649, 1671, 1684, 1702, 1716, 1765, 1766, 1768, 1782, 1783, 1784, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1840, 1866, 1868, 1875, 1912, 1976, 2018, 2020, 2023, 2028, 2033, 2034, 2035, 2036, 2041, 2042, 2046, 2048, 2049, 2051, 2053, 2058, 2059, 2060, 2061, 2065, 2067, 2068, 2070, 2073, 2074, 2075, 2076, 2080, 2081, 2082, 2083, 2084, 2085, 2087, 2093, 2097, 2100, 2101, 2102, 2108, 2111, 2114], "instead": [0, 1, 3, 5, 8, 9, 14, 19, 20, 23, 24, 28, 30, 34, 35, 37, 44, 47, 48, 52, 53, 55, 58, 59, 60, 62, 64, 66, 75, 408, 460, 488, 501, 515, 750, 759, 770, 771, 795, 819, 828, 883, 893, 908, 909, 911, 912, 913, 914, 917, 922, 959, 965, 977, 1143, 1145, 1170, 1171, 1172, 1173, 1176, 1177, 1200, 1203, 1217, 1259, 1270, 1272, 1273, 1277, 1302, 1303, 1309, 1311, 1327, 1330, 1336, 1342, 1344, 1373, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1459, 1461, 1462, 1464, 1465, 1466, 1470, 1477, 1485, 1491, 1492, 1493, 1494, 1495, 1496, 1517, 1518, 1519, 1520, 1521, 1526, 1529, 1530, 1531, 1533, 1540, 1542, 1554, 1558, 1559, 1560, 1566, 1575, 1599, 1600, 1601, 1604, 1605, 1615, 1625, 1632, 1644, 1657, 1658, 1659, 1668, 1676, 1690, 1716, 1766, 1768, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1801, 1802, 1808, 1831, 1832, 1842, 1862, 1866, 1908, 1923, 1927, 1964, 1976, 2013, 2015, 2016, 2020, 2023, 2024, 2034, 2040, 2041, 2042, 2046, 2047, 2048, 2049, 2050, 2052, 2053, 2057, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 2069, 2070, 2073, 2076, 2079, 2080, 2086, 2087, 2089, 2091, 2096, 2097, 2098, 2099, 2101, 2102, 2107, 2109, 2110, 2111, 2112], "new": [0, 1, 5, 8, 14, 18, 19, 23, 26, 28, 30, 35, 37, 38, 47, 48, 52, 55, 56, 59, 60, 61, 62, 63, 64, 88, 90, 223, 256, 313, 417, 450, 485, 488, 489, 498, 501, 546, 582, 585, 619, 682, 686, 762, 793, 816, 821, 857, 882, 884, 885, 886, 887, 888, 899, 901, 908, 909, 918, 927, 928, 942, 958, 959, 962, 964, 973, 977, 980, 992, 994, 995, 1011, 1013, 1092, 1096, 1118, 1122, 1123, 1149, 1150, 1152, 1166, 1175, 1177, 1185, 1187, 1195, 1209, 1211, 1243, 1247, 1261, 1262, 1265, 1268, 1272, 1279, 1284, 1286, 1346, 1347, 1348, 1349, 1366, 1421, 1424, 1440, 1441, 1442, 1477, 1488, 1489, 1490, 1526, 1527, 1532, 1536, 1566, 1577, 1579, 1643, 1708, 1713, 1714, 1716, 1725, 1726, 1727, 1728, 1731, 1736, 1738, 1742, 1743, 1744, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1761, 1765, 1768, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 
1794, 1795, 1796, 1797, 1810, 1834, 1844, 1845, 1849, 1850, 1851, 1857, 1858, 1862, 1867, 1868, 1877, 1879, 1890, 1892, 1894, 1916, 1917, 1920, 1938, 1940, 1941, 1957, 1959, 1963, 1974, 1975, 1976, 1982, 2011, 2012, 2013, 2015, 2016, 2023, 2024, 2026, 2032, 2033, 2034, 2035, 2036, 2042, 2045, 2046, 2049, 2053, 2054, 2055, 2056, 2057, 2059, 2060, 2061, 2063, 2065, 2067, 2068, 2069, 2070, 2073, 2077, 2080, 2081, 2082, 2084, 2085, 2086, 2087, 2091, 2096, 2098, 2099, 2100, 2102, 2110, 2111], "version": [0, 1, 2, 5, 8, 12, 14, 19, 22, 24, 28, 30, 33, 35, 48, 52, 55, 58, 59, 64, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 123, 125, 127, 129, 132, 133, 135, 143, 145, 148, 149, 151, 154, 160, 162, 164, 166, 168, 170, 179, 188, 196, 200, 203, 205, 215, 217, 233, 238, 240, 246, 249, 251, 253, 255, 259, 264, 271, 273, 275, 279, 281, 285, 287, 294, 296, 298, 306, 308, 310, 312, 314, 316, 318, 320, 358, 360, 362, 364, 366, 368, 370, 373, 375, 377, 378, 385, 387, 389, 391, 393, 397, 401, 403, 422, 425, 428, 430, 441, 443, 445, 453, 458, 468, 471, 487, 492, 494, 510, 513, 514, 515, 516, 518, 524, 529, 531, 534, 536, 538, 551, 553, 555, 558, 564, 566, 573, 577, 579, 595, 598, 600, 602, 604, 614, 624, 737, 738, 739, 751, 752, 753, 754, 755, 756, 777, 778, 779, 780, 782, 786, 787, 806, 807, 808, 841, 850, 858, 859, 861, 918, 929, 989, 1053, 1165, 1191, 1193, 1202, 1230, 1269, 1272, 1275, 1276, 1280, 1283, 1302, 1309, 1313, 1315, 1316, 1320, 1321, 1333, 1334, 1367, 1421, 1439, 1496, 1526, 1527, 1542, 1579, 1597, 1622, 1632, 1639, 1648, 1680, 1683, 1698, 1699, 1703, 1707, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1765, 1766, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1859, 1860, 1861, 1918, 1921, 1922, 1923, 1927, 1948, 1962, 1964, 1971, 1972, 2011, 2033, 2034, 2042, 2043, 2045, 2048, 2049, 2051, 2052, 2053, 2054, 2055, 2056, 2059, 2061, 2063, 2065, 2067, 2068, 2069, 2073, 2080, 2087, 2089, 2093, 2098, 2102, 2103, 2109, 2111, 2113], "1": [0, 1, 2, 3, 11, 12, 14, 18, 19, 20, 21, 22, 23, 24, 25, 28, 29, 30, 32, 34, 35, 36, 37, 39, 40, 44, 45, 47, 51, 52, 53, 55, 59, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 138, 153, 154, 175, 193, 207, 210, 227, 229, 230, 231, 235, 244, 256, 260, 265, 288, 291, 313, 314, 315, 317, 319, 323, 354, 379, 403, 404, 447, 449, 450, 456, 473, 483, 485, 489, 490, 495, 498, 501, 515, 517, 519, 539, 540, 544, 546, 556, 557, 558, 560, 562, 563, 564, 565, 566, 583, 585, 586, 587, 589, 590, 609, 610, 617, 619, 682, 683, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 714, 715, 716, 717, 718, 719, 720, 721, 725, 726, 727, 728, 729, 732, 733, 736, 738, 739, 740, 741, 742, 743, 744, 745, 746, 748, 753, 754, 755, 758, 759, 760, 762, 772, 774, 775, 776, 777, 780, 784, 785, 787, 798, 810, 811, 821, 822, 823, 824, 827, 829, 858, 864, 865, 868, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 893, 897, 898, 903, 904, 906, 907, 908, 909, 911, 912, 913, 914, 915, 916, 918, 919, 927, 928, 931, 938, 940, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 956, 957, 958, 959, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 973, 974, 976, 977, 986, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 1002, 1007, 1019, 1050, 1051, 1066, 1071, 1078, 1082, 1085, 1086, 
1087, 1088, 1089, 1090, 1091, 1092, 1095, 1096, 1097, 1098, 1099, 1100, 1102, 1103, 1105, 1106, 1107, 1108, 1111, 1112, 1113, 1114, 1118, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1161, 1162, 1165, 1166, 1167, 1169, 1170, 1171, 1172, 1173, 1176, 1177, 1187, 1191, 1197, 1213, 1214, 1215, 1226, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1243, 1244, 1247, 1249, 1257, 1259, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1272, 1276, 1278, 1283, 1284, 1288, 1289, 1292, 1293, 1294, 1296, 1297, 1298, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1315, 1316, 1318, 1319, 1320, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1348, 1350, 1352, 1353, 1354, 1355, 1356, 1357, 1359, 1360, 1361, 1362, 1363, 1366, 1367, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1378, 1379, 1380, 1392, 1395, 1411, 1412, 1414, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1426, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1467, 1468, 1469, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1482, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1565, 1566, 1569, 1570, 1572, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1587, 1588, 1589, 1597, 1599, 1602, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1616, 1621, 1622, 1623, 1624, 1626, 1627, 1628, 1630, 1631, 1632, 1634, 1636, 1638, 1639, 1640, 1641, 1642, 1650, 1652, 1657, 1658, 1659, 1665, 1668, 1669, 1670, 1671, 1673, 1674, 1675, 1677, 1682, 1683, 1684, 1685, 1686, 1688, 1690, 1691, 1692, 1694, 1699, 1700, 1701, 1702, 1703, 1706, 1715, 1716, 1723, 1724, 1730, 1731, 1732, 1736, 1741, 1742, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1757, 1758, 1759, 1760, 1762, 1763, 1764, 1765, 1766, 1768, 1769, 1770, 1771, 1772, 1774, 1775, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1814, 1815, 1816, 1817, 1819, 1820, 1823, 1824, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1835, 1836, 1839, 1840, 1841, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1857, 1858, 1862, 1865, 1866, 1867, 1868, 1869, 1871, 1874, 1877, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1894, 1895, 1899, 1900, 1901, 1902, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1937, 1938, 1939, 1940, 1941, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1955, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1970, 1971, 1972, 1973, 1974, 1975, 
1976, 1977, 1978, 1979, 2012, 2014, 2015, 2016, 2017, 2020, 2023, 2024, 2025, 2026, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2043, 2044, 2045, 2048, 2050, 2051, 2052, 2053, 2055, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 2069, 2070, 2072, 2075, 2076, 2077, 2079, 2080, 2081, 2083, 2084, 2085, 2086, 2087, 2089, 2091, 2093, 2096, 2097, 2098, 2100, 2102, 2104, 2105, 2106, 2109, 2110, 2111, 2112, 2115, 2116], "10": [0, 1, 3, 12, 22, 23, 24, 28, 29, 33, 35, 36, 47, 50, 52, 66, 71, 74, 75, 315, 323, 337, 473, 515, 562, 583, 585, 586, 587, 687, 688, 699, 736, 747, 748, 762, 763, 764, 765, 767, 943, 955, 965, 968, 969, 973, 989, 997, 1087, 1088, 1089, 1090, 1091, 1106, 1108, 1125, 1127, 1130, 1131, 1133, 1134, 1137, 1138, 1140, 1141, 1143, 1145, 1151, 1159, 1160, 1214, 1226, 1236, 1261, 1277, 1283, 1284, 1290, 1295, 1298, 1325, 1327, 1328, 1343, 1345, 1347, 1352, 1354, 1355, 1356, 1357, 1359, 1367, 1412, 1428, 1429, 1430, 1432, 1433, 1439, 1442, 1445, 1446, 1452, 1455, 1458, 1468, 1469, 1477, 1478, 1479, 1480, 1490, 1496, 1497, 1498, 1523, 1527, 1528, 1533, 1536, 1537, 1542, 1544, 1566, 1570, 1571, 1572, 1573, 1574, 1578, 1584, 1609, 1612, 1616, 1623, 1624, 1634, 1706, 1716, 1723, 1724, 1747, 1755, 1769, 1772, 1778, 1782, 1802, 1808, 1810, 1828, 1829, 1837, 1862, 1870, 1874, 1880, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1890, 1899, 1915, 1927, 1928, 1939, 1943, 1944, 1947, 1949, 1962, 1964, 1977, 2011, 2013, 2014, 2015, 2016, 2017, 2020, 2023, 2025, 2042, 2045, 2047, 2048, 2051, 2055, 2058, 2059, 2060, 2061, 2063, 2064, 2065, 2066, 2070, 2079, 2080, 2081, 2083, 2085, 2086, 2087, 2093, 2096, 2097, 2099, 2100, 2102, 2104, 2109, 2111, 2116], "autocast_mod": 0, "is_autocast_avail": 0, "device_typ": [0, 1, 28, 2014, 2020, 2041, 2074], "sourc": [0, 1, 2, 3, 4, 5, 9, 13, 14, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 44, 45, 47, 49, 50, 52, 53, 55, 62, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 152, 198, 211, 235, 314, 315, 323, 342, 353, 398, 403, 404, 417, 418, 419, 455, 473, 489, 490, 515, 517, 519, 522, 526, 549, 558, 559, 561, 588, 605, 608, 611, 612, 626, 682, 697, 698, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 838, 839, 840, 841, 842, 843, 844, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 876, 889, 890, 891, 895, 896, 897, 898, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 910, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925, 927, 928, 929, 930, 931, 932, 933, 934, 935, 937, 938, 939, 941, 942, 954, 956, 957, 961, 963, 965, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1008, 1009, 1010, 1011, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 
1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1108, 1112, 1159, 1162, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1218, 1220, 1221, 1222, 1225, 1244, 1245, 1246, 1248, 1253, 1258, 1259, 1260, 1270, 1272, 1273, 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, 1283, 1284, 1285, 1286, 1287, 1288, 1289, 1290, 1291, 1344, 1345, 1365, 1374, 1379, 1380, 1381, 1382, 1383, 1384, 1385, 1386, 1387, 1388, 1389, 1390, 1391, 1392, 1393, 1394, 1396, 1399, 1400, 1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409, 1410, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1586, 1587, 1588, 1589, 1590, 1592, 1593, 1597, 1598, 1602, 1604, 1605, 1606, 1613, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1623, 1624, 1625, 1626, 1629, 1631, 1632, 1633, 1634, 1636, 1637, 1638, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1650, 1651, 1653, 1654, 1655, 1656, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1671, 1676, 1678, 1679, 1681, 1682, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1694, 1695, 1696, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1718, 1719, 1720, 1721, 1722, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1733, 1734, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 1769, 1771, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1790, 1791, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1816, 1833, 1858, 1863, 1866, 1867, 1868, 1869, 1870, 1874, 1875, 1876, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1901, 1902, 1908, 1915, 1923, 1928, 1931, 1932, 1933, 1934, 1935, 1936, 1944, 1962, 1964, 1965, 1966, 1967, 1968, 1969, 1981, 1982, 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2011, 2013, 2014, 2015, 2020, 2022, 2023, 2026, 2027, 2028, 2029, 2032, 2034, 2035, 2040, 2050, 2053, 2054, 2058, 2060, 2063, 2065, 2067, 2069, 2070, 2074, 2075, 2076, 2082, 2085, 2087, 2089, 2090, 2091, 2099, 2100, 2102, 2103, 2111, 2112, 2113], "return": [0, 1, 2, 3, 5, 11, 12, 13, 
14, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 45, 47, 50, 52, 53, 55, 56, 57, 59, 60, 61, 62, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 90, 121, 155, 157, 172, 174, 177, 180, 181, 182, 192, 193, 197, 198, 208, 210, 211, 218, 220, 221, 223, 234, 235, 242, 244, 256, 262, 269, 291, 299, 313, 321, 325, 327, 328, 330, 331, 332, 333, 335, 339, 341, 343, 354, 379, 395, 417, 437, 447, 448, 449, 450, 451, 460, 474, 475, 476, 477, 478, 480, 485, 489, 490, 498, 499, 500, 515, 517, 525, 527, 539, 545, 546, 558, 559, 560, 561, 562, 582, 583, 584, 585, 591, 605, 606, 609, 611, 615, 616, 619, 686, 694, 695, 697, 698, 699, 700, 701, 737, 768, 769, 772, 773, 777, 778, 779, 780, 783, 786, 793, 794, 795, 796, 816, 817, 818, 819, 838, 842, 843, 844, 857, 858, 859, 860, 862, 863, 864, 865, 866, 867, 868, 876, 877, 878, 879, 880, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 897, 903, 904, 905, 906, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 924, 925, 927, 928, 931, 935, 936, 937, 939, 940, 941, 942, 944, 945, 946, 953, 954, 956, 959, 961, 963, 964, 965, 966, 968, 969, 970, 972, 974, 975, 976, 977, 978, 980, 982, 983, 984, 989, 990, 991, 993, 994, 995, 997, 1000, 1001, 1002, 1003, 1005, 1007, 1008, 1010, 1011, 1013, 1017, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1030, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1043, 1045, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1069, 1070, 1071, 1072, 1073, 1074, 1083, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1095, 1096, 1097, 1098, 1099, 1102, 1108, 1109, 1110, 1111, 1112, 1113, 1118, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1144, 1147, 1148, 1149, 1150, 1151, 1152, 1156, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1180, 1182, 1184, 1185, 1187, 1194, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1211, 1215, 1216, 1218, 1220, 1221, 1222, 1223, 1224, 1225, 1229, 1230, 1231, 1233, 1234, 1235, 1239, 1243, 1247, 1248, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1264, 1265, 1268, 1269, 1270, 1272, 1273, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, 1283, 1284, 1285, 1287, 1288, 1289, 1290, 1291, 1292, 1294, 1297, 1298, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1322, 1325, 1326, 1327, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1339, 1340, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1352, 1353, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1366, 1367, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1378, 1381, 1382, 1383, 1385, 1386, 1392, 1400, 1401, 1402, 1404, 1406, 1407, 1409, 1412, 1417, 1418, 1420, 1421, 1422, 1423, 1424, 1426, 1430, 1431, 1432, 1433, 1438, 1439, 1459, 1460, 1461, 1462, 1469, 1473, 1474, 1485, 1491, 1492, 1516, 1517, 1518, 1519, 1520, 1521, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1536, 1537, 1540, 1541, 1542, 1555, 1558, 1559, 1560, 1561, 1562, 1566, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1588, 1589, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1602, 1604, 1605, 1606, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1631, 1632, 1633, 1634, 1636, 1637, 1638, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1650, 1651, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 
1671, 1676, 1678, 1679, 1681, 1682, 1684, 1685, 1687, 1688, 1689, 1690, 1691, 1697, 1699, 1700, 1701, 1702, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1720, 1721, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1733, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1749, 1750, 1751, 1752, 1753, 1755, 1756, 1757, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1768, 1769, 1770, 1771, 1772, 1774, 1775, 1776, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1789, 1790, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1816, 1817, 1819, 1822, 1823, 1824, 1825, 1826, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1844, 1845, 1847, 1848, 1849, 1850, 1851, 1852, 1855, 1857, 1862, 1863, 1864, 1865, 1871, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1894, 1895, 1899, 1901, 1902, 1903, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1922, 1923, 1926, 1927, 1928, 1937, 1938, 1939, 1940, 1941, 1942, 1944, 1946, 1947, 1948, 1951, 1952, 1953, 1954, 1955, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1966, 1967, 1970, 1972, 1974, 1975, 1976, 1979, 1981, 1982, 1984, 1985, 1987, 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 2007, 2009, 2010, 2011, 2013, 2017, 2020, 2026, 2027, 2028, 2029, 2032, 2034, 2035, 2040, 2041, 2042, 2044, 2045, 2048, 2049, 2052, 2053, 2054, 2055, 2058, 2059, 2060, 2061, 2063, 2064, 2065, 2067, 2068, 2069, 2070, 2074, 2075, 2076, 2079, 2080, 2081, 2082, 2083, 2084, 2086, 2087, 2089, 2090, 2091, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2104, 2109, 2110, 2111, 2112, 2113, 2116], "bool": [0, 1, 2, 3, 5, 12, 14, 19, 23, 24, 27, 28, 29, 30, 32, 34, 35, 37, 41, 47, 50, 52, 53, 55, 62, 63, 64, 152, 198, 211, 247, 262, 321, 323, 330, 331, 332, 333, 335, 336, 341, 343, 403, 404, 417, 447, 448, 449, 450, 451, 460, 473, 498, 519, 583, 605, 682, 695, 696, 697, 698, 699, 701, 737, 758, 760, 781, 782, 787, 796, 816, 843, 868, 876, 877, 878, 879, 883, 896, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 931, 944, 947, 949, 950, 952, 953, 959, 960, 966, 967, 968, 974, 975, 976, 977, 982, 983, 989, 1003, 1010, 1011, 1013, 1017, 1047, 1053, 1065, 1109, 1110, 1111, 1114, 1121, 1126, 1144, 1160, 1161, 1162, 1163, 1164, 1165, 1167, 1168, 1170, 1171, 1172, 1176, 1179, 1180, 1185, 1187, 1201, 1202, 1204, 1205, 1211, 1213, 1230, 1231, 1234, 1235, 1253, 1257, 1260, 1261, 1263, 1269, 1272, 1276, 1279, 1280, 1288, 1289, 1292, 1294, 1302, 1303, 1314, 1315, 1316, 1317, 1319, 1320, 1321, 1322, 1325, 1327, 1329, 1330, 1333, 1334, 1335, 1336, 1342, 1343, 1344, 1345, 1354, 1355, 1356, 1357, 1359, 1360, 1362, 1364, 1370, 1372, 1373, 1375, 1378, 1385, 1388, 1389, 1406, 1412, 1417, 1418, 1419, 1420, 1421, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1473, 1474, 1478, 1479, 1480, 1482, 1483, 1484, 1485, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1512, 1513, 1517, 1518, 1519, 1520, 1521, 1526, 1529, 1530, 1531, 1532, 1533, 1535, 1540, 1541, 1544, 1545, 1546, 1547, 1554, 1558, 1559, 1566, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1579, 1587, 1588, 1589, 1597, 1604, 1605, 1615, 1616, 1617, 1618, 
1619, 1620, 1623, 1624, 1625, 1629, 1632, 1634, 1636, 1643, 1644, 1668, 1676, 1684, 1703, 1709, 1715, 1716, 1717, 1721, 1722, 1725, 1726, 1728, 1730, 1733, 1735, 1736, 1737, 1758, 1759, 1760, 1761, 1763, 1766, 1771, 1775, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1814, 1816, 1822, 1824, 1826, 1827, 1831, 1832, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1862, 1867, 1871, 1876, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1899, 1909, 1910, 1911, 1912, 1913, 1914, 1921, 1922, 1923, 1926, 1927, 1936, 1942, 1946, 1951, 1960, 1961, 1964, 1965, 1967, 1970, 1971, 1972, 1981, 1982, 1997, 2009, 2010, 2011, 2013, 2014, 2015, 2016, 2020, 2027, 2029, 2032, 2033, 2034, 2035, 2053, 2060, 2063, 2064, 2065, 2066, 2068, 2069, 2074, 2075, 2080, 2082, 2083, 2085, 2086, 2087, 2089, 2091, 2098, 2102, 2106, 2110, 2112], "indic": [0, 1, 2, 3, 12, 19, 23, 24, 28, 29, 30, 34, 35, 47, 52, 53, 82, 85, 86, 193, 210, 315, 317, 319, 320, 321, 323, 473, 515, 517, 519, 546, 574, 575, 585, 616, 697, 698, 737, 747, 748, 801, 826, 877, 878, 879, 880, 881, 903, 905, 908, 922, 923, 959, 966, 967, 968, 980, 981, 982, 983, 989, 1003, 1008, 1010, 1011, 1013, 1047, 1053, 1087, 1088, 1111, 1167, 1168, 1170, 1171, 1172, 1176, 1177, 1187, 1213, 1226, 1247, 1270, 1272, 1277, 1286, 1289, 1290, 1294, 1303, 1314, 1316, 1327, 1330, 1344, 1352, 1360, 1362, 1364, 1370, 1373, 1375, 1378, 1385, 1412, 1418, 1422, 1430, 1431, 1432, 1433, 1461, 1468, 1469, 1473, 1474, 1520, 1521, 1522, 1523, 1524, 1529, 1531, 1532, 1594, 1595, 1596, 1615, 1623, 1624, 1627, 1628, 1644, 1660, 1661, 1662, 1670, 1684, 1699, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1750, 1751, 1770, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1806, 1808, 1827, 1862, 1899, 1904, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1938, 1939, 1943, 1946, 1952, 1953, 1954, 1955, 1960, 1961, 1962, 1964, 1965, 1967, 1976, 1979, 1981, 1982, 1997, 2013, 2014, 2016, 2020, 2033, 2034, 2048, 2057, 2060, 2065, 2066, 2075, 2080, 2084, 2085, 2087, 2103, 2106, 2111], "i": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 20, 21, 23, 24, 27, 28, 29, 30, 32, 34, 35, 36, 37, 38, 39, 40, 41, 44, 45, 46, 47, 48, 50, 51, 52, 55, 56, 57, 58, 59, 60, 62, 63, 64, 65, 66, 67, 68, 71, 73, 74, 75, 76, 77, 80, 82, 83, 84, 85, 86, 87, 88, 89, 90, 99, 152, 155, 156, 157, 172, 174, 175, 177, 180, 181, 182, 192, 193, 197, 198, 208, 210, 211, 220, 225, 235, 242, 256, 257, 260, 269, 288, 292, 299, 313, 315, 317, 321, 323, 325, 327, 330, 331, 332, 333, 334, 335, 337, 338, 340, 342, 343, 344, 345, 354, 395, 402, 404, 408, 417, 450, 460, 473, 474, 483, 485, 488, 489, 490, 495, 497, 498, 499, 500, 501, 502, 505, 506, 515, 517, 519, 522, 526, 527, 539, 545, 546, 547, 559, 562, 582, 583, 585, 586, 587, 589, 590, 591, 605, 606, 609, 616, 619, 620, 622, 626, 682, 683, 685, 686, 688, 689, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 725, 726, 727, 728, 729, 736, 737, 738, 739, 740, 741, 742, 743, 745, 746, 751, 752, 753, 754, 755, 756, 757, 758, 760, 761, 762, 763, 765, 766, 767, 770, 771, 774, 775, 776, 777, 778, 779, 780, 781, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 799, 801, 803, 804, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 821, 822, 823, 826, 
828, 829, 835, 840, 841, 842, 860, 861, 862, 864, 865, 868, 876, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 898, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 927, 928, 929, 930, 931, 935, 936, 938, 940, 942, 943, 944, 945, 946, 948, 951, 953, 955, 956, 959, 960, 961, 963, 964, 965, 966, 967, 968, 969, 970, 972, 973, 974, 975, 977, 978, 980, 982, 983, 985, 989, 990, 991, 992, 993, 994, 995, 996, 997, 1003, 1007, 1008, 1010, 1011, 1012, 1013, 1014, 1015, 1016, 1017, 1019, 1020, 1022, 1024, 1027, 1028, 1029, 1031, 1035, 1036, 1039, 1042, 1043, 1044, 1046, 1047, 1048, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1058, 1059, 1060, 1062, 1064, 1065, 1066, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1095, 1096, 1097, 1098, 1099, 1100, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1118, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1147, 1148, 1149, 1150, 1151, 1152, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1175, 1176, 1177, 1180, 1181, 1183, 1185, 1186, 1187, 1189, 1191, 1193, 1194, 1196, 1197, 1199, 1200, 1201, 1203, 1204, 1205, 1207, 1208, 1211, 1213, 1215, 1216, 1217, 1219, 1221, 1225, 1226, 1229, 1230, 1231, 1232, 1234, 1235, 1236, 1237, 1238, 1239, 1243, 1247, 1248, 1249, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1272, 1273, 1275, 1276, 1277, 1278, 1280, 1281, 1282, 1283, 1284, 1285, 1286, 1288, 1289, 1290, 1292, 1293, 1294, 1296, 1297, 1298, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1352, 1353, 1355, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1366, 1367, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1386, 1393, 1395, 1399, 1401, 1402, 1403, 1406, 1408, 1409, 1412, 1414, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1548, 1549, 1550, 1551, 1552, 1553, 1555, 1556, 1558, 1559, 1560, 1562, 1563, 1566, 1567, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1586, 1587, 1588, 1589, 1590, 1597, 1600, 1601, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1627, 1628, 1629, 1630, 1631, 1632, 1634, 1641, 1643, 1644, 1649, 1650, 1651, 1653, 1654, 1655, 1657, 1658, 1659, 1668, 1669, 1671, 1673, 1674, 1675, 1676, 1677, 
1684, 1687, 1690, 1691, 1699, 1703, 1704, 1705, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1718, 1719, 1720, 1721, 1723, 1724, 1730, 1731, 1732, 1733, 1734, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1757, 1758, 1759, 1760, 1761, 1764, 1765, 1766, 1768, 1769, 1770, 1771, 1772, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1814, 1815, 1816, 1819, 1820, 1822, 1823, 1824, 1825, 1826, 1827, 1833, 1835, 1836, 1837, 1839, 1840, 1842, 1843, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1853, 1854, 1855, 1857, 1858, 1862, 1864, 1866, 1867, 1868, 1869, 1870, 1871, 1872, 1873, 1874, 1876, 1877, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1892, 1894, 1899, 1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1918, 1919, 1921, 1922, 1923, 1926, 1927, 1928, 1929, 1930, 1934, 1937, 1938, 1939, 1940, 1941, 1942, 1943, 1944, 1945, 1946, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1959, 1960, 1961, 1962, 1964, 1967, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1981, 1982, 1983, 1985, 1986, 1988, 1990, 1991, 1993, 1995, 1997, 1999, 2000, 2001, 2002, 2003, 2004, 2006, 2007, 2008, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2022, 2024, 2025, 2026, 2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035, 2040, 2041, 2043, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2074, 2075, 2076, 2077, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2090, 2091, 2092, 2093, 2095, 2096, 2097, 2098, 2099, 2103, 2104, 2105, 2106, 2107, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116, 2118], "avail": [0, 1, 2, 5, 8, 9, 14, 15, 19, 20, 23, 28, 30, 33, 37, 47, 48, 53, 84, 85, 1003, 1030, 1032, 1033, 1044, 1047, 1054, 1055, 1075, 1076, 1078, 1108, 1203, 1279, 1336, 1337, 1362, 1382, 1404, 1406, 1570, 1579, 1585, 1643, 1684, 1703, 1816, 1870, 1923, 1928, 1964, 1967, 1987, 1989, 1997, 1999, 2000, 2001, 2002, 2011, 2012, 2013, 2015, 2016, 2020, 2023, 2032, 2042, 2045, 2048, 2053, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2067, 2068, 2069, 2070, 2071, 2075, 2091, 2092, 2100, 2104, 2111, 2112], "paramet": [0, 1, 2, 3, 5, 11, 12, 14, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 41, 45, 47, 50, 52, 53, 55, 57, 59, 62, 63, 64, 66, 90, 152, 157, 172, 174, 175, 177, 180, 181, 182, 197, 198, 208, 211, 235, 242, 256, 257, 262, 269, 299, 315, 317, 319, 321, 323, 327, 333, 395, 402, 404, 417, 447, 448, 449, 450, 451, 460, 473, 495, 498, 499, 500, 501, 502, 515, 517, 519, 522, 527, 539, 546, 547, 548, 562, 568, 585, 586, 587, 589, 590, 605, 606, 609, 619, 620, 683, 685, 686, 687, 688, 689, 690, 691, 692, 693, 695, 696, 697, 698, 699, 700, 701, 737, 740, 741, 742, 743, 744, 745, 746, 747, 748, 752, 757, 758, 760, 761, 762, 766, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 779, 781, 782, 783, 784, 785, 787, 788, 789, 790, 791, 793, 795, 797, 799, 801, 802, 803, 816, 821, 822, 823, 824, 825, 826, 827, 828, 829, 840, 841, 842, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 896, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 932, 934, 935, 938, 943, 944, 945, 
946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 972, 973, 974, 975, 977, 978, 981, 984, 986, 989, 990, 991, 992, 993, 994, 995, 996, 997, 1001, 1006, 1007, 1008, 1010, 1011, 1013, 1014, 1015, 1016, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1027, 1028, 1029, 1031, 1035, 1036, 1037, 1039, 1042, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1058, 1059, 1060, 1062, 1064, 1065, 1066, 1067, 1068, 1070, 1071, 1072, 1073, 1074, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1095, 1096, 1097, 1098, 1099, 1100, 1102, 1103, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1113, 1118, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1158, 1159, 1160, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1175, 1176, 1177, 1185, 1187, 1194, 1196, 1205, 1211, 1213, 1214, 1215, 1216, 1226, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1243, 1247, 1249, 1251, 1252, 1254, 1257, 1258, 1259, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1271, 1272, 1273, 1274, 1275, 1276, 1279, 1280, 1283, 1284, 1285, 1286, 1288, 1289, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1366, 1367, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1380, 1385, 1386, 1387, 1388, 1389, 1392, 1393, 1395, 1399, 1401, 1402, 1403, 1408, 1409, 1411, 1412, 1414, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1426, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1558, 1559, 1560, 1562, 1563, 1564, 1566, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1588, 1589, 1590, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1599, 1600, 1601, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1627, 1628, 1629, 1631, 1632, 1634, 1636, 1643, 1644, 1651, 1657, 1658, 1659, 1668, 1669, 1670, 1671, 1673, 1674, 1675, 1676, 1677, 1684, 1690, 1691, 1699, 1703, 1704, 1705, 1706, 1709, 1714, 1715, 1716, 1718, 1719, 1720, 1721, 1722, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1733, 1734, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1758, 1759, 1760, 1761, 1762, 
1763, 1764, 1765, 1766, 1767, 1768, 1769, 1770, 1771, 1772, 1774, 1775, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1790, 1791, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1814, 1815, 1816, 1817, 1819, 1820, 1822, 1823, 1824, 1825, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1857, 1858, 1862, 1864, 1865, 1866, 1867, 1868, 1869, 1870, 1871, 1874, 1875, 1876, 1877, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1894, 1895, 1897, 1899, 1900, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1926, 1927, 1931, 1932, 1936, 1937, 1938, 1939, 1940, 1941, 1942, 1943, 1944, 1945, 1946, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1965, 1966, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1981, 1982, 1983, 1985, 1986, 1988, 1990, 1991, 1992, 1993, 1999, 2000, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2016, 2018, 2020, 2024, 2026, 2027, 2032, 2034, 2035, 2036, 2040, 2041, 2042, 2045, 2047, 2048, 2051, 2053, 2054, 2055, 2057, 2059, 2060, 2063, 2065, 2068, 2069, 2070, 2074, 2075, 2076, 2080, 2081, 2082, 2085, 2087, 2090, 2091, 2093, 2102, 2110, 2112, 2113], "str": [0, 1, 2, 3, 5, 14, 19, 23, 24, 28, 30, 32, 33, 34, 35, 37, 41, 45, 47, 50, 52, 53, 55, 64, 323, 515, 519, 605, 781, 787, 794, 795, 796, 817, 818, 819, 857, 911, 913, 925, 932, 934, 938, 963, 975, 984, 1000, 1008, 1020, 1021, 1022, 1024, 1033, 1034, 1035, 1036, 1037, 1038, 1042, 1050, 1051, 1052, 1064, 1065, 1067, 1068, 1070, 1082, 1103, 1108, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1160, 1165, 1166, 1170, 1175, 1177, 1178, 1184, 1185, 1187, 1222, 1270, 1271, 1272, 1273, 1275, 1276, 1279, 1282, 1318, 1331, 1336, 1337, 1344, 1345, 1374, 1388, 1389, 1419, 1438, 1439, 1445, 1453, 1454, 1455, 1459, 1461, 1469, 1475, 1479, 1485, 1486, 1491, 1492, 1502, 1503, 1504, 1517, 1518, 1526, 1527, 1529, 1530, 1531, 1533, 1536, 1540, 1541, 1544, 1555, 1558, 1559, 1570, 1572, 1574, 1575, 1576, 1577, 1579, 1604, 1605, 1615, 1616, 1624, 1629, 1632, 1643, 1644, 1668, 1671, 1676, 1703, 1715, 1716, 1730, 1731, 1732, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1765, 1766, 1768, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1827, 1858, 1862, 1869, 1870, 1904, 1923, 1966, 1967, 1976, 1986, 1990, 1991, 1992, 2003, 2011, 2013, 2014, 2015, 2016, 2017, 2020, 2026, 2027, 2029, 2032, 2034, 2040, 2042, 2060, 2063, 2065, 2066, 2067, 2068, 2069, 2074, 2075, 2082, 2085, 2087, 2090, 2091, 2099, 2106, 2110, 2112, 2113], "devic": [0, 1, 2, 5, 8, 14, 16, 18, 19, 20, 23, 24, 28, 29, 30, 32, 33, 34, 48, 50, 53, 55, 59, 62, 63, 64, 66, 71, 75, 76, 77, 90, 198, 208, 211, 291, 315, 323, 337, 447, 448, 449, 450, 451, 517, 519, 582, 688, 691, 725, 726, 727, 728, 729, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 751, 752, 753, 754, 755, 756, 757, 868, 880, 882, 883, 911, 913, 940, 943, 944, 946, 953, 955, 977, 980, 998, 999, 1000, 1001, 1002, 1003, 
1004, 1005, 1006, 1010, 1011, 1012, 1013, 1014, 1015, 1016, 1017, 1019, 1020, 1021, 1022, 1023, 1024, 1026, 1027, 1028, 1031, 1035, 1036, 1037, 1039, 1040, 1048, 1050, 1051, 1052, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1071, 1072, 1073, 1074, 1077, 1078, 1079, 1080, 1084, 1085, 1086, 1109, 1110, 1111, 1121, 1126, 1144, 1160, 1163, 1164, 1185, 1218, 1221, 1230, 1231, 1272, 1276, 1280, 1283, 1286, 1292, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1313, 1314, 1315, 1316, 1319, 1320, 1321, 1327, 1330, 1331, 1333, 1334, 1336, 1337, 1343, 1344, 1359, 1362, 1365, 1367, 1373, 1377, 1382, 1385, 1386, 1392, 1393, 1394, 1399, 1400, 1401, 1402, 1404, 1406, 1410, 1430, 1440, 1441, 1442, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1468, 1469, 1477, 1478, 1480, 1488, 1489, 1490, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1526, 1532, 1534, 1541, 1542, 1543, 1544, 1566, 1570, 1572, 1574, 1586, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1624, 1643, 1649, 1684, 1699, 1703, 1704, 1705, 1706, 1715, 1716, 1718, 1719, 1721, 1722, 1723, 1724, 1757, 1764, 1770, 1772, 1775, 1776, 1786, 1826, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1863, 1866, 1870, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1904, 1905, 1909, 1910, 1911, 1912, 1913, 1914, 1927, 1942, 1944, 1953, 1955, 1964, 1965, 1967, 1981, 1982, 1983, 1984, 1985, 1987, 1988, 1990, 1991, 1992, 1993, 1994, 2003, 2004, 2005, 2008, 2009, 2010, 2012, 2013, 2014, 2015, 2016, 2018, 2020, 2026, 2033, 2035, 2041, 2047, 2048, 2049, 2050, 2051, 2053, 2055, 2056, 2057, 2059, 2060, 2063, 2065, 2066, 2067, 2068, 2069, 2070, 2074, 2075, 2080, 2082, 2086, 2087, 2093, 2096, 2099, 2100, 2101, 2102, 2104, 2106, 2108, 2109, 2111, 2113], "possibl": [0, 2, 9, 14, 15, 18, 19, 28, 30, 33, 35, 52, 59, 60, 64, 499, 500, 582, 619, 861, 882, 883, 969, 975, 1017, 1109, 1110, 1111, 1156, 1169, 1186, 1197, 1199, 1200, 1272, 1286, 1313, 1326, 1329, 1330, 1333, 1338, 1344, 1439, 1445, 1526, 1532, 1623, 1624, 1643, 1677, 1703, 1736, 1737, 1846, 1849, 1864, 1915, 1923, 1942, 2013, 2015, 2016, 2020, 2028, 2032, 2033, 2040, 2042, 2044, 2045, 2046, 2047, 2048, 2051, 2052, 2057, 2058, 2059, 2061, 2068, 2070, 2075, 2077, 2083, 2087, 2092, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2107, 2111], "valu": [0, 1, 2, 3, 5, 7, 8, 9, 11, 12, 14, 18, 19, 20, 23, 24, 25, 27, 29, 30, 32, 33, 35, 37, 39, 41, 44, 45, 47, 48, 52, 53, 55, 57, 60, 62, 63, 64, 66, 68, 71, 75, 77, 90, 103, 104, 105, 106, 121, 152, 156, 237, 238, 239, 240, 256, 261, 262, 274, 275, 301, 313, 315, 317, 318, 319, 320, 321, 323, 325, 328, 354, 401, 402, 417, 421, 422, 424, 425, 473, 483, 485, 501, 515, 517, 519, 539, 546, 562, 585, 586, 587, 589, 590, 601, 602, 682, 683, 686, 689, 690, 693, 697, 698, 699, 737, 758, 766, 772, 773, 781, 786, 787, 789, 795, 797, 798, 801, 804, 821, 822, 823, 824, 827, 828, 829, 857, 862, 868, 877, 878, 879, 883, 888, 892, 894, 896, 897, 901, 902, 903, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 931, 940, 942, 944, 945, 946, 951, 953, 959, 963, 967, 968, 970, 981, 992, 993, 996, 1011, 1041, 1050, 1051, 1064, 1078, 1087, 1088, 1091, 1099, 1100, 1109, 1110, 1111, 1113, 1122, 1123, 1125, 1127, 1129, 1130, 1131, 1136, 1138, 1139, 1140, 1141, 1142, 1151, 1156, 1163, 1165, 1166, 1167, 1168, 1172, 1173, 1175, 1176, 1179, 1180, 1181, 1185, 1186, 1187, 1189, 1191, 1195, 1197, 1199, 1200, 1201, 1203, 1204, 1205, 1213, 1215, 1220, 1222, 1226, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 
1243, 1257, 1262, 1263, 1264, 1265, 1268, 1269, 1270, 1272, 1273, 1275, 1279, 1280, 1288, 1291, 1293, 1294, 1297, 1301, 1302, 1304, 1306, 1308, 1309, 1310, 1311, 1312, 1315, 1316, 1317, 1318, 1322, 1325, 1327, 1329, 1330, 1332, 1336, 1337, 1342, 1343, 1344, 1345, 1348, 1353, 1359, 1361, 1362, 1365, 1370, 1372, 1373, 1375, 1378, 1392, 1395, 1412, 1416, 1417, 1418, 1419, 1422, 1423, 1426, 1430, 1435, 1436, 1437, 1438, 1440, 1441, 1442, 1443, 1444, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1467, 1468, 1469, 1470, 1472, 1479, 1480, 1481, 1484, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1511, 1512, 1513, 1516, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1540, 1541, 1555, 1558, 1560, 1561, 1562, 1563, 1564, 1566, 1569, 1570, 1572, 1574, 1575, 1576, 1578, 1579, 1587, 1588, 1589, 1604, 1605, 1607, 1608, 1609, 1614, 1615, 1624, 1625, 1629, 1632, 1634, 1643, 1644, 1645, 1657, 1658, 1659, 1668, 1669, 1670, 1671, 1673, 1676, 1684, 1697, 1698, 1703, 1705, 1706, 1708, 1710, 1713, 1714, 1715, 1716, 1722, 1730, 1731, 1733, 1734, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1750, 1751, 1757, 1760, 1761, 1764, 1766, 1770, 1771, 1775, 1776, 1777, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1802, 1808, 1816, 1820, 1823, 1827, 1829, 1830, 1842, 1844, 1846, 1847, 1848, 1853, 1854, 1855, 1862, 1865, 1874, 1877, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1895, 1899, 1904, 1905, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1923, 1927, 1928, 1939, 1943, 1946, 1949, 1952, 1953, 1954, 1955, 1957, 1960, 1961, 1963, 1979, 2009, 2010, 2011, 2012, 2013, 2014, 2017, 2020, 2023, 2024, 2027, 2029, 2032, 2035, 2040, 2041, 2042, 2044, 2045, 2048, 2049, 2050, 2052, 2053, 2055, 2057, 2059, 2060, 2063, 2065, 2066, 2067, 2068, 2069, 2070, 2073, 2074, 2075, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2091, 2096, 2098, 2099, 2100, 2102, 2105, 2106, 2110, 2111], "xpu": [0, 1, 1221, 1272, 1526, 1981, 1982, 1983, 1986, 1988, 2012, 2020, 2069, 2083], "so": [0, 1, 2, 3, 7, 9, 14, 17, 19, 23, 24, 27, 28, 29, 30, 32, 33, 35, 36, 39, 40, 47, 48, 51, 52, 53, 55, 58, 59, 60, 63, 64, 66, 76, 77, 337, 460, 488, 498, 682, 862, 898, 903, 906, 908, 911, 912, 917, 929, 972, 975, 977, 980, 1009, 1032, 1096, 1098, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1133, 1134, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1159, 1162, 1165, 1169, 1170, 1172, 1173, 1183, 1185, 1187, 1198, 1200, 1211, 1259, 1269, 1272, 1275, 1276, 1282, 1284, 1286, 1328, 1330, 1340, 1345, 1350, 1362, 1384, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1469, 1472, 1479, 1526, 1543, 1560, 1562, 1578, 1588, 1589, 1597, 1607, 1608, 1609, 1632, 1690, 1691, 1706, 1716, 1770, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1891, 1906, 1918, 1923, 1927, 1948, 1960, 1965, 1970, 1989, 2013, 2015, 2016, 2022, 2024, 2026, 2028, 2029, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2045, 2046, 2047, 2048, 2049, 2051, 2052, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2062, 2065, 2067, 2068, 2070, 2074, 2075, 2080, 2081, 2083, 2085, 2086, 2089, 2093, 2096, 2098, 2099, 2101, 2102, 2103, 2105, 2107, 2109, 2110, 2111, 2113, 2118], "The": [0, 1, 2, 3, 5, 7, 8, 11, 12, 14, 15, 18, 19, 20, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 41, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 59, 60, 61, 62, 63, 66, 74, 
75, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 152, 193, 198, 210, 211, 223, 235, 257, 292, 313, 315, 317, 321, 323, 400, 402, 403, 404, 450, 460, 485, 488, 489, 490, 495, 497, 500, 501, 515, 517, 519, 539, 546, 585, 605, 619, 620, 682, 686, 689, 690, 691, 692, 696, 697, 698, 699, 737, 750, 759, 762, 768, 769, 770, 771, 774, 775, 776, 781, 782, 784, 785, 787, 788, 789, 794, 795, 796, 797, 801, 804, 817, 819, 821, 822, 823, 824, 827, 829, 839, 840, 857, 862, 867, 880, 881, 883, 887, 888, 892, 893, 895, 896, 898, 901, 902, 908, 909, 911, 912, 914, 915, 916, 917, 922, 923, 927, 928, 931, 934, 936, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 960, 961, 974, 977, 978, 981, 986, 989, 993, 997, 1010, 1016, 1023, 1039, 1050, 1051, 1053, 1054, 1055, 1064, 1078, 1079, 1080, 1085, 1091, 1095, 1096, 1097, 1098, 1099, 1100, 1102, 1108, 1109, 1113, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1147, 1156, 1158, 1159, 1161, 1162, 1163, 1166, 1169, 1172, 1175, 1176, 1177, 1181, 1183, 1185, 1191, 1192, 1196, 1211, 1215, 1216, 1225, 1226, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1237, 1239, 1243, 1247, 1248, 1249, 1269, 1272, 1275, 1277, 1280, 1283, 1284, 1286, 1288, 1289, 1292, 1293, 1297, 1298, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1318, 1319, 1320, 1322, 1325, 1327, 1328, 1329, 1330, 1331, 1332, 1335, 1336, 1337, 1339, 1342, 1344, 1345, 1360, 1361, 1362, 1363, 1365, 1366, 1367, 1373, 1381, 1383, 1386, 1387, 1388, 1389, 1392, 1393, 1412, 1418, 1421, 1423, 1426, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1468, 1469, 1470, 1472, 1473, 1474, 1477, 1478, 1479, 1480, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1514, 1516, 1517, 1518, 1519, 1520, 1521, 1526, 1529, 1531, 1533, 1540, 1541, 1542, 1543, 1544, 1545, 1555, 1556, 1558, 1566, 1569, 1570, 1575, 1576, 1578, 1579, 1585, 1587, 1588, 1589, 1597, 1600, 1601, 1615, 1616, 1623, 1624, 1625, 1627, 1628, 1631, 1632, 1634, 1643, 1644, 1650, 1657, 1658, 1659, 1671, 1684, 1687, 1703, 1706, 1708, 1709, 1710, 1713, 1714, 1715, 1716, 1718, 1719, 1721, 1722, 1723, 1724, 1725, 1727, 1729, 1730, 1731, 1732, 1734, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1748, 1750, 1751, 1754, 1760, 1764, 1765, 1768, 1770, 1771, 1772, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1816, 1817, 1820, 1823, 1826, 1831, 1832, 1833, 1835, 1837, 1839, 1844, 1846, 1848, 1850, 1851, 1853, 1855, 1858, 1865, 1866, 1867, 1868, 1870, 1874, 1875, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1895, 1901, 1902, 1904, 1905, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1918, 1921, 1922, 1923, 1927, 1928, 1938, 1944, 1945, 1946, 1948, 1949, 1952, 1953, 1954, 1955, 1962, 1963, 1964, 1967, 1970, 1971, 1972, 1974, 1976, 1979, 1981, 1993, 1999, 2000, 2004, 2005, 2011, 2013, 2014, 2015, 2017, 2018, 2020, 2022, 2023, 2024, 2026, 2027, 2028, 2029, 2031, 2032, 2033, 2034, 2035, 2036, 2039, 2040, 2041, 2043, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2067, 2068, 2069, 2070, 2073, 2074, 2075, 2076, 2077, 2078, 
2080, 2081, 2082, 2083, 2085, 2086, 2087, 2089, 2090, 2092, 2093, 2095, 2096, 2097, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2108, 2109, 2110, 2111, 2113, 2115, 2116], "same": [0, 1, 3, 7, 11, 12, 14, 15, 17, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 44, 45, 47, 48, 50, 51, 52, 53, 55, 58, 60, 63, 64, 66, 74, 75, 141, 155, 197, 223, 257, 313, 315, 317, 321, 323, 341, 447, 448, 449, 450, 451, 473, 485, 488, 499, 500, 502, 515, 517, 519, 522, 546, 582, 619, 620, 688, 691, 695, 697, 698, 699, 701, 722, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 747, 748, 758, 760, 763, 764, 765, 766, 767, 781, 787, 790, 791, 795, 796, 798, 802, 804, 816, 821, 824, 826, 827, 862, 864, 881, 882, 883, 895, 901, 903, 905, 908, 909, 911, 912, 913, 914, 915, 916, 922, 923, 943, 945, 946, 955, 957, 959, 962, 967, 968, 969, 975, 978, 986, 989, 1007, 1008, 1010, 1021, 1023, 1042, 1053, 1091, 1096, 1098, 1099, 1105, 1108, 1110, 1111, 1114, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1156, 1161, 1162, 1164, 1165, 1166, 1169, 1170, 1171, 1172, 1175, 1176, 1177, 1195, 1213, 1216, 1226, 1234, 1235, 1243, 1247, 1263, 1269, 1272, 1278, 1283, 1284, 1288, 1289, 1293, 1294, 1302, 1303, 1304, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1340, 1360, 1364, 1366, 1367, 1370, 1372, 1373, 1374, 1375, 1378, 1417, 1420, 1421, 1422, 1428, 1429, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1463, 1464, 1465, 1466, 1467, 1469, 1470, 1472, 1475, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1494, 1495, 1498, 1508, 1509, 1510, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1529, 1530, 1531, 1532, 1534, 1535, 1540, 1541, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1578, 1579, 1582, 1583, 1584, 1597, 1603, 1604, 1605, 1607, 1608, 1609, 1615, 1624, 1632, 1634, 1643, 1644, 1684, 1703, 1711, 1712, 1715, 1716, 1723, 1724, 1730, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1757, 1761, 1766, 1771, 1772, 1776, 1779, 1810, 1814, 1819, 1820, 1823, 1824, 1836, 1838, 1840, 1844, 1846, 1848, 1849, 1853, 1855, 1862, 1865, 1866, 1870, 1877, 1882, 1900, 1909, 1910, 1911, 1913, 1914, 1920, 1921, 1922, 1926, 1927, 1938, 1949, 1960, 1961, 1962, 1963, 1964, 1971, 1972, 1973, 1976, 1981, 2010, 2011, 2013, 2015, 2016, 2020, 2024, 2029, 2032, 2033, 2034, 2035, 2041, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2057, 2058, 2059, 2060, 2063, 2065, 2067, 2068, 2069, 2070, 2073, 2075, 2076, 2077, 2080, 2082, 2083, 2084, 2085, 2086, 2087, 2090, 2091, 2095, 2096, 2097, 2098, 2099, 2101, 2102, 2107, 2109, 2110, 2111, 2112], "attribut": [0, 1, 11, 12, 23, 24, 28, 29, 36, 53, 64, 66, 68, 73, 82, 87, 152, 292, 497, 498, 729, 740, 741, 742, 743, 744, 745, 747, 748, 758, 766, 793, 803, 804, 817, 818, 819, 840, 841, 842, 861, 862, 892, 896, 901, 903, 906, 908, 909, 1159, 1184, 1185, 1269, 1271, 1272, 1273, 1275, 1276, 1278, 1283, 1284, 1345, 1462, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1526, 1570, 1716, 1717, 1736, 1737, 1758, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 
1796, 1797, 1923, 1964, 1965, 1968, 2012, 2014, 2017, 2020, 2028, 2035, 2041, 2042, 2045, 2048, 2049, 2055, 2060, 2065, 2068, 2070, 2075, 2086, 2087, 2102, 2110, 2112, 2116], "thu": [0, 1, 11, 23, 28, 33, 35, 48, 53, 55, 64, 787, 824, 827, 857, 1108, 1129, 1336, 1367, 1465, 1579, 1703, 1716, 1783, 1784, 1794, 1796, 1912, 2013, 2016, 2023, 2026, 2042, 2044, 2048, 2050, 2052, 2054, 2055, 2065, 2070, 2075, 2080, 2084, 2086, 2087], "you": [0, 1, 3, 4, 5, 7, 8, 9, 11, 14, 15, 17, 19, 22, 23, 24, 28, 32, 33, 34, 35, 37, 39, 44, 46, 47, 48, 50, 51, 52, 55, 56, 57, 58, 59, 60, 63, 64, 65, 66, 68, 76, 77, 84, 85, 152, 256, 337, 450, 488, 490, 501, 682, 691, 736, 737, 750, 759, 783, 892, 893, 894, 895, 896, 903, 905, 906, 908, 909, 911, 912, 913, 917, 918, 929, 957, 975, 977, 1008, 1031, 1042, 1044, 1053, 1054, 1075, 1108, 1165, 1166, 1169, 1170, 1171, 1172, 1177, 1181, 1185, 1186, 1187, 1196, 1200, 1272, 1276, 1277, 1285, 1288, 1289, 1290, 1318, 1336, 1344, 1367, 1374, 1377, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1496, 1522, 1523, 1524, 1526, 1531, 1532, 1533, 1542, 1561, 1570, 1572, 1574, 1579, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1643, 1649, 1684, 1702, 1703, 1706, 1716, 1731, 1758, 1766, 1768, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1808, 1826, 1849, 1858, 1866, 1909, 1910, 1911, 1913, 1914, 1918, 1965, 1967, 1976, 1988, 1999, 2001, 2011, 2013, 2015, 2016, 2018, 2020, 2023, 2024, 2026, 2028, 2029, 2032, 2033, 2035, 2040, 2041, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2073, 2074, 2075, 2076, 2077, 2080, 2083, 2084, 2085, 2086, 2087, 2089, 2092, 2093, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2103, 2104, 2105, 2107, 2108, 2109, 2110, 2111, 2112, 2113, 2114, 2118], "obtain": [0, 3, 23, 28, 35, 40, 63, 64, 90, 498, 1216, 1392, 1445, 1533, 1616, 1716, 1787, 1793, 1816, 1833, 1928, 2016, 2032, 2033, 2044, 2059, 2065, 2067, 2069, 2070, 2109], "tensor": [0, 2, 5, 7, 8, 12, 14, 17, 18, 23, 24, 27, 28, 29, 30, 32, 33, 35, 36, 53, 55, 58, 59, 60, 61, 62, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 86, 90, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 722, 734, 736, 737, 740, 741, 742, 743, 744, 745, 746, 747, 748, 750, 752, 757, 758, 759, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 786, 787, 788, 789, 790, 791, 792, 796, 800, 801, 802, 803, 804, 821, 822, 823, 824, 826, 827, 829, 868, 869, 870, 871, 872, 873, 874, 875, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 898, 899, 901, 902, 903, 904, 905, 906, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 922, 923, 925, 927, 928, 929, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 977, 980, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 1007, 1020, 1021, 1022, 1023, 1024, 1031, 1039, 1040, 1046, 1050, 1053, 1056, 1060, 1072, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 
1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1176, 1177, 1184, 1185, 1187, 1192, 1195, 1196, 1209, 1213, 1214, 1215, 1216, 1217, 1218, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1249, 1250, 1251, 1252, 1254, 1257, 1259, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1272, 1273, 1275, 1276, 1278, 1279, 1280, 1283, 1284, 1286, 1288, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1366, 1367, 1368, 1369, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1380, 1381, 1386, 1395, 1411, 1412, 1414, 1415, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1425, 1426, 1430, 1434, 1435, 1438, 1439, 1443, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1471, 1472, 1477, 1478, 1479, 1485, 1491, 1492, 1496, 1497, 1513, 1516, 1517, 1518, 1519, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1532, 1533, 1534, 1536, 1537, 1538, 1539, 1541, 1542, 1544, 1548, 1549, 1550, 1551, 1552, 1553, 1559, 1560, 1561, 1562, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1587, 1588, 1589, 1591, 1592, 1593, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1715, 1716, 1717, 1718, 1720, 1721, 1722, 1723, 1724, 1726, 1728, 1729, 1730, 1731, 1732, 1733, 1734, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1754, 1757, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 1769, 1770, 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1813, 1814, 1815, 1816, 1817, 1818, 1819, 1820, 1821, 1822, 1823, 1824, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1856, 1857, 1858, 1859, 1860, 1861, 1862, 1864, 1865, 1866, 1867, 1868, 1871, 1874, 1877, 1878, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 
1890, 1891, 1892, 1893, 1894, 1895, 1897, 1898, 1899, 1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1937, 1938, 1939, 1940, 1941, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1955, 1956, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1965, 1967, 1969, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1988, 1993, 1994, 2009, 2010, 2012, 2013, 2015, 2016, 2020, 2023, 2036, 2040, 2041, 2043, 2044, 2045, 2046, 2047, 2049, 2050, 2051, 2053, 2055, 2056, 2057, 2058, 2059, 2061, 2063, 2066, 2067, 2068, 2069, 2071, 2074, 2075, 2076, 2079, 2081, 2082, 2085, 2087, 2090, 2091, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2102, 2104, 2106, 2109, 2110, 2111, 2112, 2113], "class": [0, 1, 2, 3, 12, 15, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 39, 41, 44, 45, 47, 49, 50, 52, 53, 55, 60, 63, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 90, 559, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 838, 839, 840, 841, 842, 843, 844, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 897, 898, 903, 904, 905, 906, 907, 908, 909, 918, 919, 920, 930, 931, 938, 939, 940, 941, 942, 998, 999, 1008, 1009, 1010, 1011, 1013, 1014, 1029, 1031, 1042, 1083, 1112, 1175, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1270, 1271, 1272, 1275, 1277, 1278, 1279, 1283, 1284, 1287, 1288, 1289, 1290, 1385, 1397, 1398, 1399, 1403, 1409, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1586, 1587, 1605, 1615, 1623, 1668, 1670, 1684, 1706, 1715, 1716, 1717, 1718, 1719, 1733, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1757, 1764, 1769, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1833, 1902, 1965, 1981, 1982, 1983, 1986, 1988, 2012, 2014, 2017, 2020, 2023, 2028, 2029, 2032, 2034, 2036, 2037, 2041, 2042, 
2045, 2048, 2049, 2050, 2051, 2055, 2057, 2060, 2063, 2069, 2070, 2072, 2073, 2075, 2076, 2079, 2080, 2082, 2083, 2085, 2087, 2089, 2090, 2091, 2093, 2097, 2098, 2099, 2100, 2102, 2107, 2109, 2110, 2111, 2112, 2113, 2116], "dtype": [0, 1, 5, 11, 12, 18, 21, 23, 24, 28, 30, 33, 52, 53, 55, 62, 64, 66, 74, 75, 87, 155, 156, 193, 210, 214, 215, 216, 217, 244, 313, 315, 317, 319, 321, 323, 403, 404, 411, 431, 434, 447, 448, 449, 450, 451, 455, 460, 472, 483, 485, 515, 517, 567, 582, 583, 605, 619, 689, 691, 694, 695, 699, 701, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 747, 748, 751, 752, 753, 754, 755, 756, 757, 758, 760, 762, 763, 766, 767, 774, 775, 776, 796, 797, 801, 821, 822, 823, 824, 825, 826, 827, 828, 829, 844, 861, 862, 864, 865, 868, 882, 883, 903, 904, 906, 908, 909, 911, 913, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 960, 967, 968, 977, 986, 989, 990, 991, 997, 1007, 1023, 1089, 1090, 1109, 1110, 1111, 1121, 1125, 1126, 1127, 1133, 1134, 1144, 1151, 1156, 1158, 1160, 1161, 1162, 1163, 1164, 1185, 1219, 1230, 1231, 1243, 1272, 1276, 1286, 1292, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1354, 1355, 1356, 1357, 1359, 1362, 1363, 1367, 1371, 1372, 1373, 1376, 1377, 1412, 1416, 1417, 1418, 1420, 1430, 1439, 1440, 1441, 1442, 1443, 1445, 1446, 1447, 1448, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1468, 1469, 1472, 1477, 1478, 1480, 1488, 1489, 1490, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1526, 1532, 1533, 1534, 1541, 1542, 1543, 1544, 1548, 1549, 1550, 1551, 1552, 1566, 1570, 1572, 1574, 1578, 1579, 1580, 1581, 1586, 1587, 1588, 1589, 1599, 1615, 1616, 1643, 1649, 1651, 1684, 1690, 1691, 1706, 1715, 1718, 1719, 1723, 1724, 1733, 1736, 1737, 1757, 1771, 1775, 1776, 1777, 1779, 1814, 1820, 1824, 1825, 1828, 1829, 1830, 1831, 1832, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1844, 1852, 1855, 1867, 1868, 1870, 1871, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1898, 1901, 1903, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1926, 1927, 1939, 1942, 1953, 1955, 1960, 1964, 1965, 1967, 1974, 1975, 1979, 2009, 2010, 2012, 2013, 2014, 2015, 2016, 2018, 2020, 2023, 2034, 2035, 2045, 2048, 2055, 2058, 2063, 2065, 2066, 2070, 2071, 2072, 2080, 2081, 2082, 2085, 2086, 2087, 2100, 2101, 2102, 2106, 2111, 2116], "none": [0, 1, 2, 3, 5, 12, 14, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 38, 40, 44, 45, 47, 50, 52, 53, 55, 59, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 90, 114, 116, 117, 118, 120, 136, 137, 140, 152, 155, 156, 158, 175, 187, 188, 189, 190, 206, 207, 209, 211, 214, 215, 216, 217, 231, 235, 237, 238, 239, 240, 260, 288, 292, 303, 337, 353, 356, 379, 409, 411, 412, 413, 416, 423, 429, 430, 431, 432, 433, 434, 447, 448, 449, 450, 451, 455, 456, 472, 481, 483, 489, 490, 496, 505, 515, 522, 539, 540, 554, 555, 557, 558, 567, 582, 583, 585, 590, 592, 605, 610, 611, 612, 617, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 657, 659, 661, 662, 664, 666, 668, 670, 672, 674, 676, 678, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 695, 697, 698, 699, 700, 701, 714, 715, 716, 717, 718, 719, 720, 721, 722, 725, 726, 727, 728, 729, 732, 733, 734, 735, 736, 737, 738, 739, 
740, 741, 742, 743, 744, 745, 747, 748, 751, 752, 753, 754, 755, 756, 757, 768, 769, 770, 771, 781, 782, 783, 784, 785, 787, 788, 789, 790, 791, 795, 796, 797, 799, 801, 816, 819, 821, 822, 823, 824, 827, 828, 840, 841, 842, 860, 861, 862, 863, 864, 865, 868, 869, 870, 871, 872, 873, 874, 875, 877, 878, 881, 882, 883, 884, 885, 886, 887, 888, 892, 893, 894, 895, 896, 898, 900, 901, 902, 903, 906, 907, 908, 909, 912, 914, 915, 916, 917, 922, 923, 927, 928, 938, 942, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 955, 959, 962, 964, 965, 966, 967, 968, 970, 971, 973, 975, 977, 981, 986, 987, 988, 991, 992, 994, 995, 996, 997, 1001, 1006, 1007, 1008, 1010, 1011, 1013, 1014, 1015, 1019, 1020, 1022, 1023, 1024, 1027, 1028, 1029, 1035, 1036, 1042, 1052, 1053, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1064, 1065, 1066, 1071, 1072, 1073, 1074, 1078, 1081, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1095, 1100, 1101, 1103, 1104, 1105, 1107, 1109, 1110, 1111, 1112, 1113, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1160, 1163, 1164, 1165, 1166, 1167, 1171, 1174, 1175, 1177, 1180, 1185, 1187, 1189, 1190, 1192, 1196, 1197, 1203, 1213, 1214, 1215, 1216, 1217, 1221, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1237, 1238, 1239, 1240, 1241, 1242, 1244, 1245, 1246, 1247, 1249, 1250, 1266, 1267, 1269, 1271, 1272, 1276, 1280, 1282, 1283, 1284, 1286, 1288, 1289, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1354, 1355, 1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1366, 1367, 1369, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1395, 1399, 1401, 1402, 1403, 1408, 1409, 1411, 1412, 1413, 1414, 1415, 1416, 1417, 1418, 1419, 1420, 1422, 1423, 1424, 1425, 1426, 1428, 1429, 1430, 1432, 1433, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1468, 1469, 1471, 1473, 1474, 1475, 1477, 1478, 1479, 1480, 1484, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1536, 1537, 1540, 1541, 1542, 1543, 1544, 1558, 1559, 1560, 1561, 1562, 1566, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1579, 1580, 1581, 1592, 1593, 1597, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1623, 1624, 1627, 1628, 1629, 1630, 1632, 1633, 1640, 1642, 1643, 1644, 1645, 1646, 1649, 1651, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1664, 1665, 1666, 1667, 1668, 1669, 1671, 1673, 1676, 1681, 1684, 1688, 1689, 1690, 1691, 1699, 1700, 1701, 1703, 1704, 1705, 1708, 1709, 1710, 1713, 1714, 1715, 1716, 1717, 1718, 1719, 1720, 1721, 1722, 1730, 1731, 1732, 1735, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1757, 1759, 1760, 1762, 1765, 1766, 1768, 1769, 1770, 1771, 1772, 1773, 1775, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 
1791, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1814, 1815, 1816, 1819, 1820, 1821, 1823, 1824, 1826, 1827, 1830, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1845, 1846, 1847, 1848, 1853, 1855, 1856, 1857, 1862, 1874, 1877, 1878, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1894, 1895, 1898, 1899, 1903, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1916, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1939, 1940, 1941, 1942, 1944, 1946, 1949, 1951, 1952, 1953, 1954, 1955, 1956, 1957, 1960, 1961, 1965, 1967, 1970, 1971, 1972, 1973, 1976, 1978, 1979, 1980, 1981, 1982, 1983, 1985, 1986, 1990, 1991, 1992, 2006, 2007, 2008, 2009, 2010, 2011, 2013, 2014, 2015, 2016, 2017, 2020, 2026, 2027, 2029, 2032, 2033, 2034, 2035, 2040, 2042, 2045, 2048, 2049, 2055, 2057, 2060, 2063, 2065, 2068, 2069, 2070, 2072, 2074, 2075, 2081, 2082, 2083, 2085, 2086, 2087, 2090, 2091, 2097, 2099, 2100, 2102, 2106, 2111, 2112, 2113], "enabl": [0, 1, 2, 3, 5, 8, 12, 18, 20, 21, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 48, 52, 55, 62, 81, 87, 89, 490, 505, 506, 682, 762, 799, 814, 815, 903, 906, 908, 918, 919, 920, 929, 975, 989, 1008, 1053, 1108, 1112, 1255, 1256, 1274, 1281, 1388, 1389, 1477, 1496, 1542, 1543, 1573, 1590, 1684, 1716, 1733, 1734, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1769, 1796, 1871, 1874, 1902, 1964, 2016, 2022, 2023, 2026, 2030, 2031, 2035, 2041, 2042, 2043, 2044, 2045, 2048, 2051, 2054, 2056, 2058, 2059, 2063, 2065, 2069, 2074, 2075, 2076, 2080, 2089, 2091, 2092, 2093, 2095, 2097, 2099, 2100, 2101, 2102, 2105, 2109, 2111, 2112, 2113, 2115], "true": [0, 1, 2, 3, 5, 12, 14, 19, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 37, 50, 52, 55, 60, 61, 63, 64, 66, 74, 75, 152, 198, 211, 262, 321, 323, 330, 331, 332, 333, 334, 335, 337, 338, 339, 340, 341, 342, 343, 344, 345, 353, 398, 402, 404, 417, 450, 460, 462, 473, 479, 489, 490, 497, 498, 501, 506, 518, 519, 558, 569, 582, 583, 592, 596, 605, 611, 619, 682, 689, 694, 695, 696, 697, 698, 699, 701, 720, 721, 722, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 740, 741, 742, 743, 744, 745, 748, 751, 756, 758, 762, 763, 766, 767, 770, 771, 781, 787, 788, 799, 816, 828, 843, 862, 876, 878, 879, 883, 892, 895, 896, 898, 903, 904, 906, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 925, 927, 928, 938, 944, 947, 950, 952, 953, 959, 960, 966, 974, 975, 978, 981, 983, 989, 990, 1010, 1048, 1103, 1109, 1110, 1111, 1112, 1113, 1114, 1130, 1131, 1137, 1138, 1160, 1162, 1165, 1166, 1167, 1168, 1170, 1171, 1172, 1176, 1177, 1185, 1187, 1197, 1199, 1200, 1204, 1205, 1208, 1211, 1213, 1215, 1229, 1230, 1231, 1234, 1235, 1246, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1272, 1276, 1277, 1279, 1282, 1288, 1289, 1290, 1292, 1294, 1297, 1302, 1303, 1314, 1315, 1316, 1319, 1320, 1321, 1322, 1325, 1327, 1329, 1330, 1331, 1333, 1334, 1335, 1336, 1338, 1339, 1342, 1344, 1345, 1354, 1355, 1356, 1357, 1360, 1361, 1362, 1364, 1366, 1370, 1372, 1373, 1374, 1375, 1378, 1385, 1406, 1412, 1417, 1420, 1423, 1426, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1473, 1474, 1477, 1478, 1479, 1480, 1485, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 
1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1532, 1533, 1535, 1540, 1541, 1542, 1543, 1544, 1558, 1559, 1566, 1570, 1572, 1573, 1574, 1575, 1576, 1579, 1580, 1589, 1597, 1599, 1600, 1601, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1616, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1627, 1628, 1632, 1634, 1636, 1642, 1643, 1644, 1657, 1658, 1659, 1668, 1676, 1684, 1703, 1704, 1706, 1709, 1715, 1716, 1717, 1719, 1721, 1725, 1726, 1728, 1730, 1731, 1732, 1735, 1736, 1737, 1749, 1757, 1758, 1759, 1760, 1761, 1763, 1764, 1765, 1766, 1768, 1769, 1770, 1771, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1814, 1816, 1824, 1826, 1827, 1831, 1832, 1833, 1850, 1851, 1858, 1861, 1862, 1870, 1871, 1874, 1876, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1899, 1901, 1902, 1904, 1912, 1921, 1922, 1923, 1926, 1927, 1942, 1946, 1951, 1960, 1961, 1964, 1965, 1967, 1970, 1971, 1972, 1976, 1979, 2011, 2014, 2015, 2016, 2020, 2023, 2024, 2025, 2027, 2028, 2032, 2033, 2034, 2035, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2058, 2059, 2060, 2063, 2064, 2065, 2068, 2069, 2070, 2071, 2072, 2074, 2075, 2076, 2079, 2080, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2098, 2099, 2100, 2102, 2103, 2104, 2106, 2107, 2109, 2110, 2111, 2112], "cache_en": [0, 1053], "instanc": [0, 2, 3, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 44, 47, 48, 50, 52, 55, 60, 64, 141, 619, 682, 736, 750, 759, 795, 798, 826, 828, 844, 861, 1008, 1108, 1159, 1270, 1272, 1284, 1288, 1345, 1468, 1469, 1472, 1488, 1489, 1490, 1498, 1526, 1536, 1571, 1573, 1574, 1578, 1587, 1588, 1589, 1642, 1716, 1731, 1743, 1757, 1765, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1912, 1943, 2013, 2015, 2017, 2022, 2026, 2028, 2032, 2034, 2041, 2042, 2045, 2047, 2048, 2050, 2051, 2055, 2059, 2063, 2068, 2070, 2075, 2076, 2077, 2080, 2082, 2087, 2096, 2110, 2112], "serv": [0, 7, 8, 15, 28, 89, 2023, 2063, 2067, 2075, 2076, 2097, 2102, 2106], "context": [0, 2, 5, 23, 28, 30, 32, 34, 37, 38, 47, 48, 52, 55, 64, 66, 152, 794, 892, 893, 894, 896, 898, 908, 909, 917, 918, 919, 920, 922, 938, 981, 999, 1005, 1014, 1029, 1031, 1042, 1048, 1053, 1060, 1068, 1081, 1083, 1112, 1167, 1171, 1176, 1187, 1192, 1272, 1388, 1399, 1403, 1408, 1409, 1526, 1585, 1590, 1684, 1706, 1716, 1717, 1734, 1736, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1902, 1983, 1986, 1988, 2006, 2007, 2012, 2016, 2017, 2024, 2028, 2032, 2041, 2042, 2045, 2048, 2053, 2054, 2057, 2063, 2065, 2068, 2069, 2074, 2075, 2077, 2080, 2083, 2089, 2098, 2099, 2101, 2102, 2109, 2113], "manag": [0, 1, 2, 5, 7, 20, 28, 30, 32, 33, 34, 35, 37, 38, 45, 47, 48, 50, 52, 55, 64, 66, 90, 488, 898, 918, 919, 920, 938, 981, 999, 1005, 1008, 1011, 1014, 1015, 1016, 1018, 1029, 1031, 1032, 1033, 1042, 1043, 1053, 1056, 1058, 1059, 1060, 1062, 1063, 1064, 1065, 1068, 1072, 1073, 1074, 1081, 1083, 1112, 1167, 1171, 1176, 1187, 1388, 1399, 1403, 1408, 1409, 1543, 1585, 1590, 1684, 1716, 1733, 1734, 1736, 1769, 1902, 1983, 1986, 1988, 2006, 2007, 2012, 2016, 2017, 2024, 2028, 2042, 2048, 2050, 2051, 2054, 2063, 2065, 2069, 2074, 2075, 2076, 2080, 2083, 2089, 2098, 2101, 2102, 2114], "decor": [0, 1, 35, 40, 44, 48, 64, 77, 903, 906, 908, 918, 938, 977, 981, 1068, 
1112, 1277, 1278, 1284, 1290, 1769, 1901, 1902, 2013, 2015, 2016, 2020, 2041, 2042, 2048, 2075, 2097, 2099, 2100, 2102, 2103, 2112], "allow": [0, 1, 2, 3, 5, 7, 8, 9, 11, 12, 14, 15, 19, 23, 24, 28, 29, 30, 33, 35, 37, 39, 47, 48, 52, 55, 56, 58, 59, 60, 61, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 152, 515, 682, 737, 795, 799, 801, 826, 896, 900, 901, 917, 960, 974, 978, 989, 1050, 1053, 1078, 1108, 1170, 1187, 1192, 1272, 1277, 1290, 1328, 1350, 1392, 1421, 1435, 1436, 1437, 1461, 1462, 1479, 1519, 1520, 1521, 1526, 1529, 1532, 1555, 1570, 1716, 1722, 1770, 1779, 1795, 1810, 1904, 1964, 2011, 2015, 2016, 2023, 2024, 2033, 2034, 2035, 2041, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2053, 2054, 2055, 2057, 2058, 2060, 2063, 2065, 2067, 2068, 2069, 2070, 2072, 2075, 2080, 2083, 2084, 2085, 2087, 2098, 2099, 2100, 2101, 2102, 2107, 2110, 2111, 2114], "region": [0, 3, 5, 35, 44, 770, 771, 975, 1435, 1436, 1437, 1473, 1474, 1484, 1486, 1519, 1520, 1521, 1600, 1601, 1627, 1628, 1658, 1659, 2013, 2032, 2041, 2045, 2088, 2101, 2102], "your": [0, 1, 2, 4, 7, 8, 9, 11, 14, 15, 17, 19, 23, 27, 28, 32, 34, 35, 38, 39, 40, 44, 46, 47, 48, 50, 51, 52, 55, 59, 60, 63, 64, 488, 896, 903, 906, 908, 911, 912, 917, 918, 975, 977, 978, 1181, 1186, 1187, 1272, 1276, 1277, 1282, 1285, 1288, 1289, 1290, 1526, 1533, 1574, 1716, 1733, 1736, 1769, 1797, 1871, 1960, 1965, 1967, 2012, 2013, 2015, 2016, 2020, 2022, 2024, 2032, 2033, 2034, 2036, 2041, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2053, 2054, 2055, 2056, 2058, 2059, 2061, 2065, 2067, 2071, 2074, 2076, 2080, 2085, 2086, 2089, 2092, 2093, 2095, 2096, 2097, 2100, 2101, 2102, 2103, 2104, 2105, 2107, 2108, 2109, 2111, 2112, 2113, 2118], "script": [0, 2, 4, 18, 23, 28, 31, 37, 40, 46, 48, 50, 52, 1273, 1275, 1276, 1277, 1278, 1279, 1282, 1283, 1285, 1287, 1288, 1290, 2011, 2012, 2015, 2016, 2026, 2044, 2054, 2056, 2060, 2062, 2063, 2068, 2070, 2075, 2092, 2093, 2095, 2097, 2099, 2104, 2105, 2108], "run": [0, 1, 2, 3, 4, 5, 7, 9, 10, 12, 14, 15, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 39, 40, 45, 46, 47, 48, 50, 51, 52, 53, 55, 56, 57, 58, 60, 61, 63, 64, 66, 152, 488, 490, 682, 794, 800, 819, 821, 822, 823, 824, 827, 860, 864, 865, 866, 896, 917, 918, 922, 923, 938, 975, 977, 1052, 1053, 1166, 1171, 1177, 1185, 1196, 1212, 1272, 1273, 1275, 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1289, 1309, 1318, 1344, 1345, 1373, 1440, 1441, 1442, 1462, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1526, 1541, 1555, 1566, 1684, 1706, 1709, 1715, 1716, 1723, 1724, 1725, 1726, 1727, 1728, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1870, 1873, 1906, 1953, 1955, 1964, 1976, 2012, 2013, 2016, 2020, 2026, 2028, 2029, 2030, 2032, 2040, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2063, 2065, 2067, 2068, 2070, 2073, 2074, 2075, 2076, 2077, 2080, 2085, 2089, 2090, 2091, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2107, 2109, 2110, 2111, 2113], "In": [0, 2, 3, 4, 5, 7, 9, 15, 18, 19, 23, 28, 30, 32, 33, 34, 35, 40, 47, 48, 50, 52, 53, 55, 57, 58, 59, 60, 63, 64, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 123, 125, 127, 129, 132, 133, 135, 143, 145, 148, 149, 151, 154, 156, 160, 162, 164, 166, 168, 170, 179, 188, 196, 200, 203, 205, 215, 217, 223, 233, 238, 240, 246, 249, 251, 253, 255, 259, 260, 264, 271, 273, 275, 279, 281, 285, 287, 294, 296, 298, 306, 308, 310, 312, 358, 360, 362, 364, 
366, 368, 370, 373, 375, 377, 378, 385, 387, 389, 391, 393, 397, 422, 425, 428, 430, 441, 443, 445, 453, 458, 468, 471, 487, 488, 492, 494, 510, 513, 524, 529, 531, 534, 536, 538, 551, 553, 555, 564, 566, 573, 577, 579, 595, 598, 600, 602, 604, 614, 624, 682, 762, 802, 826, 857, 903, 906, 908, 909, 942, 951, 959, 990, 991, 1007, 1053, 1064, 1077, 1078, 1082, 1083, 1108, 1129, 1131, 1139, 1140, 1141, 1156, 1167, 1171, 1174, 1176, 1181, 1183, 1186, 1197, 1269, 1270, 1272, 1282, 1285, 1286, 1288, 1304, 1313, 1318, 1319, 1320, 1328, 1331, 1336, 1341, 1345, 1350, 1353, 1362, 1367, 1373, 1374, 1409, 1417, 1435, 1436, 1437, 1439, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1464, 1465, 1466, 1470, 1472, 1477, 1486, 1496, 1511, 1519, 1520, 1521, 1526, 1532, 1570, 1572, 1574, 1578, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1622, 1632, 1639, 1641, 1648, 1677, 1680, 1683, 1684, 1698, 1716, 1723, 1724, 1730, 1736, 1737, 1769, 1797, 1808, 1810, 1846, 1862, 1864, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1923, 1927, 1928, 1951, 1964, 1965, 1967, 1973, 1974, 2011, 2012, 2013, 2015, 2016, 2020, 2022, 2023, 2024, 2032, 2033, 2034, 2035, 2040, 2041, 2044, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2057, 2058, 2059, 2060, 2065, 2067, 2068, 2069, 2070, 2071, 2072, 2075, 2076, 2077, 2080, 2085, 2087, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2108, 2109, 2110, 2111, 2113], "an": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 18, 19, 20, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 44, 45, 46, 47, 48, 50, 51, 53, 55, 56, 57, 58, 59, 60, 62, 63, 64, 66, 81, 82, 83, 85, 87, 88, 90, 152, 156, 192, 244, 256, 315, 317, 323, 325, 330, 337, 417, 488, 501, 515, 517, 519, 539, 547, 562, 609, 616, 619, 682, 689, 690, 737, 743, 744, 745, 758, 762, 767, 781, 787, 789, 791, 794, 798, 817, 819, 825, 828, 857, 864, 865, 867, 881, 882, 883, 892, 894, 895, 896, 901, 903, 904, 906, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 930, 931, 932, 934, 935, 942, 946, 962, 972, 975, 977, 990, 991, 993, 1008, 1010, 1011, 1013, 1020, 1021, 1022, 1023, 1024, 1042, 1043, 1050, 1051, 1053, 1065, 1067, 1068, 1078, 1082, 1083, 1093, 1106, 1108, 1109, 1110, 1111, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1148, 1151, 1159, 1162, 1165, 1166, 1169, 1170, 1171, 1173, 1175, 1177, 1180, 1181, 1183, 1185, 1186, 1187, 1191, 1193, 1196, 1197, 1203, 1213, 1216, 1226, 1234, 1235, 1236, 1269, 1270, 1272, 1275, 1276, 1278, 1280, 1283, 1284, 1286, 1288, 1289, 1290, 1291, 1302, 1303, 1308, 1309, 1314, 1316, 1318, 1321, 1322, 1326, 1329, 1331, 1334, 1335, 1342, 1344, 1345, 1362, 1364, 1367, 1373, 1385, 1392, 1412, 1421, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1441, 1442, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1472, 1473, 1474, 1477, 1485, 1489, 1490, 1493, 1494, 1495, 1496, 1498, 1508, 1509, 1510, 1511, 1513, 1514, 1516, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1528, 1532, 1533, 1534, 1536, 1537, 1538, 1539, 1541, 1542, 1544, 1546, 1555, 1558, 1560, 1561, 1562, 1566, 1570, 1571, 1572, 1573, 1574, 1575, 1578, 1580, 1581, 1585, 1586, 1589, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1599, 1607, 1608, 1609, 1610, 1611, 1612, 1617, 1623, 1624, 1626, 1627, 1628, 1632, 1643, 1650, 1651, 1653, 1654, 1655, 1657, 1658, 1659, 1684, 1688, 1703, 1705, 1706, 1709, 1715, 1716, 1720, 1721, 1722, 1723, 1724, 1729, 
1730, 1734, 1736, 1743, 1760, 1767, 1769, 1771, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1789, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1826, 1831, 1832, 1833, 1852, 1854, 1858, 1866, 1867, 1869, 1877, 1883, 1904, 1907, 1908, 1912, 1915, 1920, 1923, 1927, 1928, 1942, 1943, 1959, 1960, 1961, 1962, 1964, 1967, 1974, 1975, 1976, 1977, 1982, 2010, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2022, 2023, 2024, 2026, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035, 2036, 2040, 2041, 2042, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2069, 2070, 2071, 2072, 2073, 2075, 2076, 2077, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2090, 2091, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2108, 2109, 2110, 2111, 2112, 2113, 2114, 2116], "chosen": [0, 17, 19, 60, 1096, 1294, 1463, 1684, 1874, 1899, 1946, 2035, 2041, 2045, 2048, 2057, 2065, 2070, 2111], "improv": [0, 1, 3, 9, 14, 21, 24, 28, 30, 52, 55, 64, 762, 806, 807, 808, 911, 913, 917, 993, 1226, 1463, 1477, 1496, 1542, 1573, 1684, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1810, 2012, 2013, 2041, 2047, 2048, 2049, 2058, 2059, 2063, 2070, 2071, 2075, 2105, 2107, 2109, 2111, 2113, 2114], "perform": [0, 1, 2, 3, 4, 5, 11, 14, 21, 22, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 47, 48, 52, 55, 59, 63, 64, 83, 121, 208, 211, 460, 488, 582, 605, 619, 688, 689, 690, 691, 692, 693, 762, 783, 806, 807, 808, 850, 861, 864, 903, 906, 908, 911, 913, 914, 917, 918, 922, 943, 955, 975, 990, 991, 1015, 1064, 1089, 1090, 1103, 1128, 1151, 1153, 1165, 1166, 1169, 1181, 1201, 1237, 1272, 1282, 1283, 1288, 1316, 1318, 1321, 1325, 1328, 1329, 1333, 1334, 1342, 1343, 1345, 1350, 1359, 1372, 1377, 1388, 1389, 1414, 1417, 1420, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1465, 1468, 1469, 1477, 1496, 1526, 1532, 1534, 1542, 1555, 1570, 1573, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1651, 1669, 1684, 1690, 1691, 1706, 1716, 1718, 1719, 1731, 1736, 1757, 1764, 1766, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1810, 1816, 1824, 1852, 1866, 1870, 1897, 1903, 1904, 1905, 1906, 1926, 1928, 1964, 2012, 2013, 2016, 2020, 2024, 2029, 2030, 2032, 2033, 2034, 2036, 2040, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2053, 2056, 2057, 2058, 2059, 2065, 2067, 2068, 2069, 2070, 2071, 2073, 2075, 2076, 2077, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2092, 2093, 2095, 2097, 2098, 2099, 2100, 2102, 2103, 2104, 2105, 2107, 2112, 2113, 2114], "while": [0, 2, 3, 5, 7, 8, 11, 14, 23, 24, 28, 29, 30, 33, 35, 50, 52, 53, 55, 60, 63, 64, 87, 697, 698, 737, 857, 887, 895, 908, 909, 1011, 1083, 1166, 1180, 1235, 1272, 1275, 1288, 1289, 1345, 1409, 1418, 1430, 1446, 1486, 1488, 1489, 1490, 1496, 1526, 1532, 1545, 1558, 1566, 1570, 1574, 1651, 1709, 1716, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1771, 1862, 1870, 1969, 2017, 2020, 2022, 2023, 2034, 2035, 2041, 2042, 2045, 2048, 2049, 2050, 2051, 2052, 2055, 2057, 2059, 2060, 2065, 2067, 2070, 2073, 2075, 2077, 2080, 2081, 2082, 2084, 2085, 2086, 2099, 2101, 2102, 2103, 2105, 2111, 2114], "maintain": [0, 7, 8, 23, 24, 28, 35, 52, 55, 59, 64, 931, 941, 1185, 1189, 1434, 1465, 1470, 1625, 1716, 1757, 1771, 2012, 2035, 2041, 2042, 2045, 2048, 2055, 2063, 2098, 2101], "accuraci": [0, 19, 24, 975, 1187, 1336, 1684, 1870, 
2012, 2029, 2041, 2055, 2085, 2090, 2102], "see": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 14, 15, 17, 19, 20, 22, 23, 25, 28, 29, 30, 33, 35, 39, 47, 48, 51, 52, 55, 60, 61, 62, 63, 64, 91, 95, 97, 99, 101, 103, 105, 107, 109, 111, 114, 115, 116, 117, 118, 119, 120, 122, 124, 126, 128, 130, 131, 134, 136, 137, 138, 139, 140, 142, 144, 146, 147, 150, 152, 153, 155, 156, 157, 158, 159, 161, 163, 165, 167, 169, 171, 172, 173, 174, 177, 178, 180, 181, 182, 183, 184, 185, 186, 187, 191, 194, 195, 199, 201, 202, 204, 206, 207, 209, 212, 213, 214, 216, 219, 220, 222, 226, 227, 228, 229, 230, 231, 232, 236, 237, 239, 241, 242, 243, 245, 247, 248, 250, 252, 254, 257, 258, 263, 265, 266, 267, 268, 269, 270, 272, 274, 276, 277, 278, 280, 282, 283, 284, 286, 289, 290, 293, 295, 297, 299, 300, 301, 302, 303, 304, 305, 307, 309, 311, 315, 323, 324, 325, 326, 327, 329, 330, 336, 346, 347, 348, 349, 350, 351, 352, 353, 354, 356, 357, 359, 361, 363, 365, 367, 369, 371, 372, 374, 376, 380, 381, 382, 383, 384, 386, 388, 390, 392, 394, 395, 396, 398, 399, 405, 406, 407, 409, 410, 411, 412, 413, 414, 415, 416, 418, 419, 420, 421, 423, 424, 426, 427, 429, 431, 432, 433, 434, 435, 436, 440, 442, 444, 452, 454, 455, 457, 459, 461, 462, 463, 464, 466, 467, 469, 470, 472, 479, 481, 482, 484, 486, 488, 489, 490, 491, 493, 495, 496, 497, 499, 500, 501, 503, 504, 507, 508, 509, 512, 517, 519, 520, 521, 523, 525, 526, 527, 528, 530, 532, 533, 535, 537, 540, 541, 542, 544, 545, 549, 550, 552, 554, 556, 557, 558, 563, 565, 567, 569, 570, 571, 572, 574, 575, 576, 578, 580, 581, 592, 593, 594, 596, 597, 599, 601, 603, 607, 608, 611, 612, 613, 616, 617, 618, 620, 621, 622, 623, 682, 695, 697, 698, 701, 732, 733, 734, 735, 736, 737, 740, 741, 742, 743, 744, 745, 747, 748, 750, 758, 759, 762, 763, 764, 765, 766, 767, 768, 769, 770, 773, 774, 775, 776, 781, 782, 783, 784, 785, 786, 787, 795, 842, 862, 863, 864, 865, 868, 877, 878, 879, 882, 893, 895, 896, 897, 898, 901, 902, 903, 905, 906, 908, 909, 913, 917, 918, 919, 922, 927, 928, 930, 935, 944, 946, 953, 955, 958, 972, 975, 977, 979, 980, 995, 1008, 1010, 1011, 1013, 1015, 1016, 1018, 1032, 1033, 1042, 1043, 1053, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1072, 1073, 1074, 1103, 1106, 1108, 1109, 1111, 1112, 1121, 1126, 1144, 1147, 1160, 1163, 1168, 1169, 1170, 1172, 1180, 1185, 1187, 1191, 1199, 1201, 1216, 1225, 1226, 1230, 1231, 1235, 1236, 1244, 1245, 1246, 1272, 1276, 1284, 1288, 1289, 1291, 1292, 1294, 1302, 1304, 1309, 1312, 1313, 1315, 1318, 1320, 1327, 1329, 1330, 1333, 1338, 1342, 1343, 1345, 1351, 1353, 1359, 1360, 1362, 1370, 1372, 1373, 1374, 1375, 1377, 1378, 1388, 1389, 1395, 1417, 1419, 1420, 1430, 1438, 1439, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1468, 1469, 1471, 1477, 1479, 1485, 1486, 1491, 1492, 1496, 1517, 1518, 1522, 1523, 1524, 1525, 1526, 1529, 1530, 1531, 1532, 1533, 1538, 1539, 1540, 1542, 1554, 1556, 1558, 1559, 1571, 1572, 1573, 1574, 1575, 1576, 1579, 1585, 1586, 1591, 1592, 1593, 1594, 1595, 1596, 1598, 1599, 1600, 1601, 1602, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1623, 1624, 1625, 1626, 1629, 1630, 1631, 1632, 1633, 1635, 1636, 1637, 1638, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1670, 1671, 1672, 1674, 1675, 1676, 1677, 1678, 1679, 1681, 1682, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 
1697, 1700, 1701, 1702, 1703, 1704, 1705, 1716, 1717, 1730, 1731, 1732, 1736, 1742, 1751, 1760, 1765, 1768, 1769, 1770, 1771, 1775, 1814, 1824, 1825, 1835, 1837, 1839, 1841, 1842, 1846, 1849, 1852, 1858, 1867, 1870, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1894, 1902, 1903, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1921, 1922, 1923, 1926, 1937, 1939, 1942, 1948, 1949, 1953, 1955, 1960, 1964, 1967, 1971, 1972, 1977, 1979, 2009, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2020, 2021, 2022, 2023, 2025, 2027, 2030, 2032, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2045, 2046, 2047, 2048, 2049, 2050, 2052, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2063, 2064, 2065, 2069, 2070, 2071, 2074, 2075, 2076, 2080, 2081, 2082, 2083, 2084, 2086, 2087, 2089, 2092, 2094, 2097, 2098, 2099, 2100, 2103, 2104, 2105, 2109, 2110, 2111, 2112, 2113, 2115, 2116], "detail": [0, 1, 2, 3, 7, 8, 11, 13, 14, 15, 17, 18, 23, 28, 29, 30, 33, 35, 37, 47, 48, 52, 53, 55, 61, 64, 81, 82, 83, 152, 325, 497, 526, 616, 682, 737, 740, 741, 742, 743, 744, 745, 762, 768, 769, 770, 773, 774, 775, 776, 781, 782, 784, 785, 786, 787, 795, 862, 863, 864, 865, 876, 893, 895, 896, 897, 898, 901, 902, 903, 906, 908, 909, 922, 958, 979, 980, 995, 1013, 1015, 1016, 1018, 1032, 1033, 1042, 1053, 1056, 1058, 1059, 1060, 1062, 1063, 1064, 1065, 1072, 1073, 1074, 1091, 1108, 1147, 1170, 1185, 1216, 1220, 1222, 1226, 1253, 1260, 1272, 1276, 1288, 1309, 1312, 1330, 1337, 1345, 1351, 1353, 1373, 1412, 1430, 1434, 1444, 1456, 1457, 1458, 1462, 1468, 1469, 1470, 1471, 1473, 1474, 1477, 1496, 1526, 1532, 1538, 1539, 1542, 1554, 1575, 1585, 1591, 1592, 1593, 1594, 1595, 1596, 1598, 1599, 1600, 1601, 1602, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1633, 1635, 1636, 1637, 1638, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1672, 1674, 1675, 1676, 1677, 1678, 1679, 1681, 1682, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1700, 1701, 1702, 1706, 1711, 1712, 1716, 1717, 1760, 1770, 1778, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1797, 1802, 1814, 1826, 1858, 1869, 1894, 1903, 1949, 1960, 1964, 1967, 2011, 2013, 2014, 2015, 2016, 2020, 2023, 2027, 2030, 2042, 2043, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2058, 2059, 2060, 2065, 2068, 2069, 2070, 2072, 2074, 2075, 2076, 2077, 2080, 2082, 2084, 2085, 2086, 2087, 2089, 2093, 2099, 2108, 2111, 2112, 2113], "when": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 14, 18, 19, 23, 24, 25, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 41, 45, 46, 47, 48, 50, 51, 52, 53, 55, 58, 60, 61, 62, 63, 64, 65, 84, 86, 87, 99, 152, 193, 210, 262, 315, 323, 417, 450, 488, 489, 490, 499, 500, 515, 517, 519, 546, 562, 582, 619, 682, 688, 691, 737, 770, 771, 781, 787, 793, 795, 826, 842, 861, 868, 880, 883, 896, 903, 911, 912, 913, 914, 915, 916, 917, 918, 922, 923, 927, 928, 929, 931, 938, 943, 946, 955, 963, 966, 967, 968, 974, 975, 977, 983, 990, 991, 995, 1010, 1012, 1022, 1024, 1046, 1052, 1053, 1065, 1091, 1108, 1147, 1151, 1156, 1162, 1166, 1167, 1172, 1175, 1176, 1177, 1187, 1188, 1196, 1197, 1200, 1201, 1209, 1212, 1226, 1261, 1262, 1264, 1265, 1268, 1272, 1273, 1275, 1284, 1285, 1286, 1288, 1289, 1293, 1294, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1322, 1325, 1326, 1327, 1329, 1330, 
1331, 1332, 1333, 1334, 1336, 1337, 1338, 1342, 1343, 1344, 1345, 1353, 1359, 1362, 1367, 1373, 1374, 1377, 1412, 1417, 1418, 1419, 1430, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1468, 1469, 1472, 1475, 1477, 1478, 1480, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1513, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1532, 1533, 1534, 1540, 1541, 1542, 1554, 1555, 1558, 1559, 1560, 1561, 1563, 1566, 1572, 1573, 1574, 1575, 1577, 1578, 1579, 1580, 1581, 1585, 1587, 1597, 1599, 1600, 1601, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1616, 1624, 1630, 1632, 1641, 1643, 1644, 1668, 1671, 1673, 1676, 1677, 1684, 1692, 1703, 1704, 1705, 1706, 1715, 1716, 1717, 1718, 1719, 1723, 1724, 1730, 1731, 1733, 1734, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1760, 1765, 1769, 1770, 1771, 1772, 1777, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1814, 1816, 1823, 1826, 1827, 1849, 1855, 1862, 1867, 1870, 1876, 1894, 1900, 1904, 1906, 1907, 1908, 1912, 1918, 1923, 1927, 1937, 1942, 1944, 1949, 1953, 1955, 1964, 1965, 1976, 1979, 1981, 2011, 2012, 2013, 2015, 2020, 2022, 2026, 2029, 2032, 2033, 2034, 2040, 2043, 2044, 2045, 2047, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2063, 2067, 2068, 2069, 2074, 2075, 2076, 2077, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2093, 2097, 2098, 2099, 2100, 2101, 2103, 2105, 2107, 2109, 2110, 2111, 2113, 2115], "enter": [0, 28, 898, 899, 900, 918, 2048, 2099], "ani": [0, 1, 2, 3, 4, 5, 7, 9, 11, 14, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 41, 45, 46, 47, 48, 51, 52, 53, 55, 60, 63, 64, 90, 152, 256, 488, 501, 547, 619, 699, 760, 783, 794, 795, 796, 802, 812, 813, 814, 815, 817, 818, 819, 820, 825, 826, 828, 857, 868, 892, 893, 894, 896, 903, 906, 908, 909, 911, 917, 922, 923, 931, 951, 957, 961, 962, 990, 1010, 1046, 1053, 1064, 1109, 1110, 1111, 1124, 1125, 1127, 1128, 1129, 1131, 1135, 1139, 1140, 1141, 1156, 1165, 1166, 1173, 1174, 1175, 1177, 1178, 1183, 1184, 1185, 1187, 1191, 1197, 1235, 1271, 1272, 1276, 1279, 1283, 1284, 1286, 1288, 1294, 1302, 1304, 1308, 1309, 1310, 1313, 1318, 1319, 1320, 1325, 1326, 1329, 1333, 1336, 1342, 1344, 1427, 1428, 1429, 1431, 1432, 1433, 1434, 1438, 1439, 1443, 1444, 1453, 1454, 1455, 1462, 1463, 1467, 1471, 1472, 1475, 1476, 1479, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1491, 1492, 1512, 1513, 1515, 1516, 1517, 1525, 1526, 1534, 1536, 1537, 1540, 1545, 1546, 1547, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1576, 1577, 1578, 1603, 1607, 1608, 1609, 1649, 1669, 1670, 1684, 1706, 1716, 1723, 1724, 1735, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1748, 1758, 1759, 1761, 1764, 1766, 1771, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1802, 1842, 1858, 1872, 1874, 1927, 1934, 1976, 1981, 1990, 2011, 2013, 2014, 2015, 2020, 2023, 2024, 2026, 2027, 2029, 2032, 2033, 2034, 2035, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2059, 2060, 2062, 2063, 2065, 2066, 2067, 2068, 2069, 2070, 2075, 2076, 2077, 2080, 2082, 2085, 2087, 2090, 2091, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2104, 2106, 2107, 2109, 2110, 2111, 2112, 2113], "should": [0, 1, 3, 4, 
5, 9, 14, 15, 17, 18, 19, 20, 23, 24, 28, 29, 30, 32, 33, 35, 36, 37, 39, 40, 45, 46, 47, 48, 50, 52, 53, 55, 59, 60, 63, 64, 66, 71, 72, 75, 86, 121, 152, 156, 315, 323, 400, 404, 417, 447, 448, 449, 450, 451, 489, 490, 498, 515, 517, 519, 585, 586, 587, 589, 590, 688, 691, 692, 737, 762, 774, 775, 776, 795, 798, 802, 819, 826, 840, 841, 857, 858, 859, 862, 868, 892, 893, 894, 895, 896, 903, 904, 905, 906, 907, 908, 909, 911, 912, 913, 917, 927, 928, 943, 944, 945, 946, 953, 966, 981, 982, 997, 1010, 1013, 1023, 1024, 1042, 1044, 1053, 1099, 1109, 1110, 1111, 1121, 1126, 1129, 1130, 1131, 1139, 1140, 1141, 1144, 1159, 1162, 1163, 1164, 1166, 1167, 1171, 1175, 1176, 1177, 1185, 1186, 1191, 1193, 1230, 1231, 1234, 1235, 1269, 1270, 1272, 1273, 1277, 1282, 1284, 1285, 1288, 1289, 1290, 1292, 1317, 1342, 1343, 1344, 1350, 1359, 1362, 1363, 1364, 1385, 1430, 1438, 1439, 1443, 1459, 1461, 1462, 1464, 1465, 1466, 1469, 1470, 1491, 1496, 1518, 1526, 1532, 1533, 1534, 1549, 1554, 1575, 1579, 1597, 1603, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1624, 1632, 1684, 1706, 1708, 1709, 1710, 1713, 1714, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1757, 1758, 1759, 1762, 1764, 1775, 1776, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1826, 1828, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1849, 1865, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1909, 1910, 1911, 1912, 1913, 1914, 1927, 1942, 1951, 1976, 1981, 1982, 2009, 2010, 2011, 2012, 2013, 2015, 2016, 2020, 2022, 2023, 2024, 2027, 2029, 2032, 2034, 2035, 2040, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2055, 2057, 2058, 2059, 2061, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2075, 2077, 2080, 2084, 2085, 2087, 2093, 2095, 2097, 2099, 2100, 2101, 2102, 2104, 2105, 2107, 2109, 2110, 2111, 2112], "call": [0, 1, 2, 3, 8, 11, 14, 15, 18, 19, 20, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 47, 50, 52, 53, 55, 56, 57, 59, 60, 61, 63, 64, 66, 68, 75, 82, 88, 152, 292, 325, 337, 460, 488, 489, 490, 558, 582, 616, 619, 682, 699, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 783, 792, 799, 826, 860, 864, 865, 896, 903, 904, 905, 906, 907, 908, 909, 911, 913, 914, 917, 918, 927, 928, 929, 931, 938, 942, 958, 975, 980, 981, 985, 1008, 1011, 1013, 1042, 1044, 1054, 1055, 1064, 1072, 1073, 1075, 1076, 1106, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1160, 1165, 1166, 1177, 1178, 1185, 1196, 1216, 1236, 1257, 1269, 1272, 1275, 1277, 1283, 1284, 1285, 1292, 1328, 1336, 1340, 1342, 1344, 1345, 1374, 1434, 1440, 1441, 1442, 1462, 1463, 1464, 1465, 1466, 1468, 1470, 1472, 1511, 1522, 1523, 1524, 1526, 1533, 1534, 1555, 1566, 1578, 1610, 1611, 1612, 1618, 1619, 1620, 1625, 1643, 1684, 1703, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1723, 1724, 1733, 1736, 1743, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1759, 1765, 1766, 1768, 1769, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1789, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1839, 1866, 1872, 1873, 1902, 1921, 1922, 1923, 1944, 1960, 1964, 1965, 1967, 1971, 1972, 1976, 1977, 1982, 1999, 2000, 2001, 2002, 2011, 2013, 2017, 2020, 2024, 2029, 2032, 2034, 2035, 2036, 2041, 2042, 2044, 2045, 2046, 2047, 2048, 2050, 2051, 2052, 
2053, 2054, 2055, 2056, 2057, 2058, 2059, 2061, 2063, 2065, 2067, 2068, 2069, 2070, 2075, 2076, 2077, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2090, 2092, 2097, 2098, 2099, 2100, 2101, 2103, 2104, 2105, 2107, 2109, 2110, 2111, 2112, 2113, 2115, 2116], "model": [0, 1, 2, 3, 4, 5, 8, 9, 12, 24, 28, 29, 30, 32, 34, 35, 46, 48, 51, 53, 55, 56, 57, 58, 59, 60, 61, 64, 66, 84, 85, 794, 795, 796, 799, 800, 812, 813, 814, 815, 816, 817, 818, 819, 821, 822, 823, 826, 827, 828, 838, 839, 840, 841, 843, 857, 860, 861, 862, 863, 864, 865, 866, 918, 931, 975, 977, 1054, 1075, 1165, 1166, 1167, 1175, 1177, 1272, 1276, 1277, 1282, 1284, 1288, 1290, 1344, 1430, 1462, 1479, 1490, 1491, 1526, 1532, 1555, 1566, 1570, 1572, 1573, 1574, 1631, 1716, 1717, 1723, 1724, 1729, 1731, 1734, 1747, 1766, 1767, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1793, 1794, 1795, 1796, 1797, 1802, 1808, 1810, 1923, 1976, 1999, 2001, 2012, 2013, 2015, 2016, 2017, 2024, 2026, 2027, 2035, 2042, 2044, 2045, 2047, 2048, 2051, 2053, 2055, 2056, 2057, 2058, 2059, 2060, 2062, 2065, 2069, 2071, 2075, 2076, 2078, 2080, 2081, 2085, 2090, 2091, 2092, 2095, 2096, 2097, 2099, 2101, 2102, 2103, 2107, 2109, 2111], "": [0, 1, 2, 3, 4, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 23, 24, 28, 29, 30, 32, 34, 35, 36, 37, 40, 43, 44, 45, 47, 48, 49, 52, 53, 55, 57, 59, 60, 63, 64, 82, 83, 88, 89, 90, 465, 483, 495, 498, 515, 546, 560, 583, 626, 682, 691, 695, 696, 697, 698, 701, 737, 762, 795, 796, 797, 803, 817, 818, 819, 822, 825, 828, 857, 864, 865, 878, 880, 881, 882, 883, 902, 903, 904, 905, 908, 912, 913, 915, 923, 942, 959, 961, 974, 983, 989, 990, 991, 992, 997, 1006, 1008, 1010, 1014, 1029, 1033, 1042, 1044, 1049, 1050, 1053, 1054, 1055, 1075, 1076, 1077, 1083, 1100, 1103, 1105, 1106, 1108, 1122, 1123, 1125, 1126, 1127, 1130, 1131, 1133, 1134, 1137, 1138, 1140, 1141, 1142, 1143, 1145, 1147, 1148, 1149, 1150, 1151, 1154, 1155, 1156, 1162, 1163, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1175, 1177, 1180, 1185, 1187, 1216, 1226, 1234, 1235, 1236, 1259, 1261, 1269, 1270, 1272, 1276, 1283, 1284, 1288, 1303, 1304, 1314, 1315, 1316, 1318, 1321, 1325, 1327, 1329, 1330, 1334, 1335, 1336, 1337, 1342, 1344, 1345, 1360, 1367, 1372, 1374, 1377, 1379, 1380, 1385, 1388, 1389, 1392, 1399, 1403, 1407, 1409, 1416, 1417, 1420, 1422, 1428, 1429, 1430, 1438, 1439, 1440, 1441, 1442, 1445, 1462, 1468, 1479, 1526, 1527, 1532, 1536, 1543, 1555, 1558, 1560, 1566, 1570, 1572, 1574, 1580, 1581, 1604, 1605, 1616, 1629, 1632, 1643, 1644, 1649, 1684, 1690, 1703, 1706, 1716, 1717, 1723, 1724, 1736, 1747, 1760, 1764, 1771, 1772, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1802, 1810, 1814, 1816, 1820, 1827, 1842, 1845, 1846, 1849, 1850, 1851, 1853, 1855, 1862, 1869, 1870, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1918, 1921, 1922, 1923, 1926, 1927, 1928, 1929, 1930, 1939, 1943, 1945, 1951, 1967, 1971, 1972, 1973, 1976, 1977, 1981, 1983, 1986, 1995, 1998, 1999, 2000, 2001, 2002, 2007, 2011, 2013, 2014, 2015, 2016, 2017, 2020, 2022, 2023, 2027, 2029, 2030, 2032, 2033, 2034, 2035, 2041, 2042, 2043, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2058, 2060, 2061, 2062, 2063, 2065, 2066, 2067, 2069, 2070, 2072, 2073, 2075, 2076, 2077, 2078, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2091, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2103, 2104, 2105, 2106, 2107, 2109, 2110, 2111, 2112, 2115], "wrap": [0, 1, 15, 19, 23, 24, 28, 32, 33, 38, 40, 
[2.4/searchindex.js: Sphinx-generated search index payload elided. The file is a single minified line mapping stemmed search terms (e.g. "forward", "pass", "backward", "loss", "zero_grad", "autograd", "optim", "default", "sgd", "step", "eval", "jit", "trace") to the numeric indices of the documentation pages that contain them. It is regenerated automatically with the docs and is not intended for manual editing or review.]
1812, 1815, 1816, 1817, 1819, 1820, 1823, 1824, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1835, 1837, 1839, 1841, 1842, 1843, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1858, 1862, 1864, 1865, 1867, 1868, 1869, 1870, 1874, 1879, 1880, 1881, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1895, 1899, 1900, 1901, 1902, 1904, 1905, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1920, 1921, 1922, 1923, 1924, 1926, 1927, 1928, 1929, 1930, 1937, 1938, 1940, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1965, 1967, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 2009, 2010, 2012, 2014, 2015, 2016, 2017, 2020, 2023, 2024, 2026, 2028, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2043, 2044, 2047, 2048, 2051, 2052, 2053, 2055, 2056, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 2069, 2072, 2075, 2076, 2077, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2091, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2104, 2105, 2106, 2109, 2110], "For": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 20, 23, 24, 25, 28, 29, 30, 33, 34, 35, 37, 44, 45, 47, 48, 51, 52, 53, 55, 56, 57, 59, 60, 61, 62, 63, 64, 81, 82, 83, 198, 256, 291, 315, 317, 323, 337, 354, 417, 473, 483, 488, 495, 501, 515, 517, 519, 585, 591, 605, 619, 688, 689, 690, 691, 692, 695, 701, 736, 740, 741, 742, 743, 744, 745, 762, 794, 795, 796, 816, 861, 862, 895, 908, 909, 922, 929, 943, 947, 949, 950, 952, 955, 964, 975, 980, 1042, 1056, 1058, 1064, 1089, 1090, 1091, 1108, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1152, 1160, 1175, 1177, 1186, 1187, 1191, 1201, 1213, 1226, 1235, 1249, 1272, 1283, 1284, 1286, 1288, 1289, 1302, 1304, 1308, 1309, 1312, 1313, 1315, 1316, 1317, 1318, 1320, 1329, 1330, 1332, 1333, 1336, 1345, 1352, 1360, 1367, 1373, 1377, 1430, 1434, 1438, 1439, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1468, 1469, 1471, 1472, 1477, 1479, 1486, 1491, 1496, 1498, 1526, 1529, 1530, 1531, 1532, 1540, 1541, 1542, 1548, 1549, 1550, 1551, 1552, 1553, 1558, 1563, 1577, 1578, 1582, 1583, 1584, 1586, 1587, 1588, 1589, 1605, 1607, 1608, 1609, 1618, 1619, 1620, 1625, 1632, 1643, 1669, 1671, 1684, 1692, 1706, 1715, 1716, 1734, 1757, 1758, 1759, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1810, 1839, 1855, 1864, 1867, 1875, 1901, 1902, 1912, 1918, 1927, 1928, 1943, 1945, 1949, 1957, 1960, 1965, 1967, 1974, 1975, 1976, 2011, 2012, 2013, 2016, 2018, 2020, 2022, 2023, 2025, 2026, 2029, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2055, 2057, 2058, 2059, 2060, 2064, 2065, 2067, 2068, 2069, 2070, 2071, 2074, 2075, 2076, 2077, 2080, 2082, 2083, 2084, 2085, 2086, 2087, 2092, 2093, 2094, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2107, 2108, 2110, 2111, 2112, 2115], "now": [0, 1, 12, 19, 28, 33, 36, 44, 48, 50, 52, 55, 60, 64, 498, 903, 907, 908, 909, 917, 931, 989, 1022, 1050, 1072, 1073, 1159, 1160, 1219, 1269, 1270, 1276, 1288, 1322, 1523, 1543, 1579, 1706, 1720, 1736, 1747, 1768, 1786, 1867, 1918, 1923, 1967, 2013, 2023, 2033, 2034, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2051, 2055, 2056, 2060, 2065, 2068, 2070, 2076, 2077, 2080, 2096, 2098, 2099, 2101, 2102, 2103, 2105, 2110, 2111, 2112, 2113], "we": [0, 1, 2, 5, 7, 8, 9, 11, 12, 14, 15, 
23, 24, 28, 30, 32, 33, 34, 35, 36, 37, 47, 48, 51, 52, 53, 55, 56, 57, 58, 59, 60, 63, 64, 66, 76, 77, 88, 488, 498, 714, 715, 716, 717, 718, 719, 720, 721, 722, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 781, 787, 790, 791, 793, 795, 796, 840, 860, 862, 864, 865, 868, 896, 898, 903, 904, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 931, 944, 953, 975, 977, 1004, 1064, 1108, 1126, 1128, 1129, 1130, 1131, 1138, 1143, 1144, 1145, 1159, 1169, 1170, 1172, 1177, 1180, 1181, 1183, 1185, 1186, 1187, 1189, 1191, 1193, 1194, 1195, 1196, 1197, 1199, 1200, 1201, 1209, 1226, 1230, 1231, 1247, 1270, 1272, 1275, 1276, 1284, 1286, 1288, 1328, 1331, 1336, 1344, 1345, 1364, 1412, 1430, 1435, 1436, 1437, 1438, 1439, 1473, 1491, 1526, 1579, 1632, 1634, 1643, 1703, 1710, 1716, 1723, 1724, 1730, 1733, 1742, 1744, 1747, 1751, 1752, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1799, 1805, 1810, 1827, 1870, 1960, 1965, 1976, 2011, 2012, 2013, 2015, 2016, 2018, 2020, 2023, 2024, 2025, 2026, 2032, 2033, 2034, 2035, 2042, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2055, 2057, 2058, 2061, 2062, 2063, 2065, 2067, 2068, 2070, 2071, 2073, 2075, 2076, 2077, 2080, 2083, 2085, 2086, 2090, 2091, 2093, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2107, 2108, 2109, 2110, 2111, 2115], "suggest": [0, 9, 24, 52, 56, 84, 85, 87, 89, 1459, 2011, 2018, 2041, 2042, 2050, 2080, 2093, 2105, 2111], "issu": [0, 2, 3, 5, 9, 10, 11, 14, 22, 23, 28, 30, 35, 36, 52, 55, 56, 58, 60, 64, 66, 86, 87, 911, 917, 931, 975, 977, 978, 1159, 1171, 1173, 1177, 1194, 1318, 1362, 1374, 1491, 1496, 1522, 1523, 1524, 1542, 1579, 1643, 1644, 1768, 1866, 1976, 2011, 2012, 2016, 2018, 2023, 2028, 2032, 2033, 2034, 2035, 2041, 2042, 2045, 2048, 2051, 2057, 2058, 2059, 2060, 2061, 2065, 2068, 2070, 2071, 2075, 2080, 2081, 2082, 2083, 2086, 2096, 2099, 2100, 2102, 2103, 2111, 2113, 2114], "http": [0, 2, 3, 4, 7, 9, 11, 12, 14, 15, 24, 26, 28, 33, 35, 37, 47, 48, 55, 152, 682, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 794, 795, 896, 931, 975, 989, 1108, 1166, 1194, 1201, 1345, 1374, 1445, 1496, 1522, 1523, 1524, 1546, 1570, 1573, 1576, 1732, 1768, 1816, 1833, 1866, 1870, 1890, 1964, 1967, 2011, 2017, 2020, 2027, 2030, 2042, 2043, 2052, 2053, 2055, 2059, 2061, 2065, 2068, 2083, 2085, 2086, 2100, 2104, 2105, 2110, 2116], "github": [0, 7, 9, 14, 26, 28, 43, 52, 55, 56, 60, 152, 794, 795, 896, 917, 931, 1166, 1194, 1201, 1374, 1522, 1523, 1524, 1570, 1768, 1802, 1866, 1967, 2011, 2018, 2023, 2052, 2059, 2065, 2070, 2080, 2081, 2083, 2086, 2099, 2100, 2102, 2105, 2110], "com": [0, 7, 14, 26, 28, 46, 48, 55, 152, 794, 795, 896, 931, 1166, 1194, 1201, 1374, 1522, 1523, 1524, 1570, 1768, 1866, 1964, 1967, 2011, 2020, 2027, 2030, 2052, 2053, 2059, 2061, 2065, 2083, 2086, 2100, 2105, 2110], "pytorch": [0, 1, 2, 3, 4, 11, 12, 13, 14, 17, 18, 19, 20, 23, 24, 25, 30, 31, 32, 33, 34, 35, 36, 37, 40, 47, 48, 53, 55, 56, 59, 61, 64, 65, 66, 84, 85, 86, 87, 88, 89, 152, 515, 585, 682, 700, 732, 733, 734, 735, 747, 748, 758, 762, 763, 764, 765, 766, 767, 794, 795, 812, 813, 814, 815, 883, 896, 929, 931, 960, 965, 966, 975, 976, 977, 989, 1032, 1033, 1044, 1049, 1108, 1153, 1159, 1166, 1167, 1171, 1176, 1177, 1201, 1217, 1258, 1259, 1283, 1303, 1309, 1314, 1316, 1317, 1318, 1321, 1334, 1343, 1359, 1362, 1363, 1374, 1407, 1438, 1439, 1445, 1477, 1491, 1522, 1523, 1524, 1570, 1587, 1588, 1589, 1605, 1684, 1716, 1723, 1724, 1757, 1765, 1766, 
1768, 1771, 1779, 1826, 1858, 1866, 1867, 1868, 1876, 1923, 1927, 1951, 1964, 1967, 1976, 1989, 1995, 1998, 2011, 2015, 2016, 2020, 2022, 2023, 2027, 2029, 2034, 2035, 2036, 2043, 2044, 2049, 2050, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2061, 2062, 2063, 2067, 2068, 2069, 2071, 2072, 2075, 2076, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2093, 2095, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2109, 2110, 2112, 2113, 2114, 2116], "75956": 0, "_c": [0, 23, 24, 28, 63, 911, 917, 1271, 1284, 1777, 2014, 2016, 2029, 2045, 2054, 2058, 2061, 2065, 2075], "_jit_set_autocast_mod": 0, "fals": [0, 1, 2, 3, 5, 12, 14, 19, 23, 24, 28, 29, 30, 32, 34, 35, 37, 52, 55, 58, 62, 64, 66, 71, 74, 75, 76, 77, 114, 115, 116, 117, 118, 120, 136, 137, 138, 152, 183, 184, 185, 198, 211, 262, 303, 320, 321, 323, 330, 334, 337, 338, 340, 344, 345, 346, 353, 356, 394, 398, 409, 411, 412, 413, 416, 417, 423, 431, 432, 433, 434, 447, 448, 449, 450, 451, 455, 460, 462, 472, 473, 481, 497, 498, 506, 519, 544, 557, 558, 567, 582, 596, 605, 611, 612, 617, 619, 682, 695, 696, 697, 698, 699, 701, 714, 715, 716, 717, 718, 719, 734, 736, 737, 740, 741, 742, 747, 748, 753, 754, 755, 757, 758, 760, 762, 766, 770, 771, 778, 780, 781, 782, 784, 785, 787, 799, 801, 816, 821, 822, 823, 824, 826, 827, 828, 840, 841, 843, 860, 861, 866, 867, 868, 877, 878, 879, 883, 896, 898, 903, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 935, 944, 947, 950, 952, 953, 959, 960, 966, 967, 968, 974, 975, 989, 990, 1010, 1048, 1053, 1065, 1108, 1109, 1110, 1111, 1113, 1114, 1121, 1125, 1126, 1127, 1128, 1133, 1134, 1139, 1140, 1141, 1143, 1144, 1145, 1160, 1162, 1163, 1164, 1165, 1167, 1168, 1170, 1171, 1172, 1174, 1176, 1180, 1187, 1196, 1199, 1200, 1207, 1211, 1213, 1215, 1229, 1230, 1231, 1234, 1235, 1257, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1272, 1277, 1279, 1280, 1288, 1289, 1290, 1292, 1294, 1297, 1302, 1303, 1314, 1315, 1316, 1317, 1319, 1320, 1321, 1322, 1325, 1327, 1329, 1330, 1333, 1334, 1335, 1336, 1337, 1342, 1343, 1344, 1354, 1355, 1356, 1357, 1359, 1360, 1361, 1362, 1364, 1366, 1370, 1372, 1373, 1375, 1378, 1385, 1388, 1389, 1412, 1417, 1418, 1419, 1420, 1423, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1459, 1461, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1473, 1474, 1477, 1478, 1479, 1480, 1482, 1483, 1484, 1485, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1511, 1512, 1513, 1517, 1518, 1519, 1520, 1521, 1525, 1526, 1529, 1530, 1531, 1532, 1533, 1535, 1540, 1542, 1543, 1544, 1545, 1546, 1547, 1554, 1556, 1558, 1559, 1566, 1569, 1570, 1571, 1572, 1574, 1575, 1576, 1579, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1604, 1605, 1606, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1623, 1624, 1625, 1627, 1628, 1629, 1632, 1634, 1636, 1637, 1638, 1643, 1644, 1647, 1653, 1654, 1655, 1657, 1658, 1659, 1663, 1668, 1672, 1676, 1678, 1679, 1682, 1683, 1684, 1685, 1687, 1697, 1700, 1701, 1703, 1709, 1716, 1718, 1720, 1721, 1725, 1726, 1728, 1730, 1733, 1735, 1736, 1737, 1749, 1758, 1759, 1760, 1761, 1763, 1766, 1769, 1770, 1771, 1775, 1776, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1814, 1824, 1826, 1827, 1831, 1832, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1850, 1851, 1858, 1862, 1870, 1871, 1874, 1876, 1880, 1881, 1882, 
1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1899, 1909, 1910, 1911, 1912, 1913, 1914, 1921, 1922, 1923, 1926, 1927, 1942, 1946, 1951, 1960, 1964, 1965, 1967, 1970, 1971, 1972, 1979, 1981, 2009, 2010, 2011, 2013, 2014, 2015, 2016, 2020, 2022, 2023, 2026, 2027, 2032, 2034, 2035, 2041, 2042, 2045, 2047, 2048, 2050, 2051, 2055, 2058, 2059, 2060, 2063, 2064, 2065, 2068, 2069, 2071, 2074, 2075, 2080, 2081, 2082, 2084, 2085, 2087, 2089, 2091, 2096, 2098, 2099, 2100, 2102, 2106, 2107, 2110, 2111, 2112], "randn": [0, 1, 11, 12, 28, 33, 34, 35, 52, 57, 59, 60, 61, 64, 66, 68, 73, 74, 75, 291, 313, 485, 546, 582, 586, 587, 588, 589, 590, 591, 619, 685, 686, 687, 688, 689, 690, 691, 692, 697, 698, 701, 722, 730, 731, 736, 740, 741, 742, 743, 744, 745, 758, 760, 762, 763, 764, 765, 766, 767, 774, 775, 776, 864, 865, 877, 878, 879, 881, 884, 885, 886, 887, 888, 938, 943, 955, 962, 964, 965, 966, 967, 968, 970, 992, 993, 994, 995, 996, 1007, 1087, 1088, 1089, 1095, 1096, 1097, 1098, 1102, 1108, 1122, 1123, 1152, 1160, 1165, 1166, 1167, 1169, 1170, 1171, 1172, 1175, 1176, 1177, 1243, 1247, 1249, 1284, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1322, 1326, 1327, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1341, 1348, 1352, 1353, 1360, 1362, 1363, 1364, 1366, 1367, 1370, 1372, 1373, 1375, 1377, 1379, 1380, 1395, 1411, 1414, 1424, 1427, 1428, 1429, 1431, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1457, 1458, 1459, 1460, 1461, 1463, 1464, 1465, 1466, 1467, 1468, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1524, 1525, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546, 1547, 1553, 1554, 1556, 1557, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1575, 1577, 1578, 1582, 1583, 1584, 1586, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1616, 1627, 1628, 1634, 1668, 1674, 1675, 1715, 1774, 1817, 1822, 1823, 1824, 1826, 1827, 1840, 1844, 1845, 1857, 1892, 1894, 1899, 1905, 1908, 1916, 1917, 1920, 1926, 1927, 1937, 1940, 1941, 1944, 1948, 1951, 1952, 1954, 1957, 1959, 1964, 1974, 1975, 1976, 1979, 2014, 2015, 2016, 2018, 2020, 2024, 2033, 2034, 2035, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2053, 2055, 2059, 2060, 2063, 2064, 2065, 2066, 2068, 2070, 2075, 2080, 2081, 2083, 2085, 2089, 2091, 2093, 2096, 2097, 2099, 2100, 2102, 2104, 2106, 2109, 2111], "freez": [0, 55, 1187, 1272, 1282, 1468, 1469, 1526, 2042, 2093], "_": [0, 1, 3, 11, 23, 24, 28, 29, 32, 33, 40, 61, 64, 683, 685, 686, 700, 868, 884, 885, 886, 887, 888, 898, 903, 907, 908, 909, 919, 945, 964, 980, 991, 992, 994, 995, 997, 1152, 1157, 1172, 1176, 1239, 1293, 1301, 1352, 1360, 1440, 1441, 1442, 1488, 1489, 1490, 1542, 1566, 1716, 1731, 1765, 1794, 1796, 1816, 1839, 1842, 1845, 1857, 1877, 1879, 1885, 1892, 1894, 1916, 1927, 1940, 1941, 2020, 2042, 2045, 2049, 2050, 2055, 2061, 2067, 2068, 2069, 2081, 2097, 2100, 2105, 2109, 2111], "3": [0, 1, 3, 4, 6, 9, 11, 12, 18, 19, 21, 23, 24, 25, 28, 30, 32, 33, 35, 36, 37, 40, 45, 47, 52, 55, 57, 59, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 77, 78, 79, 193, 210, 235, 256, 262, 291, 315, 317, 319, 323, 403, 404, 447, 448, 449, 450, 451, 473, 489, 490, 495, 498, 501, 515, 517, 519, 525, 539, 546, 560, 562, 
583, 585, 586, 587, 589, 590, 609, 619, 682, 683, 687, 688, 689, 690, 691, 692, 693, 694, 695, 699, 700, 701, 736, 740, 741, 742, 743, 744, 745, 748, 750, 759, 762, 763, 764, 765, 767, 774, 775, 776, 787, 795, 826, 864, 865, 868, 878, 879, 881, 882, 883, 890, 891, 911, 912, 913, 914, 915, 916, 918, 938, 943, 945, 946, 947, 948, 949, 950, 951, 952, 954, 955, 956, 957, 958, 959, 961, 962, 963, 965, 966, 967, 968, 969, 973, 974, 986, 990, 991, 996, 997, 1007, 1050, 1051, 1053, 1090, 1091, 1092, 1095, 1096, 1097, 1098, 1099, 1100, 1102, 1103, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1121, 1124, 1129, 1132, 1136, 1142, 1147, 1148, 1149, 1150, 1151, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1161, 1162, 1163, 1165, 1167, 1172, 1173, 1175, 1177, 1213, 1214, 1215, 1226, 1229, 1232, 1233, 1234, 1235, 1236, 1238, 1239, 1247, 1249, 1257, 1259, 1261, 1263, 1272, 1276, 1279, 1282, 1284, 1288, 1289, 1293, 1294, 1295, 1296, 1297, 1298, 1302, 1304, 1305, 1306, 1308, 1309, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1350, 1353, 1355, 1359, 1360, 1361, 1362, 1363, 1364, 1366, 1367, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1380, 1395, 1411, 1412, 1414, 1416, 1417, 1418, 1420, 1421, 1422, 1423, 1435, 1436, 1437, 1438, 1439, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1457, 1458, 1459, 1461, 1468, 1469, 1472, 1473, 1474, 1475, 1477, 1478, 1480, 1482, 1483, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1529, 1531, 1533, 1538, 1539, 1541, 1542, 1544, 1545, 1548, 1549, 1550, 1551, 1552, 1553, 1560, 1561, 1562, 1566, 1574, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1587, 1588, 1589, 1597, 1599, 1604, 1605, 1608, 1609, 1611, 1612, 1615, 1623, 1624, 1627, 1628, 1630, 1632, 1636, 1637, 1643, 1651, 1668, 1670, 1671, 1674, 1675, 1682, 1683, 1690, 1691, 1703, 1715, 1716, 1723, 1724, 1731, 1746, 1748, 1750, 1751, 1752, 1753, 1757, 1759, 1760, 1761, 1762, 1765, 1769, 1770, 1771, 1772, 1774, 1775, 1776, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1805, 1808, 1809, 1811, 1815, 1817, 1819, 1823, 1824, 1826, 1827, 1830, 1831, 1832, 1833, 1834, 1835, 1837, 1839, 1841, 1842, 1843, 1846, 1847, 1848, 1849, 1850, 1851, 1853, 1854, 1855, 1858, 1862, 1867, 1868, 1874, 1877, 1879, 1881, 1883, 1884, 1885, 1889, 1890, 1891, 1899, 1901, 1902, 1904, 1905, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1918, 1920, 1926, 1927, 1929, 1930, 1937, 1938, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1970, 1973, 1976, 1977, 1978, 1979, 2009, 2010, 2013, 2014, 2015, 2016, 2017, 2020, 2023, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2051, 2052, 2055, 2056, 2060, 2061, 2063, 2065, 2067, 2068, 2069, 2070, 2072, 2075, 2076, 2077, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2093, 2096, 2099, 2100, 2102, 2104, 2105, 2106, 2108, 2109, 2110, 2111], "bug": [0, 14, 18, 28, 52, 64, 977, 1169, 1170, 1172, 1362, 2041, 2057, 2080, 2102, 2111], "thi": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 16, 17, 18, 19, 20, 22, 23, 24, 25, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 38, 39, 40, 45, 46, 47, 48, 50, 51, 52, 53, 55, 56, 58, 59, 60, 62, 63, 64, 65, 66, 74, 75, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 121, 152, 156, 
197, 198, 208, 211, 223, 224, 225, 257, 262, 291, 292, 315, 323, 325, 337, 339, 342, 354, 417, 447, 448, 449, 450, 451, 460, 488, 489, 490, 495, 497, 498, 499, 500, 501, 502, 505, 506, 515, 517, 519, 526, 558, 568, 585, 586, 587, 589, 590, 591, 605, 606, 616, 619, 620, 682, 686, 688, 691, 695, 696, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 737, 738, 739, 746, 750, 751, 752, 753, 754, 755, 756, 757, 759, 761, 762, 777, 778, 779, 780, 781, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 801, 802, 804, 812, 813, 814, 815, 817, 818, 819, 821, 822, 823, 824, 825, 826, 827, 828, 857, 862, 864, 865, 877, 878, 879, 880, 881, 888, 892, 893, 894, 895, 896, 898, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 927, 928, 929, 931, 935, 942, 943, 946, 948, 951, 955, 956, 959, 963, 965, 966, 969, 970, 972, 975, 977, 978, 979, 980, 981, 985, 990, 991, 993, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1010, 1011, 1013, 1014, 1015, 1018, 1029, 1031, 1034, 1035, 1036, 1038, 1039, 1042, 1043, 1044, 1045, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1058, 1060, 1063, 1064, 1065, 1072, 1073, 1075, 1076, 1077, 1081, 1082, 1083, 1089, 1090, 1091, 1096, 1098, 1099, 1103, 1106, 1107, 1108, 1112, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1147, 1148, 1149, 1150, 1151, 1154, 1155, 1156, 1159, 1162, 1165, 1166, 1167, 1169, 1170, 1171, 1172, 1173, 1175, 1176, 1177, 1180, 1181, 1185, 1186, 1187, 1189, 1190, 1191, 1192, 1193, 1196, 1197, 1198, 1200, 1201, 1209, 1211, 1214, 1216, 1217, 1226, 1230, 1236, 1238, 1259, 1269, 1270, 1272, 1273, 1275, 1277, 1278, 1280, 1282, 1283, 1284, 1288, 1289, 1290, 1292, 1293, 1294, 1295, 1296, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1327, 1328, 1329, 1330, 1331, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1344, 1345, 1348, 1350, 1353, 1362, 1363, 1367, 1370, 1372, 1373, 1374, 1375, 1377, 1378, 1379, 1385, 1388, 1389, 1399, 1403, 1408, 1409, 1414, 1417, 1418, 1419, 1420, 1422, 1430, 1434, 1438, 1439, 1440, 1441, 1442, 1445, 1446, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1472, 1473, 1474, 1477, 1478, 1480, 1485, 1486, 1488, 1489, 1490, 1491, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1511, 1513, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1529, 1532, 1533, 1536, 1538, 1541, 1542, 1543, 1555, 1558, 1560, 1563, 1566, 1572, 1573, 1574, 1575, 1578, 1579, 1580, 1581, 1585, 1586, 1587, 1588, 1589, 1590, 1597, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1616, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1627, 1628, 1632, 1634, 1636, 1641, 1643, 1649, 1651, 1657, 1658, 1659, 1669, 1671, 1673, 1684, 1690, 1691, 1699, 1703, 1704, 1705, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1720, 1723, 1724, 1730, 1731, 1732, 1733, 1734, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1757, 1758, 1759, 1760, 1761, 1764, 1765, 1766, 1768, 1769, 1771, 1772, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1792, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1805, 1806, 1808, 1810, 1812, 1814, 1815, 1816, 1820, 1824, 1826, 1833, 1837, 1839, 1842, 1846, 1848, 1855, 1862, 1864, 1865, 1866, 1868, 1869, 1870, 1874, 1875, 1876, 1877, 1882, 1895, 1900, 1903, 1904, 
1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1919, 1921, 1922, 1923, 1926, 1927, 1928, 1929, 1930, 1934, 1937, 1939, 1943, 1945, 1949, 1951, 1959, 1960, 1961, 1963, 1964, 1965, 1967, 1969, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1981, 1982, 1983, 1986, 1988, 1990, 1991, 1993, 1995, 1996, 1999, 2000, 2001, 2002, 2003, 2006, 2007, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2022, 2023, 2024, 2025, 2026, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035, 2037, 2040, 2041, 2042, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2066, 2067, 2068, 2069, 2070, 2071, 2072, 2073, 2074, 2075, 2076, 2077, 2079, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2089, 2090, 2091, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2108, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116, 2118], "what": [0, 1, 3, 5, 7, 8, 9, 17, 19, 28, 30, 35, 40, 44, 45, 52, 55, 57, 59, 60, 64, 66, 895, 908, 909, 1170, 1186, 1187, 1196, 1197, 1288, 1289, 1362, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1520, 1521, 1555, 1578, 1657, 1658, 1659, 1684, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1810, 1827, 2011, 2012, 2013, 2015, 2016, 2020, 2024, 2033, 2045, 2048, 2049, 2051, 2052, 2055, 2057, 2065, 2067, 2069, 2070, 2075, 2076, 2080, 2086, 2096, 2097, 2098, 2099, 2101, 2103, 2104, 2111, 2113], "observ": [0, 24, 30, 37, 47, 488, 758, 791, 792, 794, 795, 796, 797, 798, 799, 801, 802, 803, 804, 810, 811, 813, 815, 817, 819, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 844, 862, 864, 865, 867, 930, 993, 997, 1203, 1286, 1374, 1438, 1439, 1440, 1441, 1442, 1459, 1461, 1485, 1488, 1489, 1490, 1491, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1566, 1575, 1604, 1605, 1615, 1644, 1668, 1676, 2041, 2042, 2045, 2054, 2057, 2069, 2071, 2091, 2095, 2096, 2102, 2109, 2111, 2113], "file": [0, 1, 3, 6, 7, 9, 11, 14, 18, 20, 23, 30, 37, 40, 45, 47, 50, 52, 53, 58, 60, 64, 911, 917, 918, 932, 934, 1009, 1046, 1160, 1169, 1170, 1172, 1173, 1272, 1280, 1283, 1344, 1778, 1858, 1902, 2011, 2013, 2016, 2017, 2018, 2024, 2027, 2033, 2034, 2035, 2042, 2045, 2048, 2054, 2057, 2059, 2061, 2063, 2065, 2069, 2073, 2076, 2080, 2082, 2085, 2093, 2097, 2098, 2099, 2102, 2104, 2105, 2108, 2109, 2111, 2113, 2114, 2115], "subregion": 0, "nest": [0, 1, 5, 12, 14, 30, 45, 52, 55, 64, 66, 69, 74, 75, 591, 795, 817, 819, 898, 913, 989, 1069, 1070, 1167, 1177, 1272, 1275, 1288, 1526, 1532, 1573, 1574, 1723, 1724, 1779, 1976, 2012, 2049, 2051, 2063, 2065, 2069, 2075, 2086, 2109], "local": [0, 5, 28, 30, 32, 33, 34, 37, 45, 47, 48, 50, 51, 55, 64, 918, 919, 920, 1050, 1112, 1166, 1272, 1345, 1464, 1465, 1466, 1470, 1472, 1514, 1526, 1578, 1626, 1650, 1702, 1716, 1717, 1769, 1902, 2011, 2012, 2015, 2032, 2041, 2045, 2047, 2050, 2060, 2065, 2068, 2072, 2075, 2076, 2077, 2085, 2099, 2100, 2102, 2111, 2113], "want": [0, 1, 7, 8, 9, 14, 23, 28, 33, 34, 35, 44, 52, 55, 57, 58, 59, 60, 63, 64, 66, 450, 488, 498, 501, 783, 793, 840, 977, 1046, 1165, 1166, 1186, 1187, 1285, 1288, 1289, 1374, 1473, 1474, 1579, 1627, 1628, 1643, 1703, 1716, 1717, 1766, 1769, 1783, 1784, 1796, 1797, 1858, 1967, 2011, 2020, 2023, 2041, 2042, 2044, 2045, 2048, 2049, 2051, 2052, 2055, 2065, 2067, 2068, 2070, 2080, 2085, 2086, 2090, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2107, 2110], 
"forc": [0, 1, 14, 18, 20, 55, 460, 883, 975, 1046, 1181, 1275, 1287, 1291, 1876, 1912, 2011, 2013, 2016, 2020, 2041, 2045, 2085, 2089, 2096, 2099, 2100], "particular": [0, 1, 3, 7, 23, 30, 37, 44, 47, 48, 52, 53, 64, 66, 71, 75, 90, 488, 562, 682, 864, 865, 1082, 1177, 1180, 1272, 1367, 1462, 1526, 1976, 2013, 2015, 2023, 2028, 2035, 2045, 2048, 2050, 2051, 2054, 2055, 2058, 2063, 2065, 2067, 2080, 2082, 2089, 2096, 2099, 2101, 2102, 2103, 2111, 2112, 2113], "give": [0, 3, 4, 7, 9, 12, 18, 23, 24, 30, 32, 46, 52, 55, 64, 922, 923, 997, 1126, 1128, 1129, 1131, 1135, 1144, 1169, 1170, 1171, 1187, 1273, 1286, 1287, 1302, 1309, 1453, 1454, 1455, 1531, 1578, 1579, 1730, 1770, 1783, 1784, 1796, 1923, 1964, 2011, 2013, 2032, 2034, 2040, 2042, 2045, 2048, 2049, 2051, 2052, 2053, 2055, 2059, 2065, 2067, 2080, 2096, 2098, 2099, 2101, 2108, 2111, 2112], "explicit": [0, 8, 12, 28, 52, 55, 59, 64, 1042, 1186, 1226, 1259, 1644, 1866, 1944, 2011, 2012, 2016, 2017, 2033, 2041, 2045, 2048, 2051, 2068, 2083, 2084, 2106], "control": [0, 1, 2, 14, 19, 23, 24, 28, 29, 30, 31, 35, 37, 43, 46, 47, 53, 66, 72, 75, 757, 801, 861, 864, 879, 883, 898, 944, 953, 989, 1095, 1096, 1097, 1098, 1099, 1181, 1230, 1231, 1284, 1285, 1286, 1288, 1303, 1309, 1311, 1314, 1316, 1319, 1321, 1325, 1331, 1334, 1336, 1342, 1362, 1430, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1512, 1520, 1521, 1578, 1684, 1716, 1814, 1870, 1871, 1884, 1899, 1902, 1907, 1923, 1927, 1946, 1952, 1953, 1954, 1955, 2012, 2013, 2022, 2042, 2044, 2045, 2047, 2055, 2057, 2058, 2060, 2062, 2063, 2065, 2067, 2069, 2070, 2077, 2080, 2096, 2097, 2098, 2099, 2102, 2114, 2115], "execut": [0, 1, 2, 3, 4, 5, 7, 14, 15, 19, 23, 28, 30, 32, 37, 39, 41, 48, 52, 53, 55, 60, 63, 64, 83, 489, 490, 903, 927, 928, 975, 977, 982, 1013, 1024, 1086, 1166, 1185, 1272, 1273, 1275, 1288, 1289, 1344, 1388, 1389, 1462, 1526, 1532, 1570, 1571, 1572, 1573, 1574, 1709, 1716, 1872, 1982, 2012, 2013, 2015, 2017, 2026, 2028, 2041, 2043, 2044, 2050, 2051, 2054, 2055, 2057, 2059, 2061, 2063, 2065, 2069, 2070, 2075, 2076, 2080, 2093, 2098, 2099, 2100, 2101, 2103, 2104, 2105, 2107, 2110, 2111, 2114], "surround": [0, 64, 792, 2015, 2041, 2045], "ensur": [0, 1, 4, 6, 7, 9, 19, 23, 28, 29, 30, 32, 33, 34, 37, 47, 48, 51, 52, 55, 63, 64, 90, 483, 488, 737, 821, 903, 904, 906, 908, 985, 1166, 1185, 1196, 1272, 1434, 1439, 1519, 1526, 1529, 1570, 1632, 1657, 1658, 1659, 1684, 1716, 1723, 1724, 1873, 2011, 2013, 2015, 2023, 2027, 2032, 2041, 2042, 2045, 2048, 2049, 2052, 2057, 2059, 2060, 2068, 2070, 2071, 2075, 2076, 2082, 2096, 2101, 2102, 2111], "necessari": [0, 1, 3, 9, 14, 23, 28, 30, 33, 34, 37, 47, 48, 52, 55, 62, 63, 90, 193, 210, 488, 562, 591, 903, 907, 908, 909, 1166, 1187, 1247, 1284, 1532, 1758, 1759, 2013, 2016, 2020, 2034, 2040, 2042, 2043, 2045, 2047, 2053, 2055, 2058, 2061, 2068, 2070, 2075, 2076, 2077, 2083, 2086, 2093, 2099, 2102, 2111, 2115], "becaus": [0, 1, 3, 4, 5, 7, 8, 14, 19, 23, 24, 28, 30, 32, 33, 35, 40, 52, 53, 55, 59, 60, 63, 64, 66, 74, 75, 488, 498, 913, 922, 923, 977, 1129, 1130, 1131, 1139, 1140, 1141, 1167, 1171, 1176, 1177, 1186, 1187, 1193, 1197, 1203, 1269, 1273, 1276, 1280, 1319, 1320, 1344, 1362, 1440, 1441, 1442, 1462, 1566, 1706, 1716, 1717, 1800, 1842, 1867, 1870, 1923, 1976, 2012, 2013, 2016, 2018, 2024, 2032, 2033, 2034, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2058, 2059, 2063, 2065, 2068, 2070, 2071, 2075, 2077, 2080, 2085, 2087, 2091, 2093, 2096, 2098, 2099, 2101, 2102, 2103, 2104, 2105, 2109, 2111, 2112, 
2113], "wa": [0, 1, 3, 7, 18, 24, 28, 33, 45, 46, 47, 48, 52, 53, 55, 63, 64, 337, 488, 498, 682, 787, 794, 843, 857, 929, 965, 976, 1010, 1011, 1034, 1038, 1053, 1066, 1086, 1128, 1177, 1185, 1269, 1279, 1288, 1303, 1314, 1316, 1320, 1344, 1362, 1385, 1412, 1496, 1522, 1523, 1524, 1556, 1579, 1597, 1632, 1687, 1703, 1716, 1717, 1760, 1802, 1808, 1921, 1922, 1960, 1961, 1971, 1972, 1976, 1981, 2011, 2013, 2015, 2016, 2017, 2020, 2022, 2023, 2029, 2032, 2041, 2042, 2043, 2045, 2048, 2049, 2050, 2051, 2054, 2056, 2063, 2065, 2067, 2070, 2074, 2075, 2080, 2082, 2087, 2093, 2098, 2099, 2101, 2102, 2109, 2110, 2111, 2113], "f_float32": 0, "re": [0, 1, 4, 5, 7, 14, 23, 28, 32, 34, 36, 44, 47, 52, 55, 56, 57, 58, 59, 60, 62, 63, 64, 490, 955, 977, 1008, 1042, 1159, 1209, 1272, 1276, 1526, 1690, 1717, 1760, 1839, 1853, 1906, 2013, 2016, 2020, 2023, 2032, 2041, 2042, 2045, 2046, 2048, 2049, 2052, 2057, 2070, 2076, 2077, 2081, 2096, 2098, 2100, 2101, 2102, 2110, 2111, 2112], "again": [0, 18, 23, 28, 32, 37, 52, 58, 1187, 1412, 2042, 2048, 2049, 2055, 2096, 2099, 2101, 2105], "regardless": [0, 3, 23, 28, 46, 52, 55, 991, 1191, 1272, 1276, 1282, 1526, 1709, 1960, 2041, 2045, 2060, 2075, 2087, 2099, 2102], "g_float16": 0, "state": [0, 1, 2, 5, 9, 23, 28, 30, 32, 33, 35, 37, 40, 47, 51, 52, 53, 55, 62, 64, 90, 417, 682, 750, 762, 838, 985, 1039, 1040, 1044, 1049, 1053, 1063, 1079, 1080, 1166, 1175, 1185, 1225, 1272, 1275, 1288, 1345, 1386, 1393, 1407, 1477, 1478, 1496, 1497, 1526, 1542, 1544, 1574, 1590, 1706, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1717, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1789, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1833, 1875, 1902, 1967, 1993, 1994, 1995, 1998, 2004, 2005, 2012, 2020, 2026, 2041, 2042, 2045, 2047, 2048, 2053, 2057, 2060, 2063, 2065, 2067, 2074, 2090, 2096, 2098, 2099, 2100, 2101, 2102, 2107], "thread": [0, 1, 3, 23, 28, 30, 37, 50, 55, 63, 918, 919, 920, 1008, 1010, 1042, 1112, 1223, 1224, 1385, 1462, 1769, 1872, 1873, 1966, 1981, 2012, 2013, 2025, 2029, 2041, 2045, 2051, 2054, 2057, 2075, 2077, 2082, 2089, 2100, 2102, 2114, 2115], "must": [0, 3, 5, 6, 9, 12, 14, 23, 28, 30, 32, 33, 34, 35, 36, 37, 45, 48, 50, 52, 53, 55, 58, 60, 63, 64, 66, 74, 75, 90, 99, 141, 155, 156, 198, 262, 315, 317, 323, 400, 402, 404, 488, 515, 546, 547, 568, 585, 586, 587, 589, 590, 619, 688, 689, 690, 691, 692, 693, 699, 750, 759, 774, 775, 776, 795, 817, 819, 862, 863, 881, 887, 892, 893, 894, 895, 898, 903, 907, 908, 909, 912, 914, 915, 916, 922, 923, 943, 945, 947, 948, 949, 950, 951, 952, 955, 959, 962, 977, 986, 989, 997, 1010, 1020, 1021, 1022, 1024, 1050, 1051, 1053, 1096, 1098, 1099, 1100, 1102, 1105, 1106, 1108, 1129, 1130, 1131, 1136, 1138, 1139, 1140, 1141, 1142, 1149, 1150, 1159, 1160, 1162, 1165, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1175, 1176, 1177, 1186, 1191, 1196, 1197, 1198, 1213, 1214, 1226, 1235, 1236, 1239, 1249, 1269, 1272, 1283, 1288, 1295, 1298, 1327, 1328, 1329, 1330, 1336, 1338, 1339, 1342, 1343, 1345, 1359, 1363, 1365, 1366, 1367, 1372, 1373, 1380, 1412, 1418, 1421, 1422, 1426, 1439, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1468, 1469, 1472, 1473, 1474, 1479, 1480, 1486, 1519, 1526, 1529, 1530, 1532, 1533, 1558, 1564, 1579, 1605, 1614, 1616, 1624, 1643, 1657, 1658, 1659, 1677, 1684, 1716, 1725, 1727, 1747, 1757, 1758, 1764, 1766, 1771, 1786, 1806, 1808, 1815, 1816, 1819, 1820, 1823, 1831, 1832, 1853, 1862, 1865, 1873, 1889, 1900, 1904, 1905, 1907, 
1909, 1910, 1911, 1912, 1913, 1914, 1923, 1928, 1939, 1943, 1944, 1948, 1953, 1955, 1959, 1962, 1964, 1967, 1973, 1974, 1976, 1977, 1979, 1981, 2013, 2014, 2015, 2016, 2020, 2023, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2043, 2045, 2046, 2047, 2048, 2049, 2051, 2053, 2057, 2060, 2061, 2063, 2065, 2067, 2068, 2070, 2074, 2075, 2076, 2077, 2080, 2081, 2082, 2085, 2087, 2096, 2097, 2098, 2102, 2103, 2110, 2111, 2112, 2115], "invok": [0, 2, 8, 15, 18, 19, 23, 28, 45, 48, 63, 64, 911, 913, 1050, 1051, 1272, 1275, 1282, 1462, 1526, 1708, 1710, 1713, 1714, 1768, 1802, 1808, 2013, 2016, 2017, 2026, 2041, 2044, 2045, 2047, 2048, 2049, 2054, 2055, 2068, 2075, 2076, 2093, 2096, 2101, 2102, 2103], "affect": [0, 1, 2, 7, 9, 18, 19, 22, 33, 36, 52, 55, 223, 224, 787, 918, 919, 920, 1011, 1013, 1112, 1159, 1160, 1188, 1196, 1272, 1388, 1389, 1496, 1526, 1542, 1579, 1703, 1717, 1723, 1724, 1769, 1866, 1869, 1870, 2041, 2042, 2045, 2051, 2052, 2055, 2058, 2059, 2082, 2107, 2109], "dataparallel": [0, 28, 1699, 1716, 1760, 2012, 2042, 2050, 2057, 2075], "parallel": [0, 13, 14, 23, 24, 28, 29, 30, 32, 47, 48, 55, 1223, 1224, 1275, 1462, 1532, 1566, 1632, 1684, 1716, 1872, 1873, 2012, 2016, 2041, 2042, 2044, 2057, 2061, 2075, 2077, 2088, 2095, 2102, 2114], "distributeddataparallel": [0, 23, 24, 28, 29, 30, 32, 48, 55, 682, 1462, 1566, 2057, 2075, 2102], "than": [0, 3, 4, 5, 6, 8, 9, 11, 14, 17, 19, 23, 24, 28, 29, 30, 35, 36, 37, 40, 44, 47, 48, 50, 52, 55, 60, 64, 66, 67, 75, 152, 256, 488, 501, 547, 619, 682, 781, 787, 896, 912, 913, 930, 935, 946, 957, 964, 965, 969, 970, 983, 1022, 1060, 1064, 1078, 1096, 1097, 1148, 1149, 1150, 1152, 1156, 1162, 1173, 1200, 1215, 1229, 1233, 1247, 1257, 1269, 1270, 1273, 1284, 1293, 1294, 1297, 1302, 1303, 1309, 1313, 1318, 1326, 1327, 1329, 1330, 1333, 1338, 1344, 1348, 1361, 1364, 1370, 1373, 1375, 1378, 1392, 1412, 1422, 1438, 1439, 1453, 1454, 1455, 1462, 1468, 1469, 1486, 1496, 1518, 1532, 1540, 1549, 1558, 1564, 1574, 1575, 1576, 1597, 1607, 1608, 1609, 1623, 1624, 1632, 1643, 1670, 1684, 1700, 1702, 1703, 1706, 1716, 1723, 1724, 1730, 1731, 1733, 1734, 1760, 1765, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1808, 1810, 1824, 1825, 1846, 1847, 1870, 1874, 1908, 1909, 1910, 1911, 1913, 1914, 1945, 1953, 1955, 1964, 2011, 2013, 2015, 2016, 2018, 2024, 2034, 2035, 2036, 2040, 2042, 2043, 2045, 2048, 2049, 2050, 2051, 2052, 2055, 2057, 2058, 2059, 2060, 2065, 2067, 2068, 2070, 2071, 2075, 2080, 2081, 2083, 2085, 2086, 2087, 2096, 2097, 2099, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2109, 2111, 2113, 2115], "one": [0, 1, 2, 3, 4, 5, 6, 7, 8, 11, 12, 14, 15, 17, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 44, 47, 50, 51, 52, 53, 55, 59, 60, 61, 63, 64, 66, 71, 74, 75, 84, 85, 86, 87, 89, 152, 223, 256, 354, 404, 488, 515, 522, 547, 562, 699, 750, 759, 795, 857, 858, 859, 883, 889, 895, 896, 898, 901, 903, 904, 908, 909, 913, 918, 919, 931, 946, 957, 959, 969, 973, 977, 989, 1006, 1011, 1013, 1018, 1020, 1024, 1051, 1075, 1086, 1097, 1108, 1112, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1138, 1139, 1140, 1141, 1142, 1144, 1147, 1151, 1154, 1155, 1156, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1176, 1177, 1187, 1226, 1235, 1236, 1251, 1254, 1257, 1276, 1282, 1286, 1292, 1293, 1303, 1304, 1314, 1317, 1318, 1320, 1325, 1329, 1331, 1333, 1336, 1342, 1343, 1359, 1367, 1371, 1376, 1412, 1418, 1430, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1445, 1453, 1456, 1457, 1458, 1469, 1473, 1474, 1479, 1488, 1489, 1490, 
1492, 1493, 1494, 1495, 1505, 1506, 1507, 1517, 1526, 1530, 1532, 1566, 1574, 1579, 1597, 1607, 1610, 1611, 1612, 1627, 1628, 1629, 1634, 1670, 1684, 1702, 1706, 1716, 1717, 1731, 1732, 1733, 1737, 1747, 1758, 1768, 1769, 1770, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1804, 1806, 1807, 1808, 1828, 1829, 1846, 1862, 1870, 1874, 1877, 1918, 1927, 1943, 1948, 1949, 1951, 1959, 1960, 1962, 1963, 1964, 1967, 1976, 1982, 2001, 2012, 2013, 2016, 2020, 2022, 2026, 2029, 2032, 2033, 2034, 2035, 2042, 2043, 2044, 2045, 2046, 2047, 2048, 2049, 2051, 2052, 2054, 2055, 2057, 2059, 2060, 2061, 2065, 2067, 2068, 2069, 2070, 2071, 2075, 2077, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2104, 2105, 2108, 2110, 2111, 2113], "gpu": [0, 1, 2, 3, 4, 7, 14, 17, 19, 20, 21, 23, 24, 30, 37, 48, 50, 55, 63, 198, 211, 291, 334, 605, 762, 975, 1015, 1016, 1019, 1020, 1021, 1022, 1023, 1024, 1030, 1031, 1032, 1039, 1045, 1046, 1052, 1054, 1055, 1056, 1058, 1059, 1060, 1062, 1063, 1064, 1065, 1071, 1072, 1073, 1074, 1075, 1076, 1079, 1085, 1086, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1156, 1272, 1282, 1319, 1344, 1373, 1381, 1383, 1384, 1388, 1389, 1430, 1462, 1477, 1496, 1526, 1542, 1543, 1566, 1699, 1716, 1757, 1927, 1993, 1996, 1999, 2000, 2001, 2002, 2004, 2012, 2013, 2026, 2030, 2045, 2048, 2051, 2053, 2055, 2056, 2058, 2059, 2061, 2070, 2075, 2080, 2082, 2083, 2086, 2089, 2092, 2093, 2096, 2100, 2101, 2104, 2108, 2109, 2111, 2113, 2118], "per": [0, 11, 14, 19, 20, 23, 24, 28, 29, 30, 32, 33, 34, 36, 37, 40, 46, 48, 50, 55, 56, 61, 474, 475, 476, 682, 736, 737, 803, 810, 821, 824, 827, 835, 848, 856, 895, 908, 909, 911, 913, 931, 975, 1014, 1091, 1122, 1167, 1177, 1345, 1399, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1459, 1461, 1462, 1469, 1480, 1485, 1488, 1489, 1490, 1491, 1492, 1498, 1517, 1518, 1526, 1529, 1530, 1531, 1532, 1533, 1540, 1541, 1558, 1559, 1566, 1570, 1571, 1572, 1573, 1574, 1575, 1604, 1605, 1615, 1644, 1668, 1676, 1715, 1716, 1732, 1768, 1772, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1802, 1808, 1816, 1828, 1874, 1876, 1928, 1945, 1949, 1967, 1976, 1983, 2029, 2034, 2036, 2044, 2045, 2047, 2049, 2051, 2054, 2070, 2071, 2073, 2076, 2080, 2085, 2098, 2099, 2102, 2111, 2113, 2115], "process": [0, 1, 3, 11, 14, 19, 20, 24, 28, 29, 30, 32, 33, 35, 37, 39, 40, 41, 47, 48, 49, 50, 51, 52, 55, 64, 81, 82, 83, 86, 88, 1010, 1046, 1052, 1078, 1160, 1185, 1199, 1200, 1272, 1283, 1345, 1383, 1392, 1438, 1439, 1459, 1461, 1468, 1469, 1485, 1492, 1517, 1518, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1566, 1570, 1572, 1574, 1575, 1604, 1605, 1615, 1644, 1668, 1676, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1805, 1811, 1840, 1876, 1890, 1967, 2011, 2012, 2013, 2020, 2029, 2032, 2033, 2034, 2042, 2044, 2045, 2047, 2048, 2051, 2054, 2055, 2057, 2059, 2061, 2063, 2065, 2068, 2069, 2070, 2073, 2075, 2077, 2080, 2082, 2090, 2093, 2095, 2098, 2099, 2102, 2109, 2111, 2114, 2115], "work": [0, 1, 2, 3, 5, 7, 8, 9, 11, 14, 15, 17, 28, 29, 30, 32, 33, 35, 36, 37, 47, 48, 50, 51, 52, 55, 56, 60, 64, 66, 74, 75, 83, 121, 152, 354, 447, 448, 449, 450, 451, 488, 589, 590, 823, 864, 865, 896, 903, 917, 975, 977, 1008, 1010, 1011, 1013, 1042, 1053, 1054, 1075, 1109, 1111, 1128, 1148, 1149, 1150, 1160, 1166, 1191, 1198, 1272, 1276, 1284, 1285, 
1308, 1309, 1336, 1337, 1345, 1385, 1462, 1523, 1526, 1543, 1560, 1671, 1684, 1690, 1716, 1778, 1808, 1835, 1839, 1841, 1872, 1875, 1934, 1939, 1942, 1967, 1969, 1981, 1982, 1999, 2001, 2011, 2012, 2013, 2016, 2017, 2020, 2030, 2032, 2033, 2034, 2035, 2038, 2040, 2042, 2044, 2045, 2046, 2047, 2048, 2049, 2051, 2052, 2053, 2055, 2057, 2060, 2061, 2065, 2068, 2070, 2073, 2074, 2075, 2076, 2086, 2089, 2093, 2097, 2098, 2099, 2100, 2103, 2104, 2107, 2108, 2111, 2112, 2113], "hpu": [0, 2082], "option": [0, 1, 2, 3, 5, 14, 19, 23, 28, 30, 32, 34, 35, 37, 38, 41, 45, 46, 47, 50, 51, 52, 55, 64, 66, 73, 83, 85, 90, 152, 157, 172, 174, 177, 180, 181, 182, 197, 208, 211, 242, 269, 299, 327, 333, 395, 447, 448, 449, 450, 451, 489, 501, 502, 515, 522, 527, 539, 562, 582, 583, 585, 586, 587, 589, 590, 682, 683, 685, 686, 687, 688, 689, 690, 691, 692, 693, 695, 696, 697, 698, 699, 700, 701, 737, 760, 781, 782, 787, 794, 796, 801, 820, 857, 862, 865, 868, 879, 881, 882, 883, 884, 885, 886, 887, 888, 895, 896, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 926, 927, 928, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 955, 959, 962, 964, 965, 966, 967, 968, 970, 972, 973, 974, 975, 981, 984, 991, 992, 994, 995, 996, 997, 1001, 1006, 1007, 1008, 1010, 1011, 1013, 1015, 1019, 1020, 1022, 1023, 1024, 1027, 1028, 1035, 1036, 1039, 1042, 1050, 1051, 1052, 1053, 1056, 1058, 1059, 1060, 1062, 1064, 1065, 1066, 1071, 1072, 1073, 1074, 1078, 1079, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1092, 1095, 1096, 1097, 1098, 1099, 1100, 1102, 1103, 1105, 1107, 1108, 1109, 1110, 1111, 1113, 1118, 1121, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1151, 1152, 1153, 1154, 1155, 1156, 1158, 1160, 1162, 1163, 1164, 1165, 1166, 1169, 1170, 1171, 1185, 1187, 1213, 1214, 1215, 1216, 1226, 1229, 1230, 1231, 1232, 1233, 1234, 1237, 1238, 1239, 1247, 1249, 1261, 1263, 1266, 1267, 1269, 1270, 1272, 1273, 1276, 1279, 1286, 1288, 1289, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1354, 1355, 1356, 1357, 1359, 1360, 1361, 1362, 1363, 1364, 1366, 1367, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1385, 1386, 1388, 1389, 1393, 1395, 1401, 1402, 1411, 1412, 1414, 1416, 1417, 1418, 1419, 1420, 1422, 1423, 1424, 1426, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1444, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1472, 1473, 1474, 1475, 1479, 1482, 1483, 1484, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1512, 1517, 1518, 1522, 1523, 1524, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1535, 1536, 1537, 1540, 1541, 1545, 1546, 1547, 1554, 1558, 1559, 1566, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1578, 1579, 1580, 1581, 1597, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1616, 1623, 1624, 1627, 1628, 1629, 1632, 1634, 1643, 1644, 1651, 1657, 1658, 1659, 1668, 1669, 1671, 1676, 1684, 1690, 1691, 1703, 1715, 1716, 1717, 1725, 1726, 1728, 1730, 1731, 1732, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1755, 1756, 1757, 1758, 1759, 1760, 1761, 
1763, 1765, 1766, 1768, 1770, 1771, 1772, 1775, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1801, 1814, 1815, 1816, 1819, 1823, 1824, 1826, 1827, 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1845, 1846, 1847, 1848, 1855, 1857, 1862, 1874, 1877, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1894, 1895, 1899, 1900, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1916, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1926, 1927, 1928, 1939, 1940, 1941, 1942, 1943, 1946, 1948, 1951, 1952, 1953, 1954, 1955, 1957, 1960, 1961, 1964, 1970, 1971, 1972, 1973, 1978, 1979, 1981, 1982, 1985, 1990, 1991, 1993, 2004, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2016, 2018, 2020, 2022, 2026, 2027, 2029, 2035, 2040, 2045, 2048, 2049, 2050, 2051, 2054, 2055, 2059, 2060, 2063, 2065, 2068, 2069, 2070, 2075, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2091, 2092, 2093, 2102, 2108, 2110, 2111, 2113], "whether": [0, 1, 2, 5, 7, 14, 19, 20, 23, 24, 28, 29, 30, 33, 34, 35, 37, 46, 47, 55, 62, 64, 89, 321, 323, 473, 519, 619, 682, 695, 697, 698, 701, 801, 826, 877, 878, 883, 892, 903, 904, 907, 908, 909, 911, 913, 918, 919, 920, 922, 923, 929, 944, 953, 966, 967, 968, 974, 976, 978, 981, 982, 983, 1049, 1065, 1160, 1165, 1177, 1180, 1183, 1230, 1231, 1269, 1272, 1280, 1281, 1289, 1294, 1302, 1303, 1309, 1311, 1314, 1315, 1316, 1317, 1319, 1320, 1321, 1322, 1327, 1329, 1330, 1333, 1334, 1335, 1336, 1344, 1360, 1362, 1370, 1372, 1373, 1375, 1378, 1407, 1412, 1417, 1418, 1419, 1420, 1445, 1485, 1491, 1526, 1535, 1540, 1576, 1594, 1595, 1596, 1616, 1644, 1676, 1709, 1716, 1730, 1733, 1736, 1749, 1763, 1766, 1771, 1777, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1802, 1814, 1824, 1827, 1871, 1921, 1922, 1923, 1926, 1927, 1946, 1951, 1960, 1961, 1964, 1965, 1971, 1972, 1976, 1998, 2011, 2016, 2022, 2023, 2027, 2035, 2041, 2045, 2048, 2051, 2053, 2063, 2065, 2070, 2075, 2080, 2082, 2084, 2085, 2091, 2099, 2100, 2102, 2103, 2109, 2111, 2115], "torch_dtyp": 0, "It": [0, 1, 2, 3, 4, 5, 7, 8, 12, 17, 18, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 44, 45, 46, 47, 48, 50, 52, 53, 55, 57, 59, 60, 61, 63, 64, 66, 86, 152, 198, 417, 475, 476, 515, 517, 519, 546, 559, 892, 893, 894, 895, 896, 903, 904, 908, 909, 919, 920, 929, 938, 975, 981, 983, 985, 989, 990, 991, 1024, 1029, 1035, 1036, 1054, 1055, 1075, 1076, 1084, 1131, 1161, 1166, 1169, 1177, 1181, 1187, 1189, 1196, 1200, 1213, 1234, 1269, 1272, 1273, 1278, 1279, 1283, 1305, 1309, 1313, 1316, 1318, 1320, 1321, 1326, 1330, 1331, 1333, 1334, 1336, 1338, 1341, 1342, 1344, 1345, 1353, 1403, 1430, 1439, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1462, 1472, 1520, 1521, 1522, 1523, 1524, 1526, 1533, 1555, 1558, 1578, 1580, 1634, 1644, 1690, 1708, 1709, 1710, 1713, 1714, 1716, 1731, 1733, 1736, 1757, 1760, 1769, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1814, 1833, 1846, 1862, 1877, 1884, 1906, 1959, 1976, 1986, 1990, 1991, 1999, 2000, 2001, 2002, 2008, 2011, 2015, 2016, 2020, 2028, 2029, 2032, 2034, 2042, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2061, 2062, 2063, 2065, 2067, 2068, 2070, 2075, 2076, 2077, 2079, 2080, 2081, 2082, 2084, 2089, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2110, 
2111, 2113, 2115], "given": [0, 1, 2, 3, 7, 9, 14, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 45, 47, 50, 52, 55, 61, 63, 64, 152, 221, 315, 317, 319, 323, 328, 379, 400, 404, 474, 475, 476, 477, 478, 480, 515, 517, 519, 586, 587, 606, 609, 682, 695, 697, 698, 700, 701, 737, 762, 781, 787, 794, 795, 796, 797, 801, 802, 804, 822, 826, 839, 842, 857, 861, 868, 879, 892, 894, 896, 901, 903, 904, 906, 908, 909, 911, 913, 914, 915, 916, 923, 929, 931, 941, 942, 945, 946, 949, 957, 961, 962, 967, 968, 969, 974, 975, 977, 991, 993, 996, 997, 999, 1001, 1005, 1007, 1010, 1011, 1013, 1014, 1015, 1019, 1027, 1028, 1031, 1035, 1036, 1052, 1056, 1058, 1059, 1060, 1062, 1064, 1065, 1066, 1068, 1071, 1072, 1073, 1074, 1083, 1084, 1085, 1086, 1093, 1095, 1100, 1108, 1124, 1125, 1126, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1148, 1165, 1175, 1176, 1183, 1187, 1189, 1190, 1191, 1192, 1198, 1221, 1239, 1269, 1272, 1280, 1288, 1289, 1294, 1298, 1308, 1310, 1311, 1320, 1322, 1328, 1336, 1345, 1352, 1354, 1355, 1356, 1357, 1360, 1370, 1372, 1374, 1375, 1378, 1399, 1401, 1402, 1409, 1420, 1430, 1438, 1439, 1445, 1453, 1454, 1455, 1457, 1458, 1459, 1461, 1462, 1468, 1469, 1473, 1474, 1477, 1485, 1496, 1518, 1522, 1523, 1524, 1526, 1528, 1530, 1531, 1533, 1535, 1537, 1542, 1555, 1561, 1575, 1576, 1579, 1580, 1581, 1597, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1616, 1623, 1624, 1627, 1628, 1632, 1643, 1668, 1684, 1699, 1700, 1703, 1704, 1705, 1709, 1710, 1731, 1732, 1757, 1764, 1765, 1768, 1771, 1772, 1778, 1804, 1807, 1809, 1811, 1814, 1819, 1824, 1827, 1828, 1829, 1842, 1848, 1853, 1864, 1865, 1870, 1895, 1899, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1918, 1920, 1923, 1926, 1928, 1938, 1939, 1944, 1946, 1948, 1958, 1960, 1962, 1964, 1968, 1981, 1982, 1983, 1985, 1988, 1990, 1991, 2007, 2008, 2011, 2012, 2013, 2016, 2020, 2022, 2023, 2027, 2032, 2035, 2036, 2040, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2054, 2055, 2059, 2060, 2063, 2065, 2067, 2069, 2070, 2075, 2076, 2079, 2080, 2081, 2082, 2085, 2087, 2091, 2095, 2099, 2101, 2102, 2110, 2111, 2112], "get_autocast_dtyp": [0, 2014, 2066], "weight": [0, 23, 28, 30, 33, 34, 35, 52, 53, 61, 64, 66, 158, 303, 363, 364, 498, 714, 715, 716, 717, 718, 719, 720, 721, 722, 732, 733, 734, 735, 736, 737, 740, 741, 742, 743, 744, 745, 747, 748, 751, 753, 754, 755, 756, 758, 762, 763, 765, 766, 767, 774, 775, 776, 783, 794, 795, 796, 810, 811, 835, 837, 844, 848, 852, 853, 854, 855, 856, 861, 862, 864, 865, 946, 997, 1165, 1167, 1175, 1177, 1234, 1235, 1272, 1276, 1282, 1284, 1289, 1298, 1412, 1438, 1439, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1468, 1469, 1477, 1478, 1480, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1526, 1530, 1531, 1532, 1533, 1534, 1541, 1542, 1544, 1556, 1570, 1602, 1603, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1623, 1624, 1633, 1642, 1646, 1649, 1665, 1667, 1668, 1677, 1681, 1684, 1687, 1706, 1715, 1716, 1723, 1724, 1725, 1726, 1728, 1730, 1731, 1732, 1734, 1736, 1747, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1764, 1765, 1766, 1768, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1830, 1964, 1976, 2011, 2012, 2014, 2015, 2016, 2020, 2026, 2027, 2034, 2035, 2040, 2048, 2050, 2051, 2055, 2060, 2063, 2070, 2071, 2072, 2073, 2080, 2085, 2090, 2091, 2102, 2106], "cach": [0, 2, 3, 20, 35, 64, 488, 975, 985, 1032, 1033, 1050, 1053, 1058, 1060, 1062, 1064, 
1073, 1078, 1188, 1189, 1381, 1383, 1384, 1717, 1736, 1768, 1989, 2032, 2050, 2096, 2099, 2100, 2102, 2105, 2109, 2113], "insid": [0, 1, 7, 23, 47, 53, 55, 60, 63, 64, 908, 909, 977, 1016, 1053, 1167, 1171, 1176, 1187, 1284, 1716, 2013, 2015, 2016, 2020, 2026, 2028, 2041, 2045, 2048, 2049, 2054, 2065, 2080, 2090, 2099, 2101, 2111], "custom_fwd": [0, 2041], "fwd": [0, 2105, 2109], "cast_input": [0, 2041], "helper": [0, 3, 28, 33, 38, 59, 64, 2011, 2012, 2015, 2045, 2047, 2065, 2068, 2075, 2110, 2112], "subclass": [0, 1, 14, 23, 30, 33, 35, 40, 60, 64, 141, 539, 892, 893, 894, 908, 909, 1192, 1270, 1273, 1283, 1526, 1717, 1738, 1743, 1764, 2013, 2016, 2017, 2023, 2038, 2041, 2055, 2065, 2075, 2079, 2080, 2099, 2100, 2102, 2112], "page": [0, 6, 7, 9, 23, 29, 48, 51, 977, 1345, 1570, 1572, 1574, 2012, 2020, 2045, 2047, 2048, 2055, 2066, 2075, 2108], "incom": [0, 28, 50, 783, 821, 822, 823, 824, 827, 1443, 1513, 1603, 1649, 2032, 2042], "non": [0, 1, 2, 3, 5, 14, 20, 22, 24, 28, 29, 30, 33, 35, 37, 40, 47, 50, 51, 53, 55, 63, 66, 77, 80, 86, 90, 152, 337, 488, 490, 506, 515, 519, 547, 700, 736, 741, 742, 743, 744, 745, 747, 748, 758, 762, 766, 767, 774, 775, 776, 840, 868, 880, 892, 894, 896, 903, 905, 907, 908, 909, 922, 923, 938, 946, 959, 962, 975, 990, 991, 996, 1064, 1154, 1155, 1162, 1166, 1171, 1177, 1191, 1194, 1201, 1249, 1272, 1275, 1285, 1288, 1289, 1308, 1309, 1316, 1320, 1321, 1334, 1336, 1345, 1355, 1362, 1367, 1372, 1412, 1417, 1418, 1422, 1435, 1436, 1437, 1454, 1455, 1457, 1458, 1461, 1471, 1477, 1494, 1495, 1496, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1529, 1531, 1533, 1542, 1544, 1558, 1615, 1634, 1663, 1668, 1684, 1716, 1770, 1797, 1819, 1862, 1863, 1877, 1882, 1889, 1904, 1905, 1909, 1910, 1911, 1912, 1913, 1914, 1944, 1960, 1962, 1976, 2011, 2012, 2013, 2015, 2016, 2018, 2020, 2032, 2035, 2040, 2043, 2048, 2049, 2050, 2051, 2055, 2059, 2065, 2067, 2069, 2074, 2075, 2077, 2080, 2081, 2083, 2084, 2085, 2087, 2093, 2096, 2098, 2099, 2101, 2102, 2103, 2109, 2110, 2112, 2115], "intern": [0, 3, 8, 9, 14, 19, 23, 24, 28, 30, 35, 37, 44, 52, 55, 60, 64, 903, 1008, 1042, 1063, 1166, 1185, 1272, 1309, 1311, 1327, 1330, 1353, 1440, 1441, 1442, 1479, 1566, 1607, 1608, 1609, 1632, 1733, 1777, 1870, 1912, 1964, 2017, 2041, 2042, 2044, 2045, 2051, 2052, 2058, 2059, 2063, 2076, 2077, 2082, 2084, 2092, 2099, 2102, 2111], "current": [0, 1, 2, 3, 5, 6, 7, 9, 11, 12, 14, 23, 28, 30, 32, 33, 34, 35, 40, 47, 48, 52, 53, 55, 56, 63, 64, 82, 90, 152, 211, 223, 499, 500, 501, 743, 745, 783, 795, 797, 819, 862, 864, 865, 868, 882, 883, 893, 896, 898, 899, 900, 908, 911, 912, 913, 922, 944, 953, 989, 1000, 1001, 1003, 1004, 1008, 1010, 1011, 1013, 1018, 1019, 1022, 1023, 1025, 1026, 1027, 1028, 1031, 1032, 1033, 1035, 1036, 1039, 1041, 1042, 1045, 1047, 1048, 1052, 1053, 1054, 1056, 1058, 1059, 1060, 1062, 1064, 1065, 1066, 1071, 1072, 1073, 1074, 1075, 1077, 1079, 1081, 1084, 1085, 1086, 1109, 1111, 1121, 1126, 1144, 1160, 1161, 1163, 1181, 1185, 1187, 1197, 1219, 1220, 1221, 1222, 1230, 1231, 1255, 1256, 1272, 1276, 1288, 1292, 1343, 1345, 1359, 1374, 1381, 1384, 1385, 1386, 1393, 1400, 1401, 1402, 1408, 1465, 1468, 1472, 1526, 1566, 1574, 1578, 1626, 1632, 1643, 1684, 1702, 1703, 1705, 1716, 1737, 1741, 1742, 1743, 1744, 1745, 1750, 1751, 1752, 1753, 1775, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1826, 1835, 1837, 1839, 1841, 1842, 1866, 1870, 1880, 
1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1902, 1909, 1910, 1911, 1912, 1913, 1914, 1923, 1942, 1953, 1955, 1960, 1966, 1967, 1981, 1982, 1984, 1985, 1988, 1989, 1990, 1991, 1993, 1996, 1997, 1999, 2001, 2003, 2004, 2006, 2008, 2009, 2012, 2013, 2015, 2016, 2017, 2018, 2020, 2023, 2028, 2029, 2032, 2035, 2041, 2042, 2045, 2051, 2053, 2054, 2055, 2056, 2057, 2060, 2061, 2065, 2067, 2068, 2070, 2073, 2075, 2076, 2080, 2082, 2083, 2085, 2086, 2090, 2099, 2101, 2102, 2107, 2108, 2109, 2111, 2112, 2113], "outsid": [0, 5, 9, 23, 40, 52, 53, 55, 60, 686, 797, 888, 1165, 1167, 1171, 1176, 1273, 1632, 1716, 1799, 1800, 1805, 1806, 1812, 2015, 2016, 2040, 2042, 2045, 2049, 2050, 2087, 2096, 2102, 2107], "ha": [0, 1, 5, 6, 7, 8, 9, 11, 12, 14, 15, 17, 19, 21, 23, 24, 28, 29, 30, 32, 34, 35, 36, 37, 40, 44, 46, 47, 48, 50, 51, 52, 53, 55, 56, 58, 59, 60, 61, 63, 64, 152, 198, 211, 257, 262, 337, 447, 448, 449, 450, 451, 475, 476, 488, 490, 498, 500, 546, 582, 583, 605, 606, 620, 682, 691, 695, 697, 698, 699, 701, 737, 762, 781, 783, 787, 789, 793, 799, 840, 842, 850, 861, 867, 877, 878, 880, 892, 895, 896, 901, 903, 904, 908, 909, 913, 922, 923, 931, 942, 945, 963, 966, 967, 968, 975, 980, 989, 990, 991, 992, 1007, 1010, 1011, 1013, 1018, 1046, 1049, 1050, 1096, 1098, 1112, 1144, 1156, 1165, 1166, 1169, 1170, 1173, 1177, 1209, 1226, 1235, 1236, 1247, 1269, 1272, 1276, 1280, 1283, 1284, 1285, 1288, 1293, 1294, 1302, 1303, 1304, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1322, 1324, 1326, 1327, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1339, 1340, 1344, 1353, 1360, 1362, 1367, 1370, 1372, 1373, 1374, 1375, 1377, 1378, 1385, 1407, 1417, 1418, 1419, 1420, 1438, 1439, 1440, 1441, 1442, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1462, 1463, 1469, 1472, 1473, 1474, 1477, 1480, 1485, 1488, 1489, 1490, 1496, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1530, 1531, 1532, 1533, 1534, 1541, 1542, 1558, 1560, 1566, 1574, 1578, 1579, 1597, 1607, 1608, 1609, 1615, 1624, 1627, 1628, 1632, 1643, 1668, 1670, 1671, 1673, 1677, 1684, 1690, 1703, 1705, 1706, 1709, 1710, 1715, 1716, 1730, 1733, 1735, 1738, 1742, 1744, 1747, 1757, 1758, 1765, 1766, 1770, 1772, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1800, 1801, 1802, 1804, 1808, 1810, 1814, 1824, 1826, 1827, 1828, 1829, 1846, 1848, 1858, 1870, 1891, 1905, 1906, 1907, 1912, 1918, 1920, 1921, 1922, 1923, 1926, 1927, 1939, 1945, 1951, 1962, 1971, 1972, 1976, 1981, 1982, 1998, 2013, 2015, 2016, 2020, 2022, 2023, 2029, 2032, 2033, 2034, 2035, 2041, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2053, 2054, 2055, 2057, 2058, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2074, 2075, 2076, 2077, 2080, 2082, 2083, 2085, 2086, 2087, 2089, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2107, 2111], "effect": [0, 3, 5, 7, 14, 23, 24, 28, 33, 35, 50, 52, 53, 55, 60, 64, 66, 69, 198, 211, 605, 737, 781, 787, 1042, 1091, 1177, 1272, 1315, 1316, 1317, 1318, 1374, 1430, 1445, 1456, 1457, 1458, 1463, 1464, 1465, 1466, 1470, 1526, 1532, 1579, 1643, 1671, 1703, 1709, 1716, 1717, 1743, 1794, 1802, 1902, 1907, 1927, 1949, 1965, 1976, 2011, 2013, 2020, 2026, 2040, 2041, 2042, 2045, 2048, 2051, 2070, 2073, 2082, 2083, 2085, 2096, 2099], "custom_bwd": [0, 2041], "bwd": [0, 2109], "small": [0, 3, 7, 9, 23, 24, 28, 35, 47, 868, 922, 923, 975, 1021, 1064, 1335, 1336, 1348, 1350, 1430, 1460, 1535, 1540, 1555, 1575, 1579, 1614, 
1669, 1676, 1716, 1799, 1805, 1814, 1927, 1928, 2013, 2015, 2016, 2024, 2045, 2048, 2050, 2055, 2058, 2060, 2065, 2068, 2070, 2071, 2080, 2081, 2087, 2096, 2098, 2099, 2101, 2102, 2103, 2105, 2109, 2111, 2113], "magnitud": [0, 992, 1732, 1768, 1877, 2040, 2041, 2045], "represent": [0, 3, 14, 23, 28, 30, 33, 45, 52, 53, 64, 83, 483, 760, 803, 828, 1011, 1124, 1125, 1127, 1151, 1216, 1272, 1312, 1315, 1317, 1320, 1416, 1526, 1532, 1814, 2012, 2013, 2016, 2024, 2035, 2048, 2058, 2063, 2065, 2068, 2069, 2070, 2080, 2087, 2098, 2102, 2111, 2116], "These": [0, 1, 2, 3, 8, 14, 15, 23, 28, 29, 35, 52, 56, 58, 61, 64, 65, 488, 796, 883, 997, 1108, 1173, 1269, 1272, 1315, 1380, 1430, 1526, 1706, 1731, 2012, 2013, 2014, 2015, 2016, 2024, 2029, 2033, 2034, 2036, 2041, 2042, 2045, 2047, 2048, 2055, 2058, 2060, 2065, 2068, 2073, 2075, 2076, 2083, 2089, 2093, 2096, 2098, 2099, 2100, 2102, 2107, 2111, 2112, 2114], "flush": [0, 1, 19, 30, 52, 1064, 1283, 1858, 1871, 2058, 2085], "zero": [0, 1, 2, 24, 28, 29, 32, 33, 35, 50, 53, 55, 60, 64, 66, 73, 75, 76, 77, 152, 260, 262, 317, 488, 515, 517, 546, 547, 586, 587, 589, 590, 625, 681, 693, 700, 714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 740, 741, 742, 743, 744, 745, 746, 751, 752, 753, 754, 755, 756, 757, 758, 761, 762, 766, 770, 771, 774, 775, 776, 777, 779, 782, 783, 797, 801, 821, 822, 823, 824, 827, 832, 880, 889, 890, 891, 896, 903, 905, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 945, 946, 966, 967, 968, 973, 992, 996, 1053, 1064, 1069, 1070, 1099, 1103, 1121, 1122, 1123, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1147, 1156, 1162, 1165, 1173, 1187, 1232, 1233, 1236, 1257, 1269, 1272, 1284, 1302, 1303, 1304, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1340, 1345, 1353, 1354, 1355, 1356, 1357, 1362, 1363, 1412, 1416, 1420, 1434, 1435, 1436, 1437, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1460, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1472, 1477, 1478, 1480, 1493, 1494, 1495, 1496, 1497, 1498, 1502, 1503, 1504, 1505, 1506, 1507, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1532, 1535, 1538, 1539, 1540, 1541, 1542, 1544, 1564, 1578, 1579, 1582, 1583, 1584, 1599, 1600, 1601, 1610, 1611, 1612, 1614, 1616, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1632, 1653, 1654, 1655, 1669, 1670, 1671, 1684, 1715, 1741, 1742, 1743, 1744, 1759, 1766, 1770, 1774, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1814, 1826, 1828, 1829, 1830, 1839, 1846, 1855, 1865, 1882, 1891, 1895, 1905, 1909, 1910, 1911, 1912, 1913, 1914, 1918, 1927, 1942, 1943, 1945, 1951, 2010, 2013, 2014, 2016, 2018, 2029, 2032, 2033, 2034, 2035, 2040, 2042, 2045, 2048, 2055, 2058, 2061, 2065, 2066, 2068, 2069, 2070, 2073, 2075, 2080, 2081, 2083, 2085, 2086, 2087, 2089, 2099, 2101, 2104], "underflow": [0, 1491, 2041], "updat": [0, 9, 11, 23, 24, 30, 32, 37, 47, 52, 55, 58, 59, 64, 88, 490, 515, 517, 762, 801, 802, 826, 899, 900, 929, 942, 1165, 1174, 1272, 1309, 1440, 1441, 1442, 1462, 1468, 1469, 1477, 1488, 1489, 1490, 1526, 1527, 1536, 1566, 1623, 1624, 1723, 1724, 1731, 1736, 1766, 1780, 1786, 1791, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 2011, 2013, 2026, 2041, 2042, 2045, 2047, 2048, 2055, 2057, 2060, 2061, 2063, 2065, 2066, 2067, 2070, 2075, 2076, 2077, 2085, 2096, 2110], "lost": [0, 
37, 48, 51, 1462, 1522, 1523, 1524, 2102], "To": [0, 1, 2, 3, 4, 5, 6, 9, 14, 15, 19, 20, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 39, 45, 46, 48, 50, 52, 53, 55, 60, 64, 81, 84, 85, 88, 337, 501, 559, 736, 898, 917, 931, 941, 972, 975, 1054, 1075, 1096, 1098, 1108, 1136, 1138, 1142, 1153, 1272, 1273, 1275, 1276, 1283, 1284, 1318, 1340, 1345, 1373, 1430, 1439, 1465, 1491, 1522, 1523, 1524, 1526, 1580, 1581, 1605, 1684, 1716, 1732, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1768, 1808, 1816, 1827, 1866, 1870, 1873, 1912, 1928, 1999, 2001, 2011, 2013, 2015, 2016, 2020, 2032, 2033, 2034, 2036, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2055, 2056, 2057, 2060, 2065, 2067, 2068, 2069, 2070, 2075, 2076, 2077, 2080, 2083, 2084, 2085, 2086, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2109, 2110, 2111, 2113], "prevent": [0, 7, 8, 23, 24, 28, 29, 30, 37, 55, 64, 488, 501, 737, 903, 906, 908, 977, 1010, 1089, 1090, 1109, 1110, 1111, 1269, 1372, 1385, 1417, 1420, 1463, 1532, 1558, 1651, 1690, 1691, 1706, 1716, 1781, 1824, 1903, 1906, 1926, 1953, 1955, 1964, 1981, 2020, 2032, 2041, 2042, 2045, 2047, 2050, 2051, 2055, 2059, 2068, 2069, 2077, 2080, 2081, 2096, 2101, 2102, 2115], "multipli": [0, 28, 315, 323, 425, 515, 687, 688, 689, 690, 691, 692, 693, 762, 781, 787, 788, 789, 943, 955, 1078, 1091, 1108, 1226, 1237, 1296, 1308, 1309, 1313, 1326, 1328, 1330, 1336, 1338, 1367, 1377, 1392, 1411, 1414, 1438, 1453, 1454, 1455, 1458, 1477, 1496, 1579, 1580, 1581, 1643, 1703, 1704, 1705, 1736, 1799, 1805, 1807, 1814, 1870, 1897, 1900, 1904, 1905, 1919, 1923, 1924, 1927, 1949, 2014, 2033, 2045, 2052, 2055, 2058, 2066, 2080, 2081, 2102], "factor": [0, 3, 24, 35, 64, 691, 692, 693, 801, 821, 943, 966, 1315, 1316, 1317, 1320, 1322, 1362, 1363, 1364, 1463, 1486, 1514, 1538, 1539, 1641, 1674, 1675, 1684, 1794, 1795, 1796, 1798, 1799, 1801, 1803, 1804, 1805, 1806, 1807, 1810, 1811, 1812, 1826, 1883, 1905, 1927, 2040, 2041, 2070, 2080, 2102], "flow": [0, 33, 53, 66, 72, 75, 972, 989, 1285, 1288, 1597, 1632, 1716, 2012, 2013, 2040, 2042, 2045, 2048, 2060, 2062, 2065, 2077, 2096, 2097, 2098, 2099, 2102, 2109], "through": [0, 5, 7, 9, 11, 15, 19, 23, 28, 30, 33, 35, 40, 52, 53, 55, 60, 63, 64, 66, 69, 71, 73, 81, 498, 794, 842, 857, 864, 865, 903, 906, 908, 909, 922, 923, 977, 1015, 1053, 1135, 1169, 1177, 1185, 1270, 1273, 1275, 1288, 1289, 1291, 1308, 1309, 1330, 1336, 1353, 1533, 1571, 1572, 1573, 1574, 1634, 1706, 1716, 1723, 1724, 1736, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1826, 1976, 2011, 2012, 2015, 2016, 2020, 2022, 2029, 2032, 2033, 2034, 2042, 2045, 2048, 2050, 2052, 2054, 2055, 2061, 2062, 2063, 2065, 2068, 2070, 2073, 2075, 2076, 2077, 2080, 2084, 2089, 2091, 2092, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2110, 2111, 2116], "word": [0, 1, 8, 28, 47, 48, 53, 58, 63, 64, 959, 1186, 1430, 1453, 1454, 1455, 1468, 1469, 1570, 1623, 1624, 1643, 1703, 1716, 1730, 1862, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2015, 2042, 2050, 2051, 2068, 2076, 2099], "have": [0, 1, 3, 5, 6, 7, 8, 9, 11, 12, 14, 17, 23, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 56, 58, 59, 60, 64, 66, 74, 75, 86, 155, 156, 223, 224, 315, 317, 323, 337, 400, 404, 450, 460, 473, 488, 489, 490, 505, 515, 517, 519, 522, 546, 619, 682, 691, 695, 697, 698, 699, 701, 817, 819, 844, 883, 892, 896, 908, 909, 911, 913, 918, 927, 928, 931, 944, 945, 953, 962, 963, 975, 978, 989, 997, 1013, 1022, 1023, 1053, 
1064, 1099, 1107, 1114, 1128, 1138, 1143, 1145, 1159, 1165, 1166, 1177, 1187, 1195, 1196, 1197, 1213, 1214, 1230, 1231, 1234, 1235, 1247, 1269, 1271, 1272, 1275, 1276, 1278, 1280, 1282, 1284, 1285, 1286, 1287, 1288, 1289, 1294, 1295, 1304, 1308, 1309, 1325, 1332, 1333, 1336, 1338, 1344, 1353, 1355, 1360, 1367, 1370, 1372, 1373, 1374, 1375, 1377, 1378, 1412, 1417, 1418, 1420, 1422, 1430, 1434, 1438, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1462, 1468, 1469, 1473, 1474, 1479, 1484, 1488, 1489, 1490, 1518, 1526, 1529, 1530, 1531, 1532, 1533, 1574, 1576, 1614, 1623, 1624, 1627, 1628, 1632, 1649, 1668, 1670, 1673, 1708, 1709, 1710, 1713, 1714, 1716, 1717, 1721, 1722, 1723, 1724, 1725, 1727, 1730, 1733, 1736, 1760, 1764, 1769, 1771, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1810, 1824, 1850, 1851, 1865, 1870, 1877, 1900, 1904, 1908, 1909, 1910, 1911, 1913, 1914, 1921, 1922, 1923, 1926, 1927, 1939, 1943, 1949, 1951, 1964, 1967, 1969, 1971, 1972, 1974, 1976, 1978, 1982, 2011, 2013, 2014, 2015, 2016, 2018, 2020, 2022, 2023, 2024, 2026, 2029, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2063, 2065, 2067, 2068, 2070, 2071, 2074, 2075, 2076, 2077, 2080, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2093, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2107, 2108, 2109, 2110, 2111, 2112, 2113], "larger": [0, 9, 28, 35, 64, 256, 501, 923, 946, 1064, 1166, 1327, 1330, 1430, 1462, 1468, 1469, 1518, 1576, 1579, 1623, 1624, 2042, 2045, 2050, 2052, 2054, 2058, 2060, 2080, 2085, 2105, 2108, 2109, 2110, 2115], "thei": [0, 1, 3, 5, 7, 9, 11, 12, 17, 18, 23, 28, 30, 34, 35, 36, 47, 52, 53, 55, 58, 59, 63, 64, 86, 323, 337, 338, 488, 688, 691, 692, 699, 802, 817, 818, 819, 826, 844, 857, 862, 893, 903, 906, 908, 918, 919, 923, 943, 975, 1053, 1068, 1112, 1159, 1165, 1175, 1181, 1193, 1198, 1200, 1261, 1262, 1269, 1272, 1280, 1285, 1294, 1308, 1309, 1315, 1336, 1344, 1366, 1370, 1373, 1375, 1378, 1412, 1435, 1436, 1437, 1456, 1457, 1458, 1472, 1511, 1519, 1520, 1521, 1526, 1532, 1543, 1555, 1578, 1632, 1634, 1706, 1716, 1717, 1721, 1730, 1734, 1757, 1760, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1804, 1807, 1866, 1923, 1927, 1964, 1965, 1970, 2011, 2013, 2015, 2016, 2018, 2020, 2022, 2023, 2026, 2029, 2032, 2033, 2034, 2035, 2036, 2040, 2041, 2042, 2045, 2047, 2048, 2049, 2051, 2053, 2055, 2057, 2058, 2060, 2061, 2063, 2065, 2067, 2068, 2070, 2075, 2080, 2083, 2085, 2086, 2087, 2089, 2090, 2096, 2098, 2099, 2101, 2102, 2103, 2104, 2107, 2111, 2112], "don": [0, 1, 4, 7, 9, 11, 28, 30, 46, 55, 56, 58, 60, 64, 66, 76, 77, 787, 797, 896, 898, 917, 977, 1082, 1165, 1186, 1187, 1193, 1195, 1197, 1201, 1288, 1366, 1488, 1489, 1490, 1579, 1703, 1706, 1716, 1772, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1869, 2011, 2013, 2017, 2032, 2033, 2034, 2042, 2045, 2048, 2050, 2055, 2057, 2061, 2065, 2067, 2068, 2070, 2075, 2076, 2080, 2096, 2098, 2100, 2101, 2102, 2103, 2109, 2112], "t": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 19, 23, 24, 28, 30, 35, 36, 37, 40, 44, 45, 46, 47, 52, 53, 55, 56, 58, 60, 61, 63, 64, 66, 76, 77, 83, 86, 152, 315, 317, 323, 460, 488, 525, 539, 573, 689, 690, 699, 762, 783, 787, 797, 825, 828, 880, 881, 882, 883, 892, 894, 896, 898, 903, 904, 906, 908, 909, 912, 917, 929, 966, 967, 968, 973, 977, 989, 997, 1008, 1011, 1032, 1050, 1051, 1082, 1106, 1124, 1129, 1130, 1131, 
1132, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1147, 1159, 1160, 1161, 1162, 1165, 1166, 1167, 1177, 1186, 1187, 1189, 1193, 1195, 1197, 1200, 1201, 1211, 1213, 1226, 1236, 1269, 1270, 1272, 1273, 1275, 1279, 1280, 1284, 1286, 1288, 1291, 1293, 1302, 1303, 1309, 1311, 1312, 1315, 1322, 1330, 1331, 1336, 1344, 1345, 1353, 1366, 1379, 1380, 1395, 1409, 1419, 1439, 1443, 1445, 1453, 1454, 1455, 1462, 1468, 1469, 1477, 1486, 1488, 1489, 1490, 1491, 1496, 1513, 1526, 1542, 1558, 1560, 1570, 1578, 1579, 1603, 1607, 1608, 1609, 1616, 1623, 1624, 1644, 1649, 1669, 1690, 1703, 1706, 1709, 1710, 1716, 1717, 1730, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1758, 1760, 1761, 1772, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1800, 1816, 1822, 1826, 1843, 1866, 1868, 1869, 1877, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1900, 1904, 1923, 1927, 1933, 1939, 1942, 1948, 1951, 1976, 1977, 1989, 2011, 2013, 2014, 2015, 2017, 2020, 2028, 2029, 2032, 2033, 2034, 2035, 2041, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2051, 2052, 2054, 2055, 2057, 2060, 2061, 2065, 2066, 2067, 2068, 2070, 2075, 2076, 2077, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2096, 2098, 2100, 2101, 2103, 2106, 2109, 2111, 2112], "grad": [0, 1, 5, 28, 35, 56, 59, 60, 64, 152, 337, 460, 489, 490, 497, 498, 505, 506, 583, 682, 883, 892, 896, 898, 899, 900, 903, 907, 908, 909, 911, 913, 918, 919, 922, 923, 927, 928, 1053, 1112, 1165, 1166, 1168, 1172, 1176, 1177, 1255, 1272, 1345, 1526, 1716, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1792, 1793, 1794, 1795, 1796, 1797, 1904, 1964, 1976, 2012, 2013, 2014, 2020, 2033, 2034, 2035, 2041, 2047, 2048, 2049, 2052, 2055, 2057, 2066, 2075, 2076, 2080, 2086, 2107, 2109], "unscal": 0, "doe": [0, 1, 3, 4, 5, 7, 8, 9, 14, 17, 19, 27, 28, 32, 35, 37, 40, 44, 46, 47, 50, 52, 53, 55, 58, 59, 60, 63, 64, 66, 256, 260, 337, 437, 460, 585, 619, 682, 697, 698, 750, 759, 762, 792, 796, 864, 883, 919, 920, 942, 955, 977, 1044, 1048, 1099, 1108, 1112, 1162, 1165, 1177, 1183, 1187, 1247, 1271, 1273, 1276, 1283, 1288, 1298, 1302, 1303, 1313, 1315, 1316, 1320, 1321, 1328, 1333, 1334, 1336, 1342, 1345, 1362, 1366, 1367, 1373, 1377, 1381, 1414, 1440, 1441, 1442, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1465, 1468, 1469, 1472, 1477, 1478, 1488, 1489, 1490, 1496, 1497, 1499, 1500, 1501, 1508, 1509, 1510, 1520, 1521, 1527, 1533, 1536, 1542, 1544, 1566, 1578, 1615, 1668, 1716, 1717, 1723, 1724, 1733, 1736, 1740, 1766, 1769, 1771, 1776, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1811, 1815, 1820, 1862, 1865, 1866, 1870, 1895, 1900, 1927, 1948, 1951, 1964, 1965, 1976, 1995, 2010, 2011, 2012, 2013, 2015, 2016, 2018, 2020, 2026, 2032, 2033, 2034, 2035, 2041, 2043, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2055, 2057, 2058, 2059, 2062, 2063, 2065, 2068, 2070, 2075, 2077, 2079, 2080, 2082, 2083, 2086, 2087, 2096, 2098, 2099, 2111, 2113], "interfer": [0, 2029, 2045, 2065, 2102], "learn": [0, 7, 8, 15, 33, 35, 46, 52, 55, 64, 88, 1443, 1459, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1485, 1498, 1511, 1513, 1534, 1556, 1570, 1572, 1574, 1575, 1576, 1687, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 2012, 2020, 2036, 2040, 2045, 2053, 2055, 2056, 2062, 2068, 2070, 2075, 2077, 2092, 2095, 2097, 2098, 2099, 2100], "rate": [0, 2, 8, 24, 35, 55, 
1464, 1465, 1466, 1470, 1573, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1819, 2012, 2054, 2085, 2102, 2108], "fp16": [0, 2, 731, 1716, 1723, 1724, 2070, 2071], "everi": [0, 1, 2, 8, 9, 19, 23, 24, 28, 30, 32, 35, 37, 53, 55, 60, 64, 483, 489, 612, 682, 783, 821, 903, 904, 908, 927, 928, 931, 975, 1091, 1108, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1166, 1226, 1235, 1272, 1304, 1318, 1328, 1331, 1362, 1434, 1463, 1464, 1465, 1466, 1470, 1519, 1526, 1560, 1562, 1566, 1618, 1619, 1620, 1625, 1642, 1657, 1658, 1659, 1673, 1691, 1708, 1709, 1710, 1713, 1714, 1716, 1731, 1765, 1768, 1769, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1866, 1927, 1961, 2016, 2022, 2029, 2041, 2042, 2045, 2047, 2048, 2049, 2051, 2052, 2056, 2065, 2067, 2068, 2069, 2070, 2071, 2075, 2076, 2077, 2080, 2082, 2083, 2085, 2087, 2098, 2099, 2101, 2108, 2109, 2110, 2111], "most": [0, 1, 3, 4, 7, 8, 15, 23, 28, 30, 32, 35, 36, 37, 39, 46, 47, 50, 51, 52, 55, 60, 61, 63, 64, 66, 488, 501, 795, 903, 904, 905, 906, 908, 909, 914, 918, 922, 935, 975, 1077, 1091, 1108, 1166, 1257, 1270, 1273, 1378, 1430, 1574, 1632, 1684, 1716, 1723, 1724, 1736, 1791, 1870, 1902, 1949, 2011, 2013, 2015, 2016, 2018, 2023, 2024, 2032, 2034, 2035, 2042, 2045, 2048, 2051, 2057, 2058, 2059, 2063, 2067, 2069, 2070, 2076, 2077, 2080, 2083, 2085, 2087, 2092, 2095, 2098, 2100, 2101, 2102, 2103, 2104, 2105, 2110, 2111], "bf16": [0, 2], "pretrain": [0, 30, 865, 1468, 1469, 2011, 2042, 2065], "cannot": [0, 3, 8, 9, 12, 23, 24, 28, 30, 33, 35, 36, 40, 47, 52, 55, 56, 60, 61, 63, 64, 224, 256, 526, 547, 883, 977, 989, 1129, 1131, 1139, 1140, 1141, 1147, 1160, 1165, 1196, 1269, 1277, 1412, 1445, 1468, 1579, 1616, 1684, 1716, 1777, 1928, 2011, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2024, 2029, 2034, 2035, 2042, 2047, 2048, 2060, 2061, 2063, 2065, 2070, 2075, 2077, 2080, 2082, 2083, 2085, 2096, 2099, 2100, 2102, 2103, 2112], "numer": [0, 11, 23, 25, 35, 53, 56, 61, 689, 795, 922, 923, 959, 1276, 1288, 1289, 1304, 1308, 1309, 1313, 1318, 1326, 1327, 1330, 1333, 1336, 1338, 1360, 1362, 1439, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1541, 1560, 1563, 1566, 1575, 1623, 1644, 1651, 1684, 1690, 1692, 1715, 1731, 1765, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1830, 1862, 1882, 1927, 2012, 2016, 2017, 2021, 2026, 2045, 2048, 2049, 2065, 2070, 2081, 2085, 2086, 2102, 2111, 2116], "max": [0, 19, 23, 28, 37, 40, 46, 47, 50, 52, 55, 64, 66, 76, 77, 118, 187, 188, 189, 190, 302, 697, 698, 699, 758, 760, 772, 773, 782, 784, 785, 804, 821, 822, 823, 824, 827, 877, 931, 946, 963, 970, 971, 975, 997, 1087, 1122, 1123, 1197, 1233, 1284, 1304, 1318, 1325, 1327, 1329, 1330, 1342, 1431, 1432, 1433, 1444, 1445, 1459, 1460, 1469, 1473, 1474, 1479, 1484, 1485, 1493, 1494, 1495, 1512, 1514, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1529, 1530, 1531, 1534, 1546, 1547, 1554, 1575, 1576, 1578, 1594, 1595, 1596, 1606, 1614, 1624, 1627, 1628, 1643, 1647, 1657, 1658, 1659, 1669, 1673, 1677, 1679, 1685, 1703, 1721, 1760, 1783, 1784, 1785, 1795, 1800, 1801, 1810, 1831, 1832, 1904, 1921, 1922, 1934, 1964, 1971, 1972, 2013, 2014, 2023, 2029, 2035, 2043, 2045, 2050, 2066, 2070, 2073, 2093, 2098, 2101, 2105, 2106, 2116], "65504": 0, "overflow": [0, 1089, 1090, 1372, 1417, 1420, 1651, 1690, 1691, 
1824, 1855, 1903, 1906, 1926, 1953, 1955, 2045, 2058, 2081], "case": [0, 1, 3, 4, 8, 9, 11, 14, 15, 19, 23, 24, 28, 30, 32, 33, 34, 35, 37, 39, 40, 47, 48, 50, 51, 52, 53, 55, 56, 58, 59, 60, 61, 63, 64, 66, 86, 152, 156, 198, 354, 488, 498, 501, 682, 762, 786, 793, 796, 822, 823, 825, 828, 861, 868, 896, 908, 909, 911, 913, 917, 938, 942, 944, 946, 951, 977, 992, 1007, 1032, 1054, 1055, 1075, 1076, 1077, 1108, 1129, 1131, 1139, 1140, 1141, 1156, 1167, 1171, 1176, 1187, 1197, 1200, 1232, 1257, 1270, 1273, 1282, 1285, 1286, 1288, 1302, 1304, 1308, 1309, 1313, 1318, 1319, 1320, 1322, 1328, 1331, 1336, 1338, 1342, 1344, 1345, 1350, 1353, 1362, 1373, 1412, 1430, 1435, 1436, 1437, 1438, 1439, 1453, 1454, 1455, 1457, 1458, 1459, 1461, 1464, 1465, 1466, 1467, 1469, 1470, 1472, 1481, 1482, 1483, 1484, 1485, 1486, 1492, 1493, 1494, 1495, 1512, 1517, 1519, 1520, 1521, 1532, 1533, 1534, 1545, 1558, 1564, 1569, 1574, 1576, 1578, 1597, 1615, 1624, 1632, 1636, 1637, 1668, 1670, 1677, 1716, 1718, 1719, 1723, 1724, 1730, 1733, 1736, 1737, 1770, 1771, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1808, 1810, 1846, 1849, 1862, 1864, 1877, 1880, 1912, 1927, 1928, 1959, 1960, 1964, 1979, 1989, 1999, 2000, 2001, 2002, 2011, 2016, 2021, 2024, 2032, 2033, 2034, 2040, 2041, 2042, 2043, 2044, 2045, 2048, 2049, 2050, 2051, 2052, 2055, 2057, 2058, 2060, 2065, 2067, 2068, 2070, 2071, 2073, 2074, 2075, 2076, 2077, 2080, 2081, 2082, 2084, 2085, 2086, 2087, 2092, 2098, 2099, 2101, 2102, 2103, 2107, 2109, 2110, 2111, 2112, 2116], "decreas": [0, 35, 1064, 1318, 1464, 1465, 1466, 1470, 1539, 1758, 1759, 1795, 1802, 1810, 2022, 2026, 2057, 2059, 2080, 2083], "attempt": [0, 1, 8, 14, 19, 28, 30, 45, 47, 48, 60, 87, 969, 975, 978, 993, 1276, 1283, 1362, 1684, 1718, 1719, 1783, 1784, 1796, 1964, 1967, 2013, 2016, 2032, 2033, 2034, 2041, 2045, 2048, 2061, 2062, 2067, 2068, 2075, 2098, 2101, 2102, 2111], "bring": [0, 56, 64, 1129, 1597, 1632, 2046, 2048, 2105, 2108], "number": [0, 1, 2, 3, 4, 5, 7, 14, 19, 23, 24, 28, 30, 32, 33, 35, 37, 45, 46, 47, 51, 52, 56, 58, 61, 64, 66, 71, 87, 90, 156, 175, 220, 234, 256, 315, 354, 379, 400, 404, 437, 448, 473, 475, 476, 483, 495, 499, 501, 515, 517, 519, 545, 547, 548, 560, 585, 586, 587, 589, 590, 591, 610, 619, 687, 688, 689, 690, 691, 692, 693, 700, 760, 762, 770, 771, 774, 775, 776, 783, 821, 868, 880, 893, 897, 908, 922, 938, 940, 943, 945, 946, 951, 955, 957, 961, 969, 970, 974, 992, 996, 997, 1002, 1013, 1015, 1021, 1030, 1039, 1040, 1051, 1053, 1054, 1055, 1064, 1075, 1076, 1079, 1080, 1100, 1103, 1105, 1108, 1109, 1113, 1121, 1151, 1153, 1156, 1160, 1162, 1164, 1181, 1213, 1215, 1223, 1224, 1225, 1229, 1233, 1234, 1235, 1247, 1248, 1269, 1270, 1272, 1286, 1293, 1296, 1297, 1304, 1318, 1327, 1336, 1340, 1345, 1350, 1361, 1365, 1373, 1382, 1386, 1387, 1391, 1393, 1404, 1411, 1412, 1416, 1420, 1423, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1435, 1436, 1437, 1438, 1439, 1440, 1443, 1444, 1445, 1446, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1467, 1468, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1491, 1492, 1496, 1497, 1502, 1503, 1504, 1505, 1506, 1507, 1512, 1513, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1525, 1526, 1529, 1530, 1531, 1532, 1533, 1534, 1540, 1542, 1544, 1545, 1546, 1547, 1554, 1556, 1557, 1558, 1559, 1560, 1562, 1563, 1564, 1565, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1599, 1600, 1601, 1603, 1604, 
1605, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1616, 1623, 1624, 1627, 1628, 1633, 1643, 1644, 1646, 1649, 1657, 1658, 1659, 1668, 1670, 1676, 1677, 1684, 1706, 1716, 1731, 1736, 1741, 1742, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1757, 1758, 1759, 1761, 1765, 1770, 1771, 1772, 1774, 1775, 1778, 1786, 1799, 1800, 1801, 1802, 1805, 1806, 1808, 1809, 1810, 1816, 1819, 1823, 1833, 1835, 1836, 1837, 1839, 1840, 1841, 1846, 1848, 1849, 1852, 1853, 1854, 1855, 1863, 1867, 1870, 1871, 1872, 1873, 1874, 1875, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1900, 1905, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1919, 1920, 1921, 1922, 1923, 1924, 1928, 1943, 1944, 1945, 1953, 1955, 1960, 1961, 1966, 1967, 1970, 1971, 1972, 1973, 1974, 1975, 1982, 1987, 1993, 1994, 1999, 2000, 2001, 2002, 2004, 2005, 2009, 2012, 2014, 2015, 2016, 2017, 2018, 2020, 2023, 2029, 2032, 2035, 2040, 2043, 2045, 2048, 2052, 2055, 2057, 2058, 2060, 2065, 2067, 2068, 2069, 2071, 2074, 2075, 2076, 2079, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2088, 2096, 2098, 2101, 2102, 2108, 2111, 2113, 2114, 2115, 2116], "expect": [0, 1, 3, 5, 7, 9, 12, 23, 24, 28, 30, 32, 33, 37, 45, 47, 50, 52, 53, 55, 60, 61, 64, 82, 417, 488, 682, 699, 762, 857, 911, 912, 913, 914, 915, 916, 1053, 1129, 1131, 1148, 1149, 1150, 1170, 1171, 1187, 1197, 1247, 1269, 1272, 1288, 1289, 1317, 1338, 1339, 1373, 1441, 1442, 1461, 1477, 1478, 1479, 1480, 1489, 1490, 1491, 1496, 1497, 1498, 1508, 1509, 1510, 1526, 1532, 1533, 1541, 1542, 1544, 1560, 1566, 1570, 1572, 1574, 1579, 1623, 1629, 1643, 1668, 1676, 1677, 1690, 1703, 1704, 1705, 1706, 1715, 1716, 1758, 1811, 1937, 1974, 2011, 2012, 2018, 2020, 2024, 2042, 2047, 2050, 2051, 2052, 2055, 2065, 2067, 2068, 2070, 2071, 2072, 2075, 2080, 2085, 2087, 2097, 2099, 2101, 2102, 2105, 2107, 2109], "alwai": [0, 5, 7, 14, 17, 19, 23, 24, 28, 35, 45, 50, 52, 53, 55, 59, 64, 342, 417, 450, 460, 796, 883, 903, 905, 908, 911, 917, 944, 953, 963, 969, 975, 1000, 1002, 1053, 1064, 1095, 1103, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1139, 1140, 1141, 1142, 1144, 1147, 1151, 1181, 1200, 1230, 1231, 1272, 1283, 1288, 1303, 1308, 1309, 1310, 1311, 1313, 1314, 1326, 1329, 1330, 1331, 1332, 1336, 1338, 1418, 1438, 1440, 1441, 1442, 1462, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1526, 1563, 1566, 1684, 1706, 1716, 1757, 1810, 1850, 1851, 1876, 1923, 1927, 1934, 1960, 1964, 2016, 2017, 2024, 2028, 2032, 2035, 2042, 2043, 2044, 2045, 2047, 2048, 2051, 2052, 2054, 2055, 2068, 2074, 2075, 2077, 2080, 2083, 2086, 2087, 2096, 2098, 2101, 2109, 2118], "abov": [0, 1, 3, 12, 15, 28, 30, 33, 34, 35, 40, 47, 50, 52, 53, 55, 56, 61, 64, 66, 68, 619, 682, 794, 795, 883, 944, 953, 1064, 1091, 1095, 1096, 1097, 1098, 1099, 1108, 1216, 1230, 1231, 1272, 1273, 1293, 1304, 1308, 1309, 1327, 1329, 1330, 1333, 1336, 1339, 1342, 1345, 1435, 1436, 1437, 1438, 1439, 1472, 1526, 1555, 1563, 1578, 1684, 1706, 1772, 1802, 1826, 1837, 1838, 1870, 1874, 1887, 1907, 1949, 1952, 1953, 1954, 1955, 2011, 2013, 2015, 2016, 2020, 2023, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2052, 2055, 2057, 2059, 2060, 2063, 2065, 2068, 2070, 2075, 2076, 2077, 2080, 2081, 2087, 2092, 2097, 2098, 2099, 2100, 2102, 2103, 2104, 2105, 2111], "our": [0, 3, 7, 8, 11, 33, 43, 46, 47, 48, 51, 59, 60, 61, 64, 65, 903, 904, 908, 1128, 1166, 1180, 1187, 1438, 1723, 1724, 1783, 1784, 1870, 2020, 2042, 2046, 2048, 2051, 2052, 2057, 2062, 2065, 2067, 2070, 2076, 2080, 2092, 2096, 2098, 2099, 2101, 2102, 2104, 2105, 2110, 2111], "NOT": [0, 23, 28, 37, 
47, 48, 50, 52, 64, 949, 1008, 1042, 1272, 1355, 1465, 1716, 1717, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1754, 1769, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 2042, 2063, 2077, 2080, 2082, 2109], "make": [0, 1, 2, 3, 4, 5, 8, 14, 15, 20, 23, 24, 25, 28, 30, 31, 33, 35, 37, 39, 44, 47, 48, 50, 51, 52, 59, 60, 64, 65, 66, 77, 141, 224, 488, 498, 781, 787, 864, 865, 896, 899, 966, 977, 1010, 1011, 1013, 1096, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1148, 1149, 1150, 1181, 1186, 1187, 1272, 1276, 1282, 1283, 1302, 1303, 1315, 1316, 1317, 1345, 1385, 1438, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1486, 1526, 1536, 1574, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1632, 1634, 1643, 1684, 1703, 1716, 1730, 1747, 1764, 1804, 1867, 1899, 1909, 1910, 1911, 1913, 1914, 1946, 1964, 1967, 1969, 1981, 1982, 2011, 2013, 2015, 2016, 2020, 2024, 2029, 2032, 2033, 2035, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2050, 2052, 2055, 2057, 2059, 2061, 2065, 2067, 2068, 2069, 2070, 2071, 2075, 2076, 2077, 2080, 2082, 2083, 2085, 2090, 2092, 2093, 2098, 2100, 2101, 2102, 2104, 2105, 2107, 2110, 2111, 2112], "guarante": [0, 1, 5, 9, 23, 28, 30, 32, 35, 47, 50, 52, 53, 60, 63, 64, 66, 488, 879, 975, 1185, 1187, 1197, 1272, 1282, 1308, 1310, 1462, 1526, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1899, 1901, 1964, 2042, 2044, 2045, 2058, 2059, 2068, 2069, 2071, 2075, 2077, 2098], "encount": [0, 5, 19, 28, 52, 60, 63, 64, 682, 977, 1716, 1723, 1724, 2013, 2016, 2018, 2022, 2044, 2058, 2068, 2070, 2097, 2099, 2102, 2107, 2109, 2111, 2114], "nan": [0, 1, 27, 35, 429, 430, 501, 686, 688, 691, 692, 693, 696, 699, 700, 884, 888, 943, 959, 1109, 1110, 1111, 1154, 1155, 1156, 1233, 1261, 1262, 1264, 1265, 1312, 1319, 1332, 1335, 1348, 1353, 1371, 1372, 1376, 1416, 1417, 1418, 1419, 1420, 1632, 1721, 1820, 1857, 1862, 1916, 1951, 2014, 2023, 2041, 2042, 2049, 2058, 2081, 2087, 2115], "verifi": [0, 28, 52, 64, 89, 922, 1091, 1778, 1779, 1797, 1949, 2011, 2013, 2027, 2048, 2052, 2065, 2104], "compat": [0, 1, 14, 23, 28, 30, 34, 35, 37, 48, 52, 55, 59, 60, 64, 66, 499, 500, 605, 619, 682, 817, 818, 819, 893, 908, 909, 941, 956, 990, 991, 1185, 1272, 1277, 1290, 1303, 1314, 1327, 1330, 1526, 1532, 1570, 1571, 1572, 1573, 1574, 1643, 1723, 1724, 1768, 1777, 1849, 2011, 2012, 2015, 2016, 2022, 2032, 2033, 2041, 2048, 2060, 2063, 2068, 2069, 2070, 2073, 2075, 2080, 2082, 2086, 2089, 2093], "init_scal": 0, "65536": 0, "0": [0, 1, 3, 11, 12, 14, 18, 19, 20, 23, 24, 25, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 41, 45, 47, 48, 50, 52, 53, 55, 58, 59, 60, 61, 63, 64, 66, 67, 68, 69, 71, 72, 73, 74, 75, 76, 77, 78, 80, 156, 158, 175, 186, 193, 210, 226, 227, 228, 229, 230, 235, 256, 260, 262, 265, 288, 291, 300, 302, 313, 315, 317, 319, 323, 354, 403, 404, 429, 430, 447, 450, 451, 456, 483, 485, 489, 490, 498, 509, 510, 515, 517, 519, 522, 540, 545, 546, 549, 558, 560, 562, 580, 582, 583, 585, 586, 587, 589, 590, 591, 597, 598, 599, 600, 607, 609, 610, 619, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 701, 714, 715, 716, 717, 718, 719, 720, 721, 725, 726, 727, 728, 729, 732, 733, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 750, 753, 754, 755, 757, 758, 759, 760, 762, 770, 771, 772, 774, 775, 776, 777, 780, 782, 784, 785, 787, 796, 798, 801, 804, 819, 822, 823, 824, 827, 857, 859, 868, 877, 878, 879, 880, 881, 882, 
883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 897, 903, 905, 908, 909, 911, 912, 913, 915, 922, 923, 925, 927, 928, 935, 938, 940, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 956, 957, 959, 962, 963, 964, 965, 966, 967, 969, 970, 973, 987, 988, 989, 992, 993, 994, 995, 996, 997, 1007, 1013, 1022, 1024, 1050, 1051, 1078, 1082, 1087, 1088, 1089, 1090, 1091, 1092, 1095, 1096, 1097, 1098, 1099, 1100, 1102, 1103, 1105, 1106, 1108, 1110, 1111, 1118, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1142, 1143, 1144, 1145, 1147, 1148, 1149, 1150, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1165, 1167, 1168, 1169, 1170, 1171, 1175, 1176, 1177, 1187, 1191, 1197, 1213, 1214, 1221, 1226, 1230, 1232, 1233, 1234, 1235, 1236, 1239, 1243, 1247, 1249, 1257, 1268, 1269, 1270, 1272, 1276, 1278, 1279, 1280, 1284, 1292, 1293, 1294, 1295, 1296, 1298, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1314, 1315, 1316, 1317, 1318, 1319, 1324, 1325, 1326, 1327, 1329, 1330, 1331, 1332, 1334, 1339, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1352, 1353, 1354, 1355, 1356, 1357, 1359, 1362, 1366, 1370, 1371, 1372, 1373, 1375, 1376, 1377, 1378, 1379, 1380, 1392, 1395, 1411, 1412, 1414, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1424, 1426, 1428, 1429, 1430, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1444, 1445, 1447, 1448, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1477, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1512, 1514, 1516, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1540, 1541, 1542, 1543, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1554, 1558, 1560, 1561, 1562, 1563, 1564, 1566, 1569, 1570, 1572, 1574, 1575, 1576, 1578, 1579, 1582, 1583, 1584, 1587, 1588, 1589, 1597, 1598, 1599, 1600, 1601, 1602, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1623, 1624, 1625, 1626, 1627, 1628, 1630, 1632, 1635, 1636, 1637, 1640, 1641, 1642, 1643, 1647, 1648, 1650, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1668, 1669, 1670, 1671, 1672, 1673, 1676, 1677, 1679, 1684, 1685, 1688, 1690, 1693, 1699, 1700, 1701, 1702, 1703, 1706, 1715, 1716, 1720, 1721, 1723, 1724, 1730, 1731, 1732, 1736, 1741, 1742, 1744, 1745, 1746, 1747, 1749, 1750, 1751, 1752, 1753, 1754, 1757, 1758, 1760, 1761, 1763, 1764, 1765, 1766, 1768, 1770, 1771, 1772, 1776, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1817, 1819, 1820, 1822, 1823, 1824, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1844, 1845, 1846, 1847, 1848, 1849, 1852, 1853, 1854, 1855, 1857, 1858, 1862, 1864, 1865, 1866, 1869, 1871, 1874, 1877, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1894, 1895, 1899, 1901, 1902, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1920, 1921, 1922, 1923, 1924, 1926, 1927, 1928, 1929, 1930, 1937, 1938, 1940, 1941, 1942, 1943, 1944, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1957, 1958, 1960, 1961, 1962, 1963, 1966, 1970, 1971, 
1972, 1974, 1975, 1976, 1977, 1979, 1982, 2009, 2010, 2011, 2013, 2014, 2015, 2016, 2020, 2023, 2033, 2034, 2035, 2040, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 2069, 2070, 2072, 2075, 2076, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2090, 2092, 2093, 2096, 2097, 2098, 2100, 2102, 2104, 2105, 2106, 2109, 2110, 2112, 2115, 2116], "growth_factor": 0, "backoff_factor": 0, "5": [0, 1, 10, 11, 12, 14, 18, 21, 23, 24, 28, 33, 35, 45, 52, 59, 60, 61, 63, 64, 66, 67, 71, 72, 74, 75, 76, 77, 156, 193, 210, 235, 262, 291, 300, 315, 317, 319, 323, 403, 404, 447, 473, 501, 515, 517, 519, 525, 539, 546, 560, 562, 586, 587, 588, 589, 590, 609, 687, 688, 692, 699, 736, 741, 742, 743, 744, 745, 747, 748, 762, 764, 775, 776, 864, 865, 868, 883, 889, 890, 891, 911, 914, 916, 943, 946, 953, 954, 955, 959, 961, 965, 967, 968, 969, 970, 973, 980, 992, 996, 1087, 1089, 1091, 1098, 1100, 1102, 1103, 1106, 1107, 1108, 1111, 1124, 1126, 1128, 1129, 1135, 1136, 1139, 1144, 1147, 1148, 1151, 1154, 1156, 1157, 1158, 1160, 1167, 1169, 1170, 1171, 1172, 1175, 1176, 1177, 1214, 1226, 1232, 1234, 1236, 1238, 1239, 1249, 1257, 1261, 1279, 1283, 1284, 1293, 1294, 1295, 1298, 1301, 1302, 1319, 1322, 1325, 1328, 1329, 1330, 1331, 1336, 1337, 1340, 1342, 1343, 1345, 1346, 1347, 1348, 1349, 1355, 1359, 1366, 1367, 1373, 1374, 1412, 1419, 1421, 1422, 1424, 1427, 1428, 1429, 1431, 1432, 1433, 1434, 1435, 1439, 1440, 1441, 1442, 1446, 1447, 1448, 1450, 1451, 1452, 1454, 1455, 1457, 1458, 1459, 1461, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1477, 1479, 1480, 1481, 1486, 1488, 1489, 1490, 1491, 1492, 1496, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1514, 1517, 1522, 1523, 1526, 1533, 1536, 1540, 1541, 1542, 1548, 1549, 1550, 1551, 1552, 1555, 1558, 1564, 1566, 1570, 1572, 1574, 1576, 1577, 1578, 1579, 1580, 1581, 1598, 1599, 1607, 1608, 1610, 1611, 1615, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1627, 1628, 1630, 1632, 1635, 1643, 1668, 1670, 1676, 1684, 1693, 1703, 1704, 1705, 1715, 1716, 1736, 1746, 1749, 1751, 1752, 1754, 1759, 1760, 1762, 1764, 1770, 1771, 1772, 1774, 1775, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1799, 1805, 1809, 1815, 1817, 1819, 1820, 1822, 1823, 1826, 1827, 1830, 1831, 1832, 1833, 1837, 1842, 1843, 1846, 1847, 1853, 1854, 1855, 1862, 1874, 1881, 1882, 1883, 1884, 1885, 1886, 1889, 1890, 1899, 1905, 1907, 1908, 1909, 1910, 1912, 1915, 1923, 1926, 1927, 1928, 1929, 1930, 1938, 1940, 1942, 1943, 1944, 1946, 1947, 1949, 1958, 1959, 1962, 1964, 1970, 1976, 1977, 1978, 2009, 2013, 2014, 2015, 2016, 2017, 2023, 2024, 2034, 2035, 2040, 2041, 2042, 2043, 2045, 2048, 2050, 2052, 2055, 2056, 2060, 2061, 2063, 2065, 2067, 2070, 2075, 2080, 2081, 2083, 2085, 2086, 2087, 2099, 2100, 2102, 2109, 2110, 2111], "growth_interv": 0, "2000": [0, 28, 32, 1126, 1128, 1135, 1144, 1157, 1350, 1579, 1802, 1830, 1879, 1880, 1942], "float64": [0, 11, 242, 448, 451, 582, 883, 986, 1151, 1160, 1161, 1219, 1254, 1272, 1302, 1308, 1309, 1311, 1313, 1329, 1526, 1684, 1782, 1783, 1784, 1796, 1820, 1828, 1867, 1868, 1871, 1901, 1909, 1910, 1911, 1912, 1913, 1914, 1942, 1974, 1979, 2055, 2058, 2080, 2082, 2083, 2086, 2087, 2116], "out": [0, 1, 2, 3, 7, 8, 9, 11, 15, 18, 19, 23, 25, 28, 30, 35, 44, 45, 48, 52, 56, 59, 64, 66, 70, 72, 235, 314, 316, 318, 320, 401, 403, 450, 498, 514, 516, 518, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 695, 697, 
698, 699, 700, 701, 758, 762, 766, 774, 775, 776, 781, 783, 787, 799, 838, 840, 841, 860, 861, 868, 869, 870, 871, 872, 873, 874, 875, 880, 884, 885, 886, 887, 888, 898, 901, 902, 903, 906, 908, 918, 922, 931, 942, 943, 945, 946, 947, 948, 949, 950, 951, 952, 955, 959, 962, 964, 965, 966, 967, 968, 970, 971, 973, 975, 986, 987, 988, 991, 992, 994, 995, 1007, 1012, 1020, 1022, 1024, 1051, 1052, 1064, 1065, 1078, 1082, 1087, 1088, 1089, 1090, 1092, 1095, 1100, 1101, 1103, 1104, 1105, 1107, 1108, 1109, 1113, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1124, 1125, 1126, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1162, 1163, 1166, 1167, 1169, 1170, 1172, 1177, 1196, 1209, 1213, 1214, 1215, 1216, 1217, 1227, 1228, 1229, 1232, 1233, 1234, 1235, 1237, 1238, 1239, 1240, 1241, 1242, 1244, 1245, 1246, 1247, 1249, 1250, 1266, 1267, 1272, 1278, 1284, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1341, 1342, 1343, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1354, 1355, 1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1366, 1367, 1369, 1370, 1371, 1372, 1373, 1375, 1376, 1377, 1378, 1392, 1395, 1411, 1412, 1413, 1414, 1415, 1416, 1417, 1418, 1419, 1422, 1423, 1424, 1425, 1426, 1427, 1431, 1432, 1433, 1435, 1436, 1437, 1443, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1463, 1464, 1465, 1466, 1468, 1470, 1473, 1474, 1477, 1478, 1493, 1494, 1495, 1496, 1511, 1513, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1538, 1539, 1542, 1544, 1548, 1549, 1550, 1551, 1552, 1553, 1570, 1571, 1572, 1573, 1574, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1586, 1603, 1607, 1608, 1609, 1610, 1611, 1612, 1618, 1619, 1620, 1625, 1628, 1632, 1643, 1649, 1669, 1671, 1703, 1716, 1723, 1724, 1741, 1742, 1743, 1744, 1770, 1771, 1772, 1773, 1775, 1776, 1797, 1814, 1815, 1819, 1820, 1821, 1823, 1826, 1827, 1833, 1834, 1835, 1837, 1839, 1841, 1842, 1845, 1846, 1847, 1851, 1855, 1856, 1857, 1859, 1860, 1861, 1862, 1877, 1878, 1879, 1891, 1892, 1893, 1894, 1899, 1904, 1905, 1916, 1917, 1919, 1920, 1921, 1922, 1924, 1925, 1927, 1939, 1940, 1941, 1944, 1946, 1948, 1951, 1952, 1954, 1956, 1957, 1964, 1971, 1972, 1973, 1976, 1978, 1979, 1980, 2009, 2010, 2011, 2012, 2013, 2014, 2016, 2020, 2023, 2024, 2029, 2031, 2032, 2034, 2035, 2042, 2043, 2045, 2047, 2048, 2049, 2051, 2055, 2057, 2063, 2065, 2067, 2068, 2070, 2071, 2073, 2075, 2076, 2077, 2080, 2081, 2083, 2085, 2086, 2091, 2098, 2099, 2100, 2101, 2102, 2104, 2105, 2106, 2109, 2110, 2111, 2112, 2113, 2115], "place": [0, 3, 7, 11, 12, 19, 23, 24, 28, 30, 36, 37, 52, 55, 58, 59, 62, 64, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 123, 125, 127, 129, 132, 133, 135, 143, 145, 148, 149, 151, 154, 160, 162, 164, 166, 168, 170, 179, 188, 196, 200, 203, 205, 215, 217, 223, 224, 233, 238, 240, 246, 249, 251, 253, 255, 256, 259, 262, 264, 271, 273, 275, 279, 281, 285, 287, 294, 296, 298, 306, 308, 310, 312, 314, 316, 318, 320, 358, 360, 362, 364, 366, 368, 370, 373, 375, 377, 378, 385, 387, 389, 391, 393, 397, 401, 403, 422, 425, 428, 430, 441, 443, 445, 453, 458, 468, 471, 487, 488, 489, 490, 492, 494, 498, 501, 510, 513, 514, 516, 518, 524, 529, 531, 534, 536, 538, 551, 553, 555, 564, 566, 573, 577, 579, 595, 598, 600, 602, 604, 605, 
614, 624, 760, 795, 799, 816, 840, 841, 860, 861, 903, 904, 906, 908, 927, 928, 929, 931, 957, 989, 1020, 1021, 1023, 1024, 1044, 1108, 1159, 1165, 1166, 1174, 1272, 1317, 1335, 1412, 1434, 1444, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1470, 1482, 1483, 1484, 1512, 1526, 1544, 1545, 1546, 1547, 1554, 1569, 1617, 1618, 1619, 1620, 1622, 1623, 1624, 1625, 1636, 1639, 1648, 1680, 1683, 1698, 1702, 1706, 1716, 1721, 1722, 1723, 1724, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1766, 1797, 1853, 1855, 1859, 1860, 1861, 1907, 1944, 2012, 2013, 2015, 2016, 2023, 2034, 2041, 2045, 2048, 2054, 2062, 2063, 2067, 2068, 2075, 2076, 2082, 2084, 2086, 2098, 2099, 2101, 2102, 2108, 2110], "variant": [0, 5, 24, 52, 861, 1303, 1314, 1327, 1330, 1419, 1422, 1586, 1587, 1588, 1589, 1783, 1784, 1785, 1797, 1901, 1951, 2012, 2054, 2073, 2086, 2106, 2107, 2110], "explicitli": [0, 5, 8, 14, 28, 40, 55, 88, 929, 1044, 1096, 1098, 1108, 1235, 1313, 1326, 1330, 1338, 1380, 1643, 1703, 1808, 1870, 1923, 2013, 2015, 2016, 2017, 2022, 2024, 2045, 2048, 2051, 2052, 2054, 2060, 2065, 2068, 2074, 2075, 2080, 2086, 2102], "suppli": [0, 5, 7, 14, 15, 28, 1042, 1532, 1779, 2016, 2045, 2065, 2080, 2102, 2111], "won": [0, 8, 24, 30, 47, 52, 58, 460, 1187, 1272, 1284, 1526, 1669, 1709, 1710, 2011, 2042, 2048, 2075, 2089, 2101, 2102, 2109, 2111], "go": [0, 1, 7, 15, 23, 28, 30, 33, 44, 50, 52, 59, 64, 501, 502, 562, 903, 905, 908, 927, 931, 1193, 1345, 1435, 1436, 1437, 1519, 1520, 1521, 1723, 1724, 2015, 2016, 2023, 2032, 2033, 2034, 2042, 2044, 2045, 2048, 2049, 2054, 2055, 2057, 2061, 2065, 2068, 2083, 2085, 2097, 2098, 2099, 2100, 2101, 2102], "addmm": [0, 52, 53, 108, 1919, 2014, 2033, 2058, 2066, 2080, 2106, 2110], "b": [0, 1, 3, 11, 12, 23, 28, 30, 35, 45, 52, 64, 66, 69, 87, 89, 262, 337, 400, 619, 687, 688, 750, 759, 783, 826, 883, 903, 904, 906, 907, 908, 909, 925, 927, 928, 943, 954, 955, 957, 961, 963, 965, 968, 973, 989, 992, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1050, 1051, 1100, 1103, 1107, 1108, 1153, 1154, 1155, 1156, 1162, 1166, 1179, 1180, 1198, 1214, 1238, 1249, 1269, 1275, 1283, 1284, 1292, 1293, 1295, 1304, 1305, 1313, 1317, 1318, 1322, 1325, 1326, 1327, 1328, 1329, 1330, 1333, 1334, 1335, 1338, 1339, 1342, 1345, 1354, 1356, 1357, 1363, 1371, 1376, 1378, 1411, 1439, 1443, 1468, 1469, 1476, 1513, 1603, 1605, 1624, 1631, 1649, 1730, 1758, 1759, 1760, 1761, 1762, 1763, 1771, 1794, 1796, 1827, 1833, 1846, 1849, 1865, 1876, 1895, 1904, 1909, 1910, 1911, 1913, 1914, 1918, 1923, 1924, 1926, 1933, 1934, 1935, 1944, 1951, 1952, 1954, 1973, 1978, 2013, 2014, 2015, 2016, 2020, 2033, 2034, 2035, 2040, 2041, 2043, 2045, 2048, 2050, 2052, 2053, 2058, 2060, 2063, 2069, 2070, 2076, 2077, 2080, 2081, 2084, 2085, 2091, 2097, 2099, 2100, 2102, 2104, 2106, 2110, 2111, 2113], "c": [0, 1, 3, 8, 9, 14, 19, 23, 25, 28, 35, 45, 53, 58, 64, 87, 89, 262, 337, 501, 522, 619, 687, 823, 880, 883, 903, 906, 908, 909, 954, 965, 977, 993, 1044, 1050, 1085, 1100, 1103, 1108, 1154, 1155, 1156, 1166, 1167, 1171, 1176, 1214, 1226, 1272, 1283, 1295, 1302, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1318, 1319, 1322, 1324, 1328, 1329, 1331, 1333, 1335, 1336, 1411, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1435, 1436, 1437, 1439, 1440, 1441, 1442, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1461, 1464, 1465, 1466, 1470, 1472, 1473, 1474, 1480, 1488, 1489, 1490, 1493, 1494, 1495, 1497, 1498, 1508, 1509, 1510, 1514, 1519, 1520, 1521, 1522, 1523, 1524, 1529, 1530, 
1531, 1533, 1538, 1539, 1548, 1549, 1550, 1551, 1552, 1553, 1561, 1566, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1597, 1605, 1615, 1616, 1628, 1632, 1668, 1674, 1675, 1684, 1730, 1759, 1761, 1762, 1763, 1770, 1771, 1814, 1830, 1846, 1883, 1904, 1918, 1923, 1944, 1961, 1966, 1967, 2012, 2013, 2014, 2015, 2016, 2024, 2033, 2034, 2035, 2044, 2045, 2047, 2048, 2049, 2052, 2054, 2061, 2068, 2076, 2077, 2080, 2081, 2084, 2085, 2092, 2098, 2099, 2100, 2101, 2102, 2104, 2106, 2110, 2111, 2112, 2113, 2114], "addmm_": [0, 2014, 2033, 2080], "d": [0, 1, 11, 23, 24, 28, 34, 35, 56, 64, 315, 323, 337, 473, 515, 517, 519, 546, 585, 619, 688, 692, 868, 883, 903, 906, 908, 909, 942, 943, 944, 946, 953, 954, 955, 959, 962, 965, 1095, 1097, 1107, 1108, 1121, 1125, 1126, 1127, 1128, 1133, 1134, 1137, 1138, 1143, 1144, 1145, 1148, 1149, 1150, 1177, 1180, 1213, 1230, 1231, 1238, 1247, 1269, 1273, 1277, 1315, 1316, 1366, 1377, 1414, 1429, 1437, 1438, 1442, 1455, 1459, 1460, 1464, 1465, 1466, 1468, 1470, 1472, 1477, 1479, 1490, 1496, 1498, 1510, 1521, 1535, 1541, 1542, 1566, 1575, 1576, 1578, 1597, 1632, 1643, 1649, 1677, 1702, 1703, 1715, 1731, 1765, 1770, 1771, 1795, 1815, 1833, 1839, 1842, 1854, 1862, 1867, 1918, 1923, 1937, 1938, 1944, 1947, 1952, 1953, 1954, 1955, 1970, 1976, 1978, 2011, 2014, 2023, 2033, 2034, 2035, 2040, 2042, 2045, 2048, 2049, 2051, 2052, 2053, 2060, 2065, 2075, 2076, 2080, 2081, 2085, 2086, 2101, 2102, 2104, 2109, 2111, 2113], "best": [0, 1, 7, 15, 18, 23, 28, 35, 48, 55, 58, 864, 865, 935, 962, 975, 1187, 1288, 1289, 1318, 1532, 1797, 1810, 2012, 2013, 2015, 2020, 2032, 2040, 2041, 2042, 2048, 2050, 2068, 2075, 2080, 2082, 2092, 2099, 2100, 2102, 2105], "stabil": [0, 1304, 1309, 1360, 1439, 1440, 1441, 1442, 1479, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1541, 1563, 1566, 1575, 1629, 1692, 1715, 1731, 1765, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1830, 2049, 2102, 2111], "argument": [0, 1, 3, 4, 5, 8, 9, 12, 14, 18, 19, 23, 28, 29, 30, 32, 33, 34, 35, 37, 45, 48, 51, 52, 53, 55, 59, 60, 63, 64, 66, 72, 74, 75, 86, 90, 152, 198, 211, 315, 323, 417, 447, 448, 449, 450, 451, 489, 490, 515, 519, 562, 582, 583, 585, 586, 587, 589, 590, 605, 682, 683, 685, 686, 687, 688, 689, 690, 691, 692, 693, 695, 697, 698, 699, 700, 701, 736, 740, 741, 742, 743, 744, 745, 795, 796, 801, 821, 822, 823, 826, 827, 828, 844, 860, 861, 864, 866, 868, 883, 884, 885, 886, 887, 888, 892, 893, 894, 895, 896, 903, 904, 905, 906, 908, 909, 912, 914, 915, 916, 917, 919, 920, 927, 928, 943, 944, 945, 947, 948, 949, 950, 951, 952, 953, 955, 959, 962, 964, 965, 966, 967, 968, 970, 972, 973, 979, 986, 989, 991, 992, 994, 995, 997, 1007, 1029, 1031, 1035, 1036, 1042, 1050, 1051, 1053, 1068, 1077, 1081, 1087, 1088, 1089, 1090, 1091, 1092, 1095, 1096, 1097, 1098, 1099, 1100, 1103, 1105, 1106, 1107, 1109, 1110, 1111, 1113, 1118, 1121, 1124, 1125, 1126, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1151, 1152, 1153, 1154, 1155, 1156, 1158, 1160, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1176, 1177, 1185, 1187, 1207, 1208, 1213, 1214, 1215, 1216, 1226, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1247, 1249, 1266, 1267, 1269, 1272, 1275, 1284, 1288, 1289, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 
1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1354, 1355, 1356, 1357, 1359, 1360, 1361, 1363, 1364, 1366, 1367, 1370, 1371, 1372, 1373, 1375, 1376, 1377, 1378, 1395, 1403, 1408, 1411, 1412, 1414, 1416, 1417, 1418, 1419, 1420, 1422, 1423, 1424, 1426, 1440, 1441, 1442, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1469, 1472, 1475, 1477, 1484, 1487, 1488, 1489, 1490, 1491, 1496, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1522, 1523, 1524, 1526, 1527, 1532, 1533, 1534, 1542, 1566, 1570, 1574, 1578, 1580, 1581, 1589, 1630, 1632, 1669, 1684, 1706, 1709, 1710, 1716, 1736, 1738, 1742, 1743, 1747, 1751, 1757, 1766, 1770, 1772, 1775, 1776, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1802, 1814, 1815, 1819, 1820, 1823, 1824, 1826, 1827, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1845, 1846, 1847, 1848, 1855, 1857, 1862, 1866, 1877, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1894, 1899, 1902, 1904, 1905, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1916, 1917, 1919, 1920, 1921, 1922, 1923, 1924, 1926, 1927, 1934, 1939, 1940, 1941, 1942, 1944, 1945, 1946, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1957, 1960, 1964, 1971, 1972, 1976, 1977, 1978, 1979, 1986, 1988, 1990, 1991, 2003, 2006, 2009, 2010, 2011, 2014, 2015, 2016, 2018, 2020, 2032, 2033, 2034, 2035, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2059, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2073, 2074, 2075, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2096, 2099, 2100, 2101, 2105, 2110, 2112, 2116], "respect": [0, 1, 5, 8, 17, 28, 29, 32, 33, 35, 37, 45, 47, 55, 61, 63, 64, 198, 211, 489, 582, 605, 691, 692, 693, 762, 797, 821, 844, 888, 894, 896, 908, 909, 917, 923, 927, 928, 970, 997, 1096, 1098, 1099, 1165, 1167, 1168, 1169, 1170, 1171, 1172, 1176, 1226, 1272, 1304, 1308, 1309, 1318, 1328, 1331, 1333, 1336, 1345, 1377, 1416, 1438, 1439, 1445, 1456, 1457, 1458, 1462, 1477, 1479, 1496, 1526, 1527, 1542, 1572, 1574, 1575, 1576, 1577, 1579, 1623, 1632, 1738, 1742, 1744, 1766, 1768, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1810, 1900, 1904, 1927, 1944, 2034, 2041, 2042, 2045, 2048, 2052, 2055, 2056, 2073, 2080, 2081, 2082, 2087, 2101, 2105], "follow": [0, 1, 2, 3, 5, 7, 9, 11, 12, 14, 15, 18, 21, 22, 23, 24, 27, 28, 30, 32, 33, 34, 35, 37, 44, 45, 46, 47, 48, 52, 53, 55, 57, 59, 60, 62, 64, 66, 68, 74, 75, 76, 77, 84, 85, 86, 489, 490, 619, 682, 762, 781, 787, 794, 795, 796, 802, 816, 817, 818, 819, 821, 822, 823, 824, 826, 827, 857, 864, 865, 892, 893, 894, 901, 908, 909, 927, 928, 942, 959, 964, 968, 977, 980, 989, 1050, 1064, 1108, 1126, 1128, 1152, 1162, 1175, 1187, 1197, 1226, 1272, 1273, 1282, 1286, 1293, 1304, 1315, 1325, 1328, 1329, 1342, 1345, 1364, 1367, 1430, 1439, 1445, 1461, 1469, 1472, 1477, 1496, 1526, 1542, 1558, 1574, 1578, 1585, 1637, 1651, 1684, 1708, 1709, 1710, 1713, 1714, 1716, 1723, 1724, 1730, 1771, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1805, 1808, 1816, 1827, 1855, 1862, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1902, 1903, 1904, 1905, 1912, 1923, 1928, 1949, 1957, 1964, 1967, 2011, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2022, 2023, 2026, 2027, 2032, 2033, 2034, 2035, 2036, 2040, 2041, 2042, 2043, 2044, 2045, 2048, 2049, 2050, 2051, 2052, 2053, 2055, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 
2069, 2070, 2071, 2073, 2075, 2076, 2077, 2080, 2081, 2083, 2084, 2086, 2087, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2109, 2111, 2112, 2113, 2116], "describ": [0, 5, 7, 8, 9, 23, 24, 28, 30, 32, 34, 37, 39, 45, 47, 52, 53, 64, 235, 515, 794, 795, 796, 817, 818, 819, 844, 862, 960, 1033, 1050, 1067, 1108, 1226, 1293, 1333, 1430, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1463, 1464, 1465, 1466, 1467, 1470, 1472, 1473, 1474, 1480, 1483, 1486, 1488, 1489, 1490, 1492, 1498, 1517, 1519, 1520, 1521, 1532, 1533, 1540, 1541, 1545, 1558, 1566, 1574, 1575, 1576, 1578, 1615, 1627, 1628, 1637, 1671, 1715, 1808, 1949, 2012, 2013, 2015, 2016, 2024, 2040, 2041, 2042, 2045, 2047, 2048, 2050, 2052, 2054, 2055, 2060, 2065, 2068, 2069, 2070, 2073, 2076, 2077, 2079, 2087, 2099, 2102, 2103, 2107], "part": [0, 1, 3, 4, 5, 6, 7, 9, 14, 15, 18, 23, 24, 28, 30, 33, 35, 47, 48, 52, 53, 55, 59, 60, 64, 84, 85, 844, 917, 982, 986, 1108, 1262, 1264, 1265, 1268, 1272, 1283, 1285, 1288, 1289, 1303, 1309, 1311, 1320, 1327, 1330, 1526, 1566, 1684, 1716, 1734, 1800, 1827, 1839, 1908, 1949, 1952, 1953, 1954, 1955, 2011, 2012, 2013, 2015, 2016, 2026, 2027, 2041, 2042, 2045, 2048, 2050, 2051, 2052, 2055, 2060, 2065, 2068, 2070, 2075, 2076, 2077, 2080, 2085, 2087, 2097, 2099, 2101, 2102, 2103, 2109, 2110, 2111], "expos": [0, 1, 8, 19, 28, 32, 38, 55, 63, 64, 797, 1162, 2042, 2045, 2054, 2068, 2070, 2099, 2112], "namespac": [0, 64, 1082, 2013, 2017, 2020, 2048, 2055, 2063, 2065, 2073, 2092, 2100, 2112], "below": [0, 1, 5, 9, 12, 14, 23, 24, 28, 30, 33, 34, 35, 37, 39, 44, 47, 48, 50, 51, 53, 64, 66, 74, 75, 682, 736, 750, 759, 797, 816, 1050, 1095, 1096, 1097, 1098, 1099, 1108, 1136, 1138, 1142, 1180, 1191, 1216, 1226, 1272, 1289, 1329, 1330, 1335, 1342, 1374, 1456, 1457, 1458, 1477, 1479, 1486, 1496, 1498, 1522, 1523, 1524, 1526, 1542, 1558, 1579, 1615, 1641, 1688, 1716, 1736, 1770, 1797, 1867, 1870, 1907, 1939, 1949, 1952, 1953, 1954, 1955, 2013, 2015, 2016, 2018, 2022, 2023, 2032, 2033, 2041, 2042, 2045, 2047, 2048, 2052, 2053, 2055, 2057, 2059, 2060, 2062, 2063, 2065, 2067, 2068, 2069, 2070, 2072, 2075, 2076, 2077, 2081, 2084, 2087, 2093, 2097, 2098, 2100, 2102, 2103, 2104, 2105, 2107, 2109, 2111], "do": [0, 1, 4, 7, 8, 9, 11, 14, 15, 23, 24, 28, 30, 32, 33, 36, 37, 40, 47, 48, 50, 52, 55, 56, 57, 60, 61, 63, 64, 86, 497, 515, 517, 519, 760, 825, 828, 866, 895, 898, 903, 906, 908, 909, 918, 922, 929, 931, 961, 974, 975, 977, 1004, 1008, 1042, 1053, 1087, 1088, 1089, 1090, 1159, 1160, 1162, 1171, 1185, 1187, 1197, 1213, 1235, 1259, 1269, 1272, 1275, 1284, 1288, 1318, 1336, 1345, 1352, 1362, 1364, 1373, 1412, 1422, 1434, 1444, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1482, 1483, 1484, 1512, 1545, 1546, 1547, 1554, 1569, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1634, 1636, 1651, 1706, 1716, 1723, 1724, 1772, 1777, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1814, 1820, 1870, 1964, 2011, 2012, 2013, 2014, 2016, 2018, 2020, 2024, 2025, 2032, 2033, 2034, 2041, 2042, 2043, 2045, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2063, 2065, 2067, 2069, 2070, 2071, 2075, 2077, 2080, 2081, 2082, 2083, 2084, 2085, 2087, 2096, 2098, 2099, 2101, 2104, 2107, 2108, 2110, 2111, 2113], "defin": [0, 1, 3, 5, 9, 11, 14, 15, 23, 24, 28, 29, 30, 33, 34, 35, 37, 39, 45, 47, 48, 52, 53, 55, 60, 64, 417, 437, 447, 449, 451, 519, 568, 794, 799, 800, 801, 803, 804, 840, 862, 863, 864, 865, 892, 
893, 894, 895, 908, 909, 1064, 1091, 1108, 1109, 1128, 1156, 1163, 1187, 1214, 1216, 1232, 1234, 1235, 1261, 1272, 1278, 1295, 1302, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1318, 1319, 1322, 1324, 1325, 1329, 1330, 1331, 1333, 1335, 1336, 1342, 1378, 1462, 1467, 1473, 1474, 1481, 1482, 1483, 1484, 1491, 1493, 1494, 1495, 1526, 1532, 1545, 1560, 1562, 1567, 1569, 1586, 1587, 1597, 1632, 1684, 1690, 1706, 1709, 1716, 1742, 1744, 1751, 1752, 1772, 1775, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1802, 1805, 1808, 1826, 1835, 1837, 1839, 1846, 1874, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1904, 1906, 1949, 1952, 1953, 1954, 1955, 1979, 2009, 2011, 2013, 2016, 2018, 2020, 2031, 2032, 2033, 2035, 2038, 2042, 2045, 2047, 2051, 2052, 2053, 2055, 2057, 2061, 2065, 2067, 2068, 2069, 2070, 2073, 2075, 2080, 2081, 2086, 2087, 2089, 2096, 2097, 2099, 2101, 2103, 2108, 2112], "still": [0, 1, 2, 7, 8, 23, 27, 28, 33, 35, 37, 47, 52, 55, 63, 64, 488, 796, 801, 903, 905, 908, 913, 977, 1186, 1187, 1276, 1282, 1290, 1492, 1517, 1716, 1717, 1718, 1719, 1858, 2013, 2016, 2032, 2033, 2034, 2035, 2041, 2042, 2045, 2047, 2048, 2050, 2051, 2053, 2060, 2061, 2065, 2067, 2068, 2070, 2075, 2076, 2077, 2080, 2093, 2099, 2101, 2106, 2107, 2111, 2113], "chang": [0, 1, 2, 3, 7, 11, 12, 18, 19, 24, 28, 30, 32, 33, 34, 35, 37, 46, 52, 53, 55, 56, 59, 60, 62, 63, 64, 65, 141, 235, 256, 323, 460, 498, 501, 519, 522, 558, 619, 682, 821, 880, 922, 923, 975, 977, 989, 990, 991, 1007, 1008, 1018, 1029, 1031, 1042, 1043, 1050, 1051, 1053, 1096, 1144, 1160, 1165, 1181, 1196, 1200, 1219, 1226, 1247, 1269, 1272, 1283, 1284, 1303, 1314, 1316, 1317, 1318, 1321, 1334, 1374, 1403, 1465, 1468, 1486, 1496, 1526, 1558, 1579, 1585, 1586, 1587, 1590, 1597, 1632, 1657, 1658, 1659, 1684, 1706, 1707, 1716, 1718, 1719, 1723, 1724, 1733, 1736, 1737, 1766, 1770, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1799, 1802, 1805, 1806, 1808, 1810, 1812, 1826, 1866, 1870, 1907, 1918, 1921, 1922, 1923, 1948, 1967, 1971, 1972, 1986, 1988, 2011, 2012, 2013, 2020, 2022, 2023, 2024, 2026, 2029, 2032, 2033, 2034, 2035, 2041, 2042, 2043, 2044, 2045, 2048, 2049, 2051, 2052, 2053, 2055, 2057, 2058, 2060, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2075, 2077, 2080, 2082, 2083, 2084, 2085, 2086, 2089, 2090, 2091, 2093, 2096, 2098, 2099, 2101, 2103, 2104, 2107, 2108, 2111], "which": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 20, 23, 24, 29, 30, 32, 33, 34, 35, 36, 37, 40, 45, 46, 47, 48, 50, 52, 53, 55, 59, 60, 63, 64, 66, 74, 75, 90, 152, 260, 291, 315, 317, 319, 321, 323, 337, 474, 488, 489, 501, 515, 517, 519, 539, 562, 606, 609, 619, 626, 686, 699, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 762, 766, 783, 792, 793, 799, 801, 821, 825, 828, 840, 857, 860, 861, 862, 864, 879, 883, 888, 895, 896, 898, 908, 909, 911, 912, 913, 914, 915, 916, 917, 927, 928, 935, 946, 959, 962, 965, 969, 975, 977, 989, 995, 996, 997, 1007, 1008, 1013, 1020, 1021, 1022, 1023, 1024, 1035, 1036, 1037, 1064, 1066, 1072, 1073, 1084, 1086, 1091, 1095, 1096, 1097, 1098, 1099, 1108, 1124, 1129, 1132, 1136, 1139, 1142, 1147, 1148, 1149, 1150, 1151, 1156, 1159, 1165, 1166, 1169, 1170, 1171, 1172, 1173, 1177, 1185, 1186, 1187, 1191, 1197, 1201, 1209, 1213, 1216, 1235, 1247, 1257, 1263, 1269, 1270, 1272, 1273, 1275, 1278, 1282, 1283, 1284, 1288, 1305, 1308, 1309, 1318, 1322, 1324, 1325, 1328, 1329, 1333, 1335, 1336, 1338, 1341, 1342, 1344, 1350, 1366, 
1373, 1378, 1412, 1418, 1421, 1422, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1440, 1441, 1442, 1445, 1454, 1455, 1457, 1458, 1461, 1462, 1469, 1476, 1477, 1486, 1488, 1489, 1490, 1491, 1493, 1494, 1495, 1496, 1498, 1512, 1516, 1520, 1521, 1522, 1523, 1524, 1526, 1529, 1531, 1532, 1541, 1555, 1558, 1560, 1562, 1566, 1570, 1575, 1576, 1590, 1614, 1631, 1632, 1634, 1644, 1651, 1670, 1671, 1684, 1690, 1691, 1699, 1706, 1715, 1716, 1723, 1724, 1730, 1732, 1733, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1754, 1766, 1768, 1771, 1777, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1816, 1828, 1842, 1846, 1848, 1849, 1853, 1855, 1862, 1866, 1870, 1874, 1875, 1876, 1882, 1894, 1899, 1903, 1904, 1906, 1907, 1915, 1918, 1923, 1928, 1934, 1943, 1948, 1949, 1952, 1953, 1954, 1955, 1959, 1960, 1963, 1964, 1976, 1982, 1990, 1991, 1992, 2008, 2011, 2013, 2015, 2016, 2018, 2022, 2023, 2024, 2026, 2027, 2028, 2029, 2032, 2034, 2035, 2040, 2041, 2043, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2059, 2061, 2062, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2073, 2074, 2075, 2076, 2077, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2090, 2091, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2103, 2104, 2105, 2107, 2108, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116], "unlist": 0, "downstream": [0, 3, 977, 2029, 2102, 2103, 2111], "stabl": [0, 1, 2, 11, 12, 28, 35, 47, 48, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 879, 989, 1108, 1308, 1309, 1313, 1318, 1326, 1330, 1333, 1336, 1338, 1345, 1439, 1899, 2012, 2014, 2020, 2040, 2043, 2059, 2075], "believ": [0, 8, 2102], "unstabl": [0, 35, 1308, 1309, 1336, 1353, 1362, 1651, 1927, 2081], "__matmul__": 0, "addbmm": [0, 102, 943, 2014, 2058, 2066], "addmv": [0, 110, 2014, 2033, 2066], "addr": [0, 28, 47, 112, 2014, 2066, 2113], "baddbmm": [0, 154, 2014, 2058, 2066], "bmm": [0, 1964, 2014, 2033, 2035, 2058, 2059, 2066, 2080, 2106], "chain_matmul": [0, 2014, 2066], "multi_dot": [0, 965], "conv1d": [0, 710, 714, 717, 727, 743, 1456, 1464, 1502, 1964, 2014, 2066, 2070, 2072], "conv2d": [0, 52, 711, 715, 718, 720, 728, 744, 794, 816, 1272, 1282, 1284, 1288, 1289, 1457, 1465, 1503, 1526, 1527, 1533, 1555, 1578, 1723, 1751, 1964, 2013, 2014, 2026, 2063, 2065, 2066, 2070, 2072, 2073, 2085, 2091], "conv3d": [0, 712, 716, 719, 721, 729, 745, 1458, 1466, 1504, 1724, 1964, 2014, 2066, 2070, 2072], "conv_transpose1d": [0, 2014, 2066, 2072], "conv_transpose2d": [0, 2014, 2066, 2072], "conv_transpose3d": [0, 2014, 2066, 2072], "grucel": [0, 2058, 2070, 2072, 2073], "lstmcell": [0, 2058, 2070, 2072, 2073], "matmul": [0, 2, 11, 955, 975, 1176, 1377, 1578, 1816, 1826, 1870, 1927, 2014, 2033, 2035, 2045, 2058, 2066, 2072, 2080, 2096], "mv": [0, 11, 1284, 1964, 2014, 2033, 2066, 2080], "prelu": [0, 1527, 2014, 2066, 2072], "rnncell": [0, 2070, 2072, 2073], "__pow__": 0, "__rdiv__": 0, "__rpow__": 0, "__rtruediv__": 0, "aco": [0, 96, 629, 630, 869, 2014, 2033, 2066, 2087, 2106], "asin": [0, 143, 631, 632, 871, 2014, 2033, 2066, 2080, 2106], "cosh": [0, 205, 639, 640, 686, 2014, 2033, 2066, 2106], "cosine_embedding_loss": [0, 2014, 2066], "cdist": [0, 2045, 2066], "cosine_similar": [0, 1576, 2014, 2066], "cross_entropi": [0, 34, 2014], "cumprod": [0, 215, 2014, 2033, 2066], "cumsum": [0, 217, 1091, 1964, 2014, 2033, 2066, 2106], "dist": [0, 24, 28, 29, 32, 33, 
35, 48, 55, 966, 967, 968, 1302, 1308, 1309, 1310, 1312, 1313, 1314, 1318, 1319, 1330, 1331, 1334, 1336, 1337, 1360, 1363, 1535, 1566, 1716, 1730, 1927, 2014, 2047, 2066, 2075, 2077], "erfinv": [0, 253, 2014, 2033, 2066, 2080, 2081], "exp": [0, 1, 35, 255, 645, 646, 772, 913, 914, 916, 1151, 1332, 1352, 1360, 1439, 1444, 1461, 1467, 1491, 1515, 1516, 1530, 1540, 1554, 1557, 1559, 1560, 1562, 1563, 1567, 1606, 1652, 1676, 1685, 1686, 1690, 1692, 1695, 1730, 1823, 1883, 1884, 1906, 1923, 2014, 2033, 2042, 2065, 2066, 2081, 2106], "expm1": [0, 259, 647, 648, 2014, 2033, 2066, 2080, 2081, 2106], "group_norm": [0, 2014, 2066, 2072], "hinge_embedding_loss": [0, 2014, 2066], "kl_div": [0, 2014, 2066], "l1_loss": [0, 2014, 2066], "layer_norm": [0, 1498, 2014, 2066, 2072], "log": [0, 2, 14, 23, 24, 25, 35, 37, 40, 41, 44, 45, 81, 83, 378, 379, 655, 662, 682, 1118, 1348, 1350, 1352, 1353, 1360, 1389, 1430, 1438, 1439, 1461, 1479, 1491, 1515, 1516, 1530, 1533, 1540, 1559, 1560, 1563, 1629, 1634, 1644, 1651, 1652, 1668, 1676, 1690, 1692, 1716, 2012, 2014, 2022, 2029, 2033, 2042, 2045, 2047, 2048, 2063, 2065, 2066, 2069, 2081, 2085, 2089, 2090, 2091, 2104, 2105, 2106, 2108, 2109, 2111], "log_softmax": [0, 1445, 1491, 1533, 1616, 1668, 1690, 2014, 2034, 2063, 2066, 2081], "log10": [0, 373, 656, 657, 2014, 2033, 2066, 2106], "log1p": [0, 375, 658, 659, 2014, 2033, 2066, 2080, 2081, 2106], "log2": [0, 377, 660, 661, 2014, 2033, 2066, 2081, 2106], "margin_ranking_loss": [0, 2014, 2066], "mse_loss": [0, 59, 1165, 2014, 2066], "multilabel_margin_loss": [0, 2014, 2066], "multi_margin_loss": [0, 2014, 2066], "nll_loss": [0, 2014, 2066], "norm": [0, 33, 34, 35, 55, 57, 64, 704, 705, 706, 707, 708, 709, 963, 1102, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1304, 1308, 1317, 1318, 1325, 1330, 1342, 1468, 1469, 1517, 1535, 1541, 1571, 1572, 1573, 1574, 1575, 1623, 1624, 1669, 1673, 1715, 1720, 1721, 1731, 1732, 1741, 1742, 1747, 1750, 1751, 1765, 1768, 1785, 1847, 2014, 2018, 2041, 2042, 2052, 2055, 2058, 2066], "normal": [0, 1, 19, 24, 28, 32, 47, 52, 55, 64, 84, 338, 353, 379, 456, 558, 997, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1187, 1234, 1269, 1272, 1277, 1308, 1350, 1434, 1440, 1441, 1442, 1464, 1465, 1466, 1470, 1480, 1488, 1489, 1490, 1498, 1508, 1509, 1510, 1514, 1541, 1554, 1566, 1570, 1571, 1572, 1573, 1574, 1602, 1632, 1633, 1642, 1646, 1650, 1677, 1681, 1715, 1721, 1722, 1731, 1732, 1747, 1755, 1756, 1765, 1768, 1794, 1810, 1830, 1839, 1840, 1847, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1923, 1964, 2011, 2012, 2013, 2014, 2018, 2033, 2035, 2040, 2042, 2045, 2049, 2065, 2066, 2068, 2072, 2075, 2081, 2085, 2087, 2089, 2095, 2106, 2116], "pdist": [0, 1535, 2014, 2066], "poisson_nll_loss": [0, 2014, 2066], "pow": [0, 1, 471, 498, 911, 912, 915, 938, 1151, 2014, 2016, 2033, 2041, 2042, 2066, 2080, 2086, 2106], "prod": [0, 44, 323, 519, 1125, 1127, 1130, 1131, 1133, 1134, 1137, 1138, 1140, 1141, 1143, 1145, 1338, 1339, 1468, 1472, 1578, 1962, 1964, 2014, 2033, 2066, 2080, 2102, 2106], "reciproc": [0, 487, 665, 666, 1857, 2014, 2033, 2066, 2106], "rsqrt": [0, 513, 2014, 2033, 2066, 2106], "sinh": [0, 538, 673, 674, 885, 2014, 2033, 2066, 2080, 2106], "smooth_l1_loss": [0, 2014, 2066], "soft_margin_loss": [0, 2014, 2066], "softmax": [0, 35, 798, 1430, 1461, 1491, 1516, 1561, 1615, 1634, 1644, 1651, 1684, 1691, 1903, 2014, 2033, 2034, 2035, 2045, 2066, 
2072, 2080, 2081], "softmin": [0, 2014], "softplu": [0, 35, 1525, 1663, 2014, 2066], "sum": [0, 1, 3, 12, 23, 28, 29, 32, 35, 37, 55, 60, 61, 64, 66, 73, 80, 498, 519, 568, 748, 896, 911, 912, 913, 914, 915, 916, 917, 927, 928, 936, 997, 1022, 1023, 1024, 1090, 1091, 1108, 1169, 1170, 1171, 1176, 1249, 1304, 1325, 1329, 1342, 1350, 1351, 1352, 1360, 1412, 1420, 1438, 1439, 1445, 1458, 1459, 1461, 1462, 1469, 1472, 1479, 1485, 1486, 1491, 1492, 1493, 1494, 1495, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1560, 1562, 1575, 1576, 1578, 1604, 1605, 1615, 1616, 1624, 1629, 1634, 1644, 1653, 1654, 1655, 1668, 1676, 1690, 1691, 1716, 1747, 1752, 1753, 1771, 1782, 1802, 1848, 1870, 1885, 1904, 1906, 1947, 1949, 1964, 2012, 2014, 2016, 2020, 2029, 2033, 2041, 2042, 2045, 2046, 2047, 2048, 2049, 2055, 2058, 2065, 2066, 2075, 2076, 2080, 2081, 2086, 2096, 2097, 2099, 2100, 2102, 2106, 2109, 2111, 2113], "renorm": [0, 494, 1468, 1469, 1623, 1624, 2014, 2066], "tan": [0, 577, 677, 678, 886, 2014, 2033, 2066, 2080, 2085, 2106], "triplet_margin_loss": [0, 2014, 2066], "take": [0, 1, 2, 3, 4, 5, 7, 9, 14, 19, 23, 24, 28, 30, 32, 33, 35, 37, 46, 47, 48, 50, 52, 55, 57, 58, 60, 61, 62, 63, 64, 66, 74, 75, 89, 762, 795, 800, 816, 822, 828, 865, 911, 912, 913, 914, 915, 916, 922, 923, 931, 1007, 1044, 1096, 1098, 1099, 1124, 1126, 1129, 1132, 1136, 1137, 1139, 1142, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1176, 1177, 1197, 1305, 1327, 1330, 1430, 1439, 1469, 1473, 1474, 1477, 1496, 1520, 1521, 1522, 1523, 1524, 1526, 1534, 1542, 1570, 1580, 1581, 1627, 1628, 1645, 1670, 1684, 1716, 1769, 1794, 1798, 1823, 1928, 1951, 1965, 1976, 2011, 2013, 2014, 2016, 2023, 2024, 2026, 2032, 2033, 2034, 2035, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2053, 2055, 2059, 2060, 2061, 2065, 2066, 2068, 2069, 2070, 2075, 2076, 2077, 2080, 2083, 2084, 2085, 2088, 2090, 2093, 2096, 2097, 2098, 2100, 2101, 2102, 2103, 2105, 2109, 2110], "all": [0, 1, 2, 3, 5, 6, 7, 9, 11, 12, 14, 15, 17, 18, 19, 20, 23, 24, 28, 29, 30, 32, 33, 35, 37, 40, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 58, 60, 61, 63, 64, 90, 152, 262, 315, 317, 323, 337, 488, 490, 515, 517, 519, 548, 562, 609, 612, 619, 682, 688, 689, 694, 696, 700, 701, 737, 762, 793, 795, 796, 816, 842, 843, 857, 861, 864, 877, 880, 892, 893, 894, 896, 898, 900, 903, 904, 905, 906, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 931, 935, 936, 937, 941, 942, 945, 954, 956, 961, 962, 966, 969, 970, 974, 975, 977, 985, 989, 996, 1006, 1010, 1011, 1013, 1014, 1022, 1023, 1032, 1040, 1044, 1050, 1054, 1055, 1063, 1064, 1072, 1073, 1075, 1076, 1080, 1082, 1084, 1125, 1126, 1127, 1128, 1131, 1134, 1135, 1138, 1141, 1143, 1145, 1160, 1162, 1165, 1166, 1172, 1173, 1175, 1176, 1187, 1188, 1194, 1213, 1235, 1238, 1268, 1269, 1272, 1280, 1283, 1284, 1287, 1288, 1308, 1335, 1344, 1360, 1362, 1365, 1370, 1372, 1375, 1384, 1385, 1394, 1399, 1410, 1417, 1418, 1419, 1420, 1430, 1437, 1439, 1443, 1445, 1447, 1448, 1449, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1468, 1469, 1472, 1477, 1478, 1479, 1480, 1485, 1491, 1492, 1496, 1497, 1513, 1517, 1521, 1522, 1523, 1524, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1536, 1537, 1542, 1544, 1548, 1549, 1550, 1551, 1552, 1553, 1566, 1570, 1572, 1574, 1575, 1578, 1583, 1584, 1590, 1597, 1603, 1605, 1623, 1624, 1629, 1634, 1653, 1654, 1655, 1684, 1690, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1721, 1730, 1736, 1743, 1747, 1757, 1761, 1769, 1770, 1771, 1772, 1778, 1779, 1780, 1781, 1782, 1783, 
1784, 1785, 1786, 1787, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1802, 1805, 1806, 1809, 1810, 1811, 1812, 1824, 1863, 1870, 1902, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1918, 1920, 1921, 1922, 1923, 1926, 1943, 1945, 1952, 1953, 1954, 1955, 1958, 1960, 1961, 1962, 1967, 1971, 1972, 1974, 1978, 1981, 1982, 1983, 1989, 1994, 1999, 2000, 2001, 2002, 2005, 2008, 2011, 2013, 2014, 2015, 2016, 2020, 2022, 2023, 2024, 2026, 2028, 2029, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2044, 2045, 2047, 2049, 2050, 2051, 2052, 2053, 2055, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2066, 2068, 2069, 2070, 2074, 2075, 2076, 2077, 2079, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2089, 2090, 2091, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2107, 2108, 2109, 2110, 2111, 2112, 2113, 2115], "addcdiv": [0, 104, 2014, 2066], "addcmul": [0, 106, 2014, 2066], "atan2": [0, 148, 874, 2014, 2033, 2066, 2106], "bilinear": [0, 781, 787, 788, 1108, 1579, 1580, 1632, 1643, 1703, 1704, 1964, 2014, 2066], "cross": [0, 7, 8, 28, 30, 33, 34, 35, 37, 1438, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1604, 1605, 1615, 2014, 2045, 2061, 2066], "dot": [0, 2, 14, 60, 61, 619, 912, 914, 915, 916, 1087, 1088, 1089, 1090, 1167, 1177, 1249, 1293, 1340, 1341, 1367, 1438, 1439, 1461, 1472, 1485, 1492, 1517, 1532, 1533, 1576, 1585, 1590, 1684, 1923, 1973, 1974, 1975, 1976, 2014, 2033, 2052, 2066, 2068], "grid_sampl": [0, 1597, 1964, 2014, 2066], "index_put": [0, 1964, 2014, 2066, 2106], "scatter_add": [0, 2014, 2066, 2106], "tensordot": [0, 1249, 1338, 1339, 2018, 2045, 2066], "binari": [0, 2, 14, 15, 24, 35, 37, 40, 45, 47, 64, 156, 737, 945, 1366, 1438, 1439, 1532, 1604, 1605, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1778, 2012, 2017, 2033, 2034, 2054, 2063, 2065, 2068, 2085, 2093], "add": [0, 1, 3, 7, 12, 15, 17, 23, 28, 30, 32, 33, 38, 44, 47, 52, 53, 55, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 78, 84, 88, 100, 292, 315, 515, 517, 688, 689, 690, 693, 749, 750, 759, 792, 795, 840, 862, 864, 865, 931, 938, 977, 1023, 1053, 1064, 1108, 1166, 1180, 1186, 1187, 1193, 1272, 1284, 1430, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1502, 1503, 1504, 1505, 1506, 1507, 1526, 1528, 1532, 1533, 1537, 1540, 1634, 1676, 1706, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1870, 1919, 2011, 2013, 2014, 2015, 2016, 2020, 2026, 2029, 2033, 2035, 2041, 2043, 2044, 2046, 2047, 2048, 2049, 2053, 2054, 2055, 2057, 2062, 2063, 2065, 2066, 2068, 2069, 2070, 2071, 2072, 2073, 2075, 2076, 2077, 2080, 2083, 2084, 2085, 2090, 2091, 2096, 2097, 2098, 2099, 2100, 2102, 2103, 2105, 2106, 2109, 2110, 2111, 2112, 2113], "nativ": [0, 11, 24, 55, 64, 1033, 1166, 1283, 1721, 1722, 1870, 2013, 2016, 2032, 2045, 2060, 2062, 2067, 2068, 2075, 2102], "without": [0, 1, 3, 5, 7, 8, 9, 14, 18, 23, 24, 28, 29, 30, 32, 33, 34, 35, 40, 47, 52, 55, 56, 61, 63, 64, 65, 66, 67, 256, 488, 490, 619, 972, 975, 1011, 1013, 1048, 1129, 1130, 1131, 1139, 1140, 1141, 1187, 1196, 1197, 1212, 1269, 1270, 1272, 1273, 1319, 1320, 1335, 1342, 1412, 1440, 1441, 1442, 1465, 1469, 1488, 1489, 1490, 1523, 1526, 1534, 1566, 1574, 1624, 1706, 1716, 1748, 1764, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1802, 1833, 1849, 1866, 1958, 2011, 2015, 2016, 2020, 2023, 2024, 2026, 2029, 2032, 2034, 2041, 2042, 2043, 2045, 2047, 2048, 2052, 2053, 2055, 2057, 2059, 2060, 2063, 2065, 2068, 
2070, 2074, 2075, 2080, 2082, 2085, 2091, 2096, 2098, 2099, 2100, 2101, 2102, 2111, 2116], "intervent": [0, 8, 33, 2075], "mixtur": [0, 35, 1461, 1615], "bceloss": [0, 1439, 1604], "aren": [0, 8, 52, 60, 64, 83, 1166, 1187, 2033, 2042, 2051, 2076, 2112], "mean": [0, 2, 3, 5, 7, 8, 12, 15, 18, 19, 23, 24, 28, 30, 32, 34, 35, 47, 48, 50, 52, 53, 55, 56, 58, 61, 63, 64, 256, 260, 323, 337, 379, 456, 490, 497, 519, 760, 762, 783, 796, 798, 864, 865, 931, 943, 956, 989, 997, 1165, 1167, 1196, 1197, 1283, 1373, 1392, 1412, 1417, 1421, 1422, 1428, 1429, 1430, 1432, 1433, 1434, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1459, 1461, 1463, 1467, 1469, 1470, 1471, 1475, 1476, 1477, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1496, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1512, 1513, 1515, 1516, 1517, 1518, 1525, 1529, 1530, 1531, 1533, 1534, 1540, 1541, 1542, 1545, 1546, 1547, 1554, 1556, 1557, 1558, 1559, 1560, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1575, 1576, 1577, 1603, 1604, 1605, 1613, 1615, 1616, 1624, 1625, 1629, 1640, 1641, 1644, 1645, 1649, 1656, 1664, 1665, 1666, 1667, 1668, 1676, 1681, 1688, 1689, 1700, 1701, 1715, 1716, 1726, 1728, 1772, 1797, 1830, 1839, 1840, 1870, 1883, 1904, 1921, 1922, 1928, 1964, 1971, 1972, 2011, 2013, 2014, 2015, 2029, 2030, 2032, 2033, 2034, 2035, 2040, 2041, 2042, 2045, 2047, 2048, 2050, 2051, 2052, 2055, 2061, 2065, 2066, 2067, 2068, 2069, 2070, 2071, 2072, 2075, 2076, 2077, 2080, 2098, 2099, 2101, 2102, 2105, 2106, 2107, 2108], "doesn": [0, 1, 2, 5, 7, 8, 11, 12, 19, 23, 28, 30, 37, 45, 53, 58, 60, 63, 64, 86, 825, 828, 883, 903, 904, 908, 917, 929, 989, 1011, 1032, 1166, 1177, 1186, 1187, 1197, 1200, 1211, 1280, 1286, 1312, 1336, 1344, 1353, 1409, 1453, 1454, 1455, 1462, 1491, 1560, 1607, 1608, 1609, 1644, 1690, 1716, 1717, 1747, 1786, 1866, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1900, 1904, 1976, 1989, 2013, 2016, 2017, 2033, 2035, 2042, 2043, 2045, 2046, 2048, 2049, 2054, 2057, 2061, 2067, 2076, 2080, 2101, 2106, 2109, 2111], "help": [0, 1, 4, 7, 8, 11, 14, 20, 23, 24, 28, 33, 44, 47, 55, 60, 64, 922, 923, 935, 1032, 1064, 1166, 1167, 1177, 1186, 1187, 1272, 1273, 1292, 1374, 1388, 1389, 1464, 1465, 1466, 1470, 1526, 1730, 1781, 1876, 1976, 1989, 2011, 2016, 2023, 2024, 2033, 2034, 2041, 2042, 2043, 2045, 2047, 2049, 2053, 2055, 2058, 2063, 2065, 2068, 2075, 2076, 2080, 2089, 2099, 2101, 2104, 2105, 2109, 2111, 2115], "revers": [0, 35, 61, 64, 515, 736, 795, 911, 913, 1126, 1128, 1148, 1169, 1171, 1176, 1340, 1496, 1539, 1675, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1754, 1951, 1970, 2014, 2015, 2016, 2042, 2047, 2049, 2066, 2086], "therefor": [0, 3, 5, 23, 24, 28, 35, 36, 45, 55, 58, 66, 71, 72, 88, 450, 546, 917, 944, 953, 977, 1053, 1142, 1159, 1162, 1165, 1209, 1230, 1231, 1285, 1288, 1318, 1331, 1468, 1469, 1578, 1623, 1624, 1632, 1766, 1802, 2016, 2025, 2041, 2042, 2045, 2048, 2050, 2051, 2054, 2065, 2077, 2080, 2100, 2103], "rais": [0, 1, 5, 7, 14, 28, 30, 32, 33, 35, 37, 40, 47, 52, 55, 60, 63, 64, 66, 90, 315, 317, 323, 699, 897, 903, 906, 908, 911, 912, 913, 914, 915, 916, 917, 921, 922, 923, 930, 940, 942, 956, 965, 975, 1012, 1078, 1151, 1156, 1203, 1270, 1272, 1277, 1280, 1288, 1290, 1302, 1304, 1312, 1313, 1316, 1320, 1321, 1326, 1333, 1334, 1338, 1339, 1344, 1365, 1392, 1526, 1684, 1709, 1736, 1737, 1742, 1744, 1747, 1757, 1777, 1778, 1864, 1867, 1912, 1964, 2011, 2017, 2020, 2023, 2032, 2042, 2045, 2048, 2050, 2058, 2063, 2065, 2068, 2074, 2075, 
2079, 2087, 2098, 2101, 2102, 2110, 2111, 2112], "mani": [0, 3, 7, 11, 14, 19, 23, 24, 28, 35, 47, 55, 59, 60, 64, 90, 404, 682, 892, 894, 908, 909, 911, 913, 1108, 1197, 1285, 1445, 1797, 1895, 1945, 1952, 1953, 1954, 1955, 2013, 2015, 2024, 2033, 2040, 2042, 2043, 2044, 2045, 2048, 2049, 2054, 2055, 2058, 2062, 2065, 2067, 2070, 2077, 2080, 2083, 2085, 2086, 2089, 2097, 2098, 2099, 2101, 2102, 2104, 2105, 2107, 2110, 2111, 2113, 2114], "sigmoid": [0, 35, 64, 529, 669, 670, 762, 797, 1438, 1439, 1468, 1477, 1478, 1496, 1497, 1556, 1604, 1631, 1687, 2014, 2033, 2034, 2040, 2063, 2066, 2072, 2081, 2093, 2106], "right": [0, 2, 7, 9, 12, 28, 30, 35, 52, 55, 64, 822, 868, 944, 951, 953, 954, 959, 964, 968, 989, 1102, 1108, 1149, 1152, 1153, 1157, 1226, 1230, 1231, 1235, 1239, 1269, 1292, 1317, 1322, 1333, 1335, 1336, 1344, 1350, 1351, 1412, 1430, 1435, 1436, 1437, 1438, 1439, 1453, 1454, 1455, 1472, 1479, 1492, 1493, 1494, 1495, 1514, 1515, 1516, 1517, 1519, 1520, 1521, 1529, 1530, 1531, 1535, 1536, 1543, 1575, 1578, 1579, 1580, 1581, 1586, 1587, 1588, 1632, 1652, 1671, 1722, 1736, 1786, 1800, 1801, 1842, 1862, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1923, 1944, 1949, 1951, 1967, 1970, 2011, 2014, 2016, 2032, 2033, 2034, 2042, 2048, 2052, 2063, 2065, 2067, 2073, 2077, 2081, 2099, 2100, 2101, 2109], "entropi": [0, 34, 35, 1438, 1461, 1530, 1604, 1605, 1615, 2081], "combin": [0, 3, 23, 24, 28, 30, 38, 47, 61, 63, 619, 691, 714, 715, 716, 717, 718, 719, 720, 721, 795, 857, 893, 908, 1064, 1137, 1138, 1143, 1145, 1200, 1235, 1367, 1377, 1439, 1472, 1486, 1532, 1574, 1578, 1626, 1649, 1743, 2013, 2014, 2015, 2041, 2045, 2049, 2063, 2065, 2066, 2067, 2070, 2073, 2075, 2097], "two": [0, 1, 3, 4, 5, 6, 8, 11, 12, 14, 15, 18, 19, 23, 24, 28, 29, 30, 32, 33, 35, 44, 45, 47, 48, 52, 53, 55, 59, 60, 64, 86, 87, 585, 586, 587, 589, 590, 609, 619, 682, 694, 696, 762, 795, 890, 893, 908, 909, 913, 922, 963, 965, 1017, 1056, 1058, 1087, 1088, 1096, 1105, 1108, 1114, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1154, 1155, 1165, 1175, 1183, 1186, 1195, 1198, 1234, 1236, 1261, 1270, 1272, 1283, 1286, 1293, 1296, 1298, 1303, 1305, 1308, 1309, 1314, 1315, 1320, 1325, 1328, 1329, 1331, 1332, 1334, 1336, 1341, 1344, 1367, 1370, 1373, 1375, 1378, 1419, 1436, 1438, 1439, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1472, 1477, 1485, 1486, 1492, 1494, 1496, 1517, 1518, 1520, 1526, 1529, 1530, 1531, 1533, 1534, 1540, 1542, 1558, 1559, 1574, 1575, 1576, 1578, 1586, 1587, 1604, 1605, 1615, 1634, 1644, 1651, 1668, 1676, 1684, 1730, 1732, 1736, 1758, 1768, 1770, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1801, 1802, 1804, 1808, 1827, 1842, 1855, 1870, 1907, 1912, 1927, 1944, 1948, 1949, 1951, 1960, 1969, 1973, 1977, 2011, 2013, 2015, 2016, 2020, 2022, 2023, 2024, 2033, 2034, 2035, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2055, 2060, 2061, 2062, 2065, 2067, 2068, 2070, 2071, 2075, 2076, 2077, 2080, 2081, 2083, 2085, 2086, 2087, 2090, 2091, 2096, 2097, 2098, 2099, 2101, 2106, 2109, 2111], "bcewithlogitsloss": [0, 1605], "bcewithlogit": 0, "safe": [0, 28, 30, 47, 52, 63, 64, 87, 89, 90, 488, 1054, 1055, 1075, 1076, 1200, 1716, 1780, 1781, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1999, 2000, 2001, 2002, 2013, 2016, 2041, 2042, 2045, 2049, 2051, 2054, 2060, 2075, 2082, 2092, 2096, 2102, 2103, 2111], "_convolut": [0, 2066], "avg_pool3d": [0, 2014, 2066, 2072, 2106], "grid_sampler_2d": 
[0, 2014, 2066, 2106], "_grid_sampler_2d_cpu_fallback": [0, 2066], "grid_sampler_3d": [0, 2014, 2066], "polar": [0, 35, 1332, 2014, 2066], "quantil": [0, 1373, 1419, 2014, 2066, 2081], "nanquantil": [0, 2014, 2066], "stft": [0, 944, 953, 1230, 1231, 1269, 1292, 2014, 2066], "view_as_complex": [0, 11, 2014, 2066], "choleski": [0, 2, 35, 967, 968, 1303, 1309, 1345, 2014, 2066], "cholesky_invers": [0, 2, 2014, 2066], "cholesky_solv": [0, 2, 2014, 2066], "invers": [0, 35, 685, 686, 885, 888, 962, 967, 968, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1269, 1313, 1314, 1326, 1330, 1333, 1338, 1339, 1456, 1457, 1458, 1468, 1469, 1472, 1522, 1523, 1524, 1578, 1623, 1624, 1660, 1661, 1662, 1733, 1736, 1760, 1802, 1808, 1959, 2012, 2014, 2016, 2058, 2066, 2081], "lu_solv": [0, 2, 1320, 2014, 2066], "orgqr": [0, 2014, 2066], "ormqr": [0, 1216, 1312, 2014, 2066], "pinvers": [0, 1318, 2014, 2066], "max_pool3d": [0, 2014, 2066, 2072], "max_unpool2d": [0, 1627, 1658, 2014, 2066], "max_unpool3d": [0, 1628, 1659, 2014, 2066], "adaptive_avg_pool3d": [0, 2014, 2048, 2066, 2072], "reflection_pad1d": [0, 2014, 2066, 2106], "reflection_pad2d": [0, 2014, 2066, 2106], "replication_pad1d": [0, 2014, 2066], "replication_pad2d": [0, 2014, 2066, 2106], "replication_pad3d": [0, 2014, 2066, 2106], "ctc_loss": [0, 1445, 2014, 2066], "fft_fft": [0, 2014, 2066], "fft_ifft": [0, 2014, 2066], "fft_fft2": [0, 2014, 2066], "fft_ifft2": [0, 2014, 2066], "fft_fftn": [0, 2014, 2066], "fft_ifftn": [0, 2014, 2066], "fft_rfft": [0, 2014, 2066], "fft_irfft": [0, 2014, 2066], "fft_rfft2": [0, 2014, 2066], "fft_irfft2": [0, 2014, 2066], "fft_rfftn": [0, 2014, 2066], "fft_irfftn": [0, 2014, 2066], "fft_hfft": [0, 2014, 2066], "fft_ihfft": [0, 2014, 2066], "linalg_matrix_norm": [0, 2014, 2066], "linalg_cond": [0, 2014, 2066], "linalg_matrix_rank": [0, 2014, 2066], "linalg_solv": [0, 2014, 2066], "linalg_choleski": [0, 2014, 2066], "linalg_svdv": [0, 2014, 2066], "linalg_eigv": [0, 2014, 2066], "linalg_eigvalsh": [0, 2014, 2066], "linalg_inv": [0, 2014, 2066], "linalg_householder_product": [0, 2014, 2066], "linalg_tensorinv": [0, 2014, 2066], "linalg_tensorsolv": [0, 2014, 2066], "fake_quantize_per_tensor_affin": [0, 2014, 2066], "eig": [0, 1309, 1310, 1336, 2058], "geqrf": [0, 1312, 1814, 2014, 2066], "lstsq": [0, 1216, 1304, 1330], "_lu_with_info": [0, 2066], "qr": [0, 2, 1216, 1308, 1309, 1312, 1318, 1336, 1730, 1814, 2014, 2066], "solv": [0, 7, 11, 52, 1180, 1216, 1226, 1304, 1313, 1315, 1316, 1319, 1320, 1322, 1326, 1334, 1335, 1339, 1345, 1363, 1951, 2042, 2052, 2058, 2061, 2092, 2099, 2102], "svd": [0, 2, 11, 1308, 1309, 1318, 1330, 1337, 1353, 1736, 1816, 1928, 2014, 2058, 2066, 2080, 2106], "symeig": 0, "triangular_solv": [0, 2014, 2066], "fractional_max_pool2d": [0, 2014, 2066], "fractional_max_pool3d": [0, 2014, 2066], "adaptive_max_pool3d": [0, 2014, 2066], "multilabel_margin_loss_forward": [0, 2066], "linalg_qr": [0, 2014, 2066], "linalg_cholesky_ex": [0, 2014, 2066], "linalg_svd": [0, 2014, 2066], "linalg_eig": [0, 2014, 2066], "linalg_eigh": [0, 2014, 2066], "linalg_lstsq": [0, 2014, 2066], "linalg_inv_ex": [0, 2014, 2066], "cat": [0, 28, 35, 64, 546, 749, 750, 759, 795, 798, 987, 988, 1374, 1546, 1908, 1920, 2013, 2014, 2033, 2065, 2066, 2068, 2070, 2072, 2080, 2090, 2098, 2106], "stack": [0, 8, 18, 23, 24, 25, 28, 35, 40, 47, 52, 53, 59, 60, 64, 682, 762, 935, 962, 973, 977, 1069, 1070, 1107, 1175, 1177, 1187, 1238, 1374, 1445, 1477, 1496, 1497, 
1542, 1571, 1573, 1761, 1966, 1976, 1978, 2012, 2014, 2020, 2035, 2045, 2047, 2050, 2066, 2068, 2069, 2072, 2080, 2099, 2100, 2101, 2102, 2109, 2111, 2113, 2114], "index_copi": [0, 1964, 2014, 2066], "implement": [1, 2, 5, 8, 11, 12, 16, 17, 19, 23, 24, 28, 29, 30, 32, 34, 35, 39, 44, 45, 48, 52, 55, 60, 64, 65, 82, 86, 152, 417, 515, 517, 519, 689, 740, 741, 742, 743, 744, 745, 762, 781, 783, 787, 795, 802, 821, 822, 823, 826, 827, 828, 881, 883, 896, 912, 922, 923, 989, 995, 1108, 1151, 1156, 1162, 1169, 1170, 1172, 1173, 1272, 1278, 1280, 1282, 1283, 1286, 1328, 1331, 1344, 1345, 1373, 1430, 1445, 1462, 1477, 1480, 1493, 1494, 1495, 1498, 1526, 1532, 1538, 1541, 1542, 1543, 1546, 1563, 1572, 1574, 1632, 1637, 1671, 1684, 1692, 1715, 1716, 1721, 1722, 1730, 1731, 1736, 1747, 1760, 1764, 1765, 1768, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1800, 1802, 1808, 1826, 1833, 1846, 1855, 1858, 1882, 1894, 1904, 1927, 1928, 1944, 1960, 1964, 1965, 1967, 2012, 2015, 2016, 2017, 2020, 2023, 2024, 2031, 2032, 2035, 2036, 2040, 2041, 2042, 2044, 2045, 2048, 2049, 2050, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2063, 2067, 2068, 2070, 2075, 2076, 2080, 2081, 2084, 2086, 2096, 2098, 2100, 2102, 2107, 2109, 2112], "arbitrari": [1, 3, 28, 32, 33, 52, 66, 69, 74, 256, 893, 908, 1091, 1187, 1336, 1344, 1462, 1468, 1492, 1517, 1578, 1604, 1605, 1623, 1644, 1671, 1757, 1927, 1949, 1962, 2016, 2022, 2024, 2042, 2049, 2054, 2055, 2067, 2070, 2080, 2089, 2099, 2102, 2107, 2111, 2112], "scalar": [1, 12, 28, 35, 53, 66, 75, 99, 152, 156, 262, 315, 448, 515, 591, 689, 690, 740, 741, 742, 743, 744, 745, 882, 883, 896, 911, 912, 915, 917, 948, 951, 959, 993, 997, 1103, 1108, 1123, 1156, 1163, 1226, 1233, 1249, 1263, 1297, 1298, 1367, 1374, 1419, 1430, 1438, 1439, 1445, 1459, 1461, 1462, 1479, 1485, 1486, 1491, 1492, 1498, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1576, 1634, 1677, 1770, 1775, 1776, 1777, 1810, 1823, 1825, 1827, 1845, 1846, 1862, 1910, 1911, 1912, 1913, 1914, 1942, 1960, 1961, 1979, 2009, 2010, 2014, 2015, 2016, 2029, 2035, 2040, 2042, 2048, 2052, 2065, 2075, 2080, 2083, 2085, 2087, 2099, 2101, 2102, 2106, 2110], "minim": [1, 7, 8, 821, 878, 1375, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1810, 2011, 2029, 2041, 2045, 2055, 2057, 2060, 2065, 2070, 2102, 2109, 2111], "exist": [1, 7, 8, 9, 11, 14, 23, 28, 29, 30, 35, 37, 38, 40, 45, 47, 48, 53, 55, 59, 62, 64, 66, 84, 85, 256, 501, 794, 857, 881, 911, 912, 913, 914, 915, 916, 917, 922, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1048, 1186, 1199, 1200, 1209, 1272, 1276, 1288, 1308, 1313, 1319, 1320, 1345, 1419, 1526, 1527, 1536, 1586, 1902, 1920, 1967, 2011, 2013, 2020, 2023, 2032, 2033, 2035, 2036, 2042, 2043, 2045, 2048, 2049, 2051, 2053, 2054, 2055, 2056, 2058, 2060, 2065, 2068, 2069, 2070, 2075, 2077, 2080, 2084, 2086, 2096, 2097, 2099, 2101, 2103, 2106, 2109, 2113], "code": [1, 3, 4, 8, 9, 11, 14, 15, 16, 19, 23, 24, 28, 30, 33, 35, 37, 48, 50, 52, 53, 60, 61, 63, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 121, 682, 918, 938, 975, 977, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1050, 1051, 1064, 1166, 1187, 1272, 1276, 1277, 1283, 1284, 1285, 1288, 1289, 1290, 1303, 1314, 1316, 1344, 1543, 1555, 1587, 1588, 1589, 1716, 1873, 2011, 2012, 2015, 2016, 2022, 2024, 2034, 2042, 2043, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2059, 2060, 2061, 2063, 2065, 2067, 2069, 2074, 2075, 2076, 2077, 2080, 2083, 2084, 2092, 
2093, 2096, 2097, 2098, 2099, 2100, 2101, 2103, 2104, 2105, 2107, 2109, 2110, 2111, 2112, 2113], "need": [1, 3, 5, 6, 7, 8, 9, 14, 15, 23, 24, 28, 29, 30, 32, 33, 34, 35, 39, 40, 45, 47, 48, 51, 52, 53, 55, 57, 58, 60, 64, 65, 66, 76, 77, 87, 88, 152, 256, 473, 490, 497, 501, 585, 682, 736, 737, 821, 822, 823, 826, 827, 828, 844, 861, 892, 896, 903, 904, 905, 908, 909, 917, 929, 956, 957, 965, 982, 983, 1044, 1053, 1060, 1096, 1098, 1108, 1129, 1131, 1151, 1160, 1165, 1187, 1209, 1272, 1366, 1412, 1461, 1468, 1522, 1523, 1524, 1526, 1532, 1570, 1572, 1574, 1607, 1608, 1609, 1702, 1716, 1723, 1724, 1736, 1738, 1742, 1744, 1772, 1797, 1843, 1848, 1862, 1920, 1949, 1965, 1967, 2012, 2013, 2015, 2016, 2020, 2023, 2024, 2026, 2028, 2032, 2034, 2035, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2054, 2057, 2058, 2059, 2060, 2061, 2063, 2065, 2067, 2068, 2070, 2073, 2075, 2076, 2077, 2080, 2082, 2083, 2086, 2087, 2091, 2092, 2096, 2097, 2098, 2099, 2100, 2101, 2103, 2104, 2107, 2109, 2111, 2112], "declar": [1, 9, 14, 23, 48, 1197, 2015, 2016, 2017, 2048, 2065, 2068], "requires_grad": [1, 5, 30, 34, 35, 61, 337, 447, 448, 449, 450, 451, 489, 490, 498, 868, 883, 903, 904, 906, 907, 908, 909, 918, 919, 922, 923, 925, 927, 928, 938, 944, 953, 1053, 1109, 1110, 1111, 1112, 1121, 1126, 1144, 1162, 1163, 1164, 1167, 1177, 1230, 1231, 1272, 1292, 1343, 1359, 1438, 1439, 1459, 1461, 1468, 1469, 1479, 1491, 1492, 1517, 1518, 1526, 1532, 1533, 1540, 1574, 1575, 1604, 1605, 1615, 1668, 1716, 1717, 1718, 1719, 1764, 1769, 1775, 1776, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1904, 1909, 1910, 1911, 1912, 1913, 1914, 1942, 1964, 1976, 2009, 2010, 2014, 2018, 2020, 2033, 2034, 2035, 2045, 2048, 2049, 2055, 2060, 2065, 2066, 2075, 2076, 2086, 2087, 2089, 2099, 2100, 2101, 2102, 2106, 2109], "keyword": [1, 5, 23, 28, 29, 32, 33, 34, 52, 53, 60, 64, 66, 72, 86, 90, 315, 323, 447, 448, 449, 450, 451, 515, 583, 682, 683, 685, 686, 687, 688, 689, 690, 691, 692, 693, 695, 697, 698, 699, 700, 701, 864, 868, 883, 884, 885, 886, 887, 888, 943, 944, 945, 947, 948, 949, 950, 951, 952, 953, 955, 959, 962, 964, 966, 967, 968, 970, 972, 973, 986, 991, 992, 994, 995, 997, 1007, 1020, 1022, 1024, 1050, 1051, 1087, 1088, 1089, 1090, 1091, 1092, 1095, 1100, 1103, 1105, 1107, 1109, 1110, 1111, 1113, 1118, 1121, 1124, 1125, 1126, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1151, 1152, 1153, 1154, 1155, 1156, 1158, 1160, 1162, 1163, 1164, 1165, 1185, 1213, 1214, 1215, 1216, 1226, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1237, 1238, 1239, 1247, 1249, 1266, 1267, 1272, 1275, 1288, 1289, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1354, 1355, 1356, 1357, 1359, 1360, 1361, 1363, 1364, 1366, 1367, 1370, 1371, 1372, 1373, 1375, 1376, 1377, 1378, 1395, 1411, 1412, 1414, 1416, 1417, 1418, 1419, 1420, 1422, 1423, 1424, 1426, 1462, 1484, 1487, 1526, 1684, 1709, 1710, 1716, 1736, 1738, 1743, 1747, 1766, 1770, 1772, 1775, 1776, 1814, 1815, 1819, 1820, 1823, 1824, 1826, 1827, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1845, 1846, 1847, 1848, 1855, 1857, 1862, 1877, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 
1887, 1888, 1889, 1890, 1891, 1892, 1894, 1899, 1902, 1905, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1916, 1917, 1919, 1920, 1921, 1922, 1924, 1926, 1927, 1939, 1940, 1941, 1942, 1946, 1949, 1951, 1952, 1953, 1954, 1955, 1957, 1964, 1971, 1972, 1973, 1978, 1979, 2009, 2010, 2011, 2016, 2017, 2020, 2034, 2035, 2048, 2058, 2063, 2065, 2067, 2075, 2080, 2081, 2085, 2086, 2112], "support": [1, 2, 3, 5, 6, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 23, 24, 25, 28, 30, 32, 33, 34, 35, 36, 37, 39, 40, 47, 48, 52, 53, 55, 58, 59, 60, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 84, 85, 88, 89, 260, 313, 323, 460, 585, 619, 687, 688, 689, 691, 695, 697, 698, 701, 731, 740, 741, 742, 774, 775, 776, 781, 787, 788, 789, 794, 795, 796, 797, 803, 819, 835, 862, 864, 865, 895, 898, 903, 906, 908, 909, 922, 923, 943, 944, 948, 951, 953, 955, 967, 968, 975, 989, 992, 1007, 1050, 1051, 1053, 1071, 1103, 1105, 1108, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1153, 1154, 1155, 1156, 1158, 1159, 1161, 1172, 1176, 1230, 1231, 1243, 1269, 1276, 1284, 1288, 1292, 1293, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1345, 1363, 1367, 1371, 1376, 1377, 1409, 1411, 1453, 1454, 1455, 1456, 1457, 1458, 1465, 1468, 1469, 1472, 1492, 1513, 1531, 1532, 1566, 1574, 1576, 1578, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1624, 1626, 1632, 1643, 1649, 1684, 1702, 1703, 1705, 1716, 1723, 1724, 1771, 1776, 1782, 1783, 1784, 1786, 1796, 1814, 1826, 1845, 1846, 1867, 1870, 1871, 1900, 1901, 1904, 1907, 1924, 1927, 1951, 1953, 1955, 1965, 1967, 1973, 1974, 1975, 2010, 2011, 2012, 2013, 2015, 2017, 2018, 2020, 2026, 2029, 2032, 2036, 2042, 2043, 2044, 2045, 2048, 2053, 2055, 2057, 2058, 2060, 2061, 2062, 2063, 2064, 2067, 2068, 2069, 2073, 2074, 2075, 2079, 2081, 2083, 2084, 2085, 2086, 2087, 2089, 2090, 2092, 2093, 2095, 2096, 2097, 2098, 2100, 2101, 2103, 2104, 2110, 2111, 2112, 2118], "type": [1, 2, 3, 5, 12, 14, 15, 17, 19, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 38, 40, 41, 45, 47, 48, 50, 52, 55, 60, 62, 63, 64, 66, 68, 71, 72, 75, 76, 77, 82, 83, 87, 89, 90, 152, 193, 198, 210, 328, 331, 335, 343, 447, 448, 449, 450, 451, 483, 561, 606, 682, 687, 688, 689, 690, 691, 692, 736, 737, 740, 741, 742, 751, 753, 754, 755, 756, 758, 766, 768, 769, 772, 773, 774, 775, 776, 777, 778, 779, 780, 783, 786, 794, 795, 796, 799, 817, 818, 819, 821, 822, 823, 824, 825, 827, 829, 842, 843, 857, 858, 859, 860, 861, 862, 863, 864, 865, 868, 876, 882, 892, 893, 894, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 924, 925, 927, 928, 931, 943, 944, 946, 947, 948, 949, 950, 951, 952, 953, 954, 956, 957, 959, 960, 961, 962, 963, 965, 974, 975, 976, 977, 982, 983, 984, 989, 1000, 1001, 1002, 1003, 1005, 1011, 1013, 1017, 1019, 1026, 1027, 1028, 1030, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1045, 1047, 1050, 1051, 1052, 1053, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1064, 1065, 1066, 1071, 1083, 1085, 1086, 1089, 1090, 1103, 1108, 1109, 1110, 1111, 1121, 1122, 1123, 1126, 1144, 1151, 1153, 1154, 1155, 1156, 1159, 1160, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1170, 1173, 1174, 1175, 1177, 1178, 1179, 1180, 1182, 1184, 1185, 1187, 1194, 1201, 1202, 1203, 1204, 1205, 1211, 1214, 1218, 1220, 1222, 1225, 1230, 1231, 1233, 1234, 1235, 1248, 1251, 1253, 1254, 
1257, 1260, 1268, 1269, 1270, 1272, 1273, 1275, 1278, 1279, 1282, 1284, 1286, 1288, 1289, 1291, 1292, 1295, 1304, 1308, 1325, 1329, 1336, 1342, 1343, 1344, 1345, 1359, 1362, 1365, 1372, 1374, 1381, 1382, 1383, 1386, 1400, 1401, 1402, 1404, 1406, 1409, 1411, 1417, 1420, 1430, 1434, 1453, 1454, 1455, 1462, 1469, 1516, 1526, 1527, 1528, 1532, 1536, 1537, 1541, 1555, 1560, 1561, 1562, 1570, 1571, 1572, 1573, 1574, 1587, 1588, 1589, 1592, 1593, 1597, 1598, 1602, 1604, 1605, 1606, 1607, 1608, 1609, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1623, 1624, 1625, 1626, 1629, 1631, 1632, 1633, 1634, 1636, 1637, 1638, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1650, 1651, 1653, 1654, 1655, 1656, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1671, 1676, 1678, 1679, 1681, 1682, 1684, 1685, 1687, 1688, 1689, 1690, 1691, 1697, 1699, 1700, 1701, 1702, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1720, 1721, 1722, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1733, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1755, 1756, 1758, 1759, 1760, 1761, 1762, 1763, 1765, 1766, 1768, 1770, 1771, 1775, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1816, 1824, 1825, 1826, 1828, 1829, 1830, 1831, 1832, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1845, 1846, 1848, 1852, 1855, 1862, 1863, 1866, 1867, 1868, 1870, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1903, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1923, 1924, 1926, 1927, 1928, 1934, 1942, 1951, 1953, 1955, 1960, 1961, 1962, 1964, 1965, 1966, 1970, 1976, 1979, 1981, 1982, 1984, 1985, 1987, 1990, 1991, 1992, 1993, 1994, 1996, 1997, 2007, 2009, 2010, 2011, 2012, 2013, 2014, 2017, 2020, 2024, 2026, 2027, 2029, 2033, 2035, 2040, 2041, 2042, 2044, 2045, 2052, 2053, 2054, 2055, 2058, 2060, 2061, 2063, 2064, 2066, 2069, 2070, 2072, 2073, 2074, 2075, 2079, 2080, 2081, 2082, 2083, 2085, 2087, 2089, 2090, 2091, 2097, 2098, 2099, 2100, 2102, 2106, 2109, 2110, 2112], "doubl": [1, 3, 35, 37, 52, 53, 55, 483, 489, 619, 751, 753, 754, 755, 756, 758, 768, 769, 783, 903, 904, 906, 908, 909, 914, 922, 923, 931, 960, 967, 968, 986, 1007, 1053, 1123, 1151, 1226, 1272, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1353, 1354, 1355, 1356, 1357, 1363, 1469, 1526, 1592, 1595, 1624, 1706, 1777, 1814, 1820, 1909, 1910, 1911, 1913, 1914, 1927, 1942, 1951, 1979, 2016, 2033, 2045, 2048, 2049, 2051, 2058, 2065, 2068, 2082, 2083, 2086], "bfloat16": [1, 24, 55, 1254, 1272, 1324, 1341, 1526, 1777, 1782, 1783, 1784, 1796, 1867, 1870, 2033, 2045, 2080, 2082, 2083, 2086, 2087, 2116], "cfloat": [1, 11, 28, 313, 485, 619, 967, 968, 1007, 1243, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1363, 1814, 1844, 1927, 1951, 1975, 2083, 2086], "cdoubl": [1, 11, 967, 968, 1007, 1272, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1338, 1339, 1340, 
1341, 1342, 1363, 1526, 1814, 1927, 1951, 2083, 2086], "beta": [1, 2, 11, 56, 63, 101, 102, 107, 108, 109, 110, 111, 112, 153, 154, 323, 519, 556, 688, 691, 692, 693, 943, 1008, 1042, 1043, 1050, 1051, 1053, 1230, 1292, 1367, 1377, 1440, 1441, 1442, 1480, 1486, 1488, 1489, 1490, 1498, 1514, 1558, 1563, 1566, 1585, 1590, 1641, 1649, 1650, 1684, 1688, 1692, 1783, 1784, 1785, 1787, 1793, 1797, 1830, 1887, 1889, 1900, 1905, 1919, 2012, 2014, 2022, 2026, 2045, 2062, 2063, 2064, 2065, 2070, 2075, 2080, 2083, 2089, 2106], "even": [1, 2, 8, 19, 23, 24, 28, 30, 37, 52, 55, 63, 64, 89, 488, 546, 582, 682, 817, 818, 819, 896, 997, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1139, 1140, 1141, 1144, 1176, 1186, 1191, 1197, 1199, 1200, 1211, 1257, 1285, 1302, 1304, 1308, 1309, 1310, 1311, 1325, 1329, 1331, 1332, 1336, 1337, 1342, 1367, 1373, 1462, 1566, 1607, 1608, 1609, 1671, 1716, 1723, 1724, 1730, 1769, 1771, 1808, 1855, 1866, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1927, 1974, 2013, 2016, 2017, 2029, 2035, 2041, 2042, 2045, 2048, 2050, 2051, 2053, 2055, 2057, 2058, 2059, 2060, 2067, 2068, 2077, 2082, 2083, 2086, 2096, 2098, 2099, 2101, 2102, 2104, 2113], "though": [1, 11, 28, 64, 66, 69, 152, 488, 796, 893, 896, 908, 913, 1124, 1125, 1127, 1187, 1191, 1197, 1199, 1200, 1270, 1273, 1367, 1771, 2013, 2017, 2034, 2035, 2042, 2045, 2048, 2049, 2051, 2057, 2058, 2067, 2070, 2082, 2099, 2101, 2113], "signatur": [1, 12, 23, 30, 40, 52, 53, 55, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 86, 400, 489, 490, 558, 908, 909, 927, 928, 989, 1272, 1526, 1708, 1709, 1710, 1713, 1714, 1716, 1736, 1757, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1923, 2020, 2045, 2048, 2049, 2063, 2065, 2068, 2075, 2080, 2100, 2104, 2112], "veri": [1, 4, 7, 8, 18, 23, 24, 59, 61, 64, 995, 1167, 1175, 1177, 1186, 1335, 1488, 1489, 1490, 1716, 1717, 1730, 1786, 1808, 1814, 1890, 1894, 1951, 1976, 2015, 2032, 2042, 2048, 2050, 2052, 2053, 2055, 2057, 2058, 2061, 2067, 2068, 2074, 2075, 2076, 2080, 2086, 2099, 2101, 2102, 2104, 2110, 2111], "unlik": [1, 3, 7, 35, 53, 55, 61, 490, 495, 918, 1105, 1144, 1147, 1151, 1308, 1309, 1328, 1331, 1336, 1340, 1370, 1373, 1375, 1498, 1718, 1719, 1845, 1934, 1973, 2015, 2016, 2032, 2035, 2045, 2057, 2059, 2083, 2086, 2089, 2098, 2102], "coverag": [1, 7, 52, 56, 64, 1169, 1170, 2012, 2013, 2018, 2034, 2070, 2080, 2112], "plan": [1, 2, 7, 9, 28, 30, 34, 898, 1716, 1826, 2035, 2042, 2048, 2068, 2080, 2086, 2110, 2111], "consid": [1, 5, 8, 24, 28, 33, 37, 47, 52, 53, 55, 60, 63, 64, 65, 488, 696, 781, 787, 796, 911, 913, 914, 922, 997, 1095, 1096, 1097, 1098, 1099, 1129, 1165, 1261, 1265, 1268, 1272, 1313, 1315, 1316, 1317, 1318, 1326, 1327, 1330, 1335, 1338, 1461, 1472, 1526, 1529, 1578, 1597, 1632, 1643, 1703, 1717, 1730, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1766, 1796, 1810, 1864, 1883, 1918, 1923, 1952, 1953, 1954, 1955, 2011, 2013, 2016, 2023, 2041, 2042, 2043, 2048, 2050, 2051, 2052, 2055, 2058, 2063, 2067, 2068, 2069, 2071, 2076, 2077, 2080, 2083, 2086, 2087, 2098, 2099, 2102, 2110, 2112, 2113], "ad": [1, 3, 9, 14, 19, 23, 24, 28, 32, 33, 35, 37, 44, 55, 58, 60, 61, 63, 64, 223, 224, 315, 321, 473, 517, 682, 688, 689, 690, 691, 692, 693, 737, 898, 901, 902, 903, 911, 912, 913, 914, 918, 919, 920, 922, 931, 943, 1108, 1112, 1169, 1170, 1172, 1180, 1187, 1272, 1286, 1344, 1350, 1435, 1436, 1437, 1439, 1440, 1441, 1442, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 
1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1532, 1533, 1535, 1540, 1541, 1555, 1566, 1570, 1575, 1578, 1610, 1611, 1612, 1629, 1657, 1658, 1659, 1684, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1736, 1743, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1830, 1831, 1832, 1900, 1905, 1919, 2011, 2013, 2015, 2028, 2029, 2034, 2041, 2049, 2051, 2052, 2054, 2055, 2060, 2068, 2069, 2070, 2073, 2080, 2085, 2089, 2097, 2098, 2101, 2102, 2106, 2107, 2109, 2110, 2113], "tutori": [1, 3, 9, 15, 28, 29, 32, 55, 64, 898, 901, 902, 903, 906, 908, 1967, 2012, 2013, 2020, 2023, 2036, 2046, 2048, 2054, 2055, 2057, 2060, 2065, 2070, 2093, 2101, 2102, 2104], "how": [1, 3, 5, 7, 8, 9, 12, 15, 17, 19, 23, 28, 30, 34, 39, 47, 48, 50, 55, 56, 57, 59, 60, 64, 66, 74, 75, 82, 235, 417, 488, 489, 490, 787, 794, 795, 798, 844, 862, 864, 865, 898, 901, 902, 903, 906, 908, 909, 918, 919, 927, 928, 1091, 1112, 1166, 1175, 1181, 1193, 1201, 1226, 1272, 1283, 1344, 1472, 1526, 1578, 1579, 1671, 1703, 1716, 1757, 1769, 1846, 1870, 1884, 1895, 2012, 2013, 2015, 2016, 2020, 2023, 2024, 2027, 2032, 2033, 2034, 2036, 2041, 2044, 2045, 2047, 2049, 2050, 2051, 2052, 2055, 2057, 2059, 2060, 2065, 2069, 2070, 2073, 2075, 2076, 2077, 2080, 2085, 2093, 2098, 2099, 2104, 2105, 2109, 2113, 2115], "major": [1, 6, 7, 8, 9, 1035, 1927, 2012, 2016, 2051, 2067, 2098, 2102, 2104, 2105], "contain": [1, 2, 3, 5, 11, 14, 15, 23, 28, 29, 30, 32, 34, 35, 37, 38, 40, 41, 45, 48, 53, 55, 63, 64, 66, 67, 72, 75, 85, 90, 156, 193, 197, 210, 292, 313, 315, 317, 321, 323, 473, 485, 546, 605, 609, 688, 699, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 762, 816, 839, 844, 880, 895, 896, 897, 908, 909, 911, 912, 913, 914, 915, 916, 917, 935, 943, 944, 945, 953, 955, 959, 977, 992, 993, 997, 1020, 1021, 1022, 1023, 1024, 1053, 1108, 1142, 1143, 1145, 1151, 1160, 1172, 1173, 1176, 1177, 1230, 1231, 1234, 1235, 1243, 1247, 1263, 1270, 1272, 1273, 1279, 1280, 1283, 1288, 1289, 1292, 1303, 1309, 1310, 1311, 1312, 1314, 1316, 1318, 1319, 1335, 1344, 1362, 1366, 1373, 1412, 1418, 1430, 1439, 1461, 1462, 1468, 1469, 1472, 1477, 1478, 1480, 1485, 1496, 1497, 1518, 1526, 1527, 1528, 1533, 1536, 1542, 1544, 1555, 1559, 1566, 1578, 1585, 1615, 1623, 1624, 1626, 1699, 1706, 1709, 1710, 1716, 1723, 1724, 1731, 1732, 1733, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1758, 1759, 1760, 1764, 1765, 1768, 1770, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1816, 1819, 1844, 1858, 1862, 1922, 1923, 1927, 1944, 1951, 1953, 1955, 1960, 1961, 1962, 1966, 1972, 1976, 2012, 2013, 2015, 2016, 2017, 2020, 2023, 2024, 2028, 2029, 2034, 2035, 2037, 2039, 2041, 2042, 2047, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2058, 2059, 2060, 2063, 2065, 2067, 2068, 2069, 2070, 2072, 2073, 2075, 2077, 2079, 2080, 2082, 2083, 2085, 2086, 2089, 2090, 2091, 2095, 2097, 2099, 2100, 2101, 2102, 2104, 2105, 2108, 2109, 2110, 2111, 2112, 2113], "build": [1, 2, 3, 8, 9, 14, 15, 19, 28, 30, 35, 45, 47, 52, 64, 1272, 1282, 1526, 1573, 1597, 1632, 2012, 2013, 2023, 2036, 2042, 2065, 2070, 2076, 2085, 2086, 2092, 2093, 2096, 2099, 2111], "basic": [1, 3, 7, 9, 12, 30, 47, 64, 66, 67, 1169, 1170, 1171, 1279, 1345, 1716, 1802, 2012, 2017, 2036, 
2043, 2045, 2047, 2052, 2068, 2076, 2084, 2085, 2100, 2102, 2104], "jacobian": [1, 35, 56, 57, 59, 60, 896, 901, 911, 914, 916, 917, 922, 923, 1170, 1171, 1172, 1176, 1177, 1976, 2042, 2048, 2052], "hessian": [1, 56, 59, 60, 912, 915, 1170, 1171, 2040, 2049], "etc": [1, 3, 5, 11, 12, 23, 24, 28, 33, 35, 37, 47, 48, 52, 53, 55, 862, 863, 864, 931, 977, 989, 1189, 1269, 1272, 1461, 1526, 1716, 1848, 2011, 2015, 2016, 2020, 2023, 2046, 2048, 2049, 2050, 2055, 2057, 2063, 2065, 2067, 2068, 2070, 2075, 2080, 2082, 2085, 2089, 2096, 2100, 2109], "user": [1, 2, 5, 7, 8, 9, 10, 11, 12, 15, 19, 23, 24, 28, 29, 30, 33, 34, 35, 36, 37, 40, 44, 47, 48, 51, 53, 55, 56, 59, 60, 63, 64, 66, 79, 86, 152, 337, 488, 734, 740, 741, 742, 747, 748, 758, 766, 796, 799, 801, 840, 857, 864, 865, 896, 903, 906, 908, 909, 917, 977, 1011, 1044, 1159, 1165, 1166, 1187, 1272, 1309, 1344, 1526, 1570, 1572, 1573, 1574, 1684, 1709, 1710, 1716, 1723, 1724, 1733, 1747, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 2011, 2012, 2013, 2015, 2016, 2020, 2023, 2028, 2032, 2034, 2035, 2041, 2042, 2043, 2045, 2048, 2052, 2053, 2054, 2055, 2057, 2060, 2063, 2065, 2067, 2069, 2070, 2072, 2075, 2076, 2080, 2082, 2083, 2084, 2085, 2087, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2107, 2109, 2111, 2112], "input": [1, 2, 3, 5, 9, 11, 12, 15, 21, 23, 24, 27, 28, 29, 32, 33, 34, 35, 36, 40, 53, 55, 56, 57, 58, 59, 60, 61, 64, 66, 73, 74, 75, 79, 82, 84, 86, 87, 88, 90, 152, 262, 282, 303, 403, 485, 501, 519, 611, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 695, 696, 697, 698, 699, 700, 701, 722, 730, 731, 736, 737, 740, 741, 742, 743, 744, 745, 747, 748, 758, 760, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 792, 793, 794, 795, 796, 798, 799, 800, 801, 802, 804, 816, 819, 821, 839, 840, 841, 842, 860, 861, 864, 865, 866, 867, 868, 869, 870, 871, 872, 873, 874, 875, 877, 878, 879, 880, 881, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 896, 903, 904, 906, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 935, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 958, 959, 961, 963, 964, 966, 967, 968, 969, 970, 971, 972, 974, 975, 977, 986, 989, 990, 991, 992, 993, 994, 995, 996, 997, 1007, 1023, 1050, 1051, 1053, 1087, 1088, 1089, 1090, 1092, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1108, 1109, 1110, 1111, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1177, 1183, 1185, 1187, 1213, 1214, 1215, 1216, 1217, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1249, 1250, 1251, 1252, 1254, 1257, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1272, 1275, 1276, 1279, 1280, 1284, 1286, 1288, 1289, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 
1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358, 1360, 1361, 1363, 1365, 1366, 1367, 1369, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1380, 1395, 1411, 1412, 1413, 1414, 1415, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1425, 1426, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1708, 1709, 1710, 1713, 1714, 1715, 1716, 1723, 1724, 1734, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1750, 1751, 1752, 1753, 1757, 1758, 1759, 1766, 1769, 1770, 1771, 1773, 1774, 1776, 1778, 1779, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1801, 1813, 1814, 1815, 1816, 1817, 1818, 1819, 1820, 1821, 1822, 1823, 1824, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1834, 1836, 1838, 1840, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1857, 1859, 1860, 1861, 1864, 1865, 1866, 1867, 1870, 1877, 1878, 1879, 1891, 1892, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1900, 1901, 1903, 1904, 1905, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1916, 1917, 1918, 1919, 1921, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1934, 1937, 1938, 1939, 1940, 1941, 1943, 1945, 1946, 1947, 1948, 1951, 1952, 1954, 1957, 1958, 1959, 1960, 1961, 1963, 1964, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1979, 1980, 2010, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2023, 2026, 2034, 2035, 2036, 2040, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2054, 2055, 2058, 2059, 2060, 2062, 2063, 2065, 2067, 2069, 2070, 2071, 2073, 2074, 2075, 2076, 2080, 2081, 2083, 2084, 2086, 2087, 2090, 2091, 2093, 2096, 2098, 2099, 2100, 2101, 2102, 2106, 2109, 2110, 2111, 2112, 2115], "set": [1, 2, 3, 5, 8, 9, 13, 14, 19, 20, 22, 23, 24, 25, 27, 28, 29, 30, 32, 33, 35, 37, 40, 45, 47, 48, 50, 51, 52, 53, 55, 58, 59, 
62, 63, 64, 66, 81, 83, 90, 152, 156, 256, 332, 447, 448, 449, 450, 451, 460, 498, 501, 522, 582, 583, 682, 737, 743, 745, 762, 781, 787, 794, 795, 796, 799, 817, 818, 819, 822, 823, 824, 827, 828, 844, 857, 861, 862, 864, 865, 868, 881, 893, 895, 896, 903, 907, 908, 909, 911, 912, 913, 914, 915, 916, 917, 919, 920, 942, 954, 959, 970, 974, 975, 983, 991, 1004, 1008, 1018, 1033, 1042, 1054, 1055, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1109, 1110, 1111, 1160, 1165, 1174, 1180, 1188, 1235, 1252, 1253, 1272, 1276, 1280, 1282, 1284, 1286, 1288, 1289, 1308, 1309, 1312, 1318, 1325, 1327, 1329, 1330, 1342, 1343, 1344, 1345, 1359, 1362, 1365, 1387, 1391, 1392, 1393, 1408, 1430, 1434, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1464, 1465, 1466, 1470, 1477, 1480, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1511, 1513, 1517, 1518, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1532, 1533, 1536, 1540, 1541, 1542, 1558, 1559, 1566, 1570, 1572, 1574, 1575, 1576, 1588, 1589, 1597, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1616, 1617, 1618, 1619, 1620, 1625, 1632, 1636, 1643, 1644, 1653, 1654, 1655, 1668, 1670, 1676, 1684, 1703, 1715, 1716, 1718, 1719, 1731, 1737, 1766, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1812, 1826, 1833, 1835, 1839, 1841, 1842, 1850, 1851, 1862, 1863, 1866, 1867, 1868, 1869, 1870, 1872, 1873, 1874, 1875, 1876, 1891, 1907, 1912, 1921, 1922, 1928, 1942, 1952, 1953, 1954, 1955, 1964, 1967, 1968, 1971, 1972, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2011, 2013, 2015, 2016, 2017, 2020, 2022, 2025, 2026, 2027, 2028, 2029, 2032, 2035, 2040, 2041, 2043, 2044, 2045, 2047, 2048, 2049, 2050, 2052, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2063, 2066, 2067, 2068, 2069, 2070, 2071, 2073, 2074, 2075, 2085, 2087, 2088, 2090, 2091, 2093, 2095, 2099, 2101, 2102, 2103, 2106, 2107, 2108, 2110, 2111, 2112, 2114, 2115], "can": [1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 30, 32, 33, 34, 35, 37, 39, 40, 41, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 56, 57, 58, 59, 60, 61, 63, 64, 66, 67, 68, 72, 74, 75, 76, 77, 84, 85, 86, 87, 88, 89, 90, 152, 156, 256, 260, 325, 337, 460, 488, 489, 490, 515, 517, 547, 616, 619, 682, 689, 731, 743, 744, 745, 750, 759, 760, 762, 770, 771, 774, 775, 776, 787, 793, 794, 795, 796, 799, 801, 816, 822, 825, 826, 828, 839, 844, 857, 861, 862, 864, 865, 866, 882, 883, 892, 893, 894, 896, 898, 899, 901, 902, 903, 906, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 922, 923, 927, 928, 931, 934, 945, 962, 975, 977, 978, 984, 990, 997, 1008, 1010, 1020, 1022, 1024, 1031, 1032, 1042, 1050, 1052, 1056, 1058, 1060, 1064, 1065, 1091, 1108, 1109, 1112, 1113, 1126, 1128, 1129, 1130, 1131, 1144, 1147, 1160, 1165, 1166, 1167, 1168, 1170, 1171, 1172, 1175, 1176, 1177, 1180, 1181, 1183, 1185, 1186, 1187, 1191, 1197, 1199, 1200, 1209, 1211, 1215, 1216, 1226, 1229, 1234, 1235, 1263, 1269, 1270, 1272, 1273, 1275, 1276, 1278, 1279, 1283, 1284, 1286, 1288, 1289, 1294, 1297, 1304, 1312, 1315, 1319, 1320, 1325, 1328, 1331, 1332, 1336, 1342, 1344, 1361, 1362, 1364, 1374, 1384, 1385, 1419, 1421, 1422, 1423, 1428, 1429, 1430, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1444, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1462, 1463, 1467, 1468, 1469, 1470, 1473, 
1474, 1477, 1482, 1483, 1484, 1486, 1492, 1494, 1495, 1496, 1499, 1500, 1501, 1512, 1516, 1517, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1528, 1531, 1532, 1533, 1535, 1536, 1537, 1540, 1542, 1543, 1544, 1545, 1546, 1547, 1554, 1555, 1558, 1563, 1566, 1569, 1570, 1571, 1572, 1573, 1574, 1576, 1577, 1579, 1590, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1627, 1628, 1643, 1657, 1658, 1659, 1677, 1684, 1703, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1718, 1719, 1721, 1736, 1757, 1758, 1761, 1764, 1769, 1771, 1775, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1805, 1806, 1808, 1812, 1814, 1823, 1827, 1835, 1839, 1849, 1855, 1858, 1870, 1872, 1874, 1899, 1902, 1909, 1910, 1911, 1912, 1913, 1914, 1918, 1921, 1922, 1923, 1927, 1928, 1942, 1946, 1949, 1951, 1959, 1960, 1963, 1965, 1967, 1971, 1972, 1976, 1981, 1988, 1989, 2009, 2011, 2012, 2013, 2014, 2015, 2016, 2020, 2022, 2023, 2024, 2025, 2026, 2028, 2029, 2030, 2032, 2033, 2034, 2035, 2036, 2041, 2043, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2073, 2075, 2076, 2077, 2080, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2090, 2091, 2092, 2093, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2103, 2104, 2105, 2106, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116, 2118], "lambda": [1, 12, 23, 30, 35, 58, 61, 63, 64, 66, 69, 74, 75, 78, 260, 489, 490, 927, 928, 963, 1167, 1172, 1176, 1177, 1187, 1308, 1309, 1310, 1311, 1344, 1481, 1564, 1576, 1673, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1796, 1802, 1804, 1807, 1901, 1976, 2017, 2042, 2048, 2049, 2067, 2068, 2075, 2087, 2102, 2112], "captur": [1, 11, 12, 15, 33, 52, 53, 64, 66, 69, 74, 75, 90, 682, 975, 977, 989, 1008, 1010, 1042, 1048, 1053, 1275, 1385, 1780, 1781, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1981, 2013, 2017, 2022, 2048, 2049, 2053, 2062, 2063, 2065, 2068, 2070, 2091, 2092, 2093, 2096, 2098, 2100, 2102, 2109, 2111], "f": [1, 10, 24, 28, 34, 35, 37, 39, 44, 45, 52, 53, 56, 57, 60, 61, 63, 64, 66, 67, 175, 260, 337, 379, 610, 901, 902, 942, 997, 1126, 1128, 1135, 1144, 1166, 1167, 1169, 1170, 1171, 1172, 1176, 1177, 1226, 1272, 1280, 1283, 1284, 1344, 1491, 1493, 1494, 1495, 1497, 1526, 1576, 1586, 1599, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1616, 1623, 1624, 1627, 1628, 1634, 1668, 1670, 1671, 1684, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1858, 1933, 1976, 2013, 2015, 2016, 2020, 2028, 2033, 2034, 2042, 2045, 2048, 2050, 2052, 2053, 2055, 2061, 2063, 2064, 2065, 2068, 2075, 2080, 2085, 2087, 2097, 2098, 2102, 2110, 2112], "three": [1, 2, 6, 9, 28, 32, 55, 57, 60, 64, 89, 891, 1106, 1108, 1226, 1235, 1316, 1319, 1320, 1321, 1336, 1364, 1437, 1455, 1458, 1495, 1498, 1521, 1684, 1730, 1771, 1802, 1870, 2016, 2042, 2045, 2047, 2048, 2065, 2068, 2070, 2075, 2077, 2080, 2085, 2099, 2108], "anoth": [1, 5, 7, 12, 23, 24, 28, 30, 33, 35, 36, 37, 50, 52, 55, 64, 488, 985, 1008, 1010, 1011, 1013, 1159, 1183, 1187, 1193, 1207, 1208, 1282, 1308, 1309, 1312, 1336, 1468, 1469, 1527, 1536, 1706, 1723, 1724, 1731, 1866, 1982, 2013, 2015, 2016, 2022, 2024, 2028, 2041, 2042, 2044, 2045, 2048, 2051, 2055, 2057, 2059, 2061, 2068, 2076, 2077, 2080, 2086, 2089, 2096, 2097, 2099, 2102, 2104, 2107, 2110, 2113], "constant": [1, 12, 23, 52, 53, 64, 66, 67, 71, 72, 75, 77, 746, 777, 823, 824, 978, 1091, 1148, 1149, 1150, 1194, 1211, 1269, 1272, 1276, 1288, 
1320, 1345, 1450, 1451, 1452, 1469, 1472, 1479, 1535, 1558, 1575, 1578, 1629, 1632, 1671, 1731, 1778, 1794, 1799, 1802, 1949, 2014, 2016, 2041, 2048, 2055, 2059, 2065, 2067, 2080, 2099, 2101, 2102, 2104, 2111], "boolean": [1, 12, 14, 35, 47, 64, 66, 74, 75, 402, 404, 892, 908, 909, 918, 947, 949, 950, 952, 981, 989, 1010, 1011, 1013, 1113, 1187, 1194, 1201, 1215, 1229, 1261, 1262, 1263, 1264, 1265, 1268, 1297, 1345, 1361, 1366, 1423, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1526, 1541, 1566, 1570, 1684, 1715, 1733, 1736, 1826, 1921, 1922, 1946, 1971, 1972, 1981, 1982, 2015, 2017, 2045, 2048, 2082, 2083, 2085, 2086, 2087, 2089, 2099, 2102], "flag": [1, 2, 5, 14, 28, 29, 36, 48, 51, 52, 55, 58, 60, 64, 737, 799, 876, 911, 913, 918, 919, 920, 944, 953, 966, 967, 968, 975, 982, 983, 1038, 1159, 1165, 1167, 1168, 1170, 1171, 1172, 1176, 1177, 1230, 1231, 1253, 1260, 1344, 1364, 1532, 1590, 1643, 1644, 1716, 1733, 1736, 1766, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1870, 1876, 1902, 1912, 1951, 1964, 1976, 2012, 2013, 2016, 2028, 2032, 2042, 2045, 2048, 2058, 2059, 2060, 2065, 2068, 2070, 2085, 2086, 2102, 2103, 2110, 2111], "inform": [1, 2, 3, 4, 5, 7, 8, 9, 17, 18, 20, 22, 23, 24, 28, 29, 30, 32, 33, 34, 37, 39, 40, 43, 47, 48, 52, 53, 55, 57, 62, 64, 81, 193, 210, 257, 315, 323, 489, 490, 500, 517, 519, 620, 682, 737, 913, 918, 919, 922, 923, 927, 928, 946, 1112, 1143, 1145, 1166, 1201, 1209, 1269, 1272, 1289, 1302, 1309, 1318, 1453, 1454, 1455, 1456, 1457, 1458, 1486, 1496, 1522, 1523, 1524, 1526, 1532, 1541, 1542, 1554, 1570, 1572, 1574, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1624, 1643, 1684, 1703, 1704, 1705, 1715, 1716, 1757, 1769, 1778, 1793, 1825, 1852, 1870, 1876, 1902, 1964, 2013, 2016, 2020, 2022, 2023, 2035, 2036, 2042, 2044, 2045, 2046, 2047, 2048, 2054, 2055, 2058, 2063, 2065, 2068, 2069, 2070, 2083, 2085, 2086, 2087, 2091, 2095, 2098, 2099, 2101, 2102, 2105, 2109, 2111, 2113, 2115, 2116], "between": [1, 2, 3, 7, 11, 17, 18, 24, 28, 33, 35, 36, 47, 48, 50, 52, 53, 55, 62, 63, 64, 198, 488, 515, 585, 586, 587, 589, 590, 609, 619, 691, 692, 693, 697, 698, 762, 774, 775, 776, 804, 860, 868, 887, 896, 901, 912, 914, 915, 916, 922, 923, 963, 975, 997, 1010, 1017, 1019, 1066, 1071, 1085, 1086, 1091, 1108, 1124, 1125, 1126, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1159, 1160, 1183, 1186, 1197, 1226, 1233, 1269, 1272, 1284, 1308, 1309, 1331, 1336, 1419, 1438, 1439, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1460, 1461, 1464, 1465, 1466, 1470, 1472, 1477, 1486, 1492, 1502, 1503, 1504, 1505, 1506, 1507, 1517, 1519, 1520, 1521, 1526, 1529, 1530, 1531, 1535, 1555, 1558, 1559, 1560, 1575, 1576, 1578, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1631, 1657, 1658, 1659, 1673, 1690, 1700, 1716, 1723, 1724, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1801, 1802, 1808, 1810, 1819, 1827, 1831, 1832, 1837, 1838, 1842, 1867, 1920, 1921, 1922, 1923, 1927, 1948, 1949, 1971, 1972, 2013, 2015, 2016, 2017, 2023, 2032, 2035, 2041, 2042, 2044, 2045, 2047, 2052, 2053, 2055, 2057, 2059, 2060, 2062, 2063, 2065, 2067, 2070, 2071, 2075, 2077, 2080, 2082, 2083, 2085, 2090, 2096, 2099, 2109, 2111], "well": [1, 3, 5, 7, 9, 14, 19, 24, 28, 38, 47, 52, 55, 60, 61, 64, 65, 619, 793, 824, 827, 864, 1050, 1170, 1171, 1272, 1276, 1283, 1288, 1308, 
1309, 1310, 1312, 1318, 1331, 1336, 1383, 1440, 1441, 1442, 1461, 1496, 1526, 1566, 1653, 1654, 1655, 1716, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1826, 1960, 2013, 2016, 2023, 2033, 2035, 2041, 2042, 2045, 2048, 2049, 2051, 2052, 2055, 2057, 2059, 2063, 2065, 2068, 2070, 2073, 2075, 2077, 2080, 2084, 2085, 2089, 2093, 2097, 2099, 2101, 2102, 2103, 2107, 2111], "relat": [1, 6, 7, 9, 23, 28, 33, 37, 52, 53, 54, 55, 60, 66, 67, 72, 75, 81, 682, 982, 1183, 1226, 1312, 1472, 1558, 1578, 1716, 1816, 1965, 2035, 2042, 2060, 2067, 2068, 2075, 2080, 2081, 2087, 2093, 2102, 2112], "mechan": [1, 8, 28, 30, 41, 45, 47, 53, 64, 882, 918, 919, 977, 1112, 1272, 1526, 1587, 1684, 1769, 1942, 2012, 2032, 2035, 2048, 2051, 2054, 2055, 2073, 2075, 2076, 2096, 2099], "confus": [1, 8, 66, 1272, 1526, 2042, 2045, 2068, 2080, 2103], "spars": [1, 11, 192, 193, 210, 220, 325, 330, 344, 345, 437, 545, 546, 547, 548, 583, 585, 586, 587, 588, 589, 590, 616, 691, 747, 748, 922, 923, 1082, 1213, 1237, 1257, 1345, 1367, 1377, 1422, 1468, 1469, 1560, 1623, 1624, 1649, 1782, 1797, 1816, 1864, 1897, 1902, 1909, 1910, 1911, 1912, 1913, 1914, 1919, 1928, 1948, 1964, 1965, 2012, 2014, 2018, 2023, 2040, 2058, 2059, 2066, 2072, 2083, 2084, 2087, 2098, 2106], "param": [1, 2, 11, 30, 32, 35, 39, 47, 55, 57, 59, 62, 64, 490, 766, 797, 1165, 1175, 1204, 1272, 1291, 1526, 1532, 1536, 1537, 1714, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1792, 1793, 1794, 1795, 1796, 1797, 1810, 2014, 2026, 2040, 2041, 2045, 2047, 2067, 2112], "receiv": [1, 7, 9, 23, 28, 30, 32, 33, 35, 53, 60, 63, 1064, 1272, 1526, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1811, 2016, 2029, 2032, 2041, 2042, 2057, 2075, 2076, 2077, 2080, 2098, 2111, 2113], "dure": [1, 5, 14, 18, 19, 24, 28, 30, 33, 37, 39, 41, 47, 52, 55, 63, 64, 66, 73, 83, 85, 86, 337, 490, 505, 506, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 795, 829, 892, 893, 903, 906, 908, 909, 1008, 1042, 1052, 1053, 1065, 1066, 1086, 1283, 1285, 1286, 1288, 1344, 1434, 1440, 1441, 1442, 1462, 1463, 1468, 1469, 1488, 1489, 1490, 1545, 1566, 1572, 1574, 1617, 1623, 1624, 1684, 1706, 1716, 1718, 1719, 1764, 1778, 1811, 1908, 1953, 1955, 2015, 2020, 2022, 2023, 2026, 2028, 2029, 2033, 2035, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2054, 2055, 2058, 2059, 2063, 2065, 2068, 2069, 2070, 2073, 2075, 2077, 2087, 2093, 2096, 2097, 2099, 2102, 2108, 2109, 2110, 2111, 2113], "accumul": [1, 2, 55, 152, 292, 315, 320, 321, 323, 473, 490, 688, 896, 917, 1187, 1342, 1716, 1781, 1782, 1964, 2013, 2014, 2029, 2042, 2045, 2047, 2050, 2058, 2067, 2071, 2075, 2076, 2080, 2098, 2099, 2102, 2106, 2111], "initi": [1, 2, 3, 4, 8, 17, 18, 20, 23, 24, 30, 32, 33, 34, 37, 40, 47, 48, 51, 52, 55, 82, 90, 498, 501, 582, 714, 715, 716, 717, 718, 719, 722, 732, 733, 734, 735, 747, 748, 758, 762, 766, 864, 865, 882, 931, 985, 1010, 1018, 1039, 1044, 1045, 1048, 1049, 1075, 1109, 1110, 1111, 1219, 1248, 1270, 1272, 1285, 1286, 1344, 1345, 1362, 1407, 1440, 1441, 1442, 1443, 1445, 1456, 1457, 1458, 1462, 1468, 1469, 1477, 1478, 1480, 1488, 1489, 1490, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1526, 1534, 1541, 1542, 1543, 1544, 1566, 1576, 1623, 1706, 1715, 1716, 1718, 1719, 1730, 1733, 1736, 1764, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1812, 
1866, 1867, 1868, 1909, 1910, 1911, 1912, 1913, 1914, 1942, 1981, 1993, 1995, 1996, 1998, 2001, 2012, 2014, 2015, 2016, 2024, 2029, 2035, 2040, 2042, 2045, 2047, 2048, 2051, 2054, 2061, 2063, 2065, 2067, 2068, 2070, 2074, 2075, 2076, 2096, 2099, 2109, 2110, 2118], "memori": [1, 2, 3, 5, 11, 20, 24, 27, 30, 32, 35, 36, 53, 55, 59, 64, 152, 157, 172, 174, 177, 180, 181, 182, 197, 208, 211, 235, 242, 256, 269, 299, 327, 333, 339, 341, 342, 395, 447, 448, 449, 450, 451, 460, 465, 488, 501, 502, 526, 527, 582, 605, 619, 736, 765, 881, 883, 896, 903, 906, 908, 922, 923, 929, 957, 972, 975, 1008, 1009, 1012, 1015, 1016, 1018, 1032, 1033, 1042, 1043, 1046, 1050, 1052, 1053, 1056, 1058, 1059, 1060, 1062, 1063, 1064, 1065, 1066, 1072, 1073, 1074, 1078, 1108, 1109, 1110, 1111, 1159, 1160, 1161, 1162, 1164, 1165, 1166, 1171, 1173, 1177, 1272, 1290, 1318, 1344, 1381, 1383, 1384, 1392, 1469, 1496, 1497, 1526, 1570, 1571, 1572, 1574, 1684, 1702, 1716, 1723, 1724, 1730, 1757, 1769, 1776, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1835, 1836, 1838, 1839, 1840, 1841, 1866, 1942, 1964, 1976, 1989, 2010, 2012, 2013, 2024, 2026, 2032, 2034, 2035, 2036, 2042, 2048, 2051, 2057, 2063, 2065, 2069, 2070, 2075, 2080, 2082, 2083, 2084, 2086, 2087, 2096, 2101, 2102, 2104, 2108, 2110], "overlap": [1, 23, 24, 28, 30, 32, 55, 64, 488, 682, 881, 922, 923, 1111, 1166, 1269, 1472, 1578, 1716, 1923, 1964, 2045, 2047, 2051, 2083, 2098, 2102, 2110], "dens": [1, 32, 220, 547, 548, 585, 586, 587, 588, 589, 590, 944, 953, 1230, 1231, 1292, 1345, 1797, 1897, 1900, 1904, 1905, 1908, 1909, 1910, 1911, 1913, 1914, 1919, 1928, 1964, 2035, 2059, 2080, 2083, 2098], "stride": [1, 11, 53, 140, 256, 341, 447, 448, 449, 450, 451, 501, 522, 546, 583, 585, 586, 587, 589, 590, 619, 714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 740, 741, 742, 743, 744, 745, 770, 771, 774, 775, 776, 784, 785, 868, 881, 944, 953, 1109, 1111, 1121, 1126, 1144, 1160, 1163, 1164, 1187, 1200, 1230, 1231, 1237, 1272, 1282, 1286, 1292, 1343, 1359, 1377, 1435, 1436, 1437, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1493, 1494, 1495, 1502, 1503, 1504, 1505, 1506, 1507, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1538, 1578, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1626, 1653, 1654, 1655, 1657, 1658, 1659, 1660, 1661, 1662, 1702, 1716, 1775, 1831, 1832, 1835, 1837, 1838, 1839, 1841, 1842, 1849, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1901, 1904, 1948, 1953, 1955, 1974, 2009, 2014, 2017, 2020, 2033, 2035, 2065, 2066, 2075, 2080, 2082, 2083, 2085, 2086, 2087, 2098, 2099, 2100, 2101, 2102, 2106, 2111], "otherwis": [1, 2, 3, 5, 7, 9, 11, 14, 19, 23, 28, 29, 36, 40, 52, 55, 56, 60, 64, 90, 211, 323, 330, 334, 338, 340, 344, 345, 497, 506, 562, 582, 583, 585, 605, 619, 682, 688, 689, 690, 691, 692, 695, 697, 698, 699, 701, 737, 762, 770, 771, 786, 796, 819, 822, 823, 843, 864, 865, 868, 895, 901, 908, 909, 913, 922, 942, 943, 959, 1048, 1108, 1114, 1147, 1159, 1186, 1187, 1200, 1203, 1263, 1269, 1272, 1275, 1279, 1285, 1294, 1318, 1333, 1342, 1344, 1345, 1360, 1365, 1370, 1372, 1373, 1375, 1377, 1378, 1417, 1420, 1436, 1437, 1459, 1461, 1464, 1465, 1466, 1470, 1477, 1481, 1482, 1483, 1484, 1486, 1496, 1512, 1526, 1530, 1531, 1532, 1533, 1534, 1536, 1542, 1543, 1545, 1558, 1564, 1566, 1569, 1570, 1572, 1574, 1575, 1576, 1600, 1601, 1634, 1636, 1637, 1641, 1677, 1688, 1716, 1730, 1735, 1736, 1757, 1758, 1760, 1761, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1793, 1794, 
1795, 1796, 1797, 1814, 1816, 1824, 1826, 1833, 1842, 1849, 1862, 1870, 1877, 1921, 1922, 1923, 1926, 1959, 1960, 1961, 1971, 1972, 1979, 2011, 2013, 2015, 2016, 2020, 2023, 2026, 2033, 2035, 2041, 2048, 2049, 2057, 2063, 2064, 2065, 2070, 2071, 2073, 2074, 2075, 2077, 2081, 2082, 2083, 2084, 2085, 2087, 2093, 2112], "rowmajor": [1, 1716], "contigu": [1, 11, 24, 30, 333, 501, 522, 619, 1471, 1529, 1673, 1716, 1723, 1724, 1843, 1849, 1927, 2014, 2034, 2035, 2066, 2072, 2080, 2082, 2084, 2099], "create_graph": [1, 152, 490, 896, 911, 912, 913, 914, 915, 916, 917, 2014, 2041, 2048], "preserv": [1, 5, 12, 23, 33, 35, 52, 55, 64, 90, 501, 781, 787, 857, 879, 882, 1149, 1150, 1166, 1200, 1272, 1276, 1283, 1526, 1527, 1536, 1579, 1643, 1703, 1779, 1858, 1862, 1899, 1934, 1942, 1969, 2013, 2026, 2033, 2035, 2040, 2045, 2049, 2059, 2062, 2063, 2065, 2075, 2080, 2083, 2096, 2101], "replac": [1, 14, 19, 23, 24, 28, 30, 46, 48, 52, 55, 57, 58, 59, 60, 61, 64, 86, 87, 89, 121, 423, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 749, 816, 841, 861, 966, 1053, 1108, 1165, 1166, 1187, 1277, 1280, 1290, 1362, 1363, 1412, 1416, 1558, 1569, 1706, 1732, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1766, 1768, 1826, 1923, 1927, 1951, 2013, 2014, 2016, 2026, 2045, 2048, 2049, 2052, 2054, 2057, 2061, 2065, 2066, 2068, 2070, 2087, 2097, 2098, 2102, 2104, 2110], "preexist": [1, 2069, 2098, 2101], "behavior": [1, 2, 7, 11, 14, 20, 22, 24, 28, 29, 30, 32, 33, 35, 36, 40, 45, 48, 52, 53, 55, 59, 60, 63, 64, 88, 256, 321, 473, 501, 515, 546, 619, 689, 787, 795, 828, 881, 895, 908, 909, 951, 957, 959, 961, 974, 977, 1007, 1103, 1108, 1109, 1110, 1111, 1153, 1161, 1162, 1166, 1201, 1272, 1283, 1286, 1288, 1318, 1325, 1329, 1342, 1343, 1344, 1359, 1367, 1374, 1462, 1465, 1486, 1496, 1526, 1542, 1566, 1579, 1597, 1632, 1702, 1703, 1706, 1707, 1716, 1717, 1768, 1770, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1820, 1826, 1842, 1849, 1890, 1927, 1949, 1964, 2011, 2013, 2017, 2018, 2020, 2022, 2024, 2033, 2035, 2037, 2043, 2045, 2048, 2049, 2058, 2059, 2060, 2065, 2067, 2068, 2071, 2072, 2075, 2081, 2084, 2098, 2099, 2101, 2103, 2114], "let": [1, 7, 8, 23, 24, 33, 35, 48, 60, 64, 488, 498, 967, 968, 970, 1187, 1196, 1226, 1272, 1292, 1302, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1318, 1319, 1322, 1324, 1331, 1333, 1335, 1336, 1526, 1716, 1730, 1942, 2020, 2033, 2034, 2042, 2045, 2047, 2048, 2049, 2051, 2055, 2057, 2058, 2059, 2060, 2061, 2068, 2076, 2077, 2080, 2085, 2096, 2097, 2099, 2101, 2102, 2103, 2104, 2105, 2110, 2111, 2114], "first": [1, 4, 5, 7, 9, 14, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 44, 47, 48, 52, 53, 55, 61, 64, 66, 75, 78, 218, 256, 288, 292, 591, 612, 688, 691, 693, 696, 762, 795, 796, 816, 857, 860, 862, 877, 878, 887, 892, 893, 894, 895, 897, 908, 909, 917, 922, 931, 940, 942, 943, 947, 948, 950, 951, 952, 955, 957, 959, 973, 1007, 1010, 1021, 1096, 1098, 1099, 1100, 1105, 1108, 1113, 1126, 1128, 1147, 1162, 1167, 1168, 1170, 1171, 1172, 1176, 1177, 1185, 1200, 1215, 1226, 1229, 1237, 1238, 1239, 1249, 1261, 1270, 1280, 1285, 1286, 1292, 1297, 1302, 1305, 1312, 1328, 1331, 1338, 1339, 1341, 1344, 1345, 1361, 1367, 1370, 1373, 1374, 1375, 1377, 1395, 1412, 1418, 1423, 1426, 1430, 1436, 1437, 1443, 1454, 1455, 1457, 1458, 1468, 1469, 1471, 1476, 1477, 1491, 1494, 1495, 1496, 1511, 1518, 1520, 1521, 1542, 1555, 1614, 1702, 1706, 1716, 1723, 1724, 1733, 1734, 1736, 1747, 1757, 1763, 1783, 1784, 1785, 1786, 1787, 1793, 1796, 1797, 1801, 
1805, 1808, 1810, 1816, 1826, 1827, 1853, 1854, 1862, 1870, 1889, 1904, 1907, 1912, 1943, 1944, 1948, 1953, 1955, 1961, 1970, 1973, 1976, 1978, 1981, 1995, 2011, 2013, 2015, 2016, 2020, 2023, 2027, 2032, 2033, 2035, 2041, 2042, 2045, 2046, 2048, 2050, 2051, 2052, 2055, 2057, 2058, 2059, 2060, 2061, 2063, 2065, 2067, 2069, 2070, 2075, 2076, 2077, 2079, 2080, 2081, 2085, 2090, 2093, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2109, 2110, 2111, 2112], "accord": [1, 9, 30, 33, 34, 37, 52, 799, 864, 865, 883, 945, 957, 1106, 1236, 1333, 1366, 1412, 1430, 1469, 1538, 1684, 1706, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1802, 1808, 1827, 1890, 1915, 1928, 1943, 1977, 2016, 2029, 2034, 2035, 2055, 2059, 2061, 2068, 2077, 2087], "retain": [1, 9, 28, 695, 697, 698, 701, 877, 878, 883, 1276, 1294, 1325, 1329, 1342, 1360, 1370, 1372, 1373, 1375, 1378, 1417, 1418, 1419, 1420, 1446, 1771, 1824, 1827, 1858, 1921, 1922, 1926, 1952, 1953, 1954, 1955, 1971, 1972, 2032, 2057, 2080], "over": [1, 9, 11, 12, 19, 23, 24, 28, 29, 33, 35, 37, 45, 48, 52, 55, 56, 57, 58, 59, 60, 61, 62, 64, 66, 69, 74, 75, 78, 483, 682, 699, 740, 741, 742, 743, 744, 745, 768, 769, 774, 775, 776, 784, 785, 895, 908, 909, 931, 935, 962, 1019, 1064, 1066, 1071, 1086, 1087, 1088, 1089, 1090, 1108, 1165, 1169, 1171, 1175, 1177, 1183, 1185, 1226, 1234, 1272, 1288, 1305, 1325, 1329, 1341, 1342, 1344, 1352, 1372, 1374, 1420, 1427, 1428, 1429, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1469, 1472, 1473, 1474, 1480, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1498, 1514, 1517, 1518, 1519, 1520, 1521, 1526, 1529, 1530, 1531, 1533, 1540, 1541, 1555, 1558, 1559, 1561, 1566, 1575, 1578, 1591, 1592, 1593, 1594, 1595, 1596, 1599, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1616, 1627, 1628, 1644, 1650, 1653, 1654, 1655, 1657, 1658, 1659, 1668, 1669, 1676, 1684, 1710, 1715, 1721, 1732, 1768, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1831, 1832, 1847, 1906, 1908, 1921, 1922, 1923, 1926, 1944, 1959, 1971, 1972, 1976, 2016, 2029, 2032, 2033, 2034, 2041, 2043, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2055, 2057, 2058, 2063, 2067, 2070, 2075, 2076, 2080, 2088, 2089, 2096, 2098, 2099, 2100, 2101, 2102, 2111, 2113, 2114], "time": [1, 3, 4, 7, 8, 9, 14, 19, 23, 24, 28, 29, 30, 32, 33, 35, 36, 37, 44, 47, 48, 50, 52, 53, 55, 60, 63, 64, 66, 76, 77, 90, 292, 315, 488, 489, 495, 619, 682, 687, 688, 689, 690, 691, 692, 693, 696, 747, 748, 758, 762, 766, 770, 771, 801, 803, 826, 844, 880, 917, 927, 928, 929, 930, 936, 938, 943, 955, 963, 965, 975, 990, 997, 1010, 1011, 1013, 1053, 1066, 1086, 1089, 1100, 1122, 1123, 1129, 1130, 1131, 1137, 1148, 1149, 1150, 1158, 1162, 1171, 1172, 1173, 1176, 1177, 1181, 1191, 1201, 1261, 1269, 1272, 1280, 1285, 1288, 1289, 1293, 1298, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1317, 1318, 1319, 1322, 1324, 1328, 1331, 1333, 1335, 1336, 1344, 1345, 1367, 1377, 1385, 1411, 1412, 1414, 1424, 1432, 1433, 1435, 1436, 1437, 1439, 1440, 1441, 1442, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1469, 1472, 1473, 1474, 1477, 1488, 1489, 1490, 1496, 1498, 1512, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1538, 1539, 1541, 1542, 1563, 1566, 1578, 1579, 1580, 1581, 1597, 1600, 1601, 1627, 1628, 1673, 1674, 1675, 1692, 1708, 1709, 1710, 1713, 1714, 1715, 1716, 1730, 1731, 1734, 1736, 1770, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1804, 1814, 1815, 1826, 
1848, 1854, 1904, 1918, 1923, 1924, 1944, 1976, 1981, 1982, 1995, 2012, 2015, 2016, 2017, 2020, 2022, 2024, 2026, 2032, 2040, 2042, 2044, 2047, 2048, 2050, 2051, 2052, 2054, 2055, 2057, 2059, 2060, 2061, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2075, 2076, 2077, 2080, 2085, 2091, 2092, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2107, 2108, 2110, 2113, 2115], "4": [1, 3, 5, 11, 12, 18, 19, 21, 23, 24, 28, 30, 32, 33, 34, 35, 36, 37, 45, 47, 52, 53, 60, 64, 66, 67, 68, 71, 72, 74, 75, 77, 193, 210, 244, 256, 291, 313, 315, 317, 319, 323, 403, 404, 447, 448, 473, 485, 489, 495, 498, 501, 515, 519, 525, 539, 546, 558, 560, 562, 586, 587, 609, 619, 685, 686, 687, 688, 691, 693, 694, 695, 697, 698, 699, 701, 741, 743, 744, 745, 748, 750, 759, 762, 775, 776, 826, 868, 877, 878, 879, 883, 884, 885, 886, 887, 888, 890, 891, 903, 906, 908, 909, 911, 914, 915, 916, 943, 946, 949, 953, 954, 955, 959, 961, 964, 965, 968, 969, 970, 973, 980, 986, 989, 992, 993, 994, 995, 1007, 1088, 1091, 1098, 1100, 1102, 1103, 1106, 1107, 1108, 1111, 1113, 1123, 1124, 1126, 1128, 1136, 1142, 1144, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1156, 1158, 1159, 1162, 1165, 1167, 1175, 1176, 1177, 1187, 1213, 1214, 1215, 1226, 1229, 1233, 1234, 1235, 1236, 1238, 1239, 1243, 1247, 1249, 1261, 1263, 1269, 1278, 1279, 1284, 1286, 1293, 1294, 1295, 1296, 1297, 1298, 1302, 1304, 1305, 1311, 1313, 1315, 1316, 1317, 1320, 1322, 1325, 1327, 1329, 1331, 1333, 1335, 1338, 1339, 1340, 1342, 1343, 1346, 1348, 1349, 1354, 1356, 1357, 1359, 1361, 1366, 1367, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1378, 1395, 1411, 1412, 1420, 1421, 1422, 1423, 1430, 1435, 1446, 1447, 1448, 1450, 1451, 1454, 1455, 1457, 1458, 1466, 1468, 1469, 1470, 1472, 1476, 1477, 1496, 1497, 1522, 1523, 1529, 1531, 1533, 1538, 1539, 1542, 1548, 1549, 1550, 1551, 1552, 1566, 1578, 1579, 1580, 1581, 1582, 1583, 1586, 1587, 1588, 1589, 1597, 1599, 1608, 1611, 1623, 1624, 1632, 1643, 1668, 1671, 1674, 1675, 1702, 1703, 1704, 1705, 1706, 1716, 1723, 1724, 1730, 1736, 1747, 1759, 1760, 1762, 1764, 1770, 1771, 1772, 1774, 1776, 1780, 1793, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1815, 1819, 1820, 1823, 1824, 1826, 1827, 1829, 1833, 1835, 1837, 1839, 1841, 1842, 1843, 1844, 1845, 1846, 1848, 1849, 1853, 1854, 1855, 1857, 1858, 1862, 1874, 1881, 1883, 1884, 1889, 1890, 1892, 1894, 1899, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1923, 1926, 1929, 1930, 1938, 1940, 1941, 1942, 1943, 1944, 1945, 1946, 1947, 1949, 1952, 1953, 1954, 1955, 1957, 1958, 1959, 1961, 1962, 1963, 1964, 1970, 1973, 1974, 1975, 1976, 1977, 1978, 2010, 2013, 2014, 2015, 2016, 2017, 2020, 2023, 2032, 2034, 2035, 2040, 2042, 2043, 2045, 2047, 2048, 2051, 2055, 2057, 2060, 2061, 2063, 2065, 2068, 2070, 2072, 2075, 2080, 2081, 2083, 2084, 2085, 2086, 2087, 2096, 2098, 2099, 2100, 2102, 2106, 2109, 2111], "fact": [1, 3, 8, 55, 497, 944, 953, 991, 1197, 1226, 1230, 1231, 1308, 1309, 1336, 2013, 2048, 2051, 2052, 2065, 2076, 2080, 2096, 2099, 2101, 2105], "reset": [1, 28, 30, 762, 822, 827, 896, 1008, 1056, 1058, 1072, 1073, 1074, 1272, 1477, 1526, 1541, 1543, 1715, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1816, 1833, 2029, 2065, 2074], "phase": [1, 19, 24, 795, 1308, 1309, 1808, 1927, 2014, 2061, 2102, 2111], "iter": [1, 2, 3, 7, 19, 24, 28, 29, 30, 32, 33, 35, 48, 51, 52, 55, 64, 66, 71, 931, 942, 980, 1020, 1021, 1022, 1023, 1024, 1053, 1056, 1058, 1080, 1185, 1272, 1345, 1526, 1527, 
1528, 1536, 1537, 1716, 1717, 1720, 1721, 1722, 1729, 1731, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1765, 1767, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1800, 1801, 1802, 1805, 1816, 1885, 1928, 2005, 2014, 2016, 2017, 2020, 2032, 2034, 2041, 2042, 2043, 2045, 2047, 2048, 2051, 2052, 2055, 2067, 2069, 2074, 2079, 2085, 2096, 2099, 2102, 2110, 2111, 2112], "recreat": [1, 2042, 2110], "valid": [1, 19, 25, 27, 28, 30, 33, 35, 45, 47, 52, 53, 64, 86, 90, 749, 750, 759, 793, 966, 977, 978, 984, 1050, 1108, 1187, 1197, 1270, 1272, 1273, 1294, 1308, 1309, 1318, 1319, 1320, 1331, 1336, 1367, 1453, 1454, 1455, 1607, 1608, 1609, 1632, 1706, 1742, 1747, 1751, 1777, 1778, 1798, 1799, 1804, 1805, 1806, 1807, 1809, 1810, 1811, 1812, 1826, 2013, 2014, 2015, 2016, 2020, 2034, 2035, 2042, 2045, 2048, 2053, 2059, 2065, 2067, 2069, 2075, 2076, 2098, 2099, 2100, 2107, 2111], "altern": [1, 9, 23, 28, 33, 64, 682, 816, 1169, 1170, 1235, 1280, 1344, 1555, 1572, 1574, 1651, 1716, 1776, 1797, 1868, 1869, 1964, 2010, 2011, 2017, 2042, 2053, 2055, 2058, 2059, 2061, 2086, 2102, 2109], "assign": [1, 7, 9, 10, 23, 28, 33, 37, 47, 48, 52, 60, 62, 64, 417, 682, 840, 841, 842, 938, 1270, 1272, 1273, 1430, 1461, 1526, 1533, 1536, 1537, 1717, 1736, 2013, 2017, 2020, 2041, 2048, 2049, 2050, 2051, 2065, 2070, 2076, 2077, 2084, 2085], "never": [1, 5, 7, 24, 28, 47, 48, 52, 53, 223, 224, 798, 882, 963, 975, 1269, 1319, 1320, 1331, 1344, 1716, 1757, 2028, 2042, 2045, 2048, 2068, 2075, 2083, 2087, 2098], "long": [1, 7, 9, 23, 47, 52, 63, 736, 751, 753, 754, 755, 756, 758, 765, 783, 980, 1166, 1200, 1248, 1373, 1418, 1445, 1461, 1469, 1496, 1497, 1533, 1616, 1777, 1825, 1939, 1943, 1953, 1955, 1960, 2012, 2016, 2023, 2032, 2033, 2034, 2042, 2043, 2045, 2048, 2050, 2057, 2059, 2065, 2068, 2069, 2074, 2082, 2083, 2085, 2086, 2096, 2099, 2102, 2106, 2109, 2111], "hard": [1, 7, 8, 28, 33, 48, 52, 1108, 1345, 1481, 1634, 1635, 2013, 2014, 2015, 2042, 2051, 2065, 2068, 2102, 2103], "matter": [1, 4, 28, 55, 903, 904, 908, 1096, 1108, 1200, 1280, 1283, 1288, 1716, 2035, 2042, 2068], "discourag": [1, 1077, 1081, 1408, 2006, 2042, 2075, 2086], "aggress": [1, 55, 1269, 1723, 1724, 2042, 2075, 2102], "buffer": [1, 4, 23, 24, 28, 33, 52, 53, 55, 59, 62, 417, 883, 1021, 1053, 1162, 1165, 1175, 1272, 1280, 1283, 1294, 1344, 1440, 1441, 1442, 1462, 1499, 1500, 1501, 1526, 1566, 1708, 1716, 1718, 1725, 1727, 1733, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1754, 1764, 1766, 1794, 1796, 1858, 1899, 1946, 2015, 2036, 2042, 2047, 2048, 2055, 2060, 2063, 2065, 2102, 2115], "free": [1, 7, 28, 30, 35, 37, 47, 48, 55, 64, 66, 74, 75, 980, 1059, 1064, 1078, 1275, 1288, 1345, 2026, 2040, 2042, 2045, 2048, 2050, 2051, 2057, 2061, 2065, 2080, 2083, 2096, 2098, 2102, 2113], "reus": [1, 19, 28, 64, 488, 758, 1189, 2042, 2045, 2075, 2096, 2098, 2099, 2102, 2106, 2111, 2113], "effici": [1, 2, 3, 8, 11, 23, 34, 35, 37, 55, 56, 61, 152, 762, 896, 903, 905, 908, 917, 965, 1173, 1183, 1216, 1290, 1328, 1345, 1364, 1430, 1464, 1465, 1466, 1469, 1470, 1477, 1532, 1538, 1539, 1542, 1574, 1585, 1590, 1684, 2026, 2035, 2042, 2047, 2048, 2052, 2056, 2057, 2058, 2067, 2070, 2075, 2076, 2080, 2083, 2084, 2089, 2099, 2114], "few": [1, 7, 8, 24, 30, 37, 1108, 1166, 1469, 1716, 2011, 2016, 2042, 2045, 2048, 2050, 2053, 2058, 2061, 2065, 2067, 2070, 2073, 2080, 2084, 2086, 2089, 2098, 2099, 2102, 2104, 2105, 2109, 2111], "occas": [1, 7, 2042], "actual": [1, 8, 30, 37, 40, 52, 
53, 55, 60, 64, 260, 682, 792, 990, 1126, 1144, 1197, 1284, 1288, 1456, 1457, 1458, 1632, 1716, 1748, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1802, 2011, 2015, 2016, 2024, 2033, 2042, 2045, 2047, 2048, 2051, 2052, 2057, 2059, 2061, 2063, 2070, 2075, 2082, 2087, 2098, 2099, 2101, 2102, 2104], "signific": [1, 3, 30, 1810, 1870, 2042, 2045, 2080, 2104, 2107], "amount": [1, 2, 3, 4, 7, 23, 28, 30, 37, 47, 64, 923, 1032, 1058, 1060, 1064, 1269, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1472, 1514, 1529, 1578, 1615, 1741, 1742, 1744, 1745, 1747, 1749, 1750, 1751, 1752, 1753, 1754, 1989, 2034, 2042, 2044, 2045, 2049, 2050, 2053, 2063, 2075, 2080, 2098, 2105, 2109, 2111, 2113], "unless": [1, 2, 4, 7, 19, 24, 28, 50, 52, 55, 56, 64, 490, 501, 842, 861, 903, 904, 908, 946, 1008, 1042, 1165, 1272, 1316, 1321, 1334, 1344, 1373, 1479, 1526, 1574, 1710, 1747, 1766, 1791, 1797, 1862, 1964, 2016, 2020, 2022, 2035, 2042, 2045, 2051, 2059, 2060, 2065, 2068, 2071, 2098, 2109], "heavi": [1, 28, 2042, 2061, 2101], "pressur": [1, 55, 2042], "might": [1, 2, 3, 4, 9, 14, 15, 18, 19, 28, 30, 32, 34, 35, 47, 52, 60, 63, 64, 87, 89, 152, 488, 546, 896, 1007, 1288, 1289, 1632, 1716, 1717, 1723, 1724, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 2011, 2013, 2029, 2035, 2042, 2044, 2045, 2047, 2048, 2052, 2054, 2059, 2060, 2062, 2065, 2068, 2070, 2075, 2076, 2077, 2080, 2084, 2086, 2087, 2092, 2096, 2098, 2099, 2101, 2102, 2103, 2104, 2110, 2111], "keep": [1, 3, 5, 7, 23, 24, 30, 32, 34, 37, 47, 49, 52, 53, 55, 66, 488, 1011, 1187, 1344, 1440, 1441, 1442, 1468, 1488, 1489, 1490, 1535, 1566, 1632, 1643, 1716, 1743, 1779, 1847, 1870, 2011, 2012, 2013, 2032, 2034, 2035, 2042, 2045, 2047, 2049, 2050, 2052, 2057, 2063, 2067, 2070, 2075, 2076, 2077, 2098, 2099, 2100, 2101, 2102, 2109, 2113], "track": [1, 34, 37, 49, 52, 53, 66, 81, 82, 83, 88, 337, 918, 929, 1016, 1056, 1058, 1072, 1073, 1074, 1165, 1166, 1187, 1374, 1440, 1441, 1442, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1566, 1716, 1743, 2028, 2029, 2032, 2034, 2035, 2042, 2045, 2048, 2049, 2050, 2054, 2055, 2063, 2067, 2069, 2076, 2077, 2080, 2086, 2098, 2099, 2101, 2102, 2108], "appli": [1, 3, 5, 8, 12, 24, 30, 32, 33, 34, 35, 37, 40, 47, 55, 59, 60, 61, 64, 66, 77, 83, 84, 85, 121, 323, 400, 474, 488, 515, 519, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 737, 740, 741, 742, 743, 744, 745, 760, 762, 768, 769, 770, 771, 772, 773, 774, 775, 776, 782, 783, 784, 785, 786, 812, 813, 814, 815, 842, 861, 903, 904, 906, 907, 908, 909, 919, 920, 922, 948, 951, 977, 989, 992, 1096, 1098, 1103, 1112, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1156, 1165, 1166, 1173, 1176, 1177, 1181, 1187, 1197, 1272, 1276, 1322, 1345, 1362, 1427, 1428, 1429, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1447, 1448, 1449, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1467, 1473, 1474, 1475, 1476, 1477, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1498, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1525, 1526, 1529, 1530, 1531, 1532, 1533, 1534, 1540, 1541, 1542, 1545, 1546, 1547, 1554, 
1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1580, 1581, 1591, 1592, 1593, 1594, 1595, 1596, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1625, 1627, 1628, 1629, 1630, 1633, 1635, 1636, 1637, 1638, 1642, 1643, 1644, 1646, 1647, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1657, 1658, 1659, 1663, 1668, 1676, 1677, 1678, 1679, 1681, 1684, 1685, 1686, 1687, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1706, 1715, 1716, 1723, 1724, 1730, 1731, 1732, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1757, 1758, 1765, 1766, 1768, 1769, 1771, 1781, 1793, 1797, 1810, 1823, 1828, 1829, 1830, 1831, 1832, 1903, 1904, 1906, 1923, 1960, 1961, 1963, 1976, 2013, 2016, 2020, 2023, 2032, 2034, 2036, 2041, 2042, 2045, 2047, 2048, 2049, 2051, 2052, 2055, 2058, 2065, 2067, 2068, 2070, 2073, 2076, 2080, 2081, 2095, 2096, 2100, 2102, 2103, 2109, 2110], "save": [1, 5, 7, 11, 15, 24, 28, 30, 32, 33, 52, 55, 498, 839, 892, 893, 903, 906, 908, 909, 1173, 1271, 1272, 1277, 1280, 1288, 1290, 1344, 1526, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1804, 1807, 1811, 2012, 2013, 2014, 2020, 2024, 2027, 2034, 2048, 2049, 2051, 2055, 2057, 2059, 2063, 2066, 2067, 2068, 2069, 2075, 2080, 2085, 2090, 2093, 2096, 2099, 2102, 2104, 2113], "modifi": [1, 18, 19, 23, 24, 28, 29, 30, 32, 33, 55, 64, 66, 262, 489, 490, 793, 840, 841, 842, 903, 904, 906, 907, 908, 909, 927, 928, 929, 977, 990, 991, 1226, 1269, 1272, 1275, 1276, 1292, 1468, 1526, 1570, 1572, 1574, 1623, 1624, 1708, 1709, 1710, 1713, 1714, 1716, 1721, 1722, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1793, 1794, 1795, 1796, 1797, 1800, 1808, 1889, 2013, 2016, 2033, 2041, 2045, 2048, 2049, 2055, 2060, 2068, 2070, 2075, 2081, 2084, 2086, 2091, 2098, 2099, 2100, 2101, 2102, 2107, 2111, 2114], "afterward": [1, 30, 52, 1526, 1706, 1716, 2051, 2080], "onc": [1, 7, 9, 15, 23, 24, 28, 29, 30, 32, 33, 35, 36, 39, 40, 47, 55, 58, 60, 63, 64, 903, 904, 905, 906, 908, 909, 911, 913, 930, 1108, 1272, 1276, 1344, 1462, 1526, 1716, 1734, 1736, 1768, 1806, 1810, 1826, 1872, 1876, 1967, 2013, 2016, 2020, 2026, 2029, 2032, 2041, 2042, 2044, 2045, 2048, 2051, 2052, 2054, 2055, 2067, 2068, 2080, 2085, 2096, 2099, 2100, 2102, 2108], "start": [1, 3, 4, 8, 9, 23, 24, 28, 30, 35, 37, 44, 46, 47, 48, 50, 55, 59, 63, 64, 235, 404, 435, 436, 498, 540, 700, 868, 897, 939, 940, 980, 1056, 1058, 1070, 1072, 1073, 1130, 1131, 1147, 1162, 1185, 1270, 1285, 1298, 1343, 1359, 1421, 1422, 1430, 1435, 1436, 1437, 1469, 1471, 1519, 1520, 1521, 1527, 1529, 1624, 1671, 1716, 1738, 1742, 1744, 1757, 1780, 1802, 1808, 1842, 1872, 1882, 1895, 1909, 1910, 1911, 1913, 1914, 2011, 2012, 2014, 2015, 2020, 2023, 2032, 2034, 2042, 2043, 2045, 2047, 2048, 2050, 2051, 2055, 2056, 2057, 2061, 2065, 2067, 2069, 2070, 2075, 2076, 2077, 2079, 2080, 2095, 2096, 2098, 2099, 2102, 2106, 2108, 2115], "sure": [1, 7, 9, 23, 28, 30, 33, 36, 39, 48, 51, 58, 64, 864, 865, 896, 1159, 1276, 1288, 1289, 1345, 1684, 1716, 1769, 1804, 1946, 2016, 2033, 2042, 2047, 2050, 2052, 2061, 2065, 2069, 2070, 2075, 2076, 2077, 2080, 2082, 2085, 2102, 2104, 2111], "been": [1, 5, 7, 9, 11, 17, 19, 23, 24, 28, 29, 32, 33, 34, 35, 37, 40, 47, 51, 52, 58, 59, 64, 86, 488, 490, 682, 762, 903, 904, 908, 980, 1011, 1013, 1018, 1046, 1049, 1107, 
1112, 1166, 1275, 1276, 1282, 1287, 1344, 1407, 1477, 1484, 1496, 1542, 1597, 1632, 1716, 1723, 1724, 1733, 1738, 1742, 1744, 1764, 1765, 1780, 1800, 1801, 1802, 1808, 1810, 1826, 1923, 1939, 1967, 1978, 1982, 1998, 2020, 2022, 2024, 2029, 2032, 2033, 2034, 2041, 2042, 2044, 2045, 2051, 2052, 2053, 2055, 2057, 2059, 2061, 2063, 2065, 2067, 2068, 2075, 2077, 2080, 2085, 2093, 2096, 2097, 2099, 2102, 2107, 2111], "longer": [1, 28, 50, 59, 60, 488, 689, 893, 908, 922, 923, 1269, 1276, 1716, 1768, 1771, 2029, 2042, 2045, 2063, 2075, 2077, 2096, 2111], "find": [1, 7, 14, 28, 47, 60, 64, 84, 1108, 1180, 1183, 1226, 1294, 1345, 1456, 1457, 1458, 1496, 1778, 1816, 1827, 1862, 1928, 2011, 2024, 2026, 2032, 2034, 2042, 2044, 2045, 2047, 2048, 2050, 2052, 2057, 2059, 2063, 2065, 2066, 2070, 2075, 2080, 2083, 2085, 2087, 2090, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2110, 2111, 2112, 2114], "quick": [1, 7, 58, 2023, 2055, 2064, 2094, 2109], "guid": [1, 8, 23, 82, 84, 85, 1284, 1768, 2012, 2020, 2045, 2049, 2068, 2070, 2109], "var": [1, 40, 45, 47, 51, 1440, 1441, 1442, 1479, 1480, 1488, 1489, 1490, 1498, 1566, 1629, 1830, 1972, 2014, 2016, 2033, 2066, 2098, 2106, 2111], "thing": [1, 3, 7, 8, 28, 33, 52, 60, 64, 1166, 1282, 1374, 1438, 1634, 1900, 2015, 2020, 2042, 2045, 2048, 2049, 2050, 2052, 2057, 2065, 2068, 2070, 2071, 2077, 2096, 2099, 2101, 2102, 2109, 2111], "detach": [1, 5, 66, 74, 75, 224, 417, 450, 460, 972, 1165, 1272, 1445, 1526, 1616, 1634, 1904, 1942, 2013, 2014, 2033, 2035, 2048, 2050, 2065, 2066, 2072, 2080, 2084, 2086, 2110], "register_hook": [1, 2033, 2042], "name": [1, 2, 3, 14, 19, 24, 28, 30, 33, 34, 35, 37, 40, 41, 44, 45, 47, 50, 52, 53, 55, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 89, 682, 699, 749, 750, 759, 794, 816, 817, 818, 819, 826, 842, 857, 861, 935, 938, 940, 984, 1036, 1160, 1165, 1175, 1178, 1180, 1185, 1187, 1270, 1272, 1273, 1280, 1283, 1288, 1289, 1303, 1308, 1309, 1315, 1316, 1318, 1319, 1320, 1321, 1331, 1332, 1334, 1336, 1337, 1344, 1526, 1577, 1585, 1708, 1713, 1714, 1716, 1730, 1731, 1732, 1733, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1765, 1766, 1768, 1777, 1778, 1858, 1868, 1965, 1967, 1970, 1991, 2011, 2012, 2013, 2014, 2016, 2017, 2020, 2022, 2027, 2028, 2029, 2032, 2040, 2042, 2045, 2047, 2048, 2052, 2054, 2055, 2063, 2065, 2066, 2067, 2068, 2069, 2075, 2076, 2082, 2085, 2086, 2087, 2089, 2090, 2091, 2093, 2097, 2099, 2100, 2102, 2105, 2108, 2109, 2110, 2111, 2112, 2113, 2116], "factori": [1, 2, 11, 35, 37, 41, 45, 47, 60, 826, 942, 1108, 1586, 1769, 1778, 1866, 2012, 2014, 2024, 2034, 2045, 2048, 2066, 2083, 2086, 2098, 2101], "ones": [1, 3, 19, 23, 28, 32, 34, 35, 52, 55, 60, 61, 63, 64, 66, 71, 256, 315, 404, 447, 448, 450, 517, 796, 862, 896, 912, 914, 915, 916, 917, 918, 922, 945, 957, 975, 1091, 1099, 1121, 1165, 1172, 1173, 1176, 1269, 1272, 1284, 1293, 1319, 1335, 1344, 1439, 1459, 1468, 1472, 1479, 1480, 1498, 1526, 1530, 1531, 1533, 1535, 1541, 1578, 1587, 1588, 1589, 1623, 1684, 1702, 1706, 1715, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1748, 1761, 1763, 1766, 1776, 1777, 1830, 1847, 1865, 1870, 1895, 1945, 1949, 1967, 1979, 2014, 2016, 2018, 2024, 2033, 2034, 2042, 2043, 2045, 2048, 2056, 2058, 2059, 2065, 2066, 2067, 2068, 2070, 2075, 2077, 2079, 2081, 2083, 2086, 2105, 2110, 2111], "autograd_tensor": 1, "kwarg": [1, 5, 14, 23, 28, 29, 30, 32, 33, 34, 47, 52, 53, 55, 64, 66, 582, 605, 749, 762, 764, 765, 821, 
822, 823, 824, 826, 827, 893, 908, 909, 979, 1011, 1013, 1050, 1051, 1068, 1165, 1176, 1177, 1181, 1185, 1187, 1272, 1275, 1277, 1336, 1362, 1487, 1515, 1526, 1557, 1561, 1565, 1567, 1568, 1706, 1716, 1738, 1743, 1747, 1757, 1764, 1766, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1858, 1961, 1976, 1982, 2011, 2016, 2017, 2020, 2048, 2049, 2055, 2063, 2067, 2075, 2082, 2086, 2091, 2097, 2100, 2110, 2111, 2112], "base": [1, 3, 7, 9, 12, 14, 15, 20, 23, 28, 29, 30, 32, 33, 34, 35, 37, 40, 45, 47, 48, 50, 52, 53, 55, 60, 64, 87, 88, 89, 682, 793, 798, 802, 804, 822, 823, 824, 826, 827, 864, 865, 903, 906, 908, 919, 920, 929, 975, 997, 1069, 1070, 1085, 1106, 1108, 1151, 1193, 1203, 1226, 1236, 1274, 1286, 1298, 1336, 1347, 1349, 1351, 1359, 1439, 1462, 1499, 1526, 1529, 1530, 1531, 1535, 1541, 1543, 1570, 1572, 1574, 1649, 1684, 1715, 1716, 1721, 1722, 1730, 1738, 1742, 1744, 1785, 1796, 1797, 1823, 1833, 1858, 1870, 1927, 1928, 1943, 1953, 1955, 1977, 2012, 2014, 2016, 2044, 2045, 2047, 2048, 2051, 2055, 2069, 2070, 2075, 2076, 2080, 2081, 2084, 2085, 2087, 2090, 2091, 2098, 2099, 2100, 2102, 2104], "static": [1, 3, 8, 14, 29, 33, 35, 37, 47, 52, 53, 55, 66, 71, 72, 75, 796, 801, 817, 819, 826, 828, 834, 860, 862, 892, 893, 894, 895, 908, 909, 1180, 1181, 1187, 1193, 1211, 1286, 1570, 1716, 1902, 2013, 2014, 2015, 2016, 2045, 2051, 2054, 2062, 2063, 2071, 2075, 2082, 2096, 2098, 2100], "Then": [1, 29, 33, 48, 64, 931, 1472, 1578, 1733, 1778, 1965, 2042, 2043, 2045, 2047, 2048, 2049, 2058, 2059, 2065, 2067, 2068, 2075, 2076, 2090, 2096, 2099, 2100, 2103], "op": [1, 4, 24, 28, 30, 33, 52, 53, 55, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 80, 84, 88, 89, 152, 505, 526, 606, 759, 794, 795, 796, 825, 828, 857, 864, 865, 896, 903, 906, 908, 909, 917, 965, 975, 977, 1014, 1029, 1031, 1035, 1036, 1050, 1051, 1077, 1081, 1083, 1166, 1223, 1273, 1286, 1288, 1289, 1344, 1350, 1399, 1403, 1408, 1409, 1543, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1814, 1872, 1951, 1983, 1986, 1988, 1990, 1991, 2003, 2006, 2007, 2012, 2016, 2022, 2023, 2026, 2029, 2033, 2034, 2044, 2045, 2047, 2048, 2057, 2063, 2068, 2069, 2070, 2071, 2072, 2073, 2080, 2082, 2084, 2086, 2091, 2096, 2097, 2101, 2102, 2103, 2104, 2106, 2109, 2110, 2111], "directli": [1, 3, 7, 9, 14, 15, 23, 24, 28, 30, 33, 34, 35, 36, 37, 46, 52, 53, 55, 59, 64, 66, 79, 559, 734, 749, 893, 903, 906, 908, 977, 1091, 1159, 1166, 1173, 1175, 1183, 1216, 1303, 1532, 1560, 1579, 1643, 1690, 1758, 1949, 2013, 2015, 2016, 2020, 2024, 2029, 2035, 2042, 2045, 2047, 2048, 2049, 2051, 2052, 2054, 2055, 2056, 2057, 2060, 2068, 2069, 2070, 2075, 2080, 2085, 2087, 2093, 2098, 2100, 2101, 2102, 2104, 2105, 2110, 2111], "ctx": [1, 45, 66, 70, 892, 893, 894, 903, 904, 905, 906, 907, 908, 909, 2020, 2041, 2048, 2049, 2065], "gradcheck": [1, 1901, 2012, 2020, 2048, 2080], "extend": [1, 23, 28, 30, 33, 35, 39, 50, 64, 864, 893, 895, 903, 906, 908, 909, 1528, 1537, 1901, 1965, 2012, 2035, 2042, 2054, 2056, 2057, 2065, 2066, 2068, 2073, 2080, 2098, 2099, 2112], "staticmethod": [1, 66, 893, 895, 903, 904, 905, 906, 907, 908, 909, 2016, 2041, 2048, 2065, 2075], "result": [1, 3, 4, 5, 7, 8, 9, 12, 14, 18, 19, 23, 24, 28, 30, 33, 35, 37, 40, 45, 48, 52, 53, 55, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 86, 87, 89, 155, 223, 224, 256, 257, 317, 337, 400, 498, 500, 501, 558, 585, 586, 587, 589, 590, 620, 688, 689, 690, 691, 692, 695, 697, 698, 699, 701, 
762, 795, 880, 882, 901, 902, 911, 912, 913, 914, 915, 916, 922, 923, 931, 943, 946, 957, 961, 966, 972, 974, 975, 977, 978, 989, 993, 995, 1020, 1022, 1024, 1050, 1051, 1064, 1087, 1088, 1089, 1090, 1091, 1103, 1108, 1126, 1129, 1135, 1144, 1151, 1153, 1156, 1161, 1162, 1165, 1166, 1167, 1170, 1171, 1176, 1177, 1181, 1185, 1196, 1201, 1209, 1212, 1216, 1234, 1235, 1237, 1249, 1263, 1272, 1275, 1276, 1284, 1288, 1289, 1291, 1293, 1294, 1298, 1312, 1314, 1316, 1317, 1325, 1329, 1331, 1333, 1334, 1335, 1342, 1352, 1353, 1360, 1370, 1372, 1373, 1374, 1375, 1377, 1378, 1417, 1420, 1435, 1436, 1437, 1462, 1464, 1465, 1466, 1470, 1472, 1477, 1491, 1496, 1526, 1532, 1542, 1570, 1571, 1572, 1573, 1574, 1578, 1614, 1632, 1643, 1699, 1702, 1703, 1716, 1730, 1733, 1766, 1769, 1770, 1771, 1772, 1778, 1781, 1808, 1814, 1816, 1823, 1824, 1827, 1833, 1846, 1852, 1867, 1882, 1894, 1905, 1907, 1908, 1912, 1919, 1921, 1922, 1923, 1926, 1927, 1928, 1938, 1942, 1943, 1948, 1949, 1951, 1952, 1954, 1964, 1967, 1971, 1972, 1976, 2013, 2014, 2015, 2016, 2020, 2023, 2024, 2026, 2033, 2034, 2035, 2040, 2041, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2050, 2052, 2053, 2055, 2057, 2058, 2059, 2060, 2062, 2063, 2065, 2067, 2069, 2070, 2071, 2072, 2075, 2076, 2079, 2080, 2083, 2085, 2086, 2087, 2091, 2092, 2093, 2097, 2098, 2099, 2101, 2105, 2108, 2109, 2110, 2111, 2112], "save_for_backward": [1, 893, 903, 905, 907, 908, 909, 2020, 2041, 2042, 2048, 2049, 2065], "grad_output": [1, 66, 892, 903, 904, 908, 909, 917, 923, 927, 928, 1272, 1526, 2014, 2042, 2045, 2048, 2049, 2055, 2106], "saved_tensor": [1, 903, 905, 906, 907, 908, 909, 2020, 2041, 2042, 2048, 2049], "inspect": [1, 28, 52, 55, 64, 934, 1272, 1284, 2041, 2048, 2054, 2068, 2071, 2083, 2102, 2104, 2105, 2112], "cost": [1, 3, 4, 8, 9, 24, 30, 33, 55, 488, 965, 975, 1328, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1684, 1866, 2045, 2051, 2059, 2071, 2076, 2083, 2107, 2111], "both": [1, 2, 3, 14, 19, 23, 24, 25, 28, 29, 30, 34, 35, 40, 45, 46, 47, 48, 52, 53, 55, 63, 64, 66, 74, 75, 99, 341, 501, 731, 770, 771, 774, 775, 776, 795, 854, 895, 902, 908, 909, 912, 913, 914, 915, 916, 943, 981, 997, 1031, 1064, 1103, 1109, 1110, 1111, 1124, 1154, 1155, 1156, 1165, 1214, 1216, 1226, 1233, 1249, 1262, 1263, 1269, 1272, 1286, 1294, 1295, 1343, 1359, 1367, 1373, 1388, 1389, 1435, 1436, 1440, 1441, 1442, 1450, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1480, 1486, 1488, 1489, 1490, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1519, 1520, 1521, 1526, 1532, 1566, 1574, 1578, 1579, 1582, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1657, 1658, 1659, 1684, 1716, 1725, 1727, 1736, 1766, 1783, 1784, 1796, 1808, 1831, 1832, 1870, 1875, 1900, 1902, 1904, 1923, 1927, 1948, 1949, 1988, 2013, 2015, 2016, 2020, 2023, 2033, 2034, 2036, 2041, 2042, 2043, 2048, 2049, 2051, 2052, 2055, 2058, 2059, 2060, 2065, 2068, 2070, 2073, 2074, 2075, 2077, 2080, 2081, 2082, 2087, 2090, 2091, 2092, 2096, 2098, 2100, 2101, 2102, 2103, 2104, 2108, 2111], "cpu": [1, 4, 5, 14, 17, 23, 28, 30, 53, 55, 66, 71, 75, 76, 77, 90, 121, 198, 291, 328, 337, 447, 448, 449, 450, 451, 460, 488, 582, 591, 868, 883, 936, 938, 944, 953, 995, 998, 999, 1010, 1020, 1021, 1022, 1024, 1109, 1111, 1121, 1126, 1144, 1156, 1160, 1162, 1163, 1221, 1223, 1224, 1225, 1230, 1231, 1248, 1272, 1280, 1282, 1283, 1292, 1302, 1304, 1308, 1309, 1310, 1311, 1313, 1315, 1318, 1320, 1321, 1327, 1330, 1333, 1336, 1337, 1343, 1344, 1359, 1362, 1373, 1385, 
1462, 1468, 1526, 1699, 1716, 1721, 1722, 1757, 1758, 1772, 1775, 1782, 1826, 1835, 1837, 1839, 1841, 1842, 1866, 1871, 1872, 1873, 1875, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1894, 1904, 1909, 1910, 1911, 1912, 1913, 1914, 1927, 1942, 1943, 1944, 1953, 1955, 1960, 1964, 1981, 2009, 2012, 2013, 2014, 2016, 2020, 2024, 2026, 2032, 2033, 2035, 2045, 2048, 2049, 2051, 2053, 2055, 2058, 2059, 2061, 2065, 2066, 2069, 2071, 2074, 2075, 2080, 2082, 2083, 2086, 2087, 2089, 2092, 2093, 2096, 2100, 2101, 2102, 2104, 2105, 2108, 2109, 2111], "There": [1, 5, 6, 7, 9, 12, 14, 19, 22, 28, 30, 47, 52, 53, 55, 56, 60, 61, 64, 86, 87, 89, 682, 736, 795, 893, 908, 909, 975, 1166, 1462, 1496, 1542, 1684, 1716, 1723, 1724, 1764, 1769, 2011, 2013, 2015, 2016, 2022, 2023, 2034, 2042, 2045, 2048, 2049, 2050, 2054, 2057, 2061, 2062, 2065, 2068, 2069, 2070, 2071, 2075, 2077, 2086, 2089, 2096, 2098, 2099, 2101, 2102, 2109, 2110, 2111], "moment": [1, 66, 72, 774, 775, 776, 993, 1783, 1784, 1785, 1787, 1793, 1797, 2026, 2032, 2035, 2069, 2075, 2108], "nvprof": [1, 4, 932, 2045], "regist": [1, 15, 19, 24, 28, 32, 35, 47, 50, 52, 53, 55, 64, 82, 84, 85, 86, 87, 89, 489, 490, 682, 794, 795, 857, 927, 928, 975, 1053, 1187, 1272, 1344, 1526, 1527, 1528, 1536, 1537, 1555, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1717, 1730, 1731, 1733, 1734, 1736, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1965, 1967, 2020, 2029, 2032, 2045, 2046, 2047, 2048, 2054, 2055, 2060, 2063, 2065, 2068, 2075, 2092, 2099, 2101, 2104, 2107], "activ": [1, 5, 7, 9, 33, 34, 50, 52, 55, 61, 81, 83, 794, 796, 798, 805, 809, 844, 845, 854, 864, 865, 1018, 1033, 1046, 1064, 1165, 1167, 1177, 1345, 1434, 1464, 1465, 1466, 1470, 1480, 1498, 1525, 1527, 1545, 1556, 1570, 1572, 1574, 1625, 1663, 1687, 1716, 1734, 1736, 1766, 1771, 1976, 2012, 2026, 2032, 2045, 2048, 2049, 2051, 2055, 2065, 2067, 2069, 2070, 2071, 2072, 2075, 2080, 2090, 2091, 2096, 2101, 2106, 2111], "emit_nvtx": [1, 4], "vtune": [1, 4], "emit_itt": [1, 4], "use_cuda": [1, 2069], "use_devic": 1, "record_shap": [1, 2069], "with_flop": [1, 2069], "profile_memori": [1, 2069], "with_stack": [1, 2069], "with_modul": [1, 2069], "use_kineto": 1, "use_cpu": 1, "use_mtia": 1, "experimental_config": [1, 2069], "hold": [1, 28, 30, 45, 47, 50, 51, 52, 55, 59, 63, 64, 539, 1345, 1472, 1527, 1528, 1536, 1537, 1578, 1706, 1716, 1718, 1719, 1733, 1743, 1757, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1909, 1910, 1911, 1912, 1913, 1914, 2043, 2045, 2048, 2050, 2054, 2057, 2067, 2069, 2075, 2076, 2077, 2079, 2080, 2082, 2083, 2085, 2086, 2098, 2099, 2102], "summari": [1, 3, 48, 1065, 1874, 2012, 2029, 2051, 2085, 2091, 2099, 2105, 2111, 2113], "hood": [1, 52, 63, 2032, 2042, 2045, 2047, 2057, 2068, 2077, 2100, 2101], "just": [1, 2, 7, 14, 24, 28, 35, 37, 45, 52, 64, 591, 689, 699, 792, 825, 828, 892, 894, 908, 909, 931, 935, 990, 991, 1185, 1187, 1195, 1288, 1289, 1327, 1330, 1335, 1336, 1362, 1464, 1465, 1466, 1470, 1716, 1736, 1737, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1866, 1952, 1953, 1954, 1955, 1965, 2011, 2013, 2032, 2041, 2042, 2048, 2049, 2052, 2054, 2055, 2056, 2060, 2068, 2075, 2076, 2080, 2083, 2084, 2086, 2091, 2096, 2097, 2098, 2099, 2100, 2101, 2102, 2110], "record": [1, 5, 19, 20, 32, 38, 39, 40, 41, 45, 48, 52, 63, 64, 447, 448, 449, 450, 451, 488, 498, 821, 822, 823, 824, 827, 829, 868, 941, 944, 953, 1010, 1011, 1013, 1109, 
1110, 1111, 1121, 1126, 1144, 1162, 1163, 1164, 1196, 1230, 1231, 1272, 1288, 1292, 1343, 1359, 1385, 1388, 1389, 1462, 1526, 1775, 1776, 1778, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1909, 1910, 1911, 1912, 1913, 1914, 1942, 1981, 1982, 2009, 2010, 2013, 2015, 2024, 2035, 2041, 2042, 2045, 2048, 2051, 2062, 2063, 2065, 2069, 2075, 2085, 2086, 2087, 2089, 2090, 2091, 2096, 2098, 2099, 2102, 2111, 2113, 2115], "event": [1, 31, 35, 37, 46, 50, 63, 488, 935, 936, 937, 1011, 1013, 1064, 1067, 1288, 1289, 1350, 1388, 1389, 1684, 1982, 2012, 2029, 2032, 2045, 2069, 2085, 2109, 2113, 2115], "being": [1, 3, 5, 9, 11, 12, 19, 20, 23, 28, 30, 32, 33, 35, 37, 40, 47, 48, 50, 52, 53, 55, 59, 60, 63, 64, 66, 75, 83, 99, 152, 404, 417, 762, 795, 801, 895, 908, 909, 911, 917, 973, 977, 989, 1011, 1019, 1066, 1071, 1085, 1086, 1121, 1154, 1155, 1166, 1175, 1176, 1186, 1187, 1261, 1272, 1276, 1362, 1371, 1376, 1435, 1436, 1437, 1438, 1439, 1459, 1461, 1468, 1469, 1473, 1474, 1479, 1485, 1492, 1517, 1518, 1526, 1529, 1530, 1531, 1532, 1533, 1540, 1558, 1559, 1575, 1597, 1604, 1605, 1615, 1627, 1628, 1632, 1644, 1668, 1676, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1750, 1751, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1826, 1827, 1921, 1922, 1923, 1971, 1972, 2013, 2016, 2023, 2028, 2029, 2033, 2034, 2035, 2041, 2042, 2045, 2048, 2049, 2050, 2051, 2052, 2055, 2057, 2058, 2059, 2063, 2065, 2068, 2070, 2073, 2075, 2076, 2080, 2087, 2090, 2093, 2096, 2098, 2099, 2101, 2102, 2109, 2111, 2112, 2115], "those": [1, 2, 4, 5, 14, 23, 24, 28, 30, 32, 33, 34, 35, 52, 55, 60, 63, 64, 83, 682, 826, 922, 1022, 1032, 1096, 1098, 1177, 1187, 1200, 1288, 1330, 1336, 1344, 1384, 1430, 1438, 1439, 1459, 1461, 1469, 1485, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1579, 1604, 1605, 1615, 1624, 1644, 1668, 1676, 1711, 1712, 1778, 1797, 1927, 1949, 1976, 1989, 2015, 2033, 2034, 2035, 2041, 2042, 2045, 2048, 2049, 2051, 2053, 2055, 2059, 2060, 2063, 2065, 2067, 2073, 2075, 2077, 2096, 2099, 2100, 2101, 2102, 2104, 2105, 2108, 2111], "python": [1, 3, 4, 9, 12, 14, 15, 18, 19, 23, 32, 34, 35, 36, 37, 40, 41, 44, 45, 47, 48, 49, 53, 55, 64, 66, 74, 75, 354, 591, 626, 682, 783, 883, 911, 912, 913, 914, 915, 916, 918, 922, 923, 961, 962, 974, 975, 977, 989, 1044, 1050, 1051, 1053, 1103, 1108, 1126, 1128, 1156, 1162, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1176, 1177, 1248, 1270, 1272, 1273, 1275, 1276, 1277, 1283, 1284, 1288, 1344, 1527, 1528, 1536, 1537, 1684, 1716, 1820, 1842, 1846, 1866, 1867, 1902, 1967, 1968, 1976, 1995, 2011, 2022, 2024, 2030, 2031, 2032, 2034, 2042, 2043, 2044, 2045, 2047, 2049, 2050, 2054, 2057, 2060, 2061, 2062, 2063, 2068, 2070, 2074, 2075, 2076, 2077, 2083, 2086, 2087, 2092, 2093, 2095, 2097, 2098, 2100, 2101, 2102, 2103, 2104, 2105, 2107, 2108, 2111, 2112, 2113], "report": [1, 3, 4, 18, 28, 40, 48, 63, 64, 86, 87, 89, 1010, 1064, 1169, 1170, 1172, 1185, 1981, 2041, 2045, 2080, 2102, 2105, 2108, 2111, 2113], "runtim": [1, 3, 5, 14, 17, 19, 20, 28, 30, 33, 34, 37, 52, 53, 64, 66, 76, 77, 829, 881, 935, 1106, 1108, 1187, 1197, 1203, 1211, 1236, 1272, 1278, 1288, 1526, 1718, 1719, 1779, 1822, 1964, 1977, 2016, 2033, 2034, 2042, 2045, 2048, 2057, 2062, 2063, 2064, 2065, 2070, 2092, 2098, 2099, 2102, 2107, 2114], "note": [1, 2, 3, 5, 8, 11, 12, 14, 15, 18, 22, 23, 24, 28, 30, 32, 33, 34, 35, 36, 37, 45, 47, 50, 52, 53, 55, 59, 61, 63, 64, 66, 74, 75, 86, 87, 88, 
89, 152, 260, 379, 488, 490, 501, 502, 515, 517, 519, 737, 743, 744, 745, 793, 795, 796, 797, 801, 844, 864, 868, 887, 896, 903, 906, 908, 911, 912, 913, 914, 915, 916, 917, 918, 923, 929, 931, 938, 959, 965, 975, 982, 989, 1007, 1083, 1091, 1096, 1108, 1129, 1159, 1162, 1177, 1183, 1185, 1213, 1226, 1259, 1269, 1270, 1272, 1273, 1279, 1345, 1367, 1388, 1389, 1409, 1435, 1436, 1437, 1438, 1439, 1445, 1456, 1457, 1458, 1459, 1461, 1468, 1469, 1473, 1477, 1479, 1485, 1491, 1492, 1496, 1517, 1518, 1526, 1527, 1529, 1530, 1531, 1532, 1533, 1536, 1537, 1540, 1542, 1549, 1558, 1559, 1566, 1570, 1575, 1579, 1587, 1604, 1605, 1615, 1623, 1624, 1632, 1643, 1644, 1668, 1671, 1676, 1677, 1691, 1706, 1716, 1717, 1723, 1724, 1771, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1800, 1802, 1808, 1810, 1826, 1833, 1908, 1909, 1910, 1911, 1913, 1914, 1919, 1923, 1927, 1949, 1964, 1965, 1967, 1976, 2011, 2013, 2014, 2017, 2020, 2023, 2028, 2032, 2035, 2036, 2042, 2043, 2044, 2047, 2048, 2049, 2052, 2053, 2054, 2055, 2057, 2058, 2059, 2060, 2063, 2065, 2067, 2068, 2069, 2071, 2073, 2074, 2076, 2077, 2080, 2082, 2084, 2085, 2088, 2093, 2098, 2099, 2100, 2103, 2109, 2110, 2111, 2114, 2116], "propag": [1, 5, 30, 31, 33, 35, 37, 47, 64, 66, 75, 515, 688, 691, 692, 693, 697, 698, 699, 700, 768, 769, 770, 771, 781, 784, 785, 787, 788, 789, 840, 842, 943, 1154, 1155, 1185, 1196, 1209, 1417, 1723, 1724, 1908, 2012, 2032, 2033, 2041, 2045, 2048, 2052, 2054, 2075, 2086, 2090, 2091, 2098, 2101, 2102, 2110], "async": [1, 28, 29, 30, 63, 605, 1716, 2017, 2045, 2054, 2082, 2099], "task": [1, 3, 7, 24, 56, 61, 1275, 1291, 1488, 1489, 1490, 2016, 2044, 2054, 2055, 2057, 2061, 2099, 2102, 2114], "cuda": [1, 3, 4, 5, 14, 16, 19, 22, 23, 24, 28, 30, 32, 34, 55, 62, 90, 152, 291, 315, 323, 337, 342, 488, 517, 519, 526, 582, 868, 880, 882, 896, 917, 938, 944, 946, 953, 975, 980, 1008, 1009, 1010, 1011, 1013, 1014, 1029, 1031, 1042, 1109, 1110, 1111, 1121, 1124, 1125, 1126, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1160, 1163, 1221, 1230, 1231, 1272, 1292, 1294, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1327, 1330, 1333, 1334, 1336, 1337, 1343, 1344, 1359, 1362, 1378, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1468, 1496, 1526, 1542, 1566, 1586, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1624, 1632, 1643, 1671, 1684, 1703, 1704, 1705, 1706, 1716, 1718, 1719, 1721, 1722, 1723, 1724, 1770, 1772, 1775, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1826, 1835, 1837, 1839, 1841, 1842, 1866, 1870, 1875, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1905, 1909, 1910, 1911, 1912, 1913, 1914, 1927, 1942, 1944, 1953, 1955, 1960, 1964, 2009, 2012, 2014, 2020, 2024, 2033, 2035, 2048, 2049, 2051, 2055, 2058, 2065, 2066, 2067, 2069, 2074, 2075, 2080, 2082, 2083, 2086, 2087, 2089, 2092, 2093, 2100, 2101, 2104, 2108, 2111, 2114, 2115], "cudaev": 1, "approxim": [1, 3, 24, 32, 47, 64, 1173, 1226, 1336, 1345, 1430, 1475, 1540, 1556, 1563, 1630, 1676, 1687, 1731, 1780, 1797, 1816, 1870, 1928, 1949, 2014, 2016, 2045, 2047, 2048, 2052, 2065, 2106, 2116], "4u": 1, "privateuseon": 1, "shape": [1, 5, 8, 11, 12, 19, 24, 28, 33, 34, 35, 53, 55, 57, 59, 61, 64, 65, 66, 67, 68, 71, 72, 74, 78, 99, 152, 173, 193, 210, 220, 402, 404, 447, 449, 451, 473, 499, 500, 515, 517, 519, 546, 568, 619, 682, 687, 689, 690, 699, 737, 747, 748, 758, 760, 762, 766, 768, 769, 770, 
771, 774, 775, 776, 783, 798, 881, 887, 903, 905, 908, 912, 914, 915, 916, 935, 945, 946, 948, 951, 956, 958, 962, 963, 967, 968, 975, 989, 992, 1023, 1098, 1099, 1100, 1102, 1103, 1108, 1109, 1111, 1113, 1128, 1129, 1130, 1131, 1139, 1140, 1141, 1147, 1153, 1154, 1155, 1156, 1163, 1170, 1171, 1175, 1176, 1177, 1187, 1188, 1203, 1209, 1213, 1215, 1229, 1234, 1235, 1239, 1247, 1249, 1263, 1269, 1286, 1288, 1289, 1292, 1297, 1298, 1302, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1361, 1362, 1366, 1374, 1379, 1380, 1411, 1412, 1423, 1426, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1587, 1588, 1589, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1599, 1600, 1601, 1603, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1623, 1624, 1628, 1632, 1634, 1643, 1644, 1649, 1657, 1658, 1659, 1669, 1670, 1673, 1674, 1675, 1677, 1684, 1703, 1706, 1715, 1716, 1718, 1719, 1730, 1733, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1772, 1775, 1777, 1779, 1814, 1823, 1827, 1831, 1832, 1835, 1837, 1838, 1839, 1846, 1848, 1849, 1853, 1862, 1865, 1889, 1904, 1905, 1907, 1910, 1912, 1918, 1923, 1924, 1927, 1938, 1945, 1949, 1959, 1960, 1961, 1962, 1976, 1979, 2009, 2013, 2014, 2016, 2020, 2033, 2034, 2035, 2042, 2043, 2045, 2048, 2049, 2050, 2063, 2066, 2069, 2072, 2075, 2080, 2081, 2085, 2086, 2087, 2089, 2092, 2100, 2102, 2106, 2109, 2110, 2111], "about": [1, 8, 9, 12, 17, 23, 28, 29, 30, 32, 33, 37, 40, 44, 46, 47, 48, 52, 56, 60, 64, 88, 257, 488, 500, 620, 922, 923, 929, 980, 989, 1015, 1016, 1032, 1056, 1058, 1059, 1060, 1062, 1063, 1064, 1065, 1072, 1073, 1074, 1166, 1186, 1187, 1197, 1209, 1302, 1309, 1469, 1541, 1715, 1757, 1793, 1797, 1808, 1869, 1995, 2011, 2013, 2015, 2016, 2020, 2023, 2044, 2045, 2047, 2048, 2050, 2051, 2054, 2057, 2058, 2060, 2062, 2068, 2069, 2070, 2073, 2076, 2077, 2080, 2084, 2086, 2097, 2098, 2099, 2100, 2102, 2107, 2111], "dimens": [1, 11, 23, 28, 34, 35, 52, 55, 60, 61, 64, 66, 71, 75, 78, 86, 220, 234, 235, 256, 262, 315, 317, 319, 323, 435, 436, 474, 475, 476, 495, 515, 517, 519, 539, 545, 547, 548, 562, 585, 586, 587, 589, 590, 609, 619, 688, 694, 695, 697, 698, 699, 701, 737, 760, 781, 783, 787, 877, 878, 879, 880, 889, 890, 891, 895, 908, 909, 917, 954, 962, 965, 966, 967, 968, 969, 1007, 1022, 1024, 1087, 1088, 1089, 1090, 1091, 1096, 1097, 1098, 1099, 1100, 1106, 1108, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1147, 1177, 1180, 1181, 1183, 1186, 1191, 1197, 1213, 1226, 1235, 1236, 1247, 
1249, 1269, 1293, 1294, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1352, 1353, 1360, 1363, 1367, 1370, 1372, 1373, 1374, 1375, 1378, 1380, 1395, 1417, 1418, 1419, 1420, 1421, 1422, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1447, 1448, 1449, 1454, 1455, 1457, 1458, 1459, 1460, 1461, 1462, 1465, 1467, 1468, 1469, 1471, 1472, 1475, 1476, 1479, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1494, 1495, 1496, 1498, 1506, 1507, 1512, 1513, 1514, 1515, 1516, 1517, 1520, 1521, 1525, 1532, 1533, 1534, 1535, 1538, 1539, 1540, 1541, 1545, 1546, 1547, 1549, 1554, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1572, 1574, 1575, 1576, 1577, 1578, 1597, 1603, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1631, 1632, 1633, 1634, 1643, 1646, 1649, 1650, 1651, 1669, 1670, 1671, 1684, 1690, 1691, 1703, 1715, 1723, 1724, 1730, 1731, 1732, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1751, 1752, 1758, 1759, 1761, 1763, 1765, 1768, 1770, 1771, 1814, 1817, 1824, 1826, 1827, 1828, 1833, 1847, 1848, 1849, 1853, 1862, 1864, 1865, 1874, 1895, 1899, 1903, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1918, 1920, 1921, 1922, 1923, 1926, 1927, 1937, 1939, 1943, 1944, 1945, 1946, 1948, 1949, 1951, 1952, 1953, 1954, 1955, 1958, 1959, 1960, 1961, 1962, 1963, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 2012, 2014, 2015, 2016, 2035, 2040, 2043, 2045, 2049, 2050, 2052, 2058, 2063, 2070, 2079, 2080, 2081, 2083, 2085, 2086, 2093, 2098, 2099, 2106], "collect": [1, 3, 7, 23, 29, 30, 32, 37, 47, 55, 63, 64, 801, 802, 826, 864, 865, 963, 1046, 1109, 1235, 1374, 1716, 1747, 1775, 1835, 1839, 2009, 2012, 2014, 2015, 2016, 2020, 2029, 2045, 2047, 2049, 2067, 2069, 2070, 2073, 2077, 2080, 2085, 2087, 2102, 2108, 2110, 2111, 2113, 2115], "further": [1, 4, 9, 12, 14, 19, 24, 28, 30, 55, 63, 66, 76, 77, 488, 1187, 1216, 1276, 1312, 1430, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1716, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1814, 2016, 2023, 2051, 2055, 2057, 2068, 2069, 2077, 2085, 2091, 2095, 2098, 2106, 2109, 2111], "group": [1, 3, 9, 23, 24, 29, 30, 32, 33, 37, 44, 47, 48, 50, 51, 55, 58, 64, 612, 682, 714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 740, 741, 742, 743, 744, 745, 774, 775, 776, 883, 935, 1446, 1453, 1454, 1455, 1456, 1457, 1458, 1480, 1502, 1503, 1504, 1505, 1506, 1507, 1566, 1607, 1608, 1609, 1610, 1611, 1612, 1633, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1961, 2011, 2012, 2014, 2016, 2032, 2040, 2045, 2047, 2051, 2065, 2067, 2068, 2069, 2075, 2085, 2097, 2099, 2102, 2106, 2111], "prof": [1, 44, 938, 2069, 2102, 2109, 2111], "key_averag": [1, 938, 2069], "group_by_input_shap": [1, 935, 2069], "skew": [1, 3, 4, 1324, 1730], "neglig": [1, 1285, 1870], "bottom": [1, 55, 1436, 1632, 2023, 2109], "But": [1, 7, 55, 63, 488, 1288, 1318, 1716, 2042, 2048, 2050, 2052, 2059, 2068, 2080, 2084, 2101, 2102, 2105, 2112], "total": [1, 3, 4, 7, 20, 23, 24, 28, 33, 44, 46, 47, 48, 55, 880, 936, 938, 1024, 1059, 1064, 1078, 1162, 1234, 1235, 1383, 1445, 1472, 1485, 1492, 1517, 1532, 1578, 1670, 1716, 1721, 1770, 1772, 1774, 1802, 1808, 1848, 1874, 
1923, 2011, 2015, 2029, 2045, 2051, 2053, 2057, 2067, 2080], "artifici": [1, 2080], "increas": [1, 3, 7, 24, 28, 35, 44, 55, 682, 857, 903, 905, 908, 959, 1032, 1064, 1108, 1234, 1235, 1430, 1439, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1538, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1674, 1675, 1684, 1795, 1801, 1802, 1806, 1808, 1810, 1862, 1870, 1949, 1970, 1989, 2014, 2022, 2030, 2044, 2045, 2053, 2057, 2071, 2080, 2096, 2102, 2105], "estim": [1, 3, 23, 35, 993, 997, 1226, 1269, 1440, 1441, 1442, 1475, 1479, 1480, 1488, 1489, 1490, 1498, 1566, 1630, 1731, 1794, 1890, 1928, 2069], "flop": [1, 2069], "hardwar": [1, 8, 862, 864, 865, 1308, 1309, 1336, 1964, 2045, 2057, 2059, 2071, 2080, 2102, 2104, 2111], "matrix": [1, 2, 24, 28, 35, 193, 210, 688, 691, 692, 693, 762, 943, 945, 954, 955, 963, 965, 966, 967, 968, 975, 993, 997, 1091, 1095, 1096, 1098, 1108, 1177, 1216, 1222, 1237, 1293, 1302, 1303, 1304, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1338, 1340, 1342, 1345, 1353, 1362, 1364, 1367, 1377, 1412, 1414, 1468, 1469, 1477, 1496, 1578, 1587, 1588, 1589, 1623, 1624, 1684, 1730, 1736, 1765, 1771, 1814, 1815, 1816, 1826, 1870, 1897, 1900, 1904, 1905, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1919, 1927, 1928, 1944, 1947, 1949, 1951, 1952, 1953, 1954, 1955, 1970, 1976, 2012, 2016, 2033, 2034, 2035, 2040, 2044, 2045, 2048, 2052, 2055, 2058, 2069, 2080, 2085, 2086, 2102], "2d": [1, 30, 35, 55, 589, 590, 702, 705, 708, 737, 741, 744, 768, 770, 775, 781, 785, 787, 788, 789, 993, 997, 1096, 1108, 1328, 1329, 1428, 1432, 1436, 1439, 1440, 1441, 1454, 1457, 1461, 1465, 1469, 1473, 1488, 1489, 1494, 1520, 1529, 1531, 1532, 1533, 1579, 1580, 1581, 1592, 1595, 1597, 1600, 1605, 1608, 1611, 1619, 1624, 1627, 1654, 1658, 1668, 1671, 1731, 1765, 1832, 1907, 1912, 1951, 2026, 2040, 2048, 2069, 2070, 2080], "alloc": [1, 4, 11, 18, 20, 24, 30, 33, 35, 37, 47, 55, 256, 333, 447, 448, 449, 450, 451, 488, 1009, 1011, 1013, 1015, 1016, 1018, 1031, 1032, 1033, 1056, 1058, 1060, 1062, 1063, 1064, 1065, 1073, 1074, 1078, 1109, 1111, 1160, 1181, 1187, 1190, 1193, 1196, 1197, 1218, 1381, 1383, 1384, 1392, 1835, 1839, 1841, 1866, 1942, 1982, 1988, 1989, 2012, 2032, 2035, 2042, 2047, 2051, 2053, 2057, 2063, 2069, 2083, 2095, 2096, 2098, 2101, 2109, 2111], "dealloc": [1, 64, 488, 1162, 1185, 2032, 2045, 2050, 2053, 2069], "line": [1, 4, 18, 19, 28, 37, 52, 53, 64, 682, 918, 935, 1108, 1166, 1272, 1283, 1526, 1597, 1632, 1874, 1902, 2013, 2016, 2017, 2024, 2035, 2043, 2048, 2052, 2053, 2059, 2061, 2065, 2069, 2080, 2095, 2099, 2100, 2102, 2104, 2105, 2108, 2111, 2113], "hierarchi": [1, 30, 33, 52, 64, 842, 1185, 1276, 1783, 1784, 1796, 2017, 2028, 2048, 2069, 2070, 2098], "callstack": [1, 28, 2069], "A": [1, 2, 3, 5, 7, 8, 9, 12, 14, 18, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 37, 44, 47, 48, 50, 52, 53, 55, 56, 61, 63, 66, 67, 68, 71, 87, 90, 562, 582, 585, 586, 587, 596, 626, 682, 694, 699, 714, 715, 716, 717, 718, 719, 720, 721, 722, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 747, 748, 758, 763, 764, 765, 766, 767, 792, 794, 816, 861, 862, 864, 865, 937, 944, 953, 954, 956, 961, 966, 967, 968, 974, 975, 977, 980, 981, 984, 989, 993, 997, 1010, 1011, 1013, 1021, 1023, 1024, 1093, 1108, 1113, 1121, 1122, 1123, 1135, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1176, 1177, 1183, 1187, 1189, 1215, 1226, 1229, 1230, 1231, 1235, 1262, 1263, 1264, 1265, 1268, 1270, 1272, 1275, 
1280, 1283, 1285, 1288, 1289, 1293, 1297, 1302, 1303, 1304, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1342, 1345, 1353, 1361, 1362, 1363, 1364, 1368, 1423, 1439, 1443, 1445, 1461, 1464, 1465, 1466, 1468, 1469, 1470, 1478, 1479, 1487, 1497, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1516, 1525, 1526, 1532, 1555, 1560, 1562, 1570, 1575, 1576, 1586, 1590, 1597, 1603, 1615, 1634, 1644, 1651, 1663, 1684, 1690, 1691, 1706, 1717, 1718, 1719, 1725, 1727, 1730, 1733, 1736, 1758, 1759, 1762, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1791, 1793, 1794, 1795, 1796, 1797, 1801, 1802, 1804, 1807, 1808, 1810, 1816, 1826, 1828, 1829, 1830, 1831, 1832, 1843, 1849, 1855, 1858, 1890, 1899, 1902, 1903, 1906, 1918, 1922, 1923, 1927, 1928, 1946, 1951, 1952, 1953, 1954, 1955, 1959, 1960, 1961, 1963, 1964, 1972, 1976, 1979, 1981, 1982, 2011, 2013, 2014, 2015, 2016, 2020, 2023, 2026, 2033, 2034, 2035, 2040, 2041, 2044, 2045, 2048, 2049, 2050, 2051, 2057, 2058, 2060, 2065, 2067, 2068, 2069, 2070, 2075, 2076, 2077, 2080, 2081, 2082, 2083, 2085, 2086, 2091, 2096, 2097, 2100, 2101, 2102, 2104, 2106, 2109, 2110, 2111, 2112, 2113, 2116], "aten": [1, 3, 12, 14, 18, 52, 53, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 82, 84, 87, 1050, 1166, 1777, 1778, 1902, 2013, 2014, 2020, 2044, 2048, 2061, 2063, 2066, 2069, 2070, 2080, 2092, 2097, 2103, 2105, 2109, 2111], "torchscript": [1, 3, 8, 32, 52, 53, 64, 619, 1270, 1273, 1275, 1276, 1277, 1279, 1284, 1288, 1290, 1778, 2012, 2017, 2060, 2069, 2075, 2113], "eager": [1, 8, 52, 975, 1083, 1181, 1186, 1270, 1273, 1284, 1409, 1873, 2016, 2020, 2045, 2051, 2069, 2071, 2073, 2086, 2096, 2102, 2103, 2104, 2108, 2111], "experiment": [1, 2, 24, 28, 30, 32, 33, 34, 52, 53, 55, 64, 66, 69, 74, 75, 78, 235, 911, 913, 917, 975, 984, 1082, 1166, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1303, 1314, 1316, 1317, 1321, 1334, 1716, 1797, 2012, 2013, 2015, 2020, 2024, 2033, 2034, 2045, 2047, 2059, 2061, 2063, 2065, 2068, 2069, 2098, 2101, 2111], "kineto": [1, 931, 2069], "_experimentalconfig": [1, 2069], "librari": [1, 2, 3, 4, 8, 9, 11, 14, 15, 17, 19, 20, 23, 28, 36, 45, 50, 52, 56, 59, 60, 61, 64, 682, 931, 995, 1011, 1034, 1038, 1050, 1159, 1285, 1316, 1362, 1894, 2029, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2054, 2055, 2057, 2058, 2061, 2065, 2068, 2069, 2070, 2075, 2088, 2093, 2099, 2102, 2112, 2114], "100": [1, 19, 23, 28, 32, 35, 64, 66, 74, 75, 80, 302, 740, 741, 744, 745, 931, 1233, 1272, 1284, 1290, 1328, 1343, 1350, 1359, 1374, 1411, 1430, 1438, 1439, 1440, 1441, 1442, 1454, 1455, 1457, 1458, 1460, 1461, 1488, 1489, 1490, 1526, 1533, 1535, 1566, 1575, 1614, 1615, 1668, 1786, 1798, 1799, 1804, 1805, 1806, 1807, 1809, 1811, 1812, 1828, 1883, 1962, 2014, 2016, 2032, 2044, 2045, 2065, 2067, 2071, 2077, 2079, 2080, 2085, 2097, 2100, 2105], "realli": [1, 7, 64, 1185, 1187, 2016, 2042, 2068, 2101], "y": [1, 11, 14, 23, 35, 44, 52, 53, 55, 60, 61, 64, 66, 69, 71, 72, 73, 74, 75, 76, 77, 78, 488, 619, 622, 783, 887, 889, 890, 891, 901, 902, 903, 906, 908, 909, 911, 912, 913, 914, 915, 916, 918, 919, 938, 957, 963, 990, 997, 1050, 1051, 1091, 1102, 1108, 1112, 1165, 1167, 1170, 1171, 1172, 1176, 1177, 1212, 1279, 1284, 1288, 1341, 1350, 1351, 1374, 1438, 1439, 1440, 1441, 1442, 1443, 1459, 1461, 1480, 1485, 1486, 1488, 
1489, 1490, 1492, 1498, 1513, 1517, 1518, 1529, 1530, 1531, 1533, 1535, 1541, 1558, 1559, 1566, 1569, 1575, 1576, 1603, 1632, 1649, 1673, 1706, 1715, 1736, 1769, 1830, 1848, 1850, 1851, 1904, 1918, 1945, 1949, 1950, 1976, 1979, 2013, 2014, 2015, 2016, 2020, 2025, 2033, 2034, 2040, 2042, 2043, 2044, 2045, 2049, 2052, 2053, 2056, 2063, 2065, 2068, 2070, 2075, 2077, 2081, 2085, 2089, 2090, 2091, 2096, 2097, 2098, 2099, 2102, 2110, 2111], "column": [1, 3, 24, 28, 34, 193, 262, 586, 589, 938, 973, 993, 997, 1091, 1121, 1149, 1150, 1238, 1308, 1309, 1312, 1328, 1331, 1336, 1340, 1345, 1412, 1535, 1578, 1623, 1624, 1730, 1816, 1826, 1907, 1909, 1910, 1911, 1913, 1914, 1927, 1928, 1949, 1953, 1955, 1970, 2040, 2052, 2080, 2085], "were": [1, 2, 3, 18, 19, 28, 37, 47, 51, 55, 60, 63, 64, 152, 323, 337, 473, 488, 857, 892, 894, 896, 908, 909, 917, 938, 1053, 1166, 1177, 1280, 1344, 1597, 1716, 1721, 1743, 1760, 1862, 1866, 1938, 1945, 1976, 2015, 2023, 2041, 2045, 2048, 2060, 2065, 2068, 2096, 2099, 2101, 2102, 2107, 2110, 2111], "remov": [1, 3, 23, 24, 28, 30, 35, 48, 51, 55, 64, 489, 490, 515, 548, 559, 699, 799, 817, 818, 819, 862, 927, 928, 931, 938, 942, 965, 966, 1053, 1166, 1180, 1217, 1269, 1272, 1276, 1362, 1363, 1367, 1447, 1448, 1449, 1526, 1527, 1536, 1634, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1731, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1750, 1751, 1752, 1753, 1755, 1756, 1766, 1768, 1771, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1826, 1842, 1864, 1918, 1927, 1951, 1958, 1960, 2011, 2012, 2026, 2036, 2042, 2048, 2055, 2060, 2065, 2066, 2068, 2077, 2082, 2087, 2099, 2102, 2104, 2107, 2110], "breviti": [1, 64, 938, 997, 2065], "print": [1, 3, 5, 12, 18, 23, 25, 28, 33, 37, 39, 44, 45, 48, 52, 53, 55, 60, 63, 722, 730, 731, 736, 747, 748, 758, 766, 925, 927, 928, 938, 1165, 1166, 1175, 1187, 1272, 1276, 1279, 1280, 1284, 1362, 1443, 1487, 1513, 1526, 1538, 1539, 1614, 1671, 1674, 1675, 1736, 1746, 1747, 1748, 1749, 1752, 1762, 1766, 1778, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1874, 1968, 2011, 2013, 2014, 2024, 2028, 2042, 2044, 2045, 2048, 2055, 2056, 2063, 2064, 2065, 2066, 2068, 2069, 2075, 2085, 2086, 2090, 2091, 2096, 2097, 2099, 2100, 2102, 2104, 2111], "tabl": [1, 3, 28, 64, 938, 977, 1468, 1623, 2014, 2016, 2048, 2067, 2068, 2069, 2070, 2075, 2080, 2087, 2101, 2103, 2105, 2108, 2111], "sort_bi": [1, 938, 2069], "self_cpu_time_tot": [1, 938, 2069], "avg": [1, 28, 938, 1781], "mul": [1, 18, 52, 64, 66, 69, 72, 74, 76, 77, 79, 422, 749, 750, 759, 938, 1249, 1413, 2013, 2014, 2016, 2033, 2035, 2048, 2065, 2066, 2072, 2076, 2080, 2083, 2090, 2097, 2098, 2099, 2106, 2110], "32": [1, 2, 24, 28, 52, 90, 748, 1162, 1269, 1282, 1362, 1436, 1464, 1465, 1466, 1470, 1471, 1473, 1474, 1494, 1514, 1520, 1531, 1570, 1571, 1572, 1573, 1574, 1586, 1597, 1627, 1628, 1634, 1684, 2034, 2035, 2055, 2059, 2063, 2065, 2073, 2080, 2083, 2085, 2086, 2099, 2100, 2105], "048m": 1, "200": [1, 32, 1272, 1350, 1526, 1828, 2016, 2080, 2099, 2111], "27": [1, 619, 1340, 1801, 1823, 1970, 2065], "041m": 1, "powbackward0": [1, 938], "9": [1, 7, 23, 24, 28, 36, 64, 66, 71, 315, 317, 319, 323, 403, 404, 473, 515, 562, 583, 585, 699, 747, 748, 857, 959, 965, 969, 973, 997, 1090, 1091, 1106, 1109, 1130, 1131, 1140, 1141, 1151, 1154, 1155, 1158, 1159, 1226, 1236, 1302, 1304, 1325, 1327, 1329, 1340, 1342, 1421, 1422, 1428, 1429, 1432, 1433, 1446, 1448, 1468, 1469, 1522, 1523, 1538, 1539, 1549, 1552, 1623, 1624, 
1671, 1674, 1675, 1771, 1772, 1781, 1783, 1784, 1785, 1786, 1787, 1793, 1796, 1797, 1798, 1802, 1808, 1810, 1811, 1815, 1819, 1855, 1862, 1871, 1874, 1884, 1889, 1899, 1907, 1915, 1942, 1943, 1947, 1949, 1958, 1970, 1977, 2013, 2017, 2023, 2050, 2055, 2060, 2061, 2065, 2066, 2067, 2068, 2075, 2080, 2081, 2083, 2087, 2099], "727m": 1, "55": [1, 1479, 2065], "483m": 1, "accumulategrad": [1, 938, 2042], "148m": 1, "graphroot": [1, 938], "691": 1, "816u": 1, "emit": [1, 14, 39, 44, 64, 682, 1187, 1288, 1876, 2016, 2022, 2063, 2074, 2080], "nvtx": [1, 4, 2012], "program": [1, 3, 4, 8, 12, 18, 23, 27, 28, 39, 48, 51, 52, 53, 55, 64, 619, 931, 990, 991, 1056, 1058, 1166, 1200, 1870, 2013, 2015, 2017, 2030, 2042, 2045, 2050, 2054, 2056, 2057, 2059, 2060, 2061, 2063, 2070, 2085, 2092, 2096, 2098, 2099, 2100, 2101, 2102, 2104, 2107, 2109, 2111, 2113], "off": [1, 5, 7, 8, 14, 19, 27, 28, 37, 55, 64, 83, 919, 920, 944, 953, 1069, 1166, 1230, 1231, 1269, 1288, 1336, 1435, 1436, 1437, 1439, 1519, 1520, 1521, 1632, 1671, 2044, 2045, 2047, 2050, 2054, 2058, 2059, 2069, 2070, 2071, 2075, 2076, 2098, 2101, 2108], "o": [1, 24, 28, 29, 30, 35, 37, 48, 50, 51, 52, 64, 1178, 1182, 1184, 1272, 1344, 1388, 1389, 1390, 1497, 1526, 1532, 1716, 1858, 2011, 2017, 2032, 2042, 2045, 2047, 2050, 2061, 2068, 2075, 2093], "trace_nam": 1, "regular": [1, 3, 4, 28, 37, 48, 55, 58, 64, 977, 1050, 1051, 1165, 1187, 1201, 1320, 1321, 1445, 1463, 1464, 1465, 1466, 1470, 1511, 1525, 1526, 1527, 1528, 1536, 1537, 1625, 1663, 1706, 1718, 1719, 1766, 1784, 1793, 2016, 2020, 2034, 2035, 2045, 2048, 2049, 2054, 2055, 2063, 2065, 2068, 2070, 2073, 2075, 2080, 2081, 2089, 2091], "command": [1, 4, 28, 37, 45, 48, 64, 2045, 2053, 2057, 2061, 2069, 2076, 2093, 2105, 2108], "unfortun": [1, 9, 23, 52, 1716, 2042, 2048, 2100, 2101], "wai": [1, 3, 5, 7, 8, 9, 14, 23, 24, 28, 30, 32, 35, 40, 44, 52, 55, 57, 58, 63, 64, 152, 582, 682, 798, 821, 824, 827, 864, 865, 893, 896, 908, 909, 912, 914, 915, 916, 917, 922, 929, 1129, 1131, 1166, 1187, 1200, 1201, 1226, 1272, 1302, 1303, 1318, 1333, 1438, 1469, 1488, 1489, 1490, 1496, 1508, 1509, 1510, 1526, 1555, 1572, 1574, 1624, 1677, 1711, 1712, 1716, 1734, 1764, 1797, 1801, 1808, 2013, 2015, 2016, 2022, 2023, 2024, 2029, 2032, 2033, 2034, 2042, 2044, 2045, 2048, 2049, 2050, 2051, 2052, 2055, 2057, 2058, 2061, 2063, 2065, 2067, 2068, 2070, 2075, 2076, 2080, 2084, 2086, 2091, 2096, 2097, 2098, 2099, 2101, 2102, 2109, 2110, 2111, 2113], "disk": [1, 19, 23, 30, 1344, 1858, 2042, 2055, 2068, 2085], "annot": [1, 33, 34, 40, 45, 64, 932, 1270, 1278, 1284, 2013, 2015, 2017, 2020, 2065, 2075, 2102, 2103, 2109], "wait": [1, 18, 28, 37, 45, 47, 63, 488, 826, 1006, 1010, 1011, 1013, 1084, 1196, 1275, 1385, 1388, 1389, 1394, 1410, 1810, 1981, 1982, 2008, 2014, 2016, 2025, 2032, 2044, 2045, 2047, 2066, 2069, 2075, 2102, 2109, 2113, 2115], "either": [1, 8, 9, 14, 17, 18, 19, 23, 24, 28, 30, 32, 34, 35, 37, 40, 45, 47, 48, 50, 52, 53, 55, 60, 63, 64, 156, 223, 315, 323, 515, 517, 619, 682, 734, 740, 741, 742, 747, 748, 758, 766, 781, 787, 793, 861, 893, 895, 903, 904, 905, 906, 907, 908, 909, 918, 922, 923, 942, 962, 975, 1021, 1108, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1162, 1166, 1195, 1226, 1249, 1265, 1272, 1288, 1328, 1343, 1344, 1359, 1372, 1374, 1428, 1429, 1432, 1433, 1436, 1437, 1438, 1439, 1453, 1454, 1455, 1457, 1458, 1459, 1461, 1469, 1479, 1485, 1492, 1494, 1495, 1517, 1518, 1520, 1521, 1526, 1529, 1530, 1531, 1532, 
1533, 1540, 1542, 1544, 1558, 1559, 1574, 1575, 1577, 1579, 1580, 1581, 1604, 1605, 1615, 1643, 1644, 1668, 1676, 1703, 1710, 1716, 1749, 1771, 1783, 1784, 1786, 1793, 1796, 1808, 1823, 1825, 1867, 1870, 1923, 1927, 1979, 2011, 2013, 2015, 2016, 2018, 2020, 2022, 2034, 2040, 2042, 2043, 2045, 2048, 2049, 2050, 2052, 2054, 2055, 2057, 2059, 2060, 2063, 2065, 2067, 2068, 2070, 2076, 2077, 2080, 2081, 2084, 2087, 2098, 2103, 2116], "nvidia": [1, 14, 28, 1019, 1032, 1060, 1066, 1071, 1085, 1086, 1964, 2012, 2045, 2050, 2053, 2059, 2061, 2070, 2080, 2089, 2092, 2108, 2109, 2111], "visual": [1, 64, 1374, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1519, 1520, 1521, 1578, 2012, 2045, 2055, 2061, 2063, 2069, 2085, 2102, 2109, 2111], "nvvp": 1, "timelin": [1, 4, 1388, 1389, 2012, 2069], "load_nvprof": 1, "load": [1, 11, 14, 15, 24, 30, 32, 33, 52, 55, 62, 417, 839, 865, 934, 975, 1009, 1272, 1276, 1283, 1288, 1469, 1526, 1706, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1789, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1858, 2012, 2013, 2024, 2025, 2026, 2027, 2034, 2042, 2045, 2054, 2055, 2059, 2061, 2063, 2065, 2075, 2085, 2093, 2104, 2105, 2109, 2111], "repl": [1, 2104], "append": [1, 3, 30, 63, 64, 66, 71, 231, 256, 609, 763, 765, 767, 1053, 1098, 1100, 1367, 1478, 1497, 1528, 1537, 1542, 1544, 1555, 1716, 2014, 2015, 2016, 2042, 2044, 2045, 2057, 2061, 2065, 2066, 2080, 2085, 2110], "size": [1, 2, 3, 7, 11, 18, 20, 23, 24, 28, 30, 34, 35, 37, 47, 52, 53, 55, 58, 64, 66, 67, 72, 74, 75, 80, 140, 210, 244, 256, 257, 315, 317, 323, 341, 447, 448, 449, 451, 489, 495, 500, 501, 502, 515, 517, 519, 522, 525, 546, 547, 548, 568, 583, 585, 586, 587, 589, 590, 608, 609, 619, 620, 682, 692, 693, 695, 697, 698, 699, 701, 722, 730, 731, 737, 743, 744, 745, 747, 748, 758, 766, 768, 769, 770, 771, 781, 787, 788, 789, 798, 861, 868, 880, 881, 883, 895, 908, 909, 911, 912, 913, 914, 915, 916, 935, 943, 944, 946, 953, 955, 956, 957, 958, 959, 962, 966, 969, 975, 997, 1007, 1015, 1021, 1022, 1024, 1064, 1089, 1090, 1096, 1099, 1106, 1108, 1109, 1110, 1111, 1114, 1125, 1126, 1127, 1129, 1130, 1131, 1133, 1134, 1137, 1138, 1139, 1140, 1141, 1143, 1144, 1145, 1160, 1162, 1163, 1164, 1171, 1172, 1177, 1186, 1187, 1191, 1197, 1201, 1213, 1230, 1231, 1235, 1236, 1247, 1249, 1269, 1272, 1294, 1303, 1312, 1317, 1325, 1329, 1333, 1336, 1338, 1339, 1340, 1342, 1343, 1345, 1353, 1359, 1360, 1362, 1363, 1367, 1370, 1372, 1373, 1374, 1375, 1378, 1379, 1380, 1381, 1383, 1412, 1414, 1417, 1420, 1422, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1468, 1469, 1471, 1472, 1473, 1474, 1477, 1479, 1480, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1514, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1532, 1533, 1538, 1539, 1541, 1542, 1548, 1549, 1550, 1551, 1552, 1553, 1558, 1566, 1570, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1587, 1588, 1589, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1599, 1600, 1601, 1605, 1610, 1611, 1612, 1615, 1616, 1623, 1624, 1627, 1628, 1632, 1643, 1650, 1657, 1658, 1659, 1668, 1669, 1671, 1674, 1675, 1677, 1684, 1703, 1704, 1705, 1706, 1715, 1716, 1732, 1747, 1757, 1758, 1759, 1760, 1761, 1763, 1765, 1768, 1770, 1772, 1775, 
1776, 1786, 1795, 1814, 1815, 1816, 1817, 1819, 1824, 1825, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1842, 1848, 1853, 1862, 1865, 1883, 1904, 1905, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1918, 1920, 1921, 1922, 1923, 1926, 1927, 1928, 1942, 1943, 1944, 1949, 1951, 1959, 1960, 1961, 1962, 1963, 1971, 1972, 1974, 1975, 1976, 1977, 2009, 2010, 2012, 2013, 2014, 2016, 2020, 2024, 2026, 2029, 2033, 2034, 2041, 2042, 2043, 2045, 2047, 2048, 2049, 2050, 2052, 2053, 2054, 2059, 2060, 2065, 2066, 2069, 2070, 2072, 2075, 2080, 2082, 2083, 2085, 2086, 2093, 2096, 2098, 2099, 2100, 2101, 2102, 2105, 2106, 2109, 2111, 2113], "format": [1, 3, 20, 21, 24, 28, 30, 40, 50, 53, 64, 83, 157, 172, 174, 177, 180, 181, 182, 197, 208, 211, 242, 269, 299, 327, 333, 395, 501, 502, 527, 582, 585, 586, 587, 588, 589, 590, 682, 737, 762, 794, 795, 972, 1053, 1068, 1108, 1109, 1110, 1164, 1180, 1187, 1191, 1269, 1272, 1286, 1315, 1445, 1469, 1477, 1496, 1526, 1542, 1574, 1716, 1723, 1724, 1758, 1760, 1761, 1776, 1778, 1836, 1838, 1840, 1858, 1900, 1904, 1909, 1910, 1911, 1912, 1913, 1914, 1923, 2010, 2011, 2013, 2016, 2017, 2022, 2036, 2045, 2048, 2055, 2062, 2063, 2065, 2066, 2069, 2070, 2075, 2076, 2080, 2083, 2085, 2087, 2109, 2111], "arg0": [1, 28, 2063], "arg1": [1, 28, 45, 46, 48, 53, 2063], "repres": [1, 8, 11, 23, 24, 28, 30, 33, 34, 35, 36, 37, 40, 41, 47, 50, 52, 53, 55, 64, 82, 84, 152, 235, 762, 794, 795, 798, 892, 908, 909, 977, 984, 993, 997, 1008, 1040, 1043, 1108, 1129, 1131, 1136, 1138, 1139, 1140, 1141, 1166, 1183, 1198, 1226, 1233, 1261, 1262, 1265, 1268, 1271, 1288, 1289, 1320, 1362, 1430, 1445, 1526, 1532, 1574, 1576, 1578, 1586, 1587, 1588, 1589, 1716, 1729, 1733, 1738, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1757, 1767, 1778, 1802, 1808, 1814, 1816, 1827, 1870, 1909, 1910, 1911, 1913, 1914, 1923, 1927, 1960, 1961, 1967, 1974, 1975, 1994, 2013, 2015, 2016, 2020, 2023, 2024, 2029, 2035, 2042, 2048, 2049, 2052, 2054, 2055, 2058, 2062, 2063, 2065, 2070, 2071, 2073, 2075, 2080, 2083, 2098, 2099, 2101, 2113, 2116], "order": [1, 3, 5, 28, 29, 30, 32, 33, 35, 48, 52, 55, 56, 61, 63, 64, 152, 193, 210, 235, 315, 317, 319, 323, 333, 404, 489, 490, 682, 794, 795, 804, 857, 879, 896, 898, 917, 927, 928, 942, 954, 965, 1011, 1053, 1064, 1096, 1099, 1100, 1108, 1126, 1128, 1129, 1135, 1147, 1148, 1149, 1150, 1167, 1177, 1185, 1226, 1272, 1292, 1303, 1308, 1309, 1310, 1311, 1318, 1325, 1328, 1329, 1336, 1337, 1340, 1342, 1345, 1374, 1380, 1395, 1412, 1430, 1439, 1445, 1462, 1526, 1527, 1536, 1554, 1555, 1597, 1605, 1632, 1657, 1658, 1659, 1684, 1706, 1716, 1733, 1743, 1757, 1758, 1759, 1760, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1808, 1814, 1817, 1827, 1862, 1865, 1889, 1899, 1927, 1946, 1953, 1955, 1960, 1964, 1970, 1976, 2011, 2012, 2013, 2014, 2016, 2017, 2020, 2032, 2033, 2034, 2035, 2040, 2043, 2045, 2046, 2047, 2048, 2049, 2051, 2052, 2055, 2058, 2060, 2063, 2065, 2067, 2068, 2070, 2072, 2075, 2076, 2077, 2080, 2081, 2083, 2087, 2091, 2093, 2096, 2097, 2098, 2099, 2102, 2105, 2109, 2111], "backend": [1, 14, 46, 51, 53, 55, 743, 744, 745, 794, 795, 835, 858, 859, 862, 864, 865, 911, 917, 975, 977, 984, 1033, 1064, 1108, 1166, 1186, 1191, 1286, 1309, 1316, 1318, 1336, 1388, 1389, 1390, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1585, 1590, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1632, 1671, 1684, 1716, 1778, 1779, 1870, 1965, 1967, 2012, 2014, 2015, 2026, 2030, 2031, 2044, 
2045, 2048, 2058, 2059, 2065, 2071, 2073, 2080, 2100, 2102, 2103, 2104, 2105, 2106, 2108, 2118], "side": [1, 14, 28, 47, 50, 52, 53, 60, 64, 66, 69, 488, 770, 771, 774, 775, 776, 968, 1042, 1102, 1124, 1125, 1127, 1130, 1131, 1136, 1138, 1139, 1140, 1141, 1144, 1269, 1270, 1317, 1322, 1333, 1335, 1435, 1436, 1437, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1502, 1503, 1504, 1505, 1506, 1507, 1519, 1520, 1521, 1548, 1549, 1551, 1552, 1553, 1578, 1582, 1583, 1584, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1657, 1658, 1659, 1671, 1802, 1831, 1832, 1862, 1923, 1951, 2011, 2013, 2014, 2016, 2041, 2042, 2045, 2048, 2052, 2075, 2093, 2096, 2109], "creation": [1, 2, 23, 28, 30, 64, 488, 747, 748, 758, 766, 826, 1010, 1166, 1716, 1717, 1738, 1981, 2013, 2017, 2029, 2032, 2042, 2045, 2075, 2077, 2080, 2086, 2098], "warmup": [1, 3, 1053, 2045, 2069, 2096, 2102, 2109], "correl": [1, 35, 48, 993, 1453, 1454, 1455, 1456, 1457, 1458, 1464, 1465, 1466, 1470], "view": [1, 7, 8, 11, 18, 23, 24, 30, 32, 37, 53, 55, 64, 66, 75, 81, 224, 256, 437, 499, 500, 501, 609, 620, 694, 699, 762, 881, 889, 890, 891, 902, 918, 942, 957, 969, 990, 991, 1098, 1099, 1106, 1147, 1148, 1149, 1150, 1166, 1236, 1272, 1328, 1339, 1389, 1446, 1469, 1477, 1496, 1526, 1542, 1578, 1579, 1580, 1581, 1624, 1670, 1716, 1721, 1817, 1849, 1853, 1854, 1858, 1864, 1865, 1895, 1915, 1926, 1938, 1943, 1947, 1959, 1974, 1975, 1977, 2012, 2013, 2014, 2032, 2034, 2035, 2043, 2048, 2051, 2063, 2066, 2072, 2075, 2077, 2080, 2082, 2083, 2086, 2101, 2106, 2108, 2110, 2111], "difficult": [1, 7, 9, 33, 52, 60, 977, 1177, 1976, 2101, 2102, 2107, 2109, 2111], "eas": [1, 64, 2044, 2048, 2053, 2098, 2110], "sequenc": [1, 23, 30, 32, 33, 34, 35, 52, 55, 152, 568, 737, 762, 816, 883, 896, 917, 959, 961, 962, 965, 973, 1013, 1020, 1021, 1024, 1093, 1107, 1109, 1177, 1234, 1235, 1238, 1288, 1328, 1374, 1430, 1440, 1445, 1453, 1462, 1469, 1477, 1496, 1532, 1542, 1555, 1570, 1571, 1572, 1573, 1574, 1587, 1624, 1684, 1733, 1736, 1743, 1757, 1758, 1759, 1760, 1761, 1762, 1763, 1772, 1775, 1779, 1794, 1798, 1833, 1835, 1839, 1862, 1920, 1923, 1962, 1976, 1978, 1982, 2009, 2013, 2014, 2015, 2020, 2035, 2045, 2050, 2051, 2063, 2065, 2068, 2075, 2079, 2086, 2087, 2096, 2098, 2099, 2100, 2102, 2110, 2111], "gener": [1, 2, 3, 7, 8, 14, 23, 24, 28, 33, 35, 38, 40, 41, 45, 48, 52, 55, 56, 57, 65, 66, 71, 75, 86, 155, 156, 175, 260, 288, 379, 423, 456, 483, 610, 682, 923, 945, 975, 1039, 1042, 1050, 1051, 1054, 1055, 1075, 1076, 1078, 1079, 1080, 1108, 1130, 1131, 1166, 1177, 1181, 1187, 1197, 1225, 1230, 1248, 1276, 1282, 1293, 1304, 1309, 1318, 1335, 1336, 1340, 1345, 1365, 1373, 1386, 1387, 1388, 1389, 1390, 1391, 1393, 1412, 1461, 1472, 1486, 1570, 1578, 1579, 1588, 1589, 1597, 1623, 1641, 1706, 1731, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1765, 1768, 1772, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1814, 1816, 1819, 1833, 1835, 1837, 1838, 1839, 1841, 1863, 1875, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1928, 1944, 1948, 1965, 1967, 1970, 1976, 1993, 1999, 2000, 2001, 2002, 2004, 2005, 2012, 2013, 2014, 2015, 2016, 2017, 2022, 2026, 2033, 2040, 2042, 2045, 2048, 2050, 2051, 2052, 2054, 2055, 2057, 2061, 2063, 2065, 2066, 2067, 2068, 2069, 2074, 2075, 2076, 2080, 2083, 2085, 2087, 2090, 2092, 2093, 2097, 2098, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2109, 2110, 2111, 2112], "seq": [1, 607, 762, 962, 974, 1374, 1477, 1496, 1532, 1542, 1570, 1572, 
1574, 1760, 1958], "n": [1, 3, 19, 28, 30, 34, 35, 37, 40, 45, 48, 53, 64, 231, 262, 408, 467, 468, 488, 688, 691, 692, 693, 737, 760, 762, 783, 880, 935, 941, 943, 944, 946, 953, 955, 959, 965, 966, 967, 968, 997, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1089, 1090, 1100, 1121, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1148, 1169, 1172, 1177, 1185, 1191, 1197, 1209, 1226, 1230, 1231, 1235, 1269, 1272, 1284, 1288, 1289, 1292, 1293, 1302, 1303, 1304, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1345, 1353, 1362, 1367, 1369, 1374, 1377, 1414, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1464, 1465, 1466, 1468, 1469, 1470, 1472, 1473, 1474, 1476, 1477, 1478, 1479, 1480, 1485, 1486, 1488, 1489, 1490, 1492, 1493, 1494, 1495, 1496, 1498, 1508, 1509, 1510, 1514, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1529, 1530, 1531, 1532, 1533, 1535, 1541, 1542, 1544, 1548, 1549, 1550, 1551, 1552, 1553, 1558, 1560, 1561, 1562, 1566, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1597, 1603, 1615, 1616, 1624, 1628, 1632, 1668, 1671, 1673, 1684, 1715, 1716, 1730, 1742, 1751, 1770, 1814, 1815, 1816, 1821, 1826, 1827, 1833, 1839, 1841, 1854, 1862, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1904, 1905, 1921, 1922, 1923, 1927, 1928, 1943, 1944, 1949, 1953, 1955, 1970, 1971, 1972, 1973, 1976, 2014, 2016, 2020, 2033, 2034, 2040, 2042, 2045, 2048, 2050, 2052, 2057, 2068, 2069, 2070, 2071, 2080, 2081, 2085, 2086, 2087, 2099, 2102, 2106], "counter": [1, 28, 918, 929, 1046, 1064, 1462, 2029, 2032, 2042], "increment": [1, 28, 47, 52, 929, 931, 1162, 1235, 1276, 1462, 2013, 2015, 2042, 2063, 2075, 2099], "object": [1, 3, 5, 6, 8, 14, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 40, 47, 49, 50, 52, 53, 55, 63, 64, 66, 68, 90, 208, 211, 417, 605, 795, 796, 803, 857, 864, 883, 893, 894, 895, 903, 908, 909, 935, 937, 942, 975, 1031, 1042, 1083, 1108, 1147, 1159, 1162, 1167, 1168, 1170, 1171, 1172, 1176, 1178, 1182, 1187, 1204, 1205, 1258, 1259, 1272, 1279, 1280, 1283, 1284, 1288, 1289, 1304, 1325, 1329, 1342, 1344, 1345, 1365, 1462, 1464, 1465, 1466, 1470, 1488, 1489, 1490, 1526, 1536, 1566, 1716, 1734, 1749, 1758, 1759, 1762, 1763, 1764, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1789, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1858, 1931, 1932, 1936, 1965, 1968, 1969, 1988, 2011, 2015, 2016, 2017, 2020, 2024, 2026, 2027, 2032, 2036, 2042, 2044, 2045, 2048, 2049, 2050, 2052, 2054, 2057, 2059, 2060, 2061, 2063, 2065, 2067, 2069, 2070, 2072, 2073, 2074, 2075, 2077, 2082, 2083, 2085, 2087, 2097, 2099, 2100, 2101, 2107, 2110, 2112, 2113, 2116], "stash": [1, 5, 2045, 2048], "associ": [1, 8, 9, 28, 30, 41, 49, 52, 53, 55, 900, 901, 975, 1016, 1067, 1068, 1070, 1221, 1234, 1235, 1272, 1322, 1333, 1335, 1344, 1445, 1526, 1536, 1616, 1716, 1731, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1883, 1885, 2013, 2016, 2022, 2028, 2033, 2034, 2042, 2052, 2055, 2058, 2060, 2063, 2068, 2069, 2075, 2076, 2082, 2083, 2086, 2099, 2101, 2102, 2111, 2113, 2114], 
"tell": [1, 7, 64, 498, 977, 1199, 1200, 1209, 1273, 1280, 1344, 1779, 2013, 2020, 2023, 2042, 2048, 2049, 2068, 2099, 2100, 2113], "top": [1, 3, 7, 8, 12, 23, 30, 34, 35, 40, 53, 55, 59, 64, 749, 935, 1438, 1439, 1461, 1485, 1492, 1517, 1533, 1576, 1579, 1632, 1738, 1742, 1744, 1890, 1946, 1966, 2017, 2020, 2023, 2032, 2048, 2063, 2067, 2099, 2100, 2108, 2109], "m": [1, 4, 8, 19, 24, 28, 34, 35, 44, 48, 52, 64, 688, 691, 692, 693, 722, 730, 731, 740, 741, 742, 743, 744, 745, 747, 748, 758, 760, 766, 816, 843, 863, 864, 865, 943, 955, 959, 963, 1121, 1175, 1235, 1270, 1272, 1277, 1279, 1283, 1284, 1290, 1293, 1304, 1312, 1318, 1319, 1320, 1321, 1325, 1326, 1327, 1329, 1330, 1331, 1336, 1337, 1338, 1339, 1345, 1362, 1363, 1367, 1377, 1412, 1414, 1427, 1428, 1429, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1440, 1441, 1442, 1443, 1444, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1457, 1458, 1463, 1464, 1465, 1466, 1467, 1468, 1470, 1471, 1473, 1474, 1475, 1476, 1480, 1481, 1482, 1483, 1484, 1487, 1488, 1489, 1490, 1493, 1494, 1495, 1512, 1513, 1515, 1516, 1519, 1520, 1521, 1525, 1526, 1534, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1556, 1557, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1577, 1579, 1580, 1581, 1582, 1583, 1584, 1671, 1673, 1716, 1730, 1732, 1736, 1746, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1764, 1765, 1768, 1814, 1815, 1816, 1826, 1833, 1862, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1904, 1905, 1923, 1927, 1928, 1944, 1951, 2013, 2014, 2015, 2016, 2024, 2045, 2048, 2050, 2052, 2055, 2057, 2060, 2070, 2080, 2091, 2092, 2095, 2102], "By": [1, 2, 3, 5, 14, 19, 23, 28, 34, 39, 44, 52, 55, 64, 447, 448, 449, 450, 451, 883, 975, 997, 1013, 1056, 1058, 1091, 1103, 1126, 1128, 1129, 1130, 1131, 1139, 1140, 1141, 1167, 1168, 1170, 1171, 1177, 1226, 1234, 1235, 1276, 1325, 1336, 1344, 1373, 1378, 1416, 1438, 1439, 1440, 1441, 1442, 1459, 1461, 1479, 1485, 1488, 1489, 1490, 1491, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1566, 1575, 1604, 1605, 1615, 1644, 1668, 1676, 1732, 1768, 1816, 1827, 1848, 1902, 1949, 1976, 1982, 2011, 2015, 2020, 2023, 2042, 2045, 2048, 2050, 2051, 2055, 2057, 2058, 2060, 2063, 2065, 2067, 2074, 2075, 2080, 2087, 2099, 2100, 2102, 2105, 2107, 2111], "compar": [1, 3, 5, 14, 23, 52, 55, 64, 87, 696, 868, 879, 917, 918, 919, 977, 1064, 1112, 1113, 1136, 1137, 1138, 1142, 1143, 1144, 1145, 1154, 1155, 1187, 1215, 1229, 1261, 1297, 1361, 1371, 1376, 1423, 1532, 1716, 1769, 1928, 1960, 2012, 2016, 2029, 2045, 2048, 2052, 2058, 2061, 2070, 2071, 2075, 2080, 2085, 2087, 2090, 2091, 2096, 2103, 2108], "down": [1, 7, 14, 23, 35, 37, 45, 46, 48, 64, 781, 1064, 1103, 1150, 1156, 1282, 1318, 1643, 1827, 1855, 2048, 2054, 2057, 2065, 2075, 2077, 2080, 2085, 2105, 2111], "irrelev": [1, 3, 2017], "simpli": [1, 3, 14, 23, 28, 30, 34, 35, 40, 48, 52, 63, 64, 866, 1166, 1259, 1270, 1434, 1463, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1870, 2013, 2015, 2042, 2049, 2052, 2055, 2056, 2069, 2080, 2098, 2099, 2100, 2102, 2107], "earlier": [1, 6, 33, 55, 1723, 1724, 1927, 2042, 2045, 2050, 2051, 2054, 2060, 2065, 2069], "hand": [1, 4, 33, 55, 60, 64, 968, 1102, 1108, 1270, 1317, 1322, 1333, 1335, 1434, 1536, 1555, 1951, 1964, 2013, 2015, 2016, 2042, 2052, 2055, 2057, 2060, 2068, 2080, 2099, 2101, 2102], "underwai": [1, 1048, 2045], "up": [1, 6, 7, 8, 9, 14, 19, 23, 24, 28, 32, 33, 35, 37, 40, 44, 46, 47, 50, 51, 52, 55, 58, 64, 781, 787, 893, 908, 909, 931, 975, 
1050, 1051, 1053, 1091, 1108, 1137, 1138, 1143, 1145, 1150, 1166, 1185, 1211, 1263, 1276, 1282, 1285, 1286, 1331, 1430, 1445, 1472, 1532, 1572, 1574, 1578, 1579, 1597, 1623, 1632, 1643, 1703, 1716, 1723, 1724, 1797, 1833, 1855, 1960, 1961, 2011, 2013, 2015, 2023, 2029, 2032, 2033, 2034, 2042, 2043, 2044, 2045, 2050, 2051, 2052, 2053, 2055, 2058, 2065, 2068, 2069, 2070, 2075, 2076, 2087, 2096, 2099, 2100, 2101, 2107, 2109, 2111, 2113, 2115], "nonzero": [1, 55, 1197, 1269, 1354, 1356, 1357, 1362, 1979, 2014, 2020, 2024, 2066, 2098, 2101, 2106], "themselv": [1, 9, 35, 47, 55, 795, 844, 1946, 2045, 2068, 2112], "later": [1, 2, 3, 7, 22, 24, 28, 30, 33, 55, 63, 64, 90, 488, 747, 748, 758, 766, 934, 1042, 1196, 1344, 1496, 1519, 1520, 1521, 1542, 1556, 1657, 1658, 1659, 1687, 1716, 1927, 2013, 2042, 2044, 2047, 2055, 2060, 2069, 2076, 2077, 2093, 2099, 2107], "origin": [1, 5, 11, 12, 18, 19, 23, 24, 28, 30, 33, 40, 52, 53, 55, 58, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 208, 211, 223, 488, 547, 605, 609, 619, 682, 762, 799, 840, 841, 857, 860, 861, 960, 965, 1011, 1128, 1129, 1131, 1135, 1139, 1140, 1141, 1147, 1165, 1175, 1247, 1269, 1280, 1284, 1285, 1288, 1289, 1366, 1380, 1430, 1434, 1446, 1461, 1462, 1477, 1556, 1566, 1615, 1687, 1723, 1724, 1730, 1731, 1732, 1733, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1757, 1760, 1763, 1765, 1766, 1768, 1778, 1793, 1808, 1817, 1853, 1864, 1899, 1915, 1949, 1960, 1961, 2013, 2016, 2022, 2032, 2034, 2042, 2045, 2048, 2050, 2051, 2054, 2057, 2058, 2060, 2065, 2068, 2070, 2071, 2079, 2080, 2082, 2086, 2090, 2091, 2097, 2099, 2100, 2101, 2102, 2103, 2105, 2109, 2110, 2111, 2112], "did": [1, 7, 8, 28, 47, 1200, 1419, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 2015, 2016, 2052, 2060, 2068, 2099, 2104, 2113], "relationship": [1, 9, 33, 44, 52, 64, 972, 1226, 1576, 2042, 2045, 2060, 2068, 2101], "conceptu": [1, 3, 2042, 2049, 2077, 2100], "tag": [1, 3, 7, 28, 52, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 984, 1344, 2011, 2012, 2020, 2054, 2060, 2068, 2070, 2085], "eventu": [1, 7, 47, 55, 982, 2011, 2101], "itt": [1, 2069], "intel": [1, 4, 2012, 2061, 2088, 2108, 2118], "r": [1, 35, 61, 152, 892, 894, 896, 908, 909, 917, 922, 963, 974, 977, 993, 1108, 1167, 1169, 1172, 1177, 1189, 1213, 1216, 1226, 1284, 1302, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1318, 1319, 1322, 1324, 1331, 1333, 1335, 1336, 1345, 1468, 1469, 1478, 1538, 1539, 1558, 1623, 1624, 1674, 1675, 1730, 1795, 1826, 1976, 2013, 2014, 2015, 2042, 2044, 2048, 2052, 2061, 2080, 2083, 2085, 2111], "instrument": [1, 3, 24, 1389, 2012, 2054, 2091], "technologi": [1, 52, 2012, 2063, 2064, 2092], "applic": [1, 2, 9, 19, 33, 35, 38, 47, 490, 801, 812, 813, 814, 815, 903, 906, 908, 975, 1032, 1171, 1187, 1197, 1384, 1461, 1462, 1572, 1574, 1615, 1716, 1964, 1989, 2012, 2020, 2023, 2035, 2042, 2044, 2045, 2047, 2048, 2053, 2054, 2055, 2059, 2065, 2067, 2070, 2075, 2076, 2077, 2080, 2086, 2102, 2113], "across": [1, 8, 14, 19, 20, 23, 24, 28, 30, 32, 33, 34, 37, 48, 55, 60, 64, 619, 737, 821, 877, 936, 1024, 1063, 1064, 1108, 1122, 1177, 1283, 1288, 1374, 1439, 1462, 1472, 1514, 1532, 1534, 1566, 1578, 1602, 1605, 1634, 1650, 1699, 1716, 1743, 1747, 1771, 1858, 1976, 2012, 2013, 2029, 2033, 2035, 2042, 2047, 2050, 2054, 2055, 2057, 2058, 2059, 2067, 2071, 2075, 2076, 2082, 2085, 2091, 2096, 2098, 2102, 2111], "tool": [1, 4, 8, 9, 18, 28, 33, 48, 53, 64, 864, 934, 1389, 
1902, 2011, 2012, 2013, 2015, 2028, 2044, 2045, 2061, 2065, 2068, 2069, 2091, 2099, 2100, 2102, 2105, 2109, 2111, 2113], "With": [1, 18, 23, 28, 35, 55, 63, 741, 742, 743, 744, 745, 787, 1064, 1129, 1130, 1131, 1139, 1140, 1141, 1289, 1440, 1441, 1442, 1454, 1455, 1457, 1458, 1471, 1488, 1489, 1490, 1566, 1577, 1579, 1608, 1611, 1643, 1669, 1703, 1794, 1837, 2017, 2042, 2045, 2048, 2070, 2075, 2080, 2085, 2095, 2099, 2103, 2108, 2111], "abl": [1, 2, 7, 8, 18, 28, 30, 33, 47, 52, 60, 488, 977, 1166, 1186, 1270, 1283, 1570, 2013, 2024, 2028, 2033, 2042, 2048, 2049, 2051, 2060, 2065, 2068, 2070, 2075, 2080, 2087, 2097, 2098, 2099, 2101, 2102, 2103, 2107, 2111, 2113], "labl": 1, "gui": 1, "detect_anomali": 1, "check_nan": 1, "engin": [1, 8, 9, 11, 15, 20, 52, 337, 743, 744, 745, 903, 904, 908, 917, 929, 1177, 1833, 1976, 2042, 2045, 2047, 2048, 2062, 2063, 2075, 2076, 2092, 2102], "traceback": [1, 18, 40, 48, 52, 63, 64, 918, 1187, 1257, 1902, 2015, 2016, 2017, 2024, 2032, 2035, 2048, 2059, 2080, 2087, 2111, 2113], "fail": [1, 7, 19, 20, 28, 30, 37, 39, 40, 45, 46, 47, 48, 51, 52, 63, 64, 66, 71, 75, 76, 77, 86, 488, 922, 923, 1011, 1064, 1197, 1273, 1280, 1283, 1302, 1331, 1336, 1344, 1345, 1362, 1777, 2016, 2018, 2032, 2042, 2045, 2048, 2057, 2058, 2061, 2065, 2068, 2075, 2083, 2097, 2098, 2099, 2100, 2102, 2103, 2111, 2113], "test": [1, 3, 14, 18, 19, 28, 45, 47, 48, 64, 66, 74, 75, 695, 701, 975, 1125, 1127, 1128, 1133, 1134, 1139, 1140, 1141, 1143, 1145, 1187, 1195, 1201, 1204, 1205, 1212, 1258, 1259, 1263, 1264, 1266, 1267, 1778, 1891, 2012, 2013, 2017, 2029, 2032, 2042, 2051, 2052, 2059, 2065, 2067, 2069, 2085, 2091, 2101, 2111, 2112], "slow": [1, 922, 1282, 1286, 1302, 1303, 1721, 1722, 1764, 1960, 2052, 2057, 2085, 2098, 2101, 2111], "import": [1, 2, 3, 5, 7, 9, 12, 14, 17, 18, 20, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 39, 41, 44, 45, 50, 52, 53, 55, 58, 59, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 86, 682, 736, 743, 744, 745, 774, 775, 776, 794, 863, 864, 865, 918, 925, 927, 954, 961, 997, 1159, 1162, 1165, 1166, 1167, 1169, 1170, 1171, 1172, 1173, 1175, 1269, 1270, 1273, 1275, 1277, 1278, 1279, 1280, 1282, 1283, 1284, 1288, 1289, 1290, 1324, 1325, 1328, 1329, 1342, 1374, 1526, 1586, 1590, 1716, 1734, 1736, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1749, 1750, 1751, 1759, 1760, 1761, 1762, 1763, 1764, 1771, 1796, 1797, 1820, 1962, 2013, 2015, 2016, 2017, 2020, 2024, 2029, 2032, 2035, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2054, 2055, 2057, 2058, 2059, 2060, 2063, 2064, 2065, 2070, 2073, 2075, 2076, 2077, 2080, 2083, 2085, 2086, 2087, 2091, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2102, 2104, 2109, 2110, 2111, 2112, 2118], "myfunc": [1, 2049], "inp": [1, 12, 23, 28, 64, 898, 901, 902, 1578, 1716, 2106, 2109, 2112], "clone": [1, 15, 23, 55, 66, 90, 256, 450, 903, 904, 907, 908, 909, 918, 919, 920, 925, 927, 928, 957, 1276, 1468, 1702, 1942, 1951, 2014, 2032, 2035, 2060, 2066, 2080, 2082, 2083, 2087, 2096, 2106], "runtimeerror": [1, 14, 28, 32, 60, 64, 86, 90, 585, 699, 903, 904, 908, 918, 956, 1156, 1257, 1272, 1302, 1303, 1304, 1312, 1313, 1314, 1316, 1320, 1326, 1333, 1338, 1339, 1365, 1412, 1526, 1777, 1864, 1902, 1964, 2013, 2015, 2016, 2033, 2035, 2041, 2043, 2050, 2059, 2061, 2065, 2070, 2074, 2080, 2083, 2096], "run_fn": [1, 5, 860, 866], "recent": [1, 7, 8, 51, 63, 918, 1257, 1902, 2015, 2016, 2024, 2035, 2048, 2058, 2059, 2080, 2087, 2108, 2111], "last": [1, 5, 6, 11, 23, 24, 29, 32, 34, 35, 37, 47, 53, 63, 64, 
317, 619, 694, 762, 880, 918, 942, 944, 953, 959, 969, 1050, 1091, 1096, 1100, 1125, 1127, 1130, 1131, 1133, 1134, 1137, 1138, 1140, 1141, 1143, 1145, 1147, 1187, 1230, 1231, 1235, 1249, 1257, 1269, 1294, 1328, 1331, 1336, 1373, 1378, 1430, 1435, 1436, 1437, 1443, 1461, 1469, 1471, 1477, 1496, 1498, 1513, 1533, 1536, 1540, 1541, 1542, 1555, 1571, 1572, 1578, 1603, 1624, 1633, 1646, 1670, 1671, 1715, 1716, 1717, 1723, 1724, 1770, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1853, 1862, 1870, 1899, 1902, 1909, 1910, 1911, 1913, 1914, 1915, 1923, 1927, 1944, 1946, 1949, 1974, 1975, 2015, 2016, 2024, 2029, 2033, 2035, 2042, 2045, 2048, 2052, 2059, 2063, 2065, 2068, 2069, 2080, 2086, 2087, 2097, 2099, 2105, 2111], "stdin": [1, 28, 918, 1902, 2024, 2035, 2048, 2059, 2080], "instal": [1, 3, 14, 15, 28, 64, 1187, 2011, 2055, 2056, 2063, 2064, 2065, 2068, 2075, 2085, 2093, 2097, 2098, 2099, 2100, 2107], "_tensor": [1, 156], "py": [1, 4, 14, 18, 28, 32, 33, 35, 39, 46, 48, 53, 55, 64, 88, 863, 1716, 2011, 2013, 2016, 2022, 2047, 2052, 2054, 2057, 2065, 2068, 2069, 2070, 2076, 2093, 2095, 2097, 2098, 2099, 2100, 2101, 2102, 2104, 2105, 2108, 2110, 2111, 2113], "93": [1, 619, 1090], "retain_graph": [1, 152, 896, 917, 927, 928, 1177, 1976, 2014, 2041, 2042, 2048, 2075], "90": [1, 1092, 1812, 1854, 2020], "allow_unreach": 1, "76": 1, "_forward_cl": 1, "tmp": [1, 3, 14, 28, 45, 47, 2011, 2045, 2069, 2105], "53": [1, 483], "44": [1, 323, 447, 1111, 1437, 1495, 1521, 1764, 2100], "set_detect_anomali": 1, "behaviour": [1, 695, 696, 701, 1632, 1671, 1808, 1876, 2011, 2058], "interpos": [1, 2048], "grad_fn": [1, 152, 337, 490, 883, 896, 911, 912, 913, 914, 915, 916, 925, 927, 928, 1165, 1731, 1904, 2042, 2048, 2055, 2060], "node": [1, 28, 32, 37, 40, 46, 47, 52, 55, 64, 66, 75, 76, 77, 81, 84, 682, 821, 822, 823, 826, 827, 828, 903, 938, 1053, 1175, 1185, 1187, 1209, 1287, 1445, 1462, 1716, 1778, 2026, 2045, 2063, 2065, 2068, 2075, 2076, 2077, 2089, 2091, 2098, 2099, 2101, 2102, 2108, 2110, 2111], "grad_mod": [1, 918, 919, 920, 2014], "least": [1, 5, 6, 8, 24, 30, 35, 46, 47, 55, 262, 404, 699, 946, 1096, 1098, 1099, 1108, 1149, 1150, 1160, 1186, 1226, 1234, 1235, 1269, 1295, 1318, 1345, 1367, 1416, 1430, 1716, 1758, 2011, 2016, 2034, 2040, 2042, 2043, 2045, 2050, 2051, 2071, 2077, 2080, 2081, 2082, 2087, 2102, 2104, 2111], "intermediari": [1, 14, 35, 903, 906, 908, 2042, 2052, 2099, 2102], "access": [1, 9, 18, 23, 28, 30, 33, 52, 53, 63, 66, 68, 82, 490, 559, 736, 903, 906, 908, 909, 941, 1017, 1272, 1275, 1288, 1335, 1430, 1526, 1706, 1718, 1719, 1731, 1736, 1758, 1768, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1995, 2012, 2013, 2014, 2016, 2017, 2020, 2026, 2028, 2030, 2031, 2032, 2034, 2035, 2042, 2045, 2050, 2052, 2054, 2055, 2063, 2075, 2080, 2083, 2084, 2086, 2093, 2096, 2098, 2099, 2100, 2101, 2102, 2104, 2107, 2116], "isinst": [1, 23, 35, 64, 925, 927, 928, 1259, 1270, 2014, 2016, 2042, 2048, 2055, 2068, 2080, 2102, 2110, 2111], "dir": [1, 1050, 1778, 2011, 2016, 2068], "__call__": [1, 1272, 1526, 2099], "__class__": [1, 66], "__delattr__": 1, "__dir__": 1, "__doc__": 1, "__eq__": 1, "__format__": [1, 2017], "__ge__": 1, "__getattribute__": 1, "__gt__": 1, "__hash__": [1, 2017], "__init_subclass__": 1, "__le__": 1, "__lt__": [1, 2016], "__ne__": 1, "__new__": [1, 2015, 2017], "__reduce__": [1, 2068], "__reduce_ex__": 1, 
"__repr__": [1, 3, 2048], "__setattr__": 1, "__sizeof__": 1, "__str__": [1, 64, 2014, 2016], "__subclasshook__": 1, "_raw_saved_result": 1, "_register_hook_dict": 1, "_saved_result": [1, 2042], "metadata": [1, 3, 12, 30, 40, 41, 52, 66, 74, 75, 929, 989, 1344, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1858, 2020, 2024, 2035, 2042, 2048, 2049, 2060, 2063, 2068, 2069, 2075, 2076, 2080, 2085, 2101, 2110], "next_funct": 1, "register_prehook": [1, 2042], "allclos": [1, 60, 61, 64, 922, 923, 1050, 1130, 1131, 1137, 1138, 1166, 1167, 1169, 1170, 1171, 1172, 1176, 1177, 1320, 1322, 1333, 1335, 1338, 1339, 1364, 1736, 1763, 1826, 1976, 2014, 2020, 2049, 2066, 2080], "pack": [1, 32, 740, 741, 742, 743, 744, 745, 747, 748, 762, 783, 1288, 1289, 1364, 1462, 1477, 1496, 1542, 1757, 1758, 1759, 1760, 2014, 2026, 2035, 2042, 2050, 2061, 2070, 2099], "unpack": [1, 66, 71, 72, 762, 899, 902, 1288, 1320, 1364, 1462, 1760, 1762, 1769, 1779, 2016, 2017, 2042, 2048, 2050], "hook": [1, 29, 32, 55, 489, 490, 750, 759, 903, 906, 908, 927, 928, 931, 1053, 1272, 1462, 1526, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1732, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1749, 1754, 1765, 1768, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 2012, 2036, 2047, 2048, 2054, 2062, 2063, 2068, 2070, 2100, 2101, 2102, 2103], "common": [1, 3, 8, 23, 33, 37, 47, 60, 66, 83, 88, 687, 795, 868, 948, 951, 956, 977, 992, 1103, 1108, 1153, 1154, 1155, 1156, 1197, 1214, 1295, 1344, 1411, 1440, 1441, 1442, 1445, 1566, 1614, 1707, 1708, 1710, 1711, 1712, 1713, 1714, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1846, 1858, 1924, 1967, 2012, 2016, 2021, 2034, 2042, 2045, 2048, 2049, 2050, 2055, 2057, 2060, 2065, 2068, 2079, 2080, 2081, 2084, 2087, 2097, 2098, 2102, 2107, 2109, 2110, 2111, 2113], "trade": [1, 5, 8, 55, 1336, 1439, 1870, 2044, 2050, 2071], "leav": [1, 8, 37, 48, 152, 896, 1180, 1277, 1290, 1734, 1737, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1918, 2013, 2015, 2016, 2024, 2042, 2045, 2063, 2068, 2071, 2099], "especi": [1, 9, 11, 23, 28, 53, 64, 256, 957, 975, 985, 1702, 2015, 2024, 2042, 2048, 2049, 2058, 2060, 2070, 2075, 2080, 2096, 2101, 2102, 2104], "notic": [1, 12, 28, 53, 65, 691, 1128, 1142, 1181, 1367, 1377, 1438, 1579, 1649, 1799, 1800, 1805, 1806, 1812, 1960, 2012, 2013, 2042, 2080, 2099, 2102, 2105, 2107, 2109], "fit": [1, 9, 39, 59, 60, 501, 935, 1269, 1786, 1848, 2045, 2063, 2087, 2111], "evalu": [1, 4, 8, 9, 32, 35, 52, 58, 64, 695, 701, 800, 866, 922, 1064, 1172, 1173, 1185, 1187, 1207, 1208, 1211, 1272, 1430, 1434, 1440, 1441, 1442, 1463, 1480, 1488, 1489, 1490, 1498, 1526, 1540, 1545, 1566, 1676, 1684, 1699, 1734, 1786, 1802, 1833, 2016, 2017, 2048, 2055, 2062, 2063, 2080, 2089, 2092, 2098, 2100, 2101], "saved_tensors_hook": [1, 903, 906, 908, 2042], "pack_hook": [1, 2042], "unpack_hook": [1, 2042], "pair": [1, 28, 30, 34, 35, 47, 50, 52, 619, 737, 868, 942, 963, 997, 1142, 1183, 1187, 1286, 1336, 1374, 1518, 1527, 1532, 1536, 1673, 1795, 1842, 2015, 2016, 2022, 2033, 2042, 2045, 2075, 2076, 2077, 2085, 2087, 2091, 2102, 2109], "retriev": [1, 23, 24, 28, 32, 33, 47, 64, 90, 539, 892, 893, 908, 909, 1185, 1203, 1468, 1472, 1578, 1623, 1716, 1758, 1778, 1902, 2035, 2042, 2054, 2065, 2068, 2075, 2076, 2077, 2093, 2100, 2113], "everytim": 1, "store": [1, 3, 5, 14, 18, 24, 30, 33, 37, 48, 52, 53, 55, 64, 328, 334, 400, 688, 824, 827, 893, 908, 955, 1020, 1022, 
1024, 1189, 1216, 1272, 1278, 1280, 1283, 1303, 1314, 1315, 1316, 1345, 1350, 1362, 1440, 1441, 1442, 1468, 1526, 1555, 1716, 1730, 1733, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1757, 1870, 1907, 2011, 2012, 2013, 2024, 2034, 2035, 2036, 2047, 2048, 2049, 2050, 2054, 2063, 2068, 2070, 2075, 2076, 2077, 2080, 2082, 2085, 2086, 2091, 2093, 2099, 2100, 2101, 2104, 2115], "content": [1, 3, 7, 19, 30, 40, 52, 53, 64, 903, 906, 908, 1280, 1283, 1303, 1314, 1316, 1321, 1334, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1918, 1948, 1969, 2011, 2016, 2027, 2032, 2042, 2045, 2057, 2075, 2084, 2085, 2086, 2107, 2111], "equal": [1, 12, 24, 28, 35, 47, 48, 50, 52, 64, 262, 501, 547, 619, 682, 696, 697, 698, 741, 742, 743, 744, 745, 762, 770, 771, 822, 823, 824, 827, 864, 879, 944, 951, 953, 964, 965, 986, 997, 1022, 1024, 1078, 1097, 1113, 1152, 1167, 1168, 1180, 1183, 1187, 1197, 1198, 1212, 1215, 1230, 1231, 1233, 1234, 1235, 1257, 1261, 1297, 1318, 1331, 1335, 1338, 1339, 1362, 1363, 1374, 1392, 1423, 1427, 1428, 1429, 1431, 1432, 1433, 1438, 1439, 1445, 1454, 1455, 1457, 1458, 1462, 1469, 1472, 1473, 1474, 1477, 1479, 1496, 1531, 1532, 1540, 1542, 1578, 1587, 1600, 1601, 1605, 1608, 1611, 1623, 1624, 1627, 1628, 1634, 1641, 1716, 1761, 1814, 1827, 1883, 1884, 1889, 1915, 1923, 1943, 1959, 1965, 1979, 2014, 2017, 2023, 2033, 2034, 2035, 2042, 2043, 2052, 2058, 2066, 2067, 2081, 2085, 2087, 2099, 2110], "term": [1, 8, 9, 35, 47, 53, 64, 560, 736, 765, 794, 965, 1124, 1125, 1126, 1127, 1128, 1129, 1131, 1139, 1140, 1141, 1144, 1156, 1187, 1194, 1304, 1430, 1438, 1479, 1486, 1496, 1497, 1531, 1540, 1558, 1629, 1641, 1676, 1688, 1716, 1730, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1846, 1882, 1890, 2012, 2016, 2041, 2042, 2048, 2049, 2050, 2052, 2057, 2065, 2067, 2068, 2070, 2076, 2080, 2092, 2098, 2099, 2102], "mulbackward0": [1, 912, 915, 916, 2048], "inplac": [1, 30, 52, 58, 60, 64, 66, 74, 75, 757, 760, 778, 780, 782, 793, 799, 816, 840, 841, 842, 860, 861, 866, 903, 904, 908, 929, 975, 1166, 1272, 1434, 1444, 1463, 1464, 1465, 1466, 1467, 1470, 1482, 1483, 1484, 1512, 1525, 1526, 1545, 1546, 1547, 1554, 1556, 1569, 1598, 1606, 1617, 1618, 1619, 1620, 1621, 1625, 1636, 1637, 1638, 1647, 1663, 1678, 1679, 1682, 1685, 1687, 1697, 1709, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1964, 2014, 2023, 2042, 2048, 2065, 2091, 2106, 2110], "lead": [1, 5, 7, 22, 28, 36, 52, 53, 55, 61, 64, 903, 904, 908, 911, 913, 917, 923, 977, 1159, 1177, 1187, 1303, 1496, 1542, 1558, 1912, 1918, 1976, 2016, 2034, 2035, 2042, 2044, 2047, 2048, 2049, 2051, 2057, 2058, 2061, 2067, 2068, 2070, 2075, 2080, 2085, 2086, 2096, 2103], "undefin": [1, 28, 36, 40, 321, 473, 619, 881, 903, 907, 908, 909, 922, 923, 951, 959, 977, 1111, 1161, 1162, 1438, 1777, 1820, 2020, 2042, 2045, 2048, 2049, 2059, 2075], "recurs": [1, 35, 52, 55, 64, 977, 981, 1100, 1185, 1194, 1212, 1272, 1284, 1286, 1526, 1723, 1724, 1800, 2015, 2048, 2055, 2068, 2075, 2099, 2100, 2102, 2103], "inner": [1, 3, 55, 56, 61, 817, 819, 911, 1091, 1167, 1171, 1176, 1184, 1192, 1949, 2014, 2048, 2066, 2075, 2091], "save_on_cpu": 1, "pin_memori": [1, 23, 66, 71, 75, 76, 77, 447, 448, 449, 450, 451, 1109, 1111, 1160, 1835, 1839, 1841, 1942, 2013, 2014, 2035, 2045, 2048, 2066, 2082, 2106], "within": [1, 5, 9, 12, 19, 23, 24, 28, 32, 33, 34, 35, 47, 48, 50, 52, 53, 55, 63, 64, 81, 82, 90, 490, 881, 898, 922, 923, 929, 975, 989, 1014, 
1091, 1185, 1191, 1272, 1276, 1365, 1399, 1435, 1436, 1437, 1445, 1464, 1465, 1466, 1470, 1472, 1519, 1520, 1521, 1526, 1532, 1566, 1578, 1632, 1642, 1657, 1658, 1659, 1716, 1734, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1750, 1751, 1752, 1753, 1754, 1831, 1832, 1862, 1883, 1963, 1967, 1983, 2013, 2015, 2016, 2028, 2029, 2032, 2040, 2044, 2045, 2048, 2053, 2054, 2055, 2058, 2065, 2067, 2068, 2070, 2073, 2074, 2075, 2076, 2085, 2093, 2098, 2099, 2102, 2104, 2110, 2111], "move": [1, 5, 7, 8, 9, 14, 28, 30, 55, 62, 64, 526, 591, 823, 1128, 1194, 1272, 1280, 1339, 1344, 1380, 1440, 1441, 1442, 1499, 1500, 1501, 1526, 1566, 1671, 1718, 1719, 1794, 2015, 2026, 2027, 2032, 2034, 2045, 2046, 2049, 2050, 2055, 2056, 2057, 2060, 2067, 2068, 2070, 2075, 2082, 2083, 2087, 2102, 2111], "copi": [1, 7, 11, 23, 24, 28, 30, 37, 45, 52, 55, 59, 60, 64, 192, 198, 208, 211, 317, 404, 450, 460, 465, 473, 495, 501, 582, 583, 584, 585, 605, 619, 794, 816, 840, 841, 882, 883, 901, 918, 919, 920, 942, 964, 972, 975, 1020, 1021, 1147, 1148, 1149, 1150, 1152, 1166, 1187, 1272, 1284, 1344, 1345, 1422, 1462, 1472, 1526, 1536, 1578, 1716, 1757, 1767, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1843, 1849, 1855, 1942, 1951, 1957, 2013, 2014, 2024, 2032, 2035, 2042, 2043, 2045, 2048, 2051, 2057, 2066, 2068, 2069, 2070, 2075, 2080, 2082, 2083, 2084, 2086, 2091, 2101, 2102, 2104, 2106, 2110], "pin": [1, 30, 211, 339, 447, 448, 449, 450, 451, 465, 582, 605, 1109, 1111, 1160, 1272, 1526, 1757, 1835, 1839, 1841, 1942, 2012, 2035, 2082], "asynchron": [1, 3, 4, 30, 63, 198, 211, 582, 605, 1033, 1272, 1275, 1291, 1526, 2012, 2017, 2044, 2047, 2075, 2082, 2085, 2109], "prod_1": 1, "prod_2": 1, "del": [1, 33, 488, 2017, 2032, 2048, 2050], "illustr": [1, 2016, 2041, 2048, 2080, 2093, 2096, 2105], "aliv": [1, 5, 23, 37, 47, 1011, 2042, 2045, 2050, 2057, 2075, 2076, 2077], "live": [1, 32, 488, 1050, 1053, 1272, 1526, 2013, 2045, 2050, 2051, 2075, 2077, 2096, 2099, 2101, 2113], "releas": [1, 7, 22, 28, 47, 50, 59, 64, 515, 689, 965, 966, 1007, 1008, 1015, 1032, 1042, 1043, 1046, 1050, 1051, 1053, 1064, 1217, 1269, 1272, 1303, 1314, 1316, 1317, 1318, 1321, 1334, 1362, 1363, 1384, 1465, 1496, 1526, 1542, 1657, 1658, 1659, 1771, 1826, 1842, 1858, 1923, 1927, 1951, 1989, 2011, 2012, 2015, 2029, 2032, 2042, 2044, 2045, 2053, 2058, 2059, 2060, 2061, 2065, 2070, 2075, 2086, 2087, 2093, 2099, 2102, 2111], "delet": [1, 28, 33, 64, 898, 900, 1008, 1016, 2011, 2026, 2032, 2042, 2066, 2074, 2075, 2077, 2082, 2110], "disable_saved_tensors_hook": 1, "error_messag": 1, "featur": [1, 5, 8, 9, 11, 12, 15, 18, 19, 28, 30, 32, 33, 48, 52, 55, 56, 63, 66, 83, 682, 691, 762, 770, 771, 911, 913, 917, 989, 1082, 1181, 1284, 1367, 1377, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1440, 1458, 1463, 1464, 1465, 1466, 1470, 1473, 1474, 1477, 1478, 1488, 1496, 1497, 1532, 1542, 1544, 1561, 1570, 1572, 1574, 1575, 1576, 1577, 1600, 1601, 1618, 1619, 1620, 1625, 1627, 1628, 1649, 1716, 1816, 1964, 2012, 2013, 2015, 2016, 2017, 2022, 2034, 2035, 2042, 2048, 2050, 2052, 2059, 2060, 2062, 2065, 2069, 2070, 2075, 2080, 2085, 2089, 2092, 2093, 2099, 2103, 2104], "messag": [1, 2, 5, 18, 19, 28, 40, 50, 52, 60, 64, 66, 67, 85, 86, 626, 682, 1067, 1068, 1070, 1180, 1187, 1302, 1303, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 2011, 2013, 2014, 2016, 2017, 2022, 2050, 2063, 2065, 2069, 2075, 2077, 2087, 2104, 2111], "get": [1, 8, 12, 14, 19, 23, 28, 29, 30, 37, 47, 48, 50, 51, 52, 55, 60, 
63, 64, 90, 152, 337, 688, 736, 790, 791, 864, 865, 896, 902, 918, 931, 942, 959, 1035, 1036, 1037, 1054, 1095, 1166, 1169, 1170, 1171, 1178, 1179, 1182, 1184, 1187, 1218, 1219, 1288, 1340, 1447, 1448, 1449, 1468, 1469, 1493, 1494, 1495, 1522, 1523, 1524, 1536, 1554, 1716, 1717, 1731, 1758, 1765, 1847, 1862, 1967, 1990, 1991, 1992, 1999, 2011, 2012, 2013, 2016, 2024, 2028, 2029, 2032, 2033, 2042, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2060, 2063, 2066, 2068, 2070, 2075, 2077, 2080, 2084, 2085, 2086, 2096, 2098, 2099, 2100, 2101, 2105, 2111, 2112, 2115], "register_multi_grad_hook": [1, 2042], "fn": [1, 37, 39, 40, 44, 50, 52, 55, 64, 910, 927, 928, 977, 978, 981, 1167, 1173, 1177, 1272, 1273, 1285, 1290, 1526, 1976, 2013, 2015, 2016, 2020, 2032, 2042, 2054, 2075, 2097, 2099, 2103, 2104, 2109, 2110, 2111], "multi": [1, 4, 33, 37, 47, 762, 1011, 1054, 1075, 1108, 1128, 1196, 1235, 1272, 1439, 1462, 1477, 1496, 1526, 1529, 1530, 1531, 1532, 1542, 1570, 1572, 1579, 1605, 1716, 1999, 2001, 2012, 2013, 2016, 2042, 2044, 2045, 2055, 2059, 2067, 2075, 2080, 2083, 2085, 2086, 2089], "specifi": [1, 2, 3, 5, 8, 12, 14, 19, 20, 23, 24, 28, 30, 32, 33, 34, 35, 37, 38, 40, 45, 46, 47, 48, 52, 53, 55, 64, 90, 99, 152, 197, 261, 321, 333, 473, 483, 495, 499, 501, 502, 515, 517, 519, 539, 547, 548, 562, 582, 585, 586, 587, 589, 590, 605, 682, 737, 743, 744, 745, 770, 771, 795, 796, 797, 801, 816, 825, 828, 842, 857, 861, 862, 864, 881, 883, 895, 896, 908, 909, 912, 915, 917, 942, 946, 969, 975, 996, 997, 1008, 1010, 1011, 1020, 1021, 1022, 1024, 1039, 1042, 1053, 1079, 1089, 1090, 1091, 1095, 1096, 1098, 1108, 1111, 1122, 1125, 1127, 1128, 1130, 1131, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1143, 1145, 1167, 1168, 1171, 1177, 1183, 1191, 1193, 1213, 1226, 1234, 1235, 1269, 1272, 1276, 1288, 1289, 1315, 1325, 1327, 1329, 1330, 1339, 1342, 1344, 1345, 1355, 1372, 1374, 1380, 1416, 1417, 1420, 1436, 1437, 1438, 1439, 1445, 1457, 1459, 1461, 1462, 1468, 1469, 1472, 1479, 1485, 1486, 1491, 1492, 1496, 1517, 1518, 1523, 1526, 1529, 1530, 1531, 1532, 1533, 1540, 1558, 1559, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1580, 1581, 1600, 1601, 1604, 1605, 1615, 1616, 1623, 1624, 1629, 1632, 1644, 1651, 1668, 1669, 1676, 1684, 1690, 1691, 1716, 1722, 1723, 1724, 1730, 1731, 1732, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1768, 1771, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1793, 1794, 1795, 1796, 1797, 1802, 1807, 1808, 1816, 1824, 1833, 1849, 1854, 1855, 1858, 1874, 1901, 1903, 1905, 1906, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1918, 1921, 1922, 1923, 1926, 1943, 1945, 1949, 1959, 1960, 1961, 1962, 1963, 1967, 1970, 1971, 1972, 1976, 1981, 1993, 2004, 2011, 2013, 2015, 2016, 2020, 2022, 2023, 2024, 2027, 2029, 2033, 2034, 2042, 2045, 2048, 2053, 2055, 2057, 2061, 2063, 2065, 2067, 2068, 2069, 2070, 2074, 2075, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2093, 2097, 2098, 2110, 2114], "ignor": [1, 5, 7, 28, 32, 39, 45, 55, 64, 152, 501, 546, 688, 691, 692, 693, 737, 796, 797, 861, 864, 865, 896, 917, 922, 923, 943, 965, 967, 968, 997, 1001, 1006, 1054, 1055, 1075, 1076, 1129, 1131, 1139, 1140, 1141, 1187, 1216, 1233, 1272, 1284, 1290, 1293, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1319, 1320, 1321, 1322, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1341, 1342, 1364, 1417, 1418, 1419, 1435, 1436, 1437, 1438, 1439, 1459, 1461, 
1469, 1477, 1479, 1485, 1491, 1492, 1496, 1517, 1518, 1519, 1520, 1521, 1526, 1529, 1530, 1531, 1532, 1533, 1540, 1542, 1558, 1559, 1570, 1575, 1604, 1605, 1615, 1624, 1644, 1668, 1676, 1716, 1771, 1779, 1802, 1810, 1814, 1874, 1905, 1906, 1923, 1927, 1951, 1999, 2000, 2001, 2002, 2013, 2016, 2017, 2023, 2034, 2042, 2045, 2048, 2065, 2080, 2087, 2100, 2107, 2110], "rel": [1, 8, 9, 14, 24, 28, 35, 55, 64, 489, 490, 696, 879, 922, 923, 927, 928, 997, 1194, 1261, 1327, 1330, 1575, 1576, 1597, 1632, 1779, 1783, 1784, 1796, 1810, 1928, 2023, 2029, 2044, 2045, 2051, 2054, 2065, 2068, 2087], "allow_mutation_on_saved_tensor": 1, "mutat": [1, 12, 52, 53, 64, 66, 73, 74, 75, 799, 840, 841, 860, 861, 975, 989, 1166, 1276, 2020, 2063, 2068, 2086, 2101, 2102], "_allowmutationonsavedcontext": 1, "purpos": [1, 19, 24, 28, 30, 64, 89, 473, 501, 762, 922, 1282, 1367, 1445, 1477, 1532, 1709, 1710, 1711, 1712, 1874, 2029, 2035, 2042, 2048, 2068, 2076, 2097, 2099, 2102, 2105, 2111], "clear": [1, 2, 8, 9, 50, 64, 942, 985, 1185, 1189, 1272, 1526, 1527, 1536, 1778, 2011, 2042, 2045, 2055, 2060, 2066, 2067, 2099], "upon": [1, 2, 23, 29, 37, 40, 64, 975, 1590, 1716, 1733, 1736, 1960, 2032, 2042, 2045, 2065, 2070, 2077, 2114], "sin_": [1, 2014, 2033], "8415": [1, 2048, 2080], "sinbackward0": 1, "gradientedg": [1, 896, 917], "output_nr": [1, 2014, 2066], "edg": [1, 53, 781, 787, 1226, 1234, 1235, 1643, 1703, 2021, 2049, 2076, 2107], "get_gradient_edg": 1, "equival": [1, 3, 5, 11, 12, 23, 24, 25, 32, 35, 40, 47, 48, 52, 59, 60, 64, 84, 157, 172, 174, 177, 180, 181, 182, 242, 257, 269, 299, 321, 327, 395, 450, 460, 488, 500, 502, 515, 527, 606, 612, 619, 620, 622, 694, 746, 750, 757, 759, 761, 787, 788, 789, 879, 892, 893, 908, 909, 956, 958, 961, 963, 966, 973, 974, 989, 1096, 1100, 1103, 1106, 1107, 1108, 1110, 1125, 1127, 1130, 1133, 1134, 1137, 1138, 1140, 1143, 1145, 1164, 1166, 1169, 1171, 1177, 1187, 1200, 1209, 1236, 1238, 1249, 1271, 1272, 1283, 1288, 1289, 1292, 1304, 1325, 1329, 1337, 1342, 1374, 1379, 1395, 1417, 1430, 1440, 1441, 1442, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1468, 1469, 1480, 1486, 1488, 1489, 1490, 1491, 1496, 1498, 1526, 1542, 1558, 1566, 1578, 1580, 1587, 1588, 1589, 1641, 1651, 1673, 1684, 1703, 1704, 1705, 1716, 1776, 1797, 1833, 1836, 1839, 1840, 1864, 1870, 1877, 1899, 1906, 1919, 1929, 1930, 1937, 1942, 1961, 1976, 1977, 1978, 2010, 2013, 2015, 2016, 2024, 2042, 2063, 2065, 2068, 2071, 2080, 2081, 2083, 2086, 2087, 2097, 2100, 2112, 2116], "variou": [2, 5, 14, 23, 28, 30, 52, 64, 682, 1183, 1345, 1965, 1967, 2020, 2023, 2032, 2035, 2048, 2055, 2057, 2067, 2070, 2072, 2080, 2092, 2101, 2112, 2114], "get_cpu_cap": 2, "capabl": [2, 8, 14, 15, 28, 1035, 1723, 1724, 1833, 1990, 2045, 2053, 2054, 2056, 2089], "string": [2, 3, 5, 13, 14, 23, 28, 37, 44, 45, 47, 52, 53, 64, 605, 803, 816, 857, 984, 1033, 1050, 1051, 1108, 1166, 1187, 1272, 1280, 1283, 1344, 1453, 1454, 1455, 1526, 1527, 1536, 1570, 1572, 1574, 1607, 1608, 1609, 1747, 1826, 1858, 1866, 1868, 1966, 1967, 2011, 2014, 2015, 2016, 2017, 2020, 2034, 2042, 2048, 2054, 2055, 2060, 2063, 2065, 2068, 2069, 2075, 2082, 2083, 2085, 2091, 2097, 2099, 2102, 2111, 2112, 2113], "vsx": 2, "z": [2, 3, 10, 35, 55, 60, 66, 69, 74, 75, 619, 822, 880, 903, 906, 908, 909, 938, 966, 986, 1108, 1112, 1374, 1478, 1632, 1736, 1769, 1770, 1820, 1850, 1851, 2013, 2014, 2015, 2034, 2042, 2044, 2045, 2052, 2053, 2063, 2065, 2068, 2073, 2075, 2077, 2096, 2098, 2099, 2102, 2110, 2111], "vector": [2, 11, 24, 35, 55, 56, 60, 256, 315, 317, 323, 691, 
692, 693, 887, 896, 901, 911, 912, 913, 914, 915, 916, 917, 956, 957, 963, 974, 993, 997, 1007, 1089, 1090, 1095, 1097, 1172, 1176, 1177, 1216, 1305, 1312, 1320, 1328, 1329, 1333, 1336, 1340, 1341, 1342, 1362, 1367, 1374, 1412, 1414, 1440, 1441, 1442, 1445, 1462, 1468, 1469, 1472, 1480, 1488, 1489, 1490, 1535, 1566, 1575, 1578, 1623, 1624, 1632, 1634, 1669, 1673, 1702, 1721, 1729, 1731, 1736, 1767, 1771, 1815, 1816, 1907, 1927, 1970, 1973, 1976, 2036, 2042, 2052, 2070, 2080, 2083, 2085, 2093, 2104], "NO": [2, 87, 89, 1186], "avx": [2, 2095], "avx2": [2, 2070, 2095], "avx512": [2, 2095], "is_built": [2, 2056], "built": [2, 3, 7, 8, 14, 24, 28, 34, 40, 58, 64, 976, 1033, 1282, 1576, 1802, 2012, 2020, 2035, 2042, 2044, 2045, 2046, 2048, 2053, 2055, 2056, 2057, 2070, 2072, 2097, 2099, 2100, 2111, 2112], "necessarili": [2, 24, 28, 35, 37, 47, 52, 86, 473, 922, 1309, 1331, 1342, 1373, 1461, 1533, 1723, 1724, 2045, 2048], "machin": [2, 28, 37, 47, 55, 56, 61, 1276, 1282, 1318, 1563, 2053, 2054, 2055, 2056, 2057, 2059, 2062, 2063, 2068, 2074, 2075, 2076, 2092, 2093, 2095, 2099, 2104], "driver": [2, 20, 1216, 1318, 1336, 1337, 1383, 2014, 2045, 2058, 2075, 2109, 2111], "would": [2, 3, 5, 8, 9, 11, 14, 23, 28, 33, 35, 40, 47, 48, 52, 53, 55, 57, 60, 64, 447, 448, 449, 450, 451, 488, 700, 762, 796, 857, 896, 903, 904, 908, 917, 965, 983, 1109, 1111, 1129, 1160, 1170, 1171, 1186, 1187, 1199, 1200, 1201, 1270, 1272, 1273, 1277, 1280, 1288, 1289, 1388, 1389, 1412, 1435, 1436, 1437, 1438, 1439, 1477, 1491, 1496, 1519, 1520, 1521, 1526, 1536, 1542, 1632, 1643, 1706, 1716, 1717, 1723, 1724, 1757, 1769, 1797, 1835, 1839, 1841, 1852, 1862, 1942, 1943, 1948, 2013, 2015, 2016, 2024, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2044, 2045, 2047, 2048, 2049, 2051, 2052, 2057, 2060, 2065, 2067, 2068, 2070, 2075, 2076, 2077, 2079, 2080, 2096, 2097, 2098, 2099, 2101, 2102, 2103, 2104, 2105, 2107, 2110, 2111, 2115], "allow_tf32": [2, 1870, 2014, 2045, 2058], "tensorfloat": 2, "core": [2, 3, 7, 8, 52, 59, 82, 975, 1002, 1064, 1777, 2016, 2020, 2044, 2045, 2047, 2057, 2058, 2068, 2089, 2097, 2098, 2102, 2108], "amper": [2, 2080], "newer": [2, 14, 52, 1071, 1765, 2044, 2045, 2060, 2062, 2067, 2068, 2071, 2104], "tf32": [2, 20], "allow_fp16_reduced_precision_reduct": [2, 2045, 2058], "reduc": [2, 3, 14, 24, 28, 29, 30, 32, 34, 55, 323, 515, 518, 519, 688, 695, 697, 698, 699, 701, 821, 822, 823, 824, 827, 829, 877, 878, 975, 980, 1021, 1032, 1064, 1181, 1183, 1246, 1325, 1329, 1331, 1336, 1342, 1360, 1370, 1372, 1373, 1375, 1378, 1417, 1418, 1419, 1420, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1459, 1461, 1469, 1485, 1491, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1566, 1575, 1604, 1605, 1613, 1615, 1624, 1640, 1643, 1644, 1645, 1656, 1664, 1665, 1666, 1667, 1668, 1669, 1676, 1688, 1689, 1700, 1703, 1716, 1731, 1769, 1786, 1810, 1824, 1826, 1827, 1861, 1904, 1908, 1921, 1922, 1926, 1927, 1949, 1964, 1971, 1972, 1989, 2014, 2032, 2033, 2042, 2044, 2047, 2048, 2051, 2055, 2057, 2059, 2060, 2061, 2066, 2067, 2068, 2070, 2080, 2096, 2098, 2102, 2106, 2107, 2111, 2112], "precis": [2, 3, 8, 11, 14, 24, 33, 35, 55, 688, 691, 922, 923, 943, 955, 1053, 1151, 1222, 1318, 1336, 1342, 1367, 1377, 1435, 1436, 1437, 1439, 1453, 1454, 1455, 1456, 1457, 1458, 1478, 1497, 1513, 1519, 1520, 1521, 1579, 1643, 1684, 1716, 1855, 1870, 1874, 2012, 2016, 2029, 2036, 2042, 2055, 2068, 2070, 2071, 2073, 2081, 2083, 2085, 2086, 2108, 2116], "gemm": [2, 17, 19, 2044, 2065, 2102], "allow_bf16_reduced_precision_reduct": [2, 
2045, 2058], "cufft_plan_cach": [2, 2045], "cufft": 2, "queri": [2, 19, 28, 47, 64, 737, 1010, 1011, 1013, 1019, 1066, 1071, 1085, 1086, 1197, 1272, 1385, 1526, 1532, 1587, 1684, 1735, 1981, 1982, 2014, 2028, 2045, 2068, 2098, 2101], "specif": [2, 3, 7, 8, 9, 11, 14, 19, 20, 28, 30, 32, 33, 34, 35, 37, 45, 47, 50, 52, 55, 60, 64, 66, 81, 83, 88, 90, 515, 857, 864, 935, 1013, 1099, 1128, 1235, 1269, 1276, 1282, 1286, 1288, 1289, 1308, 1310, 1373, 1472, 1498, 1541, 1684, 1709, 1710, 1715, 1738, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1793, 1794, 1795, 1796, 1797, 1865, 1870, 1904, 1965, 1982, 2011, 2013, 2015, 2016, 2018, 2020, 2022, 2029, 2034, 2036, 2042, 2045, 2048, 2053, 2057, 2059, 2063, 2067, 2068, 2069, 2070, 2071, 2073, 2075, 2077, 2085, 2086, 2093, 2095, 2098, 2099, 2101, 2102, 2104, 2108, 2110, 2111, 2118], "via": [2, 7, 14, 15, 23, 28, 30, 34, 35, 38, 45, 52, 53, 55, 59, 62, 64, 417, 488, 519, 619, 922, 923, 931, 962, 983, 1008, 1044, 1064, 1112, 1169, 1185, 1189, 1190, 1192, 1194, 1344, 1345, 1440, 1441, 1442, 1462, 1480, 1488, 1489, 1490, 1498, 1566, 1730, 1765, 1768, 1808, 2013, 2015, 2016, 2020, 2028, 2029, 2032, 2035, 2040, 2042, 2045, 2046, 2048, 2049, 2050, 2053, 2055, 2057, 2058, 2065, 2068, 2070, 2075, 2076, 2080, 2083, 2084, 2097, 2098, 2099, 2101, 2103, 2107, 2112], "readonli": 2, "int": [2, 3, 12, 18, 19, 23, 24, 28, 30, 32, 33, 34, 35, 37, 41, 44, 45, 47, 48, 51, 52, 53, 55, 64, 66, 74, 75, 77, 80, 90, 218, 220, 234, 235, 244, 256, 315, 317, 319, 323, 439, 446, 447, 449, 451, 459, 474, 478, 495, 499, 501, 515, 517, 519, 522, 539, 545, 547, 548, 560, 562, 568, 585, 586, 587, 589, 590, 609, 619, 682, 695, 697, 698, 699, 701, 757, 758, 777, 779, 781, 782, 787, 788, 789, 821, 877, 878, 879, 881, 895, 903, 906, 908, 909, 926, 931, 938, 944, 946, 953, 960, 962, 969, 974, 977, 996, 997, 1001, 1002, 1006, 1007, 1011, 1013, 1015, 1016, 1019, 1020, 1021, 1022, 1023, 1024, 1026, 1027, 1028, 1029, 1030, 1035, 1036, 1037, 1039, 1041, 1045, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1064, 1065, 1066, 1071, 1072, 1073, 1074, 1077, 1078, 1079, 1082, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1095, 1096, 1097, 1098, 1099, 1100, 1106, 1109, 1111, 1121, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1147, 1160, 1162, 1163, 1167, 1168, 1169, 1170, 1171, 1177, 1179, 1182, 1185, 1203, 1205, 1213, 1220, 1223, 1224, 1226, 1230, 1231, 1233, 1234, 1235, 1236, 1247, 1248, 1269, 1270, 1272, 1273, 1275, 1278, 1279, 1284, 1292, 1294, 1304, 1305, 1325, 1326, 1329, 1338, 1339, 1340, 1341, 1342, 1343, 1345, 1352, 1359, 1360, 1365, 1370, 1372, 1373, 1375, 1378, 1380, 1381, 1382, 1383, 1386, 1387, 1393, 1400, 1401, 1402, 1403, 1404, 1412, 1417, 1418, 1419, 1420, 1421, 1422, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1435, 1436, 1437, 1440, 1441, 1442, 1443, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1460, 1461, 1462, 1468, 1469, 1471, 1472, 1473, 1474, 1476, 1478, 1480, 1488, 1489, 1490, 1493, 1494, 1495, 1497, 1498, 1502, 1503, 1504, 1505, 1506, 1507, 1511, 1513, 1514, 1516, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1528, 1531, 1533, 1534, 1537, 1538, 1539, 1541, 1544, 1548, 1549, 1550, 1551, 1552, 1553, 1560, 1562, 1566, 1570, 1571, 1572, 1573, 1574, 1575, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1614, 1615, 1616, 1623, 1624, 1631, 1634, 1643, 1651, 1668, 1669, 1670, 1674, 1675, 1690, 1691, 1699, 1703, 1704, 1705, 
1715, 1716, 1731, 1732, 1741, 1742, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1758, 1760, 1765, 1768, 1771, 1772, 1774, 1775, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1816, 1817, 1824, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1835, 1837, 1838, 1839, 1841, 1847, 1848, 1849, 1852, 1853, 1854, 1855, 1858, 1863, 1864, 1865, 1869, 1872, 1873, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1895, 1899, 1903, 1906, 1907, 1908, 1915, 1918, 1920, 1921, 1922, 1923, 1926, 1928, 1932, 1934, 1939, 1943, 1944, 1946, 1948, 1949, 1952, 1953, 1954, 1955, 1958, 1959, 1960, 1961, 1962, 1963, 1966, 1967, 1970, 1971, 1972, 1976, 1977, 1982, 1984, 1985, 1986, 1987, 1990, 1991, 1992, 1993, 1996, 1999, 2000, 2003, 2004, 2008, 2009, 2013, 2014, 2015, 2016, 2017, 2020, 2029, 2032, 2033, 2035, 2040, 2045, 2049, 2057, 2060, 2063, 2065, 2066, 2069, 2070, 2074, 2075, 2079, 2081, 2082, 2083, 2085, 2086, 2087, 2089, 2093, 2098, 2102, 2106, 2110, 2113, 2116], "show": [2, 4, 7, 13, 18, 23, 24, 28, 30, 33, 36, 52, 55, 64, 82, 911, 917, 975, 1159, 1272, 1374, 1526, 1706, 1778, 2011, 2012, 2016, 2033, 2044, 2045, 2047, 2048, 2052, 2053, 2055, 2063, 2065, 2067, 2068, 2069, 2075, 2077, 2099, 2104, 2105, 2108, 2109, 2111, 2113], "max_siz": [2, 46, 48, 2045], "capac": [2, 1078, 2045, 2057], "preferred_blas_librari": 2, "overrid": [2, 5, 14, 19, 20, 24, 28, 29, 30, 35, 40, 48, 55, 60, 64, 794, 795, 857, 893, 895, 908, 909, 1050, 1438, 1439, 1459, 1461, 1485, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1543, 1558, 1559, 1575, 1604, 1605, 1615, 1644, 1668, 1676, 1738, 1797, 1858, 1874, 2012, 2016, 2020, 2024, 2060, 2067, 2068, 2070, 2075, 2080, 2085, 2091, 2110, 2111], "bla": [2, 19, 2044], "choos": [2, 9, 15, 19, 64, 895, 908, 909, 935, 1033, 1318, 1331, 1336, 1438, 1928, 2040, 2041, 2044, 2068, 2071, 2085], "cubla": [2, 11, 17, 19, 20, 1025, 1964, 2059, 2096], "cublaslt": [2, 17, 20], "subject": [2, 3, 11, 18, 19, 28, 30, 32, 34, 55, 63, 64, 65, 235, 868, 1585, 1586, 1587, 1590, 1684, 1716, 2016, 2033, 2034, 2042, 2048, 2065, 2069, 2070, 2071, 2075, 2080, 2083, 2089, 2090, 2091, 2093, 2098, 2108], "rocm": [2, 17, 688, 691, 943, 955, 1367, 1377, 1453, 1454, 1455, 1456, 1457, 1458, 1478, 1497, 1513, 2012], "hipbla": [2, 17, 19], "hipblaslt": [2, 17, 19], "offer": [2, 28, 30, 55, 58, 1716, 1964, 2045, 2046, 2048, 2053, 2068, 2069, 2075, 2080, 2097, 2102, 2106, 2114], "wherev": [2, 9, 2033], "prefer": [2, 9, 23, 28, 30, 37, 48, 55, 865, 881, 913, 1200, 1270, 1313, 1326, 1330, 1338, 1345, 1533, 1684, 1862, 1923, 1942, 2013, 2024, 2042, 2045, 2067, 2068, 2080, 2102], "environ": [2, 3, 7, 14, 15, 18, 19, 22, 24, 30, 33, 35, 37, 40, 45, 51, 52, 64, 81, 83, 682, 1185, 1187, 1209, 1282, 1288, 1496, 1542, 1964, 2011, 2012, 2013, 2022, 2042, 2044, 2045, 2047, 2053, 2058, 2059, 2061, 2075, 2093, 2095, 2097, 2102, 2111], "variabl": [2, 3, 5, 12, 14, 18, 19, 22, 24, 35, 37, 40, 45, 47, 51, 53, 55, 60, 64, 66, 69, 74, 75, 81, 83, 450, 682, 714, 715, 716, 717, 718, 719, 720, 721, 722, 725, 726, 727, 728, 730, 731, 732, 733, 734, 736, 740, 741, 742, 743, 744, 745, 747, 748, 758, 762, 766, 801, 903, 904, 908, 977, 989, 993, 997, 1077, 1109, 1177, 1187, 1286, 1288, 1345, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1468, 1469, 1477, 1478, 1496, 1497, 1498, 1511, 1513, 1526, 1529, 1534, 1542, 1544, 1716, 1757, 1758, 1759, 1760, 1761, 1762, 1763, 1775, 1779, 1781, 1798, 1799, 1800, 
1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1835, 1837, 1839, 1964, 1976, 2009, 2011, 2012, 2022, 2023, 2034, 2035, 2042, 2044, 2045, 2047, 2050, 2052, 2058, 2059, 2061, 2063, 2065, 2067, 2070, 2075, 2082, 2085, 2093, 2098, 2099, 2100, 2102, 2104, 2111], "torch_blas_prefer_cublaslt": 2, "global": [2, 3, 5, 8, 11, 12, 20, 23, 28, 30, 32, 35, 37, 48, 55, 60, 63, 64, 66, 69, 74, 75, 857, 864, 868, 876, 931, 944, 953, 989, 1008, 1042, 1059, 1066, 1109, 1111, 1121, 1126, 1144, 1160, 1163, 1166, 1230, 1231, 1253, 1260, 1272, 1275, 1288, 1292, 1343, 1359, 1526, 1684, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1743, 1747, 1775, 1835, 1837, 1839, 1842, 1866, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2009, 2013, 2016, 2017, 2020, 2028, 2036, 2041, 2048, 2054, 2055, 2057, 2059, 2060, 2070, 2075, 2076, 2077, 2080, 2083, 2085, 2099, 2100, 2101, 2102, 2103, 2110, 2111], "overridden": [2, 14, 64, 892, 893, 894, 908, 909, 1050, 1526, 1902, 2016, 2042, 2048, 2058, 2111, 2112], "achiev": [2, 19, 23, 24, 28, 30, 34, 35, 48, 55, 1108, 1173, 1272, 1439, 1445, 1526, 1532, 1533, 1605, 1634, 1716, 2030, 2045, 2054, 2057, 2068, 2075, 2077, 2111], "better": [2, 3, 7, 8, 9, 14, 23, 27, 28, 37, 52, 59, 918, 975, 1077, 1165, 1169, 1170, 1181, 1259, 1288, 1461, 1560, 1684, 1690, 1716, 1723, 1724, 1808, 1833, 2016, 2041, 2042, 2044, 2045, 2051, 2052, 2059, 2061, 2065, 2067, 2069, 2070, 2071, 2080, 2085, 2091, 2093, 2097, 2099, 2100, 2108], "select": [2, 5, 15, 17, 19, 21, 23, 28, 30, 35, 37, 45, 53, 315, 317, 319, 323, 762, 965, 999, 1001, 1005, 1014, 1015, 1019, 1026, 1027, 1028, 1029, 1031, 1052, 1056, 1058, 1059, 1060, 1062, 1064, 1065, 1066, 1071, 1072, 1073, 1074, 1077, 1078, 1081, 1083, 1085, 1086, 1128, 1345, 1374, 1399, 1400, 1401, 1402, 1403, 1408, 1409, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1477, 1496, 1542, 1590, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1684, 1752, 1753, 1865, 1939, 1967, 1979, 1983, 1984, 1985, 1986, 1988, 2003, 2006, 2007, 2013, 2014, 2016, 2032, 2033, 2034, 2035, 2042, 2044, 2045, 2052, 2059, 2066, 2070, 2080, 2084, 2087, 2100, 2106, 2108, 2113], "incorrect": [2, 4, 5, 55, 64, 86, 87, 89, 256, 515, 558, 903, 906, 908, 922, 931, 957, 977, 1288, 1331, 1344, 1532, 1570, 1571, 1572, 1573, 1574, 1702, 1771, 1912, 1923, 2013, 2016, 2020, 2045, 2058, 2065, 2099, 2114], "_blasbackend": 2, "preferred_linalg_librari": [2, 1309], "heurist": [2, 14, 23, 47, 48, 64, 980, 1108, 2051, 2096, 2105], "cusolv": [2, 1336, 1337, 1927], "magma": [2, 1318, 1362, 1826, 1927, 2061, 2080], "algebra": [2, 9, 1108, 1309, 1330, 2012, 2021], "decid": [2, 4, 7, 28, 37, 47, 59, 488, 1183, 1747, 2023, 2065, 2080, 2101, 2102, 2105], "pick": [2, 28, 46, 48, 515, 975, 2042, 2075, 2098, 2100, 2105], "torch_linalg_prefer_cusolv": 2, "linalg": [2, 11, 408, 965, 966, 967, 968, 1007, 1094, 1216, 1250, 1353, 1362, 1363, 1364, 1368, 1369, 1730, 1731, 1736, 1771, 1813, 1818, 1820, 1826, 1896, 1927, 1928, 1951, 1973, 2012], "inv": [2, 35, 1250, 1304, 1308, 1314, 1330, 1334, 1338], "inv_ex": [2, 1313], "cholesky_ex": [2, 1302], "lu_factor": [2, 1321, 1322, 1362, 1363, 1364], "lu": [2, 10, 1314, 1320, 1321, 1322, 1363, 1364, 2014], "eigh": [2, 1302, 1308, 1311, 1330, 1336, 2058], "eighval": 2, "svdval": [2, 1304, 1318, 1327, 1336, 1927, 2058], "_linalgbackend": 2, "sdpaparam": 2, "flash_sdp_en": 2, "flash": [2, 1585, 1590], "scale": [2, 7, 23, 33, 35, 37, 46, 48, 99, 175, 475, 477, 619, 687, 691, 692, 693, 740, 741, 742, 743, 744, 745, 746, 751, 752, 753, 754, 755, 756, 757, 758, 761, 
772, 774, 775, 776, 777, 779, 782, 783, 797, 801, 804, 821, 822, 823, 824, 827, 943, 1122, 1123, 1126, 1144, 1176, 1177, 1359, 1434, 1463, 1468, 1469, 1470, 1486, 1498, 1554, 1579, 1580, 1581, 1585, 1590, 1623, 1624, 1625, 1641, 1643, 1684, 1685, 1690, 1781, 1802, 1828, 1829, 1830, 1831, 1832, 1905, 1906, 1924, 1976, 2012, 2014, 2034, 2040, 2045, 2050, 2055, 2063, 2070, 2071, 2073, 2081, 2102], "product": [2, 15, 28, 30, 35, 47, 152, 688, 691, 692, 693, 762, 896, 901, 912, 914, 915, 916, 917, 943, 955, 961, 965, 993, 1007, 1019, 1066, 1071, 1085, 1086, 1089, 1105, 1108, 1172, 1176, 1177, 1249, 1293, 1305, 1312, 1338, 1339, 1341, 1367, 1374, 1377, 1414, 1477, 1478, 1496, 1497, 1585, 1590, 1631, 1684, 1730, 1814, 1815, 1824, 1870, 1944, 1959, 1973, 1976, 2012, 2013, 2033, 2042, 2052, 2054, 2058, 2068, 2070, 2080, 2099, 2113], "attent": [2, 7, 34, 737, 1439, 1532, 1570, 1572, 1574, 1585, 1587, 1605, 1684, 2012, 2061, 2084, 2099], "enable_mem_efficient_sdp": [2, 1684], "mem_efficient_sdp_en": 2, "enable_flash_sdp": [2, 1684], "math_sdp_en": 2, "math": [2, 23, 64, 1118, 1324, 1585, 1590, 1644, 1684, 1816, 1833, 1928, 2012, 2013, 2015, 2016, 2051, 2058, 2080, 2081, 2087, 2111], "enable_math_sdp": [2, 1684], "cudnn_sdp_en": 2, "enable_cudnn_sdp": 2, "can_use_flash_attent": 2, "debug": [2, 4, 5, 8, 19, 20, 23, 38, 44, 52, 81, 83, 682, 829, 830, 834, 846, 922, 923, 975, 977, 984, 1008, 1041, 1082, 1220, 1302, 1709, 1710, 1711, 1712, 1869, 1876, 2012, 2015, 2022, 2042, 2044, 2045, 2047, 2053, 2054, 2055, 2059, 2061, 2063, 2065, 2068, 2090, 2098, 2099, 2100, 2103, 2104, 2107, 2113, 2114, 2115], "check": [2, 3, 4, 5, 11, 14, 19, 20, 23, 28, 29, 30, 35, 47, 55, 66, 67, 72, 75, 223, 342, 501, 696, 864, 903, 904, 906, 907, 908, 909, 922, 923, 929, 977, 1010, 1011, 1013, 1017, 1046, 1064, 1183, 1187, 1195, 1196, 1204, 1205, 1259, 1269, 1272, 1288, 1289, 1302, 1303, 1308, 1309, 1311, 1314, 1316, 1321, 1327, 1330, 1334, 1336, 1337, 1362, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1526, 1706, 1716, 1733, 1736, 1749, 1759, 1778, 1779, 1902, 1909, 1910, 1911, 1912, 1913, 1914, 1965, 1981, 1982, 2011, 2012, 2015, 2016, 2017, 2020, 2033, 2034, 2041, 2045, 2048, 2049, 2050, 2051, 2054, 2055, 2056, 2059, 2063, 2065, 2067, 2068, 2069, 2070, 2075, 2080, 2085, 2087, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2110, 2111, 2112, 2115], "flashattent": [2, 1574, 1684], "util": [2, 7, 17, 30, 48, 52, 53, 55, 56, 63, 64, 66, 501, 734, 737, 740, 741, 742, 747, 748, 758, 762, 766, 792, 800, 909, 1024, 1109, 1110, 1111, 1159, 1204, 1205, 1272, 1477, 1496, 1526, 1542, 1543, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1716, 1733, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1757, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1802, 1808, 1931, 1932, 1934, 1935, 1936, 1964, 2012, 2016, 2020, 2024, 2029, 2041, 2043, 2044, 2045, 2048, 2050, 2052, 2054, 2055, 2057, 2059, 2065, 2067, 2068, 2098, 2099, 2102, 2109, 2111, 2114], "scaled_dot_product_attent": [2, 1532, 1570, 1586, 1589, 1590, 2014, 2037, 2038, 2039, 2066], "_sdpaparam": 2, "kei": [2, 3, 23, 30, 33, 37, 45, 47, 48, 52, 53, 55, 62, 64, 417, 605, 682, 737, 857, 862, 930, 935, 942, 1074, 1165, 1175, 1188, 1272, 1288, 1289, 1344, 1526, 1527, 1532, 1536, 1570, 1571, 1572, 1573, 1574, 1587, 1684, 1750, 1766, 1965, 1967, 2012, 2014, 2015, 2016, 2020, 2028, 2045, 2048, 2054, 2055, 2060, 2065, 2066, 2067, 2069, 2070, 2073, 2075, 2076, 2082, 2085, 2087, 2090, 2092, 2103, 2108, 2109, 2110, 2114], 
"mask": [2, 45, 401, 402, 403, 404, 405, 546, 737, 922, 923, 1366, 1434, 1445, 1470, 1532, 1570, 1571, 1572, 1573, 1574, 1625, 1684, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1797, 2012, 2014, 2033, 2034, 2080, 2102], "dropout": [2, 5, 34, 64, 736, 737, 762, 1272, 1434, 1464, 1465, 1466, 1470, 1477, 1496, 1526, 1532, 1542, 1543, 1570, 1572, 1574, 1598, 1618, 1619, 1620, 1625, 1684, 2012, 2014, 2026, 2034, 2035, 2042, 2045, 2066, 2072], "causal": [2, 737, 1532, 1570, 1571, 1572, 1573, 1574, 1586, 1587, 1588, 1589, 1684], "warn": [2, 3, 14, 18, 19, 24, 25, 28, 32, 52, 86, 87, 89, 682, 911, 917, 965, 1019, 1066, 1071, 1082, 1085, 1086, 1177, 1187, 1253, 1269, 1288, 1330, 1344, 1374, 1532, 1570, 1571, 1572, 1573, 1574, 1684, 1733, 1736, 1869, 1876, 1923, 1964, 1976, 2011, 2022, 2029, 2043, 2048, 2063, 2066, 2068, 2074, 2099, 2111], "why": [2, 3, 7, 23, 60, 64, 1108, 1276, 1684, 2012, 2020, 2023, 2049, 2051, 2098, 2099, 2101, 2109, 2113], "could": [2, 4, 5, 7, 8, 17, 19, 23, 28, 34, 35, 37, 47, 60, 63, 64, 585, 586, 587, 589, 590, 931, 1046, 1129, 1131, 1139, 1140, 1141, 1188, 1269, 1278, 1303, 1344, 1362, 1388, 1389, 1723, 1724, 1801, 1904, 1928, 1960, 2015, 2016, 2023, 2032, 2042, 2045, 2047, 2061, 2065, 2068, 2070, 2071, 2075, 2076, 2077, 2080, 2084, 2085, 2097, 2099, 2102, 2103, 2107, 2111, 2114, 2115], "depend": [2, 4, 5, 12, 14, 23, 28, 30, 32, 33, 35, 37, 47, 48, 50, 53, 55, 64, 317, 787, 881, 1019, 1066, 1071, 1085, 1086, 1129, 1131, 1139, 1140, 1141, 1167, 1171, 1176, 1187, 1197, 1199, 1200, 1201, 1203, 1284, 1288, 1308, 1309, 1336, 1362, 1367, 1438, 1439, 1459, 1461, 1469, 1472, 1485, 1491, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1578, 1579, 1597, 1604, 1605, 1615, 1624, 1632, 1644, 1668, 1676, 1684, 1703, 1706, 1716, 1736, 1737, 1743, 1802, 1814, 1816, 1833, 1849, 1909, 1910, 1911, 1913, 1914, 1927, 1951, 1979, 2011, 2013, 2015, 2016, 2020, 2023, 2024, 2032, 2042, 2044, 2045, 2048, 2049, 2051, 2052, 2055, 2057, 2062, 2064, 2065, 2069, 2070, 2075, 2080, 2086, 2087, 2092, 2093, 2096, 2097, 2098, 2099, 2101, 2102, 2104, 2107, 2109, 2110, 2113], "can_use_efficient_attent": 2, "efficient_attent": [2, 1585, 1590], "can_use_cudnn_attent": 2, "cudnn_attent": [2, 1585], "sdp_kernel": [2, 1684], "enable_flash": 2, "enable_math": [2, 1684], "enable_mem_effici": 2, "enable_cudnn": 2, "temporarili": [2, 37, 1866, 2042, 2065, 2069, 2105], "previou": [2, 18, 28, 30, 47, 52, 55, 64, 515, 558, 762, 975, 977, 1042, 1153, 1226, 1283, 1343, 1359, 1477, 1496, 1542, 1590, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1778, 1810, 1833, 1923, 2026, 2042, 2045, 2048, 2055, 2061, 2063, 2070, 2077, 2093, 2102], "restor": [2, 5, 30, 32, 64, 90, 985, 1153, 1343, 1359, 1590, 1768, 1853, 1874, 2055, 2060, 2100], "is_avail": [2, 17, 28, 1967, 2012, 2045, 2053, 2056, 2060, 2069, 2093, 2118], "determinist": [2, 3, 5, 22, 28, 35, 64, 86, 90, 488, 501, 515, 876, 1109, 1110, 1111, 1220, 1253, 1275, 1288, 1289, 1370, 1373, 1375, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1496, 1542, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1684, 1863, 1869, 1964, 2012, 2014, 2042, 2051, 2059, 2067, 2074], "algorithm": [2, 3, 7, 11, 19, 21, 24, 29, 32, 35, 37, 55, 90, 762, 781, 787, 965, 1269, 1313, 1320, 1326, 1330, 1336, 1345, 1362, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1475, 1477, 1496, 1542, 1578, 1579, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1632, 1643, 1684, 1703, 1716, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1797, 1816, 
1855, 1870, 1927, 1928, 1964, 2012, 2042, 2045, 2058, 2075, 2080], "are_deterministic_algorithms_en": 2, "use_deterministic_algorithm": [2, 27, 501, 876, 1109, 1110, 1111, 1253, 1869, 2059], "benchmark": [2, 19, 2012, 2014, 2045, 2053, 2069, 2096, 2102, 2108], "fastest": [2, 17, 19, 880, 1716, 1770, 1783, 1784, 1796, 2052, 2059, 2067], "benchmark_limit": 2, "maximum": [2, 27, 35, 47, 48, 52, 501, 697, 699, 773, 797, 822, 823, 824, 827, 828, 877, 1021, 1056, 1058, 1064, 1072, 1073, 1087, 1088, 1109, 1110, 1111, 1154, 1171, 1233, 1234, 1235, 1345, 1370, 1392, 1484, 1623, 1624, 1722, 1800, 1808, 1833, 1847, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1966, 2014, 2022, 2040, 2041, 2045, 2057, 2066, 2073, 2087, 2088, 2098, 2106, 2113, 2115], "try": [2, 3, 4, 7, 8, 28, 30, 39, 40, 44, 47, 52, 59, 60, 84, 85, 975, 977, 1078, 1162, 1171, 1177, 1187, 1211, 1277, 1279, 1288, 1309, 1392, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1570, 1571, 1573, 1579, 1585, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1684, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1976, 2011, 2013, 2016, 2017, 2020, 2042, 2045, 2048, 2050, 2052, 2057, 2065, 2067, 2068, 2070, 2071, 2075, 2080, 2097, 2098, 2101, 2102, 2104, 2111, 2113], "dispatch": [2, 28, 55, 64, 86, 87, 89, 1188, 1277, 1388, 1389, 1965, 1967, 2016, 2020, 2045, 2048, 2063, 2065, 2102, 2103, 2112], "v8": [2, 20], "api": [2, 3, 5, 8, 9, 14, 20, 24, 28, 29, 30, 34, 37, 39, 40, 44, 45, 46, 47, 48, 55, 56, 59, 63, 65, 66, 71, 75, 76, 77, 235, 802, 826, 864, 898, 899, 900, 901, 902, 914, 919, 920, 964, 977, 980, 1008, 1042, 1043, 1044, 1050, 1051, 1053, 1081, 1112, 1152, 1166, 1169, 1170, 1172, 1177, 1185, 1197, 1283, 1284, 1286, 1392, 1408, 1716, 1766, 1768, 1769, 1777, 1855, 1866, 1957, 1967, 1976, 1995, 2006, 2011, 2022, 2023, 2024, 2026, 2030, 2032, 2033, 2035, 2042, 2046, 2047, 2060, 2062, 2071, 2075, 2076, 2080, 2083, 2085, 2089, 2092, 2093, 2100, 2104, 2112, 2115], "get_fastpath_en": 2, "fast": [2, 7, 8, 23, 28, 64, 922, 1467, 1488, 1489, 1490, 1558, 1574, 1723, 1724, 1808, 1814, 1833, 1855, 1870, 2012, 2044, 2045, 2048, 2053, 2067, 2075, 2080, 2083, 2084, 2092, 2099, 2101, 2104, 2105, 2113], "path": [2, 3, 4, 8, 14, 20, 28, 30, 45, 47, 50, 51, 52, 55, 64, 736, 932, 933, 934, 1008, 1108, 1196, 1272, 1526, 1543, 1574, 1778, 2011, 2012, 2014, 2015, 2042, 2045, 2048, 2051, 2053, 2060, 2063, 2068, 2069, 2080, 2090, 2093, 2096, 2104, 2105], "transformerencod": 2, "multiheadattent": [2, 1570, 1572, 1574, 2070], "fastpath": [2, 1532, 2101], "condit": [2, 5, 12, 21, 23, 52, 53, 60, 64, 66, 69, 74, 75, 619, 622, 626, 696, 762, 912, 922, 923, 989, 1269, 1288, 1302, 1304, 1309, 1312, 1318, 1319, 1320, 1331, 1336, 1477, 1496, 1542, 1574, 1759, 1764, 1951, 1979, 2013, 2014, 2015, 2017, 2025, 2035, 2042, 2048, 2058, 2085, 2098, 2099, 2100, 2101, 2106], "met": [2, 12, 619, 989, 1312, 1319, 1320, 1331, 1345, 1574, 1912], "set_fastpath_en": 2, "verbos": [2, 14, 19, 28, 64, 682, 1187, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 2011, 2022, 2034, 2065, 2085, 2111], "On": [2, 14, 22, 23, 28, 29, 35, 48, 55, 60, 64, 688, 691, 943, 955, 1320, 1367, 1377, 1453, 1454, 1455, 1456, 1457, 1458, 1478, 1493, 1494, 1495, 1496, 1497, 1513, 1536, 1542, 1555, 1716, 1783, 1784, 1793, 1796, 2012, 2013, 2023, 2042, 2044, 2045, 2048, 2058, 2068, 2075, 2076, 2077, 2080, 2096, 2098, 2099, 2101, 2102], "demand": [2, 23, 1044, 2015, 2054, 2057, 2075], "onemkl": 2, "easier": [2, 7, 23, 52, 64, 2013, 2015, 2035, 
2042, 2043, 2048, 2049, 2051, 2080, 2102, 2111], "dump": [2, 64, 1008, 2047, 2061, 2102, 2111, 2115], "durat": [2, 19, 28, 44, 47, 940, 1388, 1389, 2029, 2063, 2069, 2102, 2111], "kernel": [2, 3, 4, 11, 14, 18, 20, 28, 55, 63, 488, 682, 741, 742, 743, 744, 745, 774, 775, 776, 929, 975, 1006, 1011, 1013, 1014, 1050, 1051, 1084, 1086, 1394, 1399, 1435, 1436, 1437, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1473, 1474, 1493, 1494, 1495, 1502, 1503, 1504, 1505, 1506, 1507, 1519, 1520, 1521, 1522, 1523, 1524, 1578, 1607, 1608, 1609, 1610, 1611, 1612, 1627, 1628, 1684, 1723, 1724, 1734, 1965, 1967, 1982, 1983, 2008, 2013, 2020, 2026, 2035, 2045, 2046, 2048, 2049, 2051, 2056, 2058, 2065, 2067, 2069, 2071, 2080, 2096, 2099, 2100, 2101, 2102, 2103, 2104, 2111, 2114], "mkl_verbos": 2, "methodologi": 2, "larg": [2, 3, 7, 8, 23, 28, 33, 34, 64, 682, 861, 995, 997, 1064, 1309, 1318, 1336, 1430, 1472, 1578, 1626, 1716, 1808, 1894, 1907, 1928, 2012, 2032, 2034, 2035, 2044, 2045, 2046, 2048, 2050, 2055, 2058, 2060, 2063, 2065, 2068, 2071, 2075, 2080, 2083, 2086, 2096, 2102, 2109, 2111, 2113], "moreov": [2, 30, 83, 515, 1716, 1796, 2114], "investig": [2, 7, 28, 60, 2109], "singl": [2, 3, 5, 12, 14, 19, 24, 28, 30, 32, 33, 35, 37, 40, 45, 46, 47, 55, 56, 58, 59, 60, 61, 63, 64, 66, 74, 75, 256, 682, 697, 698, 768, 769, 770, 771, 774, 775, 776, 816, 898, 911, 912, 913, 914, 915, 916, 917, 929, 944, 953, 957, 975, 977, 989, 993, 997, 1050, 1053, 1165, 1167, 1168, 1171, 1173, 1177, 1230, 1231, 1235, 1257, 1271, 1272, 1288, 1289, 1292, 1350, 1374, 1388, 1389, 1428, 1429, 1432, 1433, 1436, 1437, 1439, 1453, 1454, 1455, 1457, 1458, 1461, 1462, 1473, 1474, 1480, 1493, 1494, 1495, 1498, 1520, 1521, 1526, 1534, 1538, 1539, 1541, 1555, 1566, 1591, 1592, 1593, 1594, 1595, 1596, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1627, 1628, 1629, 1657, 1658, 1659, 1702, 1708, 1710, 1713, 1714, 1715, 1716, 1721, 1722, 1725, 1727, 1729, 1766, 1767, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1793, 1794, 1795, 1796, 1797, 1802, 1823, 1849, 1870, 1915, 1921, 1922, 1960, 1961, 1964, 1971, 1972, 1976, 2012, 2013, 2015, 2016, 2023, 2026, 2032, 2034, 2035, 2036, 2042, 2044, 2045, 2048, 2049, 2051, 2052, 2055, 2057, 2058, 2059, 2060, 2065, 2067, 2068, 2069, 2070, 2076, 2077, 2080, 2083, 2086, 2087, 2091, 2095, 2096, 2098, 2100, 2101, 2102, 2104, 2110, 2111], "enough": [2, 8, 24, 64, 1187, 1194, 1284, 1576, 1909, 1910, 1911, 1912, 1913, 1914, 1964, 2015, 2032, 2035, 2045, 2048, 2052, 2067, 2068, 2083, 2099, 2102, 2112], "scope": [2, 7, 12, 50, 64, 83, 989, 1068, 1162, 1566, 1747, 2013, 2015, 2016, 2045, 2050, 2065, 2068, 2077, 2101, 2110], "second": [2, 5, 14, 18, 28, 32, 33, 37, 40, 47, 50, 52, 61, 64, 688, 691, 693, 696, 762, 796, 877, 878, 879, 887, 895, 908, 909, 922, 923, 943, 947, 948, 950, 951, 952, 955, 1007, 1019, 1066, 1071, 1085, 1086, 1096, 1098, 1099, 1105, 1113, 1154, 1155, 1167, 1170, 1171, 1172, 1176, 1177, 1200, 1214, 1215, 1226, 1229, 1237, 1238, 1239, 1249, 1261, 1295, 1297, 1305, 1341, 1344, 1350, 1351, 1361, 1367, 1371, 1373, 1374, 1376, 1377, 1418, 1423, 1426, 1430, 1436, 1437, 1443, 1454, 1455, 1457, 1458, 1468, 1469, 1476, 1477, 1491, 1494, 1495, 1496, 1514, 1518, 1520, 1521, 1542, 1555, 1614, 1616, 1650, 1706, 1747, 1783, 1784, 1787, 1793, 1797, 1808, 1810, 1854, 1904, 1912, 1948, 1953, 1955, 1970, 1973, 1976, 2015, 2016, 2020, 2025, 2042, 2045, 2048, 2049, 2050, 2051, 2052, 2055, 2059, 2063, 2065, 2069, 2075, 2081, 2085, 2093, 2096, 2099], "verbose_on": 2, "level": [2, 3, 7, 8, 9, 12, 15, 
18, 23, 25, 28, 30, 33, 37, 39, 40, 44, 52, 53, 55, 59, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 87, 88, 89, 501, 682, 749, 898, 899, 900, 901, 902, 914, 1166, 1181, 1197, 1200, 1216, 1439, 1462, 1716, 1810, 1969, 2012, 2013, 2016, 2017, 2022, 2029, 2032, 2035, 2040, 2042, 2044, 2048, 2051, 2052, 2063, 2068, 2069, 2070, 2071, 2075, 2080, 2085, 2090, 2091, 2092, 2098, 2099, 2100, 2102, 2103, 2106, 2109, 2110, 2111, 2112], "verbose_off": 2, "dnn": [2, 2044], "onednn": [2, 858, 859, 862, 1274, 1281, 2066, 2070], "former": [2, 55, 1496, 1526, 2042], "dnnl_verbos": 2, "verbose_on_cr": 2, "set_flag": 2, "_enabl": 2, "get_opt_einsum": 2, "packag": [2, 7, 8, 16, 17, 24, 33, 35, 52, 59, 63, 1632, 2011, 2012, 2030, 2031, 2048, 2055, 2063, 2067, 2073, 2075, 2085, 2089, 2093, 2097, 2118], "els": [2, 7, 12, 23, 28, 30, 33, 35, 37, 39, 47, 52, 64, 66, 605, 762, 942, 946, 989, 1284, 1290, 1491, 1566, 1670, 1684, 1716, 1783, 1784, 1787, 1793, 1794, 1795, 1796, 1850, 1851, 1883, 1889, 1967, 2013, 2015, 2017, 2018, 2023, 2032, 2035, 2045, 2048, 2049, 2051, 2055, 2056, 2063, 2064, 2065, 2067, 2068, 2082, 2093, 2096, 2097, 2098, 2099, 2110], "einsum": [2, 2014, 2066], "readthedoc": [2, 1108], "io": [2, 7, 14, 23, 30, 52, 1108, 1280, 1283, 1344, 1496, 1497, 1574, 1858, 2061, 2063, 2070], "en": [2, 14, 24, 1108, 2053, 2085, 2116], "path_find": [2, 1108], "html": [2, 3, 4, 7, 11, 14, 15, 48, 55, 682, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 975, 1108, 1576, 1964, 1967, 2020, 2043, 2053, 2055, 2059, 2068, 2069, 2085], "calcul": [2, 19, 23, 28, 30, 37, 44, 762, 770, 771, 801, 804, 821, 822, 826, 919, 963, 997, 1096, 1100, 1108, 1112, 1263, 1269, 1328, 1350, 1351, 1353, 1435, 1436, 1437, 1439, 1440, 1441, 1442, 1445, 1456, 1457, 1458, 1472, 1477, 1479, 1480, 1488, 1489, 1490, 1498, 1566, 1575, 1578, 1579, 1599, 1600, 1601, 1605, 1624, 1629, 1632, 1643, 1673, 1731, 1743, 1765, 1769, 1771, 1802, 1848, 1921, 1922, 1953, 1955, 1971, 1972, 2042, 2043, 2047, 2051, 2052, 2058, 2061, 2070, 2080, 2091], "contract": [2, 53, 1108, 1944, 2012, 2034, 2068, 2097], "fall": [2, 7, 14, 19, 20, 23, 52, 797, 922, 975, 1200, 1235, 1286, 1344, 1486, 1558, 1641, 1688, 1721, 1722, 2011, 2065, 2070, 2102, 2109], "left": [2, 23, 64, 462, 501, 816, 822, 868, 944, 948, 951, 953, 954, 959, 964, 1108, 1128, 1135, 1149, 1152, 1153, 1157, 1226, 1230, 1231, 1235, 1269, 1270, 1277, 1292, 1313, 1322, 1326, 1330, 1333, 1334, 1335, 1336, 1338, 1350, 1351, 1412, 1430, 1435, 1436, 1437, 1438, 1439, 1453, 1454, 1455, 1472, 1479, 1492, 1493, 1494, 1495, 1514, 1515, 1516, 1517, 1519, 1520, 1521, 1529, 1530, 1531, 1535, 1536, 1575, 1578, 1579, 1580, 1581, 1587, 1589, 1632, 1652, 1671, 1684, 1722, 1800, 1801, 1814, 1842, 1855, 1862, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1923, 1944, 1949, 1970, 2013, 2014, 2016, 2042, 2048, 2051, 2055, 2073, 2081, 2099, 2109, 2113], "strategi": [2, 3, 7, 17, 23, 24, 28, 32, 40, 55, 911, 913, 1108, 1169, 1286, 1430, 1716, 1723, 1724, 1808, 2015, 2049, 2052, 2071, 2099, 2102], "auto": [2, 28, 52, 56, 84, 85, 1108, 1438, 1439, 2016, 2065, 2083, 2085, 2101], "greedi": [2, 32, 1108], "doc": [2, 3, 4, 9, 11, 15, 47, 48, 53, 88, 682, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 842, 975, 1177, 1291, 1571, 1572, 1573, 1574, 1964, 1976, 2017, 2020, 2032, 2043, 2046, 2047, 2048, 2055, 2059, 2068, 2070, 2075, 2080, 2085, 2093, 2099, 2103, 2107], "timer": [3, 31, 2012], "stmt": [3, 2044], "setup": [3, 14, 24, 30, 33, 47, 48, 55, 65, 822, 823, 824, 827, 
1177, 1716, 1976, 2044, 2045, 2056, 2069, 2075, 2076, 2097], "global_setup": 3, "perf_count": 3, "label": [3, 6, 7, 23, 28, 32, 938, 1108, 1430, 1439, 1445, 1459, 1461, 1485, 1518, 1529, 1530, 1616, 1758, 1801, 2047, 2051, 2057, 2059, 2085], "sub_label": 3, "descript": [3, 7, 13, 14, 20, 23, 25, 40, 47, 64, 736, 1244, 1245, 1246, 1318, 1345, 1597, 1826, 1870, 2015, 2016, 2025, 2045, 2048, 2052, 2054, 2055, 2065, 2088, 2092, 2103, 2109, 2115, 2116], "env": [3, 28, 35, 40, 45, 47, 48, 49, 51, 64, 1064, 1187, 1188, 2047, 2058, 2063, 2064, 2075, 2102, 2111], "num_thread": [3, 2057], "languag": [3, 14, 40, 52, 1284, 1430, 1570, 1631, 2050, 2062, 2099], "measur": [3, 35, 44, 1010, 1056, 1058, 1304, 1385, 1438, 1439, 1459, 1485, 1492, 1517, 1518, 1575, 1576, 1604, 1664, 1810, 1981, 2012, 2028, 2045, 2054, 2055, 2067, 2080, 2111], "statement": [3, 12, 35, 53, 60, 64, 66, 71, 74, 75, 989, 1273, 1288, 2017, 2042, 2048, 2057, 2060, 2062, 2065, 2068, 2073, 2075, 2097, 2099], "full": [3, 7, 8, 14, 15, 23, 24, 28, 30, 32, 33, 35, 47, 51, 52, 55, 56, 60, 64, 488, 515, 903, 907, 908, 909, 944, 953, 975, 1136, 1137, 1138, 1142, 1143, 1145, 1164, 1176, 1177, 1230, 1231, 1276, 1310, 1311, 1318, 1319, 1320, 1331, 1336, 1337, 1345, 1362, 1439, 1445, 1479, 1540, 1570, 1607, 1608, 1609, 1616, 1629, 1676, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1874, 1927, 1928, 1976, 2011, 2013, 2014, 2015, 2016, 2018, 2034, 2041, 2045, 2047, 2048, 2051, 2052, 2055, 2058, 2059, 2063, 2066, 2068, 2070, 2073, 2076, 2084, 2087, 2100, 2102, 2105, 2106, 2111], "org": [3, 4, 7, 9, 10, 11, 12, 15, 24, 33, 35, 48, 55, 682, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 975, 989, 1345, 1496, 1546, 1573, 1576, 1732, 1768, 1816, 1870, 1890, 1967, 2011, 2017, 2020, 2042, 2043, 2055, 2059, 2061, 2065, 2068, 2085, 2086, 2113, 2116], "timeit": [3, 2044], "sever": [3, 12, 15, 23, 28, 35, 55, 64, 740, 741, 742, 743, 744, 745, 768, 769, 774, 775, 776, 784, 785, 918, 919, 1053, 1112, 1207, 1208, 1272, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1435, 1436, 1437, 1438, 1453, 1454, 1455, 1456, 1457, 1458, 1473, 1474, 1493, 1494, 1495, 1514, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1580, 1581, 1591, 1592, 1593, 1594, 1595, 1596, 1599, 1607, 1608, 1609, 1610, 1611, 1612, 1627, 1628, 1650, 1653, 1654, 1655, 1657, 1658, 1659, 1716, 1733, 1734, 1736, 1737, 1769, 1777, 1831, 1832, 1959, 1964, 2013, 2041, 2042, 2044, 2045, 2054, 2055, 2057, 2065, 2067, 2070, 2075, 2099, 2100, 2102, 2114], "awar": [3, 7, 55, 86, 488, 714, 715, 716, 717, 718, 719, 720, 721, 722, 732, 733, 734, 735, 840, 841, 858, 865, 866, 1574, 1716, 1931, 1932, 1934, 1935, 1936, 2034, 2042, 2045, 2068, 2073, 2076, 2080, 2108], "element": [3, 12, 23, 28, 35, 53, 60, 61, 64, 66, 72, 74, 75, 99, 121, 152, 156, 198, 218, 244, 256, 260, 288, 315, 317, 319, 321, 323, 354, 400, 402, 404, 437, 456, 473, 475, 476, 499, 501, 515, 517, 519, 522, 547, 548, 560, 562, 611, 612, 619, 683, 685, 686, 689, 690, 695, 696, 700, 701, 737, 760, 762, 772, 773, 774, 775, 776, 782, 786, 795, 877, 879, 880, 881, 884, 885, 886, 887, 888, 896, 911, 912, 914, 915, 916, 922, 923, 945, 954, 957, 964, 970, 974, 989, 991, 993, 994, 995, 1050, 1087, 1088, 1089, 1090, 1091, 1092, 1095, 1097, 1098, 1099, 1103, 1105, 1108, 1111, 1113, 1114, 1118, 1138, 1142, 1143, 1145, 1147, 1152, 1154, 1155, 1157, 1160, 1162, 1167, 1168, 1170, 1171, 1172, 1176, 1177, 1213, 1214, 1215, 1216, 1226, 1229, 1232, 1233, 1234, 1235, 1249, 1257, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 
1269, 1292, 1294, 1295, 1297, 1314, 1316, 1320, 1331, 1335, 1346, 1347, 1349, 1352, 1354, 1355, 1356, 1357, 1361, 1362, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1395, 1412, 1417, 1418, 1420, 1421, 1422, 1423, 1424, 1434, 1435, 1438, 1439, 1440, 1441, 1442, 1444, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1463, 1464, 1465, 1466, 1467, 1469, 1470, 1472, 1477, 1478, 1481, 1482, 1483, 1484, 1485, 1486, 1491, 1492, 1496, 1498, 1502, 1503, 1504, 1505, 1506, 1507, 1512, 1515, 1517, 1518, 1519, 1520, 1521, 1525, 1527, 1529, 1530, 1531, 1532, 1533, 1534, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546, 1547, 1554, 1556, 1557, 1558, 1559, 1560, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1575, 1576, 1578, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1617, 1621, 1624, 1625, 1630, 1631, 1635, 1636, 1637, 1638, 1641, 1644, 1645, 1647, 1652, 1657, 1658, 1659, 1663, 1664, 1668, 1669, 1671, 1674, 1675, 1676, 1677, 1678, 1679, 1684, 1685, 1686, 1687, 1688, 1690, 1692, 1694, 1695, 1696, 1697, 1702, 1715, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1750, 1751, 1757, 1758, 1760, 1761, 1770, 1771, 1772, 1774, 1779, 1819, 1820, 1823, 1824, 1831, 1832, 1834, 1845, 1848, 1849, 1853, 1855, 1857, 1874, 1877, 1879, 1891, 1892, 1894, 1895, 1899, 1906, 1909, 1910, 1911, 1912, 1913, 1914, 1916, 1917, 1921, 1922, 1926, 1938, 1940, 1941, 1945, 1946, 1947, 1949, 1951, 1952, 1953, 1954, 1955, 1957, 1959, 1960, 1961, 1962, 1971, 1972, 1973, 1976, 1979, 2014, 2016, 2020, 2023, 2035, 2040, 2043, 2044, 2048, 2052, 2058, 2060, 2065, 2067, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2099, 2101, 2104, 2112], "lazili": [3, 17, 30, 1010, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1706, 1981, 2013, 2048, 2109, 2118], "threadpool": 3, "comparison": [3, 24, 28, 52, 64, 1272, 1288, 1289, 1526, 1779, 2017, 2048, 2087, 2091, 2100], "appl": [3, 2030], "synchron": [3, 4, 17, 18, 20, 24, 30, 32, 37, 47, 55, 63, 488, 880, 1010, 1011, 1013, 1021, 1041, 1082, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1313, 1314, 1315, 1316, 1320, 1321, 1327, 1330, 1333, 1334, 1336, 1337, 1345, 1385, 1566, 1716, 1770, 1772, 1848, 1981, 1982, 2012, 2044, 2045, 2047, 2053, 2057, 2075, 2082, 2102, 2114], "focu": [3, 53, 1810], "replic": [3, 23, 30, 34, 55, 88, 1226, 1283, 1453, 1454, 1455, 1462, 1472, 1502, 1503, 1504, 1551, 1552, 1553, 1578, 1671, 1699], "particularli": [3, 23, 24, 50, 1461, 1462, 1533, 2013, 2045, 2098, 2099, 2109], "variat": [3, 35, 2016, 2048, 2067, 2071, 2111], "confound": 3, "quantifi": [3, 1576], "nois": [3, 2014, 2059, 2111], "median": [3, 35, 175, 1418, 1964, 2014, 2033, 2066], "robust": [3, 1345, 2032, 2055], "deviat": [3, 35, 55, 379, 1434, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1566, 1772, 1884, 1889, 1921, 1922, 2040, 2102], "merg": [3, 6, 7, 9, 23, 28, 30, 33, 48, 1527, 1532, 1536], "repeat": [3, 35, 52, 60, 496, 997, 1108, 1197, 1336, 1362, 1522, 1523, 1524, 1604, 1605, 1816, 1848, 1907, 1927, 1928, 1945, 2014, 2016, 2045, 2066, 2069, 2072, 2080, 2106], "autorang": 3, "exact": [3, 14, 23, 37, 50, 53, 87, 341, 743, 744, 745, 797, 877, 878, 879, 922, 923, 975, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1373, 1457, 1496, 1574, 1643, 1716, 1811, 1900, 2040, 2045, 2047, 2057, 2080, 2101, 2102, 2108, 2111, 2112], "discuss": [3, 5, 8, 9, 10, 35, 52, 64, 1486, 1643, 2023, 2042, 2048, 2051, 2055, 2059, 2075, 2077, 2080, 2099, 2100], "docstr": [3, 14, 64, 864, 865, 1272, 
1526, 2011, 2045], "adapt": [3, 52, 768, 769, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1463, 1591, 1592, 1593, 1594, 1595, 1596, 1781, 1782, 1793, 1795, 1802, 2045, 2063, 2098], "field": [3, 7, 19, 28, 30, 32, 37, 40, 44, 45, 53, 55, 64, 490, 828, 896, 897, 940, 1270, 1272, 1430, 1438, 1439, 1459, 1461, 1485, 1491, 1492, 1517, 1518, 1526, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1597, 1604, 1605, 1615, 1632, 1644, 1668, 1676, 1757, 1791, 1797, 2029, 2042, 2047, 2057, 2065, 2075, 2076, 2085, 2099, 2110, 2111], "displai": [3, 18, 1052, 1065, 1187, 1643, 1703, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 2011, 2017, 2022, 2027, 2065, 2085, 2087, 2093, 2102, 2111, 2114], "instruct": [3, 4, 11, 14, 50, 63, 682, 1870, 2013, 2016, 2045, 2058, 2065, 2070, 2087, 2095, 2099, 2101, 2111], "count": [3, 20, 23, 35, 44, 52, 64, 897, 931, 940, 946, 996, 1046, 1162, 1234, 1235, 1270, 1310, 1311, 1757, 1810, 1960, 1961, 2014, 2029, 2045, 2051, 2066, 2069, 2075, 2077, 2079, 2080, 2102, 2111], "wall": [3, 2105], "callgrind": 3, "analog": [3, 52, 64, 488, 696, 918, 1091, 1129, 1131, 1322, 1496, 1796, 1927, 1945, 2035, 2055, 2098], "constructor": [3, 14, 23, 24, 32, 47, 55, 64, 66, 75, 826, 864, 865, 1430, 1531, 1536, 1537, 1555, 1580, 1581, 1716, 1764, 1902, 2012, 2013, 2016, 2017, 2045, 2047, 2055, 2075, 2080, 2082, 2083, 2086, 2116], "snippet": [3, 48, 2011, 2055, 2060, 2093, 2104], "loop": [3, 19, 24, 30, 33, 50, 52, 53, 56, 60, 61, 64, 66, 71, 865, 866, 917, 1053, 1056, 1058, 1171, 1177, 1288, 1716, 1734, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1976, 2013, 2014, 2016, 2029, 2044, 2045, 2050, 2052, 2055, 2058, 2062, 2065, 2067, 2069, 2070, 2085, 2096, 2099, 2101, 2110], "callabl": [3, 5, 12, 23, 24, 28, 32, 33, 35, 37, 40, 45, 50, 52, 53, 55, 63, 64, 121, 400, 844, 975, 977, 989, 1050, 1051, 1053, 1166, 1167, 1168, 1173, 1176, 1177, 1272, 1275, 1278, 1284, 1288, 1344, 1345, 1526, 1570, 1572, 1574, 1576, 1709, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1793, 1794, 1795, 1796, 1797, 1804, 1807, 1976, 2011, 2013, 2015, 2016, 2017, 2020, 2029, 2045, 2048, 2055, 2060, 2063, 2065, 2068, 2069, 2075, 2087, 2090, 2091, 2097, 2100, 2103, 2110, 2112], "present": [3, 9, 23, 28, 30, 47, 53, 55, 897, 940, 942, 1221, 1270, 1272, 1362, 1430, 1496, 1526, 1536, 1757, 1777, 1923, 2011, 2027, 2032, 2033, 2034, 2041, 2042, 2045, 2048, 2052, 2055, 2060, 2065, 2068, 2076, 2079, 2080, 2083, 2099, 2103, 2107, 2113], "default_tim": 3, "dict": [3, 5, 12, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 41, 45, 50, 52, 53, 55, 57, 59, 62, 64, 417, 682, 794, 795, 796, 817, 818, 819, 838, 857, 924, 931, 942, 975, 989, 1050, 1051, 1064, 1074, 1165, 1175, 1177, 1185, 1270, 1271, 1272, 1273, 1279, 1284, 1288, 1289, 1344, 1345, 1462, 1472, 1526, 1527, 1536, 1578, 1706, 1716, 1747, 1766, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1976, 1990, 2011, 2013, 2014, 2016, 2017, 2020, 2027, 2029, 2048, 2055, 2060, 2063, 2065, 2066, 2067, 2075, 2076, 2085, 2090, 2091, 2100, 2107, 2110, 2112], "summar": [3, 4, 48, 1874, 2016, 2035, 2051, 2080, 2099, 2100, 2111], "relu": [3, 24, 33, 52, 60, 61, 64, 702, 703, 707, 708, 709, 710, 711, 712, 713, 717, 718, 719, 720, 721, 722, 725, 726, 727, 728, 729, 730, 731, 767, 794, 795, 816, 857, 863, 1050, 1167, 1177, 1278, 1284, 1526, 1542, 1544, 1555, 1563, 1570, 1572, 1574, 1680, 1682, 1706, 
1778, 1976, 2013, 2014, 2026, 2034, 2035, 2040, 2042, 2055, 2060, 2065, 2066, 2070, 2072, 2073, 2093, 2097, 2104, 2106, 2109, 2110, 2111], "readabl": [3, 13, 23, 45, 52, 64, 1052, 1065, 1942, 2065, 2067, 2100, 2111, 2112], "supplement": 3, "disambigu": [3, 45, 64, 1350, 2111], "ident": [3, 14, 23, 28, 30, 35, 64, 790, 816, 922, 923, 1096, 1200, 1310, 1311, 1312, 1313, 1326, 1338, 1341, 1345, 1362, 1417, 1418, 1434, 1463, 1673, 1723, 1724, 1730, 1733, 1969, 1973, 1979, 2017, 2040, 2058, 2059, 2068, 2072, 2080, 2101, 2111], "easi": [3, 23, 28, 30, 33, 47, 52, 1183, 2013, 2042, 2050, 2054, 2055, 2057, 2068, 2070, 2075, 2076, 2080, 2097, 2100, 2102, 2104], "differenti": [3, 35, 56, 61, 152, 354, 591, 804, 892, 894, 896, 901, 902, 903, 905, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 972, 1053, 1170, 1171, 1172, 1176, 1287, 1319, 1320, 1331, 1362, 1444, 1445, 1468, 1634, 1669, 1716, 1736, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1964, 2012, 2023, 2048, 2050, 2052, 2065, 2067, 2075, 2086, 2102], "distinguish": [3, 1186, 2070, 2080], "princip": [3, 1319, 1816], "signal": [3, 11, 30, 37, 47, 50, 740, 741, 742, 768, 769, 784, 785, 1124, 1125, 1126, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1269, 1335, 1427, 1428, 1429, 1431, 1432, 1433, 1435, 1436, 1437, 1453, 1454, 1455, 1473, 1474, 1493, 1494, 1495, 1514, 1519, 1520, 1521, 1580, 1581, 1591, 1592, 1593, 1594, 1595, 1596, 1599, 1607, 1610, 1627, 1628, 1650, 1653, 1654, 1655, 1657, 1658, 1659, 1923, 2012, 2032, 2057, 2069, 2075], "form": [3, 7, 9, 11, 23, 28, 33, 35, 46, 47, 48, 52, 53, 55, 60, 64, 762, 781, 787, 966, 1096, 1136, 1138, 1180, 1183, 1272, 1312, 1315, 1320, 1331, 1336, 1428, 1429, 1432, 1433, 1445, 1473, 1474, 1477, 1496, 1526, 1542, 1578, 1579, 1616, 1627, 1628, 1631, 1643, 1671, 1684, 1703, 1736, 1796, 1814, 1944, 2011, 2013, 2017, 2035, 2042, 2048, 2055, 2063, 2065, 2068, 2070, 2080, 2085, 2099, 2100, 2104, 2110], "treat": [3, 35, 46, 52, 58, 64, 66, 71, 72, 323, 473, 795, 922, 923, 977, 1050, 1165, 1201, 1272, 1325, 1328, 1330, 1342, 1344, 1354, 1355, 1356, 1357, 1374, 1420, 1469, 1479, 1498, 1530, 1531, 1532, 1533, 1536, 1541, 1555, 1560, 1624, 1715, 1716, 1766, 1770, 1802, 1855, 1870, 1923, 1938, 1939, 1945, 1960, 2015, 2016, 2033, 2035, 2042, 2063, 2065, 2075, 2080, 2083, 2091, 2099, 2102, 2103, 2107, 2110], "distinct": [3, 11, 82, 1165, 1308, 1309, 1353, 1439, 2016, 2048, 2060, 2065, 2067, 2075, 2076, 2098], "workload": [3, 8, 19, 23, 28, 55, 1053, 2045, 2051, 2054, 2058, 2069, 2075, 2095, 2109], "good": [3, 7, 8, 14, 64, 975, 1169, 1194, 1534, 1810, 1890, 2011, 2020, 2032, 2045, 2048, 2051, 2054, 2055, 2057, 2065, 2068, 2070, 2098, 2099, 2100, 2101, 2108], "intrins": [3, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 794, 795, 2071, 2072], "contrast": [3, 35, 37, 52, 762, 1183, 1477, 1796, 2040, 2042, 2049, 2051, 2080, 2106], "adaptive_autorang": 3, "threshold": [3, 24, 64, 1327, 1330, 1486, 1558, 1563, 1692, 1698, 1810, 1874, 2014, 2041, 2045, 2066, 2085], "min_run_tim": 3, "01": [3, 29, 32, 50, 490, 757, 782, 823, 824, 1109, 1151, 1512, 1647, 1648, 1706, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1802, 1808, 1828, 1881, 1883, 1884, 1889, 1890, 2014, 2040, 2054, 2065, 2067, 2106], "max_run_tim": 3, "callback": [3, 24, 37, 50, 63, 64, 1716, 2029, 2048, 2054, 2069, 2075], "similar": [3, 
7, 9, 11, 20, 23, 28, 35, 44, 53, 55, 60, 63, 64, 495, 517, 714, 715, 716, 717, 718, 719, 722, 732, 733, 734, 735, 747, 748, 758, 766, 804, 880, 895, 908, 909, 956, 961, 974, 975, 1154, 1155, 1272, 1459, 1460, 1472, 1485, 1488, 1489, 1490, 1526, 1575, 1614, 1757, 1771, 1772, 1797, 1820, 1848, 1855, 1904, 1939, 1945, 1961, 2015, 2016, 2032, 2033, 2035, 2042, 2044, 2045, 2048, 2049, 2052, 2058, 2065, 2068, 2070, 2075, 2076, 2080, 2081, 2086, 2098, 2101, 2102, 2104, 2107, 2111, 2116], "blocked_autorang": 3, "variablil": 3, "until": [3, 5, 7, 23, 24, 28, 30, 32, 37, 47, 55, 63, 64, 488, 1010, 1011, 1013, 1044, 1162, 1293, 1345, 1385, 1388, 1389, 1632, 1706, 1716, 1799, 1801, 1805, 1945, 1981, 1982, 1995, 2028, 2032, 2040, 2045, 2050, 2051, 2060, 2063, 2065, 2069, 2075, 2077, 2082, 2099, 2102], "iqr": 3, "smaller": [3, 19, 23, 64, 501, 547, 997, 1496, 1716, 1810, 1825, 1870, 1915, 2045, 2060, 2068, 2097, 2113, 2116], "reach": [3, 7, 8, 9, 23, 24, 28, 29, 37, 47, 52, 56, 1345, 1716, 1799, 1802, 1805, 1806, 2042, 2048, 2057, 2071, 2075, 2100], "At": [3, 5, 6, 7, 15, 17, 19, 23, 33, 1142, 1440, 1441, 1442, 1453, 1454, 1455, 1456, 1457, 1458, 1493, 1494, 1495, 1965, 1969, 2026, 2035, 2044, 2052, 2070, 2075, 2081, 2099, 2100, 2102, 2111], "high": [3, 4, 7, 8, 9, 10, 15, 24, 28, 35, 37, 44, 46, 48, 64, 121, 1445, 1573, 1837, 1838, 1870, 1969, 2014, 2024, 2029, 2032, 2048, 2052, 2053, 2055, 2056, 2057, 2067, 2070, 2071, 2075, 2080, 2085, 2086, 2087, 2091, 2095, 2096, 2099, 2106, 2111, 2114, 2115], "pseudo": [3, 90], "block_siz": 3, "enough_data": 3, "len": [3, 23, 30, 64, 66, 71, 220, 547, 695, 697, 698, 701, 1127, 1131, 1134, 1138, 1141, 1145, 1176, 1276, 1339, 1360, 1372, 1417, 1420, 1462, 1671, 1742, 1744, 1760, 1801, 1808, 1912, 1915, 1921, 1922, 1926, 1970, 1971, 1972, 2013, 2014, 2016, 2048, 2066, 2079, 2080, 2085, 2099, 2110], "small_iqr": 3, "break": [3, 7, 28, 35, 58, 64, 682, 896, 975, 977, 1187, 1272, 1318, 1526, 1855, 1874, 2012, 2017, 2022, 2024, 2047, 2067, 2080, 2089, 2093, 2096, 2098, 2100, 2103, 2105, 2107], "stop": [3, 5, 28, 35, 37, 47, 48, 50, 53, 868, 897, 940, 1046, 1187, 1270, 1345, 1445, 1757, 1810, 1842, 2013, 2016, 2069, 2075, 2079, 2099, 2103], "repetit": [3, 1848, 1945], "statist": [3, 24, 28, 35, 801, 802, 822, 823, 824, 826, 827, 1019, 1056, 1058, 1059, 1060, 1062, 1064, 1065, 1066, 1071, 1072, 1073, 1074, 1085, 1086, 1350, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1566, 2029, 2042, 2050, 2067, 2070, 2073, 2102, 2111], "minimum": [3, 14, 19, 24, 47, 48, 52, 698, 699, 773, 797, 822, 823, 824, 827, 828, 878, 946, 1088, 1155, 1233, 1234, 1235, 1375, 1445, 1484, 1576, 1800, 1801, 1808, 1890, 1909, 1910, 1911, 1912, 1913, 1914, 2014, 2040, 2042, 2052, 2066, 2073, 2083, 2106], "total_tim": 3, "choic": [3, 8, 9, 28, 1288, 1336, 1527, 1536, 1833, 2044, 2065, 2073, 2080, 2098, 2099, 2111], "block": [3, 7, 8, 20, 23, 28, 30, 32, 34, 47, 50, 55, 63, 64, 488, 585, 586, 587, 938, 954, 1010, 1064, 1293, 1345, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1502, 1503, 1504, 1505, 1506, 1507, 1529, 1578, 1626, 1702, 1716, 1909, 1910, 1911, 2013, 2015, 2016, 2029, 2032, 2036, 2042, 2045, 2047, 2051, 2052, 2065, 2070, 2075, 2080, 2092, 2109, 2113, 2115], "qualiti": [3, 7, 24], "balanc": [3, 975], "compet": [3, 2057], "amort": 3, "invoc": [3, 5, 64, 844, 975, 980, 1185, 1275, 1288, 2013, 2016, 2045, 2048, 2054, 2065, 2075, 2077, 2096, 2109], "less": [3, 6, 7, 14, 23, 24, 28, 35, 50, 52, 55, 366, 682, 911, 913, 922, 923, 997, 1060, 1078, 1108, 1152, 1156, 
1165, 1297, 1309, 1345, 1361, 1362, 1430, 1486, 1540, 1549, 1558, 1564, 1760, 1846, 1953, 1955, 2011, 2014, 2016, 2035, 2045, 2048, 2051, 2057, 2058, 2066, 2070, 2099, 2102, 2109], "bias": [3, 24, 736, 762, 796, 1440, 1441, 1442, 1477, 1478, 1480, 1488, 1489, 1490, 1496, 1497, 1498, 1541, 1542, 1544, 1566, 1586, 1587, 1715, 2067], "trivial": [3, 37, 40, 737, 965, 1180, 1187, 1730, 1912, 2065, 2077, 2099, 2101], "low": [3, 7, 18, 24, 35, 55, 501, 914, 1197, 1216, 1445, 1816, 1833, 1837, 1838, 1855, 1928, 2012, 2014, 2029, 2032, 2045, 2057, 2069, 2087, 2099, 2105, 2106, 2112], "digit": [3, 959, 1874, 2011, 2027, 2054, 2058], "microsecond": [3, 2045], "bia": [3, 9, 28, 33, 52, 66, 714, 715, 716, 717, 718, 719, 720, 721, 722, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 740, 741, 742, 743, 744, 745, 751, 753, 754, 755, 756, 758, 762, 763, 766, 767, 774, 775, 776, 783, 794, 1175, 1272, 1282, 1430, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1477, 1478, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1526, 1532, 1542, 1543, 1544, 1570, 1572, 1574, 1587, 1602, 1603, 1607, 1608, 1609, 1610, 1611, 1612, 1633, 1642, 1646, 1649, 1684, 1706, 1716, 1726, 1728, 1730, 1731, 1732, 1746, 1748, 1750, 1765, 1768, 1830, 2014, 2020, 2024, 2026, 2028, 2039, 2048, 2055, 2060, 2063, 2067, 2070, 2072, 2080, 2085, 2106], "period": [3, 9, 32, 47, 944, 953, 1019, 1052, 1065, 1066, 1071, 1085, 1086, 1128, 1230, 1231, 1292, 1812, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2014, 2029, 2057, 2115], "overal": [3, 9, 23, 33, 37, 47, 922, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1573, 2042, 2044, 2045, 2057, 2070, 2071, 2105], "main": [3, 7, 23, 24, 28, 29, 30, 35, 37, 38, 39, 40, 45, 47, 48, 50, 51, 52, 64, 262, 498, 975, 1095, 1096, 1097, 1098, 1099, 1335, 1634, 1907, 1952, 1953, 1954, 1955, 2011, 2013, 2020, 2029, 2032, 2034, 2035, 2041, 2042, 2045, 2047, 2048, 2049, 2051, 2055, 2057, 2061, 2062, 2063, 2070, 2075, 2076, 2085, 2086, 2092, 2093, 2097, 2099, 2102, 2110], "collect_callgrind": 3, "collect_baselin": 3, "retain_out_fil": 3, "callgrindstat": [3, 2012], "tupl": [3, 5, 12, 14, 19, 23, 28, 30, 33, 34, 37, 44, 45, 47, 52, 53, 55, 64, 235, 321, 447, 449, 451, 499, 522, 539, 562, 585, 586, 587, 695, 697, 698, 699, 701, 737, 768, 769, 770, 771, 774, 775, 776, 781, 787, 788, 789, 795, 819, 820, 857, 864, 865, 881, 882, 889, 890, 891, 892, 893, 895, 908, 909, 911, 912, 913, 914, 915, 916, 917, 922, 923, 926, 927, 928, 942, 958, 977, 984, 989, 996, 1020, 1021, 1024, 1035, 1053, 1059, 1087, 1088, 1106, 1109, 1111, 1125, 1127, 1128, 1130, 1131, 1133, 1134, 1135, 1137, 1138, 1140, 1141, 1143, 1145, 1148, 1158, 1163, 1165, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1175, 1176, 1177, 1185, 1187, 1196, 1212, 1216, 1234, 1236, 1272, 1279, 1284, 1288, 1289, 1294, 1303, 1308, 1309, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1325, 1329, 1331, 1332, 1334, 1336, 1339, 1342, 1360, 1362, 1364, 1370, 1372, 1374, 1375, 1378, 1380, 1417, 1420, 1427, 1428, 1429, 1431, 1432, 1433, 1435, 1436, 1437, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1462, 1472, 1473, 1474, 1493, 1494, 1495, 1502, 1503, 1504, 1505, 1506, 1507, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1527, 1532, 1536, 1548, 1549, 1550, 1551, 1552, 1553, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1592, 1593, 1595, 1596, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1627, 1628, 1643, 
1657, 1658, 1659, 1669, 1671, 1703, 1704, 1705, 1710, 1716, 1726, 1728, 1736, 1747, 1760, 1766, 1770, 1771, 1775, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1816, 1817, 1826, 1835, 1837, 1839, 1849, 1853, 1854, 1899, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1918, 1921, 1922, 1926, 1927, 1928, 1942, 1943, 1944, 1945, 1946, 1951, 1958, 1959, 1960, 1961, 1962, 1971, 1972, 1976, 1977, 1979, 2009, 2013, 2014, 2017, 2020, 2032, 2035, 2042, 2045, 2048, 2049, 2055, 2060, 2063, 2065, 2067, 2068, 2070, 2075, 2079, 2080, 2085, 2087, 2091, 2099, 2102, 2110, 2112], "modulo": [3, 35, 1156, 1200, 1846], "determin": [3, 5, 8, 11, 14, 17, 18, 22, 23, 24, 28, 33, 35, 37, 45, 47, 55, 60, 64, 87, 88, 89, 825, 828, 911, 913, 922, 923, 944, 953, 960, 965, 1054, 1110, 1129, 1139, 1164, 1165, 1189, 1190, 1230, 1231, 1234, 1235, 1286, 1306, 1318, 1329, 1332, 1353, 1367, 1469, 1473, 1474, 1496, 1532, 1535, 1542, 1585, 1624, 1627, 1628, 1643, 1677, 1703, 1735, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1766, 1776, 1808, 1836, 1838, 1840, 1867, 1909, 1910, 1911, 1912, 1913, 1914, 1923, 1999, 2010, 2016, 2017, 2034, 2045, 2047, 2048, 2051, 2055, 2063, 2065, 2068, 2070, 2075, 2077, 2080, 2082, 2083, 2085, 2087, 2098, 2101, 2102, 2110, 2111, 2118], "itself": [3, 5, 7, 8, 19, 28, 29, 52, 55, 61, 64, 490, 695, 701, 882, 997, 1170, 1171, 1272, 1284, 1288, 1526, 1560, 1690, 1716, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1754, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1901, 2013, 2032, 2048, 2055, 2057, 2059, 2062, 2065, 2068, 2075, 2080, 2084, 2097, 2099, 2101, 2111], "jitter": 3, "interpret": [3, 23, 28, 32, 35, 37, 47, 50, 52, 53, 781, 787, 883, 917, 1063, 1129, 1130, 1131, 1136, 1138, 1139, 1140, 1141, 1162, 1185, 1223, 1235, 1272, 1277, 1284, 1288, 1325, 1345, 1465, 1632, 1643, 1644, 1703, 1867, 1872, 2011, 2015, 2016, 2020, 2032, 2044, 2045, 2065, 2080, 2082, 2084, 2099, 2100], "ideal": [3, 46, 48, 1288, 1797, 2034, 2102], "analysi": [3, 24, 35, 52, 64, 1292, 1816, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2017, 2024, 2055, 2062, 2063, 2091, 2097, 2099, 2100, 2101, 2105], "valgrind": 3, "degrad": [3, 14, 55, 2044, 2047, 2048, 2080], "due": [3, 4, 5, 7, 24, 28, 30, 35, 46, 55, 60, 64, 66, 68, 86, 488, 883, 911, 912, 917, 993, 1285, 1308, 1309, 1336, 1362, 1465, 1479, 1570, 1579, 1643, 1684, 1764, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1928, 2013, 2045, 2052, 2053, 2059, 2063, 2065, 2070, 2071, 2075, 2077, 2102, 2107, 2111, 2113, 2115], "amelior": 3, "suffici": [3, 14, 24, 28, 35, 39, 47, 48, 53, 55, 488, 1783, 1784, 1796, 1928, 2011, 2020, 2080, 2083, 2102], "callgrind_control": 3, "callgrind_annot": 3, "boundari": [3, 33, 64, 781, 787, 959, 1226, 1447, 1448, 1449, 1450, 1451, 1452, 1548, 1549, 1550, 1551, 1552, 1553, 1579, 1582, 1583, 1584, 1643, 1703, 1802, 1808, 2014, 2047, 2068, 2075, 2102], "caller": [3, 37, 47, 64, 857, 1272, 1526, 1574, 1912, 2042, 2045, 2075, 2077], "structur": [3, 5, 9, 12, 23, 24, 28, 33, 36, 37, 40, 48, 52, 53, 55, 64, 66, 67, 71, 75, 895, 908, 909, 941, 975, 989, 1159, 1172, 1177, 1185, 1192, 1193, 1288, 1526, 1586, 1743, 1747, 1779, 1816, 1928, 1976, 2012, 2015, 2017, 2023, 2029, 2035, 2045, 2047, 2048, 2049, 2051, 2057, 2060, 2061, 2063, 2065, 2067, 2068, 2075, 2085, 2086, 2087, 2089, 2091, 2093, 2098, 2099, 2100, 2102, 2103, 2107, 2113], "restrict": [3, 8, 12, 23, 35, 53, 55, 60, 61, 977, 989, 1344, 1367, 1461, 2015, 2016, 
2017, 2034, 2035, 2042, 2045, 2052, 2070, 2098], "builtin": [3, 28, 64, 66, 75, 1285, 1344, 1842, 1934, 2013, 2015, 2017, 2075, 2077, 2099, 2102, 2111], "surpris": [3, 8, 55, 2011, 2052, 2058, 2101], "serial": [3, 15, 23, 28, 30, 32, 47, 1272, 1282, 1283, 1344, 1526, 1706, 1858, 2011, 2012, 2014, 2027, 2034, 2042, 2045, 2051, 2054, 2055, 2057, 2063, 2068, 2070, 2075, 2076], "subsequ": [3, 7, 14, 15, 17, 28, 52, 55, 64, 975, 1011, 1272, 1284, 1288, 1453, 1454, 1455, 1456, 1457, 1458, 1526, 1555, 2045, 2059, 2065, 2075, 2080, 2093, 2096, 2102], "deseri": [3, 30, 1344, 1706, 2011, 2027, 2060, 2075], "globalsbridg": 3, "care": [3, 7, 14, 28, 35, 55, 63, 64, 1129, 1131, 1186, 1187, 1526, 2032, 2044, 2045, 2047, 2048, 2050, 2055, 2057, 2060, 2065, 2075, 2080, 2096, 2101, 2102, 2103], "reli": [3, 9, 14, 23, 24, 28, 33, 37, 40, 55, 64, 152, 896, 922, 1108, 1462, 1797, 2025, 2042, 2044, 2047, 2048, 2051, 2059, 2060, 2080, 2084, 2101, 2102, 2103, 2105], "pickl": [3, 23, 24, 28, 1272, 1344, 1526, 1858, 2011, 2027, 2032, 2060, 2068, 2075, 2113], "transfer": [3, 23, 28, 55, 2026, 2032, 2045, 2053, 2055, 2068, 2075, 2083], "properli": [3, 7, 23, 24, 30, 32, 37, 47, 63, 977, 1129, 1130, 1131, 1139, 1140, 1141, 1374, 1527, 1528, 1536, 1537, 1716, 1965, 2011, 2023, 2048, 2052, 2055, 2057, 2060, 2063, 2075, 2076, 2083], "profil": [3, 4, 17, 19, 44, 64, 488, 930, 931, 938, 975, 1286, 1709, 1710, 1711, 1712, 1874, 2012, 2051, 2066, 2075, 2092, 2102], "empti": [3, 27, 28, 30, 37, 45, 53, 55, 60, 64, 235, 323, 515, 517, 525, 539, 547, 819, 942, 945, 946, 956, 962, 1108, 1110, 1270, 1273, 1298, 1318, 1319, 1324, 1331, 1354, 1355, 1356, 1357, 1364, 1439, 1461, 1469, 1533, 1605, 1624, 1671, 1764, 1776, 1777, 1912, 1927, 1942, 2010, 2013, 2014, 2015, 2016, 2018, 2026, 2028, 2033, 2034, 2035, 2040, 2043, 2045, 2048, 2059, 2065, 2066, 2068, 2080, 2081, 2086, 2089, 2098, 2099, 2101, 2106, 2109], "drive": [3, 9, 28, 2042], "facil": [3, 25, 1344, 2032], "analyz": [3, 4, 18, 52, 64, 2047, 2048, 2051, 2058, 2111], "manipul": [3, 19, 55, 63, 1276, 2012, 2041, 2050, 2055, 2073, 2110], "1000000": [3, 1780], "mirror": [3, 141, 1716], "semant": [3, 9, 11, 17, 28, 47, 48, 52, 53, 55, 62, 64, 152, 796, 877, 878, 879, 896, 917, 957, 1013, 1064, 1166, 1177, 1185, 1200, 1201, 1276, 1283, 1342, 1439, 1605, 1677, 1723, 1724, 1797, 1961, 1976, 2012, 2015, 2016, 2020, 2023, 2035, 2049, 2055, 2065, 2075, 2080, 2102], "number_per_run": 3, "raw_tim": 3, "task_spec": 3, "serializ": [3, 24, 2013], "consum": [3, 23, 30, 33, 36, 50, 53, 63, 437, 1108, 2032, 2045, 2051, 2057, 2062, 2063, 2065, 2085], "extrapol": 3, "sinc": [3, 7, 23, 24, 28, 30, 34, 35, 40, 50, 52, 55, 64, 317, 490, 536, 749, 787, 795, 810, 811, 965, 1056, 1058, 1060, 1108, 1148, 1149, 1150, 1165, 1269, 1272, 1277, 1283, 1286, 1362, 1438, 1468, 1522, 1523, 1524, 1526, 1579, 1587, 1597, 1632, 1634, 1703, 1706, 1709, 1747, 1766, 1771, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1923, 2013, 2014, 2015, 2016, 2020, 2022, 2026, 2029, 2035, 2041, 2042, 2045, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2060, 2061, 2062, 2065, 2066, 2068, 2069, 2070, 2074, 2075, 2076, 2077, 2080, 2081, 2083, 2084, 2086, 2087, 2096, 2099, 2102, 2107, 2110, 2111], "properti": [3, 23, 28, 29, 30, 32, 35, 39, 44, 47, 52, 53, 55, 64, 794, 883, 909, 926, 936, 1037, 1124, 1125, 1127, 1129, 1130, 1131, 1139, 1140, 1141, 1272, 1434, 1526, 1560, 1585, 1690, 1706, 1717, 1718, 1719, 1757, 
1992, 2012, 2016, 2020, 2029, 2042, 2045, 2048, 2052, 2063, 2067, 2069, 2075, 2080, 2082, 2083, 2086, 2089, 2093, 2098, 2099, 2100, 2101, 2112, 2116], "significant_figur": 3, "figur": [3, 7, 8, 30, 64, 931, 2044, 2047, 2048, 2065, 2077, 2085, 2091, 2099, 2102, 2115], "intend": [3, 30, 47, 52, 64, 893, 903, 906, 908, 1015, 1187, 1292, 1597, 1709, 1710, 1711, 1712, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 2016, 2035, 2040, 2042, 2055, 2068], "interquartil": 3, "mitig": [3, 66, 2051, 2082], "tail": [3, 23, 45], "645": 3, "conjunct": [3, 23, 28, 32, 55, 826, 1597, 1632, 1716, 2073, 2075], "trim_sigfig": 3, "human": [3, 13, 33, 45, 52, 1052, 1065, 2040, 2065, 2100, 2112], "raw": [3, 64, 1162, 1331, 2045, 2068, 2069], "built_with_debug_symbol": 3, "baseline_inclusive_stat": 3, "baseline_exclusive_stat": 3, "stmt_inclusive_stat": 3, "stmt_exclusive_stat": 3, "stmt_callgrind_out": 3, "done": [3, 15, 23, 28, 30, 32, 33, 35, 37, 45, 53, 55, 63, 64, 488, 498, 682, 762, 922, 929, 1125, 1127, 1130, 1131, 1133, 1134, 1137, 1138, 1140, 1141, 1143, 1145, 1288, 1362, 1440, 1441, 1442, 1462, 1465, 1477, 1488, 1489, 1490, 1500, 1501, 1508, 1509, 1510, 1511, 1536, 1566, 1572, 1574, 1624, 1716, 1918, 2016, 2026, 2028, 2032, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2055, 2058, 2063, 2065, 2067, 2070, 2071, 2075, 2077, 2083, 2097, 2098, 2099, 2101, 2108, 2110], "functioncount": [3, 2012], "stat": [3, 58, 828, 838, 839, 1064, 1072, 1073, 1074, 1566, 1716, 2029, 2045, 2090, 2102, 2111], "as_standard": 3, "strip": [3, 1634, 1716, 2013, 2066, 2080], "prefix": [3, 28, 30, 37, 47, 55, 64, 750, 759, 1272, 1526, 1716, 2011, 2022, 2042, 2065, 2068, 2070, 2090, 2109], "stumbl": 3, "filepath": 3, "dif": 3, "compon": [3, 7, 9, 15, 18, 19, 28, 35, 64, 682, 897, 922, 977, 1128, 1129, 1131, 1139, 1140, 1141, 1312, 1570, 1571, 1572, 1573, 1574, 1706, 1816, 1923, 1974, 1975, 2017, 2022, 2042, 2047, 2048, 2054, 2055, 2063, 2080, 2097, 2103, 2111], "locat": [3, 9, 14, 28, 30, 33, 35, 44, 89, 156, 256, 488, 515, 682, 957, 959, 1022, 1087, 1088, 1294, 1344, 1370, 1375, 1378, 1412, 1462, 1472, 1561, 1578, 1597, 1632, 1699, 1702, 1716, 1736, 1827, 1862, 1883, 1905, 1908, 1964, 2011, 2013, 2027, 2045, 2060, 2065, 2068, 2069, 2075, 2077, 2080, 2085, 2090, 2093, 2096, 2101], "someth": [3, 7, 52, 53, 57, 64, 66, 68, 911, 913, 1108, 1187, 1197, 1272, 1288, 1526, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 2018, 2029, 2032, 2033, 2042, 2053, 2061, 2065, 2075, 2097, 2098, 2099, 2102, 2104, 2110, 2111, 2112], "resembl": [3, 15, 2093], "23234231": 3, "first_build_dir": 3, "foo": [3, 14, 28, 44, 45, 52, 63, 64, 66, 74, 75, 826, 857, 862, 975, 1165, 1166, 1175, 1270, 1275, 1280, 1283, 1284, 1287, 1288, 1766, 1965, 1967, 2011, 2013, 2015, 2016, 2020, 2024, 2048, 2055, 2065, 2068, 2087, 2096, 2099, 2102, 2111], "9823794": 3, "bar": [3, 7, 44, 52, 64, 66, 74, 75, 857, 862, 997, 1165, 1275, 1283, 1921, 1922, 1971, 1972, 2011, 2013, 2015, 2027, 2055, 2065, 2068, 2087], "53453": 3, "src": [3, 28, 64, 198, 230, 315, 323, 473, 514, 515, 516, 517, 518, 519, 521, 540, 1099, 1166, 1570, 1573, 1574, 1777, 1859, 1860, 1861, 1865, 1895, 1938, 1964, 2014, 2068, 2106], "function_that_actually_chang": 3, "second_build_dir": 3, "cancel": [3, 1269], "site": [3, 7, 2046, 2093], "denois": 3, "explan": [3, 9, 20, 25, 40, 865, 1272, 1526, 2012, 2047, 2048, 2055, 2102, 2108, 2111], "delta": [3, 35, 762, 997, 1477, 1486, 1496, 1558, 1641, 1781, 1921, 1922, 1949, 1971, 1972, 2014, 2040], "inclus": 
[3, 35, 52, 90, 515, 1191, 1197, 1233, 1235, 1343, 1359, 1365, 1588, 1589, 1837, 1838, 1920, 2074, 2087], "diff": [3, 7, 2013, 2014, 2066], "One": [3, 8, 14, 28, 30, 52, 53, 58, 60, 64, 585, 954, 977, 1166, 1177, 1263, 1579, 1670, 1723, 1724, 1730, 1734, 1797, 1802, 1810, 1837, 1838, 1959, 1976, 2013, 2015, 2016, 2034, 2035, 2043, 2044, 2045, 2048, 2054, 2069, 2077, 2085, 2097, 2098, 2100, 2101, 2109, 2115], "reason": [3, 8, 9, 23, 28, 30, 37, 52, 55, 60, 65, 83, 682, 844, 908, 909, 1129, 1131, 1272, 1288, 1289, 1308, 1309, 1336, 1373, 1438, 1465, 1526, 1634, 1684, 1736, 1858, 2013, 2015, 2016, 2020, 2022, 2042, 2047, 2049, 2051, 2058, 2060, 2065, 2075, 2083, 2097, 2098, 2099, 2101, 2102, 2105, 2111, 2112], "unit": [3, 11, 14, 35, 37, 48, 55, 64, 762, 763, 1126, 1144, 1434, 1444, 1467, 1470, 1475, 1476, 1477, 1478, 1545, 1546, 1556, 1597, 1621, 1625, 1630, 1631, 1678, 1687, 1740, 1741, 1745, 1748, 1750, 1753, 1839, 1951, 2030, 2042, 2044, 2051, 2052, 2068, 2095, 2096], "next": [3, 23, 28, 35, 47, 53, 55, 64, 562, 762, 1185, 1276, 1426, 1478, 1497, 1544, 1716, 1759, 2032, 2041, 2042, 2044, 2045, 2048, 2049, 2051, 2055, 2057, 2063, 2069, 2075, 2076, 2080, 2083, 2085, 2093, 2096, 2099, 2100, 2102], "logic": [3, 5, 12, 14, 23, 38, 51, 55, 64, 947, 949, 950, 952, 982, 983, 1125, 1127, 1130, 1131, 1133, 1134, 1137, 1138, 1140, 1141, 1143, 1145, 1166, 1207, 1208, 1276, 1354, 1355, 1356, 1357, 1367, 1532, 1716, 1825, 1852, 1936, 2016, 2017, 2045, 2047, 2048, 2049, 2052, 2070, 2099], "question": [3, 10, 23, 64, 1195, 1768, 2012, 2042, 2051, 2092, 2098, 2101], "involv": [3, 5, 7, 9, 11, 23, 28, 52, 55, 58, 60, 64, 82, 1183, 1716, 1870, 2016, 2033, 2035, 2042, 2045, 2047, 2050, 2055, 2065, 2070, 2075, 2076, 2077, 2080, 2098, 2101, 2109], "look": [3, 4, 7, 8, 9, 12, 15, 28, 35, 46, 47, 52, 53, 57, 60, 64, 66, 76, 77, 488, 865, 911, 913, 989, 1185, 1196, 1272, 1367, 1430, 1526, 1623, 1749, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1909, 1910, 1911, 1913, 1914, 2013, 2015, 2020, 2033, 2041, 2042, 2045, 2049, 2054, 2055, 2057, 2061, 2065, 2068, 2070, 2075, 2076, 2091, 2096, 2097, 2098, 2099, 2102, 2103, 2104, 2105, 2108, 2109, 2110, 2111, 2113], "autom": [3, 8, 64, 2013, 2070, 2093, 2102, 2111], "easili": [3, 7, 8, 11, 24, 28, 30, 33, 52, 1166, 1533, 1632, 1671, 1797, 1855, 1965, 2028, 2048, 2049, 2052, 2055, 2060, 2067, 2074, 2075, 2077, 2085, 2102, 2110, 2111], "exclus": [3, 23, 28, 35, 37, 47, 52, 55, 64, 737, 1235, 1716, 1837, 1838, 1841, 2042, 2087], "basi": [3, 9, 10, 35, 682, 1345, 1802, 2045, 2054, 2070, 2075], "thought": [3, 44, 64, 1126, 1128, 1144, 1191, 2099], "path_and_function_nam": 3, "children": [3, 33, 40, 55, 64, 793, 1272, 1526, 2032, 2051, 2055, 2068, 2077], "identifi": [3, 7, 9, 28, 30, 37, 41, 44, 47, 48, 50, 64, 488, 819, 1235, 1344, 1566, 2017, 2020, 2032, 2043, 2054, 2055, 2068, 2075, 2076, 2077, 2085, 2098, 2109], "hot": [3, 35, 1634, 1670, 2052, 2096], "spot": [3, 1723, 1724], "_data": 3, "truncate_row": 3, "_linewidth": 3, "subtract": [3, 315, 566, 868, 1162, 1634, 1909, 1910, 1911, 1913, 1914, 1924, 2014, 2035, 2066, 2080], "index": [3, 15, 19, 23, 24, 28, 30, 33, 35, 45, 52, 64, 193, 210, 283, 314, 315, 316, 317, 318, 319, 321, 323, 324, 473, 474, 514, 515, 516, 517, 518, 519, 520, 521, 697, 698, 819, 857, 880, 895, 897, 908, 909, 940, 959, 1026, 1029, 1087, 1088, 1128, 1169, 1170, 1171, 1175, 1177, 1213, 1244, 1245, 1246, 1247, 1270, 1289, 1294, 1302, 1320, 1338, 1352, 1360, 1362, 1366, 1370, 1373, 1374, 1375, 1378, 1400, 1403, 
1412, 1418, 1421, 1422, 1430, 1445, 1461, 1468, 1469, 1527, 1528, 1533, 1536, 1537, 1576, 1578, 1623, 1624, 1670, 1742, 1744, 1751, 1752, 1757, 1770, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1811, 1812, 1827, 1859, 1860, 1861, 1862, 1864, 1865, 1866, 1895, 1906, 1907, 1909, 1910, 1911, 1913, 1914, 1923, 1938, 1959, 1960, 1962, 1963, 1964, 1967, 1976, 1984, 1986, 2012, 2013, 2014, 2016, 2023, 2032, 2033, 2034, 2035, 2042, 2045, 2047, 2049, 2053, 2059, 2066, 2077, 2079, 2080, 2083, 2084, 2085, 2086, 2087, 2101, 2102, 2106, 2111], "cpython": [3, 52, 64, 2092, 2100], "known": [3, 7, 9, 22, 27, 28, 32, 41, 47, 53, 56, 59, 61, 66, 76, 77, 1172, 1173, 1187, 1288, 1289, 1344, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1496, 1542, 1556, 1558, 1578, 1643, 1687, 1706, 1882, 1883, 1928, 1942, 2012, 2014, 2018, 2020, 2029, 2032, 2035, 2040, 2042, 2044, 2059, 2065, 2067, 2075, 2077, 2081, 2092, 2098, 2103, 2111], "quit": [3, 7, 64, 1723, 1724, 2016, 2048, 2050, 2068, 2075, 2109], "noisi": 3, "higher": [3, 7, 8, 24, 28, 55, 56, 61, 66, 71, 75, 152, 896, 898, 917, 1013, 1053, 1064, 1100, 1167, 1173, 1177, 1181, 1197, 1200, 1233, 1249, 1419, 1461, 1518, 1533, 1684, 1781, 1827, 1928, 1976, 1982, 2012, 2020, 2045, 2048, 2049, 2052, 2054, 2060, 2070, 2071, 2075, 2083, 2098], "filter": [3, 20, 546, 774, 775, 776, 1269, 1292, 1453, 1454, 1455, 1456, 1457, 1458, 1607, 1608, 1609, 1610, 1611, 1612, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1923, 2016, 2068, 2111], "transform": [3, 12, 23, 30, 33, 34, 52, 55, 60, 62, 81, 82, 83, 417, 783, 799, 840, 841, 860, 861, 966, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1165, 1166, 1167, 1171, 1176, 1194, 1269, 1282, 1443, 1480, 1488, 1489, 1490, 1498, 1513, 1555, 1571, 1572, 1573, 1574, 1597, 1603, 1632, 1649, 1669, 1706, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1890, 1923, 2012, 2024, 2049, 2051, 2063, 2070, 2080, 2085, 2091, 2092, 2093, 2100, 2104], "rather": [3, 8, 9, 14, 28, 37, 40, 50, 52, 55, 64, 66, 75, 781, 787, 935, 965, 1270, 1273, 1344, 1422, 1597, 1632, 1643, 1703, 1731, 1870, 1874, 2013, 2016, 2034, 2035, 2043, 2045, 2048, 2049, 2065, 2068, 2070, 2075, 2080, 2085, 2096, 2097, 2099, 2101, 2102, 2103, 2104], "unicod": [3, 2017], "dictionari": [3, 14, 23, 24, 30, 33, 35, 59, 64, 417, 682, 794, 795, 796, 799, 817, 818, 819, 840, 841, 842, 857, 861, 867, 942, 975, 1064, 1165, 1175, 1180, 1187, 1272, 1273, 1280, 1284, 1288, 1289, 1344, 1345, 1468, 1469, 1526, 1527, 1536, 1623, 1747, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1990, 2015, 2017, 2048, 2055, 2063, 2065, 2067, 2075, 2085, 2090, 2107, 2112, 2113], "lookup": [3, 30, 35, 47, 1468, 1623, 2013, 2017, 2044, 2076, 2105], "map": [3, 14, 28, 30, 35, 37, 44, 45, 47, 48, 52, 53, 55, 60, 61, 64, 66, 75, 82, 686, 737, 794, 795, 799, 801, 816, 817, 819, 841, 842, 857, 858, 859, 860, 861, 867, 888, 956, 1160, 1166, 1177, 1183, 1185, 1187, 1192, 1226, 1235, 1280, 1283, 1344, 1345, 1456, 1457, 1458, 1464, 1465, 1466, 1470, 1472, 1522, 1523, 1524, 1527, 1536, 1618, 1619, 1620, 1625, 1730, 1747, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1827, 1829, 1960, 1961, 1976, 2017, 2028, 2034, 2036, 2042, 2045, 2047, 2048, 2049, 2056, 2061, 2063, 2068, 2070, 2073, 2075, 2076, 2077, 2082, 2087, 2100, 2101, 2110, 2112], "agnost": [3, 16, 47, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1632, 
1723, 1724, 2020, 2034], "reliabl": 3, "warrant": 3, "except": [3, 5, 7, 9, 14, 28, 29, 30, 35, 37, 39, 40, 44, 48, 52, 53, 55, 63, 64, 585, 589, 590, 619, 686, 695, 697, 698, 701, 762, 888, 921, 922, 923, 962, 969, 973, 1012, 1022, 1024, 1052, 1065, 1100, 1106, 1108, 1154, 1155, 1175, 1177, 1189, 1236, 1272, 1280, 1288, 1290, 1294, 1328, 1344, 1360, 1370, 1372, 1373, 1375, 1378, 1396, 1417, 1420, 1422, 1477, 1496, 1526, 1542, 1579, 1670, 1709, 1716, 1731, 1757, 1764, 1765, 1769, 1771, 1824, 1848, 1864, 1867, 1900, 1912, 1919, 1921, 1922, 1926, 1949, 1971, 1972, 1976, 1977, 2012, 2013, 2015, 2016, 2017, 2018, 2020, 2023, 2032, 2035, 2045, 2048, 2051, 2055, 2058, 2061, 2063, 2068, 2075, 2077, 2080, 2082, 2083, 2087, 2097, 2099, 2101, 2109, 2110, 2113, 2115], "filter_fn": 3, "map_fn": 3, "coalesc": [3, 325, 330, 546, 616, 1021, 1908, 1912, 1959, 2014, 2047, 2066, 2080, 2102], "entri": [3, 19, 28, 29, 32, 35, 37, 38, 45, 47, 48, 317, 737, 861, 862, 935, 1050, 1051, 1098, 1149, 1150, 1247, 1272, 1293, 1468, 1469, 1526, 1532, 1623, 1624, 1742, 1743, 1751, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 1906, 2013, 2016, 2017, 2023, 2035, 2042, 2047, 2048, 2052, 2055, 2060, 2073, 2080, 2085, 2090, 2097, 2098, 2099, 2100], "color": [3, 1490, 2015, 2016, 2085, 2103], "rowwis": [3, 34], "columnwis": 3, "extend_result": 3, "highlight_warn": 3, "highlight": [3, 53, 732, 733, 2016], "trim_significant_figur": 3, "trim": [3, 944, 953, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1230, 1231, 1269], "h": [4, 10, 14, 33, 35, 489, 490, 743, 744, 745, 762, 967, 968, 1302, 1309, 1312, 1322, 1336, 1428, 1429, 1436, 1437, 1439, 1441, 1442, 1446, 1454, 1455, 1457, 1465, 1466, 1468, 1470, 1477, 1478, 1489, 1490, 1496, 1497, 1498, 1509, 1510, 1520, 1521, 1538, 1539, 1542, 1544, 1561, 1577, 1580, 1581, 1597, 1605, 1632, 1668, 1674, 1675, 1730, 1731, 1765, 1777, 1816, 1927, 1928, 2014, 2033, 2034, 2042, 2045, 2047, 2048, 2050, 2052, 2065, 2084, 2085, 2086, 2093, 2098, 2108], "finit": [4, 35, 922, 923, 1261, 1262, 1308, 1309, 1319, 1320, 1336, 1362, 1412, 1416, 1438, 1927, 2048, 2052, 2087], "natur": [4, 7, 8, 11, 30, 35, 55, 66, 68, 86, 922, 923, 1301, 1306, 1332, 1346, 1348, 1353, 1430, 1574, 1684, 2051, 2052, 2062, 2063, 2080, 2081], "against": [4, 5, 14, 28, 37, 47, 796, 868, 922, 923, 1136, 1137, 1138, 1142, 1143, 1145, 1213, 1263, 1272, 1288, 1289, 1526, 1532, 1778, 2011, 2016, 2068, 2091, 2099, 2108], "cprofil": 4, "mode": [4, 8, 23, 24, 28, 30, 35, 47, 50, 52, 60, 61, 64, 81, 223, 224, 490, 748, 749, 774, 775, 776, 781, 787, 788, 789, 817, 819, 825, 828, 862, 863, 892, 894, 898, 901, 902, 903, 908, 909, 911, 912, 913, 914, 918, 919, 920, 922, 975, 980, 1008, 1041, 1082, 1083, 1112, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1166, 1169, 1170, 1171, 1172, 1175, 1176, 1181, 1186, 1220, 1255, 1256, 1270, 1272, 1273, 1276, 1288, 1290, 1331, 1344, 1374, 1388, 1389, 1409, 1440, 1441, 1442, 1453, 1454, 1455, 1469, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1526, 1543, 1566, 1579, 1580, 1607, 1608, 1609, 1624, 1632, 1643, 1671, 1684, 1703, 1704, 1705, 1717, 1725, 1727, 1731, 1736, 1769, 1802, 1810, 1826, 1869, 1871, 1964, 2012, 2014, 2020, 2026, 2033, 2040, 2045, 2047, 2049, 2050, 2051, 2055, 2062, 2063, 2066, 2069, 2071, 2072, 2073, 2075, 2086, 2089, 2093, 
2096, 2100, 2101, 2102, 2103, 2104, 2106, 2111, 2112], "correct": [4, 6, 7, 24, 28, 29, 30, 33, 35, 47, 52, 207, 208, 211, 223, 557, 582, 605, 606, 617, 903, 904, 908, 923, 997, 1129, 1131, 1139, 1140, 1141, 1192, 1247, 1270, 1273, 1288, 1289, 1308, 1462, 1479, 1491, 1706, 1716, 1757, 1873, 1921, 1922, 1971, 1972, 2013, 2014, 2015, 2020, 2024, 2033, 2034, 2045, 2048, 2052, 2082, 2099, 2106, 2110], "launch": [4, 14, 23, 31, 33, 37, 39, 40, 45, 46, 47, 51, 63, 1011, 1716, 2012, 2042, 2044, 2045, 2047, 2048, 2075, 2096, 2104], "spent": [4, 28, 936, 1808, 2044, 2055, 2102, 2109, 2111], "appear": [4, 24, 28, 35, 52, 53, 64, 857, 1053, 1108, 1149, 1150, 1177, 1344, 1378, 1380, 1717, 1848, 1876, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1976, 2013, 2016, 2033, 2034, 2048, 2049, 2055, 2065, 2068, 2099, 2102, 2109], "extrem": [4, 28, 1716, 2042, 2065, 2101], "expens": [4, 23, 35, 55, 1730, 2045, 2052, 2054, 2069, 2075, 2086, 2098, 2102, 2105, 2108, 2111], "bound": [4, 15, 24, 28, 52, 55, 483, 797, 959, 970, 1122, 1123, 1180, 1187, 1191, 1196, 1272, 1435, 1436, 1437, 1519, 1520, 1521, 1526, 1545, 1632, 1810, 1841, 1862, 2016, 2017, 2040, 2042, 2051, 2068, 2070, 2081, 2098, 2099, 2101, 2111], "greater": [4, 28, 47, 66, 67, 294, 619, 682, 951, 964, 965, 970, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1215, 1229, 1327, 1392, 1438, 1575, 1632, 1643, 1670, 1684, 1700, 1703, 1731, 1765, 1810, 1964, 2014, 2042, 2045, 2059, 2066, 2080, 2081], "spend": [4, 7, 19, 33, 1345, 2024, 2105], "sens": [4, 35, 47, 64, 1747, 1960, 1961, 2016, 2042, 2051], "respons": [4, 7, 9, 28, 30, 32, 33, 35, 37, 44, 50, 55, 63, 977, 1011, 1189, 1514, 1650, 1716, 1912, 2042, 2045, 2048, 2049, 2055, 2063, 2075, 2101], "Of": [4, 1770, 2011, 2047, 2048, 2092, 2099, 2102], "cours": [4, 19, 64, 2011, 2047, 2048, 2075, 2099, 2102], "realiti": [4, 2051], "complic": [4, 24, 34, 52, 64, 795, 1870, 2020, 2034, 2043, 2068, 2075, 2077, 2099, 2101], "account": [4, 33, 45, 64, 1439, 1716, 2040, 2044, 2051, 2080, 2096], "heavili": [4, 65, 1786, 2044, 2048, 2068], "similarli": [4, 7, 30, 33, 52, 63, 64, 762, 792, 796, 861, 966, 1128, 1272, 1328, 1526, 1574, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1754, 1952, 1953, 1954, 1955, 2015, 2042, 2048, 2050, 2052, 2058, 2080, 2101, 2104, 2109], "platform": [4, 8, 9, 14, 28, 39, 40, 44, 1319, 1320, 1331, 1826, 1927, 2026, 2053, 2058, 2059, 2070], "startup": [4, 19], "slower": [4, 14, 28, 33, 879, 912, 1148, 1149, 1150, 1302, 1309, 1651, 1730, 1781, 2048, 2059, 2067, 2081, 2101], "rerun": [5, 28, 2045], "segment": [5, 1064, 1162, 1558, 2045, 2068, 2102, 2107, 2113], "persist": [5, 21, 30, 52, 55, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 762, 817, 818, 819, 1272, 1477, 1496, 1526, 1542, 2055, 2060, 2061, 2063, 2082, 2096, 2101, 2105], "rng": [5, 23, 1039, 1079, 1386, 1393, 1863, 1993, 2004, 2045, 2059, 2074], "advanc": [5, 15, 23, 24, 37, 1570, 1572, 1574, 1770, 1928, 1967, 2023, 2034, 2045, 2048, 2053, 2057, 2063, 2084, 2085, 2102], "juggl": 5, "moder": 5, "hit": [5, 8, 14, 1189, 1716, 2011, 2045, 2096, 2099, 2102, 2105, 2111], "preserve_rng_st": 5, "checkpoint_sequenti": [5, 2012], "omit": [5, 14, 28, 48, 152, 1142, 1143, 1145, 1187, 1479, 1540, 2061, 2065, 2075, 2087, 2099, 2103], "exclud": [5, 9, 24, 47, 64, 984, 1235, 1469, 1624, 1673, 1778, 1870, 1952, 1953, 1954, 1955, 2042, 2053, 2060, 2068, 2076, 2100, 2102], "_infer_device_typ": 5, "remain": [5, 8, 19, 35, 47, 64, 1380, 1468, 1469, 1623, 1624, 
1706, 1716, 1723, 1724, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1754, 1810, 1827, 1849, 2028, 2029, 2032, 2041, 2050, 2055, 2082, 2093, 2100], "consequ": [5, 60, 1336, 1496, 1927, 2016, 2042, 2045, 2057, 2059, 2062], "random": [5, 35, 37, 47, 48, 56, 64, 86, 90, 156, 762, 895, 908, 909, 945, 1039, 1040, 1045, 1054, 1055, 1075, 1076, 1079, 1080, 1170, 1177, 1225, 1248, 1345, 1365, 1386, 1387, 1391, 1393, 1434, 1445, 1470, 1477, 1496, 1545, 1625, 1676, 1682, 1706, 1731, 1738, 1744, 1745, 1752, 1753, 1772, 1816, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1863, 1875, 1928, 1967, 1976, 1993, 1994, 1996, 1999, 2000, 2001, 2002, 2004, 2005, 2011, 2012, 2014, 2022, 2040, 2049, 2052, 2058, 2065, 2066, 2085, 2102, 2111], "gradient": [5, 11, 23, 24, 28, 32, 33, 35, 55, 56, 59, 152, 223, 224, 292, 337, 489, 490, 497, 515, 697, 698, 883, 892, 894, 896, 899, 900, 901, 902, 903, 904, 905, 906, 908, 909, 911, 912, 913, 914, 915, 916, 917, 918, 919, 922, 923, 927, 928, 946, 972, 1112, 1165, 1166, 1167, 1168, 1172, 1177, 1213, 1272, 1308, 1309, 1312, 1319, 1320, 1336, 1345, 1362, 1364, 1370, 1373, 1375, 1438, 1445, 1456, 1457, 1458, 1461, 1462, 1468, 1469, 1479, 1493, 1494, 1495, 1526, 1533, 1558, 1615, 1616, 1623, 1624, 1634, 1643, 1651, 1653, 1654, 1655, 1668, 1703, 1704, 1705, 1716, 1717, 1720, 1721, 1722, 1730, 1736, 1769, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1800, 1801, 1814, 1908, 1927, 1976, 2012, 2013, 2014, 2020, 2023, 2034, 2035, 2036, 2040, 2045, 2047, 2048, 2050, 2051, 2052, 2055, 2058, 2066, 2067, 2075, 2076, 2080, 2086, 2109], "among": [5, 19, 23, 24, 28, 32, 35, 47, 1020, 1021, 1024, 1235, 1462, 1772, 2016, 2057, 2099, 2101], "detect": [5, 14, 17, 18, 19, 23, 25, 28, 29, 37, 40, 55, 911, 912, 913, 914, 915, 916, 975, 1166, 1276, 1570, 1571, 1573, 1716, 1964, 2012, 2032, 2035, 2045, 2052, 2058, 2065, 2068, 2075, 2098, 2099, 2102, 2111, 2115], "priorit": [5, 33, 1169, 1170, 1172, 1783, 1784, 1796, 2080], "defaultdevicetyp": 5, "anticip": [5, 2113], "belong": [5, 28, 30, 32, 35, 44, 64, 959, 1013, 1185, 1798, 1982, 2011, 2045, 2067, 2110, 2112], "use_reentr": [5, 1716], "context_fn": 5, "noop_context_fn": 5, "determinism_check": 5, "techniqu": [5, 17, 19, 33, 64, 1463, 1738, 1949, 2036, 2055, 2062, 2063, 2067, 2070, 2095, 2111], "recomput": [5, 34, 903, 906, 908, 1579, 1643, 1768, 2067, 2098], "refer": [5, 7, 14, 23, 24, 28, 29, 30, 32, 35, 39, 42, 43, 47, 48, 55, 56, 63, 81, 82, 83, 86, 88, 256, 736, 737, 758, 766, 794, 795, 796, 821, 822, 823, 826, 827, 828, 862, 876, 881, 896, 957, 1046, 1051, 1111, 1162, 1187, 1220, 1222, 1253, 1260, 1272, 1275, 1284, 1291, 1304, 1309, 1325, 1329, 1342, 1345, 1412, 1445, 1479, 1526, 1576, 1597, 1605, 1614, 1632, 1643, 1644, 1702, 1711, 1712, 1716, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1816, 1833, 1840, 1869, 1870, 1890, 1928, 1964, 2011, 2012, 2014, 2032, 2033, 2035, 2036, 2041, 2042, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2055, 2057, 2059, 2065, 2067, 2071, 2072, 2075, 2076, 2080, 2082, 2083, 2084, 2085, 2089, 2092, 2093, 2097, 2099, 2100, 2104, 2107, 2111], "potenti": [5, 8, 30, 47, 50, 86, 193, 210, 488, 1188, 1282, 1318, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1684, 1716, 1964, 2015, 2023, 2032, 2042, 2045, 2048, 2051, 2068, 2080, 2083, 2084, 2098, 2102], "silent": [5, 19, 28, 977, 1054, 1055, 1075, 1076, 1247, 1288, 1526, 1721, 1722, 1912, 1999, 2000, 2001, 2002, 2045, 2058, 2065], "consider": [5, 7, 83, 887, 1269, 1469, 1716, 
1723, 1724, 2016, 2044], "limit": [5, 8, 9, 12, 20, 23, 33, 53, 55, 56, 61, 83, 989, 1078, 1166, 1286, 1392, 1445, 1468, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1684, 1716, 1747, 1874, 2012, 2016, 2020, 2032, 2034, 2035, 2042, 2045, 2047, 2055, 2058, 2059, 2060, 2062, 2068, 2070, 2073, 2075, 2076, 2080, 2086, 2087, 2098, 2101, 2102, 2108, 2111], "reentrant": [5, 1716], "soon": [5, 47, 52, 55, 795, 2042, 2070, 2075, 2077, 2102], "intermedi": [5, 12, 14, 33, 36, 52, 53, 60, 64, 83, 956, 989, 1159, 1166, 1173, 1362, 1469, 1570, 1572, 1574, 1624, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 2013, 2016, 2020, 2045, 2048, 2049, 2050, 2058, 2091, 2098, 2099, 2101], "set_checkpoint_early_stop": 5, "entireti": 5, "graph": [5, 8, 12, 28, 30, 33, 35, 55, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 82, 83, 84, 85, 90, 141, 152, 223, 224, 682, 749, 794, 795, 817, 818, 819, 825, 828, 896, 903, 906, 908, 917, 975, 977, 980, 982, 983, 1008, 1043, 1048, 1053, 1166, 1181, 1185, 1187, 1191, 1197, 1209, 1272, 1275, 1276, 1282, 1284, 1288, 1716, 1778, 1780, 1781, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 2012, 2016, 2020, 2022, 2026, 2036, 2041, 2047, 2048, 2051, 2052, 2055, 2056, 2062, 2063, 2065, 2068, 2069, 2071, 2072, 2073, 2075, 2076, 2077, 2080, 2085, 2091, 2092, 2093, 2097, 2098, 2100, 2101, 2103, 2104, 2105, 2107, 2108], "no_grad": [5, 490, 864, 918, 1112, 1167, 1171, 1176, 1272, 1468, 1526, 1532, 1574, 1717, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 2040, 2042, 2055, 2070, 2089, 2093], "unmet": 5, "particip": [5, 9, 23, 28, 29, 32, 47, 48, 1570, 1716, 2076, 2098], "wherea": [5, 11, 33, 35, 52, 288, 1328, 1388, 1389, 1417, 1797, 1927, 2016, 2042, 2067, 2087, 2101], "avoid": [5, 8, 9, 19, 20, 23, 28, 30, 33, 35, 45, 48, 55, 64, 66, 74, 75, 193, 210, 450, 783, 868, 896, 956, 975, 1064, 1207, 1208, 1272, 1318, 1344, 1460, 1491, 1492, 1517, 1526, 1535, 1540, 1578, 1614, 1644, 1669, 1676, 1716, 1731, 1797, 1848, 1876, 1923, 1934, 1942, 1960, 1995, 2020, 2029, 2034, 2042, 2044, 2045, 2050, 2052, 2055, 2067, 2075, 2076, 2084, 2085, 2086, 2098, 2099, 2101, 2102, 2103], "know": [5, 7, 8, 14, 17, 19, 28, 29, 33, 36, 52, 64, 488, 903, 904, 908, 912, 929, 977, 1159, 1166, 1187, 1197, 1201, 1283, 1716, 2013, 2016, 2023, 2024, 2028, 2041, 2042, 2045, 2048, 2051, 2052, 2065, 2068, 2075, 2076, 2077, 2080, 2096, 2098, 2099, 2100, 2101, 2102, 2114], "lstm": [5, 765, 1497, 1543, 2014, 2045, 2065, 2066, 2070, 2072, 2073, 2085], "hidden": [5, 762, 1175, 1477, 1478, 1496, 1497, 1542, 1544, 1717, 2045, 2085], "correctli": [5, 19, 23, 28, 33, 47, 55, 488, 1166, 1272, 1288, 1526, 1651, 2013, 2015, 2016, 2020, 2029, 2034, 2041, 2042, 2047, 2048, 2049, 2057, 2059, 2070, 2075, 2109], "compil": [5, 12, 14, 15, 52, 53, 55, 56, 64, 65, 682, 989, 1034, 1038, 1050, 1051, 1166, 1181, 1186, 1201, 1270, 1272, 1273, 1276, 1277, 1284, 1285, 1286, 1288, 1289, 1290, 1526, 2012, 2013, 2015, 2016, 2017, 2018, 2020, 2022, 2041, 2044, 2046, 2047, 2054, 2061, 2064, 2065, 2075, 2086, 2089, 2095, 2096, 2097, 2098, 2099, 2100, 2101, 2104, 2105, 2106, 2107, 2108], "turn": [5, 14, 23, 27, 33, 60, 64, 83, 682, 876, 975, 1260, 1288, 1571, 1573, 1909, 1910, 1911, 1912, 1913, 1914, 1964, 2023, 2042, 2045, 2058, 2059, 2065, 2069, 2070, 2076, 2080, 2098, 2101, 2104, 2109], "open": [5, 8, 9, 11, 14, 23, 35, 47, 52, 56, 691, 932, 959, 1158, 1280, 1344, 1367, 1377, 1649, 2013, 2023, 2032, 2035, 2053, 2060, 2061, 2062, 2065, 2068, 2070, 2075, 2080, 2081, 2082, 2099, 2102, 
2109, 2113, 2114], "ran": [5, 18, 55, 2102, 2107, 2111], "sequenti": [5, 23, 32, 52, 53, 55, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 795, 1177, 1272, 1282, 1471, 1526, 1566, 1577, 1723, 1724, 1733, 1747, 1811, 1976, 2013, 2032, 2035, 2045, 2055, 2065, 2067, 2070, 2091, 2099, 2102, 2109, 2111], "divid": [5, 24, 28, 32, 33, 240, 585, 586, 587, 1024, 1103, 1106, 1126, 1144, 1153, 1182, 1235, 1236, 1283, 1438, 1439, 1445, 1446, 1459, 1485, 1486, 1492, 1517, 1518, 1529, 1530, 1531, 1540, 1558, 1559, 1575, 1576, 1604, 1605, 1615, 1616, 1644, 1668, 1676, 1716, 1927, 1977, 2014, 2066], "func": [5, 28, 58, 60, 64, 66, 68, 865, 895, 898, 903, 905, 906, 907, 908, 909, 911, 912, 913, 914, 915, 916, 918, 922, 923, 1275, 1288, 1289, 1766, 1967, 1976, 2012, 2016, 2020, 2041, 2075, 2077, 2112], "compris": [5, 48, 52, 2051], "chunk": [5, 23, 28, 30, 33, 55, 962, 1024, 1171, 1462, 1716, 1915, 2013, 2014, 2033, 2051, 2066, 2067, 2075, 2080, 2084], "input_var": [5, 1462], "set_checkpoint_debug_en": [5, 2012], "defer": [5, 24, 55, 1187, 2045], "person": [6, 7, 9], "land": [6, 9, 10, 977, 2012, 2020, 2048, 2099, 2108], "six": [6, 82, 1455], "commit": [6, 7, 9, 14, 56, 2011, 2012, 2058, 2059, 2108], "repositori": [6, 9, 59, 64, 2011, 2057, 2070], "submit": [6, 9, 1010, 1011, 1013, 1385, 1981, 1982, 2028, 2045, 2059, 2100, 2102, 2108], "month": [6, 9], "qualifi": [6, 28, 30, 33, 34, 44, 64, 682, 1185, 1272, 1526, 2020, 2022, 2028, 2063, 2068], "pr": [6, 7, 1816, 1928, 2091, 2102], "interest": [6, 7, 9, 83, 2042, 2049, 2052, 2055, 2062, 2099, 2100, 2101, 2105, 2109], "merge_rul": 6, "vote": [6, 9], "decis": [6, 30, 37, 47, 50, 64, 682, 1187, 1288, 2023, 2041, 2098], "criteria": [6, 9, 1345, 1928], "approv": [6, 9], "Not": [6, 48, 82, 1194, 1420, 1574, 1923, 2013, 2015, 2016, 2017, 2045, 2048, 2066, 2070, 2075], "busi": [6, 9, 2105], "dai": [6, 7, 2099, 2101, 2108], "contributor": [6, 7, 8, 9], "seen": [6, 12, 18, 35, 64, 223, 930, 962, 975, 997, 1374, 1456, 1457, 1458, 1558, 1657, 1658, 1659, 1810, 2013, 2023, 2042, 2045, 2065, 2080], "thumb": [6, 28], "wiki": [7, 9, 24, 2062, 2116], "acceler": [7, 24, 941, 1221, 1440, 1441, 1442, 1566, 1780, 2053, 2092, 2096, 2102], "deep": [7, 9, 64, 1440, 1441, 1442, 1467, 1566, 1796, 2012, 2040, 2045, 2055, 2070, 2095, 2098, 2100, 2111], "neural": [7, 8, 15, 64, 1434, 1445, 1463, 1470, 1479, 1491, 1525, 1526, 1533, 1538, 1539, 1554, 1556, 1570, 1572, 1574, 1663, 1687, 1734, 1794, 1802, 1808, 2013, 2015, 2016, 2040, 2045, 2048, 2058, 2062, 2070, 2098], "tape": [7, 2096], "system": [7, 8, 14, 15, 17, 23, 48, 52, 60, 64, 65, 968, 985, 1180, 1226, 1280, 1304, 1315, 1316, 1317, 1318, 1319, 1320, 1322, 1333, 1334, 1335, 1339, 1344, 1363, 1392, 1570, 1572, 1574, 1716, 1734, 1736, 1871, 1951, 2017, 2022, 2028, 2029, 2042, 2044, 2045, 2054, 2055, 2057, 2061, 2063, 2068, 2075, 2093, 2098, 2099, 2109, 2118], "organ": [7, 2047, 2054, 2068, 2102], "govern": [7, 8, 2012], "technic": [7, 9, 47, 52, 55, 64, 1272, 1526, 2012, 2042, 2050, 2051, 2057, 2068, 2098], "found": [7, 14, 15, 16, 18, 19, 28, 30, 47, 52, 64, 66, 73, 75, 76, 77, 85, 87, 88, 89, 942, 959, 1007, 1087, 1088, 1196, 1272, 1276, 1294, 1370, 1373, 1375, 1378, 1418, 1434, 1444, 1470, 1526, 1554, 1793, 1862, 2011, 2013, 2016, 2020, 2035, 2041, 2048, 2051, 2052, 2055, 2057, 2065, 2068, 2069, 2070, 2075, 2085, 2097, 2101, 2105, 2108, 2110, 2112], "md": [7, 64, 794, 2068], "healthi": [7, 37, 47], "team": [7, 28, 56, 86, 87, 89, 2060], "commun": [7, 8, 9, 29, 30, 32, 33, 37, 47, 50, 55, 488, 682, 1716, 2042, 2047, 
2062, 2075, 2076, 2099, 2102, 2115], "project": [7, 30, 33, 1374, 1496, 1532, 1736, 1816, 2011, 2020, 2051, 2053, 2062, 2093, 2110], "ve": [7, 58, 59, 60, 64, 1181, 1276, 1967, 2023, 2032, 2042, 2049, 2076, 2085, 2101, 2102], "come": [7, 8, 9, 23, 33, 35, 36, 44, 47, 52, 56, 60, 488, 975, 1108, 1159, 1166, 1272, 1344, 1464, 1465, 1466, 1470, 1491, 1777, 2026, 2047, 2049, 2054, 2068, 2075, 2077, 2080, 2100, 2107], "peopl": [7, 28, 2042, 2070, 2104], "scratch": [7, 2042, 2111], "own": [7, 9, 28, 29, 32, 35, 39, 47, 50, 52, 55, 64, 682, 1042, 1142, 1162, 1235, 1272, 1344, 1453, 1454, 1455, 1456, 1457, 1458, 1526, 1733, 1736, 2012, 2016, 2020, 2036, 2041, 2045, 2051, 2068, 2070, 2071, 2075, 2077, 2080, 2099, 2101, 2104], "itch": 7, "acquaint": 7, "tip": [7, 2045, 2099, 2102], "tracker": [7, 1345, 2028], "confirm": [7, 2011, 2013, 2048, 2065, 2075, 2077, 2107], "tend": [7, 913, 1964], "bootcamp": 7, "1hr": 7, "although": [7, 8, 35, 60, 64, 65, 1456, 1457, 1458, 1526, 1534, 1716, 2012, 2016, 2024, 2041, 2048, 2058, 2070, 2109, 2110], "join": [7, 24, 28, 32, 47, 48, 63, 64, 1212, 1716, 2011, 2012, 2017, 2032, 2042, 2047, 2057, 2066, 2086, 2093], "u": [7, 8, 12, 30, 33, 56, 61, 64, 66, 762, 911, 941, 966, 1186, 1209, 1309, 1311, 1315, 1319, 1320, 1336, 1362, 1364, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1477, 1478, 1496, 1497, 1511, 1513, 1542, 1544, 1545, 1566, 1577, 1736, 1816, 1927, 1928, 2013, 2014, 2020, 2023, 2034, 2040, 2042, 2045, 2047, 2048, 2049, 2055, 2068, 2080, 2084, 2085, 2093, 2096, 2098, 2099, 2102, 2105, 2106, 2111, 2114], "dev": [7, 10, 39, 44, 2100, 2111], "happi": 7, "research": [7, 8, 9, 1716, 2011, 2042, 2052, 2060], "partner": [7, 2092], "speed": [7, 8, 14, 32, 55, 1019, 1108, 1165, 1263, 1276, 1282, 1336, 1430, 1532, 1723, 1724, 1870, 1927, 2024, 2042, 2044, 2045, 2047, 2048, 2050, 2053, 2058, 2070, 2075, 2099, 2100], "design": [7, 9, 23, 30, 35, 44, 47, 53, 56, 59, 60, 61, 922, 923, 1272, 1292, 1439, 1526, 1585, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1939, 2011, 2012, 2029, 2039, 2045, 2048, 2051, 2053, 2055, 2063, 2068, 2093, 2098, 2099, 2100, 2102, 2104, 2106, 2111], "comment": [7, 64, 1768, 1866, 2016, 2017, 2048, 2083, 2085, 2105], "crack": 7, "usual": [7, 14, 23, 24, 28, 30, 34, 47, 48, 52, 53, 55, 64, 87, 483, 833, 834, 835, 836, 844, 896, 911, 913, 917, 1177, 1464, 1465, 1466, 1470, 1485, 1488, 1489, 1490, 1491, 1706, 1716, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1976, 2013, 2016, 2035, 2041, 2042, 2044, 2045, 2048, 2050, 2054, 2067, 2071, 2073, 2075, 2085, 2095, 2099, 2101, 2105, 2112], "idea": [7, 56, 931, 1108, 1191, 1430, 1716, 2045, 2054, 2065, 2076, 2102], "rfc": [7, 28, 2042, 2070, 2076], "big": [7, 19, 1783, 1784, 1787, 1793, 1794, 1909, 1910, 1911, 1912, 1913, 1914, 2045, 2060, 2067, 2070, 2098, 2101, 2105], "post": [7, 8, 29, 30, 32, 55, 488, 490, 682, 859, 860, 864, 1272, 1526, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 2012, 2020, 2042, 2047, 2048, 2050, 2061, 2080, 2099, 2100, 2101, 2111], "standard": [7, 14, 18, 19, 24, 35, 40, 41, 45, 53, 60, 64, 354, 379, 591, 1128, 1187, 1434, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1491, 1498, 1566, 1572, 1574, 1587, 1772, 1816, 1839, 1884, 1889, 1921, 1922, 2015, 2017, 2035, 2040, 2044, 2045, 2057, 2058, 2062, 2065, 2068, 2081, 2099], "lot": [7, 14, 19, 23, 52, 1197, 2032, 2042, 2045, 2052, 2057, 2068, 2074, 2076, 2085, 2098, 2101, 2105, 2113], "boil": 7, "mostli": [7, 35, 1270, 1716, 2045, 2070, 2080, 2098, 2099, 
2112], "evid": 7, "peer": [7, 28, 32, 33, 47, 55, 1017, 1716, 2045, 2075], "paper": [7, 9, 24, 34, 35, 53, 762, 1430, 1434, 1440, 1441, 1442, 1444, 1456, 1457, 1458, 1463, 1464, 1465, 1466, 1467, 1470, 1473, 1474, 1477, 1480, 1483, 1488, 1489, 1490, 1498, 1532, 1538, 1539, 1541, 1545, 1554, 1558, 1566, 1570, 1572, 1574, 1575, 1576, 1627, 1628, 1637, 1715, 1783, 1784, 1793, 1795, 1802, 1808, 2052], "framework": [7, 8, 9, 35, 44, 56, 63, 65, 762, 1015, 1383, 1477, 1716, 1730, 1796, 1874, 2012, 2024, 2056, 2070, 2076, 2077], "bit": [7, 64, 90, 332, 460, 763, 765, 767, 821, 822, 823, 824, 827, 829, 861, 948, 951, 990, 991, 1162, 1252, 1850, 1851, 1863, 1870, 1891, 2045, 2051, 2055, 2058, 2061, 2070, 2073, 2074, 2080, 2083, 2086, 2099, 2105, 2116], "accept": [7, 9, 23, 28, 30, 34, 52, 55, 56, 57, 59, 61, 515, 804, 892, 893, 894, 895, 896, 903, 905, 908, 909, 917, 965, 1050, 1053, 1161, 1177, 1186, 1235, 1272, 1276, 1461, 1526, 1533, 1555, 1574, 1743, 1757, 1758, 1764, 1779, 1904, 1918, 1976, 2016, 2020, 2034, 2045, 2048, 2049, 2065, 2067, 2075, 2083, 2085, 2102, 2111], "overwhelm": [7, 2075, 2111], "newli": [7, 55, 64, 90, 1122, 1123, 1187, 1468, 1469, 1828, 1829], "publish": [7, 9, 39, 44, 47, 1345, 2012], "ground": [7, 9, 34, 1461, 1615, 2085, 2099], "becom": [7, 8, 9, 12, 23, 28, 34, 35, 64, 292, 762, 879, 1226, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1477, 1496, 1511, 1531, 1542, 1555, 1615, 1632, 1706, 1800, 1899, 1949, 2023, 2026, 2042, 2047, 2048, 2065, 2068, 2069, 2075, 2111, 2113], "refactor": [7, 64, 2061, 2070], "coordin": [7, 28, 30, 35, 37, 585, 588, 887, 1226, 1235, 1374, 1820, 1912, 1953, 1955, 1962, 2042, 2080, 2085, 2102, 2111], "pace": 7, "branch": [7, 12, 52, 64, 66, 69, 71, 74, 75, 989, 1934, 2011, 2015, 2016, 2045, 2098, 2099, 2108], "definit": [7, 8, 23, 28, 29, 35, 43, 52, 53, 64, 87, 966, 967, 968, 997, 1050, 1156, 1197, 1200, 1207, 1208, 1293, 1302, 1303, 1345, 1412, 1491, 1570, 1644, 1691, 1771, 1846, 1949, 2011, 2013, 2015, 2017, 2041, 2042, 2048, 2052, 2068, 2070, 2085, 2087], "fundament": [7, 60, 2015, 2055, 2075, 2080, 2101], "cut": [7, 33, 2102], "guidanc": [7, 9, 15, 57, 488, 1199, 2098], "stage": [7, 18, 19, 24, 30, 32, 44, 55, 63, 2012, 2023, 2035, 2077, 2111], "piec": [7, 11, 2022, 2035, 2076, 2099, 2102, 2113], "advic": [7, 2102], "readi": [7, 14, 33, 63, 864, 865, 944, 953, 1230, 1231, 1716, 2013, 2047, 2075, 2076, 2096], "draft": 7, "convert": [7, 11, 23, 28, 30, 34, 35, 36, 53, 55, 59, 62, 64, 82, 83, 84, 582, 586, 587, 588, 589, 590, 737, 790, 791, 792, 794, 795, 796, 817, 818, 819, 840, 841, 857, 860, 861, 862, 882, 883, 941, 961, 974, 1092, 1128, 1159, 1272, 1342, 1526, 1536, 1537, 1566, 1573, 1706, 1718, 1719, 1723, 1724, 1777, 1778, 1797, 1828, 1829, 1834, 1909, 1910, 1911, 1912, 1913, 1914, 1962, 2013, 2014, 2015, 2016, 2024, 2035, 2036, 2048, 2053, 2062, 2063, 2065, 2070, 2073, 2080, 2085, 2087, 2091, 2092, 2102, 2110, 2111], "press": [7, 64], "button": [7, 2108], "prepend": [7, 14, 23, 28, 32, 64, 231, 1100, 1272, 1367, 1526, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1945, 2014, 2043], "titl": [7, 2066, 2070], "wip": 7, "progress": [7, 32, 37, 48, 51, 81, 488, 1010, 1385, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1970, 1981, 2011, 2027], "ci": [7, 2012, 2108], "folk": 7, "who": [7, 8, 9, 11, 47, 66, 83, 2068], "regularli": 7, "queue": [7, 37, 50, 2032, 2085], "everyth": [7, 23, 35, 53, 59, 66, 2013, 2032, 2068, 2098, 2102, 2105, 2111], "happen": [7, 9, 28, 30, 32, 34, 
35, 37, 40, 47, 52, 55, 60, 64, 609, 793, 816, 908, 909, 1166, 1209, 1566, 1716, 1730, 1799, 1805, 1806, 1812, 1927, 2012, 2029, 2032, 2042, 2045, 2047, 2048, 2049, 2050, 2051, 2056, 2057, 2061, 2065, 2070, 2075, 2082, 2084, 2096, 2099, 2102, 2115], "subsystem": [7, 11, 56, 61, 2012, 2020, 2046, 2048], "patch": [7, 57, 1472, 1578, 1965, 2103], "feel": [7, 2035, 2065, 2080, 2102], "ll": [7, 19, 60, 64, 762, 822, 823, 864, 865, 966, 967, 968, 1053, 1187, 1195, 1302, 1477, 1478, 1496, 1497, 2020, 2023, 2042, 2045, 2048, 2049, 2057, 2065, 2070, 2076, 2097, 2101, 2105, 2109], "round": [7, 23, 28, 66, 68, 75, 510, 667, 668, 801, 804, 822, 868, 993, 995, 1064, 1103, 1129, 1130, 1131, 1135, 1139, 1140, 1141, 1156, 1318, 1331, 1579, 1643, 1826, 1827, 1846, 1894, 2014, 2016, 2033, 2045, 2066, 2070, 2073, 2080, 2081, 2106, 2113], "trip": [7, 64, 1129, 1130, 1131, 1135, 1139, 1140, 1141], "noth": [7, 14, 37, 64, 682, 1004, 1044, 1736, 1783, 1784, 1796, 1811, 1995, 2013, 2015, 2059, 2077], "accompani": [7, 85, 2063], "solut": [7, 8, 19, 60, 968, 1180, 1317, 1318, 1322, 1330, 1333, 1335, 1339, 1438, 1716, 1951, 2013, 2014, 2040, 2041, 2050, 2057, 2070], "think": [7, 9, 11, 64, 66, 488, 1797, 2013, 2015, 2042, 2067, 2068, 2077, 2099, 2101, 2103], "confid": [7, 1723, 1724, 2051, 2085, 2103], "ahead": [7, 52, 2012, 2070, 2092, 2098, 2102], "search": [7, 11, 24, 821, 959, 1483, 1637, 1716, 1778, 1833, 1862, 2013, 2033, 2034, 2065, 2068, 2080, 2097, 2101, 2108, 2113], "repo": [7, 33, 66, 1802, 2011, 2048, 2061], "unabl": [7, 52, 60, 84, 2065, 2067], "reproduc": [7, 23, 60, 315, 323, 517, 519, 946, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1522, 1523, 1524, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1624, 1632, 1643, 1671, 1684, 1703, 1704, 1705, 1706, 1964, 2011, 2012, 2067, 2068, 2102, 2111], "problemat": [7, 23, 64, 929, 2013, 2058, 2071, 2099, 2102], "insight": [7, 18, 81, 2093, 2113], "individu": [7, 9, 14, 23, 24, 28, 30, 32, 55, 64, 85, 244, 682, 794, 840, 841, 966, 1074, 1126, 1144, 1181, 1272, 1445, 1526, 1566, 1716, 2016, 2033, 2041, 2042, 2045, 2048, 2051, 2054, 2058, 2059, 2063, 2065, 2067, 2073, 2075, 2087, 2096, 2104, 2108, 2113], "intent": [7, 30, 45, 47, 55, 60, 1867, 2023, 2068, 2070, 2107], "lock": [7, 23, 28, 32, 35, 2042, 2045, 2057, 2068, 2076, 2113], "strike": 7, "convers": [7, 30, 460, 582, 585, 795, 960, 1257, 1723, 1724, 1757, 2012, 2017, 2048, 2053, 2065, 2070, 2071, 2080, 2098, 2101, 2110], "medium": [7, 55, 1870], "prioriti": [7, 9, 10, 28, 682, 857, 998, 1013, 1982, 2015, 2060, 2115], "entranc": [7, 2045], "great": [7, 12, 52, 2042, 2052, 2096, 2098, 2099], "deal": [7, 8, 23, 37, 50, 52, 82, 1944, 2032, 2050, 2075, 2098, 2101, 2105, 2109], "welcom": [7, 2035, 2062, 2067, 2080], "aim": [7, 52, 83, 2049, 2080, 2092], "rare": [7, 2041, 2065, 2098, 2103, 2112], "typo": 7, "send": [7, 23, 28, 33, 37, 50, 1046, 1706, 1716, 2032, 2047, 2057, 2061, 2066, 2069, 2075, 2076, 2077, 2089, 2091, 2101, 2104], "forum": [7, 9, 2050, 2057], "share": [7, 10, 14, 23, 29, 30, 32, 33, 35, 36, 47, 48, 53, 55, 60, 223, 313, 342, 460, 485, 522, 526, 619, 862, 882, 883, 903, 904, 908, 909, 922, 923, 975, 1008, 1010, 1042, 1046, 1053, 1159, 1160, 1161, 1162, 1243, 1320, 1421, 1422, 1462, 1543, 1716, 1734, 1772, 1844, 1858, 1866, 1918, 1942, 1948, 1963, 2014, 2041, 2042, 2044, 2057, 2060, 2061, 2065, 2075, 2080, 2082, 2084, 2090, 2093, 2096], "resolv": [7, 8, 9, 30, 35, 64, 84, 85, 1187, 1272, 1456, 1457, 1458, 1472, 1523, 1526, 2015, 2016, 2017, 2061, 2068, 2082, 2098, 2112], "challeng": [7, 28, 30, 2076, 
2102], "feedback": [7, 18, 19, 24, 55, 56, 1716, 2012, 2080], "direct": [7, 9, 11, 28, 33, 53, 762, 794, 1149, 1150, 1197, 1272, 1477, 1496, 1526, 1542, 1732, 1768, 1795, 1816, 1833, 1854, 2020, 2042, 2045, 2048, 2051, 2055, 2075, 2099, 2110], "yourself": [7, 58, 1008, 1965, 2048, 2055, 2057, 2102, 2112], "problem": [7, 23, 28, 47, 52, 60, 66, 1108, 1183, 1187, 1288, 1318, 1330, 1345, 1461, 1533, 1597, 1866, 2032, 2042, 2045, 2050, 2052, 2057, 2061, 2070, 2071, 2077, 2083, 2092, 2098, 2099, 2102, 2111], "area": [7, 9, 53, 1643, 2055, 2070, 2081], "appreci": 7, "strive": 7, "respond": [7, 28], "quickli": [7, 8, 24, 47, 2051, 2104], "ey": [7, 35, 193, 210, 967, 968, 1177, 1293, 1302, 1313, 1324, 1327, 1330, 1331, 1338, 1339, 1730, 1826, 1905, 1976, 2014, 2018, 2048, 2066, 2099], "everyon": [7, 37, 47], "touch": [7, 45, 64], "versu": [7, 1186, 1530], "write": [7, 8, 9, 12, 16, 18, 19, 23, 28, 30, 33, 37, 40, 44, 45, 46, 47, 52, 55, 60, 61, 65, 82, 84, 85, 256, 488, 515, 699, 957, 977, 1161, 1166, 1177, 1189, 1249, 1283, 1303, 1314, 1315, 1316, 1320, 1321, 1334, 1702, 1858, 1951, 1964, 1976, 2012, 2015, 2020, 2029, 2034, 2045, 2046, 2048, 2049, 2050, 2052, 2063, 2067, 2068, 2070, 2080, 2085, 2092, 2098, 2099, 2102, 2104, 2105, 2111, 2115], "blog": [7, 8, 12, 989, 2020, 2047, 2048, 2070, 2080], "around": [7, 9, 11, 28, 35, 49, 59, 60, 63, 64, 152, 626, 896, 917, 1005, 1008, 1010, 1011, 1013, 1083, 1154, 1155, 1166, 1272, 1385, 1409, 1716, 1855, 1981, 1982, 2007, 2013, 2020, 2032, 2042, 2045, 2065, 2070, 2075, 2096, 2099, 2102], "internet": 7, "grow": [7, 8, 64, 2045, 2080], "market": [7, 9], "benefit": [7, 8, 28, 64, 850, 1723, 1724, 1810, 2032, 2045, 2051, 2070, 2080, 2111], "opinion": [7, 8, 2080], "isn": [7, 19, 23, 64, 460, 1269, 2042, 2045, 2048, 2075, 2087, 2101], "categor": [7, 40, 1634, 2012, 2017, 2071, 2075, 2085, 2110], "aspect": [7, 28, 33, 64, 1543, 2048, 2055], "seem": [7, 1187, 2065], "unusu": [7, 2101], "claim": [7, 1723, 1724, 1808, 2052], "wast": [7, 2045], "someon": [7, 9, 1270, 2034], "end": [7, 8, 9, 19, 23, 24, 28, 33, 35, 40, 44, 58, 64, 363, 364, 540, 762, 786, 799, 822, 823, 868, 931, 939, 944, 992, 1008, 1068, 1069, 1098, 1108, 1147, 1162, 1211, 1226, 1232, 1233, 1269, 1272, 1293, 1298, 1328, 1338, 1340, 1343, 1345, 1359, 1421, 1422, 1437, 1438, 1439, 1447, 1448, 1449, 1459, 1461, 1467, 1471, 1477, 1478, 1481, 1482, 1483, 1484, 1485, 1486, 1492, 1496, 1497, 1512, 1517, 1520, 1521, 1526, 1528, 1533, 1534, 1537, 1542, 1545, 1555, 1558, 1564, 1569, 1576, 1615, 1624, 1636, 1637, 1716, 1730, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1800, 1805, 1810, 1842, 1874, 1877, 1880, 1882, 1895, 1949, 1960, 1961, 1979, 2013, 2014, 2015, 2016, 2032, 2041, 2042, 2045, 2047, 2048, 2050, 2051, 2052, 2058, 2061, 2063, 2065, 2067, 2069, 2070, 2073, 2075, 2081, 2096, 2099, 2101, 2104, 2106, 2111, 2115], "too": [7, 9, 14, 24, 30, 47, 60, 64, 488, 1064, 1165, 1187, 1191, 1318, 1336, 1418, 1445, 1461, 1526, 1616, 1717, 1766, 2016, 2044, 2050, 2052, 2057, 2058, 2059, 2061, 2063, 2068, 2077, 2080, 2098, 2099, 2101, 2111, 2113], "advisori": 7, "fashion": [7, 23, 28, 33, 34, 50, 52, 517, 1350, 1747, 2013], "rough": [7, 9], "consensu": [7, 9], "corpor": [7, 2111], "wrote": [7, 8], "implicitli": [7, 28, 40, 64, 1091, 1186, 1216, 1226, 1288, 1289, 1344, 1435, 1436, 1437, 1519, 1520, 1521, 1867, 1949, 2013, 2015, 2016, 2042, 2048, 2053], "lifetim": [7, 488, 2020, 2075, 2101], "immedi": [7, 8, 9, 19, 28, 30, 47, 48, 55, 63, 488, 1197, 1272, 1275, 1526, 1723, 1724, 2016, 2045, 2051, 
2055, 2060, 2067, 2075, 2077, 2098, 2101], "sai": [7, 52, 53, 64, 498, 931, 1169, 1170, 1171, 1186, 1193, 1272, 1526, 2013, 2041, 2042, 2050, 2068, 2076, 2077, 2080, 2096, 2098, 2099, 2105, 2110], "bugfix": 7, "Or": [7, 19, 37, 64, 883, 970, 2053, 2056, 2065, 2080], "motiv": [7, 8, 64, 737, 2051, 2055, 2076, 2111], "ye": [7, 2025, 2065, 2067, 2080, 2102], "knuth": 7, "bewar": 7, "mere": [7, 33, 53], "proven": [7, 1463, 1716], "ok": [7, 40, 45, 59, 1191, 1200, 1277, 2077, 2083, 2099], "sometim": [7, 64, 914, 1064, 1278, 1288, 1472, 1578, 1610, 1611, 1612, 1736, 2012, 2016, 2020, 2032, 2042, 2045, 2049, 2050, 2051, 2055, 2057, 2068, 2083, 2086, 2098, 2099, 2101, 2105, 2109, 2112], "obvious": [7, 2101], "broken": [7, 23, 1064, 2065, 2068], "contrari": [7, 33, 2044], "accident": 7, "put": [7, 9, 23, 28, 37, 59, 63, 64, 321, 1128, 1344, 1480, 1802, 2011, 2014, 2032, 2045, 2051, 2057, 2066, 2068, 2076, 2077, 2099, 2101, 2103], "difficulti": [7, 28, 2040], "nonlinearli": 7, "sign": [7, 35, 343, 531, 887, 992, 1096, 1144, 1156, 1162, 1306, 1331, 1332, 1353, 1518, 1795, 1846, 1877, 1891, 2014, 2033, 2066, 2073, 2080, 2083, 2086, 2087, 2099, 2106], "split": [7, 11, 23, 28, 64, 619, 774, 775, 776, 862, 864, 865, 962, 969, 1064, 1106, 1236, 1462, 1476, 1477, 1496, 1532, 1542, 1607, 1608, 1609, 1610, 1611, 1612, 1631, 1943, 1977, 2012, 2013, 2014, 2033, 2045, 2051, 2066, 2068, 2070, 2075, 2080, 2084, 2099, 2109, 2113], "shippabl": 7, "complet": [7, 14, 19, 23, 28, 30, 37, 39, 45, 47, 50, 60, 63, 488, 682, 864, 1006, 1010, 1011, 1013, 1084, 1165, 1166, 1275, 1284, 1286, 1291, 1303, 1331, 1385, 1388, 1389, 1394, 1410, 1597, 1716, 1733, 1766, 1826, 1870, 1981, 1982, 2008, 2012, 2015, 2016, 2017, 2022, 2032, 2041, 2042, 2045, 2049, 2053, 2059, 2068, 2075, 2076, 2095, 2103, 2108, 2109], "subtl": [7, 1488, 1489, 1490, 2048, 2101, 2102, 2111], "nuanc": [7, 2062], "extra": [7, 14, 23, 24, 28, 32, 35, 38, 52, 55, 64, 1068, 1108, 1166, 1177, 1186, 1272, 1280, 1283, 1318, 1344, 1526, 1533, 1541, 1715, 1730, 1923, 1976, 2015, 2034, 2042, 2044, 2047, 2048, 2050, 2054, 2063, 2068, 2069, 2080, 2086, 2087, 2089, 2101, 2105, 2115], "understand": [7, 8, 9, 28, 30, 37, 39, 52, 55, 66, 82, 83, 1166, 1195, 1330, 2012, 2040, 2042, 2045, 2046, 2053, 2060, 2063, 2069, 2085, 2092, 2097, 2098, 2099, 2100, 2101, 2102, 2104, 2111], "hack": 7, "answer": [7, 10, 64, 826, 1197, 1439, 1749, 2101, 2102], "regress": [7, 1438, 2059, 2096, 2107], "scrutini": 7, "undertak": 7, "rest": [7, 23, 24, 48, 55, 64, 795, 816, 982, 983, 1162, 1338, 1339, 1943, 2034, 2055, 2059, 2068, 2070, 2075, 2080, 2099, 2102, 2103, 2105, 2109], "stai": [7, 32, 141, 1462, 1723, 1724, 2045, 2057, 2070, 2075, 2080], "chanc": [7, 30, 35, 2048, 2101, 2102, 2105], "unrel": [7, 985, 1175, 2041, 2048, 2068], "aid": [7, 64, 2042, 2111], "troubleshoot": [7, 28, 2092, 2102], "mayb": [7, 1173, 1967, 2101], "rebas": 7, "latest": [7, 14, 28, 32, 35, 52, 900, 931, 1743, 2011, 2048, 2053, 2063, 2065], "statu": [7, 9, 37, 1362, 2012, 2017, 2032, 2070, 2093], "hud": 7, "risk": [7, 8, 30, 52, 55, 1733, 1736], "anyth": [7, 36, 40, 55, 63, 825, 828, 1197, 1284, 1797, 2013, 2028, 2035, 2051, 2055, 2068, 2098, 2099, 2114, 2115], "configur": [7, 13, 20, 23, 24, 28, 33, 34, 37, 39, 44, 47, 48, 50, 55, 790, 791, 794, 795, 799, 816, 817, 818, 819, 825, 828, 840, 841, 842, 846, 848, 851, 861, 862, 863, 864, 865, 975, 1064, 1716, 1757, 1871, 1964, 2012, 2022, 2029, 2045, 2047, 2048, 2059, 2061, 2068, 2073, 2075, 2085, 2087, 2098, 2102, 2108, 2109, 2111], "riski": 7, "had": [7, 64, 931, 
1187, 1209, 1288, 1706, 1945, 2042, 2049, 2098, 2099, 2107], "beforehand": [7, 63, 2102], "hei": 7, "my": [7, 23, 1462, 2054, 2065, 2070], "member": [7, 9, 23, 28, 37, 47, 48, 64, 1272, 1479, 1526, 1629, 2013, 2015, 2016, 2029, 2050, 2069, 2075, 2087, 2089], "sphinx": 7, "folder": [7, 9, 14, 23, 30, 45, 64, 1778, 2011, 2063, 2085, 2099, 2104, 2111], "tree": [7, 40, 59, 975, 1526, 1570, 1778, 2031, 2065, 2068, 2069, 2077, 2092], "master": [7, 28, 51, 794, 1166, 1570, 2011, 2075], "doxygen": 7, "special": [7, 11, 33, 40, 50, 53, 60, 64, 66, 71, 74, 75, 743, 744, 745, 825, 828, 922, 975, 1101, 1115, 1116, 1117, 1119, 1120, 1129, 1131, 1180, 1186, 1240, 1241, 1242, 1286, 1344, 1358, 1415, 1462, 1574, 1717, 1718, 1719, 1770, 1797, 1821, 1878, 1889, 1893, 1980, 2012, 2017, 2022, 2034, 2045, 2048, 2051, 2052, 2054, 2068, 2070, 2082, 2084, 2085, 2093, 2098, 2100, 2101, 2107], "server": [7, 23, 28, 48, 1276, 2045, 2068, 2070, 2075, 2093, 2095], "cppdoc": [7, 15], "cpp": [7, 14, 28, 2047, 2093, 2098], "accomplish": [7, 30, 2055, 2102], "holist": 7, "concept": [7, 52, 53, 60, 64, 2020, 2048, 2049, 2055, 2083, 2103], "galleri": 7, "restructur": [7, 2068], "text": [7, 23, 35, 53, 155, 156, 175, 610, 619, 683, 685, 686, 687, 688, 689, 690, 691, 692, 693, 696, 700, 747, 748, 758, 760, 762, 766, 770, 771, 772, 774, 775, 776, 782, 786, 822, 823, 868, 884, 885, 886, 887, 888, 943, 944, 945, 948, 951, 953, 955, 964, 967, 968, 970, 991, 992, 994, 995, 997, 1103, 1122, 1123, 1152, 1153, 1157, 1158, 1215, 1229, 1230, 1231, 1232, 1239, 1261, 1269, 1293, 1296, 1297, 1298, 1301, 1302, 1309, 1312, 1318, 1322, 1327, 1330, 1336, 1343, 1352, 1359, 1360, 1361, 1362, 1411, 1412, 1423, 1424, 1427, 1428, 1429, 1431, 1432, 1433, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1464, 1465, 1466, 1467, 1468, 1470, 1471, 1472, 1473, 1474, 1475, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1511, 1512, 1513, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1529, 1530, 1531, 1532, 1533, 1534, 1538, 1539, 1540, 1541, 1542, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1556, 1557, 1558, 1559, 1560, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1599, 1600, 1601, 1603, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1618, 1619, 1620, 1625, 1628, 1630, 1631, 1632, 1636, 1637, 1647, 1652, 1657, 1658, 1659, 1663, 1668, 1671, 1676, 1677, 1679, 1684, 1685, 1686, 1687, 1690, 1691, 1692, 1694, 1695, 1696, 1715, 1722, 1730, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1802, 1816, 1819, 1820, 1823, 1826, 1839, 1842, 1845, 1857, 1877, 1879, 1880, 1892, 1894, 1905, 1906, 1916, 1923, 1924, 1927, 1928, 1940, 1941, 1979, 2035, 2040, 2052, 2065, 2068, 2073, 2081, 2085, 2087, 2099, 2104], "rst": 7, "trigger": [7, 9, 28, 47, 48, 63, 86, 87, 89, 223, 931, 1082, 1874, 2041, 2042, 2045, 2047, 2048, 2054, 2060, 2075, 2096, 2099, 2100, 2102, 2107, 2108, 2111, 2115], "rebuild": [7, 24], "entir": [7, 14, 23, 28, 30, 33, 34, 37, 48, 64, 515, 699, 922, 923, 975, 1166, 1226, 1464, 1465, 1466, 1470, 1488, 1489, 1490, 1498, 1618, 1619, 1620, 1625, 1716, 1732, 1742, 1744, 1768, 2016, 2035, 2042, 2045, 2048, 2049, 2050, 2054, 2055, 2065, 2068, 2070, 2075, 2077, 2080, 2097, 2098, 2100, 2101, 2102, 2109, 2111, 2113], "circleci": 7, "shard": [7, 23, 30, 32, 33, 
34, 55, 1716, 2051, 2102], "worker": [7, 14, 23, 24, 28, 29, 32, 33, 37, 39, 40, 41, 47, 50, 51, 55, 1716, 2016, 2059, 2069, 2075, 2076, 2077], "40": [7, 1226, 1345, 1443, 1488, 1730, 1731, 1732, 1755, 1756, 1765, 1768, 1939, 2100], "minut": [7, 10, 28, 2085], "netlifi": 7, "noplot": 7, "render": [7, 28, 1191, 2085, 2113], "notebook": 7, "rebuilt": [7, 24, 32], "deploi": [7, 12, 37, 47, 2012, 2054, 2060, 2068, 2095, 2099], "action": [7, 28, 35, 37, 41, 64, 1008, 1042, 2045, 2063, 2068, 2069, 2077, 2113], "document": [8, 9, 10, 17, 23, 28, 53, 55, 56, 64, 682, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 876, 877, 878, 879, 960, 1010, 1011, 1013, 1064, 1108, 1216, 1220, 1222, 1253, 1260, 1272, 1388, 1389, 1419, 1430, 1468, 1469, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1526, 1711, 1712, 1733, 1742, 1751, 1771, 1825, 1852, 1869, 1964, 2011, 2013, 2015, 2016, 2025, 2030, 2032, 2033, 2034, 2036, 2041, 2045, 2048, 2050, 2055, 2059, 2060, 2065, 2067, 2068, 2070, 2071, 2072, 2075, 2080, 2084, 2089, 2092, 2097, 2103, 2107, 2110, 2114], "develop": [8, 9, 10, 14, 24, 28, 33, 64, 2015, 2016, 2023, 2030, 2042, 2048, 2054, 2055, 2059, 2065, 2068, 2070, 2071, 2075, 2080, 2099, 2104, 2106, 2111, 2114], "meant": [8, 29, 30, 47, 50, 55, 1757, 2041, 2046, 2075], "rule": [8, 9, 14, 28, 35, 64, 66, 74, 75, 87, 88, 89, 152, 863, 896, 959, 960, 1091, 1322, 1333, 1440, 1441, 1442, 1488, 1489, 1490, 1566, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1862, 1949, 2013, 2015, 2033, 2035, 2042, 2043, 2048, 2051, 2052, 2083, 2087, 2101, 2102], "concern": [8, 23, 55, 2032, 2045, 2065, 2109], "disagr": 8, "contribut": [8, 9, 30, 935, 1234, 1235, 1461, 1468, 1469, 1533, 1615, 1623, 1624, 1668, 1716, 2012, 2035, 2048, 2049, 2065], "maintainership": [8, 9], "escal": [8, 9], "hacker": 8, "poster": 8, "amaz": 8, "ml": [8, 2069], "obsess": 8, "soumith": [8, 10], "goe": [8, 64, 1162, 1434, 1783, 1784, 1796, 2020, 2050, 2063, 2099, 2102, 2103], "depth": [8, 9, 20, 25, 33, 52, 83, 781, 787, 1069, 1070, 1286, 1437, 1455, 1458, 1495, 1521, 1579, 1643, 1703, 2023, 2047, 2055, 2069, 2100, 2113], "primari": [8, 9, 28, 64, 83, 1282, 2017, 2023, 2024, 2080, 2101], "goal": [8, 44, 59, 64, 1374, 2023, 2042, 2047, 2052, 2077, 2104], "secondari": 8, "abil": [8, 12, 52, 1858, 2023, 2054, 2063, 2068, 2099], "flexibl": [8, 24, 33, 52, 55, 59, 1192, 1329, 2023, 2045, 2048, 2055, 2070, 2099], "abstract": [8, 16, 23, 24, 28, 29, 30, 35, 37, 45, 47, 50, 924, 925, 926, 927, 928, 931, 1193, 1738, 2016, 2020, 2024, 2047, 2070, 2075, 2096], "critic": [8, 28, 47, 682, 1731, 1765, 2029, 2044, 2045, 2102, 2103], "futur": [8, 9, 12, 24, 28, 30, 37, 47, 48, 52, 60, 64, 292, 323, 488, 515, 519, 559, 682, 689, 795, 817, 818, 819, 828, 864, 903, 906, 908, 909, 965, 966, 975, 989, 990, 991, 1007, 1008, 1010, 1011, 1013, 1042, 1043, 1050, 1051, 1053, 1217, 1269, 1272, 1275, 1282, 1283, 1286, 1291, 1303, 1314, 1316, 1317, 1318, 1321, 1334, 1362, 1363, 1374, 1385, 1465, 1526, 1634, 1657, 1658, 1659, 1707, 1716, 1721, 1765, 1766, 1771, 1786, 1826, 1842, 1923, 1927, 1951, 1981, 1982, 2012, 2013, 2014, 2015, 2016, 2017, 2022, 2023, 2026, 2029, 2034, 2035, 2044, 2045, 2048, 2060, 2063, 2065, 2067, 2068, 2069, 2070, 2072, 2075, 2080, 2082, 2086, 2087, 2089, 2091, 2098, 2102, 2106, 2113], "concret": [8, 11, 30, 35, 51, 53, 59, 64, 488, 787, 802, 826, 844, 975, 1185, 1187, 1204, 1205, 1579, 1671, 1703, 2013, 2016, 2020, 2045, 2048, 2057, 2098, 2099, 2101], 
"manner": [8, 28, 33, 37, 90, 515, 911, 913, 2034, 2036, 2043, 2067], "jump": [8, 562, 2083], "regim": 8, "ei": 8, "tradeoff": [8, 24, 52, 488, 2070, 2076, 2102, 2107], "temptat": 8, "impos": [8, 50, 61, 1866, 2032, 2041, 2083], "strict": [8, 30, 53, 911, 912, 913, 914, 915, 916, 1165, 1172, 1191, 1272, 1288, 1289, 1526, 1766, 2068, 2085, 2087], "upfront": [8, 2098], "simplifi": [8, 24, 63, 1180, 1187, 1211, 1280, 1516, 1731, 1797, 2023, 2042, 2048, 2052, 2055, 2067, 2070, 2076, 2098], "worth": [8, 9, 23, 24, 33, 51, 53, 1166, 2011, 2082, 2084, 2105], "friction": 8, "compel": 8, "narrow": [8, 1197, 1342, 1422, 1797, 1884, 2014, 2016, 2033, 2066, 2072, 2084, 2111], "subproblem": 8, "fragment": [8, 1032, 1064, 1989, 2020, 2045, 2111], "ecosystem": [8, 2054, 2056, 2099], "incomprehens": 8, "seamlessli": [8, 2035], "softwar": [8, 19, 1308, 1309, 1336, 1964, 2045, 2053, 2092, 2099], "experi": [8, 9, 11, 24, 52, 61, 1177, 1556, 1687, 1716, 1976, 2023, 2048, 2085, 2100], "rich": [8, 33, 2016], "denomin": [8, 689, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1541, 1566, 1715, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1830], "subset": [8, 23, 28, 32, 48, 53, 1284, 1797, 2013, 2015, 2016, 2048, 2062, 2065, 2073, 2106], "borrow": 8, "zen": 8, "implicit": [8, 52, 53, 88, 770, 771, 774, 775, 776, 1216, 1226, 1435, 1436, 1437, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1519, 1520, 1521, 1578, 1599, 1600, 1601, 1607, 1608, 1609, 1657, 1658, 1659, 1814, 2014, 2016, 2017, 2051, 2065, 2068, 2084, 2106], "concis": [8, 40, 2075], "interchang": [8, 35, 1794, 1948, 2015, 2053, 2063, 2092, 2103], "everydai": 8, "english": 8, "movement": [8, 2084, 2102], "worri": [8, 28, 2075], "placement": [8, 34, 37, 862, 1706, 2070, 2075, 2102], "favor": [8, 28, 787, 788, 789, 966, 1077, 1081, 1272, 1327, 1330, 1362, 1363, 1408, 1484, 1526, 1580, 1581, 1703, 1704, 1705, 1707, 1720, 1826, 1927, 1951, 2006], "practition": 8, "debugg": [8, 28, 1277, 2052, 2102, 2111], "plug": 8, "ir": [8, 52, 64, 81, 83, 762, 1275, 1276, 1477, 1478, 1778, 2013, 2016, 2063, 2065, 2092, 2098, 2099, 2101, 2102, 2111], "classic": [8, 2042], "sort": [8, 32, 64, 592, 611, 879, 880, 903, 905, 908, 1108, 1197, 1233, 1395, 1430, 1758, 1759, 1770, 1827, 1862, 1946, 1960, 2014, 2016, 2048, 2050, 2066, 2080, 2098, 2102, 2106], "distribut": [8, 11, 23, 24, 29, 37, 38, 39, 40, 41, 44, 45, 46, 47, 49, 50, 51, 55, 63, 175, 260, 288, 379, 456, 483, 610, 682, 697, 698, 821, 938, 945, 1082, 1412, 1430, 1434, 1439, 1461, 1462, 1463, 1464, 1465, 1466, 1470, 1475, 1479, 1491, 1540, 1545, 1566, 1615, 1617, 1618, 1619, 1620, 1625, 1629, 1630, 1634, 1644, 1676, 1716, 1772, 1819, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 2012, 2016, 2034, 2035, 2040, 2041, 2045, 2069, 2071, 2077, 2081, 2085, 2089], "tldr": 8, "resourc": [8, 14, 23, 28, 37, 43, 47, 52, 64, 2016, 2032, 2057, 2080, 2102, 2108, 2114, 2115], "characterist": [8, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1928, 2049, 2055], "uniformli": [8, 35, 1837, 1838, 2087], "leak": [8, 896, 903, 906, 908, 2016, 2032, 2042], "smart": [8, 2048, 2068, 2075], "anywai": [8, 2042, 2051], "obviou": [8, 1180, 2050, 2077, 2101], "extens": [8, 14, 18, 28, 30, 35, 52, 65, 1344, 1345, 1858, 1877, 2012, 2023, 2035, 2041, 2046, 2048, 2060, 2063, 2068, 2080, 2101, 2102, 2111], "unavoid": 8, "latenc": [8, 30, 44, 2024, 2044, 2045, 2102, 2108], "caveat": [8, 1706, 1764, 2023, 2032, 2045, 2055, 2060, 2097, 2107, 2111], "valuabl": 8, "certainli": [8, 
2023], "heterogen": [8, 2015], "cluster": [8, 28, 30, 33, 46, 47, 48, 55, 1430, 2085, 2115], "focus": [8, 2015, 2016, 2048, 2111], "beaten": 8, "space": [8, 9, 23, 35, 774, 775, 776, 1091, 1126, 1128, 1130, 1131, 1137, 1144, 1226, 1235, 1288, 1289, 1343, 1359, 1430, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1491, 1502, 1503, 1504, 1505, 1506, 1507, 1520, 1521, 1578, 1607, 1608, 1609, 1610, 1611, 1612, 1644, 1949, 2014, 2024, 2036, 2042, 2052, 2055, 2060, 2082, 2113], "innov": 8, "growth": 8, "ultim": [8, 9, 14, 40, 50, 52, 2092], "evidenc": 8, "began": 8, "bind": [8, 14, 28, 53, 64, 1044, 1185, 1187, 1196, 1209, 1210, 2016, 2017, 2020, 2046, 2048, 2068, 2099], "monolith": 8, "deepli": 8, "integr": [8, 23, 41, 53, 59, 156, 945, 947, 948, 949, 950, 951, 952, 997, 1091, 1234, 1272, 1296, 1340, 1341, 1421, 1526, 1833, 1845, 1949, 2035, 2048, 2054, 2055, 2064, 2067, 2070, 2081, 2083, 2087, 2097], "numpi": [8, 23, 60, 450, 495, 695, 696, 701, 880, 882, 883, 903, 904, 908, 959, 1103, 1105, 1106, 1108, 1126, 1147, 1148, 1149, 1150, 1154, 1155, 1160, 1161, 1177, 1236, 1303, 1304, 1314, 1325, 1327, 1328, 1329, 1330, 1331, 1336, 1337, 1340, 1342, 1374, 1379, 1820, 1845, 1848, 1855, 1867, 1874, 1909, 1910, 1911, 1912, 1913, 1914, 1927, 1929, 1930, 1939, 1942, 1943, 1945, 1973, 1976, 1977, 2020, 2023, 2043, 2048, 2049, 2058, 2059, 2060, 2061, 2068, 2083, 2084, 2085, 2086, 2087, 2099, 2116], "scipi": [8, 963, 1319, 1320, 1673, 1820, 1882, 2068, 2078, 2081, 2085], "scikit": [8, 1643], "favorit": 8, "cython": 8, "numba": 8, "reinvent": 8, "wheel": [8, 2061], "year": [8, 2080], "rewrot": 8, "frontend": [8, 15, 33, 53, 59, 64, 977, 2102], "familiar": [8, 15, 53, 64, 1008, 1042, 1063, 2013, 2042, 2049, 2068, 2070, 2076, 2077, 2099, 2100, 2102, 2110], "perhap": [8, 1187, 2049, 2099], "importantli": 8, "huge": [8, 1928, 2029, 2099], "scientif": [8, 1874], "pareto": [8, 2012], "close": [8, 15, 28, 47, 64, 66, 69, 74, 1046, 1183, 1261, 1308, 1309, 1335, 1336, 1362, 1439, 1558, 1576, 1605, 1793, 1927, 1951, 2029, 2042, 2048, 2058, 2065, 2068, 2070, 2075, 2085, 2087], "curv": [8, 2085], "torchdynamo": [8, 52, 66, 77, 682, 975, 983, 2012, 2022, 2065, 2092, 2097, 2098, 2099, 2100, 2102, 2104], "frame": [8, 52, 64, 975, 1269, 1923, 1966, 2050, 2062, 2063, 2085, 2092, 2098, 2100, 2102, 2103, 2111, 2113], "torch_funct": [8, 2048], "torch_dispatch": 8, "torch": [8, 9, 11, 15, 18, 19, 21, 24, 25, 29, 32, 37, 38, 39, 40, 41, 44, 45, 47, 49, 50, 51, 55, 58, 66, 67, 68, 69, 70, 71, 72, 73, 83, 84, 86, 88, 90, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 897, 898, 903, 908, 909, 918, 919, 920, 930, 931, 938, 939, 940, 941, 942, 998, 999, 1008, 1009, 1010, 1011, 1013, 1014, 1029, 1031, 1042, 1112, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 
1188, 1189, 1190, 1191, 1192, 1193, 1270, 1271, 1272, 1287, 1385, 1397, 1398, 1399, 1403, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1587, 1706, 1715, 1716, 1717, 1718, 1719, 1733, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1757, 1769, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1833, 1902, 1981, 1982, 1983, 1986, 1988, 2012, 2013, 2014, 2015, 2024, 2025, 2033, 2034, 2041, 2042, 2043, 2044, 2046, 2047, 2050, 2051, 2052, 2054, 2055, 2056, 2057, 2059, 2061, 2063, 2064, 2070, 2071, 2072, 2075, 2076, 2077, 2084, 2096, 2097, 2098, 2099, 2100, 2101, 2104, 2107, 2110, 2111, 2113], "fx": [8, 12, 52, 53, 84, 682, 749, 817, 818, 819, 820, 843, 863, 977, 1166, 1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 2012, 2020, 2062, 2063, 2071, 2072, 2097, 2098, 2099, 2100, 2101, 2102, 2110, 2111], "tracer": [8, 33, 52, 1288, 2065, 2085, 2091, 2099], "functorch": [8, 53, 56, 61, 66, 69, 74, 75, 78, 2097], "anchor": [8, 64, 1575, 1576, 1700, 1701, 2014, 2110], "hackabl": 8, "todai": [8, 28, 53, 56, 61, 975, 2028, 2070, 2098], "evolv": [8, 2047, 2063, 2064], "ai": [8, 2065, 2069, 2081], "adopt": [9, 28, 722, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 1723, 1724, 2063], "hierarch": [9, 2085], "pull": [9, 10, 15, 64, 152, 896, 1201, 2065, 2068, 2101, 2114], "request": [9, 10, 11, 28, 30, 50, 64, 691, 864, 882, 883, 931, 1064, 1345, 1367, 1377, 1649, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 2011, 2028, 2035, 2041, 2042, 2044, 2045, 2065, 2068, 2075, 2076, 2077, 2080, 2081, 2098, 2113, 2114], "overseen": 9, "catch": [9, 40, 2013, 2042, 2049, 2052, 2099, 2103], "maker": 9, "strong": 9, "toward": [9, 66, 1103, 1234, 1235, 1345, 1426, 1716, 1805, 1846, 1854, 1855, 2051], "philosophi": [9, 59, 2012], "beyond": [9, 24, 1392, 1461, 1783, 1784, 1793, 1853, 2050, 2055, 2063, 2098], "encourag": [9, 37, 2035, 2070, 2080, 2087], "propos": [9, 1780, 1800, 1801, 2023, 2052, 2067, 2076], "review": [9, 10, 24, 2068, 2104], "willing": 9, "invest": 9, "anyon": 9, "ownership": [9, 64], "codebas": [9, 2048], "strictli": [9, 23, 152, 193, 210, 488, 896, 959, 1235, 1272, 1276, 1526, 2042, 2081], "compani": 9, "bui": 9, "addition": [9, 23, 24, 28, 30, 35, 37, 52, 55, 152, 515, 619, 896, 931, 1166, 1170, 1171, 1276, 1336, 1430, 1488, 1489, 1490, 1904, 2050, 2089, 2113], "membership": [9, 37, 46, 47, 2017], "That": [9, 12, 17, 19, 37, 45, 48, 57, 64, 989, 
1269, 1343, 1359, 1964, 2020, 2026, 2048, 2049, 2050, 2051, 2060, 2068, 2075, 2099, 2102], "seat": 9, "reserv": [9, 44, 1064, 2017, 2045, 2055, 2113], "emploi": [9, 30, 1796, 2055, 2068, 2093], "directori": [9, 14, 28, 30, 33, 37, 45, 83, 1778, 2011, 2027, 2054, 2063, 2068, 2069, 2085, 2093, 2102, 2111], "procedur": [9, 35, 862, 1288, 1289, 1345, 1902, 2075, 2111], "disput": 9, "made": [9, 20, 28, 30, 32, 48, 52, 64, 83, 903, 906, 908, 923, 1272, 1526, 1572, 1574, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1843, 2015, 2023, 2042, 2061, 2068, 2085, 2087, 2096, 2098, 2099, 2111, 2112], "public": [9, 10, 65, 1166, 2048, 2112], "relev": [9, 29, 33, 37, 47, 63, 682, 1336, 1498, 2017, 2041, 2042, 2063, 2068, 2070, 2093, 2102, 2103, 2104, 2114], "resolut": [9, 1538, 1539, 1597, 1632, 1674, 1675, 1949, 2017, 2068, 2116], "conclus": 9, "publicli": [9, 2112], "vision": [9, 1282, 1461, 1615, 2011, 2012, 2104], "roadmap": [9, 10], "parti": [9, 47, 2011, 2012, 2013, 2020, 2045, 2049, 2055, 2068, 2099], "triag": [9, 10], "meet": [9, 10, 12, 30, 46, 989, 1302, 2045, 2102], "Their": [9, 1100, 2048, 2101], "articul": 9, "cohes": 9, "negoti": [9, 2075], "contenti": 9, "broad": [9, 2055, 2114], "stakehold": 9, "power": [9, 12, 46, 1071, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1151, 1296, 1326, 1340, 1493, 1494, 1495, 1653, 1654, 1655, 1731, 1765, 1780, 1809, 1823, 1847, 1970, 2017, 2045, 2080], "veto": 9, "admin": 9, "amongst": 9, "commonli": [9, 35, 55, 1374, 2016, 2018, 2041, 2042, 2051, 2067, 2070, 2083, 2092, 2095, 2098], "merit": 9, "demonstr": [9, 33, 48, 52, 64, 66, 74, 75, 1445, 2013, 2055, 2060, 2063, 2075, 2093, 2100, 2102, 2104, 2109], "expertis": 9, "align": [9, 24, 52, 83, 762, 781, 787, 822, 828, 975, 1108, 1226, 1328, 1437, 1445, 1461, 1477, 1491, 1496, 1520, 1521, 1542, 1579, 1587, 1588, 1589, 1615, 1616, 1643, 1644, 1684, 1703, 1730, 1771, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1800, 1949, 2012, 2033, 2042, 2052, 2060, 2065, 2073, 2081], "continu": [9, 23, 28, 30, 35, 51, 55, 64, 404, 610, 821, 912, 1226, 1308, 1309, 1336, 1444, 1445, 1716, 1810, 2017, 2023, 2042, 2051, 2060, 2063, 2069, 2089, 2097, 2099, 2100, 2102], "light": [9, 2085], "mainten": [9, 47, 48], "emeritu": [9, 10], "inact": [9, 1064, 2045, 2113], "contact": [9, 28], "item": [9, 23, 30, 66, 76, 77, 585, 586, 587, 591, 794, 795, 796, 816, 817, 818, 819, 942, 1461, 1527, 1536, 1736, 1874, 2011, 2013, 2014, 2015, 2016, 2024, 2033, 2045, 2065, 2066, 2068, 2075, 2085, 2086, 2096, 2098, 2099, 2101, 2106], "nomine": 9, "breadth": [9, 33], "testimoni": 9, "posit": [9, 23, 30, 33, 35, 52, 64, 90, 404, 473, 547, 737, 860, 864, 866, 944, 946, 953, 966, 967, 968, 1124, 1125, 1126, 1127, 1128, 1136, 1138, 1142, 1144, 1162, 1172, 1173, 1176, 1185, 1187, 1230, 1231, 1264, 1267, 1272, 1275, 1302, 1303, 1314, 1316, 1327, 1330, 1331, 1345, 1365, 1380, 1416, 1421, 1422, 1439, 1453, 1454, 1455, 1460, 1461, 1462, 1469, 1479, 1486, 1526, 1532, 1563, 1570, 1575, 1576, 1597, 1605, 1624, 1629, 1632, 1700, 1701, 1709, 1710, 1853, 1855, 1882, 1905, 1907, 1952, 1953, 1954, 1955, 1963, 2011, 2014, 2028, 2033, 2034, 2048, 2049, 2052, 2063, 2065, 2066, 2074, 2080, 2081, 2087, 2102, 2112, 2116], "neg": [9, 11, 14, 20, 23, 28, 35, 50, 64, 90, 443, 445, 460, 663, 664, 700, 757, 782, 946, 951, 992, 995, 1013, 1029, 1035, 1036, 1064, 1077, 1124, 1125, 1126, 1127, 1128, 1129, 1143, 1144, 1145, 1151, 1162, 1191, 1197, 1262, 1264, 1266, 
1326, 1353, 1365, 1388, 1389, 1403, 1412, 1416, 1421, 1422, 1430, 1439, 1447, 1448, 1449, 1470, 1479, 1512, 1519, 1520, 1521, 1529, 1533, 1535, 1540, 1558, 1575, 1576, 1625, 1629, 1632, 1634, 1643, 1647, 1657, 1658, 1659, 1668, 1676, 1700, 1701, 1703, 1819, 1820, 1851, 1855, 1889, 1891, 1894, 1906, 1944, 1952, 1953, 1954, 1955, 1962, 1963, 1982, 1986, 1990, 1991, 2003, 2013, 2014, 2033, 2035, 2040, 2042, 2048, 2049, 2065, 2066, 2074, 2080, 2081, 2098, 2102, 2106], "interact": [9, 15, 23, 28, 64, 65, 865, 918, 1011, 1044, 2017, 2020, 2048, 2068, 2070, 2085, 2096, 2099, 2102, 2105, 2113], "final": [9, 19, 28, 30, 32, 35, 37, 47, 53, 55, 59, 60, 688, 691, 692, 762, 943, 961, 974, 1108, 1147, 1166, 1200, 1226, 1344, 1362, 1367, 1446, 1477, 1479, 1496, 1542, 1555, 1706, 1905, 1949, 2013, 2015, 2016, 2017, 2033, 2048, 2051, 2052, 2055, 2058, 2060, 2062, 2063, 2065, 2067, 2068, 2076, 2077, 2096, 2098, 2100, 2111], "declin": 9, "conflict": [9, 24, 30, 48, 52, 2068], "lack": [9, 11, 33, 942, 1308, 1309, 1336], "unfit": 9, "conduct": [9, 1716, 1816, 1928, 2075, 2093], "filial": 9, "romant": 9, "strength": 9, "candid": [9, 935, 2068], "letter": [9, 1108], "befit": 9, "candidaci": 9, "behind": [9, 28, 2012, 2060, 2076, 2101], "75": [9, 1234, 1514, 1632, 1650, 1780, 1827, 2014, 2081], "unforeseen": 9, "circumst": [9, 47, 975, 1197, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1684, 2045], "perman": [9, 64, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1754, 2042, 2077], "unavail": [9, 2015], "rank": [9, 23, 24, 28, 29, 30, 32, 33, 35, 37, 45, 47, 48, 51, 53, 55, 66, 75, 1318, 1319, 1320, 1327, 1362, 1518, 1566, 1716, 1736, 1816, 1928, 2014, 2047, 2051, 2057, 2065, 2075, 2076, 2098, 2111, 2115], "elect": 9, "invit": [9, 2011], "convinc": 9, "approach": [9, 24, 28, 64, 89, 963, 1177, 1439, 1870, 1902, 1976, 2013, 2032, 2035, 2045, 2046, 2048, 2052, 2062, 2063, 2070, 2075, 2102], "interview": 9, "talk": [9, 50, 2020, 2054], "gather": [9, 28, 30, 33, 47, 55, 515, 1939, 1964, 2014, 2048, 2050, 2051, 2054, 2066, 2068, 2106], "read": [9, 12, 18, 19, 23, 28, 30, 36, 37, 40, 47, 48, 53, 55, 59, 64, 450, 929, 989, 1066, 1091, 1159, 1161, 1162, 1189, 1280, 1344, 1810, 2012, 2020, 2033, 2034, 2042, 2043, 2045, 2046, 2047, 2049, 2054, 2058, 2062, 2068, 2070, 2075, 2096, 2099, 2100, 2102, 2104], "attend": [9, 737, 1532, 1570], "confer": [9, 1479], "pipelin": [9, 30, 64, 2012, 2075, 2102], "world": [9, 28, 32, 37, 45, 47, 48, 55, 1566, 1716, 2042, 2068, 2070, 2098, 2099, 2100], "cover": [9, 53, 56, 64, 66, 1082, 1108, 1519, 1657, 1658, 1659, 2016, 2018, 2034, 2042, 2048, 2051, 2052, 2054, 2055, 2075, 2100, 2105, 2109, 2114], "push": [9, 39, 1068, 1070, 1177, 1338, 1976, 2069, 2099], "codeown": 9, "notifi": [9, 29, 48, 2077], "expert": 9, "strongli": [9, 28, 37, 48, 1464, 1465, 1466, 1470, 1923, 2011], "failur": [9, 28, 35, 37, 39, 40, 44, 45, 46, 47, 51, 52, 682, 922, 923, 975, 977, 1166, 1187, 1288, 1289, 1392, 2016, 2022, 2032, 2051, 2063, 2075, 2077, 2087, 2099, 2102, 2111], "revert": [9, 35, 55, 1563, 1692, 2077], "substanti": [9, 24, 2045, 2096], "syntact": [9, 40, 64], "incompat": [9, 14, 60, 956, 1269, 1723, 1724, 2043, 2068], "establish": [9, 19, 28, 47, 1810, 2042], "seri": [9, 33, 64, 1445, 1488, 2059, 2065, 2096, 2107, 2112], "lf": 9, "llc": 9, "guidelin": [9, 1747, 2057, 2062, 2068, 2070, 2071], "trademark": 9, "www": [9, 1445, 1576, 2085], "lfproject": 9, "acknowledg": [9, 28, 2012, 2077, 2080], "copyright": [9, 2111], "holder": 9, "independ": [9, 23, 28, 47, 52, 
55, 63, 155, 156, 781, 787, 911, 912, 913, 914, 915, 916, 1013, 1226, 1235, 1331, 1463, 1464, 1465, 1466, 1470, 1618, 1619, 1620, 1625, 1642, 1643, 1703, 1732, 1768, 1826, 1982, 2012, 2013, 2041, 2042, 2045, 2060, 2068, 2109], "authorship": 9, "claus": [9, 2050], "bsd": 9, "licens": 9, "opensourc": 9, "outbound": 9, "inbound": 9, "q": [9, 24, 35, 433, 481, 760, 801, 1216, 1293, 1309, 1312, 1331, 1373, 1419, 1491, 1532, 1586, 1730, 1814, 1816, 1826, 1827, 1928, 2013, 2014, 2052, 2059, 2065, 2080, 2081, 2101], "partli": [9, 2016], "domain": [9, 35, 40, 686, 828, 888, 1122, 1123, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1136, 1138, 1139, 1140, 1141, 1226, 2035, 2065, 2070], "absolut": [9, 11, 14, 64, 94, 683, 696, 922, 923, 1156, 1261, 1301, 1306, 1327, 1330, 1332, 1353, 1486, 1492, 1558, 1641, 1645, 1688, 1741, 1742, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1771, 1779, 1820, 1846, 1877, 2014, 2024, 2055, 2065, 2066, 2081, 2086, 2087, 2093], "health": 9, "success": [9, 30, 35, 37, 44, 64, 288, 1303, 1314, 1316, 1362, 1743, 1909, 1910, 1911, 1913, 1914, 2045, 2055, 2065, 2080, 2111], "am": 9, "grant": 9, "purchas": 9, "board": 9, "driven": [9, 2065], "clearli": [9, 1187, 2023, 2068], "sponsorship": 9, "foundat": [9, 2063], "ptf": 9, "minor": [9, 1035, 1303, 1319, 2080, 2099], "committ": 9, "prior": [9, 19, 24, 28, 30, 37, 81, 83, 903, 907, 908, 909, 980, 1345, 1572, 1574, 1684, 1747, 2043, 2045, 2048, 2067, 2070, 2096, 2102], "walkthrough": [9, 2041], "facebook": 9, "infrastructur": [9, 39, 2068, 2098], "employe": 9, "expand": [9, 35, 257, 495, 881, 903, 907, 908, 909, 922, 923, 958, 1325, 1326, 1374, 1532, 1577, 1677, 1959, 2011, 2014, 2022, 2033, 2034, 2043, 2045, 2048, 2049, 2063, 2065, 2066, 2068, 2084, 2106], "deliv": [9, 2069], "offici": [9, 28, 89, 1430, 2070], "showcas": [9, 12, 34, 1522, 2045, 2057], "whenev": [9, 52, 883, 1730, 1731, 2029, 2032, 2048, 2073, 2076, 2077, 2101, 2111, 2112], "fix": [10, 18, 23, 28, 35, 37, 47, 52, 57, 60, 64, 84, 85, 264, 797, 803, 931, 1166, 1286, 1318, 1468, 1469, 1545, 1623, 1624, 1643, 1716, 2013, 2014, 2029, 2040, 2041, 2045, 2050, 2051, 2057, 2061, 2063, 2065, 2066, 2067, 2096, 2099, 2102, 2109, 2111], "plu": [10, 14, 30, 911, 1162, 1319, 2051, 2080], "quarterli": 10, "chintala": 10, "edward": 10, "yang": [10, 1345], "ezyang": [10, 2020, 2084], "greg": 10, "chanan": 10, "gchanan": 10, "dmytro": 10, "dzhulgakov": 10, "nikita": 10, "shulga": 10, "malfet": 10, "joel": [10, 1816, 1928], "schlosser": 10, "jbschlosser": 10, "alban": 10, "desmaison": 10, "alband": 10, "sam": 10, "gross": 10, "colesburi": 10, "adam": [10, 29, 30, 32, 35, 55, 931, 1784, 1785, 1787, 1793, 1797, 2067], "paszk": 10, "apaszk": 10, "ilqar": 10, "ramazanli": 10, "iramazanli": 10, "vincent": 10, "quennevil": 10, "belair": 10, "vincentqb": 10, "jeffrei": 10, "wan": 10, "soulitz": 10, "elia": 10, "ellison": 10, "eellison": 10, "michael": [10, 2111], "suo": 10, "yanan": 10, "cao": 10, "gmagogsfm": 10, "jame": 10, "reed": 10, "jamesr66a": 10, "jason": [10, 2100], "ansel": [10, 2100], "jansel": 10, "jiong": 10, "gong": 10, "jgong5": 10, "zach": 10, "devito": 10, "zdevito": 10, "fritz": 10, "obermey": 10, "fritzo": 10, "neeraj": 10, "pradhan": 10, "neerajprad": 10, "alican": 10, "bozkurt": 10, "alicanb": 10, "vishwak": 10, "srinivasan": 10, "vishwakftw": 10, "shen": 10, "li": [10, 12, 1419, 1827, 2045, 2099, 2102], "mrshenli": 10, "pritam": 10, "damania": 10, "pritamdamania87": 10, "yanli": 10, "zhao": 10, "zhaojuanmao": 10, "rohan": 10, "varma": 10, "wanchao": 10, "liang": 10, 
"wanchaol": 10, "junji": 10, "wang": [10, 35], "fduwjj": 10, "howard": 10, "huang": 10, "tristan": 10, "rice": 10, "d4l3k": 10, "alisson": 10, "azzolini": 10, "aazzolini": 10, "ke": 10, "wen": 10, "kwen2501": 10, "kiuk": 10, "chung": 10, "kiukchung": 10, "pieter": 10, "noordhui": 10, "pietern": 10, "mingzh": 10, "mingzhe09088": 10, "omkar": 10, "salpekar": 10, "osalpekar": 10, "simon": 10, "ssnl": 10, "vitali": 10, "fedyunin": 10, "vitalyfedyunin": 10, "mike": 10, "ruberri": 10, "mruberri": 10, "mario": 10, "lezcano": 10, "ivan": 10, "yashchuk": 10, "ivanyashchuk": 10, "pearu": 10, "peterson": 10, "vedeneev": 10, "nikitav": 10, "christian": 10, "puhrsch": 10, "cpuhrsch": 10, "andrew": [10, 1345], "amjam": 10, "driss": 10, "guessou": 10, "drisspg": 10, "mikayla": 10, "gawarecki": 10, "mikaylagawarecki": 10, "natalia": 10, "gimelshein": 10, "ngimel": 10, "georg": 10, "qi": 10, "peter": 10, "bell": 10, "peterbell10": 10, "mingfei": 10, "ma": 10, "mingfeima": 10, "xiaob": 10, "zhang": 10, "xiaobingsup": 10, "xiaoqiang": 10, "zheng": 10, "xq": 10, "ilia": 10, "cherniavskii": 10, "cher": 10, "bai": 10, "bddppq": 10, "yinghai": 10, "jianhui": 10, "piotr": 10, "bialecki": 10, "ptrblck": 10, "sarofeen": 10, "csarofeen": 10, "tulloch": 10, "ajtulloch": 10, "alex": 10, "jann": 10, "jjsjann123": 10, "peng": 10, "sun": 10, "sunway513": 10, "jithun": 10, "nair": 10, "jithunnair": 10, "jeff": 10, "daili": 10, "jeffdaili": 10, "eli": 10, "uriega": 10, "seemether": 10, "mikei": 10, "dagits": 10, "zain": 10, "rizvi": 10, "zainrizvi": 10, "nirav": 10, "mehta": 10, "mehtanirav": 10, "andrei": 10, "talman": 10, "atalman": 10, "zhuoji": 10, "zhou": 10, "zhouzhuoji": 10, "karl": 10, "ostmo": 10, "kostmo": 10, "adnan": 10, "aziz": 10, "adnanaziz": 10, "ck": 10, "luk": 10, "ckluk": 10, "taylor": [10, 1226], "robi": 10, "robieta": 10, "xu": [10, 55], "xuzhao9": 10, "geeta": 10, "chauhan": 10, "chauhang": 10, "victor": 10, "bittorf": 10, "bitfort": 10, "gisl": 10, "dankel": 10, "gdankel": 10, "Will": [10, 28, 33, 55, 64, 977, 1185, 1912, 2015, 2034, 2065, 2096, 2107], "feng": 10, "yf225": 10, "brian": 10, "hirsh": 10, "bdhirsh": [10, 1967], "sebastian": 10, "messmer": 10, "smessmer": 10, "aaron": 10, "bockov": 10, "abock": 10, "bowen": 10, "bao": [10, 2108], "bowenbao": 10, "thiago": 10, "crepaldi": 10, "thiagocrepaldi": 10, "gari": 10, "miguel": 10, "garymm": 10, "lara": 10, "haidar": 10, "hdr": 10, "fang": 10, "houseroad": 10, "negin": 10, "raoof": 10, "neginraoof": 10, "spandan": 10, "tiwari": 10, "spandantiwari": 10, "david": [10, 1430], "reiss": 10, "dreiss": 10, "raziel": 10, "guevara": 10, "linbin": 10, "yu": 10, "linbinyu": 10, "kobzarev": 10, "ivankobzarev": 10, "tao": 10, "xta0": 10, "vasilii": 10, "kuznetsov": 10, "vkuzo": 10, "jerri": 10, "jerryzh168": [10, 795], "supriya": 10, "rao": 10, "supriyar": 10, "zafar": 10, "takhirov": 10, "raghuraman": 10, "krishnamoorthi": 10, "raghuramank100": 10, "guoliang": 10, "hua": 10, "nbcsm": 10, "teng": 10, "gao": 10, "gaoteng": 10, "git": [10, 2102, 2111], "johnson": 10, "peterjc123": [10, 2061], "kulin": 10, "seth": 10, "kulinseth": 10, "ramin": 10, "azarmehr": 10, "razarmehr": 10, "alfredo": 10, "mendoza": 10, "avmgithub": 10, "sunita": 10, "nadamp": 10, "snadamp": 10, "svetlana": 10, "karslioglu": 10, "svekar": 10, "jack": 10, "jackcaog": 10, "daniel": [10, 35], "sohn": 10, "jysohn23": 10, "cain": 10, "zcain117": 10, "gregori": 10, "ail": 10, "ailzhang": 10, "libenzi": 10, "dlibenzi": 10, "suhan": 10, "asuhan": 10, "manoj": 10, "mycpuorg": 10, "vamshi": 10, 
"dantu": 10, "vdantu": 10, "dhanasekar": 10, "karuppasami": 10, "dhanainm": 10, "francisco": 10, "massa": 10, "fmassa": 10, "vasili": 10, "vrynioti": 10, "datumbox": 10, "nicola": 10, "hug": [10, 2012], "nicolashug": 10, "yosua": 10, "maranatha": 10, "yosuamichael": 10, "joao": 10, "gome": 10, "jdsgome": 10, "philip": 10, "meier": 10, "pmeier": 10, "fomin": 10, "vfdev": 10, "nayef": 10, "ahm": 10, "nayef211": 10, "parmeet": 10, "singh": 10, "bhatia": 10, "guanheng": 10, "zhangguanheng66": 10, "moto": 10, "hira": 10, "mthrok": 10, "hwang": 10, "hwangjeff": 10, "carolin": 10, "chen": 10, "carolineechen": 10, "xiaohui": 10, "zhaoheng": 10, "ni": 10, "nateanl": 10, "qb": 10, "ivchenko": 10, "divchenko": 10, "colin": 10, "colin2328": 10, "wenlei": 10, "xie": 10, "wenleix": 10, "express": [11, 12, 30, 53, 56, 60, 61, 64, 66, 74, 75, 76, 77, 321, 989, 1180, 1183, 1187, 1194, 1198, 1199, 1200, 1201, 1211, 1270, 1273, 1768, 1923, 2017, 2026, 2035, 2042, 2046, 2048, 2070, 2098, 2099, 2102, 2111, 2112], "bj": 11, "j": [11, 28, 35, 315, 317, 323, 515, 517, 519, 694, 901, 911, 913, 986, 1108, 1124, 1125, 1132, 1138, 1142, 1143, 1213, 1269, 1272, 1308, 1309, 1320, 1336, 1345, 1352, 1360, 1362, 1367, 1464, 1465, 1466, 1470, 1526, 1529, 1618, 1619, 1620, 1625, 1820, 1827, 1906, 1923, 1927, 1973, 2015, 2040, 2042, 2048, 2052, 2060, 2085], "imaginari": [11, 313, 922, 986, 993, 1129, 1131, 1139, 1140, 1141, 1243, 1262, 1264, 1265, 1268, 1839, 1923, 1974, 1975, 2017, 2042, 2052, 2087], "satisfi": [11, 21, 24, 35, 53, 64, 619, 696, 762, 796, 912, 922, 923, 959, 1124, 1125, 1127, 1129, 1131, 1139, 1140, 1141, 1156, 1186, 1286, 1312, 1338, 1339, 1430, 1445, 1472, 1477, 1496, 1542, 1747, 1764, 1846, 1862, 1902, 1923, 2016, 2020, 2042, 2045, 2051, 2052, 2067, 2069, 2080, 2083, 2100], "equat": [11, 968, 1108, 1180, 1191, 1216, 1230, 1316, 1317, 1318, 1319, 1320, 1322, 1333, 1335, 1438, 1887, 1951, 2014, 2042, 2052, 2067, 2081], "frequent": [11, 30, 1430, 2012, 2045, 2057, 2058, 2060, 2080, 2092, 2093, 2104], "occur": [11, 23, 28, 40, 41, 47, 55, 60, 84, 86, 198, 317, 898, 931, 975, 1067, 1108, 1187, 1269, 1275, 1286, 1316, 1445, 1566, 1585, 1616, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1797, 2015, 2029, 2034, 2041, 2042, 2045, 2050, 2058, 2063, 2065, 2069, 2075, 2077, 2084, 2087, 2098, 2099, 2102, 2104, 2107, 2109, 2111, 2112, 2113], "mathemat": [11, 64, 911, 912, 913, 914, 915, 916, 1156, 1226, 1438, 1440, 1441, 1442, 1488, 1489, 1490, 1491, 1566, 1651, 1691, 1716, 1771, 1846, 1904, 1905, 2016, 2020, 2042, 2058, 2081, 2089], "topic": [11, 52, 2054, 2055, 2100], "tradition": 11, "torchaudio": [11, 2012], "mimick": 11, "assembli": 11, "lapack": [11, 1216, 1303, 1314, 1315, 1316, 1318, 1321, 1334, 1826, 1927], "spectral": [11, 1292, 1308, 1336, 1731, 1755, 1765, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2058], "fft": [11, 2012, 2045], "4621": 11, "0303j": 11, "2438": [11, 1318, 1623], "5874j": 11, "7706": 11, "1421j": 11, "2110": 11, "1918j": 11, "complex128": [11, 177, 986, 1151, 1161, 1251, 1272, 1302, 1303, 1308, 1309, 1310, 1311, 1312, 1313, 1453, 1454, 1455, 1526, 1607, 1608, 1609, 1777, 1820, 1867, 2082, 2083, 2086, 2087, 2102], "complex64": [11, 180, 967, 968, 986, 1125, 1127, 1128, 1133, 1134, 1161, 1251, 1304, 1327, 1330, 1453, 1454, 1455, 1607, 1608, 1609, 1777, 1820, 1867, 2082, 2083, 2086, 2087, 2102], "apart": [11, 2016, 2042], "linspac": [11, 946, 970, 1129, 1139, 1374, 2014, 2018, 2066], "logspac": [11, 2014, 2018, 2066], "arang": [11, 23, 28, 36, 
515, 609, 693, 694, 695, 699, 701, 889, 890, 891, 957, 969, 973, 1091, 1106, 1124, 1128, 1136, 1142, 1144, 1148, 1149, 1150, 1151, 1158, 1159, 1236, 1293, 1294, 1298, 1301, 1325, 1328, 1329, 1342, 1422, 1446, 1447, 1448, 1548, 1549, 1550, 1551, 1552, 1579, 1580, 1581, 1670, 1771, 1772, 1815, 1823, 1827, 1842, 1849, 1854, 1874, 1907, 1915, 1926, 1943, 1944, 1946, 1947, 1949, 1977, 2014, 2023, 2035, 2060, 2066, 2081, 2085, 2086, 2096, 2099, 2104, 2106], "switch": [11, 19, 23, 30, 58, 63, 64, 81, 1315, 1316, 1317, 1465, 1632, 1671, 1721, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1858, 1964, 2032, 2042, 2045, 2055, 2057, 2067, 2103], "view_as_r": [11, 1923, 2014, 2066, 2084], "6125": 11, "1681": 11, "3773": 11, "3487": 11, "0861": 11, "7981": 11, "1681j": 11, "3487j": 11, "7981j": 11, "mul_": [11, 66, 79, 2014, 2033, 2035, 2080], "2250": [11, 1329, 1771], "7546": 11, "1722": 11, "x1": [11, 52, 963, 1320, 1460, 1518, 1575, 1576, 1614, 1672, 2014, 2106], "3j": [11, 28, 700, 990, 991, 1850, 1851, 1867], "4j": [11, 28, 1877], "0000": [11, 35, 515, 868, 911, 913, 946, 966, 970, 993, 1089, 1095, 1096, 1097, 1122, 1123, 1126, 1128, 1129, 1135, 1136, 1139, 1144, 1156, 1157, 1158, 1226, 1232, 1235, 1239, 1298, 1301, 1302, 1303, 1315, 1316, 1319, 1324, 1325, 1329, 1331, 1343, 1359, 1417, 1468, 1469, 1579, 1580, 1582, 1583, 1623, 1624, 1771, 1820, 1826, 1831, 1832, 1833, 1842, 1846, 1847, 1871, 1877, 1879, 1880, 1882, 1885, 1886, 1887, 1888, 1905, 1951, 1952, 1954, 1979, 2023, 2035, 2045, 2080, 2081, 2086], "6569": [11, 1239], "5708": [11, 1092], "7854": 11, "complex_tensor": 11, "pt": [11, 24, 30, 55, 1160, 1277, 1280, 1283, 1290, 1344, 1858, 2013, 2024, 2055, 2060, 2068, 2104], "conjug": [11, 332, 460, 694, 922, 967, 968, 990, 991, 1252, 1302, 1303, 1309, 1312, 1315, 1322, 1336, 1341, 1345, 1730, 1814, 1850, 1923, 1927, 1973, 2052, 2067, 2086], "wirting": [11, 922, 2052], "deriv": [11, 28, 33, 52, 55, 152, 740, 741, 742, 743, 744, 745, 783, 802, 826, 896, 917, 922, 923, 1172, 1176, 1183, 1226, 1269, 1330, 1362, 1574, 1706, 2012, 2015, 2048, 2049, 2052, 2075, 2080, 2081], "steepest": [11, 2042], "descent": [11, 35, 1780, 1796, 1800, 1801, 2042, 2055], "box": [11, 28, 48, 64, 977, 1177, 1185, 1976, 2034, 2042, 2047, 2063, 2097, 2098, 2103, 2104], "real_param": 11, "p": [11, 24, 30, 35, 55, 64, 156, 236, 288, 427, 428, 455, 490, 493, 494, 688, 691, 943, 945, 955, 963, 1102, 1293, 1304, 1310, 1311, 1319, 1320, 1362, 1364, 1367, 1377, 1415, 1434, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1491, 1493, 1494, 1495, 1531, 1535, 1537, 1575, 1576, 1598, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1653, 1654, 1655, 1665, 1669, 1672, 1673, 1684, 1700, 1721, 1734, 1736, 1742, 1751, 1771, 1796, 1847, 1904, 2014, 2015, 2041, 2042, 2045, 2050, 2052, 2057, 2067, 2069, 2080, 2081, 2105, 2106], "complex_optim": 11, "adamw": [11, 1787, 1793, 2067], "real_optim": 11, "slight": [11, 1866, 2080, 2083], "discrep": [11, 1716, 1833, 2052], "foreach": [11, 1720, 1721, 1722, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 2067], "v": [11, 30, 44, 52, 59, 64, 489, 490, 546, 901, 912, 914, 915, 916, 942, 959, 1165, 1175, 1177, 1187, 1269, 1286, 1308, 1312, 1336, 1340, 1345, 1532, 1575, 1576, 1586, 1623, 1669, 1732, 1763, 1768, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1816, 1849, 1908, 1912, 1927, 1928, 1976, 2013, 2014, 2015, 2016, 2029, 2042, 2052, 2061, 2068, 2080, 2085, 2095], "forloop": 11, "numerical_accuraci": 11, "impact": [11, 24, 28, 83, 1064, 1870, 2026, 2029, 
2042, 2070, 2084, 2102, 2107, 2111], "pointwis": [11, 35, 975, 1170, 1171, 1350, 1351, 1491, 2033, 2043, 2101, 2104, 2105], "lbfg": [11, 2067], "yet": [11, 29, 33, 37, 52, 53, 55, 60, 63, 64, 73, 82, 84, 85, 488, 1010, 1082, 1198, 1275, 1277, 1290, 1378, 1716, 1738, 1742, 1744, 1783, 1784, 2012, 2015, 2016, 2022, 2028, 2034, 2035, 2045, 2065, 2066, 2070, 2075, 2077, 2080, 2081, 2083, 2096, 2099, 2101, 2102, 2107, 2109, 2111], "fulli": [11, 14, 23, 28, 30, 32, 33, 34, 52, 59, 64, 682, 1071, 1185, 1272, 1522, 1523, 1524, 1526, 2016, 2022, 2028, 2048, 2068, 2070, 2098, 2099, 2106], "quantiz": [11, 27, 64, 221, 328, 340, 474, 475, 476, 477, 478, 480, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 861, 862, 863, 864, 865, 866, 867, 935, 1093, 1122, 1123, 1828, 1829, 1830, 1831, 1832, 1964, 2012, 2066, 2083, 2086, 2087, 2090, 2091], "pred": [12, 66, 69, 74, 75, 989, 1491, 1716, 2056, 2067, 2075, 2106], "union": [12, 14, 23, 30, 33, 34, 37, 41, 45, 48, 52, 53, 55, 64, 989, 1183, 1185, 1284, 1344, 1427, 1428, 1429, 1431, 1432, 1433, 1435, 1436, 1437, 1473, 1474, 1493, 1494, 1495, 1519, 1520, 1521, 1526, 1570, 1572, 1574, 1577, 1590, 1736, 1777, 1858, 1967, 2014, 2015, 2016, 2020, 2060, 2063, 2065, 2068, 2082, 2085, 2087, 2091], "true_fn": [12, 53, 66, 69, 74, 75, 989], "false_fn": [12, 53, 66, 69, 74, 75, 989], "operand": [12, 66, 74, 75, 951, 989, 1108, 2016, 2017, 2048, 2080, 2083], "Its": [12, 28, 53, 64, 997, 1022, 1272, 1320, 1338, 1339, 1364, 1526, 1771, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1973, 2016, 2047], "uniqu": [12, 28, 37, 44, 47, 48, 50, 515, 519, 1263, 1308, 1309, 1313, 1319, 1320, 1322, 1331, 1333, 1335, 1336, 1373, 1380, 1904, 1927, 1961, 2011, 2013, 2022, 2027, 2048, 2068, 2069, 2075, 2076, 2077, 2080, 2085, 2096, 2113], "predic": [12, 66, 71, 74, 75, 1187], "unlock": [12, 2023], "flexibilti": 12, "architectur": [12, 34, 64, 1034, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1461, 1570, 1615, 1871, 2045, 2058, 2070, 2080, 2095], "prototyp": [12, 18, 19, 28, 52, 682, 911, 913, 917, 922, 923, 989, 1282, 1574, 1586, 1587, 2012, 2023, 2029, 2034, 2035, 2055, 2060, 2073, 2075, 2080, 2083, 2089, 2090, 2091, 2093], "classif": [12, 989, 1439, 1445, 1461, 1529, 1531, 1533, 1559, 1616, 2012, 2040, 2071], "co": [12, 52, 53, 61, 66, 70, 71, 74, 75, 76, 77, 79, 203, 637, 638, 685, 953, 975, 989, 1050, 1167, 1170, 1171, 1176, 1230, 1231, 1324, 1459, 1460, 1463, 1800, 1801, 1808, 1820, 1881, 1885, 1886, 1887, 1888, 1890, 1909, 1910, 1911, 1913, 1914, 2014, 2020, 2033, 2066, 2067, 2080, 2085, 2087, 2097, 2104, 2106], "sin": [12, 14, 52, 53, 60, 61, 66, 70, 71, 74, 75, 76, 77, 534, 671, 672, 884, 975, 989, 1050, 1167, 1169, 1170, 1171, 1173, 1176, 1231, 1324, 1374, 
1820, 1882, 1888, 2014, 2020, 2033, 2042, 2066, 2080, 2081, 2085, 2097, 2102, 2104, 2106, 2109], "dynamicshapecondpred": 12, "dyn_shape_mod": 12, "eagerli": [12, 33, 1039, 1045, 1993, 1996, 2045, 2098, 2102], "vari": [12, 52, 821, 1278, 1439, 1558, 1757, 1883, 2029, 2045, 2057, 2058, 2067, 2070, 2071, 2080, 2093, 2098, 2099, 2101, 2102, 2111, 2113], "inp2": 12, "assert": [12, 23, 24, 30, 33, 35, 57, 59, 60, 61, 64, 66, 72, 75, 76, 77, 626, 925, 927, 928, 1167, 1169, 1170, 1171, 1172, 1175, 1176, 1177, 1186, 1187, 1270, 1275, 1276, 1282, 1684, 1777, 1976, 2015, 2017, 2020, 2048, 2049, 2055, 2060, 2068, 2070, 2087, 2098, 2102, 2110, 2111], "export": [12, 15, 18, 28, 33, 36, 51, 66, 68, 71, 73, 74, 75, 80, 83, 682, 843, 934, 977, 982, 989, 1010, 1181, 1191, 1275, 1277, 1278, 1284, 1290, 1758, 1759, 1778, 1779, 2012, 2013, 2015, 2016, 2020, 2048, 2066, 2069, 2075, 2092, 2098, 2103, 2109, 2110], "deploy": [12, 37, 47, 52, 2012, 2026, 2093, 2103], "dim_batch": 12, "dim": [12, 28, 34, 35, 52, 60, 61, 64, 66, 68, 74, 75, 114, 116, 117, 118, 120, 136, 137, 138, 186, 206, 209, 212, 213, 214, 215, 216, 217, 231, 235, 262, 266, 283, 314, 315, 316, 317, 318, 319, 323, 324, 356, 382, 394, 409, 411, 412, 413, 416, 431, 432, 433, 434, 438, 439, 455, 464, 472, 481, 493, 494, 496, 507, 508, 514, 515, 516, 517, 518, 519, 520, 521, 539, 540, 543, 544, 546, 549, 554, 555, 557, 562, 567, 575, 580, 581, 585, 586, 587, 589, 590, 592, 607, 608, 611, 612, 613, 614, 617, 619, 695, 697, 698, 699, 701, 877, 878, 879, 913, 914, 916, 962, 969, 987, 989, 996, 1007, 1022, 1024, 1087, 1088, 1089, 1090, 1091, 1100, 1106, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1147, 1148, 1167, 1177, 1181, 1213, 1226, 1236, 1244, 1245, 1246, 1247, 1249, 1294, 1304, 1305, 1325, 1329, 1339, 1341, 1342, 1352, 1360, 1370, 1372, 1373, 1375, 1378, 1380, 1395, 1417, 1418, 1419, 1420, 1421, 1422, 1430, 1460, 1461, 1462, 1465, 1469, 1471, 1476, 1491, 1497, 1516, 1532, 1533, 1534, 1560, 1562, 1574, 1576, 1577, 1614, 1615, 1631, 1634, 1643, 1651, 1657, 1658, 1659, 1668, 1669, 1671, 1673, 1677, 1684, 1690, 1691, 1699, 1716, 1731, 1732, 1738, 1742, 1744, 1751, 1752, 1765, 1768, 1771, 1817, 1824, 1827, 1847, 1848, 1853, 1854, 1859, 1860, 1861, 1864, 1865, 1895, 1898, 1899, 1903, 1906, 1908, 1915, 1918, 1920, 1921, 1922, 1923, 1926, 1939, 1943, 1944, 1945, 1946, 1949, 1950, 1958, 1959, 1960, 1961, 1963, 1971, 1972, 1976, 1977, 2012, 2013, 2014, 2020, 2034, 2035, 2049, 2050, 2060, 2063, 2065, 2066, 2080, 2081, 2083, 2090, 2093, 2098, 2106], "batch": [12, 24, 28, 33, 35, 48, 51, 52, 55, 56, 57, 60, 61, 64, 66, 74, 75, 585, 586, 587, 589, 590, 688, 704, 705, 706, 707, 708, 709, 737, 762, 781, 787, 917, 922, 923, 943, 955, 956, 963, 966, 967, 968, 975, 1007, 1096, 1098, 1108, 1170, 1171, 1177, 1269, 1302, 1303, 1304, 1305, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1340, 1341, 1342, 1345, 1353, 1362, 1363, 1367, 1438, 1439, 1440, 1441, 1442, 1445, 1453, 1454, 1459, 1461, 1462, 1464, 1465, 1466, 1468, 1469, 1470, 1472, 1477, 1478, 1479, 1480, 1485, 1486, 1488, 1489, 1490, 1491, 1492, 1496, 1497, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1517, 1518, 1529, 1530, 1531, 1532, 1533, 1535, 1538, 1539, 1540, 1541, 1542, 1544, 1558, 1559, 1566, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1578, 1597, 1602, 1604, 1605, 1615, 1616, 1618, 1619, 1620, 1623, 
1624, 1625, 1626, 1629, 1642, 1643, 1644, 1668, 1676, 1684, 1702, 1703, 1706, 1715, 1716, 1730, 1757, 1758, 1760, 1761, 1763, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1801, 1802, 1808, 1814, 1816, 1826, 1830, 1909, 1910, 1911, 1913, 1914, 1918, 1923, 1927, 1928, 1948, 1951, 1952, 1954, 1973, 1976, 2012, 2033, 2034, 2035, 2041, 2045, 2049, 2050, 2055, 2057, 2070, 2075, 2080, 2085, 2086, 2093, 2098, 2099, 2101, 2102, 2105, 2109, 2111], "min": [12, 23, 28, 40, 47, 52, 64, 66, 76, 77, 118, 187, 188, 189, 190, 302, 697, 698, 699, 758, 760, 772, 773, 782, 804, 821, 822, 823, 824, 827, 878, 970, 971, 1050, 1088, 1122, 1123, 1197, 1233, 1304, 1318, 1319, 1325, 1329, 1331, 1336, 1342, 1362, 1412, 1444, 1484, 1512, 1514, 1534, 1547, 1554, 1606, 1643, 1647, 1677, 1679, 1685, 1703, 1795, 1800, 1801, 1810, 1814, 1816, 1826, 1927, 1928, 1935, 1952, 1953, 1954, 1955, 2014, 2029, 2033, 2065, 2066, 2070, 2073, 2075, 2093, 2098, 2102, 2106, 2116], "ep": [12, 52, 714, 715, 716, 717, 718, 719, 725, 726, 738, 739, 751, 753, 754, 755, 756, 797, 821, 822, 823, 824, 827, 828, 922, 923, 1282, 1358, 1426, 1440, 1441, 1442, 1460, 1479, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1535, 1540, 1541, 1566, 1570, 1572, 1574, 1575, 1602, 1614, 1629, 1633, 1634, 1642, 1646, 1669, 1672, 1676, 1681, 1700, 1715, 1731, 1765, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1810, 1830, 2014, 2048, 2052, 2081, 2106, 2116], "dynamic_shap": [12, 52, 1180, 2063, 2093, 2108], "graphmodul": [12, 33, 52, 53, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 83, 862, 863, 864, 865, 2070, 2091, 2097, 2100, 2101, 2110, 2111], "arg0_1": [12, 52], "f32": [12, 52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79], "s0": [12, 52, 53, 66, 74, 75, 488, 1187, 2098, 2099, 2111], "sym_siz": [12, 53, 66, 74, 75, 2066, 2106], "sym": [12, 66, 74, 75, 76, 77, 1187, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890], "gt": [12, 66, 74, 75, 298, 1227, 2014, 2033, 2052, 2066, 2106], "true_graph_0": [12, 53, 66, 69, 74, 75], "false_graph_0": [12, 53, 66, 69, 74, 75], "symbol": [12, 14, 22, 52, 53, 65, 66, 75, 81, 82, 85, 86, 89, 626, 819, 843, 977, 1180, 1181, 1183, 1185, 1187, 1189, 1190, 1192, 1193, 1196, 1197, 1201, 1287, 1341, 1496, 1542, 1951, 1973, 2012, 2016, 2020, 2063, 2098, 2101, 2102, 2111], "sub": [12, 28, 34, 40, 55, 63, 66, 69, 74, 75, 564, 816, 864, 865, 1284, 1288, 1289, 1370, 1373, 1375, 1538, 1539, 1570, 1571, 1573, 1847, 1925, 1943, 1960, 2011, 2013, 2014, 2033, 2035, 2042, 2057, 2065, 2066, 2080, 2083, 2099, 2106, 2110], "exmapl": 12, "dependet": 12, "datadependentcondpred": 12, "sum_1": [12, 64, 2097, 2099, 2100], "b8": [12, 66, 69, 74], "flatten": [12, 24, 52, 53, 55, 64, 66, 72, 877, 878, 1097, 1235, 1329, 1342, 1578, 1729, 1771, 1779, 1827, 1843, 1848, 1853, 1939, 1959, 1960, 1961, 1962, 2014, 2033, 2034, 2036, 2040, 2063, 2065, 2066, 2072, 2084, 2098], "closur": [12, 32, 66, 74, 75, 977, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1793, 1794, 1795, 1796, 1797, 2100, 2103], "flat": [12, 52, 1848, 1890, 1962, 2014, 2035, 2051, 2065, 2080], "_higher_order_op": 12, "condition": [12, 47, 919, 989], "constraint": [12, 28, 52, 55, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 796, 797, 912, 989, 1042, 1053, 1171, 1180, 1181, 1183, 1186, 1187, 1197, 1716, 1731, 1757, 1797, 2012, 2016, 2035, 2049, 2051, 2052, 2055, 2080, 2096, 2098, 2099], "true_branch": [12, 989], "false_branch": [12, 989], "consist": [12, 19, 23, 28, 30, 33, 35, 47, 48, 53, 64, 922, 
966, 967, 968, 989, 1195, 1272, 1302, 1308, 1309, 1311, 1313, 1314, 1315, 1316, 1340, 1342, 1526, 1574, 1733, 1736, 1779, 1826, 1927, 2013, 2016, 2020, 2023, 2034, 2035, 2049, 2051, 2055, 2057, 2059, 2065, 2067, 2069, 2080, 2097, 2100, 2105, 2111], "possibli": [12, 23, 47, 52, 55, 989, 1199, 1200, 1272, 1288, 1315, 1320, 1526, 1716, 2013, 2020, 2059, 2068, 2101, 2113], "aka": [12, 28, 52, 53, 66, 74, 75, 935, 989, 1486, 1641, 2042, 2063, 2083, 2098], "add_": [12, 28, 52, 58, 60, 490, 989, 1166, 2014, 2033, 2043, 2063, 2080], "tempor": [12, 989, 1440, 1442, 1445, 1566, 1579, 1616, 1643, 1703], "pytre": [12, 52, 64, 66, 989, 2049], "parallel_info": [13, 2012, 2044], "cppextens": [14, 2012], "setuptool": 14, "bare": 14, "pypa": 14, "userguid": 14, "ext_modul": 14, "buildextens": [14, 2012], "extra_compile_arg": [14, 2061], "extra_link_flag": 14, "wl": 14, "lm": [14, 33], "cmdclass": 14, "build_ext": 14, "cudaextens": [14, 2012], "cuda_extens": 14, "extension_kernel": 14, "cu": 14, "cxx": 14, "nvcc": [14, 1038, 2111], "o2": 14, "lcuda": 14, "arch": 14, "card": [14, 2061], "visibl": [14, 28, 30, 44, 53, 1032, 1078, 1527, 1528, 1536, 1537, 2098, 2099, 2103], "ptx": 14, "road": 14, "recompil": [14, 64, 682, 975, 1286, 2022, 2053, 2098, 2099, 2100, 2107], "cc": [14, 28, 2045], "newest": [14, 59, 2062], "torch_cuda_arch_list": 14, "6": [14, 18, 19, 23, 24, 28, 35, 45, 52, 64, 66, 67, 71, 72, 74, 75, 76, 77, 79, 315, 317, 319, 323, 403, 404, 473, 489, 501, 515, 519, 562, 609, 687, 688, 693, 699, 743, 744, 745, 747, 748, 760, 763, 765, 767, 911, 912, 914, 915, 916, 938, 946, 954, 959, 965, 969, 973, 997, 1019, 1066, 1071, 1085, 1086, 1087, 1091, 1092, 1103, 1106, 1107, 1124, 1132, 1137, 1138, 1142, 1143, 1145, 1147, 1148, 1151, 1158, 1226, 1236, 1238, 1239, 1261, 1283, 1294, 1298, 1309, 1325, 1329, 1330, 1331, 1338, 1339, 1343, 1345, 1374, 1420, 1421, 1422, 1435, 1446, 1447, 1448, 1449, 1452, 1457, 1460, 1468, 1469, 1478, 1479, 1480, 1482, 1483, 1522, 1523, 1527, 1535, 1544, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1570, 1571, 1573, 1575, 1578, 1584, 1599, 1629, 1636, 1637, 1670, 1672, 1679, 1706, 1759, 1760, 1762, 1764, 1770, 1771, 1772, 1781, 1795, 1815, 1816, 1819, 1826, 1827, 1833, 1834, 1837, 1843, 1853, 1854, 1858, 1862, 1874, 1895, 1899, 1904, 1907, 1909, 1910, 1915, 1926, 1928, 1929, 1930, 1938, 1943, 1944, 1945, 1946, 1947, 1949, 1952, 1954, 1958, 1962, 1964, 1977, 1978, 2013, 2016, 2017, 2023, 2035, 2040, 2042, 2048, 2049, 2051, 2055, 2060, 2065, 2075, 2076, 2080, 2081, 2083, 2086, 2087, 2099, 2100, 2102, 2106, 2108, 2111, 2112], "build_my_extens": 14, "7": [14, 23, 24, 28, 35, 52, 64, 66, 71, 235, 262, 315, 317, 319, 323, 403, 404, 473, 515, 562, 609, 619, 687, 699, 747, 748, 914, 916, 951, 954, 959, 965, 967, 969, 973, 1090, 1091, 1103, 1105, 1106, 1109, 1124, 1147, 1148, 1151, 1154, 1158, 1226, 1236, 1249, 1294, 1298, 1308, 1313, 1315, 1316, 1319, 1320, 1324, 1325, 1329, 1336, 1420, 1421, 1422, 1428, 1429, 1432, 1433, 1435, 1446, 1447, 1448, 1468, 1514, 1522, 1523, 1548, 1549, 1550, 1551, 1552, 1575, 1578, 1599, 1671, 1723, 1724, 1749, 1754, 1764, 1771, 1786, 1833, 1837, 1843, 1853, 1854, 1855, 1862, 1870, 1874, 1877, 1879, 1890, 1891, 1899, 1907, 1909, 1910, 1915, 1927, 1929, 1930, 1938, 1943, 1944, 1947, 1949, 1958, 1962, 1973, 1977, 2013, 2017, 2023, 2034, 2035, 2043, 2045, 2048, 2058, 2060, 2065, 2066, 2070, 2080, 2083, 2084, 2085, 2086, 2087, 2099, 2104, 2108], "older": [14, 2045, 2060, 2068], "modestli": [14, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 
1796, 1797], "imag": [14, 23, 743, 744, 745, 864, 865, 986, 1428, 1432, 1433, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1472, 1473, 1474, 1489, 1498, 1533, 1538, 1539, 1561, 1578, 1580, 1581, 1597, 1608, 1609, 1611, 1612, 1626, 1627, 1632, 1643, 1702, 1703, 1851, 2012, 2014, 2035, 2052, 2066, 2070, 2084, 2085, 2098, 2102, 2103, 2106], "11": [14, 28, 323, 515, 682, 965, 969, 1053, 1106, 1236, 1293, 1329, 1343, 1359, 1430, 1446, 1474, 1523, 1628, 1771, 1772, 1778, 1899, 1943, 1977, 2013, 2017, 2023, 2034, 2045, 2048, 2060, 2061, 2065, 2066, 2080, 2081, 2111], "pars": [14, 28, 48, 51, 932, 2016, 2068, 2075, 2099], "window": [14, 23, 28, 64, 353, 558, 944, 953, 1230, 1231, 1269, 1292, 1435, 1436, 1437, 1473, 1474, 1493, 1494, 1495, 1519, 1520, 1521, 1522, 1523, 1524, 1599, 1627, 1628, 1657, 1658, 1659, 1831, 1832, 1923, 2012, 2014, 2029, 2045, 2060, 2069, 2098], "workaround": [14, 23, 33, 52, 64, 1276, 2011, 2059, 2065, 2070, 2102], "pure": [14, 15, 52, 60, 1275, 2013, 2048], "sigmoidalphablendforwardcuda": 14, "69460": 14, "facebookresearch": 14, "pytorch3d": 14, "cb170ac024a949f1f9614ffe6af1c38d972f7d48": 14, "relocat": 14, "link": [14, 15, 35, 64, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1519, 1520, 1521, 1578, 1634, 2034, 2054, 2055, 2080, 2099, 2109], "rdc": 14, "dc": 14, "anymor": [14, 28, 55], "dlto": 14, "dlink": 14, "protent": 14, "perf": [14, 682, 2070, 2105], "lib": [14, 2020, 2061], "nvshmem": 14, "ninja": [14, 2061], "dlink_librari": 14, "dlink_lib": 14, "std": [14, 37, 45, 90, 379, 456, 1122, 1123, 1154, 1155, 1156, 1772, 1820, 1846, 1884, 1889, 1922, 1961, 2014, 2033, 2040, 2045, 2054, 2061, 2066, 2093, 2106], "17": [14, 28, 687, 1269, 1325, 1446, 1523, 1899, 2013, 2065, 2066, 2080, 2093, 2100], "mix": [14, 24, 33, 35, 52, 55, 1053, 1716, 2012, 2042, 2044, 2070, 2080, 2098, 2100, 2102], "use_ninja": 14, "greatli": [14, 64, 2045, 2098], "fallback": [14, 20, 28, 47, 52, 58, 89, 1203, 1286, 1336, 2041, 2045, 2060, 2096, 2102, 2111], "distutil": 14, "max_job": 14, "extra_cflag": 14, "extra_cuda_cflag": 14, "extra_ldflag": 14, "extra_include_path": 14, "build_directori": 14, "with_cuda": [14, 2061], "is_python_modul": 14, "is_standalon": 14, "keep_intermedi": 14, "torch_extens": 14, "temporari": [14, 64, 825, 828, 1717, 2042, 2050, 2093, 2104], "torch_extensions_dir": 14, "subfold": [14, 2111], "o3": 14, "cuh": 14, "Such": [14, 23, 24, 33, 52, 63, 1970, 2080, 2099], "lib64": 14, "cudart": [14, 2061], "fine": [14, 28, 30, 32, 36, 52, 488, 861, 864, 1159, 1288, 1684, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 2011, 2032, 2042, 2045, 2048, 2065, 2068, 2070, 2080, 2092, 2099, 2104, 2111], "cuda_hom": 14, "safest": 14, "pybind11": [14, 15, 2015], "linker": 14, "workspac": [14, 20, 975], "header": [14, 45, 2060, 2061, 2085, 2087], "automat": [14, 19, 24, 28, 30, 47, 48, 52, 64, 88, 591, 892, 894, 908, 909, 929, 975, 1044, 1053, 1235, 1282, 1288, 1374, 1573, 1684, 1717, 1845, 1874, 1965, 2011, 2012, 2015, 2016, 2026, 2027, 2033, 2034, 2042, 2043, 2045, 2048, 2055, 2057, 2065, 2068, 2070, 2071, 2075, 2083, 2085, 2086, 2098, 2101, 2102, 2104, 2109, 2111], "construct": [14, 15, 23, 28, 30, 32, 33, 35, 47, 52, 53, 55, 57, 59, 60, 64, 66, 71, 72, 76, 77, 152, 450, 794, 795, 826, 881, 882, 896, 917, 986, 1095, 1111, 1177, 1235, 1272, 1284, 1288, 1289, 1296, 1303, 1343, 1344, 1359, 1374, 1468, 1469, 1526, 1587, 1588, 1589, 1623, 1706, 1716, 1718, 1719, 1757, 1764, 1816, 1820, 1867, 1902, 1909, 1910, 1911, 1912, 1913, 1914, 1928, 1942, 1945, 1976, 
2012, 2013, 2020, 2024, 2029, 2045, 2047, 2051, 2055, 2057, 2060, 2063, 2068, 2070, 2075, 2077, 2083, 2085, 2086, 2087, 2093, 2099, 2102], "plain": [14, 1439, 1527, 1536, 1730, 1911, 2048, 2080, 2101], "standalon": [14, 46, 47, 48, 819, 1284, 1288, 2013, 2105], "torch_lib_path": 14, "load_inlin": [14, 2012], "cpp_sourc": 14, "cuda_sourc": 14, "with_pytorch_error_handl": 14, "use_pch": 14, "behav": [14, 15, 28, 52, 63, 64, 315, 323, 495, 517, 519, 844, 1176, 1189, 1200, 1288, 1522, 1523, 1524, 1711, 1712, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 2015, 2016, 2023, 2042, 2048, 2049, 2055, 2059, 2068, 2075, 2080], "exactli": [14, 24, 28, 35, 50, 52, 53, 922, 923, 969, 1020, 1024, 1108, 1154, 1155, 1193, 1269, 1272, 1314, 1469, 1472, 1473, 1474, 1526, 1555, 1558, 1624, 1634, 1716, 1771, 1779, 1870, 2023, 2024, 2033, 2042, 2045, 2047, 2048, 2051, 2052, 2053, 2065, 2068, 2096, 2099], "filenam": [14, 19, 23, 30, 52, 64, 1160, 1271, 1280, 1283, 2011, 2014, 2027, 2042, 2068, 2082, 2085, 2104, 2113], "typic": [14, 23, 24, 28, 33, 35, 36, 37, 40, 47, 48, 50, 53, 55, 64, 85, 90, 488, 977, 997, 1008, 1151, 1159, 1195, 1201, 1272, 1293, 1296, 1344, 1459, 1485, 1526, 1706, 1716, 1783, 1784, 1796, 1870, 1882, 1909, 1910, 1911, 1913, 1914, 2011, 2012, 2013, 2016, 2022, 2024, 2042, 2044, 2045, 2058, 2059, 2060, 2065, 2067, 2070, 2075, 2084, 2086, 2095, 2097, 2098, 2101, 2109, 2111, 2113, 2116], "inlin": [14, 52, 63, 66, 68, 72, 1050, 1272, 1276, 1288, 2044, 2102], "concaten": [14, 23, 28, 913, 962, 973, 1022, 1107, 1187, 1238, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1469, 1496, 1616, 1624, 1721, 1736, 1920, 1978, 2014, 2017, 2065, 2066, 2080], "furthermor": [14, 45, 59, 1308, 1309, 1336, 1362, 1463, 2033, 2034, 2042, 2059, 2073, 2075], "cuda_runtim": 14, "se": 14, "macro": [14, 37, 2053], "pybind": [14, 2046], "_safe_foo": 14, "redirect": [14, 37, 45, 49, 2089], "obscur": 14, "sin_add": 14, "inline_extens": 14, "include_path": [14, 2012], "get_compiler_abi_compatibility_and_vers": [14, 2012], "abi": [14, 15], "alongsid": [14, 1272, 1526, 2048], "shell": 14, "torchvers": 14, "verify_ninja_avail": [14, 2012], "is_ninja_avail": [14, 2012], "embed": [15, 33, 34, 53, 64, 737, 748, 901, 1099, 1430, 1459, 1469, 1485, 1498, 1532, 1576, 1624, 1684, 1797, 1865, 2014, 2044, 2051, 2066, 2069, 2070, 2072, 2075, 2080, 2085, 2106], "simpl": [15, 18, 24, 33, 40, 47, 52, 59, 61, 64, 490, 997, 1064, 1167, 1175, 1176, 1177, 1272, 1276, 1284, 1440, 1441, 1442, 1468, 1499, 1500, 1501, 1526, 1566, 1623, 1882, 1976, 2011, 2013, 2017, 2035, 2044, 2045, 2047, 2048, 2050, 2052, 2054, 2065, 2068, 2075, 2080, 2099, 2100, 2101, 2104], "modif": [15, 55, 64, 83, 223, 782, 903, 904, 908, 1161, 1162, 1272, 1526, 1716, 2042, 2048, 2055, 2068, 2070, 2099, 2102], "submodul": [15, 30, 33, 34, 52, 53, 55, 64, 82, 793, 799, 816, 840, 841, 842, 861, 864, 865, 1175, 1272, 1276, 1278, 1283, 1284, 1462, 1526, 1527, 1528, 1555, 1713, 2012, 2013, 2015, 2016, 2026, 2055, 2060, 2068, 2070, 2075, 2091, 2097, 2110], "preprocess": [15, 52, 81, 498, 1272], "augment": [15, 2017, 2087], "walk": [15, 64, 2020, 2048, 2068, 2076, 2077, 2084, 2111], "interfac": [15, 19, 24, 30, 36, 37, 39, 44, 47, 65, 714, 715, 716, 717, 718, 719, 720, 721, 722, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 747, 748, 758, 763, 764, 765, 766, 767, 796, 1162, 1706, 1869, 1923, 1964, 2016, 2020, 2029, 2030, 2031, 2048, 2054, 2067, 2069, 2073, 2080, 2085, 2106], "opencv": [15, 1632, 1643], "struct": [15, 1176, 1177, 1976, 2026, 
2054], "explain": [15, 28, 52, 1797, 1870, 2011, 2016, 2041, 2045, 2049, 2051, 2057, 2080, 2099, 2102, 2111], "reshap": [15, 28, 35, 500, 501, 515, 546, 619, 694, 973, 1091, 1106, 1107, 1147, 1236, 1293, 1325, 1329, 1338, 1339, 1342, 1422, 1447, 1448, 1472, 1548, 1549, 1550, 1551, 1552, 1578, 1731, 1765, 1771, 1907, 1908, 1915, 1943, 1944, 1949, 1977, 1978, 2014, 2023, 2034, 2035, 2065, 2066, 2072, 2084, 2085, 2106], "classat_1_1_tensor": 15, "tensor_index": 15, "crucial": [15, 90, 2093, 2104, 2105], "cpp_autograd": 15, "workflow": [15, 2011, 2048, 2070, 2071, 2089, 2092, 2098, 2101, 2105, 2107, 2108], "undesir": [15, 33, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1684, 1923, 2048], "overview": [15, 28, 44, 1586, 1716, 2012, 2023, 2032, 2042, 2052, 2055, 2064, 2070, 2075, 2092, 2094], "cpp_frontend": 15, "library_root": 15, "libtorch": [15, 2093], "linux": [15, 28, 2011], "gcc": 15, "pre": [15, 28, 32, 52, 55, 64, 917, 928, 1185, 1209, 1272, 1526, 1710, 1712, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1749, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1799, 1805, 2011, 2013, 2016, 2026, 2042, 2044, 2045, 2055, 2063, 2065, 2086, 2101, 2102, 2107, 2111], "cxx11": 15, "facilit": [16, 29, 35, 60, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1011, 1096, 1723, 1724, 1867, 2011, 2013, 2016, 2059], "certain": [17, 23, 24, 28, 33, 44, 45, 52, 60, 63, 64, 86, 688, 691, 737, 918, 943, 955, 1032, 1096, 1185, 1269, 1272, 1280, 1344, 1367, 1377, 1453, 1454, 1455, 1456, 1457, 1458, 1478, 1497, 1513, 1526, 1532, 1578, 1633, 1644, 1646, 1770, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1989, 2016, 2020, 2023, 2034, 2042, 2043, 2045, 2048, 2054, 2055, 2058, 2060, 2065, 2069, 2075, 2076, 2080, 2101, 2102, 2105, 2111], "tunabl": 17, "earli": [18, 19, 24, 28, 37, 63, 488, 1464, 1465, 1466, 1470, 2012, 2070, 2071, 2090, 2091, 2101], "introduc": [18, 35, 52, 60, 762, 1187, 1197, 1199, 1200, 1211, 1345, 1477, 1496, 1542, 1643, 1853, 2011, 2016, 2043, 2047, 2048, 2056, 2065, 2068, 2069, 2075, 2080, 2086, 2092, 2099, 2102, 2107, 2109, 2111, 2118], "race": [18, 28, 488, 2042], "enable_cuda_sanit": 18, "torch_cuda_sanit": 18, "concurr": [18, 28, 32, 33, 2044, 2045, 2051, 2057, 2075, 2076], "uniniti": [18, 27, 447, 501, 1109, 1110, 1706, 1718, 1719, 1764, 1964, 2024, 2035, 2066], "overwrit": [18, 19, 23, 28, 30, 64, 1527, 1536, 2016, 2042, 2096], "commandlin": 18, "example_error": 18, "csan": 18, "pointer": [18, 141, 929, 1025, 1543, 2020, 2045, 2046, 2047, 2054, 2075, 2077, 2101], "139719969079296": 18, "94646435460352": 18, "_sanit": 18, "364": 18, "_handle_kernel_launch": 18, "stack_trac": [18, 53, 64], "stacksummari": 18, "extract": [18, 64, 838, 967, 968, 1209, 1468, 1472, 1578, 1623, 1702, 2063, 2075, 2091, 2095, 2097, 2099, 2100, 2102, 2103], "10000": [18, 20, 24, 696, 1808, 1855, 2050, 2055, 2080, 2085, 2104], "420": 18, "_handle_memory_alloc": 18, "incorrectli": [18, 55, 1011, 1153, 2103], "id": [18, 20, 23, 28, 30, 32, 33, 37, 45, 46, 47, 48, 55, 64, 776, 826, 938, 1008, 1043, 1566, 1659, 1699, 1716, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 2016, 2054, 2065, 2066, 2068, 2074, 2075, 2076, 2082, 2108, 2111], "faulti": [18, 28], "schema": [18, 28, 30, 53, 87, 2013, 2014, 2016, 2020, 2063, 2065, 2106, 2110], "current_stream": [18, 1010, 1981, 2012, 2045], "wait_stream": [18, 28, 488, 1011, 1013, 1982, 2045], "default_stream": 
[18, 28, 2012, 2045], "begin": [18, 23, 24, 28, 29, 30, 33, 39, 47, 55, 64, 498, 682, 762, 786, 822, 823, 868, 944, 980, 992, 1008, 1056, 1058, 1068, 1128, 1226, 1232, 1293, 1328, 1340, 1437, 1438, 1439, 1447, 1448, 1449, 1459, 1461, 1467, 1477, 1478, 1481, 1482, 1483, 1484, 1485, 1486, 1492, 1496, 1497, 1512, 1517, 1520, 1521, 1533, 1534, 1542, 1545, 1558, 1564, 1569, 1576, 1615, 1636, 1637, 1716, 1730, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1800, 1802, 1808, 1874, 1877, 1880, 1923, 1949, 1960, 1979, 2016, 2041, 2042, 2045, 2048, 2051, 2052, 2054, 2059, 2065, 2067, 2073, 2081, 2096, 2102, 2111], "suspect": [18, 1797, 2045, 2111], "blaslt": 19, "rocbla": [19, 2058], "databas": [19, 23], "prepar": [19, 30, 33, 34, 55, 64, 799, 841, 844, 860, 862, 864, 865, 866, 1175, 1282, 2017, 2065, 2090, 2091], "tunableop_result": 19, "csv": 19, "ordin": [19, 291, 1909, 1910, 1911, 1913, 1914, 2083], "insert": [19, 23, 28, 35, 55, 63, 64, 749, 794, 795, 864, 865, 942, 1527, 1528, 1536, 1716, 1862, 1865, 1874, 1895, 1920, 1963, 2013, 2026, 2045, 2063, 2066, 2070, 2109, 2110], "discov": [19, 28, 975, 2075], "termin": [19, 28, 37, 47, 50, 53, 64, 1786, 2032, 2075, 2104], "pt_version": 19, "rocm_vers": [19, 2053], "12969": 19, "1544e39": 19, "hipblaslt_vers": 19, "a9c5cc7": 19, "rocblas_vers": 19, "72e57364": 19, "dirti": [19, 2042, 2099], "gemmtunableop_float_nt": 19, "nt_25088_4096_64": 19, "1219": [19, 1332, 1845], "262": 19, "nt_4096_4096_64": 19, "1216": [19, 1411], "033": 19, "verison": 19, "reject": 19, "comma": [19, 20, 28, 1108, 2016, 2022], "averag": [19, 28, 32, 737, 768, 769, 770, 771, 823, 824, 935, 937, 997, 1071, 1085, 1272, 1427, 1428, 1429, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1459, 1461, 1469, 1479, 1485, 1491, 1492, 1493, 1494, 1495, 1499, 1500, 1501, 1517, 1518, 1526, 1529, 1530, 1531, 1532, 1533, 1540, 1558, 1559, 1566, 1575, 1591, 1592, 1593, 1599, 1600, 1601, 1604, 1605, 1615, 1629, 1644, 1653, 1654, 1655, 1668, 1676, 1716, 1780, 1781, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1949, 2012, 2047, 2069], "edit": [19, 64, 2068, 2084], "caution": [19, 45, 2103], "untun": 19, "gemmtunableop": 19, "transpos": [19, 462, 595, 596, 619, 694, 743, 744, 745, 967, 968, 1108, 1176, 1302, 1303, 1309, 1312, 1315, 1322, 1333, 1336, 1456, 1457, 1458, 1542, 1578, 1610, 1611, 1612, 1684, 1725, 1726, 1728, 1730, 1814, 1927, 1929, 1930, 1937, 1951, 2014, 2033, 2035, 2042, 2058, 2065, 2066, 2072, 2080, 2084, 2086, 2106], "k": [19, 23, 28, 30, 35, 48, 64, 288, 356, 508, 515, 517, 519, 592, 619, 762, 942, 968, 1095, 1108, 1165, 1180, 1187, 1213, 1235, 1294, 1302, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1317, 1318, 1319, 1322, 1324, 1331, 1333, 1335, 1336, 1345, 1363, 1367, 1435, 1437, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1473, 1474, 1477, 1478, 1496, 1497, 1511, 1513, 1514, 1519, 1521, 1532, 1533, 1542, 1544, 1586, 1615, 1627, 1628, 1650, 1668, 1730, 1814, 1816, 1826, 1854, 1904, 1905, 1906, 1907, 1909, 1910, 1911, 1913, 1914, 1923, 1928, 1946, 1951, 2014, 2015, 2040, 2045, 2048, 2061, 2080, 2081, 2083, 2102, 2105, 2106, 2111], "diagnost": [19, 40, 81, 82, 83, 84, 85, 88, 682, 2013], "besid": [19, 24, 28, 931, 2045, 2047, 2063, 2085, 2097], "pytorch_tunableop_verobs": 19, "30m": 19, "whichev": [19, 997, 1177, 1632, 1827, 1976, 2045], "successfulli": [19, 28, 37, 44, 52, 1279, 1871, 2032, 2055, 2060, 2075, 2111], "bgemm": 19, "rout": [19, 2099], "transa": 19, "transb": [19, 2065], "gettuningcontext": 19, "tuningcontext": 19, "preced": [19, 32, 37, 55, 62, 682, 1710, 
1808, 1948, 2017, 2044, 2067, 2070, 2088], "val": [19, 53, 66, 69, 74, 1050, 1187, 1202, 1278, 1279, 2014, 2016, 2020, 2040, 2101], "is_en": [19, 1902, 1909, 1910, 1911, 1912, 1913, 1914], "tuning_en": 19, "tuning_is_en": 19, "set_max_tuning_dur": 19, "millisecond": [19, 41, 44, 1010, 1385, 1981, 2029], "honor": [19, 2016], "get_max_tuning_dur": 19, "set_max_tuning_iter": 19, "get_max_tuning_iter": 19, "set_filenam": 19, "insert_device_ordin": 19, "cenario": 19, "get_filenam": 19, "get_result": 19, "get_valid": 19, "write_file_on_exit": 19, "destruct": [19, 28, 2029, 2075, 2077], "write_fil": 19, "read_fil": 19, "pytorch_no_cuda_memory_cach": [20, 2045, 2053], "pytorch_cuda_alloc_conf": [20, 1033], "pytorch_nvml_based_cuda_check": [20, 2045], "nvml": [20, 2045], "fork": [20, 23, 45, 1291, 1716, 2016, 2044, 2045, 2050, 2054, 2057, 2061, 2074, 2075, 2077], "torch_cudnn_v8_api_lru_cache_limit": 20, "cudnn": [20, 21, 22, 762, 1282, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1477, 1496, 1542, 1543, 1585, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1684, 1723, 1724, 1870, 2012, 2014, 2045, 2058, 2059, 2105, 2114], "roughli": [20, 23, 1491, 2047, 2109, 2110], "2gib": 20, "200kib": 20, "executionplan": 20, "torch_cudnn_v8_api_dis": 20, "And": [20, 52, 59, 799, 826, 1087, 1088, 1165, 1194, 1272, 1294, 1370, 1375, 1445, 1526, 1965, 1967, 2042, 2048, 2049, 2051, 2061, 2065, 2070, 2091, 2097, 2099, 2103, 2104], "v7": 20, "torch_allow_tf32_cublas_overrid": 20, "set_float32_matmul_precis": [20, 1222], "torch_nccl_use_comm_nonblock": 20, "nccl": [20, 24, 29, 30, 48, 55, 1716, 2051, 2053, 2115], "torch_nccl_avoid_record_stream": 20, "stream": [20, 23, 28, 37, 45, 55, 63, 152, 488, 896, 917, 999, 1001, 1006, 1008, 1010, 1011, 1014, 1015, 1016, 1024, 1027, 1028, 1042, 1048, 1081, 1084, 1385, 1388, 1389, 1394, 1399, 1401, 1402, 1408, 1410, 1848, 1981, 1983, 1985, 2006, 2008, 2012, 2014, 2016, 2051, 2063, 2075, 2102, 2113, 2115], "torch_cudnn_v8_api_debug": 20, "saniti": [20, 30, 2049], "cuda_visible_devic": [20, 28, 1077, 1716, 2045], "cuda_launch_block": [20, 22, 1496, 1542, 2045], "cublas_workspace_config": [20, 22, 1496, 1542, 1964, 2045, 2059], "4096": [20, 22, 52, 1496, 1542, 1964, 2045, 2065], "16": [20, 22, 28, 34, 52, 323, 619, 740, 741, 742, 743, 744, 745, 774, 1106, 1124, 1151, 1226, 1236, 1272, 1284, 1296, 1302, 1308, 1309, 1313, 1329, 1422, 1434, 1436, 1437, 1445, 1446, 1449, 1452, 1453, 1454, 1455, 1457, 1458, 1463, 1464, 1465, 1466, 1470, 1473, 1474, 1493, 1494, 1495, 1496, 1514, 1519, 1520, 1521, 1523, 1524, 1526, 1533, 1542, 1553, 1570, 1584, 1607, 1609, 1610, 1612, 1616, 1627, 1628, 1774, 1823, 1870, 1899, 1964, 1973, 1977, 2013, 2017, 2040, 2045, 2055, 2058, 2060, 2065, 2066, 2075, 2080, 2081, 2083, 2085, 2086, 2093, 2099, 2100], "kib": [20, 2045], "cudnn_conv_wscap_dbg": 20, "cublaslt_workspace_s": 20, "cudnn_errata_json_fil": 20, "errata": 20, "config": [20, 24, 55, 64, 794, 795, 796, 797, 862, 864, 975, 2047, 2061, 2073, 2093, 2102, 2104, 2105, 2107, 2109, 2111], "primarili": [20, 35, 53, 63, 817, 818, 819, 825, 1187, 1196, 1422, 2029, 2070, 2086], "hardcod": [20, 2101], "autotun": [20, 975, 2105], "nvidia_tf32_overrid": 20, "float16": [21, 24, 30, 55, 299, 688, 691, 762, 825, 828, 836, 853, 854, 861, 943, 955, 1161, 1254, 1272, 1367, 1377, 1453, 1454, 1455, 1456, 1457, 1458, 1477, 1478, 1496, 1497, 1513, 1526, 1542, 1586, 1643, 1684, 1723, 1724, 1782, 1783, 1784, 1796, 1855, 1867, 2041, 2055, 2070, 2072, 2080, 2082, 2083, 2086, 2087, 2116], "v100": [21, 762, 1477, 1496, 1542, 2045], 
"packedsequ": [21, 762, 1477, 1496, 1542, 1758, 1759, 1760, 1762], "rnn": [22, 736, 762, 763, 764, 765, 767, 861, 1477, 1478, 1496, 1497, 1543, 1544, 1717, 1734, 1757, 2018, 2050, 2055, 2072, 2085], "enforc": [22, 24, 63, 797, 893, 908, 1272, 1496, 1526, 1542, 2016, 2055, 2084], "colon": [22, 1496, 1542, 2075], "heart": 23, "dataload": [23, 498, 1716, 1801, 1802, 1808, 2045, 2050, 2057, 2061, 2067, 2085], "batch_siz": [23, 33, 35, 59, 61, 895, 908, 909, 1167, 1175, 1177, 1532, 1542, 1757, 1759, 1760, 1762, 1976, 2014, 2049, 2050, 2059, 2065, 2085], "shuffl": [23, 1446, 2012, 2085], "batch_sampl": 23, "num_work": [23, 37, 2059, 2061], "drop_last": 23, "timeout": [23, 28, 37, 47, 2025, 2032, 2075, 2115], "worker_init_fn": [23, 2050, 2059], "prefetch_factor": 23, "persistent_work": 23, "__getitem__": [23, 1964], "__len__": [23, 64, 2014], "protocol": [23, 30, 36, 47, 50, 883, 1159, 1162, 1858, 2048, 2061, 2063, 2065, 2075, 2112], "sampl": [23, 30, 35, 52, 56, 61, 64, 90, 155, 156, 260, 288, 379, 456, 483, 610, 781, 945, 997, 1019, 1053, 1066, 1071, 1085, 1086, 1126, 1144, 1167, 1177, 1226, 1269, 1289, 1362, 1412, 1434, 1438, 1439, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1463, 1464, 1465, 1466, 1468, 1469, 1470, 1479, 1485, 1488, 1489, 1490, 1491, 1492, 1511, 1513, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1545, 1558, 1559, 1566, 1575, 1597, 1604, 1605, 1615, 1617, 1618, 1619, 1620, 1623, 1624, 1625, 1629, 1632, 1634, 1642, 1643, 1644, 1668, 1676, 1716, 1731, 1772, 1801, 1816, 1819, 1833, 1835, 1837, 1839, 1840, 1841, 1882, 1905, 1921, 1922, 1923, 1971, 1972, 1976, 2012, 2020, 2029, 2040, 2041, 2045, 2054, 2055, 2068, 2069, 2070, 2071, 2085, 2111], "idx": [23, 64, 903, 905, 908, 1272, 1278, 1430, 1468, 1526, 1731, 1960, 2014, 2034], "th": [23, 35, 156, 288, 315, 317, 323, 762, 911, 913, 945, 965, 1095, 1100, 1247, 1269, 1294, 1312, 1320, 1326, 1362, 1464, 1465, 1466, 1470, 1477, 1485, 1496, 1542, 1618, 1619, 1620, 1625, 1827, 1923, 1962, 2048, 2061, 2081, 2083, 2113], "iterabledataset": [23, 2054], "__iter__": [23, 2017], "suitabl": [23, 35, 488, 959, 1292, 1797, 1862, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2058, 2071, 2085, 2103], "improb": 23, "fetch": [23, 63, 64, 1185, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1777, 2075], "remot": [23, 28, 32, 37, 1716, 2075, 2076], "real": [23, 35, 64, 86, 688, 689, 690, 691, 692, 694, 700, 922, 943, 967, 968, 986, 993, 1053, 1124, 1125, 1127, 1129, 1130, 1131, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1187, 1203, 1262, 1264, 1265, 1268, 1269, 1293, 1302, 1303, 1304, 1308, 1309, 1310, 1311, 1312, 1315, 1316, 1317, 1322, 1325, 1327, 1329, 1330, 1331, 1332, 1336, 1337, 1341, 1342, 1343, 1353, 1359, 1492, 1535, 1538, 1539, 1576, 1730, 1786, 1839, 1923, 1927, 1973, 1974, 1975, 2011, 2012, 2014, 2016, 2024, 2042, 2045, 2063, 2066, 2068, 2070, 2075, 2077, 2084, 2086, 2087, 2097, 2098, 2099, 2101, 2104, 2106, 2112], "replica": [23, 24, 28, 32, 45, 1462, 1716, 2047], "duplic": [23, 45, 317, 321, 473, 546, 944, 953, 974, 1187, 1230, 1231, 1272, 1526, 1960, 1961, 2065, 2080], "yield": [23, 24, 52, 55, 64, 1096, 1098, 1272, 1319, 1320, 1526, 1979, 2016, 2017, 2023, 2070, 2081, 2102], "stochast": [23, 35, 1473, 1474, 1627, 1628, 1780, 1782, 1783, 1785, 1796, 1800, 1801, 2055, 2067], "decent": 23, "randomli": [23, 747, 748, 758, 766, 923, 1434, 1463, 1464, 1465, 1466, 1470, 1545, 1617, 1618, 1619, 1620, 1625, 1744, 2035, 2054, 2055], "permut": [23, 52, 66, 75, 1108, 1319, 1320, 1339, 1362, 1364, 1723, 
1724, 1841, 2012, 2014, 2034, 2066, 2072, 2084, 2086, 2106], "mini": [23, 781, 787, 1440, 1441, 1442, 1468, 1469, 1480, 1485, 1488, 1489, 1490, 1498, 1518, 1529, 1531, 1541, 1566, 1575, 1623, 1624, 1643, 1703, 1715], "neither": [23, 28, 797, 902, 903, 906, 908, 1091, 1151, 1532, 1574, 1771, 1949, 2048, 2058, 2075], "nor": [23, 28, 37, 55, 797, 903, 906, 908, 1187, 1308, 1309, 1336, 1532, 1574, 1716, 1771, 1825, 1927, 2048, 2065, 2096, 2101], "notion": [23, 922, 1440, 1441, 1442, 1488, 1489, 1490, 1566, 2099, 2102], "collat": 23, "minibatch": [23, 770, 771, 774, 775, 776, 1362, 1430, 1438, 1439, 1459, 1461, 1485, 1491, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1579, 1599, 1600, 1601, 1604, 1605, 1607, 1608, 1609, 1610, 1611, 1612, 1615, 1644, 1657, 1658, 1659, 1668, 1676], "loader": [23, 2067], "essenti": [23, 28, 37, 64, 1362, 2034, 2045, 2061, 2080, 2093, 2096], "dummi": [23, 32, 2020, 2042, 2048, 2063, 2111, 2112], "infinit": [23, 1264, 1345, 1438, 1445, 1616, 1820, 2048, 2058, 2075, 2101], "drop": [23, 59, 61, 64, 1053, 1277, 1331, 1336, 1434, 1766, 1870, 1923, 2013, 2034, 2057, 2113], "dataset_it": 23, "pad": [23, 24, 52, 714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 737, 740, 741, 742, 743, 744, 745, 770, 771, 774, 775, 776, 781, 784, 785, 787, 975, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1269, 1435, 1436, 1437, 1445, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1468, 1469, 1472, 1502, 1503, 1504, 1505, 1506, 1507, 1519, 1520, 1521, 1522, 1523, 1524, 1529, 1532, 1548, 1549, 1550, 1551, 1552, 1553, 1573, 1574, 1578, 1582, 1583, 1584, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1623, 1624, 1626, 1632, 1643, 1657, 1658, 1659, 1660, 1661, 1662, 1702, 1703, 1758, 1760, 1761, 1763, 1831, 1832, 1923, 2012, 2014, 2035, 2050, 2060, 2065, 2066, 2070, 2085, 2098, 2106], "length": [23, 24, 28, 30, 34, 35, 262, 315, 317, 323, 353, 435, 436, 585, 586, 587, 737, 762, 816, 896, 917, 939, 974, 1024, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1162, 1177, 1247, 1269, 1288, 1292, 1421, 1422, 1440, 1445, 1453, 1462, 1469, 1472, 1477, 1493, 1496, 1532, 1542, 1570, 1578, 1587, 1607, 1608, 1609, 1616, 1624, 1643, 1684, 1757, 1758, 1759, 1760, 1761, 1762, 1763, 1833, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1907, 1909, 1910, 1911, 1913, 1914, 1923, 1949, 1976, 2014, 2016, 2023, 2034, 2035, 2043, 2050, 2079, 2080, 2087, 2098, 2099, 2102, 2106, 2111], "cheaper": [23, 63], "bulk": [23, 2101], "arrai": [23, 28, 450, 762, 822, 823, 882, 883, 946, 964, 993, 997, 1108, 1152, 1161, 1162, 1170, 1171, 1344, 1374, 1472, 1477, 1478, 1496, 1497, 1626, 1848, 1855, 1874, 1909, 1910, 1911, 1913, 1914, 1939, 1942, 1949, 1957, 1970, 2013, 2017, 2045, 2055, 2060, 2080, 2082, 2086, 2087, 2102], "untouch": 23, "slightli": [23, 28, 35, 52, 55, 1771, 1816, 1928, 2011, 2045, 2048, 2051, 2052, 2058, 2068, 2102], "default_col": 23, "channel": [23, 58, 474, 475, 476, 781, 787, 810, 824, 827, 835, 848, 856, 1122, 1440, 1441, 1442, 1446, 1453, 1454, 1455, 1456, 1457, 1458, 1463, 1464, 1465, 1466, 1470, 1472, 1480, 1488, 1489, 1490, 1498, 1502, 1503, 1504, 1505, 1506, 1507, 1514, 1534, 1561, 1566, 1578, 1579, 1580, 1581, 1602, 1618, 1619, 1620, 1625, 1642, 1643, 1650, 1677, 1703, 1723, 1724, 1732, 1742, 1743, 1744, 1751, 1752, 1768, 1828, 2034, 2036, 2040, 2042, 2070, 2071, 2073, 2085], 
"class_index": 23, "namedtupl": [23, 30, 52, 53, 64, 897, 902, 1087, 1088, 1216, 1272, 1284, 1294, 1314, 1364, 1370, 1373, 1375, 1378, 1418, 1430, 1526, 1816, 1826, 1899, 1927, 1946, 1951, 2013, 2015, 2016, 2070], "situat": [23, 35, 50, 64, 87, 89, 488, 975, 993, 1736, 2024, 2032, 2048, 2049, 2057, 2058, 2068, 2070, 2077, 2098, 2101, 2102, 2112], "gil": [23, 28, 32, 2042, 2045, 2075], "integ": [23, 27, 28, 35, 47, 52, 53, 90, 291, 447, 449, 451, 501, 547, 562, 568, 687, 688, 689, 690, 691, 692, 768, 769, 787, 789, 868, 895, 908, 909, 943, 944, 953, 964, 992, 1011, 1013, 1029, 1035, 1036, 1064, 1103, 1106, 1108, 1109, 1110, 1111, 1151, 1152, 1153, 1154, 1155, 1156, 1162, 1163, 1167, 1168, 1169, 1170, 1171, 1187, 1197, 1204, 1205, 1214, 1230, 1231, 1234, 1235, 1236, 1283, 1286, 1295, 1296, 1303, 1314, 1316, 1326, 1345, 1403, 1411, 1430, 1445, 1453, 1454, 1455, 1498, 1541, 1591, 1592, 1593, 1594, 1595, 1596, 1705, 1715, 1757, 1772, 1775, 1804, 1807, 1811, 1816, 1828, 1829, 1835, 1837, 1838, 1839, 1841, 1846, 1855, 1867, 1915, 1924, 1928, 1943, 1944, 1957, 1962, 1977, 1982, 1986, 1990, 1991, 2009, 2015, 2016, 2017, 2033, 2049, 2052, 2070, 2071, 2073, 2080, 2081, 2083, 2086, 2087, 2089, 2098, 2099, 2116], "descriptor": [23, 28, 1575, 1576, 2017, 2065], "parent": [23, 40, 45, 50, 55, 682, 790, 791, 819, 1526, 2028, 2032, 2061, 2068, 2077, 2085, 2099], "simplest": [23, 28, 30, 37, 64, 861, 1435, 1436, 1437, 1453, 1454, 1455, 1519, 1520, 1521, 1734, 2047, 2048, 2055, 2063, 2070, 2077, 2080, 2102], "refcount": [23, 2032, 2057], "panda": 23, "pyarrow": 23, "13246": 23, "enumer": [23, 33, 35, 52, 64, 1272, 1526, 1528, 1537, 1801, 2014, 2015, 2041, 2045, 2061, 2085, 2110], "get_worker_info": [23, 2075], "seed": [23, 90, 1045, 1054, 1055, 1076, 1248, 1365, 1387, 1816, 1833, 1928, 1967, 1996, 1999, 2000, 2002, 2012, 2014, 2050, 2057, 2059, 2066, 2074], "naiv": [23, 2098, 2102], "shut": [23, 2075], "garbag": [23, 2077], "subtleti": [23, 66, 1462, 2048, 2050], "multiprocess": [23, 24, 28, 29, 31, 37, 40, 48, 49, 50, 1462, 1716, 2012, 2034, 2047, 2076, 2082], "unix": [23, 38, 45, 2032], "child": [23, 28, 33, 37, 40, 55, 793, 1272, 1526, 1743, 2032, 2055, 2061, 2077], "address": [23, 28, 47, 51, 60, 218, 922, 923, 1016, 2023, 2032, 2045, 2048, 2067, 2075, 2076, 2082, 2096, 2099, 2113], "maco": [23, 28, 2032, 2056], "spawn": [23, 24, 29, 37, 39, 45, 50, 1275, 1716, 2012, 2041, 2047, 2057, 2061, 2076], "__name__": [23, 24, 28, 38, 39, 40, 48, 2047, 2048, 2057, 2061, 2068, 2076], "__main__": [23, 24, 28, 38, 39, 40, 48, 2043, 2047, 2057, 2061, 2076], "bytecod": [23, 52, 682, 2022, 2062, 2063, 2068, 2097, 2099, 2100, 2102, 2111], "base_se": 23, "worker_id": [23, 50, 2059], "therebi": [23, 35, 2067, 2070], "mandatorili": 23, "faq": [23, 1462, 1760, 2012], "initial_se": [23, 90, 2012, 2014, 2059, 2066, 2074], "host": [23, 28, 33, 37, 40, 46, 47, 48, 50, 198, 211, 582, 605, 880, 1272, 1526, 1716, 1736, 1770, 2045, 2075, 2076, 2082, 2096, 2101, 2113], "recogn": [23, 83, 2016, 2075, 2080, 2101], "simplecustombatch": 23, "transposed_data": 23, "zip": [23, 30, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 2011, 2014, 2015, 2027, 2045, 2054, 2080], "tgt": [23, 1570, 1571, 1572], "collate_wrapp": 23, "float32": [23, 24, 28, 52, 55, 269, 582, 619, 821, 822, 823, 824, 827, 828, 883, 986, 1122, 1123, 1161, 1219, 1222, 1254, 1426, 1439, 1446, 1579, 1580, 1581, 1599, 1782, 1783, 1784, 1796, 1820, 1825, 1837, 1852, 1867, 1868, 1870, 1912, 1974, 2035, 2041, 2045, 2058, 2063, 2065, 2072, 
2080, 2081, 2082, 2083, 2086, 2087, 2099, 2100, 2102, 2111, 2116], "tensordataset": 23, "batch_ndx": 23, "is_pin": [23, 1757, 2014, 2033, 2066, 2082], "multiprocessing_context": 23, "pin_memory_devic": 23, "reshuffl": 23, "draw": [23, 35, 156, 288, 945, 1071, 1412, 1833, 2085, 2109], "mutual": [23, 28, 37, 55, 737, 2087], "subprocess": [23, 28, 31, 45, 47, 50, 2050, 2057], "incomplet": [23, 911, 2018, 2081], "divis": [23, 52, 585, 619, 689, 774, 775, 776, 969, 1103, 1153, 1156, 1283, 1316, 1453, 1454, 1455, 1456, 1457, 1458, 1460, 1480, 1492, 1517, 1535, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1669, 1846, 1915, 1943, 2016, 2041, 2045], "basecontext": 23, "randomsampl": 23, "prefetch": [23, 55], "unpickl": [23, 28, 1344, 2068], "practic": [23, 28, 33, 34, 35, 53, 55, 488, 2012, 2013, 2023, 2032, 2034, 2042, 2048, 2051, 2052, 2055, 2060, 2068, 2075, 2082, 2092, 2099, 2101, 2102], "proper": [23, 37, 63, 64, 1099, 1865, 2015, 2042, 2045, 2048, 2057, 2061, 2082], "guess": [23, 1187], "trust": [23, 28, 1344, 2011, 2068], "inaccur": [23, 24, 1643], "kwd": 23, "__getitems__": 23, "speedup": [23, 24, 1532, 1574, 2092, 2096, 2108], "myiterabledataset": 23, "worker_info": 23, "iter_start": 23, "iter_end": 23, "per_work": 23, "ceil": [23, 179, 635, 636, 770, 771, 1435, 1436, 1437, 1493, 1494, 1495, 1519, 1520, 1521, 1599, 1600, 1601, 1657, 1658, 1659, 1831, 1832, 1855, 2014, 2033, 2045, 2066, 2080, 2106], "mult": 23, "12": [23, 24, 28, 55, 323, 519, 619, 688, 743, 744, 745, 747, 748, 938, 965, 969, 1106, 1124, 1236, 1276, 1292, 1309, 1331, 1430, 1446, 1457, 1472, 1473, 1474, 1523, 1538, 1539, 1561, 1570, 1578, 1586, 1627, 1628, 1669, 1674, 1675, 1731, 1765, 1815, 1826, 1874, 1889, 1899, 1943, 1959, 1962, 1977, 2011, 2013, 2014, 2017, 2023, 2045, 2048, 2051, 2056, 2060, 2065, 2066, 2080, 2087, 2099, 2100, 2108, 2109], "overall_start": 23, "overall_end": 23, "stackdataset": 23, "assembl": [23, 2100], "imagedataset": 23, "textdataset": 23, "tuple_stack": 23, "dict_stack": 23, "concatdataset": 23, "chaindataset": 23, "chain": [23, 24, 35, 63, 64, 152, 896, 965, 1328, 1469, 1555, 1798, 1811, 2016, 2042, 2045, 2048, 2052, 2055, 2067, 2099, 2110], "fly": [23, 1050, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 2042], "whole": [23, 28, 30, 33, 1177, 1272, 1526, 1555, 1566, 1716, 1976, 2042, 2047, 2057, 2058, 2068, 2099, 2101, 2103, 2107, 2108], "_util": [23, 982, 2109], "collate_fn_map": 23, "registri": [23, 2012, 2060, 2063, 2065], "default_collate_fn_map": 23, "collate_tensor_fn": 23, "custom_col": 23, "collate_map": 23, "outer": [23, 28, 693, 911, 1108, 1167, 1171, 1176, 1192, 1217, 2014, 2016, 2066], "unchang": [23, 501, 515, 517, 737, 816, 1147, 1272, 1526, 1570, 1847, 1918, 2041, 2058, 2070], "byte": [23, 28, 30, 35, 47, 244, 437, 560, 883, 1015, 1056, 1058, 1060, 1062, 1160, 1162, 1271, 1344, 1381, 1383, 1777, 1786, 1858, 2015, 2016, 2017, 2033, 2051, 2060, 2068, 2080, 2082, 2113], "v_i": [23, 1312], "v_1": 23, "v_2": 23, "v1_i": 23, "v2_i": 23, "v1_1": 23, "v1_2": 23, "v2_1": 23, "v2_2": 23, "elem": [23, 2014], "customtyp": 23, "collate_customtype_fn": 23, "default_convert": 23, "np": [23, 963, 1103, 1108, 1148, 1149, 1150, 1673, 1820, 2020, 2049, 2059, 2065, 2085, 2086, 2087, 2102], "fraction": [23, 35, 1078, 1151, 1157, 1392, 1456, 1457, 1458, 1473, 1474, 1532, 1574, 1627, 1628, 1741, 1742, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1827, 2040, 2102], "workerinfo": [23, 2075], "random_split": 23, "floor": [23, 273, 649, 650, 770, 771, 1103, 1153, 1283, 1435, 1436, 1437, 1493, 1494, 1495, 1519, 
1520, 1521, 1599, 1600, 1601, 1657, 1658, 1659, 1831, 1832, 1846, 1855, 1923, 2014, 2016, 2033, 2057, 2060, 2066, 2080, 2106], "frac": [23, 35, 281, 379, 651, 652, 689, 762, 774, 775, 776, 868, 944, 953, 993, 997, 1103, 1153, 1226, 1230, 1231, 1292, 1304, 1308, 1309, 1312, 1324, 1336, 1343, 1359, 1430, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1446, 1453, 1454, 1455, 1456, 1457, 1458, 1461, 1463, 1472, 1477, 1478, 1479, 1480, 1488, 1489, 1490, 1491, 1493, 1494, 1495, 1496, 1497, 1498, 1511, 1513, 1514, 1515, 1516, 1519, 1520, 1521, 1529, 1530, 1531, 1533, 1541, 1542, 1544, 1545, 1557, 1559, 1560, 1562, 1563, 1565, 1566, 1567, 1578, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1652, 1669, 1671, 1673, 1684, 1686, 1690, 1692, 1694, 1695, 1715, 1781, 1782, 1785, 1787, 1793, 1800, 1801, 1830, 1839, 1842, 1845, 1857, 1877, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1906, 1921, 1922, 1923, 1927, 1949, 1971, 1972, 2014, 2033, 2040, 2042, 2052, 2066, 2080, 2081], "remaind": [23, 492, 1156, 1226, 2014, 2066, 2100, 2106], "robin": [23, 28], "generator1": 23, "manual_se": [23, 60, 90, 1875, 2012, 2014, 2033, 2055, 2057, 2059, 2065, 2066, 2074], "42": [23, 826, 953, 1411, 1881, 2045, 2055, 2100], "generator2": 23, "30": [23, 28, 35, 47, 447, 619, 722, 730, 731, 758, 766, 1226, 1295, 1443, 1445, 1452, 1513, 1578, 1584, 1607, 1616, 1706, 1804, 1806, 1812, 1829, 1939, 2024, 2048, 2051, 2065, 2067, 2075, 2079, 2099, 2100], "data_sourc": 23, "accedingsequencelengthsampl": 23, "argsort": [23, 1939, 2014, 2049, 2066], "tolist": [23, 354, 2016, 2066, 2082], "accedingsequencelengthbatchsampl": 23, "sequentialsampl": 23, "num_sampl": [23, 423, 1412, 2014], "drawn": [23, 175, 260, 288, 1412, 1772, 1833, 1837, 1838, 2040, 2087, 2089], "subsetrandomsampl": 23, "weightedrandomsampl": 23, "probabl": [23, 34, 55, 156, 260, 762, 945, 977, 1193, 1234, 1350, 1412, 1430, 1434, 1438, 1445, 1461, 1463, 1464, 1465, 1466, 1470, 1477, 1479, 1496, 1532, 1533, 1542, 1604, 1615, 1616, 1617, 1618, 1619, 1620, 1625, 1634, 1644, 1668, 1684, 1826, 2032, 2048, 2061, 2065, 2081, 2085, 2098, 2101, 2102, 2103, 2107], "row": [23, 24, 28, 34, 35, 210, 315, 317, 323, 587, 590, 695, 701, 880, 911, 913, 963, 993, 997, 1091, 1121, 1149, 1150, 1171, 1177, 1235, 1294, 1320, 1328, 1331, 1336, 1360, 1362, 1370, 1372, 1373, 1375, 1378, 1412, 1418, 1419, 1420, 1623, 1624, 1673, 1730, 1770, 1824, 1827, 1847, 1907, 1908, 1909, 1910, 1911, 1913, 1914, 1926, 1946, 1949, 1953, 1955, 1970, 1976, 1978, 2014, 2052, 2080, 2085], "05": [23, 32, 64, 115, 346, 696, 714, 715, 716, 717, 718, 719, 725, 726, 738, 739, 751, 753, 754, 755, 756, 922, 923, 1122, 1261, 1288, 1289, 1359, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1566, 1570, 1572, 1574, 1602, 1633, 1642, 1646, 1716, 1799, 1805, 1806, 1812, 1883, 1884, 1889, 2013, 2014, 2067, 2076, 2087], "batchsampl": 23, "distributedsampl": [23, 1716], "num_replica": 23, "world_siz": [23, 24, 28, 29, 32, 37, 47, 48, 51, 55, 1716, 2047, 2075, 2076], "evenli": [23, 34, 585, 586, 587, 697, 698, 1106, 1236, 1343, 1359, 1977], "set_epoch": 23, "is_distribut": [23, 2014, 2066], "start_epoch": 23, "n_epoch": 23, "vanilla": [24, 53, 2023], "allreduc": [24, 28, 1716, 2045, 2047, 2066], "register_comm_hook": [24, 32, 55, 1716], "mainli": [24, 35, 829, 1445, 1616, 2090], "bucket": [24, 32, 1200, 1430, 1716, 2014, 2047, 2066, 2102, 2111], "gradbucket": [24, 1716], "decompos": [24, 52, 64, 1158, 1166, 1317, 2042, 2048, 2065, 2106], "get_per_parameter_tensor": 24, 
"wise": [24, 28, 34, 35, 689, 690, 700, 760, 772, 773, 782, 786, 887, 991, 1108, 1113, 1154, 1155, 1214, 1215, 1229, 1238, 1295, 1297, 1354, 1355, 1356, 1357, 1361, 1371, 1374, 1376, 1423, 1444, 1458, 1465, 1467, 1481, 1482, 1483, 1484, 1486, 1512, 1515, 1525, 1534, 1545, 1546, 1547, 1554, 1556, 1557, 1558, 1563, 1564, 1565, 1567, 1568, 1606, 1621, 1630, 1631, 1635, 1636, 1637, 1638, 1641, 1645, 1647, 1652, 1663, 1664, 1677, 1678, 1679, 1685, 1686, 1687, 1688, 1692, 1694, 1695, 1696, 1907, 1978, 2044, 2048, 2080, 2081, 2084, 2108], "_distributed_c10d": [24, 28], "1d": [24, 35, 55, 704, 707, 740, 743, 774, 784, 974, 993, 997, 1091, 1105, 1234, 1235, 1249, 1269, 1328, 1329, 1374, 1419, 1427, 1431, 1435, 1445, 1453, 1456, 1461, 1464, 1465, 1469, 1493, 1518, 1519, 1531, 1533, 1579, 1591, 1594, 1597, 1599, 1607, 1610, 1618, 1624, 1653, 1657, 1827, 1828, 1831, 1923, 1939, 1949, 1973, 2051], "is_last": 24, "set_buff": 24, "stateless": [24, 60, 2036, 2055], "ddp_comm_hook": [24, 32], "default_hook": 24, "allreduce_hook": 24, "process_group": [24, 29, 30, 32, 55, 1566, 1716], "aggreg": [24, 28, 30, 40, 55, 1469, 1624, 1716, 1747, 2029, 2102, 2111], "henc": [24, 30, 32, 35, 37, 46, 47, 51, 55, 63, 288, 945, 1162, 1522, 1523, 1524, 1579, 1723, 1724, 1912, 2035, 2042, 2045, 2047, 2051, 2075, 2077, 2096], "unaffect": [24, 501, 502, 1479], "ddp_model": [24, 28, 30, 1716, 2047], "fp16_compress_hook": 24, "compress": [24, 55, 210, 589, 590, 1130, 1131, 1140, 1141, 1716, 1909, 1910, 1911, 1913, 1914, 1948, 2012, 2108], "decompress": [24, 2011, 2027], "bf16_compress_hook": 24, "brain": [24, 2083, 2086], "wrapper": [24, 28, 35, 49, 55, 63, 64, 66, 626, 759, 792, 826, 1005, 1008, 1010, 1011, 1013, 1081, 1154, 1155, 1175, 1272, 1385, 1408, 1462, 1981, 1982, 2006, 2013, 2015, 2016, 2029, 2032, 2045, 2047, 2065, 2096], "fp16_compress_wrapp": 24, "powersgdst": 24, "matrix_approximation_rank": 24, "start_powersgd_it": 24, "powersgd_hook": 24, "bf16_compress_wrapp": 24, "wikipedia": [24, 1670, 2042, 2052, 2116], "bfloat16_float": 24, "point_format": 24, "vogel": 24, "et": [24, 35, 55, 1445, 1538, 1539, 1575, 1576, 1796, 1890, 1928, 2040], "al": [24, 35, 55, 1445, 1538, 1539, 1575, 1576, 1796, 1890, 1928, 2040], "neurip": [24, 35], "2019": [24, 35, 1064], "bandwidth": [24, 28, 33, 46, 48, 2045, 2070, 2075, 2104, 2105], "hyperparamet": [24, 55, 64, 2085], "1000": [24, 1123, 1128, 1154, 1155, 1430, 1468, 1576, 1828, 1855, 1874, 1942, 2042, 2060, 2065, 2085], "min_compression_r": 24, "use_error_feedback": 24, "warm_start": 24, "orthogonalization_epsilon": 24, "random_se": 24, "compression_stats_logging_frequ": 24, "batch_tensors_with_same_shap": 24, "tune": [24, 28, 32, 1064, 1309, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 2042, 2045, 2056, 2070, 2111], "stronger": [24, 52, 53, 66], "exponenti": [24, 260, 1118, 1324, 1350, 1351, 1352, 1360, 1444, 1467, 1621, 2012, 2016, 2066, 2067, 2081, 2089], "grid": [24, 1128, 1374, 1597, 1632, 2014, 2045, 2085, 2106], "satisfactori": 24, "nlp": [24, 1488, 1489, 1490, 1498, 2102], "appendix": [24, 2012], "hybrid": [24, 55, 220, 545, 585, 586, 587, 589, 590, 1237], "scheme": [24, 45, 47, 480, 804, 821, 822, 823, 824, 827, 829, 2055], "sensit": [24, 1486, 1558, 2065, 2068, 2076, 2101], "suboptim": [24, 2104], "trajectori": 24, "irrecover": 24, "warm": [24, 32, 1053, 1800, 1801, 2045, 2069, 2096, 2102, 2109], "num_row": 24, "num_col": 24, "1e": [24, 30, 35, 64, 115, 346, 696, 714, 715, 716, 717, 718, 719, 725, 726, 738, 739, 751, 753, 754, 755, 
756, 757, 922, 923, 966, 967, 968, 1261, 1288, 1289, 1339, 1440, 1441, 1442, 1460, 1479, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1512, 1535, 1540, 1566, 1570, 1572, 1574, 1575, 1602, 1614, 1629, 1633, 1634, 1642, 1646, 1669, 1672, 1676, 1700, 1731, 1765, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1810, 1818, 1871, 2013, 2014, 2048, 2055, 2065, 2067, 2081, 2087], "orthogon": [24, 1309, 1312, 1331, 1336, 1345, 1814, 1826, 2040, 2042, 2055, 2080], "div": [24, 238, 1104, 1153, 1156, 1430, 1538, 1539, 1846, 1956, 2014, 2020, 2033, 2066, 2080, 2083, 2106, 2110], "epsilon": [24, 64, 821, 822, 823, 824, 827, 868, 1327, 1330, 1440, 1441, 1442, 1460, 1480, 1488, 1489, 1490, 1498, 1535, 1541, 1566, 1614, 1669, 1715, 1726, 1728, 1731, 1765, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1830, 2014, 2081], "bucket_cap_mb": [24, 1716, 2047], "footprint": [24, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 2063, 2080, 2108], "bottleneck": [24, 2012, 2055, 2104], "memor": 24, "compens": 24, "apex": 24, "uncompress": [24, 2060, 2080], "pq": 24, "mq": [24, 2070, 2091], "tp": [24, 28, 33, 34, 55], "awai": [24, 64, 978, 1632, 2012, 2034, 2042, 2049, 2098], "comm": [24, 28, 47, 682, 2047], "handler": [24, 28, 38, 40, 41, 49, 1743, 2029, 2048, 2054, 2068, 2112], "batched_powersgd_hook": 24, "destroi": [24, 28, 47, 1187, 1462, 2042, 2069, 2075], "squar": [24, 35, 553, 741, 742, 743, 744, 781, 787, 796, 997, 1095, 1097, 1269, 1304, 1306, 1308, 1309, 1310, 1313, 1314, 1318, 1320, 1322, 1324, 1326, 1330, 1332, 1333, 1335, 1336, 1340, 1353, 1362, 1428, 1432, 1436, 1437, 1454, 1455, 1457, 1458, 1473, 1474, 1486, 1494, 1495, 1517, 1520, 1521, 1524, 1541, 1558, 1570, 1587, 1599, 1608, 1611, 1627, 1628, 1632, 1641, 1643, 1664, 1681, 1684, 1688, 1703, 1715, 1730, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1857, 1916, 1951, 1970, 2014, 2066, 2080], "truncat": [24, 1153, 1870, 1957, 2035, 2040, 2050, 2058], "impli": [24, 47, 55, 260, 1191, 2032, 2042, 2051, 2065, 2073, 2075, 2101], "debugging_hook": 24, "noop_hook": 24, "headroom": 24, "desynchron": [24, 28], "trainer": [24, 28, 30, 37, 40, 45, 47, 48, 50, 1716, 2075], "restart": [24, 37, 46, 48, 51, 1800, 1801, 2032, 2085, 2100], "__setstate__": 24, "__getstate__": 24, "reload": [24, 32, 55, 2011], "sy": [24, 39, 48, 51, 2011, 2045, 2060, 2068], "tempfil": 24, "mp": [24, 28, 29, 50, 1385, 1716, 2012, 2020, 2047, 2048, 2057, 2070, 2076, 2082, 2083, 2091], "simplemodel": 24, "24": [24, 28, 35, 948, 1331, 1514, 1597, 1764, 1826, 1870, 1944, 2013, 2040, 2044, 2081, 2099, 2100], "fc2": [24, 1706, 2063, 2093], "master_addr": [24, 28, 37, 47, 48, 51, 2047, 2075, 2076], "localhost": [24, 28, 47, 48, 2047, 2075, 2076], "master_port": [24, 28, 37, 47, 48, 51, 2047, 2075, 2076], "12355": 24, "init_process_group": [24, 28, 29, 32, 37, 48, 51, 1716, 2045, 2047, 2075], "cleanup": [24, 1187, 2082], "destroy_process_group": [24, 28], "run_demo": 24, "demo_fn": 24, "nproc": [24, 28, 45, 46, 48, 2032, 2047, 2076], "demo_seri": 24, "gettempdir": 24, "device_id": [24, 28, 29, 32, 48, 55, 1344, 1462, 1566, 1699, 1716, 2047], "powersgd_st": 24, "lr": [24, 29, 30, 32, 55, 490, 1706, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 2045, 2047, 2055, 2057, 2067, 2076, 2085], "001": [24, 922, 923, 1282, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 
1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1809, 2047, 2065], "state_dict": [24, 30, 32, 52, 53, 55, 62, 838, 839, 1272, 1526, 1706, 1750, 1768, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1789, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 2011, 2027, 2028, 2047, 2055, 2057, 2060, 2063, 2065, 2070, 2090], "comm_hook": 24, "comm_hook_st": 24, "barrier": [24, 37, 47, 2066], "map_loc": [24, 1276, 1280, 1344, 1716, 2011, 2024, 2027, 2068], "new_ddp_model": 24, "load_state_dict": [24, 30, 32, 55, 62, 417, 1272, 1344, 1526, 1706, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812, 2011, 2012, 2055, 2060, 2070], "n_gpu": 24, "device_count": [24, 1967, 2012], "got": [24, 28, 894, 908, 909, 931, 2048, 2087], "thank": [24, 35, 2048], "author": [24, 47, 48, 55, 1177, 1793, 1833, 1976, 2041, 2093, 2100, 2102, 2104, 2107, 2108, 2109, 2111], "thij": 24, "par": 24, "torch_show_cpp_stacktrac": [25, 28], "torch_cpp_log_level": [25, 28], "c10": [25, 47, 2054, 2093, 2098], "glog": 25, "logger": [25, 28, 37, 2090, 2091], "info": [25, 28, 33, 41, 45, 47, 55, 682, 895, 908, 909, 1010, 1011, 1013, 1187, 1303, 1314, 1316, 1317, 1321, 1334, 1362, 1388, 1389, 1777, 2012, 2014, 2020, 2022, 2047, 2048, 2049, 2065, 2068, 2098, 2100, 2101, 2111, 2115], "fatal": [25, 52, 2032, 2057], "torch_log": [25, 81, 83, 682, 975, 2022, 2047, 2098, 2099, 2100, 2102, 2111], "_log": [25, 2012, 2016, 2111, 2114], "home": [26, 2045], "fill_uninitialized_memori": [27, 501, 1109, 1110, 1111, 1964, 2012, 2059], "fill": [27, 28, 33, 156, 175, 260, 261, 262, 288, 319, 323, 379, 402, 447, 448, 449, 451, 456, 483, 515, 610, 625, 946, 1096, 1109, 1110, 1111, 1163, 1164, 1303, 1314, 1316, 1469, 1570, 1624, 1671, 1775, 1776, 1835, 1836, 1837, 1838, 1839, 1840, 1927, 1964, 2009, 2010, 2014, 2024, 2034, 2035, 2040, 2045, 2048, 2060, 2066, 2080, 2087, 2101, 2106], "detriment": [27, 2059], "resize_": [27, 502, 1166, 1294, 1964, 2013, 2014, 2015, 2033, 2059, 2072, 2082, 2106], "empty_strid": [27, 2014, 2018, 2066, 2098, 2106], "empty_permut": [27, 2014, 2066, 2106], "empty_lik": [27, 30, 2014, 2018, 2020, 2024, 2033, 2035, 2066, 2080], "brief": [28, 33, 1716, 2032, 2075], "introduct": [28, 35, 1042, 1716, 2012, 2013, 2017, 2033, 2043, 2055, 2065, 2075, 2080, 2085], "mpi": [28, 1716], "gloo": [28, 48, 1716, 2047, 2053, 2075], "recv": [28, 33, 1716, 2066, 2076], "broadcast": [28, 30, 32, 34, 35, 55, 60, 99, 198, 400, 402, 403, 404, 515, 517, 519, 568, 687, 688, 689, 690, 691, 692, 693, 699, 737, 887, 943, 948, 951, 955, 956, 957, 958, 992, 1021, 1050, 1102, 1103, 1108, 1113, 1153, 1154, 1155, 1156, 1213, 1215, 1229, 1239, 1297, 1298, 1305, 1318, 1322, 1327, 1328, 1330, 1333, 1341, 1361, 1366, 1367, 1377, 1411, 1414, 1423, 1426, 1439, 1460, 1479, 1532, 1605, 1614, 1677, 1684, 1716, 1779, 1815, 1823, 1846, 1848, 1924, 1928, 1944, 1949, 1979, 2012, 2016, 2033, 2034, 2035, 2047, 2065, 2066, 2081, 2102, 2106], "all_reduc": [28, 50, 1716, 2066], "all_gath": 28, "scatter": [28, 32, 55, 515, 517, 519, 1462, 1964, 2014, 2050, 2051, 2066, 2075, 2106], "reduce_scatt": [28, 2066], "all_to_al": 28, "v1": [28, 55, 1341, 1815, 2011, 2047, 2075], "init_method": [28, 1716, 2075], "adher": [28, 977, 2016, 2080], "some_fil": 28, "machine_nam": 28, "share_folder_nam": 28, "tcpstore": [28, 47], "past": [28, 52, 64, 1019, 1066, 1071, 1085, 1086, 1716, 2050, 2104, 2105, 2108], "ask": [28, 59, 60, 2011, 
2012, 2049, 2052, 2092, 2099, 2113], "infiniband": [28, 1716, 2075], "interconnect": [28, 33], "gpudirect": 28, "ethernet": 28, "ip": [28, 47], "ib": 28, "upcom": [28, 2041], "nccl_socket_ifnam": 28, "eth0": 28, "gloo_socket_ifnam": 28, "eth1": 28, "eth2": 28, "eth3": 28, "imper": 28, "nccl_debug": 28, "nccl_debug_subsi": 28, "coll": 28, "hang": [28, 29, 32, 35, 1716, 2025, 2047, 2115], "topologi": [28, 30, 34, 37], "effort": [28, 52, 2075, 2099, 2105], "socket": [28, 38, 2032, 2075], "nccl_socket_nthread": 28, "nccl_nsocks_perthread": 28, "cloud": [28, 2080, 2085], "aw": [28, 39, 997], "gcp": [28, 2108], "primit": [28, 32, 33, 47, 1344, 2013, 2015, 2017, 2044, 2056, 2065, 2075, 2106], "kind": [28, 41, 52, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 682, 1183, 1292, 1717, 1778, 1825, 1889, 2011, 2020, 2048, 2057, 2059, 2063, 2065, 2068, 2070, 2081, 2083], "connect": [28, 37, 47, 1453, 1454, 1455, 1456, 1457, 1458, 1502, 1503, 1504, 1505, 1506, 1507, 1555, 2032, 2075, 2109], "advantag": [28, 47, 48, 1439, 1486, 2047, 2050, 2062, 2063, 2075, 2080, 2096], "redund": [28, 55, 1124, 1125, 1127, 1129, 1143, 1145, 1923], "elimin": [28, 52, 64, 612, 1180, 1960, 1961, 2059, 2104, 2110], "thrash": 28, "recurr": [28, 762, 763, 1288, 1445, 1462, 1477, 1478, 1496, 1542, 1734, 1760, 1794, 2012, 2045], "device_mesh": [28, 34, 55, 1716], "init_device_mesh": [28, 34], "use_distribut": 28, "group_nam": [28, 44], "pg_option": 28, "url": [28, 47, 934, 2011, 2027, 2075], "encod": [28, 37, 44, 47, 64, 1320, 1344, 1388, 1389, 1438, 1439, 1570, 1571, 1572, 1573, 1574, 1716, 1870, 1909, 1910, 1911, 1913, 1914, 2013, 2016, 2017, 2048, 2060, 2068, 2076, 2080], "ucc": 28, "lowercas": 28, "deadlock": [28, 1716], "job": [28, 37, 40, 41, 43, 44, 46, 47, 48, 50, 1410, 1716, 1802, 1808, 2045, 2054, 2069, 2085, 2099, 2113, 2115], "exchang": [28, 47, 1011, 1096, 2045, 2062], "timedelta": [28, 47, 2029], "abort": [28, 2045, 2115], "crash": [28, 40, 47, 1309, 2032, 2042, 2075, 2077, 2085, 2098], "corrupt": [28, 47, 1462, 2045, 2057], "torch_nccl_blocking_wait": [28, 2115], "processgroupopt": 28, "processgroupnccl": [28, 2012, 2047, 2114], "is_high_priority_stream": 28, "ncclcomminit": 28, "lazi": [28, 990, 1285, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1706, 1995, 2012, 2098], "ncclcommsplit": 28, "unnecessari": [28, 32, 1723, 1724, 1764, 2016, 2042, 2045, 2048, 2060, 2068, 2084], "backend_nam": [28, 1967], "custom_backend": 28, "mesh_shap": 28, "mesh_dim_nam": 28, "dimension": [28, 35, 53, 515, 517, 883, 889, 890, 891, 954, 961, 973, 1096, 1098, 1099, 1108, 1124, 1125, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1142, 1143, 1145, 1147, 1149, 1150, 1162, 1226, 1235, 1236, 1237, 1305, 1310, 1311, 1312, 1313, 1338, 1343, 1359, 1367, 1374, 1430, 1447, 1448, 1449, 1450, 1451, 1452, 1461, 1462, 1468, 1469, 1498, 1516, 1533, 1541, 1548, 1549, 1550, 1551, 1552, 1553, 1560, 1562, 1566, 1582, 1583, 1584, 1615, 1668, 1671, 1704, 1705, 1715, 1730, 1770, 1833, 1909, 1910, 1911, 1912, 1913, 1914, 1923, 1939, 1942, 1943, 1949, 2016, 2035, 2040, 2043, 2052, 2058, 2080, 2082, 2083, 2086, 2089, 2098], "layout": [28, 34, 53, 64, 152, 193, 210, 235, 344, 345, 437, 447, 448, 449, 450, 451, 460, 546, 583, 584, 585, 586, 587, 589, 590, 619, 691, 868, 896, 901, 944, 953, 1023, 1109, 1110, 1111, 1121, 1126, 1144, 1160, 1163, 1164, 1185, 1230, 1231, 1292, 1343, 1359, 1367, 1377, 1422, 1649, 1775, 1776, 1797, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1880, 1881, 1882, 
1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1904, 1905, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1948, 1953, 1955, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2018, 2035, 2045, 2066, 2068, 2080, 2085, 2086, 2087, 2106], "spmd": [28, 30], "nd": [28, 1328, 2051, 2058], "inconsist": [28, 55, 868, 1842, 2048], "scene": [28, 2060, 2085], "mesh": [28, 34, 2085], "mesh_1d": 28, "mesh_2d": 28, "dp": 28, "is_initi": [28, 2012], "is_mpi_avail": 28, "is_nccl_avail": 28, "is_gloo_avail": 28, "is_torchelastic_launch": 28, "elast": [28, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 51, 2012], "torchelast": [28, 31, 37, 39, 40, 41, 44, 46, 47, 48, 50, 51], "torchelastic_run_id": [28, 48], "proxi": [28, 30, 977, 2070, 2075, 2099], "rendezv": [28, 31, 37, 44, 46, 51, 2047, 2075], "null": [28, 39, 41, 44, 66, 70], "discoveri": [28, 47, 2045, 2068], "reachabl": 28, "multicast": 28, "20": [28, 32, 35, 323, 687, 722, 730, 731, 736, 740, 741, 742, 743, 744, 745, 748, 758, 762, 763, 764, 765, 766, 767, 774, 1090, 1226, 1270, 1273, 1284, 1295, 1329, 1411, 1434, 1436, 1437, 1439, 1440, 1441, 1442, 1443, 1445, 1452, 1453, 1454, 1455, 1457, 1458, 1463, 1464, 1465, 1466, 1470, 1473, 1474, 1477, 1478, 1480, 1487, 1488, 1489, 1490, 1493, 1494, 1495, 1496, 1497, 1498, 1513, 1519, 1520, 1521, 1523, 1524, 1526, 1542, 1544, 1555, 1563, 1566, 1569, 1570, 1571, 1572, 1584, 1607, 1609, 1610, 1612, 1616, 1627, 1628, 1634, 1692, 1730, 1731, 1732, 1756, 1765, 1768, 1786, 1801, 1829, 1939, 2013, 2014, 2024, 2035, 2047, 2048, 2058, 2063, 2066, 2067, 2075, 2079, 2080, 2099, 2100, 2101], "23456": 28, "clean": [28, 37, 52, 64, 985, 1046, 2011, 2032, 2068], "fcntl": 28, "nf": 28, "init": [28, 34, 37, 40, 55, 1272, 1526, 1534, 1554, 1764, 2012, 2018, 2042, 2048, 2054, 2055, 2098], "brand": [28, 30, 2048], "succe": [28, 47, 52, 60, 2045, 2048, 2061, 2111, 2113], "unexpect": [28, 30, 36, 64, 1007, 1159, 1162, 1165, 1272, 1526, 1766, 1779, 1918, 2013, 2042, 2048, 2058, 2099, 2101, 2102], "unsuccess": 28, "mnt": 28, "sharedfil": 28, "port": [28, 37, 46, 47, 48, 2024, 2053], "enum": [28, 45, 66, 798, 1585, 1586, 1587, 2048, 2065, 2069, 2070, 2075], "backend_str": 28, "uppercas": 28, "classmethod": [28, 30, 47, 66, 68, 734, 740, 741, 742, 747, 748, 758, 766, 794, 795, 796, 817, 818, 819, 826, 857, 931, 1010, 1468, 1469, 1566, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1777, 2016, 2017, 2048, 2070, 2075, 2082, 2112], "register_backend": [28, 2075, 2097], "extended_api": 28, "instanti": [28, 32, 46, 47, 48, 55, 64, 844, 1469, 1624, 1733, 1757, 1764, 2011, 2013, 2015, 2016, 2045, 2048, 2055, 2063, 2082], "3rd": [28, 35, 47, 619, 2043], "processgroup": [28, 30, 32, 33, 55, 1716], "four": [28, 1318, 1454, 2048, 2052, 2075, 2077], "c10d": [28, 37, 46, 48, 51, 682, 1716, 2047, 2066], "distributedbackendopt": 28, "get_backend": [28, 47], "get_rank": [28, 55, 1566], "consecut": [28, 55, 612, 1759, 1798, 1949, 1960, 1961, 2034, 2065], "get_world_s": 28, "pattern": [28, 30, 33, 794, 795, 796, 798, 857, 864, 1050, 1200, 1462, 1586, 1760, 1905, 2013, 2016, 2042, 2045, 2048, 2050, 2053, 2060, 2071, 2072, 2080, 2096, 2098, 2099, 2101, 2110, 2113], "launcher": [28, 33, 48], "pg": [28, 1716], "destructor": [28, 2032, 2077], "ncclcommabort": 28, "gc": [28, 488, 2077], "fault": [28, 31, 37, 46, 47, 1162], "toler": [28, 31, 37, 46, 47, 64, 696, 922, 923, 1261, 1288, 1289, 1327, 1330, 1345, 1779, 1786, 2013, 2087], "_after_": [28, 2047], "unsupport": [28, 37, 52, 60, 66, 80, 83, 84, 86, 89, 2013, 2017, 2034, 2063, 2075, 2080, 2102, 2107], 
"untest": [28, 2034], "filestor": [28, 47], "hashstor": 28, "client": [28, 32, 47, 1064, 1186, 1191, 2068, 2102], "host_nam": 28, "hostnam": [28, 37, 47, 2069], "listen": 28, "is_mast": 28, "300": [28, 37, 1350, 1439, 1761, 1763, 2067], "wait_for_work": 28, "multi_ten": 28, "underli": [28, 30, 32, 35, 37, 52, 55, 60, 64, 99, 313, 328, 379, 402, 404, 475, 476, 477, 478, 485, 501, 522, 526, 559, 560, 561, 615, 750, 759, 881, 1010, 1160, 1187, 1203, 1204, 1205, 1243, 1247, 1272, 1344, 1421, 1676, 1844, 1948, 1963, 1981, 2013, 2016, 2035, 2045, 2050, 2053, 2075, 2082, 2084, 2092, 2101], "tcpserver": 28, "master_listen_fd": 28, "use_libuv": 28, "libuv": 28, "datetim": [28, 2029, 2102], "server_stor": 28, "127": [28, 796, 2034, 2070, 2085], "1234": [28, 47, 1249, 1962], "client_stor": 28, "first_kei": 28, "first_valu": 28, "hashmap": 28, "file_nam": [28, 2011, 2027, 2068], "store1": 28, "store2": 28, "prefixstor": 28, "old": [28, 36, 52, 64, 488, 857, 1159, 1195, 1209, 1465, 1768, 1776, 1810, 1858, 2010, 2012, 2013, 2042, 2045, 2048, 2061, 2070, 2085, 2101], "throw": [28, 29, 63, 64, 192, 325, 330, 547, 616, 1257, 1272, 1313, 1362, 1526, 1716, 1718, 1719, 1760, 1771, 1822, 1964, 2035, 2042, 2059, 2075, 2086, 2099, 2102, 2111, 2115], "whose": [28, 35, 55, 64, 546, 862, 901, 965, 992, 1096, 1108, 1113, 1162, 1215, 1226, 1229, 1235, 1272, 1278, 1289, 1297, 1343, 1359, 1361, 1423, 1526, 1716, 1772, 1820, 1877, 1945, 2016, 2042, 2048, 2051, 2052, 2068, 2080, 2085, 2086, 2087, 2098, 2111], "quantiti": [28, 56, 61, 1195, 1197, 1200, 1308, 1309, 1336, 1491, 1741, 1742, 1744, 1745, 1747, 1750, 1751, 1752, 1753, 1810, 2020, 2028, 2049, 2052], "compare_set": 28, "arg2": [28, 53], "expected_valu": 28, "desired_valu": 28, "second_valu": 28, "overload": [28, 52, 64, 619, 2015, 2016, 2020, 2063], "bad_kei": 28, "num_kei": 28, "written": [28, 30, 37, 40, 45, 60, 934, 1066, 1160, 1462, 1796, 2013, 2015, 2023, 2026, 2042, 2047, 2048, 2049, 2055, 2060, 2063, 2065, 2068, 2069, 2082, 2085, 2092, 2098, 2099, 2100, 2104, 2105, 2110, 2111], "delete_kei": 28, "set_timeout": 28, "grain": [28, 52, 861, 864, 1684, 2042, 2065, 2080, 2092, 2104, 2111], "plai": [28, 2100, 2102], "new_group": [28, 55, 1566], "opaqu": [28, 36, 977, 1008, 1042, 1043, 1159, 2107], "use_local_synchron": 28, "group_desc": 28, "enqueu": [28, 63, 1011, 1013, 1014, 1399, 1983, 2045, 2076], "groupmemb": 28, "non_group_memb": 28, "significantli": [28, 52, 55, 912, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1870, 2080, 2097], "taken": [28, 33, 35, 52, 63, 64, 66, 71, 868, 1129, 1131, 1154, 1155, 1445, 1461, 1469, 1533, 1541, 1616, 1624, 1715, 1874, 1907, 2040, 2042, 2044, 2045, 2048, 2050, 2051, 2054, 2060, 2065, 2068, 2069, 2111, 2113], "get_group_rank": 28, "global_rank": [28, 37], "translat": [28, 1226, 2042, 2062, 2063, 2077], "get_global_rank": 28, "group_rank": [28, 37, 48], "get_process_group_rank": 28, "inter": [28, 37, 40, 55, 1223, 1872, 2044, 2047, 2075], "intra": [28, 32, 55, 2044, 2047], "_init_backend": 28, "ndarrai": [28, 36, 460, 882, 1159, 1161, 1909, 1910, 1911, 1912, 1913, 1914, 1942, 2065, 2085, 2087, 2102], "dst": [28, 2011, 2068], "destin": [28, 30, 41, 44, 45, 55, 211, 418, 419, 605, 1022, 1023, 1213, 1272, 1379, 1380, 1526, 2014, 2063, 2075, 2076, 2082], "unspecifi": [28, 483, 822, 823, 824, 827, 922, 923, 1187, 1560, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1906, 1912, 2023, 2080, 2085, 2093], "sender": [28, 
2077], "isend": 28, "irecv": 28, "is_complet": 28, "finish": [28, 30, 37, 44, 45, 47, 50, 63, 488, 1166, 2045, 2047, 2061, 2069, 2075, 2077, 2113], "send_object_list": 28, "object_list": 28, "picklabl": [28, 1272, 1526, 2068], "sent": [28, 30, 1046, 2016, 2032, 2057, 2075, 2076, 2077], "current_devic": [28, 30, 55, 1019, 1027, 1028, 1035, 1036, 1052, 1056, 1058, 1059, 1060, 1062, 1064, 1065, 1066, 1071, 1072, 1073, 1074, 1084, 1085, 1086, 1401, 1402, 1967, 1985, 1990, 1991, 2008, 2012, 2083, 2100], "set_devic": [28, 30, 55, 1716, 1866, 2012, 2075, 2083], "insecur": [28, 1344], "malici": [28, 1344, 2068], "ineffici": [28, 55, 2045, 2070], "recv_object_list": 28, "batch_isend_irecv": 28, "p2p_op_list": 28, "p2pop": 28, "op_list": 28, "send_tensor": 28, "recv_tensor": 28, "send_op": 28, "recv_op": 28, "req": 28, "p2p": [28, 47], "async_op": 28, "onto": [28, 55, 64, 66, 68, 1070, 1280, 1283, 1344, 1736, 1965, 2011, 2020, 2024, 2032, 2045, 2049, 2050, 2055, 2069, 2110, 2113], "get_futur": [28, 1716], "regard": [28, 1456, 1457, 1458, 1468, 1469, 1623, 1624, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 2013, 2077, 2080, 2105], "101": [28, 931, 1430], "overwrot": 28, "broadcast_object_list": 28, "redoptyp": 28, "bitwis": [28, 947, 949, 950, 952, 2017, 2058, 2071, 2089], "reduceop": 28, "int64": [28, 86, 193, 210, 315, 323, 395, 868, 946, 959, 1109, 1122, 1123, 1161, 1615, 1757, 1837, 1841, 1842, 1862, 1909, 1910, 1911, 1913, 1914, 2020, 2045, 2080, 2082, 2083, 2086, 2116], "1j": [28, 700, 990, 991, 1268, 1850, 1851, 1973, 2042, 2052, 2060], "2j": [28, 700, 990, 991, 1850, 1851, 1877, 1973, 2042], "tensor_list": [28, 2035], "all_gather_into_tensor": [28, 2066], "output_tensor": 28, "input_tensor": [28, 2104], "accommod": [28, 1522, 1523, 1524], "ii": [28, 993, 1108, 1496, 1497], "tensor_in": 28, "tensor_out": 28, "tensor_out2": 28, "all_gather_object": 28, "obj": [28, 64, 883, 1031, 1258, 1259, 1278, 1279, 1284, 1858, 1968, 1988, 2060, 2061, 2068], "pickabl": 28, "popul": [28, 35, 45, 55, 63, 64, 337, 497, 505, 506, 1185, 1196, 1373, 1418, 2045, 2048, 2101, 2110], "unmodifi": [28, 33, 2100], "responsibl": 28, "gather_object": 28, "gather_list": 28, "object_gather_list": 28, "scatter_list": 28, "tensor_s": 28, "t_one": 28, "t_five": 28, "scatter_object_list": 28, "scatter_object_output_list": 28, "scatter_object_input_list": 28, "output_list": 28, "input_list": 28, "reduce_scatter_tensor": [28, 2066], "all_to_all_singl": [28, 2066], "output_split_s": 28, "input_split_s": 28, "13": [28, 878, 938, 969, 1090, 1106, 1109, 1153, 1226, 1236, 1276, 1362, 1446, 1473, 1474, 1523, 1561, 1627, 1628, 1899, 1943, 1977, 2013, 2017, 2066, 2080, 2105, 2111], "14": [28, 66, 74, 75, 323, 519, 968, 1050, 1051, 1091, 1106, 1236, 1325, 1331, 1416, 1446, 1523, 1826, 1833, 1870, 1899, 1943, 1944, 1949, 1977, 2013, 2017, 2020, 2051, 2066, 2080, 2084, 2085, 2099, 2100], "15": [28, 64, 619, 1090, 1106, 1214, 1226, 1236, 1295, 1309, 1446, 1523, 1524, 1761, 1763, 1818, 1890, 1899, 1947, 1977, 2013, 2014, 2017, 2065, 2080, 2100], "uneven": [28, 29, 32, 34, 1716], "18": [28, 323, 447, 687, 1109, 1226, 1269, 1345, 1523, 2013, 2020, 2063, 2065, 2066, 2080, 2093, 2099, 2100], "21": [28, 35, 687, 938, 965, 1091, 1331, 1826, 1949, 2013, 2080], "22": [28, 35, 323, 619, 1345, 1706, 1761, 1763, 2013, 2059, 2080, 2099, 2100], "23": [28, 515, 1090, 1345, 1870, 1885, 2013, 2080, 2111], "31": [28, 951, 1437, 1495, 1521, 1706, 1885, 2065], "33": [28, 740, 741, 742, 743, 744, 745, 774, 1090, 1272, 1453, 1454, 1455, 1457, 1458, 1524, 
1526, 1607, 1609, 1610, 1612, 2013, 2065], "34": [28, 1962, 2100, 2111], "35": [28, 1331, 1441, 1442, 1489, 1490, 1566, 1826], "36": [28, 323, 1151, 1226, 2100], "input_split": 28, "output_split": 28, "5j": 28, "6j": 28, "7j": 28, "8j": 28, "9j": 28, "10j": 28, "11j": 28, "12j": 28, "13j": 28, "14j": 28, "15j": 28, "16j": 28, "output_tensor_list": 28, "input_tensor_list": 28, "monitored_barri": [28, 2066], "wait_all_rank": 28, "pend": [28, 980, 2045, 2075, 2085, 2096], "band": 28, "bor": 28, "bxor": 28, "premul_sum": 28, "suppos": [28, 64, 1209, 1269, 1811, 2023, 2051, 2080, 2101, 2102, 2103], "_make_nccl_premul_sum": 28, "__members__": 28, "reduce_op": 28, "mention": [28, 1181, 2011, 2015, 2016, 2034, 2042, 2045, 2055, 2065, 2080, 2084, 2092, 2105, 2111], "stand": [28, 33, 1172, 1176, 2015, 2068, 2077], "exemplifi": 28, "cpp_extens": [28, 2012], "cpp_c10d_extens": 28, "torchrun": [28, 31, 33, 39, 46, 51], "benefici": [28, 90, 1723, 1724, 2055], "nproc_per_nod": [28, 33, 39], "num_gpus_you_hav": 28, "your_training_script": [28, 46, 48], "arg3": [28, 53], "192": [28, 619, 2065], "168": 28, "nnode": [28, 46, 48], "local_process_rank": 28, "local_rank": [28, 37, 40, 45, 48, 51, 1566], "argpars": [28, 48, 2045], "parser": [28, 48, 2045], "argumentpars": [28, 48, 2045], "add_argu": [28, 48, 2045], "parse_arg": [28, 39, 48, 51, 2045, 2065], "onward": [28, 48, 1810, 2081], "dash": [28, 48, 2075, 2077], "previous": [28, 47, 48, 52, 56, 59, 60, 61, 64, 700, 794, 795, 931, 1177, 1200, 1209, 1280, 1921, 1922, 1971, 1972, 1976, 2013, 2043, 2045, 2055, 2068, 2074, 2075, 2081, 2093, 2099, 2107, 2113], "underscor": [28, 48, 2011, 2034, 2075, 2086], "unrecogn": [28, 48, 2016], "output_devic": [28, 32, 48, 1462, 1566, 1699, 1716], "adjust": [28, 30, 32, 34, 35, 55, 1439, 1810, 1882, 2012, 2044, 2071, 2093, 2113, 2114], "filesystem": [28, 30, 2011, 2068], "12042": 28, "wrong": [28, 64, 86, 903, 904, 908, 980, 1270, 1273, 2047, 2057, 2061, 2065, 2067, 2096, 2099, 2100], "imagenet": [28, 2040], "suit": [28, 2013, 2015, 2016, 2065, 2071, 2075, 2108], "pdb": [28, 1277, 1284, 1290, 2013, 2015], "streamlin": [28, 53, 83], "attach": [28, 33, 53, 58, 63, 64, 141, 714, 715, 716, 717, 718, 719, 720, 721, 722, 732, 733, 734, 735, 795, 840, 842, 867, 1706, 2011, 2013, 2015, 2033, 2042, 2048, 2070, 2076, 2090, 2098], "rerout": 28, "sync": [28, 32, 55, 63, 64, 488, 1716, 2045, 2047, 2096, 2101], "group_gloo": 28, "29501": 28, "monitoredbarri": 28, "transport": [28, 2075], "598": 28, "2401": 28, "db00": 28, "eef0": 28, "1100": 28, "3560": 28, "1c05": 28, "25d": 28, "8594": 28, "twolinlayernet": 28, "ddp": [28, 29, 30, 32, 33, 55, 682, 1566, 1716, 2012, 2045, 2047, 2075, 2102], "i0607": 28, "739390": 28, "515217": 28, "173": 28, "broadcast_buff": [28, 1716], "bucket_cap_byt": 28, "26214400": 28, "find_unused_paramet": [28, 1716, 2047], "gradient_as_bucket_view": [28, 1716], "is_multi_device_modul": 28, "num_parameter_tensor": 28, "total_parameter_size_byt": 28, "440": 28, "bucket_s": 28, "module_nam": [28, 64, 819, 857, 2068], "nccl_async_error_handl": [28, 2045], "nccl_blocking_wait": 28, "nccl_ib_timeout": 28, "nccl_nthread": 28, "58": [28, 2105], "085681": 28, "544067": 28, "344": 28, "unused_parameter_s": 28, "40838608": 28, "5983335": 28, "4326421": 28, "comp": [28, 35], "4207652": 28, "085693": 28, "544066": 28, "42850427": 28, "3885553": 28, "2357981": 28, "2234674": 28, "enhanc": [28, 55, 2098], "unus": [28, 32, 47, 64, 917, 1046, 1060, 1277, 1284, 1487, 1716, 1779, 2013, 2015, 2016, 2045, 2047, 2053, 2068], 
"went": [28, 64, 2099], "wasn": [28, 47, 1344, 2013], "va": 28, "lue": 28, "indirectli": 28, "outstand": [28, 2075], "stuck": [28, 37, 50, 2115], "uninform": 28, "root": [28, 30, 33, 40, 47, 48, 52, 55, 64, 795, 1174, 1183, 1310, 1311, 1541, 1681, 1715, 1794, 1857, 1916, 2026, 2042, 2051, 2068, 2075, 2076, 2080, 2102], "nontrivi": [28, 1187, 1198, 2045, 2098], "reveal": [28, 2047], "default_pg": 28, "opt": [28, 32, 1284, 2013, 2052], "longtensor": [28, 33, 136, 137, 138, 317, 319, 321, 454, 473, 515, 517, 519, 877, 878, 1213, 1247, 1294, 1412, 1468, 1469, 1529, 1623, 1624, 1670, 1770, 1862, 1899, 1912, 1938, 1946, 1979, 2083, 2086], "set_debug_level": 28, "set_debug_level_from_env": 28, "get_debug_level": 28, "disterror": 28, "distbackenderror": 28, "thrown": [28, 63, 619, 881, 883, 1064, 1106, 1236, 1272, 1303, 1312, 1314, 1316, 1319, 1320, 1331, 1526, 1684, 1721, 1977, 2063, 2068, 2115], "distnetworkerror": 28, "ex": [28, 39, 1187, 1716, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 2029, 2110], "diststoreerror": 28, "outlin": [29, 2076, 2102], "joinabl": [29, 32, 1716, 2012], "joinhook": [29, 2012], "throw_on_early_termin": [29, 1716], "shadow": [29, 32, 1716, 2090, 2091], "notify_join_context": 29, "zeroredundancyoptim": [29, 32, 1716, 2012], "vacuou": 29, "inherit": [29, 1749, 2013, 2015, 2035, 2048, 2055, 2057], "join_hook": [29, 32, 1716], "join_devic": [29, 32], "join_process_group": [29, 32], "repeatedli": [29, 2045, 2080], "main_hook": 29, "post_hook": 29, "is_last_join": 29, "dcp": 30, "reshard": [30, 55], "storag": [30, 55, 223, 313, 341, 344, 345, 437, 460, 485, 501, 522, 526, 560, 561, 586, 587, 589, 590, 881, 901, 903, 904, 908, 1031, 1099, 1160, 1243, 1247, 1258, 1272, 1280, 1344, 1366, 1421, 1422, 1462, 1526, 1543, 1844, 1858, 1865, 1895, 1900, 1904, 1918, 1942, 1948, 1965, 1988, 2011, 2012, 2014, 2020, 2027, 2032, 2035, 2042, 2045, 2057, 2060, 2068, 2075, 2080, 2083, 2084, 2086, 2101], "entrypoint": [30, 34, 37, 40, 45, 48, 49, 52, 2020, 2032], "state_dict_sav": 30, "checkpoint_id": 30, "storage_writ": 30, "planner": 30, "style": [30, 34, 36, 64, 880, 1103, 1159, 1770, 1793, 2013, 2015, 2016, 2065, 2068, 2085, 2103], "shardedtensor": [30, 55], "dtensor": [30, 34, 55], "save_state_dict": [30, 2012], "fsdp": [30, 33, 55, 488, 682, 2012, 2102], "shardingstrategi": [30, 55, 2012], "hybrid_shard": [30, 55], "shard_group": 30, "pathlik": [30, 52, 64, 1344, 1858, 2068], "storagewrit": [30, 2012], "writer": [30, 2029, 2055, 2085], "saveplann": [30, 2012], "my_model": [30, 1716, 2013, 2096, 2102, 2111], "mymodul": [30, 52, 53, 55, 64, 1276, 1277, 1279, 1283, 1284, 1290, 1527, 1528, 1536, 1537, 2013, 2015, 2016, 2050, 2060, 2075], "fs_storage_writ": 30, "filesystemwrit": [30, 2012], "async_sav": [30, 2012], "de": [30, 2015, 2068, 2070], "checkpoint_futur": 30, "coordinator_rank": 30, "no_dist": 30, "state_dict_load": 30, "storage_read": 30, "fullfil": 30, "po": [30, 2065], "storageread": [30, 2012], "reader": [30, 2099], "loadplann": [30, 2012], "adagrad": [30, 1468, 2023, 2067, 2075], "model_state_dict": [30, 2063], "fs_storage_read": 30, "filesystemread": [30, 2012], "asyncstag": [30, 2012], "stage_data": 30, "opportun": [30, 1303, 2013, 2076, 2101, 2102], "reflect": [30, 59, 460, 522, 558, 1161, 1162, 1165, 1269, 1453, 1454, 1455, 1502, 1503, 1504, 1548, 1549, 1550, 1632, 1671, 1766, 1811, 1923, 2013, 2014, 2050, 2084, 2096], "ram": [30, 1344, 2045], "responds": 30, "should_synchronize_after_execut": 30, "assumpt": [30, 34, 44, 48, 52, 922, 978, 1445, 1479, 2042, 
2047, 2052, 2075, 2076, 2080, 2098, 2099, 2102, 2107, 2111], "synchronize_stag": 30, "innocul": 30, "statefult": 30, "blockingasyncstag": [30, 2012], "cache_staged_state_dict": 30, "type_check": 30, "automodul": 30, "act": [30, 35, 37, 45, 60, 861, 1439, 1527, 1528, 1537, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1750, 1751, 1752, 1753, 1754, 1964, 2045, 2055, 2059, 2067, 2099], "told": [30, 2015], "role": [30, 37, 39, 47, 48], "read_metadata": 30, "set_up_storage_read": 30, "prepare_local_plan": 30, "prepare_global_plan": 30, "read_data": 30, "central": [30, 66, 1226, 2052, 2054], "loadplan": [30, 2012], "storage_data": 30, "load_byt": 30, "bytesio": [30, 52, 1280, 1283, 1344, 1858, 2065, 2070], "resolve_tensor": 30, "storagelay": 30, "schedul": [30, 37, 40, 46, 63, 682, 1794, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 2012, 2022, 2047, 2051, 2054, 2069, 2102], "checkpiont_id": 30, "is_coordin": 30, "validate_checkpoint_id": 30, "stroag": 30, "set_up_storage_writ": 30, "write_data": 30, "mark": [30, 33, 47, 63, 64, 66, 71, 75, 488, 828, 903, 904, 905, 908, 931, 978, 980, 1181, 1388, 1389, 1716, 1778, 2013, 2015, 2042, 2047, 2048, 2055, 2060, 2068, 2069, 2076, 2086, 2092, 2096, 2099, 2102, 2103], "recover": 30, "writeresult": 30, "saveplan": [30, 2012], "storage_meta": 30, "storagemeta": 30, "todo": [30, 84, 85, 828, 862, 864, 865, 1197, 2091, 2101, 2107], "resolve_data": 30, "writeitem": [30, 2012], "tensor_data": 30, "set_up_plann": 30, "create_local_plan": 30, "create_global_plan": 30, "commit_tensor": 30, "defaultloadplann": [30, 2012], "rewrit": [30, 52, 60, 66, 68, 74, 75, 84, 85, 2026, 2042, 2052, 2062, 2063, 2065, 2099, 2100], "requit": 30, "intrincaci": 30, "renameplann": 30, "state_dict_typ": [30, 55], "original_state_dict": 30, "foo_": [30, 1166], "flatten_sharded_tensor": 30, "_flatten_sharded_tensor": 30, "flatten_state_dict": 30, "read_item": 30, "dest_index": 30, "fqn": [30, 33, 34, 52, 2028, 2091], "metamodelmateri": 30, "defaultsaveplann": [30, 2012], "global_plan": 30, "finish_plan": 30, "central_plan": 30, "resolve_byt": 30, "alia": [30, 35, 41, 52, 93, 94, 113, 189, 190, 355, 408, 438, 439, 446, 525, 543, 684, 869, 870, 871, 872, 873, 874, 875, 897, 940, 971, 987, 988, 1094, 1101, 1104, 1115, 1116, 1117, 1119, 1120, 1146, 1217, 1227, 1228, 1240, 1241, 1242, 1250, 1270, 1272, 1299, 1300, 1307, 1323, 1327, 1330, 1358, 1368, 1369, 1379, 1413, 1415, 1425, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1526, 1643, 1719, 1757, 1773, 1813, 1818, 1821, 1856, 1878, 1893, 1896, 1898, 1925, 1929, 1930, 1950, 1956, 1980, 2016, 2048, 2066, 2081, 2082, 2086, 2101, 2106], "readitem": [30, 2012], "planner_data": 30, "loaditemtyp": 30, "metadataindex": 30, "dest_offset": 30, "storage_index": 30, "storage_offset": [30, 140, 522, 619, 881, 2014, 2020, 2066, 2106], "tandem": 30, "fp16planner": 30, "write_item": 30, "writeitemtyp": 30, "byte_io": 30, "itertool": [30, 35, 961, 974], "islic": 30, "dataclass": [30, 52, 53, 2110], "ddploadbalancingplann": 30, "all_plan": 30, "saveextradataplann": 30, "merged_data": 30, "new_plan": 30, "idempot": [30, 2075, 2077], "safeti": [30, 37, 52, 64, 488, 977, 978, 1197, 2013, 2016, 2034, 2059, 2103], "hi": [30, 1496, 1497, 2013, 2015, 2052, 2099], "peak": [30, 32, 55, 1056, 1058, 1064, 1072, 1073, 1074, 1716, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1802, 1808, 2051, 2108], "late": [30, 47, 488], "tensor_storage_s": 30, 
"single_file_per_rank": 30, "sync_fil": 30, "thread_count": 30, "per_thread_copy_ahead": 30, "10000000": 30, "simplif": [30, 2098], "atom": [30, 47, 64, 2017], "distributedtensor": [30, 34], "dedup_replicated_tensor": 30, "dedup_save_to_lowest_rank": 30, "lookup_object": 30, "transform_object": 30, "allow_partial_load": 30, "lookup_tensor": 30, "transform_tensor": 30, "legaci": [30, 36, 48, 1634, 2069, 2083, 2086], "layer1": 30, "unparallel": 30, "tackl": [30, 2102], "get_model_state_dict": [30, 2012], "get_optimizer_state_dict": [30, 2012], "uniform": [30, 483, 610, 945, 1461, 1545, 1615, 1835, 1836, 1883, 2012, 2040, 2066, 2089, 2106], "hese": 30, "get_state_dict": [30, 2012], "fully_shard": 30, "tensor_parallel": 30, "parallelize_modul": [30, 34, 2012], "hide": [30, 33, 61, 1177, 1976, 2049], "canon": [30, 37, 40, 1183, 1194, 2013, 2101], "named_paramet": [30, 52, 55, 57, 59, 1165, 1272, 1276, 1526, 2055, 2060, 2067], "named_buff": [30, 52, 55, 1272, 1526, 2055, 2060], "fullyshardeddataparallel": [30, 682, 2012, 2051], "fsdp_model": [30, 55], "deepcopi": [30, 59, 2070, 2091], "fsdp_optim": 30, "ddp_optim": 30, "ddp_state_dict": 30, "ddp_optim_state_dict": 30, "fsdp_state_dict": 30, "fsdp_optim_state_dict": 30, "ddp_optim_st": 30, "statedictopt": [30, 2012], "valuetyp": 30, "optimizerstatetyp": 30, "set_state_dict": [30, 2012], "optim_state_dict": [30, 55], "counterpart": [30, 860, 867, 1342, 1716, 2016, 2026, 2036, 2089, 2090, 2098, 2102], "missing_kei": [30, 1272, 1526], "miss": [30, 45, 691, 1165, 1189, 1272, 1367, 1377, 1459, 1488, 1489, 1490, 1526, 1649, 1766, 2024, 2048, 2061, 2065, 2080, 2098, 2103, 2114], "unexpected_kei": [30, 1272, 1526], "set_model_state_dict": [30, 2012], "set_optimizer_state_dict": [30, 2012], "full_state_dict": [30, 55], "cpu_offload": [30, 55], "ignore_frozen_param": 30, "keep_submodule_prefix": 30, "broadcast_from_rank0": 30, "flatten_optimizer_state_dict": 30, "offload": [30, 55], "oom": [30, 33, 55, 2050, 2113], "rank0": [30, 37, 47, 55], "frozen": [30, 32, 55, 1187, 1276, 1282, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 2017, 2061], "offlin": [30, 1283], "betwe": 30, "format_util": 30, "dcp_to_torch_sav": [30, 2012], "dcp_checkpoint_dir": 30, "torch_save_path": 30, "torch_save_to_dcp": [30, 2012], "onlin": [30, 64, 1782], "broadcastingtorchsaveread": [30, 2012], "dynamicmetaloadplann": [30, 2012], "sd": [30, 771, 776], "path_to_model": 30, "incurr": 30, "hopefulli": [30, 2051, 2101], "extnd": 30, "quickstart": 31, "agent": [31, 39, 40, 41, 44, 46, 48, 50, 2075], "expir": 31, "metric": [31, 1056, 1058, 1064, 1810, 2029, 2055, 2069, 2085, 2102, 2108], "plane": [31, 37, 47, 740, 741, 742, 743, 744, 745, 768, 769, 770, 771, 774, 775, 776, 784, 785, 1096, 1427, 1428, 1429, 1431, 1432, 1433, 1435, 1436, 1437, 1453, 1454, 1455, 1456, 1457, 1458, 1473, 1474, 1493, 1494, 1495, 1498, 1514, 1519, 1520, 1521, 1591, 1592, 1593, 1594, 1595, 1596, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1627, 1628, 1650, 1653, 1654, 1655, 1657, 1658, 1659, 1732, 1768, 1831, 1832, 1854], "kubernet": 31, "distributedoptim": [32, 1716, 2012, 2075, 2076], "rref": [32, 1716, 2012, 2016, 2076], "optimizer_class": 32, "params_rref": 32, "get_gradi": [32, 2066, 2075, 2076], "multithread": [32, 920, 2045], "dist_autograd": [32, 1716, 2075, 2076], "rpc": [32, 63, 1716, 2012, 2016, 2076, 2077], "context_id": [32, 1716, 2075, 2076], "rref1": [32, 2075, 2076], "worker1": [32, 63, 1716, 2075, 2076], "rref2": [32, 2075, 2076], "to_her": [32, 
1716, 2066, 2075, 2076, 2077], "dist_optim": [32, 1716, 2076], "postlocalsgdoptim": [32, 2012], "afer": 32, "modelaverag": 32, "localsgd": 32, "model_averag": 32, "post_localsgd_hook": 32, "postlocalsgdst": 32, "subgroup": 32, "start_localsgd_it": 32, "warmup_step": 32, "local_optim": 32, "periodicmodelaverag": 32, "checkpoint": [32, 33, 48, 51, 55, 934, 1344, 1716, 2011, 2012, 2027, 2050, 2051, 2060, 2063, 2077, 2096], "parameters_as_bucket_view": 32, "overlap_with_ddp": 32, "consumpt": [32, 59, 1769, 2080, 2085, 2092], "partit": [32, 33, 34, 1430, 1684, 1949, 2065, 2066, 2076, 2077, 2102, 2110], "registr": [32, 63, 1050, 1708, 1713, 1714, 1716, 1733, 1736, 2020, 2046, 2063, 2068, 2097], "offset": [32, 227, 228, 229, 230, 341, 522, 560, 748, 881, 1096, 1097, 1098, 1099, 1162, 1307, 1469, 1588, 1589, 1624, 1716, 1828, 1829, 1907, 1953, 1955, 2014, 2016, 2060, 2100, 2106], "intact": [32, 2075], "ddp_zero_hook": 32, "disjointli": 32, "trail": [32, 1759, 1761, 2035, 2040, 2043, 2048], "wari": 32, "static_graph": [32, 1716], "third": [32, 35, 1107, 1108, 1362, 1430, 1437, 1455, 1458, 1495, 1521, 1808, 1810, 2012, 2013, 2020, 2045, 2049, 2055, 2068, 2069, 2099], "add_param_group": [32, 1736, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "param_group": [32, 55, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1793, 1794, 1795, 1796, 1797, 1804, 1807], "trainabl": [32, 1053, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 2048], "consolidate_state_dict": 32, "consolid": [32, 55], "pertain": 32, "alpha": [33, 35, 99, 100, 101, 102, 107, 108, 109, 110, 111, 112, 153, 154, 314, 315, 490, 556, 563, 564, 565, 566, 687, 688, 691, 692, 693, 746, 772, 777, 943, 1050, 1051, 1230, 1244, 1434, 1444, 1467, 1514, 1554, 1598, 1606, 1621, 1622, 1632, 1650, 1685, 1780, 1794, 1886, 1887, 1900, 1905, 1919, 1924, 1925, 2014, 2042, 2048, 2065, 2067, 2102, 2106], "migrat": [33, 48, 56, 1374, 1768, 2073], "pippi": 33, "micro": 33, "convent": [33, 47, 52, 55, 64, 337, 964, 1108, 1126, 1128, 1152, 1175, 1185, 1187, 1440, 1441, 1442, 1488, 1489, 1490, 1566, 1855, 1858, 1957, 2011, 2027, 2035, 2042, 2052, 2055, 2060], "promis": 33, "intrus": [33, 2048], "toolkit": [33, 64], "said": [33, 911, 912, 913, 914, 915, 916, 977, 2023, 2058, 2099], "gpipe": 33, "1f1b": 33, "interleav": [33, 1801], "bf": [33, 1575, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "infrastrutur": 33, "pp": [33, 1269, 1479, 1890], "compos": [33, 34, 35, 59, 64, 740, 741, 742, 743, 744, 745, 768, 769, 774, 775, 776, 784, 785, 966, 1167, 1169, 1170, 1171, 1177, 1272, 1427, 1428, 1429, 1431, 1432, 1433, 1435, 1436, 1437, 1453, 1454, 1455, 1456, 1457, 1458, 1473, 1474, 1493, 1494, 1495, 1514, 1519, 1520, 1521, 1526, 1575, 1580, 1581, 1591, 1592, 1593, 1594, 1595, 1596, 1599, 1607, 1608, 1609, 1610, 1611, 1612, 1627, 1628, 1650, 1653, 1654, 1655, 1657, 1658, 1659, 1831, 1832, 1976, 2012, 2013, 2016, 2033, 2046, 2048, 2049, 2055, 2063, 2065, 2068, 2085, 2106, 2107], "torchtitan": 33, "3d": [33, 703, 706, 709, 737, 742, 745, 769, 771, 776, 781, 1374, 1429, 1433, 1437, 1440, 1442, 1455, 1458, 1465, 1466, 1472, 1474, 1488, 1490, 1495, 1521, 1532, 1579, 1593, 1596, 1597, 1601, 1609, 1612, 1620, 1626, 1628, 1643, 1655, 1659, 1671, 1703, 2035, 2058, 2070, 2080, 2085], "llama": [33, 2051], "pipeliningshapeerror": 33, "paral": 33, "portion": [33, 55, 1157, 1462, 1558, 1673, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 2071, 2103, 2111], "traceabl": [33, 52, 64, 626, 819, 
1192, 2013, 2020, 2070, 2098], "appeal": 33, "schedulegpip": 33, "n_microbatch": 33, "in_dim": [33, 60, 61, 895, 908, 909, 1167, 1177, 1976, 2049], "servic": [33, 39, 2047, 2098], "condens": 33, "partition": 33, "model_arg": [33, 2063], "modelarg": 33, "tok_embed": 33, "moduledict": [33, 1730, 1731, 1732, 2015, 2055], "witout": 33, "layer_id": 33, "n_layer": 33, "transformerblock": [33, 34, 2051], "token": [33, 47, 1008, 1042, 1043, 1053, 2011, 2017, 2104, 2106], "freqs_ci": 33, "meta": [33, 49, 53, 55, 59, 64, 338, 1195, 2012, 2020, 2063, 2075, 2083, 2085, 2087, 2098, 2101, 2103, 2110], "num_stag": 33, "stage_index": 33, "elif": [33, 64, 1279, 1491, 2015, 2016, 2053, 2063, 2099, 2110], "input_arg": [33, 52, 1778, 2065], "example_input_microbatch": 33, "output_arg": 33, "emb": [33, 1099, 1865, 1895], "modulelist": [33, 1555, 2055], "lmhead": 33, "lin": 33, "in_featur": [33, 59, 722, 730, 731, 734, 735, 758, 766, 1175, 1272, 1430, 1511, 1513, 1526, 1706, 1730, 1731, 1732, 1764, 1765, 1768, 2024, 2055], "out_featur": [33, 59, 722, 730, 731, 734, 735, 758, 766, 1175, 1272, 1443, 1511, 1513, 1526, 1706, 1730, 1731, 1732, 1764, 1765, 1768, 2024, 2055], "proj": [33, 1496], "splitpoint": 33, "pipe": [33, 37, 50, 2075, 2115], "mod": [33, 52, 53, 64, 723, 724, 734, 740, 741, 742, 747, 748, 758, 766, 812, 813, 814, 815, 838, 839, 867, 1165, 1275, 1276, 1282, 1289, 1766, 1968, 2015, 2016, 2028, 2068, 2090, 2097, 2111], "num_chunk": 33, "example_arg": [33, 52], "split_spec": 33, "submod_0": 33, "interpretermodul": [33, 52], "submod_1": 33, "reconstruct": [33, 1010, 1362, 1438, 1439, 2052, 2060, 2068, 2099], "replai": [33, 1008, 1042, 2045, 2063, 2069, 2096, 2099, 2111], "stage_mod": 33, "get_stage_modul": 33, "stage_idx": 33, "build_stag": 33, "kept": [33, 55, 59, 64, 699, 781, 787, 1440, 1441, 1442, 1488, 1489, 1490, 1566, 1643, 1703, 2016, 2032, 2051, 2073, 2080], "huggingfac": [33, 2104, 2108], "gpt2": 33, "acycl": [33, 53, 794, 2042, 2099], "dag": [33, 53, 2045, 2099], "pipelininig": 33, "truli": [33, 64, 2020, 2062, 2065], "almost": [33, 1319, 1320, 2061, 2099, 2101, 2109], "pipelineschedulesingl": 33, "pipelineschedulemulti": 33, "schedule1f1b": 33, "scheduleinterleaved1f1b": 33, "scheduleloopedbf": 33, "mb_arg": 33, "mb_kwarg": 33, "split_polici": 33, "marker": [33, 1010, 1385, 1981], "polici": [33, 35, 37, 40, 55, 1181, 1802, 1808], "split_gm": 33, "has_loss_and_backward": 33, "loss_spec": 33, "pipe_split": 33, "mm_param": 33, "tensorchunkspec": 33, "split_dim": [33, 2066, 2106], "split_args_kwargs_into_chunk": 33, "args_chunk_spec": 33, "kwargs_chunk_spec": 33, "spec": [33, 37, 39, 50, 52, 53, 66, 795, 796, 821, 822, 823, 826, 827, 828, 2063, 2072, 2086], "kwargs_split": 33, "args_split": 33, "merge_chunk": 33, "chunk_spec": 33, "oppos": [33, 903, 906, 908, 909, 977, 2049], "_pipelinestagebas": 33, "pipelinescheul": 33, "stage_modul": 33, "pipe_info": 33, "pipeinfo": 33, "_pipelinestag": 33, "output_merge_spec": 33, "drain": 33, "steadi": 33, "arxiv": [33, 35, 1496, 1546, 1573, 1732, 1768, 1816, 1870, 1928, 2042, 2086], "pdf": [33, 260, 1445, 1890, 2042], "2104": [33, 1108], "04473": 33, "ab": [33, 35, 92, 93, 627, 628, 684, 963, 1108, 1304, 1325, 1326, 1328, 1329, 1342, 1345, 1496, 1546, 1573, 1576, 1578, 1673, 1732, 1768, 1771, 1810, 1816, 1820, 1870, 2012, 2014, 2016, 2033, 2034, 2035, 2045, 2055, 2066, 2080, 2086, 2096, 2097, 2099, 2100, 2102, 2106, 2111], "2211": 33, "05953": 33, "simliar": 33, "_step_microbatch": 33, "colwis": 34, "parallelize_plan": 34, "sub_modul": 34, "parallelstyl": 
34, "devicemesh": [34, 55, 2012], "slice": [34, 52, 64, 66, 75, 77, 609, 697, 698, 1185, 1440, 1441, 1442, 1560, 1562, 1566, 1690, 1691, 1767, 1847, 1864, 1865, 1895, 1906, 1958, 2013, 2017, 2035, 2045, 2065, 2066, 2080, 2081, 2084, 2086, 2102, 2106], "colwiseparallel": [34, 2012], "tp_mesh": 34, "w1": [34, 64], "w2": [34, 64], "rowwiseparallel": [34, 2012], "mlp": [34, 1706, 2063, 2070], "input_layout": 34, "output_layout": 34, "use_local_output": 34, "sharded_mod": 34, "mind": [34, 1318, 1336, 1468, 2042, 2049, 2052, 2070, 2093, 2102, 2109], "sequenceparallel": [34, 2012], "sequence_dim": 34, "layernorm": [34, 1480, 1488, 1489, 1490, 1570, 1572, 1574, 1646, 2035, 2072, 2111], "rmsnorm": [34, 1681], "redistribut": [34, 2061], "preparemoduleinput": [34, 2012], "desired_input_layout": 34, "input_kwarg_layout": 34, "desired_input_kwarg_layout": 34, "placehold": [34, 52, 64, 82, 801, 826, 836, 1185, 1187, 1487, 2045, 2097, 2100, 2110], "attn": [34, 1572, 1574], "preparemoduleoutput": [34, 2012], "desired_output_layout": 34, "loss_parallel": [34, 2012], "crossentropyloss": [34, 1533, 1615], "logit": [34, 35, 393, 1439, 1461, 1605, 1615, 1634, 2014, 2066, 2081], "truth": [34, 1461, 1615, 2023, 2085], "label_smooth": [34, 1461, 1615, 2014], "dist_input": 34, "distribute_tensor": 34, "randint": [34, 546, 946, 997, 1090, 1151, 1445, 1576, 1615, 1616, 1723, 1724, 1908, 2014, 2018, 2034, 2066, 2085, 2089], "parameteriz": 35, "tensorflow": [35, 1794, 2042, 2085], "backpropag": [35, 923, 1795, 1826, 2050, 2092], "surrog": 35, "likelihood": [35, 1430, 1479, 1533, 1540, 1629, 1668, 1676], "ratio": [35, 37, 619, 1473, 1474, 1627, 1628, 2014, 2045, 2080, 2108], "reinforc": [35, 1556, 1687], "reparameter": [35, 1165, 1732, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1754, 1755, 1756, 1768], "trick": [35, 914, 1439, 1634, 2042, 2052, 2054, 2102], "autoencod": 35, "whilst": [35, 2045], "densiti": [35, 260, 303, 1234, 1235, 1890, 2014, 2081], "log_prob": [35, 1430, 1445, 1616, 2014], "theta": [35, 1597, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 2014], "partial": [35, 37, 47, 52, 55, 58, 64, 844, 1053, 1098, 1176, 1185, 1226, 1319, 1320, 1321, 1363, 1522, 1523, 1524, 1660, 1661, 1662, 1716, 1743, 1901, 1907, 2016, 2017, 2034, 2042, 2048, 2051, 2052, 2063, 2065, 2077, 2087, 2107], "pi": [35, 175, 379, 700, 953, 1230, 1231, 1324, 1475, 1540, 1630, 1676, 1800, 1801, 1820, 1881, 1882, 1885, 1886, 1887, 1888, 1923, 2015, 2016, 2068, 2081], "reward": 35, "ascent": 35, "prob": [35, 2014], "policy_network": 35, "next_stat": 35, "rsampl": 35, "parameter": [35, 379, 456, 1279, 2036, 2080], "has_rsampl": 35, "batch_shap": 35, "event_shap": 35, "validate_arg": 35, "arg_constraint": 35, "cdf": 35, "cumul": [35, 37, 1087, 1088, 1089, 1090, 1091, 1352, 1440, 1441, 1442, 1475, 1499, 1500, 1501, 1566, 1630], "mass": 35, "enumerate_support": 35, "discret": [35, 54, 483, 488, 1124, 1125, 1126, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1137, 1138, 1143, 1145, 1634, 1890, 2085, 2089], "cardin": [35, 1374], "univari": 35, "singleton": [35, 256, 1498, 1541, 1715, 1963, 2043], "cartesian": [35, 961, 1374, 1820], "_instanc": 35, "icdf": 35, "perplex": 35, "sample_shap": 35, "sample_n": 35, "set_default_validate_arg": 35, "mimic": [35, 1808], "stddev": 35, "varianc": [35, 997, 1440, 1441, 1442, 1470, 1479, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1566, 1625, 1629, 1726, 1728, 1793, 1794, 1830, 1839, 1840, 1971, 1972, 2040, 2055], "exp_famili": 35, "famili": 35, "p_": [35, 965, 1364, 1796], "langl": 
35, "rangl": 35, "denot": [35, 52, 64, 175, 997, 1197, 1293, 1312, 1318, 1331, 1341, 1453, 1454, 1491, 1733, 1736, 1796, 1909, 1910, 1911, 1913, 1914, 1973, 2024, 2052, 2073, 2076, 2080], "carrier": 35, "analyt": [35, 922, 923, 1623, 2080], "bregman": 35, "courtesi": 35, "frank": 35, "nielsen": 35, "richard": 35, "nock": 35, "70": [35, 938, 1331, 1826, 2035], "odd": [35, 1129, 1130, 1131, 1139, 1140, 1141, 1607, 1608, 1609], "interv": [35, 50, 260, 868, 941, 993, 1158, 1388, 1389, 1811, 1835, 1836, 1883, 2029, 2045, 2051, 2069], "lower_bound": 35, "upper_bound": 35, "has_enumerate_support": 35, "param_shap": 35, "concentration1": 35, "concentration0": 35, "concentr": 35, "1046": 35, "1st": [35, 2043, 2051], "2nd": [35, 619, 1534, 1561, 1671, 2043], "greaterthan": 35, "total_count": 35, "71": 35, "trial": [35, 288], "integergreaterthan": 35, "ldot": [35, 288, 1304, 1312, 1343, 1359, 1374, 1498, 1519, 1520, 1521, 1541, 1715, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "unnorm": [35, 1461, 1605, 1615, 1634], "likewis": [35, 1948], "25": [35, 498, 588, 589, 590, 938, 963, 1234, 1340, 1345, 1471, 1529, 1531, 1534, 1706, 1716, 1761, 1763, 1786, 1808, 1827, 1970, 2013, 2080, 2081], "independentconstraint": 35, "simplex": 35, "loc": [35, 1344], "lorentz": 35, "3214": 35, "width": [35, 781, 787, 861, 1233, 1234, 1235, 1436, 1437, 1454, 1455, 1457, 1458, 1494, 1495, 1520, 1521, 1533, 1561, 1579, 1643, 1703, 2034, 2080], "df": 35, "chi": 35, "continuous_bernoulli": 35, "lim": [35, 1269], "499": 35, "501": 35, "2538": [35, 1304], "pervas": 35, "loaiza": 35, "ganem": 35, "cunningham": 35, "jp": 35, "1907": 35, "06845": 35, "8954": 35, "greaterthaneq": 35, "df1": 35, "df2": 35, "fisher": 35, "snedecor": 35, "2453": 35, "degre": [35, 997, 1085, 1092, 1272, 1310, 1311, 1526, 1535, 1575, 1716, 1834, 1854, 1921, 1922, 1971, 1972, 2014, 2052, 2066, 2080], "freedom": [35, 997, 1921, 1922, 1971, 1972, 2052], "geometric_": [35, 2014, 2089], "0124": 35, "half_cauchi": 35, "half_norm": 35, "base_distribut": 35, "reinterpreted_batch_ndim": 35, "reinterpret": [35, 501, 2101], "diagon": [35, 60, 226, 262, 597, 598, 599, 600, 954, 993, 997, 1095, 1096, 1097, 1099, 1108, 1121, 1170, 1171, 1216, 1302, 1314, 1316, 1319, 1331, 1335, 1587, 1588, 1589, 1673, 1684, 1907, 1947, 1951, 1952, 1953, 1954, 1955, 2014, 2048, 2066, 2084, 2106], "multivari": [35, 1412, 2081], "multivariate_norm": 35, "mvn": 35, "scale_tril": 35, "diag": [35, 61, 1169, 1170, 1171, 1308, 1309, 1336, 1816, 1907, 1927, 1928, 2014, 2066], "diagn": 35, "inverse_gamma": 35, "2953": 35, "1729": [35, 2055], "lkj_choleski": 35, "lkj": 35, "matric": [35, 262, 688, 691, 943, 955, 956, 965, 966, 967, 968, 1096, 1216, 1293, 1302, 1303, 1304, 1306, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1322, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1335, 1336, 1337, 1345, 1353, 1362, 1364, 1377, 1476, 1535, 1597, 1631, 1730, 1736, 1814, 1816, 1826, 1874, 1900, 1904, 1905, 1927, 1928, 1951, 1952, 1954, 2014, 2033, 2052, 2058, 2080, 2086], "eta": [35, 1780, 1782, 1795], "proport": [35, 1493, 1494, 1495, 1532, 1574], "det": [35, 1310, 1311, 1332, 1353, 2014, 2066], "l": [35, 737, 762, 944, 953, 959, 966, 967, 968, 1108, 1230, 1231, 1272, 1292, 1302, 1303, 1308, 1309, 1310, 1311, 1315, 1319, 1320, 1362, 1364, 1435, 1438, 1439, 1440, 1453, 1461, 1464, 1465, 1472, 1477, 1485, 1486, 1488, 1491, 1492, 1496, 1508, 1517, 1519, 1526, 1528, 1532, 1533, 1542, 1558, 1575, 1576, 1578, 1684, 1742, 1751, 1759, 1761, 1786, 
1862, 1923, 2014, 2016, 2042, 2085, 2099, 2100, 2111], "lkjcorr": 35, "onion": 35, "3x3": [35, 1091, 1949], "3516": 35, "9361": 35, "1899": [35, 1375], "4748": 35, "8593": 35, "vine": 35, "2009": [35, 1816, 1928], "lewandowski": 35, "dorota": 35, "kurowicka": 35, "harri": [35, 1890], "joe": 35, "journal": [35, 1833], "1016": 35, "jmva": 35, "04": [35, 1151, 1350, 1883, 1890], "008": 35, "corrcholeski": 35, "log_norm": [35, 2066], "lowrank_multivariate_norm": 35, "cov_factor": 35, "cov_diag": 35, "covari": [35, 956, 993, 997, 1440, 1441, 1442, 1566, 1816], "covariance_matrix": 35, "2102": 35, "5429": [35, 1951], "woodburi": 35, "lemma": 35, "formula": [35, 90, 770, 771, 892, 894, 908, 909, 944, 953, 1230, 1231, 1298, 1364, 1365, 1540, 1600, 1601, 1691, 1796, 1829, 1882, 2020, 2048, 2052, 2069, 2074, 2080, 2098], "capacit": 35, "precision_matrix": 35, "mixture_same_famili": 35, "mixture_distribut": 35, "component_distribut": 35, "rightmost": [35, 956, 1234, 1235, 2016], "gaussian": [35, 1475, 1479, 1556, 1629, 1630, 1687, 1889, 2081], "gmm": 35, "bivari": 35, "categori": [35, 40, 86, 2016, 2018, 2065, 2067, 2069, 2071, 2083, 2105, 2107], "innermost": [35, 64, 235, 1226, 1235, 1862], "1338": 35, "mathbf": [35, 1293, 1731, 1732, 1765, 1768, 1795], "sigma": [35, 175, 379, 762, 1439, 1476, 1477, 1478, 1496, 1497, 1556, 1557, 1631, 1687, 1731, 1765, 1884, 1921, 1922, 1971, 1972, 2014], "triangular": [35, 966, 967, 968, 1302, 1303, 1309, 1311, 1319, 1320, 1327, 1330, 1331, 1333, 1335, 1586, 1587, 1588, 1589, 1673, 1684, 1802, 1826, 1951, 1952, 1953, 1954, 1955], "decomposit": [35, 52, 53, 64, 83, 966, 967, 968, 1216, 1302, 1303, 1308, 1309, 1310, 1311, 1312, 1314, 1319, 1320, 1321, 1322, 1327, 1331, 1332, 1336, 1337, 1362, 1364, 1730, 1814, 1816, 1820, 1826, 1927, 1928, 2012, 2058, 2098, 2101, 2110], "positivedefinit": 35, "lowercholeski": 35, "negative_binomi": 35, "halfopeninterv": 35, "mu": [35, 379, 1794, 1796], "one_hot_categor": 35, "onehot": 35, "5623": 35, "nonneg": [35, 1576, 1816, 1928, 2081], "pmf": 35, "mathrm": [35, 945, 1310, 1311, 1312, 1313, 1324, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1535, 1540, 1541, 1566, 1715, 1730, 1783, 1784, 1785, 1795, 1830, 2042, 2081], "relaxed_bernoulli": 35, "temperatur": [35, 1634, 2012], "parametr": [35, 1165, 1733, 1740, 1765, 1766, 1768, 2036, 2048], "relax": [35, 1008, 1042, 1288, 1289, 1736, 2070, 2102], "reparametriz": 35, "99": [35, 1284, 1794, 2013], "2951": [35, 1372], "3442": 35, "8918": 35, "9021": 35, "maddison": 35, "2017": [35, 1570, 1572, 1574, 1908, 2061], "reparametr": [35, 1634, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1748, 1768], "jang": 35, "relaxed_categor": 35, "1294": [35, 966], "2324": [35, 1272, 1526], "3859": 35, "2523": 35, "student": 35, "transformed_distribut": 35, "composit": [35, 60, 1169, 1320, 1730, 2013, 2016, 2020, 2046, 2049, 2055, 2065], "basedistribut": 35, "dx": [35, 52, 1091, 1438, 1949, 2014, 2048, 2049, 2081], "dy": [35, 52], "logist": [35, 1556, 1559, 1687, 2081], "sigmoidtransform": 35, "affinetransform": 35, "invert": [35, 64, 1263, 1304, 1313, 1314, 1320, 1326, 1333, 1335, 1338, 1339, 1353, 1522, 1523, 1524, 1951, 2014, 2058, 2075], "3418": 35, "upper": [35, 52, 53, 183, 184, 185, 596, 797, 954, 959, 966, 967, 968, 970, 1122, 1123, 1191, 1233, 1234, 1302, 1303, 1309, 1311, 1319, 1320, 1331, 1335, 1545, 1587, 1589, 1673, 1682, 1683, 1684, 1802, 1808, 1826, 1841, 1862, 1951, 1954, 1955, 2014, 2040, 2066, 2081, 2087], "von_mis": 35, "circular": [35, 1447, 1448, 1449, 1453, 1454, 1455, 1502, 
1503, 1504, 1671], "von": 35, "mise": 35, "unconstrain": [35, 1736], "angl": [35, 757, 887, 1092, 1353, 1512, 1820, 1834, 1877, 2012, 2014, 2066, 2080], "9777": 35, "radian": [35, 700, 887, 1092, 1834, 2014, 2066], "simul": [35, 490, 801, 803, 2069, 2070, 2073, 2098], "1979": 35, "152": [35, 619], "157": 35, "_rejection_sampl": 35, "88443": 35, "4784": [35, 1377], "symmetr": [35, 804, 822, 944, 953, 966, 967, 968, 1129, 1130, 1131, 1136, 1138, 1142, 1143, 1145, 1230, 1231, 1292, 1302, 1303, 1308, 1309, 1311, 1315, 1316, 1317, 1324, 1327, 1330, 1336, 1345, 1730, 1736, 1808, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2070, 2073], "x_ij": 35, "wu": [35, 1345], "chu": 35, "2018": [35, 1345, 1824], "sawyer": 35, "2007": 35, "anderson": 35, "w": [35, 61, 152, 762, 892, 894, 896, 903, 906, 908, 909, 917, 944, 953, 977, 997, 1167, 1189, 1213, 1230, 1231, 1269, 1312, 1428, 1429, 1436, 1437, 1439, 1441, 1442, 1446, 1454, 1455, 1461, 1465, 1466, 1468, 1469, 1470, 1477, 1489, 1490, 1498, 1509, 1510, 1520, 1521, 1531, 1532, 1533, 1538, 1539, 1561, 1577, 1578, 1580, 1581, 1597, 1605, 1623, 1624, 1632, 1668, 1674, 1675, 1731, 1732, 1765, 1768, 2014, 2020, 2033, 2034, 2040, 2042, 2048, 2052, 2067, 2068, 2085, 2109], "2003": 35, "ed": [35, 60, 1464, 1465, 2042, 2046, 2068, 2092, 2109, 2112], "odel": 35, "feiveson": 35, "1966": 35, "samplecovari": 35, "jasa": 35, "61": 35, "313": 35, "199": 35, "203": [35, 619], "ku": 35, "bloomfield": 35, "2010": [35, 2040], "ox": 35, "max_try_correct": 35, "bartlett": [35, 944], "singular": [35, 1304, 1318, 1319, 1325, 1327, 1329, 1330, 1336, 1337, 1345, 1353, 1362, 1731, 1736, 1816, 1927, 1928, 2058], "inf": [35, 52, 55, 260, 686, 688, 691, 692, 693, 888, 943, 959, 1261, 1262, 1264, 1266, 1267, 1304, 1319, 1325, 1329, 1332, 1342, 1353, 1416, 1516, 1560, 1570, 1684, 1721, 1742, 1751, 1771, 1855, 1862, 2041, 2042, 2049, 2058, 2081, 2087, 2111], "accordingli": [35, 64, 1282, 1496, 1736, 2035, 2075, 2107], "kl_diverg": 35, "kullback": [35, 1491, 1644], "leibler": [35, 1491, 1644], "notimplementederror": [35, 2020, 2024, 2068], "register_kl": 35, "type_p": 35, "type_q": 35, "pairwis": [35, 1485, 1535, 1575], "kl_normal_norm": 35, "ambigu": [35, 64, 1257, 1456, 1457, 1458, 1472, 1522, 1523, 1524, 1579, 2016], "runtimewarn": 35, "basep": 35, "derivedq": 35, "kl_version1": 35, "derivedp": 35, "baseq": 35, "kl_version2": 35, "tie": 35, "abstransform": 35, "cache_s": 35, "event_dim": 35, "affin": [35, 55, 474, 475, 476, 477, 478, 751, 753, 754, 755, 804, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1499, 1500, 1501, 1508, 1509, 1510, 1513, 1541, 1566, 1597, 1715, 2045, 2055], "cattransform": 35, "tseq": 35, "functor": [35, 1050, 1051], "submatrix": 35, "x0": [35, 2104], "t0": [35, 52, 1226, 1780, 2015], "exptransform": 35, "identity_transform": 35, "composetransform": 35, "corrcholeskytransform": 35, "uncontrain": 35, "euclidean": [35, 963, 1669], "x_i": [35, 60, 970, 1087, 1088, 1089, 1090, 1341, 1348, 1516, 1535, 1560, 1562, 1575, 1652, 1690, 1823, 1906, 1921, 1922, 1949, 1971, 1972, 1973, 2081], "stickbreakingtransform": 35, "r_i": 35, "tanh": [35, 579, 762, 767, 797, 888, 1475, 1477, 1478, 1496, 1497, 1525, 1542, 1544, 1568, 1630, 1663, 1696, 2014, 2033, 2034, 2040, 2066, 2072, 2080, 2106], "unsign": [35, 2073, 2083, 2086, 2087, 2102], "z_i": 35, "s_i": 35, "y_i": [35, 970, 1087, 1088, 1089, 1090, 1341, 1348, 1575, 1949, 1973, 2052], "sqrt": [35, 64, 379, 551, 675, 676, 762, 993, 1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 
1139, 1140, 1141, 1142, 1143, 1145, 1239, 1292, 1374, 1440, 1441, 1442, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1475, 1477, 1478, 1480, 1488, 1489, 1490, 1493, 1494, 1495, 1496, 1497, 1498, 1511, 1513, 1541, 1542, 1544, 1566, 1630, 1684, 1715, 1736, 1781, 1782, 1783, 1784, 1787, 1793, 1794, 1830, 1857, 1889, 1921, 1922, 2014, 2033, 2040, 2041, 2042, 2066, 2080, 2081, 2087, 2106], "cumulativedistributiontransform": 35, "copula": 35, "base_dist": 35, "independenttransform": 35, "base_transform": 35, "log_abs_det_jacobian": 35, "lowercholeskytransform": 35, "positivedefinitetransform": 35, "powertransform": 35, "expon": [35, 270, 271, 282, 470, 471, 1151, 1158, 1296, 1326, 1430, 1514, 1669, 1823, 1833, 1870, 2014, 2083, 2086, 2106], "reshapetransform": 35, "in_shap": 35, "out_shap": 35, "softplustransform": 35, "tanhtransform": 35, "softmaxtransform": 35, "biject": 35, "hmc": 35, "stacktransform": 35, "stick": [35, 2067], "aris": [35, 64, 2042, 2098], "memoiz": [35, 2101], "_call": 35, "_invers": 35, "codomain": [35, 2042], "iff": [35, 1532], "weaker": [35, 2045], "pseudoinvers": [35, 1313, 1330, 1338], "monoton": [35, 1525, 1663, 1862], "forward_shap": 35, "inverse_shap": 35, "corr_choleski": 35, "greater_than": 35, "greater_than_eq": 35, "integer_interv": 35, "less_than": 35, "lower_choleski": 35, "lower_triangular": 35, "nonnegative_integ": 35, "one_hot": [35, 2014, 2066], "positive_integ": 35, "positive_semidefinit": 35, "positive_definit": 35, "real_vector": 35, "unit_interv": 35, "is_discret": 35, "constrain": [35, 1191, 1198, 1563, 2016, 2055], "_cat": 35, "dependent_properti": 35, "_dependentproperti": 35, "_greaterthan": 35, "_greaterthaneq": 35, "_independentconstraint": 35, "_integerinterv": 35, "_interv": 35, "half_open_interv": 35, "_halfopeninterv": 35, "_lessthan": 35, "_multinomi": 35, "_stack": [35, 2066], "constraintregistri": 35, "biject_to": 35, "transform_to": 35, "overparameter": 35, "rotat": [35, 1854, 1927], "hamiltonian": 35, "mont": 35, "carlo": 35, "invari": [35, 52, 1757, 1902, 1909, 1910, 1911, 1912, 1913, 1914, 2077, 2080, 2096, 2110], "potential_energi": 35, "cheap": [35, 1187, 1430, 2102], "svi": 35, "fewer": [35, 52, 60, 695, 697, 698, 701, 969, 1293, 1294, 1360, 1370, 1372, 1373, 1375, 1378, 1417, 1420, 1479, 1614, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 1824, 1908, 1921, 1922, 1926, 1945, 1971, 1972, 2022, 2043, 2060, 2087, 2113], "my_constraint": 35, "my_transform": 35, "myconstraintclass": 35, "my_factori": 35, "mytransform": 35, "param1": [35, 2055], "param2": [35, 2055], "constraint_registri": 35, "my_registri": 35, "construct_transform": 35, "myconstraint": 35, "from_dlpack": [36, 883, 2012], "ext_tensor": [36, 1159], "extern": [36, 1011, 1159, 1288, 1967, 2029, 2044, 2045, 2058, 2063, 2065, 2092], "immut": [36, 1159, 2016], "__dlpack__": [36, 1159], "capsul": [36, 883, 1159], "pycapsul": [36, 1159], "to_dlpack": [36, 1159, 2012], "t2": [36, 689, 690, 883, 1105, 1159, 1226, 1716, 1969, 2075, 2076], "dltensor": [36, 1159], "t3": [36, 1159, 2076], "idiomat": 36, "inde": [36, 2013, 2023, 2068, 2077, 2107], "monitor": [37, 48, 50, 88, 1010, 1385, 1810, 1981, 2012, 2045, 2053, 2115], "unhealthi": 37, "tear": 37, "react": [37, 2107], "decentr": 37, "diagram": [37, 47, 1272, 1526, 2070, 2077, 2100, 2102, 2111], "elasticag": 37, "quad": [37, 1438, 1439, 1461, 1492, 1517, 1533, 1576, 1839], "group_result": 37, "is_fail": [37, 39], "exit_cod": 37, "return_valu": [37, 39, 45, 2099, 2100], "get_worker_group": 37, "workergroup": [37, 48], 
"mutabl": [37, 1288, 2013, 2020, 2068, 2085], "implementor": 37, "defens": 37, "retri": [37, 40, 50, 1064, 2075, 2077], "max_restart": [37, 39, 48], "runresult": 37, "workerspec": [37, 39, 48, 50], "local_world_s": [37, 39, 48], "rdzv_handler": [37, 39, 47], "monitor_interv": [37, 39], "local_addr": [37, 47], "blueprint": 37, "homogen": [37, 48], "rendezvoushandl": [37, 39, 47, 48], "rdzv": [37, 46, 48], "chose": [37, 2041, 2098], "tee": [37, 45], "consol": [37, 41, 44, 45, 48, 2085], "get_entrypoint_nam": 37, "__qualname__": 37, "workerst": 37, "unknown": [37, 53, 1718, 1719, 2077], "unrecover": 37, "interrupt": [37, 2032, 2109], "succeed": [37, 47, 1362, 2099, 2102], "uncaught": [37, 40], "unhandl": 37, "recov": [37, 901, 1129, 1332, 1364, 1716, 1757, 1923, 2041, 2045, 2050, 2052, 2071], "is_run": 37, "role_rank": [37, 48], "role_world_s": [37, 48], "pid": [37, 40, 48, 50, 1566, 2050, 2069], "local_elastic_ag": 37, "localelasticag": [37, 39, 50], "logs_spec": [37, 45], "start_method": [37, 39, 45, 50, 2032], "exit_barrier_timeout": 37, "log_line_prefix_templ": 37, "advis": [37, 546, 619, 868, 2057], "torchelastic_enable_file_tim": 37, "torchelastic_timer_fil": 37, "role_nam": 37, "trainer0": [37, 2075], "foobar": [37, 40, 44, 45], "templat": [37, 975, 1050, 1051, 2067], "substitut": [37, 47, 64, 83, 1285, 2042, 2083], "shared_queu": 37, "get_context": [37, 50, 2057], "nproc_per_process": 37, "other_param": [37, 50], "usr": [37, 45, 47, 2045], "bin": [37, 45, 47, 302, 303, 821, 946, 1233, 1234, 1235, 2014, 2016, 2066, 2085, 2108], "simpleelasticag": 37, "scaffold": [37, 2099], "_assign_worker_rank": 37, "group_world_s": 37, "role_info": 37, "front": [37, 256, 975, 1345, 1529, 2034, 2049], "base_global_rank": 37, "_exit_barri": 37, "guard": [37, 66, 71, 75, 682, 975, 1186, 1187, 1191, 1197, 1199, 1200, 1201, 1207, 1208, 1211, 1212, 2022, 2057, 2066, 2068, 2089, 2102, 2107, 2111, 2112], "_initialize_work": 37, "worker_group": 37, "fresh": [37, 64, 1099, 1190, 1193, 1865, 1895, 2011], "start_work": 37, "_stop_work": 37, "optimist": 37, "deleg": [37, 2099], "_monitor_work": 37, "_rendezv": 37, "_restart_work": 37, "_shutdown": 37, "death_sig": 37, "sigterm": 37, "is_restart": 37, "_start_work": [37, 45], "gracefulli": [37, 47, 48, 60, 1303, 2049, 2102], "meaning": [37, 40, 41, 1064, 2045, 2105], "meaningless": 37, "intention": [37, 1105, 1973, 2053, 2075], "torchelastic_health_check_port": 37, "tcp": [37, 47, 48, 2075], "health_check_serv": 37, "healthcheckserv": 37, "alive_callback": 37, "dead": [37, 64, 2110], "create_healthcheck_serv": 37, "control_plan": 38, "worker_main": 38, "_workerserv": 38, "torch_worker_server_socket": 38, "ship": [39, 1166, 2044, 2075], "programmat": [39, 64, 2055], "my_launch": 39, "argv": [39, 51], "trainer_entrypoint_fn": 39, "fn_arg": 39, "run_result": 39, "tricki": [39, 56, 61, 2042, 2055, 2077, 2096, 2099], "myrendezvoushandl": 39, "elastic_ag": 39, "metrichandl": [39, 44], "mymetrichandl": 39, "metric_data": [39, 44], "metricdata": 39, "sink": [39, 44, 2029], "eventhandl": 39, "cloudwatch": 39, "nulleventhandl": 39, "myeventhandl": 39, "invalid": [40, 45, 978, 1272, 1526, 2041, 2042, 2059, 2063, 2065, 2068, 2101], "infra": [40, 2110], "start_process": [40, 45, 2032], "torchelastic_error_fil": 40, "smallest": [40, 53, 964, 1294, 1304, 1325, 1329, 1345, 1825, 1946, 2087, 2111, 2116], "timestamp": [40, 41, 44, 48, 2029, 2069, 2085, 2111], "error_handl": 40, "sugar": [40, 2015], "get_error_handl": 40, "childfailederror": 40, "get_first_failur": 40, 
"dump_error_fil": 40, "error_fil": [40, 45], "exitcod": [40, 48], "nanni": 40, "accur": [40, 929, 1010, 1226, 1348, 1385, 1439, 1467, 1833, 1949, 2020, 2028, 2045, 2076, 2092, 2101, 2115], "torchelastic_ag": 40, "trainer_0": 40, "trainer_1": 40, "json": [40, 45, 50, 2054, 2069, 2105, 2109], "trainer_n": 40, "errorhandl": 40, "record_except": 40, "processfailur": 40, "test_ev": 41, "eventsourc": 41, "get_logging_handl": 41, "eventmetadatavalu": 41, "readm": [42, 43, 64, 794, 2035], "telemetri": 44, "timeseri": 44, "metric_group": 44, "metric_nam": 44, "sensibl": 44, "my_modul": [44, 53, 55, 64, 2013, 2068], "nullmetricshandl": 44, "consolemetricshandl": 44, "my_method": 44, "put_metr": 44, "calculate_lat": 44, "succinctli": 44, "baz": [44, 64, 857, 2068, 2087], "leaf_modul": 44, "classnam": [44, 2016], "threw": [44, 2113], "my_app": 44, "consolemetrichandl": 44, "toi": [44, 2111], "stdout": [44, 45, 49, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 2065], "stdoutmetrichandl": 44, "1574213883": 44, "4182858": 44, "my_metr": 44, "1574213940": 44, "5237644": 44, "nullmetrichandl": 44, "class_nam": [44, 64], "def_nam": 44, "leaf": [44, 53, 152, 224, 337, 450, 490, 505, 506, 793, 840, 842, 882, 896, 1175, 1778, 1942, 2035, 2041, 2042, 2051, 2065, 2091, 2101], "metric_valu": 44, "metric_group_nam": 44, "popen": [45, 49], "log_dir": [45, 2029, 2085], "stderr": [45, 49, 2011, 2027], "err": 45, "echo": 45, "hello": [45, 60, 2015, 2068, 2099], "pcontext": 45, "multiprocesscontext": 45, "subprocesscontext": 45, "log_line_prefix": 45, "keyset": 45, "bitmask": 45, "bar0": 45, "bar1": 45, "file1": 45, "file2": 45, "short": [45, 64, 736, 765, 1108, 1200, 1269, 1445, 1496, 1497, 1616, 1777, 1874, 1923, 2015, 2020, 2033, 2042, 2043, 2055, 2075, 2082, 2083, 2086, 2099, 2100], "ing": 45, "cmd": [45, 48], "forkserv": [45, 1716, 2032, 2057], "local_ranks_filt": 45, "processcontext": [45, 2032], "superset": [45, 48, 2020], "tee_stdout": 45, "tee_stderr": 45, "runprocsresult": 45, "defaultlogsspec": 45, "logsspec": 45, "reifi": 45, "rdzv_run_id": 45, "attempt_": 45, "logsdest": 45, "num_nod": [46, 48], "trainers_per_nod": 46, "num_allowed_failur": 46, "job_id": [46, 47, 48], "endpoint": [46, 47, 48], "host_node_addr": [46, 48], "min_siz": [46, 48], "num_allowed_failures_or_membership_chang": 46, "node1": [46, 48], "29400": [46, 48], "sidecar": [46, 47], "agre": [47, 1331, 2077], "resum": [47, 1802, 1808, 1810, 2075, 2077, 2085, 2100, 2102], "retryabl": 47, "announc": 47, "lose": [47, 51, 55, 193, 210, 2034], "train_loop": [47, 865], "arriv": [47, 48, 2075, 2077], "dynamicrendezvoushandl": 47, "rendezvousbackend": 47, "c10drendezvousbackend": 47, "etcdrendezvousbackend": 47, "supersed": [47, 1189], "etcdrendezvoushandl": 47, "my_run_id": 47, "from_backend": 47, "run_id": [47, 48, 50], "min_nod": 47, "max_nod": 47, "rendezvousparamet": 47, "admit": [47, 48, 2045], "get_as_bool": 47, "get_as_int": 47, "rendezvoushandlerregistri": 47, "get_run_id": 47, "is_clos": 47, "set_clos": 47, "next_rendezv": 47, "rendezvousinfo": 47, "rendezvousclosederror": 47, "rendezvousconnectionerror": 47, "rendezvousstateerror": 47, "rendezvoustimeouterror": 47, "num_nodes_wait": 47, "shutdown": [47, 2012, 2025, 2075, 2076], "use_agent_stor": 47, "lifecyl": 47, "rendez": 47, "impl": [47, 52, 1050, 1278, 2020], "rendezvousstoreinfo": 47, "bootstrap_store_info": 47, "bootstrap": [47, 2061], "rendezvouserror": 47, "rendezvousgracefulexiterror": 47, "dynamic_rendezv": 47, "create_handl": 47, "join_timeout": 
47, "600": 47, "last_call_timeout": 47, "close_timeout": 47, "rendezvoustimeout": 47, "get_stat": [47, 90, 2045], "fenc": 47, "set_stat": [47, 90, 2045], "last_cal": 47, "heartbeat": [47, 2115], "keep_al": 47, "c10d_rendezvous_backend": 47, "create_backend": 47, "store_typ": 47, "read_timeout": 47, "60": [47, 50, 938, 1479, 1812, 1939, 1944, 2029, 2075, 2100], "is_host": 47, "skip": [47, 53, 862, 977, 1108, 1162, 1195, 1302, 1303, 1435, 1436, 1437, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 1895, 1966, 2013, 2041, 2042, 2045, 2047, 2048, 2055, 2067, 2069, 2075, 2098, 2102, 2103, 2109], "cname": 47, "fqdn": [47, 48], "etcd_rendezvous_backend": 47, "ssl_cert": 47, "ssl": 47, "certif": 47, "ssl_cert_kei": 47, "privat": [47, 55, 1187, 2045, 2048, 2068, 2096, 2102], "ca_cert": 47, "rool": 47, "key_prefix": 47, "ttl": 47, "hour": 47, "etcd_rendezv": 47, "rdzv_impl": 47, "etcdrendezv": 47, "etcd_address": 47, "min_work": 47, "max_work": 47, "noqa": 47, "w605": 47, "2379": [47, 1370], "etcd_prefix": 47, "etcdstor": 47, "etcd_stor": 47, "etcd_client": 47, "etcd_store_prefix": 47, "piggyback": 47, "num": [47, 747, 748, 762, 1412, 1477, 1480, 1496, 1532, 1542, 1570, 2057], "lookuperror": 47, "override_timeout": 47, "etcdserv": 47, "cumbersom": [47, 2016], "highli": [47, 1430, 1716, 2011, 2056, 2063, 2065, 2080, 2087, 2114], "etcd_serv": 47, "data_dir": 47, "v3": [47, 48], "torchelastic_etcd_binary_path": 47, "get_client": 47, "etcd_binary_path": 47, "entry_point": [48, 2097], "train_script": 48, "aforment": 48, "suffic": [48, 64], "compliant": [48, 51], "num_train": 48, "wors": [48, 1810, 1964, 2102], "port_k": 48, "etcd": 48, "v2": [48, 1341, 1815, 2011], "revis": 48, "physic": [48, 235, 1126, 1144, 2017, 2044, 2057, 2080, 2102], "localworkergroup": 48, "rdzv_id": 48, "rdzv_backend": [48, 51], "rdzv_endpoint": [48, 51], "max_nnod": 48, "torchelastic_restart_count": 48, "far": [48, 1282, 1632, 2098], "torchelastic_max_restart": 48, "python_exec": 48, "gang": 48, "departur": 48, "surviv": 48, "kill": [48, 50, 2032, 2050], "frequenc": [48, 946, 997, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1136, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1269, 1430, 1468, 1469, 1623, 1624, 1802, 1923, 2110], "ness": [48, 2101], "load_checkpoint": [48, 51], "checkpoint_path": [48, 51], "dataset": [48, 51, 66, 800, 1430, 1439, 1491, 2012, 2050, 2054, 2057, 2061, 2067, 2070, 2071, 2085], "train_step": 48, "should_checkpoint": 48, "save_checkpoint": [48, 51], "subprocess_handl": 49, "get_subprocess_handl": 49, "local_rank_id": 49, "fd": [49, 2085], "acquir": [50, 64, 2055, 2077, 2080], "deadlin": 50, "message_queu": 50, "localtimerserv": 50, "max_interv": 50, "trainer_func": 50, "localtimercli": 50, "expiri": 50, "timer_cli": 50, "countdown": 50, "timefram": [50, 2075], "elig": [50, 2077], "reap": 50, "timerserv": 50, "mp_queue": 50, "daemon": [50, 2032], "filetimerserv": 50, "file_path": 50, "log_ev": [50, 2029], "filetimercli": 50, "fifo": 50, "watchdog": [50, 2115], "filetimerrequest": 50, "sigkil": 50, "named_pip": 50, "mkfifo": 50, "timercli": 50, "timerrequest": 50, "scope_id": 50, "expiration_tim": 50, "acquisit": 50, "whatev": [50, 64, 450, 1186, 1716, 1866, 1951, 2016, 2049, 2101, 2102], "request_queu": 50, "entiti": [50, 64], "clear_tim": 50, "get_expired_tim": 50, "register_tim": 50, "timer_request": 50, "debug_info_log": 50, "log_debug_info_for_expired_tim": 50, "expired_tim": 50, "use_env": 51, "expositori": 51, "worst": [51, 2048], "total_num_epoch": 51, 
"visit": [51, 55, 2065, 2105], "WILL": 52, "BE": 52, "aot": [52, 682, 2022, 2092, 2093, 2102, 2103, 2111], "exported_program": [52, 2063], "exportedprogram": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "arg1_1": 52, "exportgraphsignatur": [52, 53, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "user_input": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "user_output": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "inputs_to_paramet": 52, "inputs_to_buff": 52, "buffers_to_mut": 52, "backward_signatur": 52, "assertion_dep_token": 52, "sound": [52, 64, 978, 1181, 1555, 2085, 2103, 2111], "alias": [52, 55, 901, 1166, 1177, 1643, 1976, 2015, 2016, 2020, 2048, 2101], "stacktrac": [52, 53, 66, 75, 76, 77], "leverag": [52, 975, 2062, 2063, 2075, 2092, 2095, 2099, 2104, 2114], "_dynamo": [52, 66, 77, 975, 982, 2047, 2092, 2096, 2097, 2098, 2099, 2100, 2107, 2109, 2111], "massiv": [52, 2099], "pt2": [52, 2098, 2101], "artifact": [52, 682, 1923, 2022, 2054, 2068, 2093, 2099, 2109, 2111], "untrac": [52, 64], "disjoint": [52, 2013], "usabl": [52, 63, 2016, 2064, 2065, 2099, 2100], "symbolic_trac": [52, 53, 64, 2070], "comprehens": [52, 83, 1890, 2016, 2017, 2034, 2045, 2053, 2070, 2093], "simpler": [52, 61, 1177, 1976, 2013, 2042, 2048, 2052, 2055, 2099], "straight": [52, 1438, 1634], "conv": [52, 64, 704, 705, 706, 707, 708, 709, 710, 711, 712, 794, 795, 798, 816, 857, 863, 1272, 1282, 1288, 1289, 1453, 1454, 1455, 1456, 1457, 1458, 1526, 1527, 1533, 1725, 1726, 1728, 2026, 2040, 2045, 2058, 2065, 2070, 2071, 2072, 2073, 2105, 2107], "in_channel": [52, 714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 740, 741, 742, 743, 744, 745, 1282, 1453, 1454, 1455, 1456, 1457, 1458, 1502, 1503, 1504, 1505, 1506, 1507], "out_channel": [52, 714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 740, 741, 742, 743, 744, 745, 1282, 1453, 1454, 1455, 1456, 1457, 1458, 1502, 1503, 1504, 1505, 1506, 1507], "kernel_s": [52, 714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 740, 741, 742, 743, 744, 745, 770, 771, 784, 785, 1272, 1282, 1435, 1436, 1437, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1473, 1474, 1493, 1494, 1495, 1502, 1503, 1504, 1505, 1506, 1507, 1519, 1520, 1521, 1522, 1523, 1524, 1526, 1578, 1599, 1600, 1601, 1610, 1611, 1612, 1626, 1627, 1628, 1653, 1654, 1655, 1657, 1658, 1659, 1660, 1661, 1662, 1702, 1831, 1832, 2014, 2085, 2106], "maxpool": [52, 798, 1473, 1474, 1627, 1628, 2065], "maxpool2d": [52, 785, 1523, 1527, 1658, 1661, 2065, 2072], "256": [52, 1445, 1823, 2045, 2065], "example_kwarg": 52, "arg2_1": 52, "arg3_1": 52, "max_pool2d_with_indic": [52, 2014, 2066, 2106], "getitem": [52, 66, 69, 74, 75, 78, 2099, 2110], "85": [52, 64, 1529, 1808, 2105, 2109], "l__self___conv": 52, "lift": [52, 53, 61, 1177, 1976, 2034, 2066, 2101], "get_attr": [52, 64, 82, 1185], "harden": 52, "oncal": 52, "proxytensor": 52, "contextlib": [52, 66, 70], "contextmanag": 52, "__enter__": [52, 2016], "__exit__": [52, 64, 2016, 2017, 2068], "exc_typ": [52, 2017], "exc_valu": [52, 2017], "exc": 52, "expot": 52, "branch1": 52, "64": [52, 58, 59, 61, 938, 1090, 1170, 1171, 1175, 1427, 1428, 1429, 1431, 1432, 1433, 1439, 1555, 1684, 1863, 1966, 2020, 2035, 2060, 2061, 2063, 2065, 2070, 2074, 2080, 2083, 2085, 2086, 2102, 2104], "branch2": 52, "128": [52, 619, 722, 730, 731, 758, 766, 796, 821, 822, 1443, 1460, 1487, 1513, 1535, 1575, 1576, 1614, 1684, 2034, 2035, 2063, 2070, 2072, 2073, 2075, 2080, 2083, 2086, 2109], "x2": 
[52, 66, 72, 963, 1320, 1518, 1575, 1576, 1614, 1672, 2014, 2106], "out1": [52, 1166, 2014, 2102], "out2": [52, 1166, 2014, 2102], "arg4_1": 52, "arg5_1": 52, "arg6_1": 52, "permute_1": 52, "addmm_1": 52, "relu_1": [52, 64], "l__self___buff": 52, "rangeconstraint": [52, 53], "min_val": [52, 780, 1484, 1638, 1639, 2014, 2106], "max_val": [52, 780, 1484, 1638, 1639, 2014, 2106], "9223372036854775806": [52, 66, 74, 75], "range_constraint": [52, 53], "dimx": 52, "dimi": 52, "slice_1": [52, 66, 75, 77], "9223372036854775807": [52, 66, 75, 897, 940, 1270, 1757, 2029, 2079, 2113], "input_spec": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "inputspec": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "inputkind": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "tensorargu": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "output_spec": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "outputspec": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "outputkind": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2063], "valuerang": [52, 1186, 1187], "is_bool": 52, "saved_exported_program": 52, "fold": [52, 1578, 2014, 2026, 2065, 2073, 2080], "argumen": 52, "example_input": [52, 819, 820, 864, 865, 1284, 1288, 2070, 2091, 2093, 2097, 2100, 2101], "dynamic_dim": [52, 1180, 1181, 1187], "rewritten": [52, 2052, 2099], "primtivi": 52, "symint": [52, 1187, 1193, 1195, 1196, 1197, 1198, 1199, 1200, 1205, 1207, 1208, 1209, 1931, 1932, 1934, 1935, 1936, 2020, 2089, 2099, 2101, 2106], "symfloat": [52, 1187, 1931, 1932, 2089, 2098, 2099], "symbool": [52, 1187, 1204, 1211, 1936, 2089, 2098, 2099], "const": [52, 1479, 2054], "add_1": [52, 64, 66, 69, 71, 74, 75, 1166], "add_2": [52, 64, 66, 69, 71, 74, 75], "shot": 52, "nearli": [52, 152, 896, 917, 1816, 2057, 2075, 2111], "imposs": [52, 1177, 1197, 1976, 2041], "exportdb": [52, 2089], "combinatori": 52, "explod": [52, 1558], "cond": [52, 53, 60, 66, 69, 75, 2058], "faketensor": [52, 1188, 2020, 2063, 2101], "register_fak": [52, 2020], "dynamo": [52, 64, 81, 682, 977, 1187, 2022, 2047, 2065, 2092, 2097, 2098, 2101, 2102, 2103, 2105, 2107, 2108, 2109, 2111], "preserve_module_call_signatur": 52, "dim0_x": [52, 66, 68, 74, 75], "verbatim": 52, "bake": [52, 64, 66, 75, 1276, 1282, 1783, 1784, 1796, 2067], "diverg": [52, 1201, 1288, 1289, 1491, 1644, 2012, 2016], "register_dataclass": [52, 53], "ordereddict": [52, 1272, 1526, 1527, 1536, 1555, 1706, 1747, 2014, 2055, 2060, 2087], "debug_nam": 52, "_constraint": 52, "unbound": [52, 1186, 1191, 1197], "t1": [52, 689, 690, 883, 1105, 1226, 1716, 1969, 2015, 2075, 2076], "extra_fil": [52, 1280, 1283], "opset_vers": [52, 2063, 2065, 2066], "opset": [52, 2063, 2065, 2066, 2097, 2106], "txt": [52, 1280, 1283, 2068, 2093, 2111], "decod": [52, 1344, 1570, 1571, 1572, 1716, 2068], "utf": [52, 1344, 2065, 2068], "expected_opset_vers": 52, "rb": [52, 1280, 1344], "seek": [52, 1280, 1344, 2068, 2070], "cl": [52, 66, 68, 141, 2048, 2070, 2075, 2112], "serialized_type_nam": 52, "treespec": 52, "inputdataclass": 52, "outputdataclass": 52, "shapescollect": 52, "builder": [52, 2099], "tensor_x": [52, 2063], "tensor_i": 52, "tensor_z": 52, "refine_dynamic_shapes_from_suggested_fix": 52, "msg": [52, 1067, 1068, 1070, 1187, 2069, 2087], "refin": [52, 1279, 2034], "constraintviol": 52, "straightforward": [52, 64, 2023, 2035, 2055, 2067, 2080, 2097], "ti": [52, 1165, 1766, 1855, 2020, 2051, 2101], "_dx": 52, "1024": [52, 2044, 2045, 2093, 
2102], "_derivedconstraint": 52, "graph_signatur": [52, 53], "module_call_graph": 52, "tensor_const": 52, "run_decomposit": [52, 2063], "decomp_t": 52, "joint": [52, 682, 2022], "exportbackwardsignatur": 52, "gradients_to_paramet": 52, "gradients_to_user_input": 52, "loss_output": 52, "gurante": 52, "getattr": [52, 64, 2014, 2048, 2060, 2075], "parameters_buffers_constant_tensor": 52, "flattened_user_input": 52, "mutated_input": 52, "flattened_user_output": 52, "custommodul": [52, 840, 2063, 2070], "my_paramet": [52, 2015, 2063], "register_buff": [52, 1272, 1526, 1708, 2013, 2015, 2048, 2055, 2063], "my_buffer1": [52, 2063], "my_buffer2": [52, 2063], "add_tensor": [52, 53], "call_funct": [52, 64, 66, 80, 82, 84, 85, 1185, 2097, 2099, 2100, 2110, 2111], "mul_tensor": 52, "mul_tensor_1": 52, "add_tensor_1": 52, "add_tensor_2": 52, "buffer_mut": [52, 2063], "modulecallsignatur": 52, "symintargu": [52, 66, 75], "constantargu": [52, 66, 68], "customobjargu": 52, "tokenargu": 52, "in_spec": 52, "_pytre": [52, 66], "out_spec": 52, "modulecallentri": 52, "nonetyp": [52, 2014, 2016], "replace_all_us": 52, "get_replace_hook": 52, "class_fqn": 52, "unflatten": [52, 55, 2014, 2033, 2034, 2036, 2066, 2084], "flatargsadapt": 52, "target_spec": 52, "input_args_with_path": 52, "codegen": [52, 53, 64, 2048, 2102, 2105, 2111], "flat_args_adapt": 52, "hierachi": 52, "swap": [52, 62, 64, 417, 619, 790, 791, 792, 795, 799, 867, 1278, 1362, 1575, 1576, 1700, 1701, 1948, 1969, 2014, 2017, 2045, 2070, 2080, 2082, 2090], "submod": [52, 64], "new_mod": 52, "unflattenedmodul": 52, "bear": 53, "mlir": 53, "soundli": 53, "audienc": 53, "realiz": [53, 1870], "implic": [53, 1270, 1288, 2048, 2075, 2080], "bundl": [53, 2054], "notabl": [53, 975, 2016, 2086, 2093], "graph_modul": [53, 862, 2100, 2110], "sympi": [53, 1187, 1194, 2098], "i0": [53, 308, 1292, 1889, 2014, 2066, 2081], "collorari": 53, "textual": 53, "machineri": [53, 64, 2048, 2099, 2102, 2109], "op_nam": [53, 2020, 2063], "arg4": 53, "arg5": 53, "compact": [53, 1124, 1125, 1127, 1315, 1317, 1320, 2048, 2065, 2070], "args1": 53, "add1": 53, "predefin": [53, 2065], "readign": 53, "referenc": [53, 64, 1011, 1185, 1272, 1526, 1951, 2015, 2042, 2065, 2075, 2100], "19": [53, 687, 748, 968, 1523, 2013, 2065, 2080, 2100, 2105], "dummy_help": 53, "helper_util": 53, "89": [53, 619, 1834, 2105], "nn_module_stack": 53, "came": [53, 2099], "self_linear": 53, "self_sequenti": 53, "source_fn_stack": 53, "source_fn": 53, "encapsul": [53, 63, 64, 1188, 2063, 2075, 2096], "control_flow": [53, 66, 69, 74, 75, 78], "x_1": [53, 66, 1087, 1088, 1089, 1090, 1340, 1443, 1459, 1460, 1603, 1614, 1949], "num_us": [53, 64], "y_1": [53, 1949], "higher_ord": [53, 66, 69, 74, 75, 78], "liter": [53, 1777, 2014, 2017, 2034, 2068, 2110, 2113], "dim_ord": 53, "tensormeta": 53, "promot": [53, 55, 83, 687, 948, 951, 960, 1050, 1103, 1151, 1153, 1154, 1155, 1156, 1411, 1464, 1465, 1466, 1470, 1614, 1825, 1845, 1846, 1852, 1867, 1924, 1934, 2016, 2063, 2083, 2087, 2101, 2102, 2106], "max_pool2d_with_index": 53, "add_on": 53, "ph_0": 53, "jax": [53, 56, 59, 60, 61, 2042, 2049, 2099, 2102], "int64_t": [53, 1777], "scalartyp": [53, 2106], "memoryformat": [53, 2106], "memory_format": [53, 64, 157, 172, 174, 177, 180, 181, 182, 191, 197, 208, 211, 235, 242, 269, 299, 327, 333, 395, 501, 502, 527, 582, 972, 1109, 1110, 1164, 1185, 1272, 1526, 1716, 1723, 1724, 1776, 1836, 1838, 1840, 2010, 2012, 2014, 2048, 2087, 2106], "_register_pytree_nod": 53, "sharding_strategi": 55, "auto_wrap_polici": 55, 
"backward_prefetch": 55, "backwardprefetch": [55, 2012], "backward_pr": [55, 1272, 1526], "mixed_precis": [55, 1716], "ignored_modul": 55, "param_init_fn": 55, "sync_module_st": 55, "forward_prefetch": [55, 2051], "limit_all_gath": [55, 2051], "use_orig_param": 55, "ignored_st": 55, "inspir": [55, 1786, 2048, 2104], "deepspe": 55, "shorten": 55, "sharded_modul": 55, "0001": [55, 64, 1089, 1317, 1514, 1650, 1780, 1810, 2014, 2067], "dev_id": 55, "shard_grad_op": 55, "unshard": [55, 2051], "full_shard": 55, "summon_full_param": 55, "with_grad": 55, "gap": [55, 868, 1842, 2012, 2070, 2109], "delai": [55, 488, 1716, 2077, 2102], "reacquir": 55, "nccl_cross_nic": 55, "no_sync": [55, 1716], "cpuoffload": [55, 2012], "modulewrappolici": 55, "custompolici": 55, "proceed": [55, 1010, 1385, 1479, 1981, 2076, 2077], "nonwrapped_numel": 55, "travers": [55, 838, 1196, 1716, 2047, 2048, 2076, 2090, 2091], "subtre": 55, "size_based_auto_wrap_polici": 55, "exce": [55, 1350, 2045, 2057], "100m": 55, "numel": [55, 437, 446, 973, 997, 1139, 1257, 2014, 2033, 2042, 2066, 2079, 2080], "custom_auto_wrap_polici": 55, "min_num_param": 55, "1e8": 55, "my_auto_wrap_polici": 55, "functool": [55, 58, 66, 844, 1901, 2048, 2087], "1e5": 55, "mixedprecis": [55, 2012], "granular": [55, 82, 1286, 2041, 2109], "is_meta": [55, 2066], "reset_paramet": [55, 1541, 1715], "torchdistx": 55, "deferred_init": 55, "materialize_modul": 55, "my_init_fn": 55, "fullstatedictconfig": [55, 2012], "flatparamet": 55, "unifi": [55, 2012, 2034, 2099], "alten": 55, "distributed_device_mesh": 55, "check_is_root": 55, "clip_grad_norm_": [55, 1720, 2041], "max_norm": [55, 747, 748, 1468, 1469, 1623, 1624, 1720, 1721, 2014, 2041], "norm_typ": [55, 747, 748, 1468, 1469, 1493, 1494, 1495, 1623, 1624, 1653, 1654, 1655, 1720, 1721, 2014], "clip": [55, 993, 1720, 1721, 1722, 2014, 2036, 2049, 2065, 2066], "infin": [55, 995, 1262, 1264, 1266, 1267, 1416, 1438, 1519, 1520, 1521, 1657, 1658, 1659, 1721, 1785, 1894, 1906, 2080, 2081], "no_shard": 55, "largest": [55, 592, 946, 1152, 1304, 1325, 1327, 1329, 1330, 1345, 1670, 1731, 1946, 2014, 2015, 2016, 2106, 2110, 2116], "fp32": [55, 783, 796, 819, 1093, 1716, 2045, 2051, 2058, 2070, 2071, 2073, 2104], "flatten_sharded_optim_state_dict": 55, "sharded_optim_state_dict": 55, "shard_full_optim_state_dict": 55, "fsdp_modul": 55, "root_onli": 55, "full_optim_state_dict": 55, "optim_input": 55, "rank0_onli": 55, "get_state_dict_typ": 55, "statedictset": [55, 2012], "assertionerror": [55, 64, 66, 68, 73, 75, 86, 736, 1778, 2087, 2111], "differen": 55, "intercept": [55, 64, 2048, 2103], "occurr": [55, 404, 897, 940, 1270, 1373, 1757, 1960, 1961, 2079, 2098], "statedicttyp": 55, "fulloptimstatedictconfig": [55, 2012], "set_state_dict_typ": 55, "save_a_checkpoint": 55, "load_a_checkpoint": 55, "optim_state_dict_to_load": 55, "is_named_optim": 55, "load_directli": 55, "original_osd": 55, "namedoptim": 55, "keyedoptim": 55, "torchrec": [55, 2012], "gossipgrad": [55, 1716], "latter": [55, 689, 793, 1272, 1496, 1526, 1533, 2049, 2055, 2057, 2059, 2099], "rekey_optim_state_dict": 55, "optim_state_key_typ": 55, "loadabl": [55, 2011], "wrapped_model": 55, "wrapped_optim": 55, "full_osd": 55, "nonwrapped_model": 55, "nonwrapped_optim": 55, "rekeyed_osd": 55, "optimstatekeytyp": 55, "param_id": 55, "osd": 55, "param_nam": 55, "sharded_osd": 55, "scatter_full_optim_state_dict": 55, "new_model": 55, "new_optim": 55, "resid": [55, 63, 198, 291, 339, 1344, 1716, 2045, 2075], "remap": [55, 90, 417, 1276, 1280, 1344, 1365, 2011, 
2027, 2074], "state_dict_config": 55, "optim_state_dict_config": 55, "descend": [55, 64, 138, 544, 879, 1185, 1272, 1336, 1337, 1526, 1899, 1927, 2014, 2106], "transpar": [55, 2047, 2075, 2077], "sharded_state_dict": 55, "shardedstatedictconfig": [55, 2012], "offload_to_cpu": 55, "optimstatedictconfig": [55, 2012], "param_state_dict": 55, "statedictconfig": [55, 2012], "writeback": 55, "summon": 55, "discard": [55, 1187, 1269, 1330, 1734, 2011, 2015, 2029], "redundantli": [55, 2041], "materi": [55, 903, 907, 908, 909, 990, 1587, 1588, 1589, 1718, 1719, 1797, 1850, 1851, 2048, 2102], "throughput": [55, 2013, 2044, 2045, 2051], "backward_post": 55, "altogeth": [55, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 2099], "contend": 55, "volum": [55, 1235], "_hybrid_shard_zero2": 55, "freed": [55, 152, 896, 917, 1016, 1064, 2032, 2045, 2051, 2053, 2075, 2082, 2096, 2113], "param_dtyp": 55, "reduce_dtyp": 55, "buffer_dtyp": 55, "keep_low_precision_grad": 55, "cast_forward_input": 55, "cast_root_forward_input": 55, "_module_classes_to_ignor": 55, "batchnorm": [55, 702, 703, 1174, 1272, 1282, 1526, 1566, 1716, 1725, 1726, 1727, 1728, 2026, 2036, 2042, 2055, 2067, 2070, 2072, 2073], "_batchnorm": [55, 1725, 1727], "permit": [55, 1186, 1197, 2045, 2080], "thereaft": 55, "local_state_dict": 55, "upcast": 55, "recast": 55, "offload_param": 55, "cfg": 55, "finetun": [55, 1272, 1526], "model_fn": 55, "my_checkpoint": 55, "_use_dtensor": 55, "localstatedictconfig": [55, 2012], "shardedoptimstatedictconfig": [55, 2012], "localoptimstatedictconfig": [55, 2012], "love": 56, "hear": 56, "vmap": [56, 58, 59, 64, 908, 909, 911, 913, 917, 922, 923, 1167, 1170, 1171, 1173, 1175, 2046], "arbitrarili": [56, 61, 515, 1275, 1597, 2016, 2042, 2048, 2049], "stock": [56, 61], "ensembl": [56, 59, 61, 1175], "maml": [56, 61], "vjp": [56, 59, 60, 892, 893, 903, 908, 909, 1171, 1177, 1976, 2048, 2049], "whirlwind": 56, "tour": 56, "ux": [56, 61, 1181, 2070], "jacrev": [57, 59, 60, 913, 1169, 1170, 2049], "functional_cal": [57, 59, 1175, 2102], "running_mean": [58, 1174, 1272, 1440, 1441, 1442, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1526, 1566, 1602, 1642, 2014, 2055, 2060, 2106], "running_var": [58, 1174, 1272, 1440, 1441, 1442, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1526, 1566, 1602, 1642, 2014, 2060, 2106], "groupnorm": [58, 1633], "anywher": [58, 2041], "batchnorm2d": [58, 715, 718, 720, 725, 816, 1282, 1462, 1500, 1602, 2026, 2042, 2055, 2070, 2072], "track_running_stat": [58, 753, 754, 755, 1174, 1440, 1441, 1442, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1566, 2060], "resnet": [58, 2011, 2013, 2015, 2055, 2068, 2085, 2090], "regnet": 58, "norm_lay": 58, "resnet18": [58, 64, 2011, 2013, 2015, 2027, 2068, 2109], "num_group": [58, 751, 1480, 1633, 2014], "fragil": 58, "replace_all_batch_norm_modules_": 58, "upstream": [59, 2061], "coupl": [59, 83, 2013, 2054, 2062, 2076, 2096, 2110], "jvp": [59, 60, 893, 898, 901, 902, 903, 907, 908, 909, 1173, 2048], "jacfwd": [59, 60, 913, 1169, 2049], "carefulli": [59, 63, 488, 2020, 2049, 2068, 2101, 2103], "make_functional_with_buff": 59, "hurri": 59, "gist": [59, 795, 2052, 2105], "emul": [59, 801, 2017, 2048, 2069], "fmodel": 59, "compute_loss": [59, 61, 1165, 1167], "predict": [59, 488, 1430, 1439, 1479, 1615, 2067, 2085, 2093], "argnum": [59, 1167, 1168, 1169, 1170, 1171], "carri": [59, 338, 799, 840, 841, 860, 861, 2020, 2043, 2070], "stack_module_st": 59, "num_model": [59, 1175], "base_model": 59, 
"clearer": [59, 1329, 1797, 2048, 2063], "call_single_model": 59, "aotautograd": [59, 682, 977, 2022, 2047, 2101, 2102, 2103, 2111], "stori": [59, 2052, 2099], "grad_x": [60, 2020, 2049, 2102], "has_aux": [60, 1167, 1168, 1170, 1171, 1172, 1176], "mental": [60, 1166], "absenc": [60, 2093], "unbind": [60, 1177, 1976, 2012, 2014, 2033, 2066, 2084], "presenc": [60, 64, 1282, 1417, 2057, 2068, 2080, 2107], "pop": [60, 942, 1068, 1069, 1527, 1536, 2066, 2069, 2099], "lst": 60, "batchedtensor": 60, "batched_tensor_input": 60, "new_": [60, 2045, 2086], "new_zero": [60, 2014, 2066], "new_empti": [60, 2014, 2020, 2066], "diag_emb": [60, 1098, 1308, 1309, 1336, 1927, 2014, 2066], "vec": [60, 109, 110, 426, 692, 1414, 1767, 2014, 2080, 2106], "copy_": [60, 62, 417, 1166, 1579, 2013, 2014, 2033, 2045, 2082], "arithmet": [60, 948, 951, 965, 1328, 1852, 2017, 2029, 2033, 2034, 2058, 2070, 2083], "extra_arg": 60, "theoret": 60, "custom_dot": 60, "lax": 60, "while_loop": 60, "is_nonzero": [60, 2014, 2066, 2080], "rag": 60, "unclear": [60, 619], "add_nois": 60, "prng": 60, "cos_x": [61, 1167], "neg_sin_x": [61, 1167], "feature_s": [61, 1167, 1177, 1976], "feature_vec": [61, 1167, 1177, 1976], "mseloss": [61, 1167, 1486, 1558, 1664, 2045, 2047], "grad_weight_per_exampl": [61, 1167], "cotang": [61, 1176], "vjp_fn": [61, 1176], "out_tang": 61, "hessian0": 61, "hessian1": 61, "hess": [61, 1169], "set_overwrite_module_params_on_convers": [62, 2012], "to_empti": [62, 1272, 1526, 2024], "get_overwrite_module_params_on_convers": [62, 2012], "set_swap_module_params_on_convers": [62, 2012], "swap_tensor": [62, 417, 2012], "module_load": 62, "get_swap_module_params_on_convers": [62, 417, 1272, 1526, 2012], "rpc_async": [63, 2016, 2066, 2075, 2077], "add_done_callback": 63, "fut": [63, 1275, 1716, 2044, 2075], "set_result": [63, 1716, 2075], "haven": [63, 2055, 2109], "set_except": 63, "baseexcept": 63, "valueerror": [63, 66, 86, 897, 940, 1270, 1736, 1737, 1757, 1760, 2048, 2063, 2079, 2087, 2110], "twice": [63, 619, 912, 1173, 2041, 2042, 2050, 2052, 2101], "slow_set_futur": 63, "sleep": 63, "cb1": 63, "cb2": 63, "dedic": [63, 2045], "pool": [63, 768, 769, 770, 771, 784, 785, 1008, 1042, 1043, 1053, 1064, 1381, 1383, 1427, 1428, 1429, 1431, 1432, 1433, 1435, 1436, 1437, 1473, 1474, 1493, 1494, 1495, 1519, 1520, 1521, 1522, 1523, 1524, 1527, 1591, 1592, 1593, 1594, 1595, 1596, 1599, 1600, 1601, 1627, 1628, 1653, 1654, 1655, 1657, 1658, 1659, 1831, 1832, 2012, 2044, 2045, 2048, 2055, 2057, 2072, 2075, 2096], "didn": [63, 2048, 2051, 2061, 2067], "cb_fut": 63, "chain_cb_fut": 63, "cb": [63, 2075], "held": [63, 1008, 1032, 1060, 1384, 1989, 2041, 2104], "collect_al": [63, 2012], "fut0": 63, "fut1": [63, 2075], "fut_list": 63, "wait_al": [63, 2012], "clamp": [64, 188, 189, 801, 804, 971, 1279, 1438, 1479, 1632, 1643, 1703, 2014, 2026, 2033, 2065, 2066, 2070, 2072, 2073, 2081, 2087, 2106], "call_modul": [64, 82, 1185, 2110], "call_method": [64, 82, 1185, 2097, 2100], "feed": [64, 2013, 2050, 2055, 2090, 2101], "fake": [64, 86, 714, 715, 716, 717, 718, 719, 720, 721, 722, 732, 733, 734, 758, 795, 799, 801, 802, 812, 814, 840, 864, 865, 1122, 1123, 1185, 1187, 1196, 1209, 2020, 2063, 2068, 2070, 2073, 2092], "theses": 64, "callsit": 64, "constitut": [64, 2099], "isol": [64, 1706, 2032, 2067, 2111], "tracer_class": 64, "gm": [64, 2097, 2100, 2101, 2110, 2111], "treatment": 64, "topk": [64, 2014, 2033, 2066, 2106], "print_tabular": [64, 2097, 2100], "opcod": [64, 2068, 2097, 2100], "linear_weight": 64, "linear_1": 64, 
"topk_1": 64, "pose": [64, 2076, 2099], "explor": [64, 81, 2011, 2042, 2055, 2068, 2113], "lint": 64, "inserting_aft": [64, 2110], "new_nod": 64, "replace_all_uses_with": [64, 2110], "tediou": 64, "unwieldi": 64, "fusion": [64, 682, 794, 816, 863, 864, 975, 1274, 1281, 1286, 1287, 1288, 1289, 1723, 1724, 2026, 2055, 2070, 2099, 2102, 2104, 2110, 2111], "imagin": [64, 2075, 2099, 2103], "requisit": 64, "relu_decomposit": 64, "decomposition_rul": 64, "constitu": [64, 2035, 2048, 2051], "new_graph": 64, "graphappendingtrac": 64, "proxy_arg": 64, "output_proxi": 64, "node_copi": 64, "ari": [64, 2065], "unari": [64, 1170, 1171, 1570, 1572, 1574, 2017, 2033], "organiz": 64, "shapeprop": 64, "named_modul": [64, 1272, 1526, 2055], "args_it": 64, "load_arg": 64, "map_arg": 64, "fetch_attr": [64, 1185], "target_atom": 64, "attr_itr": 64, "hasattr": [64, 1965, 2014, 2048, 2070, 2100], "nonexist": [64, 2015, 2016], "self_obj": 64, "encompass": 64, "prove": [64, 2032], "disprov": 64, "led": [64, 2113], "auxiliari": [64, 1167, 1168, 1170, 1171, 1172, 1176, 2011, 2054], "nondeterminist": [64, 315, 317, 323, 501, 517, 519, 946, 1109, 1110, 1111, 1294, 1445, 1453, 1454, 1455, 1456, 1457, 1458, 1522, 1523, 1524, 1607, 1608, 1609, 1610, 1611, 1612, 1616, 1624, 1632, 1643, 1671, 1684, 1703, 1704, 1705, 1869, 1964, 2097], "unord": [64, 1527, 1536], "nondetermin": [64, 923, 2059], "dedupl": [64, 2065], "torchvis": [64, 2011, 2012, 2013, 2015, 2065, 2068, 2070, 2085, 2090, 2109], "transformed_resnet18": 64, "input_imag": 64, "224": [64, 864, 865, 2013, 2015, 2065, 2109], "margin": [64, 1459, 1485, 1518, 1529, 1531, 1575, 1576, 1613, 1640, 1656, 1665, 1700, 1701, 2014, 2085], "commut": 64, "toolbox": 64, "tradit": [64, 1574, 2062, 2063], "luckili": 64, "my_pass": 64, "my_module_transform": 64, "input_valu": 64, "prompt": [64, 2011, 2061], "set_trac": [64, 1277, 1284, 1290, 2013], "examin": [64, 2055, 2065, 2069, 2113], "undergon": 64, "subclassm": 64, "pre_trac": 64, "post_trac": 64, "sake": 64, "tabular": [64, 682, 2022], "transform_graph": 64, "session": [64, 2063], "luck": [64, 2099], "input_nod": 64, "stepwis": 64, "breakpoint": [64, 2016, 2099, 2110], "excel": [64, 2103], "realpython": 64, "pycharm": 64, "vscode": 64, "graphic": [64, 2030, 2061, 2109], "parlanc": 64, "func_to_trac": 64, "dyn": 64, "155": 64, "__bool__": [64, 2014, 2016], "to_bool": 64, "traceerror": [64, 2070], "hyper": [64, 1558, 2015, 2085], "do_activ": 64, "512": [64, 1570, 1571, 1572, 1573, 1574, 2045, 2095], "without_activ": 64, "with_activ": 64, "traced_without_activ": 64, "traced_with_activ": 64, "concrete_arg": 64, "__torch_function__": [64, 2112], "161": 64, "len_1": 64, "sqrt_1": 64, "truediv": [64, 2097, 2099, 2100], "mycustomtrac": 64, "traced_graph": 64, "runnabl": [64, 2041, 2065, 2085, 2111], "myspecialsubmodul": 64, "neg_1": 64, "is_leaf_modul": [64, 2091], "sparse_coo_tensor": [64, 546, 583, 1908, 2014, 2018, 2066, 2080], "ones_lik": [64, 1170, 1171, 1176, 2014, 2020, 2035, 2045, 2048, 2049, 2055, 2066], "zeros_lik": [64, 903, 905, 907, 908, 909, 2014, 2018, 2035, 2045, 2066, 2080], "viabl": [64, 2042, 2045], "torch_randn": 64, "gotcha": 64, "dropoutrepro": 64, "assert_clos": [64, 1125, 1127, 1128, 1133, 1134, 1139, 1140, 1141, 1143, 1145, 2012, 2013, 2087], "greatest": [64, 1214, 1416, 2065, 2087], "6207983493804932": 64, "dropoutrepro2": 64, "overspeci": [64, 975, 2098], "ph": 64, "shouldn": [64, 1008, 2068, 2080, 2084], "fn_or_nam": 64, "callfunct": 64, "my_custom_funct": [64, 977], "fn_to_be_trac": 64, "reassign": [64, 
2045], "regener": 64, "unset": [64, 938, 2058], "add_submodul": 64, "subpath": 64, "get_submodul": [64, 1272, 1526], "delete_all_unused_submodul": 64, "delete_submodul": 64, "print_read": [64, 2020, 2100], "print_output": 64, "include_strid": 64, "include_devic": 64, "date": [64, 2068], "pythoncod": 64, "fxmodul": 64, "owning_modul": 64, "tracer_cl": 64, "tracer_extra": 64, "the_funct": 64, "type_expr": 64, "create_nod": 64, "method_nam": 64, "0th": [64, 1177, 1976], "inserting_befor": 64, "influenc": [64, 2063, 2080, 2114], "eliminate_dead_cod": 64, "topolog": [64, 2065], "attr_1": 64, "is_impur": 64, "bad": [64, 66, 2032, 2065, 2068, 2070, 2101, 2109], "erase_nod": 64, "to_eras": 64, "eras": [64, 66, 71, 72, 2098], "find_nod": 64, "iterat": 64, "qualified_nam": 64, "graph_copi": 64, "val_map": 64, "return_output_nod": 64, "opoverload": [64, 1185, 2020], "companion": 64, "arg_transform": 64, "value_remap": 64, "_node_list": 64, "doubli": 64, "on_generate_cod": 64, "make_transform": 64, "transformcodefunc": 64, "insert_pdb": 64, "bodi": [64, 66, 75, 78, 1284, 2013, 2015, 2016, 2017, 2063, 2100, 2111], "current_tran": 64, "stuff": [64, 2101], "default_valu": 64, "_not_": 64, "tabul": 64, "process_input": [64, 1185], "process_output": [64, 1185], "python_cod": 64, "root_modul": [64, 795, 2072], "set_codegen": 64, "return_typ": [64, 699, 1087, 1088, 1235, 1294, 1332, 1370, 1373, 1375, 1378, 1418, 1899, 1946, 1951], "printout": [64, 1052, 1065, 2100], "all_input_nod": 64, "format_nod": 64, "placeholder_nam": 64, "maybe_return_typenam": 64, "autogener": [64, 2049], "insert_arg": 64, "impur": 64, "normalized_argu": 64, "arg_typ": 64, "kwarg_typ": 64, "normalize_to_only_use_kwarg": 64, "vararg": 64, "argskwargspair": 64, "bx": 64, "ax": [64, 968, 1304, 1318, 1322, 1333, 1334, 1335, 1363, 1374, 1534, 1545, 1951, 2065, 2102], "prev": [64, 1795], "replace_with": 64, "delete_user_cb": 64, "propagate_meta": 64, "replace_input_with": 64, "old_input": 64, "new_input": 64, "create_proxi": [64, 2099], "record_stack_trac": 64, "outputgraph": [64, 2099], "update_arg": 64, "update_kwarg": 64, "autowrap_modul": 64, "autowrap_funct": 64, "create_arg": 64, "create_args_for_root": 64, "root_fn": 64, "is_modul": 64, "introspect": [64, 977, 2102], "disallow": [64, 2068, 2075, 2083, 2102, 2103], "proxy_factory_fn": 64, "get_fresh_qualnam": 64, "clash": 64, "attr": [64, 66, 68, 1192, 1374, 1855], "attr_val": 64, "parameter_proxy_cach": 64, "module_qualified_nam": [64, 2091], "path_of_modul": 64, "some_hyperparamet": 64, "indexed_item": 64, "proxied_valu": 64, "garbage_collect_valu": [64, 1185], "run_nod": [64, 1185], "vice": [64, 460, 605, 1161, 1162, 1518, 2042, 2073, 2082, 2098], "versa": [64, 460, 605, 1161, 1162, 1518, 2042, 2073, 2082, 2098], "negsigmswapinterpret": 64, "call_self": 64, "args_tail": 64, "boxed_run": [64, 1185], "args_list": [64, 1185], "promptli": [64, 1185, 1196, 1345], "fetch_args_kwargs_from_env": [64, 1185], "map_nodes_to_valu": [64, 1185], "initial_env": [64, 1185], "enable_io_process": [64, 1185], "negsigmswapxform": 64, "nodes_map": [64, 2110], "subgraph_rewrit": [64, 2110], "m1": [64, 1974, 1975, 2028], "m2": [64, 1764, 1974, 1975, 2028, 2070], "traced_modul": [64, 2060, 2110], "despit": [64, 1288, 1289, 2045, 2052], "stack_1": 64, "stack_2": 64, "sum_2": 64, "max_1": 64, "max_2": 64, "exhaust": [66, 1716, 2114], "wih": 66, "escap": [66, 76, 977], "hatch": [66, 76, 977], "mypi": [66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 1259, 2015, 2016], "untyp": [66, 67, 68, 69, 70, 
71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 2082], "assumeconstantresult": [66, 77], "burn": [66, 77, 2096], "tracabl": [66, 77], "get_item": [66, 77], "i64": [66, 71, 72, 76, 77], "myautogradfunct": 66, "autogradfunct": 66, "allow_in_graph": [66, 2102], "p_linear_weight": 66, "p_linear_bia": 66, "mul_1": [66, 72, 2097, 2099], "mysubmodul": [66, 74, 75], "condbranchclassmethod": [66, 74, 75], "subm": [66, 74, 75], "condbranchnestedfunct": [66, 74, 75], "inner_true_fn": [66, 74, 75], "inner_false_fn": [66, 74, 75], "condbranchnonlocalvari": [66, 74, 75], "my_tensor_var": [66, 74, 75], "my_primitive_var": [66, 74, 75], "nonloc": [66, 69, 74, 75, 2017], "c_lifted_tensor_0": [66, 74, 75], "lift_fresh_copi": [66, 74, 75, 2066], "sub_1": [66, 74, 75], "constant_tensor": [66, 74, 75], "lifted_tensor_0": [66, 74, 75], "condclosedovervari": [66, 69, 74], "condoperand": [66, 74, 75], "sym_size_int": [66, 74, 75], "vr": [66, 74, 75, 76, 77, 1191], "condpred": [66, 74, 75], "constrainassizeexampl": [66, 76, 77], "hint": [66, 76, 77, 682, 1008, 1042, 1053, 1187, 1197, 1203, 1273, 1532, 1570, 1571, 1572, 1573, 1574, 2013, 2015, 2016, 2020, 2045, 2048, 2063, 2075, 2098], "_check": [66, 76, 77], "_check_is_s": [66, 76, 77], "u0": [66, 76, 77], "sym_constrain_range_for_size_default": [66, 76, 77], "sym_constrain_range_for_s": [66, 76, 77, 2014, 2066], "sym_constrain_rang": [66, 76, 77, 2014, 2066], "le": [66, 76, 77, 362, 1194, 1300, 1482, 1483, 1636, 1637, 2014, 2033, 2066, 2087, 2106], "_assert_scalar": [66, 76, 77, 2066], "le_1": [66, 76, 77], "_assert_scalar_1": [66, 76, 77], "le_2": [66, 76, 77], "u1": [66, 76, 77], "u2": [66, 76, 77, 2111], "constrainasvalueexampl": [66, 76, 77], "test_decor": 66, "dynamicshapeassert": [66, 67], "dynamicshapeconstructor": [66, 75], "dynamicshapeifguard": [66, 71, 75], "dynamicshapemap": [66, 75, 78], "body_graph_0": [66, 75, 78], "map_impl": [66, 75, 78], "dynamicshapesl": [66, 75], "slice_2": [66, 75], "dynamicshapeview": [66, 75], "new_x_shap": [66, 75], "support_level": [66, 72], "supportlevel": [66, 72], "fnwithkwarg": [66, 72], "pos0": [66, 72], "tuple0": [66, 72], "myarg": [66, 72], "mykw0": [66, 72], "mykwarg": [66, 72], "input0": [66, 72], "input1": [66, 72, 762, 1443, 1459, 1460, 1518, 1535, 1603, 1613, 1614, 1656, 2014, 2041, 2065], "tuple0_0": [66, 72], "tuple0_1": [66, 72], "myargs_0": [66, 72], "myargs_1": [66, 72], "mul_2": [66, 72], "mul_3": [66, 72], "mul_4": [66, 72], "mul_5": [66, 72], "mul_6": [66, 72], "listcontain": [66, 67, 72, 75], "monkei": [66, 67, 72, 75, 1965], "cow": [66, 67, 72, 75], "pig": [66, 67, 72, 75], "listunpack": [66, 71, 72], "args_0": [66, 71, 72], "args_1": [66, 71, 72], "args_2": [66, 71, 72], "nestedfunct": [66, 69], "nullcontextmanag": [66, 70], "nullcontext": [66, 70], "pytreeflatten": 66, "tree_flatten": 66, "x_2": [66, 1087, 1088, 1089, 1090, 1340, 1443, 1459, 1460, 1603, 1614], "dim1_x": [66, 75], "scalaroutput": [66, 75], "anim": 66, "moo": 66, "specializedattribut": 66, "staticforloop": [66, 71], "unrol": [66, 71, 2015, 2016, 2065, 2102], "ret": [66, 71, 2048, 2075], "add_3": [66, 71], "add_4": [66, 71], "add_5": [66, 71], "add_6": [66, 71], "add_7": [66, 71], "add_8": [66, 71], "add_9": [66, 71], "staticif": [66, 71], "tensorsetattr": [66, 68], "setattr": [66, 68, 2016], "typereflectionmethod": [66, 68], "overli": [66, 68, 2048, 2099], "typereflectionmethodrewrit": [66, 68], "userinputmut": [66, 79], "not_supported_yet": [66, 68, 73, 75, 80], "dynamicshaperound": [66, 68, 75], "roundtoint": [66, 68, 75], "inttruediv": 
[66, 68, 75], "modelattrmut": [66, 73], "attr_list": [66, 73], "recreate_list": [66, 73], "optionalinput": [66, 73], "unexpectedli": [66, 73, 488, 2082, 2086], "torchsymmin": [66, 80], "sym_min": [66, 80], "0x7f4817597ca0": [66, 80], "undergo": 81, "gain": [81, 1797, 2040, 2092, 2093, 2104, 2105], "fxe0010": [81, 2063], "diagnosticopt": [81, 83, 2063], "verbosity_level": [81, 83, 2063], "onnx_diagnost": [81, 83, 682], "fxe0008": [81, 2063], "onnxscript": [82, 2063, 2064, 2065], "torchlib": [82, 86], "fxonnxinterpret": 82, "onnx": [83, 84, 86, 87, 88, 682, 1758, 1759, 1777, 1778, 1779, 2012, 2034, 2070, 2092], "endors": 83, "noteworthi": 83, "_intern": [83, 88, 2082], "disregard": [86, 87, 89, 2099], "indexerror": [86, 1742, 1744], "typeerror": [86, 1747, 2048, 2087, 2112], "opschema": [86, 2063], "onnxfunct": [87, 89, 2063], "perfect": 87, "nearest": [87, 781, 787, 789, 1419, 1579, 1581, 1632, 1643, 1703, 1705, 1827, 1855, 2045], "highest": [87, 682, 1430, 1837, 1838, 1870, 2071, 2087], "stringent": 87, "op_level_debug": [87, 89, 2063], "elementwise_dtyp": 88, "_ref": 88, "type_promot": 88, "absent": [89, 2047], "signifi": 89, "g_cpu": 90, "g_cuda": 90, "clone_st": 90, "cloned_st": 90, "bytetensor": [90, 1039, 1040, 1079, 1080, 1225, 1386, 1393, 1875, 1967, 1993, 1994, 2004, 2005, 2074, 2083, 2086], "graphsafe_get_st": [90, 2045], "current_st": 90, "graphsafe_set_st": [90, 2045], "g_cuda_oth": 90, "2147483647": 90, "0x8000_0000_0000_0000": [90, 1365, 2074], "0xffff_ffff_ffff_ffff": [90, 1365, 2074], "random_devic": 90, "1516516984916": 90, "new_stat": [90, 1079, 1080, 1393, 1875, 1967, 2004, 2005, 2074], "void": [90, 1051, 2045, 2054, 2102], "g_cpu_oth": 90, "abs_": [94, 2014, 2033, 2086], "acosh": [98, 124, 870, 2014, 2033, 2066, 2106], "batch1": [101, 102, 153, 154, 688, 943, 2014], "batch2": [101, 102, 153, 154, 171, 688, 943, 2014], "tensor1": [103, 104, 105, 106, 689, 690, 1367, 1852, 2014], "tensor2": [103, 104, 105, 106, 316, 406, 689, 690, 1367, 1852, 2014], "mat1": [107, 108, 556, 691, 1237, 1293, 1377, 1900, 1904, 1905, 1919, 2014, 2106], "mat2": [107, 108, 415, 556, 691, 955, 1237, 1293, 1377, 1900, 1904, 1905, 1919, 2014, 2106], "mat": [109, 110, 542, 692, 1414, 1833, 1897, 1900, 1919, 2014, 2085], "vec1": [111, 112, 693, 2014], "vec2": [111, 112, 290, 463, 693, 1217, 1815, 2014], "keepdim": [114, 116, 117, 118, 120, 136, 137, 356, 394, 409, 411, 412, 413, 416, 431, 432, 433, 434, 455, 472, 481, 557, 567, 617, 695, 697, 698, 699, 701, 877, 878, 1294, 1325, 1329, 1342, 1360, 1370, 1372, 1373, 1375, 1378, 1417, 1418, 1419, 1420, 1535, 1672, 1771, 1824, 1827, 1921, 1922, 1926, 1971, 1972, 2014, 2033, 2081, 2106], "rtol": [115, 346, 696, 922, 923, 1261, 1327, 1330, 1779, 2013, 2014, 2087], "atol": [115, 346, 696, 922, 923, 1261, 1327, 1330, 1339, 1779, 2013, 2014, 2048, 2087], "08": [115, 346, 696, 953, 1261, 1319, 1460, 1540, 1676, 1783, 1784, 1785, 1787, 1793, 1794, 1797, 1810, 1881, 2014], "equal_nan": [115, 346, 696, 1261, 2014, 2087], "arcco": [123, 2014, 2066, 2087], "acosh_": [125, 2014, 2033], "arccosh": [125, 2014, 2066], "arcsin": [127, 884, 2014, 2066, 2080], "arcsinh": [129, 2014, 2066], "atan2_": [132, 2014, 2033], "arctan2": [132, 2014, 2066], "arctan": [133, 2014, 2066], "arctanh": [135, 2014, 2066], "asinh": [145, 872, 2014, 2033, 2066, 2080, 2106], "atan": [149, 633, 634, 873, 2014, 2033, 2066, 2080, 2106], "atanh": [151, 875, 2014, 2033, 2066, 2080, 2106], "wrt": [152, 922, 923], "60521": [152, 896], "issuecom": [152, 896], "867061780": [152, 896], "texttt": [155, 
156, 696, 1261, 1430, 2087], "bernoulli": [156, 762, 1434, 1463, 1464, 1465, 1466, 1470, 1477, 1496, 1617, 1618, 1619, 1620, 1625, 2012, 2014, 2033, 2066, 2089], "preserve_format": [157, 172, 174, 177, 180, 181, 182, 191, 208, 211, 242, 269, 299, 327, 395, 527, 582, 972, 1110, 1164, 1776, 1836, 1838, 1840, 2010, 2048, 2083], "minlength": [158, 946, 2014], "bitwise_and": [160, 2014, 2066, 2106], "bitwise_left_shift": [162, 2014, 2066], "bitwise_not": [164, 2014, 2033, 2066, 2106], "bitwise_or": [166, 2014, 2066, 2106], "bitwise_right_shift": [168, 2014, 2066], "bitwise_xor": [170, 2014, 2066, 2106], "uint8": [174, 244, 619, 695, 701, 1161, 1825, 1828, 1829, 1852, 2070, 2082, 2083, 2085, 2086, 2116], "cauchi": [175, 2012, 2042, 2052, 2066, 2089], "dfrac": [175, 379, 610, 1460, 1614, 1731, 1732, 1765, 1768], "complex32": [181, 1453, 1454, 1455, 1607, 1608, 1609, 1777, 1867, 2086, 2087], "int8": [182, 450, 731, 947, 948, 949, 950, 951, 952, 1161, 1354, 1355, 1356, 1357, 2070, 2073, 2080, 2082, 2083, 2086, 2116], "input2": [185, 461, 462, 519, 762, 1443, 1459, 1460, 1518, 1535, 1603, 1613, 1614, 1656, 2014, 2041, 2065], "clamp_": [190, 2014, 2033], "uncoalesc": [192, 330, 1912], "coo": [192, 325, 330, 344, 616, 1237, 1900, 1904, 1909, 1910, 1911, 1912, 1913, 1914, 2012, 2023, 2083, 2087], "inttensor": [193, 210, 1247, 1362, 1363, 1468, 2083, 2086], "csr": [193, 210, 345, 585, 590, 1469, 1900, 1904, 1905, 1911, 1914, 2023, 2087], "sparse_csr": [193, 210, 585, 590, 1904, 1905, 1907, 1911, 1914, 2080], "nnz": [193, 546, 585, 586, 587, 589, 590, 1197, 1422, 1902, 1904, 1905, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 2020, 2080], "int32": [193, 210, 315, 323, 327, 449, 619, 959, 1110, 1122, 1123, 1158, 1161, 1162, 1303, 1314, 1315, 1316, 1334, 1362, 1445, 1825, 1862, 2070, 2080, 2082, 2083, 2086, 2104, 2116], "mkl": [193, 210, 2012, 2044, 2061, 2066, 2080, 2088, 2114], "routin": [193, 210, 879, 1345, 1899, 1927, 2080], "downcast": [193, 210], "to_sparse_csr": [193, 210, 586, 587, 1901, 1904, 1905, 2014, 2066, 2080], "conj_phys": [196, 990, 2014, 2066, 2080, 2106], "contiguous_format": [197, 333, 501, 502, 1109, 1716, 1723, 1724, 2083], "non_block": [198, 211, 582, 605, 1272, 1526, 1757, 2014, 2045, 2082, 2106], "copysign": [200, 2014, 2066], "fweight": [207, 997, 2014], "aweight": [207, 997, 2014], "sparse_dim": [220, 547, 548, 1900, 1904, 1908, 1912, 2014, 2066, 2080], "dim1": [227, 229, 230, 571, 594, 595, 1096, 1098, 1099, 1307, 1930, 1948, 2014, 2106], "dim2": [227, 229, 230, 1096, 1098, 1099, 1307, 2014, 2106], "digamma": [233, 2014, 2033, 2066, 2081, 2106], "laid": 235, "outermost": [235, 1226], "channels_last": [235, 1272, 1526, 1716, 1723, 1724, 2083], "rounding_mod": [237, 238, 239, 240, 1103, 1104, 1153, 1156, 1846, 1956, 2014, 2106], "split_size_or_sect": [243, 304, 621, 1915], "eq": [246, 2014, 2033, 2066, 2106], "erf": [249, 641, 642, 2014, 2033, 2066, 2080, 2081, 2106], "erfc": [251, 643, 644, 2014, 2033, 2066, 2081, 2106], "lambd": [260, 300, 1481, 1564, 1635, 1693, 1780, 2014], "theori": [260, 2042, 2051], "fill_valu": [262, 448, 1163, 1164, 1445, 2014, 2045, 2106], "tall": [262, 1331, 1336, 2048], "start_dim": [265, 1147, 1471, 2014], "end_dim": [265, 1147, 1471, 2014], "float_pow": [271, 2014, 2066], "floor_divid": [275, 1103, 2014, 2066, 2080], "divisor": [278, 279, 491, 492, 770, 771, 1103, 1153, 1156, 1182, 1214, 1436, 1437, 1472, 1578, 1600, 1601, 1846, 1956], "fmod": [279, 1846, 2014, 2066, 2106], "mantissa": [282, 483, 1158, 1296, 1870, 2014, 2045, 2058, 2106], "gcd": 
[285, 2014, 2066, 2106], "ge": [287, 1228, 1366, 1477, 1482, 1483, 1496, 1534, 1636, 1637, 2014, 2033, 2066, 2106], "geometr": [288, 781, 787, 1632, 1643, 1703, 1970, 2012, 2066, 2089, 2108], "greater_equ": [296, 2014, 2066], "hypot": [306, 2014, 2066, 2106], "igamma": [310, 2014, 2066, 2106], "igammac": [312, 2014, 2066, 2106], "3100": [313, 485, 1243, 1844], "3553j": [313, 485, 1243, 1844], "5445": [313, 485, 1243, 1844], "7896j": [313, 485, 1243, 1844], "6492": [313, 485, 1243, 1844], "0633j": [313, 485, 1243, 1844], "0638": [313, 485, 1243, 1844], "8119j": [313, 485, 1243, 1844], "3553": [313, 1243], "7896": [313, 1243], "0633": [313, 1243, 1319, 2035], "8119": [313, 1243], "index_add_": [314, 903, 905, 908, 1244, 1245, 2014, 2059], "index_copy_": [316, 2014], "index_fill_": [318, 2014, 2033], "index_put_": [320, 2014], "include_self": [323, 518, 519, 1246, 1861, 2014, 2106], "identit": 323, "floattensor": [323, 489, 688, 689, 690, 691, 692, 737, 922, 923, 943, 1468, 1469, 1529, 1570, 1868, 2083, 2086], "amax": [323, 519, 698, 699, 1904, 2014, 2066, 2106], "amin": [323, 519, 697, 699, 1904, 2014, 2066, 2106], "fill_": [323, 1272, 1298, 1526, 1847, 2014, 2033, 2055, 2082], "72": [323, 619, 1226, 2070], "uint8_t": [328, 1777], "retain_grad": [337, 2014, 2042, 2066], "requires_grad_": [337, 450, 1272, 1445, 1526, 1616, 1901, 1904, 1942, 2014, 2033, 2042, 2086, 2111], "n_fft": [353, 558, 1269, 1923, 2014], "hop_length": [353, 558, 1269, 1923, 2014], "win_length": [353, 558, 1269, 1923, 2014], "center": [353, 558, 781, 787, 1128, 1269, 1597, 1632, 1643, 1703, 1794, 1816, 1883, 1923, 2014, 2042, 2066, 2085], "onesid": [353, 558, 1269, 1923, 2014, 2106], "return_complex": [353, 558, 1269, 1923, 2014], "element_s": [355, 437, 2014, 2033, 2066, 2082], "lcm": [358, 2014, 2066], "ldexp": [360, 1158, 2014, 2066], "lerp": [364, 2014, 2066], "lt": [365, 397, 1194, 1299, 2013, 2014, 2033, 2066, 2097, 2100, 2106], "less_equ": [368, 2014, 2066], "lgamma": [370, 653, 654, 2014, 2066, 2106], "ln": [379, 1301, 2081], "logical_and": [385, 2014, 2066, 2106], "logical_not": [387, 1684, 2014, 2033, 2035, 2066, 2106], "logical_or": [389, 2014, 2066, 2106], "logical_xor": [391, 2014, 2066, 2106], "pivot": [398, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1362, 1363, 1364, 1826, 2014], "get_info": [398, 1362], "lu_data": [399, 1363, 1364, 2014], "lu_pivot": [399, 1363, 1364, 2014], "masked_fill_": [401, 1684, 2014, 2033, 2034], "booltensor": [402, 404, 737, 1366, 1570, 1979, 2083, 2086], "masked_scatter_": [403, 2014], "mvlgamma": [428, 2014, 2066], "posinf": [429, 430, 1416, 2014], "neginf": [429, 430, 1416, 2014], "nan_to_num": [430, 2014, 2066], "interpol": [433, 481, 787, 788, 789, 821, 1298, 1419, 1579, 1580, 1581, 1597, 1632, 1703, 1704, 1705, 1827, 1964, 2014, 2072], "ne": [441, 1731, 1765, 1773, 2014, 2033, 2066, 2106, 2111], "8182e": 447, "5765e": 447, "41": [447, 1111, 1331, 1706, 1764, 1826], "0545e": 447, "0949e": 447, "4842e": [447, 1111], "0000e": [447, 1111, 1151, 1350, 1359, 1416, 1764, 1881, 1883, 1884, 1889, 1890], "00": [447, 1111, 1151, 1350, 1359, 1416, 1764, 1778, 1881, 1883, 1884, 1889, 1890, 2013, 2065], "141592": [448, 1163], "1416": [448, 1092, 1163, 1942], "from_numpi": [450, 882, 883, 1866, 1942, 2020, 2102], "array_lik": [450, 882, 1909, 1910, 1911, 1912, 1913, 1914, 1942, 2035, 2086], "nextaft": [453, 2014, 2066, 2106], "fro": [455, 1304, 1325, 1329, 1342, 1742, 1751, 1771, 2014], "not_equ": [458, 2014, 2066], "resolve_conj": [460, 990, 2014, 2066], "resolve_neg": [460, 2014, 
2066], "shorthand": [460, 1292], "input3": [462, 2014], "polygamma": [468, 2014, 2066, 2081], "q_per_channel_axi": [475, 476, 2014, 2066], "zero_point": [476, 478, 740, 741, 742, 743, 744, 745, 746, 751, 752, 753, 754, 755, 756, 757, 758, 760, 761, 772, 774, 775, 776, 777, 779, 782, 783, 801, 804, 821, 822, 823, 824, 827, 855, 1122, 1123, 1828, 1829, 1830, 1831, 1832, 2014, 2070, 2071], "qtensor": [480, 2014], "queu": [488, 1014, 1399, 1983, 2045], "life": [488, 1011], "cycl": [488, 896, 1011, 1187, 1802, 1808, 2042, 2069], "poll": 488, "realloc": [488, 975, 1247, 2045], "counterintuit": [488, 2042], "s1": [488, 1187, 2099], "some_comm_op": 488, "wouldn": [488, 1200, 2077, 2101], "chrome": [488, 934, 2069, 2105, 2109], "export_chrome_trac": [488, 2069, 2109], "cudacachingalloc": [488, 2045, 2051], "enable_grad": [490, 2018, 2089], "0100": [490, 878, 1326, 1828], "0200": 490, "0300": [490, 2060], "maxnorm": [493, 494, 1847, 2014], "tile": [495, 2014, 2045, 2066, 2080, 2102], "repeat_interleav": [495, 1964, 2014, 2066, 2072], "output_s": [496, 743, 744, 745, 768, 769, 1427, 1428, 1429, 1431, 1432, 1433, 1457, 1472, 1473, 1474, 1522, 1523, 1524, 1578, 1591, 1592, 1593, 1594, 1595, 1596, 1626, 1627, 1628, 1660, 1661, 1662, 1848, 2014, 2035, 2106], "is_leaf": [497, 2014, 2033, 2035, 2066], "saved_weight": 498, "loaded_weight": 498, "5503": 498, "4926": [498, 2055], "1158": 498, "8303": 498, "1007": 498, "9853": 498, "2316": 498, "6606": 498, "resiz": [501, 502, 526, 547, 548, 1161, 1162, 1166, 1579, 1632, 1643, 2014, 2033, 2066, 2082, 2106], "set_": [501, 2014], "shift": [507, 948, 951, 1128, 1135, 1434, 1440, 1441, 1442, 1470, 1566, 1625, 1853, 1882, 2014, 2017], "decim": [509, 510, 1855, 2014, 2058, 2116], "scatter_": [514, 517, 1859, 2014], "scatter_add_": [515, 516, 1860, 1964, 2014], "scatter_reduce_": [515, 518, 1861, 2014], "axi": [515, 517, 519, 824, 827, 988, 1107, 1122, 1148, 1213, 1238, 1828, 1848, 1853, 1854, 1978, 2014, 2020, 2049, 2065, 2070, 2102, 2106, 2110], "4600": 515, "2300": 515, "scatter_reduc": [519, 1964, 2014, 2066, 2106], "sgn": [524, 1157, 1879, 2014, 2033, 2035, 2066, 2080], "untypedstorag": [526, 559, 615, 2012, 2060, 2082], "int16": [527, 1161, 1355, 2080, 2082, 2083, 2086, 2116], "dense_dim": [545, 547, 548, 585, 586, 587, 589, 590, 1912, 2014, 2066, 2080], "nse": [546, 2080], "6550": 546, "2397": 546, "1611": 546, "0779": [546, 1309, 1823, 1979], "2326": 546, "0558": 546, "4711": 546, "9678": 546, "5138": 546, "0411": 546, "9417": 546, "5158": 546, "0793": 546, "0036": [546, 1370], "2569": 546, "1055": 546, "sparse_coo": [546, 585, 1422, 1904, 1907, 1908, 1912, 2080, 2083], "split_siz": [549, 1915, 2014, 2106], "squeez": [555, 695, 697, 698, 699, 701, 1294, 1360, 1370, 1372, 1373, 1375, 1378, 1417, 1420, 1614, 1824, 1908, 1921, 1922, 1926, 1971, 1972, 2014, 2033, 2065, 2066, 2072, 2084, 2106], "pad_mod": [558, 1923, 2014], "typedstorag": [559, 2012, 2060, 2082], "untyped_storag": 559, "compute_uv": [569, 1336, 1337, 1927, 2014], "axis0": [570, 1929, 2014], "axis1": [570, 1929, 2014], "dim0": [571, 594, 595, 1930, 1948, 2014], "indices_or_sect": [580, 1106, 1236, 1943, 1977], "5044": 582, "0005": [582, 1806, 1812], "3310": 582, "0584": [582, 1927], "cuda0": [582, 2045, 2053, 2086], "masked_grad": [583, 2014], "sparse_mask": [583, 2014, 2066], "mkldnn": [584, 1282, 2012, 2066], "sparsedim": 585, "blocksiz": [585, 586, 587, 1909, 1910, 1911, 2014, 2080], "sparse_csc": [585, 589, 1907, 1911, 1913, 2080], "sparse_bsr": [585, 587, 1910, 1911, 2080], "sparse_bsc": [585, 
586, 1909, 1911, 2080], "bsr": [585, 587, 1910, 1911, 2087], "bsc": [585, 586, 1909, 1911, 2087], "csc": [585, 589, 1911, 1913, 2087], "minu": [585, 586, 587, 589, 590, 2081], "crow_indic": [585, 587, 590, 1902, 1904, 1905, 1907, 1910, 1911, 1914, 2014, 2066, 2080, 2087], "col_indic": [585, 587, 590, 1904, 1905, 1907, 1910, 1911, 1914, 2014, 2066, 2080, 2087], "sparsecsr": [585, 1948, 2080], "row_indic": [586, 589, 1909, 1913, 2014, 2066, 2080, 2087], "ccol_indic": [586, 589, 1909, 1913, 2014, 2066, 2080, 2087], "_nnz": [588, 589, 590, 2066], "012766935862600803": 591, "5415473580360413": 591, "08909505605697632": 591, "7729271650314331": 591, "unitriangular": [596, 1335, 1951, 2014], "tril": [598, 1587, 1588, 1589, 1684, 2014, 2066, 2102], "triu": [600, 1736, 1951, 2014, 2065, 2066, 2102], "trunc": [604, 679, 680, 689, 1103, 1146, 1153, 1156, 1855, 2014, 2033, 2066, 2080, 2106], "sizedim": 609, "return_invers": [611, 612, 1960, 1961, 2014], "return_count": [611, 612, 1960, 1961, 2014], "unsqueez": [614, 1128, 1293, 1333, 1468, 1546, 1736, 1945, 2014, 2048, 2066, 2072, 2080, 2084, 2085, 2106], "subspac": [619, 1336, 1532, 1816, 1927, 1928], "span": [619, 1069, 1070, 1461, 1927, 2069, 2075], "foral": 619, "proportion": [619, 787, 1579, 1703], "9482": [619, 1249], "0310": 619, "4999": 619, "5316": 619, "1520": 619, "7472": 619, "5617": 619, "8649": 619, "4724": [619, 2055], "0334": 619, "2976": 619, "8499": 619, "2109": 619, "9913": 619, "9607": 619, "6123": 619, "1064483442": 619, "1124191867": 619, "1069546515": 619, "1089989247": 619, "1105482831": 619, "1061112040": 619, "1057999968": 619, "1084397505": 619, "1071760287": 619, "1123489973": 619, "1097310419": 619, "1084649136": 619, "1101533110": 619, "1073668768": 619, "1082790149": 619, "1088634448": 619, "1000000000": 619, "0047": 619, "0310j": 619, "5316j": 619, "7472j": 619, "8649j": 619, "0334j": 619, "8499j": 619, "9913j": 619, "6123j": 619, "202": 619, "154": [619, 2061], "59": [619, 1953, 1955], "182": 619, "243": [619, 1269, 1927], "253": 619, "188": 619, "185": [619, 2111], "252": [619, 2061], "191": 619, "63": [619, 2061, 2070], "240": 619, "227": 619, "165": 619, "190": 619, "146": 619, "106": 619, "205": 619, "112": [619, 2111], "206": 619, "189": 619, "95": [619, 1804, 1807, 1808], "147": 619, "43": 619, "246": [619, 2111], "87": 619, "235": 619, "226": 619, "254": [619, 2061], "111": [619, 1778, 2065], "117": 619, "177": 619, "28": [619, 748, 1091, 1949, 2063, 2065, 2099, 2100, 2105], "xlogi": [624, 2014, 2066, 2081], "inductor": [682, 975, 977, 2022, 2066, 2070, 2092, 2093, 2096, 2097, 2098, 2099, 2100, 2102, 2104, 2105, 2108, 2109, 2111, 2113], "dist_c10d": 682, "dist_ddp": [682, 2047], "dist_fsdp": 682, "aot_graph": [682, 2022], "aot_joint_graph": [682, 2022], "ddp_graph": [682, 2022, 2047], "graph_cod": [682, 2022, 2099], "graph_break": [682, 2022, 2096, 2099, 2102, 2103, 2109], "graph_siz": [682, 2099], "recompiles_verbos": 682, "trace_sourc": 682, "trace_cal": 682, "trace_bytecod": 682, "output_cod": [682, 2022, 2102, 2104, 2111], "kernel_cod": 682, "perf_hint": [682, 975], "post_grad_graph": 682, "cudagraph": [682, 975, 1042, 2045, 2092, 2102, 2104, 2105, 2108], "sym_nod": 682, "compiled_autograd_verbos": 682, "toggl": [682, 2045], "suppress": [682, 2016, 2074], "silenc": 682, "lowest": [682, 965, 1416, 1741, 1742, 1750, 1751, 1837, 1838, 2048, 2087], "notset": 682, "torchinductor": [682, 2022, 2092, 2093, 2098, 2100, 2102, 2104], "ddpoptim": [682, 2022], "symnod": [682, 2098], "opter": 682, "unregist": [682, 2022, 
2029, 2065], "cosin": [685, 686, 994, 995, 1459, 1460, 1614, 1800, 1801, 1808, 1885, 2067, 2102], "3348": 685, "5889": 685, "2005": [685, 1972, 2111], "1584": 685, "2294": [685, 1372], "2004": 685, "3690": 685, "7298": [685, 1845], "hyperbol": [686, 885, 888, 995, 1567, 1894, 1941], "uniform_": [686, 888, 945, 2012, 2014, 2033, 2040, 2048, 2081, 2089], "3192": 686, "9915": 686, "9674": 686, "7151": 686, "7791": 686, "3120": [686, 992], "2979": 686, "1341": 686, "_i": [687, 688, 689, 690, 691, 943, 945, 948, 951, 955, 970, 1103, 1153, 1296, 1298, 1411, 1575, 1819, 1823, 1839, 1842, 1877, 1924, 1979, 2081], "0202": 687, "0985": 687, "3506": [687, 1362], "6056": 687, "3944": 687, "9732": 687, "3497": 687, "6245": [687, 1304], "4022": [687, 1927], "3743": 687, "7724": 687, "5811": 687, "8017": 687, "7695": 687, "3930": 687, "3672": [687, 1007, 1305], "1450": [687, 1908], "6971": 687, "0736": [687, 2055], "0994": 687, "3216": 687, "7845": 687, "1610": 687, "1868": 687, "4090": 687, "9902": [687, 1007, 1305], "3667": [687, 992], "3925": 687, "6147": 687, "sum_": [688, 1269, 1324, 1341, 1435, 1436, 1437, 1453, 1454, 1455, 1461, 1493, 1494, 1495, 1514, 1529, 1533, 1535, 1921, 1922, 1923, 1944, 1949, 1971, 1972, 1973, 2081], "mathbin": [688, 691, 692, 943, 955, 1905], "doubletensor": [688, 689, 690, 691, 692, 943, 1868, 2083, 2086], "tensorfloat32": [688, 691, 943, 955, 1367, 1377, 1453, 1454, 1455, 1456, 1457, 1458, 1513, 1607, 1608, 1609, 1610, 1611, 1612, 1649, 1870, 2045, 2058], "6311": 688, "0503": 688, "9768": [688, 2055], "0362": 688, "1653": 688, "8185": 688, "4255": [688, 1411], "6760": 688, "9453": 688, "5743": 688, "8202": 688, "3691": 688, "0943": 688, "1109": [688, 1479, 1890], "4730": [688, 1944], "histor": [689, 1064, 1465, 2044, 2055, 2060], "2312": [689, 1908], "6496": 689, "1312": 689, "0428": 689, "4292": 689, "1030": 689, "5369": 689, "9829": 689, "0430": 689, "8635": 690, "6391": 690, "6174": 690, "7617": 690, "5879": 690, "7388": 690, "8353": 690, "6249": 690, "6511": 690, "8716": 691, "4671": 691, "3746": 691, "7573": 691, "9555": 691, "8681": 691, "3768": 692, "5565": 692, "otim": [693, 1293, 1476, 1631], "conj": [694, 991, 1124, 1125, 1127, 1129, 1136, 1138, 1142, 1143, 1145, 1302, 1303, 1309, 1311, 1330, 1850, 1851, 2014, 2048, 2052, 2066, 2086, 2106], "mh": [694, 966, 967, 968, 1309, 1927, 2014, 2066, 2084, 2086], "lvert": [696, 1261, 1575, 1669, 2087], "rvert": [696, 1261, 2087], "leq": [696, 944, 945, 992, 1126, 1128, 1261, 1293, 1297, 1312, 1318, 1445, 1467, 1529, 1531, 1533, 1616, 1668, 1671, 1880, 1923, 2040, 2081], "elementwis": [696, 992, 1023, 1050, 1051, 1151, 1153, 1426, 1488, 1489, 1490, 1693, 1970, 2016, 2035, 2052, 2081], "07": [696, 821, 822, 823, 824, 827, 966, 1111, 1310, 1313, 1318, 1330, 1331, 1337, 1360, 1363, 1730, 1779, 1786, 1884, 1889, 1927, 2065], "09": [696, 1786, 1798, 2020, 2087], "8177": 697, "4878": 697, "2491": 697, "9130": 697, "7158": 697, "1775": 697, "0992": 697, "4817": 697, "0053": 697, "0164": 697, "3738": 697, "0507": [697, 2065], "9700": 697, "1106": 697, "0318": 697, "0816": [697, 1362], "6451": 698, "4866": [698, 2093], "2987": 698, "3312": 698, "5744": 698, "2980": [698, 2035], "8397": 698, "2713": 698, "9128": 698, "9214": 698, "7268": 698, "2995": 698, "9023": [698, 1303], "4853": 698, "9075": 698, "6165": 698, "180": [700, 1092, 1834], "14159": [700, 1942], "135": 700, "45": [700, 1441, 1442, 1489, 1490, 1566, 1764, 2065], "ao": [702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 
722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 2070, 2071, 2072], "batch_norm": [702, 703, 1282, 2014, 2066], "fuse": [702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 725, 726, 727, 728, 729, 730, 731, 794, 795, 804, 806, 807, 808, 816, 850, 862, 863, 975, 1286, 1287, 1684, 1723, 1724, 1725, 1726, 1727, 1728, 1782, 1783, 1784, 1796, 1870, 2026, 2033, 2036, 2065, 2067, 2070, 2071, 2072, 2073, 2102, 2104, 2110], "bn": [704, 705, 706, 707, 708, 709, 816, 863, 1108, 1282, 1725, 1727, 2060, 2067, 2070, 2071], "qat": [714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 732, 733, 734, 735, 794, 795, 849, 2070, 2071, 2072], "dilat": [714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 740, 741, 742, 743, 744, 745, 774, 775, 776, 784, 785, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1502, 1503, 1504, 1505, 1506, 1507, 1519, 1520, 1521, 1578, 1607, 1608, 1609, 1610, 1611, 1612, 1626, 1657, 1658, 1659, 1702, 1831, 1832, 2014, 2065, 2106], "padding_mod": [714, 715, 716, 717, 718, 719, 720, 721, 727, 728, 729, 732, 733, 740, 741, 742, 743, 744, 745, 774, 775, 776, 1453, 1454, 1455, 1456, 1457, 1458, 1502, 1503, 1504, 1505, 1506, 1507, 1632, 2014, 2106], "momentum": [714, 715, 716, 717, 718, 719, 725, 726, 738, 739, 753, 754, 755, 1440, 1441, 1442, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1566, 1602, 1642, 1787, 1794, 1796, 1802, 1808, 1810, 2014, 2055, 2057, 2067, 2106], "freeze_bn": [714, 715, 716, 717, 718, 719], "qconfig": [714, 715, 716, 717, 718, 719, 720, 721, 722, 732, 733, 734, 735, 790, 791, 793, 796, 797, 798, 799, 840, 841, 842, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 861, 862, 864, 865, 2091], "batchnorm1d": [714, 717, 1499, 1566, 1602, 2060, 2072], "fakequant": [714, 715, 716, 717, 718, 719, 720, 721, 722, 732, 733, 734, 735, 804, 865], "weight_fake_qu": [714, 715, 716, 717, 718, 719, 720, 721, 732, 733], "quant": [714, 715, 716, 717, 718, 719, 720, 721, 722, 732, 733, 734, 758, 792, 794, 795, 799, 840, 864, 865, 1122, 1123, 2070, 2071], "batchnorm3d": [716, 719, 721, 726, 1501, 1566, 1602, 2072], "num_featur": [725, 726, 738, 739, 753, 754, 755, 1272, 1440, 1441, 1442, 1488, 1489, 1490, 1499, 1500, 1501, 1508, 1509, 1510, 1526, 1566, 1634, 2055], "qint8": [730, 731, 758, 763, 766, 767, 774, 775, 776, 783, 794, 796, 801, 822, 844, 861, 862, 864, 865, 1777, 1828, 1829, 2070, 2072, 2073, 2082, 2086, 2087], "from_float": [734, 740, 741, 742, 747, 748, 758, 766, 799, 819, 825, 828, 840, 2070], "use_precomputed_fake_qu": [734, 740, 741, 742, 747, 748, 758, 766, 799, 867], "qparams_dict": [734, 740, 741, 742, 766], "hidden_s": [736, 762, 763, 767, 1477, 1478, 1496, 1497, 1542, 1543, 1544, 2014], "num_lay": [736, 762, 1477, 1496, 1542, 1543, 1571, 1573, 2014, 2055], "batch_first": [736, 737, 762, 1477, 1496, 
1532, 1542, 1543, 1570, 1572, 1574, 1758, 1760, 1761, 1763, 2014, 2050], "bidirect": [736, 762, 1477, 1496, 1542, 1543, 2014], "_lstmlayer": 736, "nnqa": 736, "h0": [736, 762, 764, 1477, 1496, 1542], "c0": [736, 764, 1496, 2111], "hn": [736, 762, 764, 1477, 1478, 1496, 1542], "cn": [736, 764, 1272, 1496, 1839], "weight_ih": [736, 1478, 1497, 1542, 1544], "weight_hh": [736, 1478, 1497, 1542, 1544], "embed_dim": [737, 1532], "num_head": [737, 1532, 1586, 2014], "add_bias_kv": [737, 1532], "add_zero_attn": [737, 1532, 2014], "kdim": [737, 1532], "vdim": [737, 1532], "dequant": [737, 790, 792, 794, 795, 801, 803, 864, 2014, 2066, 2071, 2073, 2087, 2090], "mha": [737, 1532, 2012], "conver": 737, "key_padding_mask": [737, 1532, 2014], "need_weight": [737, 1532, 2014], "attn_mask": [737, 1532, 1570, 1684, 2014], "average_attn_weight": [737, 1532, 2014], "is_caus": [737, 1532, 1573, 1574, 1589, 1684, 2014], "attn_output_weight": [737, 1532], "unmask": [737, 1570], "attn_weight": [737, 1532, 1684], "head": [737, 1430, 1532, 1570, 1572, 1574, 2046], "attn_output": [737, 1532], "quint8": [740, 741, 742, 743, 744, 745, 747, 748, 758, 774, 775, 776, 783, 794, 796, 801, 821, 822, 823, 824, 827, 829, 862, 864, 1777, 1828, 1829, 1830, 1831, 1832, 2070, 2072, 2073, 2082, 2086, 2087], "learnabl": [740, 741, 742, 743, 744, 745, 747, 748, 758, 762, 766, 1440, 1441, 1442, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1468, 1469, 1477, 1478, 1480, 1488, 1489, 1490, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1534, 1541, 1542, 1544, 1566, 1624, 1677, 1715, 2055, 2067], "q_input": [740, 741, 742, 743, 744, 745, 774, 775, 776], "quantize_per_tensor": [740, 741, 742, 743, 744, 745, 758, 759, 760, 774, 775, 776, 1830, 1831, 1832, 2014, 2066, 2070], "unequ": [741, 742, 743, 744, 745, 1454, 1455, 1457, 1458], "50": [741, 743, 744, 745, 774, 826, 1328, 1436, 1437, 1445, 1453, 1454, 1455, 1457, 1458, 1473, 1474, 1493, 1494, 1495, 1519, 1520, 1521, 1577, 1609, 1610, 1612, 1616, 1627, 1628, 1795, 1939, 2035, 2045, 2085, 2102], "56": [742, 1962, 2080, 2105], "output_pad": [743, 744, 745, 1456, 1457, 1458, 1505, 1506, 1507, 1610, 1611, 1612, 2014, 2106], "qnnpack": [743, 744, 758, 766, 858, 859, 862, 864, 865, 2070, 2072], "convtranspose2d": [743, 1506, 1611, 1723, 1964, 2072], "nnq": [743, 744, 745, 790, 791, 792, 867, 2070], "downsampl": [743, 744, 745, 1457, 1579, 1597, 1632, 1643], "upsampl": [743, 744, 745, 781, 788, 789, 821, 1457, 1580, 1581, 1597, 1632, 1643, 1704, 1705], "fbgemm": [744, 745, 758, 766, 835, 858, 859, 862, 864, 865, 2020, 2070, 2071, 2102, 2103], "cubic": [745, 1474, 1628, 1632], "num_embed": [747, 748, 1468, 1469, 1624], "embedding_dim": [747, 748, 1468, 1469, 1498, 1623, 1624], "padding_idx": [747, 1468, 1469, 1623, 1624, 2014, 2106], "scale_grad_by_freq": [747, 748, 1468, 1469, 1623, 1624, 2014, 2106], "_weight": [747, 748, 1468, 1469, 1787, 1793, 2110], "overwritten": [747, 748, 758, 766, 799, 844, 864, 2042, 2048, 2087, 2096], "_embed": [747, 748], "_dim": [747, 748, 1468], "include_last_offset": [748, 1469, 1624, 2014, 2106], "embedding_bag": [748, 2014, 2066], "floatfunct": [749, 2070], "activation_post_process": [749, 801, 2070], "add_relu": [749, 750, 759, 2026, 2066, 2090], "add_scalar": [749, 750, 759, 2066, 2085, 2090], "mul_scalar": [749, 750, 759, 2066, 2090], "collector": 750, "f_add": 750, "num_channel": [751, 1480, 2034], "normalized_shap": [756, 1498, 1541, 1646, 1681, 1715, 2014, 2035, 2106], "elementwise_affin": [756, 1498, 
1541, 1715], "negative_slop": [757, 782, 1512, 1647, 1648, 2014, 2040, 2106], "slope": [757, 782, 1512, 1558, 2040], "bias_": [758, 766], "_featur": [758, 766, 783, 1430, 1443, 1511, 1513, 1603, 1649], "precomput": [758, 799, 2098], "from_refer": [758, 766], "ref_qlinear": [758, 766], "output_scal": [758, 761, 1830, 2014], "output_zero_point": [758, 761, 1830, 2014], "q_add": 759, "qint32": [759, 760, 1777, 1828, 1829, 2070, 2073, 2082, 2086, 2087], "x_0": [760, 1949], "gate": [762, 763, 1476, 1477, 1478, 1496, 1556, 1631, 1687], "r_t": [762, 1477, 1793], "w_": [762, 1428, 1429, 1432, 1433, 1436, 1437, 1439, 1447, 1448, 1449, 1450, 1451, 1452, 1454, 1455, 1457, 1458, 1461, 1473, 1474, 1477, 1478, 1494, 1495, 1496, 1497, 1520, 1521, 1523, 1524, 1533, 1538, 1539, 1542, 1544, 1548, 1549, 1550, 1551, 1552, 1553, 1579, 1580, 1581, 1582, 1583, 1584, 1628, 1632], "x_t": [762, 898, 902, 903, 908, 909, 1440, 1441, 1442, 1477, 1488, 1489, 1490, 1496, 1542, 1566, 1781], "b_": [762, 1477, 1478, 1496, 1497, 1514, 1542, 1544, 1944, 2080], "hr": [762, 1477, 1478, 1496, 2052], "h_": [762, 1428, 1429, 1432, 1433, 1436, 1437, 1443, 1448, 1449, 1451, 1452, 1454, 1455, 1457, 1458, 1473, 1474, 1477, 1478, 1494, 1495, 1496, 1513, 1520, 1521, 1522, 1523, 1524, 1538, 1539, 1542, 1544, 1549, 1550, 1552, 1553, 1579, 1580, 1581, 1583, 1584, 1603, 1628, 1632], "z_t": [762, 1477], "iz": [762, 1477, 1478], "hz": [762, 1019, 1477, 1478, 2085], "n_t": [762, 1477], "odot": [762, 1477, 1478, 1496, 1497], "h_t": [762, 1477, 1496, 1542], "hadamard": [762, 1477, 1478, 1496, 1497], "multilay": [762, 1477, 1496, 2063], "_t": [762, 1477, 1496, 1794, 1796, 2048, 2067], "b_ih": [762, 1477, 1478, 1496, 1497, 1542, 1544, 2014], "b_hh": [762, 1477, 1478, 1496, 1497, 1542, 1544, 2014], "h_0": [762, 1477, 1496, 1497, 1542], "seq_len": [762, 1477, 1496, 1532, 1542], "pack_padded_sequ": [762, 1477, 1496, 1542, 1757, 1759, 1760, 2050], "num_direct": [762, 1477, 1496, 1542], "h_n": [762, 1477, 1496, 1542], "_size": [762, 1427, 1428, 1429, 1431, 1432, 1433, 1435, 1436, 1437, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1473, 1474, 1477, 1478, 1493, 1494, 1495, 1496, 1497, 1519, 1520, 1521, 1522, 1523, 1524, 1542, 1544, 1578, 1628], "_layer": [762, 1477, 1496, 1542], "_direct": 762, "output1": [762, 1430, 2041, 2065], "output2": [762, 1430], "weight_ih_l": [762, 1477, 1496, 1542], "w_ir": [762, 1477], "w_iz": [762, 1477], "w_in": [762, 1477], "weight_hh_l": [762, 1477, 1496, 1542], "w_hr": [762, 1477], "w_hz": [762, 1477], "w_hn": [762, 1477], "bias_ih_l": [762, 1477, 1496, 1542], "b_ir": [762, 1477], "b_iz": [762, 1477], "b_in": [762, 1477], "bias_hh_l": [762, 1477, 1496, 1542], "b_hr": [762, 1477], "b_hz": [762, 1477], "b_hn": [762, 1477], "mathcal": [762, 1443, 1453, 1454, 1455, 1456, 1457, 1458, 1468, 1469, 1477, 1478, 1496, 1497, 1511, 1513, 1542, 1544, 1545, 1566, 1839, 2040, 2052], "subtli": [762, 1477, 1796], "gru": [763, 1478, 1543, 2014, 2045, 2066, 2070, 2072], "cell": [763, 765, 767, 1477, 1478, 1496, 1497, 1542, 1544], "hx": [763, 765, 767, 1478, 1497, 1544, 2014], "cx": [765, 1497, 2014], "nonlinear": [767, 1438, 1459, 1485, 1542, 1544, 1554, 2012, 2040, 2080], "elman": [767, 1542, 1544], "adaptiveavgpool2d": [768, 1592, 1964, 2072], "adaptiveavgpool3d": [769, 1593, 1964, 2072], "ceil_mod": [770, 771, 784, 785, 1435, 1436, 1437, 1493, 1494, 1495, 1519, 1520, 1521, 1599, 1600, 1601, 1653, 1654, 1655, 1657, 1658, 1659, 1831, 1832, 2014, 2106], "count_include_pad": [770, 771, 1435, 1436, 1437, 1599, 1600, 1601, 2014, 2106], 
"divisor_overrid": [770, 771, 1436, 1437, 1600, 1601, 2014, 2106], "kh": [770, 771, 775, 776, 1436, 1437, 1473, 1474, 1520, 1521, 1600, 1601, 1608, 1609, 1611, 1612, 1627, 1628, 1658, 1659], "kw": [770, 771, 775, 776, 1436, 1437, 1473, 1474, 1520, 1521, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1627, 1628, 1657, 1658, 1659], "sh": [770, 771, 775, 776, 1600, 1601, 1608, 1609, 1611, 1612, 1658, 1659, 2111], "sw": [770, 771, 774, 775, 776, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1657, 1658, 1659], "avgpool2d": [770, 1600, 2072], "_channel": [770, 771, 774, 775, 776, 1453, 1454, 1455, 1456, 1457, 1458, 1480, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1657, 1658, 1659, 2075], "ih": [770, 771, 775, 776, 1542, 1544, 1600, 1601, 1608, 1609, 1611, 1612, 1658, 1659], "iw": [770, 771, 774, 775, 776, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612, 1657, 1658, 1659], "padh": [770, 771, 775, 776, 1600, 1601, 1608, 1609, 1611, 1612], "padw": [770, 771, 774, 775, 776, 1599, 1600, 1601, 1607, 1608, 1609, 1610, 1611, 1612], "kd": [771, 776, 1437, 1521], "padd": [771, 776], "formul": [772, 1444, 1467, 1481, 1516, 1563, 1564, 1651, 1669, 1684, 2052, 2080], "min_": [773, 1308, 1309, 1318, 1336, 1927], "max_": [773, 1519, 1520, 1521, 1731, 1765], "convolv": [774, 775, 776, 1453, 1454, 1455, 1456, 1457, 1458, 1502, 1503, 1504, 1505, 1506, 1507, 1607, 1608, 1609, 1610, 1611, 1612], "dw": [774, 775, 776, 1607, 1608, 1609, 1610, 1611, 1612], "qf": [774, 775, 776], "dtype_input": [774, 775, 776], "dtype_filt": [774, 775, 776], "q_filter": [774, 775, 776], "dh": [775, 776, 1608, 1609, 1611, 1612], "dd": 776, "scale_factor": [781, 787, 788, 789, 1579, 1580, 1581, 1643, 1684, 1703, 1704, 1705, 2014, 2106], "align_corn": [781, 787, 788, 1579, 1580, 1597, 1632, 1643, 1703, 1704, 2014, 2106], "height": [781, 787, 1436, 1437, 1454, 1455, 1457, 1458, 1494, 1495, 1520, 1521, 1533, 1561, 1579, 1643, 1703, 2034], "spatial": [781, 787, 788, 789, 963, 1128, 1441, 1472, 1498, 1538, 1539, 1561, 1578, 1579, 1580, 1581, 1597, 1605, 1632, 1643, 1673, 1674, 1675, 1703, 1704, 1705], "pixel": [781, 787, 789, 1454, 1461, 1464, 1465, 1466, 1470, 1533, 1538, 1539, 1579, 1597, 1632, 1643, 1703, 1705], "corner": [781, 787, 954, 1579, 1587, 1588, 1589, 1597, 1632, 1643, 1703, 2063], "leakyrelu": [782, 1527, 1647, 2055, 2072], "_slope": [782, 1512, 1647, 2040], "xa": [783, 1322, 1333, 1334, 1335, 1513, 1649], "return_indic": [784, 785, 1431, 1432, 1433, 1473, 1474, 1519, 1520, 1521, 1522, 1523, 1524, 1594, 1595, 1596, 1627, 1628, 1657, 1658, 1659, 2014], "maxpool1d": [784, 1522, 1657, 1660, 2072], "linearli": [787, 1173, 1331, 1579, 1703, 1805, 1826, 2050, 2067, 2073], "neighbour": [789, 1514, 1643, 1705], "stub": [790, 791, 2068], "calibr": [790, 791, 799, 840, 841, 860, 862, 864, 2041, 2070, 2071, 2073, 2091], "quantstub": [792, 2070], "dequantstub": [792, 2070], "quantwrapp": 793, "backend_config": [794, 795, 796, 797, 798, 819, 820, 862, 863, 864, 865, 2091], "backendpatternconfig": [794, 864], "blob": [794, 1166, 2052, 2054, 2085, 2110], "dtypeconfig": [794, 795, 797, 864], "observationtyp": [794, 795, 864, 2072], "weighted_int8_dtype_config": [794, 864], "input_dtyp": [794, 796, 864, 2072], "output_dtyp": [794, 796, 864, 2072, 2106], "weight_dtyp": [794, 796, 864, 2072], "bias_dtyp": [794, 796, 2072], "fuse_conv2d_relu": 794, "is_qat": [794, 795], "convrelu2d": [794, 2072], "linear_config": 794, "set_observation_typ": [794, 795, 864], "output_use_different_observer_as_input": [794, 795, 798, 
864, 2072], "add_dtype_config": [794, 795, 864], "set_root_modul": [794, 795], "set_qat_modul": [794, 795], "set_reference_quantized_modul": [794, 795], "conv_relu_config": 794, "set_fused_modul": [794, 795], "set_fuser_method": [794, 795], "fused_conv_relu_config": 794, "my_backend": [794, 2097], "set_backend_pattern_config": [794, 864], "from_dict": [794, 795, 796, 817, 818, 819, 857], "backend_config_dict": [794, 2070], "set_nam": 794, "to_dict": [794, 795, 796, 817, 818, 819, 857], "backendconfig": [795, 820, 862, 864, 2073], "dtype_config": [795, 2072], "backend_pattern_config_dict": 795, "observation_typ": [795, 2072], "qat_modul": [795, 2072], "reference_quantized_modul": 795, "fused_modul": [795, 2072], "fuser_method": [795, 816, 2072], "pattern_complex_format": 795, "set_dtype_config": 795, "fuser": [795, 1286, 2013], "fuse_linear_relu": 795, "linearrelu": [795, 2072], "8bea7180a8ba3c279f2c9b050f2a69a6": 795, "understood": [795, 962], "output_share_observer_with_input": [795, 798, 2072], "renam": [795, 1965, 1967, 2014, 2020, 2033, 2034, 2066, 2105], "quantdequantstub": 795, "set_pattern": 795, "is_dynam": [796, 801, 821, 822, 823, 824, 826, 827, 828, 2072], "quant1": 796, "dequant1": 796, "fp32_linear": 796, "quant2": 796, "dequant2": 796, "bracket": [796, 2076], "dtype_config1": 796, "dtype_config2": 796, "dtypewithconstraint": [796, 2072], "quant_min_lower_bound": [796, 797, 2072], "quant_max_upper_bound": [796, 797, 2072], "255": [796, 804, 1122, 1123, 1162, 1632, 1643, 1703, 2070, 2072, 2085], "input_dtype_with_constraint": 796, "scale_min_lower_bound": [796, 797, 2072], "scale_max_upper_bound": [796, 797, 2072], "dtype_config_dict": 796, "bias_typ": [796, 864], "scale_exact_match": [797, 2072], "zero_point_exact_match": [797, 2072], "quant_min": [797, 801, 804, 821, 822, 823, 824, 827, 828, 1122, 1123, 2014, 2070], "quant_max": [797, 801, 804, 821, 822, 823, 824, 827, 828, 1122, 1123, 2014, 2070], "fixedqparamsobserv": 797, "fixedqparamsfakequant": 797, "input_output_not_observ": [798, 2072], "remove_qconfig": 799, "is_refer": 799, "convert_custom_config_dict": [799, 817, 2070], "from_observ": [799, 817, 2070], "observed_to_quantized_custom_module_class": [799, 817, 2070], "observedcustommodul": [799, 817, 819, 840, 2070], "quantizedcustommodul": [799, 817], "calib_data": 800, "fake_quant": [801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 865, 1122, 1123, 2091], "movingaverageminmaxobserv": [801, 804, 824], "observer_kwarg": [801, 804], "x_out": [801, 804], "quanti": 801, "choose_qparam": 801, "dq": 801, "fake_quant_en": 801, "observer_en": 801, "calculate_qparam": [802, 821, 822, 826], "extra_repr": [803, 1272, 1526, 1541, 1715, 2048], "qscheme": [804, 821, 822, 823, 824, 827, 828, 829, 2014, 2066, 2070, 2073, 2087], "fake_qu": [805, 809, 810, 811, 2070], "default_fake_qu": 806, "default_per_channel_weight_fake_qu": 807, "default_weight_fake_qu": 808, "histogram": [809, 821, 833, 1233, 1235, 2014, 2066, 2085], "memoryless": [810, 811], "averaging_const": [810, 811, 823, 824, 2014], "modules_to_fus": 816, "fuser_func": 816, "fuse_known_modul": 816, "fuse_custom_config_dict": [816, 818], "convmodul": 816, "bnmodul": 816, "convbnmodul": 816, "additional_fuser_method_map": 816, "fuse_conv_bn": [816, 2072], "conv1": [816, 1284, 1526, 1555, 2013, 2063, 2070, 2085], "bn1": 816, "relu1": [816, 1555, 1706], "fused_m": 816, "custom_config": [817, 818, 819, 820], "convert_fx": [817, 2070, 2091], "convert_custom_config": [817, 862, 2070], 
"set_observed_to_quantized_map": 817, "set_preserved_attribut": [817, 818, 819], "attr1": [817, 818, 819, 2065], "attr2": [817, 818, 819, 2065], "floatcustommodul": [817, 819], "weight_onli": [817, 862, 2025, 2070], "preserved_attribut": [817, 818, 819], "observed_class": [817, 819], "quantized_class": 817, "quant_typ": [817, 819], "quanttyp": [817, 819], "fuse_fx": [818, 2070], "fuse_custom_config": [818, 863], "convertcustomconfig": [818, 862], "prepare_fx": [819, 842, 862, 865, 2070, 2091], "prepare_qat_fx": [819, 862, 2070], "prepare_custom_config": [819, 820, 864, 865, 2070], "set_standalone_module_nam": 819, "module1": [819, 857, 2045], "qconfig_map": [819, 820, 857, 858, 859, 862, 864, 865, 2070], "child_prepare_custom_config": 819, "set_standalone_module_class": 819, "mystandalonemodul": 819, "set_float_to_observed_map": 819, "set_non_traceable_module_nam": 819, "module2": [819, 857, 2045], "module3": [819, 2045], "set_non_traceable_module_class": 819, "nontraceablemodule1": 819, "nontraceablemodule2": 819, "set_input_quantized_index": 819, "set_output_quantized_index": 819, "prepare_custom_config_dict": [819, 840, 842, 2070], "standalone_module_nam": 819, "standalone_module_class": 819, "module_class": 819, "float_to_observed_custom_module_class": [819, 840, 2070], "non_traceable_module_nam": 819, "non_traceable_module_class": 819, "input_quantized_idx": 819, "output_quantized_idx": 819, "float_class": 819, "qconfigmap": [820, 858, 859, 862, 864, 2070, 2073], "preparecustomconfig": [820, 864], "2048": [821, 1570, 1572, 1574, 2045], "upsample_r": 821, "per_tensor_affin": [821, 822, 823, 1829, 1830, 1831, 1832, 2070, 2073], "reduce_rang": [821, 822, 823, 824, 827, 829, 2014, 2070, 2071], "factory_kwarg": [821, 822, 827], "1920928955078125e": [821, 822, 823, 824, 827], "finfo": [821, 822, 823, 824, 827, 1327, 1330, 1426, 1541, 1715, 2012, 2087], "minmaxobserv": [821, 823, 827, 844, 864, 865, 2073], "x_": [822, 823, 997, 1118, 1346, 1347, 1349, 1352, 1360, 1439, 1461, 1516, 1533, 1560, 1562, 1690, 1904, 1906, 1949, 2042, 2073, 2081], "q_": [822, 2073], "x_orig": 822, "reset_min_max_v": [822, 827], "ch_axi": [824, 827, 2014], "per_channel_affin": [824, 827, 1828, 2070, 2073], "custom_op_nam": [825, 828], "with_arg": [826, 844, 864, 865], "_callable_arg": 826, "_with_arg": 826, "foo_build": 826, "foo_instance1": 826, "foo_instance2": 826, "with_callable_arg": 826, "_with_callable_arg": 826, "cur_tim": 826, "get_time_func": 826, "dan": 826, "creation_tim": 826, "compute_dtyp": 828, "ptq": [833, 2070, 2071, 2073], "obs_dict": 839, "get_observer_state_dict": 839, "allow_list": [840, 2090], "observer_non_leaf_module_list": 840, "preemptiv": [840, 841, 1716], "propagate_qconfig_": 842, "qconfig_dict": [842, 857], "pt2e": 843, "export_util": 843, "my_qconfig": 844, "default_observ": 844, "default_qat_config": 850, "set_glob": [857, 862, 864, 2070], "set_object_typ": [857, 862, 864], "set_module_name_regex": 857, "regex": 857, "set_module_nam": [857, 862, 864], "set_module_name_object_type_ord": 857, "global_qconfig": 857, "qconfig1": 857, "qconfig2": 857, "qconfig3": 857, "object_typ": 857, "module_name_regex": 857, "module_name_object_type_ord": 857, "conv0": 857, "x86": [858, 859, 1871, 2070, 2072], "run_arg": [860, 866], "qconfig_spec": 861, "quantize_fx": [862, 863, 864, 865, 2070, 2091], "_remove_qconfig": 862, "qconfig_from_prepar": 862, "prepared_model": [862, 864, 865], "xnnpack": [862, 2026, 2070], "get_default_backend_config": [862, 864, 865], "quantized_model": 862, 
"fusion_pattern": 863, "fusecustomconfig": 863, "_equalization_config": 864, "get_default_qconfig_map": [864, 2070], "float_model": [864, 865, 2070, 2090], "data_load": [864, 865, 1802, 1808, 2057, 2070], "get_default_qconfig": [864, 865, 2070], "linear_pattern_config": 864, "suer": 864, "sample_inference_data": [864, 2070], "get_default_qat_qconfig_map": [865, 2070], "load_weight": 865, "train_data": 865, "get_default_qat_qconfig": [865, 2070], "custom_module_class_map": 867, "lceil": [868, 964], "rceil": [868, 964], "adjac": [868, 954, 1464, 1465, 1466, 1470, 1842, 2080, 2109], "set_default_dtyp": [868, 944, 953, 1109, 1111, 1121, 1126, 1144, 1160, 1163, 1219, 1230, 1231, 1292, 1775, 1835, 1839, 1842, 1868, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2009], "get_default_dtyp": [868, 1343, 1359, 1777, 1842, 2082, 2083, 2116], "set_default_devic": [868, 944, 953, 1109, 1111, 1121, 1126, 1144, 1160, 1163, 1230, 1231, 1292, 1343, 1359, 1775, 1835, 1837, 1839, 1841, 1842, 1868, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1909, 1910, 1911, 1912, 1913, 1914, 1953, 1955, 2009, 2083], "5000": [868, 883, 889, 890, 891, 946, 970, 1126, 1128, 1129, 1136, 1139, 1144, 1154, 1156, 1157, 1158, 1226, 1232, 1235, 1298, 1324, 1343, 1417, 1419, 1450, 1451, 1469, 1579, 1827, 1831, 1832, 1833, 1842, 1846, 2081, 2086], "maxim": [877, 1370, 1459, 1522, 1523, 1524, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 2080, 2102, 2111], "3398": 877, "2663": [877, 2045], "2686": 877, "2450": 877, "7401": 877, "8805": 877, "3402": 877, "1936": 877, "4907": [877, 1360], "3948": [877, 992], "0691": 877, "3132": 877, "6092": 877, "5419": 877, "2993": [877, 1926], "3195": 877, "1139": 878, "2254": 878, "1381": [878, 1926], "3687": 878, "1975": [878, 2035], "0102": 878, "4732": 878, "9240": 878, "1207": [878, 1411], "7506": 878, "0213": 878, "7809": 878, "2960": 878, "9384": 878, "1438": 878, "ascend": [879, 1128, 1309, 1311, 1340, 1395, 1862, 1899, 1960], "0785": 879, "5267": 879, "8521": 879, "4065": 879, "1598": 879, "0788": 879, "0745": 879, "2700": 879, "2208": 879, "0722": 879, "7064": 879, "2564": 879, "0669": 879, "2318": 879, "8229": 879, "9280": 879, "lexicograph": [880, 1770, 2016, 2080], "9039": 881, "6291": 881, "0795": [881, 1827, 2055], "1586": 881, "1939": [881, 2048], "4900": 881, "1909": 881, "7503": 881, "9355": 881, "histori": [882, 883, 1175, 1786, 1942, 2012, 2023, 2035, 2048, 2050, 2076], "dlpack": [883, 1159, 2012], "frombuff": [883, 1866], "data_ptr": [883, 929, 2020, 2033, 2066, 2082, 2084], "addbackward0": [883, 2055, 2060], "__array_interface__": [883, 2102], "5962": 884, "4985": 884, "4396": 884, "4525": [884, 2035], "6387": 884, "4552": 884, "sine": [885, 1882, 1892, 1894], "1606": 885, "4267": 885, "0899": 885, "0250": 885, "1599": 885, "1534": 885, "9435": 885, "8990": [885, 1103], "arctang": [886, 887], "2341": 886, "2539": 886, "6256": 886, "6448": 886, "2299": 886, "2487": 886, "5591": [886, 915], "5727": 886, "quadrant": 887, "9041": [887, 963], "0196": [887, 963], "3108": [887, 963], "4423": [887, 963], "9833": 887, "0811": 887, "9743": 887, "4151": 887, "tangent": [888, 897, 901, 902, 1172, 1173, 1567, 1940, 1941], "9385": 888, "2968": 888, "8591": 888, "1871": 888, "7253": 888, "3060": 888, "2899": 888, "1893": 888, "needs_input_grad": [892, 908, 909, 2048], "setup_context": [893, 903, 904, 905, 906, 907, 908, 909, 2020, 2049], "save_for_forward": [893, 903, 908, 909, 2049], "grad_input": [894, 903, 905, 907, 908, 
909, 927, 1272, 1526, 2048, 2055], "underneath": [895, 908, 909], "generate_vmap_rul": [895, 908, 909, 2049], "out_dim": [895, 908, 909, 1177, 1976, 2014, 2034, 2049], "grad_tensor": [896, 917, 2014, 2045], "grad_vari": 896, "forward_ad": [897, 898], "primal": [897, 902, 1168, 1172, 1173, 1176], "unpack_du": [897, 898, 901], "dual": [897, 898, 899, 901, 902, 1769, 2049], "make_du": [898, 902, 903, 908, 909], "your_fn": 898, "grad_aft": 898, "dual_level": [901, 902, 903, 908, 909], "apply_jvp": 903, "mark_dirti": [903, 908, 909, 929, 2048], "x_npy": [903, 904, 908], "once_differenti": [903, 904, 905, 906, 907, 908, 909, 2048], "mark_non_differenti": [903, 908, 909, 2048, 2049], "g1": [903, 905, 907, 908, 909, 2045, 2077], "g2": [903, 905, 907, 908, 909, 2045, 2077], "weren": [903, 906, 908], "grad_out": [903, 906, 908, 909, 2014, 2052, 2106], "gx": [903, 906, 908], "gy": [903, 906, 908], "gz": [903, 906, 908, 2069], "y_t": [903, 908, 909], "fwad": [903, 908, 909], "a_dual": [903, 908, 909], "set_materialize_grad": [903, 908, 909, 2048], "simplefunc": [903, 907, 908, 909], "induc": [903, 907, 908, 909, 1632, 1671, 2040, 2098], "backward_extend": 909, "forward_extend": 909, "outer_jacobian_strategi": 911, "disconnect": [911, 912, 913, 914, 915, 916], "cliff": [911, 913, 917], "_debug_only_display_vmap_fallback_warn": [911, 917], "pow_reduc": [911, 912, 915], "2265": 911, "8221": 911, "9456": [911, 945], "2550": 911, "viewbackward": [911, 913, 2051], "pow_adder_reduc": [911, 912, 915], "func_output": [912, 914, 915, 916], "1448": 912, "0239": 912, "6456": 912, "4988": 912, "4310": 912, "sumbackward0": [912, 915, 2048], "3030": 912, "vhp": 912, "batched_grad": 913, "exp_reduc": [913, 914, 916], "4917": 913, "4352": 913, "4369": 913, "3799": 913, "exp_add": 913, "8052": 913, "3963": 913, "3090": 914, "6742": 914, "9114": 914, "2106": 914, "sumbackward1": [914, 916], "squeezebackward1": 914, "adder": [914, 916], "2399": 914, "5005": 914, "0689": 915, "2431": 915, "0989": 915, "4456": 915, "8053": [915, 1857], "7817": 916, "2458": 916, "7830": 916, "7782": 916, "4458": 916, "3962": 916, "3042": [916, 1330], "6354": 916, "1288": [916, 1885, 1920], "0652": 916, "5483": 916, "5035": 916, "2046": [916, 992, 2048], "1292": 916, "1432": 916, "3059": 916, "3225": 916, "6652": 916, "7753": 916, "0152": 916, "4225": 916, "3340": 916, "only_input": 917, "allow_unus": [917, 2014], "is_grads_batch": 917, "materialize_grad": 917, "require_grad": [917, 2016, 2042], "inferencemod": [918, 2042, 2093], "bump": 918, "_version": [918, 2066], "doubler": [918, 1112, 1769], "is_train": [919, 2089], "gradgradcheck": [921, 922, 2048], "06": [922, 923, 1313, 1318, 1319, 1330, 1331, 1336, 1479, 1535, 1575, 1578, 1629, 1700, 1781, 1795, 1927, 2014, 2087], "raise_except": [922, 923, 2020], "nondet_tol": [922, 923], "check_undefined_grad": [922, 923], "check_grad_dtyp": [922, 923], "check_batched_grad": [922, 923], "check_batched_forward_grad": 922, "check_forward_ad": 922, "check_backward_ad": 922, "fast_mod": [922, 923, 2052], "differenc": [922, 2048], "perturb": [922, 923, 2052], "gen_non_contig_grad_output": 923, "check_fwd_over_rev": 923, "check_rev_over_rev": 923, "noncontigu": [923, 1050, 2087], "inaccuraci": 923, "clonebackward0": 925, "gi": [927, 928, 2055], "removablehandl": [927, 928, 1272, 1526, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 2068], "88446": 931, "profilerstep": 931, "optimizer1step": 931, "optimizer2step": 931, 
"optimizer1": [931, 2041], "current_step": 931, "erase_step_count": 931, "increment_step": 931, "_kineto_step": 931, "init_step_count": 931, "eventlist": [934, 935], "group_by_stack_n": [935, 2069], "roof": 935, "functioneventavg": [935, 937], "node_id": 938, "77": 938, "47": 938, "470u": 938, "73": 938, "465u": 938, "03": [938, 966, 1350, 1883, 1884, 1889], "121": 938, "891u": 938, "324u": 938, "421u": 938, "503u": 938, "234": [938, 2020], "344u": 938, "000u": 938, "profiler_util": [939, 940, 941, 942], "elapsed_u": 939, "mem_record": 941, "in_interv": 941, "start_u": 941, "end_u": 941, "shallow": [942, 1272, 1462, 1526, 1575, 1576, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "default_factori": [942, 2110], "__missing__": 942, "fromkei": [942, 1536], "keyerror": 942, "popitem": [942, 1536, 2066], "lifo": 942, "setdefault": [942, 1536, 2066], "window_length": [944, 953, 1230, 1231, 1292, 2014], "2n": [944, 1880, 2080], "_length": [944, 953, 1230, 1231, 1269, 1445, 1923], "sim": [945, 1540, 1676, 1819, 1839], "pseudorandom": [945, 1412, 1772, 1816, 1819, 1835, 1837, 1839, 1841, 1928], "1737": 945, "0950": [945, 1905], "3609": 945, "7148": 945, "0289": [945, 1952], "2676": 945, "8937": 945, "7202": 945, "2500": [946, 1126, 1128, 1129, 1139, 1144, 1343, 1579, 1833], "7500": [946, 1129, 1139, 1158, 1226, 1328, 1343, 1579, 1833, 1888], "AND": [947, 1191, 1354, 2016, 2042], "OR": [950, 1208, 1356, 2016, 2035], "xor": [952, 1357, 2016], "blackman": [953, 1890], "arrang": 954, "broadcast_tensor": [956, 2014, 2066], "out_int32": [959, 1862, 2014], "opposit": [959, 1129, 1131, 1570, 1927, 1948], "formal": [959, 1862, 2023, 2034, 2051, 2079], "eg": [959, 1855, 1862, 2110], "from_": [960, 2014], "tensor_a": [961, 974], "tensor_b": 961, "6580": 962, "0969": 962, "4614": 962, "1034": [962, 1108], "5790": 962, "1497": 962, "compute_mod": [963, 2106], "use_mm_for_euclid_dist_if_necessari": 963, "distanc": [963, 1269, 1308, 1309, 1336, 1485, 1535, 1575, 1576, 1673, 1701, 1802, 1923, 1927, 2012], "infti": [963, 1269, 1324, 1438, 1493, 1494, 1495, 1558, 1673, 1793, 2081], "use_mm_for_euclid_dist": 963, "donot_use_mm_for_euclid_dist": 963, "minkowski": [963, 1673], "ham": [963, 1230, 1673, 1886], "closest": [963, 1673], "xn": [963, 1673], "4821": [963, 966], "059": 963, "0590": 963, "1763": [963, 1845], "4713": [963, 1845], "6986": [963, 1845], "3702": [963, 1845], "1193": [963, 1362], "0959": 963, "7138": 963, "8322": 963, "2830": [963, 1954], "3791": 963, "6341": 964, "4208": 964, "0900": 964, "5826": 964, "clr": [965, 1802], "3375": 965, "9790": 965, "1119": 965, "6577": 965, "5609": [965, 1623], "5095": 965, "2614": 965, "4038": 965, "3378": [965, 1954], "4982": 965, "2457": [965, 1375], "2561": 965, "4684": 965, "7163": 965, "9647": 965, "8917": [965, 1353], "3213": [965, 1349], "2284": [965, 1088], "8615": 965, "2816": 965, "tu": 966, "mt": [966, 1302, 1309, 1311, 1315, 1316, 1317, 1322, 1331, 1826, 1927, 2014, 2023, 2066, 2084, 2086], "4112": 966, "7486": 966, "4551": 966, "3544": 966, "6724": 966, "5528": 966, "0592": [966, 2055], "9371": 966, "5487": 966, "7023": 966, "3842e": [966, 1318], "hermitian": [967, 968, 993, 1124, 1125, 1127, 1129, 1130, 1131, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1302, 1303, 1308, 1309, 1311, 1315, 1316, 1317, 1327, 1330, 1336, 2014], "9314": 967, "2251": [967, 1007, 1305, 1347], "0889": 967, "4439": 967, "2122": 967, "1412": 967, "6358e": 967, "lh": [968, 1318], "1625": 968, "6097": 968, "8398": 968, "2387": [968, 
993], "3771": [968, 1319], "4173": 968, "1626": [968, 1007, 1305], "6881e": 968, "tensor_split": [969, 1106, 1236, 1977, 2014, 2066, 2084], "min_valu": [970, 1484], "max_valu": [970, 1484, 2014], "_valu": [970, 1430, 1722, 1912, 2048, 2066, 2080], "7120": 970, "1734": [970, 1247], "0478": [970, 1979], "0922": 970, "3333": [970, 1226, 1235, 1579, 1580, 1942], "horizont": [973, 1236, 1238, 2067, 2102], "hstack": [973, 2014, 2066, 2080], "with_replac": [974, 2014], "combinations_with_replac": 974, "fullgraph": [975, 2102, 2111], "cache_size_limit": [975, 2102, 2111], "list_backend": [975, 2092, 2097, 2104], "compiler_custom_backend": 975, "triton": [975, 2049, 2092, 2096, 2100, 2102, 2104, 2109, 2111], "_inductor": [975, 2102, 2111], "list_mode_opt": 975, "epilogue_fus": 975, "max_autotun": 975, "fallback_random": [975, 2102, 2111], "shape_pad": 975, "graph_diagram": 975, "pictur": 975, "list_opt": 975, "_glibcxx_use_cxx11_abi": 976, "black": [977, 2103], "throughout": [977, 1757, 2052, 2055, 2070, 2099], "footgun": [977, 2096, 2101], "bypass": [977, 1108, 2020, 2035, 2045, 2048, 2103, 2109], "bullet": [977, 2016], "rand_foo": 980, "compiler_cudagraph_tre": 980, "external_util": 982, "stricter": [983, 1412, 2103], "is_compil": [983, 2103], "exclude_tag": 984, "flip": [990, 1149, 1150, 1340, 2014, 2066, 2067, 2102, 2106], "writeabl": [990, 991], "is_conj": [990, 1850, 2014, 2066], "geq": [992, 1215, 1304, 1312, 1435, 1436, 1437, 1461, 1512, 1533, 1545, 1615, 1668, 1730, 2040, 2081], "signbit": [992, 2014, 2066, 2080, 2106], "2557": 992, "0026": 992, "5387": 992, "4740": 992, "9244": 992, "7079": 992, "2778": 992, "0249": [992, 1348], "5719": 992, "0059": 992, "2600": 992, "4475": 992, "9567": [992, 1308, 1926], "5757": 992, "1751": 992, "0742": 992, "2998": 992, "1054": 992, "2373": 992, "3190": [992, 2045], "1128": [992, 1330, 1496], "pearson": 993, "coeffici": [993, 1230, 1781, 1783, 1784, 1785, 1787, 1793, 1797, 1885, 1886, 1887, 1951], "r_": [993, 1944], "ij": [993, 1108, 1352, 1360, 1374, 1529, 1904], "c_": [993, 1453, 1454, 1455, 1456, 1457, 1458, 1496, 1538, 1539], "jj": 993, "cov": [993, 2014, 2066], "2678": [993, 1468], "0908": 993, "3766": 993, "2780": 993, "5812": 993, "1535": [993, 1468], "2350": 993, "3582": 993, "4309": 994, "2706": 994, "8562": 994, "9796": [994, 1318], "1395": 994, "2957": 994, "6553": 994, "5574": 994, "1632": 995, "1835": 995, "6979": 995, "7325": [995, 1098], "0133": 995, "7860": 995, "2536": 995, "2805": 995, "sleef": [995, 1894], "y_": [997, 1118, 1346, 1347, 1349, 1439, 1461, 1491, 1904, 1949, 2042, 2081], "_w": [997, 1473], "w_i": [997, 1312], "mu_x": 997, "mu_i": [997, 1787], "w_ia_i": 997, "w_ix_": 997, "mathbb": [997, 1226, 1302, 1304, 1308, 1309, 1310, 1311, 1312, 1313, 1318, 1319, 1322, 1324, 1331, 1333, 1335, 1336, 1461, 1533, 1730], "bessel": [997, 1292, 1889, 1921, 1922, 1971, 1972, 2081], "unbias": [997, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1566, 1921, 1922, 1971, 1972, 2014, 2106], "corrcoef": [997, 2014, 2066], "6667": [997, 1235, 1579, 1580, 1847, 1880, 2060], "fw": 997, "4282": 997, "0255": [997, 1095], "4144": [997, 2045], "4169": 997, "streamcontext": [1005, 1083, 1409, 2007, 2012], "abstractcontextmanag": 1005, "3956": [1007, 1305], "1455": [1007, 1305, 2061], "6895": [1007, 1305], "5849": [1007, 1305], "3599": [1007, 1305], "7180": [1007, 1305], "0521": [1007, 1305], "1339": [1007, 1305], "0225": [1007, 1305, 1318], "0257": [1007, 1305], "4725": [1007, 1305], "1479": [1007, 1305], "7005": [1007, 1305], "9757": [1007, 1305], 
"3904": [1007, 1305], "3726": [1007, 1305], "1836": [1007, 1305], "9688": [1007, 1305], "7153": [1007, 1305, 2081], "2159": [1007, 1305], "0844": [1007, 1305], "5281": [1007, 1305], "6120": [1007, 1305], "4490": [1007, 1305], "5687": [1007, 1305], "9792": [1007, 1089, 1305], "8304": [1007, 1305], "3037": [1007, 1305, 2055], "5650": [1007, 1305], "2329": [1007, 1305], "9883": [1007, 1305], "0551": [1007, 1305], "capture_begin": [1008, 2045], "capture_error_mod": [1008, 1042], "make_graphed_cal": [1008, 2045], "graph_pool_handl": [1008, 1042, 1053], "other_graph_inst": [1008, 1042, 1053], "cudastreamcapturemod": [1008, 1042], "thread_loc": [1008, 1042], "cudamalloc": [1008, 1042, 1064, 2045, 2113], "unsaf": [1008, 1042, 1344, 1733, 1736, 2014, 2045, 2068], "capture_end": [1008, 2045], "debug_dump": 1008, "debug_path": 1008, "enable_debug_mod": 1008, "path_to_so_fil": 1009, "alloc_fn_nam": 1009, "free_fn_nam": 1009, "enable_tim": [1010, 1385, 1981, 2045], "interprocess": 1010, "elapsed_tim": [1010, 1385, 1981, 2045], "end_ev": [1010, 1385, 1981, 2045], "elaps": [1010, 1385, 1981, 2029], "from_ipc_handl": 1010, "ipc": [1010, 1046], "ipc_handl": 1010, "cudaeventsynchron": 1010, "cudastreamwaitev": [1010, 1011, 1013], "stream_ptr": 1011, "cudastream_t": [1011, 2045], "record_ev": [1011, 1013, 1982], "cudastreamsynchron": [1011, 1013], "wait_ev": [1011, 1013, 1982], "interoper": 1015, "caching_allocator_delet": 1015, "mem_ptr": 1016, "caching_allocator_alloc": 1016, "peer_devic": 1017, "_cudaalloc": 1018, "clock": 1019, "sm": 1019, "hertz": 1019, "smi": [1019, 1032, 1060, 1066, 1071, 1085, 1086, 2045, 2050, 2053], "buffer_s": 1021, "10485760": 1021, "chunk_siz": [1024, 1171, 1177, 1976], "cublashandle_t": 1025, "unoccupi": [1032, 1384, 1989], "cudamallocasync": [1033, 1064, 2045], "_cudadeviceproperti": 1037, "gencod": 1038, "cuda_graph": 1042, "ordinari": [1044, 1716, 1967, 2041], "code_str": [1050, 1051], "temp": 1050, "typenam": [1050, 1051], "my_kernel": [1050, 1051], "jitted_fn": [1050, 1051], "create_jit_fn": [1050, 1051], "util_fn": 1050, "gelu": [1050, 1556, 1570, 1572, 1574, 1687, 2014, 2035, 2066, 2106], "my_gelu": 1050, "my_lib": [1050, 2020, 2061], "num_output": 1051, "sample_arg": 1053, "num_warmup_it": 1053, "allow_unused_input": 1053, "datadistributedparallel": 1053, "amp": [1053, 1967, 2012, 2041, 2105, 2108, 2111], "autocast": [1053, 1532, 2012, 2045, 2048, 2111], "insuffici": [1054, 1999, 2098, 2111], "manual_seed_al": [1054, 1967, 1999], "occupi": [1056, 1060, 1072, 1381, 1514, 1650, 2045, 2053, 2116], "reset_peak_memory_stat": [1056, 1058, 1072, 1073], "max_memory_reserv": [1057, 2045, 2053], "cudamemgetinfo": 1059, "memory_reserv": [1061, 2045, 2053], "snapshot": [1063, 2012, 2016, 2045, 2053], "large_pool": 1064, "small_pool": 1064, "allocated_byt": 1064, "reserved_byt": 1064, "active_byt": 1064, "inactive_split": 1064, "inactive_split_byt": 1064, "octob": 1064, "1mb": [1064, 2113], "num_alloc_retri": 1064, "num_oom": 1064, "num_sync_all_stream": 1064, "synchronize_and_free_ev": 1064, "num_device_alloc": 1064, "cumemmap": 1064, "num_device_fre": 1064, "cumemunmap": 1064, "cudafre": [1064, 2045, 2113], "assist": [1064, 2047, 2086], "max_split_s": 1064, "oversize_alloc": 1064, "oversize_seg": 1064, "requested_byt": 1064, "abbrevi": 1065, "percent": [1066, 1086, 2105], "instantan": [1067, 2069], "ascii": [1067, 1070, 1344, 2016, 2069], "sensor": [1071, 1085], "mw": 1071, "milliwatt": 1071, "fermi": 1071, "max_memory_alloc": [1072, 2045, 2053], "max_memory_cach": 1073, 
"memory_stat": [1074, 2045, 2053], "seed_al": [1075, 2001], "environment": [1077, 2105], "total_memori": [1078, 1392], "debug_mod": [1082, 1869], "centigrad": 1085, "x_3": [1087, 1088, 1089, 1090, 1340], "3449": 1087, "5447": 1087, "0685": 1087, "5104": [1087, 2045], "1706": 1087, "2259": 1087, "4696": 1087, "3284": 1087, "9946": 1087, "8209": 1087, "6628": 1088, "0975": 1088, "2680": [1088, 2044], "3298": [1088, 1095], "4220": 1088, "3885": 1088, "1762": 1088, "9165": 1088, "6684": [1088, 1249], "6001": 1089, "2069": 1089, "1919": 1089, "6727": [1089, 1102], "0062": 1089, "4126": 1089, "2129": 1089, "4206": 1089, "1968": [1089, 2081], "1241": 1089, "0238": 1089, "0233": [1089, 1834], "0157": 1089, "0158": [1089, 1927], "0065": 1089, "0014": [1089, 2081], "0006": 1089, "46": [1090, 1230, 1885], "49": [1090, 1151, 1328, 2044], "74": 1090, "83": 1090, "trapezoid": [1091, 1950, 2014, 2066], "360": 1092, "2832": 1092, "diagflat": [1095, 2014, 2066], "5950": 1095, "0872": 1095, "4264": 1095, "1064": [1095, 2055], "8795": 1095, "2429": 1095, "1374": 1095, "1029": 1095, "6482": 1095, "6300": 1095, "5410": 1096, "2934": 1096, "1788": [1096, 2081], "5684": 1096, "0845": [1096, 1894, 2055], "3986": 1096, "2956": [1097, 1305], "9068": 1097, "1695": 1097, "2094": [1097, 2045], "3018": 1097, "1516": 1097, "9342": 1097, "0854": 1098, "1431": 1098, "1752": 1098, "8536": 1098, "0905": 1098, "0360": [1098, 1411], "6927": 1098, "3735": 1098, "4945": 1098, "2631": [1098, 1395, 2045], "3755": 1098, "5977": [1098, 2048], "8172": 1098, "1065": [1098, 2055], "0401": 1098, "2235": [1098, 1926], "7938": 1098, "3081": 1098, "6166": 1098, "2335": 1098, "0500": 1098, "7336": 1098, "3836": 1098, "1015": 1098, "5393": 1102, "8675": 1102, "5916": 1102, "6321": 1102, "0967": 1102, "0511": 1102, "6295": 1102, "8360": 1102, "6973": 1102, "6537": 1102, "dividend": [1103, 1153, 1156, 1846, 1956], "true_divid": [1103, 2014, 2066], "3810": [1103, 1234], "2774": 1103, "2972": 1103, "3719": 1103, "4637": [1103, 2048], "7620": 1103, "5548": 1103, "5944": 1103, "7438": 1103, "9274": 1103, "3711": 1103, "9353": 1103, "4605": 1103, "2917": 1103, "1815": [1103, 1353], "0111": [1103, 1883], "9805": 1103, "5923": 1103, "1062": 1103, "4581": [1103, 1312], "7759": 1103, "2344": 1103, "1830": 1103, "0313": 1103, "1908": 1103, "4757": 1103, "8032": 1103, "2930": 1103, "8113": 1103, "2308": 1103, "4620": [1103, 1979], "6051": 1103, "5676": 1103, "2639": 1103, "2260": 1103, "4509": [1103, 1326], "2086": 1103, "1322": 1103, "9764": 1103, "9564": 1103, "3484": 1103, "2278": 1103, "1068": [1103, 1247], "4678": 1103, "3938": [1103, 1937], "depthwis": [1106, 1107, 1453, 1454, 1455], "atleast_3d": [1107, 2014, 2066], "notat": [1108, 1491, 1874, 2017, 2055, 2086], "einstein": 1108, "summat": [1108, 1269, 1352, 1360, 2080], "subscript": [1108, 2017, 2057], "jk": [1108, 2105], "ik": [1108, 1345, 1904], "za": 1108, "alphabet": [1108, 1616, 2075], "arrow": [1108, 2077], "ki": 1108, "ellipsi": [1108, 2016, 2017, 2034], "fourth": 1108, "whitespac": [1108, 2017], "opt_einsum": [1108, 2012], "_the_": 1108, "disclaim": 1108, "sublist": [1108, 2111], "52": 1108, "op1": [1108, 2016], "sublist1": 1108, "op2": [1108, 2016], "sublist2": 1108, "subslist_out": 1108, "7952": 1108, "2433": 1108, "4545": 1108, "1156": 1108, "2897": [1108, 2055], "3918": 1108, "4963": 1108, "3744": 1108, "9381": 1108, "2685": 1108, "6070": 1108, "7208": 1108, "8058": 1108, "4419": 1108, "0936": 1108, "1713": 1108, "4291": 1108, "5802": 1108, "7350": [1108, 2081], "5704": 1108, "4290": 
1108, "9323": 1108, "4480": 1108, "bij": 1108, "bjk": 1108, "bik": 1108, "0564": 1108, "5904": 1108, "2023": 1108, "1271": 1108, "6706": [1108, 1827], "8097": 1108, "8025": 1108, "1183": 1108, "2239": [1108, 1332], "3107": 1108, "5756": 1108, "2354": 1108, "4558": 1108, "3460": 1108, "5087": 1108, "8530": [1108, 1450, 1582], "8153": 1108, "8787": 1108, "3839": [1108, 1975], "2112": [1108, 1952], "3728": 1108, "1131": [1108, 1824], "0921": 1108, "8305": 1108, "ji": 1108, "anm": 1108, "bm": 1108, "ba": 1108, "3430": [1108, 1353], "2405": 1108, "4494": 1108, "3311": 1108, "5201": 1108, "0356": 1108, "4064e": 1109, "8000e": 1109, "3493e": 1109, "5751e": 1109, "1428e": 1109, "5955e": 1109, "9683e": 1111, "1239e": 1111, "0705e": 1111, "orig_func": [1112, 1769], "set_grad_en": [1112, 2014, 2066, 2089], "tripler": [1112, 1769], "elsewher": [1113, 1121, 1215, 1229, 1262, 1264, 1265, 1268, 1297, 1361, 1423, 1905, 2011], "_max": [1122, 1123, 2070], "_min": [1122, 1123, 2070], "nearbi": [1122, 1123], "_int": [1122, 1123], "_point": [1122, 1123], "2525": 1122, "0466": 1122, "3491": [1122, 1326], "2168": [1122, 1944], "5906": [1122, 2081], "6258": 1122, "6444": 1122, "0542": 1122, "0475": [1122, 2081], "0486": 1122, "3405": 1122, "6134": [1122, 1375], "6323": 1122, "0552": 1123, "9730": 1123, "3973": 1123, "0780": 1123, "4000": [1123, 1126, 1135, 1144, 1579, 1830, 1880], "fourier": [1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1269, 1890, 1923, 2012], "rfft": [1124, 1129, 1139, 1143, 1144, 1145], "chalf": [1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1139, 1140, 1141, 2014, 2066, 2086], "sm53": [1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145], "ortho": [1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1345], "orthonorm": [1124, 1125, 1127, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1145, 1336, 1730, 1927], "ifft": [1124, 1128, 1129, 1133, 1134, 1136, 1137, 1138, 1139, 1140, 1141], "fftn": [1125, 1128, 1134, 1137, 1145], "rfft2": [1125, 1140], "ifft2": [1125, 1137], "two_fft": [1125, 1127, 1137, 1143, 1145], "check_strid": [1125, 1127, 1128, 1133, 1134, 1139, 1140, 1141, 1143, 1145, 2087], "nyquist": [1126, 1128, 1136, 1138, 1142, 1143, 1144, 1145], "i_1": [1127, 1145, 1293], "i_n": [1127, 1145, 1177, 1293, 1944, 1976], "rfftn": [1127, 1131, 1137, 1141, 1143], "ifftn": [1127, 1133, 1138], "reorder": [1128, 1328, 2051], "rearrang": [1128, 1135, 1446, 1538, 1539, 1674, 1675, 2034], "fftfreq": [1128, 1135, 1144], "9000": [1128, 1942], "8000": [1128, 1417, 1579, 1827, 1880], "uncent": 1128, "ifftshift": 1128, "x_center": 1128, "x_uncent": 1128, "fft_uncent": 1128, "fft_center": 1128, "x_centered_2": 1128, "ihfft": [1129, 1137, 1138], "irfft": [1129, 1141, 1142], "symmetri": [1129, 1131, 1923], "transformed_dim_s": [1129, 1139], "0000j": [1129, 1136, 1139, 1302, 1303, 1308, 1309, 1311, 1820, 1877], "1250": [1129, 1395], "1720j": 1129, "0406j": 1129, "2809": 1129, "6250": [1129, 1139, 1158, 1579], "9691": 1129, "hfftn": [1130, 1138], "last_dim_s": [1130, 1131, 1140, 1141, 2106], "ihfft2": 1130, "roundtrip": [1130, 1131, 1139, 1140, 1141], "ihfftn": [1131, 1137], "irfftn": [1131, 1140, 1145], "fft2": [1133, 1143], "two_ifft": [1133, 1134, 1138], "fftshift": 1135, "hfft": 1136, "6882j": 1136, "1625j": 1136, "hfft2": 1137, "8602j": 1139, "2031j": 1139, "1562": 1139, "3511": 
1139, "7812": 1139, "2114": 1139, "irfft2": 1143, "wider": [1151, 2013, 2016, 2067, 2080], "2500e": 1151, "1000e": 1151, "7656e": 1151, "lfloor": [1152, 1157, 1430, 1435, 1436, 1437, 1453, 1454, 1455, 1472, 1493, 1494, 1495, 1519, 1520, 1521, 1578, 1579, 1580, 1581, 1601, 1671, 1842, 1923], "rfloor": [1152, 1157, 1430, 1435, 1436, 1437, 1453, 1454, 1455, 1472, 1493, 1494, 1495, 1519, 1520, 1521, 1578, 1579, 1580, 1581, 1601, 1671, 1842, 1923], "8166": 1152, "5308": 1152, "2530": 1152, "2091": 1152, "7000": [1154, 1469, 1879], "3000": [1155, 1468, 1879, 2045], "entrywis": [1156, 1846], "modulu": [1156, 1332, 1846], "operatornam": [1157, 1308, 1309, 1310, 1311, 1328, 1336, 1438, 1439, 1445, 1485, 1486, 1492, 1517, 1558, 1576, 1816, 1839, 1879, 1928], "8750": [1158, 1579], "sizeof": [1160, 1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 2082], "map_shar": [1160, 2060, 2082], "map_priv": [1160, 2060, 2082], "mmap": [1160, 1344, 2032, 2060, 2082], "tofil": 1160, "t_map": 1160, "char": [1162, 1777, 2033, 2082, 2102], "parameter_and_buffer_dict": 1165, "tie_weight": [1165, 1766], "submodule_nam": [1165, 1766], "parameter_nam": [1165, 1766], "ty": [1165, 1766, 2115], "foo_ti": [1165, 1766], "new_a": [1165, 1766], "grad_weight": [1165, 2048], "detached_param": 1165, "parameters_and_buffer_dict": 1165, "intermediate_upd": 1166, "mutations_and_view": 1166, "proxy_tensor": [1166, 2020, 2101, 2111], "make_fx": [1166, 2020, 2098, 2111], "inpt": 1166, "f_trace": 1166, "f_no_mutations_trac": 1166, "f_no_mutations_and_views_trac": 1166, "a_1": [1166, 1293], "view_1": 1166, "view_copi": [1166, 2014, 2066, 2089], "view_copy_1": 1166, "as_strid": [1166, 2014, 2066, 2084, 2106], "native_funct": [1166, 2048, 2106], "yaml": [1166, 2048, 2106], "aux": [1167, 1168, 1170, 1171, 1172, 1176], "my_loss_func": 1167, "y_pred": [1167, 2045], "loss_per_sampl": 1167, "y_true": 1167, "autodiff": [1170, 1171, 1172, 1331], "jacobian_f": [1170, 1171], "f_x": [1170, 1171], "jacboian": [1170, 1171], "expectedx": [1170, 1171], "expectedi": [1170, 1171], "_preallocate_and_copi": 1171, "jvp_out": 1172, "wish": [1172, 1918, 2020, 2041, 2045, 2046, 2048, 2049], "jvp_fn": 1173, "optimiz": [1175, 2013], "l1": [1175, 1485, 1486, 1558, 1641, 1688, 1741, 1750, 2055, 2060], "l2": [1175, 1486, 1517, 1558, 1780, 1781, 1782, 1783, 1785, 1787, 1793, 1794, 1796, 2041], "vjpfunc": 1176, "unsuccessfulli": [1177, 1976], "rummag": [1177, 1976], "batched_dot": [1177, 1976], "jacobian_row": [1177, 1976], "get_vjp": [1177, 1976], "n1": [1177, 1848, 1976], "n0": [1177, 1976], "batched_pow": [1177, 1976], "autobatch": [1177, 1976], "symbolic_shap": [1178, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 2012, 2098], "symbol_to_sourc": 1180, "var_to_v": 1180, "marked_dynam": 1180, "source_name_to_debug_nam": 1180, "_allow_complex_guards_as_runtime_assert": [1180, 1188], "solver": [1180, 2012, 2058], "expr": [1180, 1187, 1194, 1201], "tautologi": 1180, "add_equ": 1180, "forced_speci": 1180, "prettify_result": 1180, "original_signatur": 1180, "constraint_violation_error": 1180, "violat": [1180, 1187, 2045, 2049], "erro": 1180, "remove_redundant_dynamic_result": 1180, "rewrite_with_congru": 1180, "congruenc": 1180, "ration": 1180, "inequ": [1180, 1194], "_disable_forced_speci": [1180, 1187], "duck": [1181, 1187, 2048, 2068], "nb": [1181, 1191, 1194, 1196, 1198, 1286], "simplic": [1181, 1736, 2075, 2076], "varieti": [1181, 2045, 2076], "assume_static_by_default": [1181, 1188], "mark_dynamic_dim": 
1181, "warn_onli": [1183, 1186, 1191, 1964], "source_pair": 1183, "derived_equ": 1183, "phantom_symbol": 1183, "forest": 1183, "transit": [1183, 1272, 1374, 1526, 1923, 2012, 2013, 2092], "phantom": 1183, "inner_nam": 1184, "unback": [1185, 1187, 1196, 1197, 1199, 1200, 1207, 1208, 1209, 2101], "unspeci": 1186, "unspec": 1186, "brittl": 1186, "strictminmaxconstraint": 1186, "should_record_ev": 1187, "tracked_fak": 1187, "add_var_to_v": 1187, "bind_symbol": 1187, "littl": [1187, 2048, 2060, 2077], "evaluate_guard": 1187, "cleanest": 1187, "shenanigan": 1187, "bound_sympi": 1187, "size_oblivi": 1187, "check_equ": 1187, "create_symbol": 1187, "dimdynam": [1187, 1190], "constraint_dim": 1187, "do_not_specialize_zero_on": 1187, "symbolic_context": [1187, 1189, 1190], "create_symbolic_sizes_strides_storage_offset": [1187, 1189, 1190, 1193], "create_symboolnod": 1187, "create_symfloatnod": 1187, "create_symintnod": 1187, "create_unbacked_symbool": 1187, "create_unbacked_symfloat": 1187, "create_unbacked_symint": 1187, "create_unspecified_symbol": 1187, "specialz": 1187, "create_unspecified_symint_and_symbol": 1187, "defer_runtime_assert": 1187, "orig_expr": 1187, "fx_node": 1187, "evaluate_expr": [1187, 2099], "expect_r": 1187, "forcing_spec": 1187, "evaluate_guards_express": 1187, "produce_guards_express": 1187, "evaluate_guards_for_arg": 1187, "ignore_stat": 1187, "format_guard": 1187, "freeze_runtime_assert": 1187, "discharg": [1187, 1197], "get_axiom": 1187, "get_impl": 1187, "compute_hint": 1187, "booleanatom": 1187, "get_nontrivial_guard": 1187, "get_pruned_guard": 1187, "prune": [1187, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 2036, 2080], "ignore_fresh_unbacked_symbol": 1187, "is_unbacked_symint": 1187, "produce_guard": 1187, "source_ref": 1187, "input_context": 1187, "equalities_input": 1187, "_simplifi": 1187, "localsourc": [1187, 2099], "boilerpl": [1187, 2055, 2068], "nice": [1187, 1453, 1454, 1455, 1456, 1457, 1458, 1472, 1519, 1520, 1521, 1578, 2034, 2042, 2045, 2068, 2099], "set_unbacked_var_to_v": 1187, "propagate_real_tensor": [1187, 1196], "resort": [1187, 2045, 2052, 2068], "size_hint": [1187, 2104], "allow_non": 1187, "suppress_guard": 1187, "allow_scalar_output": 1188, "allow_dynamic_output_shape_op": 1188, "specialize_zero_on": 1188, "duck_shap": 1188, "prefer_deferred_runtime_asserts_over_guard": 1188, "dynamic_s": [1189, 1190, 1192], "constraint_s": [1189, 1190, 1192], "view_base_context": [1189, 1190, 1192], "tensor_sourc": [1189, 1192], "shape_env_to_source_to_symbol_cach": [1189, 1192], "statelesssymboliccontext": 1189, "owner": [1189, 2011, 2048, 2066, 2075, 2076], "lifecycl": [1189, 2101], "shape_env": [1189, 1196, 1209, 1210], "dimconstraint": 1190, "relaxedunspecconstraint": 1191, "unsoundli": [1191, 1197], "inner_context": 1192, "canonic": 1194, "rh": [1194, 1318, 1363], "Ors": 1194, "cnf": 1194, "subexpr": 1194, "25924": 1194, "retrac": [1195, 1209, 1280, 2099], "example_valu": 1196, "old_example_valu": 1196, "peek": [1196, 2020], "freshli": 1196, "unbacked_var_to_v": 1196, "lie": [1197, 1560, 1562, 1690, 1906, 2081, 2085, 2101], "intersect": 1197, "fairli": [1197, 2057, 2098, 2102, 2104], "perfectli": [1197, 1870, 2045], "definitely_tru": 1199, "parallel_or": 1200, "parallel_and": 1200, "circuit": 1200, "oblivi": 1201, "118579": 1201, "free_symbol": 1202, "maxsiz": 1206, "sym_and": 1212, "sparse_grad": [1213, 2014, 2106], "tau": [1216, 1312, 1634, 1782, 1796, 1813, 1814, 1883, 2014], "elementari": [1216, 2042, 2052], "reflector": [1216, 1730, 1814], 
"household": [1216, 1312, 1730, 1814], "householder_product": [1216, 1730, 1813], "gel": [1216, 1318], "set_deterministic_debug_mod": [1220, 1964], "mtia": [1221, 1397, 1398, 1399, 1403, 2012, 2069], "fork_rng": [1225, 2012, 2074], "edge_ord": [1226, 2014], "rightarrow": 1226, "closer": [1226, 1576, 1827, 2048, 2055, 2070, 2104, 2105], "interior": 1226, "theorem": 1226, "h_l": 1226, "h_r": 1226, "neighbor": [1226, 1269, 1579, 1581, 1923], "xi_1": 1226, "xi_2": 1226, "approx": [1226, 1816, 1928, 2052], "80": [1226, 1328, 1806, 1874, 2045, 2067], "halv": 1226, "coord": 1226, "54": [1230, 1487, 1886], "hann_window": [1230, 1923, 2014, 2018, 2066], "hann": [1231, 1886], "hist": [1234, 1235, 2014], "bin_edg": [1234, 1235, 2014], "9524": 1234, "leftmost": [1235, 2035], "leg": 1239, "triangl": [1239, 2085], "hypotenus": 1239, "4031": 1239, "gammainc": [1241, 2081], "gammaincc": [1242, 2081], "index_reduce_": [1246, 2014], "1427": 1247, "0231": 1247, "5414": 1247, "0009": 1247, "4664": [1247, 1944], "2647": 1247, "1228": 1247, "6571": 1247, "7230": 1247, "6004": 1247, "multidimension": [1249, 1342, 1488], "8173": 1249, "0874": 1249, "1784": 1249, "3279": 1249, "7894": 1249, "4682": 1249, "7159": 1249, "1506": 1249, "4034": 1249, "3657": 1249, "0387": 1249, "9892": 1249, "1774": 1249, "3261": 1249, "3917": 1249, "4537": [1249, 1706], "7493": 1249, "1724": 1249, "2291": 1249, "5749": 1249, "2267": 1249, "7920": 1249, "3607": 1249, "3701": 1249, "3666": 1249, "5850": [1249, 1303], "7242": 1249, "9837": 1249, "1560": 1249, "2907": 1249, "6785": 1249, "5671": [1249, 1304], "5452": 1249, "6912": 1249, "5509": 1249, "1782": 1249, "9843": 1249, "7366": 1249, "5672": [1249, 1772], "5115": 1249, "4864": 1249, "2476": 1249, "4337": 1249, "6347": 1249, "1748": 1249, "3567": [1249, 1303], "6558": 1249, "2469": [1249, 2055], "5787": [1249, 1353], "typecheck": [1259, 2066], "warn_alwai": 1260, "set_warn_alwai": 1260, "nonfinit": 1261, "test_el": [1263, 2014], "assume_uniqu": [1263, 2014], "0j": [1268, 1973], "nola": 1269, "envelop": 1269, "hop": [1269, 1923], "shorter": [1269, 2065, 2075], "griffin": 1269, "ieee": [1269, 1479, 1890, 2058], "tran": 1269, "assp": 1269, "vol": [1269, 1479, 1890], "236": 1269, "apr": 1269, "1984": 1269, "slide": [1269, 1435, 1436, 1437, 1472, 1519, 1520, 1521, 1578, 1626, 1657, 1658, 1659, 1702, 1831, 1832, 1923], "fft_size": 1269, "scriptmodul": [1270, 1271, 1276, 1280, 1282, 1283, 1284, 1288, 1289, 1779, 2013, 2015, 2026, 2065], "attributemodul": 1270, "names_ag": 1270, "get_debug_st": 1271, "graphexecutorst": 1271, "_extra_fil": [1271, 1272, 1280, 1283, 2054], "save_to_buff": 1271, "add_modul": [1272, 1526], "init_weight": [1272, 1526, 2055], "buf": [1272, 1526], "20l": [1272, 1526], "1l": [1272, 1526], "5l": [1272, 1526], "pretti": [1272, 1778, 1874, 2013, 2097, 2101], "syntax": [1272, 2013, 2016, 2068, 2103], "code_with_const": 1272, "constmap": 1272, "get_buff": [1272, 1526], "attributeerror": [1272, 1526, 2048, 2070], "get_extra_st": [1272, 1526], "set_extra_st": [1272, 1526], "get_paramet": [1272, 1526], "net_b": [1272, 1526], "net_c": [1272, 1526], "inlined_graph": 1272, "ipu": [1272, 1526, 2020, 2060], "remove_dupl": [1272, 1526], "named_children": [1272, 1526, 2055], "conv4": [1272, 1526], "conv5": [1272, 1526], "memo": [1272, 1526, 2101], "register_backward_hook": [1272, 1526], "register_full_backward_hook": [1272, 1526, 1711, 2055], "register_forward_hook": [1272, 1526, 1709, 2055], "with_kwarg": [1272, 1526], "always_cal": [1272, 1526, 1709], "fire": [1272, 1526, 1780, 
1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797, 2047, 2054, 2107], "register_module_forward_hook": [1272, 1526, 2055], "register_forward_pre_hook": [1272, 1462, 1526, 1710, 2055], "forward_pr": [1272, 1526], "register_module_forward_pre_hook": [1272, 1526, 2055], "register_module_full_backward_hook": [1272, 1526, 1707, 2042, 2055], "register_full_backward_pre_hook": [1272, 1526, 1712, 2055], "register_module_full_backward_pre_hook": [1272, 1526, 2055], "register_load_state_dict_post_hook": [1272, 1526, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "incompatible_kei": [1272, 1526], "register_modul": [1272, 1526, 1713], "register_paramet": [1272, 1526, 1714, 2048, 2055], "register_state_dict_pre_hook": [1272, 1526, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "keep_var": [1272, 1526], "gan": [1272, 1526, 1731, 1765], "share_memori": [1272, 1526, 2057], "share_memory_": [1272, 1526, 2032, 2082], "4d": [1272, 1441, 1472, 1489, 1526, 1579, 1626, 1643, 1671, 1703, 1723, 1724, 1830], "1913": [1272, 1526], "3420": [1272, 1526], "5113": [1272, 1526, 1944], "2325": [1272, 1305, 1526], "gpu1": [1272, 1526], "1914": [1272, 1526], "5112": [1272, 1526, 2045], "3741": [1272, 1526], "2382": [1272, 1411, 1526], "5593": [1272, 1526], "4443": [1272, 1526], "6122": [1272, 1526], "1150": [1272, 1526], "dst_type": [1272, 1526], "set_to_non": [1272, 1526, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1792, 1793, 1794, 1795, 1796, 1797, 2045], "the_typ": 1273, "the_valu": 1273, "script_bar": 1275, "addmod": 1275, "preserved_attr": 1276, "optimize_numer": 1276, "optimize_for_infer": [1276, 2097], "run_frozen_optim": 1276, "scripted_modul": [1276, 1284, 2060], "frozen_modul": 1276, "modified_tensor": 1276, "mymodule2": 1276, "dump_alias_db": 1276, "training_method": 1277, "testcod": [1278, 1279, 1280, 1283, 2015], "interfacetyp": 1278, "impl1": 1278, "impl2": 1278, "user_fn": 1278, "user_fn_jit": 1278, "target_typ": 1279, "key1": 1279, "val1": 1279, "key2": 1279, "val2": 1279, "_restore_shap": 1280, "scriptfunct": [1280, 1284, 1285, 1288, 2065], "readlin": [1280, 1344, 2068], "other_method": 1282, "lesser": [1282, 2042, 2044], "extent": [1282, 2044, 2080], "frozen_mod": 1282, "_frames_up": 1284, "_rcb": 1284, "scriptdict": 1284, "scriptlist": 1284, "test_sum": 1284, "scripted_fn": [1284, 2013], "conv2": [1284, 1526, 1555, 2013, 2063, 2070], "some_entry_point": 1284, "python_only_fn": 1284, "testnnmodul": 1284, "pdt_model": 1284, "scripted_model": [1284, 2068], "un": [1286, 1445, 2070], "unfus": 1286, "check_trac": [1288, 1289], "check_input": [1288, 1289, 2013], "check_toler": [1288, 1289], "_force_outplac": [1288, 1289], "_module_class": [1288, 1289], "_compilation_unit": [1288, 1289], "compilationunit": [1288, 1289], "example_kwarg_input": 1288, "_store_input": [1288, 1289], "trace_modul": [1288, 2013, 2016], "untrack": 1288, "checker": [1288, 1289, 2016, 2065], "traced_foo": [1288, 2013], "example_weight": [1288, 1289], "example_forward_input": [1288, 1289], "example_inputs_is_kwarg": 1289, "method2": 1289, "example_method2_input": 1289, "weighted_kernel_sum": 1289, "use_memory_effici": 1290, "memory_effici": 1290, "scriptabl": 1290, "kaiser": [1292, 1570, 1572, 1574], "i_0": [1292, 1293, 1889, 1944, 2081], "zeroth": [1292, 1889, 2081], "out_i": 1292, "kroneck": 1293, "a_0": 1293, "a_n": 1293, "b_0": 1293, "b_1": 1293, "b_n": 1293, "k_0": [1293, 1944], "k_1": 1293, "k_n": 1293, "j_0": 1293, "j_1": 1293, "j_n": 1293, 
"k_t": 1293, "i_t": [1293, 1496, 1795], "b_t": 1293, "j_t": 1293, "bmatrix": 1293, "a_": [1293, 1364, 1514, 1944], "cdot": [1293, 1318, 1330, 1438, 1439, 1460, 1461, 1483, 1491, 1529, 1530, 1531, 1532, 1533, 1570, 1614, 1637, 1820, 1923, 2081, 2087], "vdot": [1293, 1340, 1341, 2014, 2066], "ddot": [1293, 1340], "kth": 1294, "full_lik": [1298, 2014, 2018, 2020, 2066], "logarithm": [1301, 1306, 1332, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1359, 1445, 1616, 1651, 1903, 2081], "gamma": [1301, 1440, 1441, 1442, 1480, 1488, 1489, 1490, 1498, 1541, 1566, 1715, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1796, 1798, 1802, 1803, 1806, 1811, 1812, 1830, 2012, 2014, 2065, 2066, 2067, 2081], "5724": [1301, 2081], "1208": [1301, 2081], "mathrlap": [1302, 1308, 1309, 1310, 1311, 1319, 1322, 1331, 1333, 1335, 1336, 1730], "qquad": [1302, 1304, 1308, 1309, 1310, 1311, 1312, 1319, 1322, 1331, 1333, 1335, 1336, 1730], "eigenvalu": [1302, 1308, 1309, 1310, 1311, 1324, 1327, 1330, 1335, 1336, 1345, 1816, 2014], "resp": [1302, 1309, 1311, 1335, 1336, 1353], "5266": 1302, "9586": 1302, "0626j": 1302, "4160": 1302, "5895": 1302, "2322": 1302, "2976j": 1302, "4928": [1302, 1944], "4692e": 1302, "8747e": 1302, "check_error": [1303, 1314, 1316, 1321, 1334, 2014], "performantli": 1303, "3792": 1303, "9831j": 1303, "8757": 1303, "5425": 1303, "6374j": 1303, "kappa": 1304, "_p": [1304, 1535], "frobeniu": [1304, 1318, 1325, 1329, 1771], "nuc": [1304, 1325, 1329, 1342, 1742, 1751, 1771], "nuclear": [1304, 1325, 1329, 1771], "sigma_1": [1304, 1318, 1327, 1330], "sigma_n": 1304, "kappa_2": 1304, "kappa_": 1304, "4142": [1304, 1329, 1771, 1820], "1623": [1304, 1325], "2426": [1304, 1329, 1771], "7071": [1304, 1882], "5917": 1304, "9941": 1305, "5132": 1305, "5681": 1305, "4653": 1305, "4507": 1305, "4119": 1305, "6163": 1305, "1073": 1305, "3957": 1305, "9666": [1305, 1623], "0840": 1305, "3357": 1305, "2139": 1305, "slogdet": [1306, 1353, 2014, 2066], "0934": 1306, "1990": [1306, 1353], "4099": [1306, 1353], "7386": [1306, 1353], "diagonaliz": [1308, 1310], "eigenvector": [1308, 1309, 1345, 2014], "neq": [1308, 1309, 1312, 1336, 1423, 1529, 1531, 1781, 1782, 1783, 1785, 1787, 1793, 1794, 1796, 1800, 1927], "phi": [1308, 1309, 1336, 1475, 1630, 1927], "shall": [1308, 1309, 1336, 1733, 1737], "lambda_i": [1308, 1309, 1324], "lambda_j": [1308, 1309], "eigval": [1308, 2014], "9828": [1308, 1952, 2055], "3889j": 1308, "4617": 1308, "3010j": 1308, "1662": 1308, "7435j": 1308, "6139": 1308, "0562j": 1308, "1226": [1308, 1310], "5738j": [1308, 1310], "7537": [1308, 1310], "1286j": [1308, 1310], "9218": 1308, "1882": 1308, "2220j": 1308, "0270": 1308, "3867j": 1308, "7119e": 1308, "2841e": 1308, "uplo": [1309, 1311, 2014], "unitari": [1309, 1312, 1331, 1336, 1730, 1814], "ill": [1309, 1597, 2058], "eigvalsh": [1309, 1327], "9228": [1309, 1311], "2029": [1309, 1311], "0862j": [1309, 1311], "3464": [1309, 1311], "3277": [1309, 1311], "9415": [1309, 1311], "0846": 1309, "9964": 1309, "9170": 1309, "3898j": 1309, "0331j": 1309, "1062e": 1309, "5423e": 1309, "polynomi": [1310, 1311, 1809, 2102], "_n": [1310, 1311, 1313, 1730, 2048], "4576e": [1310, 1337], "5797": 1311, "4629": 1311, "1605": 1311, "3780": 1311, "1113": [1311, 2055], "7381": 1311, "h_1h_2": 1312, "h_k": 1312, "h_i": [1312, 1561], "_m": [1312, 1730], "tau_i": 1312, "8034": 1312, "4184j": 1312, "2588": 1312, "0174j": 1312, "6853": 1312, "7953j": 1312, "0790": 1312, "5620j": 1312, "6989j": 1312, "5360": 1312, "1193j": 1312, "3877": 1312, "6691j": 1312, "3512": 
1312, "3024j": 1312, "4766": 1312, "5783j": 1312, "0361": [1312, 2055], "6587j": 1312, "6396": [1312, 2055], "1612j": 1312, "3693": 1312, "4481j": 1312, "aa": 1313, "pinv": [1313, 1318, 1818], "moor": [1313, 1330], "penros": [1313, 1330], "ainv": [1313, 1314, 1334, 1338], "1921e": 1313, "9073e": [1313, 1578], "5107e": 1313, "ldl": [1315, 1317], "indefinit": [1315, 2025], "ld": [1315, 1316, 1317, 2014], "sytrf": [1315, 1316], "ldl_solv": 1315, "ldl_factor_ex": [1315, 1317], "2079": [1315, 1316, 2081], "2414": [1315, 1316], "9428": [1315, 1316], "4554": [1315, 1316], "3264": [1315, 1316], "3823": [1315, 1316], "5884": [1315, 1316], "9595": [1315, 1316, 1882], "2695": [1315, 1316], "8513": [1315, 1316], "1633": [1315, 1316], "ldl_factor": 1316, "rcond": [1318, 1330, 1818, 2014], "_f": 1318, "gelsi": 1318, "gelsd": 1318, "gelss": 1318, "tridiagon": 1318, "sigma_i": [1318, 1336, 1927], "residu": [1318, 1345, 2014, 2055], "singular_valu": [1318, 2014], "0838": [1318, 2055], "2275": [1318, 1395], "3844": 1318, "5499": 1318, "1175": 1318, "9102": 1318, "0870": 1318, "6772": 1318, "7758": 1318, "5109": 1318, "4382": 1318, "3769": 1318, "1818": 1318, "3450": 1318, "0806": [1318, 2055], "3967": 1318, "3994": 1318, "1521": 1318, "1473": 1318, "9194": 1318, "0458": 1318, "6705": [1318, 1377], "1802": 1318, "4086": 1318, "5152e": 1318, "zero_": [1318, 1623, 2014, 2033, 2034, 2055, 2080], "5007": 1319, "9755": 1319, "0489": 1319, "9644": [1319, 1372], "9605e": 1319, "0376e": 1319, "lu_factor_ex": [1320, 1362], "lu_unpack": [1320, 1362, 2014, 2066], "b1": 1320, "b2": [1320, 2045, 2053], "getrf": [1321, 1334], "adjoint": [1322, 2014, 2066, 2084, 2086], "_exp": 1324, "7183": [1324, 2055], "3891": 1324, "8660": 1324, "ord": [1325, 1329, 1342, 1771, 2014, 2016, 2066], "la": [1325, 1329, 1342, 2055], "2829": 1325, "2627": 1325, "0756": 1326, "4980": 1326, "6617": 1326, "4994": 1326, "9980": 1326, "2731": 1326, "8001": 1326, "2640": 1326, "4571": 1326, "5511": 1326, "0163": [1326, 1372], "5292": 1326, "4899": 1326, "0822": 1326, "2773": [1326, 2035], "varepsilon": [1327, 1330, 1575], "tol": [1327, 1345, 2014], "fewest": 1328, "bc": [1328, 2024, 2067], "75000": 1328, "26": [1328, 1801, 2099, 2100, 2111], "148": 1328, "vector_norm": [1329, 1771], "matrix_norm": [1329, 1342, 1731, 1771], "7460": [1329, 1771], "3485": 1329, "8570e": 1329, "8480": 1329, "2361": [1329, 1771, 1772], "7417": [1329, 1771], "computation": [1330, 2052], "5495": [1330, 1395], "0979": 1330, "4092": 1330, "4132": [1330, 1888], "1143": 1330, "3662": 1330, "6374": 1330, "9294": 1330, "3269": [1330, 2055], "5745": [1330, 1921, 1922, 1971, 1972], "0382": [1330, 1411], "5922": 1330, "6759": 1330, "0600": 1330, "1933": 1330, "2090": 1330, "0903": 1330, "0817": 1330, "4752": [1330, 1926], "7124": 1330, "1631": 1330, "2272": 1330, "1356": 1330, "3933": 1330, "5023": 1330, "0308": 1330, "1725": 1330, "5216": 1330, "apinv": 1330, "5633e": 1330, "0830e": 1330, "wide": [1331, 1336, 1730, 1884, 2044, 2048, 2055, 2067, 2102], "51": [1331, 1524, 1826], "167": [1331, 1826], "68": [1331, 1826, 2013, 2015], "8571": [1331, 1826], "3943": [1331, 1826], "3314": [1331, 1826], "4286": [1331, 1826], "9029": [1331, 1826], "0343": [1331, 1826], "2857": [1331, 1826], "1714": [1331, 1826, 2055], "9429": [1331, 1826], "175": [1331, 1826], "q2": 1331, "r2": [1331, 1566], "6099e": 1331, "2158e": 1331, "logabsdet": [1332, 2014], "0032": 1332, "6690": 1332, "1161": 1332, "4053": 1332, "6218": [1332, 1941], "9273": 1332, "0082": 1332, "7576": 1332, "logdet": [1332, 2014, 
2066], "linalg_slogdet": [1332, 2014, 2066], "2776": 1332, "solve_ex": 1333, "solve_triangular": [1333, 1951], "expand_a": [1333, 2014, 2048, 2066, 2084], "rectangular": [1335, 1336, 1364, 1923, 2040, 2063], "triu_": [1335, 2014], "tril_": [1335, 2014], "full_matric": [1336, 1337, 1736, 1927, 2014, 2106], "vh": [1336, 1736, 1927, 2014, 2106], "gesvdj": [1336, 1337, 1927], "jacobi": 1336, "gesvda": [1336, 1337], "gesvd": [1336, 1337, 1927], "u_k": 1336, "v_k": 1336, "sigma_j": [1336, 1927], "eigendecomposit": 1336, "0486e": 1336, "0957e": 1336, "5139": 1337, "1087": 1337, "1066": 1337, "ind": [1338, 1339, 2014, 2049], "tensorsolv": 1338, "ndim": [1338, 1339, 2033, 2035, 2080, 2086, 2100], "atensorinv": 1338, "movedim": [1339, 1379, 2014, 2049, 2066, 2084], "tensorinv": 1339, "vandermond": [1340, 1970], "pmatrix": 1340, "x_n": [1340, 1438, 1439, 1485, 1486, 1492, 1517, 1558, 1949, 2042], "125": [1340, 1545, 1970, 2014], "overlin": [1341, 1973], "3223": 1341, "2815": 1341, "1944": [1341, 2055], "4345": 1342, "pickle_modul": [1344, 1858], "weights_onli": [1344, 2011, 2027, 2060], "pickle_load_arg": 1344, "register_packag": [1344, 2060], "binaryio": [1344, 1858, 2068], "add_safe_glob": [1344, 2060], "mmape": 1344, "untrust": [1344, 2011, 2027, 2068], "tamper": [1344, 2068], "surg": 1344, "unicodedecodeerror": 1344, "codec": 1344, "0x": 1344, "latin1": 1344, "byte_arrai": 1344, "niter": [1345, 1816, 1928], "ortho_iparam": 1345, "ortho_fparam": 1345, "ortho_bparam": 1345, "knyazev": 1345, "knyazev2001": 1345, "stathopoulosetal2002": 1345, "converg": [1345, 1558, 1730, 1783, 1784, 1808, 2041, 2055, 2058], "precondition": 1345, "eigenpair": 1345, "criterion": [1345, 1438, 1439, 1459, 1461, 1486, 1492, 1517, 1518, 1529, 1530, 1531, 1558, 1559, 1575, 1576, 1801, 1833, 2050], "fep": 1345, "eigenproblem": 1345, "iparam": 1345, "fparam": 1345, "bparam": 1345, "ivar": 1345, "fvar": 1345, "bvar": 1345, "tvar": 1345, "istep": 1345, "converged_count": 1345, "rerr": 1345, "force_stop": 1345, "2001": 1345, "precondit": 1345, "eigensolv": 1345, "siam": 1345, "sci": 1345, "517": 1345, "541": 1345, "epub": 1345, "doi": [1345, 1479, 1890], "1137": 1345, "s1064827500366124": 1345, "andrea": 1345, "stathopoulo": 1345, "kesheng": 1345, "2002": [1345, 1890], "2165": 1345, "2182": 1345, "s1064827500370883": 1345, "duerschetal2018": 1345, "jed": 1345, "duersch": 1345, "meiyu": 1345, "shao": 1345, "chao": 1345, "ming": 1345, "gu": 1345, "c655": 1345, "c676": 1345, "17m1129830": 1345, "log_": [1346, 1347, 1348, 1349, 2014, 2033], "7767": 1346, "3234": 1346, "2156": 1346, "2411": 1346, "5739": 1346, "5637": 1346, "4640": 1346, "1952": 1346, "4226": 1346, "5204": [1346, 1835], "5224": 1347, "9354": 1347, "7257": 1347, "1301": 1347, "2820": 1347, "0290": 1347, "1392": 1347, "8857": 1347, "6476": 1347, "0090": [1348, 1424, 1822, 2081], "9923": 1348, "5372": 1348, "2492": 1348, "8653": 1348, "7055": 1348, "7705": 1348, "2225": 1348, "8419": 1349, "8003": [1349, 2059], "9971": 1349, "5287": 1349, "0490": 1349, "2483": 1349, "0042": 1349, "9196": 1349, "3504": [1349, 1944], "logsumexp": [1350, 2014, 2033, 2066, 2081], "3069": 1350, "6867": 1350, "8731": 1350, "30000": 1350, "1269e": 1350, "log_2": 1351, "logaddexp": [1351, 2014, 2066], "limits_": 1352, "42296738": 1352, "04462666": 1352, "86278635": 1352, "94622083": 1352, "05277811": 1352, "39202815": 1352, "83525007": 1352, "84492621": 1352, "06084887": 1352, "06844475": 1352, "2611": [1353, 1941], "9254": 1353, "6213": [1353, 2055], "6843": 1353, "3242": 1353, "9665": 1353, 
"4539": 1353, "0887": [1353, 2081], "1336": 1353, "4025": 1353, "7089": [1353, 1468], "9032": 1353, "3031": 1353, "2589": 1359, "1135": 1359, "5481": [1359, 1372, 2055], "9566": 1359, "sum_j": [1360, 1516, 1560, 1562, 1690, 1906, 2081], "0593": [1360, 2055], "5696": 1360, "6859e": 1360, "compute_pivot": 1362, "transposit": [1362, 1948, 2080], "perm": 1362, "a_lu": 1362, "5558": 1362, "1684": 1362, "1551": 1362, "1940": 1362, "6189": 1362, "5497": 1362, "4526": 1362, "2526": 1362, "3285": 1362, "7988": 1362, "7175": 1362, "9701": 1362, "2634": 1362, "9255": 1362, "3459": 1362, "00000e": 1363, "8312": 1363, "unpack_data": [1364, 2014], "unpack_pivot": [1364, 2014], "l_": [1364, 1427, 1431, 1435, 1439, 1453, 1454, 1455, 1456, 1493, 1519], "u_": [1364, 1781, 1785], "3552": [1366, 1624], "3825": 1366, "8297": 1366, "3477": 1366, "2035": [1366, 1921, 1922, 1971, 1972], "2252": [1366, 2081], "5002": 1366, "6248": [1366, 1375], "1307": 1366, "0608": [1366, 1899], "1244": 1366, "0139": 1366, "6763": 1370, "7445": 1370, "2369": 1370, "argmax": [1370, 1430, 1519, 1657, 1658, 1659, 1939, 2014, 2066, 2106], "max_indic": 1370, "2360": 1370, "2942": 1370, "1222": [1370, 2055], "8475": 1370, "1949": 1370, "1127": 1370, "6702": 1370, "5717": 1370, "9207": 1370, "1297": 1370, "8768": 1370, "6172": 1370, "6060": 1370, "2432": 1370, "3288": 1372, "3367": [1372, 1920], "nanmean": [1372, 2014, 2066], "3841": 1372, "6320": 1372, "4254": 1372, "7384": 1372, "0131": 1372, "6549": [1372, 1882], "4279": 1372, "3350": 1372, "7694": 1372, "5600": [1372, 1579], "0842": 1372, "9580": 1372, "3623": 1372, "2343": [1372, 2035], "5085": 1372, "4599": 1372, "1807": 1372, "5219": 1373, "5212": 1373, "2202": 1373, "2505": 1373, "3982": 1373, "9948": 1373, "3518": 1373, "3131": 1373, "3180": [1373, 2080], "6993": 1373, "0436": 1373, "0438": 1373, "2270": 1373, "2751": 1373, "7303": 1373, "2192": 1373, "3321": 1373, "2488": 1373, "0778": 1373, "9510": 1373, "7048": 1373, "4742": [1373, 1957, 2081], "7125": [1373, 1908], "plot": [1374, 2044, 2069, 2085, 2113], "t_0": [1374, 1801], "t_": [1374, 1474, 1628, 1800, 1801, 2014, 2080, 2101], "s_0": 1374, "s_": [1374, 1428, 1429, 1471, 1577], "g_0": 1374, "g_": [1374, 1785, 1795, 1796], "g_i": 1374, "t_i": 1374, "0d": [1374, 1518], "xy": 1374, "50276": 1374, "cartesian_prod": [1374, 2014, 2066], "grid_x": 1374, "grid_i": 1374, "dstack": [1374, 2014, 2066, 2080], "matplotlib": [1374, 2085], "pyplot": 1374, "plt": 1374, "plot_surfac": 1374, "6750": 1375, "0857": [1375, 1954], "7197": [1375, 1979], "argmin": [1375, 2014, 2066, 2106], "min_indic": [1375, 2014], "1334": 1375, "2803": 1375, "4644": [1375, 1921, 1922, 1971, 1972], "2635": [1375, 2055], "3651": 1375, "0384": 1375, "0128": 1375, "7015": 1375, "1153": 1375, "9849": 1375, "1458": [1375, 2081], "5788": 1375, "deduc": [1377, 2080], "4851": 1377, "5037": 1377, "3633": 1377, "0760": 1377, "3362": [1379, 1380], "8437": [1379, 1380], "9627": [1379, 1380], "1727": [1379, 1380], "5173": [1379, 1380], "1398": [1379, 1380], "mpsalloc": [1381, 1383], "metal": [1383, 1392, 2026, 2030, 2056], "mpsgraph": 1383, "wait_until_complet": [1388, 1389], "signpost": [1388, 1389, 1390], "xcode": 1389, "recommendedmaxworkingsets": 1392, "unlimit": [1392, 2045], "1321": 1395, "4370": [1395, 2080], "1289": 1395, "0527": 1395, "3077": [1395, 1917], "0881": 1395, "1259": 1395, "0284": 1395, "2015": [1411, 2040, 2055], "6087": 1411, "1494": 1411, "5491": 1411, "260": 1411, "8663": 1411, "3137": 1411, "0700": 1411, "8378": 1411, "5146": 1411, "5244": 1411, 
"5767": 1411, "1363": 1411, "5877": 1411, "5083": 1411, "1614": 1411, "1645": 1411, "7021": 1411, "0085": 1411, "0367": 1411, "1567": 1411, "4312": 1411, "1019": 1411, "4394": 1411, "8753": 1411, "_sampl": 1412, "n_sampl": 1412, "prob_dist": 1412, "0404": 1414, "6361": 1414, "multigammaln": [1415, 2081], "4028e": 1416, "38": [1416, 2100], "1400e": 1416, "isnan": [1417, 2014, 2066, 2080, 2106], "midpoint": [1419, 1827], "weakli": [1421, 1422, 2081], "to_spars": [1422, 1904, 2014, 2059, 2066, 2080], "2262": [1424, 1822], "0682": [1424, 1822], "2866": [1424, 1822], "3940": [1424, 1822], "5x7": [1428, 1432], "7x7": [1428, 1432], "10x7": [1428, 1432], "cube": [1429, 1433, 1833], "d_": [1429, 1433, 1437, 1449, 1452, 1455, 1458, 1495, 1521, 1524, 1550, 1553, 1579, 1584, 1632, 1952, 1953, 1954, 1955], "5x7x9": [1429, 1433], "7x7x7": [1429, 1433], "7x9x8": [1429, 1433], "n_class": 1430, "cutoff": [1430, 2040], "div_valu": 1430, "head_bia": 1430, "edouard": 1430, "grave": [1430, 1445], "armand": 1430, "joulin": 1430, "moustapha": 1430, "cissu00e9": 1430, "grangier": 1430, "hervu00e9": 1430, "ju00e9g": 1430, "imbalanc": 1430, "zipf": 1430, "law": 1430, "102": [1430, 2105], "1001": 1430, "1002": 1430, "_class": 1430, "maxunpool1d": [1431, 1519, 1660, 1964], "maxunpool2d": [1432, 1473, 1520, 1661, 1964], "maxunpool3d": [1433, 1474, 1521, 1662, 1964], "selu": [1434, 1470, 1625, 2014, 2040, 2065, 2066], "n_i": [1435, 1436, 1437, 1453, 1454, 1455, 1519, 1520, 1521, 1575, 1576], "c_j": [1435, 1436, 1437, 1519, 1520, 1521], "size_averag": [1438, 1439, 1459, 1461, 1485, 1491, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1604, 1605, 1613, 1615, 1640, 1644, 1645, 1656, 1664, 1665, 1666, 1667, 1668, 1676, 1688, 1689, 1700, 2014], "unreduc": [1438, 1439, 1461, 1486, 1492, 1517, 1533, 1558, 1576], "ell": [1438, 1439, 1461, 1485, 1486, 1492, 1517, 1533, 1558, 1576], "l_1": [1438, 1439, 1461, 1485, 1486, 1492, 1517, 1533, 1558, 1576], "l_n": [1438, 1439, 1461, 1485, 1486, 1492, 1517, 1533, 1558, 1576], "w_n": [1438, 1439, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890], "y_n": [1438, 1439, 1461, 1485, 1486, 1492, 1517, 1533, 1558, 1949, 2042], "lim_": [1438, 2042], "secondli": 1438, "rescal": [1438, 1439, 1461, 1530, 1531, 1533, 1560, 1562, 1604, 1605, 1615, 1668, 1736, 1765], "nbatch": [1438, 1439], "meantim": [1438, 1439, 1459, 1461, 1485, 1492, 1517, 1518, 1529, 1530, 1531, 1533, 1540, 1558, 1559, 1575, 1604, 1605, 1615, 1644, 1668, 1676, 2111], "pos_weight": [1439, 1605, 2014], "recal": [1439, 2048, 2085], "ell_c": 1439, "l_c": 1439, "p_c": 1439, "imbal": 1439, "pai": [1439, 1605, 2061, 2084], "spacial": 1439, "random_": [1439, 1461, 1533, 1605, 2014, 2033, 2089], "hat": [1440, 1441, 1442, 1488, 1489, 1490, 1566], "terminologi": [1440, 1441, 1442, 1566], "5d": [1442, 1490, 1579, 1632, 1643, 1671, 1703], "volumetr": [1442, 1566, 1579, 1632, 1643, 1703, 1704, 1705], "spatio": [1442, 1566], "in1_featur": 1443, "in2_featur": 1443, "in1": [1443, 1603], "in2": [1443, 1603], "blank": [1445, 1616, 2014, 2017], "zero_infin": [1445, 1616, 2014], "connectionist": [1445, 1616], "unseg": 1445, "longest": [1445, 1758, 1760, 1761, 2050], "input_length": [1445, 1616, 2014, 2050], "target_length": [1445, 1616, 2014], "s_n": 1445, "target_n": 1445, "unbatch": [1445, 1461, 1472, 1477, 1488, 1496, 1532, 1542, 1570, 1626], "s_min": 1445, "toronto": 1445, "edu": [1445, 1833], "icml_2006": 1445, "background": [1445, 1456, 1632, 1671, 2057, 2075], "channel_shuffl": [1446, 2014, 2066], "_left": 
[1447, 1448, 1449, 1450, 1451, 1452, 1548, 1549, 1550, 1551, 1552, 1553, 1582, 1583, 1584, 1671], "_right": [1447, 1448, 1449, 1450, 1451, 1452, 1548, 1549, 1550, 1551, 1552, 1553, 1582, 1583, 1584, 1671], "_top": [1448, 1449, 1451, 1452, 1549, 1550, 1552, 1553, 1583, 1584, 1671], "_bottom": [1448, 1449, 1451, 1452, 1549, 1550, 1552, 1553, 1583, 1584, 1671], "_front": [1449, 1452, 1550, 1553, 1584, 1671], "_back": [1449, 1452, 1550, 1553, 1584, 1671], "320": [1449, 1553], "480": [1449, 1553], "0491": [1450, 1582], "7152": [1450, 1582], "0749": [1450, 1582], "3287": [1450, 1582], "8966": [1450, 1582], "1466": [1450, 1582], "2771": [1450, 1582], "6616": [1450, 1582], "4523": [1450, 1582], "1255": [1450, 1582], "6372": [1450, 1582, 1927], "1182": [1450, 1582], "8652": [1450, 1582], "6585": 1451, "4320": [1451, 1937], "8701": 1451, "4649": 1451, "_j": [1453, 1454], "star": [1453, 1454, 1455, 2016], "uue0": 1453, "trou": [1453, 1454, 1455, 1456, 1457, 1458, 1472, 1578], "harder": [1453, 1454, 1455, 1456, 1457, 1458, 1472, 1520, 1521, 1578], "u00e0": [1454, 1455, 1456, 1457, 1458, 1472, 1578], "prod_": [1454, 1455, 1457, 1458, 1471, 1577, 1787], "out_j": 1455, "deconvolut": [1456, 1457, 1458, 1610, 1611, 1612], "_pad": [1456, 1457, 1458], "semi": [1459, 1485, 2012, 2040], "supervis": [1459, 1485], "vert": [1460, 1535, 1614], "_2": [1460, 1614, 1731, 1765], "ast_1": [1460, 1476], "ast_2": [1460, 1476], "ignore_index": [1461, 1533, 1615, 1668, 2014], "unbalanc": [1461, 1533], "d_1": [1461, 1533, 1615, 1668], "d_2": [1461, 1533, 1615, 1668], "d_k": [1461, 1533, 1615, 1668], "_index": [1461, 1533], "logsoftmax": [1461, 1533, 1560, 1651], "nllloss": [1461, 1560, 1668, 1690, 1964], "blend": 1461, "smooth": [1461, 1486, 1558, 1563, 1615, 1641, 1688, 1794], "w_c": 1461, "rethink": [1461, 1615], "incept": [1461, 1615], "spectral_norm": [1462, 1755], "neuron": 1463, "detector": 1463, "dropout1d": [1465, 2014], "_freez": 1468, "sparseadam": [1468, 2067], "0251": 1468, "6902": [1468, 1824], "7172": 1468, "6431": 1468, "0748": 1468, "6969": 1468, "4970": 1468, "3448": 1468, "9685": 1468, "3677": 1468, "7265": 1468, "1685": 1468, "4362": 1468, "4004": [1468, 1908], "9400": 1468, "9124": 1468, "3616": 1468, "1151": 1468, "0309": 1468, "9315": 1468, "1655": [1468, 2045], "9897": [1468, 2048], "0635": 1468, "7895": 1468, "0364": 1468, "6778": 1468, "5803": 1468, "from_pretrain": [1468, 1469, 2104], "bag": [1469, 1624], "per_sample_weight": [1469, 1624, 2014, 2106], "embedding_sum": 1469, "8861": 1469, "4350": 1469, "0523": 1469, "1306": 1469, "5798": 1469, "0044": 1469, "7082": [1469, 1624], "2145": [1469, 1624], "6251": [1469, 1624], "6500": 1469, "satur": [1470, 1625], "alphadropout": [1470, 1598], "160": [1471, 2067], "unfold": [1472, 2014, 2066, 2084], "prod_d": [1472, 1578], "neighborhood": [1472, 1578], "col2im": [1472, 2014, 2066, 2106], "fold_param": [1472, 1578], "input_on": [1472, 1578], "output_ratio": [1473, 1474, 1627, 1628, 2014], "_random_sampl": [1473, 1474, 1627, 1628, 2014], "ben": [1473, 1474, 1627, 1628], "graham": [1473, 1474, 1627, 1628], "oh": [1473, 1474, 1627, 1628], "ow": [1473, 1474, 1627, 1628], "_ratio": [1473, 1474, 1628], "_h": 1473, "13x12": [1473, 1627], "kt": [1474, 1601, 1609, 1612, 1628, 1659], "ot": [1474, 1628], "13x12x11": [1474, 1628], "044715": [1475, 1630], "pack_sequ": [1477, 1496, 1542, 1760, 1762], "bias_ih": [1478, 1497, 1542, 1544], "bias_hh": [1478, 1497, 1542, 1544], "homoscedast": [1479, 1629], "heteroscedast": [1479, 1629], "nix": 1479, "weigend": 1479, "1994": 
1479, "icnn": 1479, "94": 1479, "orlando": 1479, "fl": 1479, "usa": [1479, 2085], "374138": 1479, "instancenorm": [1480, 2072], "shrinkag": [1481, 1564, 1635, 1693], "mobilenetv3": [1483, 1637], "_val": 1484, "dissimilar": 1485, "l1loss": [1486, 1558, 1645], "outlier": [1486, 1558, 2070, 2071], "huber": [1486, 1558, 1641], "smoothl1loss": [1486, 1641, 1688], "insensit": 1487, "unused_argument1": 1487, "unused_argument2": 1487, "ingredi": [1488, 1489, 1490], "styliz": [1488, 1489, 1490], "rgb": [1489, 1490, 2085], "log_target": [1491, 1644, 2014], "kl": [1491, 1644, 2012], "summaris": 1491, "loss_pointwis": 1491, "batchmean": [1491, 1644], "kl_loss": 1491, "mae": 1492, "proj_siz": [1496, 1543], "f_t": [1496, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "hf": [1496, 1497], "g_t": [1496, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "ig": [1496, 1497], "hg": [1496, 1497], "o_t": 1496, "ho": [1496, 1497], "c_t": 1496, "forget": [1496, 2015, 2016], "1402": 1496, "c_0": [1496, 1497], "c_n": 1496, "w_ii": 1496, "w_if": 1496, "w_ig": 1496, "w_io": 1496, "w_hi": 1496, "w_hf": 1496, "w_hg": 1496, "w_ho": 1496, "b_ii": 1496, "b_if": 1496, "b_ig": 1496, "b_io": 1496, "b_hi": 1496, "b_hf": 1496, "b_hg": 1496, "b_ho": 1496, "weight_hr_l": 1496, "_revers": 1496, "h_1": 1497, "c_1": 1497, "time_step": 1497, "_shape": [1498, 1541, 1715], "sentence_length": 1498, "lazymodulemixin": [1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511], "cls_to_becom": [1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1719], "convtranspose1d": [1505, 1610, 1964, 2072], "convtranspose3d": [1507, 1612, 1724, 1964, 2072], "instancenorm1d": [1508, 1642, 2072], "instancenorm2d": [1509, 1642, 2072], "instancenorm3d": [1510, 1642, 2072], "uninitializedparamet": [1511, 1706, 2012], "lrn": 1514, "signal_2d": 1514, "signal_4d": 1514, "output_2d": 1514, "output_4d": 1514, "x_j": [1516, 1560, 1562, 1690, 1906, 2052, 2081], "80827": [1522, 1523, 1524], "unpool": [1522, 1523, 1524], "maxpool3d": [1524, 1659, 1662, 1964, 2072], "unpooled_output": 1524, "t_destin": 1526, "lrelu": [1527, 2055], "hing": [1529, 1531], "sum_i": [1530, 1531, 1559], "nelement": [1530, 1559], "jointli": 1532, "multihead": [1532, 1572], "concat": [1532, 2014, 2051, 2066], "head_1": 1532, "head_h": 1532, "head_i": 1532, "qw_i": 1532, "kw_i": 1532, "vw_i": 1532, "inference_mod": [1532, 1574], "nestedtensor": [1532, 1574, 2035], "multihead_attn": 1532, "e_q": 1532, "e_k": 1532, "e_v": 1532, "_head": [1532, 1570], "merge_mask": 1532, "mask_typ": 1532, "merged_mask": 1532, "nll": 1533, "num_paramet": 1534, "nchannel": 1534, "decai": [1534, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 1803, 1805, 1806, 1809, 1810, 1812, 1883, 2067], "legitim": [1534, 1632, 2049], "vert_p": 1535, "upscale_factor": [1538, 1674, 2014], "upscal": 1538, "video": [1538, 1539, 2034, 2085, 2100], "shi": [1538, 1539], "2016": [1538, 1539, 1576], "_factor": [1538, 1539, 1579, 1580, 1581], "pixel_shuffl": [1538, 2014, 2066, 2072], "downscale_factor": [1539, 1675, 2014], "pixelshuffl": [1539, 1674, 1675, 2072], "downscal": 1539, "pixel_unshuffl": [1539, 2014, 2066, 2072], "log_input": [1540, 1676, 2014], "poisson": [1540, 1676, 1883, 2012, 2014, 2066], "stirl": [1540, 1676], "rm": [1541, 1575, 1576, 1715], "rms_norm": [1541, 1715, 2014, 2066], "hh": [1542, 1544], "h_t_minus_1": 1542, "flatten_paramet": 1543, "3333333333333333": [1545, 1799, 1805, 2014], "leaki": [1545, 1682, 
2040], "rectifi": [1545, 1546, 1678, 2040], "empir": 1545, "crelu": 1546, "1603": 1546, "05201": 1546, "6732632423543772848170429916717": [1554, 1685], "0507009873554804934193349852946": [1554, 1685], "kaiming_norm": 1554, "kaiming_normal_": [1554, 2012, 2018, 2040], "initialis": 1554, "calculate_gain": [1554, 2012, 2018, 2040], "cascad": 1555, "relu2": [1555, 1706], "swish": [1556, 1687], "coin": [1556, 1687], "cnn": [1558, 2070], "ross": 1558, "girshick": 1558, "quadrat": [1558, 2050], "huberloss": [1558, 1641], "w_j": 1561, "soft": [1564, 1634, 1693], "softshrinkag": 1564, "convert_sync_batchnorm": 1566, "r1": 1566, "sync_bn_network": 1566, "ddp_sync_bn_network": 1566, "sync_bn_modul": 1566, "d_model": [1570, 1571, 1572, 1573, 1574], "nhead": [1570, 1571, 1572, 1573, 1574], "num_encoder_lay": 1570, "num_decoder_lay": 1570, "dim_feedforward": [1570, 1572, 1574], "custom_encod": 1570, "custom_decod": 1570, "layer_norm_ep": [1570, 1572, 1574], "norm_first": [1570, 1572, 1574], "ashish": [1570, 1572, 1574], "vaswani": [1570, 1572, 1574], "noam": [1570, 1572, 1574], "shazeer": [1570, 1572, 1574], "niki": [1570, 1572, 1574], "parmar": [1570, 1572, 1574], "jakob": [1570, 1572, 1574], "uszkoreit": [1570, 1572, 1574], "llion": [1570, 1572, 1574], "jone": [1570, 1572, 1574], "aidan": [1570, 1572, 1574], "gomez": [1570, 1572, 1574], "lukasz": [1570, 1572, 1574], "illia": [1570, 1572, 1574], "polosukhin": [1570, 1572, 1574], "6000": [1570, 1572, 1574, 1579, 1830, 1877, 1880], "6010": [1570, 1572, 1574], "feedforward": [1570, 1572, 1574, 2040], "transformer_model": 1570, "word_language_model": 1570, "src_mask": [1570, 1574], "tgt_mask": [1570, 1571, 1572], "memory_mask": [1570, 1571, 1572], "src_key_padding_mask": [1570, 1573, 1574], "tgt_key_padding_mask": [1570, 1571, 1572], "memory_key_padding_mask": [1570, 1571, 1572], "src_is_caus": 1570, "tgt_is_caus": [1570, 1571, 1572], "memory_is_caus": [1570, 1571, 1572], "_mask": [1570, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1754], "_key_padding_mask": 1570, "generate_square_subsequent_mask": 1570, "sz": 1570, "decoder_lay": [1571, 1572], "transformerdecoderlay": 1571, "transformer_decod": 1571, "encoder_lay": [1573, 1574], "enable_nested_tensor": 1573, "mask_check": 1573, "bert": [1573, 2104], "1810": 1573, "04805": 1573, "transformerencoderlay": 1573, "transformer_encod": 1573, "triplet": [1575, 1576, 1700, 1701], "x3": 1575, "balnta": [1575, 1576], "riba": [1575, 1576], "a_i": [1575, 1576, 1885], "p_i": [1575, 1576], "rvert_p": [1575, 1669], "tripletmarginwithdistanceloss": [1575, 1701], "triplet_loss": [1575, 1576], "distance_funct": [1576, 1701], "l_i": 1576, "tripletmarginloss": [1576, 1700], "l_p": [1576, 1669], "pairwisedist": [1576, 1672], "penal": [1576, 2055, 2067], "distant": 1576, "anchor_id": 1576, "positive_id": 1576, "negative_id": 1576, "l_infin": 1576, "bmva": 1576, "bmvc": 1576, "paper119": 1576, "unflattened_s": 1577, "namedtensor": 1577, "namedshap": 1577, "u_1": 1577, "u_n": 1577, "u_i": 1577, "im2col": [1578, 2014, 2066], "2x3": 1578, "3x4": 1578, "inp_unf": 1578, "out_unf": 1578, "recompute_scale_factor": [1579, 1643], "bicub": [1579, 1632, 1643, 1703, 1964], "trilinear": [1579, 1632, 1643, 1703, 1964], "input_3x3": 1579, "4375": 1579, "8125": 1579, "9375": 1579, "2400": [1579, 1944], "1200": [1579, 1855, 2045], "8800": 1579, "4400": [1579, 1944], "7200": 1579, "0400": 1579, "2800": [1579, 1877], "3600": 1579, "5200": 1579, "6400": 1579, "1678": 1583, "4418": 1583, "9466": [1583, 
2081], "9604": 1583, "4219": 1583, "5241": 1583, "9162": 1583, "5436": [1583, 1944], "6446": 1583, "sdpa_kernel": [1585, 1684], "flash_attent": [1585, 1590], "seq_len_q": 1586, "seq_len_kv": 1586, "causalvari": 1586, "constru": 1586, "causal_upper_left": 1586, "causal_lower_right": 1586, "bsz": 1586, "seqlen_q": 1586, "seqlen_kv": 1586, "head_dim": 1586, "attn_bia": [1586, 1684], "upper_left": [1587, 1589], "lower_right": [1587, 1588], "diagonal_offset": [1587, 1588], "causalbia": [1588, 1589, 1684], "sdpbackend": 1590, "adaptiveavgpool1d": [1591, 2072], "tripl": [1593, 1596], "adaptivemaxpool1d": 1594, "adaptivemaxpool2d": [1595, 1964], "adaptivemaxpool3d": 1596, "avgpool1d": [1599, 2072], "st": [1601, 1609, 1612, 1659, 2111], "avgpool3d": [1601, 1964, 2072], "iT": [1601, 1609, 1612], "padt": [1601, 1609, 1612], "score": [1605, 1684, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751, 2012], "dt": [1609, 1612, 2081, 2111], "out_padw": [1610, 1611, 1612], "out_padh": [1611, 1612], "out_padt": 1612, "cosineembeddingloss": 1613, "ctcloss": [1616, 1964], "charact": [1616, 1874, 2013, 2016, 2034, 2075], "elu": [1622, 2014, 2065, 2066, 2072], "embedding_matrix": [1623, 1624], "8490": 1623, "9625": 1623, "6753": 1623, "7761": 1623, "6108": 1623, "6246": 1623, "9751": 1623, "3618": 1623, "4161": [1623, 2080], "2419": 1623, "7383": 1623, "0237": 1623, "7794": 1623, "0528": 1623, "3385": 1623, "8612": 1623, "1867": 1623, "5384": 1623, "8720": 1623, "6262": 1623, "7471": 1623, "embeddingbag": [1624, 1964, 2070, 2072, 2086], "3397": 1624, "5545": 1624, "5893": 1624, "4386": 1624, "5882": 1624, "featurealphadropout": 1625, "gaussiannllloss": 1629, "border": 1632, "affine_grid": [1632, 2014], "extrema": 1632, "pil": [1632, 1643], "overshoot": [1632, 1643, 1703], "gumbel": [1634, 2012], "y_hard": 1634, "y_soft": 1634, "hardtanh": [1639, 2014, 2026, 2066, 2072, 2106], "hingeembeddingloss": 1640, "use_input_stat": [1642, 2014], "antialia": 1643, "anti": 1643, "pillow": [1643, 2085], "buggi": 1643, "inter_nearest": 1643, "104157": 1643, "kldivloss": 1644, "batchsiz": [1644, 1909, 1910, 1911, 1913, 1914, 2080], "leaky_relu": [1648, 2014, 2040, 2066, 2072, 2106], "localresponsenorm": 1650, "_stacklevel": [1651, 1690, 1691, 2014, 2020], "lppool1d": 1653, "lppool2d": 1654, "lppool3d": 1655, "marginrankingloss": 1656, "max_unpool1d": [1657, 2014], "multimarginloss": 1665, "multilabelmarginloss": 1666, "multilabelsoftmarginloss": 1667, "n_0": 1669, "n_": 1669, "n_k": 1669, "everywher": [1670, 1923, 2067], "circularpad2d": 1671, "constantpad2d": 1671, "reflectionpad2d": [1671, 1964], "replicationpad2d": [1671, 1964], "t4d": 1671, "p1d": 1671, "p2d": 1671, "p3d": 1671, "pixelunshuffl": [1675, 2072], "poissonnllloss": 1676, "rrelu": [1683, 2014, 2066], "dropout_p": [1684, 2014], "temp_mask": 1684, "mymodel": [1684, 2016, 2041, 2057, 2063], "ev": 1684, "legend": 1684, "softmarginloss": 1689, "module_kwarg": 1699, "upsample_trilinear": 1704, "fo": 1704, "spatia": 1705, "mixin": [1706, 2012], "dry": 1706, "lazymlp": 1706, "lazylinear": 1706, "lazy_mlp": 1706, "8832e": 1706, "5636e": 1706, "1598e": 1706, "5637e": 1706, "8788e": 1706, "0042e": 1706, "0019": 1706, "lazymodul": 1706, "full_mlp": 1706, "3837": [1706, 1824], "0907": 1706, "6708": 1706, "5223": 1706, "9028": 1706, "2851": 1706, "6813": 1706, "5766": 1706, "8678": 1706, "1320": 1706, "2938": 1706, "0679": [1706, 1954], "2793": [1706, 1772], "1088": 1706, "1795": 1706, "2301": 1706, "2807": 1706, "2479": 1706, "1091": 1706, 
"has_uninitialized_param": 1706, "initialize_paramet": 1706, "check_reduct": 1716, "delay_all_reduce_named_param": 1716, "param_to_hook_all_reduc": 1716, "optimizer_param": 1716, "loss_func": [1716, 2075], "consume_prefix_in_state_dict_if_pres": 1716, "nccl2": 1716, "dictat": [1716, 2016], "mebibyt": 1716, "mib": 1716, "detach_": [1716, 2014, 2033, 2072, 2080], "ddp_logging_data": 1716, "can_set_static_graph": 1716, "model_ddp": 1716, "_get_ddp_logging_data": 1716, "divide_by_initial_world_s": 1716, "caught": [1716, 2032], "syncbatchnorm": 1716, "deplet": 1716, "pariti": 1716, "another_input": 1716, "predivid": 1716, "noop": 1716, "encode_and_decod": 1716, "encoded_tensor": 1716, "decoded_tensor": 1716, "error_if_nonfinit": [1720, 1721], "clip_valu": 1722, "nhwc": [1723, 1724, 2083, 2085], "outweigh": [1723, 1724, 1797, 2111], "_convnd": 1725, "conv_w": 1726, "conv_b": 1726, "bn_rm": [1726, 1728], "bn_rv": [1726, 1728], "bn_ep": [1726, 1728], "bn_w": [1726, 1728], "bn_b": [1726, 1728], "linear_w": 1728, "linear_b": 1728, "orthogonal_map": 1730, "use_trivi": 1730, "qq": 1730, "matrix_exp": [1730, 2014, 2066], "caylei": 1730, "thin": [1730, 1826], "manifold": 1730, "register_parametr": [1730, 1731, 1733, 1734, 1765, 2036], "orth_linear": 1730, "parametrizedlinear": [1730, 1731, 1732], "parametrizationlist": [1730, 1731, 1732, 1736], "_orthogon": 1730, "9332e": 1730, "n_power_iter": [1731, 1765], "sn": [1731, 1765], "discrimin": [1731, 1765], "adversari": [1731, 1765], "lipschitz": 1731, "reimplement": [1731, 1765, 2099], "_spectralnorm": 1731, "convtranspos": [1731, 1765], "snm": 1731, "0081": 1731, "amaxbackward0": 1731, "decoupl": [1732, 1768, 1784, 1787, 1793], "1602": [1732, 1768], "07868": [1732, 1768], "_weightnorm": 1732, "original0": [1732, 1733, 1736, 1768], "original1": [1732, 1733, 1736, 1768], "tensor_nam": [1733, 1735, 1736, 1737], "right_invers": [1733, 1736], "out_rnn": 1734, "rnn_cell": 1734, "inbuilt": 1736, "unparametr": 1736, "rankon": 1736, "surject": 1736, "s0_sqrt": 1736, "linear_rank_on": 1736, "matrix_rank": 1736, "leave_parametr": 1737, "unparametris": 1737, "skeleton": 1738, "compute_mask": [1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745], "importance_scor": [1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1747, 1750, 1751], "apply_mask": [1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745], "pruned_tensor": [1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745], "default_mask": [1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745], "_orig": [1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1754], "undon": [1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1754], "unprun": [1741, 1742, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753], "basepruningmethod": [1743, 1749], "add_pruning_method": 1743, "pruning_typ": [1743, 1747], "unstructur": [1743, 1747], "ravel": [1743, 2014, 2066], "nonmask": 1743, "bias_mask": [1746, 1748], "pruning_method": 1747, "parameters_to_prun": 1747, "l1unstructur": 1747, "parameters_to_vector": 1747, "forward_pre_hook": [1749, 2055], "random_unstructur": [1749, 1754], "odict_kei": 1750, "weight_orig": 1750, "weight_mask": [1750, 1753], "columns_prun": 1752, "t_modul": [1755, 1756, 1765, 1768], "weight_norm": 1756, "sorted_indic": [1757, 1759, 1760, 1762], "unsorted_indic": [1757, 1759, 1760, 1762], "abc": [1757, 2016], "axbc": 1757, "conform": [1757, 2055], "is_cuda": [1757, 2033, 2066, 2082], "enforce_sort": [1758, 1759, 1760], "unsort": [1758, 1759, 1862], "shortest": 1758, "uncondition": [1758, 2011, 
2048, 2112], "pad_sequ": [1759, 1763, 2014, 2066], "padding_valu": [1760, 1761, 2014], "total_length": [1760, 2050], "seq_unpack": 1760, "lens_unpack": 1760, "packed_sequ": 1762, "unpacked_sequ": 1762, "padded_sequ": 1763, "unpad": 1763, "unstack": 1763, "as_tensor": [1763, 1942, 2014, 2035, 2048, 2066, 2085, 2086, 2087], "unpadded_sequ": 1763, "module_cl": [1764, 2075], "5846e": 1764, "29": [1764, 1890, 2065], "8307e": 1764, "5250e": 1764, "1210e": 1764, "4677e": 1764, "5915e": 1764, "4013e": 1764, "weight_u": 1765, "parameters_and_buff": 1766, "reparamater": 1766, "weight_g": [1768, 2014], "weight_v": 1768, "modern": [1768, 2058, 2095, 2102], "bother": 1768, "102999": 1768, "remove_parametr": 1768, "as_tupl": [1770, 1979], "complexfloat": [1771, 1777], "0425": 1772, "7969": 1772, "2925": 1772, "7229": 1772, "2134": 1772, "0505": 1772, "1408": 1772, "0563": 1772, "0566": 1772, "0732": [1772, 1944], "0687": 1772, "1177": 1772, "2303": [1772, 1920], "1552": 1772, "6148": 1772, "6535": 1772, "8318": 1772, "3987": 1772, "9544": [1772, 1834], "6048": 1772, "7909": 1772, "120": [1774, 2085], "from_valu": 1777, "onnx_typ": 1777, "tensorprotodatatyp": 1777, "torch_c_value_with_type_float": 1777, "from_dtyp": 1777, "jit_type_bas": 1777, "safer": [1777, 2023], "onnxexportererror": [1777, 2063, 2065], "from_onnx_typ": 1777, "_onnx": 1777, "symbolicvalueerror": 1777, "onnx_compat": 1777, "scalar_nam": 1777, "complexhalf": 1777, "complexdoubl": 1777, "float8e5m2": 1777, "float8e4m3fn": 1777, "float8e5m2fnuz": 1777, "float8e4m3fnuz": 1777, "torch_nam": 1777, "int8_t": 1777, "int16_t": 1777, "float8_e5m2": [1777, 2082, 2086], "float8_e4m3fn": [1777, 2082, 2086], "float8_e5m2fnuz": [1777, 2082], "float8_e4m3fnuz": [1777, 2082], "verif": [1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 2065], "params_dict": 1778, "export_opt": [1778, 2063], "_excluded_node_kind": 1778, "frozenset": [1778, 2016], "scalarimplicit": [1778, 2066], "prim": [1778, 2013, 2065, 2066], "listconstruct": [1778, 2013], "all_mismatch_leaf_graph_info": 1778, "essential_node_count": 1778, "subgraph": [1778, 2042, 2047, 2065, 2068, 2091, 2100, 2101, 2111], "essential_node_kind": 1778, "export_repro": 1778, "repro_dir": 1778, "repro": [1778, 2102, 2111], "test_": 1778, "test_data_set_0": 1778, "input_0": [1778, 2065], "pb": [1778, 2065], "input_1": [1778, 2065], "output_0": 1778, "output_1": 1778, "find_mismatch": [1778, 2065], "exhibit": [1778, 2065, 2098, 2111], "verificationopt": [1778, 2065], "find_partit": 1778, "has_mismatch": 1778, "pretty_print_mismatch": 1778, "pretty_print_tre": 1778, "graph_info": [1778, 2065], "__2": [1778, 2065], "__1": [1778, 2065], "__3": [1778, 2065], "110": [1778, 2065], "verify_export": 1778, "onnx_graph": 1778, "onnx_out": 1778, "pt_out": 1778, "ignore_non": 1779, "check_shap": 1779, "check_dtyp": [1779, 2087], "onnxbackend": 1779, "onnx_runtime_cpu": 1779, "remained_onnx_input_idx": 1779, "acceptable_error_percentag": 1779, "percentag": [1779, 1808, 1883, 2070], "weight_decai": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797, 2055, 2067], "1e6": 1780, "tensorlist": [1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "prohibit": [1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796, 2045, 2060], "impair": [1780, 1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "ungraph": [1780, 1781, 1783, 1784, 1785, 1787, 1793, 1794, 1795], "removeablehandl": [1780, 1781, 1782, 1783, 1784, 1785, 
1786, 1787, 1793, 1794, 1795, 1796, 1797], "register_load_state_dict_pre_hook": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "register_state_dict_post_hook": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "register_step_post_hook": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "register_step_pre_hook": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "new_arg": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "new_kwarg": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1793, 1794, 1795, 1796, 1797], "momentum_buff": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1790, 1793, 1794, 1795, 1796, 1797], "reevalu": [1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1791, 1793, 1794, 1795, 1796, 1797, 2067], "rho": 1781, "110mm": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "4pt": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "textbf": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "theta_0": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "v_0": [1781, 1783, 1784, 1787, 1793, 1794], "leftarrow": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "u_0": [1781, 1785], "hspace": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "5mm": [1781, 1782, 1783, 1784, 1785, 1787, 1794, 1795, 1796], "nabla_": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "theta_": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "10mm": [1781, 1782, 1783, 1784, 1785, 1787, 1794, 1795, 1796], "v_t": [1781, 1783, 1784, 1787, 1793, 1794], "v_": [1781, 1783, 1784, 1787, 1793, 1794, 1796], "2_t": [1781, 1782, 1783, 1784, 1787, 1793, 1794], "21mm": 1781, "u_t": [1781, 1785], "theta_t": [1781, 1782, 1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "oscil": 1781, "lr_decai": 1782, "initial_accumulator_valu": 1782, "12mm": [1782, 1793], "_sum_0": 1782, "tild": [1782, 1794], "_sum_t": 1782, "_sum_": 1782, "subgradi": 1782, "999": [1783, 1784, 1785, 1787, 1793, 1797, 2060, 2067], "amsgrad": [1783, 1784], "beta_1": [1783, 1784, 1785, 1787, 1793], "beta_2": [1783, 1784, 1785, 1787, 1793], "13mm": [1783, 1784, 1785, 1787, 1793, 1794, 1795, 1796], "textit": [1783, 1784, 1787, 1793, 1796], "m_0": [1783, 1784, 1785, 1787, 1793], "widehat": [1783, 1784, 1787, 1793], "m_t": [1783, 1784, 1785, 1787, 1793], "m_": [1783, 1784, 1785, 1787, 1793, 2080], "002": [1785, 1787], "t_1": 1785, "2e": [1785, 1787], "max_it": 1786, "max_ev": 1786, "tolerance_grad": 1786, "tolerance_chang": 1786, "history_s": 1786, "line_search_fn": 1786, "bfg": 1786, "minfunc": 1786, "intens": [1786, 2058], "param_byt": 1786, "strong_wolf": 1786, "momentum_decai": 1787, "004": 1787, "decoupled_weight_decai": [1787, 1793], "gamma_t": 1787, "psi": [1787, 2081], "_decai": [1787, 1793], "15mm": [1787, 1795, 1796], "mu_t": 1787, "96": 1787, "mu_": 1787, "11mm": 1787, "incorpor": [1787, 1882, 2070], "nesterov": [1787, 1796], "4e": 1787, "nadamw": 1787, "weightdecai": 1793, "18mm": 1793, "rho_": 1793, "6mm": 1793, "rho_t": 1793, "t_2": 1793, "l_t": 1793, "radamw": 1793, "_0": [1794, 2049, 2068, 2113], "av": 1794, "8mm": 1794, "3mm": 1794, "lectur": 1794, "hinton": 1794, "step_siz": [1795, 1812], "resili": [1795, 2058], "eta_": [1795, 1800, 1801], "etaplu": 1795, "etaminu": 1795, "gamma_": [1795, 2081], "0_": 1795, "eta_0": 1795, "i_": [1795, 1944], "eta_t": [1795, 1800, 1801], "dampen": 1796, "sutskev": 
1796, "veloc": 1796, "conflat": 1797, "is_spars": [1797, 2033, 2066, 2080, 2082], "maskedtensor": [1797, 2080], "rig": 1797, "insist": 1797, "lr_schedul": [1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 2067], "chainabl": [1798, 1808], "081": 1798, "729": [1798, 1811], "6561": [1798, 1974], "59049": 1798, "scheduler1": [1798, 1811, 2067], "constantlr": [1798, 1811], "total_it": [1798, 1799, 1805, 1809, 1811], "scheduler2": [1798, 1811, 2067], "exponentiallr": [1798, 1811, 2067], "get_last_lr": [1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812], "print_lr": [1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812], "is_verbos": [1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812], "__dict__": [1798, 1799, 1800, 1801, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812], "last_epoch": [1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1811, 1812], "mileston": [1799, 1805, 1806, 1811, 2067], "simultan": [1799, 1800, 1805, 1806, 1812, 2042, 2075], "025": [1799, 1805], "t_max": [1800, 2067], "eta_min": [1800, 1801], "anneal": [1800, 1801, 1808, 2067], "cur": [1800, 1801], "sgdr": [1800, 1801], "2k": 1800, "sole": 1800, "t_mult": 1801, "base_lr": [1802, 1808], "max_lr": [1802, 1808], "step_size_up": 1802, "step_size_down": 1802, "scale_fn": 1802, "scale_mod": 1802, "cycle_momentum": [1802, 1808], "base_momentum": [1802, 1808], "max_momentum": [1802, 1808], "cyclic": 1802, "forth": [1802, 1970, 2011, 2069, 2096], "amplitud": [1802, 1808], "triangular2": 1802, "exp_rang": 1802, "bckenstler": 1802, "train_batch": [1802, 1808], "get_lr": 1802, "lr_lambda": [1804, 1807], "lambda1": 1804, "lambda2": 1804, "start_factor": 1805, "end_factor": 1805, "03125": 1805, "0375": 1805, "04375": 1805, "005": [1806, 1812], "lmbda": 1807, "total_step": 1808, "steps_per_epoch": 1808, "pct_start": 1808, "anneal_strategi": [1808, 2067], "div_factor": 1808, "final_div_factor": 1808, "three_phas": 1808, "1cycl": 1808, "fastai": 1808, "unpublish": 1808, "initial_lr": 1808, "min_lr": [1808, 1810], "1e4": 1808, "annihil": 1808, "00075": 1809, "00050": 1809, "00025": 1809, "patienc": 1810, "threshold_mod": 1810, "cooldown": 1810, "stagnat": 1810, "new_lr": 1810, "baselin": 1810, "intoler": 1810, "optimum": 1810, "dynamic_threshold": 1810, "val_loss": 1810, "81": 1811, "mn": 1814, "pca": [1816, 2080], "overestim": [1816, 1928], "nathan": [1816, 1928], "halko": [1816, 1928], "gunnar": [1816, 1928], "martinsson": [1816, 1928], "tropp": [1816, 1928], "probabilist": [1816, 1928], "0909": [1816, 1928], "4061": [1816, 1928], "na": [1816, 1928], "cmath": [1820, 2014], "4142j": 1820, "4331": 1823, "2475": [1823, 1926], "6834": 1823, "2791": 1823, "1875": 1823, "5561": 1823, "4670": 1823, "8020": 1824, "5428": 1824, "5854": 1824, "5261": [1824, 1927], "1857": 1824, "2498": 1824, "1646": [1824, 2045], "0705": [1824, 2048], "0629": 1824, "2962": 1824, "0821": [1824, 1883], "1831": 1824, "type1": [1825, 2014], "type2": [1825, 2014], "2117": 1827, "9765": 1827, "1707": 1827, "4884": 1827, "5661": 1827, "5795": 1827, "5280": 1827, "9206": 1827, "quantization_schem": [1828, 1829, 1830, 1831, 1832], "int_repr": [1828, 1829, 2014, 2066], "nchw": [1830, 2085], "qx": [1830, 1831, 1832], "00001": 1830, "max_pool1d": [1831, 2014, 2066, 2072], "max_pool2d": [1832, 2014, 2063, 2066, 2072], "quasirandom": 1833, "scrambl": 1833, "sobol": 1833, "quasi": 1833, "21201": 1833, "web": 
1833, "unsw": 1833, "au": [1833, 2068], "fkuo": 1833, "art": 1833, "owen": 1833, "niederreit": 1833, "xing": 1833, "466": 1833, "489": 1833, "decemb": 1833, "1998": 1833, "zh": 1833, "vychisl": 1833, "phy": 1833, "784": 1833, "802": 1833, "1967": 1833, "soboleng": 1833, "draw_base2": 1833, "base2": 1833, "fast_forward": 1833, "142": 1834, "283": 1834, "570": 1834, "359": 1834, "9894": 1834, "2503": 1835, "3525": 1835, "5673": 1835, "8237": 1835, "5781": 1835, "6879": 1835, "3816": 1835, "7249": 1835, "0998": 1835, "im": [1839, 2042], "1436": 1839, "9966": 1839, "3426": 1839, "6366": 1839, "5954": 1839, "8929": 1839, "0923": 1839, "1719": 1839, "4709": 1839, "1996": 1839, "4595": 1845, "4314": 1845, "n2": 1848, "n3": 1848, "negat": [1851, 1936, 2016], "is_neg": [1851, 2014, 2066], "equidist": 1855, "inexact": 1855, "1234567": 1855, "1230": 1855, "vstack": [1856, 2014, 2066, 2080], "0370": 1857, "2970": 1857, "5420": 1857, "9105": 1857, "8351": 1857, "pickle_protocol": [1858, 2068], "default_protocol": 1858, "_use_new_zipfile_seri": [1858, 2060], "zipfil": [1858, 2068], "sorted_sequ": [1862, 2014], "sorter": [1862, 2014], "sorted_sequence_1d": 1862, "select_copi": [1864, 2014, 2066], "92701": [1866, 2083], "get_default_devic": 1866, "henry2019": 1870, "briefli": [1870, 2051], "nine": 1870, "fma": 1870, "10x": [1870, 1928], "1904": 1870, "06376": 1870, "denorm": [1871, 2058], "sse3": 1871, "aarch64": 1871, "323": 1871, "88131e": 1871, "324": 1871, "interop": 1872, "intraop": 1873, "edgeitem": 1874, "linewidth": 1874, "sci_mod": 1874, "shamelessli": 1874, "repr": [1874, 2016], "sane": 1874, "_tensor_str": 1874, "_formatt": 1874, "12345": 1874, "1235": 1874, "excess": [1876, 2102], "24j": 1877, "8000j": 1877, "9600j": 1877, "4472": [1877, 2055], "8944j": 1877, "expit": [1878, 2081], "2222": [1880, 1942], "4444": 1880, "8889": 1880, "4901e": 1881, "4000e": 1881, "0077e": 1881, "4923e": 1881, "waveform": [1882, 1883, 1884], "1564": 1882, "4540": 1882, "8910": 1882, "9877": 1882, "1423": [1882, 1905], "4154": 1882, "8413": [1882, 2081], "0302": 1883, "2231": 1883, "6065": 1883, "5400e": 1883, "3546e": 1883, "4788e": 1883, "8316e": 1883, "02": [1883, 1884, 1889], "3534e": 1883, "0065e": [1884, 1889], "1875e": [1884, 1889], "3937e": [1884, 1889], "2465e": [1884, 1889], "8250e": [1884, 1889], "9858e": [1884, 1889], "1365e": [1884, 1889], "8659e": [1884, 1889], "4658e": [1884, 1889], "3941e": [1884, 1889], "5400": 1885, "3376": 1885, "4200": 1885, "9136": 1885, "wit": [1885, 2013], "0955": [1885, 1886, 1888], "3455": [1885, 1886, 1888], "6545": [1885, 1886, 1888], "9045": [1885, 1886, 1888], "0800": [1886, 1887], "1876": [1886, 1887], "4601": [1886, 1887], "7700": [1886, 1887], "9723": [1886, 1887], "1679": 1887, "3979": 1887, "6821": 1887, "9121": 1887, "1170": 1888, "9698": 1888, "36358": 1890, "z_n": [1890, 2042], "48917": 1890, "2z_n": 1890, "13659": 1890, "3z_n": 1890, "01064": 1890, "4z_n": 1890, "u03c0": 1890, "sidelob": 1890, "transact": 1890, "acoust": 1890, "speech": 1890, "84": 1890, "91": 1890, "feb": 1890, "1981": 1890, "tassp": 1890, "1163506": 1890, "heinzel": 1890, "spectrum": [1890, 2058, 2114], "dft": 1890, "februari": 1890, "holomet": 1890, "fnal": 1890, "gov": 1890, "gh_fft": 1890, "nutal": 1890, "general_ham": 1890, "6280e": 1890, "2698e": 1890, "1052e": 1890, "9826e": 1890, "5461": [1892, 2055], "1347": 1892, "7266": 1892, "2746": 1892, "5194": 1892, "1343": 1892, "4032": 1892, "2711": 1892, "5380": 1894, "8632": 1894, "1265": 1894, "9399": 1894, "5644": 1894, "9744": 1894, 
"1268": 1894, "2162": 1899, "6719": 1899, "3332": 1899, "5793": [1899, 2055], "0061": 1899, "6058": 1899, "9497": 1899, "5071": 1899, "3343": 1899, "9553": 1899, "0960": 1899, "derivati": [1900, 1904], "to_sparse_coo": 1901, "sparsr": 1902, "run_my_model": 1902, "prev_checks_en": 1902, "check_invari": [1902, 1909, 1910, 1911, 1912, 1913, 1914, 2080], "sparse_csr_tensor": [1902, 2014, 2066, 2080], "z_": [1904, 2042, 2081], "bigoplus_": 1904, "kj": 1904, "bigoplu": 1904, "sparseaddmmbackward0": 1904, "y1": [1904, 2063, 2096], "sparsemmreduceimplbackward0": 1904, "y2": [1904, 2063, 2096], "sparsiti": [1905, 2012, 2023, 2040], "spy": 1905, "2847": 1905, "7805": 1905, "1900": [1905, 2059], "to_dens": [1905, 1907, 2014, 2066, 2080], "3903": 1905, "x_k": 1906, "6438": 1908, "6467": 1908, "3411": 1908, "0918": 1908, "5348": 1908, "0634": 1908, "0494": 1908, "0646": 1908, "1844": 1908, "1276": 1908, "1874": 1908, "6334": 1908, "9682": 1908, "5340": 1908, "7483": 1908, "4512": 1908, "4073": 1908, "8901": 1908, "3183": 1908, "7539": 1908, "6596": 1908, "ncolblock": [1909, 2080], "array_list": [1909, 1910, 1911, 1913, 1914], "nrow": [1909, 1910, 1911, 1913, 1914, 2080], "ncol": [1909, 1910, 1911, 1913, 1914, 2080], "denses": [1909, 1910, 1911, 1913, 1914, 2080], "check_sparse_tensor_invari": [1909, 1910, 1911, 1912, 1913, 1914, 2080], "nrowblock": [1910, 2080], "compressed_indic": [1911, 2014, 2080], "plain_indic": [1911, 2014, 2080], "compressed_dim_s": [1911, 2080], "is_coalesc": [1912, 2014, 2066, 2080], "rdinat": 1912, "prerequisit": [1912, 2047], "coalescion": 1912, "sparsetensor": 1912, "_indic": [1912, 2066, 2080, 2102], "0755": [1916, 1917], "0226": [1916, 1917], "0831": [1916, 1917], "4806": [1916, 1917], "0112": 1916, "2883": 1916, "6933": 1916, "0457": 1917, "0069": 1917, "2310": 1917, "2345": [1920, 1952], "1229": 1920, "1863": 1920, "2959": [1921, 1922, 1971, 1972], "8101": [1921, 1922, 1971, 1972], "5027": [1921, 1922, 1971, 1972], "3270": [1921, 1922, 1971, 1972], "5905": [1921, 1922, 1971, 1972], "6538": [1921, 1922, 1971, 1972, 2055], "3330": [1921, 1922, 1971, 1972], "5596": [1921, 1922, 1971, 1972], "6548": [1921, 1922, 1971, 1972], "1264": [1921, 1922, 1971, 1972], "5080": [1921, 1922, 1927, 1971, 1972, 2055], "6420": [1921, 1922, 1971, 1972], "1992": [1921, 1922, 1971, 1972], "0311": [1921, 2081], "7477": 1921, "2204": 1921, "9087": 1921, "2620": 1922, "0028": [1922, 1948], "0957": 1922, "6038": 1922, "0645": [1922, 1972], "4485": [1922, 1972], "8707": [1922, 1972], "0665": [1922, 1972], "taper": 1923, "librosa": 1923, "omega": 1923, "win": [1923, 2042], "_fft": [1923, 2014], "1133": 1926, "2958": 1926, "5475": 1926, "0569": 1926, "0737": 1926, "3429": 1926, "9138": 1926, "9337": 1926, "6864": [1926, 1952], "1132": 1926, "7892": 1926, "1003": 1926, "5688": 1926, "3637": 1926, "9906": 1926, "5197": 1926, "4598": 1926, "3708": 1926, "6217": 1926, "435": 1926, "1335": 1926, "3135": 1926, "gesdd": 1927, "conquer": 1927, "gesvdjbatch": 1927, "fortran": 1927, "2364": 1927, "7752": 1927, "7201": 1927, "7394": 1927, "0504": 1927, "3371": 1927, "5296": 1927, "3550": 1927, "5569": 1927, "2445": 1927, "1414": 1927, "4027": 1927, "0287": 1927, "5434": 1927, "1946": 1927, "8833": 1927, "3679": 1927, "4296": 1927, "2890": 1927, "6604": 1927, "2717": 1927, "2618": 1927, "4234": 1927, "2481": 1927, "4733": 1927, "3289": [1927, 2060], "0315": 1927, "7806": 1927, "0199": 1927, "8766": 1927, "4809": 1927, "4054": 1927, "7600": 1927, "8611": 1927, "2594": 1927, "4373": 1927, "6531e": 1927, "a_big": 
1927, "6503e": 1927, "adequ": 1928, "choosen": 1928, "swapax": [1930, 2014, 2066, 2084], "faithfulli": [1934, 2024, 2098], "1995": 1937, "4608": 1937, "7702": 1937, "4875": 1937, "9158": 1937, "5872": 1937, "6929": 1937, "6932": 1937, "take_along_axi": [1939, 2049], "max_idx": 1939, "sorted_idx": 1939, "2027": 1940, "7687": 1940, "4412": 1940, "3856": 1940, "5930": 1940, "9859": 1940, "4722": 1940, "3366": 1940, "8986": 1941, "7279": 1941, "1745": [1941, 2048], "7156": 1941, "8257": 1941, "2553": 1941, "11111": 1942, "222222": 1942, "3333333": 1942, "1111": 1942, "array_split": 1943, "i_d": 1944, "k_": 1944, "4532": 1944, "4874": 1944, "5018": 1944, "4796": [1944, 2059], "5162": 1944, "5306": 1944, "2922": 1944, "7556": 1944, "2741": 1944, "3161": 1944, "0704": 1944, "0187": 1944, "4079": 1944, "3126": 1944, "8744": 1944, "8223": 1944, "9445": 1944, "4117": 1944, "7780": 1944, "7193": 1944, "4867": 1944, "3204": 1944, "5513": 1944, "4737": [1944, 1975], "2850": 1944, "2573": 1944, "5997": 1944, "sparsebsr": 1948, "sparsecsc": 1948, "sparsebsc": 1948, "9893": 1948, "5809": 1948, "1669": 1948, "7299": 1948, "4942": [1948, 2055], "y_0": 1949, "x_diff": 1949, "y_diff": 1949, "riemann": [1949, 2042, 2052, 2081], "badli": 1951, "cloned_coeffici": 1951, "1527": 1951, "0753": 1951, "7986": 1951, "0210": 1951, "3513": 1951, "5492": 1951, "7403": 1951, "0243": 1951, "7841": 1951, "9046": 1951, "5405": 1951, "9320": 1951, "9270": 1951, "2826": 1951, "lbrace": [1952, 1953, 1954, 1955], "rbrace": [1952, 1953, 1954, 1955], "0813": 1952, "8619": 1952, "7105": 1952, "0935": 1952, "1380": 1952, "3409": [1952, 2059], "2219": 1952, "5653": 1952, "2521": 1952, "2544": 1952, "3461": 1952, "4785": 1952, "4477": 1952, "6049": 1952, "6368": 1952, "8775": 1952, "7145": 1952, "1502": 1952, "2716": 1952, "1243": 1952, "5413": 1952, "3615": 1952, "0614": 1952, "7344": 1952, "3164": 1952, "7648": 1952, "4024": 1952, "0978": 1952, "col": [1953, 1955, 2014], "2309": 1954, "5207": 1954, "0049": 1954, "2072": 1954, "0680": 1954, "6602": 1954, "3480": 1954, "5211": 1954, "4573": 1954, "5876": 1954, "0794": [1954, 2081], "8373": 1954, "6654": 1954, "2604": 1954, "5235": 1954, "2447": 1954, "9556": 1954, "2919": 1954, "1768": 1954, "4333": 1954, "3146": [1954, 2055], "6576": 1954, "0432": 1954, "9348": [1954, 2081], "4410": 1954, "9888": 1954, "3337": 1954, "6556": 1954, "4798": 1954, "5466": 1957, "8008": 1957, "9079": 1957, "unique_consecut": [1960, 2014, 2066], "inverse_indic": [1960, 1961], "a_unique_dim0": 1960, "5678": [1962, 2075], "78": 1962, "alon": [1964, 2015, 2068], "put_": [1964, 2014], "index_add": [1964, 2014, 2066], "index_select": [1964, 2014, 2066, 2080, 2106], "fractionalmaxpool2d": 1964, "fractionalmaxpool3d": 1964, "reflectionpad1d": 1964, "reflectionpad3d": 1964, "replicationpad1d": 1964, "replicationpad3d": 1964, "histc": [1964, 2014, 2066], "bincount": [1964, 2014, 2066], "kthvalu": [1964, 2014, 2033, 2066], "avg_pool3d_backward_cuda": 1964, "for_tensor": 1965, "for_modul": 1965, "for_packed_sequ": 1965, "for_storag": 1965, "unsupported_dtyp": 1965, "privateuse1": [1965, 1967, 2069, 2074], "rename_privateuse1_backend": [1965, 2012], "is_foo": 1965, "frames_to_skip": 1966, "maximum_number_of_fram": 1966, "_register_device_modul": 1967, "backendmodul": 1967, "get_amp_supported_dtyp": 1967, "_is_in_bad_fork": 1967, "bad_fork": 1967, "get_rng_stat": [1967, 2012, 2074], "set_rng_stat": [1967, 2012, 2074], "extend_dispatch": 1967, "pytorch_open_registration_exampl": 1967, "nicer": 1968, "slot": [1969, 2095, 
2112], "alexandr": 1970, "theophil": 1970, "0631": 1971, "5590": 1971, "4893": 1971, "8258": 1971, "5926": 1972, "0056": 1972, "3646": 1972, "vecdot": 1973, "mi": [1974, 1975], "6116": 1974, "5772": [1974, 2081], "4606": 1974, "9120": 1974, "0786": 1974, "7497": 1974, "6623": 1974, "5772j": 1974, "9120j": 1974, "7497j": 1974, "6623j": 1974, "3839j": 1975, "2098": 1975, "6699j": 1975, "3470": 1975, "9451j": 1975, "5174": 1975, "3136j": 1975, "6699": 1975, "9451": 1975, "3136": 1975, "vertic": [1977, 1978, 2067, 2085, 2102], "atleast_2d": [1978, 2014, 2066], "3139": 1979, "3898": 1979, "1657": 1979, "0383": 1979, "8785": 1979, "1089": 1979, "_xpudeviceproperti": 1992, "hubconf": 2011, "entrypoint_nam": 2011, "_resnet18": 2011, "smoother": [2011, 2023], "load_state_dict_from_url": [2011, 2027], "2gb": 2011, "relative_path_to_checkpoint": 2011, "pth": [2011, 2027, 2060], "dirnam": 2011, "__file__": [2011, 2061, 2068, 2104], "5c106cde": [2011, 2027], "force_reload": 2011, "skip_valid": 2011, "trust_repo": 2011, "repo_own": 2011, "repo_nam": 2011, "ref": [2011, 2080, 2098, 2111], "torchhub": 2011, "github_token": 2011, "mute": 2011, "repo_or_dir": 2011, "resnet50": [2011, 2085, 2104], "resnet50_weight": 2011, "imagenet1k_v1": 2011, "download_url_to_fil": 2011, "hash_prefix": 2011, "temporary_fil": 2011, "sha256": [2011, 2027], "s3": [2011, 2027, 2061], "amazonaw": [2011, 2027, 2061], "model_dir": [2011, 2027], "check_hash": [2011, 2027], "hub_dir": [2011, 2027], "get_dir": [2011, 2027], "ext": [2011, 2027], "eight": [2011, 2027], "hash": [2011, 2014, 2016, 2027, 2066], "succinct": 2011, "set_dir": 2011, "path_to_hub_dir": 2011, "torch_hom": 2011, "xdg_cache_hom": [2011, 2045], "reiniti": [2011, 2024], "path_importer_cach": 2011, "subpackag": [2011, 2068], "offend": [2011, 2103], "classifi": [2012, 2016, 2065, 2067, 2070, 2085], "pypi": 2012, "conda": [2012, 2061], "hip": 2012, "javadoc": 2012, "uninitializedbuff": 2012, "anomali": 2012, "can_device_access_p": 2012, "current_blas_handl": 2012, "device_of": 2012, "get_arch_list": 2012, "get_device_cap": 2012, "get_device_nam": 2012, "get_device_properti": 2012, "get_gencode_flag": 2012, "get_sync_debug_mod": 2012, "ipc_collect": 2012, "memory_usag": 2012, "set_stream": 2012, "set_sync_debug_mod": 2012, "power_draw": 2012, "clock_rat": 2012, "outofmemoryerror": 2012, "jiter": 2012, "tunableop": 2012, "sanit": 2012, "_record_memory_histori": [2012, 2113], "_snapshot": [2012, 2113], "_dump_snapshot": [2012, 2113], "empty_cach": [2012, 2045, 2053, 2113], "set_per_process_memory_fract": 2012, "current_allocated_memori": 2012, "driver_allocated_memori": 2012, "deferredmtiacallerror": 2012, "idiom": [2012, 2061], "nnpack": 2012, "openmp": [2012, 2044, 2061, 2088, 2100, 2102], "xeon": [2012, 2044, 2108], "pipelinestag": 2012, "pipelineschedul": 2012, "face": [2012, 2048, 2068, 2085, 2101], "dive": [2012, 2047, 2052, 2068, 2100, 2105], "pathwis": 2012, "exponentialfamili": 2012, "binomi": [2012, 2014, 2066], "chi2": 2012, "continuousbernoulli": 2012, "dirichlet": 2012, "fishersnedecor": 2012, "halfcauchi": 2012, "halfnorm": 2012, "inversegamma": 2012, "kumaraswami": 2012, "lkjcholeski": 2012, "laplac": 2012, "lognorm": 2012, "lowrankmultivariatenorm": 2012, "mixturesamefamili": 2012, "multinomi": [2012, 2014, 2066], "multivariatenorm": 2012, "negativebinomi": 2012, "onehotcategor": 2012, "relaxedbernoulli": 2012, "logitrelaxedbernoulli": 2012, "relaxedonehotcategor": 2012, "studentt": 2012, "transformeddistribut": 2012, "vonmis": 2012, "weibul": 2012, 
"wishart": 2012, "hub": [2012, 2027, 2104], "misc": 2012, "normal_": [2012, 2014, 2033, 2040, 2045, 2089], "constant_": [2012, 2040], "ones_": [2012, 2040], "zeros_": [2012, 2040], "eye_": [2012, 2018, 2040], "dirac_": [2012, 2018, 2040], "xavier_uniform_": [2012, 2040], "xavier_normal_": [2012, 2040, 2055], "kaiming_uniform_": [2012, 2040], "trunc_normal_": [2012, 2040], "orthogonal_": [2012, 2018, 2040, 2055], "sparse_": [2012, 2040], "swa": 2012, "ema": 2012, "powersgd": 2012, "remotemodul": 2012, "doublestorag": [2012, 2082], "floatstorag": [2012, 2082], "halfstorag": [2012, 2082], "longstorag": [2012, 2082], "intstorag": [2012, 2082], "shortstorag": [2012, 2082], "charstorag": [2012, 2082], "bytestorag": [2012, 2082], "boolstorag": [2012, 2082], "bfloat16storag": [2012, 2082], "complexdoublestorag": [2012, 2082], "complexfloatstorag": [2012, 2082], "quint8storag": [2012, 2082], "qint8storag": [2012, 2082], "qint32storag": [2012, 2082], "quint4x2storag": [2012, 2082], "quint2x4storag": [2012, 2082], "make_tensor": [2012, 2087], "assert_allclos": [2012, 2087], "generate_methods_for_privateuse1_backend": 2012, "get_cpp_backtrac": 2012, "set_modul": 2012, "sampler": 2012, "mobile_optim": 2012, "optimize_for_mobil": [2012, 2026], "model_zoo": 2012, "load_url": [2012, 2027], "tensorboard": [2012, 2029, 2069], "summarywrit": [2012, 2029, 2085], "module_track": 2012, "moduletrack": [2012, 2028], "iinfo": 2012, "__config__": [2012, 2044], "__future__": 2012, "set_log": [2012, 2022, 2111], "miscellan": [2012, 2114], "torchdata": 2012, "torchserv": 2012, "torchtext": 2012, "xla": [2012, 2020, 2060, 2083, 2098], "disadvantag": 2013, "gentl": 2013, "beam": 2013, "traced_bar": 2013, "myscriptmodul": 2013, "103": [2013, 2015], "939": [2013, 2015], "116": [2013, 2015], "779": [2013, 2015], "123": [2013, 2015], "my_script_modul": [2013, 2015], "ins": 2013, "pytorch_jit": 2013, "traced_fn": 2013, "disable_jit_exampl": 2013, "printer": 2013, "rv": 2013, "rv0": 2013, "rv1": 2013, "ssa": 2013, "block0": 2013, "block1": 2013, "loop_in_traced_fn": 2013, "input_tupl": 2013, "fill_row_zero": 2013, "tracerwarn": 2013, "nr": 2013, "09115803241729736": 2013, "6782537698745728": 2013, "cpu_model": 2013, "gpu_model": 2013, "sample_input_cpu": 2013, "sample_input_gpu": 2013, "traced_cpu": 2013, "traced_gpu": 2013, "use_gpu": 2013, "__constants__": [2013, 2015], "my_module_inst": 2013, "redeclar": 2013, "nn_module_inst": 2013, "my_scripted_model": 2013, "pep": [2013, 2015, 2065, 2100], "526": [2013, 2015, 2065], "script_method": 2013, "implicitly_compiled_method": 2013, "another_forward": 2013, "unused_method": 2013, "some_fn": 2013, "ever": [2013, 2082, 2113], "some_fn2": 2013, "some_fn3": 2013, "some_fn4": 2013, "my_dict": [2013, 2015], "my_int": [2013, 2015], "my_const": 2013, "make_dict": 2013, "nnc": 2013, "nvfuser": 2013, "__and__": 2014, "__iand__": 2014, "__ilshift__": 2014, "__ior__": 2014, "__irshift__": 2014, "__ixor__": 2014, "__lshift__": 2014, "__or__": 2014, "__rshift__": 2014, "__xor__": 2014, "absolute_": 2014, "acos_": [2014, 2033], "addbmm_": 2014, "addcdiv_": 2014, "addcmul_": 2014, "addmv_": [2014, 2033], "addr_": 2014, "align_a": [2014, 2033, 2034, 2066], "align_to": [2014, 2033, 2034, 2066], "ellipsis_idx": 2014, "aminmax": [2014, 2066], "arccos_": 2014, "arccosh_": 2014, "arcsin_": [2014, 2080], "arcsinh_": 2014, "arctan2_": 2014, "arctan_": 2014, "arctanh_": 2014, "argwher": [2014, 2066], "as_strided_": 2014, "as_strided_scatt": [2014, 2066, 2106], "asin_": [2014, 2033, 2080], "asinh_": 
[2014, 2033], "atan_": [2014, 2033], "atanh_": [2014, 2033], "baddbmm_": 2014, "bernoulli_": [2014, 2033, 2089], "bitwise_and_": 2014, "bitwise_left_shift_": 2014, "bitwise_not_": [2014, 2033], "bitwise_or_": 2014, "bitwise_right_shift_": 2014, "bitwise_xor_": 2014, "broadcast_to": [2014, 2066], "cauchy_": [2014, 2033, 2089], "ceil_": [2014, 2033], "clamp_max": [2014, 2066], "clamp_max_": 2014, "clamp_min": [2014, 2066], "clamp_min_": 2014, "clip_": 2014, "conj_physical_": 2014, "copysign_": 2014, "cos_": [2014, 2033, 2042], "cosh_": [2014, 2033], "count_nonzero": [2014, 2066], "cummax": [2014, 2066], "cummin": [2014, 2066], "cumprod_": 2014, "cumsum_": 2014, "deg2rad": [2014, 2033, 2066, 2080], "deg2rad_": [2014, 2033, 2080], "outdim": 2014, "diagonal_scatt": [2014, 2066], "digamma_": [2014, 2033], "div_": [2014, 2033, 2080], "divide_": 2014, "dsplit": [2014, 2066], "eq_": 2014, "erf_": [2014, 2033], "erfc_": [2014, 2033], "erfinv_": [2014, 2033], "exp2": [2014, 2066, 2081, 2085, 2106], "exp2_": 2014, "exp_": [2014, 2033], "expm1_": [2014, 2033], "exponential_": [2014, 2033, 2089], "fill_diagonal_": 2014, "fix_": 2014, "fliplr": [2014, 2066], "flipud": [2014, 2066], "float_power_": 2014, "floor_": [2014, 2033], "floor_divide_": [2014, 2080], "fmax": [2014, 2066, 2106], "fmin": [2014, 2066, 2106], "fmod_": 2014, "frac_": [2014, 2033], "frexp": [2014, 2066, 2106], "gcd_": 2014, "ge_": 2014, "ger": [2014, 2066], "get_devic": [2014, 2033, 2066, 2080, 2082, 2083], "greater_": 2014, "greater_equal_": 2014, "gt_": 2014, "hardshrink": [2014, 2066], "heavisid": [2014, 2066], "heaviside_": 2014, "hsplit": [2014, 2066, 2084], "hypot_": 2014, "i0_": 2014, "igamma_": 2014, "igammac_": 2014, "index_fil": [2014, 2033, 2066], "index_reduc": [2014, 2066], "is_complex": [2014, 2063, 2066, 2080, 2083], "is_contigu": [2014, 2033, 2066, 2084], "is_floating_point": [2014, 2033, 2066, 2080, 2083], "is_infer": [2014, 2066], "is_same_s": [2014, 2066, 2080], "is_set_to": [2014, 2066], "is_sign": [2014, 2033, 2066, 2080], "isclos": [2014, 2066], "isfinit": [2014, 2058, 2066, 2106], "isinf": [2014, 2066, 2080, 2106], "isneginf": [2014, 2066, 2080], "isposinf": [2014, 2066, 2080], "isreal": [2014, 2066], "istft": [2014, 2066], "kron": [2014, 2066], "lcm_": 2014, "ldexp_": 2014, "le_": 2014, "lerp_": 2014, "less_": 2014, "less_equal_": 2014, "lgamma_": 2014, "log10_": [2014, 2033], "log1p_": [2014, 2033, 2080], "log2_": [2014, 2033], "log_normal_": [2014, 2033, 2089], "logaddexp2": [2014, 2066], "logcumsumexp": [2014, 2066], "logical_and_": 2014, "logical_not_": [2014, 2033], "logical_or_": 2014, "logical_xor_": 2014, "logit_": 2014, "lt_": 2014, "masked_fil": [2014, 2033, 2035, 2066, 2080], "masked_scatt": [2014, 2066], "masked_select": [2014, 2033, 2066], "matrix_pow": [2014, 2066], "moveaxi": [2014, 2066], "msort": [2014, 2066], "multiply_": 2014, "mvlgamma_": 2014, "nan_to_num_": 2014, "nanmedian": [2014, 2033, 2066], "nansum": [2014, 2066], "narrow_copi": [2014, 2066, 2080], "ne_": 2014, "neg_": [2014, 2033, 2080], "negative_": [2014, 2080], "new_empty_strid": [2014, 2066], "new_ful": [2014, 2045, 2066], "new_on": [2014, 2066], "nextafter_": 2014, "nonzero_stat": [2014, 2066], "not_equal_": 2014, "polygamma_": 2014, "pow_": [2014, 2033], "q_per_channel_scal": [2014, 2066], "q_per_channel_zero_point": [2014, 2066], "q_scale": [2014, 2066], "q_zero_point": [2014, 2066], "rad2deg": [2014, 2033, 2066, 2080], "rad2deg_": [2014, 2033, 2080], "reciprocal_": [2014, 2033], "record_stream": [2014, 2045, 2066], 
"refine_nam": [2014, 2033, 2034, 2066], "relu_": [2014, 2072], "remainder_": 2014, "rename_": [2014, 2033, 2034], "renorm_": 2014, "reshape_a": [2014, 2035, 2066, 2084], "resize_a": [2014, 2066], "the_templ": 2014, "resize_as_": [2014, 2033, 2080], "resize_as_sparse_": 2014, "retains_grad": [2014, 2066], "roll": [2014, 2066], "rot90": [2014, 2066], "round_": [2014, 2033], "rsqrt_": [2014, 2033], "select_scatt": [2014, 2066, 2106], "sgn_": [2014, 2033], "sigmoid_": [2014, 2033, 2072], "sign_": [2014, 2033], "sinc_": 2014, "sinh_": [2014, 2033], "slice_invers": [2014, 2066], "slice_scatt": [2014, 2066, 2106], "smm": [2014, 2066, 2080], "sparse_resize_": 2014, "sparse_resize_and_clear_": 2014, "split_with_s": [2014, 2066, 2084, 2106], "sqrt_": [2014, 2033], "square_": 2014, "squeeze_": [2014, 2072], "sspaddmm": [2014, 2066, 2080], "sub_": [2014, 2033, 2080], "subtract_": 2014, "sum_to_s": [2014, 2066], "swapaxes_": 2014, "swapdim": [2014, 2066, 2084], "swapdims_": 2014, "take_along_dim": [2014, 2066], "tan_": [2014, 2033], "tanh_": [2014, 2033, 2072], "tensor_indices_or_sect": 2014, "to_mkldnn": [2014, 2066], "to_padded_tensor": [2014, 2035, 2066], "to_sparse_bsc": [2014, 2066], "to_sparse_bsr": [2014, 2066, 2080], "to_sparse_csc": [2014, 2066, 2080], "transpose_": [2014, 2080], "true_divide_": 2014, "trunc_": [2014, 2033], "type_a": [2014, 2033, 2066], "out0": [2014, 2060], "unsafe_chunk": [2014, 2066], "unsafe_split": [2014, 2066], "unsafe_split_with_s": [2014, 2066], "unsqueeze_": [2014, 2072], "view_a": [2014, 2066, 2084], "vsplit": [2014, 2066, 2084], "xlogy_": 2014, "adaptive_avg_pool2d": [2014, 2048, 2066, 2072], "adaptive_max_pool1d_with_indic": [2014, 2048], "adaptive_max_pool2d_with_indic": 2014, "adaptive_max_pool3d_with_indic": 2014, "alpha_dropout": [2014, 2066], "assert_int_or_pair": 2014, "arg_nam": 2014, "binary_cross_entropi": [2014, 2066], "binary_cross_entropy_with_logit": [2014, 2066], "celu": [2014, 2066], "dropout2d": 2014, "dropout3d": 2014, "feature_alpha_dropout": [2014, 2066], "fractional_max_pool2d_with_indic": 2014, "fractional_max_pool3d_with_indic": 2014, "gaussian_nll_loss": 2014, "glu": [2014, 2066], "gumbel_softmax": 2014, "hardsigmoid": [2014, 2066, 2072], "hardswish": [2014, 2066, 2072], "huber_loss": [2014, 2066], "instance_norm": [2014, 2066, 2072], "local_response_norm": 2014, "lp_pool1d": 2014, "lp_pool2d": 2014, "lp_pool3d": 2014, "max_pool1d_with_indic": [2014, 2066], "max_pool3d_with_indic": [2014, 2066, 2106], "mish": [2014, 2066], "multi_head_attention_forward": 2014, "embed_dim_to_check": 2014, "in_proj_weight": 2014, "in_proj_bia": 2014, "bias_k": 2014, "bias_v": 2014, "out_proj_weight": 2014, "out_proj_bia": 2014, "use_separate_proj_weight": 2014, "q_proj_weight": 2014, "k_proj_weight": 2014, "v_proj_weight": 2014, "static_k": 2014, "static_v": 2014, "multilabel_soft_margin_loss": 2014, "relu6": [2014, 2066, 2072], "silu": [2014, 2035, 2066], "softsign": 2014, "tanhshrink": 2014, "adaptive_avg_pool1d": [2014, 2048, 2066, 2072, 2106], "adaptive_max_pool1d": [2014, 2048, 2066], "affine_grid_gener": [2014, 2066], "alias_copi": [2014, 2066], "align_tensor": [2014, 2066], "alpha_dropout_": 2014, "as_strided_copi": [2014, 2066], "atleast_1d": [2014, 2066], "avg_pool1d": [2014, 2066, 2072, 2106], "bartlett_window": [2014, 2018, 2066], "cudnn_en": 2014, "batch_norm_backward_elemt": 2014, "invstd": 2014, "sum_di": 2014, "sum_dy_xmu": 2014, "batch_norm_backward_reduc": 2014, "input_g": 2014, "bias_g": 2014, "out3": 2014, "batch_norm_elemt": [2014, 2066], 
"batch_norm_gather_stat": [2014, 2066], "batch_norm_gather_stats_with_count": [2014, 2066], "batch_norm_stat": [2014, 2066], "batch_norm_update_stat": [2014, 2066], "blackman_window": [2014, 2018, 2066], "block_diag": [2014, 2066], "can_cast": [2014, 2066], "ccol_indices_copi": [2014, 2066], "celu_": 2014, "choose_qparams_optim": [2014, 2066], "n_bin": 2014, "bit_width": 2014, "col_indices_copi": [2014, 2066], "column_stack": [2014, 2066], "constant_pad_nd": [2014, 2066, 2106], "conv_tbc": [2014, 2066], "crow_indices_copi": [2014, 2066], "cudnn_affine_grid_gener": [2014, 2066], "cudnn_batch_norm": [2014, 2066], "exponential_average_factor": 2014, "cudnn_convolut": [2014, 2066], "cudnn_convolution_add_relu": [2014, 2066], "cudnn_convolution_relu": [2014, 2066], "cudnn_convolution_transpos": [2014, 2066], "cudnn_grid_sampl": [2014, 2066], "cudnn_is_accept": [2014, 2066], "cumulative_trapezoid": [2014, 2066], "detach_copi": [2014, 2066, 2110], "diagonal_copi": [2014, 2066], "dropout_": [2014, 2026], "embedding_renorm_": 2014, "physical_layout": [2014, 2106], "empty_quant": [2014, 2066], "anyenumtyp": 2014, "expand_copi": [2014, 2066], "fake_quantize_per_channel_affin": [2014, 2066], "fbgemm_linear_fp16_weight": [2014, 2066], "packed_weight": 2014, "fbgemm_linear_fp16_weight_fp32_activ": [2014, 2066], "fbgemm_linear_int8_weight": [2014, 2066], "col_offset": 2014, "weight_scal": 2014, "weight_zero_point": 2014, "fbgemm_linear_int8_weight_fp32_activ": [2014, 2066], "fbgemm_linear_quantize_weight": [2014, 2066], "fbgemm_pack_gemm_matrix_fp16": [2014, 2066], "fbgemm_pack_quantized_matrix": [2014, 2066], "feature_alpha_dropout_": 2014, "feature_dropout": [2014, 2066], "feature_dropout_": 2014, "frobenius_norm": [2014, 2066], "from_fil": [2014, 2066, 2082], "fused_moving_avg_obs_fake_qu": [2014, 2066], "observer_on": 2014, "fake_quant_on": 2014, "running_min": 2014, "running_max": 2014, "per_row_fake_qu": 2014, "symmetric_qu": 2014, "interpolation_mod": [2014, 2106], "has_bias": 2014, "gru_cel": [2014, 2066], "w_ih": 2014, "w_hh": 2014, "hamming_window": [2014, 2018, 2066], "histogramdd": [2014, 2066], "hspmm": [2014, 2066, 2080], "indices_copi": [2014, 2066], "is_autocast_cpu_en": [2014, 2066], "is_autocast_en": [2014, 2066], "is_grad_en": [2014, 2066], "is_vulkan_avail": [2014, 2066], "isin": [2014, 2066], "kaiser_window": [2014, 2066], "lstm_cell": [2014, 2066], "meshgrid": [2014, 2066], "miopen_batch_norm": [2014, 2066], "miopen_convolut": [2014, 2066], "miopen_convolution_add_relu": [2014, 2066], "miopen_convolution_relu": [2014, 2066], "miopen_convolution_transpos": [2014, 2066], "miopen_depthwise_convolut": [2014, 2066], "miopen_rnn": [2014, 2066], "weight_stride0": 2014, "dropout_st": 2014, "out4": 2014, "mkldnn_adaptive_avg_pool2d": [2014, 2066], "mkldnn_convolut": [2014, 2066], "mkldnn_linear_backward_weight": 2014, "bias_defin": 2014, "mkldnn_max_pool2d": [2014, 2066], "mkldnn_max_pool3d": [2014, 2066], "mkldnn_rnn_layer": [2014, 2066], "weight0": 2014, "weight1": 2014, "weight2": 2014, "weight3": 2014, "hx_": 2014, "cx_": 2014, "native_batch_norm": [2014, 2066], "save_mean": 2014, "save_invstd": 2014, "native_channel_shuffl": [2014, 2066], "native_dropout": [2014, 2066, 2106], "native_group_norm": [2014, 2066, 2106], "hxw": [2014, 2106], "native_layer_norm": [2014, 2066, 2106], "native_norm": [2014, 2066, 2080], "norm_except_dim": [2014, 2066], "nuclear_norm": [2014, 2066], "pairwise_dist": [2014, 2066], "permute_copi": [2014, 2066], "promote_typ": [2014, 2066, 2087], 
"quantize_per_channel": [2014, 2066, 2070], "quantize_per_tensor_dynam": [2014, 2066, 2070], "quantized_batch_norm": [2014, 2066], "quantized_gru_cel": [2014, 2066], "packed_ih": 2014, "packed_hh": 2014, "col_offsets_ih": 2014, "col_offsets_hh": 2014, "scale_ih": 2014, "scale_hh": 2014, "zero_point_ih": 2014, "zero_point_hh": 2014, "quantized_lstm_cel": [2014, 2066], "quantized_max_pool1d": [2014, 2066], "quantized_max_pool2d": [2014, 2066], "quantized_max_pool3d": [2014, 2066], "quantized_rnn_relu_cel": [2014, 2066], "quantized_rnn_tanh_cel": [2014, 2066], "rand_lik": [2014, 2018, 2045, 2066, 2089, 2096], "randint_lik": [2014, 2018, 2066, 2089], "randn_lik": [2014, 2018, 2035, 2066, 2089], "randperm": [2014, 2018, 2066, 2089, 2106], "result_typ": [2014, 2066], "scalar1": 2014, "scalar2": 2014, "rnn_relu": [2014, 2066], "rnn_relu_cel": [2014, 2066], "rnn_tanh": [2014, 2066], "rnn_tanh_cel": [2014, 2066], "row_indices_copi": [2014, 2066], "row_stack": [2014, 2066], "rrelu_": 2014, "rsub": [2014, 2066], "scalar_tensor": [2014, 2066, 2106], "searchsort": [2014, 2066], "segment_reduc": [2014, 2066], "selu_": 2014, "slice_copi": [2014, 2066], "sparse_bsc_tensor": [2014, 2066, 2080], "sparse_bsr_tensor": [2014, 2066, 2080], "sparse_compressed_tensor": [2014, 2066, 2080], "sparse_csc_tensor": [2014, 2066, 2080], "split_copi": [2014, 2066], "split_with_sizes_copi": [2014, 2066], "squeeze_copi": [2014, 2066], "std_mean": [2014, 2033, 2066], "t_copi": [2014, 2066], "threshold_": 2014, "transpose_copi": [2014, 2066], "trapz": [2014, 2066], "tril_indic": [2014, 2018, 2066], "triu_indic": [2014, 2018, 2066], "unbind_copi": [2014, 2066], "unfold_copi": [2014, 2066], "unsqueeze_copi": [2014, 2066], "values_copi": [2014, 2066], "vander": [2014, 2018, 2066], "var_mean": [2014, 2033, 2066], "view_as_complex_copi": [2014, 2066], "view_as_real_copi": [2014, 2066], "_nn": 2014, "adaptive_max_pool2d": [2014, 2066], "avg_pool2d": [2014, 2066, 2072, 2106], "conv_depthwise3d": [2014, 2066], "cross_entropy_loss": [2014, 2066], "input_scal": 2014, "elu_": 2014, "flatten_dense_tensor": [2014, 2066], "random_sampl": 2014, "gelu_": 2014, "hardsigmoid_": [2014, 2072], "hardswish_": 2014, "hardtanh_": [2014, 2072], "leaky_relu_": 2014, "log_sigmoid": [2014, 2066], "mish_": 2014, "mkldnn_linear": [2014, 2066], "mkldnn_reorder_conv2d_weight": [2014, 2066], "mkldnn_reorder_conv3d_weight": [2014, 2066], "nll_loss2d": [2014, 2066], "nll_loss_nd": [2014, 2066], "reflection_pad3d": [2014, 2066, 2106], "relu6_": 2014, "rrelu_with_nois": [2014, 2066], "rrelu_with_noise_": 2014, "silu_": 2014, "slow_conv3d": [2014, 2066], "slow_conv_dilated2d": [2014, 2066], "slow_conv_dilated3d": [2014, 2066], "slow_conv_transpose2d": [2014, 2066], "slow_conv_transpose3d": [2014, 2066], "softshrink": [2014, 2066], "thnn_conv2d": [2014, 2066], "unflatten_dense_tensor": [2014, 2066], "upsample_bicubic2d": [2014, 2066], "scales_h": 2014, "scales_w": 2014, "upsample_bilinear2d": [2014, 2066, 2106], "upsample_linear1d": [2014, 2066], "upsample_nearest1d": [2014, 2066], "upsample_nearest2d": [2014, 2066, 2106], "upsample_nearest3d": [2014, 2066], "scales_d": 2014, "upsample_trilinear3d": [2014, 2066], "fft_fftfreq": [2014, 2066], "fft_fftshift": [2014, 2066], "fft_hfft2": [2014, 2066], "fft_hfftn": [2014, 2066], "fft_ifftshift": [2014, 2066], "fft_ihfft2": [2014, 2066], "fft_ihfftn": [2014, 2066], "fft_rfftfreq": [2014, 2066], "_linalg": 2014, "linalg_cross": [2014, 2066], "linalg_det": [2014, 2066], "linalg_diagon": [2014, 2066], "eigvec": 2014, 
"linalg_ldl_factor": [2014, 2066], "linalg_ldl_factor_ex": [2014, 2066], "linalg_ldl_solv": [2014, 2066], "linalg_lu": [2014, 2066], "linalg_lu_factor": [2014, 2066], "linalg_lu_factor_ex": [2014, 2066], "linalg_lu_solv": [2014, 2066], "linalg_matmul": [2014, 2066], "linalg_matrix_exp": [2014, 2066], "linalg_matrix_pow": [2014, 2066], "linalg_multi_dot": [2014, 2066], "linalg_norm": [2014, 2066], "linalg_pinv": [2014, 2066], "linalg_solve_ex": [2014, 2066], "linalg_solve_triangular": [2014, 2066], "linalg_vand": [2014, 2066], "linalg_vecdot": [2014, 2066], "linalg_vector_norm": [2014, 2066], "_nest": 2014, "nested_to_padded_tensor": [2014, 2066], "_spars": 2014, "sparse_sampled_addmm": [2014, 2066], "_special": 2014, "special_airy_ai": [2014, 2066], "special_bessel_j0": [2014, 2066], "special_bessel_j1": [2014, 2066], "special_bessel_y0": [2014, 2066], "special_bessel_y1": [2014, 2066], "special_chebyshev_polynomial_t": [2014, 2066], "special_chebyshev_polynomial_u": [2014, 2066], "special_chebyshev_polynomial_v": [2014, 2066], "special_chebyshev_polynomial_w": [2014, 2066], "special_digamma": [2014, 2066], "special_entr": [2014, 2066], "special_erf": [2014, 2066], "special_erfc": [2014, 2066], "special_erfcx": [2014, 2066], "special_erfinv": [2014, 2066], "special_exp2": [2014, 2066], "special_expit": [2014, 2066], "special_expm1": [2014, 2066], "special_gammainc": [2014, 2066], "special_gammaincc": [2014, 2066], "special_gammaln": [2014, 2066], "special_hermite_polynomial_h": [2014, 2066], "special_i0": [2014, 2066], "special_i1": [2014, 2066], "special_laguerre_polynomial_l": [2014, 2066], "special_legendre_polynomial_p": [2014, 2066], "special_log1p": [2014, 2066], "special_log_ndtr": [2014, 2066], "special_log_softmax": [2014, 2066], "special_logit": [2014, 2066], "special_logsumexp": [2014, 2066], "special_modified_bessel_i0": [2014, 2066], "special_modified_bessel_i1": [2014, 2066], "special_modified_bessel_k0": [2014, 2066], "special_modified_bessel_k1": [2014, 2066], "special_multigammaln": [2014, 2066], "special_ndtr": [2014, 2066], "special_ndtri": [2014, 2066], "special_polygamma": [2014, 2066], "special_psi": [2014, 2066], "special_round": [2014, 2066], "special_scaled_modified_bessel_k0": [2014, 2066], "special_scaled_modified_bessel_k1": [2014, 2066], "special_shifted_chebyshev_polynomial_t": [2014, 2066], "special_shifted_chebyshev_polynomial_u": [2014, 2066], "special_shifted_chebyshev_polynomial_v": [2014, 2066], "special_shifted_chebyshev_polynomial_w": [2014, 2066], "special_sinc": [2014, 2066], "special_softmax": [2014, 2066], "special_spherical_bessel_j0": [2014, 2066], "special_xlog1pi": [2014, 2066], "special_xlogi": [2014, 2066], "special_zeta": [2014, 2066], "tval": 2014, "is_accept": 2014, "rect": 2014, "magic": [2014, 2017, 2089, 2101], "__complex__": 2014, "__float__": 2014, "__int__": 2014, "hex": [2014, 2016, 2066], "__hex__": 2014, "oct": [2014, 2066], "__oct__": 2014, "divmod": [2014, 2016, 2066], "chr": [2014, 2016, 2066], "int_float": 2014, "float_int": 2014, "fab": [2014, 2066], "int_int": 2014, "float_float": 2014, "complex_complex": 2014, "int_complex": 2014, "complex_int": 2014, "float_complex": 2014, "complex_float": [2014, 2082], "scalar_scalar": 2014, "int_to_int": 2014, "modf": [2014, 2066], "mathremaind": [2014, 2066], "programm": [2015, 2016, 2070], "tn": 2015, "subtyp": 2015, "an_error": 2015, "noreturn": [2015, 2016], "classvar": [2015, 2016], "anystr": [2015, 2016], "nomin": 2015, "newtyp": [2015, 2016], "tup": [2015, 2016], 
"emptydatastructur": 2015, "my_list": 2015, "aug_add_x": 2015, "inc": [2015, 2016], "assign_x": [2015, 2016], "polymorph": 2015, "sum_pair": 2015, "red": [2015, 2016], "green": [2015, 2016, 2103], "enum_fn": [2015, 2016], "my_variable_nam": 2015, "top_level_method": 2015, "other_help": 2015, "ten": [2015, 2085], "my_submodul": 2015, "tuple_or_list": 2015, "a_tupl": 2015, "is_script": [2015, 2016, 2066], "unsupported_linear_op": 2015, "is_trac": [2015, 2016], "univers": 2015, "typing_extens": 2015, "a_dict": 2015, "some_dict": 2015, "delimit": [2016, 2017], "tstype": 2016, "tsmoduletyp": 2016, "tsalltyp": 2016, "tsmetatyp": 2016, "tsprimitivetyp": 2016, "tsstructuraltyp": 2016, "tsnominaltyp": 2016, "myclass": [2016, 2068], "printabl": [2016, 2068], "sortabl": 2016, "nevertheless": [2016, 2077], "inc_first_el": 2016, "cpufloattyp": 2016, "tstupl": 2016, "tsnamedtupl": 2016, "tslist": 2016, "tsdict": 2016, "tsoption": 2016, "tsunion": 2016, "tsfutur": 2016, "tsrref": 2016, "tsawait": 2016, "await": [2016, 2017, 2075], "keytyp": 2016, "tensortyp": [2016, 2110], "_await": 2016, "mytupl": 2016, "scripted_inc": 2016, "_annotatednamedtupl": 2016, "_namedtupleannot": 2016, "_unannotatednamedtupl": 2016, "mistak": [2016, 2050, 2101], "nameerror": 2016, "remedi": 2016, "tsbuiltinclass": 2016, "tscustomclass": 2016, "tsenum": 2016, "tstensor": 2016, "subtensor": [2016, 2048, 2112], "subwithtorchfunct": 2016, "script_g": 2016, "tsclassdef": 2016, "methoddefinit": 2016, "__torch__": [2016, 2065], "class2": 2016, "tsenumdef": 2016, "tsenumtyp": 2016, "memberidentifi": 2016, "intenum": 2016, "intflag": 2016, "basecolor": 2016, "compli": [2016, 2068], "classbodydefinit": 2016, "moduleobj": 2016, "testmodul": 2016, "dosometh": 2016, "strateg": 2016, "congruent": 2016, "python3": 2016, "unannot": 2016, "python3annot": 2016, "paramannot": 2016, "returnannot": 2016, "funcormethodbodi": 2016, "mypyannot": 2016, "localvarannot": 2016, "setval": 2016, "moduletyp": [2016, 2068], "classidentifi": 2016, "instanceattridentifi": 2016, "offset_": 2016, "tsstructualtyp": 2016, "grammar": 2016, "chapter": [2016, 2052], "floattyp": 2016, "inttyp": 2016, "stringtyp": 2016, "devicetyp": 2016, "tupletyp": 2016, "listtyp": 2016, "enclosur": 2016, "parenth_form": 2016, "list_displai": 2016, "dict_displai": 2016, "legal": 2016, "stringliter": 2016, "floatnumb": 2016, "expression_list": 2016, "list_comprehens": 2016, "comp_for": 2016, "target_list": 2016, "or_expr": 2016, "key_datum_list": 2016, "dict_comprehens": 2016, "key_datum": 2016, "ongo": [2016, 2065, 2073, 2075], "enclos": 2016, "datum": [2016, 2091], "attributeref": 2016, "slice_list": 2016, "slice_item": 2016, "proper_slic": 2016, "argument_list": 2016, "desugar": [2016, 2101], "u_expr": 2016, "tightli": [2016, 2055], "m_expr": 2016, "a_expr": 2016, "shift_expr": 2016, "and_expr": 2016, "xor_expr": 2016, "comp_oper": 2016, "__contains__": 2016, "or_test": 2016, "and_test": 2016, "not_test": 2016, "conditional_express": 2016, "starred_item": 2016, "expression_stmt": 2016, "starred_express": 2016, "assignment_express": 2016, "assignment_stmt": 2016, "augmented_assignment_stmt": 2016, "augtarget": 2016, "augop": 2016, "annotated_assignment_stmt": 2016, "raise_stmt": 2016, "assert_stmt": 2016, "return_stmt": 2016, "del_stmt": 2016, "pass_stmt": 2016, "print_stmt": 2016, "break_stmt": 2016, "continue_stmt": 2016, "if_stmt": 2016, "while_stmt": 2016, "for_stmt": 2016, "with_stmt": 2016, "with_item": 2016, "tuple_stmt": 2016, "getattr_stmt": 2016, "hasattr_stmt": 2016, 
"zip_stmt": 2016, "iterable1": 2016, "iterable2": 2016, "enumerate_stmt": 2016, "five": [2016, 2060], "add_stat_valu": 2016, "sugaredvalu": 2016, "__abs__": 2016, "bytearrai": 2016, "delattr": 2016, "exec": 2016, "__index__": 2016, "isint": 2016, "issubclass": [2016, 2048], "ndigit": 2016, "__import__": [2016, 2068], "notimpl": [2016, 2017, 2048, 2101], "rpc_sync": [2016, 2066, 2075, 2076, 2077], "synonym": 2016, "_fork": [2016, 2044], "_wait": [2016, 2044], "lexic": 2017, "indent": 2017, "coroutin": [2017, 2099], "__del__": [2017, 2042], "__bytes__": 2017, "__slots__": 2017, "metaclass": 2017, "mro": 2017, "__r": 2017, "__": [2017, 2110], "bytesliter": 2017, "imagnumb": 2017, "parenthes": 2017, "ifs": 2017, "customiz": [2017, 2090, 2100, 2114], "compound": 2017, "adaptivelogsoftmaxwithloss": 2018, "opcheck": 2020, "torch_librari": [2020, 2046, 2048, 2093], "test_util": 2020, "test_schema": 2020, "test_autograd_registr": 2020, "test_faketensor": 2020, "test_aot_dispatch_dynam": 2020, "opoverloadpacket": 2020, "customopdef": 2020, "custom_op": [2020, 2065, 2101], "mylib": 2020, "numpy_mul": 2020, "mutates_arg": 2020, "numpy_add": 2020, "x_np": 2020, "z_np": 2020, "numpy_sin": 2020, "register_autograd": 2020, "sample_input": [2020, 2055], "718": 2020, "my_linear": [2020, 2028], "collis": 2020, "prone": [2020, 2032, 2057, 2101], "y_np": 2020, "numpy_sin_cpu": 2020, "numpy_sin_inplac": 2020, "register_kernel": 2020, "x_cpu": [2020, 2045], "x_cuda": 2020, "backward_fn": 2020, "functionctx": 2020, "keyword_only_input": 2020, "setup_context_fn": 2020, "custom_oper": 2020, "custom_linear": 2020, "_subclass": [2020, 2024, 2063, 2098, 2101], "fake_tensor": [2020, 2063, 2098, 2101], "faketensormod": [2020, 2063, 2098, 2101], "custom_nonzero": 2020, "get_ctx": 2020, "new_dynamic_s": 2020, "tracing_mod": 2020, "impl_abstract": [2020, 2101], "qualnam": 2020, "abstractimplctx": 2020, "2020": 2020, "googl": 2020, "colab": [2020, 2068, 2101], "dispatch_kei": [2020, 2100], "keynam": 2020, "alias_analysi": 2020, "conserv": [2020, 2023, 2045, 2101], "with_keyset": 2020, "fallthrough_kernel": 2020, "fallthrough": 2020, "div_cpu": 2020, "impl_": 2020, "operator_nam": 2020, "mysin": 2020, "off_by_default": 2022, "_registr": 2022, "spammi": [2022, 2100, 2111], "compiled_autograd": 2022, "born": 2023, "citizen": 2023, "afterthought": 2023, "intuit": [2023, 2104], "alik": 2023, "grai": 2023, "systemat": 2023, "onboard": 2023, "maskedarrai": 2023, "masked_tensor": 2023, "principl": [2024, 2045, 2102], "meta_util": 2024, "undocu": 2024, "fidel": 2024, "torch_force_weights_only_load": 2025, "torch_autograd_shutdown_wait_limit": 2025, "mobil": [2026, 2070, 2102], "blocklist": [2026, 2068], "mobileoptimizertyp": 2026, "conv_bn_fus": 2026, "correspondingli": 2026, "prepack": [2026, 2066], "insert_fold_prepack_op": 2026, "arm": [2026, 2070], "remove_dropout": 2026, "hoist": 2026, "hoist_conv_packed_param": 2026, "fuse_add_relu": 2026, "vulkan": 2026, "vulkan_automatic_gpu_transf": 2026, "freeze_modul": 2026, "script_modul": 2026, "optimization_blocklist": 2026, "preserved_method": 2026, "_mobileoptimizertyp": 2026, "recursivescriptmodul": [2026, 2060], "download": [2027, 2061, 2085, 2104], "friendli": 2028, "flopcountermod": 2028, "is_bw": 2028, "infrequ": 2029, "window_s": 2029, "max_sampl": 2029, "cap": 2029, "hasn": [2029, 2068], "_monitor": 2029, "data_value_t": 2029, "eventhandlerhandl": 2029, "register_event_handl": 2029, "unregister_event_handl": 2029, "tensorboardeventhandl": 2029, "shader": [2030, 2056], 
"processor": [2030, 2102], "metalperformanceshad": 2030, "shared_memori": 2032, "abruptli": 2032, "get_all_sharing_strategi": 2032, "get_sharing_strategi": 2032, "set_sharing_strategi": 2032, "new_strategi": 2032, "di": [2032, 2096, 2099, 2100], "abnorm": [2032, 2057], "forev": [2032, 2047], "asap": 2032, "queue_2": 2032, "x_clone": 2032, "segfault": [2032, 2058, 2101], "shm_open": [2032, 2082], "seriou": [2032, 2045, 2101], "torch_shm_manag": 2032, "unnot": 2032, "spawncontext": 2032, "has_nam": 2033, "is_shar": [2033, 2082], "is_sparse_csr": [2033, 2066, 2082], "is_tensor": [2033, 2080], "items": [2033, 2066], "unifies_names_from_input_tensor": 2033, "nbyte": [2033, 2066, 2082], "ndimens": 2033, "register_post_accumulate_grad_hook": [2033, 2042], "position": [2033, 2034], "unnam": [2033, 2034], "misalign": 2033, "inher": 2033, "collaps": [2033, 2066, 2106], "disappear": 2033, "img": [2034, 2085], "renamed_img": 2034, "coexist": 2034, "wildcard": [2034, 2068, 2110], "somewher": [2034, 2054], "scale_channel": 2034, "more_img": 2034, "named_tensor": 2034, "named_img": 2034, "flat_img": 2034, "named_flat_img": 2034, "unflattened_named_img": 2034, "grad_loss": 2034, "8107": 2034, "6357": 2034, "0783": 2034, "rename_map": 2034, "greedili": 2034, "unment": 2034, "49152": 2034, "datastructur": 2035, "seamless": 2035, "nested_tensor": 2035, "nt": 2035, "vein": 2035, "as_nested_tensor": 2035, "irregular": 2035, "indistinguish": 2035, "2286": 2035, "4842": 2035, "7827": 2035, "6745": [2035, 2081], "0658": 2035, "1247": 2035, "4078": 2035, "8083": 2035, "2871": 2035, "5559": 2035, "9885": 2035, "4074": 2035, "4855": 2035, "0733": 2035, "8285": 2035, "6858": 2035, "7030": 2035, "3481": 2035, "0236": 2035, "jag": [2035, 2098], "fake_grad": 2035, "nt2": 2035, "6862": 2035, "1282": 2035, "1031": 2035, "0464": 2035, "3276": 2035, "9967": 2035, "0054": 2035, "8972": 2035, "9174": 2035, "4995": 2035, "8546": 2035, "7194": 2035, "2918": 2035, "1846": 2035, "8793": 2035, "5183": 2035, "6447": 2035, "8009": 2035, "8468": 2035, "9832": 2035, "5272": 2035, "pt_infer": 2035, "pt_larg": 2035, "pt_small": 2035, "bitwidth": [2036, 2055, 2070, 2080], "asymmetr": [2036, 2070, 2073], "alter": [2037, 2041, 2048, 2107], "attention_bias": 2039, "sacrific": [2040, 2045], "dirac": 2040, "xavier": 2040, "glorot": 2040, "bengio": 2040, "fan": 2040, "_in": 2040, "_out": [2040, 2052, 2106], "fan_in": 2040, "kaim": 2040, "delv": 2040, "surpass": 2040, "he": 2040, "_mode": 2040, "fan_out": 2040, "redrawn": 2040, "sax": 2040, "2013": 2040, "marten": 2040, "scaler": [2041, 2045], "clip_grad_value_": 2041, "unscale_": 2041, "optimizer2": 2041, "batch_per_it": 2041, "iters_to_accumul": 2041, "num_proc": 2041, "grad_param": 2041, "grad_norm": 2041, "scaled_grad_param": 2041, "inv_scal": 2041, "get_scal": 2041, "proce": [2041, 2045, 2075, 2111], "optimizer0": 2041, "output0": 2041, "model0": 2041, "model1": 2041, "loss0": 2041, "loss1": 2041, "hundr": [2041, 2054, 2111], "imped": 2041, "poor": [2041, 2042], "dp_model": 2041, "imported_funct": 2041, "mymm": 2041, "myfloat32func": 2041, "fwd_output": 2041, "cleaner": 2042, "mapsto": 2042, "educ": 2042, "_save": 2042, "_saved_self": 2042, "convex": 2042, "concav": 2042, "togglabl": 2042, "drawback": 2042, "0011": 2042, "creator": [2042, 2075, 2077], "hogwild": 2042, "train_fn": 2042, "graphtask": 2042, "copyslic": 2042, "mutex": 2042, "gotten": 2042, "curiou": 2042, "\u2102": 2042, "yj": 2042, "holomorph": 2042, "fulfil": [2042, 2049, 2068, 2098], "mathematician": 2042, "studi": [2042, 
2069], "beauti": 2042, "somewhat": [2042, 2045, 2080], "0906": 2042, "4835": 2042, "audio": [2042, 2085, 2099], "\u211d": 2042, "_output": 2042, "vj": 2042, "handi": [2042, 2045], "selfdeletingtempfil": 2042, "tmp_dir": 2042, "uuid": 2042, "uuid4": 2042, "temp_fil": 2042, "forbidden": 2042, "savedtensor": 2042, "_raw_saved_": 2042, "_raw_saved_self": 2042, "save_on_disk_threshold": 2042, "tensor_or_sctf": 2042, "_saved_oth": 2042, "4th": 2043, "backcompat": 2043, "broadcast_warn": 2043, "userwarn": 2043, "compute_z": 2044, "w_z": 2044, "w_y": 2044, "tbb": 2044, "aten_thread": 2044, "omp": 2044, "mkl_thread": 2044, "mkldnn_cpu_runtim": 2044, "use_mkldnn": 2044, "use_tbb": 2044, "use_openmp": 2044, "ON": [2044, 2053, 2054], "set_num_interop_thread": 2044, "get_num_interop_thread": 2044, "set_num_thread": [2044, 2057], "get_num_thread": 2044, "omp_num_thread": [2044, 2088], "mkl_num_thread": [2044, 2088], "e5": 2044, "oversubscript": 2044, "memory manag": 2045, "optimize pytorch": 2045, "irrespect": 2045, "spread": 2045, "cuda2": [2045, 2053], "broadli": [2045, 2070], "set_float_32_matmul_precis": 2045, "a100": [2045, 2108, 2111], "a_ful": 2045, "10240": 2045, "b_full": 2045, "ab_ful": 2045, "7277": 2045, "ab_tf32": 2045, "016": 2045, "ga100": 2045, "1747": 2045, "relative_error": 2045, "0022": 2045, "ab_fp32": 2045, "0031": 2045, "000039": 2045, "7x": 2045, "globalcontext": 2045, "setallowtf32cubla": 2045, "setallowtf32cudnn": 2045, "bench_gemm_transform": 2045, "allow_fp16_reduc": 2045, "4048": 2045, "1634": 2045, "1639": 2045, "4056": 2045, "1670": 2045, "1661": 2045, "4080": 2045, "1664": 2045, "1658": 2045, "1651": 2045, "4104": 2045, "1677": 2045, "1674": 2045, "4128": 2045, "1796": [2045, 2055], "2519": 2045, "5096": 2045, "2144": 2045, "2149": 2045, "2766": 2045, "5120": 2045, "2142": 2045, "9728": 2045, "3875": 2045, "5779": 2045, "16384": [2045, 2104], "6182": 2045, "9656": 2045, "setallowfp16reductioncubla": 2045, "instabl": 2045, "setallowbf16reductioncubla": 2045, "invis": [2045, 2099, 2102, 2111], "start_ev": 2045, "elapsed_time_m": 2045, "pointless": 2045, "exploit": 2045, "paragraph": [2045, 2052], "initial_grad": 2045, "memory_alloc": [2045, 2053], "memory_snapshot": [2045, 2053], "memcheck": 2045, "option2": 2045, "value2": 2045, "max_split_size_mb": 2045, "mb": 2045, "borderlin": 2045, "memory_summari": 2045, "roundup_power2_divis": 2045, "1280": 2045, "1536": 2045, "1792": 2045, "256mb": 2045, "512mb": 2045, "1gb": [2045, 2109], "knob": [2045, 2098], "garbage_collection_threshold": 2045, "reclaim": [2045, 2096], "release_cached_block": 2045, "unfavor": 2045, "expandable_seg": 2045, "2mb": 2045, "sliver": 2045, "pinned_use_cuda_host_regist": 2045, "cudahostregist": 2045, "cudahostalloc": 2045, "malloc": [2045, 2113], "pinned_num_register_thread": 2045, "cuda_runtime_api": 2045, "iostream": [2045, 2093], "fpic": 2045, "my_malloc": 2045, "ssize_t": 2045, "ptr": 2045, "cout": [2045, 2093], "endl": [2045, 2054, 2093], "my_fre": 2045, "cudapluggablealloc": 2045, "new_alloc": 2045, "change_current_alloc": 2045, "_cuda_clearcublasworkspac": 2045, "lru": 2045, "geometri": 2045, "1023": 2045, "zeta": [2045, 2066, 2081, 2106], "use_pytorch_kernel_cach": 2045, "pytorch_kernel_cache_path": 2045, "store_tru": 2045, "disable_cuda": 2045, "assess": 2045, "cudagetdevicecount": 2045, "cuinit": 2045, "nvmldevicegetcount_v2": 2045, "poison": 2045, "aforement": [2045, 2057], "train_load": [2045, 2057], "x_gpu": 2045, "x_cpu_long": 2045, "y_cpu": 2045, "y_gpu": 2045, "y_cpu_long": 2045, 
"new_tensor": 2045, "overus": 2045, "cudagraphlaunch": 2045, "elid": 2045, "versatil": 2045, "static_input": 2045, "static_output": 2045, "realist": 2045, "sophist": [2045, 2067], "register_generator_st": 2045, "virtual": [2045, 2082], "d_in": 2045, "d_out": 2045, "640": 2045, "static_target": 2045, "static_y_pr": 2045, "static_loss": 2045, "real_input": [2045, 2101], "real_target": 2045, "refil": 2045, "rejoin": 2045, "cuda_work": 2045, "nsight": 2045, "reorgan": 2045, "graphabl": 2045, "illeg": [2045, 2098], "needlessli": [2045, 2098], "econom": 2045, "static_out_1": 2045, "g1_workload": 2045, "static_in_1": 2045, "static_out_2": 2045, "g2_workload": 2045, "static_in_2": 2045, "real_data_1": 2045, "real_data_2": 2045, "occasion": [2045, 2080, 2101], "gdoc": 2046, "toe": 2046, "grab": 2046, "29500": [2047, 2075, 2076], "grad0": 2047, "grad1": 2047, "bucket1": 2047, "bucket0": 2047, "hurt": 2047, "kick": [2047, 2075, 2076, 2108], "earliest": 2047, "unreadi": 2047, "perspect": [2047, 2051, 2058, 2076], "hpp": 2047, "processgroupgloo": 2047, "processgroupmpi": 2047, "_sync_param": 2047, "autograd_hook": 2047, "prepare_for_backward": 2047, "optimize_ddp": 2047, "linearfunct": 2048, "grad_bia": 2048, "mulconst": 2048, "mycub": [2048, 2049], "grad_dx": [2048, 2049], "my_cub": [2048, 2049], "input_featur": 2048, "output_featur": 2048, "__array_function__": [2048, 2102], "nep": [2048, 2102], "0018": 2048, "scalartensor": 2048, "handled_funct": 2048, "mandat": 2048, "update_wrapp": 2048, "ensure_tensor": 2048, "metadatatensor": 2048, "__add__": 2048, "subtensor2": 2048, "othersubtensor": 2048, "loggingtensor": 2048, "permiss": 2048, "_metadata": 2048, "ndata": 2048, "ministri": 2048, "silli": 2048, "superclass": 2048, "troublesom": 2048, "_get_overridable_funct": 2048, "get_overridable_funct": [2048, 2112], "func_dict": 2048, "nn_func": 2048, "labori": 2048, "_get_testing_overrid": 2048, "get_testing_overrid": [2048, 2112], "override_dict": 2048, "dummy_add": 2048, "get_ignored_funct": [2048, 2112], "__torch_dispatch__": [2048, 2080, 2101], "outdat": 2048, "redispatch": [2048, 2101], "dispatchkei": 2048, "zerotensor": 2048, "compositeimplicitautograd": 2048, "func_nam": [2048, 2075], "overload_nam": 2048, "exot": 2048, "zoo": 2048, "torchfunctionmod": 2048, "_python_dispatch": 2048, "torchdispatchmod": 2048, "resolve_nam": [2048, 2112], "functionlog": 2048, "dispatchlog": 2048, "7164": 2048, "9336": 2048, "4287": 2048, "7989": 2048, "2169": 2048, "7474": 2048, "5624": 2048, "5970": 2048, "4328": 2048, "9794": 2048, "3490": 2048, "8671": 2048, "8573": 2048, "4338": 2048, "4948": 2048, "1249": 2048, "3307": 2048, "2151": 2048, "6018": 2048, "9060": 2048, "2974": 2048, "7708": 2048, "6668": 2048, "0352": 2048, "7948": 2048, "6023": 2048, "4303": 2048, "2036": 2048, "6831": 2048, "8120": 2048, "5949": 2048, "5416": 2048, "3335": 2048, "5897": 2048, "custom_vjp": 2049, "custom_jvp": 2049, "to_numpi": 2049, "numpysort": 2049, "ind_inv": 2049, "_1": [2049, 2068], "numpytak": 2049, "numpy_sort": 2049, "ggx": 2049, "vmappabl": 2049, "x_bdim": 2049, "ind_bdim": 2049, "ind_inv_bdim": 2049, "expanded_x": 2049, "expanded_ind": 2049, "expanded_ind_inv": 2049, "new_dim": 2049, "logical_dim": 2049, "maybe_expand_bdim_at_front": 2049, "pseudocod": 2049, "rapidli": [2050, 2057, 2063, 2064], "fortun": [2050, 2111], "abridg": 2050, "total_loss": 2050, "extrud": 2050, "phenomenon": 2050, "plenti": [2050, 2099], "bptt": 2050, "repackag": 2050, "nm": 2050, "blow": 2050, "rememb": [2050, 2057, 2067], "elf": 2050, "grep": 
[2050, 2095, 2099], "run_model": 2050, "recoveri": 2050, "data_parallel": 2050, "pad_packed_sequ": 2050, "padded_input": 2050, "packed_input": 2050, "packed_output": 2050, "my_lstm": 2050, "dp_m": 2050, "padding_input": 2050, "flava": 2051, "sooner": 2051, "llm": 2051, "6b": 2051, "2b": 2051, "8gb": 2051, "1x": 2051, "24gb": 2051, "total_transformer_block_params_in_b": 2051, "dtype_byt": 2051, "num_gpu": 2051, "gb": 2051, "2x": [2051, 2085, 2105], "record_funct": [2051, 2109], "recordstream": 2051, "flat_param": 2051, "splitwithsizesbackward": 2051, "4gb": 2051, "6gb": 2051, "_another_": 2051, "_could_": 2051, "ur": 2052, "ui": [2052, 2085, 2105, 2108], "j_f": 2052, "calculu": 2052, "cw": 2052, "bigger": 2052, "articl": 2052, "58eb23378f2a376565a66ac32c93a316c45b6131": 2052, "l99": 2052, "l105": 2052, "ds_dx": 2052, "compute_gradi": 2052, "ds_dy": 2052, "conj_w_d": 2052, "w_d": 2052, "d_idx": 2052, "albeit": 2052, "wonder": 2052, "amd": [2053, 2092], "dialect": 2053, "portabl": 2053, "rocmdoc": 2053, "programming_guid": 2053, "hip_api_guid": 2053, "cuda_vers": 2053, "cudaruntimegetvers": 2053, "cudadrivergetvers": 2053, "hip_vers": 2053, "hipruntimegetvers": 2053, "hipdrivergetvers": 2053, "11000": 2053, "use_rocm": 2053, "40300": 2053, "cmake": [2053, 2061, 2093], "drocm_force_enable_gpu_assert": 2053, "addglobalcallback": 2054, "recordfunct": 2054, "ivalu": 2054, "threadlocaldebuginfo": 2054, "debuginfoguard": 2054, "recordfunctioncallback": 2054, "onfunctionent": 2054, "onfunctionexit": 2054, "needsinput": 2054, "samplingprob": 2054, "enablerecordfunct": 2054, "cerr": 2054, "broader": [2054, 2089], "inject": 2054, "setapiusagehandl": 2054, "setapiusagelogg": 2054, "event_nam": 2054, "c10_log_api_usage_onc": 2054, "my_api": 2054, "_log_api_usage_onc": 2054, "archiv": [2054, 2060], "akin": [2054, 2093], "jpeg": 2054, "camera": [2054, 2085], "setexportmoduleextrafileshook": 2054, "extrafilesmap": 2054, "producer_info": 2054, "getenv": 2054, "getsourc": 2054, "precompil": 2054, "pyc": 2054, "loos": 2054, "elabor": 2055, "tpu": 2055, "mylinear": 2055, "0413": 2055, "2057": 2055, "0597": 2055, "8247": 2055, "1045": 2055, "4299": 2055, "5457": 2055, "4793": 2055, "3634": 2055, "8525": 2055, "6749": 2055, "l0": [2055, 2060, 2099], "deeper": [2055, 2068], "bignet": 2055, "big_net": 2055, "dynamicnet": 2055, "dynamic_net": 2055, "2051": 2055, "7601": 2055, "1963": 2055, "4354": 2055, "6598": 2055, "4446": 2055, "4628": 2055, "8774": 2055, "6848": 2055, "5458": 2055, "4647": 2055, "5310": 2055, "0609": 2055, "0940": 2055, "1266": 2055, "0623": 2055, "3508": 2055, "0550": 2055, "5317": 2055, "5562": 2055, "4028": 2055, "6942": 2055, "0140": 2055, "0329": 2055, "1160": 2055, "0434": 2055, "3889": 2055, "1613": 2055, "6340": 2055, "3887": 2055, "9979": 2055, "0767": 2055, "3526": 2055, "8756": 2055, "5847": 2055, "6016": 2055, "1608": 2055, "0829": 2055, "6338": 2055, "9239": 2055, "6943": 2055, "5034": 2055, "0268": 2055, "4489": 2055, "9403": 2055, "1571": [2055, 2060], "2509": 2055, "5052": 2055, "3088": 2055, "4951": 2055, "3381": 2055, "5166": 2055, "beginn": 2055, "examples_nn": 2055, "polynomial_modul": 2055, "teach": 2055, "0013": [2055, 2081], "0030": 2055, "0008": 2055, "modalmodul": 2055, "6614": 2055, "2669": 2055, "0617": 2055, "4519": 2055, "two_layer_net_optim": 2055, "blitz": 2055, "neural_networks_tutori": 2055, "autograd_tutori": 2055, "new_net": 2055, "runningmean": 2055, "1041": 2055, "0647": 2055, "1515": 2055, "m_load": 2055, "unserialized_th": 2055, "statefulmodul": 2055, 
"param3": 2055, "param_list": 2055, "parameterlist": 2055, "param_dict": 2055, "parameterdict": 2055, "buffer1": 2055, "buffer2": 2055, "buffer3": 2055, "0322": 2055, "9066": 2055, "1409": 2055, "4852": 2055, "6949": 2055, "2911": 2055, "1044": 2055, "4202": 2055, "1953": 2055, "5299": 2055, "8747": 2055, "6289": 2055, "4898": 2055, "6434": 2055, "5187": 2055, "0346": 2055, "4077": 2055, "4324": 2055, "7022": 2055, "3915": 2055, "6176": 2055, "6062": 2055, "5992": 2055, "4452": 2055, "2843": 2055, "3710": 2055, "3947": 2055, "saving_loading_model": 2055, "what_is_state_dict": 2055, "skip_init": 2055, "skip_param_init": 2055, "forward_hook": [2055, 2107], "backward_hook": [2055, 2107], "new_grad_input": 2055, "5059": 2055, "8158": 2055, "2390": 2055, "0043": 2055, "addmmbackward": 2055, "forward_pre_hook_handl": 2055, "5752": 2055, "7421": 2055, "forward_hook_handl": 2055, "0980": 2055, "4666": 2055, "0256": 2055, "4497": 2055, "5046": 2055, "combat": 2055, "mps_devic": 2056, "yourfavoritenet": 2056, "a3c": 2057, "set_start_method": 2057, "simplequeu": 2057, "cope": 2057, "eleg": 2057, "num_process": 2057, "inappropri": 2057, "vcpu": 2057, "htop": 2057, "exceed": 2057, "competit": 2057, "oversubscrib": 2057, "mnist_hogwild": 2057, "dataloader_kwarg": 2057, "train_epoch": 2057, "30x": 2057, "boost": [2057, 2095], "754": 2058, "1e20": 2058, "4142e": 2058, "struggl": 2058, "benign": 2058, "v_dot2": 2058, "mfma": 2058, "fp64": 2058, "miopen": 2058, "rocblas_internal_fp16_alt_impl": 2058, "miopen_debug_convolution_attrib_fp16_alt_impl": 2058, "_convbackend": 2058, "slownd": 2058, "slownd_transpos": 2058, "slownd_dil": 2058, "slownd_dilated_transpos": 2058, "convbackend": 2058, "miopendepthwis": 2058, "miopentranspos": 2058, "svd_lowrank": [2059, 2080], "22modul": 2059, "20determin": 2059, "index_add_cuda_": 2059, "1509": 2059, "8027": 2059, "0333": 2059, "1444": 2059, "rese": 2059, "seed_work": 2059, "worker_se": 2059, "train_dataset": 2059, "tensor_dict": 2060, "loaded_numb": 2060, "loaded_even": 2060, "loaded_smal": 2060, "num_batches_track": 2060, "bn_state_dict": 2060, "new_bn": 2060, "out0_relu": 2060, "1400": 2060, "4563": 2060, "0271": 2060, "4406": 2060, "2827": 2060, "4588": 2060, "2031": 2060, "1316": 2060, "6533": 2060, "3413": 2060, "1112": 2060, "m_state_dict": 2060, "new_m": 2060, "zip64": 2060, "pkl": [2060, 2068], "byteord": 2060, "original_nam": 2060, "controlflowmodul": 2060, "controlflowmodule_trac": 2060, "3793": 2060, "controlflowmodule_script": 2060, "tagger": 2060, "ipu_tag": 2060, "ipu_deseri": 2060, "startswith": [2060, 2066], "get_default_load_endian": 2060, "loadendian": 2060, "default_load_endian": 2060, "set_default_load_endian": 2060, "endian": 2060, "get_default_mmap_opt": 2060, "default_mmap_opt": 2060, "set_default_mmap_opt": 2060, "safe_glob": 2060, "clear_safe_glob": 2060, "get_safe_glob": 2060, "rem": 2061, "7z": 2061, "curl": 2061, "ossci": 2061, "mkl_2020": 2061, "aoa": 2061, "omkl": 2061, "cuda_prefix": 2061, "cuda102": 2061, "magma_2": 2061, "4_": 2061, "omagma": 2061, "cmake_include_path": 2061, "cd": [2061, 2068, 2093, 2111], "magma_hom": 2061, "studio": [2061, 2063], "pip": [2061, 2063, 2064, 2065, 2085, 2100], "cmake_gener": 2061, "ffi": 2061, "create_extens": 2061, "_ext": 2061, "define_macro": 2061, "relative_to": 2061, "c99": 2061, "x86_x64": 2061, "packagesnotfounderror": 2061, "anaconda": 2061, "noarch": 2061, "continuum": 2061, "pkg": 2061, "pro": [2061, 2085], "msys2": 2061, "importerror": [2061, 2068], "dll": 2061, "vc2017": 2061, "vc": 2061, 
"vs2017_runtim": 2061, "mkl_fft": 2061, "intel_openmp": 2061, "vs2017": 2061, "openbla": 2061, "forg": 2061, "emerg": [2061, 2098], "forgotten": 2061, "freeze_support": 2061, "forkingpickl": 2061, "brokenpipeerror": 2061, "errno": 2061, "couldn": [2061, 2065], "torch_14808_1591070686": 2061, "thalloc": 2061, "tdr": 2061, "thcudacheck": 2061, "csrc": [2061, 2093, 2098, 2099], "storageshar": 2061, "microsoft": [2062, 2070], "flavor": 2062, "polish": [2062, 2063], "orchestr": [2063, 2107], "exportopt": 2063, "onnxregistri": 2063, "extractor": 2063, "fxgraphextractor": 2063, "onnxfakecontext": 2063, "onnxprogram": [2063, 2065], "onnxprogramseri": 2063, "upgrad": [2063, 2067, 2087], "perceptron": 2063, "mlpmodel": 2063, "fc0": 2063, "fc3": 2063, "97": [2063, 2081], "onnx_program": 2063, "dynamo_export": [2063, 2065], "model_proto": 2063, "modelproto": 2063, "complianc": 2063, "protobuf": [2063, 2065], "netron": 2063, "icon": 2063, "viewer": [2063, 2113], "parseabl": 2063, "fxe0007": 2063, "fxe0011": 2063, "fxe0012": 2063, "fxe0013": 2063, "fxe0014": 2063, "fxe0015": 2063, "fxe0016": 2063, "model_kwarg": 2063, "my_simple_model": 2063, "my_dynamic_model": 2063, "tensor_typ": 2063, "elem_typ": 2063, "dim_param": [2063, 2065], "arg0_dim_0": 2063, "arg0_dim_1": 2063, "arg0_dim_2": 2063, "fake_context": 2063, "onnx_registri": 2063, "diagnostic_opt": 2063, "enable_fake_mod": 2063, "xdoctest": [2063, 2064], "torch_doctest_onnx": [2063, 2064], "my_nn_modul": 2063, "my_model_without_initi": 2063, "WITH": [2063, 2065], "my_model_with_initi": 2063, "model_st": 2063, "input_adapt": 2063, "output_adapt": 2063, "diagnostic_context": 2063, "export_except": 2063, "model_signatur": 2063, "model_torch": 2063, "io_adapt": 2063, "inputadapt": 2063, "outputadapt": 2063, "diagnosticcontext": 2063, "adapt_torch_inputs_to_onnx": 2063, "model_with_state_dict": 2063, "func_nested_input": 2063, "x_dict": 2063, "y_tupl": 2063, "y3": 2063, "adapt_torch_outputs_to_onnx": 2063, "model_output": 2063, "func_returning_tupl": 2063, "pt_output": 2063, "inputadaptstep": 2063, "outputadaptstep": 2063, "pprint": 2063, "9216": [2063, 2065], "p_conv1_weight": 2063, "p_conv2_weight": 2063, "p_fc1_weight": 2063, "p_fc2_weight": 2063, "b_my_buffer2": 2063, "b_my_buffer1": 2063, "_log_softmax": [2063, 2066, 2106], "include_initi": 2063, "bufferediobas": 2063, "safetensor": 2063, "safe_open": 2063, "save_diagnost": 2063, "protobufonnxprogramseri": 2063, "serializetostr": 2063, "exported_model": 2063, "onnxruntimeopt": 2063, "session_opt": 2063, "execution_provid": 2063, "execution_provider_opt": 2063, "onnxruntim": [2063, 2064, 2065, 2070], "sessionopt": 2063, "invalidexportoptionserror": 2063, "get_op_funct": 2063, "is_registered_op": 2063, "register_op": 2063, "tracedonnxfunct": 2063, "sctip": 2063, "warnings_as_error": 2063, "is_onnxrt_backend_support": 2064, "onnxrt": [2064, 2092], "dummy_input": 2065, "input_nam": 2065, "actual_input_1": 2065, "learned_": 2065, "output_nam": 2065, "learned_0": 2065, "learned_1": 2065, "learned_2": 2065, "learned_3": 2065, "learned_14": 2065, "learned_15": 2065, "kernel_shap": 2065, "check_model": 2065, "printable_graph": 2065, "ort": 2065, "ort_sess": 2065, "inferencesess": 2065, "astyp": 2065, "seq_length": 2065, "real_seq_length": 2065, "experienc": 2065, "new_data": 2065, "hope": [2065, 2099, 2105], "symbolic_opset": 2065, "symbolic_opset9": 2065, "_variablefunct": 2065, "pyi": 2065, "checkout": 2065, "___torch_mangle_0": 2065, "alpha_f": 2065, "myrelu": 2065, "value_t": 2065, "pythonop": [2065, 
2066], "mylogexp": 2065, "operator_export_typ": 2065, "onnx_fallthrough": 2065, "onnx_aten_fallback": 2065, "onnx_opset": 2065, "opset15": 2065, "custom_opset": 2065, "67326": 2065, "alphax": 2065, "castlik": 2065, "gammax": 2065, "settyp": 2065, "custom_selu": 2065, "jit_util": 2065, "graphcontext": 2065, "onnxscript_op": 2065, "register_custom_op_symbol": 2065, "symbolic_nam": 2065, "symbolic_fn": 2065, "Be": [2065, 2108], "symbolic_help": 2065, "symbolic_foo_forward": 2065, "custom_domain": 2065, "attr1_f": 2065, "attr2_i": 2065, "foo_forward": 2065, "foomodel": 2065, "example_input1": 2065, "caffe2": [2065, 2085], "torch_script_graph": 2065, "unconvertible_op": 2065, "dynamic_ax": 2065, "export_param": 2065, "trainingmod": 2065, "operatorexporttyp": 2065, "do_constant_fold": 2065, "keep_initializers_as_input": 2065, "export_modules_as_funct": 2065, "autograd_inlin": 2065, "OF": 2065, "input_i": 2065, "input_z": 2065, "fileno": 2065, "untrain": 2065, "doc_str": 2065, "onnx_aten": 2065, "summodul": 2065, "dim_valu": 2065, "my_custom_axis_nam": 2065, "sum_dynamic_axes_1": 2065, "deduplicate_initi": 2065, "74765": 2065, "checkererror": 2065, "unsupportedoperatorerror": 2065, "export_to_pretty_str": 2065, "export_typ": 2065, "google_print": 2065, "add_node_nam": 2065, "nodeproto": 2065, "debugstr": 2065, "contrib": 2065, "test_aten_embedding_2": 2065, "test_oper": 2065, "unregister_custom_op_symbol": 2065, "select_model_mode_for_export": 2065, "is_in_onnx_export": 2065, "middl": [2065, 2099], "enable_log": 2065, "disable_log": 2065, "graphinfo": 2065, "incorrect_relu_symbolic_funct": 2065, "2328854203224182": 2065, "699536174352349": 2065, "u2713": 2065, "constantchunk": 2066, "__and_": 2066, "__contains_": 2066, "__derive_index": 2066, "__getitem_": 2066, "__interpol": 2066, "__is_": 2066, "__isnot_": 2066, "__lshift_": 2066, "__not_": 2066, "__or_": 2066, "__range_length": 2066, "__rshift_": 2066, "__xor_": 2066, "_cast_byt": 2066, "_cast_char": 2066, "_cast_doubl": 2066, "_cast_float": 2066, "_cast_half": 2066, "_cast_int": 2066, "_cast_long": 2066, "_cast_short": 2066, "_conj": 2066, "_convolution_mod": 2066, "_dim_arang": 2066, "_pack_padded_sequ": 2066, "_pad_packed_sequ": 2066, "_reshape_from_tensor": 2066, "_sample_dirichlet": 2066, "_set_item": 2066, "_shape_as_tensor": 2066, "_standard_gamma": 2066, "_uniqu": 2066, "_unique2": 2066, "_weight_norm": 2066, "conv1d_relu": 2066, "conv2d_relu": 2066, "conv3d_relu": 2066, "embedding_renorm": 2066, "floordiv": [2066, 2072], "linear_relu": [2066, 2072], "nonzero_numpi": 2066, "numpy_t": 2066, "unchecked_cast": 2066, "unique_dim": 2066, "_c10d_function": 2066, "all_gather_into_tensor_coalesc": 2066, "all_gather_into_tensor_out": 2066, "all_reduce_coalesc": 2066, "reduce_scatter_tensor_coalesc": 2066, "wait_tensor": 2066, "_c10d_functional_autograd": 2066, "_dtensor": 2066, "shard_dim_alltoal": 2066, "_quantiz": 2066, "conv2d_prepack": 2066, "conv3d_prepack": 2066, "conv_transpose1d_prepack": 2066, "conv_transpose2d_prepack": 2066, "conv_transpose3d_prepack": 2066, "linear_dynam": 2066, "linear_prepack": 2066, "linear_prepack_fp16": 2066, "linear_prepack_fp16_legaci": 2066, "linear_prepack_legaci": 2066, "wrapped_fbgemm_linear_fp16_weight": 2066, "wrapped_fbgemm_pack_gemm_matrix_fp16": 2066, "_test": 2066, "get_first": 2066, "compleximplicit": 2066, "floatimplicit": 2066, "intimplicit": 2066, "__iand_": 2066, "__ilshift_": 2066, "__ior_": 2066, "__irshift_": 2066, "__ixor_": 2066, "__round_to_zero_floordiv": 2066, "__upsampl": 2066, 
"__upsample_bilinear": 2066, "__upsample_nearest": 2066, "_adaptive_avg_pool2d": [2066, 2106], "_adaptive_avg_pool3d": [2066, 2106], "_add_batch_dim": 2066, "_add_relu": 2066, "_addmm_activ": 2066, "_aminmax": 2066, "_amp_foreach_non_finite_check_and_unscal": 2066, "_amp_update_scal": 2066, "_assert_async": 2066, "_assert_tensor_metadata": 2066, "_autocast_to_full_precis": 2066, "_autocast_to_reduced_precis": 2066, "_batch_norm_impl_index": 2066, "_batch_norm_no_upd": 2066, "_batch_norm_with_upd": 2066, "_batch_norm_with_update_funct": 2066, "_cdist_forward": [2066, 2106], "_cholesky_solve_help": 2066, "_choose_qparams_per_tensor": 2066, "_chunk_cat": 2066, "_coalesc": 2066, "_compute_linear_combin": 2066, "_conj_copi": 2066, "_conj_phys": 2066, "_conv_depthwise2d": 2066, "_convert_indices_from_coo_to_csr": 2066, "_convert_indices_from_csr_to_coo": 2066, "_convert_weight_to_int4pack": 2066, "_copy_from": 2066, "_copy_from_and_res": 2066, "_cslt_compress": 2066, "_cslt_sparse_mm": 2066, "_cslt_sparse_mm_search": 2066, "_ctc_loss": 2066, "_cudnn_ctc_loss": 2066, "_cudnn_init_dropout_st": 2066, "_cudnn_rnn": 2066, "_cudnn_rnn_flatten_weight": 2066, "_cufft_clear_plan_cach": 2066, "_cufft_get_plan_cache_max_s": 2066, "_cufft_get_plan_cache_s": 2066, "_cufft_set_plan_cache_max_s": 2066, "_cummax_help": 2066, "_cummin_help": 2066, "_debug_has_internal_overlap": 2066, "_dimi": 2066, "_dimv": 2066, "_dirichlet_grad": 2066, "_efficient_attention_forward": 2066, "_efficientzerotensor": 2066, "_embedding_bag": [2066, 2106], "_embedding_bag_forward_onli": 2066, "_empty_affine_quant": 2066, "_empty_per_channel_affine_quant": 2066, "_euclidean_dist": 2066, "_fake_quantize_learnable_per_channel_affin": 2066, "_fake_quantize_learnable_per_tensor_affin": 2066, "_fake_quantize_per_tensor_affine_cachemask_tensor_qparam": 2066, "_fft_c2c": 2066, "_fft_c2r": 2066, "_fft_r2c": 2066, "_fill_mem_eff_dropout_mask": 2066, "_flash_attention_forward": 2066, "_foobar": [2066, 2111], "_foreach_ab": 2066, "_foreach_aco": 2066, "_foreach_add": 2066, "_foreach_addcdiv": 2066, "_foreach_addcmul": 2066, "_foreach_asin": 2066, "_foreach_atan": 2066, "_foreach_ceil": 2066, "_foreach_clamp_max": 2066, "_foreach_clamp_min": 2066, "_foreach_copi": 2066, "_foreach_co": 2066, "_foreach_cosh": 2066, "_foreach_div": 2066, "_foreach_erf": 2066, "_foreach_erfc": 2066, "_foreach_exp": 2066, "_foreach_expm1": 2066, "_foreach_floor": 2066, "_foreach_frac": 2066, "_foreach_lerp": 2066, "_foreach_lgamma": 2066, "_foreach_log": 2066, "_foreach_log10": 2066, "_foreach_log1p": 2066, "_foreach_log2": 2066, "_foreach_max": 2066, "_foreach_maximum": 2066, "_foreach_minimum": 2066, "_foreach_mul": 2066, "_foreach_neg": 2066, "_foreach_norm": 2066, "_foreach_pow": 2066, "_foreach_reciproc": 2066, "_foreach_round": 2066, "_foreach_sigmoid": 2066, "_foreach_sign": 2066, "_foreach_sin": 2066, "_foreach_sinh": 2066, "_foreach_sqrt": 2066, "_foreach_sub": 2066, "_foreach_tan": 2066, "_foreach_tanh": 2066, "_foreach_trunc": 2066, "_foreach_zero": 2066, "_functional_assert_async": 2066, "_functional_assert_scalar": 2066, "_functional_sym_constrain_rang": 2066, "_functional_sym_constrain_range_for_s": 2066, "_fused_adagrad": 2066, "_fused_adam": 2066, "_fused_adamw": 2066, "_fused_dropout": 2066, "_fused_moving_avg_obs_fq_help": 2066, "_fused_moving_avg_obs_fq_helper_funct": 2066, "_fused_sdp_choic": 2066, "_fused_sgd": 2066, "_fw_primal": 2066, "_fw_primal_copi": 2066, "_get_cpu_cap": 2066, "_get_tracing_st": 2066, "_grad_sum_to_s": 2066, 
"_has_compatible_shallow_copy_typ": 2066, "_has_same_storage_numel": 2066, "_histogramdd_bin_edg": 2066, "_histogramdd_from_bin_ct": 2066, "_histogramdd_from_bin_tensor": 2066, "_index_put_impl": 2066, "_indices_copi": 2066, "_infer_s": 2066, "_int_mm": 2066, "_is_all_tru": 2066, "_is_any_tru": 2066, "_is_zerotensor": 2066, "_jagged_to_padded_dense_forward": 2066, "_lazy_clon": 2066, "_linalg_check_error": 2066, "_linalg_det": 2066, "_linalg_eigh": 2066, "_linalg_eigv": 2066, "_linalg_slogdet": 2066, "_linalg_solve_ex": 2066, "_linalg_svd": 2066, "_list_to_tensor": 2066, "_local_scalar_dens": [2066, 2106], "_logcumsumexp": 2066, "_lstm_mp": 2066, "_make_dep_token": 2066, "_make_du": 2066, "_make_dual_copi": 2066, "_make_per_channel_quantized_tensor": 2066, "_make_per_tensor_quantized_tensor": 2066, "_masked_scal": 2066, "_masked_softmax": 2066, "_mixed_dtypes_linear": 2066, "_mkldnn_reshap": 2066, "_mkldnn_transpos": 2066, "_mps_convolut": 2066, "_mps_convolution_transpos": 2066, "_native_batch_norm_legit": [2066, 2106], "_native_batch_norm_legit_funct": 2066, "_native_batch_norm_legit_no_train": [2066, 2106], "_native_multi_head_attent": 2066, "_ncf_unsqueez": 2066, "_ncf_view": 2066, "_neg_view": 2066, "_neg_view_copi": 2066, "_nested_compute_contiguous_strides_offset": 2066, "_nested_from_pad": 2066, "_nested_from_padded_and_nested_exampl": 2066, "_nested_get_jagged_dummi": 2066, "_nested_get_length": 2066, "_nested_get_offset": 2066, "_nested_get_ragged_idx": 2066, "_nested_get_valu": 2066, "_nested_get_values_copi": 2066, "_nested_tensor_from_mask": 2066, "_nested_tensor_from_mask_left_align": 2066, "_nested_tensor_from_tensor_list": 2066, "_nested_tensor_s": 2066, "_nested_tensor_softmax_with_shap": 2066, "_nested_tensor_storage_offset": 2066, "_nested_tensor_strid": 2066, "_nested_view_from_buff": 2066, "_nested_view_from_buffer_copi": 2066, "_nested_view_from_jag": 2066, "_nested_view_from_jagged_copi": 2066, "_new_zeros_with_same_feature_meta": 2066, "_nnpack_avail": 2066, "_nnpack_spatial_convolut": 2066, "_no_grad_embedding_renorm": 2066, "_no_grad_fil": 2066, "_no_grad_norm": 2066, "_no_grad_uniform": 2066, "_no_grad_zero": 2066, "_pack_sequ": 2066, "_pad_circular": 2066, "_pad_enum": 2066, "_padded_dense_to_jagged_forward": 2066, "_pdist_forward": [2066, 2106], "_pin_memori": 2066, "_prelu_kernel": 2066, "_print": 2066, "_propagate_xla_data": 2066, "_remove_batch_dim": 2066, "_reshape_alia": 2066, "_reshape_alias_copi": 2066, "_reshape_copi": 2066, "_resize_output": 2066, "_rowwise_prun": 2066, "_saturate_weight_to_fp16": 2066, "_scaled_dot_product_attention_math": 2066, "_scaled_dot_product_cudnn_attent": 2066, "_scaled_dot_product_efficient_attent": 2066, "_scaled_dot_product_flash_attent": 2066, "_scaled_dot_product_flash_attention_for_cpu": 2066, "_scaled_mm": 2066, "_size_if_not_equ": 2066, "_slow_conv2d_forward": 2066, "_sobol_engine_draw": 2066, "_sobol_engine_ff": 2066, "_sobol_engine_initialize_st": 2066, "_sobol_engine_scrambl": 2066, "_softmax": [2066, 2081, 2106], "_sparse_addmm": 2066, "_sparse_broadcast_to": 2066, "_sparse_broadcast_to_copi": 2066, "_sparse_bsc_tensor_unsaf": 2066, "_sparse_bsr_tensor_unsaf": 2066, "_sparse_compressed_tensor_unsaf": 2066, "_sparse_compressed_tensor_with_dim": 2066, "_sparse_coo_tensor_unsaf": 2066, "_sparse_coo_tensor_with_dim": 2066, "_sparse_coo_tensor_with_dims_and_tensor": 2066, "_sparse_csc_tensor_unsaf": 2066, "_sparse_csr_prod": 2066, "_sparse_csr_sum": 2066, "_sparse_csr_tensor_unsaf": 2066, "_sparse_log_softmax": 2066, 
"_sparse_mask_project": 2066, "_sparse_mm": 2066, "_sparse_mm_reduce_impl": 2066, "_sparse_semi_structured_addmm": 2066, "_sparse_semi_structured_appli": 2066, "_sparse_semi_structured_apply_dens": 2066, "_sparse_semi_structured_linear": 2066, "_sparse_semi_structured_mm": 2066, "_sparse_semi_structured_til": 2066, "_sparse_softmax": 2066, "_sparse_sparse_matmul": 2066, "_sparse_sum": 2066, "_spdiag": 2066, "_standard_gamma_grad": 2066, "_tensor_to_list": 2066, "_test_ambiguous_default": 2066, "_test_autograd_multiple_dispatch": 2066, "_test_autograd_multiple_dispatch_view": 2066, "_test_autograd_multiple_dispatch_view_copi": 2066, "_test_check_tensor": 2066, "_test_functorch_fallback": 2066, "_test_optional_filled_intlist": 2066, "_test_optional_floatlist": 2066, "_test_optional_intlist": 2066, "_test_parallel_materi": 2066, "_test_serialization_subcmul": 2066, "_test_string_default": 2066, "_test_warn_in_autograd": 2066, "_thnn_fused_gru_cel": 2066, "_thnn_fused_lstm_cel": 2066, "_to_copi": [2066, 2106], "_to_cpu": 2066, "_to_dens": 2066, "_to_spars": 2066, "_to_sparse_bsc": 2066, "_to_sparse_bsr": 2066, "_to_sparse_csc": 2066, "_to_sparse_csr": 2066, "_to_sparse_semi_structur": 2066, "_transform_bias_rescale_qkv": 2066, "_transformer_encoder_layer_fwd": 2066, "_trilinear": 2066, "_triton_multi_head_attent": 2066, "_triton_scaled_dot_attent": 2066, "_unpack_du": 2066, "_unsafe_index": 2066, "_unsafe_index_put": 2066, "_unsafe_view": 2066, "_unwrap_opt": 2066, "_upsample_bicubic2d_aa": 2066, "_upsample_bilinear2d_aa": 2066, "_upsample_nearest_exact1d": 2066, "_upsample_nearest_exact2d": 2066, "_upsample_nearest_exact3d": 2066, "_use_cudnn_ctc_loss": 2066, "_use_cudnn_rnn_flatten_weight": 2066, "_validate_compressed_sparse_indic": 2066, "_validate_sparse_bsc_tensor_arg": 2066, "_validate_sparse_bsr_tensor_arg": 2066, "_validate_sparse_compressed_tensor_arg": 2066, "_validate_sparse_coo_tensor_arg": 2066, "_validate_sparse_csc_tensor_arg": 2066, "_validate_sparse_csr_tensor_arg": 2066, "_values_copi": 2066, "_weight_int4pack_mm": 2066, "_weight_int8pack_mm": 2066, "_weight_norm_interfac": 2066, "capit": 2066, "confirmed_by_own": [2066, 2075], "convolution_overrid": 2066, "copy_sparse_to_spars": 2066, "endswith": 2066, "expandtab": 2066, "fake_quantize_per_channel_affine_cachemask": 2066, "fake_quantize_per_tensor_affine_cachemask": 2066, "fill_diagon": 2066, "glu_jvp": 2066, "has_torch_funct": [2066, 2112], "is_non_overlapping_and_dens": 2066, "is_own": [2066, 2075], "is_strides_like_format": 2066, "isalnum": 2066, "isalpha": 2066, "isdecim": 2066, "isdigit": 2066, "isidentifi": 2066, "islow": 2066, "isnumer": 2066, "isprint": 2066, "isspac": 2066, "istitl": 2066, "isupp": 2066, "lift_fresh": 2066, "ljust": 2066, "local_valu": [2066, 2075], "log_sigmoid_forward": 2066, "lstrip": 2066, "matrix_h": 2066, "nll_loss2d_forward": 2066, "nll_loss_forward": 2066, "normal_funct": 2066, "owner_nam": [2066, 2075], "percentformat": 2066, "quantized_gru": 2066, "quantized_lstm": 2066, "resize_as_spars": 2066, "rfind": 2066, "rindex": 2066, "rjust": 2066, "rpartit": 2066, "rsplit": 2066, "rstrip": 2066, "set_data": 2066, "slow_conv3d_forward": 2066, "sparse_res": 2066, "sparse_resize_and_clear": 2066, "splitlin": 2066, "swapcas": 2066, "sym_numel": [2066, 2106], "sym_storage_offset": [2066, 2106], "sym_strid": [2066, 2106], "unique_dim_consecut": 2066, "zfill": 2066, "_allgather_bas": 2066, "_reduce_scatter_bas": 2066, "allgath": 2066, "allgather_coalesc": 2066, "allgather_into_tensor_coalesc": 2066, 
"allreduce_coalesc": 2066, "alltoal": 2066, "alltoall_bas": 2066, "recv_any_sourc": 2066, "debugprim": 2066, "load_tensor": 2066, "_alloc_from_pool": 2066, "_mm_plus_mm": 2066, "_reinterpret_tensor": 2066, "accumulate_grad": 2066, "resize_storage_byt": 2066, "_mkl_linear": 2066, "_mkl_reorder_linear_weight": 2066, "_convolution_pointwis": 2066, "_convolution_transpose_pointwis": 2066, "_get_mkldnn_serialized_md": 2066, "_is_mkldnn_acl_support": 2066, "_is_mkldnn_bf16_support": 2066, "_is_mkldnn_fp16_support": 2066, "_linear_pointwis": 2066, "_nbyte": 2066, "_reorder_convolution_transpose_weight": 2066, "_reorder_convolution_weight": 2066, "_reorder_linear_weight": 2066, "_reorder_mkldnn_rnn_layer_weight": 2066, "mkldnn_prepack": 2066, "conv2d_run": 2066, "qconv1d_pointwis": 2066, "qconv2d_pointwis": 2066, "qconv3d_pointwis": 2066, "qconv_prepack": 2066, "qlinear_pointwis": 2066, "qlinear_prepack": 2066, "conv2d_clamp_prepack": 2066, "conv2d_clamp_run": 2066, "conv2d_transpose_clamp_prepack": 2066, "conv2d_transpose_clamp_run": 2066, "linear_clamp_prepack": 2066, "linear_clamp_run": 2066, "unpack_prepacked_sizes_conv2d": 2066, "unpack_prepacked_sizes_linear": 2066, "addstatvalu": 2066, "autogradadd": 2066, "autogradallnonzero": 2066, "autogradallzero": 2066, "autogradanynonzero": 2066, "autogradzero": 2066, "bailout": [2066, 2098], "bailouttempl": 2066, "broadcastmkldnntensor": 2066, "broadcasts": 2066, "chunksiz": 2066, "constantmkldnntensor": 2066, "differentiablegraph": 2066, "enumnam": 2066, "enumvalu": 2066, "fallbackgraph": 2066, "fusedconcat": 2066, "fusiongroup": 2066, "ifthenels": 2066, "ignoredpythonop": 2066, "mkldnnclamp": 2066, "mkldnnhardsigmoid": 2066, "mkldnnhardswish": 2066, "mkldnnhardtanh": 2066, "mkldnnlayernorm": 2066, "mkldnnscalarmul": 2066, "mmbatchsid": 2066, "mmtreereduc": 2066, "modulecontainerindex": 2066, "numtotensor": 2066, "raiseexcept": 2066, "reductions": 2066, "requiresgradcheck": 2066, "staticruntimecopyout": 2066, "staticsubgraph": 2066, "stringindex": 2066, "tensorexprdynamicgroup": 2066, "tensorexprdynamicguard": 2066, "tensorexprgroup": 2066, "timepoint": 2066, "tupleindex": 2066, "tupleunpack": 2066, "varconcat": 2066, "varstack": 2066, "awaitable_nowait": 2066, "awaitable_wait": 2066, "is_cpu": 2066, "is_ipu": 2066, "is_maia": 2066, "is_mkldnn": 2066, "is_mp": 2066, "is_mtia": 2066, "is_nest": 2066, "is_quant": 2066, "is_vulkan": 2066, "is_xla": 2066, "is_xpu": 2066, "onednnfusiongroup": 2066, "onednnfusionguard": 2066, "profile_ivalu": 2066, "rangelist": 2066, "rpc_remot": 2066, "unchecked_unwrap_opt": 2066, "_make_token": [2066, 2106], "_sink_token": [2066, 2106], "bessel_i0": [2066, 2106], "bessel_i1": [2066, 2106], "bessel_j0": [2066, 2081, 2106], "bessel_j1": [2066, 2081, 2106], "broadcast_in_dim": [2066, 2106], "cbrt": [2066, 2106], "collapse_view": [2066, 2106], "convert_element_typ": [2066, 2106], "copy_strid": [2066, 2106], "copy_to": [2066, 2106], "device_put": [2066, 2106], "erf_inv": [2066, 2106], "erfcx": [2066, 2081, 2106], "fft_c2c": [2066, 2106], "fft_c2r": [2066, 2106], "fft_r2c": [2066, 2106], "iota": [2066, 2106], "maximum_valu": [2066, 2106], "minimum_valu": [2066, 2106], "ndtri": [2066, 2081, 2106], "rev": [2066, 2106], "shift_left": [2066, 2106], "shift_right_arithmet": [2066, 2106], "slice_in_dim": [2066, 2106], "spherical_bessel_j0": [2066, 2081, 2106], "view_of": [2066, 2106], "view_of_dtyp": [2066, 2106], "xor_sum": [2066, 2106], "_call_end_callbacks_on_jit_fut": 2066, "_record_function_ent": 2066, 
"_record_function_enter_new": 2066, "_record_function_exit": 2066, "_bfloat16quantizedtofloat": 2066, "_floattobfloat16quant": 2066, "add_out": 2066, "add_relu_out": 2066, "add_scalar_out": 2066, "add_scalar_relu": 2066, "add_scalar_relu_out": 2066, "batch_norm1d": 2066, "batch_norm1d_relu": 2066, "batch_norm2d": 2066, "batch_norm2d_relu": 2066, "batch_norm3d": 2066, "batch_norm3d_relu": 2066, "batch_norm_relu": 2066, "cat_out": 2066, "cat_relu": 2066, "cat_relu_out": 2066, "conv1d_dynam": 2066, "conv1d_prepack": 2066, "conv1d_unpack": 2066, "conv2d_add": 2066, "conv2d_add_relu": 2066, "conv2d_dil": 2066, "conv2d_dynam": 2066, "conv2d_group": 2066, "conv2d_output_pad": 2066, "conv2d_pad": 2066, "conv2d_strid": 2066, "conv2d_transpos": 2066, "conv2d_unpack": 2066, "conv2d_unpack_s": 2066, "conv3d_dil": 2066, "conv3d_dynam": 2066, "conv3d_group": 2066, "conv3d_output_pad": 2066, "conv3d_pad": 2066, "conv3d_strid": 2066, "conv3d_transpos": 2066, "conv3d_unpack": 2066, "conv_prepack": 2066, "conv_transpose1d_dynam": 2066, "conv_transpose1d_unpack": 2066, "conv_transpose2d_dil": 2066, "conv_transpose2d_dynam": 2066, "conv_transpose2d_group": 2066, "conv_transpose2d_output_pad": 2066, "conv_transpose2d_pad": 2066, "conv_transpose2d_strid": 2066, "conv_transpose2d_transpos": 2066, "conv_transpose2d_unpack": 2066, "conv_transpose3d_dil": 2066, "conv_transpose3d_dynam": 2066, "conv_transpose3d_group": 2066, "conv_transpose3d_output_pad": 2066, "conv_transpose3d_pad": 2066, "conv_transpose3d_strid": 2066, "conv_transpose3d_transpos": 2066, "conv_transpose3d_unpack": 2066, "conv_unpack": 2066, "embedding_4bit": 2066, "embedding_bag_2bit_prepack": 2066, "embedding_bag_2bit_rowwise_offset": 2066, "embedding_bag_2bit_unpack": 2066, "embedding_bag_4bit": 2066, "embedding_bag_4bit_prepack": 2066, "embedding_bag_4bit_rowwise_offset": 2066, "embedding_bag_4bit_unpack": 2066, "embedding_bag_byt": 2066, "embedding_bag_byte_prepack": 2066, "embedding_bag_byte_rowwise_offset": 2066, "embedding_bag_byte_unpack": 2066, "embedding_bag_prepack": 2066, "embedding_bag_unpack": 2066, "embedding_byt": 2066, "linear_dynamic_fp16": 2066, "linear_dynamic_fp16_unpacked_weight": 2066, "linear_leaky_relu": 2066, "linear_relu_dynam": 2066, "linear_relu_dynamic_fp16": 2066, "linear_tanh": 2066, "linear_unpack": 2066, "linear_unpack_fp16": 2066, "linear_with_input_q_dq_qweight_dq_output_fp32": 2066, "linear_with_input_q_dq_qweight_dq_relu_output_fp32": 2066, "make_quantized_cell_param": 2066, "make_quantized_cell_params_dynam": 2066, "make_quantized_cell_params_fp16": 2066, "mul_out": 2066, "mul_relu": 2066, "mul_relu_out": 2066, "mul_scalar_out": 2066, "mul_scalar_relu": 2066, "mul_scalar_relu_out": 2066, "quantized_gru_cell_dynam": 2066, "quantized_lstm_cell_dynam": 2066, "quantized_rnn_relu_cell_dynam": 2066, "quantized_rnn_tanh_cell_dynam": 2066, "rngprim": 2066, "philox_rand": 2066, "qlinear": 2066, "qlinear_dynam": 2066, "qlinear_relu": 2066, "qlinear_relu_dynam": 2066, "qlinear_unpack": 2066, "static_runtim": 2066, "vartupleunpack": 2066, "clamp_nan_to_num": 2066, "create_owned_ref": 2066, "dequantize_copi": 2066, "dict_unpack": 2066, "expand_dims_copi": 2066, "flatten_copi": 2066, "fused_equally_split": 2066, "reshape_copi": 2066, "select_tensor": 2066, "signed_log1p": 2066, "to_copi": 2066, "to_maybe_copy_out": 2066, "var1": 2067, "var2": 2067, "bias_param": 2067, "adadelta": 2067, "adamax": 2067, "asgd": 2067, "nadam": 2067, "radam": 2067, "rmsprop": 2067, "rprop": 2067, "reducelronplateau": 2067, "multisteplr": 2067, 
"swa_util": 2067, "averagedmodel": 2067, "swalr": 2067, "update_bn": 2067, "optima": 2067, "polyak": 2067, "averaged_model": 2067, "multi_avg_fn": 2067, "get_ema_multi_avg_fn": 2067, "textrm": 2067, "update_paramet": 2067, "avg_fn": 2067, "_foreach": 2067, "ema_model": 2067, "ema_avg": 2067, "averaged_model_paramet": 2067, "model_paramet": 2067, "num_averag": 2067, "swa_schedul": 2067, "anneal_epoch": 2067, "swa_lr": 2067, "swa_model": 2067, "cosineannealinglr": 2067, "swa_start": 2067, "test_input": 2067, "secur": 2068, "unpackag": 2068, "exercis": 2068, "unzip": 2068, "my_packag": 2068, "freeli": 2068, "94304870911616": 2068, "94304900784016": 2068, "extern_modul": 2068, "model_1": 2068, "myzip": 2068, "file_byt": 2068, "writestr": 2068, "new_file_byt": 2068, "vim": 2068, "vimrc": 2068, "bufreadcmd": 2068, "brows": 2068, "amatch": 2068, "vi": 2068, "packageimport": 2068, "queryabl": 2068, "glob": 2068, "packageexport": 2068, "pe": 2068, "save_pickl": 2068, "has_fil": 2068, "importer_file_structur": 2068, "package_a": 2068, "get_rdep": 2068, "all_path": 2068, "dependency_graph_str": 2068, "save_text": 2068, "save_binari": 2068, "my_resourc": 2068, "config_stuff": 2068, "raw_data": 2068, "my_byt": 2068, "complementari": [2068, 2081], "load_pickl": 2068, "load_text": 2068, "load_binari": 2068, "my_tensor": 2068, "__reduce_package__": 2068, "my_str": 2068, "time_import": 2068, "time_export": 2068, "pickler": 2068, "persistent_id": 2068, "persistent_load": 2068, "generated_module_nam": 2068, "get_unique_id": 2068, "clock_gettim": 2068, "unpackage_foo": 2068, "depickl": 2068, "foo_1": 2068, "foo_2": 2068, "foo_packag": 2068, "foo_collect": 2068, "foo1": 2068, "foo2": 2068, "imported_foo": 2068, "9857706": 2068, "650140837": 2068, "652698385": 2068, "__torch_package__": 2068, "is_in_packag": 2068, "userexcept": 2068, "unpackageableexcept": 2068, "loaded_modul": 2068, "import_modul": 2068, "save_source_str": 2068, "save_modul": 2068, "textwrap": 2068, "dedent": 2068, "my_funct": 2068, "is_packag": 2068, "importlib": 2068, "my_pickl": 2068, "get_my_resourc": 2068, "read_text": 2068, "torch_package_import": 2068, "get_my_pickl": 2068, "is_from_packag": 2068, "stdlib": 2068, "my_test": 2068, "f2": 2068, "sys_import": 2068, "script_model": 2068, "mixed_model": 2068, "python_model_with_scripted_submodul": 2068, "loaded_script": 2068, "loaded_mix": 2068, "convention": 2068, "94286146172688": 2068, "94286146172784": 2068, "consult": [2068, 2098], "essai": 2068, "another_packag": 2068, "pickletool": 2068, "ast": 2068, "deni": 2068, "my_export": 2068, "my_interned_modul": 2068, "package_export": 2068, "my_externed_modul": 2068, "my_mocked_modul": 2068, "unwant": [2068, 2085], "hodg": 2068, "podg": 2068, "bazel": 2068, "buck": 2068, "my_class_inst": 2068, "imported_myclass": 2068, "okai": 2068, "torch_package_0": 2068, "handle_me_this_wai": 2068, "inadvert": 2068, "pun": 2068, "packagingerror": 2068, "dependency_graph": 2068, "emptymatcherror": 2068, "allow_empti": 2068, "_sysimport": 2068, "hermet": 2068, "scan": 2068, "orderedimport": 2068, "add_depend": 2068, "graphviz": 2068, "lang": 2068, "denied_modul": 2068, "my_subpackag": 2068, "digraph": 2068, "externed_modul": 2068, "interned_modul": 2068, "mocked_modul": 2068, "register_extern_hook": 2068, "register_intern_hook": 2068, "register_mock_hook": 2068, "myobject": 2068, "save_source_fil": 2068, "file_or_directori": 2068, "my_subsubpackag": 2068, "file_or_buff": 2068, "module_allow": 2068, "pytorchfileread": 2068, "python_vers": 2068, "is_dir": 2068, 
"_kinetoprofil": 2069, "execution_trace_observ": 2069, "profileract": 2069, "export_memory_timelin": 2069, "executiontraceobserv": 2069, "add_metadata": 2069, "add_metadata_json": 2069, "unaggreg": 2069, "suffix": [2069, 2085, 2086], "png": 2069, "gzip": 2069, "numbyt": 2069, "increment_vers": 2069, "_memory_profil": 2069, "export_stack": 2069, "self_cuda_time_tot": 2069, "preset_metadata_json": 2069, "preset": 2069, "on_trace_readi": 2069, "record_and_sav": 2069, "tensorboard_trace_handl": 2069, "dir_nam": 2069, "logdir": [2069, 2085], "plugin": [2069, 2085, 2097], "code_to_profil": 2069, "row_limit": 2069, "trace_handl": 2069, "test_trace_": 2069, "step_num": 2069, "code_iteration_to_profil": 2069, "register_callback": 2069, "execution_trac": 2069, "test_execution_trace_with_kineto": 2069, "test_profil": 2069, "_itraceobserv": 2069, "skip_first": 2069, "worker_nam": [2069, 2075], "use_gzip": 2069, "range_push": 2069, "range_pop": 2069, "4x": 2070, "88": [2070, 2105], "14k": 2070, "domin": 2070, "previous_layer_fp32": 2070, "linear_fp32": 2070, "activation_fp32": 2070, "next_layer_fp32": 2070, "linear_weight_fp32": 2070, "linear_int8_w_fp32_inp": 2070, "linear_weight_int8": 2070, "ptdq": 2070, "fc": 2070, "model_fp32": 2070, "model_int8": 2070, "quantize_dynam": 2070, "input_fp32": 2070, "previous_layer_int8": 2070, "linear_with_activation_int8": 2070, "next_layer_int8": 2070, "ptsq": 2070, "minmax": 2070, "l2norm": 2070, "model_fp32_fus": 2070, "fuse_modul": [2070, 2071], "model_fp32_prepar": 2070, "fq": 2070, "prepare_qat": 2070, "training_loop": 2070, "requant": 2070, "linear1": 2070, "custom_qconfig": 2070, "fxptq": 2070, "model_fp": 2070, "usermodel": 2070, "model_to_quant": 2070, "default_dynamic_qconfig": 2070, "model_prepar": 2070, "model_quant": 2070, "model_fus": 2070, "quantize_pt2": 2070, "prepare_pt2": 2070, "_export": [2070, 2093], "capture_pre_autograd_graph": 2070, "xnnpackquant": 2070, "get_symmetric_quantization_config": 2070, "prepare_qat_pt2": 2070, "convert_pt2": 2070, "per_tensor_symmetr": [2070, 2073], "per_channel_symmetr": [2070, 2073], "per_channel_scal": 2070, "per_channel_zero_point": 2070, "quantized_tensor": 2070, "qengin": 2070, "in4": 2070, "tensorrt": [2070, 2092, 2097, 2102], "fx2trt": 2070, "float_modul": [2070, 2090], "staticquantcustommodul": 2070, "observed_modul": 2070, "default_qconfig": [2070, 2091], "vnni": 2070, "test_quantized_op": 2070, "testquantizedop": 2070, "test_custom_module_lstm": 2070, "test_quantize_fx": 2070, "testquantizefx": 2070, "test_static_lstm": 2070, "some_oper": 2070, "e2": 2070, "thnn_conv2d_forward": 2070, "quantizedcpu": 2070, "some_qconfig": 2070, "linearpackedparam": 2070, "_modul": 2070, "prepare_orig": 2070, "quantized_orig": 2070, "scripted_quant": 2070, "fp32_op": 2071, "int8_op": 2071, "cooperlak": 2071, "audit": 2071, "op_fp32": 2071, "op_int8": 2071, "_numeric_suit": 2071, "_numeric_suite_fx": 2071, "0x7ff62a2b2bc0": 2072, "0x7ff603a55700": 2072, "0x7ff603a55790": 2072, "num_tensor_args_to_observation_typ": 2072, "convbn1d": 2072, "0x7ff5fc3ba550": 2072, "reference_quantized_module_for_root": 2072, "fuse_convtranspose_bn": 2072, "0x7ff5fc3ba700": 2072, "linearbn1d": 2072, "fuse_linear_bn": 2072, "0x7ff5fc3ba670": 2072, "convbn2d": 2072, "convbn3d": 2072, "bnrelu2d": 2072, "bnrelu3d": 2072, "input_type_to_index": 2072, "conv_fus": 2072, "convbnrelu1d": 2072, "convbnrelu2d": 2072, "convbnrelu3d": 2072, "convrelu1d": 2072, "convrelu3d": 2072, "0x7ff603a55820": 2072, "0x7ff603a55e50": 2072, "quint4x2": [2072, 2082, 
2086, 2087], "embedding_op": 2072, "0x7ff603a59dc0": 2072, "00390625": 2072, "0x7ff603a59670": 2072, "0x7ff603a59820": 2072, "0x7ff603a55d30": 2072, "0x7ff603a59c10": 2072, "0x7ff603798430": 2072, "0x7ff603a59ca0": 2072, "0x7ff603a59040": 2072, "linear_fus": 2072, "_sequential_wrapper2": 2072, "0x7ff631501430": 2072, "0x7ff603a55c10": 2072, "0x7ff5fb6ef820": 2072, "fuse_conv_bn_relu": 2072, "0x7ff5fc3ba5e0": 2072, "0x7ff5fb6ef8b0": 2072, "0x7ff5fb6ef940": 2072, "0x7ff5fb6ef9d0": 2072, "0x7ff5fb6efa60": 2072, "0x7ff5fb6efaf0": 2072, "0x7ff5fb6efb80": 2072, "0x7ff5fb6efc10": 2072, "0x7ff5fb6efca0": 2072, "0x7ff5fb6efd30": 2072, "0x7ff5fb6efdc0": 2072, "0x7ff603a55dc0": 2072, "0078125": 2072, "customconfig": 2073, "custom_module_config": 2073, "_caller": 2074, "_devices_kw": 2074, "slowli": 2074, "unind": 2074, "shortcom": 2075, "stitch": 2075, "init_rpc": [2075, 2076], "rpc_backend_opt": 2075, "trainer3": 2075, "parameterserver2": 2075, "backendtyp": 2075, "rpcbackendopt": 2075, "rpcagent": 2075, "transmit": 2075, "calle": [2075, 2077], "_set_rpc_timeout": 2075, "worker0": 2075, "my_script_add": 2075, "wire": 2075, "fut2": 2075, "meth": 2075, "grace": 2075, "userrref": [2075, 2077], "async_execut": 2075, "paus": 2075, "outmost": 2075, "async_add_chain": 2075, "worker2": 2075, "script_add": 2075, "async_add": 2075, "asyncexecutionclass": 2075, "static_async_add": 2075, "class_async_add": 2075, "ret_fut": 2075, "bound_async_add": 2075, "rpc_timeout": 2075, "incid": [2075, 2077], "nvlink": 2075, "multiplex": 2075, "tensorpiperpcbackendopt": 2075, "num_worker_thread": 2075, "device_map": 2075, "_transport": 2075, "tensorpipeag": 2075, "set_device_map": 2075, "intermitt": 2075, "backoff": 2075, "pyrref": 2075, "type_hint": 2075, "_distributed_rpc": 2075, "dist_autograd_ctx_id": 2075, "ctx_id": 2075, "ownerrref": [2075, 2077], "remote_modul": 2075, "forward_async": 2075, "remote_devic": 2075, "workernam": 2075, "ps0": 2075, "remote_linear_modul": 2075, "get_module_rref": 2075, "remote_paramet": 2075, "my_add": [2076, 2112], "t4": 2076, "t5": 2076, "autograd_message_id": 2076, "autograd_context_id": 2076, "send1": 2076, "kickoff": 2076, "recv2": 2076, "heard": 2076, "send2": 2076, "recv1": 2076, "dist_autograd_simpl": 2076, "random_tensor": 2076, "_run_process": 2076, "dst_rank": 2076, "dst_name": 2076, "run_process": 2076, "rrefid": 2077, "transient": 2077, "udf": 2077, "deliveri": 2077, "knowledg": 2077, "danger": 2077, "ancestor": 2077, "trickier": 2077, "forkid": 2077, "ack": 2077, "solid": 2077, "followup": 2077, "lil": 2080, "stark": 2080, "9093": 2080, "1411": 2080, "7568": 2080, "9589": 2080, "2794": 2080, "catastroph": 2080, "9900": 2080, "metadata_mask": 2080, "rce": 2080, "rc": 2080, "62": 2080, "to_sparse_semi_structur": 2080, "1x4": 2080, "16x16": 2080, "a_spars": 2080, "sparsesemistructuredtensor": 2080, "000": 2080, "400": 2080, "s2": 2080, "plain_dim_s": 2080, "lp64": 2080, "280": 2080, "310": 2080, "sp": 2080, "9078": 2080, "conception": 2080, "sparsesemistructur": 2080, "lobpcg": 2080, "geneig": 2080, "pca_lowrank": 2080, "kindli": 2080, "airy_ai": 2081, "airi": 2081, "9635": 2081, "entr": 2081, "3466": 2081, "int_": 2081, "8427": 2081, "0561": 2081, "4769": 2081, "9213": 2081, "8858": 2081, "7683": 2081, "7481": 2081, "2920": 2081, "int_0": 2081, "gammaln": 2081, "a1": 2081, "a2": 2081, "3528": 2081, "5665": 2081, "6472": 2081, "4335": 2081, "2650": 2081, "2661": 2081, "2796": 2081, "8808": 2081, "3019": 2081, "4658": 2081, "3085": 2081, "2430": 2081, "2070": 2081, "i1": 2081, 
"5652": 2081, "9534": 2081, "7595": 2081, "2153": 2081, "log_ndtr": 2081, "_ndtr": 2081, "6077": 2081, "7832": 2081, "841": 2081, "6931": 2081, "1728": 2081, "023": 2081, "9331": 2081, "6486": 2081, "1523": 2081, "6516": 2081, "6352": 2081, "6131": 2081, "7169": 2081, "6261": 2081, "displaystyl": 2081, "undefiend": 2081, "6835": 2081, "8474": 2081, "1929": 2081, "7162": 2081, "4180": 2081, "3928": 2081, "4007": 2081, "7586": 2081, "3901": 2081, "5049": 2081, "ndtr": 2081, "0228": 2081, "1587": 2081, "9772": 2081, "9987": 2081, "2p": 2081, "64493": 2081, "4041": 2081, "8288": 2081, "4939": 2081, "4091": 2081, "8863": 2081, "771": 2081, "scaled_modified_bessel_k0": 2081, "scaled_modified_bessel_k1": 2081, "2948": 2081, "0267": 2081, "1566": 2081, "9186": 2081, "8631": 2081, "0259": 2081, "1300": 2081, "spheric": 2081, "xlog1pi": 2081, "3863": 2081, "1972": 2081, "6094": 2081, "2189": 2081, "8283": 2081, "7726": 2081, "0986": 2081, "1589": 2081, "hurwitz": 2081, "6449": 2081, "0823": 2081, "wrap_storag": 2082, "complex_doubl": 2082, "from_buff": 2082, "is_hpu": 2082, "pickle_storage_typ": 2082, "byteswap": 2082, "posix": 2082, "shm_unlink": 2082, "unlink": 2082, "quint2x4": [2082, 2087], "twelv": 2083, "halftensor": [2083, 2086], "bfloat16tensor": [2083, 2086], "chartensor": [2083, 2086], "shorttensor": [2083, 2086], "binary16": [2083, 2086], "significand": [2083, 2086], "float_tensor": 2083, "double_tensor": 2083, "complex_float_tensor": 2083, "complex_double_tensor": 2083, "int_tensor": 2083, "long_tensor": 2083, "uint_tensor": 2083, "bool_tensor": 2083, "long_zerodim": 2083, "int_zerodim": 2083, "cuda1": 2083, "channels_last_3d": 2083, "ndhwc": 2083, "blogpost": [2084, 2099], "totensor": 2085, "trainset": 2085, "mnist": 2085, "mnist_train": 2085, "trainload": 2085, "grayscal": 2085, "make_grid": 2085, "add_imag": 2085, "add_graph": 2085, "clutter": 2085, "n_iter": 2085, "purge_step": 2085, "max_queu": 2085, "flush_sec": 2085, "filename_suffix": 2085, "current_datetime_hostnam": 2085, "exp1": 2085, "global_step": 2085, "purg": 2085, "event_file_writ": 2085, "eventfilewrit": 2085, "may04_22": 2085, "54_": 2085, "macbook": 2085, "my_experi": 2085, "lr_0": 2085, "1_batch_16": 2085, "locallr_0": 2085, "scalar_valu": 2085, "walltim": 2085, "new_styl": 2085, "double_precis": 2085, "blobnam": 2085, "simple_valu": 2085, "main_tag": 2085, "tag_scalar_dict": 2085, "run_14h": 2085, "xsinx": 2085, "xcosx": 2085, "tanx": 2085, "add_histogram": 2085, "max_bin": 2085, "img_tensor": 2085, "dataformat": 2085, "chw": 2085, "hwc": 2085, "hw": 2085, "wh": 2085, "3xhxw": 2085, "img_hwc": 2085, "my_imag": 2085, "my_image_hwc": 2085, "img_batch": 2085, "my_image_batch": 2085, "add_figur": 2085, "add_video": 2085, "vid_tensor": 2085, "fp": 2085, "moviepi": 2085, "add_audio": 2085, "snd_tensor": 2085, "sample_r": 2085, "44100": 2085, "add_text": 2085, "text_str": 2085, "input_to_model": 2085, "use_strict_trac": 2085, "fed": 2085, "add_embed": 2085, "label_img": 2085, "metadata_head": 2085, "projector": 2085, "kwlist": 2085, "add_pr_curv": 2085, "num_threshold": 2085, "pr_curv": 2085, "add_custom_scalar": 2085, "chart": 2085, "categorynam": 2085, "chartnam": 2085, "listofproperti": 2085, "multilin": 2085, "taiwan": 2085, "twse": 2085, "0050": 2085, "2330": 2085, "dow": 2085, "aaa": 2085, "bbb": 2085, "ccc": 2085, "nasdaq": 2085, "add_mesh": 2085, "config_dict": 2085, "threej": 2085, "vertex": 2085, "number_of_vertic": 2085, "vertices_tensor": 2085, "colors_tensor": 2085, "faces_tensor": 2085, "my_mesh": 2085, 
"add_hparam": 2085, "hparam_dict": 2085, "metric_dict": 2085, "hparam_domain_discret": 2085, "run_nam": 2085, "hparam": 2085, "bsize": 2085, "uint16": [2086, 2102], "uint32": [2086, 2102], "uint64": [2086, 2102], "e4m3": 2086, "e5m2": 2086, "asid": 2086, "58734": 2086, "2209": 2086, "05433": 2086, "tini": [2086, 2087, 2105, 2116], "_like": 2086, "coercion": 2086, "allow_subclass": 2087, "check_devic": 2087, "check_layout": 2087, "6e": 2087, "3e": 2087, "assert_equ": 2087, "000000000000001e": 2087, "1e0": 2087, "argh": 2087, "nfooter": 2087, "66": 2087, "footer": 2087, "exclude_zero": 2087, "1205": 2087, "2282": 2087, "6380": 2087, "default_gener": 2089, "click": [2089, 2108, 2109], "is_integ": 2089, "data_dependent_output": 2089, "dynamic_output_shap": 2089, "inplace_view": 2089, "needs_fixed_stride_ord": 2089, "nondeterministic_bitwis": 2089, "nondeterministic_seed": 2089, "pt2_compliant_tag": 2089, "compare_weight": 2090, "float_dict": 2090, "quantized_dict": 2090, "wt_compare_dict": 2090, "qmodel": 2090, "compute_error": 2090, "weight_dict": 2090, "get_logger_dict": 2090, "shadowlogg": 2090, "outputlogg": [2090, 2091], "target_dict": 2090, "q_modul": 2090, "logger_cl": [2090, 2091], "prepare_model_with_stub": 2090, "module_swap_list": 2090, "q_model": 2090, "ob_dict": 2090, "compare_model_stub": 2090, "quantizablebasicblock": 2090, "get_matching_activ": 2090, "act_dict": 2090, "prepare_model_output": 2090, "compare_model_output": 2090, "act_compare_dict": 2090, "weight_comparison": 2091, "extract_weight": 2091, "sqnr": 2091, "extend_logger_results_with_comparison": 2091, "compute_sqnr": 2091, "mp_n": 2091, "mq_n": 2091, "add_logg": 2091, "act_comparison": 2091, "extract_logger_info": 2091, "mp_shadows_mq": 2091, "add_shadow_logg": 2091, "shadow_act_comparison": 2091, "extract_shadow_logger_info": 2091, "ref_node_nam": 2091, "prev_node_nam": 2091, "model_nam": 2091, "ref_nam": 2091, "prev_node_target_typ": 2091, "ref_node_target_typ": 2091, "results_typ": 2091, "index_within_arg": 2091, "index_of_arg": 2091, "qconfig_str": 2091, "outputcomparisonlogg": 2091, "x_ref": 2091, "nstracer": 2091, "skipped_module_nam": 2091, "skipped_module_class": 2091, "model_name_a": 2091, "model_a": 2091, "model_name_b": 2091, "model_b": 2091, "base_name_to_sets_of_related_op": 2091, "unmatchable_types_map": 2091, "op_to_type_to_weight_extraction_fn": 2091, "unmatch": 2091, "nsresultstyp": 2091, "name_a": 2091, "name_b": 2091, "should_log_input": 2091, "model_a_with_logg": 2091, "model_b_with_logg": 2091, "model_name_to_use_for_layer_nam": 2091, "node_type_to_io_type_map": 2091, "model_a_shadows_b": 2091, "model_name_1": 2091, "model_name_2": 2091, "comparison_fn": 2091, "comparison_nam": 2091, "prepare_n_shadows_model": 2091, "qconfig_multi_map": 2091, "custom_prepare_fn": 2091, "custom_prepare_kwarg": 2091, "custom_trac": 2091, "args_kwargs_m": 2091, "op_m": 2091, "output_m": 2091, "op_m_n": 2091, "log_m_n": 2091, "log_m_0": 2091, "qconfig_n": 2091, "args_m": 2091, "op_m_prepared_with_qconfig_n": 2091, "out_m_n": 2091, "kwargs_m": 2091, "docblock": 2091, "loggers_set_en": 2091, "loggers_set_save_activ": 2091, "save_activ": 2091, "convert_n_shadows_model": 2091, "custom_convert_fn": 2091, "custom_convert_kwarg": 2091, "extract_results_n_shadows_model": 2091, "print_comparisons_n_shadows_model": 2091, "compute_normalized_l2_error": 2091, "compute_cosine_similar": 2091, "surfac": 2092, "openai": 2092, "ipex": 2092, "torch_tensorrt": 2092, "tvm": 2092, "apach": 2092, "openvino": 2092, "aotinductor": 2092, 
"dashboard": [2092, 2100, 2105], "nnmodul": 2092, "craft": 2093, "aot_compil": 2093, "torchinductor_freez": 2093, "batch_dim": 2093, "so_path": 2093, "aot_inductor": 2093, "output_path": 2093, "getcwd": 2093, "model_container_runner_cuda": 2093, "model_container_runner_cpu": 2093, "aotimodelcontainerrunnercuda": 2093, "aotimodelcontainerrunnercpu": 2093, "kcuda": 2093, "kcpu": 2093, "aoti_runn": 2093, "runner": 2093, "inputs2": 2093, "cmakelist": 2093, "aoti_exampl": 2093, "cmake_minimum_requir": 2093, "fatal_error": 2093, "find_packag": 2093, "add_execut": 2093, "add_custom_command": 2093, "cmake_current_source_dir": 2093, "target_link_librari": 2093, "set_properti": 2093, "cxx_standard": 2093, "cmake_prefix_path": 2093, "mkdir": 2093, "5184": 2093, "4462": 2093, "4611": 2093, "4744": 2093, "4811": 2093, "4938": 2093, "4193": 2093, "cudafloattyp": 2093, "4883": 2093, "4703": 2093, "simd": 2095, "isa": 2095, "amx": 2095, "collect_env": 2095, "avx512f": 2095, "avx512bw": 2095, "avx512_vnni": 2095, "amx_til": 2095, "amx_bf16": 2095, "amx_int8": 2095, "debut": 2096, "cachingalloc": 2096, "cudagraph_tre": 2096, "unintend": 2096, "prematur": 2096, "mark_step_begin": 2096, "my_custom_backend": 2097, "f_opt": 2097, "my_compil": [2097, 2100], "torch_dynamo_backend": 2097, "your_modul": 2097, "minifi": [2097, 2102], "aot_autograd": 2097, "fw_compil": 2097, "bw_compil": 2097, "make_boxed_func": 2097, "model_opt": 2097, "0x7f1a894649a8": 2097, "mockmodul": 2097, "optimized_mod": 2097, "toy_exampl": [2097, 2100, 2102, 2111], "abs_1": [2097, 2100], "0x7f8d259298a0": 2097, "superior": 2097, "optimize_for_inference_compil": 2097, "code_to_acceler": 2097, "lookup_backend": 2097, "trt_compil": 2097, "inductor_compil": 2097, "recognit": 2098, "induct": 2098, "mark_dynam": [2098, 2099], "shapeenv": [2098, 2101], "reusabl": 2098, "plumb": 2098, "symnodeimpl": 2098, "python_symnod": 2098, "_meta_registr": 2098, "decomp": [2098, 2101], "primtorch": [2098, 2101], "apparatu": 2098, "constrain_rang": 2098, "wherebi": 2098, "blame": 2099, "insan": 2099, "backtrac": [2099, 2101, 2111], "blindli": 2099, "arduou": 2099, "mse": 2099, "l_x_": 2099, "l_y_": 2099, "l_n_": 2099, "sequel": 2099, "_convert_frame_assert": 2099, "variabletrack": 2099, "listvari": 2099, "constantvari": [2099, 2102, 2111], "tensorvari": [2099, 2102, 2111], "variablebuild": 2099, "_wrap": 2099, "userdefinedobjectvari": 2099, "sourcebuild": 2099, "load_glob": [2099, 2100], "torchingraphfunctionvari": 2099, "instructortranslatorbas": 2099, "symbolic_convert": [2099, 2111], "instructiontranslatorbas": 2099, "build_list": 2099, "inst": 2099, "popn": 2099, "argval": 2099, "mutable_loc": 2099, "mutableloc": 2099, "instructiontransl": 2099, "wrap_fx_proxi": 2099, "overkil": 2099, "___check_type_id": 2099, "94334122025024": 2099, "9433": 2099, "getitemsourc": 2099, "94439025877664": 2099, "94439025840192": 2099, "saw": 2099, "l_a_": [2099, 2100], "l_b_": [2099, 2100], "__compiled_fn_1": 2099, "check_tensor": [2099, 2100], "maybe_mark_dynam": 2099, "mark_stat": 2099, "symnodevari": 2099, "812": 2099, "django": 2099, "rust": 2099, "choke": 2099, "doctr_det_predictor": 2099, "cv2": 2099, "postprocess": 2099, "confess": 2099, "revisit": 2099, "__compiled_fn_0": [2099, 2100], "load_fast": [2099, 2100], "store_fast": [2099, 2100], "graph_out_0": 2099, "load_const": [2099, 2100], "binary_subscr": 2099, "__resume_at_14_1": 2099, "rot_two": 2099, "resume_in_fn": 2099, "__compiled_fn_2": 2099, "unpack_sequ": [2099, 2100], "l6": 2099, "l8": 2099, "l20": 2099, 
"l22": 2099, "hamper": 2099, "ride": 2099, "demystifi": 2099, "literatur": 2099, "eval_fram": [2099, 2100], "lingo": 2099, "interestingli": 2099, "523": 2100, "watch": 2100, "kaichao": 2100, "_dynamo_dynamic_indic": 2100, "utils_devic": 2100, "___skip_backend_check": 2100, "___current_backend": 2100, "___lookup_backend": 2100, "140355900538256": 2100, "dispatchkeyset": 2100, "backendselect": 2100, "adinplaceorview": 2100, "autogradcpu": 2100, "recaptur": 2100, "decompil": 2100, "depyf": 2100, "eval_with_kei": 2100, "0x7f9ca082f8a0": 2100, "load_method": 2100, "binary_add": 2100, "binary_true_divid": 2100, "compare_op": 2100, "pop_jump_if_fals": 2100, "binary_multipli": 2100, "__resume_at_30_1": 2100, "__resume_at_38_2": 2100, "__temp_1": 2100, "youkaichao": 2100, "__resume_at_": 2100, "jump_absolut": 2100, "resume_at": 2100, "_debug_get_cache_entry_list": 2100, "__code__": 2100, "codetyp": 2100, "innermost_fn": 2100, "cache_entri": 2100, "check_fn": 2100, "code_part": 2100, "___guarded_cod": 2100, "___check_global_st": 2100, "140215810860528": 2100, "___check_tensor": 2100, "tensor_check_nam": 2100, "co_freevar": 2100, "__closure__": 2100, "___is_grad_en": 2100, "___are_deterministic_algorithms_en": 2100, "___is_torch_function_en": 2100, "value_a": 2100, "value_b": 2100, "__self__": 2100, "compiled_exampl": 2100, "get_cache_entri": 2100, "recompile_and_add_another_cache_entri": 2100, "trash": 2101, "subclass_zoo": 2101, "bunch": 2101, "from_real_tensor": 2101, "fakeifi": 2101, "dispatch_devic": 2101, "ly": 2101, "derefer": 2101, "in_kernel_invocation_manag": 2101, "unwrap": 2101, "test_fake_tensor": 2101, "fake_mod": 2101, "fake_x": 2101, "fake_i": 2101, "fake_z": 2101, "_guard": 2101, "detect_fake_mod": 2101, "fake_arg": 2101, "maybe_disable_fake_tensor_mod": 2101, "nich": 2101, "faketensorprop": 2101, "fake_tensor_prop": 2101, "propagate_dont_convert_input": 2101, "fake_input": 2101, "real_tensor": 2101, "annoi": 2101, "somehow": 2101, "fakecopymod": 2101, "gave": 2101, "fakeif": 2101, "tension": 2101, "analys": 2101, "metaconvert": 2101, "die": 2101, "saroufim": 2102, "evalfram": 2102, "usercod": 2102, "rob": 2102, "diminish": 2102, "vast": 2102, "250k": 2102, "aitempl": 2102, "aot_eag": [2102, 2111], "compile_tim": [2102, 2109, 2111], "torch_compile_debug": [2102, 2104], "troubl": [2102, 2103, 2111], "compileprofil": [2102, 2111], "profiler_model": [2102, 2111], "traffic": 2102, "frozen_toy_exampl": 2102, "multiprocessor": 2102, "some_fun": [2102, 2111], "insurmount": [2102, 2111], "woo": [2102, 2111], "framesummari": [2102, 2111], "generic_jump": [2102, 2111], "torch_dynamo_resume_in_toy_example_at_5": [2102, 2111], "torchdynamo_dynamic_shap": 2102, "cv": 2102, "app": 2102, "unnecessarili": 2102, "cold": [2102, 2108], "visibli": 2102, "torchdynamo_repro_level": [2102, 2111], "bisect": [2102, 2111], "torchdynamo_repro_aft": [2102, 2111], "dramat": [2102, 2111], "allevi": 2102, "wrapper_fn": 2102, "my_fn": 2102, "pitfal": 2102, "_indices_from": 2102, "recarrai": 2102, "float128": 2102, "complex256": 2102, "esoter": 2102, "ufunc": 2102, "poly1d": 2102, "__array_wrap__": 2102, "ctype": 2102, "numpy_fn": 2102, "tweak": 2102, "wrap_numpi": 2102, "charg": 2102, "oop": 2102, "costli": 2102, "daunt": 2102, "diagnos": 2102, "pinpoint": 2102, "discern": 2102, "trace_numpi": 2102, "_numpi": 2102, "uncommon": 2102, "finer": 2102, "a_fn": [2102, 2103], "aa_fn": [2102, 2103], "ab_fn": [2102, 2103], "handel": 2103, "unblock": 2103, "nnthi": 2103, "nnnote": 2103, "screen": [2103, 2109], 
"is_dynamo_compil": 2103, "b_fn": 2103, "white": 2103, "new_fn": 2104, "famou": 2104, "crunch": 2104, "torchinductor_": 2104, "your_usernam": 2104, "triton_meta": 2104, "i32": 2104, "mutated_arg_nam": 2104, "instance_descriptor": 2104, "divisible_by_16": 2104, "equal_to_1": 2104, "triton_": [2104, 2109], "in_ptr0": 2104, "out_ptr0": 2104, "xnumel": 2104, "xblock": 2104, "tl": 2104, "constexpr": 2104, "xoffset": 2104, "program_id": 2104, "xindex": 2104, "xmask": 2104, "tmp0": 2104, "tmp1": 2104, "tmp2": 2104, "v0": 2104, "opt_model": 2104, "timm": [2104, 2108], "berttoken": 2104, "bertmodel": 2104, "uncas": 2104, "me": 2104, "encoded_input": 2104, "return_tensor": 2104, "trigonometri": 2104, "skim": 2104, "create_model": 2104, "resnext101_32x8d": 2104, "torchinductor_unique_kernel_nam": 2105, "triton_poi_fused_cat_155": 2105, "poi": 2105, "torchinductor_benchmark_kernel": 2105, "har": 2105, "torchinductor_max_autotun": 2105, "mixnet_l": 2105, "timm_model": 2105, "torchinductor_shunt": 2105, "qz": 2105, "cqz7hvhood7y3psp7fy6msjxsxyli7qiwiybizdwtjw6ffyq5wwd": 2105, "shunting314": 2105, "c2a4d8a28b00fcb5586d0e9d9bf77f9f": 2105, "48efc83b12ec3ead950052e4a0220b10": 2105, "compiled_module_profil": 2105, "browser": [2105, 2109], "zoom": [2105, 2109, 2113], "distort": [2105, 2109], "densenet121": 2105, "69": 2105, "cutlass": 2105, "57": 2105, "ff": 2105, "justifi": 2105, "triton_red_fus": 2105, "__native_batch_norm_legit_functional_16": 2105, "cjk2vm3446xrk7rth7hr6pun7xxo3dnzubwcn6ydrpifal4eykrz": 2105, "_adaptive_avg_pool2d_backward": 2106, "half_to_float": 2106, "no_stat": 2106, "start_step": 2106, "avg_pool2d_backward": 2106, "convolution_backward": 2106, "bias_siz": 2106, "output_mask": 2106, "scalar_mod": 2106, "tensor_mod": 2106, "embedding_dense_backward": 2106, "num_weight": 2106, "max_pool2d_with_indices_backward": 2106, "native_group_norm_backward": 2106, "rstd": 2106, "native_layer_norm_backward": 2106, "tensor_scalar": 2106, "tensor_tensor": 2106, "dim_int": 2106, "dim_intlist": 2106, "broadcast_dimens": 2106, "start_indic": 2106, "limit_indic": 2106, "start_index": 2106, "limit_index": 2106, "outer_length": 2106, "constabl": 2107, "_forward_pre_hook": 2107, "_backward_pre_hook": 2107, "_backward_hook": 2107, "_state_dict_hook": 2107, "load_": 2107, "avoiabl": 2107, "skip_nnmodule_hook_guard": 2107, "pre_backward": 2107, "warn_onc": 2107, "hui": 2108, "nightli": 2108, "night": 2108, "40gb": [2108, 2111], "2ghz": 2108, "torchbench": 2108, "trend": 2108, "droplist": 2108, "with_cudagraph": 2108, "toosl": 2109, "ncu": 2109, "model_c": 2109, "fwd_bwd": 2109, "scroll": 2109, "shortcut": 2109, "compiledfunctionbackward": 2109, "ac2g": 2109, "dropdown": 2109, "525": 2109, "_init_for_cuda_graph": 2109, "warmup_compil": 2109, "fn_c": 2109, "trace_compil": 2109, "meanwhil": 2109, "clue": 2109, "synthet": 2109, "modelwithbreak": 2109, "create_sequenti": 2109, "mod1": 2109, "mod2": 2109, "mod3": 2109, "mod4": 2109, "trace_break": 2109, "culaunchkernel": 2109, "cudalaunchkernel": 2109, "unique_kernel_nam": 2109, "sit": 2110, "replace_add_with_mul": 2110, "insert_relu_after_add": 2110, "new_relu_nod": 2110, "replaceaddwithmul": 2110, "transformed_graph_modul": 2110, "replaceaddwithmulsub": 2110, "mul_r": 2110, "removedetachpass": 2110, "args_map": 2110, "_schema": 2110, "kwarg_onli": 2110, "scalartotensorpass": 2110, "try_coerc": 2110, "replace_pattern": 2110, "replaced_pattern": 2110, "replace_pattern_with_filt": 2110, "replacedpattern": 2110, "passmanag": 2110, "pass_manag": 2110, "pm": 2110, 
"replace_add_with_div": 2110, "replace_div_with_mul": 2110, "run_checks_after_each_pass": 2110, "suppress_check_failur": 2110, "graph_module_out": 2110, "set_check": 2110, "check_div_target": 2110, "add_check": 2110, "subgraphmatch": 2110, "matcher_util": 2110, "match_output": 2110, "match_placehold": 2110, "remove_overlapping_match": 2110, "ignore_liter": 2110, "largemodel": 2110, "_bia": 2110, "large_model_graph": 2110, "patternmodel": 2110, "_weight_1": 2110, "_bias_1": 2110, "pattern_graph": 2110, "subgraph_match": 2110, "match_result": 2110, "internalmatch": 2110, "placeholder_nod": 2110, "returning_nod": 2110, "capabilitybasedpartition": 2110, "l34": 2110, "operator_support": 2110, "operatorsupportbas": 2110, "allows_single_node_partit": 2110, "non_compute_op": 2110, "_oper": 2110, "allowed_single_node_partition_op": 2110, "ll28c1": 2110, "l28c1": 2110, "is_node_support": 2110, "l150": 2110, "any_chain": 2110, "l164": 2110, "addmuloperatorsupport": 2110, "capability_partition": 2110, "op_support": 2110, "partition_list": 2110, "propose_partit": 2110, "fused_graph_modul": 2110, "fuse_partit": 2110, "lazo": 2111, "torchdynamo_verbos": 2111, "replay_record_en": 2111, "torchdynamo_debug_funct": 2111, "test_assertion_error": 2111, "compiled_test_assertion_error": 2111, "convert_fram": 2111, "mlazo": 2111, "837": 2111, "build_map": 2111, "log_level": 2111, "thousand": 2111, "test_backend_error": 2111, "compiled_test_backend_error": 2111, "decomp_fn": 2111, "810": 2111, "repro_aft": 2111, "minifier_launch": 2111, "base_dir": 2111, "rand_strid": 2111, "0a0": 2111, "gitfddfc44": 2111, "fddfc4488afb207971c54ad4bf58130fdc8a4dc5": 2111, "2022": 2111, "thu_feb_10_18": 2111, "41_pst_2022": 2111, "v11": 2111, "cuda_11": 2111, "r11": 2111, "30978841_0": 2111, "sxm4": 2111, "compile_fx": 2111, "compile_fx_inn": 2111, "toy_compil": 2111, "debug_util": 2111, "run_fwd_maybe_bwd": 2111, "opt_mod": 2111, "rg": 2111, "test_model": 2111, "debug_dir_root": 2111, "torch_compile_debug_dir": 2111, "run_2023_03_01_08_20_52_143510": 2111, "pid_180167": 2111, "model__0_forward_1": 2111, "aot_model___0_debug": 2111, "fx_graph_read": 2111, "fx_graph_runn": 2111, "fx_graph_transform": 2111, "ir_post_fus": 2111, "ir_pre_fus": 2111, "fx_graph": 2111, "buf1": 2111, "schedulernod": 2111, "computedbuff": 2111, "memorydep": 2111, "unmet_depend": 2111, "buf0": 2111, "met_depend": 2111, "primals_2": 2111, "buf1_loop_bodi": 2111, "var_rang": 2111, "z0": 2111, "index0": 2111, "index1": 2111, "get_index": 2111, "get_index_1": 2111, "load_1": 2111, "get_index_2": 2111, "compiled_fun": 2111, "hinder": 2111, "explanation_verbos": 2111, "out_guard": 2111, "ops_per_graph": 2111, "compiled_toi": 2111, "torchdynamo_extended_debug_guard_ad": 2111, "torchdynamo_extended_debug_create_symbol": 2111, "torchdynamo_extended_debug_cpp": 2111, "torchinductor_force_disable_cach": 2111, "force_disable_cach": 2111, "as_subclass": 2112, "handle_torch_funct": 2112, "public_api": 2112, "relevant_arg": 2112, "has_torch_function_unari": 2112, "is_tensor_lik": 2112, "notatensor": 2112, "tensorlik": 2112, "is_tensor_method_or_properti": 2112, "__get__": 2112, "__module__": 2112, "wrap_torch_funct": 2112, "drag": 2113, "interactiv": 2113, "memory_viz": 2113, "run_your_cod": 2113, "my_snapshot": 2113, "javascript": 2113, "upload": 2113, "pan": 2113, "mous": 2113, "slider": 2113, "b7f064c000000_0": 2113, "7f064c000000": 2113, "max_entri": 2113, "_memory_viz": 2113, "2u": 2113, "50n": 2113, "currenli": 2113, "typeddict": 2113, "device_trac": 2113, 
"traceentri": 2113, "total_s": 2113, "segment_typ": 2113, "allocated_s": 2113, "active_s": 2113, "active_awaiting_fre": 2113, "requested_s": 2113, "active_alloc": 2113, "took": 2113, "free_request": 2113, "free_complet": 2113, "segment_alloc": 2113, "segment_fre": 2113, "coorel": 2113, "device_fre": 2113, "dump_snapshot": 2113, "interplai": 2114, "torch_nccl_high_prior": 2115, "torch_nccl_dump_on_timeout": 2115, "torch_nccl_trace_buffer_s": 2115, "torch_nccl_desync_debug": 2115, "desync": 2115, "culprit": 2115, "torch_nccl_enable_tim": 2115, "torch_nccl_enable_monitor": 2115, "torch_nccl_heartbeat_timeout_sec": 2115, "prolong": 2115, "flight": 2115, "ring": 2115, "tracebuff": 2115, "torch_nccl_wait_timeout_dump_milsec": 2115, "torch_nccl_debug_info_temp_fil": 2115, "torch_nccl_debug_info_pipe_fil": 2115, "torch_nccl_nan_check": 2115, "smallest_norm": 2116, "subnorm": 2116, "denormal_numb": 2116, "tailor": 2118}, "objects": {"": [[2089, 0, 0, "-", "torch"], [2013, 7, 1, "-", "PYTORCH_JIT"]], "torch": [[2082, 1, 1, "", "BFloat16Storage"], [2082, 1, 1, "", "BoolStorage"], [2082, 1, 1, "", "ByteStorage"], [2082, 1, 1, "", "CharStorage"], [2082, 1, 1, "", "ComplexDoubleStorage"], [2082, 1, 1, "", "ComplexFloatStorage"], [2082, 1, 1, "", "DoubleStorage"], [2082, 1, 1, "", "FloatStorage"], [90, 1, 1, "", "Generator"], [2082, 1, 1, "", "HalfStorage"], [2082, 1, 1, "", "IntStorage"], [2082, 1, 1, "", "LongStorage"], [2082, 1, 1, "", "QInt32Storage"], [2082, 1, 1, "", "QInt8Storage"], [2082, 1, 1, "", "QUInt2x4Storage"], [2082, 1, 1, "", "QUInt4x2Storage"], [2082, 1, 1, "", "QUInt8Storage"], [2082, 1, 1, "", "ShortStorage"], [2079, 1, 1, "", "Size"], [2089, 1, 1, "", "SymBool"], [2089, 1, 1, "", "SymFloat"], [2089, 1, 1, "", "SymInt"], [2089, 1, 1, "", "Tag"], [2086, 1, 1, "", "Tensor"], [2082, 1, 1, "", "TypedStorage"], [2082, 1, 1, "", "UntypedStorage"], [13, 0, 0, "-", "__config__"], [62, 0, 0, "-", "__future__"], [626, 5, 1, "", "_assert"], [627, 5, 1, "", "_foreach_abs"], [628, 5, 1, "", "_foreach_abs_"], [629, 5, 1, "", "_foreach_acos"], [630, 5, 1, "", "_foreach_acos_"], [631, 5, 1, "", "_foreach_asin"], [632, 5, 1, "", "_foreach_asin_"], [633, 5, 1, "", "_foreach_atan"], [634, 5, 1, "", "_foreach_atan_"], [635, 5, 1, "", "_foreach_ceil"], [636, 5, 1, "", "_foreach_ceil_"], [637, 5, 1, "", "_foreach_cos"], [638, 5, 1, "", "_foreach_cos_"], [639, 5, 1, "", "_foreach_cosh"], [640, 5, 1, "", "_foreach_cosh_"], [641, 5, 1, "", "_foreach_erf"], [642, 5, 1, "", "_foreach_erf_"], [643, 5, 1, "", "_foreach_erfc"], [644, 5, 1, "", "_foreach_erfc_"], [645, 5, 1, "", "_foreach_exp"], [646, 5, 1, "", "_foreach_exp_"], [647, 5, 1, "", "_foreach_expm1"], [648, 5, 1, "", "_foreach_expm1_"], [649, 5, 1, "", "_foreach_floor"], [650, 5, 1, "", "_foreach_floor_"], [651, 5, 1, "", "_foreach_frac"], [652, 5, 1, "", "_foreach_frac_"], [653, 5, 1, "", "_foreach_lgamma"], [654, 5, 1, "", "_foreach_lgamma_"], [655, 5, 1, "", "_foreach_log"], [656, 5, 1, "", "_foreach_log10"], [657, 5, 1, "", "_foreach_log10_"], [658, 5, 1, "", "_foreach_log1p"], [659, 5, 1, "", "_foreach_log1p_"], [660, 5, 1, "", "_foreach_log2"], [661, 5, 1, "", "_foreach_log2_"], [662, 5, 1, "", "_foreach_log_"], [663, 5, 1, "", "_foreach_neg"], [664, 5, 1, "", "_foreach_neg_"], [665, 5, 1, "", "_foreach_reciprocal"], [666, 5, 1, "", "_foreach_reciprocal_"], [667, 5, 1, "", "_foreach_round"], [668, 5, 1, "", "_foreach_round_"], [669, 5, 1, "", "_foreach_sigmoid"], [670, 5, 1, "", "_foreach_sigmoid_"], [671, 5, 1, "", "_foreach_sin"], [672, 5, 1, 
"", "_foreach_sin_"], [673, 5, 1, "", "_foreach_sinh"], [674, 5, 1, "", "_foreach_sinh_"], [675, 5, 1, "", "_foreach_sqrt"], [676, 5, 1, "", "_foreach_sqrt_"], [677, 5, 1, "", "_foreach_tan"], [678, 5, 1, "", "_foreach_tan_"], [679, 5, 1, "", "_foreach_trunc"], [680, 5, 1, "", "_foreach_trunc_"], [681, 5, 1, "", "_foreach_zero_"], [2022, 0, 0, "-", "_logging"], [683, 5, 1, "", "abs"], [684, 5, 1, "", "absolute"], [685, 5, 1, "", "acos"], [686, 5, 1, "", "acosh"], [687, 5, 1, "", "add"], [688, 5, 1, "", "addbmm"], [689, 5, 1, "", "addcdiv"], [690, 5, 1, "", "addcmul"], [691, 5, 1, "", "addmm"], [692, 5, 1, "", "addmv"], [693, 5, 1, "", "addr"], [694, 5, 1, "", "adjoint"], [695, 5, 1, "", "all"], [696, 5, 1, "", "allclose"], [697, 5, 1, "", "amax"], [698, 5, 1, "", "amin"], [699, 5, 1, "", "aminmax"], [0, 0, 0, "-", "amp"], [700, 5, 1, "", "angle"], [701, 5, 1, "", "any"], [2070, 0, 0, "-", "ao"], [868, 5, 1, "", "arange"], [869, 5, 1, "", "arccos"], [870, 5, 1, "", "arccosh"], [871, 5, 1, "", "arcsin"], [872, 5, 1, "", "arcsinh"], [873, 5, 1, "", "arctan"], [874, 5, 1, "", "arctan2"], [875, 5, 1, "", "arctanh"], [876, 5, 1, "", "are_deterministic_algorithms_enabled"], [877, 5, 1, "", "argmax"], [878, 5, 1, "", "argmin"], [879, 5, 1, "", "argsort"], [880, 5, 1, "", "argwhere"], [881, 5, 1, "", "as_strided"], [882, 5, 1, "", "as_tensor"], [883, 5, 1, "", "asarray"], [884, 5, 1, "", "asin"], [885, 5, 1, "", "asinh"], [886, 5, 1, "", "atan"], [887, 5, 1, "", "atan2"], [888, 5, 1, "", "atanh"], [889, 5, 1, "", "atleast_1d"], [890, 5, 1, "", "atleast_2d"], [891, 5, 1, "", "atleast_3d"], [0, 1, 1, "", "autocast"], [1, 0, 0, "-", "autograd"], [2, 0, 0, "-", "backends"], [943, 5, 1, "", "baddbmm"], [944, 5, 1, "", "bartlett_window"], [945, 5, 1, "", "bernoulli"], [946, 5, 1, "", "bincount"], [947, 5, 1, "", "bitwise_and"], [948, 5, 1, "", "bitwise_left_shift"], [949, 5, 1, "", "bitwise_not"], [950, 5, 1, "", "bitwise_or"], [951, 5, 1, "", "bitwise_right_shift"], [952, 5, 1, "", "bitwise_xor"], [953, 5, 1, "", "blackman_window"], [954, 5, 1, "", "block_diag"], [955, 5, 1, "", "bmm"], [956, 5, 1, "", "broadcast_shapes"], [957, 5, 1, "", "broadcast_tensors"], [958, 5, 1, "", "broadcast_to"], [959, 5, 1, "", "bucketize"], [960, 5, 1, "", "can_cast"], [961, 5, 1, "", "cartesian_prod"], [962, 5, 1, "", "cat"], [963, 5, 1, "", "cdist"], [964, 5, 1, "", "ceil"], [965, 5, 1, "", "chain_matmul"], [966, 5, 1, "", "cholesky"], [967, 5, 1, "", "cholesky_inverse"], [968, 5, 1, "", "cholesky_solve"], [969, 5, 1, "", "chunk"], [970, 5, 1, "", "clamp"], [971, 5, 1, "", "clip"], [972, 5, 1, "", "clone"], [973, 5, 1, "", "column_stack"], [974, 5, 1, "", "combinations"], [975, 5, 1, "", "compile"], [976, 5, 1, "", "compiled_with_cxx11_abi"], [2094, 0, 0, "-", "compiler"], [986, 5, 1, "", "complex"], [987, 5, 1, "", "concat"], [988, 5, 1, "", "concatenate"], [989, 5, 1, "", "cond"], [990, 5, 1, "", "conj"], [991, 5, 1, "", "conj_physical"], [2089, 0, 0, "-", "contrib"], [992, 5, 1, "", "copysign"], [993, 5, 1, "", "corrcoef"], [994, 5, 1, "", "cos"], [995, 5, 1, "", "cosh"], [996, 5, 1, "", "count_nonzero"], [997, 5, 1, "", "cov"], [16, 0, 0, "-", "cpu"], [1007, 5, 1, "", "cross"], [17, 0, 0, "-", "cuda"], [1087, 5, 1, "", "cummax"], [1088, 5, 1, "", "cummin"], [1089, 5, 1, "", "cumprod"], [1090, 5, 1, "", "cumsum"], [1091, 5, 1, "", "cumulative_trapezoid"], [1092, 5, 1, "", "deg2rad"], [1093, 5, 1, "", "dequantize"], [1094, 5, 1, "", "det"], [2083, 1, 1, "", "device"], [1095, 5, 1, "", "diag"], [1096, 5, 1, "", 
"diag_embed"], [1097, 5, 1, "", "diagflat"], [1098, 5, 1, "", "diagonal"], [1099, 5, 1, "", "diagonal_scatter"], [1100, 5, 1, "", "diff"], [1101, 5, 1, "", "digamma"], [1102, 5, 1, "", "dist"], [28, 0, 0, "-", "distributed"], [35, 0, 0, "-", "distributions"], [1103, 5, 1, "", "div"], [1104, 5, 1, "", "divide"], [1105, 5, 1, "", "dot"], [1106, 5, 1, "", "dsplit"], [1107, 5, 1, "", "dstack"], [2083, 1, 1, "", "dtype"], [1108, 5, 1, "", "einsum"], [1109, 5, 1, "", "empty"], [1110, 5, 1, "", "empty_like"], [1111, 5, 1, "", "empty_strided"], [1112, 1, 1, "", "enable_grad"], [1113, 5, 1, "", "eq"], [1114, 5, 1, "", "equal"], [1115, 5, 1, "", "erf"], [1116, 5, 1, "", "erfc"], [1117, 5, 1, "", "erfinv"], [1118, 5, 1, "", "exp"], [1119, 5, 1, "", "exp2"], [1120, 5, 1, "", "expm1"], [52, 0, 0, "-", "export"], [1121, 5, 1, "", "eye"], [1122, 5, 1, "", "fake_quantize_per_channel_affine"], [1123, 5, 1, "", "fake_quantize_per_tensor_affine"], [54, 0, 0, "-", "fft"], [1146, 5, 1, "", "fix"], [1147, 5, 1, "", "flatten"], [1148, 5, 1, "", "flip"], [1149, 5, 1, "", "fliplr"], [1150, 5, 1, "", "flipud"], [1151, 5, 1, "", "float_power"], [1152, 5, 1, "", "floor"], [1153, 5, 1, "", "floor_divide"], [1154, 5, 1, "", "fmax"], [1155, 5, 1, "", "fmin"], [1156, 5, 1, "", "fmod"], [1157, 5, 1, "", "frac"], [1158, 5, 1, "", "frexp"], [1159, 5, 1, "", "from_dlpack"], [1160, 5, 1, "", "from_file"], [1161, 5, 1, "", "from_numpy"], [1162, 5, 1, "", "frombuffer"], [1163, 5, 1, "", "full"], [1164, 5, 1, "", "full_like"], [57, 0, 0, "-", "func"], [2089, 0, 0, "-", "functional"], [63, 0, 0, "-", "futures"], [64, 0, 0, "-", "fx"], [1213, 5, 1, "", "gather"], [1214, 5, 1, "", "gcd"], [1215, 5, 1, "", "ge"], [1216, 5, 1, "", "geqrf"], [1217, 5, 1, "", "ger"], [1218, 5, 1, "", "get_default_device"], [1219, 5, 1, "", "get_default_dtype"], [1220, 5, 1, "", "get_deterministic_debug_mode"], [1221, 5, 1, "", "get_device_module"], [1222, 5, 1, "", "get_float32_matmul_precision"], [1223, 5, 1, "", "get_num_interop_threads"], [1224, 5, 1, "", "get_num_threads"], [1225, 5, 1, "", "get_rng_state"], [1226, 5, 1, "", "gradient"], [1227, 5, 1, "", "greater"], [1228, 5, 1, "", "greater_equal"], [1229, 5, 1, "", "gt"], [1230, 5, 1, "", "hamming_window"], [1231, 5, 1, "", "hann_window"], [1232, 5, 1, "", "heaviside"], [1233, 5, 1, "", "histc"], [1234, 5, 1, "", "histogram"], [1235, 5, 1, "", "histogramdd"], [1236, 5, 1, "", "hsplit"], [1237, 5, 1, "", "hspmm"], [1238, 5, 1, "", "hstack"], [2011, 0, 0, "-", "hub"], [1239, 5, 1, "", "hypot"], [1240, 5, 1, "", "i0"], [1241, 5, 1, "", "igamma"], [1242, 5, 1, "", "igammac"], [1243, 5, 1, "", "imag"], [1244, 5, 1, "", "index_add"], [1245, 5, 1, "", "index_copy"], [1246, 5, 1, "", "index_reduce"], [1247, 5, 1, "", "index_select"], [1248, 5, 1, "", "initial_seed"], [1249, 5, 1, "", "inner"], [1250, 5, 1, "", "inverse"], [1251, 5, 1, "", "is_complex"], [1252, 5, 1, "", "is_conj"], [1253, 5, 1, "", "is_deterministic_algorithms_warn_only_enabled"], [1254, 5, 1, "", "is_floating_point"], [1255, 5, 1, "", "is_grad_enabled"], [1256, 5, 1, "", "is_inference_mode_enabled"], [1257, 5, 1, "", "is_nonzero"], [1258, 5, 1, "", "is_storage"], [1259, 5, 1, "", "is_tensor"], [1260, 5, 1, "", "is_warn_always_enabled"], [1261, 5, 1, "", "isclose"], [1262, 5, 1, "", "isfinite"], [1263, 5, 1, "", "isin"], [1264, 5, 1, "", "isinf"], [1265, 5, 1, "", "isnan"], [1266, 5, 1, "", "isneginf"], [1267, 5, 1, "", "isposinf"], [1268, 5, 1, "", "isreal"], [1269, 5, 1, "", "istft"], [2013, 0, 0, "-", "jit"], [1292, 5, 1, "", 
"kaiser_window"], [1293, 5, 1, "", "kron"], [1294, 5, 1, "", "kthvalue"], [2083, 1, 1, "", "layout"], [1295, 5, 1, "", "lcm"], [1296, 5, 1, "", "ldexp"], [1297, 5, 1, "", "le"], [1298, 5, 1, "", "lerp"], [1299, 5, 1, "", "less"], [1300, 5, 1, "", "less_equal"], [1301, 5, 1, "", "lgamma"], [2020, 0, 0, "-", "library"], [2021, 0, 0, "-", "linalg"], [1343, 5, 1, "", "linspace"], [1344, 5, 1, "", "load"], [1345, 5, 1, "", "lobpcg"], [1346, 5, 1, "", "log"], [1347, 5, 1, "", "log10"], [1348, 5, 1, "", "log1p"], [1349, 5, 1, "", "log2"], [1350, 5, 1, "", "logaddexp"], [1351, 5, 1, "", "logaddexp2"], [1352, 5, 1, "", "logcumsumexp"], [1353, 5, 1, "", "logdet"], [1354, 5, 1, "", "logical_and"], [1355, 5, 1, "", "logical_not"], [1356, 5, 1, "", "logical_or"], [1357, 5, 1, "", "logical_xor"], [1358, 5, 1, "", "logit"], [1359, 5, 1, "", "logspace"], [1360, 5, 1, "", "logsumexp"], [1361, 5, 1, "", "lt"], [1362, 5, 1, "", "lu"], [1363, 5, 1, "", "lu_solve"], [1364, 5, 1, "", "lu_unpack"], [1365, 5, 1, "", "manual_seed"], [2023, 0, 0, "-", "masked"], [1366, 5, 1, "", "masked_select"], [1367, 5, 1, "", "matmul"], [1368, 5, 1, "", "matrix_exp"], [1369, 5, 1, "", "matrix_power"], [1370, 5, 1, "", "max"], [1371, 5, 1, "", "maximum"], [1372, 5, 1, "", "mean"], [1373, 5, 1, "", "median"], [2083, 1, 1, "", "memory_format"], [1374, 5, 1, "", "meshgrid"], [1375, 5, 1, "", "min"], [1376, 5, 1, "", "minimum"], [1377, 5, 1, "", "mm"], [1378, 5, 1, "", "mode"], [2029, 0, 0, "-", "monitor"], [1379, 5, 1, "", "moveaxis"], [1380, 5, 1, "", "movedim"], [2030, 0, 0, "-", "mps"], [1395, 5, 1, "", "msort"], [2031, 0, 0, "-", "mtia"], [1411, 5, 1, "", "mul"], [1412, 5, 1, "", "multinomial"], [1413, 5, 1, "", "multiply"], [2032, 0, 0, "-", "multiprocessing"], [1414, 5, 1, "", "mv"], [1415, 5, 1, "", "mvlgamma"], [1416, 5, 1, "", "nan_to_num"], [1417, 5, 1, "", "nanmean"], [1418, 5, 1, "", "nanmedian"], [1419, 5, 1, "", "nanquantile"], [1420, 5, 1, "", "nansum"], [1421, 5, 1, "", "narrow"], [1422, 5, 1, "", "narrow_copy"], [1423, 5, 1, "", "ne"], [1424, 5, 1, "", "neg"], [1425, 5, 1, "", "negative"], [2035, 0, 0, "-", "nested"], [1426, 5, 1, "", "nextafter"], [2036, 0, 0, "-", "nn"], [1769, 1, 1, "", "no_grad"], [1770, 5, 1, "", "nonzero"], [1771, 5, 1, "", "norm"], [1772, 5, 1, "", "normal"], [1773, 5, 1, "", "not_equal"], [1774, 5, 1, "", "numel"], [1775, 5, 1, "", "ones"], [1776, 5, 1, "", "ones_like"], [2065, 0, 0, "-", "onnx"], [2067, 0, 0, "-", "optim"], [1813, 5, 1, "", "orgqr"], [1814, 5, 1, "", "ormqr"], [1815, 5, 1, "", "outer"], [2112, 0, 0, "-", "overrides"], [2068, 0, 0, "-", "package"], [1816, 5, 1, "", "pca_lowrank"], [1817, 5, 1, "", "permute"], [1818, 5, 1, "", "pinverse"], [1819, 5, 1, "", "poisson"], [1820, 5, 1, "", "polar"], [1821, 5, 1, "", "polygamma"], [1822, 5, 1, "", "positive"], [1823, 5, 1, "", "pow"], [1824, 5, 1, "", "prod"], [2069, 0, 0, "-", "profiler"], [1825, 5, 1, "", "promote_types"], [1826, 5, 1, "", "qr"], [1827, 5, 1, "", "quantile"], [2073, 0, 0, "-", "quantization"], [1828, 5, 1, "", "quantize_per_channel"], [1829, 5, 1, "", "quantize_per_tensor"], [1830, 5, 1, "", "quantized_batch_norm"], [1831, 5, 1, "", "quantized_max_pool1d"], [1832, 5, 1, "", "quantized_max_pool2d"], [2089, 0, 0, "-", "quasirandom"], [1834, 5, 1, "", "rad2deg"], [1835, 5, 1, "", "rand"], [1836, 5, 1, "", "rand_like"], [1837, 5, 1, "", "randint"], [1838, 5, 1, "", "randint_like"], [1839, 5, 1, "", "randn"], [1840, 5, 1, "", "randn_like"], [2074, 0, 0, "-", "random"], [1841, 5, 1, "", "randperm"], [1842, 5, 1, "", 
"range"], [1843, 5, 1, "", "ravel"], [1844, 5, 1, "", "real"], [1845, 5, 1, "", "reciprocal"], [1846, 5, 1, "", "remainder"], [1847, 5, 1, "", "renorm"], [1848, 5, 1, "", "repeat_interleave"], [1849, 5, 1, "", "reshape"], [1850, 5, 1, "", "resolve_conj"], [1851, 5, 1, "", "resolve_neg"], [1852, 5, 1, "", "result_type"], [2089, 0, 0, "-", "return_types"], [1853, 5, 1, "", "roll"], [1854, 5, 1, "", "rot90"], [1855, 5, 1, "", "round"], [1856, 5, 1, "", "row_stack"], [1857, 5, 1, "", "rsqrt"], [1858, 5, 1, "", "save"], [1859, 5, 1, "", "scatter"], [1860, 5, 1, "", "scatter_add"], [1861, 5, 1, "", "scatter_reduce"], [1862, 5, 1, "", "searchsorted"], [1863, 5, 1, "", "seed"], [1864, 5, 1, "", "select"], [1865, 5, 1, "", "select_scatter"], [2089, 0, 0, "-", "serialization"], [1866, 5, 1, "", "set_default_device"], [1867, 5, 1, "", "set_default_dtype"], [1868, 5, 1, "", "set_default_tensor_type"], [1869, 5, 1, "", "set_deterministic_debug_mode"], [1870, 5, 1, "", "set_float32_matmul_precision"], [1871, 5, 1, "", "set_flush_denormal"], [1872, 5, 1, "", "set_num_interop_threads"], [1873, 5, 1, "", "set_num_threads"], [1874, 5, 1, "", "set_printoptions"], [1875, 5, 1, "", "set_rng_state"], [1876, 5, 1, "", "set_warn_always"], [1877, 5, 1, "", "sgn"], [1878, 5, 1, "", "sigmoid"], [1879, 5, 1, "", "sign"], [2078, 0, 0, "-", "signal"], [1891, 5, 1, "", "signbit"], [1892, 5, 1, "", "sin"], [1893, 5, 1, "", "sinc"], [1894, 5, 1, "", "sinh"], [1895, 5, 1, "", "slice_scatter"], [1896, 5, 1, "", "slogdet"], [1897, 5, 1, "", "smm"], [1898, 5, 1, "", "softmax"], [1899, 5, 1, "", "sort"], [2080, 0, 0, "-", "sparse"], [1909, 5, 1, "", "sparse_bsc_tensor"], [1910, 5, 1, "", "sparse_bsr_tensor"], [1911, 5, 1, "", "sparse_compressed_tensor"], [1912, 5, 1, "", "sparse_coo_tensor"], [1913, 5, 1, "", "sparse_csc_tensor"], [1914, 5, 1, "", "sparse_csr_tensor"], [2081, 0, 0, "-", "special"], [1915, 5, 1, "", "split"], [1916, 5, 1, "", "sqrt"], [1917, 5, 1, "", "square"], [1918, 5, 1, "", "squeeze"], [1919, 5, 1, "", "sspaddmm"], [1920, 5, 1, "", "stack"], [1921, 5, 1, "", "std"], [1922, 5, 1, "", "std_mean"], [1923, 5, 1, "", "stft"], [2089, 0, 0, "-", "storage"], [1924, 5, 1, "", "sub"], [1925, 5, 1, "", "subtract"], [1926, 5, 1, "", "sum"], [1927, 5, 1, "", "svd"], [1928, 5, 1, "", "svd_lowrank"], [1929, 5, 1, "", "swapaxes"], [1930, 5, 1, "", "swapdims"], [1931, 5, 1, "", "sym_float"], [1932, 5, 1, "", "sym_int"], [1933, 5, 1, "", "sym_ite"], [1934, 5, 1, "", "sym_max"], [1935, 5, 1, "", "sym_min"], [1936, 5, 1, "", "sym_not"], [1937, 5, 1, "", "t"], [1938, 5, 1, "", "take"], [1939, 5, 1, "", "take_along_dim"], [1940, 5, 1, "", "tan"], [1941, 5, 1, "", "tanh"], [1942, 5, 1, "", "tensor"], [1943, 5, 1, "", "tensor_split"], [1944, 5, 1, "", "tensordot"], [2087, 0, 0, "-", "testing"], [1945, 5, 1, "", "tile"], [1946, 5, 1, "", "topk"], [2089, 0, 0, "-", "torch_version"], [1947, 5, 1, "", "trace"], [1948, 5, 1, "", "transpose"], [1949, 5, 1, "", "trapezoid"], [1950, 5, 1, "", "trapz"], [1951, 5, 1, "", "triangular_solve"], [1952, 5, 1, "", "tril"], [1953, 5, 1, "", "tril_indices"], [1954, 5, 1, "", "triu"], [1955, 5, 1, "", "triu_indices"], [1956, 5, 1, "", "true_divide"], [1957, 5, 1, "", "trunc"], [2089, 0, 0, "-", "types"], [1958, 5, 1, "", "unbind"], [1959, 5, 1, "", "unflatten"], [1960, 5, 1, "", "unique"], [1961, 5, 1, "", "unique_consecutive"], [1962, 5, 1, "", "unravel_index"], [1963, 5, 1, "", "unsqueeze"], [1964, 5, 1, "", "use_deterministic_algorithms"], [2117, 0, 0, "-", "utils"], [1970, 5, 1, "", "vander"], 
[1971, 5, 1, "", "var"], [1972, 5, 1, "", "var_mean"], [1973, 5, 1, "", "vdot"], [2089, 0, 0, "-", "version"], [1974, 5, 1, "", "view_as_complex"], [1975, 5, 1, "", "view_as_real"], [1976, 5, 1, "", "vmap"], [1977, 5, 1, "", "vsplit"], [1978, 5, 1, "", "vstack"], [1979, 5, 1, "", "where"], [1980, 5, 1, "", "xlogy"], [2118, 0, 0, "-", "xpu"], [2009, 5, 1, "", "zeros"], [2010, 5, 1, "", "zeros_like"]], "torch.BFloat16Storage": [[2082, 2, 1, "", "dtype"]], "torch.BoolStorage": [[2082, 2, 1, "", "dtype"]], "torch.ByteStorage": [[2082, 2, 1, "", "dtype"]], "torch.CharStorage": [[2082, 2, 1, "", "dtype"]], "torch.ComplexDoubleStorage": [[2082, 2, 1, "", "dtype"]], "torch.ComplexFloatStorage": [[2082, 2, 1, "", "dtype"]], "torch.DoubleStorage": [[2082, 2, 1, "", "dtype"]], "torch.FloatStorage": [[2082, 2, 1, "", "dtype"]], "torch.Generator": [[90, 3, 1, "", "clone_state"], [90, 2, 1, "", "device"], [90, 3, 1, "", "get_state"], [90, 3, 1, "", "graphsafe_get_state"], [90, 3, 1, "", "graphsafe_set_state"], [90, 3, 1, "", "initial_seed"], [90, 3, 1, "", "manual_seed"], [90, 3, 1, "", "seed"], [90, 3, 1, "", "set_state"]], "torch.HalfStorage": [[2082, 2, 1, "", "dtype"]], "torch.IntStorage": [[2082, 2, 1, "", "dtype"]], "torch.LongStorage": [[2082, 2, 1, "", "dtype"]], "torch.QInt32Storage": [[2082, 2, 1, "", "dtype"]], "torch.QInt8Storage": [[2082, 2, 1, "", "dtype"]], "torch.QUInt2x4Storage": [[2082, 2, 1, "", "dtype"]], "torch.QUInt4x2Storage": [[2082, 2, 1, "", "dtype"]], "torch.QUInt8Storage": [[2082, 2, 1, "", "dtype"]], "torch.ShortStorage": [[2082, 2, 1, "", "dtype"]], "torch.Size": [[2079, 3, 1, "", "count"], [2079, 3, 1, "", "index"], [2079, 3, 1, "", "numel"]], "torch.SymFloat": [[2089, 3, 1, "", "is_integer"]], "torch.Tag": [[2089, 4, 1, "", "name"]], "torch.Tensor": [[2086, 2, 1, "", "H"], [2086, 2, 1, "", "T"], [2086, 3, 1, "", "__init__"], [91, 3, 1, "", "abs"], [92, 3, 1, "", "abs_"], [93, 3, 1, "", "absolute"], [94, 3, 1, "", "absolute_"], [95, 3, 1, "", "acos"], [96, 3, 1, "", "acos_"], [97, 3, 1, "", "acosh"], [98, 3, 1, "", "acosh_"], [99, 3, 1, "", "add"], [100, 3, 1, "", "add_"], [101, 3, 1, "", "addbmm"], [102, 3, 1, "", "addbmm_"], [103, 3, 1, "", "addcdiv"], [104, 3, 1, "", "addcdiv_"], [105, 3, 1, "", "addcmul"], [106, 3, 1, "", "addcmul_"], [107, 3, 1, "", "addmm"], [108, 3, 1, "", "addmm_"], [109, 3, 1, "", "addmv"], [110, 3, 1, "", "addmv_"], [111, 3, 1, "", "addr"], [112, 3, 1, "", "addr_"], [113, 3, 1, "", "adjoint"], [2034, 3, 1, "", "align_as"], [2034, 3, 1, "", "align_to"], [114, 3, 1, "", "all"], [115, 3, 1, "", "allclose"], [116, 3, 1, "", "amax"], [117, 3, 1, "", "amin"], [118, 3, 1, "", "aminmax"], [119, 3, 1, "", "angle"], [120, 3, 1, "", "any"], [121, 3, 1, "", "apply_"], [122, 3, 1, "", "arccos"], [123, 3, 1, "", "arccos_"], [124, 3, 1, "", "arccosh"], [125, 3, 1, "", "arccosh_"], [126, 3, 1, "", "arcsin"], [127, 3, 1, "", "arcsin_"], [128, 3, 1, "", "arcsinh"], [129, 3, 1, "", "arcsinh_"], [130, 3, 1, "", "arctan"], [131, 3, 1, "", "arctan2"], [132, 3, 1, "", "arctan2_"], [133, 3, 1, "", "arctan_"], [134, 3, 1, "", "arctanh"], [135, 3, 1, "", "arctanh_"], [136, 3, 1, "", "argmax"], [137, 3, 1, "", "argmin"], [138, 3, 1, "", "argsort"], [139, 3, 1, "", "argwhere"], [140, 3, 1, "", "as_strided"], [141, 3, 1, "", "as_subclass"], [142, 3, 1, "", "asin"], [143, 3, 1, "", "asin_"], [144, 3, 1, "", "asinh"], [145, 3, 1, "", "asinh_"], [146, 3, 1, "", "atan"], [147, 3, 1, "", "atan2"], [148, 3, 1, "", "atan2_"], [149, 3, 1, "", "atan_"], [150, 3, 1, "", "atanh"], 
[151, 3, 1, "", "atanh_"], [152, 3, 1, "", "backward"], [153, 3, 1, "", "baddbmm"], [154, 3, 1, "", "baddbmm_"], [155, 3, 1, "", "bernoulli"], [156, 3, 1, "", "bernoulli_"], [157, 3, 1, "", "bfloat16"], [158, 3, 1, "", "bincount"], [159, 3, 1, "", "bitwise_and"], [160, 3, 1, "", "bitwise_and_"], [161, 3, 1, "", "bitwise_left_shift"], [162, 3, 1, "", "bitwise_left_shift_"], [163, 3, 1, "", "bitwise_not"], [164, 3, 1, "", "bitwise_not_"], [165, 3, 1, "", "bitwise_or"], [166, 3, 1, "", "bitwise_or_"], [167, 3, 1, "", "bitwise_right_shift"], [168, 3, 1, "", "bitwise_right_shift_"], [169, 3, 1, "", "bitwise_xor"], [170, 3, 1, "", "bitwise_xor_"], [171, 3, 1, "", "bmm"], [172, 3, 1, "", "bool"], [173, 3, 1, "", "broadcast_to"], [174, 3, 1, "", "byte"], [175, 3, 1, "", "cauchy_"], [176, 3, 1, "", "ccol_indices"], [177, 3, 1, "", "cdouble"], [178, 3, 1, "", "ceil"], [179, 3, 1, "", "ceil_"], [180, 3, 1, "", "cfloat"], [181, 3, 1, "", "chalf"], [182, 3, 1, "", "char"], [183, 3, 1, "", "cholesky"], [184, 3, 1, "", "cholesky_inverse"], [185, 3, 1, "", "cholesky_solve"], [186, 3, 1, "", "chunk"], [187, 3, 1, "", "clamp"], [188, 3, 1, "", "clamp_"], [189, 3, 1, "", "clip"], [190, 3, 1, "", "clip_"], [191, 3, 1, "", "clone"], [192, 3, 1, "", "coalesce"], [193, 3, 1, "", "col_indices"], [194, 3, 1, "", "conj"], [195, 3, 1, "", "conj_physical"], [196, 3, 1, "", "conj_physical_"], [197, 3, 1, "", "contiguous"], [198, 3, 1, "", "copy_"], [199, 3, 1, "", "copysign"], [200, 3, 1, "", "copysign_"], [201, 3, 1, "", "corrcoef"], [202, 3, 1, "", "cos"], [203, 3, 1, "", "cos_"], [204, 3, 1, "", "cosh"], [205, 3, 1, "", "cosh_"], [206, 3, 1, "", "count_nonzero"], [207, 3, 1, "", "cov"], [208, 3, 1, "", "cpu"], [209, 3, 1, "", "cross"], [210, 3, 1, "", "crow_indices"], [211, 3, 1, "", "cuda"], [212, 3, 1, "", "cummax"], [213, 3, 1, "", "cummin"], [214, 3, 1, "", "cumprod"], [215, 3, 1, "", "cumprod_"], [216, 3, 1, "", "cumsum"], [217, 3, 1, "", "cumsum_"], [218, 3, 1, "", "data_ptr"], [219, 3, 1, "", "deg2rad"], [220, 3, 1, "", "dense_dim"], [221, 3, 1, "", "dequantize"], [222, 3, 1, "", "det"], [223, 3, 1, "", "detach"], [224, 3, 1, "", "detach_"], [225, 2, 1, "", "device"], [226, 3, 1, "", "diag"], [227, 3, 1, "", "diag_embed"], [228, 3, 1, "", "diagflat"], [229, 3, 1, "", "diagonal"], [230, 3, 1, "", "diagonal_scatter"], [231, 3, 1, "", "diff"], [232, 3, 1, "", "digamma"], [233, 3, 1, "", "digamma_"], [234, 3, 1, "", "dim"], [235, 3, 1, "", "dim_order"], [236, 3, 1, "", "dist"], [237, 3, 1, "", "div"], [238, 3, 1, "", "div_"], [239, 3, 1, "", "divide"], [240, 3, 1, "", "divide_"], [241, 3, 1, "", "dot"], [242, 3, 1, "", "double"], [243, 3, 1, "", "dsplit"], [244, 3, 1, "", "element_size"], [245, 3, 1, "", "eq"], [246, 3, 1, "", "eq_"], [247, 3, 1, "", "equal"], [248, 3, 1, "", "erf"], [249, 3, 1, "", "erf_"], [250, 3, 1, "", "erfc"], [251, 3, 1, "", "erfc_"], [252, 3, 1, "", "erfinv"], [253, 3, 1, "", "erfinv_"], [254, 3, 1, "", "exp"], [255, 3, 1, "", "exp_"], [256, 3, 1, "", "expand"], [257, 3, 1, "", "expand_as"], [258, 3, 1, "", "expm1"], [259, 3, 1, "", "expm1_"], [260, 3, 1, "", "exponential_"], [261, 3, 1, "", "fill_"], [262, 3, 1, "", "fill_diagonal_"], [263, 3, 1, "", "fix"], [264, 3, 1, "", "fix_"], [265, 3, 1, "", "flatten"], [266, 3, 1, "", "flip"], [267, 3, 1, "", "fliplr"], [268, 3, 1, "", "flipud"], [269, 3, 1, "", "float"], [270, 3, 1, "", "float_power"], [271, 3, 1, "", "float_power_"], [272, 3, 1, "", "floor"], [273, 3, 1, "", "floor_"], [274, 3, 1, "", "floor_divide"], [275, 3, 1, "", 
"floor_divide_"], [276, 3, 1, "", "fmax"], [277, 3, 1, "", "fmin"], [278, 3, 1, "", "fmod"], [279, 3, 1, "", "fmod_"], [280, 3, 1, "", "frac"], [281, 3, 1, "", "frac_"], [282, 3, 1, "", "frexp"], [283, 3, 1, "", "gather"], [284, 3, 1, "", "gcd"], [285, 3, 1, "", "gcd_"], [286, 3, 1, "", "ge"], [287, 3, 1, "", "ge_"], [288, 3, 1, "", "geometric_"], [289, 3, 1, "", "geqrf"], [290, 3, 1, "", "ger"], [291, 3, 1, "", "get_device"], [292, 2, 1, "", "grad"], [293, 3, 1, "", "greater"], [294, 3, 1, "", "greater_"], [295, 3, 1, "", "greater_equal"], [296, 3, 1, "", "greater_equal_"], [297, 3, 1, "", "gt"], [298, 3, 1, "", "gt_"], [299, 3, 1, "", "half"], [300, 3, 1, "", "hardshrink"], [301, 3, 1, "", "heaviside"], [302, 3, 1, "", "histc"], [303, 3, 1, "", "histogram"], [304, 3, 1, "", "hsplit"], [305, 3, 1, "", "hypot"], [306, 3, 1, "", "hypot_"], [307, 3, 1, "", "i0"], [308, 3, 1, "", "i0_"], [309, 3, 1, "", "igamma"], [310, 3, 1, "", "igamma_"], [311, 3, 1, "", "igammac"], [312, 3, 1, "", "igammac_"], [313, 2, 1, "", "imag"], [314, 3, 1, "", "index_add"], [315, 3, 1, "", "index_add_"], [316, 3, 1, "", "index_copy"], [317, 3, 1, "", "index_copy_"], [318, 3, 1, "", "index_fill"], [319, 3, 1, "", "index_fill_"], [320, 3, 1, "", "index_put"], [321, 3, 1, "", "index_put_"], [322, 3, 1, "", "index_reduce"], [323, 3, 1, "", "index_reduce_"], [324, 3, 1, "", "index_select"], [325, 3, 1, "", "indices"], [326, 3, 1, "", "inner"], [327, 3, 1, "", "int"], [328, 3, 1, "", "int_repr"], [329, 3, 1, "", "inverse"], [330, 3, 1, "", "is_coalesced"], [331, 3, 1, "", "is_complex"], [332, 3, 1, "", "is_conj"], [333, 3, 1, "", "is_contiguous"], [334, 2, 1, "", "is_cuda"], [335, 3, 1, "", "is_floating_point"], [336, 3, 1, "", "is_inference"], [337, 2, 1, "", "is_leaf"], [338, 2, 1, "", "is_meta"], [339, 3, 1, "", "is_pinned"], [340, 2, 1, "", "is_quantized"], [341, 3, 1, "", "is_set_to"], [342, 3, 1, "", "is_shared"], [343, 3, 1, "", "is_signed"], [344, 2, 1, "", "is_sparse"], [345, 2, 1, "", "is_sparse_csr"], [346, 3, 1, "", "isclose"], [347, 3, 1, "", "isfinite"], [348, 3, 1, "", "isinf"], [349, 3, 1, "", "isnan"], [350, 3, 1, "", "isneginf"], [351, 3, 1, "", "isposinf"], [352, 3, 1, "", "isreal"], [353, 3, 1, "", "istft"], [354, 3, 1, "", "item"], [355, 2, 1, "", "itemsize"], [356, 3, 1, "", "kthvalue"], [357, 3, 1, "", "lcm"], [358, 3, 1, "", "lcm_"], [359, 3, 1, "", "ldexp"], [360, 3, 1, "", "ldexp_"], [361, 3, 1, "", "le"], [362, 3, 1, "", "le_"], [363, 3, 1, "", "lerp"], [364, 3, 1, "", "lerp_"], [365, 3, 1, "", "less"], [366, 3, 1, "", "less_"], [367, 3, 1, "", "less_equal"], [368, 3, 1, "", "less_equal_"], [369, 3, 1, "", "lgamma"], [370, 3, 1, "", "lgamma_"], [371, 3, 1, "", "log"], [372, 3, 1, "", "log10"], [373, 3, 1, "", "log10_"], [374, 3, 1, "", "log1p"], [375, 3, 1, "", "log1p_"], [376, 3, 1, "", "log2"], [377, 3, 1, "", "log2_"], [378, 3, 1, "", "log_"], [379, 3, 1, "", "log_normal_"], [380, 3, 1, "", "logaddexp"], [381, 3, 1, "", "logaddexp2"], [382, 3, 1, "", "logcumsumexp"], [383, 3, 1, "", "logdet"], [384, 3, 1, "", "logical_and"], [385, 3, 1, "", "logical_and_"], [386, 3, 1, "", "logical_not"], [387, 3, 1, "", "logical_not_"], [388, 3, 1, "", "logical_or"], [389, 3, 1, "", "logical_or_"], [390, 3, 1, "", "logical_xor"], [391, 3, 1, "", "logical_xor_"], [392, 3, 1, "", "logit"], [393, 3, 1, "", "logit_"], [394, 3, 1, "", "logsumexp"], [395, 3, 1, "", "long"], [396, 3, 1, "", "lt"], [397, 3, 1, "", "lt_"], [398, 3, 1, "", "lu"], [399, 3, 1, "", "lu_solve"], [2086, 2, 1, "", "mH"], [2086, 2, 1, "", 
"mT"], [400, 3, 1, "", "map_"], [401, 3, 1, "", "masked_fill"], [402, 3, 1, "", "masked_fill_"], [403, 3, 1, "", "masked_scatter"], [404, 3, 1, "", "masked_scatter_"], [405, 3, 1, "", "masked_select"], [406, 3, 1, "", "matmul"], [407, 3, 1, "", "matrix_exp"], [408, 3, 1, "", "matrix_power"], [409, 3, 1, "", "max"], [410, 3, 1, "", "maximum"], [411, 3, 1, "", "mean"], [412, 3, 1, "", "median"], [413, 3, 1, "", "min"], [414, 3, 1, "", "minimum"], [415, 3, 1, "", "mm"], [416, 3, 1, "", "mode"], [417, 3, 1, "", "module_load"], [418, 3, 1, "", "moveaxis"], [419, 3, 1, "", "movedim"], [420, 3, 1, "", "msort"], [421, 3, 1, "", "mul"], [422, 3, 1, "", "mul_"], [423, 3, 1, "", "multinomial"], [424, 3, 1, "", "multiply"], [425, 3, 1, "", "multiply_"], [426, 3, 1, "", "mv"], [427, 3, 1, "", "mvlgamma"], [428, 3, 1, "", "mvlgamma_"], [2034, 2, 1, "", "names"], [429, 3, 1, "", "nan_to_num"], [430, 3, 1, "", "nan_to_num_"], [431, 3, 1, "", "nanmean"], [432, 3, 1, "", "nanmedian"], [433, 3, 1, "", "nanquantile"], [434, 3, 1, "", "nansum"], [435, 3, 1, "", "narrow"], [436, 3, 1, "", "narrow_copy"], [437, 2, 1, "", "nbytes"], [438, 2, 1, "", "ndim"], [439, 3, 1, "", "ndimension"], [440, 3, 1, "", "ne"], [441, 3, 1, "", "ne_"], [442, 3, 1, "", "neg"], [443, 3, 1, "", "neg_"], [444, 3, 1, "", "negative"], [445, 3, 1, "", "negative_"], [446, 3, 1, "", "nelement"], [447, 3, 1, "", "new_empty"], [448, 3, 1, "", "new_full"], [449, 3, 1, "", "new_ones"], [450, 3, 1, "", "new_tensor"], [451, 3, 1, "", "new_zeros"], [452, 3, 1, "", "nextafter"], [453, 3, 1, "", "nextafter_"], [454, 3, 1, "", "nonzero"], [455, 3, 1, "", "norm"], [456, 3, 1, "", "normal_"], [457, 3, 1, "", "not_equal"], [458, 3, 1, "", "not_equal_"], [459, 3, 1, "", "numel"], [460, 3, 1, "", "numpy"], [461, 3, 1, "", "orgqr"], [462, 3, 1, "", "ormqr"], [463, 3, 1, "", "outer"], [464, 3, 1, "", "permute"], [465, 3, 1, "", "pin_memory"], [466, 3, 1, "", "pinverse"], [467, 3, 1, "", "polygamma"], [468, 3, 1, "", "polygamma_"], [469, 3, 1, "", "positive"], [470, 3, 1, "", "pow"], [471, 3, 1, "", "pow_"], [472, 3, 1, "", "prod"], [473, 3, 1, "", "put_"], [474, 3, 1, "", "q_per_channel_axis"], [475, 3, 1, "", "q_per_channel_scales"], [476, 3, 1, "", "q_per_channel_zero_points"], [477, 3, 1, "", "q_scale"], [478, 3, 1, "", "q_zero_point"], [479, 3, 1, "", "qr"], [480, 3, 1, "", "qscheme"], [481, 3, 1, "", "quantile"], [482, 3, 1, "", "rad2deg"], [483, 3, 1, "", "random_"], [484, 3, 1, "", "ravel"], [485, 2, 1, "", "real"], [486, 3, 1, "", "reciprocal"], [487, 3, 1, "", "reciprocal_"], [488, 3, 1, "", "record_stream"], [2034, 3, 1, "", "refine_names"], [489, 3, 1, "", "register_hook"], [490, 3, 1, "", "register_post_accumulate_grad_hook"], [491, 3, 1, "", "remainder"], [492, 3, 1, "", "remainder_"], [2034, 3, 1, "", "rename"], [2034, 3, 1, "", "rename_"], [493, 3, 1, "", "renorm"], [494, 3, 1, "", "renorm_"], [495, 3, 1, "", "repeat"], [496, 3, 1, "", "repeat_interleave"], [497, 2, 1, "", "requires_grad"], [498, 3, 1, "", "requires_grad_"], [499, 3, 1, "", "reshape"], [500, 3, 1, "", "reshape_as"], [501, 3, 1, "", "resize_"], [502, 3, 1, "", "resize_as_"], [503, 3, 1, "", "resolve_conj"], [504, 3, 1, "", "resolve_neg"], [505, 3, 1, "", "retain_grad"], [506, 2, 1, "", "retains_grad"], [507, 3, 1, "", "roll"], [508, 3, 1, "", "rot90"], [509, 3, 1, "", "round"], [510, 3, 1, "", "round_"], [511, 3, 1, "", "row_indices"], [512, 3, 1, "", "rsqrt"], [513, 3, 1, "", "rsqrt_"], [514, 3, 1, "", "scatter"], [515, 3, 1, "", "scatter_"], [516, 3, 1, "", "scatter_add"], 
[517, 3, 1, "", "scatter_add_"], [518, 3, 1, "", "scatter_reduce"], [519, 3, 1, "", "scatter_reduce_"], [520, 3, 1, "", "select"], [521, 3, 1, "", "select_scatter"], [522, 3, 1, "", "set_"], [523, 3, 1, "", "sgn"], [524, 3, 1, "", "sgn_"], [525, 2, 1, "", "shape"], [526, 3, 1, "", "share_memory_"], [527, 3, 1, "", "short"], [528, 3, 1, "", "sigmoid"], [529, 3, 1, "", "sigmoid_"], [530, 3, 1, "", "sign"], [531, 3, 1, "", "sign_"], [532, 3, 1, "", "signbit"], [533, 3, 1, "", "sin"], [534, 3, 1, "", "sin_"], [535, 3, 1, "", "sinc"], [536, 3, 1, "", "sinc_"], [537, 3, 1, "", "sinh"], [538, 3, 1, "", "sinh_"], [539, 3, 1, "", "size"], [540, 3, 1, "", "slice_scatter"], [541, 3, 1, "", "slogdet"], [542, 3, 1, "", "smm"], [543, 3, 1, "", "softmax"], [544, 3, 1, "", "sort"], [545, 3, 1, "", "sparse_dim"], [546, 3, 1, "", "sparse_mask"], [547, 3, 1, "", "sparse_resize_"], [548, 3, 1, "", "sparse_resize_and_clear_"], [549, 3, 1, "", "split"], [550, 3, 1, "", "sqrt"], [551, 3, 1, "", "sqrt_"], [552, 3, 1, "", "square"], [553, 3, 1, "", "square_"], [554, 3, 1, "", "squeeze"], [555, 3, 1, "", "squeeze_"], [556, 3, 1, "", "sspaddmm"], [557, 3, 1, "", "std"], [558, 3, 1, "", "stft"], [559, 3, 1, "", "storage"], [560, 3, 1, "", "storage_offset"], [561, 3, 1, "", "storage_type"], [562, 3, 1, "", "stride"], [563, 3, 1, "", "sub"], [564, 3, 1, "", "sub_"], [565, 3, 1, "", "subtract"], [566, 3, 1, "", "subtract_"], [567, 3, 1, "", "sum"], [568, 3, 1, "", "sum_to_size"], [569, 3, 1, "", "svd"], [570, 3, 1, "", "swapaxes"], [571, 3, 1, "", "swapdims"], [572, 3, 1, "", "t"], [573, 3, 1, "", "t_"], [574, 3, 1, "", "take"], [575, 3, 1, "", "take_along_dim"], [576, 3, 1, "", "tan"], [577, 3, 1, "", "tan_"], [578, 3, 1, "", "tanh"], [579, 3, 1, "", "tanh_"], [580, 3, 1, "", "tensor_split"], [581, 3, 1, "", "tile"], [582, 3, 1, "", "to"], [583, 3, 1, "", "to_dense"], [584, 3, 1, "", "to_mkldnn"], [585, 3, 1, "", "to_sparse"], [586, 3, 1, "", "to_sparse_bsc"], [587, 3, 1, "", "to_sparse_bsr"], [588, 3, 1, "", "to_sparse_coo"], [589, 3, 1, "", "to_sparse_csc"], [590, 3, 1, "", "to_sparse_csr"], [591, 3, 1, "", "tolist"], [592, 3, 1, "", "topk"], [593, 3, 1, "", "trace"], [594, 3, 1, "", "transpose"], [595, 3, 1, "", "transpose_"], [596, 3, 1, "", "triangular_solve"], [597, 3, 1, "", "tril"], [598, 3, 1, "", "tril_"], [599, 3, 1, "", "triu"], [600, 3, 1, "", "triu_"], [601, 3, 1, "", "true_divide"], [602, 3, 1, "", "true_divide_"], [603, 3, 1, "", "trunc"], [604, 3, 1, "", "trunc_"], [605, 3, 1, "", "type"], [606, 3, 1, "", "type_as"], [607, 3, 1, "", "unbind"], [608, 3, 1, "", "unflatten"], [609, 3, 1, "", "unfold"], [610, 3, 1, "", "uniform_"], [611, 3, 1, "", "unique"], [612, 3, 1, "", "unique_consecutive"], [613, 3, 1, "", "unsqueeze"], [614, 3, 1, "", "unsqueeze_"], [615, 3, 1, "", "untyped_storage"], [616, 3, 1, "", "values"], [617, 3, 1, "", "var"], [618, 3, 1, "", "vdot"], [619, 3, 1, "", "view"], [620, 3, 1, "", "view_as"], [621, 3, 1, "", "vsplit"], [622, 3, 1, "", "where"], [623, 3, 1, "", "xlogy"], [624, 3, 1, "", "xlogy_"], [625, 3, 1, "", "zero_"]], "torch.TypedStorage": [[2082, 3, 1, "", "bfloat16"], [2082, 3, 1, "", "bool"], [2082, 3, 1, "", "byte"], [2082, 3, 1, "", "char"], [2082, 3, 1, "", "clone"], [2082, 3, 1, "", "complex_double"], [2082, 3, 1, "", "complex_float"], [2082, 3, 1, "", "copy_"], [2082, 3, 1, "", "cpu"], [2082, 3, 1, "", "cuda"], [2082, 3, 1, "", "data_ptr"], [2082, 4, 1, "", "device"], [2082, 3, 1, "", "double"], [2082, 2, 1, "", "dtype"], [2082, 3, 1, "", "element_size"], [2082, 4, 1, 
"", "filename"], [2082, 3, 1, "", "fill_"], [2082, 3, 1, "", "float"], [2082, 3, 1, "", "float8_e4m3fn"], [2082, 3, 1, "", "float8_e4m3fnuz"], [2082, 3, 1, "", "float8_e5m2"], [2082, 3, 1, "", "float8_e5m2fnuz"], [2082, 3, 1, "", "from_buffer"], [2082, 3, 1, "", "from_file"], [2082, 3, 1, "", "get_device"], [2082, 3, 1, "", "half"], [2082, 3, 1, "", "hpu"], [2082, 3, 1, "", "int"], [2082, 4, 1, "", "is_cuda"], [2082, 4, 1, "", "is_hpu"], [2082, 3, 1, "", "is_pinned"], [2082, 3, 1, "", "is_shared"], [2082, 2, 1, "", "is_sparse"], [2082, 3, 1, "", "long"], [2082, 3, 1, "", "nbytes"], [2082, 3, 1, "", "pickle_storage_type"], [2082, 3, 1, "", "pin_memory"], [2082, 3, 1, "", "resizable"], [2082, 3, 1, "", "resize_"], [2082, 3, 1, "", "share_memory_"], [2082, 3, 1, "", "short"], [2082, 3, 1, "", "size"], [2082, 3, 1, "", "to"], [2082, 3, 1, "", "tolist"], [2082, 3, 1, "", "type"], [2082, 3, 1, "", "untyped"]], "torch.UntypedStorage": [[2082, 3, 1, "", "bfloat16"], [2082, 3, 1, "", "bool"], [2082, 3, 1, "", "byte"], [2082, 3, 1, "", "byteswap"], [2082, 3, 1, "", "char"], [2082, 3, 1, "", "clone"], [2082, 3, 1, "", "complex_double"], [2082, 3, 1, "", "complex_float"], [2082, 3, 1, "", "copy_"], [2082, 3, 1, "", "cpu"], [2082, 3, 1, "", "cuda"], [2082, 3, 1, "", "data_ptr"], [2082, 2, 1, "", "device"], [2082, 3, 1, "", "double"], [2082, 3, 1, "", "element_size"], [2082, 4, 1, "", "filename"], [2082, 3, 1, "", "fill_"], [2082, 3, 1, "", "float"], [2082, 3, 1, "", "float8_e4m3fn"], [2082, 3, 1, "", "float8_e4m3fnuz"], [2082, 3, 1, "", "float8_e5m2"], [2082, 3, 1, "", "float8_e5m2fnuz"], [2082, 3, 1, "", "from_buffer"], [2082, 3, 1, "", "from_file"], [2082, 3, 1, "", "get_device"], [2082, 3, 1, "", "half"], [2082, 3, 1, "", "hpu"], [2082, 3, 1, "", "int"], [2082, 4, 1, "", "is_cuda"], [2082, 4, 1, "", "is_hpu"], [2082, 3, 1, "", "is_pinned"], [2082, 3, 1, "", "is_shared"], [2082, 2, 1, "", "is_sparse"], [2082, 2, 1, "", "is_sparse_csr"], [2082, 3, 1, "", "long"], [2082, 3, 1, "", "mps"], [2082, 3, 1, "", "nbytes"], [2082, 3, 1, "", "new"], [2082, 3, 1, "", "pin_memory"], [2082, 3, 1, "", "resizable"], [2082, 3, 1, "", "resize_"], [2082, 3, 1, "", "share_memory_"], [2082, 3, 1, "", "short"], [2082, 3, 1, "", "size"], [2082, 3, 1, "", "to"], [2082, 3, 1, "", "tolist"], [2082, 3, 1, "", "type"], [2082, 3, 1, "", "untyped"]], "torch.__config__": [[13, 5, 1, "", "parallel_info"], [13, 5, 1, "", "show"]], "torch.__future__": [[62, 5, 1, "", "get_overwrite_module_params_on_conversion"], [62, 5, 1, "", "get_swap_module_params_on_conversion"], [62, 5, 1, "", "set_overwrite_module_params_on_conversion"], [62, 5, 1, "", "set_swap_module_params_on_conversion"]], "torch._higher_order_ops.cond": [[12, 5, 1, "", "cond"]], "torch._logging": [[682, 5, 1, "", "set_logs"]], "torch.amp": [[0, 0, 0, "-", "autocast_mode"], [0, 5, 1, "", "custom_bwd"], [0, 5, 1, "", "custom_fwd"], [0, 0, 0, "-", "grad_scaler"]], "torch.amp.autocast_mode": [[0, 5, 1, "", "is_autocast_available"]], "torch.ao": [[2070, 0, 0, "-", "nn"], [2070, 0, 0, "-", "ns"], [2070, 0, 0, "-", "pruning"], [2070, 0, 0, "-", "quantization"]], "torch.ao.nn": [[2073, 0, 0, "-", "intrinsic"], [2073, 0, 0, "-", "qat"], [2070, 0, 0, "-", "quantizable"], [2070, 0, 0, "-", "quantized"], [2070, 0, 0, "-", "sparse"]], "torch.ao.nn.intrinsic": [[702, 1, 1, "", "BNReLU2d"], [703, 1, 1, "", "BNReLU3d"], [704, 1, 1, "", "ConvBn1d"], [705, 1, 1, "", "ConvBn2d"], [706, 1, 1, "", "ConvBn3d"], [707, 1, 1, "", "ConvBnReLU1d"], [708, 1, 1, "", "ConvBnReLU2d"], [709, 1, 1, "", 
"ConvBnReLU3d"], [710, 1, 1, "", "ConvReLU1d"], [711, 1, 1, "", "ConvReLU2d"], [712, 1, 1, "", "ConvReLU3d"], [713, 1, 1, "", "LinearReLU"], [2073, 0, 0, "-", "modules"], [2073, 0, 0, "-", "qat"], [2073, 0, 0, "-", "quantized"]], "torch.ao.nn.intrinsic.modules": [[2070, 0, 0, "-", "fused"]], "torch.ao.nn.intrinsic.qat": [[714, 1, 1, "", "ConvBn1d"], [715, 1, 1, "", "ConvBn2d"], [716, 1, 1, "", "ConvBn3d"], [717, 1, 1, "", "ConvBnReLU1d"], [718, 1, 1, "", "ConvBnReLU2d"], [719, 1, 1, "", "ConvBnReLU3d"], [720, 1, 1, "", "ConvReLU2d"], [721, 1, 1, "", "ConvReLU3d"], [722, 1, 1, "", "LinearReLU"], [723, 1, 1, "", "freeze_bn_stats"], [2073, 0, 0, "-", "modules"], [724, 1, 1, "", "update_bn_stats"]], "torch.ao.nn.intrinsic.qat.modules": [[2070, 0, 0, "-", "conv_fused"], [2070, 0, 0, "-", "linear_fused"], [2070, 0, 0, "-", "linear_relu"]], "torch.ao.nn.intrinsic.quantized": [[725, 1, 1, "", "BNReLU2d"], [726, 1, 1, "", "BNReLU3d"], [727, 1, 1, "", "ConvReLU1d"], [728, 1, 1, "", "ConvReLU2d"], [729, 1, 1, "", "ConvReLU3d"], [730, 1, 1, "", "LinearReLU"], [2073, 0, 0, "-", "dynamic"], [2073, 0, 0, "-", "modules"]], "torch.ao.nn.intrinsic.quantized.dynamic": [[731, 1, 1, "", "LinearReLU"], [2073, 0, 0, "-", "modules"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules": [[2070, 0, 0, "-", "linear_relu"]], "torch.ao.nn.intrinsic.quantized.modules": [[2070, 0, 0, "-", "bn_relu"], [2070, 0, 0, "-", "conv_add"], [2070, 0, 0, "-", "conv_relu"], [2070, 0, 0, "-", "linear_relu"]], "torch.ao.nn.qat": [[732, 1, 1, "", "Conv2d"], [733, 1, 1, "", "Conv3d"], [734, 1, 1, "", "Linear"], [2073, 0, 0, "-", "dynamic"], [2073, 0, 0, "-", "modules"]], "torch.ao.nn.qat.Linear": [[734, 3, 1, "", "from_float"]], "torch.ao.nn.qat.dynamic": [[735, 1, 1, "", "Linear"], [2073, 0, 0, "-", "modules"]], "torch.ao.nn.qat.dynamic.modules": [[2070, 0, 0, "-", "linear"]], "torch.ao.nn.qat.modules": [[2070, 0, 0, "-", "conv"], [2070, 0, 0, "-", "embedding_ops"], [2070, 0, 0, "-", "linear"]], "torch.ao.nn.quantizable": [[736, 1, 1, "", "LSTM"], [737, 1, 1, "", "MultiheadAttention"], [2070, 0, 0, "-", "modules"]], "torch.ao.nn.quantizable.MultiheadAttention": [[737, 3, 1, "", "dequantize"], [737, 3, 1, "", "forward"]], "torch.ao.nn.quantizable.modules": [[2070, 0, 0, "-", "activation"], [2070, 0, 0, "-", "rnn"]], "torch.ao.nn.quantized": [[738, 1, 1, "", "BatchNorm2d"], [739, 1, 1, "", "BatchNorm3d"], [740, 1, 1, "", "Conv1d"], [741, 1, 1, "", "Conv2d"], [742, 1, 1, "", "Conv3d"], [743, 1, 1, "", "ConvTranspose1d"], [744, 1, 1, "", "ConvTranspose2d"], [745, 1, 1, "", "ConvTranspose3d"], [746, 1, 1, "", "ELU"], [747, 1, 1, "", "Embedding"], [748, 1, 1, "", "EmbeddingBag"], [749, 1, 1, "", "FXFloatFunctional"], [750, 1, 1, "", "FloatFunctional"], [751, 1, 1, "", "GroupNorm"], [752, 1, 1, "", "Hardswish"], [753, 1, 1, "", "InstanceNorm1d"], [754, 1, 1, "", "InstanceNorm2d"], [755, 1, 1, "", "InstanceNorm3d"], [756, 1, 1, "", "LayerNorm"], [757, 1, 1, "", "LeakyReLU"], [758, 1, 1, "", "Linear"], [759, 1, 1, "", "QFunctional"], [760, 1, 1, "", "ReLU6"], [761, 1, 1, "", "Sigmoid"], [2073, 0, 0, "-", "dynamic"], [2073, 0, 0, "-", "functional"], [2073, 0, 0, "-", "modules"], [2070, 0, 0, "-", "reference"]], "torch.ao.nn.quantized.Conv1d": [[740, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.Conv2d": [[741, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.Conv3d": [[742, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.Embedding": [[747, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.EmbeddingBag": [[748, 3, 1, "", "from_float"]], 
"torch.ao.nn.quantized.Linear": [[758, 3, 1, "", "from_float"], [758, 3, 1, "", "from_reference"]], "torch.ao.nn.quantized.dynamic": [[762, 1, 1, "", "GRU"], [763, 1, 1, "", "GRUCell"], [764, 1, 1, "", "LSTM"], [765, 1, 1, "", "LSTMCell"], [766, 1, 1, "", "Linear"], [767, 1, 1, "", "RNNCell"], [2073, 0, 0, "-", "modules"]], "torch.ao.nn.quantized.dynamic.Linear": [[766, 3, 1, "", "from_float"], [766, 3, 1, "", "from_reference"]], "torch.ao.nn.quantized.dynamic.modules": [[2070, 0, 0, "-", "conv"], [2070, 0, 0, "-", "linear"], [2070, 0, 0, "-", "rnn"]], "torch.ao.nn.quantized.functional": [[768, 1, 1, "", "adaptive_avg_pool2d"], [769, 1, 1, "", "adaptive_avg_pool3d"], [770, 1, 1, "", "avg_pool2d"], [771, 1, 1, "", "avg_pool3d"], [772, 1, 1, "", "celu"], [773, 1, 1, "", "clamp"], [774, 1, 1, "", "conv1d"], [775, 1, 1, "", "conv2d"], [776, 1, 1, "", "conv3d"], [777, 1, 1, "", "elu"], [778, 1, 1, "", "hardsigmoid"], [779, 1, 1, "", "hardswish"], [780, 1, 1, "", "hardtanh"], [781, 1, 1, "", "interpolate"], [782, 1, 1, "", "leaky_relu"], [783, 1, 1, "", "linear"], [784, 1, 1, "", "max_pool1d"], [785, 1, 1, "", "max_pool2d"], [786, 1, 1, "", "threshold"], [787, 1, 1, "", "upsample"], [788, 1, 1, "", "upsample_bilinear"], [789, 1, 1, "", "upsample_nearest"]], "torch.ao.nn.quantized.modules": [[2070, 0, 0, "-", "activation"], [2070, 0, 0, "-", "batchnorm"], [2070, 0, 0, "-", "conv"], [2070, 0, 0, "-", "dropout"], [2070, 0, 0, "-", "embedding_ops"], [2070, 0, 0, "-", "functional_modules"], [2070, 0, 0, "-", "linear"], [2070, 0, 0, "-", "normalization"], [2070, 0, 0, "-", "rnn"], [2070, 0, 0, "-", "utils"]], "torch.ao.nn.quantized.reference": [[2070, 0, 0, "-", "modules"]], "torch.ao.nn.quantized.reference.modules": [[2070, 0, 0, "-", "conv"], [2070, 0, 0, "-", "linear"], [2070, 0, 0, "-", "rnn"], [2070, 0, 0, "-", "sparse"], [2070, 0, 0, "-", "utils"]], "torch.ao.nn.sparse": [[2070, 0, 0, "-", "quantized"]], "torch.ao.nn.sparse.quantized": [[2070, 0, 0, "-", "dynamic"], [2070, 0, 0, "-", "linear"], [2070, 0, 0, "-", "utils"]], "torch.ao.nn.sparse.quantized.dynamic": [[2070, 0, 0, "-", "linear"]], "torch.ao.ns": [[2090, 0, 0, "-", "_numeric_suite"], [2091, 0, 0, "-", "_numeric_suite_fx"], [2070, 0, 0, "-", "fx"]], "torch.ao.ns._numeric_suite": [[2090, 1, 1, "", "Logger"], [2090, 1, 1, "", "OutputLogger"], [2090, 1, 1, "", "Shadow"], [2090, 1, 1, "", "ShadowLogger"], [2090, 5, 1, "", "compare_model_outputs"], [2090, 5, 1, "", "compare_model_stub"], [2090, 5, 1, "", "compare_weights"], [2090, 5, 1, "", "get_logger_dict"], [2090, 5, 1, "", "get_matching_activations"], [2090, 5, 1, "", "prepare_model_outputs"], [2090, 5, 1, "", "prepare_model_with_stubs"]], "torch.ao.ns._numeric_suite.Logger": [[2090, 3, 1, "", "forward"]], "torch.ao.ns._numeric_suite.OutputLogger": [[2090, 3, 1, "", "forward"]], "torch.ao.ns._numeric_suite.Shadow": [[2090, 3, 1, "", "add"], [2090, 3, 1, "", "add_relu"], [2090, 3, 1, "", "add_scalar"], [2090, 3, 1, "", "cat"], [2090, 3, 1, "", "forward"], [2090, 3, 1, "", "mul"], [2090, 3, 1, "", "mul_scalar"]], "torch.ao.ns._numeric_suite.ShadowLogger": [[2090, 3, 1, "", "forward"]], "torch.ao.ns._numeric_suite_fx": [[2091, 1, 1, "", "NSTracer"], [2091, 1, 1, "", "OutputComparisonLogger"], [2091, 1, 1, "", "OutputLogger"], [2091, 5, 1, "", "add_loggers"], [2091, 5, 1, "", "add_shadow_loggers"], [2091, 5, 1, "", "convert_n_shadows_model"], [2091, 5, 1, "", "extend_logger_results_with_comparison"], [2091, 5, 1, "", "extract_logger_info"], [2091, 5, 1, "", 
"extract_results_n_shadows_model"], [2091, 5, 1, "", "extract_shadow_logger_info"], [2091, 5, 1, "", "extract_weights"], [2091, 5, 1, "", "loggers_set_enabled"], [2091, 5, 1, "", "loggers_set_save_activations"], [2091, 5, 1, "", "prepare_n_shadows_model"], [2091, 5, 1, "", "print_comparisons_n_shadows_model"]], "torch.ao.ns._numeric_suite_fx.NSTracer": [[2091, 3, 1, "", "is_leaf_module"]], "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger": [[2091, 3, 1, "", "forward"]], "torch.ao.ns._numeric_suite_fx.OutputLogger": [[2091, 3, 1, "", "forward"]], "torch.ao.ns.fx": [[2070, 0, 0, "-", "graph_matcher"], [2070, 0, 0, "-", "graph_passes"], [2070, 0, 0, "-", "mappings"], [2070, 0, 0, "-", "n_shadows_utils"], [2070, 0, 0, "-", "ns_types"], [2070, 0, 0, "-", "pattern_utils"], [2070, 0, 0, "-", "qconfig_multi_mapping"], [2070, 0, 0, "-", "utils"], [2070, 0, 0, "-", "weight_utils"]], "torch.ao.ns.fx.utils": [[2091, 5, 1, "", "compute_cosine_similarity"], [2091, 5, 1, "", "compute_normalized_l2_error"], [2091, 5, 1, "", "compute_sqnr"]], "torch.ao.pruning": [[2070, 0, 0, "-", "scheduler"], [2070, 0, 0, "-", "sparsifier"]], "torch.ao.pruning.scheduler": [[2070, 0, 0, "-", "base_scheduler"], [2070, 0, 0, "-", "cubic_scheduler"], [2070, 0, 0, "-", "lambda_scheduler"]], "torch.ao.pruning.sparsifier": [[2070, 0, 0, "-", "base_sparsifier"], [2070, 0, 0, "-", "nearly_diagonal_sparsifier"], [2070, 0, 0, "-", "utils"], [2070, 0, 0, "-", "weight_norm_sparsifier"]], "torch.ao.quantization": [[790, 1, 1, "", "DeQuantStub"], [791, 1, 1, "", "QuantStub"], [792, 1, 1, "", "QuantWrapper"], [793, 1, 1, "", "add_quant_dequant"], [2070, 0, 0, "-", "backend_config"], [799, 1, 1, "", "convert"], [800, 1, 1, "", "default_eval_fn"], [2070, 0, 0, "-", "fake_quantize"], [2070, 0, 0, "-", "fuse_modules"], [2070, 0, 0, "-", "fuser_method_mappings"], [2070, 0, 0, "-", "fx"], [2070, 0, 0, "-", "observer"], [840, 1, 1, "", "prepare"], [841, 1, 1, "", "prepare_qat"], [842, 1, 1, "", "propagate_qconfig_"], [2073, 0, 0, "-", "pt2e"], [2070, 0, 0, "-", "qconfig"], [2070, 0, 0, "-", "qconfig_mapping"], [2070, 0, 0, "-", "quant_type"], [2070, 0, 0, "-", "quantization_mappings"], [860, 1, 1, "", "quantize"], [861, 1, 1, "", "quantize_dynamic"], [2070, 0, 0, "-", "quantize_fx"], [2070, 0, 0, "-", "quantize_jit"], [2070, 0, 0, "-", "quantize_pt2e"], [866, 1, 1, "", "quantize_qat"], [2073, 0, 0, "-", "quantizer"], [2070, 0, 0, "-", "stubs"], [867, 1, 1, "", "swap_module"], [2070, 0, 0, "-", "utils"]], "torch.ao.quantization.backend_config": [[794, 1, 1, "", "BackendConfig"], [795, 1, 1, "", "BackendPatternConfig"], [796, 1, 1, "", "DTypeConfig"], [797, 1, 1, "", "DTypeWithConstraints"], [798, 1, 1, "", "ObservationType"], [2070, 0, 0, "-", "backend_config"], [2070, 0, 0, "-", "executorch"], [2070, 0, 0, "-", "fbgemm"], [2070, 0, 0, "-", "native"], [2070, 0, 0, "-", "observation_type"], [2070, 0, 0, "-", "onednn"], [2070, 0, 0, "-", "qnnpack"], [2070, 0, 0, "-", "tensorrt"], [2070, 0, 0, "-", "utils"], [2070, 0, 0, "-", "x86"]], "torch.ao.quantization.backend_config.BackendConfig": [[794, 4, 1, "", "configs"], [794, 3, 1, "", "from_dict"], [794, 3, 1, "", "set_backend_pattern_config"], [794, 3, 1, "", "set_backend_pattern_configs"], [794, 3, 1, "", "set_name"], [794, 3, 1, "", "to_dict"]], "torch.ao.quantization.backend_config.BackendPatternConfig": [[795, 3, 1, "", "add_dtype_config"], [795, 3, 1, "", "from_dict"], [795, 3, 1, "", "set_dtype_configs"], [795, 3, 1, "", "set_fused_module"], [795, 3, 1, "", "set_fuser_method"], [795, 3, 
1, "", "set_observation_type"], [795, 3, 1, "", "set_pattern"], [795, 3, 1, "", "set_qat_module"], [795, 3, 1, "", "set_reference_quantized_module"], [795, 3, 1, "", "set_root_module"], [795, 3, 1, "", "to_dict"]], "torch.ao.quantization.backend_config.DTypeConfig": [[796, 3, 1, "", "from_dict"], [796, 3, 1, "", "to_dict"]], "torch.ao.quantization.backend_config.ObservationType": [[798, 2, 1, "", "INPUT_OUTPUT_NOT_OBSERVED"], [798, 2, 1, "", "OUTPUT_SHARE_OBSERVER_WITH_INPUT"], [798, 2, 1, "", "OUTPUT_USE_DIFFERENT_OBSERVER_AS_INPUT"]], "torch.ao.quantization.fake_quantize": [[801, 1, 1, "", "FakeQuantize"], [802, 1, 1, "", "FakeQuantizeBase"], [803, 1, 1, "", "FixedQParamsFakeQuantize"], [804, 1, 1, "", "FusedMovingAvgObsFakeQuantize"], [805, 2, 1, "", "default_fake_quant"], [806, 2, 1, "", "default_fused_act_fake_quant"], [807, 2, 1, "", "default_fused_per_channel_wt_fake_quant"], [808, 2, 1, "", "default_fused_wt_fake_quant"], [809, 2, 1, "", "default_histogram_fake_quant"], [810, 2, 1, "", "default_per_channel_weight_fake_quant"], [811, 2, 1, "", "default_weight_fake_quant"], [812, 1, 1, "", "disable_fake_quant"], [813, 1, 1, "", "disable_observer"], [814, 1, 1, "", "enable_fake_quant"], [815, 1, 1, "", "enable_observer"]], "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize": [[803, 3, 1, "", "extra_repr"]], "torch.ao.quantization.fuse_modules": [[816, 1, 1, "", "fuse_modules"]], "torch.ao.quantization.fx": [[2070, 0, 0, "-", "convert"], [2070, 0, 0, "-", "custom_config"], [2070, 0, 0, "-", "fuse"], [2070, 0, 0, "-", "fuse_handler"], [2070, 0, 0, "-", "graph_module"], [2070, 0, 0, "-", "lower_to_fbgemm"], [2070, 0, 0, "-", "lower_to_qnnpack"], [2070, 0, 0, "-", "lstm_utils"], [2070, 0, 0, "-", "match_utils"], [2070, 0, 0, "-", "pattern_utils"], [2070, 0, 0, "-", "prepare"], [2070, 0, 0, "-", "qconfig_mapping_utils"], [2070, 0, 0, "-", "quantize_handler"], [2070, 0, 0, "-", "tracer"], [2070, 0, 0, "-", "utils"]], "torch.ao.quantization.fx.custom_config": [[817, 1, 1, "", "ConvertCustomConfig"], [818, 1, 1, "", "FuseCustomConfig"], [819, 1, 1, "", "PrepareCustomConfig"], [820, 1, 1, "", "StandaloneModuleConfigEntry"]], "torch.ao.quantization.fx.custom_config.ConvertCustomConfig": [[817, 3, 1, "", "from_dict"], [817, 3, 1, "", "set_observed_to_quantized_mapping"], [817, 3, 1, "", "set_preserved_attributes"], [817, 3, 1, "", "to_dict"]], "torch.ao.quantization.fx.custom_config.FuseCustomConfig": [[818, 3, 1, "", "from_dict"], [818, 3, 1, "", "set_preserved_attributes"], [818, 3, 1, "", "to_dict"]], "torch.ao.quantization.fx.custom_config.PrepareCustomConfig": [[819, 3, 1, "", "from_dict"], [819, 3, 1, "", "set_float_to_observed_mapping"], [819, 3, 1, "", "set_input_quantized_indexes"], [819, 3, 1, "", "set_non_traceable_module_classes"], [819, 3, 1, "", "set_non_traceable_module_names"], [819, 3, 1, "", "set_output_quantized_indexes"], [819, 3, 1, "", "set_preserved_attributes"], [819, 3, 1, "", "set_standalone_module_class"], [819, 3, 1, "", "set_standalone_module_name"], [819, 3, 1, "", "to_dict"]], "torch.ao.quantization.observer": [[821, 1, 1, "", "HistogramObserver"], [822, 1, 1, "", "MinMaxObserver"], [823, 1, 1, "", "MovingAverageMinMaxObserver"], [824, 1, 1, "", "MovingAveragePerChannelMinMaxObserver"], [825, 1, 1, "", "NoopObserver"], [826, 1, 1, "", "ObserverBase"], [827, 1, 1, "", "PerChannelMinMaxObserver"], [828, 1, 1, "", "PlaceholderObserver"], [829, 1, 1, "", "RecordingObserver"], [830, 2, 1, "", "default_debug_observer"], [831, 2, 1, "", 
"default_dynamic_quant_observer"], [832, 2, 1, "", "default_float_qparams_observer"], [833, 2, 1, "", "default_histogram_observer"], [834, 2, 1, "", "default_observer"], [835, 2, 1, "", "default_per_channel_weight_observer"], [836, 2, 1, "", "default_placeholder_observer"], [837, 2, 1, "", "default_weight_observer"], [838, 1, 1, "", "get_observer_state_dict"], [839, 1, 1, "", "load_observer_state_dict"]], "torch.ao.quantization.observer.MinMaxObserver": [[822, 3, 1, "", "calculate_qparams"], [822, 3, 1, "", "forward"], [822, 3, 1, "", "reset_min_max_vals"]], "torch.ao.quantization.observer.ObserverBase": [[826, 3, 1, "", "with_args"], [826, 3, 1, "", "with_callable_args"]], "torch.ao.quantization.observer.PerChannelMinMaxObserver": [[827, 3, 1, "", "reset_min_max_vals"]], "torch.ao.quantization.pt2e": [[2070, 0, 0, "-", "duplicate_dq_pass"], [2070, 0, 0, "-", "export_utils"], [2073, 0, 0, "-", "generate_numeric_debug_handle"], [2070, 0, 0, "-", "graph_utils"], [2070, 0, 0, "-", "port_metadata_pass"], [2070, 0, 0, "-", "prepare"], [2070, 0, 0, "-", "qat_utils"], [2073, 0, 0, "-", "representation"], [2070, 0, 0, "-", "utils"]], "torch.ao.quantization.pt2e.export_utils": [[843, 1, 1, "", "model_is_exported"]], "torch.ao.quantization.pt2e.representation": [[2070, 0, 0, "-", "rewrite"]], "torch.ao.quantization.qconfig": [[844, 1, 1, "", "QConfig"], [845, 2, 1, "", "default_activation_only_qconfig"], [846, 2, 1, "", "default_debug_qconfig"], [847, 2, 1, "", "default_dynamic_qconfig"], [848, 2, 1, "", "default_per_channel_qconfig"], [849, 2, 1, "", "default_qat_qconfig"], [850, 2, 1, "", "default_qat_qconfig_v2"], [851, 2, 1, "", "default_qconfig"], [852, 2, 1, "", "default_weight_only_qconfig"], [853, 2, 1, "", "float16_dynamic_qconfig"], [854, 2, 1, "", "float16_static_qconfig"], [855, 2, 1, "", "float_qparams_weight_only_qconfig"], [856, 2, 1, "", "per_channel_dynamic_qconfig"]], "torch.ao.quantization.qconfig_mapping": [[857, 1, 1, "", "QConfigMapping"], [858, 1, 1, "", "get_default_qat_qconfig_mapping"], [859, 1, 1, "", "get_default_qconfig_mapping"]], "torch.ao.quantization.qconfig_mapping.QConfigMapping": [[857, 3, 1, "", "from_dict"], [857, 3, 1, "", "set_global"], [857, 3, 1, "", "set_module_name"], [857, 3, 1, "", "set_module_name_object_type_order"], [857, 3, 1, "", "set_module_name_regex"], [857, 3, 1, "", "set_object_type"], [857, 3, 1, "", "to_dict"]], "torch.ao.quantization.quantize_fx": [[862, 1, 1, "", "convert_fx"], [863, 1, 1, "", "fuse_fx"], [864, 1, 1, "", "prepare_fx"], [865, 1, 1, "", "prepare_qat_fx"]], "torch.ao.quantization.quantizer": [[2070, 0, 0, "-", "composable_quantizer"], [2070, 0, 0, "-", "embedding_quantizer"], [2070, 0, 0, "-", "quantizer"], [2070, 0, 0, "-", "utils"], [2070, 0, 0, "-", "x86_inductor_quantizer"], [2070, 0, 0, "-", "xnnpack_quantizer"], [2070, 0, 0, "-", "xnnpack_quantizer_utils"]], "torch.autograd": [[1, 1, 1, "", "Function"], [1, 0, 0, "-", "anomaly_mode"], [896, 5, 1, "", "backward"], [1, 1, 1, "", "detect_anomaly"], [1, 0, 0, "-", "forward_ad"], [1, 0, 0, "-", "function"], [1, 0, 0, "-", "functional"], [917, 5, 1, "", "grad"], [1, 0, 0, "-", "grad_mode"], [1, 0, 0, "-", "gradcheck"], [1, 0, 0, "-", "graph"], [1, 0, 0, "-", "profiler"], [1, 0, 0, "-", "profiler_legacy"], [1, 0, 0, "-", "profiler_util"], [1, 1, 1, "", "set_detect_anomaly"], [1, 0, 0, "-", "variable"]], "torch.autograd.Function": [[892, 3, 1, "", "backward"], [893, 3, 1, "", "forward"], [894, 3, 1, "", "jvp"], [895, 3, 1, "", "vmap"]], "torch.autograd.forward_ad": [[897, 1, 1, 
"", "UnpackedDualTensor"], [898, 1, 1, "", "dual_level"], [899, 5, 1, "", "enter_dual_level"], [900, 5, 1, "", "exit_dual_level"], [901, 5, 1, "", "make_dual"], [902, 5, 1, "", "unpack_dual"]], "torch.autograd.forward_ad.UnpackedDualTensor": [[897, 3, 1, "", "count"], [897, 3, 1, "", "index"], [897, 2, 1, "", "primal"], [897, 2, 1, "", "tangent"]], "torch.autograd.function": [[903, 1, 1, "", "BackwardCFunction"], [908, 1, 1, "", "InplaceFunction"], [909, 1, 1, "", "NestedIOFunction"], [910, 5, 1, "", "once_differentiable"]], "torch.autograd.function.BackwardCFunction": [[903, 3, 1, "", "apply"], [903, 3, 1, "", "apply_jvp"], [903, 3, 1, "", "mark_dirty"], [903, 3, 1, "", "mark_non_differentiable"], [903, 3, 1, "", "save_for_backward"], [903, 3, 1, "", "save_for_forward"], [903, 3, 1, "", "set_materialize_grads"]], "torch.autograd.function.FunctionCtx": [[904, 3, 1, "", "mark_dirty"], [905, 3, 1, "", "mark_non_differentiable"], [906, 3, 1, "", "save_for_backward"], [907, 3, 1, "", "set_materialize_grads"]], "torch.autograd.function.InplaceFunction": [[908, 3, 1, "", "backward"], [908, 3, 1, "", "forward"], [908, 3, 1, "", "jvp"], [908, 3, 1, "", "mark_dirty"], [908, 3, 1, "", "mark_non_differentiable"], [908, 3, 1, "", "save_for_backward"], [908, 3, 1, "", "save_for_forward"], [908, 3, 1, "", "set_materialize_grads"], [908, 3, 1, "", "setup_context"], [908, 3, 1, "", "vjp"], [908, 3, 1, "", "vmap"]], "torch.autograd.function.NestedIOFunction": [[909, 3, 1, "", "backward"], [909, 3, 1, "", "backward_extended"], [909, 3, 1, "", "forward"], [909, 3, 1, "", "forward_extended"], [909, 3, 1, "", "jvp"], [909, 3, 1, "", "mark_dirty"], [909, 3, 1, "", "mark_non_differentiable"], [909, 3, 1, "", "save_for_backward"], [909, 3, 1, "", "save_for_forward"], [909, 4, 1, "", "saved_tensors"], [909, 3, 1, "", "set_materialize_grads"], [909, 3, 1, "", "setup_context"], [909, 3, 1, "", "vjp"], [909, 3, 1, "", "vmap"]], "torch.autograd.functional": [[911, 5, 1, "", "hessian"], [912, 5, 1, "", "hvp"], [913, 5, 1, "", "jacobian"], [914, 5, 1, "", "jvp"], [915, 5, 1, "", "vhp"], [916, 5, 1, "", "vjp"]], "torch.autograd.grad_mode": [[918, 1, 1, "", "inference_mode"], [919, 1, 1, "", "set_grad_enabled"], [920, 1, 1, "", "set_multithreading_enabled"]], "torch.autograd.grad_mode.inference_mode": [[918, 3, 1, "", "clone"]], "torch.autograd.grad_mode.set_grad_enabled": [[919, 3, 1, "", "clone"]], "torch.autograd.grad_mode.set_multithreading_enabled": [[920, 3, 1, "", "clone"]], "torch.autograd.gradcheck": [[921, 6, 1, "", "GradcheckError"], [922, 5, 1, "", "gradcheck"], [923, 5, 1, "", "gradgradcheck"]], "torch.autograd.graph": [[1, 1, 1, "", "GradientEdge"], [1, 1, 1, "", "allow_mutation_on_saved_tensors"], [1, 1, 1, "", "disable_saved_tensors_hooks"], [1, 5, 1, "", "get_gradient_edge"], [929, 5, 1, "", "increment_version"], [1, 1, 1, "", "register_multi_grad_hook"], [1, 1, 1, "", "save_on_cpu"], [1, 1, 1, "", "saved_tensors_hooks"]], "torch.autograd.graph.Node": [[924, 3, 1, "", "metadata"], [925, 3, 1, "", "name"], [926, 4, 1, "", "next_functions"], [927, 3, 1, "", "register_hook"], [928, 3, 1, "", "register_prehook"]], "torch.autograd.profiler": [[930, 1, 1, "", "EnforceUnique"], [931, 1, 1, "", "KinetoStepTracker"], [1, 1, 1, "", "emit_itt"], [1, 1, 1, "", "emit_nvtx"], [932, 5, 1, "", "load_nvprof"], [933, 5, 1, "", "parse_nvprof_trace"], [1, 1, 1, "", "profile"], [938, 1, 1, "", "record_function"]], "torch.autograd.profiler.EnforceUnique": [[930, 3, 1, "", "see"]], "torch.autograd.profiler.KinetoStepTracker": 
[[931, 3, 1, "", "current_step"], [931, 3, 1, "", "erase_step_count"], [931, 3, 1, "", "increment_step"], [931, 3, 1, "", "init_step_count"]], "torch.autograd.profiler.profile": [[934, 3, 1, "", "export_chrome_trace"], [935, 3, 1, "", "key_averages"], [936, 4, 1, "", "self_cpu_time_total"], [937, 3, 1, "", "total_average"]], "torch.autograd.profiler_util": [[939, 1, 1, "", "Interval"], [940, 1, 1, "", "Kernel"], [941, 1, 1, "", "MemRecordsAcc"], [942, 1, 1, "", "StringTable"]], "torch.autograd.profiler_util.Interval": [[939, 3, 1, "", "elapsed_us"]], "torch.autograd.profiler_util.Kernel": [[940, 3, 1, "", "count"], [940, 2, 1, "", "device"], [940, 2, 1, "", "duration"], [940, 3, 1, "", "index"], [940, 2, 1, "", "name"]], "torch.autograd.profiler_util.MemRecordsAcc": [[941, 3, 1, "", "in_interval"]], "torch.autograd.profiler_util.StringTable": [[942, 3, 1, "", "clear"], [942, 3, 1, "", "copy"], [942, 2, 1, "", "default_factory"], [942, 3, 1, "", "fromkeys"], [942, 3, 1, "", "get"], [942, 3, 1, "", "items"], [942, 3, 1, "", "keys"], [942, 3, 1, "", "pop"], [942, 3, 1, "", "popitem"], [942, 3, 1, "", "setdefault"], [942, 3, 1, "", "update"], [942, 3, 1, "", "values"]], "torch.backends": [[2, 0, 0, "-", "cpu"], [2, 0, 0, "-", "cuda"], [2, 0, 0, "-", "cudnn"], [2, 0, 0, "-", "mha"], [2, 0, 0, "-", "mkl"], [2, 0, 0, "-", "mkldnn"], [2, 0, 0, "-", "mps"], [2, 0, 0, "-", "nnpack"], [2, 0, 0, "-", "openmp"], [2, 0, 0, "-", "opt_einsum"], [2, 0, 0, "-", "quantized"], [2, 0, 0, "-", "xeon"], [2, 0, 0, "-", "xnnpack"]], "torch.backends.cpu": [[2, 5, 1, "", "get_cpu_capability"]], "torch.backends.cuda": [[2, 1, 1, "", "SDPAParams"], [2, 5, 1, "", "can_use_cudnn_attention"], [2, 5, 1, "", "can_use_efficient_attention"], [2, 5, 1, "", "can_use_flash_attention"], [2, 5, 1, "", "cudnn_sdp_enabled"], [2, 2, 1, "", "cufft_plan_cache"], [2, 5, 1, "", "enable_cudnn_sdp"], [2, 5, 1, "", "enable_flash_sdp"], [2, 5, 1, "", "enable_math_sdp"], [2, 5, 1, "", "enable_mem_efficient_sdp"], [2, 5, 1, "", "flash_sdp_enabled"], [2, 5, 1, "", "is_built"], [2, 5, 1, "", "math_sdp_enabled"], [2, 5, 1, "", "mem_efficient_sdp_enabled"], [2, 5, 1, "", "preferred_blas_library"], [2, 5, 1, "", "preferred_linalg_library"], [2, 5, 1, "", "sdp_kernel"]], "torch.backends.cuda.cufft_plan_cache": [[2, 3, 1, "", "clear"], [2, 2, 1, "", "max_size"], [2, 2, 1, "", "size"]], "torch.backends.cuda.matmul": [[2, 2, 1, "", "allow_bf16_reduced_precision_reduction"], [2, 2, 1, "", "allow_fp16_reduced_precision_reduction"], [2, 2, 1, "", "allow_tf32"]], "torch.backends.cudnn": [[2, 2, 1, "", "allow_tf32"], [2, 2, 1, "", "benchmark"], [2, 2, 1, "", "benchmark_limit"], [2, 2, 1, "", "deterministic"], [2, 2, 1, "", "enabled"], [2, 5, 1, "", "is_available"], [2, 0, 0, "-", "rnn"], [2, 5, 1, "", "version"]], "torch.backends.mha": [[2, 5, 1, "", "get_fastpath_enabled"], [2, 5, 1, "", "set_fastpath_enabled"]], "torch.backends.mkl": [[2, 5, 1, "", "is_available"], [2, 1, 1, "", "verbose"]], "torch.backends.mkldnn": [[2, 5, 1, "", "is_available"], [2, 1, 1, "", "verbose"]], "torch.backends.mps": [[2, 5, 1, "", "is_available"], [2, 5, 1, "", "is_built"]], "torch.backends.nnpack": [[2, 5, 1, "", "flags"], [2, 5, 1, "", "is_available"], [2, 5, 1, "", "set_flags"]], "torch.backends.openmp": [[2, 5, 1, "", "is_available"]], "torch.backends.opt_einsum": [[2, 2, 1, "", "enabled"], [2, 5, 1, "", "get_opt_einsum"], [2, 5, 1, "", "is_available"], [2, 2, 1, "", "strategy"]], "torch.backends.xeon": [[2, 0, 0, "-", "run_cpu"]], "torch.compiler": [[977, 5, 1, "", 
"allow_in_graph"], [978, 5, 1, "", "assume_constant_result"], [979, 5, 1, "", "compile"], [980, 5, 1, "", "cudagraph_mark_step_begin"], [981, 5, 1, "", "disable"], [982, 5, 1, "", "is_compiling"], [983, 5, 1, "", "is_dynamo_compiling"], [984, 5, 1, "", "list_backends"], [985, 5, 1, "", "reset"]], "torch.cpu": [[998, 1, 1, "", "Stream"], [999, 1, 1, "", "StreamContext"], [0, 0, 0, "-", "amp"], [1000, 5, 1, "", "current_device"], [1001, 5, 1, "", "current_stream"], [1002, 5, 1, "", "device_count"], [1003, 5, 1, "", "is_available"], [1004, 5, 1, "", "set_device"], [1005, 5, 1, "", "stream"], [1006, 5, 1, "", "synchronize"]], "torch.cpu.amp": [[0, 1, 1, "", "autocast"], [0, 0, 0, "-", "autocast_mode"], [0, 0, 0, "-", "grad_scaler"]], "torch.cuda": [[1008, 1, 1, "", "CUDAGraph"], [1009, 1, 1, "", "CUDAPluggableAllocator"], [1010, 1, 1, "", "Event"], [1011, 1, 1, "", "ExternalStream"], [1012, 6, 1, "", "OutOfMemoryError"], [1013, 1, 1, "", "Stream"], [1014, 1, 1, "", "StreamContext"], [18, 0, 0, "-", "_sanitizer"], [0, 0, 0, "-", "amp"], [1015, 5, 1, "", "caching_allocator_alloc"], [1016, 5, 1, "", "caching_allocator_delete"], [1017, 5, 1, "", "can_device_access_peer"], [1018, 5, 1, "", "change_current_allocator"], [1019, 5, 1, "", "clock_rate"], [17, 0, 0, "-", "comm"], [1025, 5, 1, "", "current_blas_handle"], [1026, 5, 1, "", "current_device"], [1027, 5, 1, "", "current_stream"], [1028, 5, 1, "", "default_stream"], [1029, 1, 1, "", "device"], [1030, 5, 1, "", "device_count"], [1031, 1, 1, "", "device_of"], [1032, 5, 1, "", "empty_cache"], [17, 0, 0, "-", "error"], [1033, 5, 1, "", "get_allocator_backend"], [1034, 5, 1, "", "get_arch_list"], [1035, 5, 1, "", "get_device_capability"], [1036, 5, 1, "", "get_device_name"], [1037, 5, 1, "", "get_device_properties"], [1038, 5, 1, "", "get_gencode_flags"], [1039, 5, 1, "", "get_rng_state"], [1040, 5, 1, "", "get_rng_state_all"], [1041, 5, 1, "", "get_sync_debug_mode"], [1042, 1, 1, "", "graph"], [1043, 5, 1, "", "graph_pool_handle"], [17, 0, 0, "-", "graphs"], [1044, 5, 1, "", "init"], [1045, 5, 1, "", "initial_seed"], [1046, 5, 1, "", "ipc_collect"], [1047, 5, 1, "", "is_available"], [1048, 5, 1, "", "is_current_stream_capturing"], [1049, 5, 1, "", "is_initialized"], [17, 0, 0, "-", "jiterator"], [1052, 5, 1, "", "list_gpu_processes"], [1053, 5, 1, "", "make_graphed_callables"], [1054, 5, 1, "", "manual_seed"], [1055, 5, 1, "", "manual_seed_all"], [1056, 5, 1, "", "max_memory_allocated"], [1057, 5, 1, "", "max_memory_cached"], [1058, 5, 1, "", "max_memory_reserved"], [1059, 5, 1, "", "mem_get_info"], [17, 0, 0, "-", "memory"], [1060, 5, 1, "", "memory_allocated"], [1061, 5, 1, "", "memory_cached"], [1062, 5, 1, "", "memory_reserved"], [1063, 5, 1, "", "memory_snapshot"], [1064, 5, 1, "", "memory_stats"], [1065, 5, 1, "", "memory_summary"], [1066, 5, 1, "", "memory_usage"], [17, 0, 0, "-", "nccl"], [17, 0, 0, "-", "nvtx"], [1071, 5, 1, "", "power_draw"], [17, 0, 0, "-", "profiler"], [17, 0, 0, "-", "random"], [1072, 5, 1, "", "reset_max_memory_allocated"], [1073, 5, 1, "", "reset_max_memory_cached"], [1074, 5, 1, "", "reset_peak_memory_stats"], [1075, 5, 1, "", "seed"], [1076, 5, 1, "", "seed_all"], [1077, 5, 1, "", "set_device"], [1078, 5, 1, "", "set_per_process_memory_fraction"], [1079, 5, 1, "", "set_rng_state"], [1080, 5, 1, "", "set_rng_state_all"], [1081, 5, 1, "", "set_stream"], [1082, 5, 1, "", "set_sync_debug_mode"], [17, 0, 0, "-", "sparse"], [1083, 5, 1, "", "stream"], [17, 0, 0, "-", "streams"], [1084, 5, 1, "", "synchronize"], [1085, 5, 
1, "", "temperature"], [19, 0, 0, "-", "tunable"], [1086, 5, 1, "", "utilization"]], "torch.cuda.CUDAGraph": [[1008, 3, 1, "", "capture_begin"], [1008, 3, 1, "", "capture_end"], [1008, 3, 1, "", "debug_dump"], [1008, 3, 1, "", "enable_debug_mode"], [1008, 3, 1, "", "pool"], [1008, 3, 1, "", "replay"], [1008, 3, 1, "", "reset"]], "torch.cuda.Event": [[1010, 3, 1, "", "elapsed_time"], [1010, 3, 1, "", "from_ipc_handle"], [1010, 3, 1, "", "ipc_handle"], [1010, 3, 1, "", "query"], [1010, 3, 1, "", "record"], [1010, 3, 1, "", "synchronize"], [1010, 3, 1, "", "wait"]], "torch.cuda.ExternalStream": [[1011, 3, 1, "", "query"], [1011, 3, 1, "", "record_event"], [1011, 3, 1, "", "synchronize"], [1011, 3, 1, "", "wait_event"], [1011, 3, 1, "", "wait_stream"]], "torch.cuda.Stream": [[1013, 3, 1, "", "query"], [1013, 3, 1, "", "record_event"], [1013, 3, 1, "", "synchronize"], [1013, 3, 1, "", "wait_event"], [1013, 3, 1, "", "wait_stream"]], "torch.cuda._sanitizer": [[18, 5, 1, "", "enable_cuda_sanitizer"]], "torch.cuda.amp": [[0, 1, 1, "", "GradScaler"], [0, 1, 1, "", "autocast"], [0, 0, 0, "-", "autocast_mode"], [0, 0, 0, "-", "common"], [0, 5, 1, "", "custom_bwd"], [0, 5, 1, "", "custom_fwd"], [0, 0, 0, "-", "grad_scaler"]], "torch.cuda.comm": [[1020, 5, 1, "", "broadcast"], [1021, 5, 1, "", "broadcast_coalesced"], [1022, 5, 1, "", "gather"], [1023, 5, 1, "", "reduce_add"], [1024, 5, 1, "", "scatter"]], "torch.cuda.jiterator": [[1050, 5, 1, "", "_create_jit_fn"], [1051, 5, 1, "", "_create_multi_output_jit_fn"]], "torch.cuda.memory": [[2113, 5, 1, "", "_dump_snapshot"], [2113, 5, 1, "", "_record_memory_history"], [2113, 5, 1, "", "_snapshot"]], "torch.cuda.nvtx": [[1067, 5, 1, "", "mark"], [1068, 5, 1, "", "range"], [1069, 5, 1, "", "range_pop"], [1070, 5, 1, "", "range_push"]], "torch.cuda.tunable": [[19, 5, 1, "", "enable"], [19, 5, 1, "", "get_filename"], [19, 5, 1, "", "get_max_tuning_duration"], [19, 5, 1, "", "get_max_tuning_iterations"], [19, 5, 1, "", "get_results"], [19, 5, 1, "", "get_validators"], [19, 5, 1, "", "is_enabled"], [19, 5, 1, "", "read_file"], [19, 5, 1, "", "set_filename"], [19, 5, 1, "", "set_max_tuning_duration"], [19, 5, 1, "", "set_max_tuning_iterations"], [19, 5, 1, "", "tuning_enable"], [19, 5, 1, "", "tuning_is_enabled"], [19, 5, 1, "", "write_file"], [19, 5, 1, "", "write_file_on_exit"]], "torch.distributed": [[28, 1, 1, "", "Backend"], [28, 1, 1, "", "DistBackendError"], [28, 1, 1, "", "DistError"], [28, 1, 1, "", "DistNetworkError"], [28, 1, 1, "", "DistStoreError"], [28, 1, 1, "", "FileStore"], [24, 1, 1, "", "GradBucket"], [28, 1, 1, "", "HashStore"], [28, 1, 1, "", "P2POp"], [28, 1, 1, "", "PrefixStore"], [28, 1, 1, "", "ReduceOp"], [28, 1, 1, "", "Store"], [28, 1, 1, "", "TCPStore"], [28, 1, 1, "", "Work"], [28, 0, 0, "-", "algorithms"], [28, 5, 1, "", "all_gather"], [28, 5, 1, "", "all_gather_into_tensor"], [28, 5, 1, "", "all_gather_object"], [28, 5, 1, "", "all_reduce"], [28, 5, 1, "", "all_to_all"], [28, 5, 1, "", "all_to_all_single"], [28, 0, 0, "-", "argparse_util"], [2075, 0, 0, "-", "autograd"], [28, 5, 1, "", "barrier"], [28, 5, 1, "", "batch_isend_irecv"], [28, 5, 1, "", "breakpoint"], [28, 5, 1, "", "broadcast"], [28, 5, 1, "", "broadcast_object_list"], [28, 0, 0, "-", "c10d_logger"], [30, 0, 0, "-", "checkpoint"], [28, 0, 0, "-", "collective_utils"], [28, 0, 0, "-", "constants"], [28, 0, 0, "-", "device_mesh"], [28, 0, 0, "-", "distributed_c10d"], [28, 0, 0, "-", "elastic"], [55, 0, 0, "-", "fsdp"], [28, 5, 1, "", "gather"], [28, 5, 1, "", 
"gather_object"], [28, 5, 1, "", "get_backend"], [28, 5, 1, "", "get_global_rank"], [28, 5, 1, "", "get_group_rank"], [28, 5, 1, "", "get_process_group_ranks"], [28, 5, 1, "", "get_rank"], [28, 5, 1, "", "get_world_size"], [28, 5, 1, "", "init_process_group"], [28, 5, 1, "", "irecv"], [28, 5, 1, "", "is_available"], [28, 5, 1, "", "is_gloo_available"], [28, 5, 1, "", "is_initialized"], [28, 5, 1, "", "is_mpi_available"], [28, 5, 1, "", "is_nccl_available"], [28, 5, 1, "", "is_torchelastic_launched"], [28, 5, 1, "", "isend"], [28, 0, 0, "-", "launch"], [28, 0, 0, "-", "launcher"], [28, 0, 0, "-", "logging_handlers"], [28, 5, 1, "", "monitored_barrier"], [28, 5, 1, "", "new_group"], [28, 0, 0, "-", "nn"], [32, 0, 0, "-", "optim"], [33, 0, 0, "-", "pipelining"], [28, 5, 1, "", "recv"], [28, 5, 1, "", "recv_object_list"], [28, 5, 1, "", "reduce"], [28, 1, 1, "", "reduce_op"], [28, 5, 1, "", "reduce_scatter"], [28, 5, 1, "", "reduce_scatter_tensor"], [28, 0, 0, "-", "remote_device"], [28, 0, 0, "-", "rendezvous"], [2075, 0, 0, "-", "rpc"], [48, 0, 0, "-", "run"], [28, 5, 1, "", "scatter"], [28, 5, 1, "", "scatter_object_list"], [28, 5, 1, "", "send"], [28, 5, 1, "", "send_object_list"], [28, 0, 0, "-", "tensor"], [28, 0, 0, "-", "utils"]], "torch.distributed.Backend": [[28, 3, 1, "", "register_backend"]], "torch.distributed.GradBucket": [[24, 5, 1, "", "buffer"], [24, 5, 1, "", "gradients"], [24, 5, 1, "", "index"], [24, 5, 1, "", "is_last"], [24, 5, 1, "", "parameters"], [24, 5, 1, "", "set_buffer"]], "torch.distributed.Store": [[28, 5, 1, "", "add"], [28, 5, 1, "", "compare_set"], [28, 5, 1, "", "delete_key"], [28, 5, 1, "", "get"], [28, 5, 1, "", "num_keys"], [28, 5, 1, "", "set"], [28, 5, 1, "", "set_timeout"], [28, 5, 1, "", "wait"]], "torch.distributed.algorithms": [[29, 1, 1, "", "Join"], [29, 1, 1, "", "JoinHook"], [29, 1, 1, "", "Joinable"], [28, 0, 0, "-", "ddp_comm_hooks"], [28, 0, 0, "-", "join"], [28, 0, 0, "-", "model_averaging"]], "torch.distributed.algorithms.Join": [[29, 3, 1, "", "notify_join_context"]], "torch.distributed.algorithms.JoinHook": [[29, 3, 1, "", "main_hook"], [29, 3, 1, "", "post_hook"]], "torch.distributed.algorithms.Joinable": [[29, 4, 1, "", "join_device"], [29, 3, 1, "", "join_hook"], [29, 4, 1, "", "join_process_group"]], "torch.distributed.algorithms.ddp_comm_hooks": [[28, 0, 0, "-", "ddp_zero_hook"], [28, 0, 0, "-", "debugging_hooks"], [28, 0, 0, "-", "default_hooks"], [28, 0, 0, "-", "mixed_precision_hooks"], [28, 0, 0, "-", "optimizer_overlap_hooks"], [28, 0, 0, "-", "post_localSGD_hook"], [28, 0, 0, "-", "powerSGD_hook"], [28, 0, 0, "-", "quantization_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks": [[24, 5, 1, "", "noop_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.default_hooks": [[24, 5, 1, "", "allreduce_hook"], [24, 5, 1, "", "bf16_compress_hook"], [24, 5, 1, "", "bf16_compress_wrapper"], [24, 5, 1, "", "fp16_compress_hook"], [24, 5, 1, "", "fp16_compress_wrapper"]], "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook": [[24, 1, 1, "", "PowerSGDState"], [24, 5, 1, "", "batched_powerSGD_hook"], [24, 5, 1, "", "powerSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState": [[24, 3, 1, "", "__getstate__"], [24, 3, 1, "", "__setstate__"]], "torch.distributed.algorithms.model_averaging": [[28, 0, 0, "-", "averagers"], [28, 0, 0, "-", "hierarchical_model_averager"], [28, 0, 0, "-", "utils"]], "torch.distributed.autograd": [[2075, 5, 1, "", "backward"], [2075, 1, 1, "", "context"], 
[2075, 5, 1, "", "get_gradients"]], "torch.distributed.checkpoint": [[30, 1, 1, "", "DefaultLoadPlanner"], [30, 1, 1, "", "DefaultSavePlanner"], [30, 1, 1, "", "FileSystemReader"], [30, 1, 1, "", "FileSystemWriter"], [30, 1, 1, "", "LoadPlan"], [30, 1, 1, "", "LoadPlanner"], [30, 1, 1, "", "ReadItem"], [30, 1, 1, "", "SavePlan"], [30, 1, 1, "", "SavePlanner"], [30, 1, 1, "", "StorageReader"], [30, 1, 1, "", "StorageWriter"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "default_planner"], [28, 0, 0, "-", "filesystem"], [30, 0, 0, "-", "format_utils"], [30, 0, 0, "-", "logger"], [30, 0, 0, "-", "logging_handlers"], [28, 0, 0, "-", "metadata"], [28, 0, 0, "-", "optimizer"], [28, 0, 0, "-", "planner"], [28, 0, 0, "-", "planner_helpers"], [28, 0, 0, "-", "resharding"], [30, 0, 0, "-", "staging"], [28, 0, 0, "-", "state_dict"], [28, 0, 0, "-", "state_dict_loader"], [28, 0, 0, "-", "state_dict_saver"], [28, 0, 0, "-", "stateful"], [28, 0, 0, "-", "storage"], [28, 0, 0, "-", "utils"]], "torch.distributed.checkpoint.DefaultLoadPlanner": [[30, 3, 1, "", "lookup_tensor"], [30, 3, 1, "", "transform_tensor"]], "torch.distributed.checkpoint.DefaultSavePlanner": [[30, 3, 1, "", "lookup_object"], [30, 3, 1, "", "transform_object"]], "torch.distributed.checkpoint.FileSystemReader": [[30, 4, 1, "", "checkpoint_id"]], "torch.distributed.checkpoint.FileSystemWriter": [[30, 3, 1, "", "stage"]], "torch.distributed.checkpoint.LoadPlanner": [[30, 3, 1, "", "commit_tensor"], [30, 3, 1, "", "create_global_plan"], [30, 3, 1, "", "create_local_plan"], [30, 3, 1, "", "finish_plan"], [30, 3, 1, "", "load_bytes"], [30, 3, 1, "", "resolve_bytes"], [30, 3, 1, "", "resolve_tensor"], [30, 3, 1, "", "set_up_planner"]], "torch.distributed.checkpoint.SavePlanner": [[30, 3, 1, "", "create_global_plan"], [30, 3, 1, "", "create_local_plan"], [30, 3, 1, "", "finish_plan"], [30, 3, 1, "", "resolve_data"], [30, 3, 1, "", "set_up_planner"]], "torch.distributed.checkpoint.StorageReader": [[30, 3, 1, "", "prepare_global_plan"], [30, 3, 1, "", "prepare_local_plan"], [30, 3, 1, "", "read_data"], [30, 3, 1, "", "read_metadata"], [30, 3, 1, "", "reset"], [30, 3, 1, "", "set_up_storage_reader"], [30, 3, 1, "", "validate_checkpoint_id"]], "torch.distributed.checkpoint.StorageWriter": [[30, 3, 1, "", "finish"], [30, 3, 1, "", "prepare_global_plan"], [30, 3, 1, "", "prepare_local_plan"], [30, 3, 1, "", "reset"], [30, 3, 1, "", "set_up_storage_writer"], [30, 3, 1, "", "storage_meta"], [30, 3, 1, "", "validate_checkpoint_id"], [30, 3, 1, "", "write_data"]], "torch.distributed.checkpoint.format_utils": [[30, 1, 1, "", "BroadcastingTorchSaveReader"], [30, 1, 1, "", "DynamicMetaLoadPlanner"], [30, 5, 1, "", "dcp_to_torch_save"], [30, 5, 1, "", "torch_save_to_dcp"]], "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader": [[30, 3, 1, "", "prepare_global_plan"], [30, 3, 1, "", "prepare_local_plan"], [30, 3, 1, "", "read_data"], [30, 3, 1, "", "read_metadata"], [30, 3, 1, "", "reset"], [30, 3, 1, "", "set_up_storage_reader"], [30, 3, 1, "", "validate_checkpoint_id"]], "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner": [[30, 3, 1, "", "set_up_planner"]], "torch.distributed.checkpoint.planner": [[30, 1, 1, "", "WriteItem"]], "torch.distributed.checkpoint.planner.WriteItem": [[30, 3, 1, "", "tensor_storage_size"]], "torch.distributed.checkpoint.staging": [[30, 1, 1, "", "AsyncStager"], [30, 1, 1, "", "BlockingAsyncStager"]], "torch.distributed.checkpoint.staging.AsyncStager": [[30, 4, 1, "", 
"should_synchronize_after_execute"], [30, 3, 1, "", "stage"], [30, 3, 1, "", "synchronize_staging"]], "torch.distributed.checkpoint.staging.BlockingAsyncStager": [[30, 3, 1, "", "stage"], [30, 3, 1, "", "synchronize_staging"]], "torch.distributed.checkpoint.state_dict": [[30, 1, 1, "", "StateDictOptions"], [30, 5, 1, "", "get_model_state_dict"], [30, 5, 1, "", "get_optimizer_state_dict"], [30, 5, 1, "", "get_state_dict"], [30, 5, 1, "", "set_model_state_dict"], [30, 5, 1, "", "set_optimizer_state_dict"], [30, 5, 1, "", "set_state_dict"]], "torch.distributed.checkpoint.state_dict_loader": [[30, 5, 1, "", "load"], [30, 5, 1, "", "load_state_dict"]], "torch.distributed.checkpoint.state_dict_saver": [[30, 5, 1, "", "async_save"], [30, 5, 1, "", "save"], [30, 5, 1, "", "save_state_dict"]], "torch.distributed.checkpoint.stateful": [[30, 1, 1, "", "Stateful"]], "torch.distributed.checkpoint.stateful.Stateful": [[30, 3, 1, "", "load_state_dict"], [30, 3, 1, "", "state_dict"]], "torch.distributed.device_mesh": [[28, 1, 1, "", "DeviceMesh"], [28, 5, 1, "", "init_device_mesh"]], "torch.distributed.elastic": [[37, 0, 0, "-", "agent"], [38, 0, 0, "-", "control_plane"], [41, 0, 0, "-", "events"], [44, 0, 0, "-", "metrics"], [45, 0, 0, "-", "multiprocessing"], [47, 0, 0, "-", "rendezvous"], [50, 0, 0, "-", "timer"], [28, 0, 0, "-", "utils"]], "torch.distributed.elastic.agent": [[37, 0, 0, "-", "server"]], "torch.distributed.elastic.agent.server": [[37, 1, 1, "", "ElasticAgent"], [37, 1, 1, "", "SimpleElasticAgent"], [37, 1, 1, "", "Worker"], [37, 1, 1, "", "WorkerGroup"], [37, 1, 1, "", "WorkerSpec"], [37, 1, 1, "", "WorkerState"], [28, 0, 0, "-", "api"], [37, 0, 0, "-", "health_check_server"], [28, 0, 0, "-", "local_elastic_agent"]], "torch.distributed.elastic.agent.server.ElasticAgent": [[37, 3, 1, "", "get_worker_group"], [37, 3, 1, "", "run"]], "torch.distributed.elastic.agent.server.SimpleElasticAgent": [[37, 3, 1, "", "_assign_worker_ranks"], [37, 3, 1, "", "_exit_barrier"], [37, 3, 1, "", "_initialize_workers"], [37, 3, 1, "", "_monitor_workers"], [37, 3, 1, "", "_rendezvous"], [37, 3, 1, "", "_restart_workers"], [37, 3, 1, "", "_shutdown"], [37, 3, 1, "", "_start_workers"], [37, 3, 1, "", "_stop_workers"]], "torch.distributed.elastic.agent.server.WorkerSpec": [[37, 3, 1, "", "get_entrypoint_name"]], "torch.distributed.elastic.agent.server.WorkerState": [[37, 3, 1, "", "is_running"]], "torch.distributed.elastic.agent.server.api": [[37, 1, 1, "", "RunResult"]], "torch.distributed.elastic.agent.server.health_check_server": [[37, 1, 1, "", "HealthCheckServer"], [37, 5, 1, "", "create_healthcheck_server"]], "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer": [[37, 3, 1, "", "start"], [37, 3, 1, "", "stop"]], "torch.distributed.elastic.agent.server.local_elastic_agent": [[37, 1, 1, "", "LocalElasticAgent"]], "torch.distributed.elastic.control_plane": [[38, 5, 1, "", "worker_main"]], "torch.distributed.elastic.events": [[28, 0, 0, "-", "api"], [41, 5, 1, "", "get_logging_handler"], [28, 0, 0, "-", "handlers"], [41, 5, 1, "", "record"]], "torch.distributed.elastic.events.api": [[41, 1, 1, "", "Event"], [41, 2, 1, "", "EventMetadataValue"], [41, 1, 1, "", "EventSource"]], "torch.distributed.elastic.metrics": [[28, 0, 0, "-", "api"], [44, 5, 1, "", "configure"], [44, 5, 1, "", "prof"], [44, 5, 1, "", "put_metric"]], "torch.distributed.elastic.metrics.api": [[44, 1, 1, "", "ConsoleMetricHandler"], [44, 1, 1, "", "MetricHandler"], [44, 1, 1, "", "NullMetricHandler"]], 
"torch.distributed.elastic.multiprocessing": [[28, 0, 0, "-", "api"], [40, 0, 0, "-", "errors"], [28, 0, 0, "-", "redirects"], [45, 5, 1, "", "start_processes"], [49, 0, 0, "-", "subprocess_handler"], [28, 0, 0, "-", "tail_log"]], "torch.distributed.elastic.multiprocessing.api": [[45, 1, 1, "", "DefaultLogsSpecs"], [45, 1, 1, "", "LogsDest"], [45, 1, 1, "", "LogsSpecs"], [45, 1, 1, "", "MultiprocessContext"], [45, 1, 1, "", "PContext"], [45, 1, 1, "", "RunProcsResult"], [45, 1, 1, "", "SubprocessContext"]], "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs": [[45, 3, 1, "", "reify"]], "torch.distributed.elastic.multiprocessing.api.LogsSpecs": [[45, 3, 1, "", "reify"]], "torch.distributed.elastic.multiprocessing.errors": [[40, 1, 1, "", "ChildFailedError"], [40, 1, 1, "", "ErrorHandler"], [40, 1, 1, "", "ProcessFailure"], [28, 0, 0, "-", "error_handler"], [28, 0, 0, "-", "handlers"], [40, 5, 1, "", "record"]], "torch.distributed.elastic.multiprocessing.subprocess_handler": [[49, 0, 0, "-", "handlers"], [49, 0, 0, "-", "subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers": [[49, 5, 1, "", "get_subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler": [[49, 1, 1, "", "SubprocessHandler"]], "torch.distributed.elastic.rendezvous": [[47, 1, 1, "", "RendezvousHandler"], [47, 1, 1, "", "RendezvousHandlerRegistry"], [47, 1, 1, "", "RendezvousInfo"], [47, 1, 1, "", "RendezvousParameters"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "c10d_rendezvous_backend"], [28, 0, 0, "-", "dynamic_rendezvous"], [28, 0, 0, "-", "etcd_rendezvous"], [28, 0, 0, "-", "etcd_rendezvous_backend"], [28, 0, 0, "-", "etcd_server"], [28, 0, 0, "-", "etcd_store"], [47, 0, 0, "-", "registry"], [28, 0, 0, "-", "static_tcp_rendezvous"], [28, 0, 0, "-", "utils"]], "torch.distributed.elastic.rendezvous.RendezvousHandler": [[47, 3, 1, "", "get_backend"], [47, 3, 1, "", "get_run_id"], [47, 3, 1, "", "is_closed"], [47, 3, 1, "", "next_rendezvous"], [47, 3, 1, "", "num_nodes_waiting"], [47, 3, 1, "", "set_closed"], [47, 3, 1, "", "shutdown"], [47, 4, 1, "", "use_agent_store"]], "torch.distributed.elastic.rendezvous.RendezvousParameters": [[47, 3, 1, "", "get"], [47, 3, 1, "", "get_as_bool"], [47, 3, 1, "", "get_as_int"]], "torch.distributed.elastic.rendezvous.api": [[47, 1, 1, "", "RendezvousClosedError"], [47, 1, 1, "", "RendezvousConnectionError"], [47, 1, 1, "", "RendezvousError"], [47, 1, 1, "", "RendezvousGracefulExitError"], [47, 1, 1, "", "RendezvousStateError"], [47, 1, 1, "", "RendezvousStoreInfo"], [47, 1, 1, "", "RendezvousTimeoutError"]], "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo": [[47, 3, 1, "", "build"]], "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend": [[47, 1, 1, "", "C10dRendezvousBackend"], [47, 5, 1, "", "create_backend"]], "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend": [[47, 3, 1, "", "get_state"], [47, 4, 1, "", "name"], [47, 3, 1, "", "set_state"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous": [[47, 1, 1, "", "DynamicRendezvousHandler"], [47, 1, 1, "", "RendezvousBackend"], [47, 1, 1, "", "RendezvousTimeout"], [47, 5, 1, "", "create_handler"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler": [[47, 3, 1, "", "from_backend"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend": [[47, 3, 1, "", "get_state"], [47, 4, 1, "", "name"], [47, 3, 1, "", "set_state"]], 
"torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout": [[47, 4, 1, "", "close"], [47, 4, 1, "", "heartbeat"], [47, 4, 1, "", "join"], [47, 4, 1, "", "last_call"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous": [[47, 1, 1, "", "EtcdRendezvousHandler"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend": [[47, 1, 1, "", "EtcdRendezvousBackend"], [47, 5, 1, "", "create_backend"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend": [[47, 3, 1, "", "get_state"], [47, 4, 1, "", "name"], [47, 3, 1, "", "set_state"]], "torch.distributed.elastic.rendezvous.etcd_server": [[47, 1, 1, "", "EtcdServer"]], "torch.distributed.elastic.rendezvous.etcd_store": [[47, 1, 1, "", "EtcdStore"]], "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore": [[47, 3, 1, "", "add"], [47, 3, 1, "", "check"], [47, 3, 1, "", "get"], [47, 3, 1, "", "set"], [47, 3, 1, "", "wait"]], "torch.distributed.elastic.timer": [[50, 1, 1, "", "FileTimerClient"], [50, 1, 1, "", "FileTimerServer"], [50, 1, 1, "", "LocalTimerClient"], [50, 1, 1, "", "LocalTimerServer"], [50, 1, 1, "", "TimerClient"], [50, 1, 1, "", "TimerRequest"], [50, 1, 1, "", "TimerServer"], [28, 0, 0, "-", "api"], [50, 5, 1, "", "configure"], [50, 0, 0, "-", "debug_info_logging"], [50, 5, 1, "", "expires"], [28, 0, 0, "-", "file_based_local_timer"], [28, 0, 0, "-", "local_timer"]], "torch.distributed.elastic.timer.TimerClient": [[50, 3, 1, "", "acquire"], [50, 3, 1, "", "release"]], "torch.distributed.elastic.timer.TimerServer": [[50, 3, 1, "", "clear_timers"], [50, 3, 1, "", "get_expired_timers"], [50, 3, 1, "", "register_timers"]], "torch.distributed.elastic.timer.debug_info_logging": [[50, 5, 1, "", "log_debug_info_for_expired_timers"]], "torch.distributed.elastic.utils": [[28, 0, 0, "-", "api"], [28, 0, 0, "-", "data"], [28, 0, 0, "-", "distributed"], [28, 0, 0, "-", "log_level"], [28, 0, 0, "-", "logging"], [28, 0, 0, "-", "store"]], "torch.distributed.elastic.utils.data": [[28, 0, 0, "-", "cycling_iterator"], [28, 0, 0, "-", "elastic_distributed_sampler"]], "torch.distributed.fsdp": [[55, 1, 1, "", "BackwardPrefetch"], [55, 1, 1, "", "CPUOffload"], [55, 1, 1, "", "FullOptimStateDictConfig"], [55, 1, 1, "", "FullStateDictConfig"], [55, 1, 1, "", "FullyShardedDataParallel"], [55, 1, 1, "", "LocalOptimStateDictConfig"], [55, 1, 1, "", "LocalStateDictConfig"], [55, 1, 1, "", "MixedPrecision"], [55, 1, 1, "", "OptimStateDictConfig"], [55, 1, 1, "", "ShardedOptimStateDictConfig"], [55, 1, 1, "", "ShardedStateDictConfig"], [55, 1, 1, "", "ShardingStrategy"], [55, 1, 1, "", "StateDictConfig"], [55, 1, 1, "", "StateDictSettings"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "fully_sharded_data_parallel"], [28, 0, 0, "-", "sharded_grad_scaler"], [28, 0, 0, "-", "wrap"]], "torch.distributed.fsdp.FullyShardedDataParallel": [[55, 3, 1, "", "apply"], [55, 3, 1, "", "check_is_root"], [55, 3, 1, "", "clip_grad_norm_"], [55, 3, 1, "", "flatten_sharded_optim_state_dict"], [55, 3, 1, "", "forward"], [55, 3, 1, "", "fsdp_modules"], [55, 3, 1, "", "full_optim_state_dict"], [55, 3, 1, "", "get_state_dict_type"], [55, 4, 1, "", "module"], [55, 3, 1, "", "named_buffers"], [55, 3, 1, "", "named_parameters"], [55, 3, 1, "", "no_sync"], [55, 3, 1, "", "optim_state_dict"], [55, 3, 1, "", "optim_state_dict_to_load"], [55, 3, 1, "", "register_comm_hook"], [55, 3, 1, "", "rekey_optim_state_dict"], [55, 3, 1, "", "scatter_full_optim_state_dict"], [55, 3, 1, "", "set_state_dict_type"], [55, 3, 1, "", 
"shard_full_optim_state_dict"], [55, 3, 1, "", "sharded_optim_state_dict"], [55, 3, 1, "", "state_dict_type"], [55, 3, 1, "", "summon_full_params"]], "torch.distributed.launcher": [[28, 0, 0, "-", "api"]], "torch.distributed.nn": [[28, 0, 0, "-", "api"], [28, 0, 0, "-", "functional"], [28, 0, 0, "-", "jit"]], "torch.distributed.nn.api": [[28, 0, 0, "-", "remote_module"]], "torch.distributed.nn.api.remote_module": [[2075, 1, 1, "", "RemoteModule"]], "torch.distributed.nn.api.remote_module.RemoteModule": [[2075, 3, 1, "", "get_module_rref"], [2075, 3, 1, "", "remote_parameters"]], "torch.distributed.nn.jit": [[28, 0, 0, "-", "instantiator"], [28, 0, 0, "-", "templates"]], "torch.distributed.nn.jit.templates": [[28, 0, 0, "-", "remote_module_template"]], "torch.distributed.optim": [[32, 1, 1, "", "DistributedOptimizer"], [32, 1, 1, "", "PostLocalSGDOptimizer"], [32, 1, 1, "", "ZeroRedundancyOptimizer"], [28, 0, 0, "-", "apply_optimizer_in_backward"], [28, 0, 0, "-", "functional_adadelta"], [28, 0, 0, "-", "functional_adagrad"], [28, 0, 0, "-", "functional_adam"], [28, 0, 0, "-", "functional_adamax"], [28, 0, 0, "-", "functional_adamw"], [28, 0, 0, "-", "functional_rmsprop"], [28, 0, 0, "-", "functional_rprop"], [28, 0, 0, "-", "functional_sgd"], [28, 0, 0, "-", "named_optimizer"], [28, 0, 0, "-", "optimizer"], [28, 0, 0, "-", "post_localSGD_optimizer"], [28, 0, 0, "-", "utils"], [28, 0, 0, "-", "zero_redundancy_optimizer"]], "torch.distributed.optim.DistributedOptimizer": [[32, 3, 1, "", "step"]], "torch.distributed.optim.PostLocalSGDOptimizer": [[32, 3, 1, "", "load_state_dict"], [32, 3, 1, "", "state_dict"], [32, 3, 1, "", "step"]], "torch.distributed.optim.ZeroRedundancyOptimizer": [[32, 3, 1, "", "add_param_group"], [32, 3, 1, "", "consolidate_state_dict"], [32, 4, 1, "", "join_device"], [32, 3, 1, "", "join_hook"], [32, 4, 1, "", "join_process_group"], [32, 3, 1, "", "load_state_dict"], [32, 3, 1, "", "state_dict"], [32, 3, 1, "", "step"]], "torch.distributed.pipelining": [[33, 1, 1, "", "Pipe"], [33, 1, 1, "", "SplitPoint"], [33, 0, 0, "-", "microbatch"], [33, 5, 1, "", "pipe_split"], [33, 5, 1, "", "pipeline"], [33, 0, 0, "-", "schedules"], [33, 0, 0, "-", "stage"]], "torch.distributed.pipelining.microbatch": [[33, 1, 1, "", "TensorChunkSpec"], [33, 5, 1, "", "merge_chunks"], [33, 5, 1, "", "split_args_kwargs_into_chunks"]], "torch.distributed.pipelining.schedules": [[33, 1, 1, "", "PipelineScheduleMulti"], [33, 1, 1, "", "PipelineScheduleSingle"], [33, 1, 1, "", "Schedule1F1B"], [33, 1, 1, "", "ScheduleGPipe"], [33, 1, 1, "", "ScheduleInterleaved1F1B"], [33, 1, 1, "", "ScheduleLoopedBFS"]], "torch.distributed.pipelining.schedules.PipelineScheduleMulti": [[33, 3, 1, "", "step"]], "torch.distributed.pipelining.schedules.PipelineScheduleSingle": [[33, 3, 1, "", "step"]], "torch.distributed.pipelining.stage": [[33, 1, 1, "", "PipelineStage"], [33, 5, 1, "", "build_stage"]], "torch.distributed.rpc": [[2075, 1, 1, "", "BackendType"], [2075, 1, 1, "", "PyRRef"], [2075, 1, 1, "", "RpcBackendOptions"], [2075, 1, 1, "", "TensorPipeRpcBackendOptions"], [2075, 1, 1, "", "WorkerInfo"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "backend_registry"], [28, 0, 0, "-", "constants"], [28, 0, 0, "-", "functions"], [2075, 5, 1, "", "get_worker_info"], [2075, 5, 1, "", "init_rpc"], [28, 0, 0, "-", "internal"], [28, 0, 0, "-", "options"], [2075, 5, 1, "", "remote"], [2075, 5, 1, "", "rpc_async"], [2075, 5, 1, "", "rpc_sync"], [28, 0, 0, "-", "rref_proxy"], [28, 0, 0, "-", "server_process_global_profiler"], 
[2075, 5, 1, "", "shutdown"]], "torch.distributed.rpc.PyRRef": [[2075, 3, 1, "", "backward"], [2075, 3, 1, "", "confirmed_by_owner"], [2075, 3, 1, "", "is_owner"], [2075, 3, 1, "", "local_value"], [2075, 3, 1, "", "owner"], [2075, 3, 1, "", "owner_name"], [2075, 3, 1, "", "remote"], [2075, 3, 1, "", "rpc_async"], [2075, 3, 1, "", "rpc_sync"], [2075, 3, 1, "", "to_here"]], "torch.distributed.rpc.RpcBackendOptions": [[2075, 4, 1, "", "init_method"], [2075, 4, 1, "", "rpc_timeout"]], "torch.distributed.rpc.TensorPipeRpcBackendOptions": [[2075, 4, 1, "", "device_maps"], [2075, 4, 1, "", "devices"], [2075, 4, 1, "", "init_method"], [2075, 4, 1, "", "num_worker_threads"], [2075, 4, 1, "", "rpc_timeout"], [2075, 3, 1, "", "set_device_map"], [2075, 3, 1, "", "set_devices"]], "torch.distributed.rpc.WorkerInfo": [[2075, 4, 1, "", "id"], [2075, 4, 1, "", "name"]], "torch.distributed.rpc.functions": [[2075, 5, 1, "", "async_execution"]], "torch.distributed.tensor": [[34, 0, 0, "-", "parallel"]], "torch.distributed.tensor.parallel": [[34, 1, 1, "", "ColwiseParallel"], [34, 1, 1, "", "PrepareModuleInput"], [34, 1, 1, "", "PrepareModuleOutput"], [34, 1, 1, "", "RowwiseParallel"], [34, 1, 1, "", "SequenceParallel"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "ddp"], [28, 0, 0, "-", "fsdp"], [28, 0, 0, "-", "input_reshard"], [28, 0, 0, "-", "loss"], [34, 5, 1, "", "loss_parallel"], [34, 5, 1, "", "parallelize_module"], [28, 0, 0, "-", "style"]], "torch.distributions": [[35, 0, 0, "-", "bernoulli"], [35, 0, 0, "-", "beta"], [35, 0, 0, "-", "binomial"], [35, 0, 0, "-", "categorical"], [35, 0, 0, "-", "cauchy"], [35, 0, 0, "-", "chi2"], [35, 0, 0, "-", "constraint_registry"], [35, 0, 0, "-", "constraints"], [35, 0, 0, "-", "continuous_bernoulli"], [35, 0, 0, "-", "dirichlet"], [35, 0, 0, "-", "distribution"], [35, 0, 0, "-", "exp_family"], [35, 0, 0, "-", "exponential"], [35, 0, 0, "-", "fishersnedecor"], [35, 0, 0, "-", "gamma"], [35, 0, 0, "-", "geometric"], [35, 0, 0, "-", "gumbel"], [35, 0, 0, "-", "half_cauchy"], [35, 0, 0, "-", "half_normal"], [35, 0, 0, "-", "independent"], [35, 0, 0, "-", "inverse_gamma"], [35, 0, 0, "-", "kl"], [35, 0, 0, "-", "kumaraswamy"], [35, 0, 0, "-", "laplace"], [35, 0, 0, "-", "lkj_cholesky"], [35, 0, 0, "-", "log_normal"], [35, 0, 0, "-", "logistic_normal"], [35, 0, 0, "-", "lowrank_multivariate_normal"], [35, 0, 0, "-", "mixture_same_family"], [35, 0, 0, "-", "multinomial"], [35, 0, 0, "-", "multivariate_normal"], [35, 0, 0, "-", "negative_binomial"], [35, 0, 0, "-", "normal"], [35, 0, 0, "-", "one_hot_categorical"], [35, 0, 0, "-", "pareto"], [35, 0, 0, "-", "poisson"], [35, 0, 0, "-", "relaxed_bernoulli"], [35, 0, 0, "-", "relaxed_categorical"], [35, 0, 0, "-", "studentT"], [35, 0, 0, "-", "transformed_distribution"], [35, 0, 0, "-", "transforms"], [35, 0, 0, "-", "uniform"], [35, 0, 0, "-", "utils"], [35, 0, 0, "-", "von_mises"], [35, 0, 0, "-", "weibull"], [35, 0, 0, "-", "wishart"]], "torch.distributions.bernoulli": [[35, 1, 1, "", "Bernoulli"]], "torch.distributions.bernoulli.Bernoulli": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_enumerate_support"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.beta": [[35, 1, 1, "", "Beta"]], 
"torch.distributions.beta.Beta": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "concentration0"], [35, 4, 1, "", "concentration1"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.binomial": [[35, 1, 1, "", "Binomial"]], "torch.distributions.binomial.Binomial": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_enumerate_support"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.categorical": [[35, 1, 1, "", "Categorical"]], "torch.distributions.categorical.Categorical": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_enumerate_support"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.cauchy": [[35, 1, 1, "", "Cauchy"]], "torch.distributions.cauchy.Cauchy": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.chi2": [[35, 1, 1, "", "Chi2"]], "torch.distributions.chi2.Chi2": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "df"], [35, 3, 1, "", "expand"]], "torch.distributions.constraint_registry": [[35, 1, 1, "", "ConstraintRegistry"]], "torch.distributions.constraint_registry.ConstraintRegistry": [[35, 3, 1, "", "register"]], "torch.distributions.constraints": [[35, 1, 1, "", "Constraint"], [35, 2, 1, "", "cat"], [35, 2, 1, "", "dependent_property"], [35, 2, 1, "", "greater_than"], [35, 2, 1, "", "greater_than_eq"], [35, 2, 1, "", "half_open_interval"], [35, 2, 1, "", "independent"], [35, 2, 1, "", "integer_interval"], [35, 2, 1, "", "interval"], [35, 2, 1, "", "less_than"], [35, 2, 1, "", "multinomial"], [35, 2, 1, "", "stack"]], "torch.distributions.constraints.Constraint": [[35, 3, 1, "", "check"]], "torch.distributions.continuous_bernoulli": [[35, 1, 1, "", "ContinuousBernoulli"]], "torch.distributions.continuous_bernoulli.ContinuousBernoulli": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.dirichlet": [[35, 1, 1, "", "Dirichlet"]], "torch.distributions.dirichlet.Dirichlet": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], 
[35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.distribution": [[35, 1, 1, "", "Distribution"]], "torch.distributions.distribution.Distribution": [[35, 4, 1, "", "arg_constraints"], [35, 4, 1, "", "batch_shape"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 4, 1, "", "event_shape"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "perplexity"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 3, 1, "", "sample_n"], [35, 3, 1, "", "set_default_validate_args"], [35, 4, 1, "", "stddev"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.exp_family": [[35, 1, 1, "", "ExponentialFamily"]], "torch.distributions.exp_family.ExponentialFamily": [[35, 3, 1, "", "entropy"]], "torch.distributions.exponential": [[35, 1, 1, "", "Exponential"]], "torch.distributions.exponential.Exponential": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.fishersnedecor": [[35, 1, 1, "", "FisherSnedecor"]], "torch.distributions.fishersnedecor.FisherSnedecor": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.gamma": [[35, 1, 1, "", "Gamma"]], "torch.distributions.gamma.Gamma": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.geometric": [[35, 1, 1, "", "Geometric"]], "torch.distributions.geometric.Geometric": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.gumbel": [[35, 1, 1, "", "Gumbel"]], "torch.distributions.gumbel.Gumbel": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.half_cauchy": [[35, 1, 1, "", "HalfCauchy"]], "torch.distributions.half_cauchy.HalfCauchy": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "scale"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.half_normal": [[35, 1, 1, "", "HalfNormal"]], "torch.distributions.half_normal.HalfNormal": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], 
[35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "scale"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.independent": [[35, 1, 1, "", "Independent"]], "torch.distributions.independent.Independent": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 4, 1, "", "has_enumerate_support"], [35, 4, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.inverse_gamma": [[35, 1, 1, "", "InverseGamma"]], "torch.distributions.inverse_gamma.InverseGamma": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "concentration"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "rate"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.kl": [[35, 5, 1, "", "kl_divergence"], [35, 5, 1, "", "register_kl"]], "torch.distributions.kumaraswamy": [[35, 1, 1, "", "Kumaraswamy"]], "torch.distributions.kumaraswamy.Kumaraswamy": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.laplace": [[35, 1, 1, "", "Laplace"]], "torch.distributions.laplace.Laplace": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.lkj_cholesky": [[35, 1, 1, "", "LKJCholesky"]], "torch.distributions.lkj_cholesky.LKJCholesky": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"]], "torch.distributions.log_normal": [[35, 1, 1, "", "LogNormal"]], "torch.distributions.log_normal.LogNormal": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "loc"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "scale"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.lowrank_multivariate_normal": [[35, 1, 1, "", "LowRankMultivariateNormal"]], "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "covariance_matrix"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "precision_matrix"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "scale_tril"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.mixture_same_family": [[35, 1, 1, "", "MixtureSameFamily"]], "torch.distributions.mixture_same_family.MixtureSameFamily": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 4, 1, "", "component_distribution"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 
1, "", "mixture_distribution"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.multinomial": [[35, 1, 1, "", "Multinomial"]], "torch.distributions.multinomial.Multinomial": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 2, 1, "", "total_count"], [35, 4, 1, "", "variance"]], "torch.distributions.multivariate_normal": [[35, 1, 1, "", "MultivariateNormal"]], "torch.distributions.multivariate_normal.MultivariateNormal": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "covariance_matrix"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "precision_matrix"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "scale_tril"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.negative_binomial": [[35, 1, 1, "", "NegativeBinomial"]], "torch.distributions.negative_binomial.NegativeBinomial": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.normal": [[35, 1, 1, "", "Normal"]], "torch.distributions.normal.Normal": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.one_hot_categorical": [[35, 1, 1, "", "OneHotCategorical"]], "torch.distributions.one_hot_categorical.OneHotCategorical": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_enumerate_support"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.pareto": [[35, 1, 1, "", "Pareto"]], "torch.distributions.pareto.Pareto": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.poisson": [[35, 1, 1, "", "Poisson"]], "torch.distributions.poisson.Poisson": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.relaxed_bernoulli": [[35, 1, 1, "", "LogitRelaxedBernoulli"], [35, 1, 1, "", "RelaxedBernoulli"]], "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "rsample"], [35, 2, 1, 
"", "support"]], "torch.distributions.relaxed_bernoulli.RelaxedBernoulli": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "probs"], [35, 2, 1, "", "support"], [35, 4, 1, "", "temperature"]], "torch.distributions.relaxed_categorical": [[35, 1, 1, "", "RelaxedOneHotCategorical"]], "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "probs"], [35, 2, 1, "", "support"], [35, 4, 1, "", "temperature"]], "torch.distributions.studentT": [[35, 1, 1, "", "StudentT"]], "torch.distributions.studentT.StudentT": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.transformed_distribution": [[35, 1, 1, "", "TransformedDistribution"]], "torch.distributions.transformed_distribution.TransformedDistribution": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "expand"], [35, 4, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"]], "torch.distributions.transforms": [[35, 1, 1, "", "AbsTransform"], [35, 1, 1, "", "AffineTransform"], [35, 1, 1, "", "CatTransform"], [35, 1, 1, "", "ComposeTransform"], [35, 1, 1, "", "CorrCholeskyTransform"], [35, 1, 1, "", "CumulativeDistributionTransform"], [35, 1, 1, "", "ExpTransform"], [35, 1, 1, "", "IndependentTransform"], [35, 1, 1, "", "LowerCholeskyTransform"], [35, 1, 1, "", "PositiveDefiniteTransform"], [35, 1, 1, "", "PowerTransform"], [35, 1, 1, "", "ReshapeTransform"], [35, 1, 1, "", "SigmoidTransform"], [35, 1, 1, "", "SoftmaxTransform"], [35, 1, 1, "", "SoftplusTransform"], [35, 1, 1, "", "StackTransform"], [35, 1, 1, "", "StickBreakingTransform"], [35, 1, 1, "", "TanhTransform"], [35, 1, 1, "", "Transform"]], "torch.distributions.transforms.Transform": [[35, 3, 1, "", "forward_shape"], [35, 4, 1, "", "inv"], [35, 3, 1, "", "inverse_shape"], [35, 3, 1, "", "log_abs_det_jacobian"], [35, 4, 1, "", "sign"]], "torch.distributions.uniform": [[35, 1, 1, "", "Uniform"]], "torch.distributions.uniform.Uniform": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "stddev"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.von_mises": [[35, 1, 1, "", "VonMises"]], "torch.distributions.von_mises.VonMises": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.weibull": [[35, 1, 1, "", "Weibull"]], "torch.distributions.weibull.Weibull": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.wishart": [[35, 1, 1, "", "Wishart"]], 
"torch.distributions.wishart.Wishart": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "covariance_matrix"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "precision_matrix"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "scale_tril"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.export": [[52, 2, 1, "", "Constraint"], [52, 1, 1, "", "ExportBackwardSignature"], [52, 1, 1, "", "ExportGraphSignature"], [52, 1, 1, "", "ExportedProgram"], [52, 1, 1, "", "ModuleCallEntry"], [52, 1, 1, "", "ModuleCallSignature"], [52, 0, 0, "-", "custom_obj"], [52, 5, 1, "", "dims"], [52, 0, 0, "-", "dynamic_shapes"], [52, 5, 1, "", "export"], [52, 0, 0, "-", "exported_program"], [52, 0, 0, "-", "graph_signature"], [52, 5, 1, "", "load"], [52, 5, 1, "", "register_dataclass"], [52, 5, 1, "", "save"], [52, 0, 0, "-", "unflatten"]], "torch.export.ExportedProgram": [[52, 3, 1, "", "buffers"], [52, 3, 1, "", "module"], [52, 3, 1, "", "named_buffers"], [52, 3, 1, "", "named_parameters"], [52, 3, 1, "", "parameters"], [52, 3, 1, "", "run_decompositions"]], "torch.export.dynamic_shapes": [[52, 5, 1, "", "Dim"], [52, 1, 1, "", "ShapesCollection"], [52, 5, 1, "", "dynamic_dim"], [52, 5, 1, "", "refine_dynamic_shapes_from_suggested_fixes"]], "torch.export.dynamic_shapes.ShapesCollection": [[52, 3, 1, "", "dynamic_shapes"]], "torch.export.graph_signature": [[52, 1, 1, "", "CustomObjArgument"], [52, 1, 1, "", "ExportGraphSignature"], [52, 1, 1, "", "InputKind"], [52, 1, 1, "", "InputSpec"], [52, 1, 1, "", "OutputKind"], [52, 1, 1, "", "OutputSpec"]], "torch.export.graph_signature.ExportGraphSignature": [[52, 3, 1, "", "get_replace_hook"], [52, 3, 1, "", "replace_all_uses"]], "torch.export.unflatten": [[52, 1, 1, "", "FlatArgsAdapter"], [52, 1, 1, "", "InterpreterModule"], [52, 5, 1, "", "unflatten"]], "torch.export.unflatten.FlatArgsAdapter": [[52, 3, 1, "", "adapt"]], "torch.fft": [[1124, 5, 1, "", "fft"], [1125, 5, 1, "", "fft2"], [1126, 5, 1, "", "fftfreq"], [1127, 5, 1, "", "fftn"], [1128, 5, 1, "", "fftshift"], [1129, 5, 1, "", "hfft"], [1130, 5, 1, "", "hfft2"], [1131, 5, 1, "", "hfftn"], [1132, 5, 1, "", "ifft"], [1133, 5, 1, "", "ifft2"], [1134, 5, 1, "", "ifftn"], [1135, 5, 1, "", "ifftshift"], [1136, 5, 1, "", "ihfft"], [1137, 5, 1, "", "ihfft2"], [1138, 5, 1, "", "ihfftn"], [1139, 5, 1, "", "irfft"], [1140, 5, 1, "", "irfft2"], [1141, 5, 1, "", "irfftn"], [1142, 5, 1, "", "rfft"], [1143, 5, 1, "", "rfft2"], [1144, 5, 1, "", "rfftfreq"], [1145, 5, 1, "", "rfftn"]], "torch.func": [[1165, 5, 1, "", "functional_call"], [1166, 5, 1, "", "functionalize"], [1167, 5, 1, "", "grad"], [1168, 5, 1, "", "grad_and_value"], [1169, 5, 1, "", "hessian"], [1170, 5, 1, "", "jacfwd"], [1171, 5, 1, "", "jacrev"], [1172, 5, 1, "", "jvp"], [1173, 5, 1, "", "linearize"], [1174, 5, 1, "", "replace_all_batch_norm_modules_"], [1175, 5, 1, "", "stack_module_state"], [1176, 5, 1, "", "vjp"], [1177, 5, 1, "", "vmap"]], "torch.futures": [[63, 1, 1, "", "Future"], [63, 5, 1, "", "collect_all"], [63, 5, 1, "", "wait_all"]], "torch.futures.Future": [[63, 3, 1, "", "add_done_callback"], [63, 3, 1, "", "done"], [63, 3, 1, "", "set_exception"], [63, 3, 1, "", "set_result"], [63, 3, 1, "", "then"], [63, 3, 1, "", "value"], [63, 3, 1, "", "wait"]], "torch.fx": [[64, 1, 1, "", "Graph"], [64, 1, 1, "", "GraphModule"], [64, 1, 1, "", "Interpreter"], [64, 1, 1, "", "Node"], [64, 1, 1, "", "Proxy"], [64, 1, 1, 
"", "Tracer"], [64, 1, 1, "", "Transformer"], [64, 0, 0, "-", "annotate"], [64, 0, 0, "-", "config"], [64, 0, 0, "-", "experimental"], [64, 0, 0, "-", "graph"], [64, 0, 0, "-", "graph_module"], [64, 0, 0, "-", "immutable_collections"], [64, 0, 0, "-", "interpreter"], [64, 0, 0, "-", "node"], [64, 0, 0, "-", "operator_schemas"], [64, 0, 0, "-", "passes"], [64, 0, 0, "-", "proxy"], [64, 5, 1, "", "replace_pattern"], [64, 0, 0, "-", "subgraph_rewriter"], [64, 5, 1, "", "symbolic_trace"], [64, 0, 0, "-", "tensor_type"], [64, 0, 0, "-", "traceback"], [64, 5, 1, "", "wrap"]], "torch.fx.Graph": [[64, 3, 1, "", "__init__"], [64, 3, 1, "", "call_function"], [64, 3, 1, "", "call_method"], [64, 3, 1, "", "call_module"], [64, 3, 1, "", "create_node"], [64, 3, 1, "", "eliminate_dead_code"], [64, 3, 1, "", "erase_node"], [64, 3, 1, "", "find_nodes"], [64, 3, 1, "", "get_attr"], [64, 3, 1, "", "graph_copy"], [64, 3, 1, "", "inserting_after"], [64, 3, 1, "", "inserting_before"], [64, 3, 1, "", "lint"], [64, 3, 1, "", "node_copy"], [64, 4, 1, "", "nodes"], [64, 3, 1, "", "on_generate_code"], [64, 3, 1, "", "output"], [64, 3, 1, "", "placeholder"], [64, 3, 1, "", "print_tabular"], [64, 3, 1, "", "process_inputs"], [64, 3, 1, "", "process_outputs"], [64, 3, 1, "", "python_code"], [64, 3, 1, "", "set_codegen"]], "torch.fx.GraphModule": [[64, 3, 1, "", "__init__"], [64, 3, 1, "", "add_submodule"], [64, 4, 1, "", "code"], [64, 3, 1, "", "delete_all_unused_submodules"], [64, 3, 1, "", "delete_submodule"], [64, 4, 1, "", "graph"], [64, 3, 1, "", "print_readable"], [64, 3, 1, "", "recompile"], [64, 3, 1, "", "to_folder"]], "torch.fx.Interpreter": [[64, 3, 1, "", "boxed_run"], [64, 3, 1, "", "call_function"], [64, 3, 1, "", "call_method"], [64, 3, 1, "", "call_module"], [64, 3, 1, "", "fetch_args_kwargs_from_env"], [64, 3, 1, "", "fetch_attr"], [64, 3, 1, "", "get_attr"], [64, 3, 1, "", "map_nodes_to_values"], [64, 3, 1, "", "output"], [64, 3, 1, "", "placeholder"], [64, 3, 1, "", "run"], [64, 3, 1, "", "run_node"]], "torch.fx.Node": [[64, 4, 1, "", "all_input_nodes"], [64, 3, 1, "", "append"], [64, 4, 1, "", "args"], [64, 3, 1, "", "format_node"], [64, 3, 1, "", "insert_arg"], [64, 3, 1, "", "is_impure"], [64, 4, 1, "", "kwargs"], [64, 4, 1, "", "next"], [64, 3, 1, "", "normalized_arguments"], [64, 3, 1, "", "prepend"], [64, 4, 1, "", "prev"], [64, 3, 1, "", "replace_all_uses_with"], [64, 3, 1, "", "replace_input_with"], [64, 4, 1, "", "stack_trace"], [64, 3, 1, "", "update_arg"], [64, 3, 1, "", "update_kwarg"]], "torch.fx.Tracer": [[64, 3, 1, "", "call_module"], [64, 3, 1, "", "create_arg"], [64, 3, 1, "", "create_args_for_root"], [64, 3, 1, "", "create_node"], [64, 3, 1, "", "create_proxy"], [64, 3, 1, "", "get_fresh_qualname"], [64, 3, 1, "", "getattr"], [64, 3, 1, "", "is_leaf_module"], [64, 3, 1, "", "iter"], [64, 3, 1, "", "keys"], [64, 3, 1, "", "path_of_module"], [64, 3, 1, "", "proxy"], [64, 3, 1, "", "to_bool"], [64, 3, 1, "", "trace"]], "torch.fx.Transformer": [[64, 3, 1, "", "call_function"], [64, 3, 1, "", "call_module"], [64, 3, 1, "", "get_attr"], [64, 3, 1, "", "placeholder"], [64, 3, 1, "", "transform"]], "torch.fx.experimental": [[64, 0, 0, "-", "accelerator_partitioner"], [64, 0, 0, "-", "const_fold"], [64, 0, 0, "-", "debug"], [64, 0, 0, "-", "graph_gradual_typechecker"], [64, 0, 0, "-", "merge_matmul"], [64, 0, 0, "-", "meta_tracer"], [64, 0, 0, "-", "migrate_gradual_types"], [64, 0, 0, "-", "normalize"], [64, 0, 0, "-", "optimization"], [64, 0, 0, "-", "partitioner_utils"], [64, 0, 0, "-", 
"proxy_tensor"], [64, 0, 0, "-", "recording"], [64, 0, 0, "-", "refinement_types"], [64, 0, 0, "-", "rewriter"], [64, 0, 0, "-", "schema_type_annotation"], [64, 0, 0, "-", "sym_node"], [65, 0, 0, "-", "symbolic_shapes"], [64, 0, 0, "-", "unification"], [64, 0, 0, "-", "unify_refinements"], [64, 0, 0, "-", "validator"]], "torch.fx.experimental.migrate_gradual_types": [[64, 0, 0, "-", "constraint"], [64, 0, 0, "-", "constraint_generator"], [64, 0, 0, "-", "constraint_transformation"], [64, 0, 0, "-", "operation"], [64, 0, 0, "-", "transform_to_z3"], [64, 0, 0, "-", "util"], [64, 0, 0, "-", "z3_types"]], "torch.fx.experimental.symbolic_shapes": [[1178, 1, 1, "", "CallMethodKey"], [1179, 1, 1, "", "ConvertIntKey"], [1180, 1, 1, "", "DimConstraints"], [1181, 1, 1, "", "DimDynamic"], [1182, 1, 1, "", "DivideByKey"], [1183, 1, 1, "", "EqualityConstraint"], [1184, 1, 1, "", "InnerTensorKey"], [1185, 1, 1, "", "PropagateUnbackedSymInts"], [1186, 1, 1, "", "RelaxedUnspecConstraint"], [1187, 1, 1, "", "ShapeEnv"], [1188, 1, 1, "", "ShapeEnvSettings"], [1189, 1, 1, "", "StatefulSymbolicContext"], [1190, 1, 1, "", "StatelessSymbolicContext"], [1191, 1, 1, "", "StrictMinMaxConstraint"], [1192, 1, 1, "", "SubclassSymbolicContext"], [1193, 1, 1, "", "SymbolicContext"], [1194, 5, 1, "", "canonicalize_bool_expr"], [1195, 5, 1, "", "check_consistent"], [1196, 5, 1, "", "compute_unbacked_bindings"], [1197, 5, 1, "", "constrain_range"], [1198, 5, 1, "", "constrain_unify"], [1199, 5, 1, "", "definitely_false"], [1200, 5, 1, "", "definitely_true"], [1201, 5, 1, "", "guard_size_oblivious"], [1202, 5, 1, "", "has_free_symbols"], [1203, 5, 1, "", "hint_int"], [1204, 5, 1, "", "is_concrete_bool"], [1205, 5, 1, "", "is_concrete_int"], [1206, 5, 1, "", "lru_cache"], [1207, 5, 1, "", "parallel_and"], [1208, 5, 1, "", "parallel_or"], [1209, 5, 1, "", "rebind_unbacked"], [1210, 5, 1, "", "resolve_unbacked_bindings"], [1211, 5, 1, "", "statically_known_true"], [1212, 5, 1, "", "sym_eq"]], "torch.fx.experimental.symbolic_shapes.CallMethodKey": [[1178, 3, 1, "", "get"]], "torch.fx.experimental.symbolic_shapes.ConvertIntKey": [[1179, 3, 1, "", "get"]], "torch.fx.experimental.symbolic_shapes.DimConstraints": [[1180, 3, 1, "", "add"], [1180, 3, 1, "", "add_equality"], [1180, 3, 1, "", "forced_specializations"], [1180, 3, 1, "", "prettify_results"], [1180, 3, 1, "", "remove_redundant_dynamic_results"], [1180, 3, 1, "", "rewrite_with_congruences"], [1180, 3, 1, "", "solve"]], "torch.fx.experimental.symbolic_shapes.DivideByKey": [[1182, 3, 1, "", "get"]], "torch.fx.experimental.symbolic_shapes.InnerTensorKey": [[1184, 3, 1, "", "get"]], "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts": [[1185, 3, 1, "", "boxed_run"], [1185, 3, 1, "", "call_function"], [1185, 3, 1, "", "call_method"], [1185, 3, 1, "", "call_module"], [1185, 3, 1, "", "fetch_args_kwargs_from_env"], [1185, 3, 1, "", "fetch_attr"], [1185, 3, 1, "", "get_attr"], [1185, 3, 1, "", "map_nodes_to_values"], [1185, 3, 1, "", "output"], [1185, 3, 1, "", "placeholder"], [1185, 3, 1, "", "run"], [1185, 3, 1, "", "run_node"]], "torch.fx.experimental.symbolic_shapes.ShapeEnv": [[1187, 3, 1, "", "add_var_to_val"], [1187, 3, 1, "", "bind_symbols"], [1187, 3, 1, "", "bound_sympy"], [1187, 3, 1, "", "check_equal"], [1187, 3, 1, "", "cleanup"], [1187, 3, 1, "", "create_symbol"], [1187, 3, 1, "", "create_symbolic_sizes_strides_storage_offset"], [1187, 3, 1, "", "create_symboolnode"], [1187, 3, 1, "", "create_symfloatnode"], [1187, 3, 1, "", "create_symintnode"], [1187, 
3, 1, "", "create_unbacked_symbool"], [1187, 3, 1, "", "create_unbacked_symfloat"], [1187, 3, 1, "", "create_unbacked_symint"], [1187, 3, 1, "", "create_unspecified_symbol"], [1187, 3, 1, "", "create_unspecified_symint_and_symbol"], [1187, 3, 1, "", "defer_runtime_assert"], [1187, 3, 1, "", "evaluate_expr"], [1187, 3, 1, "", "evaluate_guards_expression"], [1187, 3, 1, "", "evaluate_guards_for_args"], [1187, 3, 1, "", "format_guards"], [1187, 3, 1, "", "freeze"], [1187, 3, 1, "", "freeze_runtime_asserts"], [1187, 3, 1, "", "get_axioms"], [1187, 3, 1, "", "get_implications"], [1187, 3, 1, "", "get_nontrivial_guards"], [1187, 3, 1, "", "get_pruned_guards"], [1187, 3, 1, "", "ignore_fresh_unbacked_symbols"], [1187, 3, 1, "", "is_unbacked_symint"], [1187, 3, 1, "", "produce_guards"], [1187, 3, 1, "", "produce_guards_expression"], [1187, 3, 1, "", "replace"], [1187, 3, 1, "", "set_unbacked_var_to_val"], [1187, 3, 1, "", "simplify"], [1187, 3, 1, "", "size_hint"], [1187, 3, 1, "", "suppress_guards"]], "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint": [[1191, 3, 1, "", "render"]], "torch.fx.experimental.unification": [[64, 0, 0, "-", "core"], [64, 0, 0, "-", "dispatch"], [64, 0, 0, "-", "match"], [64, 0, 0, "-", "more"], [64, 0, 0, "-", "multipledispatch"], [64, 0, 0, "-", "unification_tools"], [64, 0, 0, "-", "utils"], [64, 0, 0, "-", "variable"]], "torch.fx.experimental.unification.multipledispatch": [[64, 0, 0, "-", "conflict"], [64, 0, 0, "-", "core"], [64, 0, 0, "-", "dispatcher"], [64, 0, 0, "-", "utils"], [64, 0, 0, "-", "variadic"]], "torch.fx.passes": [[64, 0, 0, "-", "annotate_getitem_nodes"], [64, 0, 0, "-", "backends"], [64, 0, 0, "-", "dialect"], [64, 0, 0, "-", "fake_tensor_prop"], [64, 0, 0, "-", "graph_drawer"], [64, 0, 0, "-", "graph_manipulation"], [64, 0, 0, "-", "graph_transform_observer"], [64, 0, 0, "-", "infra"], [64, 0, 0, "-", "net_min_base"], [64, 0, 0, "-", "operator_support"], [64, 0, 0, "-", "param_fetch"], [64, 0, 0, "-", "pass_manager"], [64, 0, 0, "-", "reinplace"], [64, 0, 0, "-", "runtime_assert"], [64, 0, 0, "-", "shape_prop"], [64, 0, 0, "-", "split_module"], [64, 0, 0, "-", "split_utils"], [64, 0, 0, "-", "splitter_base"], [64, 0, 0, "-", "tests"], [64, 0, 0, "-", "tools_common"], [64, 0, 0, "-", "utils"]], "torch.fx.passes.backends": [[64, 0, 0, "-", "cudagraphs"]], "torch.fx.passes.dialect": [[64, 0, 0, "-", "common"]], "torch.fx.passes.dialect.common": [[64, 0, 0, "-", "cse_pass"]], "torch.fx.passes.infra": [[64, 0, 0, "-", "partitioner"], [64, 0, 0, "-", "pass_base"], [64, 0, 0, "-", "pass_manager"]], "torch.fx.passes.tests": [[64, 0, 0, "-", "test_pass_manager"]], "torch.fx.passes.utils": [[64, 0, 0, "-", "common"], [64, 0, 0, "-", "fuser_utils"], [64, 0, 0, "-", "matcher_utils"], [64, 0, 0, "-", "matcher_with_name_node_map_utils"], [64, 0, 0, "-", "source_matcher_utils"]], "torch.hub": [[2011, 5, 1, "", "download_url_to_file"], [2011, 5, 1, "", "get_dir"], [2011, 5, 1, "", "help"], [2011, 5, 1, "", "list"], [2011, 5, 1, "", "load"], [2011, 5, 1, "", "load_state_dict_from_url"], [2011, 5, 1, "", "set_dir"]], "torch.jit": [[1270, 1, 1, "", "Attribute"], [1271, 1, 1, "", "ScriptFunction"], [1272, 1, 1, "", "ScriptModule"], [1273, 5, 1, "", "annotate"], [2013, 0, 0, "-", "annotations"], [1274, 5, 1, "", "enable_onednn_fusion"], [2013, 5, 1, "", "export"], [1275, 5, 1, "", "fork"], [1276, 5, 1, "", "freeze"], [2013, 0, 0, "-", "frontend"], [2013, 0, 0, "-", "generate_bytecode"], [1277, 5, 1, "", "ignore"], [1278, 5, 1, "", "interface"], [2015, 5, 
1, "", "is_scripting"], [2015, 5, 1, "", "is_tracing"], [1279, 5, 1, "", "isinstance"], [1280, 5, 1, "", "load"], [2013, 0, 0, "-", "mobile"], [1281, 5, 1, "", "onednn_fusion_enabled"], [1282, 5, 1, "", "optimize_for_inference"], [2013, 0, 0, "-", "quantized"], [1283, 5, 1, "", "save"], [1284, 5, 1, "", "script"], [1285, 5, 1, "", "script_if_tracing"], [1286, 5, 1, "", "set_fusion_strategy"], [1287, 1, 1, "", "strict_fusion"], [2014, 0, 0, "-", "supported_ops"], [1288, 5, 1, "", "trace"], [1289, 5, 1, "", "trace_module"], [2018, 0, 0, "-", "unsupported_tensor_ops"], [1290, 5, 1, "", "unused"], [1291, 5, 1, "", "wait"]], "torch.jit.Attribute": [[1270, 3, 1, "", "count"], [1270, 3, 1, "", "index"], [1270, 2, 1, "", "type"], [1270, 2, 1, "", "value"]], "torch.jit.ScriptFunction": [[1271, 3, 1, "", "get_debug_state"], [1271, 3, 1, "", "save"], [1271, 3, 1, "", "save_to_buffer"]], "torch.jit.ScriptModule": [[1272, 3, 1, "", "add_module"], [1272, 3, 1, "", "apply"], [1272, 3, 1, "", "bfloat16"], [1272, 3, 1, "", "buffers"], [1272, 3, 1, "", "children"], [1272, 4, 1, "", "code"], [1272, 4, 1, "", "code_with_constants"], [1272, 3, 1, "", "compile"], [1272, 3, 1, "", "cpu"], [1272, 3, 1, "", "cuda"], [1272, 3, 1, "", "double"], [1272, 3, 1, "", "eval"], [1272, 3, 1, "", "extra_repr"], [1272, 3, 1, "", "float"], [1272, 3, 1, "", "get_buffer"], [1272, 3, 1, "", "get_extra_state"], [1272, 3, 1, "", "get_parameter"], [1272, 3, 1, "", "get_submodule"], [1272, 4, 1, "", "graph"], [1272, 3, 1, "", "half"], [1272, 4, 1, "", "inlined_graph"], [1272, 3, 1, "", "ipu"], [1272, 3, 1, "", "load_state_dict"], [1272, 3, 1, "", "modules"], [1272, 3, 1, "", "named_buffers"], [1272, 3, 1, "", "named_children"], [1272, 3, 1, "", "named_modules"], [1272, 3, 1, "", "named_parameters"], [1272, 3, 1, "", "parameters"], [1272, 3, 1, "", "register_backward_hook"], [1272, 3, 1, "", "register_buffer"], [1272, 3, 1, "", "register_forward_hook"], [1272, 3, 1, "", "register_forward_pre_hook"], [1272, 3, 1, "", "register_full_backward_hook"], [1272, 3, 1, "", "register_full_backward_pre_hook"], [1272, 3, 1, "", "register_load_state_dict_post_hook"], [1272, 3, 1, "", "register_module"], [1272, 3, 1, "", "register_parameter"], [1272, 3, 1, "", "register_state_dict_pre_hook"], [1272, 3, 1, "", "requires_grad_"], [1272, 3, 1, "", "save"], [1272, 3, 1, "", "set_extra_state"], [1272, 3, 1, "", "share_memory"], [1272, 3, 1, "", "state_dict"], [1272, 3, 1, "", "to"], [1272, 3, 1, "", "to_empty"], [1272, 3, 1, "", "train"], [1272, 3, 1, "", "type"], [1272, 3, 1, "", "xpu"], [1272, 3, 1, "", "zero_grad"]], "torch.library": [[2020, 1, 1, "", "Library"], [2020, 5, 1, "", "custom_op"], [2020, 5, 1, "", "define"], [2020, 5, 1, "", "fallthrough_kernel"], [2020, 5, 1, "", "get_ctx"], [2020, 5, 1, "", "impl"], [2020, 5, 1, "", "impl_abstract"], [2020, 5, 1, "", "opcheck"], [2020, 5, 1, "", "register_autograd"], [2020, 5, 1, "", "register_fake"], [2020, 5, 1, "", "register_kernel"]], "torch.library.Library": [[2020, 3, 1, "", "define"], [2020, 3, 1, "", "impl"]], "torch.linalg": [[1302, 5, 1, "", "cholesky"], [1303, 5, 1, "", "cholesky_ex"], [1304, 5, 1, "", "cond"], [1305, 5, 1, "", "cross"], [1306, 5, 1, "", "det"], [1307, 5, 1, "", "diagonal"], [1308, 5, 1, "", "eig"], [1309, 5, 1, "", "eigh"], [1310, 5, 1, "", "eigvals"], [1311, 5, 1, "", "eigvalsh"], [1312, 5, 1, "", "householder_product"], [1313, 5, 1, "", "inv"], [1314, 5, 1, "", "inv_ex"], [1315, 5, 1, "", "ldl_factor"], [1316, 5, 1, "", "ldl_factor_ex"], [1317, 5, 1, "", "ldl_solve"], 
[1318, 5, 1, "", "lstsq"], [1319, 5, 1, "", "lu"], [1320, 5, 1, "", "lu_factor"], [1321, 5, 1, "", "lu_factor_ex"], [1322, 5, 1, "", "lu_solve"], [1323, 5, 1, "", "matmul"], [1324, 5, 1, "", "matrix_exp"], [1325, 5, 1, "", "matrix_norm"], [1326, 5, 1, "", "matrix_power"], [1327, 5, 1, "", "matrix_rank"], [1328, 5, 1, "", "multi_dot"], [1329, 5, 1, "", "norm"], [1330, 5, 1, "", "pinv"], [1331, 5, 1, "", "qr"], [1332, 5, 1, "", "slogdet"], [1333, 5, 1, "", "solve"], [1334, 5, 1, "", "solve_ex"], [1335, 5, 1, "", "solve_triangular"], [1336, 5, 1, "", "svd"], [1337, 5, 1, "", "svdvals"], [1338, 5, 1, "", "tensorinv"], [1339, 5, 1, "", "tensorsolve"], [1340, 5, 1, "", "vander"], [1341, 5, 1, "", "vecdot"], [1342, 5, 1, "", "vector_norm"]], "torch.masked": [[2023, 0, 0, "-", "maskedtensor"]], "torch.masked.maskedtensor": [[2023, 0, 0, "-", "binary"], [2023, 0, 0, "-", "core"], [2023, 0, 0, "-", "creation"], [2023, 0, 0, "-", "passthrough"], [2023, 0, 0, "-", "reductions"], [2023, 0, 0, "-", "unary"]], "torch.monitor": [[2029, 1, 1, "", "Aggregation"], [2029, 1, 1, "", "Event"], [2029, 1, 1, "", "EventHandlerHandle"], [2029, 1, 1, "", "Stat"], [2029, 1, 1, "", "TensorboardEventHandler"], [2029, 1, 1, "", "data_value_t"], [2029, 5, 1, "", "log_event"], [2029, 5, 1, "", "register_event_handler"], [2029, 5, 1, "", "unregister_event_handler"]], "torch.monitor.Aggregation": [[2029, 4, 1, "", "name"]], "torch.monitor.Event": [[2029, 3, 1, "", "__init__"], [2029, 4, 1, "", "data"], [2029, 4, 1, "", "name"], [2029, 4, 1, "", "timestamp"]], "torch.monitor.Stat": [[2029, 3, 1, "", "__init__"], [2029, 3, 1, "", "add"], [2029, 4, 1, "", "count"], [2029, 3, 1, "", "get"], [2029, 4, 1, "", "name"]], "torch.monitor.TensorboardEventHandler": [[2029, 3, 1, "", "__init__"]], "torch.mps": [[1381, 5, 1, "", "current_allocated_memory"], [1382, 5, 1, "", "device_count"], [1383, 5, 1, "", "driver_allocated_memory"], [1384, 5, 1, "", "empty_cache"], [2030, 0, 0, "-", "event"], [1386, 5, 1, "", "get_rng_state"], [1387, 5, 1, "", "manual_seed"], [2030, 0, 0, "-", "profiler"], [1391, 5, 1, "", "seed"], [1392, 5, 1, "", "set_per_process_memory_fraction"], [1393, 5, 1, "", "set_rng_state"], [1394, 5, 1, "", "synchronize"]], "torch.mps.event": [[1385, 1, 1, "", "Event"]], "torch.mps.event.Event": [[1385, 3, 1, "", "elapsed_time"], [1385, 3, 1, "", "query"], [1385, 3, 1, "", "record"], [1385, 3, 1, "", "synchronize"], [1385, 3, 1, "", "wait"]], "torch.mps.profiler": [[1388, 5, 1, "", "profile"], [1389, 5, 1, "", "start"], [1390, 5, 1, "", "stop"]], "torch.mtia": [[1396, 6, 1, "", "DeferredMtiaCallError"], [1397, 1, 1, "", "Event"], [1398, 1, 1, "", "Stream"], [1399, 1, 1, "", "StreamContext"], [1400, 5, 1, "", "current_device"], [1401, 5, 1, "", "current_stream"], [1402, 5, 1, "", "default_stream"], [1403, 1, 1, "", "device"], [1404, 5, 1, "", "device_count"], [1405, 5, 1, "", "init"], [1406, 5, 1, "", "is_available"], [1407, 5, 1, "", "is_initialized"], [1408, 5, 1, "", "set_stream"], [1409, 5, 1, "", "stream"], [1410, 5, 1, "", "synchronize"]], "torch.multiprocessing": [[2032, 1, 1, "", "SpawnContext"], [2032, 5, 1, "", "get_all_sharing_strategies"], [2032, 5, 1, "", "get_sharing_strategy"], [2032, 0, 0, "-", "pool"], [2032, 0, 0, "-", "queue"], [2032, 0, 0, "-", "reductions"], [2032, 5, 1, "", "set_sharing_strategy"], [2032, 0, 0, "-", "spawn"]], "torch.multiprocessing.SpawnContext": [[2032, 3, 1, "", "join"]], "torch.multiprocessing.spawn": [[2032, 5, 1, "", "spawn"]], "torch.nested": [[2035, 5, 1, "", "as_nested_tensor"], 
[2035, 5, 1, "", "nested_tensor"], [2035, 5, 1, "", "to_padded_tensor"]], "torch.nn": [[1427, 1, 1, "", "AdaptiveAvgPool1d"], [1428, 1, 1, "", "AdaptiveAvgPool2d"], [1429, 1, 1, "", "AdaptiveAvgPool3d"], [1430, 1, 1, "", "AdaptiveLogSoftmaxWithLoss"], [1431, 1, 1, "", "AdaptiveMaxPool1d"], [1432, 1, 1, "", "AdaptiveMaxPool2d"], [1433, 1, 1, "", "AdaptiveMaxPool3d"], [1434, 1, 1, "", "AlphaDropout"], [1435, 1, 1, "", "AvgPool1d"], [1436, 1, 1, "", "AvgPool2d"], [1437, 1, 1, "", "AvgPool3d"], [1438, 1, 1, "", "BCELoss"], [1439, 1, 1, "", "BCEWithLogitsLoss"], [1440, 1, 1, "", "BatchNorm1d"], [1441, 1, 1, "", "BatchNorm2d"], [1442, 1, 1, "", "BatchNorm3d"], [1443, 1, 1, "", "Bilinear"], [1444, 1, 1, "", "CELU"], [1445, 1, 1, "", "CTCLoss"], [1446, 1, 1, "", "ChannelShuffle"], [1447, 1, 1, "", "CircularPad1d"], [1448, 1, 1, "", "CircularPad2d"], [1449, 1, 1, "", "CircularPad3d"], [1450, 1, 1, "", "ConstantPad1d"], [1451, 1, 1, "", "ConstantPad2d"], [1452, 1, 1, "", "ConstantPad3d"], [1453, 1, 1, "", "Conv1d"], [1454, 1, 1, "", "Conv2d"], [1455, 1, 1, "", "Conv3d"], [1456, 1, 1, "", "ConvTranspose1d"], [1457, 1, 1, "", "ConvTranspose2d"], [1458, 1, 1, "", "ConvTranspose3d"], [1459, 1, 1, "", "CosineEmbeddingLoss"], [1460, 1, 1, "", "CosineSimilarity"], [1461, 1, 1, "", "CrossEntropyLoss"], [1462, 1, 1, "", "DataParallel"], [1463, 1, 1, "", "Dropout"], [1464, 1, 1, "", "Dropout1d"], [1465, 1, 1, "", "Dropout2d"], [1466, 1, 1, "", "Dropout3d"], [1467, 1, 1, "", "ELU"], [1468, 1, 1, "", "Embedding"], [1469, 1, 1, "", "EmbeddingBag"], [1470, 1, 1, "", "FeatureAlphaDropout"], [1471, 1, 1, "", "Flatten"], [1472, 1, 1, "", "Fold"], [1473, 1, 1, "", "FractionalMaxPool2d"], [1474, 1, 1, "", "FractionalMaxPool3d"], [1475, 1, 1, "", "GELU"], [1476, 1, 1, "", "GLU"], [1477, 1, 1, "", "GRU"], [1478, 1, 1, "", "GRUCell"], [1479, 1, 1, "", "GaussianNLLLoss"], [1480, 1, 1, "", "GroupNorm"], [1481, 1, 1, "", "Hardshrink"], [1482, 1, 1, "", "Hardsigmoid"], [1483, 1, 1, "", "Hardswish"], [1484, 1, 1, "", "Hardtanh"], [1485, 1, 1, "", "HingeEmbeddingLoss"], [1486, 1, 1, "", "HuberLoss"], [1487, 1, 1, "", "Identity"], [1488, 1, 1, "", "InstanceNorm1d"], [1489, 1, 1, "", "InstanceNorm2d"], [1490, 1, 1, "", "InstanceNorm3d"], [1491, 1, 1, "", "KLDivLoss"], [1492, 1, 1, "", "L1Loss"], [1493, 1, 1, "", "LPPool1d"], [1494, 1, 1, "", "LPPool2d"], [1495, 1, 1, "", "LPPool3d"], [1496, 1, 1, "", "LSTM"], [1497, 1, 1, "", "LSTMCell"], [1498, 1, 1, "", "LayerNorm"], [1499, 1, 1, "", "LazyBatchNorm1d"], [1500, 1, 1, "", "LazyBatchNorm2d"], [1501, 1, 1, "", "LazyBatchNorm3d"], [1502, 1, 1, "", "LazyConv1d"], [1503, 1, 1, "", "LazyConv2d"], [1504, 1, 1, "", "LazyConv3d"], [1505, 1, 1, "", "LazyConvTranspose1d"], [1506, 1, 1, "", "LazyConvTranspose2d"], [1507, 1, 1, "", "LazyConvTranspose3d"], [1508, 1, 1, "", "LazyInstanceNorm1d"], [1509, 1, 1, "", "LazyInstanceNorm2d"], [1510, 1, 1, "", "LazyInstanceNorm3d"], [1511, 1, 1, "", "LazyLinear"], [1512, 1, 1, "", "LeakyReLU"], [1513, 1, 1, "", "Linear"], [1514, 1, 1, "", "LocalResponseNorm"], [1515, 1, 1, "", "LogSigmoid"], [1516, 1, 1, "", "LogSoftmax"], [1517, 1, 1, "", "MSELoss"], [1518, 1, 1, "", "MarginRankingLoss"], [1519, 1, 1, "", "MaxPool1d"], [1520, 1, 1, "", "MaxPool2d"], [1521, 1, 1, "", "MaxPool3d"], [1522, 1, 1, "", "MaxUnpool1d"], [1523, 1, 1, "", "MaxUnpool2d"], [1524, 1, 1, "", "MaxUnpool3d"], [1525, 1, 1, "", "Mish"], [1526, 1, 1, "", "Module"], [1527, 1, 1, "", "ModuleDict"], [1528, 1, 1, "", "ModuleList"], [1529, 1, 1, "", "MultiLabelMarginLoss"], [1530, 1, 1, "", 
"MultiLabelSoftMarginLoss"], [1531, 1, 1, "", "MultiMarginLoss"], [1532, 1, 1, "", "MultiheadAttention"], [1533, 1, 1, "", "NLLLoss"], [1534, 1, 1, "", "PReLU"], [1535, 1, 1, "", "PairwiseDistance"], [1536, 1, 1, "", "ParameterDict"], [1537, 1, 1, "", "ParameterList"], [1538, 1, 1, "", "PixelShuffle"], [1539, 1, 1, "", "PixelUnshuffle"], [1540, 1, 1, "", "PoissonNLLLoss"], [1541, 1, 1, "", "RMSNorm"], [1542, 1, 1, "", "RNN"], [1543, 1, 1, "", "RNNBase"], [1544, 1, 1, "", "RNNCell"], [1545, 1, 1, "", "RReLU"], [1546, 1, 1, "", "ReLU"], [1547, 1, 1, "", "ReLU6"], [1548, 1, 1, "", "ReflectionPad1d"], [1549, 1, 1, "", "ReflectionPad2d"], [1550, 1, 1, "", "ReflectionPad3d"], [1551, 1, 1, "", "ReplicationPad1d"], [1552, 1, 1, "", "ReplicationPad2d"], [1553, 1, 1, "", "ReplicationPad3d"], [1554, 1, 1, "", "SELU"], [1555, 1, 1, "", "Sequential"], [1556, 1, 1, "", "SiLU"], [1557, 1, 1, "", "Sigmoid"], [1558, 1, 1, "", "SmoothL1Loss"], [1559, 1, 1, "", "SoftMarginLoss"], [1560, 1, 1, "", "Softmax"], [1561, 1, 1, "", "Softmax2d"], [1562, 1, 1, "", "Softmin"], [1563, 1, 1, "", "Softplus"], [1564, 1, 1, "", "Softshrink"], [1565, 1, 1, "", "Softsign"], [1566, 1, 1, "", "SyncBatchNorm"], [1567, 1, 1, "", "Tanh"], [1568, 1, 1, "", "Tanhshrink"], [1569, 1, 1, "", "Threshold"], [1570, 1, 1, "", "Transformer"], [1571, 1, 1, "", "TransformerDecoder"], [1572, 1, 1, "", "TransformerDecoderLayer"], [1573, 1, 1, "", "TransformerEncoder"], [1574, 1, 1, "", "TransformerEncoderLayer"], [1575, 1, 1, "", "TripletMarginLoss"], [1576, 1, 1, "", "TripletMarginWithDistanceLoss"], [1577, 1, 1, "", "Unflatten"], [1578, 1, 1, "", "Unfold"], [1579, 1, 1, "", "Upsample"], [1580, 1, 1, "", "UpsamplingBilinear2d"], [1581, 1, 1, "", "UpsamplingNearest2d"], [1582, 1, 1, "", "ZeroPad1d"], [1583, 1, 1, "", "ZeroPad2d"], [1584, 1, 1, "", "ZeroPad3d"], [2037, 0, 0, "-", "attention"], [2036, 0, 0, "-", "backends"], [2036, 0, 0, "-", "common_types"], [2036, 0, 0, "-", "cpp"], [2036, 0, 0, "-", "functional"], [2036, 0, 0, "-", "grad"], [2036, 0, 0, "-", "init"], [2073, 0, 0, "-", "intrinsic"], [2036, 0, 0, "-", "modules"], [2036, 0, 0, "-", "parallel"], [2036, 0, 0, "-", "parameter"], [2073, 0, 0, "-", "qat"], [2073, 0, 0, "-", "quantizable"], [2073, 0, 0, "-", "quantized"], [2036, 0, 0, "-", "utils"]], "torch.nn.AdaptiveLogSoftmaxWithLoss": [[1430, 3, 1, "", "log_prob"], [1430, 3, 1, "", "predict"]], "torch.nn.Embedding": [[1468, 3, 1, "", "from_pretrained"]], "torch.nn.EmbeddingBag": [[1469, 3, 1, "", "forward"], [1469, 3, 1, "", "from_pretrained"]], "torch.nn.LazyBatchNorm1d": [[1499, 2, 1, "", "cls_to_become"]], "torch.nn.LazyBatchNorm2d": [[1500, 2, 1, "", "cls_to_become"]], "torch.nn.LazyBatchNorm3d": [[1501, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConv1d": [[1502, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConv2d": [[1503, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConv3d": [[1504, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConvTranspose1d": [[1505, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConvTranspose2d": [[1506, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConvTranspose3d": [[1507, 2, 1, "", "cls_to_become"]], "torch.nn.LazyInstanceNorm1d": [[1508, 2, 1, "", "cls_to_become"]], "torch.nn.LazyInstanceNorm2d": [[1509, 2, 1, "", "cls_to_become"]], "torch.nn.LazyInstanceNorm3d": [[1510, 2, 1, "", "cls_to_become"]], "torch.nn.LazyLinear": [[1511, 2, 1, "", "cls_to_become"]], "torch.nn.Module": [[1526, 3, 1, "", "add_module"], [1526, 3, 1, "", "apply"], [1526, 3, 1, "", "bfloat16"], [1526, 3, 1, "", "buffers"], [1526, 3, 1, "", 
"children"], [1526, 3, 1, "", "compile"], [1526, 3, 1, "", "cpu"], [1526, 3, 1, "", "cuda"], [1526, 3, 1, "", "double"], [1526, 3, 1, "", "eval"], [1526, 3, 1, "", "extra_repr"], [1526, 3, 1, "", "float"], [1526, 3, 1, "", "forward"], [1526, 3, 1, "", "get_buffer"], [1526, 3, 1, "", "get_extra_state"], [1526, 3, 1, "", "get_parameter"], [1526, 3, 1, "", "get_submodule"], [1526, 3, 1, "", "half"], [1526, 3, 1, "", "ipu"], [1526, 3, 1, "", "load_state_dict"], [1526, 3, 1, "", "modules"], [1526, 3, 1, "", "named_buffers"], [1526, 3, 1, "", "named_children"], [1526, 3, 1, "", "named_modules"], [1526, 3, 1, "", "named_parameters"], [1526, 3, 1, "", "parameters"], [1526, 3, 1, "", "register_backward_hook"], [1526, 3, 1, "", "register_buffer"], [1526, 3, 1, "", "register_forward_hook"], [1526, 3, 1, "", "register_forward_pre_hook"], [1526, 3, 1, "", "register_full_backward_hook"], [1526, 3, 1, "", "register_full_backward_pre_hook"], [1526, 3, 1, "", "register_load_state_dict_post_hook"], [1526, 3, 1, "", "register_module"], [1526, 3, 1, "", "register_parameter"], [1526, 3, 1, "", "register_state_dict_pre_hook"], [1526, 3, 1, "", "requires_grad_"], [1526, 3, 1, "", "set_extra_state"], [1526, 3, 1, "", "share_memory"], [1526, 3, 1, "", "state_dict"], [1526, 3, 1, "", "to"], [1526, 3, 1, "", "to_empty"], [1526, 3, 1, "", "train"], [1526, 3, 1, "", "type"], [1526, 3, 1, "", "xpu"], [1526, 3, 1, "", "zero_grad"]], "torch.nn.ModuleDict": [[1527, 3, 1, "", "clear"], [1527, 3, 1, "", "items"], [1527, 3, 1, "", "keys"], [1527, 3, 1, "", "pop"], [1527, 3, 1, "", "update"], [1527, 3, 1, "", "values"]], "torch.nn.ModuleList": [[1528, 3, 1, "", "append"], [1528, 3, 1, "", "extend"], [1528, 3, 1, "", "insert"]], "torch.nn.MultiheadAttention": [[1532, 3, 1, "", "forward"], [1532, 3, 1, "", "merge_masks"]], "torch.nn.ParameterDict": [[1536, 3, 1, "", "clear"], [1536, 3, 1, "", "copy"], [1536, 3, 1, "", "fromkeys"], [1536, 3, 1, "", "get"], [1536, 3, 1, "", "items"], [1536, 3, 1, "", "keys"], [1536, 3, 1, "", "pop"], [1536, 3, 1, "", "popitem"], [1536, 3, 1, "", "setdefault"], [1536, 3, 1, "", "update"], [1536, 3, 1, "", "values"]], "torch.nn.ParameterList": [[1537, 3, 1, "", "append"], [1537, 3, 1, "", "extend"]], "torch.nn.RMSNorm": [[1541, 3, 1, "", "extra_repr"], [1541, 3, 1, "", "forward"], [1541, 3, 1, "", "reset_parameters"]], "torch.nn.RNNBase": [[1543, 3, 1, "", "flatten_parameters"]], "torch.nn.Sequential": [[1555, 3, 1, "", "append"]], "torch.nn.SyncBatchNorm": [[1566, 3, 1, "", "convert_sync_batchnorm"]], "torch.nn.Transformer": [[1570, 3, 1, "", "forward"], [1570, 3, 1, "", "generate_square_subsequent_mask"]], "torch.nn.TransformerDecoder": [[1571, 3, 1, "", "forward"]], "torch.nn.TransformerDecoderLayer": [[1572, 3, 1, "", "forward"]], "torch.nn.TransformerEncoder": [[1573, 3, 1, "", "forward"]], "torch.nn.TransformerEncoderLayer": [[1574, 3, 1, "", "forward"]], "torch.nn.attention": [[1585, 1, 1, "", "SDPBackend"], [2038, 0, 0, "-", "bias"], [1590, 5, 1, "", "sdpa_kernel"]], "torch.nn.attention.SDPBackend": [[1585, 4, 1, "", "name"]], "torch.nn.attention.bias": [[1586, 1, 1, "", "CausalBias"], [1587, 1, 1, "", "CausalVariant"], [1588, 5, 1, "", "causal_lower_right"], [1589, 5, 1, "", "causal_upper_left"]], "torch.nn.backends": [[2036, 0, 0, "-", "thnn"]], "torch.nn.functional": [[1591, 5, 1, "", "adaptive_avg_pool1d"], [1592, 5, 1, "", "adaptive_avg_pool2d"], [1593, 5, 1, "", "adaptive_avg_pool3d"], [1594, 5, 1, "", "adaptive_max_pool1d"], [1595, 5, 1, "", "adaptive_max_pool2d"], [1596, 5, 1, "", 
"adaptive_max_pool3d"], [1597, 5, 1, "", "affine_grid"], [1598, 5, 1, "", "alpha_dropout"], [1599, 5, 1, "", "avg_pool1d"], [1600, 5, 1, "", "avg_pool2d"], [1601, 5, 1, "", "avg_pool3d"], [1602, 5, 1, "", "batch_norm"], [1603, 5, 1, "", "bilinear"], [1604, 5, 1, "", "binary_cross_entropy"], [1605, 5, 1, "", "binary_cross_entropy_with_logits"], [1606, 5, 1, "", "celu"], [1607, 5, 1, "", "conv1d"], [1608, 5, 1, "", "conv2d"], [1609, 5, 1, "", "conv3d"], [1610, 5, 1, "", "conv_transpose1d"], [1611, 5, 1, "", "conv_transpose2d"], [1612, 5, 1, "", "conv_transpose3d"], [1613, 5, 1, "", "cosine_embedding_loss"], [1614, 5, 1, "", "cosine_similarity"], [1615, 5, 1, "", "cross_entropy"], [1616, 5, 1, "", "ctc_loss"], [1617, 5, 1, "", "dropout"], [1618, 5, 1, "", "dropout1d"], [1619, 5, 1, "", "dropout2d"], [1620, 5, 1, "", "dropout3d"], [1621, 5, 1, "", "elu"], [1622, 5, 1, "", "elu_"], [1623, 5, 1, "", "embedding"], [1624, 5, 1, "", "embedding_bag"], [1625, 5, 1, "", "feature_alpha_dropout"], [1626, 5, 1, "", "fold"], [1627, 5, 1, "", "fractional_max_pool2d"], [1628, 5, 1, "", "fractional_max_pool3d"], [1629, 5, 1, "", "gaussian_nll_loss"], [1630, 5, 1, "", "gelu"], [1631, 5, 1, "", "glu"], [1632, 5, 1, "", "grid_sample"], [1633, 5, 1, "", "group_norm"], [1634, 5, 1, "", "gumbel_softmax"], [1635, 5, 1, "", "hardshrink"], [1636, 5, 1, "", "hardsigmoid"], [1637, 5, 1, "", "hardswish"], [1638, 5, 1, "", "hardtanh"], [1639, 5, 1, "", "hardtanh_"], [1640, 5, 1, "", "hinge_embedding_loss"], [1641, 5, 1, "", "huber_loss"], [1642, 5, 1, "", "instance_norm"], [1643, 5, 1, "", "interpolate"], [1644, 5, 1, "", "kl_div"], [1645, 5, 1, "", "l1_loss"], [1646, 5, 1, "", "layer_norm"], [1647, 5, 1, "", "leaky_relu"], [1648, 5, 1, "", "leaky_relu_"], [1649, 5, 1, "", "linear"], [1650, 5, 1, "", "local_response_norm"], [1651, 5, 1, "", "log_softmax"], [1652, 5, 1, "", "logsigmoid"], [1653, 5, 1, "", "lp_pool1d"], [1654, 5, 1, "", "lp_pool2d"], [1655, 5, 1, "", "lp_pool3d"], [1656, 5, 1, "", "margin_ranking_loss"], [1657, 5, 1, "", "max_pool1d"], [1658, 5, 1, "", "max_pool2d"], [1659, 5, 1, "", "max_pool3d"], [1660, 5, 1, "", "max_unpool1d"], [1661, 5, 1, "", "max_unpool2d"], [1662, 5, 1, "", "max_unpool3d"], [1663, 5, 1, "", "mish"], [1664, 5, 1, "", "mse_loss"], [1665, 5, 1, "", "multi_margin_loss"], [1666, 5, 1, "", "multilabel_margin_loss"], [1667, 5, 1, "", "multilabel_soft_margin_loss"], [1668, 5, 1, "", "nll_loss"], [1669, 5, 1, "", "normalize"], [1670, 5, 1, "", "one_hot"], [1671, 5, 1, "", "pad"], [1672, 5, 1, "", "pairwise_distance"], [1673, 5, 1, "", "pdist"], [1674, 5, 1, "", "pixel_shuffle"], [1675, 5, 1, "", "pixel_unshuffle"], [1676, 5, 1, "", "poisson_nll_loss"], [1677, 5, 1, "", "prelu"], [1678, 5, 1, "", "relu"], [1679, 5, 1, "", "relu6"], [1680, 5, 1, "", "relu_"], [1681, 5, 1, "", "rms_norm"], [1682, 5, 1, "", "rrelu"], [1683, 5, 1, "", "rrelu_"], [1684, 5, 1, "", "scaled_dot_product_attention"], [1685, 5, 1, "", "selu"], [1686, 5, 1, "", "sigmoid"], [1687, 5, 1, "", "silu"], [1688, 5, 1, "", "smooth_l1_loss"], [1689, 5, 1, "", "soft_margin_loss"], [1690, 5, 1, "", "softmax"], [1691, 5, 1, "", "softmin"], [1692, 5, 1, "", "softplus"], [1693, 5, 1, "", "softshrink"], [1694, 5, 1, "", "softsign"], [1695, 5, 1, "", "tanh"], [1696, 5, 1, "", "tanhshrink"], [1697, 5, 1, "", "threshold"], [1698, 5, 1, "", "threshold_"], [1700, 5, 1, "", "triplet_margin_loss"], [1701, 5, 1, "", "triplet_margin_with_distance_loss"], [1702, 5, 1, "", "unfold"], [1703, 5, 1, "", "upsample"], [1704, 5, 1, "", 
"upsample_bilinear"], [1705, 5, 1, "", "upsample_nearest"]], "torch.nn.init": [[2040, 5, 1, "", "calculate_gain"], [2040, 5, 1, "", "constant_"], [2040, 5, 1, "", "dirac_"], [2040, 5, 1, "", "eye_"], [2040, 5, 1, "", "kaiming_normal_"], [2040, 5, 1, "", "kaiming_uniform_"], [2040, 5, 1, "", "normal_"], [2040, 5, 1, "", "ones_"], [2040, 5, 1, "", "orthogonal_"], [2040, 5, 1, "", "sparse_"], [2040, 5, 1, "", "trunc_normal_"], [2040, 5, 1, "", "uniform_"], [2040, 5, 1, "", "xavier_normal_"], [2040, 5, 1, "", "xavier_uniform_"], [2040, 5, 1, "", "zeros_"]], "torch.nn.intrinsic": [[2073, 0, 0, "-", "modules"], [2073, 0, 0, "-", "qat"], [2073, 0, 0, "-", "quantized"]], "torch.nn.intrinsic.modules": [[2070, 0, 0, "-", "fused"]], "torch.nn.intrinsic.qat": [[2073, 0, 0, "-", "modules"]], "torch.nn.intrinsic.qat.modules": [[2070, 0, 0, "-", "conv_fused"], [2070, 0, 0, "-", "linear_fused"], [2070, 0, 0, "-", "linear_relu"]], "torch.nn.intrinsic.quantized": [[2073, 0, 0, "-", "dynamic"], [2073, 0, 0, "-", "modules"]], "torch.nn.intrinsic.quantized.dynamic": [[2073, 0, 0, "-", "modules"]], "torch.nn.intrinsic.quantized.dynamic.modules": [[2070, 0, 0, "-", "linear_relu"]], "torch.nn.intrinsic.quantized.modules": [[2070, 0, 0, "-", "bn_relu"], [2070, 0, 0, "-", "conv_relu"], [2070, 0, 0, "-", "linear_relu"]], "torch.nn.modules": [[2036, 0, 0, "-", "activation"], [2036, 0, 0, "-", "adaptive"], [2036, 0, 0, "-", "batchnorm"], [2036, 0, 0, "-", "channelshuffle"], [2036, 0, 0, "-", "container"], [2036, 0, 0, "-", "conv"], [2036, 0, 0, "-", "distance"], [2036, 0, 0, "-", "dropout"], [2036, 0, 0, "-", "flatten"], [2036, 0, 0, "-", "fold"], [2036, 0, 0, "-", "instancenorm"], [2036, 0, 0, "-", "lazy"], [2036, 0, 0, "-", "linear"], [2036, 0, 0, "-", "loss"], [2036, 0, 0, "-", "module"], [2036, 0, 0, "-", "normalization"], [2036, 0, 0, "-", "padding"], [2036, 0, 0, "-", "pixelshuffle"], [2036, 0, 0, "-", "pooling"], [2036, 0, 0, "-", "rnn"], [2036, 0, 0, "-", "sparse"], [2036, 0, 0, "-", "transformer"], [2036, 0, 0, "-", "upsampling"], [2036, 0, 0, "-", "utils"]], "torch.nn.modules.lazy": [[1706, 1, 1, "", "LazyModuleMixin"]], "torch.nn.modules.lazy.LazyModuleMixin": [[1706, 3, 1, "", "has_uninitialized_params"], [1706, 3, 1, "", "initialize_parameters"]], "torch.nn.modules.module": [[1707, 5, 1, "", "register_module_backward_hook"], [1708, 5, 1, "", "register_module_buffer_registration_hook"], [1709, 5, 1, "", "register_module_forward_hook"], [1710, 5, 1, "", "register_module_forward_pre_hook"], [1711, 5, 1, "", "register_module_full_backward_hook"], [1712, 5, 1, "", "register_module_full_backward_pre_hook"], [1713, 5, 1, "", "register_module_module_registration_hook"], [1714, 5, 1, "", "register_module_parameter_registration_hook"]], "torch.nn.modules.normalization": [[1715, 1, 1, "", "RMSNorm"]], "torch.nn.modules.normalization.RMSNorm": [[1715, 3, 1, "", "extra_repr"], [1715, 3, 1, "", "forward"], [1715, 3, 1, "", "reset_parameters"]], "torch.nn.parallel": [[1716, 1, 1, "", "DistributedDataParallel"], [2036, 0, 0, "-", "comm"], [1699, 5, 1, "", "data_parallel"], [2036, 0, 0, "-", "distributed"], [2036, 0, 0, "-", "parallel_apply"], [2036, 0, 0, "-", "replicate"], [2036, 0, 0, "-", "scatter_gather"]], "torch.nn.parallel.DistributedDataParallel": [[1716, 3, 1, "", "join"], [1716, 3, 1, "", "join_hook"], [1716, 3, 1, "", "no_sync"], [1716, 3, 1, "", "register_comm_hook"]], "torch.nn.parameter": [[1717, 1, 1, "", "Parameter"], [1718, 1, 1, "", "UninitializedBuffer"], [1719, 1, 1, "", "UninitializedParameter"]], 
"torch.nn.parameter.UninitializedParameter": [[1719, 2, 1, "", "cls_to_become"]], "torch.nn.qat": [[2073, 0, 0, "-", "dynamic"], [2073, 0, 0, "-", "modules"]], "torch.nn.qat.dynamic": [[2073, 0, 0, "-", "modules"]], "torch.nn.qat.dynamic.modules": [[2070, 0, 0, "-", "linear"]], "torch.nn.qat.modules": [[2070, 0, 0, "-", "conv"], [2070, 0, 0, "-", "embedding_ops"], [2070, 0, 0, "-", "linear"]], "torch.nn.quantizable": [[2073, 0, 0, "-", "modules"]], "torch.nn.quantizable.modules": [[2070, 0, 0, "-", "activation"], [2070, 0, 0, "-", "rnn"]], "torch.nn.quantized": [[2073, 0, 0, "-", "dynamic"], [2070, 0, 0, "-", "functional"], [2073, 0, 0, "-", "modules"]], "torch.nn.quantized.dynamic": [[2073, 0, 0, "-", "modules"]], "torch.nn.quantized.dynamic.modules": [[2070, 0, 0, "-", "conv"], [2070, 0, 0, "-", "linear"], [2070, 0, 0, "-", "rnn"]], "torch.nn.quantized.modules": [[2070, 0, 0, "-", "activation"], [2070, 0, 0, "-", "batchnorm"], [2070, 0, 0, "-", "conv"], [2070, 0, 0, "-", "dropout"], [2070, 0, 0, "-", "embedding_ops"], [2070, 0, 0, "-", "functional_modules"], [2070, 0, 0, "-", "linear"], [2070, 0, 0, "-", "normalization"], [2070, 0, 0, "-", "rnn"], [2070, 0, 0, "-", "utils"]], "torch.nn.utils": [[2036, 0, 0, "-", "clip_grad"], [1720, 5, 1, "", "clip_grad_norm"], [1721, 5, 1, "", "clip_grad_norm_"], [1722, 5, 1, "", "clip_grad_value_"], [1723, 5, 1, "", "convert_conv2d_weight_memory_format"], [1724, 5, 1, "", "convert_conv3d_weight_memory_format"], [2036, 0, 0, "-", "convert_parameters"], [1725, 5, 1, "", "fuse_conv_bn_eval"], [1726, 5, 1, "", "fuse_conv_bn_weights"], [1727, 5, 1, "", "fuse_linear_bn_eval"], [1728, 5, 1, "", "fuse_linear_bn_weights"], [2036, 0, 0, "-", "fusion"], [2036, 0, 0, "-", "init"], [2036, 0, 0, "-", "memory_format"], [1729, 5, 1, "", "parameters_to_vector"], [2036, 0, 0, "-", "parametrizations"], [2036, 0, 0, "-", "parametrize"], [2036, 0, 0, "-", "prune"], [1755, 5, 1, "", "remove_spectral_norm"], [1756, 5, 1, "", "remove_weight_norm"], [2036, 0, 0, "-", "rnn"], [1764, 5, 1, "", "skip_init"], [1765, 5, 1, "", "spectral_norm"], [2036, 0, 0, "-", "stateless"], [1767, 5, 1, "", "vector_to_parameters"], [1768, 5, 1, "", "weight_norm"]], "torch.nn.utils.parametrizations": [[1730, 5, 1, "", "orthogonal"], [1731, 5, 1, "", "spectral_norm"], [1732, 5, 1, "", "weight_norm"]], "torch.nn.utils.parametrize": [[1733, 1, 1, "", "ParametrizationList"], [1734, 5, 1, "", "cached"], [1735, 5, 1, "", "is_parametrized"], [1736, 5, 1, "", "register_parametrization"], [1737, 5, 1, "", "remove_parametrizations"]], "torch.nn.utils.parametrize.ParametrizationList": [[1733, 3, 1, "", "right_inverse"]], "torch.nn.utils.prune": [[1738, 1, 1, "", "BasePruningMethod"], [1739, 1, 1, "", "CustomFromMask"], [1740, 1, 1, "", "Identity"], [1741, 1, 1, "", "L1Unstructured"], [1742, 1, 1, "", "LnStructured"], [1743, 1, 1, "", "PruningContainer"], [1744, 1, 1, "", "RandomStructured"], [1745, 1, 1, "", "RandomUnstructured"], [1746, 5, 1, "", "custom_from_mask"], [1747, 5, 1, "", "global_unstructured"], [1748, 5, 1, "", "identity"], [1749, 5, 1, "", "is_pruned"], [1750, 5, 1, "", "l1_unstructured"], [1751, 5, 1, "", "ln_structured"], [1752, 5, 1, "", "random_structured"], [1753, 5, 1, "", "random_unstructured"], [1754, 5, 1, "", "remove"]], "torch.nn.utils.prune.BasePruningMethod": [[1738, 3, 1, "", "apply"], [1738, 3, 1, "", "apply_mask"], [1738, 3, 1, "", "compute_mask"], [1738, 3, 1, "", "prune"], [1738, 3, 1, "", "remove"]], "torch.nn.utils.prune.CustomFromMask": [[1739, 3, 1, "", "apply"], [1739, 
3, 1, "", "apply_mask"], [1739, 3, 1, "", "prune"], [1739, 3, 1, "", "remove"]], "torch.nn.utils.prune.Identity": [[1740, 3, 1, "", "apply"], [1740, 3, 1, "", "apply_mask"], [1740, 3, 1, "", "prune"], [1740, 3, 1, "", "remove"]], "torch.nn.utils.prune.L1Unstructured": [[1741, 3, 1, "", "apply"], [1741, 3, 1, "", "apply_mask"], [1741, 3, 1, "", "prune"], [1741, 3, 1, "", "remove"]], "torch.nn.utils.prune.LnStructured": [[1742, 3, 1, "", "apply"], [1742, 3, 1, "", "apply_mask"], [1742, 3, 1, "", "compute_mask"], [1742, 3, 1, "", "prune"], [1742, 3, 1, "", "remove"]], "torch.nn.utils.prune.PruningContainer": [[1743, 3, 1, "", "add_pruning_method"], [1743, 3, 1, "", "apply"], [1743, 3, 1, "", "apply_mask"], [1743, 3, 1, "", "compute_mask"], [1743, 3, 1, "", "prune"], [1743, 3, 1, "", "remove"]], "torch.nn.utils.prune.RandomStructured": [[1744, 3, 1, "", "apply"], [1744, 3, 1, "", "apply_mask"], [1744, 3, 1, "", "compute_mask"], [1744, 3, 1, "", "prune"], [1744, 3, 1, "", "remove"]], "torch.nn.utils.prune.RandomUnstructured": [[1745, 3, 1, "", "apply"], [1745, 3, 1, "", "apply_mask"], [1745, 3, 1, "", "prune"], [1745, 3, 1, "", "remove"]], "torch.nn.utils.rnn": [[1757, 1, 1, "", "PackedSequence"], [1758, 5, 1, "", "pack_padded_sequence"], [1759, 5, 1, "", "pack_sequence"], [1760, 5, 1, "", "pad_packed_sequence"], [1761, 5, 1, "", "pad_sequence"], [1762, 5, 1, "", "unpack_sequence"], [1763, 5, 1, "", "unpad_sequence"]], "torch.nn.utils.rnn.PackedSequence": [[1757, 2, 1, "", "batch_sizes"], [1757, 3, 1, "", "count"], [1757, 2, 1, "", "data"], [1757, 3, 1, "", "index"], [1757, 4, 1, "", "is_cuda"], [1757, 3, 1, "", "is_pinned"], [1757, 2, 1, "", "sorted_indices"], [1757, 3, 1, "", "to"], [1757, 2, 1, "", "unsorted_indices"]], "torch.nn.utils.stateless": [[1766, 5, 1, "", "functional_call"]], "torch.onnx": [[2063, 1, 1, "", "DiagnosticOptions"], [2063, 1, 1, "", "ExportOptions"], [2063, 1, 1, "", "InvalidExportOptionsError"], [1777, 1, 1, "", "JitScalarType"], [2063, 1, 1, "", "ONNXProgram"], [2063, 1, 1, "", "ONNXProgramSerializer"], [2063, 1, 1, "", "ONNXRuntimeOptions"], [2063, 1, 1, "", "OnnxExporterError"], [2063, 1, 1, "", "OnnxRegistry"], [2065, 5, 1, "", "disable_log"], [2063, 5, 1, "", "dynamo_export"], [2063, 5, 1, "", "enable_fake_mode"], [2065, 5, 1, "", "enable_log"], [2062, 0, 0, "-", "errors"], [2065, 5, 1, "", "export"], [2065, 5, 1, "", "export_to_pretty_string"], [2065, 5, 1, "", "is_in_onnx_export"], [2064, 5, 1, "", "is_onnxrt_backend_supported"], [2062, 0, 0, "-", "operators"], [2065, 5, 1, "", "register_custom_op_symbolic"], [2065, 5, 1, "", "select_model_mode_for_export"], [2062, 0, 0, "-", "symbolic_caffe2"], [2062, 0, 0, "-", "symbolic_helper"], [2062, 0, 0, "-", "symbolic_opset10"], [2062, 0, 0, "-", "symbolic_opset11"], [2062, 0, 0, "-", "symbolic_opset12"], [2062, 0, 0, "-", "symbolic_opset13"], [2062, 0, 0, "-", "symbolic_opset14"], [2062, 0, 0, "-", "symbolic_opset15"], [2062, 0, 0, "-", "symbolic_opset16"], [2062, 0, 0, "-", "symbolic_opset17"], [2062, 0, 0, "-", "symbolic_opset18"], [2062, 0, 0, "-", "symbolic_opset19"], [2062, 0, 0, "-", "symbolic_opset20"], [2062, 0, 0, "-", "symbolic_opset7"], [2062, 0, 0, "-", "symbolic_opset8"], [2062, 0, 0, "-", "symbolic_opset9"], [2065, 5, 1, "", "unregister_custom_op_symbolic"], [2062, 0, 0, "-", "utils"], [2062, 0, 0, "-", "verification"]], "torch.onnx.JitScalarType": [[1777, 3, 1, "", "dtype"], [1777, 3, 1, "", "from_dtype"], [1777, 3, 1, "", "from_onnx_type"], [1777, 3, 1, "", "from_value"], [1777, 3, 1, "", 
"onnx_compatible"], [1777, 3, 1, "", "onnx_type"], [1777, 3, 1, "", "scalar_name"], [1777, 3, 1, "", "torch_name"]], "torch.onnx.ONNXProgram": [[2063, 3, 1, "", "adapt_torch_inputs_to_onnx"], [2063, 3, 1, "", "adapt_torch_outputs_to_onnx"], [2063, 4, 1, "", "diagnostic_context"], [2063, 4, 1, "", "fake_context"], [2063, 4, 1, "", "model_proto"], [2063, 4, 1, "", "model_signature"], [2063, 3, 1, "", "save"], [2063, 3, 1, "", "save_diagnostics"]], "torch.onnx.ONNXProgramSerializer": [[2063, 3, 1, "", "serialize"]], "torch.onnx.OnnxRegistry": [[2063, 3, 1, "", "get_op_functions"], [2063, 3, 1, "", "is_registered_op"], [2063, 4, 1, "", "opset_version"], [2063, 3, 1, "", "register_op"]], "torch.onnx.verification": [[1778, 1, 1, "", "GraphInfo"], [1779, 1, 1, "", "VerificationOptions"], [2065, 5, 1, "", "find_mismatch"]], "torch.onnx.verification.GraphInfo": [[1778, 3, 1, "", "all_mismatch_leaf_graph_info"], [1778, 3, 1, "", "clear"], [1778, 3, 1, "", "essential_node_count"], [1778, 3, 1, "", "essential_node_kinds"], [1778, 3, 1, "", "export_repro"], [1778, 3, 1, "", "find_mismatch"], [1778, 3, 1, "", "find_partition"], [1778, 3, 1, "", "has_mismatch"], [1778, 3, 1, "", "pretty_print_mismatch"], [1778, 3, 1, "", "pretty_print_tree"], [1778, 3, 1, "", "verify_export"]], "torch.optim": [[1780, 1, 1, "", "ASGD"], [1781, 1, 1, "", "Adadelta"], [1782, 1, 1, "", "Adagrad"], [1783, 1, 1, "", "Adam"], [1784, 1, 1, "", "AdamW"], [1785, 1, 1, "", "Adamax"], [1786, 1, 1, "", "LBFGS"], [1787, 1, 1, "", "NAdam"], [2067, 1, 1, "", "Optimizer"], [1793, 1, 1, "", "RAdam"], [1794, 1, 1, "", "RMSprop"], [1795, 1, 1, "", "Rprop"], [1796, 1, 1, "", "SGD"], [1797, 1, 1, "", "SparseAdam"], [2067, 0, 0, "-", "adadelta"], [2067, 0, 0, "-", "adagrad"], [2067, 0, 0, "-", "adam"], [2067, 0, 0, "-", "adamax"], [2067, 0, 0, "-", "adamw"], [2067, 0, 0, "-", "asgd"], [2067, 0, 0, "-", "lbfgs"], [2067, 0, 0, "-", "lr_scheduler"], [2067, 0, 0, "-", "nadam"], [2067, 0, 0, "-", "optimizer"], [2067, 0, 0, "-", "radam"], [2067, 0, 0, "-", "rmsprop"], [2067, 0, 0, "-", "rprop"], [2067, 0, 0, "-", "sgd"], [2067, 0, 0, "-", "sparse_adam"], [2067, 0, 0, "-", "swa_utils"]], "torch.optim.ASGD": [[1780, 3, 1, "", "add_param_group"], [1780, 3, 1, "", "load_state_dict"], [1780, 3, 1, "", "register_load_state_dict_post_hook"], [1780, 3, 1, "", "register_load_state_dict_pre_hook"], [1780, 3, 1, "", "register_state_dict_post_hook"], [1780, 3, 1, "", "register_state_dict_pre_hook"], [1780, 3, 1, "", "register_step_post_hook"], [1780, 3, 1, "", "register_step_pre_hook"], [1780, 3, 1, "", "state_dict"], [1780, 3, 1, "", "step"], [1780, 3, 1, "", "zero_grad"]], "torch.optim.Adadelta": [[1781, 3, 1, "", "add_param_group"], [1781, 3, 1, "", "load_state_dict"], [1781, 3, 1, "", "register_load_state_dict_post_hook"], [1781, 3, 1, "", "register_load_state_dict_pre_hook"], [1781, 3, 1, "", "register_state_dict_post_hook"], [1781, 3, 1, "", "register_state_dict_pre_hook"], [1781, 3, 1, "", "register_step_post_hook"], [1781, 3, 1, "", "register_step_pre_hook"], [1781, 3, 1, "", "state_dict"], [1781, 3, 1, "", "step"], [1781, 3, 1, "", "zero_grad"]], "torch.optim.Adagrad": [[1782, 3, 1, "", "add_param_group"], [1782, 3, 1, "", "load_state_dict"], [1782, 3, 1, "", "register_load_state_dict_post_hook"], [1782, 3, 1, "", "register_load_state_dict_pre_hook"], [1782, 3, 1, "", "register_state_dict_post_hook"], [1782, 3, 1, "", "register_state_dict_pre_hook"], [1782, 3, 1, "", "register_step_post_hook"], [1782, 3, 1, "", "register_step_pre_hook"], [1782, 3, 1, 
"", "state_dict"], [1782, 3, 1, "", "step"], [1782, 3, 1, "", "zero_grad"]], "torch.optim.Adam": [[1783, 3, 1, "", "add_param_group"], [1783, 3, 1, "", "load_state_dict"], [1783, 3, 1, "", "register_load_state_dict_post_hook"], [1783, 3, 1, "", "register_load_state_dict_pre_hook"], [1783, 3, 1, "", "register_state_dict_post_hook"], [1783, 3, 1, "", "register_state_dict_pre_hook"], [1783, 3, 1, "", "register_step_post_hook"], [1783, 3, 1, "", "register_step_pre_hook"], [1783, 3, 1, "", "state_dict"], [1783, 3, 1, "", "step"], [1783, 3, 1, "", "zero_grad"]], "torch.optim.AdamW": [[1784, 3, 1, "", "add_param_group"], [1784, 3, 1, "", "load_state_dict"], [1784, 3, 1, "", "register_load_state_dict_post_hook"], [1784, 3, 1, "", "register_load_state_dict_pre_hook"], [1784, 3, 1, "", "register_state_dict_post_hook"], [1784, 3, 1, "", "register_state_dict_pre_hook"], [1784, 3, 1, "", "register_step_post_hook"], [1784, 3, 1, "", "register_step_pre_hook"], [1784, 3, 1, "", "state_dict"], [1784, 3, 1, "", "step"], [1784, 3, 1, "", "zero_grad"]], "torch.optim.Adamax": [[1785, 3, 1, "", "add_param_group"], [1785, 3, 1, "", "load_state_dict"], [1785, 3, 1, "", "register_load_state_dict_post_hook"], [1785, 3, 1, "", "register_load_state_dict_pre_hook"], [1785, 3, 1, "", "register_state_dict_post_hook"], [1785, 3, 1, "", "register_state_dict_pre_hook"], [1785, 3, 1, "", "register_step_post_hook"], [1785, 3, 1, "", "register_step_pre_hook"], [1785, 3, 1, "", "state_dict"], [1785, 3, 1, "", "step"], [1785, 3, 1, "", "zero_grad"]], "torch.optim.LBFGS": [[1786, 3, 1, "", "add_param_group"], [1786, 3, 1, "", "load_state_dict"], [1786, 3, 1, "", "register_load_state_dict_post_hook"], [1786, 3, 1, "", "register_load_state_dict_pre_hook"], [1786, 3, 1, "", "register_state_dict_post_hook"], [1786, 3, 1, "", "register_state_dict_pre_hook"], [1786, 3, 1, "", "register_step_post_hook"], [1786, 3, 1, "", "register_step_pre_hook"], [1786, 3, 1, "", "state_dict"], [1786, 3, 1, "", "step"], [1786, 3, 1, "", "zero_grad"]], "torch.optim.NAdam": [[1787, 3, 1, "", "add_param_group"], [1787, 3, 1, "", "load_state_dict"], [1787, 3, 1, "", "register_load_state_dict_post_hook"], [1787, 3, 1, "", "register_load_state_dict_pre_hook"], [1787, 3, 1, "", "register_state_dict_post_hook"], [1787, 3, 1, "", "register_state_dict_pre_hook"], [1787, 3, 1, "", "register_step_post_hook"], [1787, 3, 1, "", "register_step_pre_hook"], [1787, 3, 1, "", "state_dict"], [1787, 3, 1, "", "step"], [1787, 3, 1, "", "zero_grad"]], "torch.optim.Optimizer": [[1788, 3, 1, "", "add_param_group"], [1789, 3, 1, "", "load_state_dict"], [1790, 3, 1, "", "state_dict"], [1791, 3, 1, "", "step"], [1792, 3, 1, "", "zero_grad"]], "torch.optim.RAdam": [[1793, 3, 1, "", "add_param_group"], [1793, 3, 1, "", "load_state_dict"], [1793, 3, 1, "", "register_load_state_dict_post_hook"], [1793, 3, 1, "", "register_load_state_dict_pre_hook"], [1793, 3, 1, "", "register_state_dict_post_hook"], [1793, 3, 1, "", "register_state_dict_pre_hook"], [1793, 3, 1, "", "register_step_post_hook"], [1793, 3, 1, "", "register_step_pre_hook"], [1793, 3, 1, "", "state_dict"], [1793, 3, 1, "", "step"], [1793, 3, 1, "", "zero_grad"]], "torch.optim.RMSprop": [[1794, 3, 1, "", "add_param_group"], [1794, 3, 1, "", "load_state_dict"], [1794, 3, 1, "", "register_load_state_dict_post_hook"], [1794, 3, 1, "", "register_load_state_dict_pre_hook"], [1794, 3, 1, "", "register_state_dict_post_hook"], [1794, 3, 1, "", "register_state_dict_pre_hook"], [1794, 3, 1, "", "register_step_post_hook"], [1794, 3, 
1, "", "register_step_pre_hook"], [1794, 3, 1, "", "state_dict"], [1794, 3, 1, "", "step"], [1794, 3, 1, "", "zero_grad"]], "torch.optim.Rprop": [[1795, 3, 1, "", "add_param_group"], [1795, 3, 1, "", "load_state_dict"], [1795, 3, 1, "", "register_load_state_dict_post_hook"], [1795, 3, 1, "", "register_load_state_dict_pre_hook"], [1795, 3, 1, "", "register_state_dict_post_hook"], [1795, 3, 1, "", "register_state_dict_pre_hook"], [1795, 3, 1, "", "register_step_post_hook"], [1795, 3, 1, "", "register_step_pre_hook"], [1795, 3, 1, "", "state_dict"], [1795, 3, 1, "", "step"], [1795, 3, 1, "", "zero_grad"]], "torch.optim.SGD": [[1796, 3, 1, "", "add_param_group"], [1796, 3, 1, "", "load_state_dict"], [1796, 3, 1, "", "register_load_state_dict_post_hook"], [1796, 3, 1, "", "register_load_state_dict_pre_hook"], [1796, 3, 1, "", "register_state_dict_post_hook"], [1796, 3, 1, "", "register_state_dict_pre_hook"], [1796, 3, 1, "", "register_step_post_hook"], [1796, 3, 1, "", "register_step_pre_hook"], [1796, 3, 1, "", "state_dict"], [1796, 3, 1, "", "step"], [1796, 3, 1, "", "zero_grad"]], "torch.optim.SparseAdam": [[1797, 3, 1, "", "add_param_group"], [1797, 3, 1, "", "load_state_dict"], [1797, 3, 1, "", "register_load_state_dict_post_hook"], [1797, 3, 1, "", "register_load_state_dict_pre_hook"], [1797, 3, 1, "", "register_state_dict_post_hook"], [1797, 3, 1, "", "register_state_dict_pre_hook"], [1797, 3, 1, "", "register_step_post_hook"], [1797, 3, 1, "", "register_step_pre_hook"], [1797, 3, 1, "", "state_dict"], [1797, 3, 1, "", "step"], [1797, 3, 1, "", "zero_grad"]], "torch.optim.lr_scheduler": [[1798, 1, 1, "", "ChainedScheduler"], [1799, 1, 1, "", "ConstantLR"], [1800, 1, 1, "", "CosineAnnealingLR"], [1801, 1, 1, "", "CosineAnnealingWarmRestarts"], [1802, 1, 1, "", "CyclicLR"], [1803, 1, 1, "", "ExponentialLR"], [1804, 1, 1, "", "LambdaLR"], [1805, 1, 1, "", "LinearLR"], [1806, 1, 1, "", "MultiStepLR"], [1807, 1, 1, "", "MultiplicativeLR"], [1808, 1, 1, "", "OneCycleLR"], [1809, 1, 1, "", "PolynomialLR"], [1810, 1, 1, "", "ReduceLROnPlateau"], [1811, 1, 1, "", "SequentialLR"], [1812, 1, 1, "", "StepLR"]], "torch.optim.lr_scheduler.ChainedScheduler": [[1798, 3, 1, "", "get_last_lr"], [1798, 3, 1, "", "load_state_dict"], [1798, 3, 1, "", "print_lr"], [1798, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.ConstantLR": [[1799, 3, 1, "", "get_last_lr"], [1799, 3, 1, "", "load_state_dict"], [1799, 3, 1, "", "print_lr"], [1799, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.CosineAnnealingLR": [[1800, 3, 1, "", "get_last_lr"], [1800, 3, 1, "", "load_state_dict"], [1800, 3, 1, "", "print_lr"], [1800, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts": [[1801, 3, 1, "", "get_last_lr"], [1801, 3, 1, "", "load_state_dict"], [1801, 3, 1, "", "print_lr"], [1801, 3, 1, "", "state_dict"], [1801, 3, 1, "", "step"]], "torch.optim.lr_scheduler.CyclicLR": [[1802, 3, 1, "", "get_last_lr"], [1802, 3, 1, "", "get_lr"], [1802, 3, 1, "", "print_lr"]], "torch.optim.lr_scheduler.ExponentialLR": [[1803, 3, 1, "", "get_last_lr"], [1803, 3, 1, "", "load_state_dict"], [1803, 3, 1, "", "print_lr"], [1803, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.LambdaLR": [[1804, 3, 1, "", "get_last_lr"], [1804, 3, 1, "", "load_state_dict"], [1804, 3, 1, "", "print_lr"], [1804, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.LinearLR": [[1805, 3, 1, "", "get_last_lr"], [1805, 3, 1, "", "load_state_dict"], [1805, 3, 1, "", "print_lr"], [1805, 3, 1, "", "state_dict"]], 
"torch.optim.lr_scheduler.MultiStepLR": [[1806, 3, 1, "", "get_last_lr"], [1806, 3, 1, "", "load_state_dict"], [1806, 3, 1, "", "print_lr"], [1806, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.MultiplicativeLR": [[1807, 3, 1, "", "get_last_lr"], [1807, 3, 1, "", "load_state_dict"], [1807, 3, 1, "", "print_lr"], [1807, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.OneCycleLR": [[1808, 3, 1, "", "get_last_lr"], [1808, 3, 1, "", "load_state_dict"], [1808, 3, 1, "", "print_lr"], [1808, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.PolynomialLR": [[1809, 3, 1, "", "get_last_lr"], [1809, 3, 1, "", "load_state_dict"], [1809, 3, 1, "", "print_lr"], [1809, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.ReduceLROnPlateau": [[1810, 3, 1, "", "get_last_lr"], [1810, 3, 1, "", "print_lr"]], "torch.optim.lr_scheduler.SequentialLR": [[1811, 3, 1, "", "get_last_lr"], [1811, 3, 1, "", "load_state_dict"], [1811, 3, 1, "", "print_lr"], [1811, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.StepLR": [[1812, 3, 1, "", "get_last_lr"], [1812, 3, 1, "", "load_state_dict"], [1812, 3, 1, "", "print_lr"], [1812, 3, 1, "", "state_dict"]], "torch.overrides": [[2112, 5, 1, "", "get_ignored_functions"], [2112, 5, 1, "", "get_overridable_functions"], [2112, 5, 1, "", "get_testing_overrides"], [2112, 5, 1, "", "handle_torch_function"], [2112, 5, 1, "", "has_torch_function"], [2112, 5, 1, "", "is_tensor_like"], [2112, 5, 1, "", "is_tensor_method_or_property"], [2112, 5, 1, "", "resolve_name"], [2112, 5, 1, "", "wrap_torch_function"]], "torch.package": [[2068, 1, 1, "", "Directory"], [2068, 1, 1, "", "EmptyMatchError"], [2068, 1, 1, "", "PackageExporter"], [2068, 1, 1, "", "PackageImporter"], [2068, 1, 1, "", "PackagingError"], [2068, 0, 0, "-", "analyze"], [2068, 0, 0, "-", "file_structure_representation"], [2068, 0, 0, "-", "find_file_dependencies"], [2068, 0, 0, "-", "glob_group"], [2068, 0, 0, "-", "importer"], [2068, 0, 0, "-", "package_exporter"], [2068, 0, 0, "-", "package_importer"]], "torch.package.Directory": [[2068, 3, 1, "", "has_file"]], "torch.package.PackageExporter": [[2068, 3, 1, "", "__init__"], [2068, 3, 1, "", "add_dependency"], [2068, 3, 1, "", "all_paths"], [2068, 3, 1, "", "close"], [2068, 3, 1, "", "denied_modules"], [2068, 3, 1, "", "deny"], [2068, 3, 1, "", "dependency_graph_string"], [2068, 3, 1, "", "extern"], [2068, 3, 1, "", "externed_modules"], [2068, 3, 1, "", "get_rdeps"], [2068, 3, 1, "", "get_unique_id"], [2068, 3, 1, "", "intern"], [2068, 3, 1, "", "interned_modules"], [2068, 3, 1, "", "mock"], [2068, 3, 1, "", "mocked_modules"], [2068, 3, 1, "", "register_extern_hook"], [2068, 3, 1, "", "register_intern_hook"], [2068, 3, 1, "", "register_mock_hook"], [2068, 3, 1, "", "save_binary"], [2068, 3, 1, "", "save_module"], [2068, 3, 1, "", "save_pickle"], [2068, 3, 1, "", "save_source_file"], [2068, 3, 1, "", "save_source_string"], [2068, 3, 1, "", "save_text"]], "torch.package.PackageImporter": [[2068, 3, 1, "", "__init__"], [2068, 3, 1, "", "file_structure"], [2068, 3, 1, "", "id"], [2068, 3, 1, "", "import_module"], [2068, 3, 1, "", "load_binary"], [2068, 3, 1, "", "load_pickle"], [2068, 3, 1, "", "load_text"], [2068, 3, 1, "", "python_version"]], "torch.package.analyze": [[2068, 0, 0, "-", "find_first_use_of_broken_modules"], [2068, 0, 0, "-", "is_from_package"], [2068, 0, 0, "-", "trace_dependencies"]], "torch.profiler": [[2069, 1, 1, "", "ProfilerAction"], [2069, 1, 1, "", "ProfilerActivity"], [2069, 1, 1, "", "_KinetoProfile"], [2069, 0, 0, "-", "itt"], [2069, 
1, 1, "", "profile"], [2069, 0, 0, "-", "profiler"], [2069, 0, 0, "-", "python_tracer"], [2069, 5, 1, "", "schedule"], [2069, 5, 1, "", "tensorboard_trace_handler"]], "torch.profiler.ProfilerActivity": [[2069, 4, 1, "", "name"]], "torch.profiler._KinetoProfile": [[2069, 3, 1, "", "add_metadata"], [2069, 3, 1, "", "add_metadata_json"], [2069, 3, 1, "", "events"], [2069, 3, 1, "", "export_chrome_trace"], [2069, 3, 1, "", "export_memory_timeline"], [2069, 3, 1, "", "export_stacks"], [2069, 3, 1, "", "key_averages"], [2069, 3, 1, "", "preset_metadata_json"]], "torch.profiler.itt": [[2069, 5, 1, "", "is_available"], [2069, 5, 1, "", "mark"], [2069, 5, 1, "", "range_pop"], [2069, 5, 1, "", "range_push"]], "torch.profiler.profile": [[2069, 3, 1, "", "step"]], "torch.quantization": [[2070, 0, 0, "-", "fake_quantize"], [2070, 0, 0, "-", "fuse_modules"], [2070, 0, 0, "-", "fuser_method_mappings"], [2073, 0, 0, "-", "fx"], [2070, 0, 0, "-", "observer"], [2070, 0, 0, "-", "qconfig"], [2070, 0, 0, "-", "quant_type"], [2070, 0, 0, "-", "quantization_mappings"], [2070, 0, 0, "-", "quantize"], [2070, 0, 0, "-", "quantize_fx"], [2070, 0, 0, "-", "quantize_jit"], [2070, 0, 0, "-", "stubs"], [2070, 0, 0, "-", "utils"]], "torch.quantization.fx": [[2070, 0, 0, "-", "convert"], [2070, 0, 0, "-", "fuse"], [2070, 0, 0, "-", "fusion_patterns"], [2070, 0, 0, "-", "graph_module"], [2070, 0, 0, "-", "match_utils"], [2070, 0, 0, "-", "pattern_utils"], [2070, 0, 0, "-", "prepare"], [2070, 0, 0, "-", "quantization_patterns"], [2070, 0, 0, "-", "quantization_types"], [2070, 0, 0, "-", "utils"]], "torch.quasirandom": [[1833, 1, 1, "", "SobolEngine"]], "torch.quasirandom.SobolEngine": [[1833, 3, 1, "", "draw"], [1833, 3, 1, "", "draw_base2"], [1833, 3, 1, "", "fast_forward"], [1833, 3, 1, "", "reset"]], "torch.random": [[2074, 5, 1, "", "fork_rng"], [2074, 5, 1, "", "get_rng_state"], [2074, 5, 1, "", "initial_seed"], [2074, 5, 1, "", "manual_seed"], [2074, 5, 1, "", "seed"], [2074, 5, 1, "", "set_rng_state"]], "torch.serialization": [[2060, 5, 1, "", "add_safe_globals"], [2060, 5, 1, "", "clear_safe_globals"], [2060, 5, 1, "", "get_default_load_endianness"], [2060, 5, 1, "", "get_default_mmap_options"], [2060, 5, 1, "", "get_safe_globals"], [2060, 5, 1, "", "register_package"], [2060, 5, 1, "", "set_default_load_endianness"], [2060, 5, 1, "", "set_default_mmap_options"]], "torch.signal": [[2078, 0, 0, "-", "windows"]], "torch.signal.windows": [[1880, 5, 1, "", "bartlett"], [1881, 5, 1, "", "blackman"], [1882, 5, 1, "", "cosine"], [1883, 5, 1, "", "exponential"], [1884, 5, 1, "", "gaussian"], [1885, 5, 1, "", "general_cosine"], [1886, 5, 1, "", "general_hamming"], [1887, 5, 1, "", "hamming"], [1888, 5, 1, "", "hann"], [1889, 5, 1, "", "kaiser"], [1890, 5, 1, "", "nuttall"], [2089, 0, 0, "-", "windows"]], "torch.sparse": [[1900, 5, 1, "", "addmm"], [1901, 5, 1, "", "as_sparse_gradcheck"], [1902, 1, 1, "", "check_sparse_tensor_invariants"], [1903, 5, 1, "", "log_softmax"], [1904, 5, 1, "", "mm"], [1905, 5, 1, "", "sampled_addmm"], [2089, 0, 0, "-", "semi_structured"], [1906, 5, 1, "", "softmax"], [1907, 5, 1, "", "spdiags"], [1908, 5, 1, "", "sum"]], "torch.sparse.check_sparse_tensor_invariants": [[1902, 3, 1, "", "disable"], [1902, 3, 1, "", "enable"], [1902, 3, 1, "", "is_enabled"]], "torch.special": [[2081, 5, 1, "", "airy_ai"], [2081, 5, 1, "", "bessel_j0"], [2081, 5, 1, "", "bessel_j1"], [2081, 5, 1, "", "digamma"], [2081, 5, 1, "", "entr"], [2081, 5, 1, "", "erf"], [2081, 5, 1, "", "erfc"], [2081, 5, 1, "", "erfcx"], 
[2081, 5, 1, "", "erfinv"], [2081, 5, 1, "", "exp2"], [2081, 5, 1, "", "expit"], [2081, 5, 1, "", "expm1"], [2081, 5, 1, "", "gammainc"], [2081, 5, 1, "", "gammaincc"], [2081, 5, 1, "", "gammaln"], [2081, 5, 1, "", "i0"], [2081, 5, 1, "", "i0e"], [2081, 5, 1, "", "i1"], [2081, 5, 1, "", "i1e"], [2081, 5, 1, "", "log1p"], [2081, 5, 1, "", "log_ndtr"], [2081, 5, 1, "", "log_softmax"], [2081, 5, 1, "", "logit"], [2081, 5, 1, "", "logsumexp"], [2081, 5, 1, "", "multigammaln"], [2081, 5, 1, "", "ndtr"], [2081, 5, 1, "", "ndtri"], [2081, 5, 1, "", "polygamma"], [2081, 5, 1, "", "psi"], [2081, 5, 1, "", "round"], [2081, 5, 1, "", "scaled_modified_bessel_k0"], [2081, 5, 1, "", "scaled_modified_bessel_k1"], [2081, 5, 1, "", "sinc"], [2081, 5, 1, "", "softmax"], [2081, 5, 1, "", "spherical_bessel_j0"], [2081, 5, 1, "", "xlog1py"], [2081, 5, 1, "", "xlogy"], [2081, 5, 1, "", "zeta"]], "torch.testing": [[2087, 5, 1, "", "assert_allclose"], [2087, 5, 1, "", "assert_close"], [2087, 5, 1, "", "make_tensor"]], "torch.torch": [[2089, 2, 1, "", "default_generator"], [2116, 1, 1, "", "finfo"], [2116, 1, 1, "", "iinfo"]], "torch.utils": [[2089, 0, 0, "-", "backcompat"], [2117, 0, 0, "-", "backend_registration"], [3, 0, 0, "-", "benchmark"], [4, 0, 0, "-", "bottleneck"], [2117, 0, 0, "-", "bundled_inputs"], [2117, 0, 0, "-", "checkpoint"], [2117, 0, 0, "-", "collect_env"], [2117, 0, 0, "-", "cpp_backtrace"], [2117, 0, 0, "-", "cpp_extension"], [23, 0, 0, "-", "data"], [27, 0, 0, "-", "deterministic"], [2117, 0, 0, "-", "dlpack"], [2117, 0, 0, "-", "file_baton"], [2117, 0, 0, "-", "flop_counter"], [1965, 5, 1, "", "generate_methods_for_privateuse1_backend"], [1966, 5, 1, "", "get_cpp_backtrace"], [2089, 0, 0, "-", "hipify"], [2117, 0, 0, "-", "hooks"], [2019, 0, 0, "-", "jit"], [2117, 0, 0, "-", "mkldnn"], [2117, 0, 0, "-", "mobile_optimizer"], [2089, 0, 0, "-", "model_dump"], [2027, 0, 0, "-", "model_zoo"], [2028, 0, 0, "-", "module_tracker"], [1967, 5, 1, "", "rename_privateuse1_backend"], [1968, 5, 1, "", "set_module"], [2117, 0, 0, "-", "show_pickle"], [1969, 5, 1, "", "swap_tensors"], [2085, 0, 0, "-", "tensorboard"], [2117, 0, 0, "-", "throughput_benchmark"], [2089, 0, 0, "-", "viz"], [2117, 0, 0, "-", "weak"]], "torch.utils.benchmark": [[3, 1, 1, "", "CallgrindStats"], [3, 1, 1, "", "Compare"], [3, 1, 1, "", "FunctionCounts"], [3, 1, 1, "", "Measurement"], [3, 1, 1, "", "Timer"], [3, 0, 0, "-", "examples"], [3, 0, 0, "-", "op_fuzzers"], [3, 0, 0, "-", "utils"]], "torch.utils.benchmark.CallgrindStats": [[3, 3, 1, "", "as_standardized"], [3, 3, 1, "", "counts"], [3, 3, 1, "", "delta"], [3, 3, 1, "", "stats"]], "torch.utils.benchmark.Compare": [[3, 3, 1, "", "colorize"], [3, 3, 1, "", "extend_results"], [3, 3, 1, "", "highlight_warnings"], [3, 3, 1, "", "print"], [3, 3, 1, "", "trim_significant_figures"]], "torch.utils.benchmark.FunctionCounts": [[3, 3, 1, "", "denoise"], [3, 3, 1, "", "filter"], [3, 3, 1, "", "transform"]], "torch.utils.benchmark.Measurement": [[3, 3, 1, "", "merge"], [3, 4, 1, "", "significant_figures"]], "torch.utils.benchmark.Timer": [[3, 3, 1, "", "adaptive_autorange"], [3, 3, 1, "", "blocked_autorange"], [3, 3, 1, "", "collect_callgrind"], [3, 3, 1, "", "timeit"]], "torch.utils.benchmark.examples": [[2117, 0, 0, "-", "blas_compare_setup"], [2117, 0, 0, "-", "compare"], [2117, 0, 0, "-", "fuzzer"], [2117, 0, 0, "-", "op_benchmark"], [2117, 0, 0, "-", "simple_timeit"], [2117, 0, 0, "-", "spectral_ops_fuzz_test"]], "torch.utils.benchmark.op_fuzzers": [[2117, 0, 0, "-", "binary"], [2117, 
0, 0, "-", "sparse_binary"], [2117, 0, 0, "-", "sparse_unary"], [2117, 0, 0, "-", "spectral"], [2117, 0, 0, "-", "unary"]], "torch.utils.benchmark.utils": [[2117, 0, 0, "-", "common"], [2117, 0, 0, "-", "compare"], [2117, 0, 0, "-", "compile"], [2117, 0, 0, "-", "cpp_jit"], [2117, 0, 0, "-", "fuzzer"], [2117, 0, 0, "-", "sparse_fuzzer"], [2117, 0, 0, "-", "timer"], [3, 0, 0, "-", "valgrind_wrapper"]], "torch.utils.benchmark.utils.valgrind_wrapper": [[2117, 0, 0, "-", "timer_interface"]], "torch.utils.checkpoint": [[5, 5, 1, "", "checkpoint"], [5, 5, 1, "", "checkpoint_sequential"], [5, 5, 1, "", "set_checkpoint_debug_enabled"]], "torch.utils.cpp_extension": [[14, 5, 1, "", "BuildExtension"], [14, 5, 1, "", "CUDAExtension"], [14, 5, 1, "", "CppExtension"], [14, 5, 1, "", "get_compiler_abi_compatibility_and_version"], [14, 5, 1, "", "include_paths"], [14, 5, 1, "", "is_ninja_available"], [14, 5, 1, "", "load"], [14, 5, 1, "", "load_inline"], [14, 5, 1, "", "verify_ninja_availability"]], "torch.utils.data": [[23, 1, 1, "", "BatchSampler"], [23, 1, 1, "", "ChainDataset"], [23, 1, 1, "", "ConcatDataset"], [23, 1, 1, "", "DataLoader"], [23, 1, 1, "", "Dataset"], [23, 1, 1, "", "IterableDataset"], [23, 1, 1, "", "RandomSampler"], [23, 1, 1, "", "Sampler"], [23, 1, 1, "", "SequentialSampler"], [23, 1, 1, "", "StackDataset"], [23, 1, 1, "", "Subset"], [23, 1, 1, "", "SubsetRandomSampler"], [23, 1, 1, "", "TensorDataset"], [23, 1, 1, "", "WeightedRandomSampler"], [2117, 0, 0, "-", "backward_compatibility"], [2117, 0, 0, "-", "dataloader"], [23, 0, 0, "-", "datapipes"], [2117, 0, 0, "-", "dataset"], [23, 5, 1, "", "default_collate"], [23, 5, 1, "", "default_convert"], [2117, 0, 0, "-", "distributed"], [23, 5, 1, "", "get_worker_info"], [2117, 0, 0, "-", "graph"], [2117, 0, 0, "-", "graph_settings"], [23, 5, 1, "", "random_split"], [2117, 0, 0, "-", "sampler"]], "torch.utils.data._utils.collate": [[23, 5, 1, "", "collate"]], "torch.utils.data.datapipes": [[23, 0, 0, "-", "dataframe"], [2117, 0, 0, "-", "datapipe"], [2117, 0, 0, "-", "gen_pyi"], [23, 0, 0, "-", "iter"], [23, 0, 0, "-", "map"], [23, 0, 0, "-", "utils"]], "torch.utils.data.datapipes.dataframe": [[2117, 0, 0, "-", "dataframe_wrapper"], [2117, 0, 0, "-", "dataframes"], [2117, 0, 0, "-", "datapipes"], [2117, 0, 0, "-", "structures"]], "torch.utils.data.datapipes.iter": [[2117, 0, 0, "-", "callable"], [2117, 0, 0, "-", "combinatorics"], [2117, 0, 0, "-", "combining"], [2117, 0, 0, "-", "filelister"], [2117, 0, 0, "-", "fileopener"], [2117, 0, 0, "-", "grouping"], [2117, 0, 0, "-", "routeddecoder"], [2117, 0, 0, "-", "selecting"], [2117, 0, 0, "-", "sharding"], [2117, 0, 0, "-", "streamreader"], [2117, 0, 0, "-", "utils"]], "torch.utils.data.datapipes.map": [[2117, 0, 0, "-", "callable"], [2117, 0, 0, "-", "combinatorics"], [2117, 0, 0, "-", "combining"], [2117, 0, 0, "-", "grouping"], [2117, 0, 0, "-", "utils"]], "torch.utils.data.datapipes.utils": [[2117, 0, 0, "-", "common"], [2117, 0, 0, "-", "decoder"], [2117, 0, 0, "-", "snapshot"]], "torch.utils.data.distributed": [[23, 1, 1, "", "DistributedSampler"]], "torch.utils.deterministic": [[27, 2, 1, "", "fill_uninitialized_memory"]], "torch.utils.dlpack": [[36, 5, 1, "", "from_dlpack"], [36, 5, 1, "", "to_dlpack"]], "torch.utils.hipify": [[2117, 0, 0, "-", "constants"], [2117, 0, 0, "-", "cuda_to_hip_mappings"], [2117, 0, 0, "-", "hipify_python"], [2117, 0, 0, "-", "version"]], "torch.utils.jit": [[2117, 0, 0, "-", "log_extract"]], "torch.utils.mobile_optimizer": [[2026, 5, 1, "", 
"optimize_for_mobile"]], "torch.utils.model_zoo": [[2027, 5, 1, "", "load_url"]], "torch.utils.module_tracker": [[2028, 1, 1, "", "ModuleTracker"]], "torch.utils.tensorboard": [[2117, 0, 0, "-", "summary"], [2117, 0, 0, "-", "writer"]], "torch.utils.tensorboard.writer": [[2085, 1, 1, "", "SummaryWriter"]], "torch.utils.tensorboard.writer.SummaryWriter": [[2085, 3, 1, "", "__init__"], [2085, 3, 1, "", "add_audio"], [2085, 3, 1, "", "add_custom_scalars"], [2085, 3, 1, "", "add_embedding"], [2085, 3, 1, "", "add_figure"], [2085, 3, 1, "", "add_graph"], [2085, 3, 1, "", "add_histogram"], [2085, 3, 1, "", "add_hparams"], [2085, 3, 1, "", "add_image"], [2085, 3, 1, "", "add_images"], [2085, 3, 1, "", "add_mesh"], [2085, 3, 1, "", "add_pr_curve"], [2085, 3, 1, "", "add_scalar"], [2085, 3, 1, "", "add_scalars"], [2085, 3, 1, "", "add_text"], [2085, 3, 1, "", "add_video"], [2085, 3, 1, "", "close"], [2085, 3, 1, "", "flush"]], "torch.xpu": [[1981, 1, 1, "", "Event"], [1982, 1, 1, "", "Stream"], [1983, 1, 1, "", "StreamContext"], [1984, 5, 1, "", "current_device"], [1985, 5, 1, "", "current_stream"], [1986, 1, 1, "", "device"], [1987, 5, 1, "", "device_count"], [1988, 1, 1, "", "device_of"], [1989, 5, 1, "", "empty_cache"], [1990, 5, 1, "", "get_device_capability"], [1991, 5, 1, "", "get_device_name"], [1992, 5, 1, "", "get_device_properties"], [1993, 5, 1, "", "get_rng_state"], [1994, 5, 1, "", "get_rng_state_all"], [1995, 5, 1, "", "init"], [1996, 5, 1, "", "initial_seed"], [1997, 5, 1, "", "is_available"], [1998, 5, 1, "", "is_initialized"], [1999, 5, 1, "", "manual_seed"], [2000, 5, 1, "", "manual_seed_all"], [2118, 0, 0, "-", "random"], [2001, 5, 1, "", "seed"], [2002, 5, 1, "", "seed_all"], [2003, 5, 1, "", "set_device"], [2004, 5, 1, "", "set_rng_state"], [2005, 5, 1, "", "set_rng_state_all"], [2006, 5, 1, "", "set_stream"], [2007, 5, 1, "", "stream"], [2118, 0, 0, "-", "streams"], [2008, 5, 1, "", "synchronize"]], "torch.xpu.Event": [[1981, 3, 1, "", "elapsed_time"], [1981, 3, 1, "", "query"], [1981, 3, 1, "", "record"], [1981, 3, 1, "", "synchronize"], [1981, 3, 1, "", "wait"]], "torch.xpu.Stream": [[1982, 3, 1, "", "query"], [1982, 3, 1, "", "record_event"], [1982, 3, 1, "", "synchronize"], [1982, 3, 1, "", "wait_event"], [1982, 3, 1, "", "wait_stream"]]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:attribute", "3": "py:method", "4": "py:property", "5": "py:function", "6": "py:exception", "7": "std:envvar"}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "method", "Python method"], "4": ["py", "property", "Python property"], "5": ["py", "function", "Python function"], "6": ["py", "exception", "Python exception"], "7": ["std", "envvar", "environment variable"]}, "titleterms": {"automat": [0, 1, 23, 33, 2013, 2041, 2049], "mix": [0, 2013, 2041], "precis": [0, 2041, 2045, 2058], "packag": [0, 1, 15, 28, 2032, 2061, 2068], "torch": [0, 1, 2, 3, 4, 5, 10, 12, 13, 14, 16, 17, 23, 26, 27, 28, 30, 31, 33, 34, 35, 36, 48, 52, 53, 54, 56, 57, 59, 60, 61, 62, 63, 64, 65, 74, 75, 76, 77, 78, 79, 80, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 
172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 868, 869, 870, 871, 872, 873, 874, 875, 876, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 899, 900, 901, 902, 904, 905, 906, 907, 910, 911, 912, 913, 914, 915, 916, 917, 921, 922, 923, 924, 925, 926, 927, 928, 929, 932, 933, 934, 935, 936, 937, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1012, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1030, 1032, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 
1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1273, 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, 1283, 1284, 1285, 1286, 1288, 1289, 1290, 1291, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1366, 1367, 1368, 1369, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1380, 1381, 1382, 1383, 1384, 1386, 1387, 1388, 1389, 1390, 1391, 1392, 1393, 1394, 1395, 1396, 1400, 1401, 1402, 1404, 1405, 1406, 1407, 1408, 1409, 1410, 1411, 1412, 1413, 1414, 1415, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1425, 1426, 1586, 1588, 1589, 1590, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1720, 1721, 1722, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1734, 1735, 1736, 1737, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 1770, 1771, 1772, 1773, 1774, 1775, 1776, 1788, 1789, 1790, 1791, 1792, 1813, 1814, 1815, 1816, 1817, 1818, 1819, 1820, 1821, 1822, 1823, 1824, 1825, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1856, 1857, 1858, 1859, 1860, 1861, 1862, 1863, 1864, 1865, 1866, 1867, 1868, 1869, 1870, 1871, 1872, 1873, 1874, 1875, 1876, 1877, 1878, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 
1891, 1892, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1900, 1901, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1931, 1932, 1933, 1934, 1935, 1936, 1937, 1938, 1939, 1940, 1941, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1955, 1956, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1965, 1966, 1967, 1968, 1969, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1984, 1985, 1987, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2016, 2018, 2019, 2020, 2021, 2022, 2023, 2026, 2027, 2028, 2029, 2030, 2031, 2032, 2035, 2036, 2037, 2038, 2039, 2040, 2045, 2048, 2049, 2053, 2058, 2060, 2062, 2065, 2067, 2068, 2069, 2073, 2074, 2078, 2079, 2080, 2081, 2082, 2083, 2085, 2086, 2087, 2089, 2090, 2091, 2092, 2093, 2094, 2102, 2103, 2109, 2112, 2114, 2116, 2117, 2118], "amp": [0, 2045], "autocast": [0, 2041], "gradient": [0, 1, 61, 1226, 2041, 2042, 2049, 2089, 2102], "scale": [0, 2041, 2054], "op": [0, 12, 86, 2018, 2020, 2041, 2046, 2065, 2089], "refer": [0, 12, 18, 19, 33, 52, 53, 57, 64, 2013, 2015, 2016, 2017, 2029, 2034, 2053, 2063, 2068, 2069, 2070, 2073, 2077, 2086, 2094, 2113], "elig": 0, "cuda": [0, 2, 10, 17, 18, 20, 211, 1012, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1030, 1032, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 2032, 2041, 2045, 2046, 2050, 2053, 2057, 2059, 2061, 2096, 2102, 2109, 2113], "specif": [0, 23, 53, 2080], "behavior": [0, 19, 23, 2042, 2052], "can": [0, 2042, 2050, 2102, 2108], "float16": 0, "float32": 0, "promot": [0, 7, 88], "widest": 0, "input": [0, 19, 52, 2033, 2041, 2052], "type": [0, 23, 53, 88, 605, 2015, 2016, 2048, 2065, 2068, 2086, 2116], "prefer": 0, "binary_cross_entropy_with_logit": [0, 1605], "over": [0, 8, 2015], "binary_cross_entropi": [0, 1604], "cpu": [0, 2, 10, 16, 208, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 2042, 2044, 2057, 2070, 2095], "bfloat16": [0, 157], "differenti": [1, 2042], "autograd": [1, 10, 11, 15, 60, 892, 893, 894, 895, 896, 899, 900, 901, 902, 904, 905, 906, 907, 910, 911, 912, 913, 914, 915, 916, 917, 921, 922, 923, 924, 925, 926, 927, 928, 929, 932, 933, 934, 935, 936, 937, 2034, 2041, 2042, 2048, 2049, 2065, 2075, 2076], "forward": [1, 893, 2048, 2076], "mode": [1, 48, 58, 416, 1378, 2042, 2048, 2052, 2065, 2070, 2076], "function": [1, 28, 35, 54, 56, 57, 59, 61, 64, 84, 87, 892, 893, 894, 895, 904, 905, 906, 907, 910, 911, 912, 913, 914, 915, 916, 1166, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 
1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 2013, 2014, 2015, 2016, 2018, 2021, 2023, 2033, 2035, 2036, 2039, 2041, 2042, 2049, 2052, 2060, 2065, 2073, 2080, 2081, 2102, 2112], "higher": 1, "level": [1, 10, 86, 2020, 2073], "api": [1, 10, 12, 15, 18, 19, 31, 33, 41, 52, 57, 60, 64, 2012, 2013, 2016, 2020, 2029, 2034, 2044, 2045, 2048, 2053, 2054, 2063, 2065, 2068, 2069, 2070, 2073, 2094, 2098, 2099, 2101, 2102, 2103, 2113], "local": [1, 2016, 2042, 2089, 2108, 2110], "disabl": [1, 23, 981, 2013, 2042, 2089, 2102, 2103], "comput": [1, 61, 2042, 2058, 2076, 2089, 2102], "default": [1, 23, 24, 2015, 2042, 2045, 2052, 2072, 2099], "layout": [1, 2083], "manual": [1, 33], "In": [1, 8, 2042, 2043, 2089], "place": [1, 60, 2033, 2042, 2043, 2065, 2089], "oper": [1, 10, 19, 24, 28, 52, 60, 80, 89, 2015, 2016, 2021, 2023, 2033, 2034, 2035, 2042, 2046, 2048, 2054, 2061, 2065, 2066, 2070, 2080, 2086, 2089, 2101, 2109], "tensor": [1, 11, 15, 34, 52, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 
608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 1942, 2014, 2018, 2021, 2024, 2032, 2033, 2034, 2035, 2042, 2048, 2060, 2065, 2070, 2073, 2080, 2083, 2084, 2086, 2089, 2101], "correct": [1, 64, 2042], "check": [1, 37, 64, 2013, 2042, 2053, 2108], "variabl": [1, 20, 25, 28, 48, 2013, 2015, 2016, 2025, 2088, 2105, 2114, 2115], "deprec": 1, "context": [1, 19, 29, 45, 70, 2076], "method": [1, 40, 41, 44, 50, 2014, 2015, 2018, 2065, 2073, 2080], "mixin": 1, "custom": [1, 39, 50, 64, 2016, 2020, 2041, 2045, 2046, 2049, 2055, 2065, 2067, 2068, 2070, 2097], "util": [1, 3, 4, 5, 10, 14, 23, 27, 28, 33, 36, 57, 59, 1086, 1720, 1721, 1722, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1734, 1735, 1736, 1737, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 1965, 1966, 1967, 1968, 1969, 2019, 2026, 2027, 2028, 2036, 2037, 2060, 2073, 2085, 2089, 2091, 2110, 2117], "numer": [1, 2052, 2058, 2071], "profil": [1, 28, 932, 933, 934, 935, 936, 937, 1388, 1389, 1390, 2030, 2054, 2055, 2069, 2105, 2109, 2111], "debug": [1, 24, 25, 28, 50, 64, 86, 2013, 2070, 2071, 2097, 2102, 2111], "anomali": 1, "detect": 1, "graph": [1, 17, 52, 53, 64, 81, 924, 925, 926, 927, 928, 929, 1042, 2013, 2042, 2045, 2070, 2096, 2099, 2102, 2109, 2110, 2111], "backend": [2, 28, 47, 48, 2013, 2053, 2056, 2064, 2070, 2072, 2075, 2092, 2095, 2097, 2111], "cudnn": 2, "mha": 2, "mp": [2, 10, 1381, 1382, 1383, 1384, 1386, 1387, 1388, 1389, 1390, 1391, 1392, 1393, 1394, 2030, 2056], "mkl": 2, "mkldnn": [2, 10], "nnpack": 2, "openmp": 2, "opt_einsum": 2, "xeon": 2, "benchmark": [3, 2059, 2105], "bottleneck": 4, "checkpoint": [5, 24, 30], "pytorch": [6, 7, 8, 9, 10, 15, 26, 28, 52, 60, 2012, 2013, 2014, 2018, 2042, 2045, 2046, 2048, 2059, 2060, 2065, 2070, 2073, 2092, 2096, 2107, 2108, 2111, 2115], "govern": [6, 9, 10], "build": [6, 7, 10, 33, 2044, 2054, 2055, 2061], "ci": [6, 10], "how": [6, 24, 33, 58, 2011, 2042, 2046, 2048, 2067, 2068, 2100, 2101, 2102, 2108], "add": [6, 9, 99, 687], "new": [6, 7, 9, 2020, 2048], "maintain": [6, 9, 10], "contribut": [7, 2062], "guid": 7, "process": [7, 9, 23, 45, 2041], "get": [7, 31, 2065, 2092, 2102, 2104], "start": [7, 31, 45, 1389, 2092, 2104, 2111], "propos": 7, "featur": [7, 2054, 2055, 2102], "report": [7, 2050], "issu": [7, 2013, 2063, 2109], "implement": [7, 33, 37, 47, 50, 2011, 2047, 2052, 2065, 2071, 2073, 2077, 2099, 2101], "fix": [7, 48, 58, 263, 1146], "bug": 7, "ad": [7, 2048, 2065, 2099], "tutori": [7, 10, 2068, 2075], "improv": [7, 2055], "document": [7, 31, 2012], "particip": 7, "onlin": 7, "discuss": 7, "submit": 7, "pull": 7, "request": 7, "open": 7, "review": 7, "code": [7, 64, 2013, 2045, 2046, 2068, 2102], "readabl": 7, "test": [7, 2016, 2020, 2048, 2068, 2087, 2108], "case": [7, 2013], "make": [7, 9, 2096, 2099], "codebas": 7, "more": [7, 52, 56, 2046, 2075, 2092], "robust": 7, "triag": 7, "about": [7, 2042, 2075, 2101], "sourc": [7, 2059, 2061, 2068], "develop": [7, 52, 2012, 2062, 2092], "common": [7, 28, 64, 2054, 2070], "mistak": 7, "To": 7, "avoid": [7, 2057, 2059, 2065, 2068], "frequent": [7, 2013, 2050, 2065, 2070, 2102], "ask": [7, 2013, 2050, 2065, 2070, 2102], "question": [7, 2013, 2050, 2065, 2070, 2102], "On": [7, 24, 2015], "python": [7, 8, 28, 52, 60, 67, 68, 69, 70, 71, 72, 73, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2020, 2046, 2048, 2059, 2065, 2099], "doc": [7, 10, 2053], "c": [7, 10, 15, 2020, 2042, 2046, 2053, 2060, 
2065, 2093], "overview": [7, 18, 19, 52, 64, 2062, 2063, 2068, 2069, 2080, 2097, 2100], "design": [8, 2016, 2047, 2075, 2076, 2077], "philosophi": 8, "principl": [8, 9], "1": [8, 33, 48, 58, 2013, 2049, 2099], "usabl": 8, "perform": [8, 10, 2055, 2060, 2101, 2108, 2109, 2111], "2": [8, 33, 58, 2013, 2045, 2049, 2050, 2070, 2073, 2107, 2108, 2111], "simpl": [8, 2015, 2016, 2055, 2063, 2076], "easi": 8, "3": [8, 48, 58], "first": [8, 2068], "best": [8, 2045, 2057, 2070, 2095], "class": [8, 40, 64, 2013, 2015, 2016, 2018, 2065, 2067, 2068, 2086], "languag": [8, 2012, 2013, 2015, 2016, 2017], "interoper": 8, "mechan": [9, 2039, 2042, 2052], "summari": [9, 2070], "modul": [9, 10, 57, 59, 64, 1526, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 2013, 2014, 2015, 2016, 2018, 2036, 2042, 2048, 2055, 2060, 2068, 2070, 2107], "core": [9, 10, 2106], "lead": [9, 10], "bdfl": [9, 10], "nomin": [9, 2016], "confirm": 9, "remov": [9, 1754, 2033], "The": [9, 64, 2016, 2042, 2098], "re": [9, 2068], "scope": 9, "project": 9, "decis": 9, "uncontroversi": 9, "chang": [9, 48, 58, 2102], "controversi": 9, "gener": [9, 17, 29, 60, 64, 90, 2043, 2049, 2059, 2070, 2071, 2089, 2099, 2100, 2113, 2118], "polici": [9, 2098], "faq": [9, 2061], "respons": 10, "nn": [10, 57, 59, 1586, 1588, 1589, 1590, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1720, 1721, 1722, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1734, 1735, 1736, 1737, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 2015, 2016, 2036, 2037, 2038, 2039, 2040, 2042, 2045, 2048, 2060, 2073, 2080, 2107], "optim": [10, 11, 32, 1788, 1789, 1790, 1791, 1792, 2041, 2042, 2045, 2067, 2075, 2076, 2089], "compil": [10, 59, 975, 977, 978, 979, 980, 981, 982, 983, 984, 985, 2045, 2092, 2093, 2094, 2102, 2103, 2109, 2111], "jit": [10, 1273, 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, 1283, 1284, 1285, 1286, 1288, 1289, 1290, 1291, 2013, 2016, 2019], "torchscript": [10, 15, 2013, 2014, 2015, 2016, 2018, 2044, 2054, 2062, 2065, 2066, 2068], "fx": [10, 64, 65, 81, 82, 83, 85, 88, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 2055, 2070, 2073, 2091], "torchdynamo": [10, 2047, 2062, 2063, 2064, 2096, 2103, 2111], "distribut": [10, 28, 30, 31, 32, 33, 34, 35, 48, 2036, 2039, 2047, 2053, 2055, 2075, 2076, 2102], "rng": 10, "multiprocess": [10, 45, 2032, 2045, 2057, 2061], "dataload": [10, 2059], "linear": [10, 11, 734, 735, 758, 766, 783, 1173, 1513, 1649, 2036, 2039, 2058, 2080], "algebra": [10, 11, 2058, 2080], "linalg": [10, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 
1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 2021, 2058], "spars": [10, 1900, 1901, 1903, 1904, 1905, 1906, 1907, 1908, 2036, 2039, 2080], "nestedtensor": 10, "nest": [10, 2035], "maskedtensor": [10, 2023], "mask": [10, 2023], "fast": [10, 54, 2052, 2076], "fourier": [10, 54], "transform": [10, 35, 54, 56, 57, 59, 61, 64, 1570, 2036, 2048, 2055, 2102, 2110], "fft": [10, 54, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145], "inductor": 10, "gpu": [10, 28, 2036, 2039, 2041, 2050, 2105], "triton": [10, 2105], "nvfuser": 10, "amd": [10, 2058], "rocm": [10, 19, 2053], "hip": [10, 2053], "tool": [10, 17, 2071, 2080], "c10": 10, "dispatch": 10, "onnx": [10, 81, 82, 89, 2062, 2063, 2064, 2065, 2066], "export": [10, 52, 53, 2062, 2063, 2065, 2068, 2070, 2073, 2089, 2093, 2102], "mobil": 10, "edg": [10, 2013, 2068], "model": [10, 15, 33, 52, 73, 2011, 2041, 2050, 2054, 2063, 2067, 2068, 2070, 2073, 2093, 2098, 2104, 2105], "compress": [10, 2080], "window": [10, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2061, 2078], "appl": 10, "m1": 10, "powerpc": 10, "aarch64": 10, "librari": [10, 2012, 2020, 2059], "xla": 10, "torchserv": 10, "torchvis": [10, 58], "torchtext": 10, "torchaudio": 10, "torchrec": 10, "torchx": 10, "torchdata": 10, "torcharrow": 10, "complex": [11, 986, 2042, 2052], "number": [11, 17, 48, 2042, 2044, 2050, 2059, 2089, 2118], "creat": [11, 2013, 2020, 2034, 2046], "transit": [11, 48], "from": [11, 48, 59, 64, 2011, 2020, 2033, 2046, 2061, 2065, 2068], "old": 11, "represent": [11, 81], "access": [11, 2015, 2068, 2111], "real": [11, 485, 1844, 2052], "imag": [11, 313, 1243], "angl": [11, 119, 700], "ab": [11, 91, 683], "serial": [11, 52, 2060, 2089], "control": [12, 38, 52, 60, 64, 71, 2059, 2089, 2103], "flow": [12, 52, 60, 64, 71, 2070, 2089], "cond": [12, 74, 989, 1304], "exampl": [12, 33, 42, 52, 64, 2041, 2047, 2048, 2049, 2063, 2065, 2076, 2097], "invari": 12, "higher_ord": 12, "__config__": 13, "cpp_extens": 14, "extend": [15, 37, 2020, 2048, 2049, 2111], "extens": [15, 17, 2054, 2061], "author": [15, 64, 2046], "stream": [16, 17, 18, 998, 1005, 1013, 1083, 1398, 1409, 1982, 2007, 2031, 2045, 2118], "event": [16, 17, 39, 41, 1010, 1385, 1397, 1981, 2030, 2031, 2118], "random": [17, 23, 60, 2050, 2059, 2074, 2089, 2118], "commun": [17, 24, 28, 2012, 2051], "collect": [17, 28], "beta": [17, 35], "memori": [17, 23, 2045, 2050, 2053, 2055, 2059, 2113], "manag": [17, 29, 70, 2032, 2045, 2053, 2068, 2110], "nvidia": [17, 2058], "nvtx": [17, 1067, 1068, 1069, 1070], "jiter": [17, 1050, 1051], "tunableop": [17, 19], "sanit": [17, 18], "prototyp": [17, 2070, 2071], "usag": [18, 31, 48, 2045, 2049, 2054, 2055, 2061, 2107, 2113], "enabl": [19, 2053], "tune": [19, 2044], "separ": [19, 2048], "file": [19, 28, 2032, 2060, 2068], "output": [19, 53, 2052, 2099], "A": [19, 64, 2055, 2063, 2099], "note": [19, 48, 81, 2012, 2016, 2045, 2051, 2070, 2075], "current": [19, 2034], "tunabl": 19, "tunablegemm": 19, "environ": [20, 25, 28, 48, 2025, 2054, 2068, 2088, 2105, 2114, 2115], "data": [23, 52, 60, 72, 2016, 2047, 2050, 2065, 2071, 2086], "dataset": 23, "map": [23, 78, 2053], "style": 23, "iter": [23, 2015], "load": [23, 1280, 1344, 2011, 2060, 2068, 2070], "order": [23, 2042], "sampler": 23, "batch": [23, 58, 2058, 2067], "non": [23, 52, 64, 2036, 2039, 2042, 2045, 2058, 2068, 2070], "work": [23, 57, 2024, 2041, 2050, 2080, 2101, 2102, 2109], "collate_fn": 23, "singl": [23, 48, 2041], 
"multi": [23, 28, 48, 2036, 2039], "platform": 23, "pin": [23, 2045], "ddp": 24, "hook": [24, 2042, 2055, 2107], "us": [24, 28, 33, 64, 2015, 2042, 2045, 2048, 2052, 2063, 2065, 2067, 2068, 2070, 2080, 2102, 2104, 2109, 2111, 2113], "what": [24, 33, 53, 56, 58, 61, 2023, 2042, 2068, 2100, 2102, 2108, 2109], "doe": [24, 33, 2042, 2101, 2102], "powersgd": 24, "state": [24, 2055, 2068, 2113], "acknowledg": 24, "deploi": 26, "ha": 26, "been": 26, "move": 26, "multipi": 26, "determinist": 27, "come": [28, 2042], "which": [28, 2042, 2102], "choos": 28, "network": [28, 2045, 2050, 2055], "interfac": [28, 1278, 2053], "other": [28, 2036, 2059, 2068, 2080, 2089, 2101], "nccl": [28, 2045], "basic": [28, 2016, 2034, 2049, 2075, 2086, 2109], "initi": [28, 2036, 2055, 2086], "tcp": 28, "share": [28, 2032, 2045, 2068, 2077], "system": [28, 2016, 2032, 2049], "post": [28, 2070], "shutdown": 28, "reiniti": 28, "kei": [28, 81, 2061], "valu": [28, 76, 616, 2015, 2016, 2058, 2072, 2077], "store": [28, 47], "group": 28, "devicemesh": 28, "point": [28, 2054], "synchron": [28, 1006, 1084, 1394, 1410, 2008], "asynchron": [28, 2016, 2045, 2057], "third": 28, "parti": 28, "launch": [28, 48, 2109], "spawn": [28, 2032], "applic": 28, "breakpoint": 28, "monitor": [28, 2029], "barrier": 28, "torch_distributed_debug": 28, "log": [28, 50, 371, 1346, 2054], "join": [29, 2089], "elast": [31, 37, 48], "advanc": [31, 2055], "plugin": 31, "pipelin": 33, "parallel": [33, 34, 1699, 2045, 2047, 2050, 2089], "why": [33, 56, 61, 2045, 2046, 2052, 2068, 2080, 2102], "i": [33, 53, 61, 2023, 2042, 2046, 2068, 2100, 2101, 2102, 2108], "step": [33, 1791, 2067, 2104], "pipelinestag": 33, "execut": [33, 2016, 2042, 2045, 2068, 2102], "pipelineschedul": 33, "option": [33, 58, 2015, 2044, 2061, 2067], "split": [33, 549, 1915], "hug": 33, "face": 33, "technic": 33, "deep": [33, 52, 2092, 2099], "dive": [33, 52, 2092, 2099], "your": [33, 2068], "own": [33, 2042], "schedul": [33, 2067], "microbatch": 33, "stage": 33, "probabl": 35, "score": 35, "pathwis": 35, "deriv": [35, 2042], "exponentialfamili": 35, "bernoulli": [35, 155, 945], "binomi": 35, "categor": 35, "cauchi": 35, "chi2": 35, "continuousbernoulli": 35, "dirichlet": 35, "exponenti": [35, 1883], "fishersnedecor": 35, "gamma": 35, "geometr": 35, "gumbel": 35, "halfcauchi": 35, "halfnorm": 35, "independ": 35, "inversegamma": 35, "kumaraswami": 35, "lkjcholeski": 35, "laplac": 35, "lognorm": 35, "lowrankmultivariatenorm": 35, "mixturesamefamili": 35, "multinomi": [35, 423, 1412], "multivariatenorm": 35, "negativebinomi": 35, "normal": [35, 1669, 1772, 2036, 2067], "onehotcategor": 35, "pareto": 35, "poisson": [35, 1819], "relaxedbernoulli": 35, "logitrelaxedbernoulli": 35, "relaxedonehotcategor": 35, "studentt": 35, "transformeddistribut": 35, "uniform": 35, "vonmis": 35, "weibul": 35, "wishart": 35, "kl": 35, "diverg": [35, 2018], "constraint": [35, 2045], "registri": [35, 47, 89], "dlpack": 36, "agent": 37, "server": [37, 47, 50], "concept": 37, "watchdog": 37, "health": 37, "plane": 38, "launcher": 39, "rendezv": [39, 47, 48], "handler": [39, 44, 47, 2050], "metric": [39, 44], "error": [40, 2050, 2061, 2070, 2071, 2111], "propag": [40, 2034], "object": [41, 73, 2068], "torchelast": 43, "kubernet": 43, "multipl": [45, 2041, 2045, 2048], "worker": [45, 48, 2050], "quickstart": 46, "dataclass": 47, "except": [47, 2050], "dynam": [47, 52, 60, 64, 75, 76, 2070, 2073, 2098, 2101], "c10d": 47, "etcd": 47, "legaci": 47, "torchrun": 48, "node": [48, 53, 82, 85, 88, 924, 925, 926, 927, 928, 
2042], "stack": [48, 1920, 2070], "fault": 48, "toler": 48, "size": [48, 539, 2035, 2051, 2079], "failur": 48, "min": [48, 413, 1375], "max": [48, 409, 1370], "4": [48, 58], "up": [48, 2102], "membership": [48, 2016], "definit": [48, 2016], "deploy": [48, 2054], "import": [48, 2011, 2061, 2068, 2101], "notic": [48, 2011], "subprocess": [49, 2032], "handl": [49, 2102], "retriev": 49, "subprocesshandl": 49, "expir": 50, "timer": 50, "client": 50, "write": [50, 64, 2042, 2065, 2110], "info": [50, 2116], "train": [51, 2041, 2055, 2057, 2070, 2102], "script": [51, 1284, 2013, 2061, 2065], "exist": 52, "framework": [52, 2068, 2075], "an": [52, 2011, 2065, 2067, 2068], "strict": 52, "express": [52, 2015, 2016], "special": [52, 2016, 2042, 2081, 2099], "shape": [52, 60, 75, 525, 2065, 2098, 2099, 2101], "primit": [52, 2016], "contain": [52, 2036], "limit": [52, 60, 64, 2011, 2049, 2065, 2096, 2103, 2107], "break": [52, 2015, 2016, 2099, 2102, 2109, 2111], "depend": [52, 60, 2063, 2068, 2076], "miss": 52, "fake": [52, 2101], "meta": [52, 2016, 2024], "abstract": 52, "kernel": [52, 940, 2053, 2070, 2105, 2109], "read": [52, 56, 2065, 2092, 2108], "addit": [52, 81], "link": 52, "user": [52, 2068, 2077, 2092], "ir": [53, 2106, 2110], "assumpt": [53, 2077], "exportedprogram": 53, "call_funct": 53, "metadata": [53, 924, 2054], "placehold": 53, "get_attr": 53, "symint": [53, 2098], "faketensor": 53, "pytre": 53, "abl": 53, "helper": 54, "fullyshardeddataparallel": 55, "func": [56, 57, 59, 61, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 2048, 2049, 2102], "ar": [56, 61, 2011, 2042, 2068, 2099, 2102], "compos": [56, 61, 2097], "patch": [58, 2068], "norm": [58, 455, 1329, 1771], "": [58, 2068, 2102, 2108], "happen": 58, "batchnorm": 58, "paramet": [58, 1717, 2015, 2067], "functorch": [58, 59], "eval": [58, 2042], "migrat": [59, 2013], "make_funct": 59, "combine_state_for_ensembl": 59, "ux": 60, "vmap": [60, 61, 895, 1177, 1976, 2048, 2049, 2102], "mutat": [60, 79, 2089], "arbitrari": [60, 2068], "structur": [60, 72, 2016, 2080], "out": [60, 2033, 2050], "item": [60, 354], "nonzero": [60, 454, 1770], "friend": 60, "whirlwind": 61, "tour": 61, "grad": [61, 292, 917, 1167, 2042, 2045, 2102], "auto": 61, "vector": 61, "vjp": [61, 916, 1176], "jacobian": [61, 913], "product": [61, 2021, 2102], "jvp": [61, 894, 914, 1172, 2049], "jacrev": [61, 1171], "jacfwd": [61, 1170], "hessian": [61, 911, 1169], "__future__": 62, "futur": 63, "quick": 64, "primer": 64, "manipul": [64, 2034], "direct": 64, "subgraph": [64, 2110], "rewrit": [64, 2110], "With": [64, 2018], "replace_pattern": 64, "proxi": 64, "retrac": 64, "interpret": [64, 2013], "pattern": [64, 2015, 2065, 2068], "introduct": [64, 2023, 2035, 2070, 2099], "pitfal": [64, 2065], "pdb": 64, "print": [64, 2015, 2016], "to_fold": 64, "graphmodul": 64, "avail": 64, "debugg": 64, "symbol": [64, 84, 87, 2065, 2070, 2089, 2099], "trace": [64, 593, 1288, 1947, 2013, 2065, 2069, 2070, 2102, 2103, 2109], "static": [64, 2065, 2070, 2099], "tracer": [64, 2013], "leaf": 64, "miscellanea": 64, "experiment": [65, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 2021], "symbolic_shap": [65, 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212], "exportdb": 66, "tag": [66, 2089], "support": [66, 2014, 2016, 2023, 2033, 2034, 2035, 2049, 2065, 2066, 2070, 2080, 2102, 2107], "assume_constant_result": [66, 77, 978], 
"autograd_funct": 66, "class_method": 66, "cond_branch_class_method": [66, 74, 75], "cond_branch_nested_funct": [66, 74, 75], "cond_branch_nonlocal_vari": [66, 74, 75], "cond_closed_over_vari": [66, 69, 74], "cond_operand": [66, 74, 75], "cond_pred": [66, 74, 75], "constrain_as_size_exampl": [66, 76, 77], "constrain_as_value_exampl": [66, 76, 77], "decor": 66, "dictionari": [66, 72, 2016], "dynamic_shape_assert": [66, 67], "dynamic_shape_constructor": [66, 75], "dynamic_shape_if_guard": [66, 71, 75], "dynamic_shape_map": [66, 75, 78], "dynamic_shape_sl": [66, 75], "dynamic_shape_view": [66, 75], "fn_with_kwarg": [66, 72], "list_contain": [66, 67, 72, 75], "list_unpack": [66, 71, 72], "nested_funct": [66, 69], "null_context_manag": [66, 70], "pytree_flatten": 66, "scalar_output": [66, 75], "specialized_attribut": 66, "static_for_loop": [66, 71], "static_if": [66, 71], "tensor_setattr": [66, 68], "type_reflection_method": [66, 68], "user_input_mut": [66, 79], "Not": [66, 2018], "yet": 66, "dynamic_shape_round": [66, 68, 75], "model_attr_mut": [66, 73], "optional_input": [66, 73], "torch_sym_min": [66, 80], "assert": [67, 2016, 2053], "builtin": [68, 2014], "closur": [69, 2067], "escap": [77, 2102], "hatch": [77, 2102], "fxe0007": 81, "fxe0008": 82, "fxe0010": 83, "pass": [83, 2016, 2045, 2057, 2070, 2076, 2110], "fxe0011": 84, "call": [84, 2015, 2016, 2049, 2102], "fxe0012": 85, "unsupport": [85, 2015, 2016, 2018, 2065, 2066], "analysi": 85, "fxe0013": 86, "fxe0014": 87, "find": [87, 89, 2068, 2109], "opschema": 87, "match": [87, 2015, 2034], "fxe0015": 88, "insert": 88, "fxe0016": 89, "overload": 89, "abs_": 92, "absolut": [93, 684], "absolute_": 94, "aco": [95, 685], "acos_": 96, "acosh": [97, 686], "acosh_": 98, "add_": 100, "addbmm": [101, 688], "addbmm_": 102, "addcdiv": [103, 689], "addcdiv_": 104, "addcmul": [105, 690], "addcmul_": 106, "addmm": [107, 691, 1900], "addmm_": 108, "addmv": [109, 692], "addmv_": 110, "addr": [111, 693], "addr_": 112, "adjoint": [113, 694], "all": [114, 695, 2048, 2065, 2067], "allclos": [115, 696], "amax": [116, 697], "amin": [117, 698], "aminmax": [118, 699], "ani": [120, 701, 2016, 2102, 2108], "apply_": 121, "arcco": [122, 869], "arccos_": 123, "arccosh": [124, 870], "arccosh_": 125, "arcsin": [126, 871], "arcsin_": 127, "arcsinh": [128, 872], "arcsinh_": 129, "arctan": [130, 873], "arctan2": [131, 874], "arctan2_": 132, "arctan_": 133, "arctanh": [134, 875], "arctanh_": 135, "argmax": [136, 877], "argmin": [137, 878], "argsort": [138, 879], "argwher": [139, 880], "as_strid": [140, 881], "as_subclass": 141, "asin": [142, 884], "asin_": 143, "asinh": [144, 885], "asinh_": 145, "atan": [146, 886], "atan2": [147, 887], "atan2_": 148, "atan_": 149, "atanh": [150, 888], "atanh_": 151, "backward": [152, 892, 896, 2042, 2043, 2045, 2052, 2076], "baddbmm": [153, 943], "baddbmm_": 154, "bernoulli_": 156, "bincount": [158, 946], "bitwise_and": [159, 947], "bitwise_and_": 160, "bitwise_left_shift": [161, 948], "bitwise_left_shift_": 162, "bitwise_not": [163, 949], "bitwise_not_": 164, "bitwise_or": [165, 950], "bitwise_or_": 166, "bitwise_right_shift": [167, 951], "bitwise_right_shift_": 168, "bitwise_xor": [169, 952], "bitwise_xor_": 170, "bmm": [171, 955], "bool": 172, "broadcast_to": [173, 958], "byte": 174, "cauchy_": 175, "ccol_indic": 176, "cdoubl": 177, "ceil": [178, 964], "ceil_": 179, "cfloat": 180, "chalf": 181, "char": 182, "choleski": [183, 966, 1302], "cholesky_invers": [184, 967], "cholesky_solv": [185, 968], "chunk": [186, 969], "clamp": [187, 773, 
970], "clamp_": 188, "clip": [189, 971, 2041], "clip_": 190, "clone": [191, 972], "coalesc": 192, "col_indic": 193, "conj": [194, 990], "conj_phys": [195, 991], "conj_physical_": 196, "contigu": 197, "copy_": 198, "copysign": [199, 992], "copysign_": 200, "corrcoef": [201, 993], "co": [202, 994], "cos_": 203, "cosh": [204, 995], "cosh_": 205, "count_nonzero": [206, 996], "cov": [207, 997], "cross": [209, 1007, 1305, 2042], "crow_indic": 210, "cummax": [212, 1087], "cummin": [213, 1088], "cumprod": [214, 1089], "cumprod_": 215, "cumsum": [216, 1090], "cumsum_": 217, "data_ptr": 218, "deg2rad": [219, 1092], "dense_dim": 220, "dequant": [221, 1093, 2070], "det": [222, 1094, 1306], "detach": 223, "detach_": 224, "devic": [225, 1029, 1403, 1986, 2024, 2045, 2058, 2083], "diag": [226, 1095], "diag_emb": [227, 1096], "diagflat": [228, 1097], "diagon": [229, 1098, 1307], "diagonal_scatt": [230, 1099], "diff": [231, 1100], "digamma": [232, 1101], "digamma_": 233, "dim": [234, 2033], "dim_ord": 235, "dist": [236, 1102], "div": [237, 1103, 2060], "div_": 238, "divid": [239, 1104], "divide_": 240, "dot": [241, 1105], "doubl": 242, "dsplit": [243, 1106], "element_s": 244, "eq": [245, 1113], "eq_": 246, "equal": [247, 1114], "erf": [248, 1115], "erf_": 249, "erfc": [250, 1116], "erfc_": 251, "erfinv": [252, 1117], "erfinv_": 253, "exp": [254, 1118], "exp_": 255, "expand": 256, "expand_a": 257, "expm1": [258, 1120], "expm1_": 259, "exponential_": 260, "fill_": 261, "fill_diagonal_": 262, "fix_": 264, "flatten": [265, 1147, 1471], "flip": [266, 1148], "fliplr": [267, 1149], "flipud": [268, 1150], "float": [269, 2060], "float_pow": [270, 1151], "float_power_": 271, "floor": [272, 1152], "floor_": 273, "floor_divid": [274, 1153], "floor_divide_": 275, "fmax": [276, 1154], "fmin": [277, 1155], "fmod": [278, 1156], "fmod_": 279, "frac": [280, 1157], "frac_": 281, "frexp": [282, 1158], "gather": [283, 1022, 1213], "gcd": [284, 1214], "gcd_": 285, "ge": [286, 1215], "ge_": 287, "geometric_": 288, "geqrf": [289, 1216], "ger": [290, 1217], "get_devic": 291, "greater": [293, 1227], "greater_": 294, "greater_equ": [295, 1228], "greater_equal_": 296, "gt": [297, 1229], "gt_": 298, "half": 299, "hardshrink": [300, 1481, 1635], "heavisid": [301, 1232], "histc": [302, 1233], "histogram": [303, 1234], "hsplit": [304, 1236], "hypot": [305, 1239], "hypot_": 306, "i0": [307, 1240], "i0_": 308, "igamma": [309, 1241], "igamma_": 310, "igammac": [311, 1242], "igammac_": 312, "index_add": [314, 1244], "index_add_": 315, "index_copi": [316, 1245], "index_copy_": 317, "index_fil": 318, "index_fill_": 319, "index_put": 320, "index_put_": 321, "index_reduc": [322, 1246], "index_reduce_": 323, "index_select": [324, 1247], "indic": [325, 2012], "inner": [326, 1249], "int": [327, 2099], "int_repr": 328, "invers": [329, 1250, 2021], "is_coalesc": 330, "is_complex": [331, 1251], "is_conj": [332, 1252], "is_contigu": 333, "is_cuda": 334, "is_floating_point": [335, 1254], "is_infer": 336, "is_leaf": 337, "is_meta": 338, "is_pin": 339, "is_quant": 340, "is_set_to": 341, "is_shar": 342, "is_sign": 343, "is_spars": 344, "is_sparse_csr": 345, "isclos": [346, 1261], "isfinit": [347, 1262], "isinf": [348, 1264], "isnan": [349, 1265], "isneginf": [350, 1266], "isposinf": [351, 1267], "isreal": [352, 1268], "istft": [353, 1269], "items": 355, "kthvalu": [356, 1294], "lcm": [357, 1295], "lcm_": 358, "ldexp": [359, 1296], "ldexp_": 360, "le": [361, 1297], "le_": 362, "lerp": [363, 1298], "lerp_": 364, "less": [365, 1299], "less_": 366, "less_equ": 
[367, 1300], "less_equal_": 368, "lgamma": [369, 1301], "lgamma_": 370, "log10": [372, 1347], "log10_": 373, "log1p": [374, 1348], "log1p_": 375, "log2": [376, 1349], "log2_": 377, "log_": 378, "log_normal_": 379, "logaddexp": [380, 1350], "logaddexp2": [381, 1351], "logcumsumexp": [382, 1352], "logdet": [383, 1353], "logical_and": [384, 1354], "logical_and_": 385, "logical_not": [386, 1355], "logical_not_": 387, "logical_or": [388, 1356], "logical_or_": 389, "logical_xor": [390, 1357], "logical_xor_": 391, "logit": [392, 1358], "logit_": 393, "logsumexp": [394, 1360], "long": 395, "lt": [396, 1361], "lt_": 397, "lu": [398, 1319, 1362], "lu_solv": [399, 1322, 1363], "map_": 400, "masked_fil": 401, "masked_fill_": 402, "masked_scatt": 403, "masked_scatter_": 404, "masked_select": [405, 1366], "matmul": [406, 1323, 1367], "matrix_exp": [407, 1324, 1368], "matrix_pow": [408, 1326, 1369], "maximum": [410, 1371], "mean": [411, 1372], "median": [412, 1373], "minimum": [414, 1376], "mm": [415, 1377, 1904], "module_load": 417, "moveaxi": [418, 1379], "movedim": [419, 1380], "msort": [420, 1395], "mul": [421, 1411], "mul_": 422, "multipli": [424, 1413], "multiply_": 425, "mv": [426, 1414], "mvlgamma": [427, 1415], "mvlgamma_": 428, "nan_to_num": [429, 1416], "nan_to_num_": 430, "nanmean": [431, 1417], "nanmedian": [432, 1418], "nanquantil": [433, 1419], "nansum": [434, 1420], "narrow": [435, 1421], "narrow_copi": [436, 1422], "nbyte": 437, "ndim": 438, "ndimens": 439, "ne": [440, 1423], "ne_": 441, "neg": [442, 444, 1424, 1425], "neg_": 443, "negative_": 445, "nelement": 446, "new_empti": 447, "new_ful": 448, "new_on": 449, "new_tensor": 450, "new_zero": 451, "nextaft": [452, 1426], "nextafter_": 453, "normal_": 456, "not_equ": [457, 1773], "not_equal_": 458, "numel": [459, 1774], "numpi": [460, 2065, 2102], "orgqr": [461, 1813], "ormqr": [462, 1814], "outer": [463, 1815], "permut": [464, 1817, 2033], "pin_memori": 465, "pinvers": [466, 1818], "polygamma": [467, 1821], "polygamma_": 468, "posit": [469, 1822], "pow": [470, 1823], "pow_": 471, "prod": [472, 1824], "put_": 473, "q_per_channel_axi": 474, "q_per_channel_scal": 475, "q_per_channel_zero_point": 476, "q_scale": 477, "q_zero_point": 478, "qr": [479, 1331, 1826], "qscheme": 480, "quantil": [481, 1827], "rad2deg": [482, 1834], "random_": 483, "ravel": [484, 1843], "reciproc": [486, 1845], "reciprocal_": 487, "record_stream": 488, "register_hook": [489, 927], "register_post_accumulate_grad_hook": 490, "remaind": [491, 1846], "remainder_": 492, "renorm": [493, 1847], "renorm_": 494, "repeat": 495, "repeat_interleav": [496, 1848], "requires_grad": [497, 2042], "requires_grad_": 498, "reshap": [499, 1849], "reshape_a": 500, "resize_": 501, "resize_as_": 502, "resolve_conj": [503, 1850], "resolve_neg": [504, 1851], "retain_grad": 505, "retains_grad": 506, "roll": [507, 1853], "rot90": [508, 1854], "round": [509, 1855], "round_": 510, "row_indic": 511, "rsqrt": [512, 1857], "rsqrt_": 513, "scatter": [514, 1024, 1859], "scatter_": 515, "scatter_add": [516, 1860], "scatter_add_": 517, "scatter_reduc": [518, 1861], "scatter_reduce_": 519, "select": [520, 1864, 2023], "select_scatt": [521, 1865], "set_": 522, "sgn": [523, 1877], "sgn_": 524, "share_memory_": 526, "short": 527, "sigmoid": [528, 761, 1557, 1686, 1878], "sigmoid_": 529, "sign": [530, 1879], "sign_": 531, "signbit": [532, 1891], "sin": [533, 1892], "sin_": 534, "sinc": [535, 1893], "sinc_": 536, "sinh": [537, 1894], "sinh_": 538, "slice_scatt": [540, 1895], "slogdet": [541, 1332, 1896], 
"smm": [542, 1897], "softmax": [543, 1560, 1690, 1898, 1906], "sort": [544, 1899], "sparse_dim": 545, "sparse_mask": 546, "sparse_resize_": 547, "sparse_resize_and_clear_": 548, "sqrt": [550, 1916], "sqrt_": 551, "squar": [552, 1917], "square_": 553, "squeez": [554, 1918], "squeeze_": 555, "sspaddmm": [556, 1919], "std": [557, 1921], "stft": [558, 1923], "storag": [559, 2082], "storage_offset": 560, "storage_typ": 561, "stride": 562, "sub": [563, 1924], "sub_": 564, "subtract": [565, 1925], "subtract_": 566, "sum": [567, 1908, 1926, 2036], "sum_to_s": 568, "svd": [569, 1336, 1927], "swapax": [570, 1929], "swapdim": [571, 1930], "t": [572, 1937, 2016, 2050, 2102], "t_": 573, "take": [574, 1938, 2067], "take_along_dim": [575, 1939], "tan": [576, 1940], "tan_": 577, "tanh": [578, 1567, 1695, 1941], "tanh_": 579, "tensor_split": [580, 1943], "tile": [581, 1945], "to_dens": 583, "to_mkldnn": 584, "to_spars": 585, "to_sparse_bsc": 586, "to_sparse_bsr": 587, "to_sparse_coo": 588, "to_sparse_csc": 589, "to_sparse_csr": 590, "tolist": 591, "topk": [592, 1946], "transpos": [594, 1948], "transpose_": 595, "triangular_solv": [596, 1951], "tril": [597, 1952], "tril_": 598, "triu": [599, 1954], "triu_": 600, "true_divid": [601, 1956], "true_divide_": 602, "trunc": [603, 1957], "trunc_": 604, "type_a": 606, "unbind": [607, 1958, 2035], "unflatten": [608, 1577, 1959], "unfold": [609, 1578, 1702], "uniform_": 610, "uniqu": [611, 1960], "unique_consecut": [612, 1961], "unsqueez": [613, 1963], "unsqueeze_": 614, "untyped_storag": 615, "var": [617, 1971], "vdot": [618, 1973], "view": [619, 2023, 2060, 2084, 2109], "view_a": 620, "vsplit": [621, 1977], "where": [622, 1979, 2011], "xlogi": [623, 1980], "xlogy_": 624, "zero_": 625, "_assert": 626, "_foreach_ab": 627, "_foreach_abs_": 628, "_foreach_aco": 629, "_foreach_acos_": 630, "_foreach_asin": 631, "_foreach_asin_": 632, "_foreach_atan": 633, "_foreach_atan_": 634, "_foreach_ceil": 635, "_foreach_ceil_": 636, "_foreach_co": 637, "_foreach_cos_": 638, "_foreach_cosh": 639, "_foreach_cosh_": 640, "_foreach_erf": 641, "_foreach_erf_": 642, "_foreach_erfc": 643, "_foreach_erfc_": 644, "_foreach_exp": 645, "_foreach_exp_": 646, "_foreach_expm1": 647, "_foreach_expm1_": 648, "_foreach_floor": 649, "_foreach_floor_": 650, "_foreach_frac": 651, "_foreach_frac_": 652, "_foreach_lgamma": 653, "_foreach_lgamma_": 654, "_foreach_log": 655, "_foreach_log10": 656, "_foreach_log10_": 657, "_foreach_log1p": 658, "_foreach_log1p_": 659, "_foreach_log2": 660, "_foreach_log2_": 661, "_foreach_log_": 662, "_foreach_neg": 663, "_foreach_neg_": 664, "_foreach_reciproc": 665, "_foreach_reciprocal_": 666, "_foreach_round": 667, "_foreach_round_": 668, "_foreach_sigmoid": 669, "_foreach_sigmoid_": 670, "_foreach_sin": 671, "_foreach_sin_": 672, "_foreach_sinh": 673, "_foreach_sinh_": 674, "_foreach_sqrt": 675, "_foreach_sqrt_": 676, "_foreach_tan": 677, "_foreach_tan_": 678, "_foreach_trunc": 679, "_foreach_trunc_": 680, "_foreach_zero_": 681, "_log": [682, 2022], "set_log": 682, "bnrelu2d": [702, 725], "bnrelu3d": [703, 726], "convbn1d": [704, 714], "convbn2d": [705, 715], "convbn3d": [706, 716], "convbnrelu1d": [707, 717], "convbnrelu2d": [708, 718], "convbnrelu3d": [709, 719], "convrelu1d": [710, 727], "convrelu2d": [711, 720, 728], "convrelu3d": [712, 721, 729], "linearrelu": [713, 722, 730, 731], "freeze_bn_stat": 723, "update_bn_stat": 724, "conv2d": [732, 741, 775, 1454, 1608], "conv3d": [733, 742, 776, 1455, 1609], "lstm": [736, 764, 1496, 2059], "multiheadattent": [737, 
1532], "batchnorm2d": [738, 1441], "batchnorm3d": [739, 1442], "conv1d": [740, 774, 1453, 1607], "convtranspose1d": [743, 1456], "convtranspose2d": [744, 1457], "convtranspose3d": [745, 1458], "elu": [746, 777, 1467, 1621], "embed": [747, 1468, 1623], "embeddingbag": [748, 1469], "fxfloatfunct": 749, "floatfunct": 750, "groupnorm": [751, 1480], "hardswish": [752, 779, 1483, 1637], "instancenorm1d": [753, 1488], "instancenorm2d": [754, 1489], "instancenorm3d": [755, 1490], "layernorm": [756, 1498], "leakyrelu": [757, 1512], "qfunction": 759, "relu6": [760, 1547, 1679], "gru": [762, 1477], "grucel": [763, 1478], "lstmcell": [765, 1497], "rnncell": [767, 1544], "adaptive_avg_pool2d": [768, 1592], "adaptive_avg_pool3d": [769, 1593], "avg_pool2d": [770, 1600], "avg_pool3d": [771, 1601], "celu": [772, 1444, 1606], "hardsigmoid": [778, 1482, 1636], "hardtanh": [780, 1484, 1638], "interpol": [781, 1643], "leaky_relu": [782, 1647], "max_pool1d": [784, 1657], "max_pool2d": [785, 1658], "threshold": [786, 1569, 1697], "upsampl": [787, 1579, 1703], "upsample_bilinear": [788, 1704], "upsample_nearest": [789, 1705], "dequantstub": 790, "quantstub": 791, "quantwrapp": 792, "add_quant_dequ": 793, "backendconfig": 794, "backendpatternconfig": 795, "dtypeconfig": 796, "dtypewithconstraint": 797, "observationtyp": 798, "convert": [799, 2101], "default_eval_fn": 800, "fakequant": [801, 2070], "fakequantizebas": 802, "fixedqparamsfakequant": 803, "fusedmovingavgobsfakequant": 804, "default_fake_qu": 805, "default_fused_act_fake_qu": 806, "default_fused_per_channel_wt_fake_qu": 807, "default_fused_wt_fake_qu": 808, "default_histogram_fake_qu": 809, "default_per_channel_weight_fake_qu": 810, "default_weight_fake_qu": 811, "disable_fake_qu": 812, "disable_observ": 813, "enable_fake_qu": 814, "enable_observ": 815, "fuse_modul": 816, "convertcustomconfig": 817, "fusecustomconfig": 818, "preparecustomconfig": 819, "standalonemoduleconfigentri": 820, "histogramobserv": 821, "minmaxobserv": 822, "movingaverageminmaxobserv": 823, "movingaverageperchannelminmaxobserv": 824, "noopobserv": 825, "observerbas": 826, "perchannelminmaxobserv": 827, "placeholderobserv": 828, "recordingobserv": 829, "default_debug_observ": 830, "default_dynamic_quant_observ": 831, "default_float_qparams_observ": 832, "default_histogram_observ": 833, "default_observ": 834, "default_per_channel_weight_observ": 835, "default_placeholder_observ": 836, "default_weight_observ": 837, "get_observer_state_dict": 838, "load_observer_state_dict": 839, "prepar": [840, 2070, 2073], "prepare_qat": 841, "propagate_qconfig": 842, "model_is_export": 843, "qconfig": [844, 2070, 2073], "default_activation_only_qconfig": 845, "default_debug_qconfig": 846, "default_dynamic_qconfig": 847, "default_per_channel_qconfig": 848, "default_qat_qconfig": 849, "default_qat_qconfig_v2": 850, "default_qconfig": 851, "default_weight_only_qconfig": 852, "float16_dynamic_qconfig": 853, "float16_static_qconfig": 854, "float_qparams_weight_only_qconfig": 855, "per_channel_dynamic_qconfig": 856, "qconfigmap": 857, "get_default_qat_qconfig_map": 858, "get_default_qconfig_map": 859, "quantiz": [860, 2036, 2055, 2065, 2070, 2071, 2072, 2073], "quantize_dynam": 861, "convert_fx": 862, "fuse_fx": 863, "prepare_fx": 864, "prepare_qat_fx": 865, "quantize_qat": 866, "swap_modul": 867, "arang": 868, "are_deterministic_algorithms_en": 876, "as_tensor": 882, "asarrai": 883, "atleast_1d": 889, "atleast_2d": 890, "atleast_3d": 891, "unpackeddualtensor": 897, "dual_level": 898, "forward_ad": 
[899, 900, 901, 902], "enter_dual_level": 899, "exit_dual_level": 900, "make_du": 901, "unpack_du": 902, "backwardcfunct": 903, "functionctx": [904, 905, 906, 907], "mark_dirti": 904, "mark_non_differenti": 905, "save_for_backward": 906, "set_materialize_grad": 907, "inplacefunct": 908, "nestediofunct": 909, "once_differenti": 910, "hvp": 912, "vhp": 915, "inference_mod": 918, "set_grad_en": 919, "set_multithreading_en": 920, "gradcheck": [921, 922, 923, 2052], "gradcheckerror": 921, "gradgradcheck": [923, 2052], "name": [925, 2015, 2033, 2034], "next_funct": 926, "register_prehook": 928, "increment_vers": 929, "enforceuniqu": 930, "kinetosteptrack": 931, "load_nvprof": 932, "parse_nvprof_trac": 933, "export_chrome_trac": 934, "key_averag": 935, "self_cpu_time_tot": 936, "total_averag": 937, "record_funct": 938, "interv": 939, "memrecordsacc": 941, "stringtabl": 942, "bartlett_window": 944, "blackman_window": 953, "block_diag": 954, "broadcast_shap": 956, "broadcast_tensor": 957, "bucket": 959, "can_cast": 960, "cartesian_prod": 961, "cat": 962, "cdist": 963, "chain_matmul": 965, "column_stack": 973, "combin": [974, 2048], "compiled_with_cxx11_abi": 976, "allow_in_graph": [977, 2103], "cudagraph_mark_step_begin": 980, "is_compil": 982, "is_dynamo_compil": 983, "list_backend": 984, "reset": 985, "concat": 987, "concaten": 988, "streamcontext": [999, 1014, 1399, 1983], "current_devic": [1000, 1026, 1400, 1984], "current_stream": [1001, 1027, 1401, 1985], "device_count": [1002, 1030, 1382, 1404, 1987], "is_avail": [1003, 1047, 1406, 1997], "set_devic": [1004, 1077, 2003], "cudagraph": [1008, 2096], "cudapluggablealloc": 1009, "externalstream": 1011, "outofmemoryerror": 1012, "caching_allocator_alloc": 1015, "caching_allocator_delet": 1016, "can_device_access_p": 1017, "change_current_alloc": 1018, "clock_rat": 1019, "comm": [1020, 1021, 1022, 1023, 1024], "broadcast": [1020, 2043], "broadcast_coalesc": 1021, "reduce_add": 1023, "current_blas_handl": 1025, "default_stream": [1028, 1402], "device_of": [1031, 1988], "empty_cach": [1032, 1384, 1989], "get_allocator_backend": 1033, "get_arch_list": 1034, "get_device_cap": [1035, 1990], "get_device_nam": [1036, 1991], "get_device_properti": [1037, 1992], "get_gencode_flag": 1038, "get_rng_stat": [1039, 1225, 1386, 1993], "get_rng_state_al": [1040, 1994], "get_sync_debug_mod": 1041, "graph_pool_handl": 1043, "init": [1044, 1405, 1995, 2040], "initial_se": [1045, 1248, 1996], "ipc_collect": 1046, "is_current_stream_captur": 1048, "is_initi": [1049, 1407, 1998], "_create_jit_fn": 1050, "_create_multi_output_jit_fn": 1051, "list_gpu_process": 1052, "make_graphed_cal": 1053, "manual_se": [1054, 1365, 1387, 1999], "manual_seed_al": [1055, 2000], "max_memory_alloc": 1056, "max_memory_cach": 1057, "max_memory_reserv": 1058, "mem_get_info": 1059, "memory_alloc": 1060, "memory_cach": 1061, "memory_reserv": 1062, "memory_snapshot": 1063, "memory_stat": 1064, "memory_summari": 1065, "memory_usag": 1066, "mark": 1067, "rang": [1068, 1842, 2015], "range_pop": 1069, "range_push": 1070, "power_draw": 1071, "reset_max_memory_alloc": 1072, "reset_max_memory_cach": 1073, "reset_peak_memory_stat": 1074, "seed": [1075, 1391, 1863, 2001], "seed_al": [1076, 2002], "set_per_process_memory_fract": [1078, 1392], "set_rng_stat": [1079, 1393, 1875, 2004], "set_rng_state_al": [1080, 2005], "set_stream": [1081, 1408, 2006], "set_sync_debug_mod": 1082, "temperatur": 1085, "cumulative_trapezoid": 1091, "dstack": 1107, "einsum": 1108, "empti": 1109, "empty_lik": 1110, 
"empty_strid": 1111, "enable_grad": 1112, "exp2": 1119, "ey": 1121, "fake_quantize_per_channel_affin": 1122, "fake_quantize_per_tensor_affin": 1123, "fft2": 1125, "fftfreq": 1126, "fftn": 1127, "fftshift": 1128, "hfft": 1129, "hfft2": 1130, "hfftn": 1131, "ifft": 1132, "ifft2": 1133, "ifftn": 1134, "ifftshift": 1135, "ihfft": 1136, "ihfft2": 1137, "ihfftn": 1138, "irfft": 1139, "irfft2": 1140, "irfftn": 1141, "rfft": 1142, "rfft2": 1143, "rfftfreq": 1144, "rfftn": 1145, "from_dlpack": 1159, "from_fil": 1160, "from_numpi": 1161, "frombuff": 1162, "full": [1163, 2060], "full_lik": 1164, "functional_cal": [1165, 1766], "grad_and_valu": 1168, "replace_all_batch_norm_modules_": 1174, "stack_module_st": 1175, "callmethodkei": 1178, "convertintkei": 1179, "dimconstraint": 1180, "dimdynam": [1181, 2098], "dividebykei": 1182, "equalityconstraint": 1183, "innertensorkei": 1184, "propagateunbackedsymint": 1185, "relaxedunspecconstraint": 1186, "shapeenv": 1187, "shapeenvset": 1188, "statefulsymboliccontext": 1189, "statelesssymboliccontext": 1190, "strictminmaxconstraint": 1191, "subclasssymboliccontext": 1192, "symboliccontext": 1193, "canonicalize_bool_expr": 1194, "check_consist": 1195, "compute_unbacked_bind": 1196, "constrain_rang": 1197, "constrain_unifi": 1198, "definitely_fals": 1199, "definitely_tru": 1200, "guard_size_oblivi": 1201, "has_free_symbol": 1202, "hint_int": 1203, "is_concrete_bool": 1204, "is_concrete_int": 1205, "lru_cach": 1206, "parallel_and": 1207, "parallel_or": 1208, "rebind_unback": 1209, "resolve_unbacked_bind": 1210, "statically_known_tru": 1211, "sym_eq": 1212, "get_default_devic": 1218, "get_default_dtyp": 1219, "get_deterministic_debug_mod": 1220, "get_device_modul": 1221, "get_float32_matmul_precis": 1222, "get_num_interop_thread": 1223, "get_num_thread": 1224, "hamming_window": 1230, "hann_window": 1231, "histogramdd": 1235, "hspmm": 1237, "hstack": 1238, "is_deterministic_algorithms_warn_only_en": 1253, "is_grad_en": 1255, "is_inference_mode_en": 1256, "is_nonzero": 1257, "is_storag": 1258, "is_tensor": 1259, "is_warn_always_en": 1260, "isin": 1263, "attribut": [1270, 2013, 2015, 2016, 2018, 2083], "scriptfunct": 1271, "scriptmodul": [1272, 2060], "annot": [1273, 2016], "enable_onednn_fus": 1274, "fork": 1275, "freez": 1276, "ignor": 1277, "isinst": 1279, "onednn_fusion_en": 1281, "optimize_for_infer": 1282, "save": [1283, 1858, 2011, 2042, 2054, 2060, 2070], "script_if_trac": 1285, "set_fusion_strategi": 1286, "strict_fus": 1287, "trace_modul": 1289, "unus": 1290, "wait": 1291, "kaiser_window": 1292, "kron": 1293, "cholesky_ex": 1303, "eig": 1308, "eigh": 1309, "eigval": 1310, "eigvalsh": 1311, "householder_product": 1312, "inv": 1313, "inv_ex": 1314, "ldl_factor": 1315, "ldl_factor_ex": 1316, "ldl_solv": 1317, "lstsq": 1318, "lu_factor": 1320, "lu_factor_ex": 1321, "matrix_norm": 1325, "matrix_rank": 1327, "multi_dot": 1328, "pinv": 1330, "solv": 1333, "solve_ex": 1334, "solve_triangular": 1335, "svdval": 1337, "tensorinv": 1338, "tensorsolv": 1339, "vander": [1340, 1970], "vecdot": 1341, "vector_norm": 1342, "linspac": 1343, "lobpcg": 1345, "logspac": 1359, "lu_unpack": 1364, "meshgrid": 1374, "current_allocated_memori": 1381, "driver_allocated_memori": 1383, "stop": 1390, "mtia": [1396, 1400, 1401, 1402, 1404, 1405, 1406, 1407, 1408, 1409, 1410, 2031], "deferredmtiacallerror": 1396, "adaptiveavgpool1d": 1427, "adaptiveavgpool2d": 1428, "adaptiveavgpool3d": 1429, "adaptivelogsoftmaxwithloss": 1430, "adaptivemaxpool1d": 1431, "adaptivemaxpool2d": 1432, 
"adaptivemaxpool3d": 1433, "alphadropout": 1434, "avgpool1d": 1435, "avgpool2d": 1436, "avgpool3d": 1437, "bceloss": 1438, "bcewithlogitsloss": 1439, "batchnorm1d": 1440, "bilinear": [1443, 1603], "ctcloss": 1445, "channelshuffl": 1446, "circularpad1d": 1447, "circularpad2d": 1448, "circularpad3d": 1449, "constantpad1d": 1450, "constantpad2d": 1451, "constantpad3d": 1452, "cosineembeddingloss": 1459, "cosinesimilar": 1460, "crossentropyloss": 1461, "dataparallel": [1462, 2036, 2039, 2041, 2045], "dropout": [1463, 1617, 2036, 2039], "dropout1d": [1464, 1618], "dropout2d": [1465, 1619], "dropout3d": [1466, 1620], "featurealphadropout": 1470, "fold": [1472, 1626], "fractionalmaxpool2d": 1473, "fractionalmaxpool3d": 1474, "gelu": [1475, 1630], "glu": [1476, 1631], "gaussiannllloss": 1479, "hingeembeddingloss": 1485, "huberloss": 1486, "ident": [1487, 1740, 1748, 2016, 2050], "kldivloss": 1491, "l1loss": 1492, "lppool1d": 1493, "lppool2d": 1494, "lppool3d": 1495, "lazybatchnorm1d": 1499, "lazybatchnorm2d": 1500, "lazybatchnorm3d": 1501, "lazyconv1d": 1502, "lazyconv2d": 1503, "lazyconv3d": 1504, "lazyconvtranspose1d": 1505, "lazyconvtranspose2d": 1506, "lazyconvtranspose3d": 1507, "lazyinstancenorm1d": 1508, "lazyinstancenorm2d": 1509, "lazyinstancenorm3d": 1510, "lazylinear": 1511, "localresponsenorm": 1514, "logsigmoid": [1515, 1652], "logsoftmax": 1516, "mseloss": 1517, "marginrankingloss": 1518, "maxpool1d": 1519, "maxpool2d": 1520, "maxpool3d": 1521, "maxunpool1d": 1522, "maxunpool2d": 1523, "maxunpool3d": 1524, "mish": [1525, 1663], "moduledict": [1527, 2016], "modulelist": [1528, 2015, 2016], "multilabelmarginloss": 1529, "multilabelsoftmarginloss": 1530, "multimarginloss": 1531, "nllloss": 1533, "prelu": [1534, 1677], "pairwisedist": 1535, "parameterdict": 1536, "parameterlist": 1537, "pixelshuffl": 1538, "pixelunshuffl": 1539, "poissonnllloss": 1540, "rmsnorm": [1541, 1715], "rnn": [1542, 1758, 1759, 1760, 1761, 1762, 1763, 2059], "rnnbase": 1543, "rrelu": [1545, 1682], "relu": [1546, 1678], "reflectionpad1d": 1548, "reflectionpad2d": 1549, "reflectionpad3d": 1550, "replicationpad1d": 1551, "replicationpad2d": 1552, "replicationpad3d": 1553, "selu": [1554, 1685], "sequenti": 1555, "silu": [1556, 1687], "smoothl1loss": 1558, "softmarginloss": 1559, "softmax2d": 1561, "softmin": [1562, 1691], "softplu": [1563, 1692], "softshrink": [1564, 1693], "softsign": [1565, 1694], "syncbatchnorm": 1566, "tanhshrink": [1568, 1696], "transformerdecod": 1571, "transformerdecoderlay": 1572, "transformerencod": 1573, "transformerencoderlay": 1574, "tripletmarginloss": 1575, "tripletmarginwithdistanceloss": 1576, "upsamplingbilinear2d": 1580, "upsamplingnearest2d": 1581, "zeropad1d": 1582, "zeropad2d": 1583, "zeropad3d": 1584, "sdpbackend": 1585, "attent": [1586, 1588, 1589, 1590, 2037, 2038, 2039], "bia": [1586, 1588, 1589, 2038], "causalbia": [1586, 2038], "causalvari": 1587, "causal_lower_right": 1588, "causal_upper_left": 1589, "sdpa_kernel": 1590, "adaptive_avg_pool1d": 1591, "adaptive_max_pool1d": 1594, "adaptive_max_pool2d": 1595, "adaptive_max_pool3d": 1596, "affine_grid": 1597, "alpha_dropout": 1598, "avg_pool1d": 1599, "batch_norm": 1602, "conv_transpose1d": 1610, "conv_transpose2d": 1611, "conv_transpose3d": 1612, "cosine_embedding_loss": 1613, "cosine_similar": 1614, "cross_entropi": 1615, "ctc_loss": 1616, "elu_": 1622, "embedding_bag": 1624, "feature_alpha_dropout": 1625, "fractional_max_pool2d": 1627, "fractional_max_pool3d": 1628, "gaussian_nll_loss": 1629, "grid_sampl": 1632, 
"group_norm": 1633, "gumbel_softmax": 1634, "hardtanh_": 1639, "hinge_embedding_loss": 1640, "huber_loss": 1641, "instance_norm": 1642, "kl_div": 1644, "l1_loss": 1645, "layer_norm": 1646, "leaky_relu_": 1648, "local_response_norm": 1650, "log_softmax": [1651, 1903], "lp_pool1d": 1653, "lp_pool2d": 1654, "lp_pool3d": 1655, "margin_ranking_loss": 1656, "max_pool3d": 1659, "max_unpool1d": 1660, "max_unpool2d": 1661, "max_unpool3d": 1662, "mse_loss": 1664, "multi_margin_loss": 1665, "multilabel_margin_loss": 1666, "multilabel_soft_margin_loss": 1667, "nll_loss": 1668, "one_hot": 1670, "pad": [1671, 2036], "pairwise_dist": 1672, "pdist": 1673, "pixel_shuffl": 1674, "pixel_unshuffl": 1675, "poisson_nll_loss": 1676, "relu_": 1680, "rms_norm": 1681, "rrelu_": 1683, "scaled_dot_product_attent": 1684, "smooth_l1_loss": 1688, "soft_margin_loss": 1689, "threshold_": 1698, "data_parallel": [1699, 2039], "triplet_margin_loss": 1700, "triplet_margin_with_distance_loss": 1701, "lazymodulemixin": 1706, "register_module_backward_hook": 1707, "register_module_buffer_registration_hook": 1708, "register_module_forward_hook": 1709, "register_module_forward_pre_hook": 1710, "register_module_full_backward_hook": 1711, "register_module_full_backward_pre_hook": 1712, "register_module_module_registration_hook": 1713, "register_module_parameter_registration_hook": 1714, "distributeddataparallel": [1716, 2041, 2045, 2047], "uninitializedbuff": 1718, "uninitializedparamet": 1719, "clip_grad_norm": 1720, "clip_grad_norm_": 1721, "clip_grad_value_": 1722, "convert_conv2d_weight_memory_format": 1723, "convert_conv3d_weight_memory_format": 1724, "fuse_conv_bn_ev": 1725, "fuse_conv_bn_weight": 1726, "fuse_linear_bn_ev": 1727, "fuse_linear_bn_weight": 1728, "parameters_to_vector": 1729, "parametr": [1730, 1731, 1732, 1734, 1735, 1736, 1737, 2055], "orthogon": 1730, "spectral_norm": [1731, 1765], "weight_norm": [1732, 1768], "parametrizationlist": 1733, "cach": [1734, 2011, 2045, 2053, 2111], "is_parametr": 1735, "register_parametr": 1736, "remove_parametr": 1737, "basepruningmethod": 1738, "customfrommask": 1739, "l1unstructur": 1741, "lnstructur": 1742, "pruningcontain": 1743, "randomstructur": 1744, "randomunstructur": 1745, "prune": [1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 2055], "custom_from_mask": 1746, "global_unstructur": 1747, "is_prun": 1749, "l1_unstructur": 1750, "ln_structur": 1751, "random_structur": 1752, "random_unstructur": 1753, "remove_spectral_norm": 1755, "remove_weight_norm": 1756, "packedsequ": 1757, "pack_padded_sequ": 1758, "pack_sequ": 1759, "pad_packed_sequ": 1760, "pad_sequ": 1761, "unpack_sequ": 1762, "unpad_sequ": 1763, "skip_init": 1764, "stateless": 1766, "vector_to_paramet": 1767, "no_grad": 1769, "ones": [1775, 2102], "ones_lik": 1776, "jitscalartyp": 1777, "graphinfo": 1778, "verificationopt": 1779, "asgd": 1780, "adadelta": 1781, "adagrad": 1782, "adam": 1783, "adamw": 1784, "adamax": 1785, "lbfg": 1786, "nadam": 1787, "add_param_group": 1788, "load_state_dict": 1789, "state_dict": [1790, 2107], "zero_grad": 1792, "radam": 1793, "rmsprop": 1794, "rprop": 1795, "sgd": 1796, "sparseadam": 1797, "chainedschedul": 1798, "constantlr": 1799, "cosineannealinglr": 1800, "cosineannealingwarmrestart": 1801, "cycliclr": 1802, "exponentiallr": 1803, "lambdalr": 1804, "linearlr": 1805, "multisteplr": 1806, "multiplicativelr": 1807, "onecyclelr": 1808, "polynomiallr": 1809, "reducelronplateau": 1810, "sequentiallr": 1811, "steplr": 1812, "pca_lowrank": 1816, "polar": 1820, "promote_typ": 
1825, "quantize_per_channel": 1828, "quantize_per_tensor": 1829, "quantized_batch_norm": 1830, "quantized_max_pool1d": 1831, "quantized_max_pool2d": 1832, "sobolengin": 1833, "rand": 1835, "rand_lik": 1836, "randint": 1837, "randint_lik": 1838, "randn": 1839, "randn_lik": 1840, "randperm": 1841, "result_typ": 1852, "row_stack": 1856, "searchsort": 1862, "set_default_devic": 1866, "set_default_dtyp": 1867, "set_default_tensor_typ": 1868, "set_deterministic_debug_mod": 1869, "set_float32_matmul_precis": 1870, "set_flush_denorm": 1871, "set_num_interop_thread": 1872, "set_num_thread": 1873, "set_printopt": 1874, "set_warn_alwai": 1876, "signal": [1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 2078], "bartlett": 1880, "blackman": 1881, "cosin": 1882, "gaussian": 1884, "general_cosin": 1885, "general_ham": 1886, "ham": 1887, "hann": 1888, "kaiser": 1889, "nuttal": 1890, "as_sparse_gradcheck": 1901, "check_sparse_tensor_invari": 1902, "sampled_addmm": 1905, "spdiag": 1907, "sparse_bsc_tensor": 1909, "sparse_bsr_tensor": 1910, "sparse_compressed_tensor": 1911, "sparse_coo_tensor": 1912, "sparse_csc_tensor": 1913, "sparse_csr_tensor": 1914, "std_mean": 1922, "svd_lowrank": 1928, "sym_float": 1931, "sym_int": 1932, "sym_it": 1933, "sym_max": 1934, "sym_min": 1935, "sym_not": 1936, "tensordot": 1944, "trapezoid": 1949, "trapz": 1950, "tril_indic": 1953, "triu_indic": 1955, "unravel_index": 1962, "use_deterministic_algorithm": 1964, "generate_methods_for_privateuse1_backend": 1965, "get_cpp_backtrac": 1966, "rename_privateuse1_backend": 1967, "set_modul": 1968, "swap_tensor": 1969, "var_mean": 1972, "view_as_complex": 1974, "view_as_r": 1975, "vstack": 1978, "xpu": [1984, 1985, 1987, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2118], "zero": 2009, "zeros_lik": 2010, "hub": 2011, "publish": 2011, "entrypoint": 2011, "run": [2011, 2108], "my": [2011, 2042, 2050, 2068, 2102, 2108], "download": 2011, "logic": [2011, 2015], "known": [2011, 2013], "bind": 2012, "tabl": [2012, 2060], "built": [2013, 2014, 2016, 2065], "comparison": [2013, 2015, 2016, 2089, 2096], "inspect": [2013, 2063, 2100], "warn": 2013, "appendix": [2013, 2016], "recurs": 2013, "constant": [2013, 2015], "fusion": 2013, "math": [2014, 2089], "construct": [2015, 2016, 2018, 2035, 2067, 2080], "refin": [2015, 2016], "enum": [2015, 2016], "tupl": [2015, 2016], "liter": [2015, 2016], "list": [2015, 2016, 2065], "dict": 2015, "arithmet": [2015, 2016], "subscript": [2015, 2016], "slice": [2015, 2016, 2058, 2089], "ternari": [2015, 2016], "cast": 2015, "statement": [2015, 2016], "assign": [2015, 2016], "If": 2015, "while": [2015, 2016], "loop": 2015, "For": [2015, 2046], "continu": [2015, 2016], "return": [2015, 2016, 2050, 2077], "resolut": [2015, 2016], "lookup": 2015, "defin": [2015, 2048, 2049], "terminologi": 2016, "instanc": 2016, "when": [2016, 2042, 2046, 2048, 2065, 2070, 2080, 2102], "signatur": 2016, "expr": 2016, "convers": [2016, 2035], "atom": 2016, "identifi": [2016, 2102, 2111], "parenthes": 2016, "form": 2016, "displai": 2016, "primari": 2016, "power": 2016, "unari": [2016, 2023, 2080], "bitwis": 2016, "binari": [2016, 2023], "shift": 2016, "boolean": 2016, "condit": 2016, "augment": 2016, "rais": 2016, "del": 2016, "compound": 2016, "els": 2016, "getattr": 2016, "hasattr": 2016, "zip": [2016, 2068], "enumer": 2016, "rule": [2016, 2034, 2049, 2063], "remot": [2016, 2077], "procedur": 2016, "program": 2016, "coverag": [2017, 2033, 
2048], "properti": [2018, 2021], "correctli": 2018, "bound": 2018, "schema": 2018, "between": [2018, 2068, 2102], "low": 2020, "matrix": [2021, 2070], "decomposit": 2021, "solver": 2021, "misc": 2021, "motiv": [2023, 2098, 2101], "reduct": [2023, 2045, 2058, 2089], "idiom": 2024, "miscellan": 2025, "mobile_optim": 2026, "model_zoo": 2027, "module_track": 2028, "strategi": [2032, 2067], "descriptor": 2032, "file_descriptor": 2032, "file_system": 2032, "keep": [2033, 2068], "dimens": [2033, 2034], "unifi": 2033, "contract": 2033, "awai": 2033, "factori": 2033, "variant": 2033, "semant": [2034, 2043, 2045, 2053, 2060], "infer": [2034, 2042, 2044, 2060, 2093], "explicit": 2034, "align": 2034, "subsystem": 2034, "constructor": 2035, "convolut": [2036, 2039, 2058, 2059], "layer": 2036, "pool": [2036, 2039], "activ": [2036, 2039, 2113], "weight": [2036, 2067], "nonlinear": 2036, "recurr": [2036, 2050], "distanc": [2036, 2039], "loss": [2036, 2039, 2041], "vision": [2036, 2039], "shuffl": 2036, "lazi": 2036, "alias": 2036, "submodul": 2037, "typic": 2041, "unscal": 2041, "accumul": 2041, "penalti": 2041, "one": 2041, "per": [2041, 2067], "need": [2041, 2102], "particular": [2041, 2042], "dtype": [2041, 2060, 2073, 2083], "encod": 2042, "histori": [2042, 2113], "set": [2042, 2065], "No": 2042, "evalu": [2042, 2052, 2099], "multithread": 2042, "concurr": 2042, "determin": [2042, 2059], "retain": 2042, "thread": [2042, 2044, 2088], "safeti": 2042, "wirting": 2042, "calculu": 2042, "pictur": 2042, "conjug": 2042, "formula": 2042, "domain": 2042, "regist": [2042, 2097], "whether": [2042, 2068], "fire": 2042, "differ": [2042, 2065, 2102], "modifi": 2042, "compat": 2043, "runtim": [2044, 2050, 2111], "tensorfloat": [2045, 2053, 2058], "32": [2045, 2053, 2058, 2061], "tf32": [2045, 2053, 2058], "amper": [2045, 2058], "later": [2045, 2058, 2068], "reduc": [2045, 2058], "fp16": [2045, 2058], "gemm": [2045, 2058], "bf16": [2045, 2058], "bc": 2045, "pytorch_cuda_alloc_conf": 2045, "alloc": [2045, 2050, 2113], "cubla": 2045, "workspac": 2045, "cufft": 2045, "plan": [2045, 2053], "just": 2045, "time": [2045, 2093, 2105, 2109, 2111], "practic": [2045, 2057, 2070, 2095], "agnost": 2045, "buffer": [2045, 2051, 2057], "instead": 2045, "whole": [2045, 2102], "captur": 2045, "partial": 2045, "9": 2045, "6": 2045, "across": [2045, 2060], "land": 2046, "page": 2046, "tl": 2046, "dr": 2046, "do": [2046, 2068, 2100, 2102], "integr": [2046, 2096], "detail": [2046, 2101], "should": 2046, "intern": [2047, 2065, 2068, 2098, 2100], "processgroup": 2047, "ddpoptim": 2047, "setup_context": 2048, "like": [2048, 2068], "subclass": [2048, 2101], "wrapper": 2048, "__torch_function__": 2048, "overrid": [2048, 2112], "nativ": [2048, 2070, 2072], "anoth": 2049, "specifi": 2049, "gotcha": 2049, "staticmethod": 2049, "isn": 2050, "freed": 2050, "properli": 2050, "loader": 2050, "doesn": 2050, "fsdp": 2051, "prefetch": 2051, "nuanc": 2051, "payload": 2051, "notat": 2052, "background": [2052, 2076, 2077, 2096], "inform": [2052, 2075, 2110], "analyt": 2052, "u": 2052, "reus": [2053, 2057], "hipfft": 2053, "rocfft": 2053, "larg": 2054, "fleet": 2054, "wide": 2054, "attach": 2054, "consider": 2054, "block": 2055, "neural": 2055, "tip": [2057, 2071], "fight": 2057, "deadlock": 2057, "through": 2057, "queue": 2057, "e": 2057, "g": 2057, "hogwild": 2057, "oversubscript": 2057, "accuraci": [2058, 2070, 2071, 2111], "extrem": 2058, "finit": 2058, "instinct": 2058, "mi200": 2058, "reproduc": 2059, "nondeterminist": 2059, "algorithm": [2059, 2067, 
2076], "fill": 2059, "uniniti": 2059, "content": [2060, 2068], "preserv": 2060, "format": [2060, 2068], "them": [2060, 2068], "version": 2060, "integ": 2060, "divis": 2060, "alwai": [2060, 2099], "includ": [2061, 2068], "compon": 2061, "speed": [2061, 2102], "One": [2061, 2110], "instal": 2061, "cffi": 2061, "cpp": 2061, "found": 2061, "win": 2061, "channel": 2061, "without": 2061, "claus": 2061, "protect": 2061, "broken": 2061, "pipe": 2061, "driver": 2061, "shut": 2061, "down": 2061, "ipc": 2061, "base": [2062, 2063, 2065, 2067, 2110], "gui": 2063, "diagnos": [2063, 2111], "sarif": 2063, "diagnost": 2063, "alexnet": 2065, "v": 2065, "index": [2065, 2089], "aten": [2065, 2106, 2110], "inlin": 2065, "discov": 2065, "unconvert": 2065, "onc": 2065, "adjust": 2067, "learn": 2067, "rate": 2067, "averag": 2067, "swa": 2067, "ema": 2067, "care": 2067, "put": 2067, "togeth": 2067, "see": [2068, 2102], "insid": [2068, 2102], "treat": 2068, "archiv": 2068, "file_structur": 2068, "given": 2068, "wa": 2068, "resourc": [2068, 2101], "distinguish": 2068, "explan": 2068, "analyz": 2068, "extern": 2068, "mock": 2068, "refactor": 2068, "sharp": 2068, "global": 2068, "isol": 2068, "each": [2068, 2101], "mangl": 2068, "intel": 2069, "instrument": 2069, "technologi": 2069, "eager": 2070, "awar": 2070, "mainten": 2070, "engin": 2070, "observ": [2070, 2073], "hardwar": 2070, "configur": [2070, 2072], "insensit": 2071, "int8": 2071, "sensit": 2071, "ao": [2073, 2090, 2091], "top": 2073, "quantize_fx": 2073, "qconfig_map": 2073, "backend_config": 2073, "custom_config": 2073, "pt2e": 2073, "0": [2073, 2099, 2107, 2108, 2111], "export_util": 2073, "relat": [2073, 2101], "fake_quant": 2073, "intrins": 2073, "qat": 2073, "scheme": 2073, "rpc": 2075, "tensorpip": 2075, "rref": [2075, 2077], "remotemodul": 2075, "record": 2076, "dure": 2076, "smart": 2076, "end": 2076, "protocol": 2077, "lifetim": 2077, "reason": 2077, "scenario": 2077, "owner": 2077, "argument": 2077, "sparsiti": 2080, "semi": 2080, "acceler": 2080, "coo": 2080, "hybrid": 2080, "uncoalesc": 2080, "csr": 2080, "csc": 2080, "bsr": 2080, "bsc": 2080, "memory_format": 2083, "tensorboard": 2085, "creation": 2089, "sampl": 2089, "quasi": 2089, "pointwis": 2089, "spectral": 2089, "bla": 2089, "lapack": 2089, "foreach": 2089, "path": 2089, "n": [2090, 2091], "_numeric_suit": 2090, "_numeric_suite_fx": 2091, "howto": 2092, "vendor": 2092, "aotinductor": 2093, "ahead": 2093, "Of": 2093, "ed": 2093, "x86": 2095, "tree": 2096, "callabl": 2096, "previou": 2096, "after": 2097, "aotautograd": 2097, "speedi": 2097, "abridg": 2098, "public": 2098, "guard": [2098, 2099, 2100], "overal": [2098, 2101], "architectur": [2098, 2101], "unback": 2098, "dynamo": [2099, 2100], "gentl": 2099, "pep": 2099, "523": 2099, "frame": 2099, "cpython": 2099, "sound": 2099, "duck": 2099, "complet": 2099, "conclus": 2099, "footnot": 2099, "artifact": 2100, "bit": 2101, "individu": [2101, 2105], "characterist": 2101, "interact": 2101, "you": 2102, "still": 2102, "crash": 2102, "slow": 2102, "recompil": [2102, 2111], "am": 2102, "speedup": 2102, "caus": [2102, 2111], "didn": 2102, "incorrect": 2102, "result": 2102, "oom": 2102, "besid": 2102, "via": 2102, "under": 2102, "some": 2102, "did": 2102, "fine": [2102, 2103], "grain": [2102, 2103], "_dynamo": [2102, 2103], "disallow_in_graph": [2102, 2103], "_dynamo_skip": 2102, "pretrain": 2104, "next": 2104, "torchinductor": [2105, 2108, 2111], "relev": 2105, "breakdown": 2105, "prim": 2106, "nnmodul": 2107, "__call__": 2107, "dashboard": 2108, 
"measur": 2108, "pr": 2108, "affect": 2108, "befor": 2108, "merg": 2108, "understand": [2109, 2113], "around": 2109, "region": 2109, "compiledfunct": 2109, "overhead": 2109, "x": 2110, "none": 2110, "partition": 2110, "matcher": 2110, "capabl": 2110, "troubleshoot": 2111, "titl": 2111, "minifi": 2111, "torch_compile_debug": 2111, "excess": 2111, "cold": 2111, "corrupt": 2111, "snapshot": 2113, "visual": 2113, "timelin": 2113, "processgroupnccl": 2115, "finfo": 2116, "iinfo": 2116}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx.ext.todo": 2, "sphinx.ext.viewcode": 1, "sphinx": 57}, "alltitles": {"torch.Tensor.flatten": [[265, "torch-tensor-flatten"]], "torch.Tensor.geqrf": [[289, "torch-tensor-geqrf"]], "torch.Tensor.fliplr": [[267, "torch-tensor-fliplr"]], "torch.Tensor.gcd_": [[285, "torch-tensor-gcd"]], "torch.Tensor.fmax": [[276, "torch-tensor-fmax"]], "torch.Tensor.floor_divide_": [[275, "torch-tensor-floor-divide"]], "torch.Tensor.fix_": [[264, "torch-tensor-fix"]], "torch.Tensor.half": [[299, "torch-tensor-half"]], "torch.Tensor.greater_": [[294, "torch-tensor-greater"]], "torch.Tensor.gt": [[297, "torch-tensor-gt"]], "torch.Tensor.float": [[269, "torch-tensor-float"]], "torch.Tensor.frexp": [[282, "torch-tensor-frexp"]], "torch.Tensor.exponential_": [[260, "torch-tensor-exponential"]], "torch.Tensor.gather": [[283, "torch-tensor-gather"]], "torch.Tensor.fix": [[263, "torch-tensor-fix"]], "torch.Tensor.gt_": [[298, "torch-tensor-gt"]], "torch.Tensor.fill_": [[261, "torch-tensor-fill"]], "torch.Tensor.frac": [[280, "torch-tensor-frac"]], "torch.Tensor.fmod": [[278, "torch-tensor-fmod"]], "torch.Tensor.greater_equal_": [[296, "torch-tensor-greater-equal"]], "torch.Tensor.float_power_": [[271, "torch-tensor-float-power"]], "torch.Tensor.expm1": [[258, "torch-tensor-expm1"]], "torch.Tensor.floor": [[272, "torch-tensor-floor"]], "torch.Tensor.hsplit": [[304, "torch-tensor-hsplit"]], "torch.Tensor.expand": [[256, "torch-tensor-expand"]], "torch.Tensor.frac_": [[281, "torch-tensor-frac"]], "torch.Tensor.fmin": [[277, "torch-tensor-fmin"]], "torch.Tensor.greater": [[293, "torch-tensor-greater"]], "torch.Tensor.expand_as": [[257, "torch-tensor-expand-as"]], "torch.Tensor.ge": [[286, "torch-tensor-ge"]], "torch.Tensor.get_device": [[291, "torch-tensor-get-device"]], "torch.Tensor.grad": [[292, "torch-tensor-grad"]], "torch.Tensor.histogram": [[303, "torch-tensor-histogram"]], "torch.Tensor.gcd": [[284, "torch-tensor-gcd"]], "torch.Tensor.float_power": [[270, "torch-tensor-float-power"]], "torch.Tensor.flipud": [[268, "torch-tensor-flipud"]], "torch.Tensor.greater_equal": [[295, "torch-tensor-greater-equal"]], "torch.Tensor.hypot": [[305, "torch-tensor-hypot"]], "torch.Tensor.hardshrink": [[300, "torch-tensor-hardshrink"]], "torch.Tensor.exp_": [[255, "torch-tensor-exp"]], "torch.Tensor.fmod_": [[279, "torch-tensor-fmod"]], "torch.Tensor.histc": [[302, "torch-tensor-histc"]], "torch.Tensor.ger": [[290, "torch-tensor-ger"]], "torch.Tensor.heaviside": [[301, "torch-tensor-heaviside"]], "torch.Tensor.floor_divide": [[274, "torch-tensor-floor-divide"]], "torch.Tensor.fill_diagonal_": [[262, "torch-tensor-fill-diagonal"]], "torch.Tensor.expm1_": [[259, "torch-tensor-expm1"]], "torch.Tensor.floor_": [[273, 
"torch-tensor-floor"]], "torch.Tensor.flip": [[266, "torch-tensor-flip"]], "torch.Tensor.geometric_": [[288, "torch-tensor-geometric"]], "torch.Tensor.ge_": [[287, "torch-tensor-ge"]], "torch.Tensor.clamp": [[187, "torch-tensor-clamp"]], "torch.Tensor.bmm": [[171, "torch-tensor-bmm"]], "torch.Tensor.copysign": [[199, "torch-tensor-copysign"]], "torch.Tensor.contiguous": [[197, "torch-tensor-contiguous"]], "torch.Tensor.bitwise_left_shift": [[161, "torch-tensor-bitwise-left-shift"]], "torch.Tensor.bitwise_and": [[159, "torch-tensor-bitwise-and"]], "torch.Tensor.chunk": [[186, "torch-tensor-chunk"]], "torch.Tensor.bitwise_xor": [[169, "torch-tensor-bitwise-xor"]], "torch.Tensor.cfloat": [[180, "torch-tensor-cfloat"]], "torch.Tensor.baddbmm_": [[154, "torch-tensor-baddbmm"]], "torch.Tensor.broadcast_to": [[173, "torch-tensor-broadcast-to"]], "torch.Tensor.coalesce": [[192, "torch-tensor-coalesce"]], "torch.Tensor.baddbmm": [[153, "torch-tensor-baddbmm"]], "torch.Tensor.bitwise_xor_": [[170, "torch-tensor-bitwise-xor"]], "torch.Tensor.conj_physical": [[195, "torch-tensor-conj-physical"]], "torch.Tensor.bernoulli": [[155, "torch-tensor-bernoulli"]], "torch.Tensor.corrcoef": [[201, "torch-tensor-corrcoef"]], "torch.Tensor.cos_": [[203, "torch-tensor-cos"]], "torch.Tensor.bfloat16": [[157, "torch-tensor-bfloat16"]], "torch.Tensor.clamp_": [[188, "torch-tensor-clamp"]], "torch.Tensor.ccol_indices": [[176, "torch-tensor-ccol-indices"]], "torch.Tensor.copysign_": [[200, "torch-tensor-copysign"]], "torch.Tensor.bitwise_not": [[163, "torch-tensor-bitwise-not"]], "torch.Tensor.cholesky": [[183, "torch-tensor-cholesky"]], "torch.Tensor.bitwise_left_shift_": [[162, "torch-tensor-bitwise-left-shift"]], "torch.Tensor.bitwise_or": [[165, "torch-tensor-bitwise-or"]], "torch.Tensor.bitwise_and_": [[160, "torch-tensor-bitwise-and"]], "torch.Tensor.col_indices": [[193, "torch-tensor-col-indices"]], "torch.Tensor.conj_physical_": [[196, "torch-tensor-conj-physical"]], "torch.Tensor.conj": [[194, "torch-tensor-conj"]], "torch.Tensor.cdouble": [[177, "torch-tensor-cdouble"]], "torch.Tensor.cholesky_solve": [[185, "torch-tensor-cholesky-solve"]], "torch.Tensor.bitwise_right_shift": [[167, "torch-tensor-bitwise-right-shift"]], "torch.Tensor.byte": [[174, "torch-tensor-byte"]], "torch.Tensor.bitwise_right_shift_": [[168, "torch-tensor-bitwise-right-shift"]], "torch.Tensor.bitwise_not_": [[164, "torch-tensor-bitwise-not"]], "torch.Tensor.cos": [[202, "torch-tensor-cos"]], "torch.Tensor.clip": [[189, "torch-tensor-clip"]], "torch.Tensor.cauchy_": [[175, "torch-tensor-cauchy"]], "torch.Tensor.bitwise_or_": [[166, "torch-tensor-bitwise-or"]], "torch.Tensor.ceil_": [[179, "torch-tensor-ceil"]], "torch.Tensor.chalf": [[181, "torch-tensor-chalf"]], "torch.Tensor.clone": [[191, "torch-tensor-clone"]], "torch.Tensor.ceil": [[178, "torch-tensor-ceil"]], "torch.Tensor.bincount": [[158, "torch-tensor-bincount"]], "torch.Tensor.clip_": [[190, "torch-tensor-clip"]], "torch.Tensor.bernoulli_": [[156, "torch-tensor-bernoulli"]], "torch.Tensor.char": [[182, "torch-tensor-char"]], "torch.Tensor.copy_": [[198, "torch-tensor-copy"]], "torch.Tensor.bool": [[172, "torch-tensor-bool"]], "torch.Tensor.cholesky_inverse": [[184, "torch-tensor-cholesky-inverse"]], "torch.Tensor.arcsin_": [[127, "torch-tensor-arcsin"]], "torch.Tensor.arctan": [[130, "torch-tensor-arctan"]], "torch.Tensor.addmv_": [[110, "torch-tensor-addmv"]], "torch.Tensor.angle": [[119, "torch-tensor-angle"]], "torch.Tensor.atanh": [[150, "torch-tensor-atanh"]], 
"torch.Tensor.amax": [[116, "torch-tensor-amax"]], "torch.Tensor.addcdiv": [[103, "torch-tensor-addcdiv"]], "torch.Tensor.addcdiv_": [[104, "torch-tensor-addcdiv"]], "torch.Tensor.arccos_": [[123, "torch-tensor-arccos"]], "torch.Tensor.arctan2": [[131, "torch-tensor-arctan2"]], "torch.Tensor.argsort": [[138, "torch-tensor-argsort"]], "torch.Tensor.asin_": [[143, "torch-tensor-asin"]], "torch.Tensor.aminmax": [[118, "torch-tensor-aminmax"]], "torch.Tensor.atanh_": [[151, "torch-tensor-atanh"]], "torch.Tensor.asinh_": [[145, "torch-tensor-asinh"]], "torch.Tensor.asinh": [[144, "torch-tensor-asinh"]], "torch.Tensor.addmm": [[107, "torch-tensor-addmm"]], "torch.Tensor.apply_": [[121, "torch-tensor-apply"]], "torch.Tensor.arcsinh_": [[129, "torch-tensor-arcsinh"]], "torch.Tensor.as_subclass": [[141, "torch-tensor-as-subclass"]], "torch.Tensor.arccosh": [[124, "torch-tensor-arccosh"]], "torch.Tensor.arcsin": [[126, "torch-tensor-arcsin"]], "torch.Tensor.argmax": [[136, "torch-tensor-argmax"]], "torch.Tensor.allclose": [[115, "torch-tensor-allclose"]], "torch.Tensor.arccosh_": [[125, "torch-tensor-arccosh"]], "torch.Tensor.arctan2_": [[132, "torch-tensor-arctan2"]], "torch.Tensor.argwhere": [[139, "torch-tensor-argwhere"]], "torch.Tensor.addmm_": [[108, "torch-tensor-addmm"]], "torch.Tensor.addcmul_": [[106, "torch-tensor-addcmul"]], "torch.Tensor.addbmm_": [[102, "torch-tensor-addbmm"]], "torch.Tensor.as_strided": [[140, "torch-tensor-as-strided"]], "torch.Tensor.addr": [[111, "torch-tensor-addr"]], "torch.Tensor.atan2": [[147, "torch-tensor-atan2"]], "torch.Tensor.arcsinh": [[128, "torch-tensor-arcsinh"]], "torch.Tensor.adjoint": [[113, "torch-tensor-adjoint"]], "torch.Tensor.amin": [[117, "torch-tensor-amin"]], "torch.Tensor.atan_": [[149, "torch-tensor-atan"]], "torch.Tensor.arccos": [[122, "torch-tensor-arccos"]], "torch.Tensor.arctan_": [[133, "torch-tensor-arctan"]], "torch.Tensor.argmin": [[137, "torch-tensor-argmin"]], "torch.Tensor.asin": [[142, "torch-tensor-asin"]], "torch.Tensor.addmv": [[109, "torch-tensor-addmv"]], "torch.Tensor.atan": [[146, "torch-tensor-atan"]], "torch.Tensor.atan2_": [[148, "torch-tensor-atan2"]], "torch.Tensor.addr_": [[112, "torch-tensor-addr"]], "torch.Tensor.any": [[120, "torch-tensor-any"]], "torch.Tensor.arctanh": [[134, "torch-tensor-arctanh"]], "torch.Tensor.backward": [[152, "torch-tensor-backward"]], "torch.Tensor.addcmul": [[105, "torch-tensor-addcmul"]], "torch.Tensor.all": [[114, "torch-tensor-all"]], "torch.Tensor.arctanh_": [[135, "torch-tensor-arctanh"]], "torch.Tensor.div": [[237, "torch-tensor-div"]], "torch.Tensor.cummax": [[212, "torch-tensor-cummax"]], "torch.Tensor.digamma_": [[233, "torch-tensor-digamma"]], "torch.Tensor.dot": [[241, "torch-tensor-dot"]], "torch.Tensor.eq_": [[246, "torch-tensor-eq"]], "torch.Tensor.dsplit": [[243, "torch-tensor-dsplit"]], "torch.Tensor.count_nonzero": [[206, "torch-tensor-count-nonzero"]], "torch.Tensor.dense_dim": [[220, "torch-tensor-dense-dim"]], "torch.Tensor.erf": [[248, "torch-tensor-erf"]], "torch.Tensor.cumsum": [[216, "torch-tensor-cumsum"]], "torch.Tensor.equal": [[247, "torch-tensor-equal"]], "torch.Tensor.deg2rad": [[219, "torch-tensor-deg2rad"]], "torch.Tensor.cumprod_": [[215, "torch-tensor-cumprod"]], "torch.Tensor.device": [[225, "torch-tensor-device"]], "torch.Tensor.cosh_": [[205, "torch-tensor-cosh"]], "torch.Tensor.cosh": [[204, "torch-tensor-cosh"]], "torch.Tensor.digamma": [[232, "torch-tensor-digamma"]], "torch.Tensor.diagonal_scatter": [[230, "torch-tensor-diagonal-scatter"]], 
"torch.Tensor.dequantize": [[221, "torch-tensor-dequantize"]], "torch.Tensor.erfc": [[250, "torch-tensor-erfc"]], "torch.Tensor.cumprod": [[214, "torch-tensor-cumprod"]], "torch.Tensor.diag_embed": [[227, "torch-tensor-diag-embed"]], "torch.Tensor.div_": [[238, "torch-tensor-div"]], "torch.Tensor.crow_indices": [[210, "torch-tensor-crow-indices"]], "torch.Tensor.diff": [[231, "torch-tensor-diff"]], "torch.Tensor.diag": [[226, "torch-tensor-diag"]], "torch.Tensor.data_ptr": [[218, "torch-tensor-data-ptr"]], "torch.Tensor.double": [[242, "torch-tensor-double"]], "torch.Tensor.cuda": [[211, "torch-tensor-cuda"]], "torch.Tensor.cov": [[207, "torch-tensor-cov"]], "torch.Tensor.cross": [[209, "torch-tensor-cross"]], "torch.Tensor.erfinv_": [[253, "torch-tensor-erfinv"]], "torch.Tensor.dim": [[234, "torch-tensor-dim"]], "torch.Tensor.element_size": [[244, "torch-tensor-element-size"]], "torch.Tensor.erfinv": [[252, "torch-tensor-erfinv"]], "torch.Tensor.eq": [[245, "torch-tensor-eq"]], "torch.Tensor.erfc_": [[251, "torch-tensor-erfc"]], "torch.Tensor.det": [[222, "torch-tensor-det"]], "torch.Tensor.cumsum_": [[217, "torch-tensor-cumsum"]], "torch.Tensor.divide": [[239, "torch-tensor-divide"]], "torch.Tensor.divide_": [[240, "torch-tensor-divide"]], "torch.Tensor.dist": [[236, "torch-tensor-dist"]], "torch.Tensor.exp": [[254, "torch-tensor-exp"]], "torch.Tensor.cpu": [[208, "torch-tensor-cpu"]], "torch.Tensor.detach": [[223, "torch-tensor-detach"]], "torch.Tensor.diagonal": [[229, "torch-tensor-diagonal"]], "torch.Tensor.erf_": [[249, "torch-tensor-erf"]], "torch.Tensor.detach_": [[224, "torch-tensor-detach"]], "torch.Tensor.dim_order": [[235, "torch-tensor-dim-order"]], "torch.Tensor.cummin": [[213, "torch-tensor-cummin"]], "torch.Tensor.diagflat": [[228, "torch-tensor-diagflat"]], "torch.Tensor.isreal": [[352, "torch-tensor-isreal"]], "torch.Tensor.index_select": [[324, "torch-tensor-index-select"]], "torch.Tensor.index_fill": [[318, "torch-tensor-index-fill"]], "torch.Tensor.kthvalue": [[356, "torch-tensor-kthvalue"]], "torch.Tensor.index_put": [[320, "torch-tensor-index-put"]], "torch.Tensor.inverse": [[329, "torch-tensor-inverse"]], "torch.Tensor.index_add_": [[315, "torch-tensor-index-add"]], "torch.Tensor.is_shared": [[342, "torch-tensor-is-shared"]], "torch.Tensor.isfinite": [[347, "torch-tensor-isfinite"]], "torch.Tensor.index_fill_": [[319, "torch-tensor-index-fill"]], "torch.Tensor.index_copy": [[316, "torch-tensor-index-copy"]], "torch.Tensor.isinf": [[348, "torch-tensor-isinf"]], "torch.Tensor.int": [[327, "torch-tensor-int"]], "torch.Tensor.is_meta": [[338, "torch-tensor-is-meta"]], "torch.Tensor.is_complex": [[331, "torch-tensor-is-complex"]], "torch.Tensor.i0": [[307, "torch-tensor-i0"]], "torch.Tensor.isposinf": [[351, "torch-tensor-isposinf"]], "torch.Tensor.igammac": [[311, "torch-tensor-igammac"]], "torch.Tensor.item": [[354, "torch-tensor-item"]], "torch.Tensor.igammac_": [[312, "torch-tensor-igammac"]], "torch.Tensor.is_quantized": [[340, "torch-tensor-is-quantized"]], "torch.Tensor.is_signed": [[343, "torch-tensor-is-signed"]], "torch.Tensor.imag": [[313, "torch-tensor-imag"]], "torch.Tensor.is_sparse": [[344, "torch-tensor-is-sparse"]], "torch.Tensor.itemsize": [[355, "torch-tensor-itemsize"]], "torch.Tensor.is_floating_point": [[335, "torch-tensor-is-floating-point"]], "torch.Tensor.index_put_": [[321, "torch-tensor-index-put"]], "torch.Tensor.index_copy_": [[317, "torch-tensor-index-copy"]], "torch.Tensor.igamma": [[309, "torch-tensor-igamma"]], "torch.Tensor.i0_": [[308, 
"torch-tensor-i0"]], "torch.Tensor.is_conj": [[332, "torch-tensor-is-conj"]], "torch.Tensor.is_sparse_csr": [[345, "torch-tensor-is-sparse-csr"]], "torch.Tensor.isclose": [[346, "torch-tensor-isclose"]], "torch.Tensor.hypot_": [[306, "torch-tensor-hypot"]], "torch.Tensor.isneginf": [[350, "torch-tensor-isneginf"]], "torch.Tensor.index_add": [[314, "torch-tensor-index-add"]], "torch.Tensor.int_repr": [[328, "torch-tensor-int-repr"]], "torch.Tensor.is_inference": [[336, "torch-tensor-is-inference"]], "torch.Tensor.is_pinned": [[339, "torch-tensor-is-pinned"]], "torch.Tensor.is_set_to": [[341, "torch-tensor-is-set-to"]], "torch.Tensor.is_contiguous": [[333, "torch-tensor-is-contiguous"]], "torch.Tensor.index_reduce_": [[323, "torch-tensor-index-reduce"]], "torch.Tensor.istft": [[353, "torch-tensor-istft"]], "torch.Tensor.is_coalesced": [[330, "torch-tensor-is-coalesced"]], "torch.Tensor.is_leaf": [[337, "torch-tensor-is-leaf"]], "torch.Tensor.index_reduce": [[322, "torch-tensor-index-reduce"]], "torch.Tensor.indices": [[325, "torch-tensor-indices"]], "torch.Tensor.igamma_": [[310, "torch-tensor-igamma"]], "torch.Tensor.inner": [[326, "torch-tensor-inner"]], "torch.Tensor.is_cuda": [[334, "torch-tensor-is-cuda"]], "torch.Tensor.isnan": [[349, "torch-tensor-isnan"]], "torch.Tensor.logcumsumexp": [[382, "torch-tensor-logcumsumexp"]], "torch.Tensor.lerp": [[363, "torch-tensor-lerp"]], "torch.Tensor.log_": [[378, "torch-tensor-log"]], "torch.Tensor.logit_": [[393, "torch-tensor-logit"]], "torch.Tensor.lgamma_": [[370, "torch-tensor-lgamma"]], "torch.Tensor.lcm": [[357, "torch-tensor-lcm"]], "torch.Tensor.less_equal_": [[368, "torch-tensor-less-equal"]], "torch.Tensor.logical_or": [[388, "torch-tensor-logical-or"]], "torch.Tensor.masked_select": [[405, "torch-tensor-masked-select"]], "torch.Tensor.less_equal": [[367, "torch-tensor-less-equal"]], "torch.Tensor.log10": [[372, "torch-tensor-log10"]], "torch.Tensor.logical_not_": [[387, "torch-tensor-logical-not"]], "torch.Tensor.logical_and_": [[385, "torch-tensor-logical-and"]], "torch.Tensor.ldexp": [[359, "torch-tensor-ldexp"]], "torch.Tensor.masked_scatter_": [[404, "torch-tensor-masked-scatter"]], "torch.Tensor.masked_fill": [[401, "torch-tensor-masked-fill"]], "torch.Tensor.lt_": [[397, "torch-tensor-lt"]], "torch.Tensor.lcm_": [[358, "torch-tensor-lcm"]], "torch.Tensor.less_": [[366, "torch-tensor-less"]], "torch.Tensor.ldexp_": [[360, "torch-tensor-ldexp"]], "torch.Tensor.logical_xor_": [[391, "torch-tensor-logical-xor"]], "torch.Tensor.masked_fill_": [[402, "torch-tensor-masked-fill"]], "torch.Tensor.le_": [[362, "torch-tensor-le"]], "torch.Tensor.logical_not": [[386, "torch-tensor-logical-not"]], "torch.Tensor.lu": [[398, "torch-tensor-lu"]], "torch.Tensor.long": [[395, "torch-tensor-long"]], "torch.Tensor.lt": [[396, "torch-tensor-lt"]], "torch.Tensor.logical_or_": [[389, "torch-tensor-logical-or"]], "torch.Tensor.logaddexp": [[380, "torch-tensor-logaddexp"]], "torch.Tensor.log": [[371, "torch-tensor-log"]], "torch.Tensor.le": [[361, "torch-tensor-le"]], "torch.Tensor.logsumexp": [[394, "torch-tensor-logsumexp"]], "torch.Tensor.logical_xor": [[390, "torch-tensor-logical-xor"]], "torch.Tensor.logaddexp2": [[381, "torch-tensor-logaddexp2"]], "torch.Tensor.log1p_": [[375, "torch-tensor-log1p"]], "torch.Tensor.logit": [[392, "torch-tensor-logit"]], "torch.Tensor.lerp_": [[364, "torch-tensor-lerp"]], "torch.Tensor.log2_": [[377, "torch-tensor-log2"]], "torch.Tensor.log2": [[376, "torch-tensor-log2"]], "torch.Tensor.log10_": [[373, 
"torch-tensor-log10"]], "torch.Tensor.matmul": [[406, "torch-tensor-matmul"]], "torch.Tensor.lgamma": [[369, "torch-tensor-lgamma"]], "torch.Tensor.log1p": [[374, "torch-tensor-log1p"]], "torch.Tensor.logdet": [[383, "torch-tensor-logdet"]], "torch.Tensor.masked_scatter": [[403, "torch-tensor-masked-scatter"]], "torch.Tensor.less": [[365, "torch-tensor-less"]], "torch.Tensor.map_": [[400, "torch-tensor-map"]], "torch.Tensor.logical_and": [[384, "torch-tensor-logical-and"]], "torch.Tensor.lu_solve": [[399, "torch-tensor-lu-solve"]], "torch.Tensor.log_normal_": [[379, "torch-tensor-log-normal"]], "torch.Tensor.matrix_exp": [[407, "torch-tensor-matrix-exp"]], "torch.vdot": [[1973, "torch-vdot"]], "torch.var_mean": [[1972, "torch-var-mean"]], "torch.topk": [[1946, "torch-topk"]], "torch.use_deterministic_algorithms": [[1964, "torch-use-deterministic-algorithms"]], "torch.trunc": [[1957, "torch-trunc"]], "torch.utils.get_cpp_backtrace": [[1966, "torch-utils-get-cpp-backtrace"]], "torch.utils.rename_privateuse1_backend": [[1967, "torch-utils-rename-privateuse1-backend"]], "torch.utils.generate_methods_for_privateuse1_backend": [[1965, "torch-utils-generate-methods-for-privateuse1-backend"]], "device": [[1986, "device"], [1403, "device"], [1029, "device"]], "Event": [[1981, "event"], [1385, "event"], [1397, "event"], [1010, "event"]], "torch.tril": [[1952, "torch-tril"]], "torch.xpu.current_device": [[1984, "torch-xpu-current-device"]], "torch.tan": [[1940, "torch-tan"]], "torch.tanh": [[1941, "torch-tanh"]], "torch.view_as_complex": [[1974, "torch-view-as-complex"]], "torch.vstack": [[1978, "torch-vstack"]], "StreamContext": [[1983, "streamcontext"], [1399, "streamcontext"], [999, "streamcontext"], [1014, "streamcontext"]], "torch.unflatten": [[1959, "torch-unflatten"]], "torch.xpu.current_stream": [[1985, "torch-xpu-current-stream"]], "torch.xpu.device_count": [[1987, "torch-xpu-device-count"]], "torch.vander": [[1970, "torch-vander"]], "torch.triu": [[1954, "torch-triu"]], "torch.vmap": [[1976, "torch-vmap"]], "torch.var": [[1971, "torch-var"]], "torch.where": [[1979, "torch-where"]], "torch.take": [[1938, "torch-take"]], "torch.tensordot": [[1944, "torch-tensordot"]], "torch.unsqueeze": [[1963, "torch-unsqueeze"]], "torch.trapezoid": [[1949, "torch-trapezoid"]], "torch.triangular_solve": [[1951, "torch-triangular-solve"]], "torch.trapz": [[1950, "torch-trapz"]], "torch.view_as_real": [[1975, "torch-view-as-real"]], "torch.take_along_dim": [[1939, "torch-take-along-dim"]], "torch.tril_indices": [[1953, "torch-tril-indices"]], "torch.unique": [[1960, "torch-unique"]], "torch.trace": [[1947, "torch-trace"]], "torch.triu_indices": [[1955, "torch-triu-indices"]], "Stream": [[1982, "stream"], [1398, "stream"], [1013, "stream"], [998, "stream"]], "device_of": [[1988, "device-of"], [1031, "device-of"]], "torch.vsplit": [[1977, "torch-vsplit"]], "torch.tensor": [[1942, "torch-tensor"]], "torch.xlogy": [[1980, "torch-xlogy"]], "torch.unravel_index": [[1962, "torch-unravel-index"]], "torch.tensor_split": [[1943, "torch-tensor-split"]], "torch.unique_consecutive": [[1961, "torch-unique-consecutive"]], "torch.true_divide": [[1956, "torch-true-divide"]], "torch.tile": [[1945, "torch-tile"]], "torch.transpose": [[1948, "torch-transpose"]], "torch.utils.swap_tensors": [[1969, "torch-utils-swap-tensors"]], "torch.utils.set_module": [[1968, "torch-utils-set-module"]], "torch.unbind": [[1958, "torch-unbind"]], "torch.square": [[1917, "torch-square"]], "torch.sspaddmm": [[1919, "torch-sspaddmm"]], 
"torch.sparse_coo_tensor": [[1912, "torch-sparse-coo-tensor"]], "torch.sym_min": [[1935, "torch-sym-min"]], "torch.sparse.spdiags": [[1907, "torch-sparse-spdiags"]], "torch.sym_max": [[1934, "torch-sym-max"]], "torch.sparse.log_softmax": [[1903, "torch-sparse-log-softmax"]], "torch.sin": [[1892, "torch-sin"]], "torch.sparse_compressed_tensor": [[1911, "torch-sparse-compressed-tensor"]], "torch.sparse.softmax": [[1906, "torch-sparse-softmax"]], "torch.sub": [[1924, "torch-sub"]], "torch.swapaxes": [[1929, "torch-swapaxes"]], "torch.sqrt": [[1916, "torch-sqrt"]], "torch.sparse.sampled_addmm": [[1905, "torch-sparse-sampled-addmm"]], "torch.sparse_csc_tensor": [[1913, "torch-sparse-csc-tensor"]], "torch.subtract": [[1925, "torch-subtract"]], "torch.sparse_bsc_tensor": [[1909, "torch-sparse-bsc-tensor"]], "torch.swapdims": [[1930, "torch-swapdims"]], "torch.signal.windows.kaiser": [[1889, "torch-signal-windows-kaiser"]], "torch.sparse.addmm": [[1900, "torch-sparse-addmm"]], "torch.sparse.sum": [[1908, "torch-sparse-sum"]], "torch.sinh": [[1894, "torch-sinh"]], "torch.smm": [[1897, "torch-smm"]], "torch.sum": [[1926, "torch-sum"]], "torch.signal.windows.hann": [[1888, "torch-signal-windows-hann"]], "check_sparse_tensor_invariants": [[1902, "check-sparse-tensor-invariants"]], "torch.sparse.mm": [[1904, "torch-sparse-mm"]], "torch.sym_ite": [[1933, "torch-sym-ite"]], "torch.sparse_bsr_tensor": [[1910, "torch-sparse-bsr-tensor"]], "torch.slogdet": [[1896, "torch-slogdet"]], "torch.svd_lowrank": [[1928, "torch-svd-lowrank"]], "torch.t": [[1937, "torch-t"]], "torch.std": [[1921, "torch-std"]], "torch.stft": [[1923, "torch-stft"]], "torch.squeeze": [[1918, "torch-squeeze"]], "torch.stack": [[1920, "torch-stack"]], "torch.sparse.as_sparse_gradcheck": [[1901, "torch-sparse-as-sparse-gradcheck"]], "torch.std_mean": [[1922, "torch-std-mean"]], "torch.sym_not": [[1936, "torch-sym-not"]], "torch.svd": [[1927, "torch-svd"]], "torch.signbit": [[1891, "torch-signbit"]], "torch.slice_scatter": [[1895, "torch-slice-scatter"]], "torch.signal.windows.hamming": [[1887, "torch-signal-windows-hamming"]], "torch.sparse_csr_tensor": [[1914, "torch-sparse-csr-tensor"]], "torch.sort": [[1899, "torch-sort"]], "torch.signal.windows.nuttall": [[1890, "torch-signal-windows-nuttall"]], "torch.sym_int": [[1932, "torch-sym-int"]], "torch.softmax": [[1898, "torch-softmax"]], "torch.sinc": [[1893, "torch-sinc"]], "torch.split": [[1915, "torch-split"]], "torch.sym_float": [[1931, "torch-sym-float"]], "torch.sign": [[1879, "torch-sign"]], "torch.ravel": [[1843, "torch-ravel"]], "torch.randint": [[1837, "torch-randint"]], "torch.save": [[1858, "torch-save"]], "torch.randint_like": [[1838, "torch-randint-like"]], "torch.scatter_reduce": [[1861, "torch-scatter-reduce"]], "torch.range": [[1842, "torch-range"]], "torch.sgn": [[1877, "torch-sgn"]], "torch.set_float32_matmul_precision": [[1870, "torch-set-float32-matmul-precision"]], "torch.signal.windows.blackman": [[1881, "torch-signal-windows-blackman"]], "torch.randn": [[1839, "torch-randn"]], "torch.signal.windows.gaussian": [[1884, "torch-signal-windows-gaussian"]], "torch.set_default_device": [[1866, "torch-set-default-device"]], "torch.signal.windows.bartlett": [[1880, "torch-signal-windows-bartlett"]], "torch.set_deterministic_debug_mode": [[1869, "torch-set-deterministic-debug-mode"]], "torch.set_flush_denormal": [[1871, "torch-set-flush-denormal"]], "torch.set_printoptions": [[1874, "torch-set-printoptions"]], "torch.randperm": [[1841, "torch-randperm"]], "torch.renorm": 
[[1847, "torch-renorm"]], "torch.real": [[1844, "torch-real"]], "torch.set_num_interop_threads": [[1872, "torch-set-num-interop-threads"]], "torch.reshape": [[1849, "torch-reshape"]], "torch.row_stack": [[1856, "torch-row-stack"]], "torch.select": [[1864, "torch-select"]], "torch.set_default_tensor_type": [[1868, "torch-set-default-tensor-type"]], "torch.signal.windows.general_cosine": [[1885, "torch-signal-windows-general-cosine"]], "torch.set_num_threads": [[1873, "torch-set-num-threads"]], "torch.scatter": [[1859, "torch-scatter"]], "torch.sigmoid": [[1878, "torch-sigmoid"]], "torch.remainder": [[1846, "torch-remainder"]], "torch.scatter_add": [[1860, "torch-scatter-add"]], "torch.reciprocal": [[1845, "torch-reciprocal"]], "torch.round": [[1855, "torch-round"]], "torch.roll": [[1853, "torch-roll"]], "torch.set_default_dtype": [[1867, "torch-set-default-dtype"]], "torch.resolve_neg": [[1851, "torch-resolve-neg"]], "torch.select_scatter": [[1865, "torch-select-scatter"]], "torch.rand_like": [[1836, "torch-rand-like"]], "torch.seed": [[1863, "torch-seed"]], "torch.repeat_interleave": [[1848, "torch-repeat-interleave"]], "torch.signal.windows.cosine": [[1882, "torch-signal-windows-cosine"]], "torch.rot90": [[1854, "torch-rot90"]], "torch.set_warn_always": [[1876, "torch-set-warn-always"]], "torch.result_type": [[1852, "torch-result-type"]], "torch.searchsorted": [[1862, "torch-searchsorted"]], "torch.randn_like": [[1840, "torch-randn-like"]], "torch.rsqrt": [[1857, "torch-rsqrt"]], "torch.signal.windows.exponential": [[1883, "torch-signal-windows-exponential"]], "torch.signal.windows.general_hamming": [[1886, "torch-signal-windows-general-hamming"]], "torch.set_rng_state": [[1875, "torch-set-rng-state"]], "torch.resolve_conj": [[1850, "torch-resolve-conj"]], "Dynamic shapes": [[2098, "dynamic-shapes"]], "Motivation": [[2098, "motivation"], [2101, "motivation"], [2023, "motivation"]], "Abridged public API": [[2098, "abridged-public-api"]], "The Guard Model": [[2098, "the-guard-model"]], "Overall architecture": [[2098, "overall-architecture"], [2101, "overall-architecture"]], "Abridged internal API": [[2098, "abridged-internal-api"]], "DimDynamic policy": [[2098, "dimdynamic-policy"]], "Unbacked SymInts": [[2098, "unbacked-symints"]], "Custom Backends": [[2097, "custom-backends"]], "Overview": [[2097, "overview"], [64, "module-torch.fx"], [52, "overview"], [19, "module-torch.cuda.tunable"], [18, "module-torch.cuda._sanitizer"], [2069, "module-torch.profiler"], [2063, "overview"], [2062, "overview"]], "Registering Custom Backends": [[2097, "registering-custom-backends"]], "Custom Backends after AOTAutograd": [[2097, "custom-backends-after-aotautograd"]], "Examples": [[2097, "examples"], [42, "examples"], [12, "examples"]], "Debugging Backend": [[2097, "debugging-backend"]], "Speedy Backend": [[2097, "speedy-backend"]], "Composable Backends": [[2097, "composable-backends"]], "Profiling to understand torch.compile performance": [[2109, "profiling-to-understand-torch-compile-performance"]], "What to use torch.profiler for:": [[2109, "what-to-use-torch-profiler-for"]], "Basics of using torch.profiler and viewing traces": [[2109, "basics-of-using-torch-profiler-and-viewing-traces"]], "Working around CUDA Graph profiling issues": [[2109, "working-around-cuda-graph-profiling-issues"]], "Understanding compilation time": [[2109, "understanding-compilation-time"]], "Finding graph breaks: \u201cTorch-Compiled Region\u201d and \u201cCompiledFunction\u201d": [[2109, 
"finding-graph-breaks-torch-compiled-region-and-compiledfunction"]], "Operator Kernels": [[2109, "operator-kernels"]], "Launch overhead": [[2109, "launch-overhead"]], "PyTorch 2.0 Performance Dashboard": [[2108, "pytorch-2-0-performance-dashboard"]], "How to read the dashboard?": [[2108, "how-to-read-the-dashboard"]], "What is measured on the dashboard?": [[2108, "what-is-measured-on-the-dashboard"]], "Can I check if my PR affects TorchInductor\u2019s performance on the dashboard before merging?": [[2108, "can-i-check-if-my-pr-affects-torchinductor-s-performance-on-the-dashboard-before-merging"]], "How can I run any performance test locally?": [[2108, "how-can-i-run-any-performance-test-locally"]], "IRs": [[2106, "irs"]], "Core Aten IR": [[2106, "core-aten-ir"]], "Prims IR": [[2106, "prims-ir"]], "Understanding CUDA Memory Usage": [[2113, "understanding-cuda-memory-usage"]], "Generating a Snapshot": [[2113, "generating-a-snapshot"]], "Using the visualizer": [[2113, "using-the-visualizer"]], "Active Memory Timeline": [[2113, "active-memory-timeline"]], "Allocator State History": [[2113, "allocator-state-history"]], "Snapshot API Reference": [[2113, "snapshot-api-reference"]], "torch.xpu": [[2118, "module-torch.xpu"]], "Random Number Generator": [[2118, "random-number-generator"], [17, "random-number-generator"]], "Streams and events": [[2118, "streams-and-events"], [2031, "streams-and-events"], [17, "streams-and-events"], [16, "streams-and-events"]], "torch.utils": [[2117, "module-torch.utils"]], "PyTorch 2.0 NNModule Support": [[2107, "pytorch-2-0-nnmodule-support"]], "NNModule Hooks Support": [[2107, "nnmodule-hooks-support"]], "nn.Module.__call__ Hooks Usage and limitations": [[2107, "nn-module-call-hooks-usage-and-limitations"]], "state_dict Hooks": [[2107, "state-dict-hooks"]], "Dynamo Deep-Dive": [[2099, "dynamo-deep-dive"]], "A Gentle Introduction to Dynamo": [[2099, "a-gentle-introduction-to-dynamo"]], "PEP 523: Adding a frame evaluation API to CPython": [[2099, "pep-523-adding-a-frame-evaluation-api-to-cpython"]], "Implementing CPython in Python": [[2099, "implementing-cpython-in-python"]], "Generating the Output Graph": [[2099, "generating-the-output-graph"]], "Making Dynamo Sound: Guards": [[2099, "making-dynamo-sound-guards"]], "Symbolic Shapes": [[2099, "symbolic-shapes"]], "Static by default": [[2099, "static-by-default"]], "0, 1 are always specialized": [[2099, "are-always-specialized"]], "Duck shaping": [[2099, "duck-shaping"]], "Guards on symbolic ints": [[2099, "guards-on-symbolic-ints"]], "Making Dynamo Complete: Graph Breaks": [[2099, "making-dynamo-complete-graph-breaks"]], "Conclusion": [[2099, "conclusion"]], "Footnotes": [[2099, "footnotes"]], "torch.compiler": [[2092, "torch-compiler"]], "Read More": [[2092, "read-more"], [56, "read-more"], [52, "read-more"]], "Getting Started for PyTorch Users": [[2092, null]], "Deep Dive for PyTorch Developers": [[2092, null], [52, null]], "HowTo for PyTorch Backend Vendors": [[2092, null]], "TorchDynamo APIs for fine-grained tracing": [[2103, "torchdynamo-apis-for-fine-grained-tracing"]], "TorchDynamo APIs to control fine-grained tracing": [[2103, "id1"]], "torch.compiler.disable": [[2103, "torch-compiler-disable"], [981, "torch-compiler-disable"]], "torch._dynamo.disallow_in_graph": [[2103, "torch-dynamo-disallow-in-graph"]], "torch.compiler.allow_in_graph": [[2103, "torch-compiler-allow-in-graph"], [977, "torch-compiler-allow-in-graph"]], "Limitations": [[2103, "limitations"], [2096, "limitations"], [2065, "limitations"]], 
"Dynamo Overview": [[2100, "dynamo-overview"]], "Dynamo Internals": [[2100, "dynamo-internals"]], "What is a guard?": [[2100, "what-is-a-guard"]], "What is Dynamo doing?": [[2100, "what-is-dynamo-doing"]], "How to inspect artifacts generated by Dynamo?": [[2100, "how-to-inspect-artifacts-generated-by-dynamo"]], "Writing Graph Transformations on ATen IR": [[2110, "writing-graph-transformations-on-aten-ir"]], "Passes": [[2110, "passes"]], "Transformer": [[2110, "transformer"], [1570, "transformer"]], "One-to-One Pass": [[2110, "one-to-one-pass"]], "One-to-X Pass": [[2110, "one-to-x-pass"]], "One-to-None Pass": [[2110, "one-to-none-pass"]], "Utilizing Local Information": [[2110, "utilizing-local-information"]], "Subgraph Rewriter": [[2110, "subgraph-rewriter"]], "Pass Manager": [[2110, "pass-manager"]], "Partitioner": [[2110, "partitioner"]], "Subgraph Matcher": [[2110, "subgraph-matcher"]], "Capability Based Partitioner": [[2110, "capability-based-partitioner"]], "TorchInductor GPU Profiling": [[2105, "torchinductor-gpu-profiling"]], "Relevant Environment Variables": [[2105, "relevant-environment-variables"]], "Breakdown Model GPU Time": [[2105, "breakdown-model-gpu-time"]], "Benchmark Individual Triton Kernel": [[2105, "benchmark-individual-triton-kernel"]], "PyTorch 2.0 Troubleshooting": [[2111, "pytorch-2-0-troubleshooting"]], "Title": [[2111, "id1"]], "Diagnosing Runtime Errors": [[2111, "diagnosing-runtime-errors"]], "Torchdynamo Errors": [[2111, "torchdynamo-errors"]], "Diagnosing TorchInductor Errors": [[2111, "diagnosing-torchinductor-errors"]], "Minifying TorchInductor Errors": [[2111, "minifying-torchinductor-errors"]], "Minifying Backend Compiler Errors": [[2111, "minifying-backend-compiler-errors"]], "Performance Profiling": [[2111, "performance-profiling"]], "Accessing TorchDynamo Profiler": [[2111, "accessing-torchdynamo-profiler"]], "TorchInductor Debugging using TORCH_COMPILE_DEBUG": [[2111, "torchinductor-debugging-using-torch-compile-debug"]], "Graph Breaks": [[2111, "graph-breaks"], [2102, "graph-breaks"], [52, "graph-breaks"]], "Identifying the Cause of a Graph Break": [[2111, "identifying-the-cause-of-a-graph-break"]], "Excessive Recompilation": [[2111, "excessive-recompilation"]], "Accuracy Debugging": [[2111, "accuracy-debugging"]], "Extended Debugging": [[2111, "extended-debugging"]], "Cold Start Timing and Cache Corruption Debugging": [[2111, "cold-start-timing-and-cache-corruption-debugging"]], "Frequently Asked Questions": [[2102, "frequently-asked-questions"], [2013, "frequently-asked-questions"], [7, "frequently-asked-questions"], [2070, "frequently-asked-questions"], [2065, "frequently-asked-questions"], [2050, "frequently-asked-questions"]], "Does torch.compile support training?": [[2102, "does-torch-compile-support-training"]], "Do you support Distributed code?": [[2102, "do-you-support-distributed-code"]], "Do I still need to export whole graphs?": [[2102, "do-i-still-need-to-export-whole-graphs"]], "Why is my code crashing?": [[2102, "why-is-my-code-crashing"]], "Why is compilation slow?": [[2102, "why-is-compilation-slow"]], "Why are you recompiling in production?": [[2102, "why-are-you-recompiling-in-production"]], "How are you speeding up my code?": [[2102, "how-are-you-speeding-up-my-code"]], "Why am I not seeing speedups?": [[2102, "why-am-i-not-seeing-speedups"]], "Identifying the cause of a graph break": [[2102, "identifying-the-cause-of-a-graph-break"]], "Why didn\u2019t my code recompile when I changed it?": [[2102, 
"why-didnt-my-code-recompile-when-i-changed-it"]], "Why am I getting incorrect results?": [[2102, "why-am-i-getting-incorrect-results"]], "Why am I getting OOMs?": [[2102, "why-am-i-getting-ooms"]], "Does torch.func work with torch.compile (for grad and vmap transforms)?": [[2102, "does-torch-func-work-with-torch-compile-for-grad-and-vmap-transforms"]], "Calling torch.func transform inside of a function handled with torch.compile": [[2102, "calling-torch-func-transform-inside-of-a-function-handled-with-torch-compile"]], "Compiling torch.func.grad with torch.compile": [[2102, "compiling-torch-func-grad-with-torch-compile"]], "Compiling torch.vmap with torch.compile": [[2102, "compiling-torch-vmap-with-torch-compile"]], "Compiling functions besides the ones which are supported (escape hatch)": [[2102, "compiling-functions-besides-the-ones-which-are-supported-escape-hatch"]], "Does NumPy work with torch.compile?": [[2102, "does-numpy-work-with-torch-compile"]], "Which NumPy features does torch.compile support?": [[2102, "which-numpy-features-does-torch-compile-support"]], "Can I compile NumPy code using torch.compile?": [[2102, "can-i-compile-numpy-code-using-torch-compile"]], "Can I execute NumPy code on CUDA and compute gradients via torch.compile?": [[2102, "can-i-execute-numpy-code-on-cuda-and-compute-gradients-via-torch-compile"]], "How do I debug NumPy code under torch.compile?": [[2102, "how-do-i-debug-numpy-code-under-torch-compile"]], "I torch.compile some NumPy code and I did not see any speed-up.": [[2102, "i-torch-compile-some-numpy-code-and-i-did-not-see-any-speed-up"]], "Which API to use for fine grain tracing?": [[2102, "which-api-to-use-for-fine-grain-tracing"]], "How do I graph break on a function?": [[2102, "how-do-i-graph-break-on-a-function"]], "What\u2019s the difference between torch._dynamo.disable and torch._dynamo.disallow_in_graph": [[2102, "what-s-the-difference-between-torch-dynamo-disable-and-torch-dynamo-disallow-in-graph"]], "What\u2019s the difference between torch._dynamo.disable and torch._dynamo_skip": [[2102, "what-s-the-difference-between-torch-dynamo-disable-and-torch-dynamo-skip"]], "Fake tensor": [[2101, "fake-tensor"]], "Related work": [[2101, "related-work"]], "API: the important bits": [[2101, "api-the-important-bits"]], "Details": [[2101, "details"]], "About the tensor subclass": [[2101, "about-the-tensor-subclass"]], "How is each individual operator implemented?": [[2101, "how-is-each-individual-operator-implemented"]], "How does the converter work?": [[2101, "how-does-the-converter-work"]], "Performance characteristics": [[2101, "performance-characteristics"]], "Fake tensor of fake tensor?": [[2101, "fake-tensor-of-fake-tensor"]], "Interaction with dynamic shapes": [[2101, "interaction-with-dynamic-shapes"]], "Other resources": [[2101, "other-resources"]], "PYTORCH ProcessGroupNCCL Environment Variables": [[2115, "pytorch-processgroupnccl-environment-variables"]], "CUDAGraph Trees": [[2096, "cudagraph-trees"]], "CUDAGraph Background": [[2096, "cudagraph-background"]], "PyTorch CUDAGraph Integration": [[2096, "pytorch-cudagraph-integration"]], "Make Graphed Callables": [[2096, "make-graphed-callables"]], "TorchDynamo Previous CUDA Graphs Integration": [[2096, "torchdynamo-previous-cuda-graphs-integration"]], "CUDAGraph Trees Integration": [[2096, "cudagraph-trees-integration"]], "Comparisons": [[2096, "comparisons"], [2016, "comparisons"]], "torch.compiler API reference": [[2094, "torch-compiler-api-reference"]], "Best Practices for Backends": 
[[2095, "best-practices-for-backends"]], "x86 CPU": [[2095, "x86-cpu"]], "Torch Environment Variables": [[2114, "torch-environment-variables"]], "torch.ao.ns._numeric_suite_fx": [[2091, "torch-ao-ns-numeric-suite-fx"]], "torch.ao.ns.fx.utils": [[2091, "torch-ao-ns-fx-utils"]], "Type Info": [[2116, "type-info"]], "torch.finfo": [[2116, "torch-finfo"]], "torch.iinfo": [[2116, "torch-iinfo"]], "torch.overrides": [[2112, "module-torch.overrides"]], "Functions": [[2112, "functions"], [2015, "functions"], [2013, "functions"], [2065, "functions"], [2081, "functions"]], "AOTInductor: Ahead-Of-Time Compilation for Torch.Export-ed Models": [[2093, "aotinductor-ahead-of-time-compilation-for-torch-export-ed-models"]], "Model Compilation": [[2093, "model-compilation"]], "Inference in C++": [[2093, "inference-in-c"]], "Getting Started": [[2104, "getting-started"], [7, "getting-started"]], "Using a pretrained model": [[2104, "using-a-pretrained-model"]], "Next Steps": [[2104, "next-steps"]], "torch.nn.modules.module.register_module_full_backward_hook": [[1711, "torch-nn-modules-module-register-module-full-backward-hook"]], "torch.nn.functional.tanhshrink": [[1696, "torch-nn-functional-tanhshrink"]], "torch.nn.functional.triplet_margin_loss": [[1700, "torch-nn-functional-triplet-margin-loss"]], "torch.nn.modules.module.register_module_parameter_registration_hook": [[1714, "torch-nn-modules-module-register-module-parameter-registration-hook"]], "torch.nn.utils.clip_grad_norm_": [[1721, "torch-nn-utils-clip-grad-norm"]], "torch.nn.functional.threshold": [[1697, "torch-nn-functional-threshold"]], "torch.nn.functional.sigmoid": [[1686, "torch-nn-functional-sigmoid"]], "torch.nn.utils.clip_grad_value_": [[1722, "torch-nn-utils-clip-grad-value"]], "torch.nn.functional.triplet_margin_with_distance_loss": [[1701, "torch-nn-functional-triplet-margin-with-distance-loss"]], "torch.nn.functional.unfold": [[1702, "torch-nn-functional-unfold"]], "torch.nn.functional.torch.nn.parallel.data_parallel": [[1699, "torch-nn-functional-torch-nn-parallel-data-parallel"]], "torch.nn.functional.softsign": [[1694, "torch-nn-functional-softsign"]], "RMSNorm": [[1715, "rmsnorm"], [1541, "rmsnorm"]], "torch.nn.functional.scaled_dot_product_attention": [[1684, "torch-nn-functional-scaled-dot-product-attention"]], "torch.nn.functional.upsample_nearest": [[1705, "torch-nn-functional-upsample-nearest"]], "torch.nn.modules.module.register_module_module_registration_hook": [[1713, "torch-nn-modules-module-register-module-module-registration-hook"]], "torch.nn.utils.parameters_to_vector": [[1729, "torch-nn-utils-parameters-to-vector"]], "torch.nn.utils.convert_conv2d_weight_memory_format": [[1723, "torch-nn-utils-convert-conv2d-weight-memory-format"]], "torch.nn.utils.parametrizations.orthogonal": [[1730, "torch-nn-utils-parametrizations-orthogonal"]], "torch.nn.utils.fuse_conv_bn_weights": [[1726, "torch-nn-utils-fuse-conv-bn-weights"]], "torch.nn.functional.softshrink": [[1693, "torch-nn-functional-softshrink"]], "LazyModuleMixin": [[1706, "lazymodulemixin"]], "torch.nn.functional.softplus": [[1692, "torch-nn-functional-softplus"]], "torch.nn.functional.threshold_": [[1698, "torch-nn-functional-threshold"]], "torch.nn.functional.silu": [[1687, "torch-nn-functional-silu"]], "torch.nn.modules.module.register_module_forward_hook": [[1709, "torch-nn-modules-module-register-module-forward-hook"]], "DistributedDataParallel": [[1716, "distributeddataparallel"], [2047, "distributeddataparallel"]], "torch.nn.utils.fuse_linear_bn_weights": [[1728, 
"torch-nn-utils-fuse-linear-bn-weights"]], "torch.nn.utils.parametrizations.weight_norm": [[1732, "torch-nn-utils-parametrizations-weight-norm"]], "torch.nn.functional.rrelu_": [[1683, "torch-nn-functional-rrelu"]], "torch.nn.functional.softmax": [[1690, "torch-nn-functional-softmax"]], "ParametrizationList": [[1733, "parametrizationlist"]], "UninitializedBuffer": [[1718, "uninitializedbuffer"]], "torch.nn.functional.upsample": [[1703, "torch-nn-functional-upsample"]], "torch.nn.utils.parametrizations.spectral_norm": [[1731, "torch-nn-utils-parametrizations-spectral-norm"]], "torch.nn.functional.tanh": [[1695, "torch-nn-functional-tanh"]], "torch.nn.modules.module.register_module_full_backward_pre_hook": [[1712, "torch-nn-modules-module-register-module-full-backward-pre-hook"]], "torch.nn.modules.module.register_module_buffer_registration_hook": [[1708, "torch-nn-modules-module-register-module-buffer-registration-hook"]], "torch.nn.modules.module.register_module_backward_hook": [[1707, "torch-nn-modules-module-register-module-backward-hook"]], "UninitializedParameter": [[1719, "uninitializedparameter"]], "torch.nn.functional.soft_margin_loss": [[1689, "torch-nn-functional-soft-margin-loss"]], "torch.nn.functional.softmin": [[1691, "torch-nn-functional-softmin"]], "torch.nn.utils.convert_conv3d_weight_memory_format": [[1724, "torch-nn-utils-convert-conv3d-weight-memory-format"]], "torch.nn.utils.fuse_linear_bn_eval": [[1727, "torch-nn-utils-fuse-linear-bn-eval"]], "torch.nn.functional.selu": [[1685, "torch-nn-functional-selu"]], "torch.nn.utils.fuse_conv_bn_eval": [[1725, "torch-nn-utils-fuse-conv-bn-eval"]], "Parameter": [[1717, "parameter"]], "torch.nn.functional.smooth_l1_loss": [[1688, "torch-nn-functional-smooth-l1-loss"]], "torch.nn.functional.upsample_bilinear": [[1704, "torch-nn-functional-upsample-bilinear"]], "torch.nn.modules.module.register_module_forward_pre_hook": [[1710, "torch-nn-modules-module-register-module-forward-pre-hook"]], "torch.nn.utils.clip_grad_norm": [[1720, "torch-nn-utils-clip-grad-norm"]], "torch.Tensor.absolute_": [[94, "torch-tensor-absolute"]], "torch.func Whirlwind Tour": [[61, "torch-func-whirlwind-tour"]], "What is torch.func?": [[61, "what-is-torch-func"]], "Why composable function transforms?": [[61, "why-composable-function-transforms"], [56, "why-composable-function-transforms"]], "What are the transforms?": [[61, "what-are-the-transforms"]], "grad() (gradient computation)": [[61, "grad-gradient-computation"]], "vmap() (auto-vectorization)": [[61, "vmap-auto-vectorization"]], "vjp() (vector-Jacobian product)": [[61, "vjp-vector-jacobian-product"]], "jvp() (Jacobian-vector product)": [[61, "jvp-jacobian-vector-product"]], "jacrev(), jacfwd(), and hessian()": [[61, "jacrev-jacfwd-and-hessian"]], "python.context-manager": [[70, "python-context-manager"]], "null_context_manager": [[70, "null-context-manager"], [66, "null-context-manager"]], "FXE0010:fx-pass": [[83, "fxe0010-fx-pass"]], "FXE0013:op-level-debugging": [[86, "fxe0013-op-level-debugging"]], "Train script": [[51, "train-script"]], "FXE0007:fx-graph-to-onnx": [[81, "fxe0007-fx-graph-to-onnx"]], "Key Representations:": [[81, "key-representations"]], "Additional Notes:": [[81, "additional-notes"]], "Generator": [[90, "generator"]], "torch.Tensor.acosh_": [[98, "torch-tensor-acosh"]], "torch.dynamic-shape": [[75, "torch-dynamic-shape"]], "cond_branch_class_method": [[75, "cond-branch-class-method"], [74, "cond-branch-class-method"], [66, "cond-branch-class-method"]], 
"cond_branch_nested_function": [[75, "cond-branch-nested-function"], [74, "cond-branch-nested-function"], [66, "cond-branch-nested-function"]], "cond_branch_nonlocal_variables": [[75, "cond-branch-nonlocal-variables"], [74, "cond-branch-nonlocal-variables"], [66, "cond-branch-nonlocal-variables"]], "cond_operands": [[75, "cond-operands"], [74, "cond-operands"], [66, "cond-operands"]], "cond_predicate": [[75, "cond-predicate"], [74, "cond-predicate"], [66, "cond-predicate"]], "dynamic_shape_constructor": [[75, "dynamic-shape-constructor"], [66, "dynamic-shape-constructor"]], "dynamic_shape_if_guard": [[75, "dynamic-shape-if-guard"], [66, "dynamic-shape-if-guard"], [71, "dynamic-shape-if-guard"]], "dynamic_shape_map": [[75, "dynamic-shape-map"], [66, "dynamic-shape-map"], [78, "dynamic-shape-map"]], "dynamic_shape_round": [[75, "dynamic-shape-round"], [66, "dynamic-shape-round"], [68, "dynamic-shape-round"]], "dynamic_shape_slicing": [[75, "dynamic-shape-slicing"], [66, "dynamic-shape-slicing"]], "dynamic_shape_view": [[75, "dynamic-shape-view"], [66, "dynamic-shape-view"]], "list_contains": [[75, "list-contains"], [72, "list-contains"], [66, "list-contains"], [67, "list-contains"]], "scalar_output": [[75, "scalar-output"], [66, "scalar-output"]], "Migrating from functorch to torch.func": [[59, "migrating-from-functorch-to-torch-func"]], "function transforms": [[59, "function-transforms"]], "NN module utilities": [[59, "nn-module-utilities"]], "functorch.make_functional": [[59, "functorch-make-functional"]], "functorch.combine_state_for_ensemble": [[59, "functorch-combine-state-for-ensemble"]], "functorch.compile": [[59, "functorch-compile"]], "FXE0008:fx-node-to-onnx": [[82, "fxe0008-fx-node-to-onnx"]], "FXE0016:find-operator-overloads-in-onnx-registry": [[89, "fxe0016-find-operator-overloads-in-onnx-registry"]], "torch.fx": [[64, "torch-fx"]], "Writing Transformations": [[64, "writing-transformations"]], "A Quick Primer on Graphs": [[64, "a-quick-primer-on-graphs"]], "Graph Manipulation": [[64, "graph-manipulation"]], "Direct Graph Manipulation": [[64, "direct-graph-manipulation"]], "Subgraph Rewriting With replace_pattern()": [[64, "subgraph-rewriting-with-replace-pattern"]], "Graph Manipulation Examples": [[64, "graph-manipulation-examples"]], "Proxy/Retracing": [[64, "proxy-retracing"]], "The Interpreter Pattern": [[64, "the-interpreter-pattern"]], "Examples of the Interpreter Pattern": [[64, "examples-of-the-interpreter-pattern"]], "Debugging": [[64, "debugging"], [2013, "debugging"]], "Introduction": [[64, "introduction"], [2023, "introduction"], [2035, "introduction"]], "Common Pitfalls in Transform Authoring": [[64, "common-pitfalls-in-transform-authoring"]], "Checking Correctness of Modules": [[64, "checking-correctness-of-modules"]], "Debugging the Generated Code": [[64, "debugging-the-generated-code"]], "Use pdb": [[64, "use-pdb"]], "Print the Generated Code": [[64, "print-the-generated-code"]], "Use the to_folder Function From GraphModule": [[64, "use-the-to-folder-function-from-graphmodule"]], "Debugging the Transformation": [[64, "debugging-the-transformation"]], "Available Debuggers": [[64, "available-debuggers"]], "Limitations of Symbolic Tracing": [[64, "limitations-of-symbolic-tracing"]], "Dynamic Control Flow": [[64, "dynamic-control-flow"]], "Static Control Flow": [[64, "static-control-flow"]], "Non-torch Functions": [[64, "non-torch-functions"]], "Customizing Tracing with the Tracer class": [[64, "customizing-tracing-with-the-tracer-class"]], "Leaf Modules": [[64, 
"leaf-modules"]], "Miscellanea": [[64, "miscellanea"]], "API Reference": [[64, "api-reference"], [52, "module-torch.export"], [2029, "module-torch.monitor"], [19, "api-reference"], [18, "api-reference"], [33, "module-torch.distributed.pipelining"], [12, "api-reference"], [2069, "api-reference"], [2068, "api-reference"], [2063, "api-reference"]], "FXE0015:fx-node-insert-type-promotion": [[88, "fxe0015-fx-node-insert-type-promotion"]], "torch.Tensor.abs_": [[92, "torch-tensor-abs"]], "python.data-structure": [[72, "python-data-structure"]], "dictionary": [[72, "dictionary"], [66, "dictionary"]], "fn_with_kwargs": [[72, "fn-with-kwargs"], [66, "fn-with-kwargs"]], "list_unpack": [[72, "list-unpack"], [66, "list-unpack"], [71, "list-unpack"]], "torch.mutation": [[79, "torch-mutation"]], "user_input_mutation": [[79, "user-input-mutation"], [66, "user-input-mutation"]], "torch.func": [[56, "torch-func"]], "What are composable function transforms?": [[56, "what-are-composable-function-transforms"]], "FXE0011:no-symbolic-function-for-call-function": [[84, "fxe0011-no-symbolic-function-for-call-function"]], "FullyShardedDataParallel": [[55, "module-torch.distributed.fsdp"]], "torch.cond": [[74, "torch-cond"], [989, "torch-cond"]], "cond_closed_over_variable": [[74, "cond-closed-over-variable"], [66, "cond-closed-over-variable"], [69, "cond-closed-over-variable"]], "ExportDB": [[66, "exportdb"]], "Tags": [[66, null]], "Supported": [[66, "supported"]], "assume_constant_result": [[66, "assume-constant-result"], [77, "assume-constant-result"]], "autograd_function": [[66, "autograd-function"]], "class_method": [[66, "class-method"]], "constrain_as_size_example": [[66, "constrain-as-size-example"], [77, "constrain-as-size-example"], [76, "constrain-as-size-example"]], "constrain_as_value_example": [[66, "constrain-as-value-example"], [77, "constrain-as-value-example"], [76, "constrain-as-value-example"]], "decorator": [[66, "decorator"]], "dynamic_shape_assert": [[66, "dynamic-shape-assert"], [67, "dynamic-shape-assert"]], "nested_function": [[66, "nested-function"], [69, "nested-function"]], "pytree_flatten": [[66, "pytree-flatten"]], "specialized_attribute": [[66, "specialized-attribute"]], "static_for_loop": [[66, "static-for-loop"], [71, "static-for-loop"]], "static_if": [[66, "static-if"], [71, "static-if"]], "tensor_setattr": [[66, "tensor-setattr"], [68, "tensor-setattr"]], "type_reflection_method": [[66, "type-reflection-method"], [68, "type-reflection-method"]], "Not Supported Yet": [[66, "not-supported-yet"]], "model_attr_mutation": [[66, "model-attr-mutation"], [73, "model-attr-mutation"]], "optional_input": [[66, "optional-input"], [73, "optional-input"]], "torch_sym_min": [[66, "torch-sym-min"], [80, "torch-sym-min"]], "torch.futures": [[63, "torch-futures"]], "Patching Batch Norm": [[58, "patching-batch-norm"]], "What\u2019s happening?": [[58, "what-s-happening"]], "How to fix": [[58, "how-to-fix"]], "Option 1: Change the BatchNorm": [[58, "option-1-change-the-batchnorm"]], "Option 2: torchvision parameter": [[58, "option-2-torchvision-parameter"]], "Option 3: functorch\u2019s patching": [[58, "option-3-functorch-s-patching"]], "Option 4: eval mode": [[58, "option-4-eval-mode"]], "torch.escape-hatch": [[77, "torch-escape-hatch"]], "torch.Tensor.acos": [[95, "torch-tensor-acos"]], "torch.dynamic-value": [[76, "torch-dynamic-value"]], "FXE0014:find-opschema-matched-symbolic-function": [[87, "fxe0014-find-opschema-matched-symbolic-function"]], "python.assert": [[67, "python-assert"]], 
"torch.fft": [[54, "torch-fft"]], "Fast Fourier Transforms": [[54, "fast-fourier-transforms"]], "Helper Functions": [[54, "helper-functions"]], "python.builtin": [[68, "python-builtin"]], "UX Limitations": [[60, "ux-limitations"]], "General limitations": [[60, "general-limitations"]], "torch.autograd APIs": [[60, "torch-autograd-apis"]], "vmap limitations": [[60, "vmap-limitations"]], "Mutation: Arbitrary mutation of Python data structures": [[60, "mutation-arbitrary-mutation-of-python-data-structures"]], "Mutation: in-place PyTorch Operations": [[60, "mutation-in-place-pytorch-operations"]], "Mutation: out= PyTorch Operations": [[60, "mutation-out-pytorch-operations"]], "Data-dependent Python control flow": [[60, "data-dependent-python-control-flow"]], "Data-dependent operations (.item())": [[60, "data-dependent-operations-item"]], "Dynamic shape operations (nonzero and friends)": [[60, "dynamic-shape-operations-nonzero-and-friends"]], "Randomness": [[60, "randomness"]], "torch.operator": [[80, "torch-operator"]], "torch.Tensor.add_": [[100, "torch-tensor-add"]], "torch.export IR Specification": [[53, "torch-export-ir-specification"]], "Assumptions": [[53, "assumptions"], [2077, "assumptions"]], "What is Export IR": [[53, "what-is-export-ir"]], "ExportedProgram": [[53, "exportedprogram"]], "Graph": [[53, "graph"]], "Node": [[53, "node"]], "call_function": [[53, "call-function"]], "Metadata": [[53, "metadata"]], "placeholder": [[53, "placeholder"]], "output": [[53, "output"]], "get_attr": [[53, "get-attr"]], "References": [[53, "references"], [2013, "references"]], "SymInt": [[53, "symint"]], "FakeTensor": [[53, "faketensor"]], "Pytree-able Types": [[53, "pytree-able-types"]], "torch.Tensor.addbmm": [[101, "torch-tensor-addbmm"]], "torch.Tensor.abs": [[91, "torch-tensor-abs"]], "torch.Tensor.acos_": [[96, "torch-tensor-acos"]], "torch.__future__": [[62, "module-torch.__future__"]], "python.closure": [[69, "python-closure"]], "torch.Tensor.absolute": [[93, "torch-tensor-absolute"]], "torch.map": [[78, "torch-map"]], "torch.fx.experimental": [[65, "torch-fx-experimental"]], "torch.fx.experimental.symbolic_shapes": [[65, "module-torch.fx.experimental.symbolic_shapes"]], "python.control-flow": [[71, "python-control-flow"]], "torch.export": [[52, "torch-export"]], "Existing frameworks": [[52, "existing-frameworks"]], "Exporting a PyTorch Model": [[52, "exporting-a-pytorch-model"]], "An Example": [[52, "an-example"]], "Non-Strict Export": [[52, "non-strict-export"]], "Expressing Dynamism": [[52, "expressing-dynamism"]], "Serialization": [[52, "serialization"], [11, "serialization"], [2089, "serialization"]], "Specializations": [[52, "specializations"]], "Input Tensor Shapes": [[52, "input-tensor-shapes"]], "Python Primitives": [[52, "python-primitives"]], "Python Containers": [[52, "python-containers"]], "Limitations of torch.export": [[52, "limitations-of-torch-export"]], "Data/Shape-Dependent Control Flow": [[52, "data-shape-dependent-control-flow"]], "Missing Fake/Meta/Abstract Kernels for Operators": [[52, "missing-fake-meta-abstract-kernels-for-operators"]], "Additional Links for Export Users": [[52, null]], "FXE0012:unsupported-fx-node-analysis": [[85, "fxe0012-unsupported-fx-node-analysis"]], "torch.Tensor.acosh": [[97, "torch-tensor-acosh"]], "torch.func API Reference": [[57, "module-torch.func"]], "Function Transforms": [[57, "function-transforms"]], "Utilities for working with torch.nn.Modules": [[57, "utilities-for-working-with-torch-nn-modules"]], "python.object-model": [[73, 
"python-object-model"]], "torch.Tensor.add": [[99, "torch-tensor-add"]], "RAdam": [[1793, "radam"]], "torch.optim.Optimizer.state_dict": [[1790, "torch-optim-optimizer-state-dict"]], "ExponentialLR": [[1803, "exponentiallr"]], "SequentialLR": [[1811, "sequentiallr"]], "torch.promote_types": [[1825, "torch-promote-types"]], "SobolEngine": [[1833, "sobolengine"]], "torch.positive": [[1822, "torch-positive"]], "torch.quantize_per_tensor": [[1829, "torch-quantize-per-tensor"]], "torch.permute": [[1817, "torch-permute"]], "torch.quantized_max_pool2d": [[1832, "torch-quantized-max-pool2d"]], "torch.pow": [[1823, "torch-pow"]], "SparseAdam": [[1797, "sparseadam"]], "Adamax": [[1785, "adamax"]], "torch.outer": [[1815, "torch-outer"]], "torch.qr": [[1826, "torch-qr"]], "torch.quantize_per_channel": [[1828, "torch-quantize-per-channel"]], "NAdam": [[1787, "nadam"]], "StepLR": [[1812, "steplr"]], "torch.quantized_max_pool1d": [[1831, "torch-quantized-max-pool1d"]], "torch.polar": [[1820, "torch-polar"]], "SGD": [[1796, "sgd"]], "torch.ormqr": [[1814, "torch-ormqr"]], "torch.pca_lowrank": [[1816, "torch-pca-lowrank"]], "torch.rad2deg": [[1834, "torch-rad2deg"]], "LinearLR": [[1805, "linearlr"]], "Rprop": [[1795, "rprop"]], "RMSprop": [[1794, "rmsprop"]], "LambdaLR": [[1804, "lambdalr"]], "ReduceLROnPlateau": [[1810, "reducelronplateau"]], "torch.quantile": [[1827, "torch-quantile"]], "ConstantLR": [[1799, "constantlr"]], "torch.orgqr": [[1813, "torch-orgqr"]], "torch.rand": [[1835, "torch-rand"]], "PolynomialLR": [[1809, "polynomiallr"]], "torch.pinverse": [[1818, "torch-pinverse"]], "MultiplicativeLR": [[1807, "multiplicativelr"]], "CosineAnnealingWarmRestarts": [[1801, "cosineannealingwarmrestarts"]], "torch.poisson": [[1819, "torch-poisson"]], "torch.optim.Optimizer.step": [[1791, "torch-optim-optimizer-step"]], "torch.optim.Optimizer.zero_grad": [[1792, "torch-optim-optimizer-zero-grad"]], "CyclicLR": [[1802, "cycliclr"]], "CosineAnnealingLR": [[1800, "cosineannealinglr"]], "torch.optim.Optimizer.load_state_dict": [[1789, "torch-optim-optimizer-load-state-dict"]], "ChainedScheduler": [[1798, "chainedscheduler"]], "LBFGS": [[1786, "lbfgs"]], "torch.prod": [[1824, "torch-prod"]], "torch.optim.Optimizer.add_param_group": [[1788, "torch-optim-optimizer-add-param-group"]], "MultiStepLR": [[1806, "multisteplr"]], "torch.polygamma": [[1821, "torch-polygamma"]], "torch.quantized_batch_norm": [[1830, "torch-quantized-batch-norm"]], "OneCycleLR": [[1808, "onecyclelr"]], "torch.nn.utils.prune.identity": [[1748, "torch-nn-utils-prune-identity"]], "torch.nn.utils.prune.custom_from_mask": [[1746, "torch-nn-utils-prune-custom-from-mask"]], "Adagrad": [[1782, "adagrad"]], "torch.ones": [[1775, "torch-ones"]], "L1Unstructured": [[1741, "l1unstructured"]], "torch.nn.utils.stateless.functional_call": [[1766, "torch-nn-utils-stateless-functional-call"]], "torch.nn.utils.rnn.pack_sequence": [[1759, "torch-nn-utils-rnn-pack-sequence"]], "torch.nn.utils.weight_norm": [[1768, "torch-nn-utils-weight-norm"]], "Adam": [[1783, "adam"]], "AdamW": [[1784, "adamw"]], "LnStructured": [[1742, "lnstructured"]], "torch.nn.utils.rnn.unpad_sequence": [[1763, "torch-nn-utils-rnn-unpad-sequence"]], "torch.not_equal": [[1773, "torch-not-equal"]], "torch.nn.utils.prune.l1_unstructured": [[1750, "torch-nn-utils-prune-l1-unstructured"]], "torch.normal": [[1772, "torch-normal"]], "torch.nn.utils.vector_to_parameters": [[1767, "torch-nn-utils-vector-to-parameters"]], "torch.ones_like": [[1776, "torch-ones-like"]], 
"torch.nn.utils.prune.random_structured": [[1752, "torch-nn-utils-prune-random-structured"]], "torch.nn.utils.rnn.unpack_sequence": [[1762, "torch-nn-utils-rnn-unpack-sequence"]], "VerificationOptions": [[1779, "verificationoptions"]], "Identity": [[1740, "identity"], [1487, "identity"]], "torch.nn.utils.parametrize.is_parametrized": [[1735, "torch-nn-utils-parametrize-is-parametrized"]], "torch.nn.utils.prune.ln_structured": [[1751, "torch-nn-utils-prune-ln-structured"]], "RandomUnstructured": [[1745, "randomunstructured"]], "torch.nn.utils.prune.global_unstructured": [[1747, "torch-nn-utils-prune-global-unstructured"]], "torch.nn.utils.parametrize.cached": [[1734, "torch-nn-utils-parametrize-cached"]], "PruningContainer": [[1743, "pruningcontainer"]], "JitScalarType": [[1777, "jitscalartype"]], "BasePruningMethod": [[1738, "basepruningmethod"]], "torch.nn.utils.prune.remove": [[1754, "torch-nn-utils-prune-remove"]], "torch.nn.utils.parametrize.register_parametrization": [[1736, "torch-nn-utils-parametrize-register-parametrization"]], "torch.nn.utils.rnn.pad_sequence": [[1761, "torch-nn-utils-rnn-pad-sequence"]], "torch.nn.utils.rnn.pad_packed_sequence": [[1760, "torch-nn-utils-rnn-pad-packed-sequence"]], "GraphInfo": [[1778, "graphinfo"]], "torch.nn.utils.prune.random_unstructured": [[1753, "torch-nn-utils-prune-random-unstructured"]], "RandomStructured": [[1744, "randomstructured"]], "CustomFromMask": [[1739, "customfrommask"]], "PackedSequence": [[1757, "packedsequence"]], "torch.nn.utils.skip_init": [[1764, "torch-nn-utils-skip-init"]], "torch.nn.utils.parametrize.remove_parametrizations": [[1737, "torch-nn-utils-parametrize-remove-parametrizations"]], "Adadelta": [[1781, "adadelta"]], "torch.nn.utils.spectral_norm": [[1765, "torch-nn-utils-spectral-norm"]], "torch.nn.utils.prune.is_pruned": [[1749, "torch-nn-utils-prune-is-pruned"]], "torch.numel": [[1774, "torch-numel"]], "torch.nonzero": [[1770, "torch-nonzero"]], "torch.norm": [[1771, "torch-norm"]], "ASGD": [[1780, "asgd"]], "torch.nn.utils.remove_spectral_norm": [[1755, "torch-nn-utils-remove-spectral-norm"]], "torch.nn.utils.rnn.pack_padded_sequence": [[1758, "torch-nn-utils-rnn-pack-padded-sequence"]], "torch.nn.utils.remove_weight_norm": [[1756, "torch-nn-utils-remove-weight-norm"]], "no_grad": [[1769, "no-grad"]], "torch.nn.functional.pixel_shuffle": [[1674, "torch-nn-functional-pixel-shuffle"]], "torch.nn.functional.lp_pool1d": [[1653, "torch-nn-functional-lp-pool1d"]], "torch.nn.functional.local_response_norm": [[1650, "torch-nn-functional-local-response-norm"]], "torch.nn.functional.hardsigmoid": [[1636, "torch-nn-functional-hardsigmoid"]], "torch.nn.functional.one_hot": [[1670, "torch-nn-functional-one-hot"]], "torch.nn.functional.multilabel_margin_loss": [[1666, "torch-nn-functional-multilabel-margin-loss"]], "torch.nn.functional.max_pool1d": [[1657, "torch-nn-functional-max-pool1d"]], "torch.nn.functional.nll_loss": [[1668, "torch-nn-functional-nll-loss"]], "torch.nn.functional.hardtanh": [[1638, "torch-nn-functional-hardtanh"]], "torch.nn.functional.mse_loss": [[1664, "torch-nn-functional-mse-loss"]], "torch.nn.functional.rms_norm": [[1681, "torch-nn-functional-rms-norm"]], "torch.nn.functional.hinge_embedding_loss": [[1640, "torch-nn-functional-hinge-embedding-loss"]], "torch.nn.functional.gumbel_softmax": [[1634, "torch-nn-functional-gumbel-softmax"]], "torch.nn.functional.instance_norm": [[1642, "torch-nn-functional-instance-norm"]], "torch.nn.functional.group_norm": [[1633, "torch-nn-functional-group-norm"]], 
"torch.nn.functional.max_unpool1d": [[1660, "torch-nn-functional-max-unpool1d"]], "torch.nn.functional.lp_pool3d": [[1655, "torch-nn-functional-lp-pool3d"]], "torch.nn.functional.multi_margin_loss": [[1665, "torch-nn-functional-multi-margin-loss"]], "torch.nn.functional.leaky_relu": [[1647, "torch-nn-functional-leaky-relu"]], "torch.nn.functional.poisson_nll_loss": [[1676, "torch-nn-functional-poisson-nll-loss"]], "torch.nn.functional.kl_div": [[1644, "torch-nn-functional-kl-div"]], "torch.nn.functional.margin_ranking_loss": [[1656, "torch-nn-functional-margin-ranking-loss"]], "torch.nn.functional.max_pool3d": [[1659, "torch-nn-functional-max-pool3d"]], "torch.nn.functional.pdist": [[1673, "torch-nn-functional-pdist"]], "torch.nn.functional.relu": [[1678, "torch-nn-functional-relu"]], "torch.nn.functional.huber_loss": [[1641, "torch-nn-functional-huber-loss"]], "torch.nn.functional.relu6": [[1679, "torch-nn-functional-relu6"]], "torch.nn.functional.relu_": [[1680, "torch-nn-functional-relu"]], "torch.nn.functional.l1_loss": [[1645, "torch-nn-functional-l1-loss"]], "torch.nn.functional.prelu": [[1677, "torch-nn-functional-prelu"]], "torch.nn.functional.max_pool2d": [[1658, "torch-nn-functional-max-pool2d"]], "torch.nn.functional.hardtanh_": [[1639, "torch-nn-functional-hardtanh"]], "torch.nn.functional.linear": [[1649, "torch-nn-functional-linear"]], "torch.nn.functional.pairwise_distance": [[1672, "torch-nn-functional-pairwise-distance"]], "torch.nn.functional.normalize": [[1669, "torch-nn-functional-normalize"]], "torch.nn.functional.layer_norm": [[1646, "torch-nn-functional-layer-norm"]], "torch.nn.functional.log_softmax": [[1651, "torch-nn-functional-log-softmax"]], "torch.nn.functional.hardswish": [[1637, "torch-nn-functional-hardswish"]], "torch.nn.functional.grid_sample": [[1632, "torch-nn-functional-grid-sample"]], "torch.nn.functional.pad": [[1671, "torch-nn-functional-pad"]], "torch.nn.functional.multilabel_soft_margin_loss": [[1667, "torch-nn-functional-multilabel-soft-margin-loss"]], "torch.nn.functional.logsigmoid": [[1652, "torch-nn-functional-logsigmoid"]], "torch.nn.functional.interpolate": [[1643, "torch-nn-functional-interpolate"]], "torch.nn.functional.leaky_relu_": [[1648, "torch-nn-functional-leaky-relu"]], "torch.nn.functional.max_unpool2d": [[1661, "torch-nn-functional-max-unpool2d"]], "torch.nn.functional.max_unpool3d": [[1662, "torch-nn-functional-max-unpool3d"]], "torch.nn.functional.hardshrink": [[1635, "torch-nn-functional-hardshrink"]], "torch.nn.functional.mish": [[1663, "torch-nn-functional-mish"]], "torch.nn.functional.pixel_unshuffle": [[1675, "torch-nn-functional-pixel-unshuffle"]], "torch.nn.functional.rrelu": [[1682, "torch-nn-functional-rrelu"]], "torch.nn.functional.lp_pool2d": [[1654, "torch-nn-functional-lp-pool2d"]], "torch.nn.functional.avg_pool1d": [[1599, "torch-nn-functional-avg-pool1d"]], "torch.nn.functional.conv_transpose2d": [[1611, "torch-nn-functional-conv-transpose2d"]], "torch.nn.functional.avg_pool2d": [[1600, "torch-nn-functional-avg-pool2d"]], "torch.nn.functional.conv_transpose1d": [[1610, "torch-nn-functional-conv-transpose1d"]], "torch.nn.functional.gaussian_nll_loss": [[1629, "torch-nn-functional-gaussian-nll-loss"]], "torch.nn.functional.adaptive_avg_pool1d": [[1591, "torch-nn-functional-adaptive-avg-pool1d"]], "torch.nn.functional.conv_transpose3d": [[1612, "torch-nn-functional-conv-transpose3d"]], "torch.nn.functional.conv1d": [[1607, "torch-nn-functional-conv1d"]], "torch.nn.functional.affine_grid": [[1597, 
"torch-nn-functional-affine-grid"]], "torch.nn.functional.alpha_dropout": [[1598, "torch-nn-functional-alpha-dropout"]], "torch.nn.functional.cosine_embedding_loss": [[1613, "torch-nn-functional-cosine-embedding-loss"]], "torch.nn.functional.gelu": [[1630, "torch-nn-functional-gelu"]], "torch.nn.functional.dropout2d": [[1619, "torch-nn-functional-dropout2d"]], "torch.nn.functional.elu_": [[1622, "torch-nn-functional-elu"]], "torch.nn.functional.cosine_similarity": [[1614, "torch-nn-functional-cosine-similarity"]], "torch.nn.functional.binary_cross_entropy_with_logits": [[1605, "torch-nn-functional-binary-cross-entropy-with-logits"]], "torch.nn.functional.dropout3d": [[1620, "torch-nn-functional-dropout3d"]], "torch.nn.attention.bias.CausalBias": [[1586, "torch-nn-attention-bias-causalbias"]], "ZeroPad2d": [[1583, "zeropad2d"]], "torch.nn.functional.embedding_bag": [[1624, "torch-nn-functional-embedding-bag"]], "torch.nn.functional.fractional_max_pool2d": [[1627, "torch-nn-functional-fractional-max-pool2d"]], "torch.nn.functional.embedding": [[1623, "torch-nn-functional-embedding"]], "ZeroPad1d": [[1582, "zeropad1d"]], "torch.nn.functional.avg_pool3d": [[1601, "torch-nn-functional-avg-pool3d"]], "torch.nn.functional.glu": [[1631, "torch-nn-functional-glu"]], "CausalVariant": [[1587, "causalvariant"]], "torch.nn.attention.bias.causal_lower_right": [[1588, "torch-nn-attention-bias-causal-lower-right"]], "torch.nn.functional.adaptive_avg_pool3d": [[1593, "torch-nn-functional-adaptive-avg-pool3d"]], "ZeroPad3d": [[1584, "zeropad3d"]], "torch.nn.functional.cross_entropy": [[1615, "torch-nn-functional-cross-entropy"]], "torch.nn.functional.conv2d": [[1608, "torch-nn-functional-conv2d"]], "torch.nn.attention.bias.causal_upper_left": [[1589, "torch-nn-attention-bias-causal-upper-left"]], "torch.nn.functional.dropout": [[1617, "torch-nn-functional-dropout"]], "torch.nn.functional.adaptive_avg_pool2d": [[1592, "torch-nn-functional-adaptive-avg-pool2d"]], "torch.nn.functional.ctc_loss": [[1616, "torch-nn-functional-ctc-loss"]], "torch.nn.functional.celu": [[1606, "torch-nn-functional-celu"]], "torch.nn.functional.elu": [[1621, "torch-nn-functional-elu"]], "torch.nn.functional.fractional_max_pool3d": [[1628, "torch-nn-functional-fractional-max-pool3d"]], "torch.nn.functional.binary_cross_entropy": [[1604, "torch-nn-functional-binary-cross-entropy"]], "torch.nn.functional.feature_alpha_dropout": [[1625, "torch-nn-functional-feature-alpha-dropout"]], "torch.nn.functional.batch_norm": [[1602, "torch-nn-functional-batch-norm"]], "torch.nn.functional.adaptive_max_pool2d": [[1595, "torch-nn-functional-adaptive-max-pool2d"]], "torch.nn.functional.adaptive_max_pool3d": [[1596, "torch-nn-functional-adaptive-max-pool3d"]], "torch.nn.functional.bilinear": [[1603, "torch-nn-functional-bilinear"]], "torch.nn.attention.sdpa_kernel": [[1590, "torch-nn-attention-sdpa-kernel"]], "UpsamplingNearest2d": [[1581, "upsamplingnearest2d"]], "SDPBackend": [[1585, "sdpbackend"]], "torch.nn.functional.conv3d": [[1609, "torch-nn-functional-conv3d"]], "torch.nn.functional.fold": [[1626, "torch-nn-functional-fold"]], "torch.nn.functional.dropout1d": [[1618, "torch-nn-functional-dropout1d"]], "torch.nn.functional.adaptive_max_pool1d": [[1594, "torch-nn-functional-adaptive-max-pool1d"]], "torch.msort": [[1395, "torch-msort"]], "torch.mtia.set_stream": [[1408, "torch-mtia-set-stream"]], "torch.nan_to_num": [[1416, "torch-nan-to-num"]], "torch.mtia.is_initialized": [[1407, "torch-mtia-is-initialized"]], "torch.ne": [[1423, 
"torch-ne"]], "torch.nanmean": [[1417, "torch-nanmean"]], "torch.mtia.current_device": [[1400, "torch-mtia-current-device"]], "torch.multinomial": [[1412, "torch-multinomial"]], "AdaptiveAvgPool1d": [[1427, "adaptiveavgpool1d"]], "torch.nanquantile": [[1419, "torch-nanquantile"]], "torch.mps.get_rng_state": [[1386, "torch-mps-get-rng-state"]], "torch.mtia.is_available": [[1406, "torch-mtia-is-available"]], "torch.mtia.stream": [[1409, "torch-mtia-stream"]], "torch.mtia.synchronize": [[1410, "torch-mtia-synchronize"]], "torch.narrow": [[1421, "torch-narrow"]], "torch.nanmedian": [[1418, "torch-nanmedian"]], "torch.moveaxis": [[1379, "torch-moveaxis"]], "torch.mps.seed": [[1391, "torch-mps-seed"]], "torch.mps.driver_allocated_memory": [[1383, "torch-mps-driver-allocated-memory"]], "torch.mps.current_allocated_memory": [[1381, "torch-mps-current-allocated-memory"]], "torch.mtia.current_stream": [[1401, "torch-mtia-current-stream"]], "torch.mul": [[1411, "torch-mul"]], "torch.mps.profiler.start": [[1389, "torch-mps-profiler-start"]], "torch.multiply": [[1413, "torch-multiply"]], "torch.mm": [[1377, "torch-mm"]], "torch.nansum": [[1420, "torch-nansum"]], "torch.mode": [[1378, "torch-mode"]], "torch.mps.device_count": [[1382, "torch-mps-device-count"]], "torch.mps.profiler.stop": [[1390, "torch-mps-profiler-stop"]], "torch.mtia.default_stream": [[1402, "torch-mtia-default-stream"]], "torch.mtia.device_count": [[1404, "torch-mtia-device-count"]], "torch.mps.set_per_process_memory_fraction": [[1392, "torch-mps-set-per-process-memory-fraction"]], "torch.mps.profiler.profile": [[1388, "torch-mps-profiler-profile"]], "torch.mps.manual_seed": [[1387, "torch-mps-manual-seed"]], "torch.narrow_copy": [[1422, "torch-narrow-copy"]], "torch.nextafter": [[1426, "torch-nextafter"]], "torch.mv": [[1414, "torch-mv"]], "torch.mvlgamma": [[1415, "torch-mvlgamma"]], "torch.mps.synchronize": [[1394, "torch-mps-synchronize"]], "torch.mtia.DeferredMtiaCallError": [[1396, "torch-mtia-deferredmtiacallerror"]], "torch.mps.set_rng_state": [[1393, "torch-mps-set-rng-state"]], "torch.negative": [[1425, "torch-negative"]], "torch.neg": [[1424, "torch-neg"]], "torch.movedim": [[1380, "torch-movedim"]], "torch.mtia.init": [[1405, "torch-mtia-init"]], "torch.mps.empty_cache": [[1384, "torch-mps-empty-cache"]], "PixelShuffle": [[1538, "pixelshuffle"]], "Tanhshrink": [[1568, "tanhshrink"]], "NLLLoss": [[1533, "nllloss"]], "RNNBase": [[1543, "rnnbase"]], "ReLU": [[1546, "relu"]], "SELU": [[1554, "selu"]], "RNN": [[1542, "rnn"]], "ParameterList": [[1537, "parameterlist"]], "ReflectionPad2d": [[1549, "reflectionpad2d"]], "SiLU": [[1556, "silu"]], "PixelUnshuffle": [[1539, "pixelunshuffle"]], "MultiMarginLoss": [[1531, "multimarginloss"]], "Softmax": [[1560, "softmax"]], "ReplicationPad1d": [[1551, "replicationpad1d"]], "ReplicationPad2d": [[1552, "replicationpad2d"]], "Softshrink": [[1564, "softshrink"]], "TransformerEncoder": [[1573, "transformerencoder"]], "MultiheadAttention": [[1532, "multiheadattention"], [737, "multiheadattention"]], "TransformerDecoderLayer": [[1572, "transformerdecoderlayer"]], "PReLU": [[1534, "prelu"]], "Softplus": [[1563, "softplus"]], "Softsign": [[1565, "softsign"]], "Unflatten": [[1577, "unflatten"]], "Unfold": [[1578, "unfold"]], "PoissonNLLLoss": [[1540, "poissonnllloss"]], "UpsamplingBilinear2d": [[1580, "upsamplingbilinear2d"]], "ReflectionPad3d": [[1550, "reflectionpad3d"]], "SyncBatchNorm": [[1566, "syncbatchnorm"]], "Softmax2d": [[1561, "softmax2d"]], "Sequential": [[1555, "sequential"]], 
"TripletMarginLoss": [[1575, "tripletmarginloss"]], "Tanh": [[1567, "tanh"]], "Softmin": [[1562, "softmin"]], "Sigmoid": [[1557, "sigmoid"], [761, "sigmoid"]], "PairwiseDistance": [[1535, "pairwisedistance"]], "SmoothL1Loss": [[1558, "smoothl1loss"]], "MultiLabelSoftMarginLoss": [[1530, "multilabelsoftmarginloss"]], "SoftMarginLoss": [[1559, "softmarginloss"]], "RNNCell": [[1544, "rnncell"], [767, "rnncell"]], "ReflectionPad1d": [[1548, "reflectionpad1d"]], "TripletMarginWithDistanceLoss": [[1576, "tripletmarginwithdistanceloss"]], "ReplicationPad3d": [[1553, "replicationpad3d"]], "TransformerEncoderLayer": [[1574, "transformerencoderlayer"]], "RReLU": [[1545, "rrelu"]], "ParameterDict": [[1536, "parameterdict"]], "ReLU6": [[1547, "relu6"], [760, "relu6"]], "Threshold": [[1569, "threshold"]], "Upsample": [[1579, "upsample"]], "TransformerDecoder": [[1571, "transformerdecoder"]], "torch.mps": [[2030, "module-torch.mps"]], "MPS Profiler": [[2030, "mps-profiler"]], "MPS Event": [[2030, "mps-event"]], "torch.nn": [[2036, "module-torch.nn"], [2036, "id1"]], "Containers": [[2036, "containers"]], "Convolution Layers": [[2036, "convolution-layers"]], "Pooling layers": [[2036, "pooling-layers"]], "Padding Layers": [[2036, "padding-layers"]], "Non-linear Activations (weighted sum, nonlinearity)": [[2036, "non-linear-activations-weighted-sum-nonlinearity"]], "Non-linear Activations (other)": [[2036, "non-linear-activations-other"]], "Normalization Layers": [[2036, "normalization-layers"]], "Recurrent Layers": [[2036, "recurrent-layers"]], "Transformer Layers": [[2036, "transformer-layers"]], "Linear Layers": [[2036, "linear-layers"]], "Dropout Layers": [[2036, "dropout-layers"]], "Sparse Layers": [[2036, "sparse-layers"]], "Distance Functions": [[2036, "distance-functions"]], "Loss Functions": [[2036, "loss-functions"]], "Vision Layers": [[2036, "vision-layers"]], "Shuffle Layers": [[2036, "shuffle-layers"]], "DataParallel Layers (multi-GPU, distributed)": [[2036, "module-torch.nn.parallel"]], "Utilities": [[2036, "module-torch.nn.utils"], [2089, "utilities"]], "Quantized Functions": [[2036, "quantized-functions"]], "Lazy Modules Initialization": [[2036, "lazy-modules-initialization"]], "Aliases": [[2036, "aliases"]], "torch.xpu.init": [[1995, "torch-xpu-init"]], "torch.xpu.seed": [[2001, "torch-xpu-seed"]], "torch.xpu.get_device_capability": [[1990, "torch-xpu-get-device-capability"]], "torch.xpu.seed_all": [[2002, "torch-xpu-seed-all"]], "torch.xpu.synchronize": [[2008, "torch-xpu-synchronize"]], "Named Tensors": [[2034, "named-tensors"]], "Creating named tensors": [[2034, "creating-named-tensors"]], "Named dimensions": [[2034, "named-dimensions"]], "Name propagation semantics": [[2034, "name-propagation-semantics"]], "match semantics": [[2034, "match-semantics"]], "Basic name inference rules": [[2034, "basic-name-inference-rules"]], "Explicit alignment by names": [[2034, "explicit-alignment-by-names"]], "Manipulating dimensions": [[2034, "manipulating-dimensions"]], "Autograd support": [[2034, "autograd-support"]], "Currently supported operations and subsystems": [[2034, "currently-supported-operations-and-subsystems"]], "Operators": [[2034, "operators"]], "Subsystems": [[2034, "subsystems"]], "Named tensor API reference": [[2034, "named-tensor-api-reference"]], "torch.hub": [[2011, "torch-hub"]], "Publishing models": [[2011, "publishing-models"]], "How to implement an entrypoint?": [[2011, "how-to-implement-an-entrypoint"]], "Important Notice": [[2011, "important-notice"]], "Loading models from 
Hub": [[2011, "loading-models-from-hub"]], "Running a loaded model:": [[2011, "running-a-loaded-model"]], "Where are my downloaded models saved?": [[2011, "where-are-my-downloaded-models-saved"]], "Caching logic": [[2011, "caching-logic"]], "Known limitations:": [[2011, "known-limitations"]], "torch.masked": [[2023, "torch-masked"]], "What is a MaskedTensor?": [[2023, "what-is-a-maskedtensor"]], "Supported Operators": [[2023, "supported-operators"]], "Unary Operators": [[2023, "unary-operators"]], "Binary Operators": [[2023, "binary-operators"]], "Reductions": [[2023, "reductions"]], "View and select functions": [[2023, "view-and-select-functions"]], "JIT Utils - torch.utils.jit": [[2019, "module-torch.utils.jit"]], "torch._logging": [[2022, "torch-logging"]], "PyTorch documentation": [[2012, "pytorch-documentation"]], "Community": [[2012, null]], "Developer Notes": [[2012, null]], "Language Bindings": [[2012, null]], "Python API": [[2012, null], [2065, "module-torch.onnx"]], "Libraries": [[2012, null]], "Indices and tables": [[2012, "indices-and-tables"]], "TorchScript Unsupported PyTorch Constructs": [[2018, "torchscript-unsupported-pytorch-constructs"]], "Torch and Tensor Unsupported Attributes": [[2018, "torch-and-tensor-unsupported-attributes"]], "Unsupported Tensor Methods": [[2018, "unsupported-tensor-methods"]], "Unsupported Tensor Properties": [[2018, "unsupported-tensor-properties"]], "Functions Not Correctly Bound on Torch": [[2018, "functions-not-correctly-bound-on-torch"]], "Ops With Divergent Schemas Between Torch & Python": [[2018, "ops-with-divergent-schemas-between-torch-python"]], "PyTorch Unsupported Modules and Classes": [[2018, "pytorch-unsupported-modules-and-classes"]], "torch.xpu.get_rng_state": [[1993, "torch-xpu-get-rng-state"]], "Multiprocessing package - torch.multiprocessing": [[2032, "module-torch.multiprocessing"]], "Strategy management": [[2032, "strategy-management"]], "Sharing CUDA tensors": [[2032, "sharing-cuda-tensors"]], "Sharing strategies": [[2032, "sharing-strategies"]], "File descriptor - file_descriptor": [[2032, "file-descriptor-file-descriptor"]], "File system - file_system": [[2032, "file-system-file-system"]], "Spawning subprocesses": [[2032, "spawning-subprocesses"]], "TorchScript Language Reference": [[2015, "torchscript-language-reference"], [2016, "torchscript-language-reference"]], "Types": [[2015, "supported-type"], [2065, "types"]], "Unsupported Typing Constructs": [[2015, "unsupported-typing-constructs"], [2016, "unsupported-typing-constructs"]], "Default Types": [[2015, "default-types"]], "Optional Type Refinement": [[2015, "optional-type-refinement"]], "TorchScript Classes": [[2015, "id2"], [2013, "torchscript-classes"]], "TorchScript Enums": [[2015, "id4"]], "Named Tuples": [[2015, "named-tuples"]], "Iterables": [[2015, "iterables"]], "Expressions": [[2015, "expressions"], [2016, "expressions"]], "Literals": [[2015, "literals"], [2016, "literals"]], "List Construction": [[2015, "list-construction"]], "Tuple Construction": [[2015, "tuple-construction"]], "Dict Construction": [[2015, "dict-construction"]], "Variables": [[2015, "variables"], [2013, "variables"]], "Arithmetic Operators": [[2015, "arithmetic-operators"]], "Comparison Operators": [[2015, "comparison-operators"]], "Logical Operators": [[2015, "logical-operators"]], "Subscripts and Slicing": [[2015, "subscripts-and-slicing"]], "Function Calls": [[2015, "function-calls"]], "Method Calls": [[2015, "method-calls"]], "Ternary Expressions": [[2015, "ternary-expressions"]], 
"Casts": [[2015, "casts"]], "Accessing Module Parameters": [[2015, "accessing-module-parameters"]], "Statements": [[2015, "statements"]], "Simple Assignments": [[2015, "simple-assignments"]], "Pattern Matching Assignments": [[2015, "pattern-matching-assignments"]], "Print Statements": [[2015, "print-statements"]], "If Statements": [[2015, "if-statements"]], "While Loops": [[2015, "while-loops"]], "For loops with range": [[2015, "for-loops-with-range"]], "For loops over tuples": [[2015, "for-loops-over-tuples"]], "For loops over constant nn.ModuleList": [[2015, "for-loops-over-constant-nn-modulelist"]], "Break and Continue": [[2015, "break-and-continue"]], "Return": [[2015, "return"]], "Variable Resolution": [[2015, "variable-resolution"]], "Use of Python Values": [[2015, "use-of-python-values"]], "Attribute Lookup On Python Modules": [[2015, "attribute-lookup-on-python-modules"]], "Python-defined Constants": [[2015, "python-defined-constants"]], "Module Attributes": [[2015, "module-attributes"]], "torch.xpu.initial_seed": [[1996, "torch-xpu-initial-seed"]], "torch.xpu.set_rng_state": [[2004, "torch-xpu-set-rng-state"]], "Python Language Reference Coverage": [[2017, "python-language-reference-coverage"]], "torch.mtia": [[2031, "torch-mtia"]], "torch.xpu.is_initialized": [[1998, "torch-xpu-is-initialized"]], "torch.xpu.set_device": [[2003, "torch-xpu-set-device"]], "Meta device": [[2024, "meta-device"]], "Idioms for working with meta tensors": [[2024, "idioms-for-working-with-meta-tensors"]], "torch.utils.mobile_optimizer": [[2026, "torch-utils-mobile-optimizer"]], "torch.xpu.get_device_properties": [[1992, "torch-xpu-get-device-properties"]], "torch.xpu.manual_seed_all": [[2000, "torch-xpu-manual-seed-all"]], "torch.nn.functional": [[2039, "torch-nn-functional"]], "Convolution functions": [[2039, "convolution-functions"]], "Pooling functions": [[2039, "pooling-functions"]], "Attention Mechanisms": [[2039, "attention-mechanisms"]], "Non-linear activation functions": [[2039, "non-linear-activation-functions"]], "Linear functions": [[2039, "linear-functions"]], "Dropout functions": [[2039, "dropout-functions"]], "Sparse functions": [[2039, "sparse-functions"]], "Distance functions": [[2039, "distance-functions"]], "Loss functions": [[2039, "loss-functions"]], "Vision functions": [[2039, "vision-functions"]], "DataParallel functions (multi-GPU, distributed)": [[2039, "dataparallel-functions-multi-gpu-distributed"]], "data_parallel": [[2039, "data-parallel"]], "torch.xpu.set_stream": [[2006, "torch-xpu-set-stream"]], "torch.xpu.get_rng_state_all": [[1994, "torch-xpu-get-rng-state-all"]], "torch.linalg": [[2021, "torch-linalg"]], "Matrix Properties": [[2021, "matrix-properties"]], "Decompositions": [[2021, "decompositions"]], "Solvers": [[2021, "solvers"]], "Inverses": [[2021, "inverses"]], "Matrix Functions": [[2021, "matrix-functions"]], "Matrix Products": [[2021, "matrix-products"]], "Tensor Operations": [[2021, "tensor-operations"]], "Misc": [[2021, "misc"]], "Experimental Functions": [[2021, "experimental-functions"]], "torch.xpu.manual_seed": [[1999, "torch-xpu-manual-seed"]], "Miscellaneous Environment Variables": [[2025, "miscellaneous-environment-variables"]], "torch.xpu.get_device_name": [[1991, "torch-xpu-get-device-name"]], "TorchScript": [[2013, "torchscript"]], "Creating TorchScript Code": [[2013, "creating-torchscript-code"]], "Mixing Tracing and Scripting": [[2013, "mixing-tracing-and-scripting"]], "TorchScript Language": [[2013, "torchscript-language"]], "Built-in Functions and 
Modules": [[2013, "built-in-functions-and-modules"]], "PyTorch Functions and Modules": [[2013, "pytorch-functions-and-modules"]], "Python Functions and Modules": [[2013, "python-functions-and-modules"]], "Python Language Reference Comparison": [[2013, "python-language-reference-comparison"]], "Disable JIT for Debugging": [[2013, "disable-jit-for-debugging"]], "Inspecting Code": [[2013, "inspecting-code"]], "Interpreting Graphs": [[2013, "interpreting-graphs"]], "Tracer": [[2013, "tracer"]], "Tracing Edge Cases": [[2013, "tracing-edge-cases"]], "Automatic Trace Checking": [[2013, "automatic-trace-checking"]], "Tracer Warnings": [[2013, "tracer-warnings"]], "Known Issues": [[2013, "known-issues"]], "Appendix": [[2013, "appendix"]], "Migrating to PyTorch 1.2 Recursive Scripting API": [[2013, "migrating-to-pytorch-1-2-recursive-scripting-api"]], "Modules": [[2013, "modules"], [2055, "modules"]], "Attributes": [[2013, "attributes"]], "Constants": [[2013, "constants"]], "Fusion Backends": [[2013, "fusion-backends"]], "torch.xpu.empty_cache": [[1989, "torch-xpu-empty-cache"]], "torch.zeros_like": [[2010, "torch-zeros-like"]], "Terminology": [[2016, "terminology"]], "Type System": [[2016, "id1"]], "TorchScript Types": [[2016, "torchscript-types"]], "Meta Types": [[2016, "meta-types"]], "Any Type": [[2016, "any-type"]], "Operators Supported for Any Type": [[2016, "operators-supported-for-any-type"]], "Design Notes": [[2016, "design-notes"], [2075, "design-notes"]], "Primitive Types": [[2016, "primitive-types"]], "Structural Types": [[2016, "structural-types"]], "Nominal Types": [[2016, "nominal-types"]], "Built-in Class": [[2016, "built-in-class"]], "Special Note on torch.nn.ModuleList and torch.nn.ModuleDict": [[2016, "special-note-on-torch-nn-modulelist-and-torch-nn-moduledict"]], "Custom Class": [[2016, "custom-class"]], "Enum Type": [[2016, "enum-type"]], "TorchScript Module Class": [[2016, "torchscript-module-class"]], "Module Instance Class": [[2016, "module-instance-class"]], "Type Annotation": [[2016, "type-annotation"]], "When to Annotate Types": [[2016, "when-to-annotate-types"]], "Annotate Function Signature": [[2016, "annotate-function-signature"]], "Annotate Variables and Data Attributes": [[2016, "annotate-variables-and-data-attributes"]], "Local Variables": [[2016, "local-variables"]], "Instance Data Attributes": [[2016, "instance-data-attributes"]], "Type Annotation APIs": [[2016, "type-annotation-apis"]], "torch.jit.annotate(T, expr)": [[2016, "torch-jit-annotate-t-expr"]], "Type Annotation Appendix": [[2016, "type-annotation-appendix"]], "TorchScript Type System Definition": [[2016, "torchscript-type-system-definition"]], "Arithmetic Conversions": [[2016, "arithmetic-conversions"]], "Atoms": [[2016, "atoms"]], "Identifiers": [[2016, "identifiers"]], "Parenthesized Forms": [[2016, "parenthesized-forms"]], "List and Dictionary Displays": [[2016, "list-and-dictionary-displays"]], "Primaries": [[2016, "primaries"]], "Attribute References": [[2016, "attribute-references"]], "Subscriptions": [[2016, "subscriptions"]], "Slicings": [[2016, "slicings"]], "Calls": [[2016, "calls"]], "Power Operator": [[2016, "power-operator"]], "Unary and Arithmetic Bitwise Operations": [[2016, "unary-and-arithmetic-bitwise-operations"]], "Binary Arithmetic Operations": [[2016, "binary-arithmetic-operations"]], "Shifting Operations": [[2016, "shifting-operations"]], "Binary Bitwise Operations": [[2016, "binary-bitwise-operations"]], "Value Comparisons": [[2016, "value-comparisons"]], "Membership Test 
Operations": [[2016, "membership-test-operations"]], "Identity Comparisons": [[2016, "identity-comparisons"]], "Boolean Operations": [[2016, "boolean-operations"]], "Conditional Expressions": [[2016, "conditional-expressions"]], "Expression Lists": [[2016, "expression-lists"]], "Simple Statements": [[2016, "simple-statements"]], "Expression Statements": [[2016, "expression-statements"]], "Assignment Statements": [[2016, "assignment-statements"]], "Augmented Assignment Statements": [[2016, "augmented-assignment-statements"]], "Annotated Assignment Statements": [[2016, "annotated-assignment-statements"]], "The raise Statement": [[2016, "the-raise-statement"]], "The assert Statement": [[2016, "the-assert-statement"]], "The return Statement": [[2016, "the-return-statement"]], "The del Statement": [[2016, "the-del-statement"]], "The pass Statement": [[2016, "the-pass-statement"]], "The print Statement": [[2016, "the-print-statement"]], "The break Statement": [[2016, "the-break-statement"]], "The continue Statement:": [[2016, "the-continue-statement"]], "Compound Statements": [[2016, "compound-statements"]], "The if Statement": [[2016, "the-if-statement"]], "Basic if/else Statement": [[2016, "basic-if-else-statement"]], "Ternary if/else Statement": [[2016, "ternary-if-else-statement"]], "The while Statement": [[2016, "the-while-statement"]], "The for-in Statement": [[2016, "the-for-in-statement"]], "The with Statement": [[2016, "the-with-statement"]], "The tuple Statement": [[2016, "the-tuple-statement"]], "The getattr Statement": [[2016, "the-getattr-statement"]], "The hasattr Statement": [[2016, "the-hasattr-statement"]], "The zip Statement": [[2016, "the-zip-statement"]], "The enumerate Statement": [[2016, "the-enumerate-statement"]], "Python Values": [[2016, "python-values"]], "Resolution Rules": [[2016, "resolution-rules"]], "Python Built-in Functions Support": [[2016, "python-built-in-functions-support"]], "TorchScript Support for Python Built-in Functions": [[2016, "id5"]], "Python Built-in Values Support": [[2016, "python-built-in-values-support"]], "TorchScript Support for Python Built-in Values": [[2016, "id6"]], "torch.* APIs": [[2016, "torch-apis"]], "Remote Procedure Calls": [[2016, "remote-procedure-calls"]], "Asynchronous Execution": [[2016, "asynchronous-execution"]], "Type Annotations": [[2016, "type-annotations"]], "Meta Programming": [[2016, "meta-programming"]], "Type Refinement": [[2016, "type-refinement"]], "torch.nested": [[2035, "module-torch.nested"]], "Construction": [[2035, "construction"], [2080, "construction"]], "size": [[2035, "size"]], "unbind": [[2035, "unbind"]], "Nested tensor constructor and conversion functions": [[2035, "nested-tensor-constructor-and-conversion-functions"]], "Supported operations": [[2035, "supported-operations"], [2080, "supported-operations"]], "TorchScript Builtins": [[2014, "torchscript-builtins"]], "Supported Tensor Methods": [[2014, "supported-tensor-methods"]], "Supported PyTorch Functions": [[2014, "supported-pytorch-functions"]], "TorchScript Builtin Functions": [[2014, "torchscript-builtin-functions"]], "Python Built-in Functions": [[2014, "python-built-in-functions"]], "math Module": [[2014, "math-module"]], "torch.nn.attention.bias": [[2038, "module-torch.nn.attention.bias"]], "CausalBias": [[2038, "causalbias"]], "torch.xpu.is_available": [[1997, "torch-xpu-is-available"]], "torch.zeros": [[2009, "torch-zeros"]], "torch.utils.model_zoo": [[2027, "torch-utils-model-zoo"]], "torch.xpu.stream": [[2007, "torch-xpu-stream"]], "Named 
Tensors operator coverage": [[2033, "named-tensors-operator-coverage"]], "Supported Operations": [[2033, "id1"]], "Keeps input names": [[2033, "keeps-input-names"]], "Removes dimensions": [[2033, "removes-dimensions"]], "Unifies names from inputs": [[2033, "unifies-names-from-inputs"]], "Permutes dimensions": [[2033, "permutes-dimensions"]], "Contracts away dims": [[2033, "contracts-away-dims"]], "Factory functions": [[2033, "factory-functions"]], "out function and in-place variants": [[2033, "out-function-and-in-place-variants"]], "torch.xpu.set_rng_state_all": [[2005, "torch-xpu-set-rng-state-all"]], "torch.monitor": [[2029, "torch-monitor"]], "torch.nn.attention": [[2037, "module-torch.nn.attention"]], "Utils": [[2037, "utils"]], "Submodules": [[2037, "submodules"]], "torch.utils.module_tracker": [[2028, "module-torch.utils.module_tracker"]], "torch.library": [[2020, "module-torch.library"]], "Testing custom ops": [[2020, "testing-custom-ops"]], "Creating new custom ops in Python": [[2020, "creating-new-custom-ops-in-python"]], "Extending custom ops (created from Python or C++)": [[2020, "extending-custom-ops-created-from-python-or-c"]], "Low-level APIs": [[2020, "low-level-apis"]], "LPPool1d": [[1493, "lppool1d"]], "LazyConvTranspose2d": [[1506, "lazyconvtranspose2d"]], "MaxPool1d": [[1519, "maxpool1d"]], "MaxUnpool1d": [[1522, "maxunpool1d"]], "LazyLinear": [[1511, "lazylinear"]], "LazyConv3d": [[1504, "lazyconv3d"]], "MaxUnpool3d": [[1524, "maxunpool3d"]], "LPPool2d": [[1494, "lppool2d"]], "Hardtanh": [[1484, "hardtanh"]], "MSELoss": [[1517, "mseloss"]], "KLDivLoss": [[1491, "kldivloss"]], "LazyBatchNorm1d": [[1499, "lazybatchnorm1d"]], "MarginRankingLoss": [[1518, "marginrankingloss"]], "Module": [[1526, "module"]], "LogSigmoid": [[1515, "logsigmoid"]], "ModuleList": [[1528, "modulelist"]], "HuberLoss": [[1486, "huberloss"]], "Mish": [[1525, "mish"]], "LSTMCell": [[1497, "lstmcell"], [765, "lstmcell"]], "InstanceNorm2d": [[1489, "instancenorm2d"], [754, "instancenorm2d"]], "Hardsigmoid": [[1482, "hardsigmoid"]], "LazyBatchNorm3d": [[1501, "lazybatchnorm3d"]], "MaxPool3d": [[1521, "maxpool3d"]], "LPPool3d": [[1495, "lppool3d"]], "LazyConvTranspose3d": [[1507, "lazyconvtranspose3d"]], "LocalResponseNorm": [[1514, "localresponsenorm"]], "MaxUnpool2d": [[1523, "maxunpool2d"]], "HingeEmbeddingLoss": [[1485, "hingeembeddingloss"]], "LazyConvTranspose1d": [[1505, "lazyconvtranspose1d"]], "LSTM": [[1496, "lstm"], [736, "lstm"], [764, "lstm"]], "GaussianNLLLoss": [[1479, "gaussiannllloss"]], "LogSoftmax": [[1516, "logsoftmax"]], "LayerNorm": [[1498, "layernorm"], [756, "layernorm"]], "Linear": [[1513, "linear"], [766, "linear"], [758, "linear"], [734, "linear"], [735, "linear"]], "Hardswish": [[1483, "hardswish"], [752, "hardswish"]], "LeakyReLU": [[1512, "leakyrelu"], [757, "leakyrelu"]], "InstanceNorm3d": [[1490, "instancenorm3d"], [755, "instancenorm3d"]], "LazyInstanceNorm1d": [[1508, "lazyinstancenorm1d"]], "InstanceNorm1d": [[1488, "instancenorm1d"], [753, "instancenorm1d"]], "Hardshrink": [[1481, "hardshrink"]], "L1Loss": [[1492, "l1loss"]], "LazyBatchNorm2d": [[1500, "lazybatchnorm2d"]], "LazyConv2d": [[1503, "lazyconv2d"]], "ModuleDict": [[1527, "moduledict"]], "MaxPool2d": [[1520, "maxpool2d"]], "GroupNorm": [[1480, "groupnorm"], [751, "groupnorm"]], "MultiLabelMarginLoss": [[1529, "multilabelmarginloss"]], "LazyConv1d": [[1502, "lazyconv1d"]], "LazyInstanceNorm2d": [[1509, "lazyinstancenorm2d"]], "LazyInstanceNorm3d": [[1510, "lazyinstancenorm3d"]], "AvgPool3d": [[1437, 
"avgpool3d"]], "FeatureAlphaDropout": [[1470, "featurealphadropout"]], "BCELoss": [[1438, "bceloss"]], "BatchNorm3d": [[1442, "batchnorm3d"], [739, "batchnorm3d"]], "AdaptiveAvgPool2d": [[1428, "adaptiveavgpool2d"]], "Dropout3d": [[1466, "dropout3d"]], "CircularPad2d": [[1448, "circularpad2d"]], "ConvTranspose3d": [[1458, "convtranspose3d"], [745, "convtranspose3d"]], "EmbeddingBag": [[1469, "embeddingbag"], [748, "embeddingbag"]], "Embedding": [[1468, "embedding"], [747, "embedding"]], "AdaptiveMaxPool3d": [[1433, "adaptivemaxpool3d"]], "AdaptiveMaxPool1d": [[1431, "adaptivemaxpool1d"]], "CosineEmbeddingLoss": [[1459, "cosineembeddingloss"]], "AdaptiveMaxPool2d": [[1432, "adaptivemaxpool2d"]], "CosineSimilarity": [[1460, "cosinesimilarity"]], "AlphaDropout": [[1434, "alphadropout"]], "BatchNorm1d": [[1440, "batchnorm1d"]], "AvgPool2d": [[1436, "avgpool2d"]], "Bilinear": [[1443, "bilinear"]], "GELU": [[1475, "gelu"]], "BatchNorm2d": [[1441, "batchnorm2d"], [738, "batchnorm2d"]], "BCEWithLogitsLoss": [[1439, "bcewithlogitsloss"]], "Conv3d": [[1455, "conv3d"], [733, "conv3d"], [742, "conv3d"]], "ConvTranspose1d": [[1456, "convtranspose1d"], [743, "convtranspose1d"]], "DataParallel": [[1462, "dataparallel"]], "Conv2d": [[1454, "conv2d"], [741, "conv2d"], [732, "conv2d"]], "CrossEntropyLoss": [[1461, "crossentropyloss"]], "FractionalMaxPool2d": [[1473, "fractionalmaxpool2d"]], "AdaptiveLogSoftmaxWithLoss": [[1430, "adaptivelogsoftmaxwithloss"]], "Conv1d": [[1453, "conv1d"], [740, "conv1d"]], "ConvTranspose2d": [[1457, "convtranspose2d"], [744, "convtranspose2d"]], "Fold": [[1472, "fold"]], "CircularPad3d": [[1449, "circularpad3d"]], "ELU": [[1467, "elu"], [746, "elu"]], "FractionalMaxPool3d": [[1474, "fractionalmaxpool3d"]], "GRU": [[1477, "gru"], [762, "gru"]], "GLU": [[1476, "glu"]], "CircularPad1d": [[1447, "circularpad1d"]], "ConstantPad3d": [[1452, "constantpad3d"]], "Dropout": [[1463, "dropout"]], "ChannelShuffle": [[1446, "channelshuffle"]], "ConstantPad1d": [[1450, "constantpad1d"]], "GRUCell": [[1478, "grucell"], [763, "grucell"]], "AvgPool1d": [[1435, "avgpool1d"]], "ConstantPad2d": [[1451, "constantpad2d"]], "CELU": [[1444, "celu"]], "AdaptiveAvgPool3d": [[1429, "adaptiveavgpool3d"]], "CTCLoss": [[1445, "ctcloss"]], "Dropout2d": [[1465, "dropout2d"]], "Dropout1d": [[1464, "dropout1d"]], "Flatten": [[1471, "flatten"]], "torch.linalg.qr": [[1331, "torch-linalg-qr"]], "torch.minimum": [[1376, "torch-minimum"]], "torch.meshgrid": [[1374, "torch-meshgrid"]], "torch.log2": [[1349, "torch-log2"]], "torch.logit": [[1358, "torch-logit"]], "torch.linalg.tensorinv": [[1338, "torch-linalg-tensorinv"]], "torch.mean": [[1372, "torch-mean"]], "torch.median": [[1373, "torch-median"]], "torch.lu": [[1362, "torch-lu"]], "torch.linalg.solve_ex": [[1334, "torch-linalg-solve-ex"]], "torch.linalg.vecdot": [[1341, "torch-linalg-vecdot"]], "torch.linalg.multi_dot": [[1328, "torch-linalg-multi-dot"]], "torch.linalg.svdvals": [[1337, "torch-linalg-svdvals"]], "torch.linalg.solve_triangular": [[1335, "torch-linalg-solve-triangular"]], "torch.lobpcg": [[1345, "torch-lobpcg"]], "torch.linalg.slogdet": [[1332, "torch-linalg-slogdet"]], "torch.masked_select": [[1366, "torch-masked-select"]], "torch.linalg.norm": [[1329, "torch-linalg-norm"]], "torch.logical_or": [[1356, "torch-logical-or"]], "torch.matmul": [[1367, "torch-matmul"]], "torch.linalg.matrix_power": [[1326, "torch-linalg-matrix-power"]], "torch.linalg.pinv": [[1330, "torch-linalg-pinv"]], "torch.matrix_exp": [[1368, "torch-matrix-exp"]], 
"torch.linalg.solve": [[1333, "torch-linalg-solve"]], "torch.logcumsumexp": [[1352, "torch-logcumsumexp"]], "torch.log": [[1346, "torch-log"]], "torch.linalg.tensorsolve": [[1339, "torch-linalg-tensorsolve"]], "torch.lt": [[1361, "torch-lt"]], "torch.manual_seed": [[1365, "torch-manual-seed"]], "torch.lu_solve": [[1363, "torch-lu-solve"]], "torch.linalg.svd": [[1336, "torch-linalg-svd"]], "torch.logical_not": [[1355, "torch-logical-not"]], "torch.linspace": [[1343, "torch-linspace"]], "torch.max": [[1370, "torch-max"]], "torch.logical_xor": [[1357, "torch-logical-xor"]], "torch.log10": [[1347, "torch-log10"]], "torch.logical_and": [[1354, "torch-logical-and"]], "torch.logdet": [[1353, "torch-logdet"]], "torch.lu_unpack": [[1364, "torch-lu-unpack"]], "torch.linalg.vander": [[1340, "torch-linalg-vander"]], "torch.matrix_power": [[1369, "torch-matrix-power"]], "torch.maximum": [[1371, "torch-maximum"]], "torch.log1p": [[1348, "torch-log1p"]], "torch.linalg.matrix_rank": [[1327, "torch-linalg-matrix-rank"]], "torch.logspace": [[1359, "torch-logspace"]], "torch.logsumexp": [[1360, "torch-logsumexp"]], "torch.linalg.vector_norm": [[1342, "torch-linalg-vector-norm"]], "torch.load": [[1344, "torch-load"]], "torch.logaddexp": [[1350, "torch-logaddexp"]], "torch.logaddexp2": [[1351, "torch-logaddexp2"]], "torch.min": [[1375, "torch-min"]], "torch.linalg.ldl_solve": [[1317, "torch-linalg-ldl-solve"]], "torch.jit.isinstance": [[1279, "torch-jit-isinstance"]], "torch.kaiser_window": [[1292, "torch-kaiser-window"]], "torch.linalg.lu": [[1319, "torch-linalg-lu"]], "torch.linalg.cholesky": [[1302, "torch-linalg-cholesky"]], "torch.jit.set_fusion_strategy": [[1286, "torch-jit-set-fusion-strategy"]], "torch.jit.unused": [[1290, "torch-jit-unused"]], "torch.kthvalue": [[1294, "torch-kthvalue"]], "torch.linalg.ldl_factor": [[1315, "torch-linalg-ldl-factor"]], "torch.linalg.householder_product": [[1312, "torch-linalg-householder-product"]], "torch.linalg.matrix_norm": [[1325, "torch-linalg-matrix-norm"]], "torch.jit.ignore": [[1277, "torch-jit-ignore"]], "torch.lcm": [[1295, "torch-lcm"]], "torch.jit.save": [[1283, "torch-jit-save"]], "torch.jit.interface": [[1278, "torch-jit-interface"]], "torch.jit.script_if_tracing": [[1285, "torch-jit-script-if-tracing"]], "torch.jit.trace_module": [[1289, "torch-jit-trace-module"]], "torch.linalg.det": [[1306, "torch-linalg-det"]], "torch.kron": [[1293, "torch-kron"]], "torch.linalg.eig": [[1308, "torch-linalg-eig"]], "torch.linalg.lu_factor_ex": [[1321, "torch-linalg-lu-factor-ex"]], "torch.le": [[1297, "torch-le"]], "torch.jit.optimize_for_inference": [[1282, "torch-jit-optimize-for-inference"]], "torch.linalg.inv": [[1313, "torch-linalg-inv"]], "torch.jit.wait": [[1291, "torch-jit-wait"]], "torch.ldexp": [[1296, "torch-ldexp"]], "torch.less_equal": [[1300, "torch-less-equal"]], "torch.linalg.matmul": [[1323, "torch-linalg-matmul"]], "torch.linalg.inv_ex": [[1314, "torch-linalg-inv-ex"]], "torch.linalg.cond": [[1304, "torch-linalg-cond"]], "torch.linalg.ldl_factor_ex": [[1316, "torch-linalg-ldl-factor-ex"]], "torch.linalg.lstsq": [[1318, "torch-linalg-lstsq"]], "torch.jit.load": [[1280, "torch-jit-load"]], "torch.linalg.lu_solve": [[1322, "torch-linalg-lu-solve"]], "torch.linalg.lu_factor": [[1320, "torch-linalg-lu-factor"]], "torch.jit.freeze": [[1276, "torch-jit-freeze"]], "torch.linalg.cholesky_ex": [[1303, "torch-linalg-cholesky-ex"]], "torch.linalg.eigvalsh": [[1311, "torch-linalg-eigvalsh"]], "torch.lgamma": [[1301, "torch-lgamma"]], "torch.linalg.matrix_exp": 
[[1324, "torch-linalg-matrix-exp"]], "torch.linalg.eigh": [[1309, "torch-linalg-eigh"]], "torch.linalg.eigvals": [[1310, "torch-linalg-eigvals"]], "torch.jit.script": [[1284, "torch-jit-script"]], "torch.jit.trace": [[1288, "torch-jit-trace"]], "torch.jit.fork": [[1275, "torch-jit-fork"]], "strict_fusion": [[1287, "strict-fusion"]], "torch.linalg.cross": [[1305, "torch-linalg-cross"]], "torch.linalg.diagonal": [[1307, "torch-linalg-diagonal"]], "torch.jit.onednn_fusion_enabled": [[1281, "torch-jit-onednn-fusion-enabled"]], "torch.less": [[1299, "torch-less"]], "torch.lerp": [[1298, "torch-lerp"]], "torch.index_copy": [[1245, "torch-index-copy"]], "torch.is_floating_point": [[1254, "torch-is-floating-point"]], "torch.heaviside": [[1232, "torch-heaviside"]], "torch.inner": [[1249, "torch-inner"]], "torch.imag": [[1243, "torch-imag"]], "torch.initial_seed": [[1248, "torch-initial-seed"]], "torch.igammac": [[1242, "torch-igammac"]], "torch.index_select": [[1247, "torch-index-select"]], "torch.histogramdd": [[1235, "torch-histogramdd"]], "torch.jit.annotate": [[1273, "torch-jit-annotate"]], "torch.hsplit": [[1236, "torch-hsplit"]], "Attribute": [[1270, "attribute"]], "torch.greater_equal": [[1228, "torch-greater-equal"]], "torch.is_complex": [[1251, "torch-is-complex"]], "torch.isclose": [[1261, "torch-isclose"]], "torch.gt": [[1229, "torch-gt"]], "ScriptModule": [[1272, "scriptmodule"]], "torch.hamming_window": [[1230, "torch-hamming-window"]], "torch.isneginf": [[1266, "torch-isneginf"]], "torch.inverse": [[1250, "torch-inverse"]], "torch.igamma": [[1241, "torch-igamma"]], "torch.is_inference_mode_enabled": [[1256, "torch-is-inference-mode-enabled"]], "torch.histc": [[1233, "torch-histc"]], "torch.isinf": [[1264, "torch-isinf"]], "torch.isposinf": [[1267, "torch-isposinf"]], "torch.hstack": [[1238, "torch-hstack"]], "torch.hypot": [[1239, "torch-hypot"]], "torch.is_deterministic_algorithms_warn_only_enabled": [[1253, "torch-is-deterministic-algorithms-warn-only-enabled"]], "torch.istft": [[1269, "torch-istft"]], "ScriptFunction": [[1271, "scriptfunction"]], "torch.is_conj": [[1252, "torch-is-conj"]], "torch.isfinite": [[1262, "torch-isfinite"]], "torch.jit.enable_onednn_fusion": [[1274, "torch-jit-enable-onednn-fusion"]], "torch.is_storage": [[1258, "torch-is-storage"]], "torch.is_warn_always_enabled": [[1260, "torch-is-warn-always-enabled"]], "torch.hann_window": [[1231, "torch-hann-window"]], "torch.histogram": [[1234, "torch-histogram"]], "torch.is_nonzero": [[1257, "torch-is-nonzero"]], "torch.greater": [[1227, "torch-greater"]], "torch.i0": [[1240, "torch-i0"]], "torch.isin": [[1263, "torch-isin"]], "torch.is_grad_enabled": [[1255, "torch-is-grad-enabled"]], "torch.hspmm": [[1237, "torch-hspmm"]], "torch.isnan": [[1265, "torch-isnan"]], "torch.get_rng_state": [[1225, "torch-get-rng-state"]], "torch.get_num_threads": [[1224, "torch-get-num-threads"]], "torch.gradient": [[1226, "torch-gradient"]], "torch.index_reduce": [[1246, "torch-index-reduce"]], "torch.isreal": [[1268, "torch-isreal"]], "torch.is_tensor": [[1259, "torch-is-tensor"]], "torch.index_add": [[1244, "torch-index-add"]], "SymbolicContext": [[1193, "symboliccontext"]], "torch.fx.experimental.symbolic_shapes.check_consistent": [[1195, "torch-fx-experimental-symbolic-shapes-check-consistent"]], "torch.fx.experimental.symbolic_shapes.is_concrete_int": [[1205, "torch-fx-experimental-symbolic-shapes-is-concrete-int"]], "InnerTensorKey": [[1184, "innertensorkey"]], "torch.fx.experimental.symbolic_shapes.statically_known_true": 
[[1211, "torch-fx-experimental-symbolic-shapes-statically-known-true"]], "ShapeEnv": [[1187, "shapeenv"]], "torch.fx.experimental.symbolic_shapes.guard_size_oblivious": [[1201, "torch-fx-experimental-symbolic-shapes-guard-size-oblivious"]], "torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings": [[1196, "torch-fx-experimental-symbolic-shapes-compute-unbacked-bindings"]], "torch.get_num_interop_threads": [[1223, "torch-get-num-interop-threads"]], "torch.fx.experimental.symbolic_shapes.parallel_or": [[1208, "torch-fx-experimental-symbolic-shapes-parallel-or"]], "StatelessSymbolicContext": [[1190, "statelesssymboliccontext"]], "PropagateUnbackedSymInts": [[1185, "propagateunbackedsymints"]], "CallMethodKey": [[1178, "callmethodkey"]], "RelaxedUnspecConstraint": [[1186, "relaxedunspecconstraint"]], "torch.fx.experimental.symbolic_shapes.hint_int": [[1203, "torch-fx-experimental-symbolic-shapes-hint-int"]], "torch.fx.experimental.symbolic_shapes.sym_eq": [[1212, "torch-fx-experimental-symbolic-shapes-sym-eq"]], "torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr": [[1194, "torch-fx-experimental-symbolic-shapes-canonicalize-bool-expr"]], "torch.geqrf": [[1216, "torch-geqrf"]], "torch.fx.experimental.symbolic_shapes.definitely_true": [[1200, "torch-fx-experimental-symbolic-shapes-definitely-true"]], "torch.func.vmap": [[1177, "torch-func-vmap"]], "ShapeEnvSettings": [[1188, "shapeenvsettings"]], "torch.func.stack_module_state": [[1175, "torch-func-stack-module-state"]], "torch.fx.experimental.symbolic_shapes.constrain_range": [[1197, "torch-fx-experimental-symbolic-shapes-constrain-range"]], "torch.fx.experimental.symbolic_shapes.has_free_symbols": [[1202, "torch-fx-experimental-symbolic-shapes-has-free-symbols"]], "torch.fx.experimental.symbolic_shapes.rebind_unbacked": [[1209, "torch-fx-experimental-symbolic-shapes-rebind-unbacked"]], "torch.get_float32_matmul_precision": [[1222, "torch-get-float32-matmul-precision"]], "SubclassSymbolicContext": [[1192, "subclasssymboliccontext"]], "torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings": [[1210, "torch-fx-experimental-symbolic-shapes-resolve-unbacked-bindings"]], "DimConstraints": [[1180, "dimconstraints"]], "torch.func.replace_all_batch_norm_modules_": [[1174, "torch-func-replace-all-batch-norm-modules"]], "torch.get_default_dtype": [[1219, "torch-get-default-dtype"]], "torch.func.linearize": [[1173, "torch-func-linearize"]], "torch.gcd": [[1214, "torch-gcd"]], "torch.fx.experimental.symbolic_shapes.constrain_unify": [[1198, "torch-fx-experimental-symbolic-shapes-constrain-unify"]], "torch.ger": [[1217, "torch-ger"]], "torch.fx.experimental.symbolic_shapes.is_concrete_bool": [[1204, "torch-fx-experimental-symbolic-shapes-is-concrete-bool"]], "torch.func.vjp": [[1176, "torch-func-vjp"]], "StrictMinMaxConstraint": [[1191, "strictminmaxconstraint"]], "torch.fx.experimental.symbolic_shapes.parallel_and": [[1207, "torch-fx-experimental-symbolic-shapes-parallel-and"]], "torch.gather": [[1213, "torch-gather"]], "torch.ge": [[1215, "torch-ge"]], "torch.get_device_module": [[1221, "torch-get-device-module"]], "torch.fx.experimental.symbolic_shapes.lru_cache": [[1206, "torch-fx-experimental-symbolic-shapes-lru-cache"]], "EqualityConstraint": [[1183, "equalityconstraint"]], "ConvertIntKey": [[1179, "convertintkey"]], "torch.get_default_device": [[1218, "torch-get-default-device"]], "DivideByKey": [[1182, "dividebykey"]], "torch.fx.experimental.symbolic_shapes.definitely_false": [[1199, 
"torch-fx-experimental-symbolic-shapes-definitely-false"]], "torch.get_deterministic_debug_mode": [[1220, "torch-get-deterministic-debug-mode"]], "DimDynamic": [[1181, "dimdynamic"]], "StatefulSymbolicContext": [[1189, "statefulsymboliccontext"]], "torch.fft.rfft2": [[1143, "torch-fft-rfft2"]], "torch.fft.ihfft": [[1136, "torch-fft-ihfft"]], "torch.fft.ifftn": [[1134, "torch-fft-ifftn"]], "torch.floor": [[1152, "torch-floor"]], "torch.fmax": [[1154, "torch-fmax"]], "torch.frac": [[1157, "torch-frac"]], "torch.full_like": [[1164, "torch-full-like"]], "torch.fft.ifft": [[1132, "torch-fft-ifft"]], "torch.fft.fft2": [[1125, "torch-fft-fft2"]], "torch.fft.rfft": [[1142, "torch-fft-rfft"]], "torch.flipud": [[1150, "torch-flipud"]], "torch.fft.rfftn": [[1145, "torch-fft-rfftn"]], "torch.frexp": [[1158, "torch-frexp"]], "torch.func.jacrev": [[1171, "torch-func-jacrev"]], "torch.fmod": [[1156, "torch-fmod"]], "torch.fake_quantize_per_tensor_affine": [[1123, "torch-fake-quantize-per-tensor-affine"]], "torch.func.jvp": [[1172, "torch-func-jvp"]], "torch.func.grad_and_value": [[1168, "torch-func-grad-and-value"]], "torch.fft.ihfftn": [[1138, "torch-fft-ihfftn"]], "torch.fliplr": [[1149, "torch-fliplr"]], "torch.fft.ifft2": [[1133, "torch-fft-ifft2"]], "torch.float_power": [[1151, "torch-float-power"]], "torch.fix": [[1146, "torch-fix"]], "torch.from_numpy": [[1161, "torch-from-numpy"]], "torch.fft.ihfft2": [[1137, "torch-fft-ihfft2"]], "torch.func.functionalize": [[1166, "torch-func-functionalize"]], "torch.fft.fftfreq": [[1126, "torch-fft-fftfreq"]], "torch.fft.ifftshift": [[1135, "torch-fft-ifftshift"]], "torch.flatten": [[1147, "torch-flatten"]], "torch.fft.hfftn": [[1131, "torch-fft-hfftn"]], "torch.full": [[1163, "torch-full"]], "torch.func.jacfwd": [[1170, "torch-func-jacfwd"]], "torch.func.functional_call": [[1165, "torch-func-functional-call"]], "torch.from_dlpack": [[1159, "torch-from-dlpack"]], "torch.frombuffer": [[1162, "torch-frombuffer"]], "torch.func.hessian": [[1169, "torch-func-hessian"]], "torch.floor_divide": [[1153, "torch-floor-divide"]], "torch.from_file": [[1160, "torch-from-file"]], "torch.func.grad": [[1167, "torch-func-grad"]], "torch.fft.hfft": [[1129, "torch-fft-hfft"]], "torch.fft.hfft2": [[1130, "torch-fft-hfft2"]], "torch.fft.irfft": [[1139, "torch-fft-irfft"]], "torch.fmin": [[1155, "torch-fmin"]], "torch.fake_quantize_per_channel_affine": [[1122, "torch-fake-quantize-per-channel-affine"]], "torch.fft.rfftfreq": [[1144, "torch-fft-rfftfreq"]], "torch.fft.fft": [[1124, "torch-fft-fft"]], "torch.fft.fftn": [[1127, "torch-fft-fftn"]], "torch.fft.fftshift": [[1128, "torch-fft-fftshift"]], "torch.fft.irfft2": [[1140, "torch-fft-irfft2"]], "torch.flip": [[1148, "torch-flip"]], "torch.fft.irfftn": [[1141, "torch-fft-irfftn"]], "torch.cuda.set_per_process_memory_fraction": [[1078, "torch-cuda-set-per-process-memory-fraction"]], "torch.diag": [[1095, "torch-diag"]], "torch.eq": [[1113, "torch-eq"]], "torch.diag_embed": [[1096, "torch-diag-embed"]], "torch.diagflat": [[1097, "torch-diagflat"]], "torch.erfinv": [[1117, "torch-erfinv"]], "torch.erfc": [[1116, "torch-erfc"]], "torch.divide": [[1104, "torch-divide"]], "torch.cuda.temperature": [[1085, "torch-cuda-temperature"]], "torch.diff": [[1100, "torch-diff"]], "torch.empty_strided": [[1111, "torch-empty-strided"]], "torch.diagonal_scatter": [[1099, "torch-diagonal-scatter"]], "torch.equal": [[1114, "torch-equal"]], "torch.cuda.set_sync_debug_mode": [[1082, "torch-cuda-set-sync-debug-mode"]], "torch.eye": [[1121, "torch-eye"]], 
"torch.cumsum": [[1090, "torch-cumsum"]], "torch.expm1": [[1120, "torch-expm1"]], "torch.cuda.power_draw": [[1071, "torch-cuda-power-draw"]], "torch.cuda.utilization": [[1086, "torch-cuda-utilization"]], "torch.cuda.synchronize": [[1084, "torch-cuda-synchronize"]], "torch.det": [[1094, "torch-det"]], "torch.empty": [[1109, "torch-empty"]], "torch.cumulative_trapezoid": [[1091, "torch-cumulative-trapezoid"]], "torch.empty_like": [[1110, "torch-empty-like"]], "torch.cuda.set_rng_state": [[1079, "torch-cuda-set-rng-state"]], "torch.cuda.reset_max_memory_allocated": [[1072, "torch-cuda-reset-max-memory-allocated"]], "torch.exp": [[1118, "torch-exp"]], "torch.cuda.reset_peak_memory_stats": [[1074, "torch-cuda-reset-peak-memory-stats"]], "torch.digamma": [[1101, "torch-digamma"]], "torch.cuda.set_stream": [[1081, "torch-cuda-set-stream"]], "torch.cuda.seed": [[1075, "torch-cuda-seed"]], "torch.cumprod": [[1089, "torch-cumprod"]], "torch.einsum": [[1108, "torch-einsum"]], "torch.cuda.set_rng_state_all": [[1080, "torch-cuda-set-rng-state-all"]], "torch.div": [[1103, "torch-div"]], "torch.dequantize": [[1093, "torch-dequantize"]], "torch.exp2": [[1119, "torch-exp2"]], "torch.cummin": [[1088, "torch-cummin"]], "torch.dstack": [[1107, "torch-dstack"]], "torch.dsplit": [[1106, "torch-dsplit"]], "torch.cuda.set_device": [[1077, "torch-cuda-set-device"]], "torch.cuda.reset_max_memory_cached": [[1073, "torch-cuda-reset-max-memory-cached"]], "torch.cuda.seed_all": [[1076, "torch-cuda-seed-all"]], "torch.diagonal": [[1098, "torch-diagonal"]], "torch.dot": [[1105, "torch-dot"]], "enable_grad": [[1112, "enable-grad"]], "torch.erf": [[1115, "torch-erf"]], "torch.cummax": [[1087, "torch-cummax"]], "torch.dist": [[1102, "torch-dist"]], "torch.deg2rad": [[1092, "torch-deg2rad"]], "torch.cuda.stream": [[1083, "torch-cuda-stream"]], "torch.cuda.get_allocator_backend": [[1033, "torch-cuda-get-allocator-backend"]], "torch.cuda.manual_seed_all": [[1055, "torch-cuda-manual-seed-all"]], "torch.cuda.memory_usage": [[1066, "torch-cuda-memory-usage"]], "torch.cuda.comm.broadcast": [[1020, "torch-cuda-comm-broadcast"]], "torch.cuda.current_blas_handle": [[1025, "torch-cuda-current-blas-handle"]], "torch.cuda.max_memory_cached": [[1057, "torch-cuda-max-memory-cached"]], "torch.cuda.get_gencode_flags": [[1038, "torch-cuda-get-gencode-flags"]], "torch.cuda.jiterator._create_jit_fn": [[1050, "torch-cuda-jiterator-create-jit-fn"]], "torch.cuda.nvtx.range_push": [[1070, "torch-cuda-nvtx-range-push"]], "torch.cuda.init": [[1044, "torch-cuda-init"]], "torch.cuda.initial_seed": [[1045, "torch-cuda-initial-seed"]], "torch.cuda.get_sync_debug_mode": [[1041, "torch-cuda-get-sync-debug-mode"]], "torch.cuda.memory_stats": [[1064, "torch-cuda-memory-stats"]], "graph": [[1042, "graph"]], "torch.cuda.max_memory_allocated": [[1056, "torch-cuda-max-memory-allocated"]], "torch.cuda.memory_cached": [[1061, "torch-cuda-memory-cached"]], "torch.cuda.get_rng_state_all": [[1040, "torch-cuda-get-rng-state-all"]], "torch.cuda.comm.gather": [[1022, "torch-cuda-comm-gather"]], "torch.cuda.max_memory_reserved": [[1058, "torch-cuda-max-memory-reserved"]], "torch.cuda.nvtx.range": [[1068, "torch-cuda-nvtx-range"]], "torch.cuda.memory_allocated": [[1060, "torch-cuda-memory-allocated"]], "torch.cuda.memory_reserved": [[1062, "torch-cuda-memory-reserved"]], "torch.cuda.mem_get_info": [[1059, "torch-cuda-mem-get-info"]], "torch.cuda.comm.broadcast_coalesced": [[1021, "torch-cuda-comm-broadcast-coalesced"]], "torch.cuda.jiterator._create_multi_output_jit_fn": 
[[1051, "torch-cuda-jiterator-create-multi-output-jit-fn"]], "torch.cuda.nvtx.range_pop": [[1069, "torch-cuda-nvtx-range-pop"]], "torch.cuda.empty_cache": [[1032, "torch-cuda-empty-cache"]], "torch.cuda.manual_seed": [[1054, "torch-cuda-manual-seed"]], "torch.cuda.get_device_capability": [[1035, "torch-cuda-get-device-capability"]], "torch.cuda.get_rng_state": [[1039, "torch-cuda-get-rng-state"]], "torch.cuda.current_stream": [[1027, "torch-cuda-current-stream"]], "torch.cuda.is_current_stream_capturing": [[1048, "torch-cuda-is-current-stream-capturing"]], "torch.cuda.get_device_properties": [[1037, "torch-cuda-get-device-properties"]], "torch.cuda.comm.reduce_add": [[1023, "torch-cuda-comm-reduce-add"]], "torch.cuda.memory_summary": [[1065, "torch-cuda-memory-summary"]], "torch.cuda.device_count": [[1030, "torch-cuda-device-count"]], "torch.cuda.comm.scatter": [[1024, "torch-cuda-comm-scatter"]], "torch.cuda.memory_snapshot": [[1063, "torch-cuda-memory-snapshot"]], "torch.cuda.ipc_collect": [[1046, "torch-cuda-ipc-collect"]], "torch.cuda.make_graphed_callables": [[1053, "torch-cuda-make-graphed-callables"]], "torch.cuda.default_stream": [[1028, "torch-cuda-default-stream"]], "torch.cuda.graph_pool_handle": [[1043, "torch-cuda-graph-pool-handle"]], "torch.cuda.is_available": [[1047, "torch-cuda-is-available"]], "torch.cuda.current_device": [[1026, "torch-cuda-current-device"]], "torch.cuda.get_arch_list": [[1034, "torch-cuda-get-arch-list"]], "torch.cuda.nvtx.mark": [[1067, "torch-cuda-nvtx-mark"]], "torch.cuda.list_gpu_processes": [[1052, "torch-cuda-list-gpu-processes"]], "torch.cuda.get_device_name": [[1036, "torch-cuda-get-device-name"]], "torch.cuda.is_initialized": [[1049, "torch-cuda-is-initialized"]], "torch.cuda.clock_rate": [[1019, "torch-cuda-clock-rate"]], "torch.cuda.caching_allocator_delete": [[1016, "torch-cuda-caching-allocator-delete"]], "torch.column_stack": [[973, "torch-column-stack"]], "torch.cpu.set_device": [[1004, "torch-cpu-set-device"]], "torch.chunk": [[969, "torch-chunk"]], "torch.cos": [[994, "torch-cos"]], "ExternalStream": [[1011, "externalstream"]], "torch.cpu.current_device": [[1000, "torch-cpu-current-device"]], "torch.compiler.compile": [[979, "torch-compiler-compile"]], "torch.copysign": [[992, "torch-copysign"]], "torch.compiler.is_compiling": [[982, "torch-compiler-is-compiling"]], "torch.cpu.synchronize": [[1006, "torch-cpu-synchronize"]], "torch.cov": [[997, "torch-cov"]], "torch.cuda.can_device_access_peer": [[1017, "torch-cuda-can-device-access-peer"]], "torch.cpu.stream": [[1005, "torch-cpu-stream"]], "torch.compiler.assume_constant_result": [[978, "torch-compiler-assume-constant-result"]], "torch.cuda.caching_allocator_alloc": [[1015, "torch-cuda-caching-allocator-alloc"]], "torch.clamp": [[970, "torch-clamp"]], "torch.cpu.current_stream": [[1001, "torch-cpu-current-stream"]], "torch.cross": [[1007, "torch-cross"]], "torch.corrcoef": [[993, "torch-corrcoef"]], "torch.compiled_with_cxx11_abi": [[976, "torch-compiled-with-cxx11-abi"]], "torch.count_nonzero": [[996, "torch-count-nonzero"]], "torch.cpu.is_available": [[1003, "torch-cpu-is-available"]], "torch.cuda.OutOfMemoryError": [[1012, "torch-cuda-outofmemoryerror"]], "torch.compiler.is_dynamo_compiling": [[983, "torch-compiler-is-dynamo-compiling"]], "torch.cpu.device_count": [[1002, "torch-cpu-device-count"]], "CUDAGraph": [[1008, "cudagraph"]], "torch.conj": [[990, "torch-conj"]], "torch.concat": [[987, "torch-concat"]], "torch.compile": [[975, "torch-compile"]], "torch.complex": [[986, 
"torch-complex"]], "torch.clone": [[972, "torch-clone"]], "torch.combinations": [[974, "torch-combinations"]], "torch.cuda.change_current_allocator": [[1018, "torch-cuda-change-current-allocator"]], "torch.conj_physical": [[991, "torch-conj-physical"]], "torch.cosh": [[995, "torch-cosh"]], "torch.compiler.reset": [[985, "torch-compiler-reset"]], "torch.concatenate": [[988, "torch-concatenate"]], "CUDAPluggableAllocator": [[1009, "cudapluggableallocator"]], "torch.compiler.cudagraph_mark_step_begin": [[980, "torch-compiler-cudagraph-mark-step-begin"]], "torch.compiler.list_backends": [[984, "torch-compiler-list-backends"]], "torch.clip": [[971, "torch-clip"]], "torch.bartlett_window": [[944, "torch-bartlett-window"]], "torch.autograd.profiler.load_nvprof": [[932, "torch-autograd-profiler-load-nvprof"]], "torch.autograd.gradcheck.gradcheck": [[922, "torch-autograd-gradcheck-gradcheck"]], "torch.autograd.graph.increment_version": [[929, "torch-autograd-graph-increment-version"]], "record_function": [[938, "record-function"]], "torch.cdist": [[963, "torch-cdist"]], "inference_mode": [[918, "inference-mode"]], "torch.cat": [[962, "torch-cat"]], "Kernel": [[940, "kernel"]], "torch.autograd.profiler.profile.key_averages": [[935, "torch-autograd-profiler-profile-key-averages"]], "torch.broadcast_tensors": [[957, "torch-broadcast-tensors"]], "set_multithreading_enabled": [[920, "set-multithreading-enabled"]], "KinetoStepTracker": [[931, "kinetosteptracker"]], "torch.autograd.profiler.profile.export_chrome_trace": [[934, "torch-autograd-profiler-profile-export-chrome-trace"]], "torch.autograd.gradcheck.GradcheckError": [[921, "torch-autograd-gradcheck-gradcheckerror"]], "torch.autograd.graph.Node.name": [[925, "torch-autograd-graph-node-name"]], "torch.broadcast_shapes": [[956, "torch-broadcast-shapes"]], "torch.autograd.profiler.profile.total_average": [[937, "torch-autograd-profiler-profile-total-average"]], "Interval": [[939, "interval"]], "torch.bucketize": [[959, "torch-bucketize"]], "EnforceUnique": [[930, "enforceunique"]], "torch.can_cast": [[960, "torch-can-cast"]], "torch.bmm": [[955, "torch-bmm"]], "torch.cholesky": [[966, "torch-cholesky"]], "torch.cholesky_solve": [[968, "torch-cholesky-solve"]], "torch.broadcast_to": [[958, "torch-broadcast-to"]], "torch.autograd.graph.Node.register_prehook": [[928, "torch-autograd-graph-node-register-prehook"]], "MemRecordsAcc": [[941, "memrecordsacc"]], "set_grad_enabled": [[919, "set-grad-enabled"]], "torch.bitwise_right_shift": [[951, "torch-bitwise-right-shift"]], "torch.bernoulli": [[945, "torch-bernoulli"]], "torch.autograd.graph.Node.metadata": [[924, "torch-autograd-graph-node-metadata"]], "torch.autograd.graph.Node.next_functions": [[926, "torch-autograd-graph-node-next-functions"]], "torch.chain_matmul": [[965, "torch-chain-matmul"]], "torch.autograd.profiler.parse_nvprof_trace": [[933, "torch-autograd-profiler-parse-nvprof-trace"]], "torch.bitwise_left_shift": [[948, "torch-bitwise-left-shift"]], "StringTable": [[942, "stringtable"]], "torch.autograd.gradcheck.gradgradcheck": [[923, "torch-autograd-gradcheck-gradgradcheck"]], "torch.cartesian_prod": [[961, "torch-cartesian-prod"]], "torch.cholesky_inverse": [[967, "torch-cholesky-inverse"]], "torch.bincount": [[946, "torch-bincount"]], "torch.block_diag": [[954, "torch-block-diag"]], "torch.blackman_window": [[953, "torch-blackman-window"]], "torch.autograd.profiler.profile.self_cpu_time_total": [[936, "torch-autograd-profiler-profile-self-cpu-time-total"]], "torch.baddbmm": [[943, 
"torch-baddbmm"]], "torch.bitwise_or": [[950, "torch-bitwise-or"]], "torch.bitwise_xor": [[952, "torch-bitwise-xor"]], "torch.bitwise_not": [[949, "torch-bitwise-not"]], "torch.bitwise_and": [[947, "torch-bitwise-and"]], "torch.ceil": [[964, "torch-ceil"]], "torch.autograd.graph.Node.register_hook": [[927, "torch-autograd-graph-node-register-hook"]], "torch.argsort": [[879, "torch-argsort"]], "torch.as_tensor": [[882, "torch-as-tensor"]], "torch.autograd.forward_ad.unpack_dual": [[902, "torch-autograd-forward-ad-unpack-dual"]], "torch.autograd.Function.forward": [[893, "torch-autograd-function-forward"]], "torch.autograd.forward_ad.exit_dual_level": [[900, "torch-autograd-forward-ad-exit-dual-level"]], "torch.arcsin": [[871, "torch-arcsin"]], "torch.autograd.forward_ad.enter_dual_level": [[899, "torch-autograd-forward-ad-enter-dual-level"]], "NestedIOFunction": [[909, "nestediofunction"]], "torch.atleast_1d": [[889, "torch-atleast-1d"]], "torch.arctanh": [[875, "torch-arctanh"]], "torch.arccos": [[869, "torch-arccos"]], "torch.autograd.functional.jacobian": [[913, "torch-autograd-functional-jacobian"]], "torch.autograd.forward_ad.make_dual": [[901, "torch-autograd-forward-ad-make-dual"]], "torch.autograd.functional.vjp": [[916, "torch-autograd-functional-vjp"]], "torch.autograd.Function.backward": [[892, "torch-autograd-function-backward"]], "torch.atanh": [[888, "torch-atanh"]], "torch.autograd.function.once_differentiable": [[910, "torch-autograd-function-once-differentiable"]], "swap_module": [[867, "swap-module"]], "torch.atleast_3d": [[891, "torch-atleast-3d"]], "torch.autograd.function.FunctionCtx.save_for_backward": [[906, "torch-autograd-function-functionctx-save-for-backward"]], "torch.asin": [[884, "torch-asin"]], "dual_level": [[898, "dual-level"]], "torch.autograd.functional.vhp": [[915, "torch-autograd-functional-vhp"]], "torch.autograd.function.FunctionCtx.set_materialize_grads": [[907, "torch-autograd-function-functionctx-set-materialize-grads"]], "torch.autograd.grad": [[917, "torch-autograd-grad"]], "torch.asinh": [[885, "torch-asinh"]], "torch.asarray": [[883, "torch-asarray"]], "torch.atleast_2d": [[890, "torch-atleast-2d"]], "torch.arctan": [[873, "torch-arctan"]], "torch.autograd.function.FunctionCtx.mark_non_differentiable": [[905, "torch-autograd-function-functionctx-mark-non-differentiable"]], "torch.autograd.functional.jvp": [[914, "torch-autograd-functional-jvp"]], "torch.autograd.backward": [[896, "torch-autograd-backward"]], "UnpackedDualTensor": [[897, "unpackeddualtensor"]], "InplaceFunction": [[908, "inplacefunction"]], "torch.atan2": [[887, "torch-atan2"]], "torch.argwhere": [[880, "torch-argwhere"]], "torch.autograd.functional.hessian": [[911, "torch-autograd-functional-hessian"]], "torch.as_strided": [[881, "torch-as-strided"]], "torch.autograd.function.FunctionCtx.mark_dirty": [[904, "torch-autograd-function-functionctx-mark-dirty"]], "torch.autograd.Function.jvp": [[894, "torch-autograd-function-jvp"]], "torch.arange": [[868, "torch-arange"]], "torch.argmax": [[877, "torch-argmax"]], "torch.atan": [[886, "torch-atan"]], "torch.argmin": [[878, "torch-argmin"]], "torch.arcsinh": [[872, "torch-arcsinh"]], "BackwardCFunction": [[903, "backwardcfunction"]], "torch.arctan2": [[874, "torch-arctan2"]], "torch.autograd.Function.vmap": [[895, "torch-autograd-function-vmap"]], "torch.arccosh": [[870, "torch-arccosh"]], "torch.autograd.functional.hvp": [[912, "torch-autograd-functional-hvp"]], "torch.are_deterministic_algorithms_enabled": [[876, 
"torch-are-deterministic-algorithms-enabled"]], "Probability distributions - torch.distributions": [[35, "module-torch.distributions"]], "Score function": [[35, "score-function"]], "Pathwise derivative": [[35, "pathwise-derivative"]], "Distribution": [[35, "distribution"]], "ExponentialFamily": [[35, "exponentialfamily"]], "Bernoulli": [[35, "bernoulli"]], "Beta": [[35, "beta"]], "Binomial": [[35, "binomial"]], "Categorical": [[35, "categorical"]], "Cauchy": [[35, "cauchy"]], "Chi2": [[35, "chi2"]], "ContinuousBernoulli": [[35, "continuousbernoulli"]], "Dirichlet": [[35, "dirichlet"]], "Exponential": [[35, "exponential"]], "FisherSnedecor": [[35, "fishersnedecor"]], "Gamma": [[35, "gamma"]], "Geometric": [[35, "geometric"]], "Gumbel": [[35, "gumbel"]], "HalfCauchy": [[35, "halfcauchy"]], "HalfNormal": [[35, "halfnormal"]], "Independent": [[35, "independent"]], "InverseGamma": [[35, "inversegamma"]], "Kumaraswamy": [[35, "kumaraswamy"]], "LKJCholesky": [[35, "lkjcholesky"]], "Laplace": [[35, "laplace"]], "LogNormal": [[35, "lognormal"]], "LowRankMultivariateNormal": [[35, "lowrankmultivariatenormal"]], "MixtureSameFamily": [[35, "mixturesamefamily"]], "Multinomial": [[35, "multinomial"]], "MultivariateNormal": [[35, "multivariatenormal"]], "NegativeBinomial": [[35, "negativebinomial"]], "Normal": [[35, "normal"]], "OneHotCategorical": [[35, "onehotcategorical"]], "Pareto": [[35, "pareto"]], "Poisson": [[35, "poisson"]], "RelaxedBernoulli": [[35, "relaxedbernoulli"]], "LogitRelaxedBernoulli": [[35, "logitrelaxedbernoulli"]], "RelaxedOneHotCategorical": [[35, "relaxedonehotcategorical"]], "StudentT": [[35, "studentt"]], "TransformedDistribution": [[35, "transformeddistribution"]], "Uniform": [[35, "uniform"]], "VonMises": [[35, "vonmises"]], "Weibull": [[35, "weibull"]], "Wishart": [[35, "wishart"]], "KL Divergence": [[35, "module-torch.distributions.kl"]], "Transforms": [[35, "module-torch.distributions.transforms"]], "Constraints": [[35, "module-torch.distributions.constraints"], [2045, "constraints"]], "Constraint Registry": [[35, "module-torch.distributions.constraint_registry"]], "PyTorch Governance | Build + CI": [[6, "pytorch-governance-build-ci"]], "How to Add a New Maintainer": [[6, "how-to-add-a-new-maintainer"]], "Automatic differentiation package - torch.autograd": [[1, "module-torch.autograd"]], "Forward-mode Automatic Differentiation": [[1, "forward-mode-automatic-differentiation"]], "Functional higher level API": [[1, "functional-higher-level-api"]], "Locally disabling gradient computation": [[1, "locally-disabling-gradient-computation"], [2042, "locally-disabling-gradient-computation"], [2089, "locally-disabling-gradient-computation"]], "Default gradient layouts": [[1, "default-gradient-layouts"]], "Manual gradient layouts": [[1, "manual-gradient-layouts"]], "In-place operations on Tensors": [[1, "in-place-operations-on-tensors"]], "In-place correctness checks": [[1, "in-place-correctness-checks"], [2042, "in-place-correctness-checks"]], "Variable (deprecated)": [[1, "variable-deprecated"]], "Tensor autograd functions": [[1, "tensor-autograd-functions"]], "Function": [[1, "function"]], "Context method mixins": [[1, "context-method-mixins"]], "Custom Function utilities": [[1, "custom-function-utilities"]], "Numerical gradient checking": [[1, "module-torch.autograd.gradcheck"]], "Profiler": [[1, "profiler"]], "Debugging and anomaly detection": [[1, "debugging-and-anomaly-detection"]], "Autograd graph": [[1, "autograd-graph"]], "DDP Communication Hooks": [[24, 
"ddp-communication-hooks"]], "How to Use a Communication Hook?": [[24, "how-to-use-a-communication-hook"]], "What Does a Communication Hook Operate On?": [[24, "what-does-a-communication-hook-operate-on"]], "Default Communication Hooks": [[24, "default-communication-hooks"]], "PowerSGD Communication Hook": [[24, "powersgd-communication-hook"]], "PowerSGD State": [[24, "powersgd-state"]], "PowerSGD Hooks": [[24, "powersgd-hooks"]], "Debugging Communication Hooks": [[24, "debugging-communication-hooks"]], "Checkpointing of Communication Hooks": [[24, "checkpointing-of-communication-hooks"]], "Acknowledgements": [[24, "acknowledgements"]], "TunableOp": [[19, "tunableop"], [17, "tunableop"]], "Enabling TunableOp and Tuning Separately": [[19, "enabling-tunableop-and-tuning-separately"]], "File Input and Output": [[19, "file-input-and-output"]], "A Note on Tuning Behavior": [[19, "a-note-on-tuning-behavior"]], "Current Tunable Operators": [[19, "current-tunable-operators"]], "TunableGemm for ROCm": [[19, "tunablegemm-for-rocm"]], "Tuning Context": [[19, "tuning-context"]], "Events": [[41, "module-torch.distributed.elastic.events"]], "API Methods": [[41, "api-methods"]], "Event Objects": [[41, "event-objects"]], "CUDA Stream Sanitizer": [[18, "cuda-stream-sanitizer"]], "Usage": [[18, "usage"], [31, null], [48, "usage"]], "Subprocess Handling": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"]], "Retrieve SubprocessHandler": [[49, "retrieve-subprocesshandler"]], "SubprocessHandler": [[49, "subprocesshandler"]], "Benchmark Utils - torch.utils.benchmark": [[3, "module-torch.utils.benchmark"]], "Torch Distributed Elastic": [[31, "torch-distributed-elastic"]], "Get Started": [[31, "get-started"]], "Documentation": [[31, "documentation"]], "API": [[31, null]], "Advanced": [[31, null]], "Plugins": [[31, null]], "torch.__config__": [[13, "module-torch.__config__"]], "torch.utils.data": [[23, "module-torch.utils.data"]], "Dataset Types": [[23, "dataset-types"]], "Map-style datasets": [[23, "map-style-datasets"]], "Iterable-style datasets": [[23, "iterable-style-datasets"]], "Data Loading Order and Sampler": [[23, "data-loading-order-and-sampler"]], "Loading Batched and Non-Batched Data": [[23, "loading-batched-and-non-batched-data"]], "Automatic batching (default)": [[23, "automatic-batching-default"]], "Disable automatic batching": [[23, "disable-automatic-batching"]], "Working with collate_fn": [[23, "working-with-collate-fn"]], "Single- and Multi-process Data Loading": [[23, "single-and-multi-process-data-loading"]], "Single-process data loading (default)": [[23, "single-process-data-loading-default"]], "Multi-process data loading": [[23, "multi-process-data-loading"]], "Platform-specific behaviors": [[23, "platform-specific-behaviors"]], "Randomness in multi-process data loading": [[23, "randomness-in-multi-process-data-loading"]], "Memory Pinning": [[23, "memory-pinning"]], "Quickstart": [[46, "quickstart"]], "torch.backends": [[2, "module-torch.backends"]], "torch.backends.cpu": [[2, "module-torch.backends.cpu"]], "torch.backends.cuda": [[2, "module-torch.backends.cuda"]], "torch.backends.cudnn": [[2, "module-torch.backends.cudnn"]], "torch.backends.mha": [[2, "module-torch.backends.mha"]], "torch.backends.mps": [[2, "module-torch.backends.mps"]], "torch.backends.mkl": [[2, "module-torch.backends.mkl"]], "torch.backends.mkldnn": [[2, "module-torch.backends.mkldnn"]], "torch.backends.nnpack": [[2, "module-torch.backends.nnpack"]], "torch.backends.openmp": [[2, 
"module-torch.backends.openmp"]], "torch.backends.opt_einsum": [[2, "module-torch.backends.opt_einsum"]], "torch.backends.xeon": [[2, "module-torch.backends.xeon"]], "torch.utils.dlpack": [[36, "torch-utils-dlpack"]], "Pipeline Parallelism": [[33, "pipeline-parallelism"]], "Why Pipeline Parallel?": [[33, "why-pipeline-parallel"]], "What is torch.distributed.pipelining?": [[33, "what-is-torch-distributed-pipelining"]], "Step 1: build PipelineStage for execution": [[33, "step-1-build-pipelinestage-for-execution"]], "Step 2: use PipelineSchedule for execution": [[33, "step-2-use-pipelineschedule-for-execution"]], "Options for Splitting a Model": [[33, "options-for-splitting-a-model"]], "Option 1: splitting a model manually": [[33, "option-1-splitting-a-model-manually"]], "Option 2: splitting a model automatically": [[33, "option-2-splitting-a-model-automatically"]], "Hugging Face Examples": [[33, "hugging-face-examples"]], "Technical Deep Dive": [[33, "technical-deep-dive"]], "How does the pipeline API split a model?": [[33, "how-does-the-pipeline-api-split-a-model"]], "Implementing Your Own Schedule": [[33, "implementing-your-own-schedule"]], "Model Split APIs": [[33, "model-split-apis"]], "Microbatch Utilities": [[33, "module-torch.distributed.pipelining.microbatch"]], "Pipeline Stages": [[33, "module-torch.distributed.pipelining.stage"]], "Pipeline Schedules": [[33, "module-torch.distributed.pipelining.schedules"]], "C++": [[15, "c"]], "TorchScript C++ API": [[15, "torchscript-c-api"]], "Extending PyTorch and TorchScript with C++ Extensions": [[15, "extending-pytorch-and-torchscript-with-c-extensions"]], "Tensor and Autograd in C++": [[15, "tensor-and-autograd-in-c"]], "Authoring Models in C++": [[15, "authoring-models-in-c"]], "Packaging for C++": [[15, "packaging-for-c"]], "PyTorch Governance | Maintainers": [[10, "pytorch-governance-maintainers"]], "Responsibilities": [[10, "responsibilities"]], "Lead Core Maintainer (BDFL)": [[10, "lead-core-maintainer-bdfl"], [9, "lead-core-maintainer-bdfl"]], "Core Maintainers": [[10, "core-maintainers"], [9, "core-maintainers"]], "Module-level maintainers": [[10, "module-level-maintainers"]], "NN APIs (torch.nn)": [[10, "nn-apis-torch-nn"]], "Optimizers (torch.optim)": [[10, "optimizers-torch-optim"]], "Autograd (torch.autograd)": [[10, "autograd-torch-autograd"]], "Compilers (JIT / TorchScript / FX / TorchDynamo)": [[10, "compilers-jit-torchscript-fx-torchdynamo"]], "Distributions & RNG": [[10, "distributions-rng"]], "Distributed": [[10, "distributed"]], "Multiprocessing and DataLoaders": [[10, "multiprocessing-and-dataloaders"]], "Linear Algebra (torch.linalg)": [[10, "linear-algebra-torch-linalg"]], "Sparse (torch.sparse)": [[10, "sparse-torch-sparse"]], "NestedTensor (torch.nested)": [[10, "nestedtensor-torch-nested"]], "MaskedTensor (torch.masked)": [[10, "maskedtensor-torch-masked"]], "Fast Fourier Transform (torch.fft)": [[10, "fast-fourier-transform-torch-fft"]], "CPU Performance (Torch Inductor / MKLDNN)": [[10, "cpu-performance-torch-inductor-mkldnn"]], "GPU Performance (Torch Inductor / Triton / CUDA)": [[10, "gpu-performance-torch-inductor-triton-cuda"]], "NVFuser": [[10, "nvfuser"]], "AMD/ROCm/HIP": [[10, "amd-rocm-hip"]], "Build + CI": [[10, "build-ci"]], "Performance Tools": [[10, "performance-tools"]], "C++ API": [[10, "c-api"]], "C10 utils and operator dispatch": [[10, "c10-utils-and-operator-dispatch"]], "ONNX exporter": [[10, "onnx-exporter"]], "Mobile / Edge": [[10, "mobile-edge"]], "Model Compression & Optimization": [[10, 
"model-compression-optimization"]], "Windows": [[10, "windows"]], "Apple M1/MPS": [[10, "apple-m1-mps"]], "PowerPC": [[10, "powerpc"]], "AArch64 CPU": [[10, "aarch64-cpu"]], "Docs / Tutorials": [[10, "docs-tutorials"]], "Library-level maintainers": [[10, "library-level-maintainers"]], "XLA": [[10, "xla"]], "TorchServe": [[10, "torchserve"]], "TorchVision": [[10, "torchvision"]], "TorchText": [[10, "torchtext"]], "TorchAudio": [[10, "torchaudio"]], "TorchRec": [[10, "torchrec"]], "TorchX": [[10, "torchx"]], "TorchData / TorchArrow": [[10, "torchdata-torcharrow"]], "Error Propagation": [[40, "module-torch.distributed.elastic.multiprocessing.errors"]], "Methods and Classes": [[40, "methods-and-classes"]], "Tensor Parallelism - torch.distributed.tensor.parallel": [[34, "tensor-parallelism-torch-distributed-tensor-parallel"]], "TorchElastic Kubernetes": [[43, "torchelastic-kubernetes"]], "torch.utils.bottleneck": [[4, "module-torch.utils.bottleneck"]], "torch.cuda": [[17, "module-torch.cuda"]], "Communication collectives": [[17, "communication-collectives"]], "Graphs (beta)": [[17, "graphs-beta"]], "Memory management": [[17, "memory-management"], [2045, "memory-management"], [2053, "memory-management"]], "NVIDIA Tools Extension (NVTX)": [[17, "nvidia-tools-extension-nvtx"]], "Jiterator (beta)": [[17, "jiterator-beta"]], "Stream Sanitizer (prototype)": [[17, "stream-sanitizer-prototype"]], "Elastic Agent": [[37, "module-torch.distributed.elastic.agent"]], "Server": [[37, "module-torch.distributed.elastic.agent.server"]], "Concepts": [[37, "concepts"]], "Implementations": [[37, "implementations"], [47, "implementations"]], "Extending the Agent": [[37, "extending-the-agent"]], "Watchdog in the Agent": [[37, "watchdog-in-the-agent"]], "Health Check Server": [[37, "health-check-server"]], "Automatic Mixed Precision package - torch.amp": [[0, "automatic-mixed-precision-package-torch-amp"]], "Autocasting": [[0, "autocasting"]], "Gradient Scaling": [[0, "gradient-scaling"]], "Autocast Op Reference": [[0, "autocast-op-reference"]], "Op Eligibility": [[0, "op-eligibility"]], "CUDA Op-Specific Behavior": [[0, "cuda-op-specific-behavior"]], "CUDA Ops that can autocast to float16": [[0, "cuda-ops-that-can-autocast-to-float16"]], "CUDA Ops that can autocast to float32": [[0, "cuda-ops-that-can-autocast-to-float32"]], "CUDA Ops that promote to the widest input type": [[0, "cuda-ops-that-promote-to-the-widest-input-type"]], "Prefer binary_cross_entropy_with_logits over binary_cross_entropy": [[0, "prefer-binary-cross-entropy-with-logits-over-binary-cross-entropy"]], "CPU Op-Specific Behavior": [[0, "cpu-op-specific-behavior"]], "CPU Ops that can autocast to bfloat16": [[0, "cpu-ops-that-can-autocast-to-bfloat16"]], "CPU Ops that can autocast to float32": [[0, "cpu-ops-that-can-autocast-to-float32"]], "CPU Ops that promote to the widest input type": [[0, "cpu-ops-that-promote-to-the-widest-input-type"]], "Rendezvous": [[47, "module-torch.distributed.elastic.rendezvous"]], "Registry": [[47, "registry"]], "Handler": [[47, "handler"]], "Dataclasses": [[47, "dataclasses"]], "Exceptions": [[47, "exceptions"]], "Dynamic Rendezvous": [[47, "dynamic-rendezvous"]], "C10d Backend": [[47, "c10d-backend"]], "Etcd Backend": [[47, "etcd-backend"]], "Etcd Rendezvous (Legacy)": [[47, "etcd-rendezvous-legacy"]], "Etcd Store": [[47, "etcd-store"]], "Etcd Server": [[47, "etcd-server"]], "PyTorch Contribution Guide": [[7, "pytorch-contribution-guide"]], "Contribution Process": [[7, "contribution-process"]], "Proposing New 
Features": [[7, "proposing-new-features"]], "Reporting Issues": [[7, "reporting-issues"]], "Implementing Features or Fixing Bugs": [[7, "implementing-features-or-fixing-bugs"]], "Adding Tutorials": [[7, "adding-tutorials"]], "Improving Documentation & Tutorials": [[7, "improving-documentation-tutorials"]], "Participating in Online Discussions": [[7, "participating-in-online-discussions"]], "Submitting Pull Requests to Fix Open Issues": [[7, "submitting-pull-requests-to-fix-open-issues"]], "Reviewing Open Pull Requests": [[7, "reviewing-open-pull-requests"]], "Improving Code Readability": [[7, "improving-code-readability"]], "Adding Test Cases to Make the Codebase More Robust": [[7, "adding-test-cases-to-make-the-codebase-more-robust"]], "Promoting PyTorch": [[7, "promoting-pytorch"]], "Triaging Issues": [[7, "triaging-issues"]], "About Open Source Development": [[7, "about-open-source-development"]], "Common Mistakes To Avoid": [[7, "common-mistakes-to-avoid"]], "On Documentation": [[7, "on-documentation"]], "Python Docs": [[7, "python-docs"]], "C++ Docs": [[7, "c-docs"]], "Tutorials": [[7, "tutorials"], [2075, "tutorials"], [2068, "tutorials"]], "Tutorials Build Overview": [[7, "tutorials-build-overview"]], "Contributing a New Tutorial": [[7, "contributing-a-new-tutorial"]], "torch.utils.deterministic": [[27, "module-torch.utils.deterministic"]], "Distributed Optimizers": [[32, "distributed-optimizers"]], "Multiprocessing": [[45, "module-torch.distributed.elastic.multiprocessing"]], "Starting Multiple Workers": [[45, "starting-multiple-workers"]], "Process Context": [[45, "process-context"]], "torchrun (Elastic Launch)": [[48, "module-torch.distributed.run"]], "Transitioning from torch.distributed.launch to torchrun": [[48, "transitioning-from-torch-distributed-launch-to-torchrun"]], "Single-node multi-worker": [[48, "single-node-multi-worker"]], "Stacked single-node multi-worker": [[48, "stacked-single-node-multi-worker"]], "Fault tolerant (fixed sized number of workers, no elasticity, tolerates 3 failures)": [[48, "fault-tolerant-fixed-sized-number-of-workers-no-elasticity-tolerates-3-failures"]], "Elastic (min=1, max=4, tolerates up to 3 membership changes or failures)": [[48, "elastic-min-1-max-4-tolerates-up-to-3-membership-changes-or-failures"]], "Note on rendezvous backend": [[48, "note-on-rendezvous-backend"]], "Definitions": [[48, "definitions"]], "Environment Variables": [[48, "environment-variables"]], "Deployment": [[48, "deployment"]], "Failure Modes": [[48, "failure-modes"]], "Membership Changes": [[48, "membership-changes"]], "Important Notices": [[48, "important-notices"]], "Metrics": [[44, "module-torch.distributed.elastic.metrics"]], "Metric Handlers": [[44, "metric-handlers"]], "Methods": [[44, "methods"]], "Generic Join Context Manager": [[29, "generic-join-context-manager"]], "Control Flow - Cond": [[12, "control-flow-cond"]], "Invariants of torch.ops.higher_order.cond": [[12, "invariants-of-torch-ops-higher-order-cond"]], "Distributed communication package - torch.distributed": [[28, "distributed-communication-package-torch-distributed"]], "Backends": [[28, "backends"], [2075, "backends"]], "Backends that come with PyTorch": [[28, "backends-that-come-with-pytorch"]], "Which backend to use?": [[28, "which-backend-to-use"]], "Common environment variables": [[28, "common-environment-variables"]], "Choosing the network interface to use": [[28, "choosing-the-network-interface-to-use"]], "Other NCCL environment variables": [[28, "other-nccl-environment-variables"]], 
"Basics": [[28, "basics"], [2075, "basics"]], "Initialization": [[28, "initialization"]], "TCP initialization": [[28, "tcp-initialization"]], "Shared file-system initialization": [[28, "shared-file-system-initialization"]], "Environment variable initialization": [[28, "environment-variable-initialization"]], "Post-Initialization": [[28, "post-initialization"]], "Shutdown": [[28, "shutdown"]], "Reinitialization": [[28, "reinitialization"]], "Distributed Key-Value Store": [[28, "distributed-key-value-store"]], "Groups": [[28, "groups"]], "DeviceMesh": [[28, "devicemesh"]], "Point-to-point communication": [[28, "point-to-point-communication"]], "Synchronous and asynchronous collective operations": [[28, "synchronous-and-asynchronous-collective-operations"]], "Collective functions": [[28, "collective-functions"]], "Profiling Collective Communication": [[28, "profiling-collective-communication"]], "Multi-GPU collective functions": [[28, "multi-gpu-collective-functions"]], "Third-party backends": [[28, "third-party-backends"]], "Launch utility": [[28, "launch-utility"]], "Spawn utility": [[28, "spawn-utility"]], "Debugging torch.distributed applications": [[28, "debugging-torch-distributed-applications"]], "Python Breakpoint": [[28, "python-breakpoint"]], "Monitored Barrier": [[28, "monitored-barrier"]], "TORCH_DISTRIBUTED_DEBUG": [[28, "torch-distributed-debug"]], "Logging": [[28, "logging"]], "Expiration Timers": [[50, "module-torch.distributed.elastic.timer"]], "Client Methods": [[50, "client-methods"]], "Server/Client Implementations": [[50, "server-client-implementations"]], "Writing a custom timer server/client": [[50, "writing-a-custom-timer-server-client"]], "Debug info logging": [[50, "module-torch.distributed.elastic.timer.debug_info_logging"]], "Distributed Checkpoint - torch.distributed.checkpoint": [[30, "distributed-checkpoint-torch-distributed-checkpoint"]], "PyTorch Design Philosophy": [[8, "pytorch-design-philosophy"]], "Design Principles": [[8, "design-principles"]], "Principle 1: Usability over Performance": [[8, "principle-1-usability-over-performance"]], "Principle 2: Simple Over Easy": [[8, "principle-2-simple-over-easy"]], "Principle 3: Python First with Best In Class Language Interoperability": [[8, "principle-3-python-first-with-best-in-class-language-interoperability"]], "PyTorch Governance | Mechanics": [[9, "pytorch-governance-mechanics"]], "Summary": [[9, "summary"]], "Module Maintainers": [[9, "module-maintainers"]], "Nominating, Confirming and Removing Maintainers": [[9, "nominating-confirming-and-removing-maintainers"]], "The Principles": [[9, "the-principles"]], "The Process for Nomination": [[9, "the-process-for-nomination"]], "The Process for Removal": [[9, "the-process-for-removal"]], "Nominating Core Maintainers": [[9, "nominating-core-maintainers"]], "Removing the Lead Core Maintainer and Nominating a New Lead Core Maintainer": [[9, "removing-the-lead-core-maintainer-and-nominating-a-new-lead-core-maintainer"]], "Add, Remove, and Re-Scope Modules and Projects": [[9, "add-remove-and-re-scope-modules-and-projects"]], "Decision Making": [[9, "decision-making"]], "Uncontroversial Changes": [[9, "uncontroversial-changes"]], "Controversial Decision Process": [[9, "controversial-decision-process"]], "General Project Policies": [[9, "general-project-policies"]], "FAQ": [[9, "faq"]], "torch.utils.checkpoint": [[5, "torch-utils-checkpoint"]], "Control Plane": [[38, "module-torch.distributed.elastic.control_plane"]], "torch.cpu": [[16, "module-torch.cpu"]], "Complex 
Numbers": [[11, "complex-numbers"]], "Creating Complex Tensors": [[11, "creating-complex-tensors"]], "Transition from the old representation": [[11, "transition-from-the-old-representation"]], "Accessing real and imag": [[11, "accessing-real-and-imag"]], "Angle and abs": [[11, "angle-and-abs"]], "Linear Algebra": [[11, "linear-algebra"]], "Autograd": [[11, "autograd"]], "Optimizers": [[11, "optimizers"]], "Debugging Environment Variables": [[25, "debugging-environment-variables"]], "torch::deploy has been moved to pytorch/multipy": [[26, "torch-deploy-has-been-moved-to-pytorch-multipy"]], "CUDA Environment Variables": [[20, "cuda-environment-variables"]], "torch.utils.cpp_extension": [[14, "torch-utils-cpp-extension"]], "Customization": [[39, "customization"]], "Launcher": [[39, "launcher"]], "Rendezvous Handler": [[39, "rendezvous-handler"]], "Metric Handler": [[39, "metric-handler"]], "Events Handler": [[39, "events-handler"]], "Quantization": [[2070, "module-torch.ao.quantization"]], "Introduction to Quantization": [[2070, "introduction-to-quantization"]], "Quantization API Summary": [[2070, "quantization-api-summary"]], "Eager Mode Quantization": [[2070, "eager-mode-quantization"]], "Post Training Dynamic Quantization": [[2070, "post-training-dynamic-quantization"]], "Post Training Static Quantization": [[2070, "post-training-static-quantization"]], "Quantization Aware Training for Static Quantization": [[2070, "quantization-aware-training-for-static-quantization"]], "Model Preparation for Eager Mode Static Quantization": [[2070, "model-preparation-for-eager-mode-static-quantization"]], "(Prototype - maintenance mode) FX Graph Mode Quantization": [[2070, "prototype-maintenance-mode-fx-graph-mode-quantization"]], "(Prototype) PyTorch 2 Export Quantization": [[2070, "prototype-pytorch-2-export-quantization"]], "Quantization Stack": [[2070, "quantization-stack"]], "Quantized Model": [[2070, "quantized-model"]], "Quantized Tensor": [[2070, "quantized-tensor"]], "Quantize and Dequantize": [[2070, "quantize-and-dequantize"]], "Quantized Operators/Modules": [[2070, "quantized-operators-modules"]], "Quantized Engine": [[2070, "quantized-engine"]], "Quantization Flow": [[2070, "quantization-flow"]], "Observer and FakeQuantize": [[2070, "observer-and-fakequantize"]], "QConfig": [[2070, "qconfig"], [844, "qconfig"]], "General Quantization Flow": [[2070, "general-quantization-flow"]], "Quantization Support Matrix": [[2070, "quantization-support-matrix"]], "Quantization Mode Support": [[2070, "quantization-mode-support"]], "Quantization Flow Support": [[2070, "quantization-flow-support"]], "Backend/Hardware Support": [[2070, "backend-hardware-support"]], "Note for native CPU backends": [[2070, "note-for-native-cpu-backends"]], "Operator Support": [[2070, "operator-support"]], "Quantization API Reference": [[2070, "quantization-api-reference"], [2073, "quantization-api-reference"]], "Quantization Backend Configuration": [[2070, "quantization-backend-configuration"], [2072, "quantization-backend-configuration"]], "Quantization Accuracy Debugging": [[2070, "quantization-accuracy-debugging"], [2071, "quantization-accuracy-debugging"]], "Quantization Customizations": [[2070, "quantization-customizations"]], "Quantization Custom Module API": [[2070, "quantization-custom-module-api"]], "Best Practices": [[2070, "best-practices"]], "Common Errors": [[2070, "common-errors"]], "Passing a non-quantized Tensor into a quantized kernel": [[2070, "passing-a-non-quantized-tensor-into-a-quantized-kernel"]], 
"Passing a quantized Tensor into a non-quantized kernel": [[2070, "passing-a-quantized-tensor-into-a-non-quantized-kernel"]], "Saving and Loading Quantized models": [[2070, "saving-and-loading-quantized-models"]], "Symbolic Trace Error when using FX Graph Mode Quantization": [[2070, "symbolic-trace-error-when-using-fx-graph-mode-quantization"]], "Data insensitive error": [[2071, "data-insensitive-error"]], "General tips": [[2071, "general-tips"]], "Int8 quantization tips": [[2071, "int8-quantization-tips"]], "Data sensitive error": [[2071, "data-sensitive-error"]], "Implementation error": [[2071, "implementation-error"]], "Numerical Debugging Tooling (prototype)": [[2071, "numerical-debugging-tooling-prototype"]], "Autograd mechanics": [[2042, "autograd-mechanics"]], "How autograd encodes the history": [[2042, "how-autograd-encodes-the-history"]], "Saved tensors": [[2042, "saved-tensors"]], "Gradients for non-differentiable functions": [[2042, "gradients-for-non-differentiable-functions"]], "Setting requires_grad": [[2042, "setting-requires-grad"]], "Grad Modes": [[2042, "grad-modes"]], "Default Mode (Grad Mode)": [[2042, "default-mode-grad-mode"]], "No-grad Mode": [[2042, "no-grad-mode"]], "Inference Mode": [[2042, "inference-mode"]], "Evaluation Mode (nn.Module.eval())": [[2042, "evaluation-mode-nn-module-eval"]], "In-place operations with autograd": [[2042, "in-place-operations-with-autograd"]], "Multithreaded Autograd": [[2042, "multithreaded-autograd"]], "Concurrency on CPU": [[2042, "concurrency-on-cpu"]], "Non-determinism": [[2042, "non-determinism"]], "Graph retaining": [[2042, "graph-retaining"]], "Thread Safety on Autograd Node": [[2042, "thread-safety-on-autograd-node"]], "No thread safety on C++ hooks": [[2042, "no-thread-safety-on-c-hooks"]], "Autograd for Complex Numbers": [[2042, "autograd-for-complex-numbers"]], "What are complex derivatives?": [[2042, "what-are-complex-derivatives"]], "Wirtinger Calculus comes into the picture \u2026": [[2042, "wirtinger-calculus-comes-into-the-picture"]], "How is Wirtinger Calculus useful in optimization?": [[2042, "how-is-wirtinger-calculus-useful-in-optimization"]], "How does PyTorch compute the conjugate Wirtinger derivative?": [[2042, "how-does-pytorch-compute-the-conjugate-wirtinger-derivative"]], "How can I write my own derivative formula for a complex function?": [[2042, "how-can-i-write-my-own-derivative-formula-for-a-complex-function"]], "What about cross-domain functions?": [[2042, "what-about-cross-domain-functions"]], "Hooks for saved tensors": [[2042, "hooks-for-saved-tensors"]], "Registering hooks for a saved tensor": [[2042, "registering-hooks-for-a-saved-tensor"]], "Registering default hooks for saved tensors": [[2042, "registering-default-hooks-for-saved-tensors"]], "Backward Hooks execution": [[2042, "backward-hooks-execution"]], "Whether a particular hook will be fired": [[2042, "whether-a-particular-hook-will-be-fired"]], "The order in which the different hooks are fired": [[2042, "the-order-in-which-the-different-hooks-are-fired"]], "Special hooks": [[2042, "special-hooks"]], "Behavior of Tensor hooks when Tensor is modified in-place": [[2042, "behavior-of-tensor-hooks-when-tensor-is-modified-in-place"]], "Broadcasting semantics": [[2043, "broadcasting-semantics"]], "General semantics": [[2043, "general-semantics"]], "In-place semantics": [[2043, "in-place-semantics"]], "Backwards compatibility": [[2043, "backwards-compatibility"]], "CUDA Automatic Mixed Precision examples": [[2041, 
"cuda-automatic-mixed-precision-examples"]], "Typical Mixed Precision Training": [[2041, "typical-mixed-precision-training"]], "Working with Unscaled Gradients": [[2041, "working-with-unscaled-gradients"]], "Gradient clipping": [[2041, "gradient-clipping"]], "Working with Scaled Gradients": [[2041, "working-with-scaled-gradients"]], "Gradient accumulation": [[2041, "gradient-accumulation"]], "Gradient penalty": [[2041, "gradient-penalty"]], "Working with Multiple Models, Losses, and Optimizers": [[2041, "working-with-multiple-models-losses-and-optimizers"]], "Working with Multiple GPUs": [[2041, "working-with-multiple-gpus"]], "DataParallel in a single process": [[2041, "dataparallel-in-a-single-process"]], "DistributedDataParallel, one GPU per process": [[2041, "distributeddataparallel-one-gpu-per-process"]], "DistributedDataParallel, multiple GPUs per process": [[2041, "distributeddataparallel-multiple-gpus-per-process"]], "Autocast and Custom Autograd Functions": [[2041, "autocast-and-custom-autograd-functions"]], "Functions with multiple inputs or autocastable ops": [[2041, "functions-with-multiple-inputs-or-autocastable-ops"]], "Functions that need a particular dtype": [[2041, "functions-that-need-a-particular-dtype"]], "ONNX Backend for TorchDynamo": [[2064, "onnx-backend-for-torchdynamo"]], "torch.profiler": [[2069, "torch-profiler"]], "Intel Instrumentation and Tracing Technology APIs": [[2069, "intel-instrumentation-and-tracing-technology-apis"]], "Remote Reference Protocol": [[2077, "remote-reference-protocol"]], "Background": [[2077, "background"], [2076, "background"]], "RRef Lifetime": [[2077, "rref-lifetime"]], "Design Reasoning": [[2077, "design-reasoning"]], "Implementation": [[2077, "implementation"], [2047, "implementation"]], "Protocol Scenarios": [[2077, "protocol-scenarios"]], "User Share RRef with Owner as Return Value": [[2077, "user-share-rref-with-owner-as-return-value"]], "User Share RRef with Owner as Argument": [[2077, "user-share-rref-with-owner-as-argument"]], "Owner Share RRef with User": [[2077, "owner-share-rref-with-user"]], "User Share RRef with User": [[2077, "user-share-rref-with-user"]], "torch.utils.tensorboard": [[2085, "module-torch.utils.tensorboard"]], "torch.Tensor": [[2086, "torch-tensor"]], "Data types": [[2086, "data-types"]], "Initializing and basic operations": [[2086, "initializing-and-basic-operations"]], "Tensor class reference": [[2086, "tensor-class-reference"]], "Numerical accuracy": [[2058, "numerical-accuracy"]], "Batched computations or slice computations": [[2058, "batched-computations-or-slice-computations"]], "Extremal values": [[2058, "extremal-values"]], "Linear algebra (torch.linalg)": [[2058, "linear-algebra-torch-linalg"]], "Non-finite values": [[2058, "non-finite-values"]], "Extremal values in linalg": [[2058, "extremal-values-in-linalg"]], "TensorFloat-32(TF32) on Nvidia Ampere (and later) devices": [[2058, "tensorfloat-32-tf32-on-nvidia-ampere-and-later-devices"]], "Reduced Precision Reduction for FP16 and BF16 GEMMs": [[2058, "reduced-precision-reduction-for-fp16-and-bf16-gemms"]], "Reduced Precision FP16 and BF16 GEMMs and Convolutions on AMD Instinct MI200 devices": [[2058, "reduced-precision-fp16-and-bf16-gemms-and-convolutions-on-amd-instinct-mi200-devices"]], "TorchScript-based ONNX Exporter": [[2065, "torchscript-based-onnx-exporter"], [2062, "torchscript-based-onnx-exporter"]], "Example: AlexNet from PyTorch to ONNX": [[2065, "example-alexnet-from-pytorch-to-onnx"]], "Tracing vs Scripting": [[2065, 
"tracing-vs-scripting"]], "Avoiding Pitfalls": [[2065, "avoiding-pitfalls"]], "Avoid NumPy and built-in Python types": [[2065, "avoid-numpy-and-built-in-python-types"]], "Avoid Tensor.data": [[2065, "avoid-tensor-data"]], "Avoid in-place operations when using tensor.shape in tracing mode": [[2065, "avoid-in-place-operations-when-using-tensor-shape-in-tracing-mode"]], "Differences in Operator Implementations": [[2065, "differences-in-operator-implementations"]], "Unsupported Tensor Indexing Patterns": [[2065, "unsupported-tensor-indexing-patterns"]], "Reads / Gets": [[2065, "reads-gets"]], "Writes / Sets": [[2065, "writes-sets"]], "Adding support for operators": [[2065, "adding-support-for-operators"]], "ONNX exporter internals": [[2065, "onnx-exporter-internals"]], "ATen operators": [[2065, "aten-operators"]], "List of supported operators": [[2065, "list-of-supported-operators"]], "Adding support for an aten or quantized operator": [[2065, "adding-support-for-an-aten-or-quantized-operator"]], "torch.autograd.Functions": [[2065, "torch-autograd-functions"]], "Static Symbolic Method": [[2065, "static-symbolic-method"]], "Inline Autograd Function": [[2065, "inline-autograd-function"]], "Custom operators": [[2065, "custom-operators"]], "ONNX-script functions": [[2065, "onnx-script-functions"]], "C++ Operators": [[2065, "c-operators"]], "Discovering all unconvertible ATen ops at once": [[2065, "discovering-all-unconvertible-aten-ops-at-once"]], "Classes": [[2065, "classes"]], "Tensor Attributes": [[2083, "tensor-attributes"]], "torch.dtype": [[2083, "torch-dtype"]], "torch.device": [[2083, "torch-device"]], "torch.layout": [[2083, "torch-layout"]], "torch.memory_format": [[2083, "torch-memory-format"]], "CUDA semantics": [[2045, "cuda-semantics"]], "TensorFloat-32 (TF32) on Ampere (and later) devices": [[2045, "tensorfloat-32-tf32-on-ampere-and-later-devices"]], "Reduced Precision Reduction in FP16 GEMMs": [[2045, "reduced-precision-reduction-in-fp16-gemms"]], "Reduced Precision Reduction in BF16 GEMMs": [[2045, "reduced-precision-reduction-in-bf16-gemms"]], "Asynchronous execution": [[2045, "asynchronous-execution"]], "CUDA streams": [[2045, "cuda-streams"]], "Stream semantics of backward passes": [[2045, "stream-semantics-of-backward-passes"]], "BC note: Using grads on the default stream": [[2045, "bc-note-using-grads-on-the-default-stream"]], "Optimizing memory usage with PYTORCH_CUDA_ALLOC_CONF": [[2045, "optimizing-memory-usage-with-pytorch-cuda-alloc-conf"]], "Using custom memory allocators for CUDA": [[2045, "using-custom-memory-allocators-for-cuda"]], "cuBLAS workspaces": [[2045, "cublas-workspaces"]], "cuFFT plan cache": [[2045, "cufft-plan-cache"]], "Just-in-Time Compilation": [[2045, "just-in-time-compilation"]], "Best practices": [[2045, "best-practices"]], "Device-agnostic code": [[2045, "device-agnostic-code"]], "Use pinned memory buffers": [[2045, "use-pinned-memory-buffers"]], "Use nn.parallel.DistributedDataParallel instead of multiprocessing or nn.DataParallel": [[2045, "use-nn-parallel-distributeddataparallel-instead-of-multiprocessing-or-nn-dataparallel"]], "CUDA Graphs": [[2045, "cuda-graphs"]], "Why CUDA Graphs?": [[2045, "why-cuda-graphs"]], "PyTorch API": [[2045, "pytorch-api"]], "Non-constraints": [[2045, "non-constraints"]], "Whole-network capture": [[2045, "whole-network-capture"]], "Partial-network capture": [[2045, "partial-network-capture"]], "Usage with torch.cuda.amp": [[2045, "usage-with-torch-cuda-amp"]], "Usage with multiple streams": [[2045, 
"usage-with-multiple-streams"]], "Usage with DistributedDataParallel": [[2045, "usage-with-distributeddataparallel"]], "NCCL < 2.9.6": [[2045, "nccl-2-9-6"]], "NCCL >= 2.9.6": [[2045, "id5"]], "Graph memory management": [[2045, "graph-memory-management"]], "Sharing memory across captures": [[2045, "sharing-memory-across-captures"]], "Distributed RPC Framework": [[2075, "distributed-rpc-framework"]], "RPC": [[2075, "rpc"]], "TensorPipe Backend": [[2075, "tensorpipe-backend"]], "RRef": [[2075, "rref"]], "More Information about RRef": [[2075, null]], "RemoteModule": [[2075, "remotemodule"]], "Distributed Autograd Framework": [[2075, "distributed-autograd-framework"]], "More Information about RPC Autograd": [[2075, null]], "Distributed Optimizer": [[2075, "distributed-optimizer"], [2076, "distributed-optimizer"]], "torch.sparse": [[2080, "torch-sparse"]], "Why and when to use sparsity": [[2080, "why-and-when-to-use-sparsity"]], "Functionality overview": [[2080, "functionality-overview"]], "Operator overview": [[2080, "operator-overview"]], "Sparse Semi-Structured Tensors": [[2080, "sparse-semi-structured-tensors"]], "Constructing Sparse Semi-Structured Tensors": [[2080, "constructing-sparse-semi-structured-tensors"]], "Sparse Semi-Structured Tensor Operations": [[2080, "sparse-semi-structured-tensor-operations"]], "Accelerating nn.Linear with semi-structured sparsity": [[2080, "accelerating-nn-linear-with-semi-structured-sparsity"]], "Sparse COO tensors": [[2080, "sparse-coo-tensors"]], "Sparse hybrid COO tensors": [[2080, "sparse-hybrid-coo-tensors"]], "Uncoalesced sparse COO tensors": [[2080, "uncoalesced-sparse-coo-tensors"]], "Working with sparse COO tensors": [[2080, "working-with-sparse-coo-tensors"]], "Sparse Compressed Tensors": [[2080, "sparse-compressed-tensors"]], "Sparse CSR Tensor": [[2080, "sparse-csr-tensor"]], "Construction of CSR tensors": [[2080, "construction-of-csr-tensors"]], "CSR Tensor Operations": [[2080, "csr-tensor-operations"]], "Sparse CSC Tensor": [[2080, "sparse-csc-tensor"]], "Construction of CSC tensors": [[2080, "construction-of-csc-tensors"]], "Sparse BSR Tensor": [[2080, "sparse-bsr-tensor"]], "Construction of BSR tensors": [[2080, "construction-of-bsr-tensors"]], "Sparse BSC Tensor": [[2080, "sparse-bsc-tensor"]], "Construction of BSC tensors": [[2080, "construction-of-bsc-tensors"]], "Tools for working with sparse compressed tensors": [[2080, "tools-for-working-with-sparse-compressed-tensors"]], "Construction of sparse compressed tensors": [[2080, "construction-of-sparse-compressed-tensors"]], "Linear Algebra operations": [[2080, "linear-algebra-operations"]], "Tensor methods and sparse": [[2080, "tensor-methods-and-sparse"]], "Torch functions specific to sparse Tensors": [[2080, "torch-functions-specific-to-sparse-tensors"]], "Other functions": [[2080, "other-functions"]], "Unary functions": [[2080, "unary-functions"]], "torch.package": [[2068, "torch-package"]], "Packaging your first model": [[2068, "packaging-your-first-model"]], "How do I\u2026": [[2068, "how-do-i"]], "See what is inside a package?": [[2068, "see-what-is-inside-a-package"]], "Treat the package like a ZIP archive": [[2068, "treat-the-package-like-a-zip-archive"]], "Use the file_structure() API": [[2068, "use-the-file-structure-api"]], "See why a given module was included as a dependency?": [[2068, "see-why-a-given-module-was-included-as-a-dependency"]], "Include arbitrary resources with my package and access them later?": [[2068, 
"include-arbitrary-resources-with-my-package-and-access-them-later"]], "Customize how a class is packaged?": [[2068, "customize-how-a-class-is-packaged"]], "Test in my source code whether or not it is executing inside a package?": [[2068, "test-in-my-source-code-whether-or-not-it-is-executing-inside-a-package"]], "Patch code into a package?": [[2068, "patch-code-into-a-package"]], "Access package contents from packaged code?": [[2068, "access-package-contents-from-packaged-code"]], "Distinguish between packaged code and non-packaged code?": [[2068, "distinguish-between-packaged-code-and-non-packaged-code"]], "Re-export an imported object?": [[2068, "re-export-an-imported-object"]], "Package a TorchScript module?": [[2068, "package-a-torchscript-module"]], "Explanation": [[2068, "explanation"]], "torch.package Format Overview": [[2068, "torch-package-format-overview"]], "Framework files": [[2068, "framework-files"]], "User files": [[2068, "user-files"]], "How torch.package finds your code\u2019s dependencies": [[2068, "how-torch-package-finds-your-code-s-dependencies"]], "Analyzing an object\u2019s dependencies": [[2068, "analyzing-an-object-s-dependencies"]], "Analyzing a module\u2019s dependencies": [[2068, "analyzing-a-module-s-dependencies"]], "Dependency Management": [[2068, "dependency-management"]], "intern": [[2068, "intern"]], "extern": [[2068, "extern"]], "mock": [[2068, "mock"]], "Refactoring": [[2068, "refactoring"]], "Patterns": [[2068, "patterns"]], "torch.package sharp edges": [[2068, "torch-package-sharp-edges"]], "Avoid global state in your modules": [[2068, "avoid-global-state-in-your-modules"]], "Types are not shared between packages and the loading environment": [[2068, "types-are-not-shared-between-packages-and-the-loading-environment"]], "How torch.package keeps packages isolated from each other": [[2068, "how-torch-package-keeps-packages-isolated-from-each-other"]], "Mangling": [[2068, "mangling"]], "MPS backend": [[2056, "mps-backend"]], "ONNX supported TorchScript operators": [[2066, "onnx-supported-torchscript-operators"]], "Supported operators": [[2066, "supported-operators"]], "ONNX support for TorchScript operators": [[2066, "id1"]], "Unsupported operators": [[2066, "unsupported-operators"], [2066, "id2"]], "Gradcheck mechanics": [[2052, "gradcheck-mechanics"]], "Notations and background information": [[2052, "notations-and-background-information"]], "Default backward mode gradcheck behavior": [[2052, "default-backward-mode-gradcheck-behavior"]], "Real-to-real functions": [[2052, "real-to-real-functions"]], "Default real input numerical evaluation": [[2052, "default-real-input-numerical-evaluation"]], "Default real input analytical evaluation": [[2052, "default-real-input-analytical-evaluation"]], "Complex-to-real functions": [[2052, "complex-to-real-functions"]], "Default complex input numerical evaluation": [[2052, "default-complex-input-numerical-evaluation"]], "Default complex input analytical evaluation": [[2052, "default-complex-input-analytical-evaluation"]], "Functions with complex outputs": [[2052, "functions-with-complex-outputs"]], "Fast backward mode gradcheck": [[2052, "fast-backward-mode-gradcheck"]], "Fast gradcheck for real-to-real functions": [[2052, "fast-gradcheck-for-real-to-real-functions"]], "Fast gradcheck for complex-to-real functions": [[2052, "fast-gradcheck-for-complex-to-real-functions"]], "Fast complex input numerical evaluation": [[2052, "fast-complex-input-numerical-evaluation"]], "Fast complex input analytical evaluation": [[2052, 
"fast-complex-input-analytical-evaluation"]], "Why not use a complex u": [[2052, "why-not-use-a-complex-u"]], "Fast gradcheck for functions with complex outputs": [[2052, "fast-gradcheck-for-functions-with-complex-outputs"]], "Gradgradcheck implementation": [[2052, "gradgradcheck-implementation"]], "Extending PyTorch": [[2048, "extending-pytorch"]], "Adding new operators": [[2048, "adding-new-operators"]], "Extending torch.autograd": [[2048, "extending-torch-autograd"]], "When to use": [[2048, "when-to-use"]], "When not to use": [[2048, "when-not-to-use"]], "How to use": [[2048, "how-to-use"]], "Example": [[2048, "example"], [2047, "example"]], "Combined or separate forward() and setup_context()": [[2048, "combined-or-separate-forward-and-setup-context"]], "Forward mode AD": [[2048, "forward-mode-ad"]], "torch.func transforms and/or torch.vmap()": [[2048, "torch-func-transforms-and-or-torch-vmap"]], "Extending torch.nn": [[2048, "extending-torch-nn"]], "Adding a Module": [[2048, "adding-a-module"]], "Extending torch Python API": [[2048, "extending-torch-python-api"]], "Extending torch with a Tensor-like type": [[2048, "extending-torch-with-a-tensor-like-type"]], "Subclassing torch.Tensor": [[2048, "subclassing-torch-tensor"]], "Extending torch with a Tensor wrapper type": [[2048, "extending-torch-with-a-tensor-wrapper-type"]], "Operations on multiple types that define __torch_function__": [[2048, "operations-on-multiple-types-that-define-torch-function"]], "Testing Coverage of Overrides for the PyTorch API": [[2048, "testing-coverage-of-overrides-for-the-pytorch-api"]], "Extending torch native API": [[2048, "extending-torch-native-api"]], "Extending all torch API with Modes": [[2048, "extending-all-torch-api-with-modes"]], "CPU threading and TorchScript inference": [[2044, "cpu-threading-and-torchscript-inference"]], "Build options": [[2044, "build-options"]], "Runtime API": [[2044, "runtime-api"]], "Tuning the number of threads": [[2044, "tuning-the-number-of-threads"]], "Distributed Data Parallel": [[2047, "distributed-data-parallel"]], "Internal Design": [[2047, "internal-design"]], "ProcessGroup": [[2047, "processgroup"]], "TorchDynamo DDPOptimizer": [[2047, "id1"]], "torch.Size": [[2079, "torch-size"]], "torch.nn.init": [[2040, "torch-nn-init"]], "Tensor Views": [[2084, "tensor-views"]], "My model reports \u201ccuda runtime error(2): out of memory\u201d": [[2050, "my-model-reports-cuda-runtime-error-2-out-of-memory"]], "My GPU memory isn\u2019t freed properly": [[2050, "my-gpu-memory-isn-t-freed-properly"]], "My out of memory exception handler can\u2019t allocate memory": [[2050, "my-out-of-memory-exception-handler-can-t-allocate-memory"]], "My data loader workers return identical random numbers": [[2050, "my-data-loader-workers-return-identical-random-numbers"]], "My recurrent network doesn\u2019t work with data parallelism": [[2050, "my-recurrent-network-doesn-t-work-with-data-parallelism"]], "Multiprocessing best practices": [[2057, "multiprocessing-best-practices"]], "CUDA in multiprocessing": [[2057, "cuda-in-multiprocessing"]], "Best practices and tips": [[2057, "best-practices-and-tips"]], "Avoiding and fighting deadlocks": [[2057, "avoiding-and-fighting-deadlocks"]], "Reuse buffers passed through a Queue": [[2057, "reuse-buffers-passed-through-a-queue"]], "Asynchronous multiprocess training (e.g. 
Hogwild)": [[2057, "asynchronous-multiprocess-training-e-g-hogwild"]], "Hogwild": [[2057, "hogwild"]], "CPU in multiprocessing": [[2057, "cpu-in-multiprocessing"]], "CPU oversubscription": [[2057, "cpu-oversubscription"]], "Avoid CPU oversubscription": [[2057, "avoid-cpu-oversubscription"]], "torch.random": [[2074, "module-torch.random"]], "Distributed Autograd Design": [[2076, "distributed-autograd-design"]], "Autograd recording during the forward pass": [[2076, "autograd-recording-during-the-forward-pass"]], "Distributed Autograd Context": [[2076, "distributed-autograd-context"]], "Distributed Backward Pass": [[2076, "distributed-backward-pass"]], "Computing dependencies": [[2076, "computing-dependencies"]], "FAST mode algorithm": [[2076, "fast-mode-algorithm"]], "SMART mode algorithm": [[2076, "smart-mode-algorithm"]], "Simple end to end example": [[2076, "simple-end-to-end-example"]], "Threading Environment Variables": [[2088, "threading-environment-variables"]], "Windows FAQ": [[2061, "windows-faq"]], "Building from source": [[2061, "building-from-source"]], "Include optional components": [[2061, "include-optional-components"]], "Speeding CUDA build for Windows": [[2061, "speeding-cuda-build-for-windows"]], "One key install script": [[2061, "one-key-install-script"]], "Extension": [[2061, "extension"]], "CFFI Extension": [[2061, "cffi-extension"]], "Cpp Extension": [[2061, "cpp-extension"]], "Installation": [[2061, "installation"]], "Package not found in win-32 channel.": [[2061, "package-not-found-in-win-32-channel"]], "Import error": [[2061, "import-error"]], "Usage (multiprocessing)": [[2061, "usage-multiprocessing"]], "Multiprocessing error without if-clause protection": [[2061, "multiprocessing-error-without-if-clause-protection"]], "Multiprocessing error \u201cBroken pipe\u201d": [[2061, "multiprocessing-error-broken-pipe"]], "Multiprocessing error \u201cdriver shut down\u201d": [[2061, "multiprocessing-error-driver-shut-down"]], "CUDA IPC operations": [[2061, "cuda-ipc-operations"]], "FSDP Notes": [[2051, "fsdp-notes"]], "FSDP Prefetch Nuances": [[2051, "fsdp-prefetch-nuances"]], "Communication payload size": [[2051, "communication-payload-size"]], "FSDP buffers sizes": [[2051, "fsdp-buffers-sizes"]], "HIP (ROCm) semantics": [[2053, "hip-rocm-semantics"]], "HIP Interfaces Reuse the CUDA Interfaces": [[2053, "hip-interfaces-reuse-the-cuda-interfaces"]], "Checking for HIP": [[2053, "checking-for-hip"]], "TensorFloat-32(TF32) on ROCm": [[2053, "tensorfloat-32-tf32-on-rocm"]], "hipFFT/rocFFT plan cache": [[2053, "hipfft-rocfft-plan-cache"]], "torch.distributed backends": [[2053, "torch-distributed-backends"]], "CUDA API to HIP API mappings in C++": [[2053, "cuda-api-to-hip-api-mappings-in-c"]], "Refer to CUDA Semantics doc": [[2053, "refer-to-cuda-semantics-doc"]], "Enabling kernel asserts": [[2053, "enabling-kernel-asserts"]], "Default values for native configurations": [[2072, "default-values-for-native-configurations"]], "torch.Storage": [[2082, "torch-storage"]], "torch.ao.quantization": [[2073, "torch-ao-quantization"]], "Top level APIs": [[2073, "top-level-apis"]], "Preparing model for quantization": [[2073, "preparing-model-for-quantization"]], "Utility functions": [[2073, "utility-functions"], [2060, "utility-functions"]], "torch.ao.quantization.quantize_fx": [[2073, "torch-ao-quantization-quantize-fx"]], "torch.ao.quantization.qconfig_mapping": [[2073, "torch-ao-quantization-qconfig-mapping"]], "torch.ao.quantization.backend_config": [[2073, 
"torch-ao-quantization-backend-config"]], "torch.ao.quantization.fx.custom_config": [[2073, "torch-ao-quantization-fx-custom-config"]], "torch.ao.quantization.quantizer": [[2073, "module-torch.ao.quantization.quantizer"]], "torch.ao.quantization.pt2e (quantization in pytorch 2.0 export implementation)": [[2073, "module-torch.ao.quantization.pt2e"]], "torch.ao.quantization.pt2e.export_utils": [[2073, "torch-ao-quantization-pt2e-export-utils"]], "torch (quantization related functions)": [[2073, "torch-quantization-related-functions"]], "torch.Tensor (quantization related methods)": [[2073, "torch-tensor-quantization-related-methods"]], "torch.ao.quantization.observer": [[2073, "torch-ao-quantization-observer"]], "torch.ao.quantization.fake_quantize": [[2073, "torch-ao-quantization-fake-quantize"]], "torch.ao.quantization.qconfig": [[2073, "torch-ao-quantization-qconfig"]], "torch.ao.nn.intrinsic": [[2073, "module-torch.ao.nn.intrinsic"]], "torch.ao.nn.intrinsic.qat": [[2073, "module-torch.ao.nn.intrinsic.qat"]], "torch.ao.nn.intrinsic.quantized": [[2073, "module-torch.ao.nn.intrinsic.quantized"]], "torch.ao.nn.intrinsic.quantized.dynamic": [[2073, "module-torch.ao.nn.intrinsic.quantized.dynamic"]], "torch.ao.nn.qat": [[2073, "module-torch.ao.nn.qat"]], "torch.ao.nn.qat.dynamic": [[2073, "module-torch.ao.nn.qat.dynamic"]], "torch.ao.nn.quantized": [[2073, "module-torch.ao.nn.quantized.modules"]], "torch.ao.nn.quantized.functional": [[2073, "module-torch.ao.nn.quantized.functional"]], "torch.ao.nn.quantizable": [[2073, "torch-ao-nn-quantizable"]], "torch.ao.nn.quantized.dynamic": [[2073, "module-torch.ao.nn.quantized.dynamic"]], "Quantized dtypes and quantization schemes": [[2073, "quantized-dtypes-and-quantization-schemes"]], "PyTorch Custom Operators Landing Page": [[2046, "pytorch-custom-operators-landing-page"]], "TL;DR": [[2046, "tl-dr"]], "How do I author a custom op from Python?": [[2046, "how-do-i-author-a-custom-op-from-python"]], "How do I integrate custom C++ and/or CUDA code with PyTorch?": [[2046, "how-do-i-integrate-custom-c-and-or-cuda-code-with-pytorch"]], "For more details": [[2046, "for-more-details"]], "When should I create a Custom Operator?": [[2046, "when-should-i-create-a-custom-operator"]], "Why should I create a Custom Operator?": [[2046, "why-should-i-create-a-custom-operator"]], "Features for large-scale deployments": [[2054, "features-for-large-scale-deployments"]], "Fleet-wide operator profiling": [[2054, "fleet-wide-operator-profiling"]], "API usage logging": [[2054, "api-usage-logging"]], "Attaching metadata to saved TorchScript models": [[2054, "attaching-metadata-to-saved-torchscript-models"]], "Build environment considerations": [[2054, "build-environment-considerations"]], "Common extension points": [[2054, "common-extension-points"]], "Reproducibility": [[2059, "reproducibility"]], "Controlling sources of randomness": [[2059, "controlling-sources-of-randomness"]], "PyTorch random number generator": [[2059, "pytorch-random-number-generator"]], "Python": [[2059, "python"]], "Random number generators in other libraries": [[2059, "random-number-generators-in-other-libraries"]], "CUDA convolution benchmarking": [[2059, "cuda-convolution-benchmarking"]], "Avoiding nondeterministic algorithms": [[2059, "avoiding-nondeterministic-algorithms"]], "CUDA convolution determinism": [[2059, "cuda-convolution-determinism"]], "CUDA RNN and LSTM": [[2059, "cuda-rnn-and-lstm"]], "Filling uninitialized memory": [[2059, "filling-uninitialized-memory"]], "DataLoader": [[2059, 
"dataloader"]], "TorchDynamo-based ONNX Exporter": [[2063, "torchdynamo-based-onnx-exporter"], [2062, "torchdynamo-based-onnx-exporter"]], "Dependencies": [[2063, "dependencies"]], "A simple example": [[2063, "a-simple-example"]], "Inspecting the ONNX model using GUI": [[2063, "inspecting-the-onnx-model-using-gui"]], "Diagnosing issues with SARIF": [[2063, "diagnosing-issues-with-sarif"]], "ONNX Diagnostic SARIF Rules": [[2063, null]], "torch.optim": [[2067, "module-torch.optim"]], "How to use an optimizer": [[2067, "how-to-use-an-optimizer"]], "Constructing it": [[2067, "constructing-it"]], "Per-parameter options": [[2067, "per-parameter-options"]], "Taking an optimization step": [[2067, "taking-an-optimization-step"]], "optimizer.step()": [[2067, "optimizer-step"]], "optimizer.step(closure)": [[2067, "optimizer-step-closure"]], "Base class": [[2067, "base-class"]], "Algorithms": [[2067, "algorithms"]], "How to adjust learning rate": [[2067, "how-to-adjust-learning-rate"]], "Weight Averaging (SWA and EMA)": [[2067, "weight-averaging-swa-and-ema"]], "Constructing averaged models": [[2067, "constructing-averaged-models"]], "Custom averaging strategies": [[2067, "custom-averaging-strategies"]], "SWA learning rate schedules": [[2067, "swa-learning-rate-schedules"]], "Taking care of batch normalization": [[2067, "taking-care-of-batch-normalization"]], "Putting it all together: SWA": [[2067, "putting-it-all-together-swa"]], "Putting it all together: EMA": [[2067, "putting-it-all-together-ema"]], "torch.onnx": [[2062, "torch-onnx"]], "Contributing / Developing": [[2062, "contributing-developing"]], "Serialization semantics": [[2060, "serialization-semantics"]], "Table of Contents": [[2060, "table-of-contents"]], "Saving and loading tensors": [[2060, "saving-and-loading-tensors"]], "Saving and loading tensors preserves views": [[2060, "saving-and-loading-tensors-preserves-views"]], "Saving and loading torch.nn.Modules": [[2060, "saving-and-loading-torch-nn-modules"]], "Serialized file format for torch.save": [[2060, "serialized-file-format-for-torch-save"]], "Serializing torch.nn.Modules and loading them in C++": [[2060, "serializing-torch-nn-modules-and-loading-them-in-c"]], "Saving and loading ScriptModules across PyTorch versions": [[2060, "saving-and-loading-scriptmodules-across-pytorch-versions"]], "torch.div performing integer division": [[2060, "torch-div-performing-integer-division"]], "torch.full always inferring a float dtype": [[2060, "torch-full-always-inferring-a-float-dtype"]], "torch.testing": [[2087, "module-torch.testing"]], "torch.special": [[2081, "torch-special"]], "torch": [[2089, "module-torch"]], "Tensors": [[2089, "tensors"]], "Creation Ops": [[2089, "creation-ops"]], "Indexing, Slicing, Joining, Mutating Ops": [[2089, "indexing-slicing-joining-mutating-ops"]], "Generators": [[2089, "generators"]], "Random sampling": [[2089, "random-sampling"]], "In-place random sampling": [[2089, "in-place-random-sampling"]], "Quasi-random sampling": [[2089, "quasi-random-sampling"]], "Parallelism": [[2089, "parallelism"]], "Math operations": [[2089, "math-operations"]], "Pointwise Ops": [[2089, "pointwise-ops"]], "Reduction Ops": [[2089, "reduction-ops"]], "Comparison Ops": [[2089, "comparison-ops"]], "Spectral Ops": [[2089, "spectral-ops"]], "Other Operations": [[2089, "other-operations"]], "BLAS and LAPACK Operations": [[2089, "blas-and-lapack-operations"]], "Foreach Operations": [[2089, "foreach-operations"]], "Symbolic Numbers": [[2089, "symbolic-numbers"]], "Export Path": [[2089, 
"export-path"]], "Control Flow": [[2089, "control-flow"]], "Optimizations": [[2089, "optimizations"]], "Operator Tags": [[2089, "operator-tags"]], "torch.signal": [[2078, "module-torch.signal"]], "torch.signal.windows": [[2078, "module-torch.signal.windows"]], "A Simple Custom Module": [[2055, "a-simple-custom-module"]], "Modules as Building Blocks": [[2055, "modules-as-building-blocks"]], "Neural Network Training with Modules": [[2055, "neural-network-training-with-modules"]], "Module State": [[2055, "module-state"]], "Module Initialization": [[2055, "module-initialization"]], "Module Hooks": [[2055, "module-hooks"]], "Advanced Features": [[2055, "advanced-features"]], "Distributed Training": [[2055, "distributed-training"]], "Profiling Performance": [[2055, "profiling-performance"]], "Improving Performance with Quantization": [[2055, "improving-performance-with-quantization"]], "Improving Memory Usage with Pruning": [[2055, "improving-memory-usage-with-pruning"]], "Parametrizations": [[2055, "parametrizations"]], "Transforming Modules with FX": [[2055, "transforming-modules-with-fx"]], "Extending torch.func with autograd.Function": [[2049, "extending-torch-func-with-autograd-function"]], "Basic Usage": [[2049, "basic-usage"]], "Example 1: autograd.Function calls into another system": [[2049, "example-1-autograd-function-calls-into-another-system"]], "Example 2: autograd.Function specifies custom gradient rules": [[2049, "example-2-autograd-function-specifies-custom-gradient-rules"]], "Limitations and gotchas": [[2049, "limitations-and-gotchas"]], "torch.vmap() Support": [[2049, "torch-vmap-support"]], "Automatically generate a vmap rule": [[2049, "automatically-generate-a-vmap-rule"]], "Defining the vmap staticmethod": [[2049, "defining-the-vmap-staticmethod"]], "torch.func.jvp() Support": [[2049, "torch-func-jvp-support"]], "torch.ao.ns._numeric_suite": [[2090, "torch-ao-ns-numeric-suite"]], "MinMaxObserver": [[822, "minmaxobserver"]], "default_debug_observer": [[830, "default-debug-observer"]], "PlaceholderObserver": [[828, "placeholderobserver"]], "float_qparams_weight_only_qconfig": [[855, "float-qparams-weight-only-qconfig"]], "MovingAveragePerChannelMinMaxObserver": [[824, "movingaverageperchannelminmaxobserver"]], "default_debug_qconfig": [[846, "default-debug-qconfig"]], "PrepareCustomConfig": [[819, "preparecustomconfig"]], "default_activation_only_qconfig": [[845, "default-activation-only-qconfig"]], "FuseCustomConfig": [[818, "fusecustomconfig"]], "per_channel_dynamic_qconfig": [[856, "per-channel-dynamic-qconfig"]], "default_observer": [[834, "default-observer"]], "default_per_channel_qconfig": [[848, "default-per-channel-qconfig"]], "default_float_qparams_observer": [[832, "default-float-qparams-observer"]], "float16_static_qconfig": [[854, "float16-static-qconfig"]], "RecordingObserver": [[829, "recordingobserver"]], "default_qconfig": [[851, "default-qconfig"]], "NoopObserver": [[825, "noopobserver"]], "prepare_qat": [[841, "prepare-qat"]], "default_dynamic_qconfig": [[847, "default-dynamic-qconfig"]], "default_dynamic_quant_observer": [[831, "default-dynamic-quant-observer"]], "load_observer_state_dict": [[839, "load-observer-state-dict"]], "StandaloneModuleConfigEntry": [[820, "standalonemoduleconfigentry"]], "prepare_qat_fx": [[865, "prepare-qat-fx"]], "quantize": [[860, "quantize"]], "get_default_qat_qconfig_mapping": [[858, "get-default-qat-qconfig-mapping"]], "fuse_fx": [[863, "fuse-fx"]], "default_weight_observer": [[837, "default-weight-observer"]], 
"ConvertCustomConfig": [[817, "convertcustomconfig"]], "propagate_qconfig": [[842, "propagate-qconfig"]], "model_is_exported": [[843, "model-is-exported"]], "default_histogram_observer": [[833, "default-histogram-observer"]], "fuse_modules": [[816, "fuse-modules"]], "QConfigMapping": [[857, "qconfigmapping"]], "default_weight_only_qconfig": [[852, "default-weight-only-qconfig"]], "HistogramObserver": [[821, "histogramobserver"]], "default_qat_qconfig": [[849, "default-qat-qconfig"]], "default_placeholder_observer": [[836, "default-placeholder-observer"]], "get_default_qconfig_mapping": [[859, "get-default-qconfig-mapping"]], "quantize_qat": [[866, "quantize-qat"]], "get_observer_state_dict": [[838, "get-observer-state-dict"]], "MovingAverageMinMaxObserver": [[823, "movingaverageminmaxobserver"]], "PerChannelMinMaxObserver": [[827, "perchannelminmaxobserver"]], "default_qat_qconfig_v2": [[850, "default-qat-qconfig-v2"]], "convert_fx": [[862, "convert-fx"]], "float16_dynamic_qconfig": [[853, "float16-dynamic-qconfig"]], "prepare_fx": [[864, "prepare-fx"]], "ObserverBase": [[826, "observerbase"]], "quantize_dynamic": [[861, "quantize-dynamic"]], "prepare": [[840, "prepare"]], "default_per_channel_weight_observer": [[835, "default-per-channel-weight-observer"]], "conv1d": [[774, "conv1d"]], "add_quant_dequant": [[793, "add-quant-dequant"]], "default_fused_per_channel_wt_fake_quant": [[807, "default-fused-per-channel-wt-fake-quant"]], "FakeQuantize": [[801, "fakequantize"]], "FixedQParamsFakeQuantize": [[803, "fixedqparamsfakequantize"]], "conv3d": [[776, "conv3d"]], "DTypeConfig": [[796, "dtypeconfig"]], "clamp": [[773, "clamp"]], "BackendPatternConfig": [[795, "backendpatternconfig"]], "celu": [[772, "celu"]], "BackendConfig": [[794, "backendconfig"]], "FakeQuantizeBase": [[802, "fakequantizebase"]], "default_fake_quant": [[805, "default-fake-quant"]], "default_fused_act_fake_quant": [[806, "default-fused-act-fake-quant"]], "interpolate": [[781, "interpolate"]], "upsample": [[787, "upsample"]], "enable_observer": [[815, "enable-observer"]], "max_pool2d": [[785, "max-pool2d"]], "upsample_bilinear": [[788, "upsample-bilinear"]], "threshold": [[786, "threshold"]], "QuantStub": [[791, "quantstub"]], "ObservationType": [[798, "observationtype"]], "QuantWrapper": [[792, "quantwrapper"]], "FusedMovingAvgObsFakeQuantize": [[804, "fusedmovingavgobsfakequantize"]], "adaptive_avg_pool3d": [[769, "adaptive-avg-pool3d"]], "conv2d": [[775, "conv2d"]], "disable_fake_quant": [[812, "disable-fake-quant"]], "hardsigmoid": [[778, "hardsigmoid"]], "default_histogram_fake_quant": [[809, "default-histogram-fake-quant"]], "linear": [[783, "linear"]], "max_pool1d": [[784, "max-pool1d"]], "default_per_channel_weight_fake_quant": [[810, "default-per-channel-weight-fake-quant"]], "hardswish": [[779, "hardswish"]], "upsample_nearest": [[789, "upsample-nearest"]], "default_fused_wt_fake_quant": [[808, "default-fused-wt-fake-quant"]], "elu": [[777, "elu"]], "default_eval_fn": [[800, "default-eval-fn"]], "DeQuantStub": [[790, "dequantstub"]], "avg_pool3d": [[771, "avg-pool3d"]], "convert": [[799, "convert"]], "enable_fake_quant": [[814, "enable-fake-quant"]], "adaptive_avg_pool2d": [[768, "adaptive-avg-pool2d"]], "default_weight_fake_quant": [[811, "default-weight-fake-quant"]], "DTypeWithConstraints": [[797, "dtypewithconstraints"]], "leaky_relu": [[782, "leaky-relu"]], "avg_pool2d": [[770, "avg-pool2d"]], "disable_observer": [[813, "disable-observer"]], "hardtanh": [[780, "hardtanh"]], "update_bn_stats": [[724, 
"update-bn-stats"]], "freeze_bn_stats": [[723, "freeze-bn-stats"]], "BNReLU3d": [[726, "bnrelu3d"], [703, "bnrelu3d"]], "ConvBn2d": [[715, "convbn2d"], [705, "convbn2d"]], "ConvReLU2d": [[728, "convrelu2d"], [720, "convrelu2d"], [711, "convrelu2d"]], "LinearReLU": [[722, "linearrelu"], [730, "linearrelu"], [731, "linearrelu"], [713, "linearrelu"]], "ConvBn3d": [[716, "convbn3d"], [706, "convbn3d"]], "ConvBn1d": [[714, "convbn1d"], [704, "convbn1d"]], "ConvReLU1d": [[727, "convrelu1d"], [710, "convrelu1d"]], "FloatFunctional": [[750, "floatfunctional"]], "ConvBnReLU3d": [[719, "convbnrelu3d"], [709, "convbnrelu3d"]], "QFunctional": [[759, "qfunctional"]], "ConvReLU3d": [[729, "convrelu3d"], [721, "convrelu3d"], [712, "convrelu3d"]], "ConvBnReLU1d": [[717, "convbnrelu1d"], [707, "convbnrelu1d"]], "ConvBnReLU2d": [[718, "convbnrelu2d"], [708, "convbnrelu2d"]], "BNReLU2d": [[725, "bnrelu2d"], [702, "bnrelu2d"]], "FXFloatFunctional": [[749, "fxfloatfunctional"]], "torch._foreach_sqrt": [[675, "torch-foreach-sqrt"]], "torch.addcmul": [[690, "torch-addcmul"]], "torch._foreach_round": [[667, "torch-foreach-round"]], "torch._logging.set_logs": [[682, "torch-logging-set-logs"]], "torch.absolute": [[684, "torch-absolute"]], "torch._foreach_reciprocal": [[665, "torch-foreach-reciprocal"]], "torch.addbmm": [[688, "torch-addbmm"]], "torch.all": [[695, "torch-all"]], "torch.angle": [[700, "torch-angle"]], "torch.acosh": [[686, "torch-acosh"]], "torch._foreach_sin_": [[672, "torch-foreach-sin"]], "torch._foreach_sinh": [[673, "torch-foreach-sinh"]], "torch._foreach_sqrt_": [[676, "torch-foreach-sqrt"]], "torch.addmm": [[691, "torch-addmm"]], "torch._foreach_round_": [[668, "torch-foreach-round"]], "torch._foreach_trunc_": [[680, "torch-foreach-trunc"]], "torch._foreach_sinh_": [[674, "torch-foreach-sinh"]], "torch._foreach_neg": [[663, "torch-foreach-neg"]], "torch.addmv": [[692, "torch-addmv"]], "torch.amax": [[697, "torch-amax"]], "torch.any": [[701, "torch-any"]], "torch._foreach_zero_": [[681, "torch-foreach-zero"]], "torch._foreach_reciprocal_": [[666, "torch-foreach-reciprocal"]], "torch._foreach_sin": [[671, "torch-foreach-sin"]], "torch._foreach_sigmoid": [[669, "torch-foreach-sigmoid"]], "torch.abs": [[683, "torch-abs"]], "torch.addcdiv": [[689, "torch-addcdiv"]], "torch.adjoint": [[694, "torch-adjoint"]], "torch.aminmax": [[699, "torch-aminmax"]], "torch._foreach_sigmoid_": [[670, "torch-foreach-sigmoid"]], "torch.add": [[687, "torch-add"]], "torch.addr": [[693, "torch-addr"]], "torch._foreach_tan": [[677, "torch-foreach-tan"]], "torch._foreach_neg_": [[664, "torch-foreach-neg"]], "torch.acos": [[685, "torch-acos"]], "torch._foreach_tan_": [[678, "torch-foreach-tan"]], "torch._foreach_trunc": [[679, "torch-foreach-trunc"]], "torch.amin": [[698, "torch-amin"]], "torch.allclose": [[696, "torch-allclose"]], "torch._foreach_atan": [[633, "torch-foreach-atan"]], "torch._foreach_abs": [[627, "torch-foreach-abs"]], "torch._foreach_acos": [[629, "torch-foreach-acos"]], "torch.Tensor.view": [[619, "torch-tensor-view"]], "torch._foreach_erfc": [[643, "torch-foreach-erfc"]], "torch._foreach_exp": [[645, "torch-foreach-exp"]], "torch._foreach_log_": [[662, "torch-foreach-log"]], "torch._foreach_acos_": [[630, "torch-foreach-acos"]], "torch._foreach_frac_": [[652, "torch-foreach-frac"]], "torch._foreach_floor": [[649, "torch-foreach-floor"]], "torch._foreach_log2_": [[661, "torch-foreach-log2"]], "torch._foreach_expm1": [[647, "torch-foreach-expm1"]], "torch._foreach_frac": [[651, "torch-foreach-frac"]], 
"torch._foreach_lgamma": [[653, "torch-foreach-lgamma"]], "torch._foreach_cosh_": [[640, "torch-foreach-cosh"]], "torch._foreach_lgamma_": [[654, "torch-foreach-lgamma"]], "torch._foreach_abs_": [[628, "torch-foreach-abs"]], "torch._foreach_erf_": [[642, "torch-foreach-erf"]], "torch.Tensor.var": [[617, "torch-tensor-var"]], "torch._foreach_floor_": [[650, "torch-foreach-floor"]], "torch._foreach_cos": [[637, "torch-foreach-cos"]], "torch._foreach_exp_": [[646, "torch-foreach-exp"]], "torch._foreach_cosh": [[639, "torch-foreach-cosh"]], "torch._foreach_erf": [[641, "torch-foreach-erf"]], "torch.Tensor.untyped_storage": [[615, "torch-tensor-untyped-storage"]], "torch._foreach_log": [[655, "torch-foreach-log"]], "torch._foreach_log1p_": [[659, "torch-foreach-log1p"]], "torch.Tensor.unsqueeze_": [[614, "torch-tensor-unsqueeze"]], "torch._foreach_ceil_": [[636, "torch-foreach-ceil"]], "torch._foreach_cos_": [[638, "torch-foreach-cos"]], "torch._foreach_log10_": [[657, "torch-foreach-log10"]], "torch.Tensor.values": [[616, "torch-tensor-values"]], "torch._foreach_log10": [[656, "torch-foreach-log10"]], "torch.Tensor.vdot": [[618, "torch-tensor-vdot"]], "torch._foreach_log2": [[660, "torch-foreach-log2"]], "torch._foreach_asin": [[631, "torch-foreach-asin"]], "torch._foreach_erfc_": [[644, "torch-foreach-erfc"]], "torch.Tensor.zero_": [[625, "torch-tensor-zero"]], "torch._foreach_expm1_": [[648, "torch-foreach-expm1"]], "torch.Tensor.vsplit": [[621, "torch-tensor-vsplit"]], "torch.Tensor.xlogy_": [[624, "torch-tensor-xlogy"]], "torch._foreach_asin_": [[632, "torch-foreach-asin"]], "torch._foreach_ceil": [[635, "torch-foreach-ceil"]], "torch._assert": [[626, "torch-assert"]], "torch._foreach_log1p": [[658, "torch-foreach-log1p"]], "torch.Tensor.xlogy": [[623, "torch-tensor-xlogy"]], "torch._foreach_atan_": [[634, "torch-foreach-atan"]], "torch.Tensor.view_as": [[620, "torch-tensor-view-as"]], "torch.Tensor.unique_consecutive": [[612, "torch-tensor-unique-consecutive"]], "torch.Tensor.unsqueeze": [[613, "torch-tensor-unsqueeze"]], "torch.Tensor.where": [[622, "torch-tensor-where"]], "torch.Tensor.take_along_dim": [[575, "torch-tensor-take-along-dim"]], "torch.Tensor.tan_": [[577, "torch-tensor-tan"]], "torch.Tensor.swapdims": [[571, "torch-tensor-swapdims"]], "torch.Tensor.trunc": [[603, "torch-tensor-trunc"]], "torch.Tensor.unfold": [[609, "torch-tensor-unfold"]], "torch.Tensor.to": [[582, "torch-tensor-to"]], "torch.Tensor.true_divide": [[601, "torch-tensor-true-divide"]], "torch.Tensor.tile": [[581, "torch-tensor-tile"]], "torch.Tensor.subtract_": [[566, "torch-tensor-subtract"]], "torch.Tensor.tolist": [[591, "torch-tensor-tolist"]], "torch.Tensor.to_sparse_coo": [[588, "torch-tensor-to-sparse-coo"]], "torch.Tensor.trace": [[593, "torch-tensor-trace"]], "torch.Tensor.triu_": [[600, "torch-tensor-triu"]], "torch.Tensor.trunc_": [[604, "torch-tensor-trunc"]], "torch.Tensor.unbind": [[607, "torch-tensor-unbind"]], "torch.Tensor.unflatten": [[608, "torch-tensor-unflatten"]], "torch.Tensor.tanh": [[578, "torch-tensor-tanh"]], "torch.Tensor.to_dense": [[583, "torch-tensor-to-dense"]], "torch.Tensor.sub_": [[564, "torch-tensor-sub"]], "torch.Tensor.triu": [[599, "torch-tensor-triu"]], "torch.Tensor.swapaxes": [[570, "torch-tensor-swapaxes"]], "torch.Tensor.to_sparse_csc": [[589, "torch-tensor-to-sparse-csc"]], "torch.Tensor.transpose_": [[595, "torch-tensor-transpose"]], "torch.Tensor.transpose": [[594, "torch-tensor-transpose"]], "torch.Tensor.tensor_split": [[580, "torch-tensor-tensor-split"]], 
"torch.Tensor.subtract": [[565, "torch-tensor-subtract"]], "torch.Tensor.to_sparse_bsc": [[586, "torch-tensor-to-sparse-bsc"]], "torch.Tensor.sum": [[567, "torch-tensor-sum"]], "torch.Tensor.topk": [[592, "torch-tensor-topk"]], "torch.Tensor.tril_": [[598, "torch-tensor-tril"]], "torch.Tensor.uniform_": [[610, "torch-tensor-uniform"]], "torch.Tensor.t_": [[573, "torch-tensor-t"]], "torch.Tensor.sub": [[563, "torch-tensor-sub"]], "torch.Tensor.t": [[572, "torch-tensor-t"]], "torch.Tensor.to_sparse": [[585, "torch-tensor-to-sparse"]], "torch.Tensor.tril": [[597, "torch-tensor-tril"]], "torch.Tensor.tan": [[576, "torch-tensor-tan"]], "torch.Tensor.svd": [[569, "torch-tensor-svd"]], "torch.Tensor.triangular_solve": [[596, "torch-tensor-triangular-solve"]], "torch.Tensor.unique": [[611, "torch-tensor-unique"]], "torch.Tensor.storage_type": [[561, "torch-tensor-storage-type"]], "torch.Tensor.sum_to_size": [[568, "torch-tensor-sum-to-size"]], "torch.Tensor.to_mkldnn": [[584, "torch-tensor-to-mkldnn"]], "torch.Tensor.stride": [[562, "torch-tensor-stride"]], "torch.Tensor.to_sparse_bsr": [[587, "torch-tensor-to-sparse-bsr"]], "torch.Tensor.type": [[605, "torch-tensor-type"]], "torch.Tensor.take": [[574, "torch-tensor-take"]], "torch.Tensor.type_as": [[606, "torch-tensor-type-as"]], "torch.Tensor.to_sparse_csr": [[590, "torch-tensor-to-sparse-csr"]], "torch.Tensor.tanh_": [[579, "torch-tensor-tanh"]], "torch.Tensor.true_divide_": [[602, "torch-tensor-true-divide"]], "torch.Tensor.sgn_": [[524, "torch-tensor-sgn"]], "torch.Tensor.row_indices": [[511, "torch-tensor-row-indices"]], "torch.Tensor.scatter_": [[515, "torch-tensor-scatter"]], "torch.Tensor.sign_": [[531, "torch-tensor-sign"]], "torch.Tensor.scatter_add_": [[517, "torch-tensor-scatter-add"]], "torch.Tensor.squeeze_": [[555, "torch-tensor-squeeze"]], "torch.Tensor.sinc": [[535, "torch-tensor-sinc"]], "torch.Tensor.share_memory_": [[526, "torch-tensor-share-memory"]], "torch.Tensor.short": [[527, "torch-tensor-short"]], "torch.Tensor.sparse_dim": [[545, "torch-tensor-sparse-dim"]], "torch.Tensor.sqrt_": [[551, "torch-tensor-sqrt"]], "torch.Tensor.select_scatter": [[521, "torch-tensor-select-scatter"]], "torch.Tensor.softmax": [[543, "torch-tensor-softmax"]], "torch.Tensor.rsqrt": [[512, "torch-tensor-rsqrt"]], "torch.Tensor.signbit": [[532, "torch-tensor-signbit"]], "torch.Tensor.sgn": [[523, "torch-tensor-sgn"]], "torch.Tensor.set_": [[522, "torch-tensor-set"]], "torch.Tensor.rsqrt_": [[513, "torch-tensor-rsqrt"]], "torch.Tensor.sigmoid_": [[529, "torch-tensor-sigmoid"]], "torch.Tensor.squeeze": [[554, "torch-tensor-squeeze"]], "torch.Tensor.sigmoid": [[528, "torch-tensor-sigmoid"]], "torch.Tensor.sparse_resize_": [[547, "torch-tensor-sparse-resize"]], "torch.Tensor.square_": [[553, "torch-tensor-square"]], "torch.Tensor.smm": [[542, "torch-tensor-smm"]], "torch.Tensor.split": [[549, "torch-tensor-split"]], "torch.Tensor.sparse_resize_and_clear_": [[548, "torch-tensor-sparse-resize-and-clear"]], "torch.Tensor.sinc_": [[536, "torch-tensor-sinc"]], "torch.Tensor.sparse_mask": [[546, "torch-tensor-sparse-mask"]], "torch.Tensor.sinh_": [[538, "torch-tensor-sinh"]], "torch.Tensor.sqrt": [[550, "torch-tensor-sqrt"]], "torch.Tensor.sin": [[533, "torch-tensor-sin"]], "torch.Tensor.std": [[557, "torch-tensor-std"]], "torch.Tensor.scatter_reduce": [[518, "torch-tensor-scatter-reduce"]], "torch.Tensor.sinh": [[537, "torch-tensor-sinh"]], "torch.Tensor.storage_offset": [[560, "torch-tensor-storage-offset"]], "torch.Tensor.sspaddmm": [[556, 
"torch-tensor-sspaddmm"]], "torch.Tensor.storage": [[559, "torch-tensor-storage"]], "torch.Tensor.shape": [[525, "torch-tensor-shape"]], "torch.Tensor.sign": [[530, "torch-tensor-sign"]], "torch.Tensor.slogdet": [[541, "torch-tensor-slogdet"]], "torch.Tensor.round_": [[510, "torch-tensor-round"]], "torch.Tensor.stft": [[558, "torch-tensor-stft"]], "torch.Tensor.square": [[552, "torch-tensor-square"]], "torch.Tensor.scatter_add": [[516, "torch-tensor-scatter-add"]], "torch.Tensor.scatter_reduce_": [[519, "torch-tensor-scatter-reduce"]], "torch.Tensor.size": [[539, "torch-tensor-size"]], "torch.Tensor.scatter": [[514, "torch-tensor-scatter"]], "torch.Tensor.sort": [[544, "torch-tensor-sort"]], "torch.Tensor.sin_": [[534, "torch-tensor-sin"]], "torch.Tensor.slice_scatter": [[540, "torch-tensor-slice-scatter"]], "torch.Tensor.select": [[520, "torch-tensor-select"]], "torch.Tensor.q_per_channel_zero_points": [[476, "torch-tensor-q-per-channel-zero-points"]], "torch.Tensor.round": [[509, "torch-tensor-round"]], "torch.Tensor.numpy": [[460, "torch-tensor-numpy"]], "torch.Tensor.remainder": [[491, "torch-tensor-remainder"]], "torch.Tensor.outer": [[463, "torch-tensor-outer"]], "torch.Tensor.repeat_interleave": [[496, "torch-tensor-repeat-interleave"]], "torch.Tensor.remainder_": [[492, "torch-tensor-remainder"]], "torch.Tensor.reciprocal_": [[487, "torch-tensor-reciprocal"]], "torch.Tensor.pinverse": [[466, "torch-tensor-pinverse"]], "torch.Tensor.reshape": [[499, "torch-tensor-reshape"]], "torch.Tensor.ormqr": [[462, "torch-tensor-ormqr"]], "torch.Tensor.pin_memory": [[465, "torch-tensor-pin-memory"]], "torch.Tensor.ravel": [[484, "torch-tensor-ravel"]], "torch.Tensor.renorm_": [[494, "torch-tensor-renorm"]], "torch.Tensor.q_per_channel_axis": [[474, "torch-tensor-q-per-channel-axis"]], "torch.Tensor.orgqr": [[461, "torch-tensor-orgqr"]], "torch.Tensor.resize_as_": [[502, "torch-tensor-resize-as"]], "torch.Tensor.qscheme": [[480, "torch-tensor-qscheme"]], "torch.Tensor.resolve_neg": [[504, "torch-tensor-resolve-neg"]], "torch.Tensor.retains_grad": [[506, "torch-tensor-retains-grad"]], "torch.Tensor.resolve_conj": [[503, "torch-tensor-resolve-conj"]], "torch.Tensor.roll": [[507, "torch-tensor-roll"]], "torch.Tensor.positive": [[469, "torch-tensor-positive"]], "torch.Tensor.register_post_accumulate_grad_hook": [[490, "torch-tensor-register-post-accumulate-grad-hook"]], "torch.Tensor.repeat": [[495, "torch-tensor-repeat"]], "torch.Tensor.renorm": [[493, "torch-tensor-renorm"]], "torch.Tensor.random_": [[483, "torch-tensor-random"]], "torch.Tensor.polygamma": [[467, "torch-tensor-polygamma"]], "torch.Tensor.quantile": [[481, "torch-tensor-quantile"]], "torch.Tensor.numel": [[459, "torch-tensor-numel"]], "torch.Tensor.real": [[485, "torch-tensor-real"]], "torch.Tensor.record_stream": [[488, "torch-tensor-record-stream"]], "torch.Tensor.reshape_as": [[500, "torch-tensor-reshape-as"]], "torch.Tensor.pow_": [[471, "torch-tensor-pow"]], "torch.Tensor.register_hook": [[489, "torch-tensor-register-hook"]], "torch.Tensor.qr": [[479, "torch-tensor-qr"]], "torch.Tensor.q_scale": [[477, "torch-tensor-q-scale"]], "torch.Tensor.permute": [[464, "torch-tensor-permute"]], "torch.Tensor.requires_grad_": [[498, "torch-tensor-requires-grad"]], "torch.Tensor.pow": [[470, "torch-tensor-pow"]], "torch.Tensor.prod": [[472, "torch-tensor-prod"]], "torch.Tensor.rad2deg": [[482, "torch-tensor-rad2deg"]], "torch.Tensor.polygamma_": [[468, "torch-tensor-polygamma"]], "torch.Tensor.put_": [[473, "torch-tensor-put"]], 
"torch.Tensor.q_zero_point": [[478, "torch-tensor-q-zero-point"]], "torch.Tensor.reciprocal": [[486, "torch-tensor-reciprocal"]], "torch.Tensor.q_per_channel_scales": [[475, "torch-tensor-q-per-channel-scales"]], "torch.Tensor.requires_grad": [[497, "torch-tensor-requires-grad"]], "torch.Tensor.retain_grad": [[505, "torch-tensor-retain-grad"]], "torch.Tensor.rot90": [[508, "torch-tensor-rot90"]], "torch.Tensor.resize_": [[501, "torch-tensor-resize"]], "torch.Tensor.negative_": [[445, "torch-tensor-negative"]], "torch.Tensor.ndim": [[438, "torch-tensor-ndim"]], "torch.Tensor.mvlgamma": [[427, "torch-tensor-mvlgamma"]], "torch.Tensor.multinomial": [[423, "torch-tensor-multinomial"]], "torch.Tensor.nbytes": [[437, "torch-tensor-nbytes"]], "torch.Tensor.nextafter_": [[453, "torch-tensor-nextafter"]], "torch.Tensor.mm": [[415, "torch-tensor-mm"]], "torch.Tensor.neg_": [[443, "torch-tensor-neg"]], "torch.Tensor.nan_to_num": [[429, "torch-tensor-nan-to-num"]], "torch.Tensor.nansum": [[434, "torch-tensor-nansum"]], "torch.Tensor.ne_": [[441, "torch-tensor-ne"]], "torch.Tensor.matrix_power": [[408, "torch-tensor-matrix-power"]], "torch.Tensor.moveaxis": [[418, "torch-tensor-moveaxis"]], "torch.Tensor.nanmedian": [[432, "torch-tensor-nanmedian"]], "torch.Tensor.mul_": [[422, "torch-tensor-mul"]], "torch.Tensor.ndimension": [[439, "torch-tensor-ndimension"]], "torch.Tensor.new_full": [[448, "torch-tensor-new-full"]], "torch.Tensor.new_zeros": [[451, "torch-tensor-new-zeros"]], "torch.Tensor.movedim": [[419, "torch-tensor-movedim"]], "torch.Tensor.narrow": [[435, "torch-tensor-narrow"]], "torch.Tensor.module_load": [[417, "torch-tensor-module-load"]], "torch.Tensor.mul": [[421, "torch-tensor-mul"]], "torch.Tensor.neg": [[442, "torch-tensor-neg"]], "torch.Tensor.negative": [[444, "torch-tensor-negative"]], "torch.Tensor.new_ones": [[449, "torch-tensor-new-ones"]], "torch.Tensor.nonzero": [[454, "torch-tensor-nonzero"]], "torch.Tensor.mean": [[411, "torch-tensor-mean"]], "torch.Tensor.norm": [[455, "torch-tensor-norm"]], "torch.Tensor.maximum": [[410, "torch-tensor-maximum"]], "torch.Tensor.multiply": [[424, "torch-tensor-multiply"]], "torch.Tensor.nanquantile": [[433, "torch-tensor-nanquantile"]], "torch.Tensor.min": [[413, "torch-tensor-min"]], "torch.Tensor.nextafter": [[452, "torch-tensor-nextafter"]], "torch.Tensor.mv": [[426, "torch-tensor-mv"]], "torch.Tensor.mvlgamma_": [[428, "torch-tensor-mvlgamma"]], "torch.Tensor.max": [[409, "torch-tensor-max"]], "torch.Tensor.multiply_": [[425, "torch-tensor-multiply"]], "torch.Tensor.normal_": [[456, "torch-tensor-normal"]], "torch.Tensor.not_equal_": [[458, "torch-tensor-not-equal"]], "torch.Tensor.nan_to_num_": [[430, "torch-tensor-nan-to-num"]], "torch.Tensor.nanmean": [[431, "torch-tensor-nanmean"]], "torch.Tensor.new_empty": [[447, "torch-tensor-new-empty"]], "torch.Tensor.msort": [[420, "torch-tensor-msort"]], "torch.Tensor.not_equal": [[457, "torch-tensor-not-equal"]], "torch.Tensor.ne": [[440, "torch-tensor-ne"]], "torch.Tensor.nelement": [[446, "torch-tensor-nelement"]], "torch.Tensor.new_tensor": [[450, "torch-tensor-new-tensor"]], "torch.Tensor.median": [[412, "torch-tensor-median"]], "torch.Tensor.narrow_copy": [[436, "torch-tensor-narrow-copy"]], "torch.Tensor.mode": [[416, "torch-tensor-mode"]], "torch.Tensor.minimum": [[414, "torch-tensor-minimum"]]}, "indexentries": {"gradscaler (class in torch.cuda.amp)": [[0, "torch.cuda.amp.GradScaler"]], "autocast (class in torch)": [[0, "torch.autocast"]], "autocast (class in torch.cpu.amp)": [[0, 
"torch.cpu.amp.autocast"]], "autocast (class in torch.cuda.amp)": [[0, "torch.cuda.amp.autocast"]], "custom_bwd() (in module torch.amp)": [[0, "torch.amp.custom_bwd"]], "custom_bwd() (in module torch.cuda.amp)": [[0, "torch.cuda.amp.custom_bwd"]], "custom_fwd() (in module torch.amp)": [[0, "torch.amp.custom_fwd"]], "custom_fwd() (in module torch.cuda.amp)": [[0, "torch.cuda.amp.custom_fwd"]], "is_autocast_available() (in module torch.amp.autocast_mode)": [[0, "torch.amp.autocast_mode.is_autocast_available"]], "module": [[0, "module-torch.amp"], [0, "module-torch.amp.autocast_mode"], [0, "module-torch.amp.grad_scaler"], [0, "module-torch.cpu.amp"], [0, "module-torch.cpu.amp.autocast_mode"], [0, "module-torch.cpu.amp.grad_scaler"], [0, "module-torch.cuda.amp"], [0, "module-torch.cuda.amp.autocast_mode"], [0, "module-torch.cuda.amp.common"], [0, "module-torch.cuda.amp.grad_scaler"], [1, "module-torch.autograd"], [1, "module-torch.autograd.anomaly_mode"], [1, "module-torch.autograd.forward_ad"], [1, "module-torch.autograd.function"], [1, "module-torch.autograd.functional"], [1, "module-torch.autograd.grad_mode"], [1, "module-torch.autograd.gradcheck"], [1, "module-torch.autograd.graph"], [1, "module-torch.autograd.profiler"], [1, "module-torch.autograd.profiler_legacy"], [1, "module-torch.autograd.profiler_util"], [1, "module-torch.autograd.variable"], [2, "module-torch.backends"], [2, "module-torch.backends.cpu"], [2, "module-torch.backends.cuda"], [2, "module-torch.backends.cudnn"], [2, "module-torch.backends.cudnn.rnn"], [2, "module-torch.backends.mha"], [2, "module-torch.backends.mkl"], [2, "module-torch.backends.mkldnn"], [2, "module-torch.backends.mps"], [2, "module-torch.backends.nnpack"], [2, "module-torch.backends.openmp"], [2, "module-torch.backends.opt_einsum"], [2, "module-torch.backends.quantized"], [2, "module-torch.backends.xeon"], [2, "module-torch.backends.xeon.run_cpu"], [2, "module-torch.backends.xnnpack"], [3, "module-torch.utils.benchmark"], [3, "module-torch.utils.benchmark.examples"], [3, "module-torch.utils.benchmark.op_fuzzers"], [3, "module-torch.utils.benchmark.utils"], [3, "module-torch.utils.benchmark.utils.valgrind_wrapper"], [4, "module-torch.utils.bottleneck"], [13, "module-torch.__config__"], [16, "module-torch.cpu"], [17, "module-torch.cuda"], [17, "module-torch.cuda.comm"], [17, "module-torch.cuda.error"], [17, "module-torch.cuda.graphs"], [17, "module-torch.cuda.jiterator"], [17, "module-torch.cuda.memory"], [17, "module-torch.cuda.nccl"], [17, "module-torch.cuda.nvtx"], [17, "module-torch.cuda.profiler"], [17, "module-torch.cuda.random"], [17, "module-torch.cuda.sparse"], [17, "module-torch.cuda.streams"], [18, "module-torch.cuda._sanitizer"], [19, "module-torch.cuda.tunable"], [23, "module-torch.utils.data"], [23, "module-torch.utils.data.datapipes"], [23, "module-torch.utils.data.datapipes.dataframe"], [23, "module-torch.utils.data.datapipes.iter"], [23, "module-torch.utils.data.datapipes.map"], [23, "module-torch.utils.data.datapipes.utils"], [27, "module-torch.utils.deterministic"], [28, "module-torch.distributed"], [28, "module-torch.distributed.algorithms"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.default_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks"], [28, 
"module-torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.post_localSGD_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks"], [28, "module-torch.distributed.algorithms.join"], [28, "module-torch.distributed.algorithms.model_averaging"], [28, "module-torch.distributed.algorithms.model_averaging.averagers"], [28, "module-torch.distributed.algorithms.model_averaging.hierarchical_model_averager"], [28, "module-torch.distributed.algorithms.model_averaging.utils"], [28, "module-torch.distributed.argparse_util"], [28, "module-torch.distributed.c10d_logger"], [28, "module-torch.distributed.checkpoint.api"], [28, "module-torch.distributed.checkpoint.default_planner"], [28, "module-torch.distributed.checkpoint.filesystem"], [28, "module-torch.distributed.checkpoint.metadata"], [28, "module-torch.distributed.checkpoint.optimizer"], [28, "module-torch.distributed.checkpoint.planner"], [28, "module-torch.distributed.checkpoint.planner_helpers"], [28, "module-torch.distributed.checkpoint.resharding"], [28, "module-torch.distributed.checkpoint.state_dict"], [28, "module-torch.distributed.checkpoint.state_dict_loader"], [28, "module-torch.distributed.checkpoint.state_dict_saver"], [28, "module-torch.distributed.checkpoint.stateful"], [28, "module-torch.distributed.checkpoint.storage"], [28, "module-torch.distributed.checkpoint.utils"], [28, "module-torch.distributed.collective_utils"], [28, "module-torch.distributed.constants"], [28, "module-torch.distributed.device_mesh"], [28, "module-torch.distributed.distributed_c10d"], [28, "module-torch.distributed.elastic"], [28, "module-torch.distributed.elastic.agent.server.api"], [28, "module-torch.distributed.elastic.agent.server.local_elastic_agent"], [28, "module-torch.distributed.elastic.events.api"], [28, "module-torch.distributed.elastic.events.handlers"], [28, "module-torch.distributed.elastic.metrics.api"], [28, "module-torch.distributed.elastic.multiprocessing.api"], [28, "module-torch.distributed.elastic.multiprocessing.errors.error_handler"], [28, "module-torch.distributed.elastic.multiprocessing.errors.handlers"], [28, "module-torch.distributed.elastic.multiprocessing.redirects"], [28, "module-torch.distributed.elastic.multiprocessing.tail_log"], [28, "module-torch.distributed.elastic.rendezvous.api"], [28, "module-torch.distributed.elastic.rendezvous.c10d_rendezvous_backend"], [28, "module-torch.distributed.elastic.rendezvous.dynamic_rendezvous"], [28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous"], [28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous_backend"], [28, "module-torch.distributed.elastic.rendezvous.etcd_server"], [28, "module-torch.distributed.elastic.rendezvous.etcd_store"], [28, "module-torch.distributed.elastic.rendezvous.static_tcp_rendezvous"], [28, "module-torch.distributed.elastic.rendezvous.utils"], [28, "module-torch.distributed.elastic.timer.api"], [28, "module-torch.distributed.elastic.timer.file_based_local_timer"], [28, "module-torch.distributed.elastic.timer.local_timer"], [28, "module-torch.distributed.elastic.utils"], [28, "module-torch.distributed.elastic.utils.api"], [28, "module-torch.distributed.elastic.utils.data"], [28, "module-torch.distributed.elastic.utils.data.cycling_iterator"], [28, "module-torch.distributed.elastic.utils.data.elastic_distributed_sampler"], [28, 
"module-torch.distributed.elastic.utils.distributed"], [28, "module-torch.distributed.elastic.utils.log_level"], [28, "module-torch.distributed.elastic.utils.logging"], [28, "module-torch.distributed.elastic.utils.store"], [28, "module-torch.distributed.fsdp.api"], [28, "module-torch.distributed.fsdp.fully_sharded_data_parallel"], [28, "module-torch.distributed.fsdp.sharded_grad_scaler"], [28, "module-torch.distributed.fsdp.wrap"], [28, "module-torch.distributed.launch"], [28, "module-torch.distributed.launcher"], [28, "module-torch.distributed.launcher.api"], [28, "module-torch.distributed.logging_handlers"], [28, "module-torch.distributed.nn"], [28, "module-torch.distributed.nn.api"], [28, "module-torch.distributed.nn.api.remote_module"], [28, "module-torch.distributed.nn.functional"], [28, "module-torch.distributed.nn.jit"], [28, "module-torch.distributed.nn.jit.instantiator"], [28, "module-torch.distributed.nn.jit.templates"], [28, "module-torch.distributed.nn.jit.templates.remote_module_template"], [28, "module-torch.distributed.optim.apply_optimizer_in_backward"], [28, "module-torch.distributed.optim.functional_adadelta"], [28, "module-torch.distributed.optim.functional_adagrad"], [28, "module-torch.distributed.optim.functional_adam"], [28, "module-torch.distributed.optim.functional_adamax"], [28, "module-torch.distributed.optim.functional_adamw"], [28, "module-torch.distributed.optim.functional_rmsprop"], [28, "module-torch.distributed.optim.functional_rprop"], [28, "module-torch.distributed.optim.functional_sgd"], [28, "module-torch.distributed.optim.named_optimizer"], [28, "module-torch.distributed.optim.optimizer"], [28, "module-torch.distributed.optim.post_localSGD_optimizer"], [28, "module-torch.distributed.optim.utils"], [28, "module-torch.distributed.optim.zero_redundancy_optimizer"], [28, "module-torch.distributed.remote_device"], [28, "module-torch.distributed.rendezvous"], [28, "module-torch.distributed.rpc.api"], [28, "module-torch.distributed.rpc.backend_registry"], [28, "module-torch.distributed.rpc.constants"], [28, "module-torch.distributed.rpc.functions"], [28, "module-torch.distributed.rpc.internal"], [28, "module-torch.distributed.rpc.options"], [28, "module-torch.distributed.rpc.rref_proxy"], [28, "module-torch.distributed.rpc.server_process_global_profiler"], [28, "module-torch.distributed.tensor"], [28, "module-torch.distributed.tensor.parallel.api"], [28, "module-torch.distributed.tensor.parallel.ddp"], [28, "module-torch.distributed.tensor.parallel.fsdp"], [28, "module-torch.distributed.tensor.parallel.input_reshard"], [28, "module-torch.distributed.tensor.parallel.loss"], [28, "module-torch.distributed.tensor.parallel.style"], [28, "module-torch.distributed.utils"], [30, "module-torch.distributed.checkpoint"], [30, "module-torch.distributed.checkpoint.format_utils"], [30, "module-torch.distributed.checkpoint.logger"], [30, "module-torch.distributed.checkpoint.logging_handlers"], [30, "module-torch.distributed.checkpoint.staging"], [32, "module-torch.distributed.optim"], [33, "module-torch.distributed.pipelining"], [33, "module-torch.distributed.pipelining.microbatch"], [33, "module-torch.distributed.pipelining.schedules"], [33, "module-torch.distributed.pipelining.stage"], [34, "module-torch.distributed.tensor.parallel"], [35, "module-torch.distributions"], [35, "module-torch.distributions.bernoulli"], [35, "module-torch.distributions.beta"], [35, "module-torch.distributions.binomial"], [35, "module-torch.distributions.categorical"], [35, 
"module-torch.distributions.cauchy"], [35, "module-torch.distributions.chi2"], [35, "module-torch.distributions.constraint_registry"], [35, "module-torch.distributions.constraints"], [35, "module-torch.distributions.continuous_bernoulli"], [35, "module-torch.distributions.dirichlet"], [35, "module-torch.distributions.distribution"], [35, "module-torch.distributions.exp_family"], [35, "module-torch.distributions.exponential"], [35, "module-torch.distributions.fishersnedecor"], [35, "module-torch.distributions.gamma"], [35, "module-torch.distributions.geometric"], [35, "module-torch.distributions.gumbel"], [35, "module-torch.distributions.half_cauchy"], [35, "module-torch.distributions.half_normal"], [35, "module-torch.distributions.independent"], [35, "module-torch.distributions.inverse_gamma"], [35, "module-torch.distributions.kl"], [35, "module-torch.distributions.kumaraswamy"], [35, "module-torch.distributions.laplace"], [35, "module-torch.distributions.lkj_cholesky"], [35, "module-torch.distributions.log_normal"], [35, "module-torch.distributions.logistic_normal"], [35, "module-torch.distributions.lowrank_multivariate_normal"], [35, "module-torch.distributions.mixture_same_family"], [35, "module-torch.distributions.multinomial"], [35, "module-torch.distributions.multivariate_normal"], [35, "module-torch.distributions.negative_binomial"], [35, "module-torch.distributions.normal"], [35, "module-torch.distributions.one_hot_categorical"], [35, "module-torch.distributions.pareto"], [35, "module-torch.distributions.poisson"], [35, "module-torch.distributions.relaxed_bernoulli"], [35, "module-torch.distributions.relaxed_categorical"], [35, "module-torch.distributions.studentT"], [35, "module-torch.distributions.transformed_distribution"], [35, "module-torch.distributions.transforms"], [35, "module-torch.distributions.uniform"], [35, "module-torch.distributions.utils"], [35, "module-torch.distributions.von_mises"], [35, "module-torch.distributions.weibull"], [35, "module-torch.distributions.wishart"], [37, "module-torch.distributed.elastic.agent"], [37, "module-torch.distributed.elastic.agent.server"], [37, "module-torch.distributed.elastic.agent.server.health_check_server"], [38, "module-torch.distributed.elastic.control_plane"], [40, "module-torch.distributed.elastic.multiprocessing.errors"], [41, "module-torch.distributed.elastic.events"], [44, "module-torch.distributed.elastic.metrics"], [45, "module-torch.distributed.elastic.multiprocessing"], [47, "module-torch.distributed.elastic.rendezvous"], [47, "module-torch.distributed.elastic.rendezvous.registry"], [48, "module-torch.distributed.run"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.handlers"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler"], [50, "module-torch.distributed.elastic.timer"], [50, "module-torch.distributed.elastic.timer.debug_info_logging"], [52, "module-torch.export"], [52, "module-torch.export.custom_obj"], [52, "module-torch.export.dynamic_shapes"], [52, "module-torch.export.exported_program"], [52, "module-torch.export.graph_signature"], [52, "module-torch.export.unflatten"], [54, "module-torch.fft"], [55, "module-torch.distributed.fsdp"], [57, "module-torch.func"], [62, "module-torch.__future__"], [63, "module-torch.futures"], [64, "module-torch.fx"], [64, "module-torch.fx.annotate"], [64, "module-torch.fx.config"], [64, "module-torch.fx.experimental"], [64, 
"module-torch.fx.experimental.accelerator_partitioner"], [64, "module-torch.fx.experimental.const_fold"], [64, "module-torch.fx.experimental.debug"], [64, "module-torch.fx.experimental.graph_gradual_typechecker"], [64, "module-torch.fx.experimental.merge_matmul"], [64, "module-torch.fx.experimental.meta_tracer"], [64, "module-torch.fx.experimental.migrate_gradual_types"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint_generator"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint_transformation"], [64, "module-torch.fx.experimental.migrate_gradual_types.operation"], [64, "module-torch.fx.experimental.migrate_gradual_types.transform_to_z3"], [64, "module-torch.fx.experimental.migrate_gradual_types.util"], [64, "module-torch.fx.experimental.migrate_gradual_types.z3_types"], [64, "module-torch.fx.experimental.normalize"], [64, "module-torch.fx.experimental.optimization"], [64, "module-torch.fx.experimental.partitioner_utils"], [64, "module-torch.fx.experimental.proxy_tensor"], [64, "module-torch.fx.experimental.recording"], [64, "module-torch.fx.experimental.refinement_types"], [64, "module-torch.fx.experimental.rewriter"], [64, "module-torch.fx.experimental.schema_type_annotation"], [64, "module-torch.fx.experimental.sym_node"], [64, "module-torch.fx.experimental.unification"], [64, "module-torch.fx.experimental.unification.core"], [64, "module-torch.fx.experimental.unification.dispatch"], [64, "module-torch.fx.experimental.unification.match"], [64, "module-torch.fx.experimental.unification.more"], [64, "module-torch.fx.experimental.unification.multipledispatch"], [64, "module-torch.fx.experimental.unification.multipledispatch.conflict"], [64, "module-torch.fx.experimental.unification.multipledispatch.core"], [64, "module-torch.fx.experimental.unification.multipledispatch.dispatcher"], [64, "module-torch.fx.experimental.unification.multipledispatch.utils"], [64, "module-torch.fx.experimental.unification.multipledispatch.variadic"], [64, "module-torch.fx.experimental.unification.unification_tools"], [64, "module-torch.fx.experimental.unification.utils"], [64, "module-torch.fx.experimental.unification.variable"], [64, "module-torch.fx.experimental.unify_refinements"], [64, "module-torch.fx.experimental.validator"], [64, "module-torch.fx.graph"], [64, "module-torch.fx.graph_module"], [64, "module-torch.fx.immutable_collections"], [64, "module-torch.fx.interpreter"], [64, "module-torch.fx.node"], [64, "module-torch.fx.operator_schemas"], [64, "module-torch.fx.passes"], [64, "module-torch.fx.passes.annotate_getitem_nodes"], [64, "module-torch.fx.passes.backends"], [64, "module-torch.fx.passes.backends.cudagraphs"], [64, "module-torch.fx.passes.dialect"], [64, "module-torch.fx.passes.dialect.common"], [64, "module-torch.fx.passes.dialect.common.cse_pass"], [64, "module-torch.fx.passes.fake_tensor_prop"], [64, "module-torch.fx.passes.graph_drawer"], [64, "module-torch.fx.passes.graph_manipulation"], [64, "module-torch.fx.passes.graph_transform_observer"], [64, "module-torch.fx.passes.infra"], [64, "module-torch.fx.passes.infra.partitioner"], [64, "module-torch.fx.passes.infra.pass_base"], [64, "module-torch.fx.passes.infra.pass_manager"], [64, "module-torch.fx.passes.net_min_base"], [64, "module-torch.fx.passes.operator_support"], [64, "module-torch.fx.passes.param_fetch"], [64, "module-torch.fx.passes.pass_manager"], [64, "module-torch.fx.passes.reinplace"], [64, 
"module-torch.fx.passes.runtime_assert"], [64, "module-torch.fx.passes.shape_prop"], [64, "module-torch.fx.passes.split_module"], [64, "module-torch.fx.passes.split_utils"], [64, "module-torch.fx.passes.splitter_base"], [64, "module-torch.fx.passes.tests"], [64, "module-torch.fx.passes.tests.test_pass_manager"], [64, "module-torch.fx.passes.tools_common"], [64, "module-torch.fx.passes.utils"], [64, "module-torch.fx.passes.utils.common"], [64, "module-torch.fx.passes.utils.fuser_utils"], [64, "module-torch.fx.passes.utils.matcher_utils"], [64, "module-torch.fx.passes.utils.matcher_with_name_node_map_utils"], [64, "module-torch.fx.passes.utils.source_matcher_utils"], [64, "module-torch.fx.proxy"], [64, "module-torch.fx.subgraph_rewriter"], [64, "module-torch.fx.tensor_type"], [64, "module-torch.fx.traceback"], [65, "module-torch.fx.experimental.symbolic_shapes"], [2011, "module-torch.hub"], [2013, "module-torch.jit"], [2013, "module-torch.jit.annotations"], [2013, "module-torch.jit.frontend"], [2013, "module-torch.jit.generate_bytecode"], [2013, "module-torch.jit.mobile"], [2013, "module-torch.jit.quantized"], [2014, "module-torch.jit.supported_ops"], [2018, "module-torch.jit.unsupported_tensor_ops"], [2019, "module-torch.utils.jit"], [2020, "module-torch.library"], [2021, "module-torch.linalg"], [2022, "module-torch._logging"], [2023, "module-torch.masked"], [2023, "module-torch.masked.maskedtensor"], [2023, "module-torch.masked.maskedtensor.binary"], [2023, "module-torch.masked.maskedtensor.core"], [2023, "module-torch.masked.maskedtensor.creation"], [2023, "module-torch.masked.maskedtensor.passthrough"], [2023, "module-torch.masked.maskedtensor.reductions"], [2023, "module-torch.masked.maskedtensor.unary"], [2027, "module-torch.utils.model_zoo"], [2028, "module-torch.utils.module_tracker"], [2029, "module-torch.monitor"], [2030, "module-torch.mps"], [2030, "module-torch.mps.event"], [2030, "module-torch.mps.profiler"], [2031, "module-torch.mtia"], [2032, "module-torch.multiprocessing"], [2032, "module-torch.multiprocessing.pool"], [2032, "module-torch.multiprocessing.queue"], [2032, "module-torch.multiprocessing.reductions"], [2032, "module-torch.multiprocessing.spawn"], [2035, "module-torch.nested"], [2036, "module-torch.nn"], [2036, "module-torch.nn.backends"], [2036, "module-torch.nn.backends.thnn"], [2036, "module-torch.nn.common_types"], [2036, "module-torch.nn.cpp"], [2036, "module-torch.nn.functional"], [2036, "module-torch.nn.grad"], [2036, "module-torch.nn.init"], [2036, "module-torch.nn.modules"], [2036, "module-torch.nn.modules.activation"], [2036, "module-torch.nn.modules.adaptive"], [2036, "module-torch.nn.modules.batchnorm"], [2036, "module-torch.nn.modules.channelshuffle"], [2036, "module-torch.nn.modules.container"], [2036, "module-torch.nn.modules.conv"], [2036, "module-torch.nn.modules.distance"], [2036, "module-torch.nn.modules.dropout"], [2036, "module-torch.nn.modules.flatten"], [2036, "module-torch.nn.modules.fold"], [2036, "module-torch.nn.modules.instancenorm"], [2036, "module-torch.nn.modules.lazy"], [2036, "module-torch.nn.modules.linear"], [2036, "module-torch.nn.modules.loss"], [2036, "module-torch.nn.modules.module"], [2036, "module-torch.nn.modules.normalization"], [2036, "module-torch.nn.modules.padding"], [2036, "module-torch.nn.modules.pixelshuffle"], [2036, "module-torch.nn.modules.pooling"], [2036, "module-torch.nn.modules.rnn"], [2036, "module-torch.nn.modules.sparse"], [2036, "module-torch.nn.modules.transformer"], [2036, 
"module-torch.nn.modules.upsampling"], [2036, "module-torch.nn.modules.utils"], [2036, "module-torch.nn.parallel"], [2036, "module-torch.nn.parallel.comm"], [2036, "module-torch.nn.parallel.distributed"], [2036, "module-torch.nn.parallel.parallel_apply"], [2036, "module-torch.nn.parallel.replicate"], [2036, "module-torch.nn.parallel.scatter_gather"], [2036, "module-torch.nn.parameter"], [2036, "module-torch.nn.utils"], [2036, "module-torch.nn.utils.clip_grad"], [2036, "module-torch.nn.utils.convert_parameters"], [2036, "module-torch.nn.utils.fusion"], [2036, "module-torch.nn.utils.init"], [2036, "module-torch.nn.utils.memory_format"], [2036, "module-torch.nn.utils.parametrizations"], [2036, "module-torch.nn.utils.parametrize"], [2036, "module-torch.nn.utils.prune"], [2036, "module-torch.nn.utils.rnn"], [2036, "module-torch.nn.utils.stateless"], [2037, "module-torch.nn.attention"], [2038, "module-torch.nn.attention.bias"], [2062, "module-torch.onnx.errors"], [2062, "module-torch.onnx.operators"], [2062, "module-torch.onnx.symbolic_caffe2"], [2062, "module-torch.onnx.symbolic_helper"], [2062, "module-torch.onnx.symbolic_opset10"], [2062, "module-torch.onnx.symbolic_opset11"], [2062, "module-torch.onnx.symbolic_opset12"], [2062, "module-torch.onnx.symbolic_opset13"], [2062, "module-torch.onnx.symbolic_opset14"], [2062, "module-torch.onnx.symbolic_opset15"], [2062, "module-torch.onnx.symbolic_opset16"], [2062, "module-torch.onnx.symbolic_opset17"], [2062, "module-torch.onnx.symbolic_opset18"], [2062, "module-torch.onnx.symbolic_opset19"], [2062, "module-torch.onnx.symbolic_opset20"], [2062, "module-torch.onnx.symbolic_opset7"], [2062, "module-torch.onnx.symbolic_opset8"], [2062, "module-torch.onnx.symbolic_opset9"], [2062, "module-torch.onnx.utils"], [2062, "module-torch.onnx.verification"], [2065, "module-torch.onnx"], [2067, "module-torch.optim"], [2067, "module-torch.optim.adadelta"], [2067, "module-torch.optim.adagrad"], [2067, "module-torch.optim.adam"], [2067, "module-torch.optim.adamax"], [2067, "module-torch.optim.adamw"], [2067, "module-torch.optim.asgd"], [2067, "module-torch.optim.lbfgs"], [2067, "module-torch.optim.lr_scheduler"], [2067, "module-torch.optim.nadam"], [2067, "module-torch.optim.optimizer"], [2067, "module-torch.optim.radam"], [2067, "module-torch.optim.rmsprop"], [2067, "module-torch.optim.rprop"], [2067, "module-torch.optim.sgd"], [2067, "module-torch.optim.sparse_adam"], [2067, "module-torch.optim.swa_utils"], [2068, "module-torch.package"], [2068, "module-torch.package.analyze"], [2068, "module-torch.package.analyze.find_first_use_of_broken_modules"], [2068, "module-torch.package.analyze.is_from_package"], [2068, "module-torch.package.analyze.trace_dependencies"], [2068, "module-torch.package.file_structure_representation"], [2068, "module-torch.package.find_file_dependencies"], [2068, "module-torch.package.glob_group"], [2068, "module-torch.package.importer"], [2068, "module-torch.package.package_exporter"], [2068, "module-torch.package.package_importer"], [2069, "module-torch.profiler"], [2069, "module-torch.profiler.itt"], [2069, "module-torch.profiler.profiler"], [2069, "module-torch.profiler.python_tracer"], [2070, "module-torch.ao"], [2070, "module-torch.ao.nn"], [2070, "module-torch.ao.nn.intrinsic.modules.fused"], [2070, "module-torch.ao.nn.intrinsic.qat.modules.conv_fused"], [2070, "module-torch.ao.nn.intrinsic.qat.modules.linear_fused"], [2070, "module-torch.ao.nn.intrinsic.qat.modules.linear_relu"], [2070, 
"module-torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu"], [2070, "module-torch.ao.nn.intrinsic.quantized.modules.bn_relu"], [2070, "module-torch.ao.nn.intrinsic.quantized.modules.conv_add"], [2070, "module-torch.ao.nn.intrinsic.quantized.modules.conv_relu"], [2070, "module-torch.ao.nn.intrinsic.quantized.modules.linear_relu"], [2070, "module-torch.ao.nn.qat.dynamic.modules.linear"], [2070, "module-torch.ao.nn.qat.modules.conv"], [2070, "module-torch.ao.nn.qat.modules.embedding_ops"], [2070, "module-torch.ao.nn.qat.modules.linear"], [2070, "module-torch.ao.nn.quantizable"], [2070, "module-torch.ao.nn.quantizable.modules"], [2070, "module-torch.ao.nn.quantizable.modules.activation"], [2070, "module-torch.ao.nn.quantizable.modules.rnn"], [2070, "module-torch.ao.nn.quantized"], [2070, "module-torch.ao.nn.quantized.dynamic.modules.conv"], [2070, "module-torch.ao.nn.quantized.dynamic.modules.linear"], [2070, "module-torch.ao.nn.quantized.dynamic.modules.rnn"], [2070, "module-torch.ao.nn.quantized.modules.activation"], [2070, "module-torch.ao.nn.quantized.modules.batchnorm"], [2070, "module-torch.ao.nn.quantized.modules.conv"], [2070, "module-torch.ao.nn.quantized.modules.dropout"], [2070, "module-torch.ao.nn.quantized.modules.embedding_ops"], [2070, "module-torch.ao.nn.quantized.modules.functional_modules"], [2070, "module-torch.ao.nn.quantized.modules.linear"], [2070, "module-torch.ao.nn.quantized.modules.normalization"], [2070, "module-torch.ao.nn.quantized.modules.rnn"], [2070, "module-torch.ao.nn.quantized.modules.utils"], [2070, "module-torch.ao.nn.quantized.reference"], [2070, "module-torch.ao.nn.quantized.reference.modules"], [2070, "module-torch.ao.nn.quantized.reference.modules.conv"], [2070, "module-torch.ao.nn.quantized.reference.modules.linear"], [2070, "module-torch.ao.nn.quantized.reference.modules.rnn"], [2070, "module-torch.ao.nn.quantized.reference.modules.sparse"], [2070, "module-torch.ao.nn.quantized.reference.modules.utils"], [2070, "module-torch.ao.nn.sparse"], [2070, "module-torch.ao.nn.sparse.quantized"], [2070, "module-torch.ao.nn.sparse.quantized.dynamic"], [2070, "module-torch.ao.nn.sparse.quantized.dynamic.linear"], [2070, "module-torch.ao.nn.sparse.quantized.linear"], [2070, "module-torch.ao.nn.sparse.quantized.utils"], [2070, "module-torch.ao.ns"], [2070, "module-torch.ao.ns.fx"], [2070, "module-torch.ao.ns.fx.graph_matcher"], [2070, "module-torch.ao.ns.fx.graph_passes"], [2070, "module-torch.ao.ns.fx.mappings"], [2070, "module-torch.ao.ns.fx.n_shadows_utils"], [2070, "module-torch.ao.ns.fx.ns_types"], [2070, "module-torch.ao.ns.fx.pattern_utils"], [2070, "module-torch.ao.ns.fx.qconfig_multi_mapping"], [2070, "module-torch.ao.ns.fx.utils"], [2070, "module-torch.ao.ns.fx.weight_utils"], [2070, "module-torch.ao.pruning"], [2070, "module-torch.ao.pruning.scheduler"], [2070, "module-torch.ao.pruning.scheduler.base_scheduler"], [2070, "module-torch.ao.pruning.scheduler.cubic_scheduler"], [2070, "module-torch.ao.pruning.scheduler.lambda_scheduler"], [2070, "module-torch.ao.pruning.sparsifier"], [2070, "module-torch.ao.pruning.sparsifier.base_sparsifier"], [2070, "module-torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier"], [2070, "module-torch.ao.pruning.sparsifier.utils"], [2070, "module-torch.ao.pruning.sparsifier.weight_norm_sparsifier"], [2070, "module-torch.ao.quantization"], [2070, "module-torch.ao.quantization.backend_config"], [2070, "module-torch.ao.quantization.backend_config.backend_config"], [2070, 
"module-torch.ao.quantization.backend_config.executorch"], [2070, "module-torch.ao.quantization.backend_config.fbgemm"], [2070, "module-torch.ao.quantization.backend_config.native"], [2070, "module-torch.ao.quantization.backend_config.observation_type"], [2070, "module-torch.ao.quantization.backend_config.onednn"], [2070, "module-torch.ao.quantization.backend_config.qnnpack"], [2070, "module-torch.ao.quantization.backend_config.tensorrt"], [2070, "module-torch.ao.quantization.backend_config.utils"], [2070, "module-torch.ao.quantization.backend_config.x86"], [2070, "module-torch.ao.quantization.fake_quantize"], [2070, "module-torch.ao.quantization.fuse_modules"], [2070, "module-torch.ao.quantization.fuser_method_mappings"], [2070, "module-torch.ao.quantization.fx"], [2070, "module-torch.ao.quantization.fx.convert"], [2070, "module-torch.ao.quantization.fx.custom_config"], [2070, "module-torch.ao.quantization.fx.fuse"], [2070, "module-torch.ao.quantization.fx.fuse_handler"], [2070, "module-torch.ao.quantization.fx.graph_module"], [2070, "module-torch.ao.quantization.fx.lower_to_fbgemm"], [2070, "module-torch.ao.quantization.fx.lower_to_qnnpack"], [2070, "module-torch.ao.quantization.fx.lstm_utils"], [2070, "module-torch.ao.quantization.fx.match_utils"], [2070, "module-torch.ao.quantization.fx.pattern_utils"], [2070, "module-torch.ao.quantization.fx.prepare"], [2070, "module-torch.ao.quantization.fx.qconfig_mapping_utils"], [2070, "module-torch.ao.quantization.fx.quantize_handler"], [2070, "module-torch.ao.quantization.fx.tracer"], [2070, "module-torch.ao.quantization.fx.utils"], [2070, "module-torch.ao.quantization.observer"], [2070, "module-torch.ao.quantization.pt2e.duplicate_dq_pass"], [2070, "module-torch.ao.quantization.pt2e.export_utils"], [2070, "module-torch.ao.quantization.pt2e.graph_utils"], [2070, "module-torch.ao.quantization.pt2e.port_metadata_pass"], [2070, "module-torch.ao.quantization.pt2e.prepare"], [2070, "module-torch.ao.quantization.pt2e.qat_utils"], [2070, "module-torch.ao.quantization.pt2e.representation.rewrite"], [2070, "module-torch.ao.quantization.pt2e.utils"], [2070, "module-torch.ao.quantization.qconfig"], [2070, "module-torch.ao.quantization.qconfig_mapping"], [2070, "module-torch.ao.quantization.quant_type"], [2070, "module-torch.ao.quantization.quantization_mappings"], [2070, "module-torch.ao.quantization.quantize_fx"], [2070, "module-torch.ao.quantization.quantize_jit"], [2070, "module-torch.ao.quantization.quantize_pt2e"], [2070, "module-torch.ao.quantization.quantizer.composable_quantizer"], [2070, "module-torch.ao.quantization.quantizer.embedding_quantizer"], [2070, "module-torch.ao.quantization.quantizer.quantizer"], [2070, "module-torch.ao.quantization.quantizer.utils"], [2070, "module-torch.ao.quantization.quantizer.x86_inductor_quantizer"], [2070, "module-torch.ao.quantization.quantizer.xnnpack_quantizer"], [2070, "module-torch.ao.quantization.quantizer.xnnpack_quantizer_utils"], [2070, "module-torch.ao.quantization.stubs"], [2070, "module-torch.ao.quantization.utils"], [2070, "module-torch.nn.intrinsic.modules.fused"], [2070, "module-torch.nn.intrinsic.qat.modules.conv_fused"], [2070, "module-torch.nn.intrinsic.qat.modules.linear_fused"], [2070, "module-torch.nn.intrinsic.qat.modules.linear_relu"], [2070, "module-torch.nn.intrinsic.quantized.dynamic.modules.linear_relu"], [2070, "module-torch.nn.intrinsic.quantized.modules.bn_relu"], [2070, "module-torch.nn.intrinsic.quantized.modules.conv_relu"], [2070, 
"module-torch.nn.intrinsic.quantized.modules.linear_relu"], [2070, "module-torch.nn.qat.dynamic.modules.linear"], [2070, "module-torch.nn.qat.modules.conv"], [2070, "module-torch.nn.qat.modules.embedding_ops"], [2070, "module-torch.nn.qat.modules.linear"], [2070, "module-torch.nn.quantizable.modules.activation"], [2070, "module-torch.nn.quantizable.modules.rnn"], [2070, "module-torch.nn.quantized.dynamic.modules.conv"], [2070, "module-torch.nn.quantized.dynamic.modules.linear"], [2070, "module-torch.nn.quantized.dynamic.modules.rnn"], [2070, "module-torch.nn.quantized.functional"], [2070, "module-torch.nn.quantized.modules.activation"], [2070, "module-torch.nn.quantized.modules.batchnorm"], [2070, "module-torch.nn.quantized.modules.conv"], [2070, "module-torch.nn.quantized.modules.dropout"], [2070, "module-torch.nn.quantized.modules.embedding_ops"], [2070, "module-torch.nn.quantized.modules.functional_modules"], [2070, "module-torch.nn.quantized.modules.linear"], [2070, "module-torch.nn.quantized.modules.normalization"], [2070, "module-torch.nn.quantized.modules.rnn"], [2070, "module-torch.nn.quantized.modules.utils"], [2070, "module-torch.quantization.fake_quantize"], [2070, "module-torch.quantization.fuse_modules"], [2070, "module-torch.quantization.fuser_method_mappings"], [2070, "module-torch.quantization.fx.convert"], [2070, "module-torch.quantization.fx.fuse"], [2070, "module-torch.quantization.fx.fusion_patterns"], [2070, "module-torch.quantization.fx.graph_module"], [2070, "module-torch.quantization.fx.match_utils"], [2070, "module-torch.quantization.fx.pattern_utils"], [2070, "module-torch.quantization.fx.prepare"], [2070, "module-torch.quantization.fx.quantization_patterns"], [2070, "module-torch.quantization.fx.quantization_types"], [2070, "module-torch.quantization.fx.utils"], [2070, "module-torch.quantization.observer"], [2070, "module-torch.quantization.qconfig"], [2070, "module-torch.quantization.quant_type"], [2070, "module-torch.quantization.quantization_mappings"], [2070, "module-torch.quantization.quantize"], [2070, "module-torch.quantization.quantize_fx"], [2070, "module-torch.quantization.quantize_jit"], [2070, "module-torch.quantization.stubs"], [2070, "module-torch.quantization.utils"], [2073, "module-torch.ao.nn.intrinsic"], [2073, "module-torch.ao.nn.intrinsic.modules"], [2073, "module-torch.ao.nn.intrinsic.qat"], [2073, "module-torch.ao.nn.intrinsic.qat.modules"], [2073, "module-torch.ao.nn.intrinsic.quantized"], [2073, "module-torch.ao.nn.intrinsic.quantized.dynamic"], [2073, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules"], [2073, "module-torch.ao.nn.intrinsic.quantized.modules"], [2073, "module-torch.ao.nn.qat"], [2073, "module-torch.ao.nn.qat.dynamic"], [2073, "module-torch.ao.nn.qat.dynamic.modules"], [2073, "module-torch.ao.nn.qat.modules"], [2073, "module-torch.ao.nn.quantized.dynamic"], [2073, "module-torch.ao.nn.quantized.dynamic.modules"], [2073, "module-torch.ao.nn.quantized.functional"], [2073, "module-torch.ao.nn.quantized.modules"], [2073, "module-torch.ao.quantization.pt2e"], [2073, "module-torch.ao.quantization.pt2e.generate_numeric_debug_handle"], [2073, "module-torch.ao.quantization.pt2e.representation"], [2073, "module-torch.ao.quantization.quantizer"], [2073, "module-torch.nn.intrinsic"], [2073, "module-torch.nn.intrinsic.modules"], [2073, "module-torch.nn.intrinsic.qat"], [2073, "module-torch.nn.intrinsic.qat.modules"], [2073, "module-torch.nn.intrinsic.quantized"], [2073, "module-torch.nn.intrinsic.quantized.dynamic"], [2073, 
"module-torch.nn.intrinsic.quantized.dynamic.modules"], [2073, "module-torch.nn.intrinsic.quantized.modules"], [2073, "module-torch.nn.qat"], [2073, "module-torch.nn.qat.dynamic"], [2073, "module-torch.nn.qat.dynamic.modules"], [2073, "module-torch.nn.qat.modules"], [2073, "module-torch.nn.quantizable"], [2073, "module-torch.nn.quantizable.modules"], [2073, "module-torch.nn.quantized"], [2073, "module-torch.nn.quantized.dynamic"], [2073, "module-torch.nn.quantized.dynamic.modules"], [2073, "module-torch.nn.quantized.modules"], [2073, "module-torch.quantization"], [2073, "module-torch.quantization.fx"], [2074, "module-torch.random"], [2075, "module-torch.distributed.autograd"], [2075, "module-torch.distributed.rpc"], [2078, "module-torch.signal"], [2078, "module-torch.signal.windows"], [2080, "module-torch.sparse"], [2081, "module-torch.special"], [2085, "module-torch.utils.tensorboard"], [2087, "module-torch.testing"], [2089, "module-torch"], [2089, "module-torch.contrib"], [2089, "module-torch.functional"], [2089, "module-torch.quasirandom"], [2089, "module-torch.return_types"], [2089, "module-torch.serialization"], [2089, "module-torch.signal.windows.windows"], [2089, "module-torch.sparse.semi_structured"], [2089, "module-torch.storage"], [2089, "module-torch.torch_version"], [2089, "module-torch.types"], [2089, "module-torch.utils.backcompat"], [2089, "module-torch.utils.hipify"], [2089, "module-torch.utils.model_dump"], [2089, "module-torch.utils.viz"], [2089, "module-torch.version"], [2090, "module-torch.ao.ns._numeric_suite"], [2091, "module-torch.ao.ns._numeric_suite_fx"], [2094, "module-torch.compiler"], [2112, "module-torch.overrides"], [2117, "module-torch.utils"], [2117, "module-torch.utils.backend_registration"], [2117, "module-torch.utils.benchmark.examples.blas_compare_setup"], [2117, "module-torch.utils.benchmark.examples.compare"], [2117, "module-torch.utils.benchmark.examples.fuzzer"], [2117, "module-torch.utils.benchmark.examples.op_benchmark"], [2117, "module-torch.utils.benchmark.examples.simple_timeit"], [2117, "module-torch.utils.benchmark.examples.spectral_ops_fuzz_test"], [2117, "module-torch.utils.benchmark.op_fuzzers.binary"], [2117, "module-torch.utils.benchmark.op_fuzzers.sparse_binary"], [2117, "module-torch.utils.benchmark.op_fuzzers.sparse_unary"], [2117, "module-torch.utils.benchmark.op_fuzzers.spectral"], [2117, "module-torch.utils.benchmark.op_fuzzers.unary"], [2117, "module-torch.utils.benchmark.utils.common"], [2117, "module-torch.utils.benchmark.utils.compare"], [2117, "module-torch.utils.benchmark.utils.compile"], [2117, "module-torch.utils.benchmark.utils.cpp_jit"], [2117, "module-torch.utils.benchmark.utils.fuzzer"], [2117, "module-torch.utils.benchmark.utils.sparse_fuzzer"], [2117, "module-torch.utils.benchmark.utils.timer"], [2117, "module-torch.utils.benchmark.utils.valgrind_wrapper.timer_interface"], [2117, "module-torch.utils.bundled_inputs"], [2117, "module-torch.utils.checkpoint"], [2117, "module-torch.utils.collect_env"], [2117, "module-torch.utils.cpp_backtrace"], [2117, "module-torch.utils.cpp_extension"], [2117, "module-torch.utils.data.backward_compatibility"], [2117, "module-torch.utils.data.dataloader"], [2117, "module-torch.utils.data.datapipes.dataframe.dataframe_wrapper"], [2117, "module-torch.utils.data.datapipes.dataframe.dataframes"], [2117, "module-torch.utils.data.datapipes.dataframe.datapipes"], [2117, "module-torch.utils.data.datapipes.dataframe.structures"], [2117, "module-torch.utils.data.datapipes.datapipe"], [2117, 
"module-torch.utils.data.datapipes.gen_pyi"], [2117, "module-torch.utils.data.datapipes.iter.callable"], [2117, "module-torch.utils.data.datapipes.iter.combinatorics"], [2117, "module-torch.utils.data.datapipes.iter.combining"], [2117, "module-torch.utils.data.datapipes.iter.filelister"], [2117, "module-torch.utils.data.datapipes.iter.fileopener"], [2117, "module-torch.utils.data.datapipes.iter.grouping"], [2117, "module-torch.utils.data.datapipes.iter.routeddecoder"], [2117, "module-torch.utils.data.datapipes.iter.selecting"], [2117, "module-torch.utils.data.datapipes.iter.sharding"], [2117, "module-torch.utils.data.datapipes.iter.streamreader"], [2117, "module-torch.utils.data.datapipes.iter.utils"], [2117, "module-torch.utils.data.datapipes.map.callable"], [2117, "module-torch.utils.data.datapipes.map.combinatorics"], [2117, "module-torch.utils.data.datapipes.map.combining"], [2117, "module-torch.utils.data.datapipes.map.grouping"], [2117, "module-torch.utils.data.datapipes.map.utils"], [2117, "module-torch.utils.data.datapipes.utils.common"], [2117, "module-torch.utils.data.datapipes.utils.decoder"], [2117, "module-torch.utils.data.datapipes.utils.snapshot"], [2117, "module-torch.utils.data.dataset"], [2117, "module-torch.utils.data.distributed"], [2117, "module-torch.utils.data.graph"], [2117, "module-torch.utils.data.graph_settings"], [2117, "module-torch.utils.data.sampler"], [2117, "module-torch.utils.dlpack"], [2117, "module-torch.utils.file_baton"], [2117, "module-torch.utils.flop_counter"], [2117, "module-torch.utils.hipify.constants"], [2117, "module-torch.utils.hipify.cuda_to_hip_mappings"], [2117, "module-torch.utils.hipify.hipify_python"], [2117, "module-torch.utils.hipify.version"], [2117, "module-torch.utils.hooks"], [2117, "module-torch.utils.jit.log_extract"], [2117, "module-torch.utils.mkldnn"], [2117, "module-torch.utils.mobile_optimizer"], [2117, "module-torch.utils.show_pickle"], [2117, "module-torch.utils.tensorboard.summary"], [2117, "module-torch.utils.tensorboard.writer"], [2117, "module-torch.utils.throughput_benchmark"], [2117, "module-torch.utils.weak"], [2118, "module-torch.xpu"], [2118, "module-torch.xpu.random"], [2118, "module-torch.xpu.streams"]], "torch.amp": [[0, "module-torch.amp"]], "torch.amp.autocast_mode": [[0, "module-torch.amp.autocast_mode"]], "torch.amp.grad_scaler": [[0, "module-torch.amp.grad_scaler"]], "torch.cpu.amp": [[0, "module-torch.cpu.amp"]], "torch.cpu.amp.autocast_mode": [[0, "module-torch.cpu.amp.autocast_mode"]], "torch.cpu.amp.grad_scaler": [[0, "module-torch.cpu.amp.grad_scaler"]], "torch.cuda.amp": [[0, "module-torch.cuda.amp"]], "torch.cuda.amp.autocast_mode": [[0, "module-torch.cuda.amp.autocast_mode"]], "torch.cuda.amp.common": [[0, "module-torch.cuda.amp.common"]], "torch.cuda.amp.grad_scaler": [[0, "module-torch.cuda.amp.grad_scaler"]], "function (class in torch.autograd)": [[1, "torch.autograd.Function"]], "gradientedge (class in torch.autograd.graph)": [[1, "torch.autograd.graph.GradientEdge"]], "allow_mutation_on_saved_tensors (class in torch.autograd.graph)": [[1, "torch.autograd.graph.allow_mutation_on_saved_tensors"]], "detect_anomaly (class in torch.autograd)": [[1, "torch.autograd.detect_anomaly"]], "disable_saved_tensors_hooks (class in torch.autograd.graph)": [[1, "torch.autograd.graph.disable_saved_tensors_hooks"]], "emit_itt (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.emit_itt"]], "emit_nvtx (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.emit_nvtx"]], 
"get_gradient_edge() (in module torch.autograd.graph)": [[1, "torch.autograd.graph.get_gradient_edge"]], "profile (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.profile"]], "register_multi_grad_hook (class in torch.autograd.graph)": [[1, "torch.autograd.graph.register_multi_grad_hook"]], "save_on_cpu (class in torch.autograd.graph)": [[1, "torch.autograd.graph.save_on_cpu"]], "saved_tensors_hooks (class in torch.autograd.graph)": [[1, "torch.autograd.graph.saved_tensors_hooks"]], "set_detect_anomaly (class in torch.autograd)": [[1, "torch.autograd.set_detect_anomaly"]], "torch.autograd": [[1, "module-torch.autograd"]], "torch.autograd.anomaly_mode": [[1, "module-torch.autograd.anomaly_mode"]], "torch.autograd.forward_ad": [[1, "module-torch.autograd.forward_ad"]], "torch.autograd.function": [[1, "module-torch.autograd.function"]], "torch.autograd.functional": [[1, "module-torch.autograd.functional"]], "torch.autograd.grad_mode": [[1, "module-torch.autograd.grad_mode"]], "torch.autograd.gradcheck": [[1, "module-torch.autograd.gradcheck"]], "torch.autograd.graph": [[1, "module-torch.autograd.graph"]], "torch.autograd.profiler": [[1, "module-torch.autograd.profiler"]], "torch.autograd.profiler_legacy": [[1, "module-torch.autograd.profiler_legacy"]], "torch.autograd.profiler_util": [[1, "module-torch.autograd.profiler_util"]], "torch.autograd.variable": [[1, "module-torch.autograd.variable"]], "sdpaparams (class in torch.backends.cuda)": [[2, "torch.backends.cuda.SDPAParams"]], "allow_bf16_reduced_precision_reduction (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_bf16_reduced_precision_reduction"]], "allow_fp16_reduced_precision_reduction (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction"]], "allow_tf32 (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_tf32"]], "allow_tf32 (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.allow_tf32"]], "benchmark (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.benchmark"]], "benchmark_limit (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.benchmark_limit"]], "can_use_cudnn_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_cudnn_attention"]], "can_use_efficient_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_efficient_attention"]], "can_use_flash_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_flash_attention"]], "clear() (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.clear"]], "cudnn_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.cudnn_sdp_enabled"]], "cufft_plan_cache (in module torch.backends.cuda)": [[2, "torch.backends.cuda.cufft_plan_cache"]], "deterministic (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.deterministic"]], "enable_cudnn_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_cudnn_sdp"]], "enable_flash_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_flash_sdp"]], "enable_math_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_math_sdp"]], "enable_mem_efficient_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_mem_efficient_sdp"]], "enabled (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.enabled"]], "enabled (in module torch.backends.opt_einsum)": [[2, 
"torch.backends.opt_einsum.enabled"]], "flags() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.flags"]], "flash_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.flash_sdp_enabled"]], "get_cpu_capability() (in module torch.backends.cpu)": [[2, "torch.backends.cpu.get_cpu_capability"]], "get_fastpath_enabled() (in module torch.backends.mha)": [[2, "torch.backends.mha.get_fastpath_enabled"]], "get_opt_einsum() (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.get_opt_einsum"]], "is_available() (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.is_available"]], "is_available() (in module torch.backends.mkl)": [[2, "torch.backends.mkl.is_available"]], "is_available() (in module torch.backends.mkldnn)": [[2, "torch.backends.mkldnn.is_available"]], "is_available() (in module torch.backends.mps)": [[2, "torch.backends.mps.is_available"]], "is_available() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.is_available"]], "is_available() (in module torch.backends.openmp)": [[2, "torch.backends.openmp.is_available"]], "is_available() (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.is_available"]], "is_built() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.is_built"]], "is_built() (in module torch.backends.mps)": [[2, "torch.backends.mps.is_built"]], "math_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.math_sdp_enabled"]], "max_size (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.max_size"]], "mem_efficient_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.mem_efficient_sdp_enabled"]], "preferred_blas_library() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.preferred_blas_library"]], "preferred_linalg_library() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.preferred_linalg_library"]], "sdp_kernel() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.sdp_kernel"]], "set_fastpath_enabled() (in module torch.backends.mha)": [[2, "torch.backends.mha.set_fastpath_enabled"]], "set_flags() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.set_flags"]], "size (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.size"]], "strategy (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.strategy"]], "torch.backends": [[2, "module-torch.backends"]], "torch.backends.cpu": [[2, "module-torch.backends.cpu"]], "torch.backends.cuda": [[2, "module-torch.backends.cuda"]], "torch.backends.cudnn": [[2, "module-torch.backends.cudnn"]], "torch.backends.cudnn.rnn": [[2, "module-torch.backends.cudnn.rnn"]], "torch.backends.mha": [[2, "module-torch.backends.mha"]], "torch.backends.mkl": [[2, "module-torch.backends.mkl"]], "torch.backends.mkldnn": [[2, "module-torch.backends.mkldnn"]], "torch.backends.mps": [[2, "module-torch.backends.mps"]], "torch.backends.nnpack": [[2, "module-torch.backends.nnpack"]], "torch.backends.openmp": [[2, "module-torch.backends.openmp"]], "torch.backends.opt_einsum": [[2, "module-torch.backends.opt_einsum"]], "torch.backends.quantized": [[2, "module-torch.backends.quantized"]], "torch.backends.xeon": [[2, "module-torch.backends.xeon"]], "torch.backends.xeon.run_cpu": [[2, "module-torch.backends.xeon.run_cpu"]], "torch.backends.xnnpack": [[2, "module-torch.backends.xnnpack"]], "verbose (class in torch.backends.mkl)": [[2, "torch.backends.mkl.verbose"]], "verbose (class in 
torch.backends.mkldnn)": [[2, "torch.backends.mkldnn.verbose"]], "version() (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.version"]], "callgrindstats (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.CallgrindStats"]], "compare (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Compare"]], "functioncounts (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.FunctionCounts"]], "measurement (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Measurement"]], "timer (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Timer"]], "adaptive_autorange() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.adaptive_autorange"]], "as_standardized() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.as_standardized"]], "blocked_autorange() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.blocked_autorange"]], "collect_callgrind() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.collect_callgrind"]], "colorize() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.colorize"]], "counts() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.counts"]], "delta() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.delta"]], "denoise() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.denoise"]], "extend_results() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.extend_results"]], "filter() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.filter"]], "highlight_warnings() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.highlight_warnings"]], "merge() (torch.utils.benchmark.measurement static method)": [[3, "torch.utils.benchmark.Measurement.merge"]], "print() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.print"]], "significant_figures (torch.utils.benchmark.measurement property)": [[3, "torch.utils.benchmark.Measurement.significant_figures"]], "stats() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.stats"]], "timeit() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.timeit"]], "torch.utils.benchmark": [[3, "module-torch.utils.benchmark"]], "torch.utils.benchmark.examples": [[3, "module-torch.utils.benchmark.examples"]], "torch.utils.benchmark.op_fuzzers": [[3, "module-torch.utils.benchmark.op_fuzzers"]], "torch.utils.benchmark.utils": [[3, "module-torch.utils.benchmark.utils"]], "torch.utils.benchmark.utils.valgrind_wrapper": [[3, "module-torch.utils.benchmark.utils.valgrind_wrapper"]], "transform() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.transform"]], "trim_significant_figures() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.trim_significant_figures"]], "torch.utils.bottleneck": [[4, "module-torch.utils.bottleneck"]], "checkpoint() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.checkpoint"]], "checkpoint_sequential() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.checkpoint_sequential"]], "set_checkpoint_debug_enabled() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.set_checkpoint_debug_enabled"]], "cond() (in module torch._higher_order_ops.cond)": [[12, 
"torch._higher_order_ops.cond.cond"]], "parallel_info() (in module torch.__config__)": [[13, "torch.__config__.parallel_info"]], "show() (in module torch.__config__)": [[13, "torch.__config__.show"]], "torch.__config__": [[13, "module-torch.__config__"]], "buildextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.BuildExtension"]], "cudaextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.CUDAExtension"]], "cppextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.CppExtension"]], "get_compiler_abi_compatibility_and_version() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.get_compiler_abi_compatibility_and_version"]], "include_paths() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.include_paths"]], "is_ninja_available() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.is_ninja_available"]], "load() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.load"]], "load_inline() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.load_inline"]], "verify_ninja_availability() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.verify_ninja_availability"]], "torch.cpu": [[16, "module-torch.cpu"]], "torch.cuda": [[17, "module-torch.cuda"]], "torch.cuda.comm": [[17, "module-torch.cuda.comm"]], "torch.cuda.error": [[17, "module-torch.cuda.error"]], "torch.cuda.graphs": [[17, "module-torch.cuda.graphs"]], "torch.cuda.jiterator": [[17, "module-torch.cuda.jiterator"]], "torch.cuda.memory": [[17, "module-torch.cuda.memory"]], "torch.cuda.nccl": [[17, "module-torch.cuda.nccl"]], "torch.cuda.nvtx": [[17, "module-torch.cuda.nvtx"]], "torch.cuda.profiler": [[17, "module-torch.cuda.profiler"]], "torch.cuda.random": [[17, "module-torch.cuda.random"]], "torch.cuda.sparse": [[17, "module-torch.cuda.sparse"]], "torch.cuda.streams": [[17, "module-torch.cuda.streams"]], "enable_cuda_sanitizer() (in module torch.cuda._sanitizer)": [[18, "torch.cuda._sanitizer.enable_cuda_sanitizer"]], "torch.cuda._sanitizer": [[18, "module-torch.cuda._sanitizer"]], "enable() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.enable"]], "get_filename() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_filename"]], "get_max_tuning_duration() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_max_tuning_duration"]], "get_max_tuning_iterations() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_max_tuning_iterations"]], "get_results() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_results"]], "get_validators() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_validators"]], "is_enabled() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.is_enabled"]], "read_file() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.read_file"]], "set_filename() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_filename"]], "set_max_tuning_duration() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_max_tuning_duration"]], "set_max_tuning_iterations() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_max_tuning_iterations"]], "torch.cuda.tunable": [[19, "module-torch.cuda.tunable"]], "tuning_enable() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.tuning_enable"]], "tuning_is_enabled() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.tuning_is_enabled"]], "write_file() (in 
module torch.cuda.tunable)": [[19, "torch.cuda.tunable.write_file"]], "write_file_on_exit() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.write_file_on_exit"]], "batchsampler (class in torch.utils.data)": [[23, "torch.utils.data.BatchSampler"]], "chaindataset (class in torch.utils.data)": [[23, "torch.utils.data.ChainDataset"]], "concatdataset (class in torch.utils.data)": [[23, "torch.utils.data.ConcatDataset"]], "dataloader (class in torch.utils.data)": [[23, "torch.utils.data.DataLoader"]], "dataset (class in torch.utils.data)": [[23, "torch.utils.data.Dataset"]], "distributedsampler (class in torch.utils.data.distributed)": [[23, "torch.utils.data.distributed.DistributedSampler"]], "iterabledataset (class in torch.utils.data)": [[23, "torch.utils.data.IterableDataset"]], "randomsampler (class in torch.utils.data)": [[23, "torch.utils.data.RandomSampler"]], "sampler (class in torch.utils.data)": [[23, "torch.utils.data.Sampler"]], "sequentialsampler (class in torch.utils.data)": [[23, "torch.utils.data.SequentialSampler"]], "stackdataset (class in torch.utils.data)": [[23, "torch.utils.data.StackDataset"]], "subset (class in torch.utils.data)": [[23, "torch.utils.data.Subset"]], "subsetrandomsampler (class in torch.utils.data)": [[23, "torch.utils.data.SubsetRandomSampler"]], "tensordataset (class in torch.utils.data)": [[23, "torch.utils.data.TensorDataset"]], "weightedrandomsampler (class in torch.utils.data)": [[23, "torch.utils.data.WeightedRandomSampler"]], "collate() (in module torch.utils.data._utils.collate)": [[23, "torch.utils.data._utils.collate.collate"]], "default_collate() (in module torch.utils.data)": [[23, "torch.utils.data.default_collate"]], "default_convert() (in module torch.utils.data)": [[23, "torch.utils.data.default_convert"]], "get_worker_info() (in module torch.utils.data)": [[23, "torch.utils.data.get_worker_info"]], "random_split() (in module torch.utils.data)": [[23, "torch.utils.data.random_split"]], "torch.utils.data": [[23, "module-torch.utils.data"]], "torch.utils.data.datapipes": [[23, "module-torch.utils.data.datapipes"]], "torch.utils.data.datapipes.dataframe": [[23, "module-torch.utils.data.datapipes.dataframe"]], "torch.utils.data.datapipes.iter": [[23, "module-torch.utils.data.datapipes.iter"]], "torch.utils.data.datapipes.map": [[23, "module-torch.utils.data.datapipes.map"]], "torch.utils.data.datapipes.utils": [[23, "module-torch.utils.data.datapipes.utils"]], "gradbucket (class in torch.distributed)": [[24, "torch.distributed.GradBucket"]], "powersgdstate (class in torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState"]], "__getstate__() (torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook.powersgdstate method)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState.__getstate__"]], "__setstate__() (torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook.powersgdstate method)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState.__setstate__"]], "allreduce_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.allreduce_hook"]], "batched_powersgd_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.batched_powerSGD_hook"]], "bf16_compress_hook() (in module 
torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.bf16_compress_hook"]], "bf16_compress_wrapper() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.bf16_compress_wrapper"]], "buffer() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.buffer"]], "fp16_compress_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_hook"]], "fp16_compress_wrapper() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_wrapper"]], "gradients() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.gradients"]], "index() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.index"]], "is_last() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.is_last"]], "noop_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks.noop_hook"]], "parameters() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.parameters"]], "powersgd_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.powerSGD_hook"]], "set_buffer() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.set_buffer"]], "fill_uninitialized_memory (in module torch.utils.deterministic)": [[27, "torch.utils.deterministic.fill_uninitialized_memory"]], "torch.utils.deterministic": [[27, "module-torch.utils.deterministic"]], "backend (class in torch.distributed)": [[28, "torch.distributed.Backend"]], "devicemesh (class in torch.distributed.device_mesh)": [[28, "torch.distributed.device_mesh.DeviceMesh"]], "distbackenderror (class in torch.distributed)": [[28, "torch.distributed.DistBackendError"]], "disterror (class in torch.distributed)": [[28, "torch.distributed.DistError"]], "distnetworkerror (class in torch.distributed)": [[28, "torch.distributed.DistNetworkError"]], "diststoreerror (class in torch.distributed)": [[28, "torch.distributed.DistStoreError"]], "filestore (class in torch.distributed)": [[28, "torch.distributed.FileStore"]], "hashstore (class in torch.distributed)": [[28, "torch.distributed.HashStore"]], "p2pop (class in torch.distributed)": [[28, "torch.distributed.P2POp"]], "prefixstore (class in torch.distributed)": [[28, "torch.distributed.PrefixStore"]], "reduceop (class in torch.distributed)": [[28, "torch.distributed.ReduceOp"]], "store (class in torch.distributed)": [[28, "torch.distributed.Store"]], "tcpstore (class in torch.distributed)": [[28, "torch.distributed.TCPStore"]], "work (class in torch.distributed)": [[28, "torch.distributed.Work"]], "add() (in module torch.distributed.store)": [[28, "torch.distributed.Store.add"]], "all_gather() (in module torch.distributed)": [[28, "torch.distributed.all_gather"]], "all_gather_into_tensor() (in module torch.distributed)": [[28, "torch.distributed.all_gather_into_tensor"]], "all_gather_object() (in module torch.distributed)": [[28, "torch.distributed.all_gather_object"]], "all_reduce() (in module torch.distributed)": [[28, "torch.distributed.all_reduce"]], "all_to_all() (in module torch.distributed)": 
[[28, "torch.distributed.all_to_all"]], "all_to_all_single() (in module torch.distributed)": [[28, "torch.distributed.all_to_all_single"]], "barrier() (in module torch.distributed)": [[28, "torch.distributed.barrier"]], "batch_isend_irecv() (in module torch.distributed)": [[28, "torch.distributed.batch_isend_irecv"]], "breakpoint() (in module torch.distributed)": [[28, "torch.distributed.breakpoint"]], "broadcast() (in module torch.distributed)": [[28, "torch.distributed.broadcast"]], "broadcast_object_list() (in module torch.distributed)": [[28, "torch.distributed.broadcast_object_list"]], "compare_set() (in module torch.distributed.store)": [[28, "torch.distributed.Store.compare_set"]], "delete_key() (in module torch.distributed.store)": [[28, "torch.distributed.Store.delete_key"]], "gather() (in module torch.distributed)": [[28, "torch.distributed.gather"]], "gather_object() (in module torch.distributed)": [[28, "torch.distributed.gather_object"]], "get() (in module torch.distributed.store)": [[28, "torch.distributed.Store.get"]], "get_backend() (in module torch.distributed)": [[28, "torch.distributed.get_backend"]], "get_global_rank() (in module torch.distributed)": [[28, "torch.distributed.get_global_rank"]], "get_group_rank() (in module torch.distributed)": [[28, "torch.distributed.get_group_rank"]], "get_process_group_ranks() (in module torch.distributed)": [[28, "torch.distributed.get_process_group_ranks"]], "get_rank() (in module torch.distributed)": [[28, "torch.distributed.get_rank"]], "get_world_size() (in module torch.distributed)": [[28, "torch.distributed.get_world_size"]], "init_device_mesh() (in module torch.distributed.device_mesh)": [[28, "torch.distributed.device_mesh.init_device_mesh"]], "init_process_group() (in module torch.distributed)": [[28, "torch.distributed.init_process_group"]], "irecv() (in module torch.distributed)": [[28, "torch.distributed.irecv"]], "is_available() (in module torch.distributed)": [[28, "torch.distributed.is_available"]], "is_gloo_available() (in module torch.distributed)": [[28, "torch.distributed.is_gloo_available"]], "is_initialized() (in module torch.distributed)": [[28, "torch.distributed.is_initialized"]], "is_mpi_available() (in module torch.distributed)": [[28, "torch.distributed.is_mpi_available"]], "is_nccl_available() (in module torch.distributed)": [[28, "torch.distributed.is_nccl_available"]], "is_torchelastic_launched() (in module torch.distributed)": [[28, "torch.distributed.is_torchelastic_launched"]], "isend() (in module torch.distributed)": [[28, "torch.distributed.isend"]], "monitored_barrier() (in module torch.distributed)": [[28, "torch.distributed.monitored_barrier"]], "new_group() (in module torch.distributed)": [[28, "torch.distributed.new_group"]], "num_keys() (in module torch.distributed.store)": [[28, "torch.distributed.Store.num_keys"]], "recv() (in module torch.distributed)": [[28, "torch.distributed.recv"]], "recv_object_list() (in module torch.distributed)": [[28, "torch.distributed.recv_object_list"]], "reduce() (in module torch.distributed)": [[28, "torch.distributed.reduce"]], "reduce_op (class in torch.distributed)": [[28, "torch.distributed.reduce_op"]], "reduce_scatter() (in module torch.distributed)": [[28, "torch.distributed.reduce_scatter"]], "reduce_scatter_tensor() (in module torch.distributed)": [[28, "torch.distributed.reduce_scatter_tensor"]], "register_backend() (torch.distributed.backend class method)": [[28, "torch.distributed.Backend.register_backend"]], "scatter() (in module 
torch.distributed)": [[28, "torch.distributed.scatter"]], "scatter_object_list() (in module torch.distributed)": [[28, "torch.distributed.scatter_object_list"]], "send() (in module torch.distributed)": [[28, "torch.distributed.send"]], "send_object_list() (in module torch.distributed)": [[28, "torch.distributed.send_object_list"]], "set() (in module torch.distributed.store)": [[28, "torch.distributed.Store.set"]], "set_timeout() (in module torch.distributed.store)": [[28, "torch.distributed.Store.set_timeout"]], "torch.distributed": [[28, "module-torch.distributed"]], "torch.distributed.algorithms": [[28, "module-torch.distributed.algorithms"]], "torch.distributed.algorithms.ddp_comm_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.default_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.default_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.post_localsgd_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.post_localSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks"]], "torch.distributed.algorithms.join": [[28, "module-torch.distributed.algorithms.join"]], "torch.distributed.algorithms.model_averaging": [[28, "module-torch.distributed.algorithms.model_averaging"]], "torch.distributed.algorithms.model_averaging.averagers": [[28, "module-torch.distributed.algorithms.model_averaging.averagers"]], "torch.distributed.algorithms.model_averaging.hierarchical_model_averager": [[28, "module-torch.distributed.algorithms.model_averaging.hierarchical_model_averager"]], "torch.distributed.algorithms.model_averaging.utils": [[28, "module-torch.distributed.algorithms.model_averaging.utils"]], "torch.distributed.argparse_util": [[28, "module-torch.distributed.argparse_util"]], "torch.distributed.c10d_logger": [[28, "module-torch.distributed.c10d_logger"]], "torch.distributed.checkpoint.api": [[28, "module-torch.distributed.checkpoint.api"]], "torch.distributed.checkpoint.default_planner": [[28, "module-torch.distributed.checkpoint.default_planner"]], "torch.distributed.checkpoint.filesystem": [[28, "module-torch.distributed.checkpoint.filesystem"]], "torch.distributed.checkpoint.metadata": [[28, "module-torch.distributed.checkpoint.metadata"]], "torch.distributed.checkpoint.optimizer": [[28, "module-torch.distributed.checkpoint.optimizer"]], "torch.distributed.checkpoint.planner": [[28, "module-torch.distributed.checkpoint.planner"]], "torch.distributed.checkpoint.planner_helpers": [[28, "module-torch.distributed.checkpoint.planner_helpers"]], "torch.distributed.checkpoint.resharding": [[28, "module-torch.distributed.checkpoint.resharding"]], "torch.distributed.checkpoint.state_dict": 
[[28, "module-torch.distributed.checkpoint.state_dict"]], "torch.distributed.checkpoint.state_dict_loader": [[28, "module-torch.distributed.checkpoint.state_dict_loader"]], "torch.distributed.checkpoint.state_dict_saver": [[28, "module-torch.distributed.checkpoint.state_dict_saver"]], "torch.distributed.checkpoint.stateful": [[28, "module-torch.distributed.checkpoint.stateful"]], "torch.distributed.checkpoint.storage": [[28, "module-torch.distributed.checkpoint.storage"]], "torch.distributed.checkpoint.utils": [[28, "module-torch.distributed.checkpoint.utils"]], "torch.distributed.collective_utils": [[28, "module-torch.distributed.collective_utils"]], "torch.distributed.constants": [[28, "module-torch.distributed.constants"]], "torch.distributed.device_mesh": [[28, "module-torch.distributed.device_mesh"]], "torch.distributed.distributed_c10d": [[28, "module-torch.distributed.distributed_c10d"]], "torch.distributed.elastic": [[28, "module-torch.distributed.elastic"]], "torch.distributed.elastic.agent.server.api": [[28, "module-torch.distributed.elastic.agent.server.api"]], "torch.distributed.elastic.agent.server.local_elastic_agent": [[28, "module-torch.distributed.elastic.agent.server.local_elastic_agent"]], "torch.distributed.elastic.events.api": [[28, "module-torch.distributed.elastic.events.api"]], "torch.distributed.elastic.events.handlers": [[28, "module-torch.distributed.elastic.events.handlers"]], "torch.distributed.elastic.metrics.api": [[28, "module-torch.distributed.elastic.metrics.api"]], "torch.distributed.elastic.multiprocessing.api": [[28, "module-torch.distributed.elastic.multiprocessing.api"]], "torch.distributed.elastic.multiprocessing.errors.error_handler": [[28, "module-torch.distributed.elastic.multiprocessing.errors.error_handler"]], "torch.distributed.elastic.multiprocessing.errors.handlers": [[28, "module-torch.distributed.elastic.multiprocessing.errors.handlers"]], "torch.distributed.elastic.multiprocessing.redirects": [[28, "module-torch.distributed.elastic.multiprocessing.redirects"]], "torch.distributed.elastic.multiprocessing.tail_log": [[28, "module-torch.distributed.elastic.multiprocessing.tail_log"]], "torch.distributed.elastic.rendezvous.api": [[28, "module-torch.distributed.elastic.rendezvous.api"]], "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend": [[28, "module-torch.distributed.elastic.rendezvous.c10d_rendezvous_backend"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.dynamic_rendezvous"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend": [[28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous_backend"]], "torch.distributed.elastic.rendezvous.etcd_server": [[28, "module-torch.distributed.elastic.rendezvous.etcd_server"]], "torch.distributed.elastic.rendezvous.etcd_store": [[28, "module-torch.distributed.elastic.rendezvous.etcd_store"]], "torch.distributed.elastic.rendezvous.static_tcp_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.static_tcp_rendezvous"]], "torch.distributed.elastic.rendezvous.utils": [[28, "module-torch.distributed.elastic.rendezvous.utils"]], "torch.distributed.elastic.timer.api": [[28, "module-torch.distributed.elastic.timer.api"]], "torch.distributed.elastic.timer.file_based_local_timer": [[28, "module-torch.distributed.elastic.timer.file_based_local_timer"]], 
"torch.distributed.elastic.timer.local_timer": [[28, "module-torch.distributed.elastic.timer.local_timer"]], "torch.distributed.elastic.utils": [[28, "module-torch.distributed.elastic.utils"]], "torch.distributed.elastic.utils.api": [[28, "module-torch.distributed.elastic.utils.api"]], "torch.distributed.elastic.utils.data": [[28, "module-torch.distributed.elastic.utils.data"]], "torch.distributed.elastic.utils.data.cycling_iterator": [[28, "module-torch.distributed.elastic.utils.data.cycling_iterator"]], "torch.distributed.elastic.utils.data.elastic_distributed_sampler": [[28, "module-torch.distributed.elastic.utils.data.elastic_distributed_sampler"]], "torch.distributed.elastic.utils.distributed": [[28, "module-torch.distributed.elastic.utils.distributed"]], "torch.distributed.elastic.utils.log_level": [[28, "module-torch.distributed.elastic.utils.log_level"]], "torch.distributed.elastic.utils.logging": [[28, "module-torch.distributed.elastic.utils.logging"]], "torch.distributed.elastic.utils.store": [[28, "module-torch.distributed.elastic.utils.store"]], "torch.distributed.fsdp.api": [[28, "module-torch.distributed.fsdp.api"]], "torch.distributed.fsdp.fully_sharded_data_parallel": [[28, "module-torch.distributed.fsdp.fully_sharded_data_parallel"]], "torch.distributed.fsdp.sharded_grad_scaler": [[28, "module-torch.distributed.fsdp.sharded_grad_scaler"]], "torch.distributed.fsdp.wrap": [[28, "module-torch.distributed.fsdp.wrap"]], "torch.distributed.launch": [[28, "module-torch.distributed.launch"]], "torch.distributed.launcher": [[28, "module-torch.distributed.launcher"]], "torch.distributed.launcher.api": [[28, "module-torch.distributed.launcher.api"]], "torch.distributed.logging_handlers": [[28, "module-torch.distributed.logging_handlers"]], "torch.distributed.nn": [[28, "module-torch.distributed.nn"]], "torch.distributed.nn.api": [[28, "module-torch.distributed.nn.api"]], "torch.distributed.nn.api.remote_module": [[28, "module-torch.distributed.nn.api.remote_module"]], "torch.distributed.nn.functional": [[28, "module-torch.distributed.nn.functional"]], "torch.distributed.nn.jit": [[28, "module-torch.distributed.nn.jit"]], "torch.distributed.nn.jit.instantiator": [[28, "module-torch.distributed.nn.jit.instantiator"]], "torch.distributed.nn.jit.templates": [[28, "module-torch.distributed.nn.jit.templates"]], "torch.distributed.nn.jit.templates.remote_module_template": [[28, "module-torch.distributed.nn.jit.templates.remote_module_template"]], "torch.distributed.optim.apply_optimizer_in_backward": [[28, "module-torch.distributed.optim.apply_optimizer_in_backward"]], "torch.distributed.optim.functional_adadelta": [[28, "module-torch.distributed.optim.functional_adadelta"]], "torch.distributed.optim.functional_adagrad": [[28, "module-torch.distributed.optim.functional_adagrad"]], "torch.distributed.optim.functional_adam": [[28, "module-torch.distributed.optim.functional_adam"]], "torch.distributed.optim.functional_adamax": [[28, "module-torch.distributed.optim.functional_adamax"]], "torch.distributed.optim.functional_adamw": [[28, "module-torch.distributed.optim.functional_adamw"]], "torch.distributed.optim.functional_rmsprop": [[28, "module-torch.distributed.optim.functional_rmsprop"]], "torch.distributed.optim.functional_rprop": [[28, "module-torch.distributed.optim.functional_rprop"]], "torch.distributed.optim.functional_sgd": [[28, "module-torch.distributed.optim.functional_sgd"]], "torch.distributed.optim.named_optimizer": [[28, "module-torch.distributed.optim.named_optimizer"]], 
"torch.distributed.optim.optimizer": [[28, "module-torch.distributed.optim.optimizer"]], "torch.distributed.optim.post_localsgd_optimizer": [[28, "module-torch.distributed.optim.post_localSGD_optimizer"]], "torch.distributed.optim.utils": [[28, "module-torch.distributed.optim.utils"]], "torch.distributed.optim.zero_redundancy_optimizer": [[28, "module-torch.distributed.optim.zero_redundancy_optimizer"]], "torch.distributed.remote_device": [[28, "module-torch.distributed.remote_device"]], "torch.distributed.rendezvous": [[28, "module-torch.distributed.rendezvous"]], "torch.distributed.rpc.api": [[28, "module-torch.distributed.rpc.api"]], "torch.distributed.rpc.backend_registry": [[28, "module-torch.distributed.rpc.backend_registry"]], "torch.distributed.rpc.constants": [[28, "module-torch.distributed.rpc.constants"]], "torch.distributed.rpc.functions": [[28, "module-torch.distributed.rpc.functions"]], "torch.distributed.rpc.internal": [[28, "module-torch.distributed.rpc.internal"]], "torch.distributed.rpc.options": [[28, "module-torch.distributed.rpc.options"]], "torch.distributed.rpc.rref_proxy": [[28, "module-torch.distributed.rpc.rref_proxy"]], "torch.distributed.rpc.server_process_global_profiler": [[28, "module-torch.distributed.rpc.server_process_global_profiler"]], "torch.distributed.tensor": [[28, "module-torch.distributed.tensor"]], "torch.distributed.tensor.parallel.api": [[28, "module-torch.distributed.tensor.parallel.api"]], "torch.distributed.tensor.parallel.ddp": [[28, "module-torch.distributed.tensor.parallel.ddp"]], "torch.distributed.tensor.parallel.fsdp": [[28, "module-torch.distributed.tensor.parallel.fsdp"]], "torch.distributed.tensor.parallel.input_reshard": [[28, "module-torch.distributed.tensor.parallel.input_reshard"]], "torch.distributed.tensor.parallel.loss": [[28, "module-torch.distributed.tensor.parallel.loss"]], "torch.distributed.tensor.parallel.style": [[28, "module-torch.distributed.tensor.parallel.style"]], "torch.distributed.utils": [[28, "module-torch.distributed.utils"]], "wait() (in module torch.distributed.store)": [[28, "torch.distributed.Store.wait"]], "join (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.Join"]], "joinhook (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.JoinHook"]], "joinable (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.Joinable"]], "join_device (torch.distributed.algorithms.joinable property)": [[29, "torch.distributed.algorithms.Joinable.join_device"]], "join_hook() (torch.distributed.algorithms.joinable method)": [[29, "torch.distributed.algorithms.Joinable.join_hook"]], "join_process_group (torch.distributed.algorithms.joinable property)": [[29, "torch.distributed.algorithms.Joinable.join_process_group"]], "main_hook() (torch.distributed.algorithms.joinhook method)": [[29, "torch.distributed.algorithms.JoinHook.main_hook"]], "notify_join_context() (torch.distributed.algorithms.join static method)": [[29, "torch.distributed.algorithms.Join.notify_join_context"]], "post_hook() (torch.distributed.algorithms.joinhook method)": [[29, "torch.distributed.algorithms.JoinHook.post_hook"]], "asyncstager (class in torch.distributed.checkpoint.staging)": [[30, "torch.distributed.checkpoint.staging.AsyncStager"]], "blockingasyncstager (class in torch.distributed.checkpoint.staging)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager"]], "broadcastingtorchsavereader (class in torch.distributed.checkpoint.format_utils)": [[30, 
"torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader"]], "defaultloadplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner"]], "defaultsaveplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner"]], "dynamicmetaloadplanner (class in torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner"]], "filesystemreader (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.FileSystemReader"]], "filesystemwriter (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.FileSystemWriter"]], "loadplan (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.LoadPlan"]], "loadplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.LoadPlanner"]], "readitem (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.ReadItem"]], "saveplan (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.SavePlan"]], "saveplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.SavePlanner"]], "statedictoptions (class in torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.StateDictOptions"]], "stateful (class in torch.distributed.checkpoint.stateful)": [[30, "torch.distributed.checkpoint.stateful.Stateful"]], "storagereader (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.StorageReader"]], "storagewriter (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.StorageWriter"]], "writeitem (class in torch.distributed.checkpoint.planner)": [[30, "torch.distributed.checkpoint.planner.WriteItem"]], "async_save() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.async_save"]], "checkpoint_id (torch.distributed.checkpoint.filesystemreader property)": [[30, "torch.distributed.checkpoint.FileSystemReader.checkpoint_id"]], "commit_tensor() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.commit_tensor"]], "create_global_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.create_global_plan"]], "create_global_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.create_global_plan"]], "create_local_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.create_local_plan"]], "create_local_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.create_local_plan"]], "dcp_to_torch_save() (in module torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.dcp_to_torch_save"]], "finish() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.finish"]], "finish_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.finish_plan"]], "finish_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.finish_plan"]], "get_model_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_model_state_dict"]], "get_optimizer_state_dict() (in module 
torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_optimizer_state_dict"]], "get_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_state_dict"]], "load() (in module torch.distributed.checkpoint.state_dict_loader)": [[30, "torch.distributed.checkpoint.state_dict_loader.load"]], "load_bytes() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.load_bytes"]], "load_state_dict() (in module torch.distributed.checkpoint.state_dict_loader)": [[30, "torch.distributed.checkpoint.state_dict_loader.load_state_dict"]], "load_state_dict() (torch.distributed.checkpoint.stateful.stateful method)": [[30, "torch.distributed.checkpoint.stateful.Stateful.load_state_dict"]], "lookup_object() (torch.distributed.checkpoint.defaultsaveplanner method)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner.lookup_object"]], "lookup_tensor() (torch.distributed.checkpoint.defaultloadplanner method)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner.lookup_tensor"]], "prepare_global_plan() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.prepare_global_plan"]], "prepare_global_plan() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.prepare_global_plan"]], "prepare_global_plan() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.prepare_global_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.prepare_local_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.prepare_local_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.prepare_local_plan"]], "read_data() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.read_data"]], "read_data() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.read_data"]], "read_metadata() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.read_metadata"]], "read_metadata() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.read_metadata"]], "reset() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.reset"]], "reset() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.reset"]], "reset() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.reset"]], "resolve_bytes() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.resolve_bytes"]], "resolve_data() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.resolve_data"]], "resolve_tensor() (torch.distributed.checkpoint.loadplanner method)": [[30, 
"torch.distributed.checkpoint.LoadPlanner.resolve_tensor"]], "save() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.save"]], "save_state_dict() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.save_state_dict"]], "set_model_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_model_state_dict"]], "set_optimizer_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_optimizer_state_dict"]], "set_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_state_dict"]], "set_up_planner() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.set_up_planner"]], "set_up_planner() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.set_up_planner"]], "set_up_planner() (torch.distributed.checkpoint.format_utils.dynamicmetaloadplanner method)": [[30, "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner.set_up_planner"]], "set_up_storage_reader() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.set_up_storage_reader"]], "set_up_storage_reader() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.set_up_storage_reader"]], "set_up_storage_writer() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.set_up_storage_writer"]], "should_synchronize_after_execute (torch.distributed.checkpoint.staging.asyncstager property)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.should_synchronize_after_execute"]], "stage() (torch.distributed.checkpoint.filesystemwriter method)": [[30, "torch.distributed.checkpoint.FileSystemWriter.stage"]], "stage() (torch.distributed.checkpoint.staging.asyncstager method)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.stage"]], "stage() (torch.distributed.checkpoint.staging.blockingasyncstager method)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager.stage"]], "state_dict() (torch.distributed.checkpoint.stateful.stateful method)": [[30, "torch.distributed.checkpoint.stateful.Stateful.state_dict"]], "storage_meta() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.storage_meta"]], "synchronize_staging() (torch.distributed.checkpoint.staging.asyncstager method)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.synchronize_staging"]], "synchronize_staging() (torch.distributed.checkpoint.staging.blockingasyncstager method)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager.synchronize_staging"]], "tensor_storage_size() (torch.distributed.checkpoint.planner.writeitem method)": [[30, "torch.distributed.checkpoint.planner.WriteItem.tensor_storage_size"]], "torch.distributed.checkpoint": [[30, "module-torch.distributed.checkpoint"]], "torch.distributed.checkpoint.format_utils": [[30, "module-torch.distributed.checkpoint.format_utils"]], "torch.distributed.checkpoint.logger": [[30, "module-torch.distributed.checkpoint.logger"]], "torch.distributed.checkpoint.logging_handlers": [[30, "module-torch.distributed.checkpoint.logging_handlers"]], 
"torch.distributed.checkpoint.staging": [[30, "module-torch.distributed.checkpoint.staging"]], "torch_save_to_dcp() (in module torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.torch_save_to_dcp"]], "transform_object() (torch.distributed.checkpoint.defaultsaveplanner method)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner.transform_object"]], "transform_tensor() (torch.distributed.checkpoint.defaultloadplanner method)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner.transform_tensor"]], "validate_checkpoint_id() (torch.distributed.checkpoint.storagereader class method)": [[30, "torch.distributed.checkpoint.StorageReader.validate_checkpoint_id"]], "validate_checkpoint_id() (torch.distributed.checkpoint.storagewriter class method)": [[30, "torch.distributed.checkpoint.StorageWriter.validate_checkpoint_id"]], "validate_checkpoint_id() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader class method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.validate_checkpoint_id"]], "write_data() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.write_data"]], "distributedoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.DistributedOptimizer"]], "postlocalsgdoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer"]], "zeroredundancyoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer"]], "add_param_group() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.add_param_group"]], "consolidate_state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.consolidate_state_dict"]], "join_device (torch.distributed.optim.zeroredundancyoptimizer property)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_device"]], "join_hook() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_hook"]], "join_process_group (torch.distributed.optim.zeroredundancyoptimizer property)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_process_group"]], "load_state_dict() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.load_state_dict"]], "load_state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.load_state_dict"]], "state_dict() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.state_dict"]], "state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.state_dict"]], "step() (torch.distributed.optim.distributedoptimizer method)": [[32, "torch.distributed.optim.DistributedOptimizer.step"]], "step() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.step"]], "step() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.step"]], "torch.distributed.optim": [[32, "module-torch.distributed.optim"]], "pipe (class in torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.Pipe"]], "pipelineschedulemulti (class in torch.distributed.pipelining.schedules)": 
[[33, "torch.distributed.pipelining.schedules.PipelineScheduleMulti"]], "pipelineschedulesingle (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleSingle"]], "pipelinestage (class in torch.distributed.pipelining.stage)": [[33, "torch.distributed.pipelining.stage.PipelineStage"]], "schedule1f1b (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.Schedule1F1B"]], "schedulegpipe (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleGPipe"]], "scheduleinterleaved1f1b (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleInterleaved1F1B"]], "scheduleloopedbfs (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleLoopedBFS"]], "splitpoint (class in torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.SplitPoint"]], "tensorchunkspec (class in torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.TensorChunkSpec"]], "build_stage() (in module torch.distributed.pipelining.stage)": [[33, "torch.distributed.pipelining.stage.build_stage"]], "merge_chunks() (in module torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.merge_chunks"]], "pipe_split() (in module torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.pipe_split"]], "pipeline() (in module torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.pipeline"]], "split_args_kwargs_into_chunks() (in module torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.split_args_kwargs_into_chunks"]], "step() (torch.distributed.pipelining.schedules.pipelineschedulemulti method)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleMulti.step"]], "step() (torch.distributed.pipelining.schedules.pipelineschedulesingle method)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleSingle.step"]], "torch.distributed.pipelining": [[33, "module-torch.distributed.pipelining"]], "torch.distributed.pipelining.microbatch": [[33, "module-torch.distributed.pipelining.microbatch"]], "torch.distributed.pipelining.schedules": [[33, "module-torch.distributed.pipelining.schedules"]], "torch.distributed.pipelining.stage": [[33, "module-torch.distributed.pipelining.stage"]], "colwiseparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.ColwiseParallel"]], "preparemoduleinput (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.PrepareModuleInput"]], "preparemoduleoutput (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.PrepareModuleOutput"]], "rowwiseparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.RowwiseParallel"]], "sequenceparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.SequenceParallel"]], "loss_parallel() (in module torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.loss_parallel"]], "parallelize_module() (in module torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.parallelize_module"]], "torch.distributed.tensor.parallel": [[34, "module-torch.distributed.tensor.parallel"]], "abstransform (class in torch.distributions.transforms)": [[35, 
"torch.distributions.transforms.AbsTransform"]], "affinetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.AffineTransform"]], "bernoulli (class in torch.distributions.bernoulli)": [[35, "torch.distributions.bernoulli.Bernoulli"]], "beta (class in torch.distributions.beta)": [[35, "torch.distributions.beta.Beta"]], "binomial (class in torch.distributions.binomial)": [[35, "torch.distributions.binomial.Binomial"]], "cattransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CatTransform"]], "categorical (class in torch.distributions.categorical)": [[35, "torch.distributions.categorical.Categorical"]], "cauchy (class in torch.distributions.cauchy)": [[35, "torch.distributions.cauchy.Cauchy"]], "chi2 (class in torch.distributions.chi2)": [[35, "torch.distributions.chi2.Chi2"]], "composetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ComposeTransform"]], "constraint (class in torch.distributions.constraints)": [[35, "torch.distributions.constraints.Constraint"]], "constraintregistry (class in torch.distributions.constraint_registry)": [[35, "torch.distributions.constraint_registry.ConstraintRegistry"]], "continuousbernoulli (class in torch.distributions.continuous_bernoulli)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli"]], "corrcholeskytransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CorrCholeskyTransform"]], "cumulativedistributiontransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CumulativeDistributionTransform"]], "dirichlet (class in torch.distributions.dirichlet)": [[35, "torch.distributions.dirichlet.Dirichlet"]], "distribution (class in torch.distributions.distribution)": [[35, "torch.distributions.distribution.Distribution"]], "exptransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ExpTransform"]], "exponential (class in torch.distributions.exponential)": [[35, "torch.distributions.exponential.Exponential"]], "exponentialfamily (class in torch.distributions.exp_family)": [[35, "torch.distributions.exp_family.ExponentialFamily"]], "fishersnedecor (class in torch.distributions.fishersnedecor)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor"]], "gamma (class in torch.distributions.gamma)": [[35, "torch.distributions.gamma.Gamma"]], "geometric (class in torch.distributions.geometric)": [[35, "torch.distributions.geometric.Geometric"]], "gumbel (class in torch.distributions.gumbel)": [[35, "torch.distributions.gumbel.Gumbel"]], "halfcauchy (class in torch.distributions.half_cauchy)": [[35, "torch.distributions.half_cauchy.HalfCauchy"]], "halfnormal (class in torch.distributions.half_normal)": [[35, "torch.distributions.half_normal.HalfNormal"]], "independent (class in torch.distributions.independent)": [[35, "torch.distributions.independent.Independent"]], "independenttransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.IndependentTransform"]], "inversegamma (class in torch.distributions.inverse_gamma)": [[35, "torch.distributions.inverse_gamma.InverseGamma"]], "kumaraswamy (class in torch.distributions.kumaraswamy)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy"]], "lkjcholesky (class in torch.distributions.lkj_cholesky)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky"]], "laplace (class in torch.distributions.laplace)": [[35, 
"torch.distributions.laplace.Laplace"]], "lognormal (class in torch.distributions.log_normal)": [[35, "torch.distributions.log_normal.LogNormal"]], "logitrelaxedbernoulli (class in torch.distributions.relaxed_bernoulli)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli"]], "lowrankmultivariatenormal (class in torch.distributions.lowrank_multivariate_normal)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal"]], "lowercholeskytransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.LowerCholeskyTransform"]], "mixturesamefamily (class in torch.distributions.mixture_same_family)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily"]], "multinomial (class in torch.distributions.multinomial)": [[35, "torch.distributions.multinomial.Multinomial"]], "multivariatenormal (class in torch.distributions.multivariate_normal)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal"]], "negativebinomial (class in torch.distributions.negative_binomial)": [[35, "torch.distributions.negative_binomial.NegativeBinomial"]], "normal (class in torch.distributions.normal)": [[35, "torch.distributions.normal.Normal"]], "onehotcategorical (class in torch.distributions.one_hot_categorical)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical"]], "pareto (class in torch.distributions.pareto)": [[35, "torch.distributions.pareto.Pareto"]], "poisson (class in torch.distributions.poisson)": [[35, "torch.distributions.poisson.Poisson"]], "positivedefinitetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.PositiveDefiniteTransform"]], "powertransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.PowerTransform"]], "relaxedbernoulli (class in torch.distributions.relaxed_bernoulli)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli"]], "relaxedonehotcategorical (class in torch.distributions.relaxed_categorical)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical"]], "reshapetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ReshapeTransform"]], "sigmoidtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SigmoidTransform"]], "softmaxtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SoftmaxTransform"]], "softplustransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SoftplusTransform"]], "stacktransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.StackTransform"]], "stickbreakingtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.StickBreakingTransform"]], "studentt (class in torch.distributions.studentt)": [[35, "torch.distributions.studentT.StudentT"]], "tanhtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.TanhTransform"]], "transform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.Transform"]], "transformeddistribution (class in torch.distributions.transformed_distribution)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution"]], "uniform (class in torch.distributions.uniform)": [[35, "torch.distributions.uniform.Uniform"]], "vonmises (class in torch.distributions.von_mises)": [[35, "torch.distributions.von_mises.VonMises"]], "weibull (class in 
torch.distributions.weibull)": [[35, "torch.distributions.weibull.Weibull"]], "wishart (class in torch.distributions.wishart)": [[35, "torch.distributions.wishart.Wishart"]], "arg_constraints (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.arg_constraints"]], "arg_constraints (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.arg_constraints"]], "arg_constraints (torch.distributions.binomial.binomial attribute)": [[35, "torch.distributions.binomial.Binomial.arg_constraints"]], "arg_constraints (torch.distributions.categorical.categorical attribute)": [[35, "torch.distributions.categorical.Categorical.arg_constraints"]], "arg_constraints (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.arg_constraints"]], "arg_constraints (torch.distributions.chi2.chi2 attribute)": [[35, "torch.distributions.chi2.Chi2.arg_constraints"]], "arg_constraints (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.arg_constraints"]], "arg_constraints (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.arg_constraints"]], "arg_constraints (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.arg_constraints"]], "arg_constraints (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.arg_constraints"]], "arg_constraints (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.arg_constraints"]], "arg_constraints (torch.distributions.geometric.geometric attribute)": [[35, "torch.distributions.geometric.Geometric.arg_constraints"]], "arg_constraints (torch.distributions.gumbel.gumbel attribute)": [[35, "torch.distributions.gumbel.Gumbel.arg_constraints"]], "arg_constraints (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.arg_constraints"]], "arg_constraints (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.arg_constraints"]], "arg_constraints (torch.distributions.independent.independent attribute)": [[35, "torch.distributions.independent.Independent.arg_constraints"]], "arg_constraints (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.arg_constraints"]], "arg_constraints (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.arg_constraints"]], "arg_constraints (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.arg_constraints"]], "arg_constraints (torch.distributions.lkj_cholesky.lkjcholesky attribute)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.arg_constraints"]], "arg_constraints (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.arg_constraints"]], "arg_constraints (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.arg_constraints"]], "arg_constraints (torch.distributions.mixture_same_family.mixturesamefamily 
attribute)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.arg_constraints"]], "arg_constraints (torch.distributions.multinomial.multinomial attribute)": [[35, "torch.distributions.multinomial.Multinomial.arg_constraints"]], "arg_constraints (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.arg_constraints"]], "arg_constraints (torch.distributions.negative_binomial.negativebinomial attribute)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.arg_constraints"]], "arg_constraints (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.arg_constraints"]], "arg_constraints (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.arg_constraints"]], "arg_constraints (torch.distributions.pareto.pareto attribute)": [[35, "torch.distributions.pareto.Pareto.arg_constraints"]], "arg_constraints (torch.distributions.poisson.poisson attribute)": [[35, "torch.distributions.poisson.Poisson.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.arg_constraints"]], "arg_constraints (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.arg_constraints"]], "arg_constraints (torch.distributions.transformed_distribution.transformeddistribution attribute)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.arg_constraints"]], "arg_constraints (torch.distributions.uniform.uniform attribute)": [[35, "torch.distributions.uniform.Uniform.arg_constraints"]], "arg_constraints (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.arg_constraints"]], "arg_constraints (torch.distributions.weibull.weibull attribute)": [[35, "torch.distributions.weibull.Weibull.arg_constraints"]], "arg_constraints (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.arg_constraints"]], "batch_shape (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.batch_shape"]], "cat (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.cat"]], "cdf() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.cdf"]], "cdf() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.cdf"]], "cdf() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.cdf"]], "cdf() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.cdf"]], "cdf() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.cdf"]], "cdf() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.cdf"]], "cdf() (torch.distributions.half_normal.halfnormal method)": 
[[35, "torch.distributions.half_normal.HalfNormal.cdf"]], "cdf() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.cdf"]], "cdf() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.cdf"]], "cdf() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.cdf"]], "cdf() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.cdf"]], "cdf() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.cdf"]], "check() (torch.distributions.constraints.constraint method)": [[35, "torch.distributions.constraints.Constraint.check"]], "component_distribution (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.component_distribution"]], "concentration (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.concentration"]], "concentration0 (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.concentration0"]], "concentration1 (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.concentration1"]], "covariance_matrix (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.covariance_matrix"]], "covariance_matrix (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.covariance_matrix"]], "covariance_matrix (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.covariance_matrix"]], "dependent_property (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.dependent_property"]], "df (torch.distributions.chi2.chi2 property)": [[35, "torch.distributions.chi2.Chi2.df"]], "entropy() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.entropy"]], "entropy() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.entropy"]], "entropy() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.entropy"]], "entropy() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.entropy"]], "entropy() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.entropy"]], "entropy() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.entropy"]], "entropy() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.entropy"]], "entropy() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.entropy"]], "entropy() (torch.distributions.exp_family.exponentialfamily method)": [[35, "torch.distributions.exp_family.ExponentialFamily.entropy"]], "entropy() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.entropy"]], "entropy() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.entropy"]], "entropy() (torch.distributions.geometric.geometric method)": [[35, 
"torch.distributions.geometric.Geometric.entropy"]], "entropy() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.entropy"]], "entropy() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.entropy"]], "entropy() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.entropy"]], "entropy() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.entropy"]], "entropy() (torch.distributions.inverse_gamma.inversegamma method)": [[35, "torch.distributions.inverse_gamma.InverseGamma.entropy"]], "entropy() (torch.distributions.kumaraswamy.kumaraswamy method)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.entropy"]], "entropy() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.entropy"]], "entropy() (torch.distributions.log_normal.lognormal method)": [[35, "torch.distributions.log_normal.LogNormal.entropy"]], "entropy() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.entropy"]], "entropy() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.entropy"]], "entropy() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.entropy"]], "entropy() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.entropy"]], "entropy() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.entropy"]], "entropy() (torch.distributions.pareto.pareto method)": [[35, "torch.distributions.pareto.Pareto.entropy"]], "entropy() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.entropy"]], "entropy() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.entropy"]], "entropy() (torch.distributions.weibull.weibull method)": [[35, "torch.distributions.weibull.Weibull.entropy"]], "entropy() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.entropy"]], "enumerate_support() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.enumerate_support"]], "enumerate_support() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.enumerate_support"]], "enumerate_support() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.enumerate_support"]], "enumerate_support() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.enumerate_support"]], "enumerate_support() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.enumerate_support"]], "enumerate_support() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.enumerate_support"]], "event_shape (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.event_shape"]], "expand() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.expand"]], "expand() 
(torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.expand"]], "expand() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.expand"]], "expand() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.expand"]], "expand() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.expand"]], "expand() (torch.distributions.chi2.chi2 method)": [[35, "torch.distributions.chi2.Chi2.expand"]], "expand() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.expand"]], "expand() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.expand"]], "expand() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.expand"]], "expand() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.expand"]], "expand() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.expand"]], "expand() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.expand"]], "expand() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.expand"]], "expand() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.expand"]], "expand() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.expand"]], "expand() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.expand"]], "expand() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.expand"]], "expand() (torch.distributions.inverse_gamma.inversegamma method)": [[35, "torch.distributions.inverse_gamma.InverseGamma.expand"]], "expand() (torch.distributions.kumaraswamy.kumaraswamy method)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.expand"]], "expand() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.expand"]], "expand() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.expand"]], "expand() (torch.distributions.log_normal.lognormal method)": [[35, "torch.distributions.log_normal.LogNormal.expand"]], "expand() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.expand"]], "expand() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.expand"]], "expand() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.expand"]], "expand() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.expand"]], "expand() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.expand"]], "expand() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.expand"]], "expand() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, 
"torch.distributions.one_hot_categorical.OneHotCategorical.expand"]], "expand() (torch.distributions.pareto.pareto method)": [[35, "torch.distributions.pareto.Pareto.expand"]], "expand() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.expand"]], "expand() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.expand"]], "expand() (torch.distributions.relaxed_bernoulli.relaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.expand"]], "expand() (torch.distributions.relaxed_categorical.relaxedonehotcategorical method)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.expand"]], "expand() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.expand"]], "expand() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.expand"]], "expand() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.expand"]], "expand() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.expand"]], "expand() (torch.distributions.weibull.weibull method)": [[35, "torch.distributions.weibull.Weibull.expand"]], "expand() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.expand"]], "forward_shape() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.forward_shape"]], "greater_than (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.greater_than"]], "greater_than_eq (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.greater_than_eq"]], "half_open_interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.half_open_interval"]], "has_enumerate_support (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.has_enumerate_support"]], "has_enumerate_support (torch.distributions.binomial.binomial attribute)": [[35, "torch.distributions.binomial.Binomial.has_enumerate_support"]], "has_enumerate_support (torch.distributions.categorical.categorical attribute)": [[35, "torch.distributions.categorical.Categorical.has_enumerate_support"]], "has_enumerate_support (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.has_enumerate_support"]], "has_enumerate_support (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.has_enumerate_support"]], "has_rsample (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.has_rsample"]], "has_rsample (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.has_rsample"]], "has_rsample (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.has_rsample"]], "has_rsample (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.has_rsample"]], "has_rsample (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.has_rsample"]], "has_rsample (torch.distributions.fishersnedecor.fishersnedecor attribute)": 
[[35, "torch.distributions.fishersnedecor.FisherSnedecor.has_rsample"]], "has_rsample (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.has_rsample"]], "has_rsample (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.has_rsample"]], "has_rsample (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.has_rsample"]], "has_rsample (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.has_rsample"]], "has_rsample (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.has_rsample"]], "has_rsample (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.has_rsample"]], "has_rsample (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.has_rsample"]], "has_rsample (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.has_rsample"]], "has_rsample (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.has_rsample"]], "has_rsample (torch.distributions.mixture_same_family.mixturesamefamily attribute)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.has_rsample"]], "has_rsample (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.has_rsample"]], "has_rsample (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.has_rsample"]], "has_rsample (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.has_rsample"]], "has_rsample (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.has_rsample"]], "has_rsample (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.has_rsample"]], "has_rsample (torch.distributions.transformed_distribution.transformeddistribution property)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.has_rsample"]], "has_rsample (torch.distributions.uniform.uniform attribute)": [[35, "torch.distributions.uniform.Uniform.has_rsample"]], "has_rsample (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.has_rsample"]], "has_rsample (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.has_rsample"]], "icdf() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.icdf"]], "icdf() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.icdf"]], "icdf() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.icdf"]], "icdf() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.icdf"]], "icdf() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.icdf"]], "icdf() (torch.distributions.half_normal.halfnormal method)": [[35, 
"torch.distributions.half_normal.HalfNormal.icdf"]], "icdf() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.icdf"]], "icdf() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.icdf"]], "icdf() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.icdf"]], "icdf() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.icdf"]], "independent (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.independent"]], "integer_interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.integer_interval"]], "interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.interval"]], "inv (torch.distributions.transforms.transform property)": [[35, "torch.distributions.transforms.Transform.inv"]], "inverse_shape() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.inverse_shape"]], "kl_divergence() (in module torch.distributions.kl)": [[35, "torch.distributions.kl.kl_divergence"]], "less_than (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.less_than"]], "loc (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.loc"]], "log_abs_det_jacobian() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.log_abs_det_jacobian"]], "log_prob() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.log_prob"]], "log_prob() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.log_prob"]], "log_prob() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.log_prob"]], "log_prob() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.log_prob"]], "log_prob() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.log_prob"]], "log_prob() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.log_prob"]], "log_prob() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.log_prob"]], "log_prob() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.log_prob"]], "log_prob() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.log_prob"]], "log_prob() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.log_prob"]], "log_prob() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.log_prob"]], "log_prob() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.log_prob"]], "log_prob() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.log_prob"]], "log_prob() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.log_prob"]], "log_prob() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.log_prob"]], "log_prob() (torch.distributions.independent.independent 
method)": [[35, "torch.distributions.independent.Independent.log_prob"]], "log_prob() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.log_prob"]], "log_prob() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.log_prob"]], "log_prob() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.log_prob"]], "log_prob() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.log_prob"]], "log_prob() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.log_prob"]], "log_prob() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.log_prob"]], "log_prob() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.log_prob"]], "log_prob() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.log_prob"]], "log_prob() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.log_prob"]], "log_prob() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.log_prob"]], "log_prob() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.log_prob"]], "log_prob() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.log_prob"]], "log_prob() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.log_prob"]], "log_prob() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.log_prob"]], "log_prob() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.log_prob"]], "log_prob() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.log_prob"]], "logits (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.logits"]], "logits (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.logits"]], "logits (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.logits"]], "logits (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.logits"]], "logits (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.logits"]], "logits (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.logits"]], "logits (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.logits"]], "logits (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.logits"]], "logits (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.logits"]], "logits 
(torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.logits"]], "logits (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.logits"]], "mean (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.mean"]], "mean (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.mean"]], "mean (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.mean"]], "mean (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.mean"]], "mean (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.mean"]], "mean (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.mean"]], "mean (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.mean"]], "mean (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.mean"]], "mean (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.mean"]], "mean (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.mean"]], "mean (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.mean"]], "mean (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.mean"]], "mean (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.mean"]], "mean (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.mean"]], "mean (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.mean"]], "mean (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.mean"]], "mean (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.mean"]], "mean (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.mean"]], "mean (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.mean"]], "mean (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.mean"]], "mean (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mean"]], "mean (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.mean"]], "mean (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.mean"]], "mean (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.mean"]], "mean (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.mean"]], "mean (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.mean"]], "mean 
(torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.mean"]], "mean (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.mean"]], "mean (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.mean"]], "mean (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.mean"]], "mean (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.mean"]], "mean (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.mean"]], "mean (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.mean"]], "mean (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.mean"]], "mixture_distribution (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.mixture_distribution"]], "mode (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.mode"]], "mode (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.mode"]], "mode (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.mode"]], "mode (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.mode"]], "mode (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.mode"]], "mode (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.mode"]], "mode (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.mode"]], "mode (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.mode"]], "mode (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.mode"]], "mode (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.mode"]], "mode (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.mode"]], "mode (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.mode"]], "mode (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.mode"]], "mode (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.mode"]], "mode (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.mode"]], "mode (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.mode"]], "mode (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.mode"]], "mode (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.mode"]], "mode (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.mode"]], "mode (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mode"]], "mode (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, 
"torch.distributions.multivariate_normal.MultivariateNormal.mode"]], "mode (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.mode"]], "mode (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.mode"]], "mode (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.mode"]], "mode (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.mode"]], "mode (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.mode"]], "mode (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.mode"]], "mode (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.mode"]], "mode (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.mode"]], "mode (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.mode"]], "mode (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.mode"]], "multinomial (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.multinomial"]], "param_shape (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.param_shape"]], "param_shape (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.param_shape"]], "param_shape (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.param_shape"]], "param_shape (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.param_shape"]], "param_shape (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.param_shape"]], "param_shape (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.param_shape"]], "param_shape (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.param_shape"]], "param_shape (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.param_shape"]], "perplexity() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.perplexity"]], "precision_matrix (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.precision_matrix"]], "precision_matrix (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.precision_matrix"]], "precision_matrix (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.precision_matrix"]], "probs (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.probs"]], "probs (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.probs"]], "probs (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.probs"]], "probs 
(torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.probs"]], "probs (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.probs"]], "probs (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.probs"]], "probs (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.probs"]], "probs (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.probs"]], "probs (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.probs"]], "probs (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.probs"]], "probs (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.probs"]], "rate (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.rate"]], "register() (torch.distributions.constraint_registry.constraintregistry method)": [[35, "torch.distributions.constraint_registry.ConstraintRegistry.register"]], "register_kl() (in module torch.distributions.kl)": [[35, "torch.distributions.kl.register_kl"]], "rsample() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.rsample"]], "rsample() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.rsample"]], "rsample() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.rsample"]], "rsample() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.rsample"]], "rsample() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.rsample"]], "rsample() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.rsample"]], "rsample() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.rsample"]], "rsample() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.rsample"]], "rsample() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.rsample"]], "rsample() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.rsample"]], "rsample() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.rsample"]], "rsample() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.rsample"]], "rsample() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.rsample"]], "rsample() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.rsample"]], "rsample() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.rsample"]], "rsample() 
(torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.rsample"]], "rsample() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.rsample"]], "rsample() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.rsample"]], "sample() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.sample"]], "sample() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.sample"]], "sample() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.sample"]], "sample() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.sample"]], "sample() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.sample"]], "sample() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.sample"]], "sample() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.sample"]], "sample() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.sample"]], "sample() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.sample"]], "sample() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.sample"]], "sample() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.sample"]], "sample() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.sample"]], "sample() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.sample"]], "sample() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.sample"]], "sample() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.sample"]], "sample() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.sample"]], "sample_n() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.sample_n"]], "scale (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.scale"]], "scale (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.scale"]], "scale (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.scale"]], "scale_tril (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.scale_tril"]], "scale_tril (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.scale_tril"]], "scale_tril (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.scale_tril"]], "set_default_validate_args() 
(torch.distributions.distribution.distribution static method)": [[35, "torch.distributions.distribution.Distribution.set_default_validate_args"]], "sign (torch.distributions.transforms.transform property)": [[35, "torch.distributions.transforms.Transform.sign"]], "stack (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.stack"]], "stddev (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.stddev"]], "stddev (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.stddev"]], "stddev (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.stddev"]], "stddev (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.stddev"]], "stddev (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.stddev"]], "stddev (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.stddev"]], "stddev (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.stddev"]], "support (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.support"]], "support (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.support"]], "support (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.support"]], "support (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.support"]], "support (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.support"]], "support (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.support"]], "support (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.support"]], "support (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.support"]], "support (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.support"]], "support (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.support"]], "support (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.support"]], "support (torch.distributions.geometric.geometric attribute)": [[35, "torch.distributions.geometric.Geometric.support"]], "support (torch.distributions.gumbel.gumbel attribute)": [[35, "torch.distributions.gumbel.Gumbel.support"]], "support (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.support"]], "support (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.support"]], "support (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.support"]], "support (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.support"]], "support (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.support"]], "support (torch.distributions.laplace.laplace attribute)": [[35, 
"torch.distributions.laplace.Laplace.support"]], "support (torch.distributions.lkj_cholesky.lkjcholesky attribute)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.support"]], "support (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.support"]], "support (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.support"]], "support (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.support"]], "support (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.support"]], "support (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.support"]], "support (torch.distributions.negative_binomial.negativebinomial attribute)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.support"]], "support (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.support"]], "support (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.support"]], "support (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.support"]], "support (torch.distributions.poisson.poisson attribute)": [[35, "torch.distributions.poisson.Poisson.support"]], "support (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.support"]], "support (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.support"]], "support (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.support"]], "support (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.support"]], "support (torch.distributions.transformed_distribution.transformeddistribution property)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.support"]], "support (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.support"]], "support (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.support"]], "support (torch.distributions.weibull.weibull attribute)": [[35, "torch.distributions.weibull.Weibull.support"]], "support (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.support"]], "temperature (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.temperature"]], "temperature (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.temperature"]], "torch.distributions": [[35, "module-torch.distributions"]], "torch.distributions.bernoulli": [[35, "module-torch.distributions.bernoulli"]], "torch.distributions.beta": [[35, "module-torch.distributions.beta"]], "torch.distributions.binomial": [[35, "module-torch.distributions.binomial"]], "torch.distributions.categorical": [[35, "module-torch.distributions.categorical"]], 
"torch.distributions.cauchy": [[35, "module-torch.distributions.cauchy"]], "torch.distributions.chi2": [[35, "module-torch.distributions.chi2"]], "torch.distributions.constraint_registry": [[35, "module-torch.distributions.constraint_registry"]], "torch.distributions.constraints": [[35, "module-torch.distributions.constraints"]], "torch.distributions.continuous_bernoulli": [[35, "module-torch.distributions.continuous_bernoulli"]], "torch.distributions.dirichlet": [[35, "module-torch.distributions.dirichlet"]], "torch.distributions.distribution": [[35, "module-torch.distributions.distribution"]], "torch.distributions.exp_family": [[35, "module-torch.distributions.exp_family"]], "torch.distributions.exponential": [[35, "module-torch.distributions.exponential"]], "torch.distributions.fishersnedecor": [[35, "module-torch.distributions.fishersnedecor"]], "torch.distributions.gamma": [[35, "module-torch.distributions.gamma"]], "torch.distributions.geometric": [[35, "module-torch.distributions.geometric"]], "torch.distributions.gumbel": [[35, "module-torch.distributions.gumbel"]], "torch.distributions.half_cauchy": [[35, "module-torch.distributions.half_cauchy"]], "torch.distributions.half_normal": [[35, "module-torch.distributions.half_normal"]], "torch.distributions.independent": [[35, "module-torch.distributions.independent"]], "torch.distributions.inverse_gamma": [[35, "module-torch.distributions.inverse_gamma"]], "torch.distributions.kl": [[35, "module-torch.distributions.kl"]], "torch.distributions.kumaraswamy": [[35, "module-torch.distributions.kumaraswamy"]], "torch.distributions.laplace": [[35, "module-torch.distributions.laplace"]], "torch.distributions.lkj_cholesky": [[35, "module-torch.distributions.lkj_cholesky"]], "torch.distributions.log_normal": [[35, "module-torch.distributions.log_normal"]], "torch.distributions.logistic_normal": [[35, "module-torch.distributions.logistic_normal"]], "torch.distributions.lowrank_multivariate_normal": [[35, "module-torch.distributions.lowrank_multivariate_normal"]], "torch.distributions.mixture_same_family": [[35, "module-torch.distributions.mixture_same_family"]], "torch.distributions.multinomial": [[35, "module-torch.distributions.multinomial"]], "torch.distributions.multivariate_normal": [[35, "module-torch.distributions.multivariate_normal"]], "torch.distributions.negative_binomial": [[35, "module-torch.distributions.negative_binomial"]], "torch.distributions.normal": [[35, "module-torch.distributions.normal"]], "torch.distributions.one_hot_categorical": [[35, "module-torch.distributions.one_hot_categorical"]], "torch.distributions.pareto": [[35, "module-torch.distributions.pareto"]], "torch.distributions.poisson": [[35, "module-torch.distributions.poisson"]], "torch.distributions.relaxed_bernoulli": [[35, "module-torch.distributions.relaxed_bernoulli"]], "torch.distributions.relaxed_categorical": [[35, "module-torch.distributions.relaxed_categorical"]], "torch.distributions.studentt": [[35, "module-torch.distributions.studentT"]], "torch.distributions.transformed_distribution": [[35, "module-torch.distributions.transformed_distribution"]], "torch.distributions.transforms": [[35, "module-torch.distributions.transforms"]], "torch.distributions.uniform": [[35, "module-torch.distributions.uniform"]], "torch.distributions.utils": [[35, "module-torch.distributions.utils"]], "torch.distributions.von_mises": [[35, "module-torch.distributions.von_mises"]], "torch.distributions.weibull": [[35, "module-torch.distributions.weibull"]], 
"torch.distributions.wishart": [[35, "module-torch.distributions.wishart"]], "total_count (torch.distributions.multinomial.multinomial attribute)": [[35, "torch.distributions.multinomial.Multinomial.total_count"]], "variance (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.variance"]], "variance (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.variance"]], "variance (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.variance"]], "variance (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.variance"]], "variance (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.variance"]], "variance (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.variance"]], "variance (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.variance"]], "variance (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.variance"]], "variance (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.variance"]], "variance (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.variance"]], "variance (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.variance"]], "variance (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.variance"]], "variance (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.variance"]], "variance (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.variance"]], "variance (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.variance"]], "variance (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.variance"]], "variance (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.variance"]], "variance (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.variance"]], "variance (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.variance"]], "variance (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.variance"]], "variance (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.variance"]], "variance (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.variance"]], "variance (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.variance"]], "variance (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.variance"]], "variance (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.variance"]], "variance 
(torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.variance"]], "variance (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.variance"]], "variance (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.variance"]], "variance (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.variance"]], "variance (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.variance"]], "variance (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.variance"]], "variance (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.variance"]], "variance (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.variance"]], "variance (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.variance"]], "from_dlpack() (in module torch.utils.dlpack)": [[36, "torch.utils.dlpack.from_dlpack"]], "to_dlpack() (in module torch.utils.dlpack)": [[36, "torch.utils.dlpack.to_dlpack"]], "elasticagent (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent"]], "healthcheckserver (class in torch.distributed.elastic.agent.server.health_check_server)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer"]], "localelasticagent (class in torch.distributed.elastic.agent.server.local_elastic_agent)": [[37, "torch.distributed.elastic.agent.server.local_elastic_agent.LocalElasticAgent"]], "runresult (class in torch.distributed.elastic.agent.server.api)": [[37, "torch.distributed.elastic.agent.server.api.RunResult"]], "simpleelasticagent (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent"]], "worker (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.Worker"]], "workergroup (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerGroup"]], "workerspec (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerSpec"]], "workerstate (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerState"]], "_assign_worker_ranks() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._assign_worker_ranks"]], "_exit_barrier() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._exit_barrier"]], "_initialize_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._initialize_workers"]], "_monitor_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._monitor_workers"]], "_rendezvous() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._rendezvous"]], "_restart_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._restart_workers"]], 
"_shutdown() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._shutdown"]], "_start_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._start_workers"]], "_stop_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._stop_workers"]], "create_healthcheck_server() (in module torch.distributed.elastic.agent.server.health_check_server)": [[37, "torch.distributed.elastic.agent.server.health_check_server.create_healthcheck_server"]], "get_entrypoint_name() (torch.distributed.elastic.agent.server.workerspec method)": [[37, "torch.distributed.elastic.agent.server.WorkerSpec.get_entrypoint_name"]], "get_worker_group() (torch.distributed.elastic.agent.server.elasticagent method)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent.get_worker_group"]], "is_running() (torch.distributed.elastic.agent.server.workerstate static method)": [[37, "torch.distributed.elastic.agent.server.WorkerState.is_running"]], "run() (torch.distributed.elastic.agent.server.elasticagent method)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent.run"]], "start() (torch.distributed.elastic.agent.server.health_check_server.healthcheckserver method)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer.start"]], "stop() (torch.distributed.elastic.agent.server.health_check_server.healthcheckserver method)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer.stop"]], "torch.distributed.elastic.agent": [[37, "module-torch.distributed.elastic.agent"]], "torch.distributed.elastic.agent.server": [[37, "module-torch.distributed.elastic.agent.server"]], "torch.distributed.elastic.agent.server.health_check_server": [[37, "module-torch.distributed.elastic.agent.server.health_check_server"]], "torch.distributed.elastic.control_plane": [[38, "module-torch.distributed.elastic.control_plane"]], "worker_main() (in module torch.distributed.elastic.control_plane)": [[38, "torch.distributed.elastic.control_plane.worker_main"]], "childfailederror (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ChildFailedError"]], "errorhandler (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ErrorHandler"]], "processfailure (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ProcessFailure"]], "record() (in module torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.record"]], "torch.distributed.elastic.multiprocessing.errors": [[40, "module-torch.distributed.elastic.multiprocessing.errors"]], "event (class in torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.Event"]], "eventmetadatavalue (in module torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.EventMetadataValue"]], "eventsource (class in torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.EventSource"]], "get_logging_handler() (in module torch.distributed.elastic.events)": [[41, "torch.distributed.elastic.events.get_logging_handler"]], "record() (in module torch.distributed.elastic.events)": [[41, 
"torch.distributed.elastic.events.record"]], "torch.distributed.elastic.events": [[41, "module-torch.distributed.elastic.events"]], "consolemetrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.ConsoleMetricHandler"]], "metrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.MetricHandler"]], "nullmetrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.NullMetricHandler"]], "configure() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.configure"]], "prof() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.prof"]], "put_metric() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.put_metric"]], "torch.distributed.elastic.metrics": [[44, "module-torch.distributed.elastic.metrics"]], "defaultlogsspecs (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs"]], "logsdest (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsDest"]], "logsspecs (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsSpecs"]], "multiprocesscontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.MultiprocessContext"]], "pcontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.PContext"]], "runprocsresult (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.RunProcsResult"]], "subprocesscontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.SubprocessContext"]], "reify() (torch.distributed.elastic.multiprocessing.api.defaultlogsspecs method)": [[45, "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs.reify"]], "reify() (torch.distributed.elastic.multiprocessing.api.logsspecs method)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsSpecs.reify"]], "start_processes() (in module torch.distributed.elastic.multiprocessing)": [[45, "torch.distributed.elastic.multiprocessing.start_processes"]], "torch.distributed.elastic.multiprocessing": [[45, "module-torch.distributed.elastic.multiprocessing"]], "c10drendezvousbackend (class in torch.distributed.elastic.rendezvous.c10d_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend"]], "dynamicrendezvoushandler (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler"]], "etcdrendezvousbackend (class in torch.distributed.elastic.rendezvous.etcd_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend"]], "etcdrendezvoushandler (class in torch.distributed.elastic.rendezvous.etcd_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous.EtcdRendezvousHandler"]], "etcdserver (class in torch.distributed.elastic.rendezvous.etcd_server)": [[47, "torch.distributed.elastic.rendezvous.etcd_server.EtcdServer"]], "etcdstore (class in torch.distributed.elastic.rendezvous.etcd_store)": [[47, 
"torch.distributed.elastic.rendezvous.etcd_store.EtcdStore"]], "rendezvousbackend (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend"]], "rendezvousclosederror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousClosedError"]], "rendezvousconnectionerror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousConnectionError"]], "rendezvouserror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousError"]], "rendezvousgracefulexiterror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousGracefulExitError"]], "rendezvoushandler (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler"]], "rendezvoushandlerregistry (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandlerRegistry"]], "rendezvousinfo (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousInfo"]], "rendezvousparameters (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters"]], "rendezvousstateerror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStateError"]], "rendezvousstoreinfo (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo"]], "rendezvoustimeout (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout"]], "rendezvoustimeouterror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousTimeoutError"]], "add() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.add"]], "build() (torch.distributed.elastic.rendezvous.api.rendezvousstoreinfo static method)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo.build"]], "check() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.check"]], "close (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.close"]], "create_backend() (in module torch.distributed.elastic.rendezvous.c10d_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.create_backend"]], "create_backend() (in module torch.distributed.elastic.rendezvous.etcd_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.create_backend"]], "create_handler() (in module torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.create_handler"]], "from_backend() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.dynamicrendezvoushandler class method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler.from_backend"]], "get() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, 
"torch.distributed.elastic.rendezvous.RendezvousParameters.get"]], "get() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.get"]], "get_as_bool() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get_as_bool"]], "get_as_int() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get_as_int"]], "get_backend() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.get_backend"]], "get_run_id() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.get_run_id"]], "get_state() (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.get_state"]], "get_state() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.get_state"]], "get_state() (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.get_state"]], "heartbeat (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.heartbeat"]], "is_closed() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.is_closed"]], "join (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.join"]], "last_call (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.last_call"]], "name (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.name"]], "name (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.name"]], "name (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.name"]], "next_rendezvous() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.next_rendezvous"]], "num_nodes_waiting() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.num_nodes_waiting"]], "set() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.set"]], "set_closed() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.set_closed"]], "set_state() (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend method)": [[47, 
"torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.set_state"]], "set_state() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.set_state"]], "set_state() (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.set_state"]], "shutdown() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.shutdown"]], "torch.distributed.elastic.rendezvous": [[47, "module-torch.distributed.elastic.rendezvous"]], "torch.distributed.elastic.rendezvous.registry": [[47, "module-torch.distributed.elastic.rendezvous.registry"]], "use_agent_store (torch.distributed.elastic.rendezvous.rendezvoushandler property)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.use_agent_store"]], "wait() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.wait"]], "torch.distributed.run": [[48, "module-torch.distributed.run"]], "subprocesshandler (class in torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler)": [[49, "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler.SubprocessHandler"]], "get_subprocess_handler() (in module torch.distributed.elastic.multiprocessing.subprocess_handler.handlers)": [[49, "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers.get_subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.handlers"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler"]], "filetimerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.FileTimerClient"]], "filetimerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.FileTimerServer"]], "localtimerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.LocalTimerClient"]], "localtimerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.LocalTimerServer"]], "timerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerClient"]], "timerrequest (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerRequest"]], "timerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerServer"]], "acquire() (torch.distributed.elastic.timer.timerclient method)": [[50, "torch.distributed.elastic.timer.TimerClient.acquire"]], "clear_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.clear_timers"]], "configure() (in module torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.configure"]], "expires() (in module torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.expires"]], "get_expired_timers() (torch.distributed.elastic.timer.timerserver 
method)": [[50, "torch.distributed.elastic.timer.TimerServer.get_expired_timers"]], "log_debug_info_for_expired_timers() (in module torch.distributed.elastic.timer.debug_info_logging)": [[50, "torch.distributed.elastic.timer.debug_info_logging.log_debug_info_for_expired_timers"]], "register_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.register_timers"]], "release() (torch.distributed.elastic.timer.timerclient method)": [[50, "torch.distributed.elastic.timer.TimerClient.release"]], "torch.distributed.elastic.timer": [[50, "module-torch.distributed.elastic.timer"]], "torch.distributed.elastic.timer.debug_info_logging": [[50, "module-torch.distributed.elastic.timer.debug_info_logging"]], "constraint (in module torch.export)": [[52, "torch.export.Constraint"]], "customobjargument (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.CustomObjArgument"]], "dim() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.Dim"]], "exportbackwardsignature (class in torch.export)": [[52, "torch.export.ExportBackwardSignature"]], "exportgraphsignature (class in torch.export)": [[52, "torch.export.ExportGraphSignature"]], "exportgraphsignature (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.ExportGraphSignature"]], "exportedprogram (class in torch.export)": [[52, "torch.export.ExportedProgram"]], "flatargsadapter (class in torch.export.unflatten)": [[52, "torch.export.unflatten.FlatArgsAdapter"]], "inputkind (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.InputKind"]], "inputspec (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.InputSpec"]], "interpretermodule (class in torch.export.unflatten)": [[52, "torch.export.unflatten.InterpreterModule"]], "modulecallentry (class in torch.export)": [[52, "torch.export.ModuleCallEntry"]], "modulecallsignature (class in torch.export)": [[52, "torch.export.ModuleCallSignature"]], "outputkind (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.OutputKind"]], "outputspec (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.OutputSpec"]], "shapescollection (class in torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.ShapesCollection"]], "adapt() (torch.export.unflatten.flatargsadapter method)": [[52, "torch.export.unflatten.FlatArgsAdapter.adapt"]], "buffers() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.buffers"]], "dims() (in module torch.export)": [[52, "torch.export.dims"]], "dynamic_dim() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.dynamic_dim"]], "dynamic_shapes() (torch.export.dynamic_shapes.shapescollection method)": [[52, "torch.export.dynamic_shapes.ShapesCollection.dynamic_shapes"]], "export() (in module torch.export)": [[52, "torch.export.export"]], "get_replace_hook() (torch.export.graph_signature.exportgraphsignature method)": [[52, "torch.export.graph_signature.ExportGraphSignature.get_replace_hook"]], "load() (in module torch.export)": [[52, "torch.export.load"]], "module() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.module"]], "named_buffers() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.named_buffers"]], "named_parameters() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.named_parameters"]], "parameters() (torch.export.exportedprogram 
method)": [[52, "torch.export.ExportedProgram.parameters"]], "refine_dynamic_shapes_from_suggested_fixes() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.refine_dynamic_shapes_from_suggested_fixes"]], "register_dataclass() (in module torch.export)": [[52, "torch.export.register_dataclass"]], "replace_all_uses() (torch.export.graph_signature.exportgraphsignature method)": [[52, "torch.export.graph_signature.ExportGraphSignature.replace_all_uses"]], "run_decompositions() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.run_decompositions"]], "save() (in module torch.export)": [[52, "torch.export.save"]], "torch.export": [[52, "module-torch.export"]], "torch.export.custom_obj": [[52, "module-torch.export.custom_obj"]], "torch.export.dynamic_shapes": [[52, "module-torch.export.dynamic_shapes"]], "torch.export.exported_program": [[52, "module-torch.export.exported_program"]], "torch.export.graph_signature": [[52, "module-torch.export.graph_signature"]], "torch.export.unflatten": [[52, "module-torch.export.unflatten"]], "unflatten() (in module torch.export.unflatten)": [[52, "torch.export.unflatten.unflatten"]], "torch.fft": [[54, "module-torch.fft"]], "backwardprefetch (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.BackwardPrefetch"]], "cpuoffload (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.CPUOffload"]], "fulloptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullOptimStateDictConfig"]], "fullstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullStateDictConfig"]], "fullyshardeddataparallel (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel"]], "localoptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.LocalOptimStateDictConfig"]], "localstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.LocalStateDictConfig"]], "mixedprecision (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.MixedPrecision"]], "optimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.OptimStateDictConfig"]], "shardedoptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardedOptimStateDictConfig"]], "shardedstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardedStateDictConfig"]], "shardingstrategy (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardingStrategy"]], "statedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.StateDictConfig"]], "statedictsettings (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.StateDictSettings"]], "apply() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.apply"]], "check_is_root() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.check_is_root"]], "clip_grad_norm_() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.clip_grad_norm_"]], "flatten_sharded_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.flatten_sharded_optim_state_dict"]], "forward() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.forward"]], 
"fsdp_modules() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.fsdp_modules"]], "full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.full_optim_state_dict"]], "get_state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.get_state_dict_type"]], "module (torch.distributed.fsdp.fullyshardeddataparallel property)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.module"]], "named_buffers() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.named_buffers"]], "named_parameters() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.named_parameters"]], "no_sync() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.no_sync"]], "optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.optim_state_dict"]], "optim_state_dict_to_load() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.optim_state_dict_to_load"]], "register_comm_hook() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.register_comm_hook"]], "rekey_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.rekey_optim_state_dict"]], "scatter_full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.scatter_full_optim_state_dict"]], "set_state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.set_state_dict_type"]], "shard_full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.shard_full_optim_state_dict"]], "sharded_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.sharded_optim_state_dict"]], "state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.state_dict_type"]], "summon_full_params() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.summon_full_params"]], "torch.distributed.fsdp": [[55, "module-torch.distributed.fsdp"]], "torch.func": [[57, "module-torch.func"]], "get_overwrite_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.get_overwrite_module_params_on_conversion"]], "get_swap_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.get_swap_module_params_on_conversion"]], "set_overwrite_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.set_overwrite_module_params_on_conversion"]], "set_swap_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.set_swap_module_params_on_conversion"]], "torch.__future__": [[62, "module-torch.__future__"]], "future (class in torch.futures)": 
[[63, "torch.futures.Future"]], "add_done_callback() (torch.futures.future method)": [[63, "torch.futures.Future.add_done_callback"]], "collect_all() (in module torch.futures)": [[63, "torch.futures.collect_all"]], "done() (torch.futures.future method)": [[63, "torch.futures.Future.done"]], "set_exception() (torch.futures.future method)": [[63, "torch.futures.Future.set_exception"]], "set_result() (torch.futures.future method)": [[63, "torch.futures.Future.set_result"]], "then() (torch.futures.future method)": [[63, "torch.futures.Future.then"]], "torch.futures": [[63, "module-torch.futures"]], "value() (torch.futures.future method)": [[63, "torch.futures.Future.value"]], "wait() (torch.futures.future method)": [[63, "torch.futures.Future.wait"]], "wait_all() (in module torch.futures)": [[63, "torch.futures.wait_all"]], "graph (class in torch.fx)": [[64, "torch.fx.Graph"]], "graphmodule (class in torch.fx)": [[64, "torch.fx.GraphModule"]], "interpreter (class in torch.fx)": [[64, "torch.fx.Interpreter"]], "node (class in torch.fx)": [[64, "torch.fx.Node"]], "proxy (class in torch.fx)": [[64, "torch.fx.Proxy"]], "tracer (class in torch.fx)": [[64, "torch.fx.Tracer"]], "transformer (class in torch.fx)": [[64, "torch.fx.Transformer"]], "__init__() (torch.fx.graph method)": [[64, "torch.fx.Graph.__init__"]], "__init__() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.__init__"]], "add_submodule() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.add_submodule"]], "all_input_nodes (torch.fx.node property)": [[64, "torch.fx.Node.all_input_nodes"]], "append() (torch.fx.node method)": [[64, "torch.fx.Node.append"]], "args (torch.fx.node property)": [[64, "torch.fx.Node.args"]], "boxed_run() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.boxed_run"]], "call_function() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_function"]], "call_function() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_function"]], "call_function() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.call_function"]], "call_method() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_method"]], "call_method() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_method"]], "call_module() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_module"]], "call_module() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_module"]], "call_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.call_module"]], "call_module() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.call_module"]], "code (torch.fx.graphmodule property)": [[64, "torch.fx.GraphModule.code"]], "create_arg() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_arg"]], "create_args_for_root() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_args_for_root"]], "create_node() (torch.fx.graph method)": [[64, "torch.fx.Graph.create_node"]], "create_node() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_node"]], "create_proxy() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_proxy"]], "delete_all_unused_submodules() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.delete_all_unused_submodules"]], "delete_submodule() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.delete_submodule"]], "eliminate_dead_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.eliminate_dead_code"]], "erase_node() (torch.fx.graph method)": [[64, "torch.fx.Graph.erase_node"]], "fetch_args_kwargs_from_env() (torch.fx.interpreter method)": 
[[64, "torch.fx.Interpreter.fetch_args_kwargs_from_env"]], "fetch_attr() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.fetch_attr"]], "find_nodes() (torch.fx.graph method)": [[64, "torch.fx.Graph.find_nodes"]], "format_node() (torch.fx.node method)": [[64, "torch.fx.Node.format_node"]], "get_attr() (torch.fx.graph method)": [[64, "torch.fx.Graph.get_attr"]], "get_attr() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.get_attr"]], "get_attr() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.get_attr"]], "get_fresh_qualname() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.get_fresh_qualname"]], "getattr() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.getattr"]], "graph (torch.fx.graphmodule property)": [[64, "torch.fx.GraphModule.graph"]], "graph_copy() (torch.fx.graph method)": [[64, "torch.fx.Graph.graph_copy"]], "insert_arg() (torch.fx.node method)": [[64, "torch.fx.Node.insert_arg"]], "inserting_after() (torch.fx.graph method)": [[64, "torch.fx.Graph.inserting_after"]], "inserting_before() (torch.fx.graph method)": [[64, "torch.fx.Graph.inserting_before"]], "is_impure() (torch.fx.node method)": [[64, "torch.fx.Node.is_impure"]], "is_leaf_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.is_leaf_module"]], "iter() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.iter"]], "keys() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.keys"]], "kwargs (torch.fx.node property)": [[64, "torch.fx.Node.kwargs"]], "lint() (torch.fx.graph method)": [[64, "torch.fx.Graph.lint"]], "map_nodes_to_values() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.map_nodes_to_values"]], "next (torch.fx.node property)": [[64, "torch.fx.Node.next"]], "node_copy() (torch.fx.graph method)": [[64, "torch.fx.Graph.node_copy"]], "nodes (torch.fx.graph property)": [[64, "torch.fx.Graph.nodes"]], "normalized_arguments() (torch.fx.node method)": [[64, "torch.fx.Node.normalized_arguments"]], "on_generate_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.on_generate_code"]], "output() (torch.fx.graph method)": [[64, "torch.fx.Graph.output"]], "output() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.output"]], "path_of_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.path_of_module"]], "placeholder() (torch.fx.graph method)": [[64, "torch.fx.Graph.placeholder"]], "placeholder() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.placeholder"]], "placeholder() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.placeholder"]], "prepend() (torch.fx.node method)": [[64, "torch.fx.Node.prepend"]], "prev (torch.fx.node property)": [[64, "torch.fx.Node.prev"]], "print_readable() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.print_readable"]], "print_tabular() (torch.fx.graph method)": [[64, "torch.fx.Graph.print_tabular"]], "process_inputs() (torch.fx.graph method)": [[64, "torch.fx.Graph.process_inputs"]], "process_outputs() (torch.fx.graph method)": [[64, "torch.fx.Graph.process_outputs"]], "proxy() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.proxy"]], "python_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.python_code"]], "recompile() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.recompile"]], "replace_all_uses_with() (torch.fx.node method)": [[64, "torch.fx.Node.replace_all_uses_with"]], "replace_input_with() (torch.fx.node method)": [[64, "torch.fx.Node.replace_input_with"]], "replace_pattern() (in module torch.fx)": [[64, "torch.fx.replace_pattern"]], "run() (torch.fx.interpreter 
method)": [[64, "torch.fx.Interpreter.run"]], "run_node() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.run_node"]], "set_codegen() (torch.fx.graph method)": [[64, "torch.fx.Graph.set_codegen"]], "stack_trace (torch.fx.node property)": [[64, "torch.fx.Node.stack_trace"]], "symbolic_trace() (in module torch.fx)": [[64, "torch.fx.symbolic_trace"]], "to_bool() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.to_bool"]], "to_folder() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.to_folder"]], "torch.fx": [[64, "module-torch.fx"]], "torch.fx.annotate": [[64, "module-torch.fx.annotate"]], "torch.fx.config": [[64, "module-torch.fx.config"]], "torch.fx.experimental": [[64, "module-torch.fx.experimental"]], "torch.fx.experimental.accelerator_partitioner": [[64, "module-torch.fx.experimental.accelerator_partitioner"]], "torch.fx.experimental.const_fold": [[64, "module-torch.fx.experimental.const_fold"]], "torch.fx.experimental.debug": [[64, "module-torch.fx.experimental.debug"]], "torch.fx.experimental.graph_gradual_typechecker": [[64, "module-torch.fx.experimental.graph_gradual_typechecker"]], "torch.fx.experimental.merge_matmul": [[64, "module-torch.fx.experimental.merge_matmul"]], "torch.fx.experimental.meta_tracer": [[64, "module-torch.fx.experimental.meta_tracer"]], "torch.fx.experimental.migrate_gradual_types": [[64, "module-torch.fx.experimental.migrate_gradual_types"]], "torch.fx.experimental.migrate_gradual_types.constraint": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint"]], "torch.fx.experimental.migrate_gradual_types.constraint_generator": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint_generator"]], "torch.fx.experimental.migrate_gradual_types.constraint_transformation": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint_transformation"]], "torch.fx.experimental.migrate_gradual_types.operation": [[64, "module-torch.fx.experimental.migrate_gradual_types.operation"]], "torch.fx.experimental.migrate_gradual_types.transform_to_z3": [[64, "module-torch.fx.experimental.migrate_gradual_types.transform_to_z3"]], "torch.fx.experimental.migrate_gradual_types.util": [[64, "module-torch.fx.experimental.migrate_gradual_types.util"]], "torch.fx.experimental.migrate_gradual_types.z3_types": [[64, "module-torch.fx.experimental.migrate_gradual_types.z3_types"]], "torch.fx.experimental.normalize": [[64, "module-torch.fx.experimental.normalize"]], "torch.fx.experimental.optimization": [[64, "module-torch.fx.experimental.optimization"]], "torch.fx.experimental.partitioner_utils": [[64, "module-torch.fx.experimental.partitioner_utils"]], "torch.fx.experimental.proxy_tensor": [[64, "module-torch.fx.experimental.proxy_tensor"]], "torch.fx.experimental.recording": [[64, "module-torch.fx.experimental.recording"]], "torch.fx.experimental.refinement_types": [[64, "module-torch.fx.experimental.refinement_types"]], "torch.fx.experimental.rewriter": [[64, "module-torch.fx.experimental.rewriter"]], "torch.fx.experimental.schema_type_annotation": [[64, "module-torch.fx.experimental.schema_type_annotation"]], "torch.fx.experimental.sym_node": [[64, "module-torch.fx.experimental.sym_node"]], "torch.fx.experimental.unification": [[64, "module-torch.fx.experimental.unification"]], "torch.fx.experimental.unification.core": [[64, "module-torch.fx.experimental.unification.core"]], "torch.fx.experimental.unification.dispatch": [[64, "module-torch.fx.experimental.unification.dispatch"]], "torch.fx.experimental.unification.match": [[64, 
"module-torch.fx.experimental.unification.match"]], "torch.fx.experimental.unification.more": [[64, "module-torch.fx.experimental.unification.more"]], "torch.fx.experimental.unification.multipledispatch": [[64, "module-torch.fx.experimental.unification.multipledispatch"]], "torch.fx.experimental.unification.multipledispatch.conflict": [[64, "module-torch.fx.experimental.unification.multipledispatch.conflict"]], "torch.fx.experimental.unification.multipledispatch.core": [[64, "module-torch.fx.experimental.unification.multipledispatch.core"]], "torch.fx.experimental.unification.multipledispatch.dispatcher": [[64, "module-torch.fx.experimental.unification.multipledispatch.dispatcher"]], "torch.fx.experimental.unification.multipledispatch.utils": [[64, "module-torch.fx.experimental.unification.multipledispatch.utils"]], "torch.fx.experimental.unification.multipledispatch.variadic": [[64, "module-torch.fx.experimental.unification.multipledispatch.variadic"]], "torch.fx.experimental.unification.unification_tools": [[64, "module-torch.fx.experimental.unification.unification_tools"]], "torch.fx.experimental.unification.utils": [[64, "module-torch.fx.experimental.unification.utils"]], "torch.fx.experimental.unification.variable": [[64, "module-torch.fx.experimental.unification.variable"]], "torch.fx.experimental.unify_refinements": [[64, "module-torch.fx.experimental.unify_refinements"]], "torch.fx.experimental.validator": [[64, "module-torch.fx.experimental.validator"]], "torch.fx.graph": [[64, "module-torch.fx.graph"]], "torch.fx.graph_module": [[64, "module-torch.fx.graph_module"]], "torch.fx.immutable_collections": [[64, "module-torch.fx.immutable_collections"]], "torch.fx.interpreter": [[64, "module-torch.fx.interpreter"]], "torch.fx.node": [[64, "module-torch.fx.node"]], "torch.fx.operator_schemas": [[64, "module-torch.fx.operator_schemas"]], "torch.fx.passes": [[64, "module-torch.fx.passes"]], "torch.fx.passes.annotate_getitem_nodes": [[64, "module-torch.fx.passes.annotate_getitem_nodes"]], "torch.fx.passes.backends": [[64, "module-torch.fx.passes.backends"]], "torch.fx.passes.backends.cudagraphs": [[64, "module-torch.fx.passes.backends.cudagraphs"]], "torch.fx.passes.dialect": [[64, "module-torch.fx.passes.dialect"]], "torch.fx.passes.dialect.common": [[64, "module-torch.fx.passes.dialect.common"]], "torch.fx.passes.dialect.common.cse_pass": [[64, "module-torch.fx.passes.dialect.common.cse_pass"]], "torch.fx.passes.fake_tensor_prop": [[64, "module-torch.fx.passes.fake_tensor_prop"]], "torch.fx.passes.graph_drawer": [[64, "module-torch.fx.passes.graph_drawer"]], "torch.fx.passes.graph_manipulation": [[64, "module-torch.fx.passes.graph_manipulation"]], "torch.fx.passes.graph_transform_observer": [[64, "module-torch.fx.passes.graph_transform_observer"]], "torch.fx.passes.infra": [[64, "module-torch.fx.passes.infra"]], "torch.fx.passes.infra.partitioner": [[64, "module-torch.fx.passes.infra.partitioner"]], "torch.fx.passes.infra.pass_base": [[64, "module-torch.fx.passes.infra.pass_base"]], "torch.fx.passes.infra.pass_manager": [[64, "module-torch.fx.passes.infra.pass_manager"]], "torch.fx.passes.net_min_base": [[64, "module-torch.fx.passes.net_min_base"]], "torch.fx.passes.operator_support": [[64, "module-torch.fx.passes.operator_support"]], "torch.fx.passes.param_fetch": [[64, "module-torch.fx.passes.param_fetch"]], "torch.fx.passes.pass_manager": [[64, "module-torch.fx.passes.pass_manager"]], "torch.fx.passes.reinplace": [[64, "module-torch.fx.passes.reinplace"]], 
"torch.fx.passes.runtime_assert": [[64, "module-torch.fx.passes.runtime_assert"]], "torch.fx.passes.shape_prop": [[64, "module-torch.fx.passes.shape_prop"]], "torch.fx.passes.split_module": [[64, "module-torch.fx.passes.split_module"]], "torch.fx.passes.split_utils": [[64, "module-torch.fx.passes.split_utils"]], "torch.fx.passes.splitter_base": [[64, "module-torch.fx.passes.splitter_base"]], "torch.fx.passes.tests": [[64, "module-torch.fx.passes.tests"]], "torch.fx.passes.tests.test_pass_manager": [[64, "module-torch.fx.passes.tests.test_pass_manager"]], "torch.fx.passes.tools_common": [[64, "module-torch.fx.passes.tools_common"]], "torch.fx.passes.utils": [[64, "module-torch.fx.passes.utils"]], "torch.fx.passes.utils.common": [[64, "module-torch.fx.passes.utils.common"]], "torch.fx.passes.utils.fuser_utils": [[64, "module-torch.fx.passes.utils.fuser_utils"]], "torch.fx.passes.utils.matcher_utils": [[64, "module-torch.fx.passes.utils.matcher_utils"]], "torch.fx.passes.utils.matcher_with_name_node_map_utils": [[64, "module-torch.fx.passes.utils.matcher_with_name_node_map_utils"]], "torch.fx.passes.utils.source_matcher_utils": [[64, "module-torch.fx.passes.utils.source_matcher_utils"]], "torch.fx.proxy": [[64, "module-torch.fx.proxy"]], "torch.fx.subgraph_rewriter": [[64, "module-torch.fx.subgraph_rewriter"]], "torch.fx.tensor_type": [[64, "module-torch.fx.tensor_type"]], "torch.fx.traceback": [[64, "module-torch.fx.traceback"]], "trace() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.trace"]], "transform() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.transform"]], "update_arg() (torch.fx.node method)": [[64, "torch.fx.Node.update_arg"]], "update_kwarg() (torch.fx.node method)": [[64, "torch.fx.Node.update_kwarg"]], "wrap() (in module torch.fx)": [[64, "torch.fx.wrap"]], "torch.fx.experimental.symbolic_shapes": [[65, "module-torch.fx.experimental.symbolic_shapes"]], "generator (class in torch)": [[90, "torch.Generator"]], "clone_state() (torch.generator method)": [[90, "torch.Generator.clone_state"]], "device (torch.generator attribute)": [[90, "torch.Generator.device"]], "get_state() (torch.generator method)": [[90, "torch.Generator.get_state"]], "graphsafe_get_state() (torch.generator method)": [[90, "torch.Generator.graphsafe_get_state"]], "graphsafe_set_state() (torch.generator method)": [[90, "torch.Generator.graphsafe_set_state"]], "initial_seed() (torch.generator method)": [[90, "torch.Generator.initial_seed"]], "manual_seed() (torch.generator method)": [[90, "torch.Generator.manual_seed"]], "seed() (torch.generator method)": [[90, "torch.Generator.seed"]], "set_state() (torch.generator method)": [[90, "torch.Generator.set_state"]], "abs() (torch.tensor method)": [[91, "torch.Tensor.abs"]], "abs_() (torch.tensor method)": [[92, "torch.Tensor.abs_"]], "absolute() (torch.tensor method)": [[93, "torch.Tensor.absolute"]], "absolute_() (torch.tensor method)": [[94, "torch.Tensor.absolute_"]], "acos() (torch.tensor method)": [[95, "torch.Tensor.acos"]], "acos_() (torch.tensor method)": [[96, "torch.Tensor.acos_"]], "acosh() (torch.tensor method)": [[97, "torch.Tensor.acosh"]], "acosh_() (torch.tensor method)": [[98, "torch.Tensor.acosh_"]], "add() (torch.tensor method)": [[99, "torch.Tensor.add"]], "add_() (torch.tensor method)": [[100, "torch.Tensor.add_"]], "addbmm() (torch.tensor method)": [[101, "torch.Tensor.addbmm"]], "addbmm_() (torch.tensor method)": [[102, "torch.Tensor.addbmm_"]], "addcdiv() (torch.tensor method)": [[103, "torch.Tensor.addcdiv"]], "addcdiv_() 
(torch.tensor method)": [[104, "torch.Tensor.addcdiv_"]], "addcmul() (torch.tensor method)": [[105, "torch.Tensor.addcmul"]], "addcmul_() (torch.tensor method)": [[106, "torch.Tensor.addcmul_"]], "addmm() (torch.tensor method)": [[107, "torch.Tensor.addmm"]], "addmm_() (torch.tensor method)": [[108, "torch.Tensor.addmm_"]], "addmv() (torch.tensor method)": [[109, "torch.Tensor.addmv"]], "addmv_() (torch.tensor method)": [[110, "torch.Tensor.addmv_"]], "addr() (torch.tensor method)": [[111, "torch.Tensor.addr"]], "addr_() (torch.tensor method)": [[112, "torch.Tensor.addr_"]], "adjoint() (torch.tensor method)": [[113, "torch.Tensor.adjoint"]], "all() (torch.tensor method)": [[114, "torch.Tensor.all"]], "allclose() (torch.tensor method)": [[115, "torch.Tensor.allclose"]], "amax() (torch.tensor method)": [[116, "torch.Tensor.amax"]], "amin() (torch.tensor method)": [[117, "torch.Tensor.amin"]], "aminmax() (torch.tensor method)": [[118, "torch.Tensor.aminmax"]], "angle() (torch.tensor method)": [[119, "torch.Tensor.angle"]], "any() (torch.tensor method)": [[120, "torch.Tensor.any"]], "apply_() (torch.tensor method)": [[121, "torch.Tensor.apply_"]], "arccos() (torch.tensor method)": [[122, "torch.Tensor.arccos"]], "arccos_() (torch.tensor method)": [[123, "torch.Tensor.arccos_"]], "arccosh() (torch.tensor method)": [[124, "torch.Tensor.arccosh"]], "arccosh_() (torch.tensor method)": [[125, "torch.Tensor.arccosh_"]], "arcsin() (torch.tensor method)": [[126, "torch.Tensor.arcsin"]], "arcsin_() (torch.tensor method)": [[127, "torch.Tensor.arcsin_"]], "arcsinh() (torch.tensor method)": [[128, "torch.Tensor.arcsinh"]], "arcsinh_() (torch.tensor method)": [[129, "torch.Tensor.arcsinh_"]], "arctan() (torch.tensor method)": [[130, "torch.Tensor.arctan"]], "arctan2() (torch.tensor method)": [[131, "torch.Tensor.arctan2"]], "arctan2_() (torch.tensor method)": [[132, "torch.Tensor.arctan2_"]], "arctan_() (torch.tensor method)": [[133, "torch.Tensor.arctan_"]], "arctanh() (torch.tensor method)": [[134, "torch.Tensor.arctanh"]], "arctanh_() (torch.tensor method)": [[135, "torch.Tensor.arctanh_"]], "argmax() (torch.tensor method)": [[136, "torch.Tensor.argmax"]], "argmin() (torch.tensor method)": [[137, "torch.Tensor.argmin"]], "argsort() (torch.tensor method)": [[138, "torch.Tensor.argsort"]], "argwhere() (torch.tensor method)": [[139, "torch.Tensor.argwhere"]], "as_strided() (torch.tensor method)": [[140, "torch.Tensor.as_strided"]], "as_subclass() (torch.tensor method)": [[141, "torch.Tensor.as_subclass"]], "asin() (torch.tensor method)": [[142, "torch.Tensor.asin"]], "asin_() (torch.tensor method)": [[143, "torch.Tensor.asin_"]], "asinh() (torch.tensor method)": [[144, "torch.Tensor.asinh"]], "asinh_() (torch.tensor method)": [[145, "torch.Tensor.asinh_"]], "atan() (torch.tensor method)": [[146, "torch.Tensor.atan"]], "atan2() (torch.tensor method)": [[147, "torch.Tensor.atan2"]], "atan2_() (torch.tensor method)": [[148, "torch.Tensor.atan2_"]], "atan_() (torch.tensor method)": [[149, "torch.Tensor.atan_"]], "atanh() (torch.tensor method)": [[150, "torch.Tensor.atanh"]], "atanh_() (torch.tensor method)": [[151, "torch.Tensor.atanh_"]], "backward() (torch.tensor method)": [[152, "torch.Tensor.backward"]], "baddbmm() (torch.tensor method)": [[153, "torch.Tensor.baddbmm"]], "baddbmm_() (torch.tensor method)": [[154, "torch.Tensor.baddbmm_"]], "bernoulli() (torch.tensor method)": [[155, "torch.Tensor.bernoulli"]], "bernoulli_() (torch.tensor method)": [[156, "torch.Tensor.bernoulli_"]], "bfloat16() 
(torch.tensor method)": [[157, "torch.Tensor.bfloat16"]], "bincount() (torch.tensor method)": [[158, "torch.Tensor.bincount"]], "bitwise_and() (torch.tensor method)": [[159, "torch.Tensor.bitwise_and"]], "bitwise_and_() (torch.tensor method)": [[160, "torch.Tensor.bitwise_and_"]], "bitwise_left_shift() (torch.tensor method)": [[161, "torch.Tensor.bitwise_left_shift"]], "bitwise_left_shift_() (torch.tensor method)": [[162, "torch.Tensor.bitwise_left_shift_"]], "bitwise_not() (torch.tensor method)": [[163, "torch.Tensor.bitwise_not"]], "bitwise_not_() (torch.tensor method)": [[164, "torch.Tensor.bitwise_not_"]], "bitwise_or() (torch.tensor method)": [[165, "torch.Tensor.bitwise_or"]], "bitwise_or_() (torch.tensor method)": [[166, "torch.Tensor.bitwise_or_"]], "bitwise_right_shift() (torch.tensor method)": [[167, "torch.Tensor.bitwise_right_shift"]], "bitwise_right_shift_() (torch.tensor method)": [[168, "torch.Tensor.bitwise_right_shift_"]], "bitwise_xor() (torch.tensor method)": [[169, "torch.Tensor.bitwise_xor"]], "bitwise_xor_() (torch.tensor method)": [[170, "torch.Tensor.bitwise_xor_"]], "bmm() (torch.tensor method)": [[171, "torch.Tensor.bmm"]], "bool() (torch.tensor method)": [[172, "torch.Tensor.bool"]], "broadcast_to() (torch.tensor method)": [[173, "torch.Tensor.broadcast_to"]], "byte() (torch.tensor method)": [[174, "torch.Tensor.byte"]], "cauchy_() (torch.tensor method)": [[175, "torch.Tensor.cauchy_"]], "ccol_indices() (torch.tensor method)": [[176, "torch.Tensor.ccol_indices"]], "cdouble() (torch.tensor method)": [[177, "torch.Tensor.cdouble"]], "ceil() (torch.tensor method)": [[178, "torch.Tensor.ceil"]], "ceil_() (torch.tensor method)": [[179, "torch.Tensor.ceil_"]], "cfloat() (torch.tensor method)": [[180, "torch.Tensor.cfloat"]], "chalf() (torch.tensor method)": [[181, "torch.Tensor.chalf"]], "char() (torch.tensor method)": [[182, "torch.Tensor.char"]], "cholesky() (torch.tensor method)": [[183, "torch.Tensor.cholesky"]], "cholesky_inverse() (torch.tensor method)": [[184, "torch.Tensor.cholesky_inverse"]], "cholesky_solve() (torch.tensor method)": [[185, "torch.Tensor.cholesky_solve"]], "chunk() (torch.tensor method)": [[186, "torch.Tensor.chunk"]], "clamp() (torch.tensor method)": [[187, "torch.Tensor.clamp"]], "clamp_() (torch.tensor method)": [[188, "torch.Tensor.clamp_"]], "clip() (torch.tensor method)": [[189, "torch.Tensor.clip"]], "clip_() (torch.tensor method)": [[190, "torch.Tensor.clip_"]], "clone() (torch.tensor method)": [[191, "torch.Tensor.clone"]], "coalesce() (torch.tensor method)": [[192, "torch.Tensor.coalesce"]], "col_indices() (torch.tensor method)": [[193, "torch.Tensor.col_indices"]], "conj() (torch.tensor method)": [[194, "torch.Tensor.conj"]], "conj_physical() (torch.tensor method)": [[195, "torch.Tensor.conj_physical"]], "conj_physical_() (torch.tensor method)": [[196, "torch.Tensor.conj_physical_"]], "contiguous() (torch.tensor method)": [[197, "torch.Tensor.contiguous"]], "copy_() (torch.tensor method)": [[198, "torch.Tensor.copy_"]], "copysign() (torch.tensor method)": [[199, "torch.Tensor.copysign"]], "copysign_() (torch.tensor method)": [[200, "torch.Tensor.copysign_"]], "corrcoef() (torch.tensor method)": [[201, "torch.Tensor.corrcoef"]], "cos() (torch.tensor method)": [[202, "torch.Tensor.cos"]], "cos_() (torch.tensor method)": [[203, "torch.Tensor.cos_"]], "cosh() (torch.tensor method)": [[204, "torch.Tensor.cosh"]], "cosh_() (torch.tensor method)": [[205, "torch.Tensor.cosh_"]], "count_nonzero() (torch.tensor method)": [[206, 
"torch.Tensor.count_nonzero"]], "cov() (torch.tensor method)": [[207, "torch.Tensor.cov"]], "cpu() (torch.tensor method)": [[208, "torch.Tensor.cpu"]], "cross() (torch.tensor method)": [[209, "torch.Tensor.cross"]], "crow_indices() (torch.tensor method)": [[210, "torch.Tensor.crow_indices"]], "cuda() (torch.tensor method)": [[211, "torch.Tensor.cuda"]], "cummax() (torch.tensor method)": [[212, "torch.Tensor.cummax"]], "cummin() (torch.tensor method)": [[213, "torch.Tensor.cummin"]], "cumprod() (torch.tensor method)": [[214, "torch.Tensor.cumprod"]], "cumprod_() (torch.tensor method)": [[215, "torch.Tensor.cumprod_"]], "cumsum() (torch.tensor method)": [[216, "torch.Tensor.cumsum"]], "cumsum_() (torch.tensor method)": [[217, "torch.Tensor.cumsum_"]], "data_ptr() (torch.tensor method)": [[218, "torch.Tensor.data_ptr"]], "deg2rad() (torch.tensor method)": [[219, "torch.Tensor.deg2rad"]], "dense_dim() (torch.tensor method)": [[220, "torch.Tensor.dense_dim"]], "dequantize() (torch.tensor method)": [[221, "torch.Tensor.dequantize"]], "det() (torch.tensor method)": [[222, "torch.Tensor.det"]], "detach() (torch.tensor method)": [[223, "torch.Tensor.detach"]], "detach_() (torch.tensor method)": [[224, "torch.Tensor.detach_"]], "device (torch.tensor attribute)": [[225, "torch.Tensor.device"]], "diag() (torch.tensor method)": [[226, "torch.Tensor.diag"]], "diag_embed() (torch.tensor method)": [[227, "torch.Tensor.diag_embed"]], "diagflat() (torch.tensor method)": [[228, "torch.Tensor.diagflat"]], "diagonal() (torch.tensor method)": [[229, "torch.Tensor.diagonal"]], "diagonal_scatter() (torch.tensor method)": [[230, "torch.Tensor.diagonal_scatter"]], "diff() (torch.tensor method)": [[231, "torch.Tensor.diff"]], "digamma() (torch.tensor method)": [[232, "torch.Tensor.digamma"]], "digamma_() (torch.tensor method)": [[233, "torch.Tensor.digamma_"]], "dim() (torch.tensor method)": [[234, "torch.Tensor.dim"]], "dim_order() (torch.tensor method)": [[235, "torch.Tensor.dim_order"]], "dist() (torch.tensor method)": [[236, "torch.Tensor.dist"]], "div() (torch.tensor method)": [[237, "torch.Tensor.div"]], "div_() (torch.tensor method)": [[238, "torch.Tensor.div_"]], "divide() (torch.tensor method)": [[239, "torch.Tensor.divide"]], "divide_() (torch.tensor method)": [[240, "torch.Tensor.divide_"]], "dot() (torch.tensor method)": [[241, "torch.Tensor.dot"]], "double() (torch.tensor method)": [[242, "torch.Tensor.double"]], "dsplit() (torch.tensor method)": [[243, "torch.Tensor.dsplit"]], "element_size() (torch.tensor method)": [[244, "torch.Tensor.element_size"]], "eq() (torch.tensor method)": [[245, "torch.Tensor.eq"]], "eq_() (torch.tensor method)": [[246, "torch.Tensor.eq_"]], "equal() (torch.tensor method)": [[247, "torch.Tensor.equal"]], "erf() (torch.tensor method)": [[248, "torch.Tensor.erf"]], "erf_() (torch.tensor method)": [[249, "torch.Tensor.erf_"]], "erfc() (torch.tensor method)": [[250, "torch.Tensor.erfc"]], "erfc_() (torch.tensor method)": [[251, "torch.Tensor.erfc_"]], "erfinv() (torch.tensor method)": [[252, "torch.Tensor.erfinv"]], "erfinv_() (torch.tensor method)": [[253, "torch.Tensor.erfinv_"]], "exp() (torch.tensor method)": [[254, "torch.Tensor.exp"]], "exp_() (torch.tensor method)": [[255, "torch.Tensor.exp_"]], "expand() (torch.tensor method)": [[256, "torch.Tensor.expand"]], "expand_as() (torch.tensor method)": [[257, "torch.Tensor.expand_as"]], "expm1() (torch.tensor method)": [[258, "torch.Tensor.expm1"]], "expm1_() (torch.tensor method)": [[259, "torch.Tensor.expm1_"]], 
"exponential_() (torch.tensor method)": [[260, "torch.Tensor.exponential_"]], "fill_() (torch.tensor method)": [[261, "torch.Tensor.fill_"]], "fill_diagonal_() (torch.tensor method)": [[262, "torch.Tensor.fill_diagonal_"]], "fix() (torch.tensor method)": [[263, "torch.Tensor.fix"]], "fix_() (torch.tensor method)": [[264, "torch.Tensor.fix_"]], "flatten() (torch.tensor method)": [[265, "torch.Tensor.flatten"]], "flip() (torch.tensor method)": [[266, "torch.Tensor.flip"]], "fliplr() (torch.tensor method)": [[267, "torch.Tensor.fliplr"]], "flipud() (torch.tensor method)": [[268, "torch.Tensor.flipud"]], "float() (torch.tensor method)": [[269, "torch.Tensor.float"]], "float_power() (torch.tensor method)": [[270, "torch.Tensor.float_power"]], "float_power_() (torch.tensor method)": [[271, "torch.Tensor.float_power_"]], "floor() (torch.tensor method)": [[272, "torch.Tensor.floor"]], "floor_() (torch.tensor method)": [[273, "torch.Tensor.floor_"]], "floor_divide() (torch.tensor method)": [[274, "torch.Tensor.floor_divide"]], "floor_divide_() (torch.tensor method)": [[275, "torch.Tensor.floor_divide_"]], "fmax() (torch.tensor method)": [[276, "torch.Tensor.fmax"]], "fmin() (torch.tensor method)": [[277, "torch.Tensor.fmin"]], "fmod() (torch.tensor method)": [[278, "torch.Tensor.fmod"]], "fmod_() (torch.tensor method)": [[279, "torch.Tensor.fmod_"]], "frac() (torch.tensor method)": [[280, "torch.Tensor.frac"]], "frac_() (torch.tensor method)": [[281, "torch.Tensor.frac_"]], "frexp() (torch.tensor method)": [[282, "torch.Tensor.frexp"]], "gather() (torch.tensor method)": [[283, "torch.Tensor.gather"]], "gcd() (torch.tensor method)": [[284, "torch.Tensor.gcd"]], "gcd_() (torch.tensor method)": [[285, "torch.Tensor.gcd_"]], "ge() (torch.tensor method)": [[286, "torch.Tensor.ge"]], "ge_() (torch.tensor method)": [[287, "torch.Tensor.ge_"]], "geometric_() (torch.tensor method)": [[288, "torch.Tensor.geometric_"]], "geqrf() (torch.tensor method)": [[289, "torch.Tensor.geqrf"]], "ger() (torch.tensor method)": [[290, "torch.Tensor.ger"]], "get_device() (torch.tensor method)": [[291, "torch.Tensor.get_device"]], "grad (torch.tensor attribute)": [[292, "torch.Tensor.grad"]], "greater() (torch.tensor method)": [[293, "torch.Tensor.greater"]], "greater_() (torch.tensor method)": [[294, "torch.Tensor.greater_"]], "greater_equal() (torch.tensor method)": [[295, "torch.Tensor.greater_equal"]], "greater_equal_() (torch.tensor method)": [[296, "torch.Tensor.greater_equal_"]], "gt() (torch.tensor method)": [[297, "torch.Tensor.gt"]], "gt_() (torch.tensor method)": [[298, "torch.Tensor.gt_"]], "half() (torch.tensor method)": [[299, "torch.Tensor.half"]], "hardshrink() (torch.tensor method)": [[300, "torch.Tensor.hardshrink"]], "heaviside() (torch.tensor method)": [[301, "torch.Tensor.heaviside"]], "histc() (torch.tensor method)": [[302, "torch.Tensor.histc"]], "histogram() (torch.tensor method)": [[303, "torch.Tensor.histogram"]], "hsplit() (torch.tensor method)": [[304, "torch.Tensor.hsplit"]], "hypot() (torch.tensor method)": [[305, "torch.Tensor.hypot"]], "hypot_() (torch.tensor method)": [[306, "torch.Tensor.hypot_"]], "i0() (torch.tensor method)": [[307, "torch.Tensor.i0"]], "i0_() (torch.tensor method)": [[308, "torch.Tensor.i0_"]], "igamma() (torch.tensor method)": [[309, "torch.Tensor.igamma"]], "igamma_() (torch.tensor method)": [[310, "torch.Tensor.igamma_"]], "igammac() (torch.tensor method)": [[311, "torch.Tensor.igammac"]], "igammac_() (torch.tensor method)": [[312, "torch.Tensor.igammac_"]], "imag 
(torch.tensor attribute)": [[313, "torch.Tensor.imag"]], "index_add() (torch.tensor method)": [[314, "torch.Tensor.index_add"]], "index_add_() (torch.tensor method)": [[315, "torch.Tensor.index_add_"]], "index_copy() (torch.tensor method)": [[316, "torch.Tensor.index_copy"]], "index_copy_() (torch.tensor method)": [[317, "torch.Tensor.index_copy_"]], "index_fill() (torch.tensor method)": [[318, "torch.Tensor.index_fill"]], "index_fill_() (torch.tensor method)": [[319, "torch.Tensor.index_fill_"]], "index_put() (torch.tensor method)": [[320, "torch.Tensor.index_put"]], "index_put_() (torch.tensor method)": [[321, "torch.Tensor.index_put_"]], "index_reduce() (torch.tensor method)": [[322, "torch.Tensor.index_reduce"]], "index_reduce_() (torch.tensor method)": [[323, "torch.Tensor.index_reduce_"]], "index_select() (torch.tensor method)": [[324, "torch.Tensor.index_select"]], "indices() (torch.tensor method)": [[325, "torch.Tensor.indices"]], "inner() (torch.tensor method)": [[326, "torch.Tensor.inner"]], "int() (torch.tensor method)": [[327, "torch.Tensor.int"]], "int_repr() (torch.tensor method)": [[328, "torch.Tensor.int_repr"]], "inverse() (torch.tensor method)": [[329, "torch.Tensor.inverse"]], "is_coalesced() (torch.tensor method)": [[330, "torch.Tensor.is_coalesced"]], "is_complex() (torch.tensor method)": [[331, "torch.Tensor.is_complex"]], "is_conj() (torch.tensor method)": [[332, "torch.Tensor.is_conj"]], "is_contiguous() (torch.tensor method)": [[333, "torch.Tensor.is_contiguous"]], "is_cuda (torch.tensor attribute)": [[334, "torch.Tensor.is_cuda"]], "is_floating_point() (torch.tensor method)": [[335, "torch.Tensor.is_floating_point"]], "is_inference() (torch.tensor method)": [[336, "torch.Tensor.is_inference"]], "is_leaf (torch.tensor attribute)": [[337, "torch.Tensor.is_leaf"]], "is_meta (torch.tensor attribute)": [[338, "torch.Tensor.is_meta"]], "is_pinned() (torch.tensor method)": [[339, "torch.Tensor.is_pinned"]], "is_quantized (torch.tensor attribute)": [[340, "torch.Tensor.is_quantized"]], "is_set_to() (torch.tensor method)": [[341, "torch.Tensor.is_set_to"]], "is_shared() (torch.tensor method)": [[342, "torch.Tensor.is_shared"]], "is_signed() (torch.tensor method)": [[343, "torch.Tensor.is_signed"]], "is_sparse (torch.tensor attribute)": [[344, "torch.Tensor.is_sparse"]], "is_sparse_csr (torch.tensor attribute)": [[345, "torch.Tensor.is_sparse_csr"]], "isclose() (torch.tensor method)": [[346, "torch.Tensor.isclose"]], "isfinite() (torch.tensor method)": [[347, "torch.Tensor.isfinite"]], "isinf() (torch.tensor method)": [[348, "torch.Tensor.isinf"]], "isnan() (torch.tensor method)": [[349, "torch.Tensor.isnan"]], "isneginf() (torch.tensor method)": [[350, "torch.Tensor.isneginf"]], "isposinf() (torch.tensor method)": [[351, "torch.Tensor.isposinf"]], "isreal() (torch.tensor method)": [[352, "torch.Tensor.isreal"]], "istft() (torch.tensor method)": [[353, "torch.Tensor.istft"]], "item() (torch.tensor method)": [[354, "torch.Tensor.item"]], "itemsize (torch.tensor attribute)": [[355, "torch.Tensor.itemsize"]], "kthvalue() (torch.tensor method)": [[356, "torch.Tensor.kthvalue"]], "lcm() (torch.tensor method)": [[357, "torch.Tensor.lcm"]], "lcm_() (torch.tensor method)": [[358, "torch.Tensor.lcm_"]], "ldexp() (torch.tensor method)": [[359, "torch.Tensor.ldexp"]], "ldexp_() (torch.tensor method)": [[360, "torch.Tensor.ldexp_"]], "le() (torch.tensor method)": [[361, "torch.Tensor.le"]], "le_() (torch.tensor method)": [[362, "torch.Tensor.le_"]], "lerp() (torch.tensor method)": 
[[363, "torch.Tensor.lerp"]], "lerp_() (torch.tensor method)": [[364, "torch.Tensor.lerp_"]], "less() (torch.tensor method)": [[365, "torch.Tensor.less"]], "less_() (torch.tensor method)": [[366, "torch.Tensor.less_"]], "less_equal() (torch.tensor method)": [[367, "torch.Tensor.less_equal"]], "less_equal_() (torch.tensor method)": [[368, "torch.Tensor.less_equal_"]], "lgamma() (torch.tensor method)": [[369, "torch.Tensor.lgamma"]], "lgamma_() (torch.tensor method)": [[370, "torch.Tensor.lgamma_"]], "log() (torch.tensor method)": [[371, "torch.Tensor.log"]], "log10() (torch.tensor method)": [[372, "torch.Tensor.log10"]], "log10_() (torch.tensor method)": [[373, "torch.Tensor.log10_"]], "log1p() (torch.tensor method)": [[374, "torch.Tensor.log1p"]], "log1p_() (torch.tensor method)": [[375, "torch.Tensor.log1p_"]], "log2() (torch.tensor method)": [[376, "torch.Tensor.log2"]], "log2_() (torch.tensor method)": [[377, "torch.Tensor.log2_"]], "log_() (torch.tensor method)": [[378, "torch.Tensor.log_"]], "log_normal_() (torch.tensor method)": [[379, "torch.Tensor.log_normal_"]], "logaddexp() (torch.tensor method)": [[380, "torch.Tensor.logaddexp"]], "logaddexp2() (torch.tensor method)": [[381, "torch.Tensor.logaddexp2"]], "logcumsumexp() (torch.tensor method)": [[382, "torch.Tensor.logcumsumexp"]], "logdet() (torch.tensor method)": [[383, "torch.Tensor.logdet"]], "logical_and() (torch.tensor method)": [[384, "torch.Tensor.logical_and"]], "logical_and_() (torch.tensor method)": [[385, "torch.Tensor.logical_and_"]], "logical_not() (torch.tensor method)": [[386, "torch.Tensor.logical_not"]], "logical_not_() (torch.tensor method)": [[387, "torch.Tensor.logical_not_"]], "logical_or() (torch.tensor method)": [[388, "torch.Tensor.logical_or"]], "logical_or_() (torch.tensor method)": [[389, "torch.Tensor.logical_or_"]], "logical_xor() (torch.tensor method)": [[390, "torch.Tensor.logical_xor"]], "logical_xor_() (torch.tensor method)": [[391, "torch.Tensor.logical_xor_"]], "logit() (torch.tensor method)": [[392, "torch.Tensor.logit"]], "logit_() (torch.tensor method)": [[393, "torch.Tensor.logit_"]], "logsumexp() (torch.tensor method)": [[394, "torch.Tensor.logsumexp"]], "long() (torch.tensor method)": [[395, "torch.Tensor.long"]], "lt() (torch.tensor method)": [[396, "torch.Tensor.lt"]], "lt_() (torch.tensor method)": [[397, "torch.Tensor.lt_"]], "lu() (torch.tensor method)": [[398, "torch.Tensor.lu"]], "lu_solve() (torch.tensor method)": [[399, "torch.Tensor.lu_solve"]], "map_() (torch.tensor method)": [[400, "torch.Tensor.map_"]], "masked_fill() (torch.tensor method)": [[401, "torch.Tensor.masked_fill"]], "masked_fill_() (torch.tensor method)": [[402, "torch.Tensor.masked_fill_"]], "masked_scatter() (torch.tensor method)": [[403, "torch.Tensor.masked_scatter"]], "masked_scatter_() (torch.tensor method)": [[404, "torch.Tensor.masked_scatter_"]], "masked_select() (torch.tensor method)": [[405, "torch.Tensor.masked_select"]], "matmul() (torch.tensor method)": [[406, "torch.Tensor.matmul"]], "matrix_exp() (torch.tensor method)": [[407, "torch.Tensor.matrix_exp"]], "matrix_power() (torch.tensor method)": [[408, "torch.Tensor.matrix_power"]], "max() (torch.tensor method)": [[409, "torch.Tensor.max"]], "maximum() (torch.tensor method)": [[410, "torch.Tensor.maximum"]], "mean() (torch.tensor method)": [[411, "torch.Tensor.mean"]], "median() (torch.tensor method)": [[412, "torch.Tensor.median"]], "min() (torch.tensor method)": [[413, "torch.Tensor.min"]], "minimum() (torch.tensor method)": [[414, 
"torch.Tensor.minimum"]], "mm() (torch.tensor method)": [[415, "torch.Tensor.mm"]], "mode() (torch.tensor method)": [[416, "torch.Tensor.mode"]], "module_load() (torch.tensor method)": [[417, "torch.Tensor.module_load"]], "moveaxis() (torch.tensor method)": [[418, "torch.Tensor.moveaxis"]], "movedim() (torch.tensor method)": [[419, "torch.Tensor.movedim"]], "msort() (torch.tensor method)": [[420, "torch.Tensor.msort"]], "mul() (torch.tensor method)": [[421, "torch.Tensor.mul"]], "mul_() (torch.tensor method)": [[422, "torch.Tensor.mul_"]], "multinomial() (torch.tensor method)": [[423, "torch.Tensor.multinomial"]], "multiply() (torch.tensor method)": [[424, "torch.Tensor.multiply"]], "multiply_() (torch.tensor method)": [[425, "torch.Tensor.multiply_"]], "mv() (torch.tensor method)": [[426, "torch.Tensor.mv"]], "mvlgamma() (torch.tensor method)": [[427, "torch.Tensor.mvlgamma"]], "mvlgamma_() (torch.tensor method)": [[428, "torch.Tensor.mvlgamma_"]], "nan_to_num() (torch.tensor method)": [[429, "torch.Tensor.nan_to_num"]], "nan_to_num_() (torch.tensor method)": [[430, "torch.Tensor.nan_to_num_"]], "nanmean() (torch.tensor method)": [[431, "torch.Tensor.nanmean"]], "nanmedian() (torch.tensor method)": [[432, "torch.Tensor.nanmedian"]], "nanquantile() (torch.tensor method)": [[433, "torch.Tensor.nanquantile"]], "nansum() (torch.tensor method)": [[434, "torch.Tensor.nansum"]], "narrow() (torch.tensor method)": [[435, "torch.Tensor.narrow"]], "narrow_copy() (torch.tensor method)": [[436, "torch.Tensor.narrow_copy"]], "nbytes (torch.tensor attribute)": [[437, "torch.Tensor.nbytes"]], "ndim (torch.tensor attribute)": [[438, "torch.Tensor.ndim"]], "ndimension() (torch.tensor method)": [[439, "torch.Tensor.ndimension"]], "ne() (torch.tensor method)": [[440, "torch.Tensor.ne"]], "ne_() (torch.tensor method)": [[441, "torch.Tensor.ne_"]], "neg() (torch.tensor method)": [[442, "torch.Tensor.neg"]], "neg_() (torch.tensor method)": [[443, "torch.Tensor.neg_"]], "negative() (torch.tensor method)": [[444, "torch.Tensor.negative"]], "negative_() (torch.tensor method)": [[445, "torch.Tensor.negative_"]], "nelement() (torch.tensor method)": [[446, "torch.Tensor.nelement"]], "new_empty() (torch.tensor method)": [[447, "torch.Tensor.new_empty"]], "new_full() (torch.tensor method)": [[448, "torch.Tensor.new_full"]], "new_ones() (torch.tensor method)": [[449, "torch.Tensor.new_ones"]], "new_tensor() (torch.tensor method)": [[450, "torch.Tensor.new_tensor"]], "new_zeros() (torch.tensor method)": [[451, "torch.Tensor.new_zeros"]], "nextafter() (torch.tensor method)": [[452, "torch.Tensor.nextafter"]], "nextafter_() (torch.tensor method)": [[453, "torch.Tensor.nextafter_"]], "nonzero() (torch.tensor method)": [[454, "torch.Tensor.nonzero"]], "norm() (torch.tensor method)": [[455, "torch.Tensor.norm"]], "normal_() (torch.tensor method)": [[456, "torch.Tensor.normal_"]], "not_equal() (torch.tensor method)": [[457, "torch.Tensor.not_equal"]], "not_equal_() (torch.tensor method)": [[458, "torch.Tensor.not_equal_"]], "numel() (torch.tensor method)": [[459, "torch.Tensor.numel"]], "numpy() (torch.tensor method)": [[460, "torch.Tensor.numpy"]], "orgqr() (torch.tensor method)": [[461, "torch.Tensor.orgqr"]], "ormqr() (torch.tensor method)": [[462, "torch.Tensor.ormqr"]], "outer() (torch.tensor method)": [[463, "torch.Tensor.outer"]], "permute() (torch.tensor method)": [[464, "torch.Tensor.permute"]], "pin_memory() (torch.tensor method)": [[465, "torch.Tensor.pin_memory"]], "pinverse() (torch.tensor method)": [[466, 
"torch.Tensor.pinverse"]], "polygamma() (torch.tensor method)": [[467, "torch.Tensor.polygamma"]], "polygamma_() (torch.tensor method)": [[468, "torch.Tensor.polygamma_"]], "positive() (torch.tensor method)": [[469, "torch.Tensor.positive"]], "pow() (torch.tensor method)": [[470, "torch.Tensor.pow"]], "pow_() (torch.tensor method)": [[471, "torch.Tensor.pow_"]], "prod() (torch.tensor method)": [[472, "torch.Tensor.prod"]], "put_() (torch.tensor method)": [[473, "torch.Tensor.put_"]], "q_per_channel_axis() (torch.tensor method)": [[474, "torch.Tensor.q_per_channel_axis"]], "q_per_channel_scales() (torch.tensor method)": [[475, "torch.Tensor.q_per_channel_scales"]], "q_per_channel_zero_points() (torch.tensor method)": [[476, "torch.Tensor.q_per_channel_zero_points"]], "q_scale() (torch.tensor method)": [[477, "torch.Tensor.q_scale"]], "q_zero_point() (torch.tensor method)": [[478, "torch.Tensor.q_zero_point"]], "qr() (torch.tensor method)": [[479, "torch.Tensor.qr"]], "qscheme() (torch.tensor method)": [[480, "torch.Tensor.qscheme"]], "quantile() (torch.tensor method)": [[481, "torch.Tensor.quantile"]], "rad2deg() (torch.tensor method)": [[482, "torch.Tensor.rad2deg"]], "random_() (torch.tensor method)": [[483, "torch.Tensor.random_"]], "ravel() (torch.tensor method)": [[484, "torch.Tensor.ravel"]], "real (torch.tensor attribute)": [[485, "torch.Tensor.real"]], "reciprocal() (torch.tensor method)": [[486, "torch.Tensor.reciprocal"]], "reciprocal_() (torch.tensor method)": [[487, "torch.Tensor.reciprocal_"]], "record_stream() (torch.tensor method)": [[488, "torch.Tensor.record_stream"]], "register_hook() (torch.tensor method)": [[489, "torch.Tensor.register_hook"]], "register_post_accumulate_grad_hook() (torch.tensor method)": [[490, "torch.Tensor.register_post_accumulate_grad_hook"]], "remainder() (torch.tensor method)": [[491, "torch.Tensor.remainder"]], "remainder_() (torch.tensor method)": [[492, "torch.Tensor.remainder_"]], "renorm() (torch.tensor method)": [[493, "torch.Tensor.renorm"]], "renorm_() (torch.tensor method)": [[494, "torch.Tensor.renorm_"]], "repeat() (torch.tensor method)": [[495, "torch.Tensor.repeat"]], "repeat_interleave() (torch.tensor method)": [[496, "torch.Tensor.repeat_interleave"]], "requires_grad (torch.tensor attribute)": [[497, "torch.Tensor.requires_grad"]], "requires_grad_() (torch.tensor method)": [[498, "torch.Tensor.requires_grad_"]], "reshape() (torch.tensor method)": [[499, "torch.Tensor.reshape"]], "reshape_as() (torch.tensor method)": [[500, "torch.Tensor.reshape_as"]], "resize_() (torch.tensor method)": [[501, "torch.Tensor.resize_"]], "resize_as_() (torch.tensor method)": [[502, "torch.Tensor.resize_as_"]], "resolve_conj() (torch.tensor method)": [[503, "torch.Tensor.resolve_conj"]], "resolve_neg() (torch.tensor method)": [[504, "torch.Tensor.resolve_neg"]], "retain_grad() (torch.tensor method)": [[505, "torch.Tensor.retain_grad"]], "retains_grad (torch.tensor attribute)": [[506, "torch.Tensor.retains_grad"]], "roll() (torch.tensor method)": [[507, "torch.Tensor.roll"]], "rot90() (torch.tensor method)": [[508, "torch.Tensor.rot90"]], "round() (torch.tensor method)": [[509, "torch.Tensor.round"]], "round_() (torch.tensor method)": [[510, "torch.Tensor.round_"]], "row_indices() (torch.tensor method)": [[511, "torch.Tensor.row_indices"]], "rsqrt() (torch.tensor method)": [[512, "torch.Tensor.rsqrt"]], "rsqrt_() (torch.tensor method)": [[513, "torch.Tensor.rsqrt_"]], "scatter() (torch.tensor method)": [[514, "torch.Tensor.scatter"]], "scatter_() 
(torch.tensor method)": [[515, "torch.Tensor.scatter_"]], "scatter_add() (torch.tensor method)": [[516, "torch.Tensor.scatter_add"]], "scatter_add_() (torch.tensor method)": [[517, "torch.Tensor.scatter_add_"]], "scatter_reduce() (torch.tensor method)": [[518, "torch.Tensor.scatter_reduce"]], "scatter_reduce_() (torch.tensor method)": [[519, "torch.Tensor.scatter_reduce_"]], "select() (torch.tensor method)": [[520, "torch.Tensor.select"]], "select_scatter() (torch.tensor method)": [[521, "torch.Tensor.select_scatter"]], "set_() (torch.tensor method)": [[522, "torch.Tensor.set_"]], "sgn() (torch.tensor method)": [[523, "torch.Tensor.sgn"]], "sgn_() (torch.tensor method)": [[524, "torch.Tensor.sgn_"]], "shape (torch.tensor attribute)": [[525, "torch.Tensor.shape"]], "share_memory_() (torch.tensor method)": [[526, "torch.Tensor.share_memory_"]], "short() (torch.tensor method)": [[527, "torch.Tensor.short"]], "sigmoid() (torch.tensor method)": [[528, "torch.Tensor.sigmoid"]], "sigmoid_() (torch.tensor method)": [[529, "torch.Tensor.sigmoid_"]], "sign() (torch.tensor method)": [[530, "torch.Tensor.sign"]], "sign_() (torch.tensor method)": [[531, "torch.Tensor.sign_"]], "signbit() (torch.tensor method)": [[532, "torch.Tensor.signbit"]], "sin() (torch.tensor method)": [[533, "torch.Tensor.sin"]], "sin_() (torch.tensor method)": [[534, "torch.Tensor.sin_"]], "sinc() (torch.tensor method)": [[535, "torch.Tensor.sinc"]], "sinc_() (torch.tensor method)": [[536, "torch.Tensor.sinc_"]], "sinh() (torch.tensor method)": [[537, "torch.Tensor.sinh"]], "sinh_() (torch.tensor method)": [[538, "torch.Tensor.sinh_"]], "size() (torch.tensor method)": [[539, "torch.Tensor.size"]], "slice_scatter() (torch.tensor method)": [[540, "torch.Tensor.slice_scatter"]], "slogdet() (torch.tensor method)": [[541, "torch.Tensor.slogdet"]], "smm() (torch.tensor method)": [[542, "torch.Tensor.smm"]], "softmax() (torch.tensor method)": [[543, "torch.Tensor.softmax"]], "sort() (torch.tensor method)": [[544, "torch.Tensor.sort"]], "sparse_dim() (torch.tensor method)": [[545, "torch.Tensor.sparse_dim"]], "sparse_mask() (torch.tensor method)": [[546, "torch.Tensor.sparse_mask"]], "sparse_resize_() (torch.tensor method)": [[547, "torch.Tensor.sparse_resize_"]], "sparse_resize_and_clear_() (torch.tensor method)": [[548, "torch.Tensor.sparse_resize_and_clear_"]], "split() (torch.tensor method)": [[549, "torch.Tensor.split"]], "sqrt() (torch.tensor method)": [[550, "torch.Tensor.sqrt"]], "sqrt_() (torch.tensor method)": [[551, "torch.Tensor.sqrt_"]], "square() (torch.tensor method)": [[552, "torch.Tensor.square"]], "square_() (torch.tensor method)": [[553, "torch.Tensor.square_"]], "squeeze() (torch.tensor method)": [[554, "torch.Tensor.squeeze"]], "squeeze_() (torch.tensor method)": [[555, "torch.Tensor.squeeze_"]], "sspaddmm() (torch.tensor method)": [[556, "torch.Tensor.sspaddmm"]], "std() (torch.tensor method)": [[557, "torch.Tensor.std"]], "stft() (torch.tensor method)": [[558, "torch.Tensor.stft"]], "storage() (torch.tensor method)": [[559, "torch.Tensor.storage"]], "storage_offset() (torch.tensor method)": [[560, "torch.Tensor.storage_offset"]], "storage_type() (torch.tensor method)": [[561, "torch.Tensor.storage_type"]], "stride() (torch.tensor method)": [[562, "torch.Tensor.stride"]], "sub() (torch.tensor method)": [[563, "torch.Tensor.sub"]], "sub_() (torch.tensor method)": [[564, "torch.Tensor.sub_"]], "subtract() (torch.tensor method)": [[565, "torch.Tensor.subtract"]], "subtract_() (torch.tensor method)": [[566, 
"torch.Tensor.subtract_"]], "sum() (torch.tensor method)": [[567, "torch.Tensor.sum"]], "sum_to_size() (torch.tensor method)": [[568, "torch.Tensor.sum_to_size"]], "svd() (torch.tensor method)": [[569, "torch.Tensor.svd"]], "swapaxes() (torch.tensor method)": [[570, "torch.Tensor.swapaxes"]], "swapdims() (torch.tensor method)": [[571, "torch.Tensor.swapdims"]], "t() (torch.tensor method)": [[572, "torch.Tensor.t"]], "t_() (torch.tensor method)": [[573, "torch.Tensor.t_"]], "take() (torch.tensor method)": [[574, "torch.Tensor.take"]], "take_along_dim() (torch.tensor method)": [[575, "torch.Tensor.take_along_dim"]], "tan() (torch.tensor method)": [[576, "torch.Tensor.tan"]], "tan_() (torch.tensor method)": [[577, "torch.Tensor.tan_"]], "tanh() (torch.tensor method)": [[578, "torch.Tensor.tanh"]], "tanh_() (torch.tensor method)": [[579, "torch.Tensor.tanh_"]], "tensor_split() (torch.tensor method)": [[580, "torch.Tensor.tensor_split"]], "tile() (torch.tensor method)": [[581, "torch.Tensor.tile"]], "to() (torch.tensor method)": [[582, "torch.Tensor.to"]], "to_dense() (torch.tensor method)": [[583, "torch.Tensor.to_dense"]], "to_mkldnn() (torch.tensor method)": [[584, "torch.Tensor.to_mkldnn"]], "to_sparse() (torch.tensor method)": [[585, "torch.Tensor.to_sparse"]], "to_sparse_bsc() (torch.tensor method)": [[586, "torch.Tensor.to_sparse_bsc"]], "to_sparse_bsr() (torch.tensor method)": [[587, "torch.Tensor.to_sparse_bsr"]], "to_sparse_coo() (torch.tensor method)": [[588, "torch.Tensor.to_sparse_coo"]], "to_sparse_csc() (torch.tensor method)": [[589, "torch.Tensor.to_sparse_csc"]], "to_sparse_csr() (torch.tensor method)": [[590, "torch.Tensor.to_sparse_csr"]], "tolist() (torch.tensor method)": [[591, "torch.Tensor.tolist"]], "topk() (torch.tensor method)": [[592, "torch.Tensor.topk"]], "trace() (torch.tensor method)": [[593, "torch.Tensor.trace"]], "transpose() (torch.tensor method)": [[594, "torch.Tensor.transpose"]], "transpose_() (torch.tensor method)": [[595, "torch.Tensor.transpose_"]], "triangular_solve() (torch.tensor method)": [[596, "torch.Tensor.triangular_solve"]], "tril() (torch.tensor method)": [[597, "torch.Tensor.tril"]], "tril_() (torch.tensor method)": [[598, "torch.Tensor.tril_"]], "triu() (torch.tensor method)": [[599, "torch.Tensor.triu"]], "triu_() (torch.tensor method)": [[600, "torch.Tensor.triu_"]], "true_divide() (torch.tensor method)": [[601, "torch.Tensor.true_divide"]], "true_divide_() (torch.tensor method)": [[602, "torch.Tensor.true_divide_"]], "trunc() (torch.tensor method)": [[603, "torch.Tensor.trunc"]], "trunc_() (torch.tensor method)": [[604, "torch.Tensor.trunc_"]], "type() (torch.tensor method)": [[605, "torch.Tensor.type"]], "type_as() (torch.tensor method)": [[606, "torch.Tensor.type_as"]], "unbind() (torch.tensor method)": [[607, "torch.Tensor.unbind"]], "unflatten() (torch.tensor method)": [[608, "torch.Tensor.unflatten"]], "unfold() (torch.tensor method)": [[609, "torch.Tensor.unfold"]], "uniform_() (torch.tensor method)": [[610, "torch.Tensor.uniform_"]], "unique() (torch.tensor method)": [[611, "torch.Tensor.unique"]], "unique_consecutive() (torch.tensor method)": [[612, "torch.Tensor.unique_consecutive"]], "unsqueeze() (torch.tensor method)": [[613, "torch.Tensor.unsqueeze"]], "unsqueeze_() (torch.tensor method)": [[614, "torch.Tensor.unsqueeze_"]], "untyped_storage() (torch.tensor method)": [[615, "torch.Tensor.untyped_storage"]], "values() (torch.tensor method)": [[616, "torch.Tensor.values"]], "var() (torch.tensor method)": [[617, 
"torch.Tensor.var"]], "vdot() (torch.tensor method)": [[618, "torch.Tensor.vdot"]], "view() (torch.tensor method)": [[619, "torch.Tensor.view"]], "view_as() (torch.tensor method)": [[620, "torch.Tensor.view_as"]], "vsplit() (torch.tensor method)": [[621, "torch.Tensor.vsplit"]], "where() (torch.tensor method)": [[622, "torch.Tensor.where"]], "xlogy() (torch.tensor method)": [[623, "torch.Tensor.xlogy"]], "xlogy_() (torch.tensor method)": [[624, "torch.Tensor.xlogy_"]], "zero_() (torch.tensor method)": [[625, "torch.Tensor.zero_"]], "_assert() (in module torch)": [[626, "torch._assert"]], "_foreach_abs() (in module torch)": [[627, "torch._foreach_abs"]], "_foreach_abs_() (in module torch)": [[628, "torch._foreach_abs_"]], "_foreach_acos() (in module torch)": [[629, "torch._foreach_acos"]], "_foreach_acos_() (in module torch)": [[630, "torch._foreach_acos_"]], "_foreach_asin() (in module torch)": [[631, "torch._foreach_asin"]], "_foreach_asin_() (in module torch)": [[632, "torch._foreach_asin_"]], "_foreach_atan() (in module torch)": [[633, "torch._foreach_atan"]], "_foreach_atan_() (in module torch)": [[634, "torch._foreach_atan_"]], "_foreach_ceil() (in module torch)": [[635, "torch._foreach_ceil"]], "_foreach_ceil_() (in module torch)": [[636, "torch._foreach_ceil_"]], "_foreach_cos() (in module torch)": [[637, "torch._foreach_cos"]], "_foreach_cos_() (in module torch)": [[638, "torch._foreach_cos_"]], "_foreach_cosh() (in module torch)": [[639, "torch._foreach_cosh"]], "_foreach_cosh_() (in module torch)": [[640, "torch._foreach_cosh_"]], "_foreach_erf() (in module torch)": [[641, "torch._foreach_erf"]], "_foreach_erf_() (in module torch)": [[642, "torch._foreach_erf_"]], "_foreach_erfc() (in module torch)": [[643, "torch._foreach_erfc"]], "_foreach_erfc_() (in module torch)": [[644, "torch._foreach_erfc_"]], "_foreach_exp() (in module torch)": [[645, "torch._foreach_exp"]], "_foreach_exp_() (in module torch)": [[646, "torch._foreach_exp_"]], "_foreach_expm1() (in module torch)": [[647, "torch._foreach_expm1"]], "_foreach_expm1_() (in module torch)": [[648, "torch._foreach_expm1_"]], "_foreach_floor() (in module torch)": [[649, "torch._foreach_floor"]], "_foreach_floor_() (in module torch)": [[650, "torch._foreach_floor_"]], "_foreach_frac() (in module torch)": [[651, "torch._foreach_frac"]], "_foreach_frac_() (in module torch)": [[652, "torch._foreach_frac_"]], "_foreach_lgamma() (in module torch)": [[653, "torch._foreach_lgamma"]], "_foreach_lgamma_() (in module torch)": [[654, "torch._foreach_lgamma_"]], "_foreach_log() (in module torch)": [[655, "torch._foreach_log"]], "_foreach_log10() (in module torch)": [[656, "torch._foreach_log10"]], "_foreach_log10_() (in module torch)": [[657, "torch._foreach_log10_"]], "_foreach_log1p() (in module torch)": [[658, "torch._foreach_log1p"]], "_foreach_log1p_() (in module torch)": [[659, "torch._foreach_log1p_"]], "_foreach_log2() (in module torch)": [[660, "torch._foreach_log2"]], "_foreach_log2_() (in module torch)": [[661, "torch._foreach_log2_"]], "_foreach_log_() (in module torch)": [[662, "torch._foreach_log_"]], "_foreach_neg() (in module torch)": [[663, "torch._foreach_neg"]], "_foreach_neg_() (in module torch)": [[664, "torch._foreach_neg_"]], "_foreach_reciprocal() (in module torch)": [[665, "torch._foreach_reciprocal"]], "_foreach_reciprocal_() (in module torch)": [[666, "torch._foreach_reciprocal_"]], "_foreach_round() (in module torch)": [[667, "torch._foreach_round"]], "_foreach_round_() (in module torch)": [[668, 
"torch._foreach_round_"]], "_foreach_sigmoid() (in module torch)": [[669, "torch._foreach_sigmoid"]], "_foreach_sigmoid_() (in module torch)": [[670, "torch._foreach_sigmoid_"]], "_foreach_sin() (in module torch)": [[671, "torch._foreach_sin"]], "_foreach_sin_() (in module torch)": [[672, "torch._foreach_sin_"]], "_foreach_sinh() (in module torch)": [[673, "torch._foreach_sinh"]], "_foreach_sinh_() (in module torch)": [[674, "torch._foreach_sinh_"]], "_foreach_sqrt() (in module torch)": [[675, "torch._foreach_sqrt"]], "_foreach_sqrt_() (in module torch)": [[676, "torch._foreach_sqrt_"]], "_foreach_tan() (in module torch)": [[677, "torch._foreach_tan"]], "_foreach_tan_() (in module torch)": [[678, "torch._foreach_tan_"]], "_foreach_trunc() (in module torch)": [[679, "torch._foreach_trunc"]], "_foreach_trunc_() (in module torch)": [[680, "torch._foreach_trunc_"]], "_foreach_zero_() (in module torch)": [[681, "torch._foreach_zero_"]], "set_logs() (in module torch._logging)": [[682, "torch._logging.set_logs"]], "abs() (in module torch)": [[683, "torch.abs"]], "absolute() (in module torch)": [[684, "torch.absolute"]], "acos() (in module torch)": [[685, "torch.acos"]], "acosh() (in module torch)": [[686, "torch.acosh"]], "add() (in module torch)": [[687, "torch.add"]], "addbmm() (in module torch)": [[688, "torch.addbmm"]], "addcdiv() (in module torch)": [[689, "torch.addcdiv"]], "addcmul() (in module torch)": [[690, "torch.addcmul"]], "addmm() (in module torch)": [[691, "torch.addmm"]], "addmv() (in module torch)": [[692, "torch.addmv"]], "addr() (in module torch)": [[693, "torch.addr"]], "adjoint() (in module torch)": [[694, "torch.adjoint"]], "all() (in module torch)": [[695, "torch.all"]], "allclose() (in module torch)": [[696, "torch.allclose"]], "amax() (in module torch)": [[697, "torch.amax"]], "amin() (in module torch)": [[698, "torch.amin"]], "aminmax() (in module torch)": [[699, "torch.aminmax"]], "angle() (in module torch)": [[700, "torch.angle"]], "any() (in module torch)": [[701, "torch.any"]], "bnrelu2d (class in torch.ao.nn.intrinsic)": [[702, "torch.ao.nn.intrinsic.BNReLU2d"]], "bnrelu3d (class in torch.ao.nn.intrinsic)": [[703, "torch.ao.nn.intrinsic.BNReLU3d"]], "convbn1d (class in torch.ao.nn.intrinsic)": [[704, "torch.ao.nn.intrinsic.ConvBn1d"]], "convbn2d (class in torch.ao.nn.intrinsic)": [[705, "torch.ao.nn.intrinsic.ConvBn2d"]], "convbn3d (class in torch.ao.nn.intrinsic)": [[706, "torch.ao.nn.intrinsic.ConvBn3d"]], "convbnrelu1d (class in torch.ao.nn.intrinsic)": [[707, "torch.ao.nn.intrinsic.ConvBnReLU1d"]], "convbnrelu2d (class in torch.ao.nn.intrinsic)": [[708, "torch.ao.nn.intrinsic.ConvBnReLU2d"]], "convbnrelu3d (class in torch.ao.nn.intrinsic)": [[709, "torch.ao.nn.intrinsic.ConvBnReLU3d"]], "convrelu1d (class in torch.ao.nn.intrinsic)": [[710, "torch.ao.nn.intrinsic.ConvReLU1d"]], "convrelu2d (class in torch.ao.nn.intrinsic)": [[711, "torch.ao.nn.intrinsic.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic)": [[712, "torch.ao.nn.intrinsic.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic)": [[713, "torch.ao.nn.intrinsic.LinearReLU"]], "convbn1d (class in torch.ao.nn.intrinsic.qat)": [[714, "torch.ao.nn.intrinsic.qat.ConvBn1d"]], "convbn2d (class in torch.ao.nn.intrinsic.qat)": [[715, "torch.ao.nn.intrinsic.qat.ConvBn2d"]], "convbn3d (class in torch.ao.nn.intrinsic.qat)": [[716, "torch.ao.nn.intrinsic.qat.ConvBn3d"]], "convbnrelu1d (class in torch.ao.nn.intrinsic.qat)": [[717, "torch.ao.nn.intrinsic.qat.ConvBnReLU1d"]], "convbnrelu2d (class in 
torch.ao.nn.intrinsic.qat)": [[718, "torch.ao.nn.intrinsic.qat.ConvBnReLU2d"]], "convbnrelu3d (class in torch.ao.nn.intrinsic.qat)": [[719, "torch.ao.nn.intrinsic.qat.ConvBnReLU3d"]], "convrelu2d (class in torch.ao.nn.intrinsic.qat)": [[720, "torch.ao.nn.intrinsic.qat.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic.qat)": [[721, "torch.ao.nn.intrinsic.qat.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic.qat)": [[722, "torch.ao.nn.intrinsic.qat.LinearReLU"]], "freeze_bn_stats (class in torch.ao.nn.intrinsic.qat)": [[723, "torch.ao.nn.intrinsic.qat.freeze_bn_stats"]], "update_bn_stats (class in torch.ao.nn.intrinsic.qat)": [[724, "torch.ao.nn.intrinsic.qat.update_bn_stats"]], "bnrelu2d (class in torch.ao.nn.intrinsic.quantized)": [[725, "torch.ao.nn.intrinsic.quantized.BNReLU2d"]], "bnrelu3d (class in torch.ao.nn.intrinsic.quantized)": [[726, "torch.ao.nn.intrinsic.quantized.BNReLU3d"]], "convrelu1d (class in torch.ao.nn.intrinsic.quantized)": [[727, "torch.ao.nn.intrinsic.quantized.ConvReLU1d"]], "convrelu2d (class in torch.ao.nn.intrinsic.quantized)": [[728, "torch.ao.nn.intrinsic.quantized.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic.quantized)": [[729, "torch.ao.nn.intrinsic.quantized.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic.quantized)": [[730, "torch.ao.nn.intrinsic.quantized.LinearReLU"]], "linearrelu (class in torch.ao.nn.intrinsic.quantized.dynamic)": [[731, "torch.ao.nn.intrinsic.quantized.dynamic.LinearReLU"]], "conv2d (class in torch.ao.nn.qat)": [[732, "torch.ao.nn.qat.Conv2d"]], "conv3d (class in torch.ao.nn.qat)": [[733, "torch.ao.nn.qat.Conv3d"]], "linear (class in torch.ao.nn.qat)": [[734, "torch.ao.nn.qat.Linear"]], "from_float() (torch.ao.nn.qat.linear class method)": [[734, "torch.ao.nn.qat.Linear.from_float"]], "linear (class in torch.ao.nn.qat.dynamic)": [[735, "torch.ao.nn.qat.dynamic.Linear"]], "lstm (class in torch.ao.nn.quantizable)": [[736, "torch.ao.nn.quantizable.LSTM"]], "multiheadattention (class in torch.ao.nn.quantizable)": [[737, "torch.ao.nn.quantizable.MultiheadAttention"]], "dequantize() (torch.ao.nn.quantizable.multiheadattention method)": [[737, "torch.ao.nn.quantizable.MultiheadAttention.dequantize"]], "forward() (torch.ao.nn.quantizable.multiheadattention method)": [[737, "torch.ao.nn.quantizable.MultiheadAttention.forward"]], "batchnorm2d (class in torch.ao.nn.quantized)": [[738, "torch.ao.nn.quantized.BatchNorm2d"]], "batchnorm3d (class in torch.ao.nn.quantized)": [[739, "torch.ao.nn.quantized.BatchNorm3d"]], "conv1d (class in torch.ao.nn.quantized)": [[740, "torch.ao.nn.quantized.Conv1d"]], "from_float() (torch.ao.nn.quantized.conv1d class method)": [[740, "torch.ao.nn.quantized.Conv1d.from_float"]], "conv2d (class in torch.ao.nn.quantized)": [[741, "torch.ao.nn.quantized.Conv2d"]], "from_float() (torch.ao.nn.quantized.conv2d class method)": [[741, "torch.ao.nn.quantized.Conv2d.from_float"]], "conv3d (class in torch.ao.nn.quantized)": [[742, "torch.ao.nn.quantized.Conv3d"]], "from_float() (torch.ao.nn.quantized.conv3d class method)": [[742, "torch.ao.nn.quantized.Conv3d.from_float"]], "convtranspose1d (class in torch.ao.nn.quantized)": [[743, "torch.ao.nn.quantized.ConvTranspose1d"]], "convtranspose2d (class in torch.ao.nn.quantized)": [[744, "torch.ao.nn.quantized.ConvTranspose2d"]], "convtranspose3d (class in torch.ao.nn.quantized)": [[745, "torch.ao.nn.quantized.ConvTranspose3d"]], "elu (class in torch.ao.nn.quantized)": [[746, "torch.ao.nn.quantized.ELU"]], "embedding (class in 
torch.ao.nn.quantized)": [[747, "torch.ao.nn.quantized.Embedding"]], "from_float() (torch.ao.nn.quantized.embedding class method)": [[747, "torch.ao.nn.quantized.Embedding.from_float"]], "embeddingbag (class in torch.ao.nn.quantized)": [[748, "torch.ao.nn.quantized.EmbeddingBag"]], "from_float() (torch.ao.nn.quantized.embeddingbag class method)": [[748, "torch.ao.nn.quantized.EmbeddingBag.from_float"]], "fxfloatfunctional (class in torch.ao.nn.quantized)": [[749, "torch.ao.nn.quantized.FXFloatFunctional"]], "floatfunctional (class in torch.ao.nn.quantized)": [[750, "torch.ao.nn.quantized.FloatFunctional"]], "groupnorm (class in torch.ao.nn.quantized)": [[751, "torch.ao.nn.quantized.GroupNorm"]], "hardswish (class in torch.ao.nn.quantized)": [[752, "torch.ao.nn.quantized.Hardswish"]], "instancenorm1d (class in torch.ao.nn.quantized)": [[753, "torch.ao.nn.quantized.InstanceNorm1d"]], "instancenorm2d (class in torch.ao.nn.quantized)": [[754, "torch.ao.nn.quantized.InstanceNorm2d"]], "instancenorm3d (class in torch.ao.nn.quantized)": [[755, "torch.ao.nn.quantized.InstanceNorm3d"]], "layernorm (class in torch.ao.nn.quantized)": [[756, "torch.ao.nn.quantized.LayerNorm"]], "leakyrelu (class in torch.ao.nn.quantized)": [[757, "torch.ao.nn.quantized.LeakyReLU"]], "linear (class in torch.ao.nn.quantized)": [[758, "torch.ao.nn.quantized.Linear"]], "from_float() (torch.ao.nn.quantized.linear class method)": [[758, "torch.ao.nn.quantized.Linear.from_float"]], "from_reference() (torch.ao.nn.quantized.linear class method)": [[758, "torch.ao.nn.quantized.Linear.from_reference"]], "qfunctional (class in torch.ao.nn.quantized)": [[759, "torch.ao.nn.quantized.QFunctional"]], "relu6 (class in torch.ao.nn.quantized)": [[760, "torch.ao.nn.quantized.ReLU6"]], "sigmoid (class in torch.ao.nn.quantized)": [[761, "torch.ao.nn.quantized.Sigmoid"]], "gru (class in torch.ao.nn.quantized.dynamic)": [[762, "torch.ao.nn.quantized.dynamic.GRU"]], "grucell (class in torch.ao.nn.quantized.dynamic)": [[763, "torch.ao.nn.quantized.dynamic.GRUCell"]], "lstm (class in torch.ao.nn.quantized.dynamic)": [[764, "torch.ao.nn.quantized.dynamic.LSTM"]], "lstmcell (class in torch.ao.nn.quantized.dynamic)": [[765, "torch.ao.nn.quantized.dynamic.LSTMCell"]], "linear (class in torch.ao.nn.quantized.dynamic)": [[766, "torch.ao.nn.quantized.dynamic.Linear"]], "from_float() (torch.ao.nn.quantized.dynamic.linear class method)": [[766, "torch.ao.nn.quantized.dynamic.Linear.from_float"]], "from_reference() (torch.ao.nn.quantized.dynamic.linear class method)": [[766, "torch.ao.nn.quantized.dynamic.Linear.from_reference"]], "rnncell (class in torch.ao.nn.quantized.dynamic)": [[767, "torch.ao.nn.quantized.dynamic.RNNCell"]], "adaptive_avg_pool2d (class in torch.ao.nn.quantized.functional)": [[768, "torch.ao.nn.quantized.functional.adaptive_avg_pool2d"]], "adaptive_avg_pool3d (class in torch.ao.nn.quantized.functional)": [[769, "torch.ao.nn.quantized.functional.adaptive_avg_pool3d"]], "avg_pool2d (class in torch.ao.nn.quantized.functional)": [[770, "torch.ao.nn.quantized.functional.avg_pool2d"]], "avg_pool3d (class in torch.ao.nn.quantized.functional)": [[771, "torch.ao.nn.quantized.functional.avg_pool3d"]], "celu (class in torch.ao.nn.quantized.functional)": [[772, "torch.ao.nn.quantized.functional.celu"]], "clamp (class in torch.ao.nn.quantized.functional)": [[773, "torch.ao.nn.quantized.functional.clamp"]], "conv1d (class in torch.ao.nn.quantized.functional)": [[774, "torch.ao.nn.quantized.functional.conv1d"]], "conv2d (class in 
torch.ao.nn.quantized.functional)": [[775, "torch.ao.nn.quantized.functional.conv2d"]], "conv3d (class in torch.ao.nn.quantized.functional)": [[776, "torch.ao.nn.quantized.functional.conv3d"]], "elu (class in torch.ao.nn.quantized.functional)": [[777, "torch.ao.nn.quantized.functional.elu"]], "hardsigmoid (class in torch.ao.nn.quantized.functional)": [[778, "torch.ao.nn.quantized.functional.hardsigmoid"]], "hardswish (class in torch.ao.nn.quantized.functional)": [[779, "torch.ao.nn.quantized.functional.hardswish"]], "hardtanh (class in torch.ao.nn.quantized.functional)": [[780, "torch.ao.nn.quantized.functional.hardtanh"]], "interpolate (class in torch.ao.nn.quantized.functional)": [[781, "torch.ao.nn.quantized.functional.interpolate"]], "leaky_relu (class in torch.ao.nn.quantized.functional)": [[782, "torch.ao.nn.quantized.functional.leaky_relu"]], "linear (class in torch.ao.nn.quantized.functional)": [[783, "torch.ao.nn.quantized.functional.linear"]], "max_pool1d (class in torch.ao.nn.quantized.functional)": [[784, "torch.ao.nn.quantized.functional.max_pool1d"]], "max_pool2d (class in torch.ao.nn.quantized.functional)": [[785, "torch.ao.nn.quantized.functional.max_pool2d"]], "threshold (class in torch.ao.nn.quantized.functional)": [[786, "torch.ao.nn.quantized.functional.threshold"]], "upsample (class in torch.ao.nn.quantized.functional)": [[787, "torch.ao.nn.quantized.functional.upsample"]], "upsample_bilinear (class in torch.ao.nn.quantized.functional)": [[788, "torch.ao.nn.quantized.functional.upsample_bilinear"]], "upsample_nearest (class in torch.ao.nn.quantized.functional)": [[789, "torch.ao.nn.quantized.functional.upsample_nearest"]], "dequantstub (class in torch.ao.quantization)": [[790, "torch.ao.quantization.DeQuantStub"]], "quantstub (class in torch.ao.quantization)": [[791, "torch.ao.quantization.QuantStub"]], "quantwrapper (class in torch.ao.quantization)": [[792, "torch.ao.quantization.QuantWrapper"]], "add_quant_dequant (class in torch.ao.quantization)": [[793, "torch.ao.quantization.add_quant_dequant"]], "backendconfig (class in torch.ao.quantization.backend_config)": [[794, "torch.ao.quantization.backend_config.BackendConfig"]], "configs (torch.ao.quantization.backend_config.backendconfig property)": [[794, "torch.ao.quantization.backend_config.BackendConfig.configs"]], "from_dict() (torch.ao.quantization.backend_config.backendconfig class method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.from_dict"]], "set_backend_pattern_config() (torch.ao.quantization.backend_config.backendconfig method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.set_backend_pattern_config"]], "set_backend_pattern_configs() (torch.ao.quantization.backend_config.backendconfig method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.set_backend_pattern_configs"]], "set_name() (torch.ao.quantization.backend_config.backendconfig method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.set_name"]], "to_dict() (torch.ao.quantization.backend_config.backendconfig method)": [[794, "torch.ao.quantization.backend_config.BackendConfig.to_dict"]], "backendpatternconfig (class in torch.ao.quantization.backend_config)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig"]], "add_dtype_config() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.add_dtype_config"]], "from_dict() (torch.ao.quantization.backend_config.backendpatternconfig class method)": [[795, 
"torch.ao.quantization.backend_config.BackendPatternConfig.from_dict"]], "set_dtype_configs() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_dtype_configs"]], "set_fused_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_fused_module"]], "set_fuser_method() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_fuser_method"]], "set_observation_type() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_observation_type"]], "set_pattern() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_pattern"]], "set_qat_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_qat_module"]], "set_reference_quantized_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_reference_quantized_module"]], "set_root_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.set_root_module"]], "to_dict() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[795, "torch.ao.quantization.backend_config.BackendPatternConfig.to_dict"]], "dtypeconfig (class in torch.ao.quantization.backend_config)": [[796, "torch.ao.quantization.backend_config.DTypeConfig"]], "from_dict() (torch.ao.quantization.backend_config.dtypeconfig class method)": [[796, "torch.ao.quantization.backend_config.DTypeConfig.from_dict"]], "to_dict() (torch.ao.quantization.backend_config.dtypeconfig method)": [[796, "torch.ao.quantization.backend_config.DTypeConfig.to_dict"]], "dtypewithconstraints (class in torch.ao.quantization.backend_config)": [[797, "torch.ao.quantization.backend_config.DTypeWithConstraints"]], "input_output_not_observed (torch.ao.quantization.backend_config.observationtype attribute)": [[798, "torch.ao.quantization.backend_config.ObservationType.INPUT_OUTPUT_NOT_OBSERVED"]], "output_share_observer_with_input (torch.ao.quantization.backend_config.observationtype attribute)": [[798, "torch.ao.quantization.backend_config.ObservationType.OUTPUT_SHARE_OBSERVER_WITH_INPUT"]], "output_use_different_observer_as_input (torch.ao.quantization.backend_config.observationtype attribute)": [[798, "torch.ao.quantization.backend_config.ObservationType.OUTPUT_USE_DIFFERENT_OBSERVER_AS_INPUT"]], "observationtype (class in torch.ao.quantization.backend_config)": [[798, "torch.ao.quantization.backend_config.ObservationType"]], "convert (class in torch.ao.quantization)": [[799, "torch.ao.quantization.convert"]], "default_eval_fn (class in torch.ao.quantization)": [[800, "torch.ao.quantization.default_eval_fn"]], "fakequantize (class in torch.ao.quantization.fake_quantize)": [[801, "torch.ao.quantization.fake_quantize.FakeQuantize"]], "fakequantizebase (class in torch.ao.quantization.fake_quantize)": [[802, "torch.ao.quantization.fake_quantize.FakeQuantizeBase"]], "fixedqparamsfakequantize (class in torch.ao.quantization.fake_quantize)": [[803, "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize"]], "extra_repr() 
(torch.ao.quantization.fake_quantize.fixedqparamsfakequantize method)": [[803, "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize.extra_repr"]], "fusedmovingavgobsfakequantize (class in torch.ao.quantization.fake_quantize)": [[804, "torch.ao.quantization.fake_quantize.FusedMovingAvgObsFakeQuantize"]], "default_fake_quant (in module torch.ao.quantization.fake_quantize)": [[805, "torch.ao.quantization.fake_quantize.default_fake_quant"]], "default_fused_act_fake_quant (in module torch.ao.quantization.fake_quantize)": [[806, "torch.ao.quantization.fake_quantize.default_fused_act_fake_quant"]], "default_fused_per_channel_wt_fake_quant (in module torch.ao.quantization.fake_quantize)": [[807, "torch.ao.quantization.fake_quantize.default_fused_per_channel_wt_fake_quant"]], "default_fused_wt_fake_quant (in module torch.ao.quantization.fake_quantize)": [[808, "torch.ao.quantization.fake_quantize.default_fused_wt_fake_quant"]], "default_histogram_fake_quant (in module torch.ao.quantization.fake_quantize)": [[809, "torch.ao.quantization.fake_quantize.default_histogram_fake_quant"]], "default_per_channel_weight_fake_quant (in module torch.ao.quantization.fake_quantize)": [[810, "torch.ao.quantization.fake_quantize.default_per_channel_weight_fake_quant"]], "default_weight_fake_quant (in module torch.ao.quantization.fake_quantize)": [[811, "torch.ao.quantization.fake_quantize.default_weight_fake_quant"]], "disable_fake_quant (class in torch.ao.quantization.fake_quantize)": [[812, "torch.ao.quantization.fake_quantize.disable_fake_quant"]], "disable_observer (class in torch.ao.quantization.fake_quantize)": [[813, "torch.ao.quantization.fake_quantize.disable_observer"]], "enable_fake_quant (class in torch.ao.quantization.fake_quantize)": [[814, "torch.ao.quantization.fake_quantize.enable_fake_quant"]], "enable_observer (class in torch.ao.quantization.fake_quantize)": [[815, "torch.ao.quantization.fake_quantize.enable_observer"]], "fuse_modules (class in torch.ao.quantization.fuse_modules)": [[816, "torch.ao.quantization.fuse_modules.fuse_modules"]], "convertcustomconfig (class in torch.ao.quantization.fx.custom_config)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.convertcustomconfig class method)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.from_dict"]], "set_observed_to_quantized_mapping() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.set_observed_to_quantized_mapping"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.set_preserved_attributes"]], "to_dict() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[817, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.to_dict"]], "fusecustomconfig (class in torch.ao.quantization.fx.custom_config)": [[818, "torch.ao.quantization.fx.custom_config.FuseCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.fusecustomconfig class method)": [[818, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.from_dict"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.fusecustomconfig method)": [[818, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.set_preserved_attributes"]], "to_dict() (torch.ao.quantization.fx.custom_config.fusecustomconfig method)": [[818, 
"torch.ao.quantization.fx.custom_config.FuseCustomConfig.to_dict"]], "preparecustomconfig (class in torch.ao.quantization.fx.custom_config)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.preparecustomconfig class method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.from_dict"]], "set_float_to_observed_mapping() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_float_to_observed_mapping"]], "set_input_quantized_indexes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_input_quantized_indexes"]], "set_non_traceable_module_classes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_non_traceable_module_classes"]], "set_non_traceable_module_names() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_non_traceable_module_names"]], "set_output_quantized_indexes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_output_quantized_indexes"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_preserved_attributes"]], "set_standalone_module_class() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_standalone_module_class"]], "set_standalone_module_name() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_standalone_module_name"]], "to_dict() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.to_dict"]], "standalonemoduleconfigentry (class in torch.ao.quantization.fx.custom_config)": [[820, "torch.ao.quantization.fx.custom_config.StandaloneModuleConfigEntry"]], "histogramobserver (class in torch.ao.quantization.observer)": [[821, "torch.ao.quantization.observer.HistogramObserver"]], "minmaxobserver (class in torch.ao.quantization.observer)": [[822, "torch.ao.quantization.observer.MinMaxObserver"]], "calculate_qparams() (torch.ao.quantization.observer.minmaxobserver method)": [[822, "torch.ao.quantization.observer.MinMaxObserver.calculate_qparams"]], "forward() (torch.ao.quantization.observer.minmaxobserver method)": [[822, "torch.ao.quantization.observer.MinMaxObserver.forward"]], "reset_min_max_vals() (torch.ao.quantization.observer.minmaxobserver method)": [[822, "torch.ao.quantization.observer.MinMaxObserver.reset_min_max_vals"]], "movingaverageminmaxobserver (class in torch.ao.quantization.observer)": [[823, "torch.ao.quantization.observer.MovingAverageMinMaxObserver"]], "movingaverageperchannelminmaxobserver (class in torch.ao.quantization.observer)": [[824, "torch.ao.quantization.observer.MovingAveragePerChannelMinMaxObserver"]], "noopobserver (class in torch.ao.quantization.observer)": [[825, "torch.ao.quantization.observer.NoopObserver"]], "observerbase (class in torch.ao.quantization.observer)": [[826, "torch.ao.quantization.observer.ObserverBase"]], "with_args() 
(torch.ao.quantization.observer.observerbase class method)": [[826, "torch.ao.quantization.observer.ObserverBase.with_args"]], "with_callable_args() (torch.ao.quantization.observer.observerbase class method)": [[826, "torch.ao.quantization.observer.ObserverBase.with_callable_args"]], "perchannelminmaxobserver (class in torch.ao.quantization.observer)": [[827, "torch.ao.quantization.observer.PerChannelMinMaxObserver"]], "reset_min_max_vals() (torch.ao.quantization.observer.perchannelminmaxobserver method)": [[827, "torch.ao.quantization.observer.PerChannelMinMaxObserver.reset_min_max_vals"]], "placeholderobserver (class in torch.ao.quantization.observer)": [[828, "torch.ao.quantization.observer.PlaceholderObserver"]], "recordingobserver (class in torch.ao.quantization.observer)": [[829, "torch.ao.quantization.observer.RecordingObserver"]], "default_debug_observer (in module torch.ao.quantization.observer)": [[830, "torch.ao.quantization.observer.default_debug_observer"]], "default_dynamic_quant_observer (in module torch.ao.quantization.observer)": [[831, "torch.ao.quantization.observer.default_dynamic_quant_observer"]], "default_float_qparams_observer (in module torch.ao.quantization.observer)": [[832, "torch.ao.quantization.observer.default_float_qparams_observer"]], "default_histogram_observer (in module torch.ao.quantization.observer)": [[833, "torch.ao.quantization.observer.default_histogram_observer"]], "default_observer (in module torch.ao.quantization.observer)": [[834, "torch.ao.quantization.observer.default_observer"]], "default_per_channel_weight_observer (in module torch.ao.quantization.observer)": [[835, "torch.ao.quantization.observer.default_per_channel_weight_observer"]], "default_placeholder_observer (in module torch.ao.quantization.observer)": [[836, "torch.ao.quantization.observer.default_placeholder_observer"]], "default_weight_observer (in module torch.ao.quantization.observer)": [[837, "torch.ao.quantization.observer.default_weight_observer"]], "get_observer_state_dict (class in torch.ao.quantization.observer)": [[838, "torch.ao.quantization.observer.get_observer_state_dict"]], "load_observer_state_dict (class in torch.ao.quantization.observer)": [[839, "torch.ao.quantization.observer.load_observer_state_dict"]], "prepare (class in torch.ao.quantization)": [[840, "torch.ao.quantization.prepare"]], "prepare_qat (class in torch.ao.quantization)": [[841, "torch.ao.quantization.prepare_qat"]], "propagate_qconfig_ (class in torch.ao.quantization)": [[842, "torch.ao.quantization.propagate_qconfig_"]], "model_is_exported (class in torch.ao.quantization.pt2e.export_utils)": [[843, "torch.ao.quantization.pt2e.export_utils.model_is_exported"]], "qconfig (class in torch.ao.quantization.qconfig)": [[844, "torch.ao.quantization.qconfig.QConfig"]], "default_activation_only_qconfig (in module torch.ao.quantization.qconfig)": [[845, "torch.ao.quantization.qconfig.default_activation_only_qconfig"]], "default_debug_qconfig (in module torch.ao.quantization.qconfig)": [[846, "torch.ao.quantization.qconfig.default_debug_qconfig"]], "default_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[847, "torch.ao.quantization.qconfig.default_dynamic_qconfig"]], "default_per_channel_qconfig (in module torch.ao.quantization.qconfig)": [[848, "torch.ao.quantization.qconfig.default_per_channel_qconfig"]], "default_qat_qconfig (in module torch.ao.quantization.qconfig)": [[849, "torch.ao.quantization.qconfig.default_qat_qconfig"]], "default_qat_qconfig_v2 (in module 
torch.ao.quantization.qconfig)": [[850, "torch.ao.quantization.qconfig.default_qat_qconfig_v2"]], "default_qconfig (in module torch.ao.quantization.qconfig)": [[851, "torch.ao.quantization.qconfig.default_qconfig"]], "default_weight_only_qconfig (in module torch.ao.quantization.qconfig)": [[852, "torch.ao.quantization.qconfig.default_weight_only_qconfig"]], "float16_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[853, "torch.ao.quantization.qconfig.float16_dynamic_qconfig"]], "float16_static_qconfig (in module torch.ao.quantization.qconfig)": [[854, "torch.ao.quantization.qconfig.float16_static_qconfig"]], "float_qparams_weight_only_qconfig (in module torch.ao.quantization.qconfig)": [[855, "torch.ao.quantization.qconfig.float_qparams_weight_only_qconfig"]], "per_channel_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[856, "torch.ao.quantization.qconfig.per_channel_dynamic_qconfig"]], "qconfigmapping (class in torch.ao.quantization.qconfig_mapping)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping"]], "from_dict() (torch.ao.quantization.qconfig_mapping.qconfigmapping class method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.from_dict"]], "set_global() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_global"]], "set_module_name() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name"]], "set_module_name_object_type_order() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name_object_type_order"]], "set_module_name_regex() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name_regex"]], "set_object_type() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_object_type"]], "to_dict() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[857, "torch.ao.quantization.qconfig_mapping.QConfigMapping.to_dict"]], "get_default_qat_qconfig_mapping (class in torch.ao.quantization.qconfig_mapping)": [[858, "torch.ao.quantization.qconfig_mapping.get_default_qat_qconfig_mapping"]], "get_default_qconfig_mapping (class in torch.ao.quantization.qconfig_mapping)": [[859, "torch.ao.quantization.qconfig_mapping.get_default_qconfig_mapping"]], "quantize (class in torch.ao.quantization)": [[860, "torch.ao.quantization.quantize"]], "quantize_dynamic (class in torch.ao.quantization)": [[861, "torch.ao.quantization.quantize_dynamic"]], "convert_fx (class in torch.ao.quantization.quantize_fx)": [[862, "torch.ao.quantization.quantize_fx.convert_fx"]], "fuse_fx (class in torch.ao.quantization.quantize_fx)": [[863, "torch.ao.quantization.quantize_fx.fuse_fx"]], "prepare_fx (class in torch.ao.quantization.quantize_fx)": [[864, "torch.ao.quantization.quantize_fx.prepare_fx"]], "prepare_qat_fx (class in torch.ao.quantization.quantize_fx)": [[865, "torch.ao.quantization.quantize_fx.prepare_qat_fx"]], "quantize_qat (class in torch.ao.quantization)": [[866, "torch.ao.quantization.quantize_qat"]], "swap_module (class in torch.ao.quantization)": [[867, "torch.ao.quantization.swap_module"]], "arange() (in module torch)": [[868, "torch.arange"]], "arccos() (in module torch)": [[869, "torch.arccos"]], "arccosh() (in module torch)": [[870, 
"torch.arccosh"]], "arcsin() (in module torch)": [[871, "torch.arcsin"]], "arcsinh() (in module torch)": [[872, "torch.arcsinh"]], "arctan() (in module torch)": [[873, "torch.arctan"]], "arctan2() (in module torch)": [[874, "torch.arctan2"]], "arctanh() (in module torch)": [[875, "torch.arctanh"]], "are_deterministic_algorithms_enabled() (in module torch)": [[876, "torch.are_deterministic_algorithms_enabled"]], "argmax() (in module torch)": [[877, "torch.argmax"]], "argmin() (in module torch)": [[878, "torch.argmin"]], "argsort() (in module torch)": [[879, "torch.argsort"]], "argwhere() (in module torch)": [[880, "torch.argwhere"]], "as_strided() (in module torch)": [[881, "torch.as_strided"]], "as_tensor() (in module torch)": [[882, "torch.as_tensor"]], "asarray() (in module torch)": [[883, "torch.asarray"]], "asin() (in module torch)": [[884, "torch.asin"]], "asinh() (in module torch)": [[885, "torch.asinh"]], "atan() (in module torch)": [[886, "torch.atan"]], "atan2() (in module torch)": [[887, "torch.atan2"]], "atanh() (in module torch)": [[888, "torch.atanh"]], "atleast_1d() (in module torch)": [[889, "torch.atleast_1d"]], "atleast_2d() (in module torch)": [[890, "torch.atleast_2d"]], "atleast_3d() (in module torch)": [[891, "torch.atleast_3d"]], "backward() (torch.autograd.function static method)": [[892, "torch.autograd.Function.backward"]], "forward() (torch.autograd.function static method)": [[893, "torch.autograd.Function.forward"]], "jvp() (torch.autograd.function static method)": [[894, "torch.autograd.Function.jvp"]], "vmap() (torch.autograd.function static method)": [[895, "torch.autograd.Function.vmap"]], "backward() (in module torch.autograd)": [[896, "torch.autograd.backward"]], "unpackeddualtensor (class in torch.autograd.forward_ad)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor"]], "count() (torch.autograd.forward_ad.unpackeddualtensor method)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor.count"]], "index() (torch.autograd.forward_ad.unpackeddualtensor method)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor.index"]], "primal (torch.autograd.forward_ad.unpackeddualtensor attribute)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor.primal"]], "tangent (torch.autograd.forward_ad.unpackeddualtensor attribute)": [[897, "torch.autograd.forward_ad.UnpackedDualTensor.tangent"]], "dual_level (class in torch.autograd.forward_ad)": [[898, "torch.autograd.forward_ad.dual_level"]], "enter_dual_level() (in module torch.autograd.forward_ad)": [[899, "torch.autograd.forward_ad.enter_dual_level"]], "exit_dual_level() (in module torch.autograd.forward_ad)": [[900, "torch.autograd.forward_ad.exit_dual_level"]], "make_dual() (in module torch.autograd.forward_ad)": [[901, "torch.autograd.forward_ad.make_dual"]], "unpack_dual() (in module torch.autograd.forward_ad)": [[902, "torch.autograd.forward_ad.unpack_dual"]], "backwardcfunction (class in torch.autograd.function)": [[903, "torch.autograd.function.BackwardCFunction"]], "apply() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.apply"]], "apply_jvp() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.apply_jvp"]], "mark_dirty() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.mark_non_differentiable"]], 
"save_for_backward() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.save_for_forward"]], "set_materialize_grads() (torch.autograd.function.backwardcfunction method)": [[903, "torch.autograd.function.BackwardCFunction.set_materialize_grads"]], "mark_dirty() (torch.autograd.function.functionctx method)": [[904, "torch.autograd.function.FunctionCtx.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.functionctx method)": [[905, "torch.autograd.function.FunctionCtx.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.functionctx method)": [[906, "torch.autograd.function.FunctionCtx.save_for_backward"]], "set_materialize_grads() (torch.autograd.function.functionctx method)": [[907, "torch.autograd.function.FunctionCtx.set_materialize_grads"]], "inplacefunction (class in torch.autograd.function)": [[908, "torch.autograd.function.InplaceFunction"]], "backward() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.backward"]], "forward() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.forward"]], "jvp() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.jvp"]], "mark_dirty() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.save_for_forward"]], "set_materialize_grads() (torch.autograd.function.inplacefunction method)": [[908, "torch.autograd.function.InplaceFunction.set_materialize_grads"]], "setup_context() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.setup_context"]], "vjp() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.vjp"]], "vmap() (torch.autograd.function.inplacefunction static method)": [[908, "torch.autograd.function.InplaceFunction.vmap"]], "nestediofunction (class in torch.autograd.function)": [[909, "torch.autograd.function.NestedIOFunction"]], "backward() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.backward"]], "backward_extended() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.backward_extended"]], "forward() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.forward"]], "forward_extended() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.forward_extended"]], "jvp() (torch.autograd.function.nestediofunction static method)": [[909, "torch.autograd.function.NestedIOFunction.jvp"]], "mark_dirty() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.nestediofunction method)": [[909, 
"torch.autograd.function.NestedIOFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.save_for_forward"]], "saved_tensors (torch.autograd.function.nestediofunction property)": [[909, "torch.autograd.function.NestedIOFunction.saved_tensors"]], "set_materialize_grads() (torch.autograd.function.nestediofunction method)": [[909, "torch.autograd.function.NestedIOFunction.set_materialize_grads"]], "setup_context() (torch.autograd.function.nestediofunction static method)": [[909, "torch.autograd.function.NestedIOFunction.setup_context"]], "vjp() (torch.autograd.function.nestediofunction static method)": [[909, "torch.autograd.function.NestedIOFunction.vjp"]], "vmap() (torch.autograd.function.nestediofunction static method)": [[909, "torch.autograd.function.NestedIOFunction.vmap"]], "once_differentiable() (in module torch.autograd.function)": [[910, "torch.autograd.function.once_differentiable"]], "hessian() (in module torch.autograd.functional)": [[911, "torch.autograd.functional.hessian"]], "hvp() (in module torch.autograd.functional)": [[912, "torch.autograd.functional.hvp"]], "jacobian() (in module torch.autograd.functional)": [[913, "torch.autograd.functional.jacobian"]], "jvp() (in module torch.autograd.functional)": [[914, "torch.autograd.functional.jvp"]], "vhp() (in module torch.autograd.functional)": [[915, "torch.autograd.functional.vhp"]], "vjp() (in module torch.autograd.functional)": [[916, "torch.autograd.functional.vjp"]], "grad() (in module torch.autograd)": [[917, "torch.autograd.grad"]], "clone() (torch.autograd.grad_mode.inference_mode method)": [[918, "torch.autograd.grad_mode.inference_mode.clone"]], "inference_mode (class in torch.autograd.grad_mode)": [[918, "torch.autograd.grad_mode.inference_mode"]], "clone() (torch.autograd.grad_mode.set_grad_enabled method)": [[919, "torch.autograd.grad_mode.set_grad_enabled.clone"]], "set_grad_enabled (class in torch.autograd.grad_mode)": [[919, "torch.autograd.grad_mode.set_grad_enabled"]], "clone() (torch.autograd.grad_mode.set_multithreading_enabled method)": [[920, "torch.autograd.grad_mode.set_multithreading_enabled.clone"]], "set_multithreading_enabled (class in torch.autograd.grad_mode)": [[920, "torch.autograd.grad_mode.set_multithreading_enabled"]], "gradcheckerror": [[921, "torch.autograd.gradcheck.GradcheckError"]], "gradcheck() (in module torch.autograd.gradcheck)": [[922, "torch.autograd.gradcheck.gradcheck"]], "gradgradcheck() (in module torch.autograd.gradcheck)": [[923, "torch.autograd.gradcheck.gradgradcheck"]], "metadata() (torch.autograd.graph.node method)": [[924, "torch.autograd.graph.Node.metadata"]], "name() (torch.autograd.graph.node method)": [[925, "torch.autograd.graph.Node.name"]], "next_functions (torch.autograd.graph.node property)": [[926, "torch.autograd.graph.Node.next_functions"]], "register_hook() (torch.autograd.graph.node method)": [[927, "torch.autograd.graph.Node.register_hook"]], "register_prehook() (torch.autograd.graph.node method)": [[928, "torch.autograd.graph.Node.register_prehook"]], "increment_version() (in module torch.autograd.graph)": [[929, "torch.autograd.graph.increment_version"]], "enforceunique (class in torch.autograd.profiler)": [[930, "torch.autograd.profiler.EnforceUnique"]], "see() (torch.autograd.profiler.enforceunique method)": 
[[930, "torch.autograd.profiler.EnforceUnique.see"]], "kinetosteptracker (class in torch.autograd.profiler)": [[931, "torch.autograd.profiler.KinetoStepTracker"]], "current_step() (torch.autograd.profiler.kinetosteptracker class method)": [[931, "torch.autograd.profiler.KinetoStepTracker.current_step"]], "erase_step_count() (torch.autograd.profiler.kinetosteptracker class method)": [[931, "torch.autograd.profiler.KinetoStepTracker.erase_step_count"]], "increment_step() (torch.autograd.profiler.kinetosteptracker class method)": [[931, "torch.autograd.profiler.KinetoStepTracker.increment_step"]], "init_step_count() (torch.autograd.profiler.kinetosteptracker class method)": [[931, "torch.autograd.profiler.KinetoStepTracker.init_step_count"]], "load_nvprof() (in module torch.autograd.profiler)": [[932, "torch.autograd.profiler.load_nvprof"]], "parse_nvprof_trace() (in module torch.autograd.profiler)": [[933, "torch.autograd.profiler.parse_nvprof_trace"]], "export_chrome_trace() (torch.autograd.profiler.profile method)": [[934, "torch.autograd.profiler.profile.export_chrome_trace"]], "key_averages() (torch.autograd.profiler.profile method)": [[935, "torch.autograd.profiler.profile.key_averages"]], "self_cpu_time_total (torch.autograd.profiler.profile property)": [[936, "torch.autograd.profiler.profile.self_cpu_time_total"]], "total_average() (torch.autograd.profiler.profile method)": [[937, "torch.autograd.profiler.profile.total_average"]], "record_function (class in torch.autograd.profiler)": [[938, "torch.autograd.profiler.record_function"]], "interval (class in torch.autograd.profiler_util)": [[939, "torch.autograd.profiler_util.Interval"]], "elapsed_us() (torch.autograd.profiler_util.interval method)": [[939, "torch.autograd.profiler_util.Interval.elapsed_us"]], "kernel (class in torch.autograd.profiler_util)": [[940, "torch.autograd.profiler_util.Kernel"]], "count() (torch.autograd.profiler_util.kernel method)": [[940, "torch.autograd.profiler_util.Kernel.count"]], "device (torch.autograd.profiler_util.kernel attribute)": [[940, "torch.autograd.profiler_util.Kernel.device"]], "duration (torch.autograd.profiler_util.kernel attribute)": [[940, "torch.autograd.profiler_util.Kernel.duration"]], "index() (torch.autograd.profiler_util.kernel method)": [[940, "torch.autograd.profiler_util.Kernel.index"]], "name (torch.autograd.profiler_util.kernel attribute)": [[940, "torch.autograd.profiler_util.Kernel.name"]], "memrecordsacc (class in torch.autograd.profiler_util)": [[941, "torch.autograd.profiler_util.MemRecordsAcc"]], "in_interval() (torch.autograd.profiler_util.memrecordsacc method)": [[941, "torch.autograd.profiler_util.MemRecordsAcc.in_interval"]], "stringtable (class in torch.autograd.profiler_util)": [[942, "torch.autograd.profiler_util.StringTable"]], "clear() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.clear"]], "copy() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.copy"]], "default_factory (torch.autograd.profiler_util.stringtable attribute)": [[942, "torch.autograd.profiler_util.StringTable.default_factory"]], "fromkeys() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.fromkeys"]], "get() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.get"]], "items() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.items"]], "keys() 
(torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.keys"]], "pop() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.pop"]], "popitem() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.popitem"]], "setdefault() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.setdefault"]], "update() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.update"]], "values() (torch.autograd.profiler_util.stringtable method)": [[942, "torch.autograd.profiler_util.StringTable.values"]], "baddbmm() (in module torch)": [[943, "torch.baddbmm"]], "bartlett_window() (in module torch)": [[944, "torch.bartlett_window"]], "bernoulli() (in module torch)": [[945, "torch.bernoulli"]], "bincount() (in module torch)": [[946, "torch.bincount"]], "bitwise_and() (in module torch)": [[947, "torch.bitwise_and"]], "bitwise_left_shift() (in module torch)": [[948, "torch.bitwise_left_shift"]], "bitwise_not() (in module torch)": [[949, "torch.bitwise_not"]], "bitwise_or() (in module torch)": [[950, "torch.bitwise_or"]], "bitwise_right_shift() (in module torch)": [[951, "torch.bitwise_right_shift"]], "bitwise_xor() (in module torch)": [[952, "torch.bitwise_xor"]], "blackman_window() (in module torch)": [[953, "torch.blackman_window"]], "block_diag() (in module torch)": [[954, "torch.block_diag"]], "bmm() (in module torch)": [[955, "torch.bmm"]], "broadcast_shapes() (in module torch)": [[956, "torch.broadcast_shapes"]], "broadcast_tensors() (in module torch)": [[957, "torch.broadcast_tensors"]], "broadcast_to() (in module torch)": [[958, "torch.broadcast_to"]], "bucketize() (in module torch)": [[959, "torch.bucketize"]], "can_cast() (in module torch)": [[960, "torch.can_cast"]], "cartesian_prod() (in module torch)": [[961, "torch.cartesian_prod"]], "cat() (in module torch)": [[962, "torch.cat"]], "cdist() (in module torch)": [[963, "torch.cdist"]], "ceil() (in module torch)": [[964, "torch.ceil"]], "chain_matmul() (in module torch)": [[965, "torch.chain_matmul"]], "cholesky() (in module torch)": [[966, "torch.cholesky"]], "cholesky_inverse() (in module torch)": [[967, "torch.cholesky_inverse"]], "cholesky_solve() (in module torch)": [[968, "torch.cholesky_solve"]], "chunk() (in module torch)": [[969, "torch.chunk"]], "clamp() (in module torch)": [[970, "torch.clamp"]], "clip() (in module torch)": [[971, "torch.clip"]], "clone() (in module torch)": [[972, "torch.clone"]], "column_stack() (in module torch)": [[973, "torch.column_stack"]], "combinations() (in module torch)": [[974, "torch.combinations"]], "compile() (in module torch)": [[975, "torch.compile"]], "compiled_with_cxx11_abi() (in module torch)": [[976, "torch.compiled_with_cxx11_abi"]], "allow_in_graph() (in module torch.compiler)": [[977, "torch.compiler.allow_in_graph"]], "assume_constant_result() (in module torch.compiler)": [[978, "torch.compiler.assume_constant_result"]], "compile() (in module torch.compiler)": [[979, "torch.compiler.compile"]], "cudagraph_mark_step_begin() (in module torch.compiler)": [[980, "torch.compiler.cudagraph_mark_step_begin"]], "disable() (in module torch.compiler)": [[981, "torch.compiler.disable"]], "is_compiling() (in module torch.compiler)": [[982, "torch.compiler.is_compiling"]], "is_dynamo_compiling() (in module torch.compiler)": [[983, "torch.compiler.is_dynamo_compiling"]], 
"list_backends() (in module torch.compiler)": [[984, "torch.compiler.list_backends"]], "reset() (in module torch.compiler)": [[985, "torch.compiler.reset"]], "complex() (in module torch)": [[986, "torch.complex"]], "concat() (in module torch)": [[987, "torch.concat"]], "concatenate() (in module torch)": [[988, "torch.concatenate"]], "cond() (in module torch)": [[989, "torch.cond"]], "conj() (in module torch)": [[990, "torch.conj"]], "conj_physical() (in module torch)": [[991, "torch.conj_physical"]], "copysign() (in module torch)": [[992, "torch.copysign"]], "corrcoef() (in module torch)": [[993, "torch.corrcoef"]], "cos() (in module torch)": [[994, "torch.cos"]], "cosh() (in module torch)": [[995, "torch.cosh"]], "count_nonzero() (in module torch)": [[996, "torch.count_nonzero"]], "cov() (in module torch)": [[997, "torch.cov"]], "stream (class in torch.cpu)": [[998, "torch.cpu.Stream"]], "streamcontext (class in torch.cpu)": [[999, "torch.cpu.StreamContext"]], "current_device() (in module torch.cpu)": [[1000, "torch.cpu.current_device"]], "current_stream() (in module torch.cpu)": [[1001, "torch.cpu.current_stream"]], "device_count() (in module torch.cpu)": [[1002, "torch.cpu.device_count"]], "is_available() (in module torch.cpu)": [[1003, "torch.cpu.is_available"]], "set_device() (in module torch.cpu)": [[1004, "torch.cpu.set_device"]], "stream() (in module torch.cpu)": [[1005, "torch.cpu.stream"]], "synchronize() (in module torch.cpu)": [[1006, "torch.cpu.synchronize"]], "cross() (in module torch)": [[1007, "torch.cross"]], "cudagraph (class in torch.cuda)": [[1008, "torch.cuda.CUDAGraph"]], "capture_begin() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.capture_begin"]], "capture_end() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.capture_end"]], "debug_dump() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.debug_dump"]], "enable_debug_mode() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.enable_debug_mode"]], "pool() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.pool"]], "replay() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.replay"]], "reset() (torch.cuda.cudagraph method)": [[1008, "torch.cuda.CUDAGraph.reset"]], "cudapluggableallocator (class in torch.cuda)": [[1009, "torch.cuda.CUDAPluggableAllocator"]], "event (class in torch.cuda)": [[1010, "torch.cuda.Event"]], "elapsed_time() (torch.cuda.event method)": [[1010, "torch.cuda.Event.elapsed_time"]], "from_ipc_handle() (torch.cuda.event class method)": [[1010, "torch.cuda.Event.from_ipc_handle"]], "ipc_handle() (torch.cuda.event method)": [[1010, "torch.cuda.Event.ipc_handle"]], "query() (torch.cuda.event method)": [[1010, "torch.cuda.Event.query"]], "record() (torch.cuda.event method)": [[1010, "torch.cuda.Event.record"]], "synchronize() (torch.cuda.event method)": [[1010, "torch.cuda.Event.synchronize"]], "wait() (torch.cuda.event method)": [[1010, "torch.cuda.Event.wait"]], "externalstream (class in torch.cuda)": [[1011, "torch.cuda.ExternalStream"]], "query() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.query"]], "record_event() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.record_event"]], "synchronize() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.synchronize"]], "wait_event() (torch.cuda.externalstream method)": [[1011, "torch.cuda.ExternalStream.wait_event"]], "wait_stream() (torch.cuda.externalstream method)": [[1011, 
"torch.cuda.ExternalStream.wait_stream"]], "outofmemoryerror": [[1012, "torch.cuda.OutOfMemoryError"]], "stream (class in torch.cuda)": [[1013, "torch.cuda.Stream"]], "query() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.query"]], "record_event() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.record_event"]], "synchronize() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.synchronize"]], "wait_event() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.wait_event"]], "wait_stream() (torch.cuda.stream method)": [[1013, "torch.cuda.Stream.wait_stream"]], "streamcontext (class in torch.cuda)": [[1014, "torch.cuda.StreamContext"]], "caching_allocator_alloc() (in module torch.cuda)": [[1015, "torch.cuda.caching_allocator_alloc"]], "caching_allocator_delete() (in module torch.cuda)": [[1016, "torch.cuda.caching_allocator_delete"]], "can_device_access_peer() (in module torch.cuda)": [[1017, "torch.cuda.can_device_access_peer"]], "change_current_allocator() (in module torch.cuda)": [[1018, "torch.cuda.change_current_allocator"]], "clock_rate() (in module torch.cuda)": [[1019, "torch.cuda.clock_rate"]], "broadcast() (in module torch.cuda.comm)": [[1020, "torch.cuda.comm.broadcast"]], "broadcast_coalesced() (in module torch.cuda.comm)": [[1021, "torch.cuda.comm.broadcast_coalesced"]], "gather() (in module torch.cuda.comm)": [[1022, "torch.cuda.comm.gather"]], "reduce_add() (in module torch.cuda.comm)": [[1023, "torch.cuda.comm.reduce_add"]], "scatter() (in module torch.cuda.comm)": [[1024, "torch.cuda.comm.scatter"]], "current_blas_handle() (in module torch.cuda)": [[1025, "torch.cuda.current_blas_handle"]], "current_device() (in module torch.cuda)": [[1026, "torch.cuda.current_device"]], "current_stream() (in module torch.cuda)": [[1027, "torch.cuda.current_stream"]], "default_stream() (in module torch.cuda)": [[1028, "torch.cuda.default_stream"]], "device (class in torch.cuda)": [[1029, "torch.cuda.device"]], "device_count() (in module torch.cuda)": [[1030, "torch.cuda.device_count"]], "device_of (class in torch.cuda)": [[1031, "torch.cuda.device_of"]], "empty_cache() (in module torch.cuda)": [[1032, "torch.cuda.empty_cache"]], "get_allocator_backend() (in module torch.cuda)": [[1033, "torch.cuda.get_allocator_backend"]], "get_arch_list() (in module torch.cuda)": [[1034, "torch.cuda.get_arch_list"]], "get_device_capability() (in module torch.cuda)": [[1035, "torch.cuda.get_device_capability"]], "get_device_name() (in module torch.cuda)": [[1036, "torch.cuda.get_device_name"]], "get_device_properties() (in module torch.cuda)": [[1037, "torch.cuda.get_device_properties"]], "get_gencode_flags() (in module torch.cuda)": [[1038, "torch.cuda.get_gencode_flags"]], "get_rng_state() (in module torch.cuda)": [[1039, "torch.cuda.get_rng_state"]], "get_rng_state_all() (in module torch.cuda)": [[1040, "torch.cuda.get_rng_state_all"]], "get_sync_debug_mode() (in module torch.cuda)": [[1041, "torch.cuda.get_sync_debug_mode"]], "graph (class in torch.cuda)": [[1042, "torch.cuda.graph"]], "graph_pool_handle() (in module torch.cuda)": [[1043, "torch.cuda.graph_pool_handle"]], "init() (in module torch.cuda)": [[1044, "torch.cuda.init"]], "initial_seed() (in module torch.cuda)": [[1045, "torch.cuda.initial_seed"]], "ipc_collect() (in module torch.cuda)": [[1046, "torch.cuda.ipc_collect"]], "is_available() (in module torch.cuda)": [[1047, "torch.cuda.is_available"]], "is_current_stream_capturing() (in module torch.cuda)": [[1048, "torch.cuda.is_current_stream_capturing"]], 
"is_initialized() (in module torch.cuda)": [[1049, "torch.cuda.is_initialized"]], "_create_jit_fn() (in module torch.cuda.jiterator)": [[1050, "torch.cuda.jiterator._create_jit_fn"]], "_create_multi_output_jit_fn() (in module torch.cuda.jiterator)": [[1051, "torch.cuda.jiterator._create_multi_output_jit_fn"]], "list_gpu_processes() (in module torch.cuda)": [[1052, "torch.cuda.list_gpu_processes"]], "make_graphed_callables() (in module torch.cuda)": [[1053, "torch.cuda.make_graphed_callables"]], "manual_seed() (in module torch.cuda)": [[1054, "torch.cuda.manual_seed"]], "manual_seed_all() (in module torch.cuda)": [[1055, "torch.cuda.manual_seed_all"]], "max_memory_allocated() (in module torch.cuda)": [[1056, "torch.cuda.max_memory_allocated"]], "max_memory_cached() (in module torch.cuda)": [[1057, "torch.cuda.max_memory_cached"]], "max_memory_reserved() (in module torch.cuda)": [[1058, "torch.cuda.max_memory_reserved"]], "mem_get_info() (in module torch.cuda)": [[1059, "torch.cuda.mem_get_info"]], "memory_allocated() (in module torch.cuda)": [[1060, "torch.cuda.memory_allocated"]], "memory_cached() (in module torch.cuda)": [[1061, "torch.cuda.memory_cached"]], "memory_reserved() (in module torch.cuda)": [[1062, "torch.cuda.memory_reserved"]], "memory_snapshot() (in module torch.cuda)": [[1063, "torch.cuda.memory_snapshot"]], "memory_stats() (in module torch.cuda)": [[1064, "torch.cuda.memory_stats"]], "memory_summary() (in module torch.cuda)": [[1065, "torch.cuda.memory_summary"]], "memory_usage() (in module torch.cuda)": [[1066, "torch.cuda.memory_usage"]], "mark() (in module torch.cuda.nvtx)": [[1067, "torch.cuda.nvtx.mark"]], "range() (in module torch.cuda.nvtx)": [[1068, "torch.cuda.nvtx.range"]], "range_pop() (in module torch.cuda.nvtx)": [[1069, "torch.cuda.nvtx.range_pop"]], "range_push() (in module torch.cuda.nvtx)": [[1070, "torch.cuda.nvtx.range_push"]], "power_draw() (in module torch.cuda)": [[1071, "torch.cuda.power_draw"]], "reset_max_memory_allocated() (in module torch.cuda)": [[1072, "torch.cuda.reset_max_memory_allocated"]], "reset_max_memory_cached() (in module torch.cuda)": [[1073, "torch.cuda.reset_max_memory_cached"]], "reset_peak_memory_stats() (in module torch.cuda)": [[1074, "torch.cuda.reset_peak_memory_stats"]], "seed() (in module torch.cuda)": [[1075, "torch.cuda.seed"]], "seed_all() (in module torch.cuda)": [[1076, "torch.cuda.seed_all"]], "set_device() (in module torch.cuda)": [[1077, "torch.cuda.set_device"]], "set_per_process_memory_fraction() (in module torch.cuda)": [[1078, "torch.cuda.set_per_process_memory_fraction"]], "set_rng_state() (in module torch.cuda)": [[1079, "torch.cuda.set_rng_state"]], "set_rng_state_all() (in module torch.cuda)": [[1080, "torch.cuda.set_rng_state_all"]], "set_stream() (in module torch.cuda)": [[1081, "torch.cuda.set_stream"]], "set_sync_debug_mode() (in module torch.cuda)": [[1082, "torch.cuda.set_sync_debug_mode"]], "stream() (in module torch.cuda)": [[1083, "torch.cuda.stream"]], "synchronize() (in module torch.cuda)": [[1084, "torch.cuda.synchronize"]], "temperature() (in module torch.cuda)": [[1085, "torch.cuda.temperature"]], "utilization() (in module torch.cuda)": [[1086, "torch.cuda.utilization"]], "cummax() (in module torch)": [[1087, "torch.cummax"]], "cummin() (in module torch)": [[1088, "torch.cummin"]], "cumprod() (in module torch)": [[1089, "torch.cumprod"]], "cumsum() (in module torch)": [[1090, "torch.cumsum"]], "cumulative_trapezoid() (in module torch)": [[1091, "torch.cumulative_trapezoid"]], "deg2rad() (in 
module torch)": [[1092, "torch.deg2rad"]], "dequantize() (in module torch)": [[1093, "torch.dequantize"]], "det() (in module torch)": [[1094, "torch.det"]], "diag() (in module torch)": [[1095, "torch.diag"]], "diag_embed() (in module torch)": [[1096, "torch.diag_embed"]], "diagflat() (in module torch)": [[1097, "torch.diagflat"]], "diagonal() (in module torch)": [[1098, "torch.diagonal"]], "diagonal_scatter() (in module torch)": [[1099, "torch.diagonal_scatter"]], "diff() (in module torch)": [[1100, "torch.diff"]], "digamma() (in module torch)": [[1101, "torch.digamma"]], "dist() (in module torch)": [[1102, "torch.dist"]], "div() (in module torch)": [[1103, "torch.div"]], "divide() (in module torch)": [[1104, "torch.divide"]], "dot() (in module torch)": [[1105, "torch.dot"]], "dsplit() (in module torch)": [[1106, "torch.dsplit"]], "dstack() (in module torch)": [[1107, "torch.dstack"]], "einsum() (in module torch)": [[1108, "torch.einsum"]], "empty() (in module torch)": [[1109, "torch.empty"]], "empty_like() (in module torch)": [[1110, "torch.empty_like"]], "empty_strided() (in module torch)": [[1111, "torch.empty_strided"]], "enable_grad (class in torch)": [[1112, "torch.enable_grad"]], "eq() (in module torch)": [[1113, "torch.eq"]], "equal() (in module torch)": [[1114, "torch.equal"]], "erf() (in module torch)": [[1115, "torch.erf"]], "erfc() (in module torch)": [[1116, "torch.erfc"]], "erfinv() (in module torch)": [[1117, "torch.erfinv"]], "exp() (in module torch)": [[1118, "torch.exp"]], "exp2() (in module torch)": [[1119, "torch.exp2"]], "expm1() (in module torch)": [[1120, "torch.expm1"]], "eye() (in module torch)": [[1121, "torch.eye"]], "fake_quantize_per_channel_affine() (in module torch)": [[1122, "torch.fake_quantize_per_channel_affine"]], "fake_quantize_per_tensor_affine() (in module torch)": [[1123, "torch.fake_quantize_per_tensor_affine"]], "fft() (in module torch.fft)": [[1124, "torch.fft.fft"]], "fft2() (in module torch.fft)": [[1125, "torch.fft.fft2"]], "fftfreq() (in module torch.fft)": [[1126, "torch.fft.fftfreq"]], "fftn() (in module torch.fft)": [[1127, "torch.fft.fftn"]], "fftshift() (in module torch.fft)": [[1128, "torch.fft.fftshift"]], "hfft() (in module torch.fft)": [[1129, "torch.fft.hfft"]], "hfft2() (in module torch.fft)": [[1130, "torch.fft.hfft2"]], "hfftn() (in module torch.fft)": [[1131, "torch.fft.hfftn"]], "ifft() (in module torch.fft)": [[1132, "torch.fft.ifft"]], "ifft2() (in module torch.fft)": [[1133, "torch.fft.ifft2"]], "ifftn() (in module torch.fft)": [[1134, "torch.fft.ifftn"]], "ifftshift() (in module torch.fft)": [[1135, "torch.fft.ifftshift"]], "ihfft() (in module torch.fft)": [[1136, "torch.fft.ihfft"]], "ihfft2() (in module torch.fft)": [[1137, "torch.fft.ihfft2"]], "ihfftn() (in module torch.fft)": [[1138, "torch.fft.ihfftn"]], "irfft() (in module torch.fft)": [[1139, "torch.fft.irfft"]], "irfft2() (in module torch.fft)": [[1140, "torch.fft.irfft2"]], "irfftn() (in module torch.fft)": [[1141, "torch.fft.irfftn"]], "rfft() (in module torch.fft)": [[1142, "torch.fft.rfft"]], "rfft2() (in module torch.fft)": [[1143, "torch.fft.rfft2"]], "rfftfreq() (in module torch.fft)": [[1144, "torch.fft.rfftfreq"]], "rfftn() (in module torch.fft)": [[1145, "torch.fft.rfftn"]], "fix() (in module torch)": [[1146, "torch.fix"]], "flatten() (in module torch)": [[1147, "torch.flatten"]], "flip() (in module torch)": [[1148, "torch.flip"]], "fliplr() (in module torch)": [[1149, "torch.fliplr"]], "flipud() (in module torch)": [[1150, "torch.flipud"]], "float_power() 
(in module torch)": [[1151, "torch.float_power"]], "floor() (in module torch)": [[1152, "torch.floor"]], "floor_divide() (in module torch)": [[1153, "torch.floor_divide"]], "fmax() (in module torch)": [[1154, "torch.fmax"]], "fmin() (in module torch)": [[1155, "torch.fmin"]], "fmod() (in module torch)": [[1156, "torch.fmod"]], "frac() (in module torch)": [[1157, "torch.frac"]], "frexp() (in module torch)": [[1158, "torch.frexp"]], "from_dlpack() (in module torch)": [[1159, "torch.from_dlpack"]], "from_file() (in module torch)": [[1160, "torch.from_file"]], "from_numpy() (in module torch)": [[1161, "torch.from_numpy"]], "frombuffer() (in module torch)": [[1162, "torch.frombuffer"]], "full() (in module torch)": [[1163, "torch.full"]], "full_like() (in module torch)": [[1164, "torch.full_like"]], "functional_call() (in module torch.func)": [[1165, "torch.func.functional_call"]], "functionalize() (in module torch.func)": [[1166, "torch.func.functionalize"]], "grad() (in module torch.func)": [[1167, "torch.func.grad"]], "grad_and_value() (in module torch.func)": [[1168, "torch.func.grad_and_value"]], "hessian() (in module torch.func)": [[1169, "torch.func.hessian"]], "jacfwd() (in module torch.func)": [[1170, "torch.func.jacfwd"]], "jacrev() (in module torch.func)": [[1171, "torch.func.jacrev"]], "jvp() (in module torch.func)": [[1172, "torch.func.jvp"]], "linearize() (in module torch.func)": [[1173, "torch.func.linearize"]], "replace_all_batch_norm_modules_() (in module torch.func)": [[1174, "torch.func.replace_all_batch_norm_modules_"]], "stack_module_state() (in module torch.func)": [[1175, "torch.func.stack_module_state"]], "vjp() (in module torch.func)": [[1176, "torch.func.vjp"]], "vmap() (in module torch.func)": [[1177, "torch.func.vmap"]], "callmethodkey (class in torch.fx.experimental.symbolic_shapes)": [[1178, "torch.fx.experimental.symbolic_shapes.CallMethodKey"]], "get() (torch.fx.experimental.symbolic_shapes.callmethodkey method)": [[1178, "torch.fx.experimental.symbolic_shapes.CallMethodKey.get"]], "convertintkey (class in torch.fx.experimental.symbolic_shapes)": [[1179, "torch.fx.experimental.symbolic_shapes.ConvertIntKey"]], "get() (torch.fx.experimental.symbolic_shapes.convertintkey method)": [[1179, "torch.fx.experimental.symbolic_shapes.ConvertIntKey.get"]], "dimconstraints (class in torch.fx.experimental.symbolic_shapes)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints"]], "add() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.add"]], "add_equality() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.add_equality"]], "forced_specializations() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.forced_specializations"]], "prettify_results() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.prettify_results"]], "remove_redundant_dynamic_results() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.remove_redundant_dynamic_results"]], "rewrite_with_congruences() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, "torch.fx.experimental.symbolic_shapes.DimConstraints.rewrite_with_congruences"]], "solve() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1180, 
"torch.fx.experimental.symbolic_shapes.DimConstraints.solve"]], "dimdynamic (class in torch.fx.experimental.symbolic_shapes)": [[1181, "torch.fx.experimental.symbolic_shapes.DimDynamic"]], "dividebykey (class in torch.fx.experimental.symbolic_shapes)": [[1182, "torch.fx.experimental.symbolic_shapes.DivideByKey"]], "get() (torch.fx.experimental.symbolic_shapes.dividebykey method)": [[1182, "torch.fx.experimental.symbolic_shapes.DivideByKey.get"]], "equalityconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1183, "torch.fx.experimental.symbolic_shapes.EqualityConstraint"]], "innertensorkey (class in torch.fx.experimental.symbolic_shapes)": [[1184, "torch.fx.experimental.symbolic_shapes.InnerTensorKey"]], "get() (torch.fx.experimental.symbolic_shapes.innertensorkey method)": [[1184, "torch.fx.experimental.symbolic_shapes.InnerTensorKey.get"]], "propagateunbackedsymints (class in torch.fx.experimental.symbolic_shapes)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts"]], "boxed_run() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.boxed_run"]], "call_function() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_function"]], "call_method() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_method"]], "call_module() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_module"]], "fetch_args_kwargs_from_env() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.fetch_args_kwargs_from_env"]], "fetch_attr() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.fetch_attr"]], "get_attr() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.get_attr"]], "map_nodes_to_values() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.map_nodes_to_values"]], "output() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.output"]], "placeholder() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.placeholder"]], "run() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.run"]], "run_node() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1185, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.run_node"]], "relaxedunspecconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1186, "torch.fx.experimental.symbolic_shapes.RelaxedUnspecConstraint"]], "shapeenv (class in torch.fx.experimental.symbolic_shapes)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv"]], "add_var_to_val() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, 
"torch.fx.experimental.symbolic_shapes.ShapeEnv.add_var_to_val"]], "bind_symbols() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.bind_symbols"]], "bound_sympy() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.bound_sympy"]], "check_equal() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.check_equal"]], "cleanup() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.cleanup"]], "create_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symbol"]], "create_symbolic_sizes_strides_storage_offset() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symbolic_sizes_strides_storage_offset"]], "create_symboolnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symboolnode"]], "create_symfloatnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symfloatnode"]], "create_symintnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symintnode"]], "create_unbacked_symbool() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symbool"]], "create_unbacked_symfloat() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symfloat"]], "create_unbacked_symint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symint"]], "create_unspecified_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unspecified_symbol"]], "create_unspecified_symint_and_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unspecified_symint_and_symbol"]], "defer_runtime_assert() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.defer_runtime_assert"]], "evaluate_expr() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_expr"]], "evaluate_guards_expression() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_guards_expression"]], "evaluate_guards_for_args() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_guards_for_args"]], "format_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.format_guards"]], "freeze() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.freeze"]], "freeze_runtime_asserts() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.freeze_runtime_asserts"]], "get_axioms() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, 
"torch.fx.experimental.symbolic_shapes.ShapeEnv.get_axioms"]], "get_implications() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_implications"]], "get_nontrivial_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_nontrivial_guards"]], "get_pruned_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_pruned_guards"]], "ignore_fresh_unbacked_symbols() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.ignore_fresh_unbacked_symbols"]], "is_unbacked_symint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.is_unbacked_symint"]], "produce_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.produce_guards"]], "produce_guards_expression() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.produce_guards_expression"]], "replace() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.replace"]], "set_unbacked_var_to_val() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.set_unbacked_var_to_val"]], "simplify() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.simplify"]], "size_hint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.size_hint"]], "suppress_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1187, "torch.fx.experimental.symbolic_shapes.ShapeEnv.suppress_guards"]], "shapeenvsettings (class in torch.fx.experimental.symbolic_shapes)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnvSettings"]], "statefulsymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1189, "torch.fx.experimental.symbolic_shapes.StatefulSymbolicContext"]], "statelesssymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1190, "torch.fx.experimental.symbolic_shapes.StatelessSymbolicContext"]], "strictminmaxconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1191, "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint"]], "render() (torch.fx.experimental.symbolic_shapes.strictminmaxconstraint method)": [[1191, "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint.render"]], "subclasssymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1192, "torch.fx.experimental.symbolic_shapes.SubclassSymbolicContext"]], "symboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1193, "torch.fx.experimental.symbolic_shapes.SymbolicContext"]], "canonicalize_bool_expr() (in module torch.fx.experimental.symbolic_shapes)": [[1194, "torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr"]], "check_consistent() (in module torch.fx.experimental.symbolic_shapes)": [[1195, "torch.fx.experimental.symbolic_shapes.check_consistent"]], "compute_unbacked_bindings() (in module torch.fx.experimental.symbolic_shapes)": [[1196, "torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings"]], "constrain_range() (in module torch.fx.experimental.symbolic_shapes)": [[1197, 
"torch.fx.experimental.symbolic_shapes.constrain_range"]], "constrain_unify() (in module torch.fx.experimental.symbolic_shapes)": [[1198, "torch.fx.experimental.symbolic_shapes.constrain_unify"]], "definitely_false() (in module torch.fx.experimental.symbolic_shapes)": [[1199, "torch.fx.experimental.symbolic_shapes.definitely_false"]], "definitely_true() (in module torch.fx.experimental.symbolic_shapes)": [[1200, "torch.fx.experimental.symbolic_shapes.definitely_true"]], "guard_size_oblivious() (in module torch.fx.experimental.symbolic_shapes)": [[1201, "torch.fx.experimental.symbolic_shapes.guard_size_oblivious"]], "has_free_symbols() (in module torch.fx.experimental.symbolic_shapes)": [[1202, "torch.fx.experimental.symbolic_shapes.has_free_symbols"]], "hint_int() (in module torch.fx.experimental.symbolic_shapes)": [[1203, "torch.fx.experimental.symbolic_shapes.hint_int"]], "is_concrete_bool() (in module torch.fx.experimental.symbolic_shapes)": [[1204, "torch.fx.experimental.symbolic_shapes.is_concrete_bool"]], "is_concrete_int() (in module torch.fx.experimental.symbolic_shapes)": [[1205, "torch.fx.experimental.symbolic_shapes.is_concrete_int"]], "lru_cache() (in module torch.fx.experimental.symbolic_shapes)": [[1206, "torch.fx.experimental.symbolic_shapes.lru_cache"]], "parallel_and() (in module torch.fx.experimental.symbolic_shapes)": [[1207, "torch.fx.experimental.symbolic_shapes.parallel_and"]], "parallel_or() (in module torch.fx.experimental.symbolic_shapes)": [[1208, "torch.fx.experimental.symbolic_shapes.parallel_or"]], "rebind_unbacked() (in module torch.fx.experimental.symbolic_shapes)": [[1209, "torch.fx.experimental.symbolic_shapes.rebind_unbacked"]], "resolve_unbacked_bindings() (in module torch.fx.experimental.symbolic_shapes)": [[1210, "torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings"]], "statically_known_true() (in module torch.fx.experimental.symbolic_shapes)": [[1211, "torch.fx.experimental.symbolic_shapes.statically_known_true"]], "sym_eq() (in module torch.fx.experimental.symbolic_shapes)": [[1212, "torch.fx.experimental.symbolic_shapes.sym_eq"]], "gather() (in module torch)": [[1213, "torch.gather"]], "gcd() (in module torch)": [[1214, "torch.gcd"]], "ge() (in module torch)": [[1215, "torch.ge"]], "geqrf() (in module torch)": [[1216, "torch.geqrf"]], "ger() (in module torch)": [[1217, "torch.ger"]], "get_default_device() (in module torch)": [[1218, "torch.get_default_device"]], "get_default_dtype() (in module torch)": [[1219, "torch.get_default_dtype"]], "get_deterministic_debug_mode() (in module torch)": [[1220, "torch.get_deterministic_debug_mode"]], "get_device_module() (in module torch)": [[1221, "torch.get_device_module"]], "get_float32_matmul_precision() (in module torch)": [[1222, "torch.get_float32_matmul_precision"]], "get_num_interop_threads() (in module torch)": [[1223, "torch.get_num_interop_threads"]], "get_num_threads() (in module torch)": [[1224, "torch.get_num_threads"]], "get_rng_state() (in module torch)": [[1225, "torch.get_rng_state"]], "gradient() (in module torch)": [[1226, "torch.gradient"]], "greater() (in module torch)": [[1227, "torch.greater"]], "greater_equal() (in module torch)": [[1228, "torch.greater_equal"]], "gt() (in module torch)": [[1229, "torch.gt"]], "hamming_window() (in module torch)": [[1230, "torch.hamming_window"]], "hann_window() (in module torch)": [[1231, "torch.hann_window"]], "heaviside() (in module torch)": [[1232, "torch.heaviside"]], "histc() (in module torch)": [[1233, "torch.histc"]], "histogram() (in 
module torch)": [[1234, "torch.histogram"]], "histogramdd() (in module torch)": [[1235, "torch.histogramdd"]], "hsplit() (in module torch)": [[1236, "torch.hsplit"]], "hspmm() (in module torch)": [[1237, "torch.hspmm"]], "hstack() (in module torch)": [[1238, "torch.hstack"]], "hypot() (in module torch)": [[1239, "torch.hypot"]], "i0() (in module torch)": [[1240, "torch.i0"]], "igamma() (in module torch)": [[1241, "torch.igamma"]], "igammac() (in module torch)": [[1242, "torch.igammac"]], "imag() (in module torch)": [[1243, "torch.imag"]], "index_add() (in module torch)": [[1244, "torch.index_add"]], "index_copy() (in module torch)": [[1245, "torch.index_copy"]], "index_reduce() (in module torch)": [[1246, "torch.index_reduce"]], "index_select() (in module torch)": [[1247, "torch.index_select"]], "initial_seed() (in module torch)": [[1248, "torch.initial_seed"]], "inner() (in module torch)": [[1249, "torch.inner"]], "inverse() (in module torch)": [[1250, "torch.inverse"]], "is_complex() (in module torch)": [[1251, "torch.is_complex"]], "is_conj() (in module torch)": [[1252, "torch.is_conj"]], "is_deterministic_algorithms_warn_only_enabled() (in module torch)": [[1253, "torch.is_deterministic_algorithms_warn_only_enabled"]], "is_floating_point() (in module torch)": [[1254, "torch.is_floating_point"]], "is_grad_enabled() (in module torch)": [[1255, "torch.is_grad_enabled"]], "is_inference_mode_enabled() (in module torch)": [[1256, "torch.is_inference_mode_enabled"]], "is_nonzero() (in module torch)": [[1257, "torch.is_nonzero"]], "is_storage() (in module torch)": [[1258, "torch.is_storage"]], "is_tensor() (in module torch)": [[1259, "torch.is_tensor"]], "is_warn_always_enabled() (in module torch)": [[1260, "torch.is_warn_always_enabled"]], "isclose() (in module torch)": [[1261, "torch.isclose"]], "isfinite() (in module torch)": [[1262, "torch.isfinite"]], "isin() (in module torch)": [[1263, "torch.isin"]], "isinf() (in module torch)": [[1264, "torch.isinf"]], "isnan() (in module torch)": [[1265, "torch.isnan"]], "isneginf() (in module torch)": [[1266, "torch.isneginf"]], "isposinf() (in module torch)": [[1267, "torch.isposinf"]], "isreal() (in module torch)": [[1268, "torch.isreal"]], "istft() (in module torch)": [[1269, "torch.istft"]], "attribute (class in torch.jit)": [[1270, "torch.jit.Attribute"]], "count() (torch.jit.attribute method)": [[1270, "torch.jit.Attribute.count"]], "index() (torch.jit.attribute method)": [[1270, "torch.jit.Attribute.index"]], "type (torch.jit.attribute attribute)": [[1270, "torch.jit.Attribute.type"]], "value (torch.jit.attribute attribute)": [[1270, "torch.jit.Attribute.value"]], "scriptfunction (class in torch.jit)": [[1271, "torch.jit.ScriptFunction"]], "get_debug_state() (torch.jit.scriptfunction method)": [[1271, "torch.jit.ScriptFunction.get_debug_state"]], "save() (torch.jit.scriptfunction method)": [[1271, "torch.jit.ScriptFunction.save"]], "save_to_buffer() (torch.jit.scriptfunction method)": [[1271, "torch.jit.ScriptFunction.save_to_buffer"]], "scriptmodule (class in torch.jit)": [[1272, "torch.jit.ScriptModule"]], "add_module() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.add_module"]], "apply() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.apply"]], "bfloat16() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.bfloat16"]], "buffers() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.buffers"]], "children() (torch.jit.scriptmodule method)": [[1272, 
"torch.jit.ScriptModule.children"]], "code (torch.jit.scriptmodule property)": [[1272, "torch.jit.ScriptModule.code"]], "code_with_constants (torch.jit.scriptmodule property)": [[1272, "torch.jit.ScriptModule.code_with_constants"]], "compile() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.compile"]], "cpu() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.cpu"]], "cuda() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.cuda"]], "double() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.double"]], "eval() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.eval"]], "extra_repr() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.extra_repr"]], "float() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.float"]], "get_buffer() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.get_buffer"]], "get_extra_state() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.get_extra_state"]], "get_parameter() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.get_parameter"]], "get_submodule() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.get_submodule"]], "graph (torch.jit.scriptmodule property)": [[1272, "torch.jit.ScriptModule.graph"]], "half() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.half"]], "inlined_graph (torch.jit.scriptmodule property)": [[1272, "torch.jit.ScriptModule.inlined_graph"]], "ipu() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.ipu"]], "load_state_dict() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.load_state_dict"]], "modules() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.modules"]], "named_buffers() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.named_buffers"]], "named_children() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.named_children"]], "named_modules() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.named_modules"]], "named_parameters() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.named_parameters"]], "parameters() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.parameters"]], "register_backward_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_backward_hook"]], "register_buffer() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_buffer"]], "register_forward_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_forward_hook"]], "register_forward_pre_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_forward_pre_hook"]], "register_full_backward_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_full_backward_hook"]], "register_full_backward_pre_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_full_backward_pre_hook"]], "register_load_state_dict_post_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_load_state_dict_post_hook"]], "register_module() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_module"]], "register_parameter() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_parameter"]], "register_state_dict_pre_hook() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.register_state_dict_pre_hook"]], "requires_grad_() (torch.jit.scriptmodule 
method)": [[1272, "torch.jit.ScriptModule.requires_grad_"]], "save() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.save"]], "set_extra_state() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.set_extra_state"]], "share_memory() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.share_memory"]], "state_dict() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.state_dict"]], "to() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.to"]], "to_empty() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.to_empty"]], "train() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.train"]], "type() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.type"]], "xpu() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.xpu"]], "zero_grad() (torch.jit.scriptmodule method)": [[1272, "torch.jit.ScriptModule.zero_grad"]], "annotate() (in module torch.jit)": [[1273, "torch.jit.annotate"]], "enable_onednn_fusion() (in module torch.jit)": [[1274, "torch.jit.enable_onednn_fusion"]], "fork() (in module torch.jit)": [[1275, "torch.jit.fork"]], "freeze() (in module torch.jit)": [[1276, "torch.jit.freeze"]], "ignore() (in module torch.jit)": [[1277, "torch.jit.ignore"]], "interface() (in module torch.jit)": [[1278, "torch.jit.interface"]], "isinstance() (in module torch.jit)": [[1279, "torch.jit.isinstance"]], "load() (in module torch.jit)": [[1280, "torch.jit.load"]], "onednn_fusion_enabled() (in module torch.jit)": [[1281, "torch.jit.onednn_fusion_enabled"]], "optimize_for_inference() (in module torch.jit)": [[1282, "torch.jit.optimize_for_inference"]], "save() (in module torch.jit)": [[1283, "torch.jit.save"]], "script() (in module torch.jit)": [[1284, "torch.jit.script"]], "script_if_tracing() (in module torch.jit)": [[1285, "torch.jit.script_if_tracing"]], "set_fusion_strategy() (in module torch.jit)": [[1286, "torch.jit.set_fusion_strategy"]], "strict_fusion (class in torch.jit)": [[1287, "torch.jit.strict_fusion"]], "trace() (in module torch.jit)": [[1288, "torch.jit.trace"]], "trace_module() (in module torch.jit)": [[1289, "torch.jit.trace_module"]], "unused() (in module torch.jit)": [[1290, "torch.jit.unused"]], "wait() (in module torch.jit)": [[1291, "torch.jit.wait"]], "kaiser_window() (in module torch)": [[1292, "torch.kaiser_window"]], "kron() (in module torch)": [[1293, "torch.kron"]], "kthvalue() (in module torch)": [[1294, "torch.kthvalue"]], "lcm() (in module torch)": [[1295, "torch.lcm"]], "ldexp() (in module torch)": [[1296, "torch.ldexp"]], "le() (in module torch)": [[1297, "torch.le"]], "lerp() (in module torch)": [[1298, "torch.lerp"]], "less() (in module torch)": [[1299, "torch.less"]], "less_equal() (in module torch)": [[1300, "torch.less_equal"]], "lgamma() (in module torch)": [[1301, "torch.lgamma"]], "cholesky() (in module torch.linalg)": [[1302, "torch.linalg.cholesky"]], "cholesky_ex() (in module torch.linalg)": [[1303, "torch.linalg.cholesky_ex"]], "cond() (in module torch.linalg)": [[1304, "torch.linalg.cond"]], "cross() (in module torch.linalg)": [[1305, "torch.linalg.cross"]], "det() (in module torch.linalg)": [[1306, "torch.linalg.det"]], "diagonal() (in module torch.linalg)": [[1307, "torch.linalg.diagonal"]], "eig() (in module torch.linalg)": [[1308, "torch.linalg.eig"]], "eigh() (in module torch.linalg)": [[1309, "torch.linalg.eigh"]], "eigvals() (in module torch.linalg)": [[1310, "torch.linalg.eigvals"]], "eigvalsh() (in module 
torch.linalg)": [[1311, "torch.linalg.eigvalsh"]], "householder_product() (in module torch.linalg)": [[1312, "torch.linalg.householder_product"]], "inv() (in module torch.linalg)": [[1313, "torch.linalg.inv"]], "inv_ex() (in module torch.linalg)": [[1314, "torch.linalg.inv_ex"]], "ldl_factor() (in module torch.linalg)": [[1315, "torch.linalg.ldl_factor"]], "ldl_factor_ex() (in module torch.linalg)": [[1316, "torch.linalg.ldl_factor_ex"]], "ldl_solve() (in module torch.linalg)": [[1317, "torch.linalg.ldl_solve"]], "lstsq() (in module torch.linalg)": [[1318, "torch.linalg.lstsq"]], "lu() (in module torch.linalg)": [[1319, "torch.linalg.lu"]], "lu_factor() (in module torch.linalg)": [[1320, "torch.linalg.lu_factor"]], "lu_factor_ex() (in module torch.linalg)": [[1321, "torch.linalg.lu_factor_ex"]], "lu_solve() (in module torch.linalg)": [[1322, "torch.linalg.lu_solve"]], "matmul() (in module torch.linalg)": [[1323, "torch.linalg.matmul"]], "matrix_exp() (in module torch.linalg)": [[1324, "torch.linalg.matrix_exp"]], "matrix_norm() (in module torch.linalg)": [[1325, "torch.linalg.matrix_norm"]], "matrix_power() (in module torch.linalg)": [[1326, "torch.linalg.matrix_power"]], "matrix_rank() (in module torch.linalg)": [[1327, "torch.linalg.matrix_rank"]], "multi_dot() (in module torch.linalg)": [[1328, "torch.linalg.multi_dot"]], "norm() (in module torch.linalg)": [[1329, "torch.linalg.norm"]], "pinv() (in module torch.linalg)": [[1330, "torch.linalg.pinv"]], "qr() (in module torch.linalg)": [[1331, "torch.linalg.qr"]], "slogdet() (in module torch.linalg)": [[1332, "torch.linalg.slogdet"]], "solve() (in module torch.linalg)": [[1333, "torch.linalg.solve"]], "solve_ex() (in module torch.linalg)": [[1334, "torch.linalg.solve_ex"]], "solve_triangular() (in module torch.linalg)": [[1335, "torch.linalg.solve_triangular"]], "svd() (in module torch.linalg)": [[1336, "torch.linalg.svd"]], "svdvals() (in module torch.linalg)": [[1337, "torch.linalg.svdvals"]], "tensorinv() (in module torch.linalg)": [[1338, "torch.linalg.tensorinv"]], "tensorsolve() (in module torch.linalg)": [[1339, "torch.linalg.tensorsolve"]], "vander() (in module torch.linalg)": [[1340, "torch.linalg.vander"]], "vecdot() (in module torch.linalg)": [[1341, "torch.linalg.vecdot"]], "vector_norm() (in module torch.linalg)": [[1342, "torch.linalg.vector_norm"]], "linspace() (in module torch)": [[1343, "torch.linspace"]], "load() (in module torch)": [[1344, "torch.load"]], "lobpcg() (in module torch)": [[1345, "torch.lobpcg"]], "log() (in module torch)": [[1346, "torch.log"]], "log10() (in module torch)": [[1347, "torch.log10"]], "log1p() (in module torch)": [[1348, "torch.log1p"]], "log2() (in module torch)": [[1349, "torch.log2"]], "logaddexp() (in module torch)": [[1350, "torch.logaddexp"]], "logaddexp2() (in module torch)": [[1351, "torch.logaddexp2"]], "logcumsumexp() (in module torch)": [[1352, "torch.logcumsumexp"]], "logdet() (in module torch)": [[1353, "torch.logdet"]], "logical_and() (in module torch)": [[1354, "torch.logical_and"]], "logical_not() (in module torch)": [[1355, "torch.logical_not"]], "logical_or() (in module torch)": [[1356, "torch.logical_or"]], "logical_xor() (in module torch)": [[1357, "torch.logical_xor"]], "logit() (in module torch)": [[1358, "torch.logit"]], "logspace() (in module torch)": [[1359, "torch.logspace"]], "logsumexp() (in module torch)": [[1360, "torch.logsumexp"]], "lt() (in module torch)": [[1361, "torch.lt"]], "lu() (in module torch)": [[1362, "torch.lu"]], "lu_solve() (in module torch)": 
[[1363, "torch.lu_solve"]], "lu_unpack() (in module torch)": [[1364, "torch.lu_unpack"]], "manual_seed() (in module torch)": [[1365, "torch.manual_seed"]], "masked_select() (in module torch)": [[1366, "torch.masked_select"]], "matmul() (in module torch)": [[1367, "torch.matmul"]], "matrix_exp() (in module torch)": [[1368, "torch.matrix_exp"]], "matrix_power() (in module torch)": [[1369, "torch.matrix_power"]], "max() (in module torch)": [[1370, "torch.max"]], "maximum() (in module torch)": [[1371, "torch.maximum"]], "mean() (in module torch)": [[1372, "torch.mean"]], "median() (in module torch)": [[1373, "torch.median"]], "meshgrid() (in module torch)": [[1374, "torch.meshgrid"]], "min() (in module torch)": [[1375, "torch.min"]], "minimum() (in module torch)": [[1376, "torch.minimum"]], "mm() (in module torch)": [[1377, "torch.mm"]], "mode() (in module torch)": [[1378, "torch.mode"]], "moveaxis() (in module torch)": [[1379, "torch.moveaxis"]], "movedim() (in module torch)": [[1380, "torch.movedim"]], "current_allocated_memory() (in module torch.mps)": [[1381, "torch.mps.current_allocated_memory"]], "device_count() (in module torch.mps)": [[1382, "torch.mps.device_count"]], "driver_allocated_memory() (in module torch.mps)": [[1383, "torch.mps.driver_allocated_memory"]], "empty_cache() (in module torch.mps)": [[1384, "torch.mps.empty_cache"]], "event (class in torch.mps.event)": [[1385, "torch.mps.event.Event"]], "elapsed_time() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.elapsed_time"]], "query() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.query"]], "record() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.record"]], "synchronize() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.synchronize"]], "wait() (torch.mps.event.event method)": [[1385, "torch.mps.event.Event.wait"]], "get_rng_state() (in module torch.mps)": [[1386, "torch.mps.get_rng_state"]], "manual_seed() (in module torch.mps)": [[1387, "torch.mps.manual_seed"]], "profile() (in module torch.mps.profiler)": [[1388, "torch.mps.profiler.profile"]], "start() (in module torch.mps.profiler)": [[1389, "torch.mps.profiler.start"]], "stop() (in module torch.mps.profiler)": [[1390, "torch.mps.profiler.stop"]], "seed() (in module torch.mps)": [[1391, "torch.mps.seed"]], "set_per_process_memory_fraction() (in module torch.mps)": [[1392, "torch.mps.set_per_process_memory_fraction"]], "set_rng_state() (in module torch.mps)": [[1393, "torch.mps.set_rng_state"]], "synchronize() (in module torch.mps)": [[1394, "torch.mps.synchronize"]], "msort() (in module torch)": [[1395, "torch.msort"]], "deferredmtiacallerror": [[1396, "torch.mtia.DeferredMtiaCallError"]], "event (class in torch.mtia)": [[1397, "torch.mtia.Event"]], "stream (class in torch.mtia)": [[1398, "torch.mtia.Stream"]], "streamcontext (class in torch.mtia)": [[1399, "torch.mtia.StreamContext"]], "current_device() (in module torch.mtia)": [[1400, "torch.mtia.current_device"]], "current_stream() (in module torch.mtia)": [[1401, "torch.mtia.current_stream"]], "default_stream() (in module torch.mtia)": [[1402, "torch.mtia.default_stream"]], "device (class in torch.mtia)": [[1403, "torch.mtia.device"]], "device_count() (in module torch.mtia)": [[1404, "torch.mtia.device_count"]], "init() (in module torch.mtia)": [[1405, "torch.mtia.init"]], "is_available() (in module torch.mtia)": [[1406, "torch.mtia.is_available"]], "is_initialized() (in module torch.mtia)": [[1407, "torch.mtia.is_initialized"]], "set_stream() (in 
module torch.mtia)": [[1408, "torch.mtia.set_stream"]], "stream() (in module torch.mtia)": [[1409, "torch.mtia.stream"]], "synchronize() (in module torch.mtia)": [[1410, "torch.mtia.synchronize"]], "mul() (in module torch)": [[1411, "torch.mul"]], "multinomial() (in module torch)": [[1412, "torch.multinomial"]], "multiply() (in module torch)": [[1413, "torch.multiply"]], "mv() (in module torch)": [[1414, "torch.mv"]], "mvlgamma() (in module torch)": [[1415, "torch.mvlgamma"]], "nan_to_num() (in module torch)": [[1416, "torch.nan_to_num"]], "nanmean() (in module torch)": [[1417, "torch.nanmean"]], "nanmedian() (in module torch)": [[1418, "torch.nanmedian"]], "nanquantile() (in module torch)": [[1419, "torch.nanquantile"]], "nansum() (in module torch)": [[1420, "torch.nansum"]], "narrow() (in module torch)": [[1421, "torch.narrow"]], "narrow_copy() (in module torch)": [[1422, "torch.narrow_copy"]], "ne() (in module torch)": [[1423, "torch.ne"]], "neg() (in module torch)": [[1424, "torch.neg"]], "negative() (in module torch)": [[1425, "torch.negative"]], "nextafter() (in module torch)": [[1426, "torch.nextafter"]], "adaptiveavgpool1d (class in torch.nn)": [[1427, "torch.nn.AdaptiveAvgPool1d"]], "adaptiveavgpool2d (class in torch.nn)": [[1428, "torch.nn.AdaptiveAvgPool2d"]], "adaptiveavgpool3d (class in torch.nn)": [[1429, "torch.nn.AdaptiveAvgPool3d"]], "adaptivelogsoftmaxwithloss (class in torch.nn)": [[1430, "torch.nn.AdaptiveLogSoftmaxWithLoss"]], "log_prob() (torch.nn.adaptivelogsoftmaxwithloss method)": [[1430, "torch.nn.AdaptiveLogSoftmaxWithLoss.log_prob"]], "predict() (torch.nn.adaptivelogsoftmaxwithloss method)": [[1430, "torch.nn.AdaptiveLogSoftmaxWithLoss.predict"]], "adaptivemaxpool1d (class in torch.nn)": [[1431, "torch.nn.AdaptiveMaxPool1d"]], "adaptivemaxpool2d (class in torch.nn)": [[1432, "torch.nn.AdaptiveMaxPool2d"]], "adaptivemaxpool3d (class in torch.nn)": [[1433, "torch.nn.AdaptiveMaxPool3d"]], "alphadropout (class in torch.nn)": [[1434, "torch.nn.AlphaDropout"]], "avgpool1d (class in torch.nn)": [[1435, "torch.nn.AvgPool1d"]], "avgpool2d (class in torch.nn)": [[1436, "torch.nn.AvgPool2d"]], "avgpool3d (class in torch.nn)": [[1437, "torch.nn.AvgPool3d"]], "bceloss (class in torch.nn)": [[1438, "torch.nn.BCELoss"]], "bcewithlogitsloss (class in torch.nn)": [[1439, "torch.nn.BCEWithLogitsLoss"]], "batchnorm1d (class in torch.nn)": [[1440, "torch.nn.BatchNorm1d"]], "batchnorm2d (class in torch.nn)": [[1441, "torch.nn.BatchNorm2d"]], "batchnorm3d (class in torch.nn)": [[1442, "torch.nn.BatchNorm3d"]], "bilinear (class in torch.nn)": [[1443, "torch.nn.Bilinear"]], "celu (class in torch.nn)": [[1444, "torch.nn.CELU"]], "ctcloss (class in torch.nn)": [[1445, "torch.nn.CTCLoss"]], "channelshuffle (class in torch.nn)": [[1446, "torch.nn.ChannelShuffle"]], "circularpad1d (class in torch.nn)": [[1447, "torch.nn.CircularPad1d"]], "circularpad2d (class in torch.nn)": [[1448, "torch.nn.CircularPad2d"]], "circularpad3d (class in torch.nn)": [[1449, "torch.nn.CircularPad3d"]], "constantpad1d (class in torch.nn)": [[1450, "torch.nn.ConstantPad1d"]], "constantpad2d (class in torch.nn)": [[1451, "torch.nn.ConstantPad2d"]], "constantpad3d (class in torch.nn)": [[1452, "torch.nn.ConstantPad3d"]], "conv1d (class in torch.nn)": [[1453, "torch.nn.Conv1d"]], "conv2d (class in torch.nn)": [[1454, "torch.nn.Conv2d"]], "conv3d (class in torch.nn)": [[1455, "torch.nn.Conv3d"]], "convtranspose1d (class in torch.nn)": [[1456, "torch.nn.ConvTranspose1d"]], "convtranspose2d (class in torch.nn)": [[1457, 
"torch.nn.ConvTranspose2d"]], "convtranspose3d (class in torch.nn)": [[1458, "torch.nn.ConvTranspose3d"]], "cosineembeddingloss (class in torch.nn)": [[1459, "torch.nn.CosineEmbeddingLoss"]], "cosinesimilarity (class in torch.nn)": [[1460, "torch.nn.CosineSimilarity"]], "crossentropyloss (class in torch.nn)": [[1461, "torch.nn.CrossEntropyLoss"]], "dataparallel (class in torch.nn)": [[1462, "torch.nn.DataParallel"]], "dropout (class in torch.nn)": [[1463, "torch.nn.Dropout"]], "dropout1d (class in torch.nn)": [[1464, "torch.nn.Dropout1d"]], "dropout2d (class in torch.nn)": [[1465, "torch.nn.Dropout2d"]], "dropout3d (class in torch.nn)": [[1466, "torch.nn.Dropout3d"]], "elu (class in torch.nn)": [[1467, "torch.nn.ELU"]], "embedding (class in torch.nn)": [[1468, "torch.nn.Embedding"]], "from_pretrained() (torch.nn.embedding class method)": [[1468, "torch.nn.Embedding.from_pretrained"]], "embeddingbag (class in torch.nn)": [[1469, "torch.nn.EmbeddingBag"]], "forward() (torch.nn.embeddingbag method)": [[1469, "torch.nn.EmbeddingBag.forward"]], "from_pretrained() (torch.nn.embeddingbag class method)": [[1469, "torch.nn.EmbeddingBag.from_pretrained"]], "featurealphadropout (class in torch.nn)": [[1470, "torch.nn.FeatureAlphaDropout"]], "flatten (class in torch.nn)": [[1471, "torch.nn.Flatten"]], "fold (class in torch.nn)": [[1472, "torch.nn.Fold"]], "fractionalmaxpool2d (class in torch.nn)": [[1473, "torch.nn.FractionalMaxPool2d"]], "fractionalmaxpool3d (class in torch.nn)": [[1474, "torch.nn.FractionalMaxPool3d"]], "gelu (class in torch.nn)": [[1475, "torch.nn.GELU"]], "glu (class in torch.nn)": [[1476, "torch.nn.GLU"]], "gru (class in torch.nn)": [[1477, "torch.nn.GRU"]], "grucell (class in torch.nn)": [[1478, "torch.nn.GRUCell"]], "gaussiannllloss (class in torch.nn)": [[1479, "torch.nn.GaussianNLLLoss"]], "groupnorm (class in torch.nn)": [[1480, "torch.nn.GroupNorm"]], "hardshrink (class in torch.nn)": [[1481, "torch.nn.Hardshrink"]], "hardsigmoid (class in torch.nn)": [[1482, "torch.nn.Hardsigmoid"]], "hardswish (class in torch.nn)": [[1483, "torch.nn.Hardswish"]], "hardtanh (class in torch.nn)": [[1484, "torch.nn.Hardtanh"]], "hingeembeddingloss (class in torch.nn)": [[1485, "torch.nn.HingeEmbeddingLoss"]], "huberloss (class in torch.nn)": [[1486, "torch.nn.HuberLoss"]], "identity (class in torch.nn)": [[1487, "torch.nn.Identity"]], "instancenorm1d (class in torch.nn)": [[1488, "torch.nn.InstanceNorm1d"]], "instancenorm2d (class in torch.nn)": [[1489, "torch.nn.InstanceNorm2d"]], "instancenorm3d (class in torch.nn)": [[1490, "torch.nn.InstanceNorm3d"]], "kldivloss (class in torch.nn)": [[1491, "torch.nn.KLDivLoss"]], "l1loss (class in torch.nn)": [[1492, "torch.nn.L1Loss"]], "lppool1d (class in torch.nn)": [[1493, "torch.nn.LPPool1d"]], "lppool2d (class in torch.nn)": [[1494, "torch.nn.LPPool2d"]], "lppool3d (class in torch.nn)": [[1495, "torch.nn.LPPool3d"]], "lstm (class in torch.nn)": [[1496, "torch.nn.LSTM"]], "lstmcell (class in torch.nn)": [[1497, "torch.nn.LSTMCell"]], "layernorm (class in torch.nn)": [[1498, "torch.nn.LayerNorm"]], "lazybatchnorm1d (class in torch.nn)": [[1499, "torch.nn.LazyBatchNorm1d"]], "cls_to_become (torch.nn.lazybatchnorm1d attribute)": [[1499, "torch.nn.LazyBatchNorm1d.cls_to_become"]], "lazybatchnorm2d (class in torch.nn)": [[1500, "torch.nn.LazyBatchNorm2d"]], "cls_to_become (torch.nn.lazybatchnorm2d attribute)": [[1500, "torch.nn.LazyBatchNorm2d.cls_to_become"]], "lazybatchnorm3d (class in torch.nn)": [[1501, "torch.nn.LazyBatchNorm3d"]], "cls_to_become 
(torch.nn.lazybatchnorm3d attribute)": [[1501, "torch.nn.LazyBatchNorm3d.cls_to_become"]], "lazyconv1d (class in torch.nn)": [[1502, "torch.nn.LazyConv1d"]], "cls_to_become (torch.nn.lazyconv1d attribute)": [[1502, "torch.nn.LazyConv1d.cls_to_become"]], "lazyconv2d (class in torch.nn)": [[1503, "torch.nn.LazyConv2d"]], "cls_to_become (torch.nn.lazyconv2d attribute)": [[1503, "torch.nn.LazyConv2d.cls_to_become"]], "lazyconv3d (class in torch.nn)": [[1504, "torch.nn.LazyConv3d"]], "cls_to_become (torch.nn.lazyconv3d attribute)": [[1504, "torch.nn.LazyConv3d.cls_to_become"]], "lazyconvtranspose1d (class in torch.nn)": [[1505, "torch.nn.LazyConvTranspose1d"]], "cls_to_become (torch.nn.lazyconvtranspose1d attribute)": [[1505, "torch.nn.LazyConvTranspose1d.cls_to_become"]], "lazyconvtranspose2d (class in torch.nn)": [[1506, "torch.nn.LazyConvTranspose2d"]], "cls_to_become (torch.nn.lazyconvtranspose2d attribute)": [[1506, "torch.nn.LazyConvTranspose2d.cls_to_become"]], "lazyconvtranspose3d (class in torch.nn)": [[1507, "torch.nn.LazyConvTranspose3d"]], "cls_to_become (torch.nn.lazyconvtranspose3d attribute)": [[1507, "torch.nn.LazyConvTranspose3d.cls_to_become"]], "lazyinstancenorm1d (class in torch.nn)": [[1508, "torch.nn.LazyInstanceNorm1d"]], "cls_to_become (torch.nn.lazyinstancenorm1d attribute)": [[1508, "torch.nn.LazyInstanceNorm1d.cls_to_become"]], "lazyinstancenorm2d (class in torch.nn)": [[1509, "torch.nn.LazyInstanceNorm2d"]], "cls_to_become (torch.nn.lazyinstancenorm2d attribute)": [[1509, "torch.nn.LazyInstanceNorm2d.cls_to_become"]], "lazyinstancenorm3d (class in torch.nn)": [[1510, "torch.nn.LazyInstanceNorm3d"]], "cls_to_become (torch.nn.lazyinstancenorm3d attribute)": [[1510, "torch.nn.LazyInstanceNorm3d.cls_to_become"]], "lazylinear (class in torch.nn)": [[1511, "torch.nn.LazyLinear"]], "cls_to_become (torch.nn.lazylinear attribute)": [[1511, "torch.nn.LazyLinear.cls_to_become"]], "leakyrelu (class in torch.nn)": [[1512, "torch.nn.LeakyReLU"]], "linear (class in torch.nn)": [[1513, "torch.nn.Linear"]], "localresponsenorm (class in torch.nn)": [[1514, "torch.nn.LocalResponseNorm"]], "logsigmoid (class in torch.nn)": [[1515, "torch.nn.LogSigmoid"]], "logsoftmax (class in torch.nn)": [[1516, "torch.nn.LogSoftmax"]], "mseloss (class in torch.nn)": [[1517, "torch.nn.MSELoss"]], "marginrankingloss (class in torch.nn)": [[1518, "torch.nn.MarginRankingLoss"]], "maxpool1d (class in torch.nn)": [[1519, "torch.nn.MaxPool1d"]], "maxpool2d (class in torch.nn)": [[1520, "torch.nn.MaxPool2d"]], "maxpool3d (class in torch.nn)": [[1521, "torch.nn.MaxPool3d"]], "maxunpool1d (class in torch.nn)": [[1522, "torch.nn.MaxUnpool1d"]], "maxunpool2d (class in torch.nn)": [[1523, "torch.nn.MaxUnpool2d"]], "maxunpool3d (class in torch.nn)": [[1524, "torch.nn.MaxUnpool3d"]], "mish (class in torch.nn)": [[1525, "torch.nn.Mish"]], "module (class in torch.nn)": [[1526, "torch.nn.Module"]], "add_module() (torch.nn.module method)": [[1526, "torch.nn.Module.add_module"]], "apply() (torch.nn.module method)": [[1526, "torch.nn.Module.apply"]], "bfloat16() (torch.nn.module method)": [[1526, "torch.nn.Module.bfloat16"]], "buffers() (torch.nn.module method)": [[1526, "torch.nn.Module.buffers"]], "children() (torch.nn.module method)": [[1526, "torch.nn.Module.children"]], "compile() (torch.nn.module method)": [[1526, "torch.nn.Module.compile"]], "cpu() (torch.nn.module method)": [[1526, "torch.nn.Module.cpu"]], "cuda() (torch.nn.module method)": [[1526, "torch.nn.Module.cuda"]], "double() (torch.nn.module method)": 
[[1526, "torch.nn.Module.double"]], "eval() (torch.nn.module method)": [[1526, "torch.nn.Module.eval"]], "extra_repr() (torch.nn.module method)": [[1526, "torch.nn.Module.extra_repr"]], "float() (torch.nn.module method)": [[1526, "torch.nn.Module.float"]], "forward() (torch.nn.module method)": [[1526, "torch.nn.Module.forward"]], "get_buffer() (torch.nn.module method)": [[1526, "torch.nn.Module.get_buffer"]], "get_extra_state() (torch.nn.module method)": [[1526, "torch.nn.Module.get_extra_state"]], "get_parameter() (torch.nn.module method)": [[1526, "torch.nn.Module.get_parameter"]], "get_submodule() (torch.nn.module method)": [[1526, "torch.nn.Module.get_submodule"]], "half() (torch.nn.module method)": [[1526, "torch.nn.Module.half"]], "ipu() (torch.nn.module method)": [[1526, "torch.nn.Module.ipu"]], "load_state_dict() (torch.nn.module method)": [[1526, "torch.nn.Module.load_state_dict"]], "modules() (torch.nn.module method)": [[1526, "torch.nn.Module.modules"]], "named_buffers() (torch.nn.module method)": [[1526, "torch.nn.Module.named_buffers"]], "named_children() (torch.nn.module method)": [[1526, "torch.nn.Module.named_children"]], "named_modules() (torch.nn.module method)": [[1526, "torch.nn.Module.named_modules"]], "named_parameters() (torch.nn.module method)": [[1526, "torch.nn.Module.named_parameters"]], "parameters() (torch.nn.module method)": [[1526, "torch.nn.Module.parameters"]], "register_backward_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_backward_hook"]], "register_buffer() (torch.nn.module method)": [[1526, "torch.nn.Module.register_buffer"]], "register_forward_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_forward_hook"]], "register_forward_pre_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_forward_pre_hook"]], "register_full_backward_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_full_backward_hook"]], "register_full_backward_pre_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_full_backward_pre_hook"]], "register_load_state_dict_post_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_load_state_dict_post_hook"]], "register_module() (torch.nn.module method)": [[1526, "torch.nn.Module.register_module"]], "register_parameter() (torch.nn.module method)": [[1526, "torch.nn.Module.register_parameter"]], "register_state_dict_pre_hook() (torch.nn.module method)": [[1526, "torch.nn.Module.register_state_dict_pre_hook"]], "requires_grad_() (torch.nn.module method)": [[1526, "torch.nn.Module.requires_grad_"]], "set_extra_state() (torch.nn.module method)": [[1526, "torch.nn.Module.set_extra_state"]], "share_memory() (torch.nn.module method)": [[1526, "torch.nn.Module.share_memory"]], "state_dict() (torch.nn.module method)": [[1526, "torch.nn.Module.state_dict"]], "to() (torch.nn.module method)": [[1526, "torch.nn.Module.to"]], "to_empty() (torch.nn.module method)": [[1526, "torch.nn.Module.to_empty"]], "train() (torch.nn.module method)": [[1526, "torch.nn.Module.train"]], "type() (torch.nn.module method)": [[1526, "torch.nn.Module.type"]], "xpu() (torch.nn.module method)": [[1526, "torch.nn.Module.xpu"]], "zero_grad() (torch.nn.module method)": [[1526, "torch.nn.Module.zero_grad"]], "moduledict (class in torch.nn)": [[1527, "torch.nn.ModuleDict"]], "clear() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.clear"]], "items() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.items"]], "keys() (torch.nn.moduledict method)": [[1527, 
"torch.nn.ModuleDict.keys"]], "pop() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.pop"]], "update() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.update"]], "values() (torch.nn.moduledict method)": [[1527, "torch.nn.ModuleDict.values"]], "modulelist (class in torch.nn)": [[1528, "torch.nn.ModuleList"]], "append() (torch.nn.modulelist method)": [[1528, "torch.nn.ModuleList.append"]], "extend() (torch.nn.modulelist method)": [[1528, "torch.nn.ModuleList.extend"]], "insert() (torch.nn.modulelist method)": [[1528, "torch.nn.ModuleList.insert"]], "multilabelmarginloss (class in torch.nn)": [[1529, "torch.nn.MultiLabelMarginLoss"]], "multilabelsoftmarginloss (class in torch.nn)": [[1530, "torch.nn.MultiLabelSoftMarginLoss"]], "multimarginloss (class in torch.nn)": [[1531, "torch.nn.MultiMarginLoss"]], "multiheadattention (class in torch.nn)": [[1532, "torch.nn.MultiheadAttention"]], "forward() (torch.nn.multiheadattention method)": [[1532, "torch.nn.MultiheadAttention.forward"]], "merge_masks() (torch.nn.multiheadattention method)": [[1532, "torch.nn.MultiheadAttention.merge_masks"]], "nllloss (class in torch.nn)": [[1533, "torch.nn.NLLLoss"]], "prelu (class in torch.nn)": [[1534, "torch.nn.PReLU"]], "pairwisedistance (class in torch.nn)": [[1535, "torch.nn.PairwiseDistance"]], "parameterdict (class in torch.nn)": [[1536, "torch.nn.ParameterDict"]], "clear() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.clear"]], "copy() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.copy"]], "fromkeys() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.fromkeys"]], "get() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.get"]], "items() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.items"]], "keys() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.keys"]], "pop() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.pop"]], "popitem() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.popitem"]], "setdefault() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.setdefault"]], "update() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.update"]], "values() (torch.nn.parameterdict method)": [[1536, "torch.nn.ParameterDict.values"]], "parameterlist (class in torch.nn)": [[1537, "torch.nn.ParameterList"]], "append() (torch.nn.parameterlist method)": [[1537, "torch.nn.ParameterList.append"]], "extend() (torch.nn.parameterlist method)": [[1537, "torch.nn.ParameterList.extend"]], "pixelshuffle (class in torch.nn)": [[1538, "torch.nn.PixelShuffle"]], "pixelunshuffle (class in torch.nn)": [[1539, "torch.nn.PixelUnshuffle"]], "poissonnllloss (class in torch.nn)": [[1540, "torch.nn.PoissonNLLLoss"]], "rmsnorm (class in torch.nn)": [[1541, "torch.nn.RMSNorm"]], "extra_repr() (torch.nn.rmsnorm method)": [[1541, "torch.nn.RMSNorm.extra_repr"]], "forward() (torch.nn.rmsnorm method)": [[1541, "torch.nn.RMSNorm.forward"]], "reset_parameters() (torch.nn.rmsnorm method)": [[1541, "torch.nn.RMSNorm.reset_parameters"]], "rnn (class in torch.nn)": [[1542, "torch.nn.RNN"]], "rnnbase (class in torch.nn)": [[1543, "torch.nn.RNNBase"]], "flatten_parameters() (torch.nn.rnnbase method)": [[1543, "torch.nn.RNNBase.flatten_parameters"]], "rnncell (class in torch.nn)": [[1544, "torch.nn.RNNCell"]], "rrelu (class in torch.nn)": [[1545, "torch.nn.RReLU"]], "relu (class in torch.nn)": [[1546, "torch.nn.ReLU"]], "relu6 (class in torch.nn)": [[1547, 
"torch.nn.ReLU6"]], "reflectionpad1d (class in torch.nn)": [[1548, "torch.nn.ReflectionPad1d"]], "reflectionpad2d (class in torch.nn)": [[1549, "torch.nn.ReflectionPad2d"]], "reflectionpad3d (class in torch.nn)": [[1550, "torch.nn.ReflectionPad3d"]], "replicationpad1d (class in torch.nn)": [[1551, "torch.nn.ReplicationPad1d"]], "replicationpad2d (class in torch.nn)": [[1552, "torch.nn.ReplicationPad2d"]], "replicationpad3d (class in torch.nn)": [[1553, "torch.nn.ReplicationPad3d"]], "selu (class in torch.nn)": [[1554, "torch.nn.SELU"]], "sequential (class in torch.nn)": [[1555, "torch.nn.Sequential"]], "append() (torch.nn.sequential method)": [[1555, "torch.nn.Sequential.append"]], "silu (class in torch.nn)": [[1556, "torch.nn.SiLU"]], "sigmoid (class in torch.nn)": [[1557, "torch.nn.Sigmoid"]], "smoothl1loss (class in torch.nn)": [[1558, "torch.nn.SmoothL1Loss"]], "softmarginloss (class in torch.nn)": [[1559, "torch.nn.SoftMarginLoss"]], "softmax (class in torch.nn)": [[1560, "torch.nn.Softmax"]], "softmax2d (class in torch.nn)": [[1561, "torch.nn.Softmax2d"]], "softmin (class in torch.nn)": [[1562, "torch.nn.Softmin"]], "softplus (class in torch.nn)": [[1563, "torch.nn.Softplus"]], "softshrink (class in torch.nn)": [[1564, "torch.nn.Softshrink"]], "softsign (class in torch.nn)": [[1565, "torch.nn.Softsign"]], "syncbatchnorm (class in torch.nn)": [[1566, "torch.nn.SyncBatchNorm"]], "convert_sync_batchnorm() (torch.nn.syncbatchnorm class method)": [[1566, "torch.nn.SyncBatchNorm.convert_sync_batchnorm"]], "tanh (class in torch.nn)": [[1567, "torch.nn.Tanh"]], "tanhshrink (class in torch.nn)": [[1568, "torch.nn.Tanhshrink"]], "threshold (class in torch.nn)": [[1569, "torch.nn.Threshold"]], "transformer (class in torch.nn)": [[1570, "torch.nn.Transformer"]], "forward() (torch.nn.transformer method)": [[1570, "torch.nn.Transformer.forward"]], "generate_square_subsequent_mask() (torch.nn.transformer static method)": [[1570, "torch.nn.Transformer.generate_square_subsequent_mask"]], "transformerdecoder (class in torch.nn)": [[1571, "torch.nn.TransformerDecoder"]], "forward() (torch.nn.transformerdecoder method)": [[1571, "torch.nn.TransformerDecoder.forward"]], "transformerdecoderlayer (class in torch.nn)": [[1572, "torch.nn.TransformerDecoderLayer"]], "forward() (torch.nn.transformerdecoderlayer method)": [[1572, "torch.nn.TransformerDecoderLayer.forward"]], "transformerencoder (class in torch.nn)": [[1573, "torch.nn.TransformerEncoder"]], "forward() (torch.nn.transformerencoder method)": [[1573, "torch.nn.TransformerEncoder.forward"]], "transformerencoderlayer (class in torch.nn)": [[1574, "torch.nn.TransformerEncoderLayer"]], "forward() (torch.nn.transformerencoderlayer method)": [[1574, "torch.nn.TransformerEncoderLayer.forward"]], "tripletmarginloss (class in torch.nn)": [[1575, "torch.nn.TripletMarginLoss"]], "tripletmarginwithdistanceloss (class in torch.nn)": [[1576, "torch.nn.TripletMarginWithDistanceLoss"]], "unflatten (class in torch.nn)": [[1577, "torch.nn.Unflatten"]], "unfold (class in torch.nn)": [[1578, "torch.nn.Unfold"]], "upsample (class in torch.nn)": [[1579, "torch.nn.Upsample"]], "upsamplingbilinear2d (class in torch.nn)": [[1580, "torch.nn.UpsamplingBilinear2d"]], "upsamplingnearest2d (class in torch.nn)": [[1581, "torch.nn.UpsamplingNearest2d"]], "zeropad1d (class in torch.nn)": [[1582, "torch.nn.ZeroPad1d"]], "zeropad2d (class in torch.nn)": [[1583, "torch.nn.ZeroPad2d"]], "zeropad3d (class in torch.nn)": [[1584, "torch.nn.ZeroPad3d"]], "sdpbackend (class in 
torch.nn.attention)": [[1585, "torch.nn.attention.SDPBackend"]], "name (torch.nn.attention.sdpbackend property)": [[1585, "torch.nn.attention.SDPBackend.name"]], "causalbias (class in torch.nn.attention.bias)": [[1586, "torch.nn.attention.bias.CausalBias"]], "causalvariant (class in torch.nn.attention.bias)": [[1587, "torch.nn.attention.bias.CausalVariant"]], "causal_lower_right() (in module torch.nn.attention.bias)": [[1588, "torch.nn.attention.bias.causal_lower_right"]], "causal_upper_left() (in module torch.nn.attention.bias)": [[1589, "torch.nn.attention.bias.causal_upper_left"]], "sdpa_kernel() (in module torch.nn.attention)": [[1590, "torch.nn.attention.sdpa_kernel"]], "adaptive_avg_pool1d() (in module torch.nn.functional)": [[1591, "torch.nn.functional.adaptive_avg_pool1d"]], "adaptive_avg_pool2d() (in module torch.nn.functional)": [[1592, "torch.nn.functional.adaptive_avg_pool2d"]], "adaptive_avg_pool3d() (in module torch.nn.functional)": [[1593, "torch.nn.functional.adaptive_avg_pool3d"]], "adaptive_max_pool1d() (in module torch.nn.functional)": [[1594, "torch.nn.functional.adaptive_max_pool1d"]], "adaptive_max_pool2d() (in module torch.nn.functional)": [[1595, "torch.nn.functional.adaptive_max_pool2d"]], "adaptive_max_pool3d() (in module torch.nn.functional)": [[1596, "torch.nn.functional.adaptive_max_pool3d"]], "affine_grid() (in module torch.nn.functional)": [[1597, "torch.nn.functional.affine_grid"]], "alpha_dropout() (in module torch.nn.functional)": [[1598, "torch.nn.functional.alpha_dropout"]], "avg_pool1d() (in module torch.nn.functional)": [[1599, "torch.nn.functional.avg_pool1d"]], "avg_pool2d() (in module torch.nn.functional)": [[1600, "torch.nn.functional.avg_pool2d"]], "avg_pool3d() (in module torch.nn.functional)": [[1601, "torch.nn.functional.avg_pool3d"]], "batch_norm() (in module torch.nn.functional)": [[1602, "torch.nn.functional.batch_norm"]], "bilinear() (in module torch.nn.functional)": [[1603, "torch.nn.functional.bilinear"]], "binary_cross_entropy() (in module torch.nn.functional)": [[1604, "torch.nn.functional.binary_cross_entropy"]], "binary_cross_entropy_with_logits() (in module torch.nn.functional)": [[1605, "torch.nn.functional.binary_cross_entropy_with_logits"]], "celu() (in module torch.nn.functional)": [[1606, "torch.nn.functional.celu"]], "conv1d() (in module torch.nn.functional)": [[1607, "torch.nn.functional.conv1d"]], "conv2d() (in module torch.nn.functional)": [[1608, "torch.nn.functional.conv2d"]], "conv3d() (in module torch.nn.functional)": [[1609, "torch.nn.functional.conv3d"]], "conv_transpose1d() (in module torch.nn.functional)": [[1610, "torch.nn.functional.conv_transpose1d"]], "conv_transpose2d() (in module torch.nn.functional)": [[1611, "torch.nn.functional.conv_transpose2d"]], "conv_transpose3d() (in module torch.nn.functional)": [[1612, "torch.nn.functional.conv_transpose3d"]], "cosine_embedding_loss() (in module torch.nn.functional)": [[1613, "torch.nn.functional.cosine_embedding_loss"]], "cosine_similarity() (in module torch.nn.functional)": [[1614, "torch.nn.functional.cosine_similarity"]], "cross_entropy() (in module torch.nn.functional)": [[1615, "torch.nn.functional.cross_entropy"]], "ctc_loss() (in module torch.nn.functional)": [[1616, "torch.nn.functional.ctc_loss"]], "dropout() (in module torch.nn.functional)": [[1617, "torch.nn.functional.dropout"]], "dropout1d() (in module torch.nn.functional)": [[1618, "torch.nn.functional.dropout1d"]], "dropout2d() (in module torch.nn.functional)": [[1619, 
"torch.nn.functional.dropout2d"]], "dropout3d() (in module torch.nn.functional)": [[1620, "torch.nn.functional.dropout3d"]], "elu() (in module torch.nn.functional)": [[1621, "torch.nn.functional.elu"]], "elu_() (in module torch.nn.functional)": [[1622, "torch.nn.functional.elu_"]], "embedding() (in module torch.nn.functional)": [[1623, "torch.nn.functional.embedding"]], "embedding_bag() (in module torch.nn.functional)": [[1624, "torch.nn.functional.embedding_bag"]], "feature_alpha_dropout() (in module torch.nn.functional)": [[1625, "torch.nn.functional.feature_alpha_dropout"]], "fold() (in module torch.nn.functional)": [[1626, "torch.nn.functional.fold"]], "fractional_max_pool2d() (in module torch.nn.functional)": [[1627, "torch.nn.functional.fractional_max_pool2d"]], "fractional_max_pool3d() (in module torch.nn.functional)": [[1628, "torch.nn.functional.fractional_max_pool3d"]], "gaussian_nll_loss() (in module torch.nn.functional)": [[1629, "torch.nn.functional.gaussian_nll_loss"]], "gelu() (in module torch.nn.functional)": [[1630, "torch.nn.functional.gelu"]], "glu() (in module torch.nn.functional)": [[1631, "torch.nn.functional.glu"]], "grid_sample() (in module torch.nn.functional)": [[1632, "torch.nn.functional.grid_sample"]], "group_norm() (in module torch.nn.functional)": [[1633, "torch.nn.functional.group_norm"]], "gumbel_softmax() (in module torch.nn.functional)": [[1634, "torch.nn.functional.gumbel_softmax"]], "hardshrink() (in module torch.nn.functional)": [[1635, "torch.nn.functional.hardshrink"]], "hardsigmoid() (in module torch.nn.functional)": [[1636, "torch.nn.functional.hardsigmoid"]], "hardswish() (in module torch.nn.functional)": [[1637, "torch.nn.functional.hardswish"]], "hardtanh() (in module torch.nn.functional)": [[1638, "torch.nn.functional.hardtanh"]], "hardtanh_() (in module torch.nn.functional)": [[1639, "torch.nn.functional.hardtanh_"]], "hinge_embedding_loss() (in module torch.nn.functional)": [[1640, "torch.nn.functional.hinge_embedding_loss"]], "huber_loss() (in module torch.nn.functional)": [[1641, "torch.nn.functional.huber_loss"]], "instance_norm() (in module torch.nn.functional)": [[1642, "torch.nn.functional.instance_norm"]], "interpolate() (in module torch.nn.functional)": [[1643, "torch.nn.functional.interpolate"]], "kl_div() (in module torch.nn.functional)": [[1644, "torch.nn.functional.kl_div"]], "l1_loss() (in module torch.nn.functional)": [[1645, "torch.nn.functional.l1_loss"]], "layer_norm() (in module torch.nn.functional)": [[1646, "torch.nn.functional.layer_norm"]], "leaky_relu() (in module torch.nn.functional)": [[1647, "torch.nn.functional.leaky_relu"]], "leaky_relu_() (in module torch.nn.functional)": [[1648, "torch.nn.functional.leaky_relu_"]], "linear() (in module torch.nn.functional)": [[1649, "torch.nn.functional.linear"]], "local_response_norm() (in module torch.nn.functional)": [[1650, "torch.nn.functional.local_response_norm"]], "log_softmax() (in module torch.nn.functional)": [[1651, "torch.nn.functional.log_softmax"]], "logsigmoid() (in module torch.nn.functional)": [[1652, "torch.nn.functional.logsigmoid"]], "lp_pool1d() (in module torch.nn.functional)": [[1653, "torch.nn.functional.lp_pool1d"]], "lp_pool2d() (in module torch.nn.functional)": [[1654, "torch.nn.functional.lp_pool2d"]], "lp_pool3d() (in module torch.nn.functional)": [[1655, "torch.nn.functional.lp_pool3d"]], "margin_ranking_loss() (in module torch.nn.functional)": [[1656, "torch.nn.functional.margin_ranking_loss"]], "max_pool1d() (in module torch.nn.functional)": 
[[1657, "torch.nn.functional.max_pool1d"]], "max_pool2d() (in module torch.nn.functional)": [[1658, "torch.nn.functional.max_pool2d"]], "max_pool3d() (in module torch.nn.functional)": [[1659, "torch.nn.functional.max_pool3d"]], "max_unpool1d() (in module torch.nn.functional)": [[1660, "torch.nn.functional.max_unpool1d"]], "max_unpool2d() (in module torch.nn.functional)": [[1661, "torch.nn.functional.max_unpool2d"]], "max_unpool3d() (in module torch.nn.functional)": [[1662, "torch.nn.functional.max_unpool3d"]], "mish() (in module torch.nn.functional)": [[1663, "torch.nn.functional.mish"]], "mse_loss() (in module torch.nn.functional)": [[1664, "torch.nn.functional.mse_loss"]], "multi_margin_loss() (in module torch.nn.functional)": [[1665, "torch.nn.functional.multi_margin_loss"]], "multilabel_margin_loss() (in module torch.nn.functional)": [[1666, "torch.nn.functional.multilabel_margin_loss"]], "multilabel_soft_margin_loss() (in module torch.nn.functional)": [[1667, "torch.nn.functional.multilabel_soft_margin_loss"]], "nll_loss() (in module torch.nn.functional)": [[1668, "torch.nn.functional.nll_loss"]], "normalize() (in module torch.nn.functional)": [[1669, "torch.nn.functional.normalize"]], "one_hot() (in module torch.nn.functional)": [[1670, "torch.nn.functional.one_hot"]], "pad() (in module torch.nn.functional)": [[1671, "torch.nn.functional.pad"]], "pairwise_distance() (in module torch.nn.functional)": [[1672, "torch.nn.functional.pairwise_distance"]], "pdist() (in module torch.nn.functional)": [[1673, "torch.nn.functional.pdist"]], "pixel_shuffle() (in module torch.nn.functional)": [[1674, "torch.nn.functional.pixel_shuffle"]], "pixel_unshuffle() (in module torch.nn.functional)": [[1675, "torch.nn.functional.pixel_unshuffle"]], "poisson_nll_loss() (in module torch.nn.functional)": [[1676, "torch.nn.functional.poisson_nll_loss"]], "prelu() (in module torch.nn.functional)": [[1677, "torch.nn.functional.prelu"]], "relu() (in module torch.nn.functional)": [[1678, "torch.nn.functional.relu"]], "relu6() (in module torch.nn.functional)": [[1679, "torch.nn.functional.relu6"]], "relu_() (in module torch.nn.functional)": [[1680, "torch.nn.functional.relu_"]], "rms_norm() (in module torch.nn.functional)": [[1681, "torch.nn.functional.rms_norm"]], "rrelu() (in module torch.nn.functional)": [[1682, "torch.nn.functional.rrelu"]], "rrelu_() (in module torch.nn.functional)": [[1683, "torch.nn.functional.rrelu_"]], "scaled_dot_product_attention() (in module torch.nn.functional)": [[1684, "torch.nn.functional.scaled_dot_product_attention"]], "selu() (in module torch.nn.functional)": [[1685, "torch.nn.functional.selu"]], "sigmoid() (in module torch.nn.functional)": [[1686, "torch.nn.functional.sigmoid"]], "silu() (in module torch.nn.functional)": [[1687, "torch.nn.functional.silu"]], "smooth_l1_loss() (in module torch.nn.functional)": [[1688, "torch.nn.functional.smooth_l1_loss"]], "soft_margin_loss() (in module torch.nn.functional)": [[1689, "torch.nn.functional.soft_margin_loss"]], "softmax() (in module torch.nn.functional)": [[1690, "torch.nn.functional.softmax"]], "softmin() (in module torch.nn.functional)": [[1691, "torch.nn.functional.softmin"]], "softplus() (in module torch.nn.functional)": [[1692, "torch.nn.functional.softplus"]], "softshrink() (in module torch.nn.functional)": [[1693, "torch.nn.functional.softshrink"]], "softsign() (in module torch.nn.functional)": [[1694, "torch.nn.functional.softsign"]], "tanh() (in module torch.nn.functional)": [[1695, "torch.nn.functional.tanh"]], 
"tanhshrink() (in module torch.nn.functional)": [[1696, "torch.nn.functional.tanhshrink"]], "threshold() (in module torch.nn.functional)": [[1697, "torch.nn.functional.threshold"]], "threshold_() (in module torch.nn.functional)": [[1698, "torch.nn.functional.threshold_"]], "data_parallel() (in module torch.nn.parallel)": [[1699, "torch.nn.parallel.data_parallel"]], "triplet_margin_loss() (in module torch.nn.functional)": [[1700, "torch.nn.functional.triplet_margin_loss"]], "triplet_margin_with_distance_loss() (in module torch.nn.functional)": [[1701, "torch.nn.functional.triplet_margin_with_distance_loss"]], "unfold() (in module torch.nn.functional)": [[1702, "torch.nn.functional.unfold"]], "upsample() (in module torch.nn.functional)": [[1703, "torch.nn.functional.upsample"]], "upsample_bilinear() (in module torch.nn.functional)": [[1704, "torch.nn.functional.upsample_bilinear"]], "upsample_nearest() (in module torch.nn.functional)": [[1705, "torch.nn.functional.upsample_nearest"]], "lazymodulemixin (class in torch.nn.modules.lazy)": [[1706, "torch.nn.modules.lazy.LazyModuleMixin"]], "has_uninitialized_params() (torch.nn.modules.lazy.lazymodulemixin method)": [[1706, "torch.nn.modules.lazy.LazyModuleMixin.has_uninitialized_params"]], "initialize_parameters() (torch.nn.modules.lazy.lazymodulemixin method)": [[1706, "torch.nn.modules.lazy.LazyModuleMixin.initialize_parameters"]], "register_module_backward_hook() (in module torch.nn.modules.module)": [[1707, "torch.nn.modules.module.register_module_backward_hook"]], "register_module_buffer_registration_hook() (in module torch.nn.modules.module)": [[1708, "torch.nn.modules.module.register_module_buffer_registration_hook"]], "register_module_forward_hook() (in module torch.nn.modules.module)": [[1709, "torch.nn.modules.module.register_module_forward_hook"]], "register_module_forward_pre_hook() (in module torch.nn.modules.module)": [[1710, "torch.nn.modules.module.register_module_forward_pre_hook"]], "register_module_full_backward_hook() (in module torch.nn.modules.module)": [[1711, "torch.nn.modules.module.register_module_full_backward_hook"]], "register_module_full_backward_pre_hook() (in module torch.nn.modules.module)": [[1712, "torch.nn.modules.module.register_module_full_backward_pre_hook"]], "register_module_module_registration_hook() (in module torch.nn.modules.module)": [[1713, "torch.nn.modules.module.register_module_module_registration_hook"]], "register_module_parameter_registration_hook() (in module torch.nn.modules.module)": [[1714, "torch.nn.modules.module.register_module_parameter_registration_hook"]], "rmsnorm (class in torch.nn.modules.normalization)": [[1715, "torch.nn.modules.normalization.RMSNorm"]], "extra_repr() (torch.nn.modules.normalization.rmsnorm method)": [[1715, "torch.nn.modules.normalization.RMSNorm.extra_repr"]], "forward() (torch.nn.modules.normalization.rmsnorm method)": [[1715, "torch.nn.modules.normalization.RMSNorm.forward"]], "reset_parameters() (torch.nn.modules.normalization.rmsnorm method)": [[1715, "torch.nn.modules.normalization.RMSNorm.reset_parameters"]], "distributeddataparallel (class in torch.nn.parallel)": [[1716, "torch.nn.parallel.DistributedDataParallel"]], "join() (torch.nn.parallel.distributeddataparallel method)": [[1716, "torch.nn.parallel.DistributedDataParallel.join"]], "join_hook() (torch.nn.parallel.distributeddataparallel method)": [[1716, "torch.nn.parallel.DistributedDataParallel.join_hook"]], "no_sync() (torch.nn.parallel.distributeddataparallel method)": [[1716, 
"torch.nn.parallel.DistributedDataParallel.no_sync"]], "register_comm_hook() (torch.nn.parallel.distributeddataparallel method)": [[1716, "torch.nn.parallel.DistributedDataParallel.register_comm_hook"]], "parameter (class in torch.nn.parameter)": [[1717, "torch.nn.parameter.Parameter"]], "uninitializedbuffer (class in torch.nn.parameter)": [[1718, "torch.nn.parameter.UninitializedBuffer"]], "uninitializedparameter (class in torch.nn.parameter)": [[1719, "torch.nn.parameter.UninitializedParameter"]], "cls_to_become (torch.nn.parameter.uninitializedparameter attribute)": [[1719, "torch.nn.parameter.UninitializedParameter.cls_to_become"]], "clip_grad_norm() (in module torch.nn.utils)": [[1720, "torch.nn.utils.clip_grad_norm"]], "clip_grad_norm_() (in module torch.nn.utils)": [[1721, "torch.nn.utils.clip_grad_norm_"]], "clip_grad_value_() (in module torch.nn.utils)": [[1722, "torch.nn.utils.clip_grad_value_"]], "convert_conv2d_weight_memory_format() (in module torch.nn.utils)": [[1723, "torch.nn.utils.convert_conv2d_weight_memory_format"]], "convert_conv3d_weight_memory_format() (in module torch.nn.utils)": [[1724, "torch.nn.utils.convert_conv3d_weight_memory_format"]], "fuse_conv_bn_eval() (in module torch.nn.utils)": [[1725, "torch.nn.utils.fuse_conv_bn_eval"]], "fuse_conv_bn_weights() (in module torch.nn.utils)": [[1726, "torch.nn.utils.fuse_conv_bn_weights"]], "fuse_linear_bn_eval() (in module torch.nn.utils)": [[1727, "torch.nn.utils.fuse_linear_bn_eval"]], "fuse_linear_bn_weights() (in module torch.nn.utils)": [[1728, "torch.nn.utils.fuse_linear_bn_weights"]], "parameters_to_vector() (in module torch.nn.utils)": [[1729, "torch.nn.utils.parameters_to_vector"]], "orthogonal() (in module torch.nn.utils.parametrizations)": [[1730, "torch.nn.utils.parametrizations.orthogonal"]], "spectral_norm() (in module torch.nn.utils.parametrizations)": [[1731, "torch.nn.utils.parametrizations.spectral_norm"]], "weight_norm() (in module torch.nn.utils.parametrizations)": [[1732, "torch.nn.utils.parametrizations.weight_norm"]], "parametrizationlist (class in torch.nn.utils.parametrize)": [[1733, "torch.nn.utils.parametrize.ParametrizationList"]], "right_inverse() (torch.nn.utils.parametrize.parametrizationlist method)": [[1733, "torch.nn.utils.parametrize.ParametrizationList.right_inverse"]], "cached() (in module torch.nn.utils.parametrize)": [[1734, "torch.nn.utils.parametrize.cached"]], "is_parametrized() (in module torch.nn.utils.parametrize)": [[1735, "torch.nn.utils.parametrize.is_parametrized"]], "register_parametrization() (in module torch.nn.utils.parametrize)": [[1736, "torch.nn.utils.parametrize.register_parametrization"]], "remove_parametrizations() (in module torch.nn.utils.parametrize)": [[1737, "torch.nn.utils.parametrize.remove_parametrizations"]], "basepruningmethod (class in torch.nn.utils.prune)": [[1738, "torch.nn.utils.prune.BasePruningMethod"]], "apply() (torch.nn.utils.prune.basepruningmethod class method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.apply"]], "apply_mask() (torch.nn.utils.prune.basepruningmethod method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.apply_mask"]], "compute_mask() (torch.nn.utils.prune.basepruningmethod method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.compute_mask"]], "prune() (torch.nn.utils.prune.basepruningmethod method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.prune"]], "remove() (torch.nn.utils.prune.basepruningmethod method)": [[1738, "torch.nn.utils.prune.BasePruningMethod.remove"]], "customfrommask (class in 
torch.nn.utils.prune)": [[1739, "torch.nn.utils.prune.CustomFromMask"]], "apply() (torch.nn.utils.prune.customfrommask class method)": [[1739, "torch.nn.utils.prune.CustomFromMask.apply"]], "apply_mask() (torch.nn.utils.prune.customfrommask method)": [[1739, "torch.nn.utils.prune.CustomFromMask.apply_mask"]], "prune() (torch.nn.utils.prune.customfrommask method)": [[1739, "torch.nn.utils.prune.CustomFromMask.prune"]], "remove() (torch.nn.utils.prune.customfrommask method)": [[1739, "torch.nn.utils.prune.CustomFromMask.remove"]], "identity (class in torch.nn.utils.prune)": [[1740, "torch.nn.utils.prune.Identity"]], "apply() (torch.nn.utils.prune.identity class method)": [[1740, "torch.nn.utils.prune.Identity.apply"]], "apply_mask() (torch.nn.utils.prune.identity method)": [[1740, "torch.nn.utils.prune.Identity.apply_mask"]], "prune() (torch.nn.utils.prune.identity method)": [[1740, "torch.nn.utils.prune.Identity.prune"]], "remove() (torch.nn.utils.prune.identity method)": [[1740, "torch.nn.utils.prune.Identity.remove"]], "l1unstructured (class in torch.nn.utils.prune)": [[1741, "torch.nn.utils.prune.L1Unstructured"]], "apply() (torch.nn.utils.prune.l1unstructured class method)": [[1741, "torch.nn.utils.prune.L1Unstructured.apply"]], "apply_mask() (torch.nn.utils.prune.l1unstructured method)": [[1741, "torch.nn.utils.prune.L1Unstructured.apply_mask"]], "prune() (torch.nn.utils.prune.l1unstructured method)": [[1741, "torch.nn.utils.prune.L1Unstructured.prune"]], "remove() (torch.nn.utils.prune.l1unstructured method)": [[1741, "torch.nn.utils.prune.L1Unstructured.remove"]], "lnstructured (class in torch.nn.utils.prune)": [[1742, "torch.nn.utils.prune.LnStructured"]], "apply() (torch.nn.utils.prune.lnstructured class method)": [[1742, "torch.nn.utils.prune.LnStructured.apply"]], "apply_mask() (torch.nn.utils.prune.lnstructured method)": [[1742, "torch.nn.utils.prune.LnStructured.apply_mask"]], "compute_mask() (torch.nn.utils.prune.lnstructured method)": [[1742, "torch.nn.utils.prune.LnStructured.compute_mask"]], "prune() (torch.nn.utils.prune.lnstructured method)": [[1742, "torch.nn.utils.prune.LnStructured.prune"]], "remove() (torch.nn.utils.prune.lnstructured method)": [[1742, "torch.nn.utils.prune.LnStructured.remove"]], "pruningcontainer (class in torch.nn.utils.prune)": [[1743, "torch.nn.utils.prune.PruningContainer"]], "add_pruning_method() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.add_pruning_method"]], "apply() (torch.nn.utils.prune.pruningcontainer class method)": [[1743, "torch.nn.utils.prune.PruningContainer.apply"]], "apply_mask() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.apply_mask"]], "compute_mask() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.compute_mask"]], "prune() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.prune"]], "remove() (torch.nn.utils.prune.pruningcontainer method)": [[1743, "torch.nn.utils.prune.PruningContainer.remove"]], "randomstructured (class in torch.nn.utils.prune)": [[1744, "torch.nn.utils.prune.RandomStructured"]], "apply() (torch.nn.utils.prune.randomstructured class method)": [[1744, "torch.nn.utils.prune.RandomStructured.apply"]], "apply_mask() (torch.nn.utils.prune.randomstructured method)": [[1744, "torch.nn.utils.prune.RandomStructured.apply_mask"]], "compute_mask() (torch.nn.utils.prune.randomstructured method)": [[1744, 
"torch.nn.utils.prune.RandomStructured.compute_mask"]], "prune() (torch.nn.utils.prune.randomstructured method)": [[1744, "torch.nn.utils.prune.RandomStructured.prune"]], "remove() (torch.nn.utils.prune.randomstructured method)": [[1744, "torch.nn.utils.prune.RandomStructured.remove"]], "randomunstructured (class in torch.nn.utils.prune)": [[1745, "torch.nn.utils.prune.RandomUnstructured"]], "apply() (torch.nn.utils.prune.randomunstructured class method)": [[1745, "torch.nn.utils.prune.RandomUnstructured.apply"]], "apply_mask() (torch.nn.utils.prune.randomunstructured method)": [[1745, "torch.nn.utils.prune.RandomUnstructured.apply_mask"]], "prune() (torch.nn.utils.prune.randomunstructured method)": [[1745, "torch.nn.utils.prune.RandomUnstructured.prune"]], "remove() (torch.nn.utils.prune.randomunstructured method)": [[1745, "torch.nn.utils.prune.RandomUnstructured.remove"]], "custom_from_mask() (in module torch.nn.utils.prune)": [[1746, "torch.nn.utils.prune.custom_from_mask"]], "global_unstructured() (in module torch.nn.utils.prune)": [[1747, "torch.nn.utils.prune.global_unstructured"]], "identity() (in module torch.nn.utils.prune)": [[1748, "torch.nn.utils.prune.identity"]], "is_pruned() (in module torch.nn.utils.prune)": [[1749, "torch.nn.utils.prune.is_pruned"]], "l1_unstructured() (in module torch.nn.utils.prune)": [[1750, "torch.nn.utils.prune.l1_unstructured"]], "ln_structured() (in module torch.nn.utils.prune)": [[1751, "torch.nn.utils.prune.ln_structured"]], "random_structured() (in module torch.nn.utils.prune)": [[1752, "torch.nn.utils.prune.random_structured"]], "random_unstructured() (in module torch.nn.utils.prune)": [[1753, "torch.nn.utils.prune.random_unstructured"]], "remove() (in module torch.nn.utils.prune)": [[1754, "torch.nn.utils.prune.remove"]], "remove_spectral_norm() (in module torch.nn.utils)": [[1755, "torch.nn.utils.remove_spectral_norm"]], "remove_weight_norm() (in module torch.nn.utils)": [[1756, "torch.nn.utils.remove_weight_norm"]], "packedsequence (class in torch.nn.utils.rnn)": [[1757, "torch.nn.utils.rnn.PackedSequence"]], "batch_sizes (torch.nn.utils.rnn.packedsequence attribute)": [[1757, "torch.nn.utils.rnn.PackedSequence.batch_sizes"]], "count() (torch.nn.utils.rnn.packedsequence method)": [[1757, "torch.nn.utils.rnn.PackedSequence.count"]], "data (torch.nn.utils.rnn.packedsequence attribute)": [[1757, "torch.nn.utils.rnn.PackedSequence.data"]], "index() (torch.nn.utils.rnn.packedsequence method)": [[1757, "torch.nn.utils.rnn.PackedSequence.index"]], "is_cuda (torch.nn.utils.rnn.packedsequence property)": [[1757, "torch.nn.utils.rnn.PackedSequence.is_cuda"]], "is_pinned() (torch.nn.utils.rnn.packedsequence method)": [[1757, "torch.nn.utils.rnn.PackedSequence.is_pinned"]], "sorted_indices (torch.nn.utils.rnn.packedsequence attribute)": [[1757, "torch.nn.utils.rnn.PackedSequence.sorted_indices"]], "to() (torch.nn.utils.rnn.packedsequence method)": [[1757, "torch.nn.utils.rnn.PackedSequence.to"]], "unsorted_indices (torch.nn.utils.rnn.packedsequence attribute)": [[1757, "torch.nn.utils.rnn.PackedSequence.unsorted_indices"]], "pack_padded_sequence() (in module torch.nn.utils.rnn)": [[1758, "torch.nn.utils.rnn.pack_padded_sequence"]], "pack_sequence() (in module torch.nn.utils.rnn)": [[1759, "torch.nn.utils.rnn.pack_sequence"]], "pad_packed_sequence() (in module torch.nn.utils.rnn)": [[1760, "torch.nn.utils.rnn.pad_packed_sequence"]], "pad_sequence() (in module torch.nn.utils.rnn)": [[1761, "torch.nn.utils.rnn.pad_sequence"]], "unpack_sequence() (in module 
torch.nn.utils.rnn)": [[1762, "torch.nn.utils.rnn.unpack_sequence"]], "unpad_sequence() (in module torch.nn.utils.rnn)": [[1763, "torch.nn.utils.rnn.unpad_sequence"]], "skip_init() (in module torch.nn.utils)": [[1764, "torch.nn.utils.skip_init"]], "spectral_norm() (in module torch.nn.utils)": [[1765, "torch.nn.utils.spectral_norm"]], "functional_call() (in module torch.nn.utils.stateless)": [[1766, "torch.nn.utils.stateless.functional_call"]], "vector_to_parameters() (in module torch.nn.utils)": [[1767, "torch.nn.utils.vector_to_parameters"]], "weight_norm() (in module torch.nn.utils)": [[1768, "torch.nn.utils.weight_norm"]], "no_grad (class in torch)": [[1769, "torch.no_grad"]], "nonzero() (in module torch)": [[1770, "torch.nonzero"]], "norm() (in module torch)": [[1771, "torch.norm"]], "normal() (in module torch)": [[1772, "torch.normal"]], "not_equal() (in module torch)": [[1773, "torch.not_equal"]], "numel() (in module torch)": [[1774, "torch.numel"]], "ones() (in module torch)": [[1775, "torch.ones"]], "ones_like() (in module torch)": [[1776, "torch.ones_like"]], "jitscalartype (class in torch.onnx)": [[1777, "torch.onnx.JitScalarType"]], "dtype() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.dtype"]], "from_dtype() (torch.onnx.jitscalartype class method)": [[1777, "torch.onnx.JitScalarType.from_dtype"]], "from_onnx_type() (torch.onnx.jitscalartype class method)": [[1777, "torch.onnx.JitScalarType.from_onnx_type"]], "from_value() (torch.onnx.jitscalartype class method)": [[1777, "torch.onnx.JitScalarType.from_value"]], "onnx_compatible() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.onnx_compatible"]], "onnx_type() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.onnx_type"]], "scalar_name() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.scalar_name"]], "torch_name() (torch.onnx.jitscalartype method)": [[1777, "torch.onnx.JitScalarType.torch_name"]], "graphinfo (class in torch.onnx.verification)": [[1778, "torch.onnx.verification.GraphInfo"]], "all_mismatch_leaf_graph_info() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.all_mismatch_leaf_graph_info"]], "clear() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.clear"]], "essential_node_count() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.essential_node_count"]], "essential_node_kinds() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.essential_node_kinds"]], "export_repro() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.export_repro"]], "find_mismatch() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.find_mismatch"]], "find_partition() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.find_partition"]], "has_mismatch() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.has_mismatch"]], "pretty_print_mismatch() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.pretty_print_mismatch"]], "pretty_print_tree() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.pretty_print_tree"]], "verify_export() (torch.onnx.verification.graphinfo method)": [[1778, "torch.onnx.verification.GraphInfo.verify_export"]], "verificationoptions (class in torch.onnx.verification)": 
[[1779, "torch.onnx.verification.VerificationOptions"]], "asgd (class in torch.optim)": [[1780, "torch.optim.ASGD"]], "add_param_group() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.add_param_group"]], "load_state_dict() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.register_step_pre_hook"]], "state_dict() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.state_dict"]], "step() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.step"]], "zero_grad() (torch.optim.asgd method)": [[1780, "torch.optim.ASGD.zero_grad"]], "adadelta (class in torch.optim)": [[1781, "torch.optim.Adadelta"]], "add_param_group() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.add_param_group"]], "load_state_dict() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.register_step_pre_hook"]], "state_dict() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.state_dict"]], "step() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.step"]], "zero_grad() (torch.optim.adadelta method)": [[1781, "torch.optim.Adadelta.zero_grad"]], "adagrad (class in torch.optim)": [[1782, "torch.optim.Adagrad"]], "add_param_group() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.add_param_group"]], "load_state_dict() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adagrad method)": [[1782, 
"torch.optim.Adagrad.register_step_pre_hook"]], "state_dict() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.state_dict"]], "step() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.step"]], "zero_grad() (torch.optim.adagrad method)": [[1782, "torch.optim.Adagrad.zero_grad"]], "adam (class in torch.optim)": [[1783, "torch.optim.Adam"]], "add_param_group() (torch.optim.adam method)": [[1783, "torch.optim.Adam.add_param_group"]], "load_state_dict() (torch.optim.adam method)": [[1783, "torch.optim.Adam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adam method)": [[1783, "torch.optim.Adam.register_step_pre_hook"]], "state_dict() (torch.optim.adam method)": [[1783, "torch.optim.Adam.state_dict"]], "step() (torch.optim.adam method)": [[1783, "torch.optim.Adam.step"]], "zero_grad() (torch.optim.adam method)": [[1783, "torch.optim.Adam.zero_grad"]], "adamw (class in torch.optim)": [[1784, "torch.optim.AdamW"]], "add_param_group() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.add_param_group"]], "load_state_dict() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.register_step_pre_hook"]], "state_dict() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.state_dict"]], "step() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.step"]], "zero_grad() (torch.optim.adamw method)": [[1784, "torch.optim.AdamW.zero_grad"]], "adamax (class in torch.optim)": [[1785, "torch.optim.Adamax"]], "add_param_group() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.add_param_group"]], "load_state_dict() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adamax method)": 
[[1785, "torch.optim.Adamax.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.register_step_pre_hook"]], "state_dict() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.state_dict"]], "step() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.step"]], "zero_grad() (torch.optim.adamax method)": [[1785, "torch.optim.Adamax.zero_grad"]], "lbfgs (class in torch.optim)": [[1786, "torch.optim.LBFGS"]], "add_param_group() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.add_param_group"]], "load_state_dict() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.register_step_pre_hook"]], "state_dict() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.state_dict"]], "step() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.step"]], "zero_grad() (torch.optim.lbfgs method)": [[1786, "torch.optim.LBFGS.zero_grad"]], "nadam (class in torch.optim)": [[1787, "torch.optim.NAdam"]], "add_param_group() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.add_param_group"]], "load_state_dict() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.register_step_pre_hook"]], "state_dict() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.state_dict"]], "step() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.step"]], "zero_grad() (torch.optim.nadam method)": [[1787, "torch.optim.NAdam.zero_grad"]], "add_param_group() (torch.optim.optimizer method)": [[1788, "torch.optim.Optimizer.add_param_group"]], "load_state_dict() (torch.optim.optimizer method)": [[1789, "torch.optim.Optimizer.load_state_dict"]], "state_dict() (torch.optim.optimizer method)": [[1790, "torch.optim.Optimizer.state_dict"]], "step() (torch.optim.optimizer method)": [[1791, "torch.optim.Optimizer.step"]], "zero_grad() (torch.optim.optimizer method)": [[1792, "torch.optim.Optimizer.zero_grad"]], "radam (class in torch.optim)": [[1793, "torch.optim.RAdam"]], "add_param_group() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.add_param_group"]], "load_state_dict() (torch.optim.radam method)": [[1793, 
"torch.optim.RAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.register_step_pre_hook"]], "state_dict() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.state_dict"]], "step() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.step"]], "zero_grad() (torch.optim.radam method)": [[1793, "torch.optim.RAdam.zero_grad"]], "rmsprop (class in torch.optim)": [[1794, "torch.optim.RMSprop"]], "add_param_group() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.add_param_group"]], "load_state_dict() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.register_step_pre_hook"]], "state_dict() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.state_dict"]], "step() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.step"]], "zero_grad() (torch.optim.rmsprop method)": [[1794, "torch.optim.RMSprop.zero_grad"]], "rprop (class in torch.optim)": [[1795, "torch.optim.Rprop"]], "add_param_group() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.add_param_group"]], "load_state_dict() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.register_step_pre_hook"]], "state_dict() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.state_dict"]], "step() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.step"]], "zero_grad() (torch.optim.rprop method)": [[1795, "torch.optim.Rprop.zero_grad"]], "sgd (class in torch.optim)": [[1796, 
"torch.optim.SGD"]], "add_param_group() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.add_param_group"]], "load_state_dict() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.register_step_pre_hook"]], "state_dict() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.state_dict"]], "step() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.step"]], "zero_grad() (torch.optim.sgd method)": [[1796, "torch.optim.SGD.zero_grad"]], "sparseadam (class in torch.optim)": [[1797, "torch.optim.SparseAdam"]], "add_param_group() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.add_param_group"]], "load_state_dict() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.register_step_pre_hook"]], "state_dict() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.state_dict"]], "step() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.step"]], "zero_grad() (torch.optim.sparseadam method)": [[1797, "torch.optim.SparseAdam.zero_grad"]], "chainedscheduler (class in torch.optim.lr_scheduler)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler"]], "get_last_lr() (torch.optim.lr_scheduler.chainedscheduler method)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.chainedscheduler method)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.chainedscheduler method)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler.print_lr"]], "state_dict() (torch.optim.lr_scheduler.chainedscheduler method)": [[1798, "torch.optim.lr_scheduler.ChainedScheduler.state_dict"]], "constantlr (class in torch.optim.lr_scheduler)": [[1799, "torch.optim.lr_scheduler.ConstantLR"]], "get_last_lr() (torch.optim.lr_scheduler.constantlr method)": [[1799, "torch.optim.lr_scheduler.ConstantLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.constantlr method)": [[1799, "torch.optim.lr_scheduler.ConstantLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.constantlr method)": [[1799, 
"torch.optim.lr_scheduler.ConstantLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.constantlr method)": [[1799, "torch.optim.lr_scheduler.ConstantLR.state_dict"]], "cosineannealinglr (class in torch.optim.lr_scheduler)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR"]], "get_last_lr() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1800, "torch.optim.lr_scheduler.CosineAnnealingLR.state_dict"]], "cosineannealingwarmrestarts (class in torch.optim.lr_scheduler)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts"]], "get_last_lr() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.print_lr"]], "state_dict() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.state_dict"]], "step() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.step"]], "cycliclr (class in torch.optim.lr_scheduler)": [[1802, "torch.optim.lr_scheduler.CyclicLR"]], "get_last_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1802, "torch.optim.lr_scheduler.CyclicLR.get_last_lr"]], "get_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1802, "torch.optim.lr_scheduler.CyclicLR.get_lr"]], "print_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1802, "torch.optim.lr_scheduler.CyclicLR.print_lr"]], "exponentiallr (class in torch.optim.lr_scheduler)": [[1803, "torch.optim.lr_scheduler.ExponentialLR"]], "get_last_lr() (torch.optim.lr_scheduler.exponentiallr method)": [[1803, "torch.optim.lr_scheduler.ExponentialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.exponentiallr method)": [[1803, "torch.optim.lr_scheduler.ExponentialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.exponentiallr method)": [[1803, "torch.optim.lr_scheduler.ExponentialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.exponentiallr method)": [[1803, "torch.optim.lr_scheduler.ExponentialLR.state_dict"]], "lambdalr (class in torch.optim.lr_scheduler)": [[1804, "torch.optim.lr_scheduler.LambdaLR"]], "get_last_lr() (torch.optim.lr_scheduler.lambdalr method)": [[1804, "torch.optim.lr_scheduler.LambdaLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.lambdalr method)": [[1804, "torch.optim.lr_scheduler.LambdaLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.lambdalr method)": [[1804, "torch.optim.lr_scheduler.LambdaLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.lambdalr method)": [[1804, "torch.optim.lr_scheduler.LambdaLR.state_dict"]], "linearlr (class in torch.optim.lr_scheduler)": [[1805, "torch.optim.lr_scheduler.LinearLR"]], "get_last_lr() (torch.optim.lr_scheduler.linearlr method)": [[1805, 
"torch.optim.lr_scheduler.LinearLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.linearlr method)": [[1805, "torch.optim.lr_scheduler.LinearLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.linearlr method)": [[1805, "torch.optim.lr_scheduler.LinearLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.linearlr method)": [[1805, "torch.optim.lr_scheduler.LinearLR.state_dict"]], "multisteplr (class in torch.optim.lr_scheduler)": [[1806, "torch.optim.lr_scheduler.MultiStepLR"]], "get_last_lr() (torch.optim.lr_scheduler.multisteplr method)": [[1806, "torch.optim.lr_scheduler.MultiStepLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.multisteplr method)": [[1806, "torch.optim.lr_scheduler.MultiStepLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.multisteplr method)": [[1806, "torch.optim.lr_scheduler.MultiStepLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.multisteplr method)": [[1806, "torch.optim.lr_scheduler.MultiStepLR.state_dict"]], "multiplicativelr (class in torch.optim.lr_scheduler)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR"]], "get_last_lr() (torch.optim.lr_scheduler.multiplicativelr method)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.multiplicativelr method)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.multiplicativelr method)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.multiplicativelr method)": [[1807, "torch.optim.lr_scheduler.MultiplicativeLR.state_dict"]], "onecyclelr (class in torch.optim.lr_scheduler)": [[1808, "torch.optim.lr_scheduler.OneCycleLR"]], "get_last_lr() (torch.optim.lr_scheduler.onecyclelr method)": [[1808, "torch.optim.lr_scheduler.OneCycleLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.onecyclelr method)": [[1808, "torch.optim.lr_scheduler.OneCycleLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.onecyclelr method)": [[1808, "torch.optim.lr_scheduler.OneCycleLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.onecyclelr method)": [[1808, "torch.optim.lr_scheduler.OneCycleLR.state_dict"]], "polynomiallr (class in torch.optim.lr_scheduler)": [[1809, "torch.optim.lr_scheduler.PolynomialLR"]], "get_last_lr() (torch.optim.lr_scheduler.polynomiallr method)": [[1809, "torch.optim.lr_scheduler.PolynomialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.polynomiallr method)": [[1809, "torch.optim.lr_scheduler.PolynomialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.polynomiallr method)": [[1809, "torch.optim.lr_scheduler.PolynomialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.polynomiallr method)": [[1809, "torch.optim.lr_scheduler.PolynomialLR.state_dict"]], "reducelronplateau (class in torch.optim.lr_scheduler)": [[1810, "torch.optim.lr_scheduler.ReduceLROnPlateau"]], "get_last_lr() (torch.optim.lr_scheduler.reducelronplateau method)": [[1810, "torch.optim.lr_scheduler.ReduceLROnPlateau.get_last_lr"]], "print_lr() (torch.optim.lr_scheduler.reducelronplateau method)": [[1810, "torch.optim.lr_scheduler.ReduceLROnPlateau.print_lr"]], "sequentiallr (class in torch.optim.lr_scheduler)": [[1811, "torch.optim.lr_scheduler.SequentialLR"]], "get_last_lr() (torch.optim.lr_scheduler.sequentiallr method)": [[1811, "torch.optim.lr_scheduler.SequentialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.sequentiallr method)": [[1811, 
"torch.optim.lr_scheduler.SequentialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.sequentiallr method)": [[1811, "torch.optim.lr_scheduler.SequentialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.sequentiallr method)": [[1811, "torch.optim.lr_scheduler.SequentialLR.state_dict"]], "steplr (class in torch.optim.lr_scheduler)": [[1812, "torch.optim.lr_scheduler.StepLR"]], "get_last_lr() (torch.optim.lr_scheduler.steplr method)": [[1812, "torch.optim.lr_scheduler.StepLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.steplr method)": [[1812, "torch.optim.lr_scheduler.StepLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.steplr method)": [[1812, "torch.optim.lr_scheduler.StepLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.steplr method)": [[1812, "torch.optim.lr_scheduler.StepLR.state_dict"]], "orgqr() (in module torch)": [[1813, "torch.orgqr"]], "ormqr() (in module torch)": [[1814, "torch.ormqr"]], "outer() (in module torch)": [[1815, "torch.outer"]], "pca_lowrank() (in module torch)": [[1816, "torch.pca_lowrank"]], "permute() (in module torch)": [[1817, "torch.permute"]], "pinverse() (in module torch)": [[1818, "torch.pinverse"]], "poisson() (in module torch)": [[1819, "torch.poisson"]], "polar() (in module torch)": [[1820, "torch.polar"]], "polygamma() (in module torch)": [[1821, "torch.polygamma"]], "positive() (in module torch)": [[1822, "torch.positive"]], "pow() (in module torch)": [[1823, "torch.pow"]], "prod() (in module torch)": [[1824, "torch.prod"]], "promote_types() (in module torch)": [[1825, "torch.promote_types"]], "qr() (in module torch)": [[1826, "torch.qr"]], "quantile() (in module torch)": [[1827, "torch.quantile"]], "quantize_per_channel() (in module torch)": [[1828, "torch.quantize_per_channel"]], "quantize_per_tensor() (in module torch)": [[1829, "torch.quantize_per_tensor"]], "quantized_batch_norm() (in module torch)": [[1830, "torch.quantized_batch_norm"]], "quantized_max_pool1d() (in module torch)": [[1831, "torch.quantized_max_pool1d"]], "quantized_max_pool2d() (in module torch)": [[1832, "torch.quantized_max_pool2d"]], "sobolengine (class in torch.quasirandom)": [[1833, "torch.quasirandom.SobolEngine"]], "draw() (torch.quasirandom.sobolengine method)": [[1833, "torch.quasirandom.SobolEngine.draw"]], "draw_base2() (torch.quasirandom.sobolengine method)": [[1833, "torch.quasirandom.SobolEngine.draw_base2"]], "fast_forward() (torch.quasirandom.sobolengine method)": [[1833, "torch.quasirandom.SobolEngine.fast_forward"]], "reset() (torch.quasirandom.sobolengine method)": [[1833, "torch.quasirandom.SobolEngine.reset"]], "rad2deg() (in module torch)": [[1834, "torch.rad2deg"]], "rand() (in module torch)": [[1835, "torch.rand"]], "rand_like() (in module torch)": [[1836, "torch.rand_like"]], "randint() (in module torch)": [[1837, "torch.randint"]], "randint_like() (in module torch)": [[1838, "torch.randint_like"]], "randn() (in module torch)": [[1839, "torch.randn"]], "randn_like() (in module torch)": [[1840, "torch.randn_like"]], "randperm() (in module torch)": [[1841, "torch.randperm"]], "range() (in module torch)": [[1842, "torch.range"]], "ravel() (in module torch)": [[1843, "torch.ravel"]], "real() (in module torch)": [[1844, "torch.real"]], "reciprocal() (in module torch)": [[1845, "torch.reciprocal"]], "remainder() (in module torch)": [[1846, "torch.remainder"]], "renorm() (in module torch)": [[1847, "torch.renorm"]], "repeat_interleave() (in module torch)": [[1848, "torch.repeat_interleave"]], "reshape() (in 
module torch)": [[1849, "torch.reshape"]], "resolve_conj() (in module torch)": [[1850, "torch.resolve_conj"]], "resolve_neg() (in module torch)": [[1851, "torch.resolve_neg"]], "result_type() (in module torch)": [[1852, "torch.result_type"]], "roll() (in module torch)": [[1853, "torch.roll"]], "rot90() (in module torch)": [[1854, "torch.rot90"]], "round() (in module torch)": [[1855, "torch.round"]], "row_stack() (in module torch)": [[1856, "torch.row_stack"]], "rsqrt() (in module torch)": [[1857, "torch.rsqrt"]], "save() (in module torch)": [[1858, "torch.save"]], "scatter() (in module torch)": [[1859, "torch.scatter"]], "scatter_add() (in module torch)": [[1860, "torch.scatter_add"]], "scatter_reduce() (in module torch)": [[1861, "torch.scatter_reduce"]], "searchsorted() (in module torch)": [[1862, "torch.searchsorted"]], "seed() (in module torch)": [[1863, "torch.seed"]], "select() (in module torch)": [[1864, "torch.select"]], "select_scatter() (in module torch)": [[1865, "torch.select_scatter"]], "set_default_device() (in module torch)": [[1866, "torch.set_default_device"]], "set_default_dtype() (in module torch)": [[1867, "torch.set_default_dtype"]], "set_default_tensor_type() (in module torch)": [[1868, "torch.set_default_tensor_type"]], "set_deterministic_debug_mode() (in module torch)": [[1869, "torch.set_deterministic_debug_mode"]], "set_float32_matmul_precision() (in module torch)": [[1870, "torch.set_float32_matmul_precision"]], "set_flush_denormal() (in module torch)": [[1871, "torch.set_flush_denormal"]], "set_num_interop_threads() (in module torch)": [[1872, "torch.set_num_interop_threads"]], "set_num_threads() (in module torch)": [[1873, "torch.set_num_threads"]], "set_printoptions() (in module torch)": [[1874, "torch.set_printoptions"]], "set_rng_state() (in module torch)": [[1875, "torch.set_rng_state"]], "set_warn_always() (in module torch)": [[1876, "torch.set_warn_always"]], "sgn() (in module torch)": [[1877, "torch.sgn"]], "sigmoid() (in module torch)": [[1878, "torch.sigmoid"]], "sign() (in module torch)": [[1879, "torch.sign"]], "bartlett() (in module torch.signal.windows)": [[1880, "torch.signal.windows.bartlett"]], "blackman() (in module torch.signal.windows)": [[1881, "torch.signal.windows.blackman"]], "cosine() (in module torch.signal.windows)": [[1882, "torch.signal.windows.cosine"]], "exponential() (in module torch.signal.windows)": [[1883, "torch.signal.windows.exponential"]], "gaussian() (in module torch.signal.windows)": [[1884, "torch.signal.windows.gaussian"]], "general_cosine() (in module torch.signal.windows)": [[1885, "torch.signal.windows.general_cosine"]], "general_hamming() (in module torch.signal.windows)": [[1886, "torch.signal.windows.general_hamming"]], "hamming() (in module torch.signal.windows)": [[1887, "torch.signal.windows.hamming"]], "hann() (in module torch.signal.windows)": [[1888, "torch.signal.windows.hann"]], "kaiser() (in module torch.signal.windows)": [[1889, "torch.signal.windows.kaiser"]], "nuttall() (in module torch.signal.windows)": [[1890, "torch.signal.windows.nuttall"]], "signbit() (in module torch)": [[1891, "torch.signbit"]], "sin() (in module torch)": [[1892, "torch.sin"]], "sinc() (in module torch)": [[1893, "torch.sinc"]], "sinh() (in module torch)": [[1894, "torch.sinh"]], "slice_scatter() (in module torch)": [[1895, "torch.slice_scatter"]], "slogdet() (in module torch)": [[1896, "torch.slogdet"]], "smm() (in module torch)": [[1897, "torch.smm"]], "softmax() (in module torch)": [[1898, "torch.softmax"]], "sort() (in 
module torch)": [[1899, "torch.sort"]], "addmm() (in module torch.sparse)": [[1900, "torch.sparse.addmm"]], "as_sparse_gradcheck() (in module torch.sparse)": [[1901, "torch.sparse.as_sparse_gradcheck"]], "check_sparse_tensor_invariants (class in torch.sparse)": [[1902, "torch.sparse.check_sparse_tensor_invariants"]], "disable() (torch.sparse.check_sparse_tensor_invariants static method)": [[1902, "torch.sparse.check_sparse_tensor_invariants.disable"]], "enable() (torch.sparse.check_sparse_tensor_invariants static method)": [[1902, "torch.sparse.check_sparse_tensor_invariants.enable"]], "is_enabled() (torch.sparse.check_sparse_tensor_invariants static method)": [[1902, "torch.sparse.check_sparse_tensor_invariants.is_enabled"]], "log_softmax() (in module torch.sparse)": [[1903, "torch.sparse.log_softmax"]], "mm() (in module torch.sparse)": [[1904, "torch.sparse.mm"]], "sampled_addmm() (in module torch.sparse)": [[1905, "torch.sparse.sampled_addmm"]], "softmax() (in module torch.sparse)": [[1906, "torch.sparse.softmax"]], "spdiags() (in module torch.sparse)": [[1907, "torch.sparse.spdiags"]], "sum() (in module torch.sparse)": [[1908, "torch.sparse.sum"]], "sparse_bsc_tensor() (in module torch)": [[1909, "torch.sparse_bsc_tensor"]], "sparse_bsr_tensor() (in module torch)": [[1910, "torch.sparse_bsr_tensor"]], "sparse_compressed_tensor() (in module torch)": [[1911, "torch.sparse_compressed_tensor"]], "sparse_coo_tensor() (in module torch)": [[1912, "torch.sparse_coo_tensor"]], "sparse_csc_tensor() (in module torch)": [[1913, "torch.sparse_csc_tensor"]], "sparse_csr_tensor() (in module torch)": [[1914, "torch.sparse_csr_tensor"]], "split() (in module torch)": [[1915, "torch.split"]], "sqrt() (in module torch)": [[1916, "torch.sqrt"]], "square() (in module torch)": [[1917, "torch.square"]], "squeeze() (in module torch)": [[1918, "torch.squeeze"]], "sspaddmm() (in module torch)": [[1919, "torch.sspaddmm"]], "stack() (in module torch)": [[1920, "torch.stack"]], "std() (in module torch)": [[1921, "torch.std"]], "std_mean() (in module torch)": [[1922, "torch.std_mean"]], "stft() (in module torch)": [[1923, "torch.stft"]], "sub() (in module torch)": [[1924, "torch.sub"]], "subtract() (in module torch)": [[1925, "torch.subtract"]], "sum() (in module torch)": [[1926, "torch.sum"]], "svd() (in module torch)": [[1927, "torch.svd"]], "svd_lowrank() (in module torch)": [[1928, "torch.svd_lowrank"]], "swapaxes() (in module torch)": [[1929, "torch.swapaxes"]], "swapdims() (in module torch)": [[1930, "torch.swapdims"]], "sym_float() (in module torch)": [[1931, "torch.sym_float"]], "sym_int() (in module torch)": [[1932, "torch.sym_int"]], "sym_ite() (in module torch)": [[1933, "torch.sym_ite"]], "sym_max() (in module torch)": [[1934, "torch.sym_max"]], "sym_min() (in module torch)": [[1935, "torch.sym_min"]], "sym_not() (in module torch)": [[1936, "torch.sym_not"]], "t() (in module torch)": [[1937, "torch.t"]], "take() (in module torch)": [[1938, "torch.take"]], "take_along_dim() (in module torch)": [[1939, "torch.take_along_dim"]], "tan() (in module torch)": [[1940, "torch.tan"]], "tanh() (in module torch)": [[1941, "torch.tanh"]], "tensor() (in module torch)": [[1942, "torch.tensor"]], "tensor_split() (in module torch)": [[1943, "torch.tensor_split"]], "tensordot() (in module torch)": [[1944, "torch.tensordot"]], "tile() (in module torch)": [[1945, "torch.tile"]], "topk() (in module torch)": [[1946, "torch.topk"]], "trace() (in module torch)": [[1947, "torch.trace"]], "transpose() (in module torch)": [[1948, 
"torch.transpose"]], "trapezoid() (in module torch)": [[1949, "torch.trapezoid"]], "trapz() (in module torch)": [[1950, "torch.trapz"]], "triangular_solve() (in module torch)": [[1951, "torch.triangular_solve"]], "tril() (in module torch)": [[1952, "torch.tril"]], "tril_indices() (in module torch)": [[1953, "torch.tril_indices"]], "triu() (in module torch)": [[1954, "torch.triu"]], "triu_indices() (in module torch)": [[1955, "torch.triu_indices"]], "true_divide() (in module torch)": [[1956, "torch.true_divide"]], "trunc() (in module torch)": [[1957, "torch.trunc"]], "unbind() (in module torch)": [[1958, "torch.unbind"]], "unflatten() (in module torch)": [[1959, "torch.unflatten"]], "unique() (in module torch)": [[1960, "torch.unique"]], "unique_consecutive() (in module torch)": [[1961, "torch.unique_consecutive"]], "unravel_index() (in module torch)": [[1962, "torch.unravel_index"]], "unsqueeze() (in module torch)": [[1963, "torch.unsqueeze"]], "use_deterministic_algorithms() (in module torch)": [[1964, "torch.use_deterministic_algorithms"]], "generate_methods_for_privateuse1_backend() (in module torch.utils)": [[1965, "torch.utils.generate_methods_for_privateuse1_backend"]], "get_cpp_backtrace() (in module torch.utils)": [[1966, "torch.utils.get_cpp_backtrace"]], "rename_privateuse1_backend() (in module torch.utils)": [[1967, "torch.utils.rename_privateuse1_backend"]], "set_module() (in module torch.utils)": [[1968, "torch.utils.set_module"]], "swap_tensors() (in module torch.utils)": [[1969, "torch.utils.swap_tensors"]], "vander() (in module torch)": [[1970, "torch.vander"]], "var() (in module torch)": [[1971, "torch.var"]], "var_mean() (in module torch)": [[1972, "torch.var_mean"]], "vdot() (in module torch)": [[1973, "torch.vdot"]], "view_as_complex() (in module torch)": [[1974, "torch.view_as_complex"]], "view_as_real() (in module torch)": [[1975, "torch.view_as_real"]], "vmap() (in module torch)": [[1976, "torch.vmap"]], "vsplit() (in module torch)": [[1977, "torch.vsplit"]], "vstack() (in module torch)": [[1978, "torch.vstack"]], "where() (in module torch)": [[1979, "torch.where"]], "xlogy() (in module torch)": [[1980, "torch.xlogy"]], "event (class in torch.xpu)": [[1981, "torch.xpu.Event"]], "elapsed_time() (torch.xpu.event method)": [[1981, "torch.xpu.Event.elapsed_time"]], "query() (torch.xpu.event method)": [[1981, "torch.xpu.Event.query"]], "record() (torch.xpu.event method)": [[1981, "torch.xpu.Event.record"]], "synchronize() (torch.xpu.event method)": [[1981, "torch.xpu.Event.synchronize"]], "wait() (torch.xpu.event method)": [[1981, "torch.xpu.Event.wait"]], "stream (class in torch.xpu)": [[1982, "torch.xpu.Stream"]], "query() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.query"]], "record_event() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.record_event"]], "synchronize() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.synchronize"]], "wait_event() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.wait_event"]], "wait_stream() (torch.xpu.stream method)": [[1982, "torch.xpu.Stream.wait_stream"]], "streamcontext (class in torch.xpu)": [[1983, "torch.xpu.StreamContext"]], "current_device() (in module torch.xpu)": [[1984, "torch.xpu.current_device"]], "current_stream() (in module torch.xpu)": [[1985, "torch.xpu.current_stream"]], "device (class in torch.xpu)": [[1986, "torch.xpu.device"]], "device_count() (in module torch.xpu)": [[1987, "torch.xpu.device_count"]], "device_of (class in torch.xpu)": [[1988, "torch.xpu.device_of"]], "empty_cache() (in 
module torch.xpu)": [[1989, "torch.xpu.empty_cache"]], "get_device_capability() (in module torch.xpu)": [[1990, "torch.xpu.get_device_capability"]], "get_device_name() (in module torch.xpu)": [[1991, "torch.xpu.get_device_name"]], "get_device_properties() (in module torch.xpu)": [[1992, "torch.xpu.get_device_properties"]], "get_rng_state() (in module torch.xpu)": [[1993, "torch.xpu.get_rng_state"]], "get_rng_state_all() (in module torch.xpu)": [[1994, "torch.xpu.get_rng_state_all"]], "init() (in module torch.xpu)": [[1995, "torch.xpu.init"]], "initial_seed() (in module torch.xpu)": [[1996, "torch.xpu.initial_seed"]], "is_available() (in module torch.xpu)": [[1997, "torch.xpu.is_available"]], "is_initialized() (in module torch.xpu)": [[1998, "torch.xpu.is_initialized"]], "manual_seed() (in module torch.xpu)": [[1999, "torch.xpu.manual_seed"]], "manual_seed_all() (in module torch.xpu)": [[2000, "torch.xpu.manual_seed_all"]], "seed() (in module torch.xpu)": [[2001, "torch.xpu.seed"]], "seed_all() (in module torch.xpu)": [[2002, "torch.xpu.seed_all"]], "set_device() (in module torch.xpu)": [[2003, "torch.xpu.set_device"]], "set_rng_state() (in module torch.xpu)": [[2004, "torch.xpu.set_rng_state"]], "set_rng_state_all() (in module torch.xpu)": [[2005, "torch.xpu.set_rng_state_all"]], "set_stream() (in module torch.xpu)": [[2006, "torch.xpu.set_stream"]], "stream() (in module torch.xpu)": [[2007, "torch.xpu.stream"]], "synchronize() (in module torch.xpu)": [[2008, "torch.xpu.synchronize"]], "zeros() (in module torch)": [[2009, "torch.zeros"]], "zeros_like() (in module torch)": [[2010, "torch.zeros_like"]], "download_url_to_file() (in module torch.hub)": [[2011, "torch.hub.download_url_to_file"]], "get_dir() (in module torch.hub)": [[2011, "torch.hub.get_dir"]], "help() (in module torch.hub)": [[2011, "torch.hub.help"]], "list() (in module torch.hub)": [[2011, "torch.hub.list"]], "load() (in module torch.hub)": [[2011, "torch.hub.load"]], "load_state_dict_from_url() (in module torch.hub)": [[2011, "torch.hub.load_state_dict_from_url"]], "set_dir() (in module torch.hub)": [[2011, "torch.hub.set_dir"]], "torch.hub": [[2011, "module-torch.hub"]], "pytorch_jit": [[2013, "envvar-PYTORCH_JIT"]], "environment variable": [[2013, "envvar-PYTORCH_JIT"]], "export() (in module torch.jit)": [[2013, "torch.jit.export"]], "torch.jit": [[2013, "module-torch.jit"]], "torch.jit.annotations": [[2013, "module-torch.jit.annotations"]], "torch.jit.frontend": [[2013, "module-torch.jit.frontend"]], "torch.jit.generate_bytecode": [[2013, "module-torch.jit.generate_bytecode"]], "torch.jit.mobile": [[2013, "module-torch.jit.mobile"]], "torch.jit.quantized": [[2013, "module-torch.jit.quantized"]], "torch.jit.supported_ops": [[2014, "module-torch.jit.supported_ops"]], "is_scripting() (in module torch.jit)": [[2015, "torch.jit.is_scripting"]], "is_tracing() (in module torch.jit)": [[2015, "torch.jit.is_tracing"]], "torch.jit.unsupported_tensor_ops": [[2018, "module-torch.jit.unsupported_tensor_ops"]], "torch.utils.jit": [[2019, "module-torch.utils.jit"]], "library (class in torch.library)": [[2020, "torch.library.Library"]], "custom_op() (in module torch.library)": [[2020, "torch.library.custom_op"]], "define() (in module torch.library)": [[2020, "torch.library.define"]], "define() (torch.library.library method)": [[2020, "torch.library.Library.define"]], "fallthrough_kernel() (in module torch.library)": [[2020, "torch.library.fallthrough_kernel"]], "get_ctx() (in module torch.library)": [[2020, "torch.library.get_ctx"]], 
"impl() (in module torch.library)": [[2020, "torch.library.impl"]], "impl() (torch.library.library method)": [[2020, "torch.library.Library.impl"]], "impl_abstract() (in module torch.library)": [[2020, "torch.library.impl_abstract"]], "opcheck() (in module torch.library)": [[2020, "torch.library.opcheck"]], "register_autograd() (in module torch.library)": [[2020, "torch.library.register_autograd"]], "register_fake() (in module torch.library)": [[2020, "torch.library.register_fake"]], "register_kernel() (in module torch.library)": [[2020, "torch.library.register_kernel"]], "torch.library": [[2020, "module-torch.library"]], "torch.linalg": [[2021, "module-torch.linalg"]], "torch._logging": [[2022, "module-torch._logging"]], "torch.masked": [[2023, "module-torch.masked"]], "torch.masked.maskedtensor": [[2023, "module-torch.masked.maskedtensor"]], "torch.masked.maskedtensor.binary": [[2023, "module-torch.masked.maskedtensor.binary"]], "torch.masked.maskedtensor.core": [[2023, "module-torch.masked.maskedtensor.core"]], "torch.masked.maskedtensor.creation": [[2023, "module-torch.masked.maskedtensor.creation"]], "torch.masked.maskedtensor.passthrough": [[2023, "module-torch.masked.maskedtensor.passthrough"]], "torch.masked.maskedtensor.reductions": [[2023, "module-torch.masked.maskedtensor.reductions"]], "torch.masked.maskedtensor.unary": [[2023, "module-torch.masked.maskedtensor.unary"]], "optimize_for_mobile() (in module torch.utils.mobile_optimizer)": [[2026, "torch.utils.mobile_optimizer.optimize_for_mobile"]], "load_url() (in module torch.utils.model_zoo)": [[2027, "torch.utils.model_zoo.load_url"]], "torch.utils.model_zoo": [[2027, "module-torch.utils.model_zoo"]], "moduletracker (class in torch.utils.module_tracker)": [[2028, "torch.utils.module_tracker.ModuleTracker"]], "torch.utils.module_tracker": [[2028, "module-torch.utils.module_tracker"]], "aggregation (class in torch.monitor)": [[2029, "torch.monitor.Aggregation"]], "event (class in torch.monitor)": [[2029, "torch.monitor.Event"]], "eventhandlerhandle (class in torch.monitor)": [[2029, "torch.monitor.EventHandlerHandle"]], "stat (class in torch.monitor)": [[2029, "torch.monitor.Stat"]], "tensorboardeventhandler (class in torch.monitor)": [[2029, "torch.monitor.TensorboardEventHandler"]], "__init__() (torch.monitor.event method)": [[2029, "torch.monitor.Event.__init__"]], "__init__() (torch.monitor.stat method)": [[2029, "torch.monitor.Stat.__init__"]], "__init__() (torch.monitor.tensorboardeventhandler method)": [[2029, "torch.monitor.TensorboardEventHandler.__init__"]], "add() (torch.monitor.stat method)": [[2029, "torch.monitor.Stat.add"]], "count (torch.monitor.stat property)": [[2029, "torch.monitor.Stat.count"]], "data (torch.monitor.event property)": [[2029, "torch.monitor.Event.data"]], "data_value_t (class in torch.monitor)": [[2029, "torch.monitor.data_value_t"]], "get() (torch.monitor.stat method)": [[2029, "torch.monitor.Stat.get"]], "log_event() (in module torch.monitor)": [[2029, "torch.monitor.log_event"]], "name (torch.monitor.aggregation property)": [[2029, "torch.monitor.Aggregation.name"]], "name (torch.monitor.event property)": [[2029, "torch.monitor.Event.name"]], "name (torch.monitor.stat property)": [[2029, "torch.monitor.Stat.name"]], "register_event_handler() (in module torch.monitor)": [[2029, "torch.monitor.register_event_handler"]], "timestamp (torch.monitor.event property)": [[2029, "torch.monitor.Event.timestamp"]], "torch.monitor": [[2029, "module-torch.monitor"]], "unregister_event_handler() (in 
module torch.monitor)": [[2029, "torch.monitor.unregister_event_handler"]], "torch.mps": [[2030, "module-torch.mps"]], "torch.mps.event": [[2030, "module-torch.mps.event"]], "torch.mps.profiler": [[2030, "module-torch.mps.profiler"]], "torch.mtia": [[2031, "module-torch.mtia"]], "spawncontext (class in torch.multiprocessing)": [[2032, "torch.multiprocessing.SpawnContext"]], "get_all_sharing_strategies() (in module torch.multiprocessing)": [[2032, "torch.multiprocessing.get_all_sharing_strategies"]], "get_sharing_strategy() (in module torch.multiprocessing)": [[2032, "torch.multiprocessing.get_sharing_strategy"]], "join() (torch.multiprocessing.spawncontext method)": [[2032, "torch.multiprocessing.SpawnContext.join"]], "set_sharing_strategy() (in module torch.multiprocessing)": [[2032, "torch.multiprocessing.set_sharing_strategy"]], "spawn() (in module torch.multiprocessing.spawn)": [[2032, "torch.multiprocessing.spawn.spawn"]], "torch.multiprocessing": [[2032, "module-torch.multiprocessing"]], "torch.multiprocessing.pool": [[2032, "module-torch.multiprocessing.pool"]], "torch.multiprocessing.queue": [[2032, "module-torch.multiprocessing.queue"]], "torch.multiprocessing.reductions": [[2032, "module-torch.multiprocessing.reductions"]], "torch.multiprocessing.spawn": [[2032, "module-torch.multiprocessing.spawn"]], "align_as() (torch.tensor method)": [[2034, "torch.Tensor.align_as"]], "align_to() (torch.tensor method)": [[2034, "torch.Tensor.align_to"]], "names (torch.tensor attribute)": [[2034, "torch.Tensor.names"]], "refine_names() (torch.tensor method)": [[2034, "torch.Tensor.refine_names"]], "rename() (torch.tensor method)": [[2034, "torch.Tensor.rename"]], "rename_() (torch.tensor method)": [[2034, "torch.Tensor.rename_"]], "as_nested_tensor() (in module torch.nested)": [[2035, "torch.nested.as_nested_tensor"]], "nested_tensor() (in module torch.nested)": [[2035, "torch.nested.nested_tensor"]], "to_padded_tensor() (in module torch.nested)": [[2035, "torch.nested.to_padded_tensor"]], "torch.nested": [[2035, "module-torch.nested"]], "torch.nn": [[2036, "module-torch.nn"]], "torch.nn.backends": [[2036, "module-torch.nn.backends"]], "torch.nn.backends.thnn": [[2036, "module-torch.nn.backends.thnn"]], "torch.nn.common_types": [[2036, "module-torch.nn.common_types"]], "torch.nn.cpp": [[2036, "module-torch.nn.cpp"]], "torch.nn.functional": [[2036, "module-torch.nn.functional"]], "torch.nn.grad": [[2036, "module-torch.nn.grad"]], "torch.nn.init": [[2036, "module-torch.nn.init"]], "torch.nn.modules": [[2036, "module-torch.nn.modules"]], "torch.nn.modules.activation": [[2036, "module-torch.nn.modules.activation"]], "torch.nn.modules.adaptive": [[2036, "module-torch.nn.modules.adaptive"]], "torch.nn.modules.batchnorm": [[2036, "module-torch.nn.modules.batchnorm"]], "torch.nn.modules.channelshuffle": [[2036, "module-torch.nn.modules.channelshuffle"]], "torch.nn.modules.container": [[2036, "module-torch.nn.modules.container"]], "torch.nn.modules.conv": [[2036, "module-torch.nn.modules.conv"]], "torch.nn.modules.distance": [[2036, "module-torch.nn.modules.distance"]], "torch.nn.modules.dropout": [[2036, "module-torch.nn.modules.dropout"]], "torch.nn.modules.flatten": [[2036, "module-torch.nn.modules.flatten"]], "torch.nn.modules.fold": [[2036, "module-torch.nn.modules.fold"]], "torch.nn.modules.instancenorm": [[2036, "module-torch.nn.modules.instancenorm"]], "torch.nn.modules.lazy": [[2036, "module-torch.nn.modules.lazy"]], "torch.nn.modules.linear": [[2036, "module-torch.nn.modules.linear"]], 
"torch.nn.modules.loss": [[2036, "module-torch.nn.modules.loss"]], "torch.nn.modules.module": [[2036, "module-torch.nn.modules.module"]], "torch.nn.modules.normalization": [[2036, "module-torch.nn.modules.normalization"]], "torch.nn.modules.padding": [[2036, "module-torch.nn.modules.padding"]], "torch.nn.modules.pixelshuffle": [[2036, "module-torch.nn.modules.pixelshuffle"]], "torch.nn.modules.pooling": [[2036, "module-torch.nn.modules.pooling"]], "torch.nn.modules.rnn": [[2036, "module-torch.nn.modules.rnn"]], "torch.nn.modules.sparse": [[2036, "module-torch.nn.modules.sparse"]], "torch.nn.modules.transformer": [[2036, "module-torch.nn.modules.transformer"]], "torch.nn.modules.upsampling": [[2036, "module-torch.nn.modules.upsampling"]], "torch.nn.modules.utils": [[2036, "module-torch.nn.modules.utils"]], "torch.nn.parallel": [[2036, "module-torch.nn.parallel"]], "torch.nn.parallel.comm": [[2036, "module-torch.nn.parallel.comm"]], "torch.nn.parallel.distributed": [[2036, "module-torch.nn.parallel.distributed"]], "torch.nn.parallel.parallel_apply": [[2036, "module-torch.nn.parallel.parallel_apply"]], "torch.nn.parallel.replicate": [[2036, "module-torch.nn.parallel.replicate"]], "torch.nn.parallel.scatter_gather": [[2036, "module-torch.nn.parallel.scatter_gather"]], "torch.nn.parameter": [[2036, "module-torch.nn.parameter"]], "torch.nn.utils": [[2036, "module-torch.nn.utils"]], "torch.nn.utils.clip_grad": [[2036, "module-torch.nn.utils.clip_grad"]], "torch.nn.utils.convert_parameters": [[2036, "module-torch.nn.utils.convert_parameters"]], "torch.nn.utils.fusion": [[2036, "module-torch.nn.utils.fusion"]], "torch.nn.utils.init": [[2036, "module-torch.nn.utils.init"]], "torch.nn.utils.memory_format": [[2036, "module-torch.nn.utils.memory_format"]], "torch.nn.utils.parametrizations": [[2036, "module-torch.nn.utils.parametrizations"]], "torch.nn.utils.parametrize": [[2036, "module-torch.nn.utils.parametrize"]], "torch.nn.utils.prune": [[2036, "module-torch.nn.utils.prune"]], "torch.nn.utils.rnn": [[2036, "module-torch.nn.utils.rnn"]], "torch.nn.utils.stateless": [[2036, "module-torch.nn.utils.stateless"]], "torch.nn.attention": [[2037, "module-torch.nn.attention"]], "torch.nn.attention.bias": [[2038, "module-torch.nn.attention.bias"]], "calculate_gain() (in module torch.nn.init)": [[2040, "torch.nn.init.calculate_gain"]], "constant_() (in module torch.nn.init)": [[2040, "torch.nn.init.constant_"]], "dirac_() (in module torch.nn.init)": [[2040, "torch.nn.init.dirac_"]], "eye_() (in module torch.nn.init)": [[2040, "torch.nn.init.eye_"]], "kaiming_normal_() (in module torch.nn.init)": [[2040, "torch.nn.init.kaiming_normal_"]], "kaiming_uniform_() (in module torch.nn.init)": [[2040, "torch.nn.init.kaiming_uniform_"]], "normal_() (in module torch.nn.init)": [[2040, "torch.nn.init.normal_"]], "ones_() (in module torch.nn.init)": [[2040, "torch.nn.init.ones_"]], "orthogonal_() (in module torch.nn.init)": [[2040, "torch.nn.init.orthogonal_"]], "sparse_() (in module torch.nn.init)": [[2040, "torch.nn.init.sparse_"]], "trunc_normal_() (in module torch.nn.init)": [[2040, "torch.nn.init.trunc_normal_"]], "uniform_() (in module torch.nn.init)": [[2040, "torch.nn.init.uniform_"]], "xavier_normal_() (in module torch.nn.init)": [[2040, "torch.nn.init.xavier_normal_"]], "xavier_uniform_() (in module torch.nn.init)": [[2040, "torch.nn.init.xavier_uniform_"]], "zeros_() (in module torch.nn.init)": [[2040, "torch.nn.init.zeros_"]], "add_safe_globals() (in module torch.serialization)": [[2060, 
"torch.serialization.add_safe_globals"]], "clear_safe_globals() (in module torch.serialization)": [[2060, "torch.serialization.clear_safe_globals"]], "get_default_load_endianness() (in module torch.serialization)": [[2060, "torch.serialization.get_default_load_endianness"]], "get_default_mmap_options() (in module torch.serialization)": [[2060, "torch.serialization.get_default_mmap_options"]], "get_safe_globals() (in module torch.serialization)": [[2060, "torch.serialization.get_safe_globals"]], "register_package() (in module torch.serialization)": [[2060, "torch.serialization.register_package"]], "set_default_load_endianness() (in module torch.serialization)": [[2060, "torch.serialization.set_default_load_endianness"]], "set_default_mmap_options() (in module torch.serialization)": [[2060, "torch.serialization.set_default_mmap_options"]], "torch.onnx.errors": [[2062, "module-torch.onnx.errors"]], "torch.onnx.operators": [[2062, "module-torch.onnx.operators"]], "torch.onnx.symbolic_caffe2": [[2062, "module-torch.onnx.symbolic_caffe2"]], "torch.onnx.symbolic_helper": [[2062, "module-torch.onnx.symbolic_helper"]], "torch.onnx.symbolic_opset10": [[2062, "module-torch.onnx.symbolic_opset10"]], "torch.onnx.symbolic_opset11": [[2062, "module-torch.onnx.symbolic_opset11"]], "torch.onnx.symbolic_opset12": [[2062, "module-torch.onnx.symbolic_opset12"]], "torch.onnx.symbolic_opset13": [[2062, "module-torch.onnx.symbolic_opset13"]], "torch.onnx.symbolic_opset14": [[2062, "module-torch.onnx.symbolic_opset14"]], "torch.onnx.symbolic_opset15": [[2062, "module-torch.onnx.symbolic_opset15"]], "torch.onnx.symbolic_opset16": [[2062, "module-torch.onnx.symbolic_opset16"]], "torch.onnx.symbolic_opset17": [[2062, "module-torch.onnx.symbolic_opset17"]], "torch.onnx.symbolic_opset18": [[2062, "module-torch.onnx.symbolic_opset18"]], "torch.onnx.symbolic_opset19": [[2062, "module-torch.onnx.symbolic_opset19"]], "torch.onnx.symbolic_opset20": [[2062, "module-torch.onnx.symbolic_opset20"]], "torch.onnx.symbolic_opset7": [[2062, "module-torch.onnx.symbolic_opset7"]], "torch.onnx.symbolic_opset8": [[2062, "module-torch.onnx.symbolic_opset8"]], "torch.onnx.symbolic_opset9": [[2062, "module-torch.onnx.symbolic_opset9"]], "torch.onnx.utils": [[2062, "module-torch.onnx.utils"]], "torch.onnx.verification": [[2062, "module-torch.onnx.verification"]], "diagnosticoptions (class in torch.onnx)": [[2063, "torch.onnx.DiagnosticOptions"]], "exportoptions (class in torch.onnx)": [[2063, "torch.onnx.ExportOptions"]], "invalidexportoptionserror (class in torch.onnx)": [[2063, "torch.onnx.InvalidExportOptionsError"]], "onnxprogram (class in torch.onnx)": [[2063, "torch.onnx.ONNXProgram"]], "onnxprogramserializer (class in torch.onnx)": [[2063, "torch.onnx.ONNXProgramSerializer"]], "onnxruntimeoptions (class in torch.onnx)": [[2063, "torch.onnx.ONNXRuntimeOptions"]], "onnxexportererror (class in torch.onnx)": [[2063, "torch.onnx.OnnxExporterError"]], "onnxregistry (class in torch.onnx)": [[2063, "torch.onnx.OnnxRegistry"]], "adapt_torch_inputs_to_onnx() (torch.onnx.onnxprogram method)": [[2063, "torch.onnx.ONNXProgram.adapt_torch_inputs_to_onnx"]], "adapt_torch_outputs_to_onnx() (torch.onnx.onnxprogram method)": [[2063, "torch.onnx.ONNXProgram.adapt_torch_outputs_to_onnx"]], "diagnostic_context (torch.onnx.onnxprogram property)": [[2063, "torch.onnx.ONNXProgram.diagnostic_context"]], "dynamo_export() (in module torch.onnx)": [[2063, "torch.onnx.dynamo_export"]], "enable_fake_mode() (in module torch.onnx)": [[2063, 
"torch.onnx.enable_fake_mode"]], "fake_context (torch.onnx.onnxprogram property)": [[2063, "torch.onnx.ONNXProgram.fake_context"]], "get_op_functions() (torch.onnx.onnxregistry method)": [[2063, "torch.onnx.OnnxRegistry.get_op_functions"]], "is_registered_op() (torch.onnx.onnxregistry method)": [[2063, "torch.onnx.OnnxRegistry.is_registered_op"]], "model_proto (torch.onnx.onnxprogram property)": [[2063, "torch.onnx.ONNXProgram.model_proto"]], "model_signature (torch.onnx.onnxprogram property)": [[2063, "torch.onnx.ONNXProgram.model_signature"]], "opset_version (torch.onnx.onnxregistry property)": [[2063, "torch.onnx.OnnxRegistry.opset_version"]], "register_op() (torch.onnx.onnxregistry method)": [[2063, "torch.onnx.OnnxRegistry.register_op"]], "save() (torch.onnx.onnxprogram method)": [[2063, "torch.onnx.ONNXProgram.save"]], "save_diagnostics() (torch.onnx.onnxprogram method)": [[2063, "torch.onnx.ONNXProgram.save_diagnostics"]], "serialize() (torch.onnx.onnxprogramserializer method)": [[2063, "torch.onnx.ONNXProgramSerializer.serialize"]], "is_onnxrt_backend_supported() (in module torch.onnx)": [[2064, "torch.onnx.is_onnxrt_backend_supported"]], "disable_log() (in module torch.onnx)": [[2065, "torch.onnx.disable_log"]], "enable_log() (in module torch.onnx)": [[2065, "torch.onnx.enable_log"]], "export() (in module torch.onnx)": [[2065, "torch.onnx.export"]], "export_to_pretty_string() (in module torch.onnx)": [[2065, "torch.onnx.export_to_pretty_string"]], "find_mismatch() (in module torch.onnx.verification)": [[2065, "torch.onnx.verification.find_mismatch"]], "is_in_onnx_export() (in module torch.onnx)": [[2065, "torch.onnx.is_in_onnx_export"]], "register_custom_op_symbolic() (in module torch.onnx)": [[2065, "torch.onnx.register_custom_op_symbolic"]], "select_model_mode_for_export() (in module torch.onnx)": [[2065, "torch.onnx.select_model_mode_for_export"]], "torch.onnx": [[2065, "module-torch.onnx"]], "unregister_custom_op_symbolic() (in module torch.onnx)": [[2065, "torch.onnx.unregister_custom_op_symbolic"]], "optimizer (class in torch.optim)": [[2067, "torch.optim.Optimizer"]], "torch.optim": [[2067, "module-torch.optim"]], "torch.optim.adadelta": [[2067, "module-torch.optim.adadelta"]], "torch.optim.adagrad": [[2067, "module-torch.optim.adagrad"]], "torch.optim.adam": [[2067, "module-torch.optim.adam"]], "torch.optim.adamax": [[2067, "module-torch.optim.adamax"]], "torch.optim.adamw": [[2067, "module-torch.optim.adamw"]], "torch.optim.asgd": [[2067, "module-torch.optim.asgd"]], "torch.optim.lbfgs": [[2067, "module-torch.optim.lbfgs"]], "torch.optim.lr_scheduler": [[2067, "module-torch.optim.lr_scheduler"]], "torch.optim.nadam": [[2067, "module-torch.optim.nadam"]], "torch.optim.optimizer": [[2067, "module-torch.optim.optimizer"]], "torch.optim.radam": [[2067, "module-torch.optim.radam"]], "torch.optim.rmsprop": [[2067, "module-torch.optim.rmsprop"]], "torch.optim.rprop": [[2067, "module-torch.optim.rprop"]], "torch.optim.sgd": [[2067, "module-torch.optim.sgd"]], "torch.optim.sparse_adam": [[2067, "module-torch.optim.sparse_adam"]], "torch.optim.swa_utils": [[2067, "module-torch.optim.swa_utils"]], "directory (class in torch.package)": [[2068, "torch.package.Directory"]], "emptymatcherror (class in torch.package)": [[2068, "torch.package.EmptyMatchError"]], "packageexporter (class in torch.package)": [[2068, "torch.package.PackageExporter"]], "packageimporter (class in torch.package)": [[2068, "torch.package.PackageImporter"]], "packagingerror (class in torch.package)": [[2068, 
"torch.package.PackagingError"]], "__init__() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.__init__"]], "__init__() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.__init__"]], "add_dependency() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.add_dependency"]], "all_paths() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.all_paths"]], "close() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.close"]], "denied_modules() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.denied_modules"]], "deny() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.deny"]], "dependency_graph_string() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.dependency_graph_string"]], "extern() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.extern"]], "externed_modules() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.externed_modules"]], "file_structure() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.file_structure"]], "get_rdeps() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.get_rdeps"]], "get_unique_id() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.get_unique_id"]], "has_file() (torch.package.directory method)": [[2068, "torch.package.Directory.has_file"]], "id() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.id"]], "import_module() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.import_module"]], "intern() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.intern"]], "interned_modules() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.interned_modules"]], "load_binary() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.load_binary"]], "load_pickle() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.load_pickle"]], "load_text() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.load_text"]], "mock() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.mock"]], "mocked_modules() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.mocked_modules"]], "python_version() (torch.package.packageimporter method)": [[2068, "torch.package.PackageImporter.python_version"]], "register_extern_hook() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.register_extern_hook"]], "register_intern_hook() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.register_intern_hook"]], "register_mock_hook() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.register_mock_hook"]], "save_binary() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_binary"]], "save_module() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_module"]], "save_pickle() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_pickle"]], "save_source_file() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_source_file"]], "save_source_string() (torch.package.packageexporter 
method)": [[2068, "torch.package.PackageExporter.save_source_string"]], "save_text() (torch.package.packageexporter method)": [[2068, "torch.package.PackageExporter.save_text"]], "torch.package": [[2068, "module-torch.package"]], "torch.package.analyze": [[2068, "module-torch.package.analyze"]], "torch.package.analyze.find_first_use_of_broken_modules": [[2068, "module-torch.package.analyze.find_first_use_of_broken_modules"]], "torch.package.analyze.is_from_package": [[2068, "module-torch.package.analyze.is_from_package"]], "torch.package.analyze.trace_dependencies": [[2068, "module-torch.package.analyze.trace_dependencies"]], "torch.package.file_structure_representation": [[2068, "module-torch.package.file_structure_representation"]], "torch.package.find_file_dependencies": [[2068, "module-torch.package.find_file_dependencies"]], "torch.package.glob_group": [[2068, "module-torch.package.glob_group"]], "torch.package.importer": [[2068, "module-torch.package.importer"]], "torch.package.package_exporter": [[2068, "module-torch.package.package_exporter"]], "torch.package.package_importer": [[2068, "module-torch.package.package_importer"]], "profileraction (class in torch.profiler)": [[2069, "torch.profiler.ProfilerAction"]], "profileractivity (class in torch.profiler)": [[2069, "torch.profiler.ProfilerActivity"]], "_kinetoprofile (class in torch.profiler)": [[2069, "torch.profiler._KinetoProfile"]], "add_metadata() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.add_metadata"]], "add_metadata_json() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.add_metadata_json"]], "events() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.events"]], "export_chrome_trace() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.export_chrome_trace"]], "export_memory_timeline() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.export_memory_timeline"]], "export_stacks() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.export_stacks"]], "is_available() (in module torch.profiler.itt)": [[2069, "torch.profiler.itt.is_available"]], "key_averages() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.key_averages"]], "mark() (in module torch.profiler.itt)": [[2069, "torch.profiler.itt.mark"]], "name (torch.profiler.profileractivity property)": [[2069, "torch.profiler.ProfilerActivity.name"]], "preset_metadata_json() (torch.profiler._kinetoprofile method)": [[2069, "torch.profiler._KinetoProfile.preset_metadata_json"]], "profile (class in torch.profiler)": [[2069, "torch.profiler.profile"]], "range_pop() (in module torch.profiler.itt)": [[2069, "torch.profiler.itt.range_pop"]], "range_push() (in module torch.profiler.itt)": [[2069, "torch.profiler.itt.range_push"]], "schedule() (in module torch.profiler)": [[2069, "torch.profiler.schedule"]], "step() (torch.profiler.profile method)": [[2069, "torch.profiler.profile.step"]], "tensorboard_trace_handler() (in module torch.profiler)": [[2069, "torch.profiler.tensorboard_trace_handler"]], "torch.profiler": [[2069, "module-torch.profiler"]], "torch.profiler.itt": [[2069, "module-torch.profiler.itt"]], "torch.profiler.profiler": [[2069, "module-torch.profiler.profiler"]], "torch.profiler.python_tracer": [[2069, "module-torch.profiler.python_tracer"]], "torch.ao": [[2070, "module-torch.ao"]], "torch.ao.nn": [[2070, "module-torch.ao.nn"]], 
"torch.ao.nn.intrinsic.modules.fused": [[2070, "module-torch.ao.nn.intrinsic.modules.fused"]], "torch.ao.nn.intrinsic.qat.modules.conv_fused": [[2070, "module-torch.ao.nn.intrinsic.qat.modules.conv_fused"]], "torch.ao.nn.intrinsic.qat.modules.linear_fused": [[2070, "module-torch.ao.nn.intrinsic.qat.modules.linear_fused"]], "torch.ao.nn.intrinsic.qat.modules.linear_relu": [[2070, "module-torch.ao.nn.intrinsic.qat.modules.linear_relu"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu": [[2070, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu"]], "torch.ao.nn.intrinsic.quantized.modules.bn_relu": [[2070, "module-torch.ao.nn.intrinsic.quantized.modules.bn_relu"]], "torch.ao.nn.intrinsic.quantized.modules.conv_add": [[2070, "module-torch.ao.nn.intrinsic.quantized.modules.conv_add"]], "torch.ao.nn.intrinsic.quantized.modules.conv_relu": [[2070, "module-torch.ao.nn.intrinsic.quantized.modules.conv_relu"]], "torch.ao.nn.intrinsic.quantized.modules.linear_relu": [[2070, "module-torch.ao.nn.intrinsic.quantized.modules.linear_relu"]], "torch.ao.nn.qat.dynamic.modules.linear": [[2070, "module-torch.ao.nn.qat.dynamic.modules.linear"]], "torch.ao.nn.qat.modules.conv": [[2070, "module-torch.ao.nn.qat.modules.conv"]], "torch.ao.nn.qat.modules.embedding_ops": [[2070, "module-torch.ao.nn.qat.modules.embedding_ops"]], "torch.ao.nn.qat.modules.linear": [[2070, "module-torch.ao.nn.qat.modules.linear"]], "torch.ao.nn.quantizable": [[2070, "module-torch.ao.nn.quantizable"]], "torch.ao.nn.quantizable.modules": [[2070, "module-torch.ao.nn.quantizable.modules"]], "torch.ao.nn.quantizable.modules.activation": [[2070, "module-torch.ao.nn.quantizable.modules.activation"]], "torch.ao.nn.quantizable.modules.rnn": [[2070, "module-torch.ao.nn.quantizable.modules.rnn"]], "torch.ao.nn.quantized": [[2070, "module-torch.ao.nn.quantized"]], "torch.ao.nn.quantized.dynamic.modules.conv": [[2070, "module-torch.ao.nn.quantized.dynamic.modules.conv"]], "torch.ao.nn.quantized.dynamic.modules.linear": [[2070, "module-torch.ao.nn.quantized.dynamic.modules.linear"]], "torch.ao.nn.quantized.dynamic.modules.rnn": [[2070, "module-torch.ao.nn.quantized.dynamic.modules.rnn"]], "torch.ao.nn.quantized.modules.activation": [[2070, "module-torch.ao.nn.quantized.modules.activation"]], "torch.ao.nn.quantized.modules.batchnorm": [[2070, "module-torch.ao.nn.quantized.modules.batchnorm"]], "torch.ao.nn.quantized.modules.conv": [[2070, "module-torch.ao.nn.quantized.modules.conv"]], "torch.ao.nn.quantized.modules.dropout": [[2070, "module-torch.ao.nn.quantized.modules.dropout"]], "torch.ao.nn.quantized.modules.embedding_ops": [[2070, "module-torch.ao.nn.quantized.modules.embedding_ops"]], "torch.ao.nn.quantized.modules.functional_modules": [[2070, "module-torch.ao.nn.quantized.modules.functional_modules"]], "torch.ao.nn.quantized.modules.linear": [[2070, "module-torch.ao.nn.quantized.modules.linear"]], "torch.ao.nn.quantized.modules.normalization": [[2070, "module-torch.ao.nn.quantized.modules.normalization"]], "torch.ao.nn.quantized.modules.rnn": [[2070, "module-torch.ao.nn.quantized.modules.rnn"]], "torch.ao.nn.quantized.modules.utils": [[2070, "module-torch.ao.nn.quantized.modules.utils"]], "torch.ao.nn.quantized.reference": [[2070, "module-torch.ao.nn.quantized.reference"]], "torch.ao.nn.quantized.reference.modules": [[2070, "module-torch.ao.nn.quantized.reference.modules"]], "torch.ao.nn.quantized.reference.modules.conv": [[2070, "module-torch.ao.nn.quantized.reference.modules.conv"]], 
"torch.ao.nn.quantized.reference.modules.linear": [[2070, "module-torch.ao.nn.quantized.reference.modules.linear"]], "torch.ao.nn.quantized.reference.modules.rnn": [[2070, "module-torch.ao.nn.quantized.reference.modules.rnn"]], "torch.ao.nn.quantized.reference.modules.sparse": [[2070, "module-torch.ao.nn.quantized.reference.modules.sparse"]], "torch.ao.nn.quantized.reference.modules.utils": [[2070, "module-torch.ao.nn.quantized.reference.modules.utils"]], "torch.ao.nn.sparse": [[2070, "module-torch.ao.nn.sparse"]], "torch.ao.nn.sparse.quantized": [[2070, "module-torch.ao.nn.sparse.quantized"]], "torch.ao.nn.sparse.quantized.dynamic": [[2070, "module-torch.ao.nn.sparse.quantized.dynamic"]], "torch.ao.nn.sparse.quantized.dynamic.linear": [[2070, "module-torch.ao.nn.sparse.quantized.dynamic.linear"]], "torch.ao.nn.sparse.quantized.linear": [[2070, "module-torch.ao.nn.sparse.quantized.linear"]], "torch.ao.nn.sparse.quantized.utils": [[2070, "module-torch.ao.nn.sparse.quantized.utils"]], "torch.ao.ns": [[2070, "module-torch.ao.ns"]], "torch.ao.ns.fx": [[2070, "module-torch.ao.ns.fx"]], "torch.ao.ns.fx.graph_matcher": [[2070, "module-torch.ao.ns.fx.graph_matcher"]], "torch.ao.ns.fx.graph_passes": [[2070, "module-torch.ao.ns.fx.graph_passes"]], "torch.ao.ns.fx.mappings": [[2070, "module-torch.ao.ns.fx.mappings"]], "torch.ao.ns.fx.n_shadows_utils": [[2070, "module-torch.ao.ns.fx.n_shadows_utils"]], "torch.ao.ns.fx.ns_types": [[2070, "module-torch.ao.ns.fx.ns_types"]], "torch.ao.ns.fx.pattern_utils": [[2070, "module-torch.ao.ns.fx.pattern_utils"]], "torch.ao.ns.fx.qconfig_multi_mapping": [[2070, "module-torch.ao.ns.fx.qconfig_multi_mapping"]], "torch.ao.ns.fx.utils": [[2070, "module-torch.ao.ns.fx.utils"]], "torch.ao.ns.fx.weight_utils": [[2070, "module-torch.ao.ns.fx.weight_utils"]], "torch.ao.pruning": [[2070, "module-torch.ao.pruning"]], "torch.ao.pruning.scheduler": [[2070, "module-torch.ao.pruning.scheduler"]], "torch.ao.pruning.scheduler.base_scheduler": [[2070, "module-torch.ao.pruning.scheduler.base_scheduler"]], "torch.ao.pruning.scheduler.cubic_scheduler": [[2070, "module-torch.ao.pruning.scheduler.cubic_scheduler"]], "torch.ao.pruning.scheduler.lambda_scheduler": [[2070, "module-torch.ao.pruning.scheduler.lambda_scheduler"]], "torch.ao.pruning.sparsifier": [[2070, "module-torch.ao.pruning.sparsifier"]], "torch.ao.pruning.sparsifier.base_sparsifier": [[2070, "module-torch.ao.pruning.sparsifier.base_sparsifier"]], "torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier": [[2070, "module-torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier"]], "torch.ao.pruning.sparsifier.utils": [[2070, "module-torch.ao.pruning.sparsifier.utils"]], "torch.ao.pruning.sparsifier.weight_norm_sparsifier": [[2070, "module-torch.ao.pruning.sparsifier.weight_norm_sparsifier"]], "torch.ao.quantization": [[2070, "module-torch.ao.quantization"]], "torch.ao.quantization.backend_config": [[2070, "module-torch.ao.quantization.backend_config"]], "torch.ao.quantization.backend_config.backend_config": [[2070, "module-torch.ao.quantization.backend_config.backend_config"]], "torch.ao.quantization.backend_config.executorch": [[2070, "module-torch.ao.quantization.backend_config.executorch"]], "torch.ao.quantization.backend_config.fbgemm": [[2070, "module-torch.ao.quantization.backend_config.fbgemm"]], "torch.ao.quantization.backend_config.native": [[2070, "module-torch.ao.quantization.backend_config.native"]], "torch.ao.quantization.backend_config.observation_type": [[2070, 
"module-torch.ao.quantization.backend_config.observation_type"]], "torch.ao.quantization.backend_config.onednn": [[2070, "module-torch.ao.quantization.backend_config.onednn"]], "torch.ao.quantization.backend_config.qnnpack": [[2070, "module-torch.ao.quantization.backend_config.qnnpack"]], "torch.ao.quantization.backend_config.tensorrt": [[2070, "module-torch.ao.quantization.backend_config.tensorrt"]], "torch.ao.quantization.backend_config.utils": [[2070, "module-torch.ao.quantization.backend_config.utils"]], "torch.ao.quantization.backend_config.x86": [[2070, "module-torch.ao.quantization.backend_config.x86"]], "torch.ao.quantization.fake_quantize": [[2070, "module-torch.ao.quantization.fake_quantize"]], "torch.ao.quantization.fuse_modules": [[2070, "module-torch.ao.quantization.fuse_modules"]], "torch.ao.quantization.fuser_method_mappings": [[2070, "module-torch.ao.quantization.fuser_method_mappings"]], "torch.ao.quantization.fx": [[2070, "module-torch.ao.quantization.fx"]], "torch.ao.quantization.fx.convert": [[2070, "module-torch.ao.quantization.fx.convert"]], "torch.ao.quantization.fx.custom_config": [[2070, "module-torch.ao.quantization.fx.custom_config"]], "torch.ao.quantization.fx.fuse": [[2070, "module-torch.ao.quantization.fx.fuse"]], "torch.ao.quantization.fx.fuse_handler": [[2070, "module-torch.ao.quantization.fx.fuse_handler"]], "torch.ao.quantization.fx.graph_module": [[2070, "module-torch.ao.quantization.fx.graph_module"]], "torch.ao.quantization.fx.lower_to_fbgemm": [[2070, "module-torch.ao.quantization.fx.lower_to_fbgemm"]], "torch.ao.quantization.fx.lower_to_qnnpack": [[2070, "module-torch.ao.quantization.fx.lower_to_qnnpack"]], "torch.ao.quantization.fx.lstm_utils": [[2070, "module-torch.ao.quantization.fx.lstm_utils"]], "torch.ao.quantization.fx.match_utils": [[2070, "module-torch.ao.quantization.fx.match_utils"]], "torch.ao.quantization.fx.pattern_utils": [[2070, "module-torch.ao.quantization.fx.pattern_utils"]], "torch.ao.quantization.fx.prepare": [[2070, "module-torch.ao.quantization.fx.prepare"]], "torch.ao.quantization.fx.qconfig_mapping_utils": [[2070, "module-torch.ao.quantization.fx.qconfig_mapping_utils"]], "torch.ao.quantization.fx.quantize_handler": [[2070, "module-torch.ao.quantization.fx.quantize_handler"]], "torch.ao.quantization.fx.tracer": [[2070, "module-torch.ao.quantization.fx.tracer"]], "torch.ao.quantization.fx.utils": [[2070, "module-torch.ao.quantization.fx.utils"]], "torch.ao.quantization.observer": [[2070, "module-torch.ao.quantization.observer"]], "torch.ao.quantization.pt2e.duplicate_dq_pass": [[2070, "module-torch.ao.quantization.pt2e.duplicate_dq_pass"]], "torch.ao.quantization.pt2e.export_utils": [[2070, "module-torch.ao.quantization.pt2e.export_utils"]], "torch.ao.quantization.pt2e.graph_utils": [[2070, "module-torch.ao.quantization.pt2e.graph_utils"]], "torch.ao.quantization.pt2e.port_metadata_pass": [[2070, "module-torch.ao.quantization.pt2e.port_metadata_pass"]], "torch.ao.quantization.pt2e.prepare": [[2070, "module-torch.ao.quantization.pt2e.prepare"]], "torch.ao.quantization.pt2e.qat_utils": [[2070, "module-torch.ao.quantization.pt2e.qat_utils"]], "torch.ao.quantization.pt2e.representation.rewrite": [[2070, "module-torch.ao.quantization.pt2e.representation.rewrite"]], "torch.ao.quantization.pt2e.utils": [[2070, "module-torch.ao.quantization.pt2e.utils"]], "torch.ao.quantization.qconfig": [[2070, "module-torch.ao.quantization.qconfig"]], "torch.ao.quantization.qconfig_mapping": [[2070, "module-torch.ao.quantization.qconfig_mapping"]], 
"torch.ao.quantization.quant_type": [[2070, "module-torch.ao.quantization.quant_type"]], "torch.ao.quantization.quantization_mappings": [[2070, "module-torch.ao.quantization.quantization_mappings"]], "torch.ao.quantization.quantize_fx": [[2070, "module-torch.ao.quantization.quantize_fx"]], "torch.ao.quantization.quantize_jit": [[2070, "module-torch.ao.quantization.quantize_jit"]], "torch.ao.quantization.quantize_pt2e": [[2070, "module-torch.ao.quantization.quantize_pt2e"]], "torch.ao.quantization.quantizer.composable_quantizer": [[2070, "module-torch.ao.quantization.quantizer.composable_quantizer"]], "torch.ao.quantization.quantizer.embedding_quantizer": [[2070, "module-torch.ao.quantization.quantizer.embedding_quantizer"]], "torch.ao.quantization.quantizer.quantizer": [[2070, "module-torch.ao.quantization.quantizer.quantizer"]], "torch.ao.quantization.quantizer.utils": [[2070, "module-torch.ao.quantization.quantizer.utils"]], "torch.ao.quantization.quantizer.x86_inductor_quantizer": [[2070, "module-torch.ao.quantization.quantizer.x86_inductor_quantizer"]], "torch.ao.quantization.quantizer.xnnpack_quantizer": [[2070, "module-torch.ao.quantization.quantizer.xnnpack_quantizer"]], "torch.ao.quantization.quantizer.xnnpack_quantizer_utils": [[2070, "module-torch.ao.quantization.quantizer.xnnpack_quantizer_utils"]], "torch.ao.quantization.stubs": [[2070, "module-torch.ao.quantization.stubs"]], "torch.ao.quantization.utils": [[2070, "module-torch.ao.quantization.utils"]], "torch.nn.intrinsic.modules.fused": [[2070, "module-torch.nn.intrinsic.modules.fused"]], "torch.nn.intrinsic.qat.modules.conv_fused": [[2070, "module-torch.nn.intrinsic.qat.modules.conv_fused"]], "torch.nn.intrinsic.qat.modules.linear_fused": [[2070, "module-torch.nn.intrinsic.qat.modules.linear_fused"]], "torch.nn.intrinsic.qat.modules.linear_relu": [[2070, "module-torch.nn.intrinsic.qat.modules.linear_relu"]], "torch.nn.intrinsic.quantized.dynamic.modules.linear_relu": [[2070, "module-torch.nn.intrinsic.quantized.dynamic.modules.linear_relu"]], "torch.nn.intrinsic.quantized.modules.bn_relu": [[2070, "module-torch.nn.intrinsic.quantized.modules.bn_relu"]], "torch.nn.intrinsic.quantized.modules.conv_relu": [[2070, "module-torch.nn.intrinsic.quantized.modules.conv_relu"]], "torch.nn.intrinsic.quantized.modules.linear_relu": [[2070, "module-torch.nn.intrinsic.quantized.modules.linear_relu"]], "torch.nn.qat.dynamic.modules.linear": [[2070, "module-torch.nn.qat.dynamic.modules.linear"]], "torch.nn.qat.modules.conv": [[2070, "module-torch.nn.qat.modules.conv"]], "torch.nn.qat.modules.embedding_ops": [[2070, "module-torch.nn.qat.modules.embedding_ops"]], "torch.nn.qat.modules.linear": [[2070, "module-torch.nn.qat.modules.linear"]], "torch.nn.quantizable.modules.activation": [[2070, "module-torch.nn.quantizable.modules.activation"]], "torch.nn.quantizable.modules.rnn": [[2070, "module-torch.nn.quantizable.modules.rnn"]], "torch.nn.quantized.dynamic.modules.conv": [[2070, "module-torch.nn.quantized.dynamic.modules.conv"]], "torch.nn.quantized.dynamic.modules.linear": [[2070, "module-torch.nn.quantized.dynamic.modules.linear"]], "torch.nn.quantized.dynamic.modules.rnn": [[2070, "module-torch.nn.quantized.dynamic.modules.rnn"]], "torch.nn.quantized.functional": [[2070, "module-torch.nn.quantized.functional"]], "torch.nn.quantized.modules.activation": [[2070, "module-torch.nn.quantized.modules.activation"]], "torch.nn.quantized.modules.batchnorm": [[2070, "module-torch.nn.quantized.modules.batchnorm"]], "torch.nn.quantized.modules.conv": 
[[2070, "module-torch.nn.quantized.modules.conv"]], "torch.nn.quantized.modules.dropout": [[2070, "module-torch.nn.quantized.modules.dropout"]], "torch.nn.quantized.modules.embedding_ops": [[2070, "module-torch.nn.quantized.modules.embedding_ops"]], "torch.nn.quantized.modules.functional_modules": [[2070, "module-torch.nn.quantized.modules.functional_modules"]], "torch.nn.quantized.modules.linear": [[2070, "module-torch.nn.quantized.modules.linear"]], "torch.nn.quantized.modules.normalization": [[2070, "module-torch.nn.quantized.modules.normalization"]], "torch.nn.quantized.modules.rnn": [[2070, "module-torch.nn.quantized.modules.rnn"]], "torch.nn.quantized.modules.utils": [[2070, "module-torch.nn.quantized.modules.utils"]], "torch.quantization.fake_quantize": [[2070, "module-torch.quantization.fake_quantize"]], "torch.quantization.fuse_modules": [[2070, "module-torch.quantization.fuse_modules"]], "torch.quantization.fuser_method_mappings": [[2070, "module-torch.quantization.fuser_method_mappings"]], "torch.quantization.fx.convert": [[2070, "module-torch.quantization.fx.convert"]], "torch.quantization.fx.fuse": [[2070, "module-torch.quantization.fx.fuse"]], "torch.quantization.fx.fusion_patterns": [[2070, "module-torch.quantization.fx.fusion_patterns"]], "torch.quantization.fx.graph_module": [[2070, "module-torch.quantization.fx.graph_module"]], "torch.quantization.fx.match_utils": [[2070, "module-torch.quantization.fx.match_utils"]], "torch.quantization.fx.pattern_utils": [[2070, "module-torch.quantization.fx.pattern_utils"]], "torch.quantization.fx.prepare": [[2070, "module-torch.quantization.fx.prepare"]], "torch.quantization.fx.quantization_patterns": [[2070, "module-torch.quantization.fx.quantization_patterns"]], "torch.quantization.fx.quantization_types": [[2070, "module-torch.quantization.fx.quantization_types"]], "torch.quantization.fx.utils": [[2070, "module-torch.quantization.fx.utils"]], "torch.quantization.observer": [[2070, "module-torch.quantization.observer"]], "torch.quantization.qconfig": [[2070, "module-torch.quantization.qconfig"]], "torch.quantization.quant_type": [[2070, "module-torch.quantization.quant_type"]], "torch.quantization.quantization_mappings": [[2070, "module-torch.quantization.quantization_mappings"]], "torch.quantization.quantize": [[2070, "module-torch.quantization.quantize"]], "torch.quantization.quantize_fx": [[2070, "module-torch.quantization.quantize_fx"]], "torch.quantization.quantize_jit": [[2070, "module-torch.quantization.quantize_jit"]], "torch.quantization.stubs": [[2070, "module-torch.quantization.stubs"]], "torch.quantization.utils": [[2070, "module-torch.quantization.utils"]], "torch.ao.nn.intrinsic": [[2073, "module-torch.ao.nn.intrinsic"]], "torch.ao.nn.intrinsic.modules": [[2073, "module-torch.ao.nn.intrinsic.modules"]], "torch.ao.nn.intrinsic.qat": [[2073, "module-torch.ao.nn.intrinsic.qat"]], "torch.ao.nn.intrinsic.qat.modules": [[2073, "module-torch.ao.nn.intrinsic.qat.modules"]], "torch.ao.nn.intrinsic.quantized": [[2073, "module-torch.ao.nn.intrinsic.quantized"]], "torch.ao.nn.intrinsic.quantized.dynamic": [[2073, "module-torch.ao.nn.intrinsic.quantized.dynamic"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules": [[2073, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules"]], "torch.ao.nn.intrinsic.quantized.modules": [[2073, "module-torch.ao.nn.intrinsic.quantized.modules"]], "torch.ao.nn.qat": [[2073, "module-torch.ao.nn.qat"]], "torch.ao.nn.qat.dynamic": [[2073, "module-torch.ao.nn.qat.dynamic"]], 
"torch.ao.nn.qat.dynamic.modules": [[2073, "module-torch.ao.nn.qat.dynamic.modules"]], "torch.ao.nn.qat.modules": [[2073, "module-torch.ao.nn.qat.modules"]], "torch.ao.nn.quantized.dynamic": [[2073, "module-torch.ao.nn.quantized.dynamic"]], "torch.ao.nn.quantized.dynamic.modules": [[2073, "module-torch.ao.nn.quantized.dynamic.modules"]], "torch.ao.nn.quantized.functional": [[2073, "module-torch.ao.nn.quantized.functional"]], "torch.ao.nn.quantized.modules": [[2073, "module-torch.ao.nn.quantized.modules"]], "torch.ao.quantization.pt2e": [[2073, "module-torch.ao.quantization.pt2e"]], "torch.ao.quantization.pt2e.generate_numeric_debug_handle": [[2073, "module-torch.ao.quantization.pt2e.generate_numeric_debug_handle"]], "torch.ao.quantization.pt2e.representation": [[2073, "module-torch.ao.quantization.pt2e.representation"]], "torch.ao.quantization.quantizer": [[2073, "module-torch.ao.quantization.quantizer"]], "torch.nn.intrinsic": [[2073, "module-torch.nn.intrinsic"]], "torch.nn.intrinsic.modules": [[2073, "module-torch.nn.intrinsic.modules"]], "torch.nn.intrinsic.qat": [[2073, "module-torch.nn.intrinsic.qat"]], "torch.nn.intrinsic.qat.modules": [[2073, "module-torch.nn.intrinsic.qat.modules"]], "torch.nn.intrinsic.quantized": [[2073, "module-torch.nn.intrinsic.quantized"]], "torch.nn.intrinsic.quantized.dynamic": [[2073, "module-torch.nn.intrinsic.quantized.dynamic"]], "torch.nn.intrinsic.quantized.dynamic.modules": [[2073, "module-torch.nn.intrinsic.quantized.dynamic.modules"]], "torch.nn.intrinsic.quantized.modules": [[2073, "module-torch.nn.intrinsic.quantized.modules"]], "torch.nn.qat": [[2073, "module-torch.nn.qat"]], "torch.nn.qat.dynamic": [[2073, "module-torch.nn.qat.dynamic"]], "torch.nn.qat.dynamic.modules": [[2073, "module-torch.nn.qat.dynamic.modules"]], "torch.nn.qat.modules": [[2073, "module-torch.nn.qat.modules"]], "torch.nn.quantizable": [[2073, "module-torch.nn.quantizable"]], "torch.nn.quantizable.modules": [[2073, "module-torch.nn.quantizable.modules"]], "torch.nn.quantized": [[2073, "module-torch.nn.quantized"]], "torch.nn.quantized.dynamic": [[2073, "module-torch.nn.quantized.dynamic"]], "torch.nn.quantized.dynamic.modules": [[2073, "module-torch.nn.quantized.dynamic.modules"]], "torch.nn.quantized.modules": [[2073, "module-torch.nn.quantized.modules"]], "torch.quantization": [[2073, "module-torch.quantization"]], "torch.quantization.fx": [[2073, "module-torch.quantization.fx"]], "fork_rng() (in module torch.random)": [[2074, "torch.random.fork_rng"]], "get_rng_state() (in module torch.random)": [[2074, "torch.random.get_rng_state"]], "initial_seed() (in module torch.random)": [[2074, "torch.random.initial_seed"]], "manual_seed() (in module torch.random)": [[2074, "torch.random.manual_seed"]], "seed() (in module torch.random)": [[2074, "torch.random.seed"]], "set_rng_state() (in module torch.random)": [[2074, "torch.random.set_rng_state"]], "torch.random": [[2074, "module-torch.random"]], "backendtype (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.BackendType"]], "pyrref (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.PyRRef"]], "remotemodule (class in torch.distributed.nn.api.remote_module)": [[2075, "torch.distributed.nn.api.remote_module.RemoteModule"]], "rpcbackendoptions (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.RpcBackendOptions"]], "tensorpiperpcbackendoptions (class in torch.distributed.rpc)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions"]], "workerinfo (class in torch.distributed.rpc)": 
[[2075, "torch.distributed.rpc.WorkerInfo"]], "async_execution() (in module torch.distributed.rpc.functions)": [[2075, "torch.distributed.rpc.functions.async_execution"]], "backward() (in module torch.distributed.autograd)": [[2075, "torch.distributed.autograd.backward"]], "backward() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.backward"]], "confirmed_by_owner() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.confirmed_by_owner"]], "context (class in torch.distributed.autograd)": [[2075, "torch.distributed.autograd.context"]], "device_maps (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.device_maps"]], "devices (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.devices"]], "get_gradients() (in module torch.distributed.autograd)": [[2075, "torch.distributed.autograd.get_gradients"]], "get_module_rref() (torch.distributed.nn.api.remote_module.remotemodule method)": [[2075, "torch.distributed.nn.api.remote_module.RemoteModule.get_module_rref"]], "get_worker_info() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.get_worker_info"]], "id (torch.distributed.rpc.workerinfo property)": [[2075, "torch.distributed.rpc.WorkerInfo.id"]], "init_method (torch.distributed.rpc.rpcbackendoptions property)": [[2075, "torch.distributed.rpc.RpcBackendOptions.init_method"]], "init_method (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.init_method"]], "init_rpc() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.init_rpc"]], "is_owner() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.is_owner"]], "local_value() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.local_value"]], "name (torch.distributed.rpc.workerinfo property)": [[2075, "torch.distributed.rpc.WorkerInfo.name"]], "num_worker_threads (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.num_worker_threads"]], "owner() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.owner"]], "owner_name() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.owner_name"]], "remote() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.remote"]], "remote() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.remote"]], "remote_parameters() (torch.distributed.nn.api.remote_module.remotemodule method)": [[2075, "torch.distributed.nn.api.remote_module.RemoteModule.remote_parameters"]], "rpc_async() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.rpc_async"]], "rpc_async() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.rpc_async"]], "rpc_sync() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.rpc_sync"]], "rpc_sync() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.rpc_sync"]], "rpc_timeout (torch.distributed.rpc.rpcbackendoptions property)": [[2075, "torch.distributed.rpc.RpcBackendOptions.rpc_timeout"]], "rpc_timeout (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.rpc_timeout"]], "set_device_map() (torch.distributed.rpc.tensorpiperpcbackendoptions method)": [[2075, 
"torch.distributed.rpc.TensorPipeRpcBackendOptions.set_device_map"]], "set_devices() (torch.distributed.rpc.tensorpiperpcbackendoptions method)": [[2075, "torch.distributed.rpc.TensorPipeRpcBackendOptions.set_devices"]], "shutdown() (in module torch.distributed.rpc)": [[2075, "torch.distributed.rpc.shutdown"]], "to_here() (torch.distributed.rpc.pyrref method)": [[2075, "torch.distributed.rpc.PyRRef.to_here"]], "torch.distributed.autograd": [[2075, "module-torch.distributed.autograd"]], "torch.distributed.rpc": [[2075, "module-torch.distributed.rpc"]], "torch.signal": [[2078, "module-torch.signal"]], "torch.signal.windows": [[2078, "module-torch.signal.windows"]], "size (class in torch)": [[2079, "torch.Size"]], "count() (torch.size method)": [[2079, "torch.Size.count"]], "index() (torch.size method)": [[2079, "torch.Size.index"]], "numel() (torch.size method)": [[2079, "torch.Size.numel"]], "torch.sparse": [[2080, "module-torch.sparse"]], "airy_ai() (in module torch.special)": [[2081, "torch.special.airy_ai"]], "bessel_j0() (in module torch.special)": [[2081, "torch.special.bessel_j0"]], "bessel_j1() (in module torch.special)": [[2081, "torch.special.bessel_j1"]], "digamma() (in module torch.special)": [[2081, "torch.special.digamma"]], "entr() (in module torch.special)": [[2081, "torch.special.entr"]], "erf() (in module torch.special)": [[2081, "torch.special.erf"]], "erfc() (in module torch.special)": [[2081, "torch.special.erfc"]], "erfcx() (in module torch.special)": [[2081, "torch.special.erfcx"]], "erfinv() (in module torch.special)": [[2081, "torch.special.erfinv"]], "exp2() (in module torch.special)": [[2081, "torch.special.exp2"]], "expit() (in module torch.special)": [[2081, "torch.special.expit"]], "expm1() (in module torch.special)": [[2081, "torch.special.expm1"]], "gammainc() (in module torch.special)": [[2081, "torch.special.gammainc"]], "gammaincc() (in module torch.special)": [[2081, "torch.special.gammaincc"]], "gammaln() (in module torch.special)": [[2081, "torch.special.gammaln"]], "i0() (in module torch.special)": [[2081, "torch.special.i0"]], "i0e() (in module torch.special)": [[2081, "torch.special.i0e"]], "i1() (in module torch.special)": [[2081, "torch.special.i1"]], "i1e() (in module torch.special)": [[2081, "torch.special.i1e"]], "log1p() (in module torch.special)": [[2081, "torch.special.log1p"]], "log_ndtr() (in module torch.special)": [[2081, "torch.special.log_ndtr"]], "log_softmax() (in module torch.special)": [[2081, "torch.special.log_softmax"]], "logit() (in module torch.special)": [[2081, "torch.special.logit"]], "logsumexp() (in module torch.special)": [[2081, "torch.special.logsumexp"]], "multigammaln() (in module torch.special)": [[2081, "torch.special.multigammaln"]], "ndtr() (in module torch.special)": [[2081, "torch.special.ndtr"]], "ndtri() (in module torch.special)": [[2081, "torch.special.ndtri"]], "polygamma() (in module torch.special)": [[2081, "torch.special.polygamma"]], "psi() (in module torch.special)": [[2081, "torch.special.psi"]], "round() (in module torch.special)": [[2081, "torch.special.round"]], "scaled_modified_bessel_k0() (in module torch.special)": [[2081, "torch.special.scaled_modified_bessel_k0"]], "scaled_modified_bessel_k1() (in module torch.special)": [[2081, "torch.special.scaled_modified_bessel_k1"]], "sinc() (in module torch.special)": [[2081, "torch.special.sinc"]], "softmax() (in module torch.special)": [[2081, "torch.special.softmax"]], "spherical_bessel_j0() (in module torch.special)": [[2081, 
"torch.special.spherical_bessel_j0"]], "torch.special": [[2081, "module-torch.special"]], "xlog1py() (in module torch.special)": [[2081, "torch.special.xlog1py"]], "xlogy() (in module torch.special)": [[2081, "torch.special.xlogy"]], "zeta() (in module torch.special)": [[2081, "torch.special.zeta"]], "bfloat16storage (class in torch)": [[2082, "torch.BFloat16Storage"]], "boolstorage (class in torch)": [[2082, "torch.BoolStorage"]], "bytestorage (class in torch)": [[2082, "torch.ByteStorage"]], "charstorage (class in torch)": [[2082, "torch.CharStorage"]], "complexdoublestorage (class in torch)": [[2082, "torch.ComplexDoubleStorage"]], "complexfloatstorage (class in torch)": [[2082, "torch.ComplexFloatStorage"]], "doublestorage (class in torch)": [[2082, "torch.DoubleStorage"]], "floatstorage (class in torch)": [[2082, "torch.FloatStorage"]], "halfstorage (class in torch)": [[2082, "torch.HalfStorage"]], "intstorage (class in torch)": [[2082, "torch.IntStorage"]], "longstorage (class in torch)": [[2082, "torch.LongStorage"]], "qint32storage (class in torch)": [[2082, "torch.QInt32Storage"]], "qint8storage (class in torch)": [[2082, "torch.QInt8Storage"]], "quint2x4storage (class in torch)": [[2082, "torch.QUInt2x4Storage"]], "quint4x2storage (class in torch)": [[2082, "torch.QUInt4x2Storage"]], "quint8storage (class in torch)": [[2082, "torch.QUInt8Storage"]], "shortstorage (class in torch)": [[2082, "torch.ShortStorage"]], "typedstorage (class in torch)": [[2082, "torch.TypedStorage"]], "untypedstorage (class in torch)": [[2082, "torch.UntypedStorage"]], "bfloat16() (torch.typedstorage method)": [[2082, "torch.TypedStorage.bfloat16"]], "bfloat16() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.bfloat16"]], "bool() (torch.typedstorage method)": [[2082, "torch.TypedStorage.bool"]], "bool() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.bool"]], "byte() (torch.typedstorage method)": [[2082, "torch.TypedStorage.byte"]], "byte() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.byte"]], "byteswap() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.byteswap"]], "char() (torch.typedstorage method)": [[2082, "torch.TypedStorage.char"]], "char() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.char"]], "clone() (torch.typedstorage method)": [[2082, "torch.TypedStorage.clone"]], "clone() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.clone"]], "complex_double() (torch.typedstorage method)": [[2082, "torch.TypedStorage.complex_double"]], "complex_double() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.complex_double"]], "complex_float() (torch.typedstorage method)": [[2082, "torch.TypedStorage.complex_float"]], "complex_float() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.complex_float"]], "copy_() (torch.typedstorage method)": [[2082, "torch.TypedStorage.copy_"]], "copy_() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.copy_"]], "cpu() (torch.typedstorage method)": [[2082, "torch.TypedStorage.cpu"]], "cpu() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.cpu"]], "cuda() (torch.typedstorage method)": [[2082, "torch.TypedStorage.cuda"]], "cuda() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.cuda"]], "data_ptr() (torch.typedstorage method)": [[2082, "torch.TypedStorage.data_ptr"]], "data_ptr() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.data_ptr"]], "device (torch.typedstorage property)": [[2082, "torch.TypedStorage.device"]], 
"device (torch.untypedstorage attribute)": [[2082, "torch.UntypedStorage.device"]], "double() (torch.typedstorage method)": [[2082, "torch.TypedStorage.double"]], "double() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.double"]], "dtype (torch.bfloat16storage attribute)": [[2082, "torch.BFloat16Storage.dtype"]], "dtype (torch.boolstorage attribute)": [[2082, "torch.BoolStorage.dtype"]], "dtype (torch.bytestorage attribute)": [[2082, "torch.ByteStorage.dtype"]], "dtype (torch.charstorage attribute)": [[2082, "torch.CharStorage.dtype"]], "dtype (torch.complexdoublestorage attribute)": [[2082, "torch.ComplexDoubleStorage.dtype"]], "dtype (torch.complexfloatstorage attribute)": [[2082, "torch.ComplexFloatStorage.dtype"]], "dtype (torch.doublestorage attribute)": [[2082, "torch.DoubleStorage.dtype"]], "dtype (torch.floatstorage attribute)": [[2082, "torch.FloatStorage.dtype"]], "dtype (torch.halfstorage attribute)": [[2082, "torch.HalfStorage.dtype"]], "dtype (torch.intstorage attribute)": [[2082, "torch.IntStorage.dtype"]], "dtype (torch.longstorage attribute)": [[2082, "torch.LongStorage.dtype"]], "dtype (torch.qint32storage attribute)": [[2082, "torch.QInt32Storage.dtype"]], "dtype (torch.qint8storage attribute)": [[2082, "torch.QInt8Storage.dtype"]], "dtype (torch.quint2x4storage attribute)": [[2082, "torch.QUInt2x4Storage.dtype"]], "dtype (torch.quint4x2storage attribute)": [[2082, "torch.QUInt4x2Storage.dtype"]], "dtype (torch.quint8storage attribute)": [[2082, "torch.QUInt8Storage.dtype"]], "dtype (torch.shortstorage attribute)": [[2082, "torch.ShortStorage.dtype"]], "dtype (torch.typedstorage attribute)": [[2082, "torch.TypedStorage.dtype"]], "element_size() (torch.typedstorage method)": [[2082, "torch.TypedStorage.element_size"]], "element_size() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.element_size"]], "filename (torch.typedstorage property)": [[2082, "torch.TypedStorage.filename"]], "filename (torch.untypedstorage property)": [[2082, "torch.UntypedStorage.filename"]], "fill_() (torch.typedstorage method)": [[2082, "torch.TypedStorage.fill_"]], "fill_() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.fill_"]], "float() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float"]], "float() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float"]], "float8_e4m3fn() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float8_e4m3fn"]], "float8_e4m3fn() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float8_e4m3fn"]], "float8_e4m3fnuz() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float8_e4m3fnuz"]], "float8_e4m3fnuz() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float8_e4m3fnuz"]], "float8_e5m2() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float8_e5m2"]], "float8_e5m2() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float8_e5m2"]], "float8_e5m2fnuz() (torch.typedstorage method)": [[2082, "torch.TypedStorage.float8_e5m2fnuz"]], "float8_e5m2fnuz() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.float8_e5m2fnuz"]], "from_buffer() (torch.typedstorage class method)": [[2082, "torch.TypedStorage.from_buffer"]], "from_buffer() (torch.untypedstorage static method)": [[2082, "torch.UntypedStorage.from_buffer"]], "from_file() (torch.typedstorage class method)": [[2082, "torch.TypedStorage.from_file"]], "from_file() (torch.untypedstorage static method)": [[2082, "torch.UntypedStorage.from_file"]], "get_device() (torch.typedstorage 
method)": [[2082, "torch.TypedStorage.get_device"]], "get_device() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.get_device"]], "half() (torch.typedstorage method)": [[2082, "torch.TypedStorage.half"]], "half() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.half"]], "hpu() (torch.typedstorage method)": [[2082, "torch.TypedStorage.hpu"]], "hpu() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.hpu"]], "int() (torch.typedstorage method)": [[2082, "torch.TypedStorage.int"]], "int() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.int"]], "is_cuda (torch.typedstorage property)": [[2082, "torch.TypedStorage.is_cuda"]], "is_cuda (torch.untypedstorage property)": [[2082, "torch.UntypedStorage.is_cuda"]], "is_hpu (torch.typedstorage property)": [[2082, "torch.TypedStorage.is_hpu"]], "is_hpu (torch.untypedstorage property)": [[2082, "torch.UntypedStorage.is_hpu"]], "is_pinned() (torch.typedstorage method)": [[2082, "torch.TypedStorage.is_pinned"]], "is_pinned() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.is_pinned"]], "is_shared() (torch.typedstorage method)": [[2082, "torch.TypedStorage.is_shared"]], "is_shared() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.is_shared"]], "is_sparse (torch.typedstorage attribute)": [[2082, "torch.TypedStorage.is_sparse"]], "is_sparse (torch.untypedstorage attribute)": [[2082, "torch.UntypedStorage.is_sparse"]], "is_sparse_csr (torch.untypedstorage attribute)": [[2082, "torch.UntypedStorage.is_sparse_csr"]], "long() (torch.typedstorage method)": [[2082, "torch.TypedStorage.long"]], "long() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.long"]], "mps() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.mps"]], "nbytes() (torch.typedstorage method)": [[2082, "torch.TypedStorage.nbytes"]], "nbytes() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.nbytes"]], "new() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.new"]], "pickle_storage_type() (torch.typedstorage method)": [[2082, "torch.TypedStorage.pickle_storage_type"]], "pin_memory() (torch.typedstorage method)": [[2082, "torch.TypedStorage.pin_memory"]], "pin_memory() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.pin_memory"]], "resizable() (torch.typedstorage method)": [[2082, "torch.TypedStorage.resizable"]], "resizable() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.resizable"]], "resize_() (torch.typedstorage method)": [[2082, "torch.TypedStorage.resize_"]], "resize_() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.resize_"]], "share_memory_() (torch.typedstorage method)": [[2082, "torch.TypedStorage.share_memory_"]], "share_memory_() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.share_memory_"]], "short() (torch.typedstorage method)": [[2082, "torch.TypedStorage.short"]], "short() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.short"]], "size() (torch.typedstorage method)": [[2082, "torch.TypedStorage.size"]], "size() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.size"]], "to() (torch.typedstorage method)": [[2082, "torch.TypedStorage.to"]], "to() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.to"]], "tolist() (torch.typedstorage method)": [[2082, "torch.TypedStorage.tolist"]], "tolist() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.tolist"]], "type() (torch.typedstorage method)": [[2082, "torch.TypedStorage.type"]], "type() 
(torch.untypedstorage method)": [[2082, "torch.UntypedStorage.type"]], "untyped() (torch.typedstorage method)": [[2082, "torch.TypedStorage.untyped"]], "untyped() (torch.untypedstorage method)": [[2082, "torch.UntypedStorage.untyped"]], "device (class in torch)": [[2083, "torch.device"]], "dtype (class in torch)": [[2083, "torch.dtype"]], "layout (class in torch)": [[2083, "torch.layout"]], "memory_format (class in torch)": [[2083, "torch.memory_format"]], "summarywriter (class in torch.utils.tensorboard.writer)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter"]], "__init__() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.__init__"]], "add_audio() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_audio"]], "add_custom_scalars() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_custom_scalars"]], "add_embedding() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_embedding"]], "add_figure() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_figure"]], "add_graph() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_graph"]], "add_histogram() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_histogram"]], "add_hparams() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_hparams"]], "add_image() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_image"]], "add_images() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_images"]], "add_mesh() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_mesh"]], "add_pr_curve() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_pr_curve"]], "add_scalar() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_scalar"]], "add_scalars() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_scalars"]], "add_text() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_text"]], "add_video() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.add_video"]], "close() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.close"]], "flush() (torch.utils.tensorboard.writer.summarywriter method)": [[2085, "torch.utils.tensorboard.writer.SummaryWriter.flush"]], "torch.utils.tensorboard": [[2085, "module-torch.utils.tensorboard"]], "h (torch.tensor attribute)": [[2086, "torch.Tensor.H"]], "t (torch.tensor attribute)": [[2086, "torch.Tensor.T"]], "tensor (class in torch)": [[2086, "torch.Tensor"]], "__init__() (torch.tensor method)": [[2086, "torch.Tensor.__init__"]], "mh (torch.tensor attribute)": [[2086, "torch.Tensor.mH"]], "mt (torch.tensor attribute)": [[2086, "torch.Tensor.mT"]], "assert_allclose() (in module torch.testing)": 
[[2087, "torch.testing.assert_allclose"]], "assert_close() (in module torch.testing)": [[2087, "torch.testing.assert_close"]], "make_tensor() (in module torch.testing)": [[2087, "torch.testing.make_tensor"]], "torch.testing": [[2087, "module-torch.testing"]], "symbool (class in torch)": [[2089, "torch.SymBool"]], "symfloat (class in torch)": [[2089, "torch.SymFloat"]], "symint (class in torch)": [[2089, "torch.SymInt"]], "tag (class in torch)": [[2089, "torch.Tag"]], "default_generator (torch.torch attribute)": [[2089, "torch.torch.default_generator"]], "is_integer() (torch.symfloat method)": [[2089, "torch.SymFloat.is_integer"]], "name (torch.tag property)": [[2089, "torch.Tag.name"]], "torch": [[2089, "module-torch"]], "torch.contrib": [[2089, "module-torch.contrib"]], "torch.functional": [[2089, "module-torch.functional"]], "torch.quasirandom": [[2089, "module-torch.quasirandom"]], "torch.return_types": [[2089, "module-torch.return_types"]], "torch.serialization": [[2089, "module-torch.serialization"]], "torch.signal.windows.windows": [[2089, "module-torch.signal.windows.windows"]], "torch.sparse.semi_structured": [[2089, "module-torch.sparse.semi_structured"]], "torch.storage": [[2089, "module-torch.storage"]], "torch.torch_version": [[2089, "module-torch.torch_version"]], "torch.types": [[2089, "module-torch.types"]], "torch.utils.backcompat": [[2089, "module-torch.utils.backcompat"]], "torch.utils.hipify": [[2089, "module-torch.utils.hipify"]], "torch.utils.model_dump": [[2089, "module-torch.utils.model_dump"]], "torch.utils.viz": [[2089, "module-torch.utils.viz"]], "torch.version": [[2089, "module-torch.version"]], "logger (class in torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.Logger"]], "outputlogger (class in torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.OutputLogger"]], "shadow (class in torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.Shadow"]], "shadowlogger (class in torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.ShadowLogger"]], "add() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.add"]], "add_relu() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.add_relu"]], "add_scalar() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.add_scalar"]], "cat() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.cat"]], "compare_model_outputs() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.compare_model_outputs"]], "compare_model_stub() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.compare_model_stub"]], "compare_weights() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.compare_weights"]], "forward() (torch.ao.ns._numeric_suite.logger method)": [[2090, "torch.ao.ns._numeric_suite.Logger.forward"]], "forward() (torch.ao.ns._numeric_suite.outputlogger method)": [[2090, "torch.ao.ns._numeric_suite.OutputLogger.forward"]], "forward() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.forward"]], "forward() (torch.ao.ns._numeric_suite.shadowlogger method)": [[2090, "torch.ao.ns._numeric_suite.ShadowLogger.forward"]], "get_logger_dict() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.get_logger_dict"]], "get_matching_activations() (in module torch.ao.ns._numeric_suite)": [[2090, 
"torch.ao.ns._numeric_suite.get_matching_activations"]], "mul() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.mul"]], "mul_scalar() (torch.ao.ns._numeric_suite.shadow method)": [[2090, "torch.ao.ns._numeric_suite.Shadow.mul_scalar"]], "prepare_model_outputs() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.prepare_model_outputs"]], "prepare_model_with_stubs() (in module torch.ao.ns._numeric_suite)": [[2090, "torch.ao.ns._numeric_suite.prepare_model_with_stubs"]], "torch.ao.ns._numeric_suite": [[2090, "module-torch.ao.ns._numeric_suite"]], "nstracer (class in torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.NSTracer"]], "outputcomparisonlogger (class in torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger"]], "outputlogger (class in torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.OutputLogger"]], "add_loggers() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.add_loggers"]], "add_shadow_loggers() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.add_shadow_loggers"]], "compute_cosine_similarity() (in module torch.ao.ns.fx.utils)": [[2091, "torch.ao.ns.fx.utils.compute_cosine_similarity"]], "compute_normalized_l2_error() (in module torch.ao.ns.fx.utils)": [[2091, "torch.ao.ns.fx.utils.compute_normalized_l2_error"]], "compute_sqnr() (in module torch.ao.ns.fx.utils)": [[2091, "torch.ao.ns.fx.utils.compute_sqnr"]], "convert_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.convert_n_shadows_model"]], "extend_logger_results_with_comparison() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extend_logger_results_with_comparison"]], "extract_logger_info() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extract_logger_info"]], "extract_results_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extract_results_n_shadows_model"]], "extract_shadow_logger_info() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extract_shadow_logger_info"]], "extract_weights() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.extract_weights"]], "forward() (torch.ao.ns._numeric_suite_fx.outputcomparisonlogger method)": [[2091, "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger.forward"]], "forward() (torch.ao.ns._numeric_suite_fx.outputlogger method)": [[2091, "torch.ao.ns._numeric_suite_fx.OutputLogger.forward"]], "is_leaf_module() (torch.ao.ns._numeric_suite_fx.nstracer method)": [[2091, "torch.ao.ns._numeric_suite_fx.NSTracer.is_leaf_module"]], "loggers_set_enabled() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.loggers_set_enabled"]], "loggers_set_save_activations() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.loggers_set_save_activations"]], "prepare_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.prepare_n_shadows_model"]], "print_comparisons_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2091, "torch.ao.ns._numeric_suite_fx.print_comparisons_n_shadows_model"]], "torch.ao.ns._numeric_suite_fx": [[2091, "module-torch.ao.ns._numeric_suite_fx"]], "torch.compiler": [[2094, "module-torch.compiler"]], 
"get_ignored_functions() (in module torch.overrides)": [[2112, "torch.overrides.get_ignored_functions"]], "get_overridable_functions() (in module torch.overrides)": [[2112, "torch.overrides.get_overridable_functions"]], "get_testing_overrides() (in module torch.overrides)": [[2112, "torch.overrides.get_testing_overrides"]], "handle_torch_function() (in module torch.overrides)": [[2112, "torch.overrides.handle_torch_function"]], "has_torch_function() (in module torch.overrides)": [[2112, "torch.overrides.has_torch_function"]], "is_tensor_like() (in module torch.overrides)": [[2112, "torch.overrides.is_tensor_like"]], "is_tensor_method_or_property() (in module torch.overrides)": [[2112, "torch.overrides.is_tensor_method_or_property"]], "resolve_name() (in module torch.overrides)": [[2112, "torch.overrides.resolve_name"]], "torch.overrides": [[2112, "module-torch.overrides"]], "wrap_torch_function() (in module torch.overrides)": [[2112, "torch.overrides.wrap_torch_function"]], "_dump_snapshot() (in module torch.cuda.memory)": [[2113, "torch.cuda.memory._dump_snapshot"]], "_record_memory_history() (in module torch.cuda.memory)": [[2113, "torch.cuda.memory._record_memory_history"]], "_snapshot() (in module torch.cuda.memory)": [[2113, "torch.cuda.memory._snapshot"]], "torch.finfo (class in torch)": [[2116, "torch.torch.finfo"]], "torch.iinfo (class in torch)": [[2116, "torch.torch.iinfo"]], "torch.utils": [[2117, "module-torch.utils"]], "torch.utils.backend_registration": [[2117, "module-torch.utils.backend_registration"]], "torch.utils.benchmark.examples.blas_compare_setup": [[2117, "module-torch.utils.benchmark.examples.blas_compare_setup"]], "torch.utils.benchmark.examples.compare": [[2117, "module-torch.utils.benchmark.examples.compare"]], "torch.utils.benchmark.examples.fuzzer": [[2117, "module-torch.utils.benchmark.examples.fuzzer"]], "torch.utils.benchmark.examples.op_benchmark": [[2117, "module-torch.utils.benchmark.examples.op_benchmark"]], "torch.utils.benchmark.examples.simple_timeit": [[2117, "module-torch.utils.benchmark.examples.simple_timeit"]], "torch.utils.benchmark.examples.spectral_ops_fuzz_test": [[2117, "module-torch.utils.benchmark.examples.spectral_ops_fuzz_test"]], "torch.utils.benchmark.op_fuzzers.binary": [[2117, "module-torch.utils.benchmark.op_fuzzers.binary"]], "torch.utils.benchmark.op_fuzzers.sparse_binary": [[2117, "module-torch.utils.benchmark.op_fuzzers.sparse_binary"]], "torch.utils.benchmark.op_fuzzers.sparse_unary": [[2117, "module-torch.utils.benchmark.op_fuzzers.sparse_unary"]], "torch.utils.benchmark.op_fuzzers.spectral": [[2117, "module-torch.utils.benchmark.op_fuzzers.spectral"]], "torch.utils.benchmark.op_fuzzers.unary": [[2117, "module-torch.utils.benchmark.op_fuzzers.unary"]], "torch.utils.benchmark.utils.common": [[2117, "module-torch.utils.benchmark.utils.common"]], "torch.utils.benchmark.utils.compare": [[2117, "module-torch.utils.benchmark.utils.compare"]], "torch.utils.benchmark.utils.compile": [[2117, "module-torch.utils.benchmark.utils.compile"]], "torch.utils.benchmark.utils.cpp_jit": [[2117, "module-torch.utils.benchmark.utils.cpp_jit"]], "torch.utils.benchmark.utils.fuzzer": [[2117, "module-torch.utils.benchmark.utils.fuzzer"]], "torch.utils.benchmark.utils.sparse_fuzzer": [[2117, "module-torch.utils.benchmark.utils.sparse_fuzzer"]], "torch.utils.benchmark.utils.timer": [[2117, "module-torch.utils.benchmark.utils.timer"]], "torch.utils.benchmark.utils.valgrind_wrapper.timer_interface": [[2117, 
"module-torch.utils.benchmark.utils.valgrind_wrapper.timer_interface"]], "torch.utils.bundled_inputs": [[2117, "module-torch.utils.bundled_inputs"]], "torch.utils.checkpoint": [[2117, "module-torch.utils.checkpoint"]], "torch.utils.collect_env": [[2117, "module-torch.utils.collect_env"]], "torch.utils.cpp_backtrace": [[2117, "module-torch.utils.cpp_backtrace"]], "torch.utils.cpp_extension": [[2117, "module-torch.utils.cpp_extension"]], "torch.utils.data.backward_compatibility": [[2117, "module-torch.utils.data.backward_compatibility"]], "torch.utils.data.dataloader": [[2117, "module-torch.utils.data.dataloader"]], "torch.utils.data.datapipes.dataframe.dataframe_wrapper": [[2117, "module-torch.utils.data.datapipes.dataframe.dataframe_wrapper"]], "torch.utils.data.datapipes.dataframe.dataframes": [[2117, "module-torch.utils.data.datapipes.dataframe.dataframes"]], "torch.utils.data.datapipes.dataframe.datapipes": [[2117, "module-torch.utils.data.datapipes.dataframe.datapipes"]], "torch.utils.data.datapipes.dataframe.structures": [[2117, "module-torch.utils.data.datapipes.dataframe.structures"]], "torch.utils.data.datapipes.datapipe": [[2117, "module-torch.utils.data.datapipes.datapipe"]], "torch.utils.data.datapipes.gen_pyi": [[2117, "module-torch.utils.data.datapipes.gen_pyi"]], "torch.utils.data.datapipes.iter.callable": [[2117, "module-torch.utils.data.datapipes.iter.callable"]], "torch.utils.data.datapipes.iter.combinatorics": [[2117, "module-torch.utils.data.datapipes.iter.combinatorics"]], "torch.utils.data.datapipes.iter.combining": [[2117, "module-torch.utils.data.datapipes.iter.combining"]], "torch.utils.data.datapipes.iter.filelister": [[2117, "module-torch.utils.data.datapipes.iter.filelister"]], "torch.utils.data.datapipes.iter.fileopener": [[2117, "module-torch.utils.data.datapipes.iter.fileopener"]], "torch.utils.data.datapipes.iter.grouping": [[2117, "module-torch.utils.data.datapipes.iter.grouping"]], "torch.utils.data.datapipes.iter.routeddecoder": [[2117, "module-torch.utils.data.datapipes.iter.routeddecoder"]], "torch.utils.data.datapipes.iter.selecting": [[2117, "module-torch.utils.data.datapipes.iter.selecting"]], "torch.utils.data.datapipes.iter.sharding": [[2117, "module-torch.utils.data.datapipes.iter.sharding"]], "torch.utils.data.datapipes.iter.streamreader": [[2117, "module-torch.utils.data.datapipes.iter.streamreader"]], "torch.utils.data.datapipes.iter.utils": [[2117, "module-torch.utils.data.datapipes.iter.utils"]], "torch.utils.data.datapipes.map.callable": [[2117, "module-torch.utils.data.datapipes.map.callable"]], "torch.utils.data.datapipes.map.combinatorics": [[2117, "module-torch.utils.data.datapipes.map.combinatorics"]], "torch.utils.data.datapipes.map.combining": [[2117, "module-torch.utils.data.datapipes.map.combining"]], "torch.utils.data.datapipes.map.grouping": [[2117, "module-torch.utils.data.datapipes.map.grouping"]], "torch.utils.data.datapipes.map.utils": [[2117, "module-torch.utils.data.datapipes.map.utils"]], "torch.utils.data.datapipes.utils.common": [[2117, "module-torch.utils.data.datapipes.utils.common"]], "torch.utils.data.datapipes.utils.decoder": [[2117, "module-torch.utils.data.datapipes.utils.decoder"]], "torch.utils.data.datapipes.utils.snapshot": [[2117, "module-torch.utils.data.datapipes.utils.snapshot"]], "torch.utils.data.dataset": [[2117, "module-torch.utils.data.dataset"]], "torch.utils.data.distributed": [[2117, "module-torch.utils.data.distributed"]], "torch.utils.data.graph": [[2117, "module-torch.utils.data.graph"]], 
"torch.utils.data.graph_settings": [[2117, "module-torch.utils.data.graph_settings"]], "torch.utils.data.sampler": [[2117, "module-torch.utils.data.sampler"]], "torch.utils.dlpack": [[2117, "module-torch.utils.dlpack"]], "torch.utils.file_baton": [[2117, "module-torch.utils.file_baton"]], "torch.utils.flop_counter": [[2117, "module-torch.utils.flop_counter"]], "torch.utils.hipify.constants": [[2117, "module-torch.utils.hipify.constants"]], "torch.utils.hipify.cuda_to_hip_mappings": [[2117, "module-torch.utils.hipify.cuda_to_hip_mappings"]], "torch.utils.hipify.hipify_python": [[2117, "module-torch.utils.hipify.hipify_python"]], "torch.utils.hipify.version": [[2117, "module-torch.utils.hipify.version"]], "torch.utils.hooks": [[2117, "module-torch.utils.hooks"]], "torch.utils.jit.log_extract": [[2117, "module-torch.utils.jit.log_extract"]], "torch.utils.mkldnn": [[2117, "module-torch.utils.mkldnn"]], "torch.utils.mobile_optimizer": [[2117, "module-torch.utils.mobile_optimizer"]], "torch.utils.show_pickle": [[2117, "module-torch.utils.show_pickle"]], "torch.utils.tensorboard.summary": [[2117, "module-torch.utils.tensorboard.summary"]], "torch.utils.tensorboard.writer": [[2117, "module-torch.utils.tensorboard.writer"]], "torch.utils.throughput_benchmark": [[2117, "module-torch.utils.throughput_benchmark"]], "torch.utils.weak": [[2117, "module-torch.utils.weak"]], "torch.xpu": [[2118, "module-torch.xpu"]], "torch.xpu.random": [[2118, "module-torch.xpu.random"]], "torch.xpu.streams": [[2118, "module-torch.xpu.streams"]]}}) \ No newline at end of file