From a665d1c6b9d5236895821370d1a55a97f26bd487 Mon Sep 17 00:00:00 2001 From: pytorchbot Date: Tue, 9 Jul 2024 16:05:54 +0000 Subject: [PATCH] Generate Python docs from pytorch/pytorch@e4ee3be4063b7c430974252fdf7db42273388d86 --- 2.4/_images/RReLU.png | Bin 34249 -> 34174 bytes 2.4/_sources/generated/exportdb/index.rst.txt | 12 +- .../generated/exportdb/python.builtin.rst.txt | 2 +- .../generated/exportdb/python.closure.rst.txt | 2 +- .../generated/exportdb/torch.cond.rst.txt | 2 +- .../exportdb/torch.dynamic-shape.rst.txt | 4 +- .../exportdb/torch.dynamic-value.rst.txt | 4 +- .../exportdb/torch.escape-hatch.rst.txt | 4 +- .../generated/exportdb/torch.map.rst.txt | 2 +- .../generated/exportdb/torch.operator.rst.txt | 2 +- 2.4/generated/exportdb/index.html | 12 +- 2.4/generated/exportdb/python.builtin.html | 2 +- 2.4/generated/exportdb/python.closure.html | 2 +- 2.4/generated/exportdb/torch.cond.html | 2 +- .../exportdb/torch.dynamic-shape.html | 4 +- .../exportdb/torch.dynamic-value.html | 4 +- .../exportdb/torch.escape-hatch.html | 4 +- 2.4/generated/exportdb/torch.map.html | 2 +- 2.4/generated/exportdb/torch.operator.html | 2 +- 2.4/quantization-backend-configuration.html | 180 +++++++++--------- 2.4/searchindex.js | 2 +- 21 files changed, 125 insertions(+), 125 deletions(-) diff --git a/2.4/_images/RReLU.png b/2.4/_images/RReLU.png index 268793c23ab392798de97537bdda6ca82729296c..79beced029a6bb5a48871849e3e1f4c56c59102b 100644 GIT binary patch literal 34174 zcmd?RWmr~S)GfS05D=tGDS<~p8YyWcRRlytIwS<8yGv0TECi(#R3xOkC6x~81}SNf zu5WI9@;&eO@BBMn7ng|leY5vkbIm!%m}Bmso5~8OPn|u5Ajs)!S7mP@2&ONBV2t6O zgr5j>4E=z=gdOEH9o1~^IlA1nH${~0I@(&=I9lE}I_GR^?{MG7nvY9>i;v@+xuc`4 zg9tab)!#4RvavVgZecI(gb%^9y{hGaAVhc3FN{x;$@dY&fBu^6mD{edi^Fc3x4VyU zR<`2&Ficd5ZxYMiwEu#=HDuqyxo%PXqcgFxqTs%1f1=e#&U;!~1HF_@cUrz%7G7%l za7h3gTdodUjuM;VO3&S;or5bk2%0foektZ#_ZE+6n;a_Ub7DIh=*A0NLx z86nx+^ier3n37YwFH==k_O^w563O`Z`1a`0q1p1so7fykTvY&rH?Ev&^U=`IPg(&z_GL4_;?TlWSI_)(S}j@maYTelnAQH?a^>d{$p_zA z)8vL9V&k{xnyLJn>zWB3m?F5XUf0B38T$3>RabZSMB|2V&2o3A&-5|Rp7{woOdF0> z`K@ehJWgco%Q01q+&FdW)Jl(z&E47dcer?XewmrvA}&jzQ%zAdCYQ9*W1c@ZT0KnJ zUS4mLER>7VQ`Xi#d*Md3uYB0qFKul^M>}mkw)@+w+TxpaR0g&Az4{?<-=2eC%OoOl zHuqkO^qo7e>fGn%=886YbUI3$EVg%d6IGMsXFHN`Bz9++aTz&!hO_RTcygNNNqhTE zS6A0_92^9b^b++bIpeupzSUmo6JL*!7A3)g;1^5lBrY;57WUbMRvEABeu6)`K^bF1{#+8b8)h>v!gHDF?o7< zJ$w1me`~~pXw>IOd}4BPd~xyg+XV;j-Cy@ZX@m${VuS)Z%xga?#qc=&yiF_NNzGlc zowy@RMoK!iyc~hU;7RP~=l8X_xfZ_lr5ATMYzU{D{hso|ZFsoUCCQ-DJ6h1{teChs z2^Cd9l)gJ&<^CFefz{y2xD3nK@gLt_d~Ir~p>rAXgHRzSCpSFU-JlkC=O1yKp&N4Q z)tjCP4GEDkG5JG2g8sCRk5BAnJ4}cJ`ii|J(S^Z6qId7!A&tS2Xx#kx`pmo|5!+FO z?r?uyMeJ9*Okc4B3+#-kcPq0(xKwi2c)Z8bm9CFUcH`C9p1X6&MN1{~E3l=;+biSE zdGvg7aHZ4DuOF$W-5EbRTs}(mQt47>`a1quxA+!9T03R`Gc_TpCw70d) z4I$0(ih`0dI4}@*e14w&x}qX?>1mfCyMUF=&58xVSEb`mXdsLF*X8u;dnmcOxdoM# zm9b^2yXk$tbT%xpsAxNw!3WlQd})d6UUT$a{g0`sv%Q%KxcK;Kb~;nj)4>iit$2+7 znx)R4k#pXAi-m0SrlzJrwY9ZuP030z7m&aoKTOth$XJwdk%>qR{o-nU;V8u@uI{>JQ z6zwgQT6nfX{J?#}UUp|_Xlt{|_HNjGYpk#uLVV`TneL3J zgaqz4{P&sQ)ZdwOxYSi*CIX49Bh#SrtU*Wy;S#2^5p3-VgjUo!z1~S&T)d;)!{x`+ z)c8VwF6EXN?8fXsK0B-pF)^_(oCqcXfqeY|-3|={ugCG?9(PJDigmrcCGNGwhpnvG zLQ;9_G~c6H=9*n+9UWs+aR%Z(+jS^RXaq9vN{i5FbkyQdFr%X2oYXt`pqxrtov zOZnDFNLR9oqQbAno!wogbLZaH_u>%{$lkbdBXUY;*pUo5ck$x4pOb6r>#DbI-I`qQ zeZNoa*I1fVVA+4mv~a^0<3xLfmj~P?PPxa{lc*>vCT8Z<%|RO-uZ1@P7AtT_-`?-~ z2;ou>8T507kIT|f5TvK1yERYD+uvfq$u%(5=*ex9z?W0CT^YM_BVO#x;o;%zV4+Ri zZiV%*h~x4I9h@~GZi}vur_odaCv+k>K6l?XI5sv``?Jo?n>Qz&2?z=0A*Vh#h|PEN zD%X#IRg96CnMxmgd*uWcHg z$Uu`e9BwX%lW6>`g@py}jp5#2DkvLBqw}Dvher~W1j>1+2{oQRdiYO&n%ujG$;wX5 
z^7A1%n=@~5vJz3`Ay!K-^;11$v7rTF;RhJ9_`AG;kivTSaGY{=S0NUQ!oZ-mUum5k zU~>5K@XpXyO8NX$fHwrzfCUq6jIj2gw4xeNZklxFwdjNaRJ8Y#@c2838@o9e?u7E^ zO28d$JiWNdoe13&QHQ_)NezkeB+SeafBZzR2zPgPlm+dj4`TU*No!MSAc~>;7~Tbn z;}e7M2qh)sCHt_X#p)LnC3zpO9dC`Xl#uZ8hHF$Sw!Ao1%_e5|wP6berb-(zh;M`! z1%9fIi+eHVaYaLl50EE_P+5Ri%`{^ao$cNLhe2J4V`|QK;y0)YCc&T#52$X?B>*NT z2}D|)&~lkp@+!|XZKI*(yj%nZB{X~lGnLZ~_q#8p5cO;sT)f22U{^p%ilX4zgG;@s zwzd}YVFybx{3`_XNx`-{0a-Io0Pm#eLI3~& diff --git a/2.4/_sources/generated/exportdb/index.rst.txt b/2.4/_sources/generated/exportdb/index.rst.txt index 9620cfdaad..b4fa23a697 100644 --- a/2.4/_sources/generated/exportdb/index.rst.txt +++ b/2.4/_sources/generated/exportdb/index.rst.txt @@ -473,7 +473,7 @@ cond_closed_over_variable .. note:: - Tags: :doc:`python.closure `, :doc:`torch.cond ` + Tags: :doc:`torch.cond `, :doc:`python.closure ` Support Level: SUPPORTED @@ -675,7 +675,7 @@ constrain_as_size_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED @@ -739,7 +739,7 @@ constrain_as_value_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED @@ -1046,7 +1046,7 @@ dynamic_shape_map .. note:: - Tags: :doc:`torch.map `, :doc:`torch.dynamic-shape ` + Tags: :doc:`torch.dynamic-shape `, :doc:`torch.map ` Support Level: SUPPORTED @@ -1900,7 +1900,7 @@ dynamic_shape_round .. note:: - Tags: :doc:`python.builtin `, :doc:`torch.dynamic-shape ` + Tags: :doc:`torch.dynamic-shape `, :doc:`python.builtin ` Support Level: NOT_SUPPORTED_YET @@ -2044,6 +2044,6 @@ Result: .. code-block:: - Unsupported: torch.* op returned non-Tensor int call_function + Unsupported: torch.* op returned non-Tensor int call_function diff --git a/2.4/_sources/generated/exportdb/python.builtin.rst.txt b/2.4/_sources/generated/exportdb/python.builtin.rst.txt index 413152bb43..8f8952f216 100644 --- a/2.4/_sources/generated/exportdb/python.builtin.rst.txt +++ b/2.4/_sources/generated/exportdb/python.builtin.rst.txt @@ -5,7 +5,7 @@ dynamic_shape_round .. note:: - Tags: :doc:`python.builtin `, :doc:`torch.dynamic-shape ` + Tags: :doc:`torch.dynamic-shape `, :doc:`python.builtin ` Support Level: NOT_SUPPORTED_YET diff --git a/2.4/_sources/generated/exportdb/python.closure.rst.txt b/2.4/_sources/generated/exportdb/python.closure.rst.txt index e09f9b7b3a..d0fa8bf94b 100644 --- a/2.4/_sources/generated/exportdb/python.closure.rst.txt +++ b/2.4/_sources/generated/exportdb/python.closure.rst.txt @@ -5,7 +5,7 @@ cond_closed_over_variable .. note:: - Tags: :doc:`python.closure `, :doc:`torch.cond ` + Tags: :doc:`torch.cond `, :doc:`python.closure ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.cond.rst.txt b/2.4/_sources/generated/exportdb/torch.cond.rst.txt index 2588b6d896..f181581d14 100644 --- a/2.4/_sources/generated/exportdb/torch.cond.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.cond.rst.txt @@ -273,7 +273,7 @@ cond_closed_over_variable .. 
note:: - Tags: :doc:`python.closure `, :doc:`torch.cond ` + Tags: :doc:`torch.cond `, :doc:`python.closure ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.dynamic-shape.rst.txt b/2.4/_sources/generated/exportdb/torch.dynamic-shape.rst.txt index 39a3dbbb92..0dfbca618d 100644 --- a/2.4/_sources/generated/exportdb/torch.dynamic-shape.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.dynamic-shape.rst.txt @@ -505,7 +505,7 @@ dynamic_shape_map .. note:: - Tags: :doc:`torch.map `, :doc:`torch.dynamic-shape ` + Tags: :doc:`torch.dynamic-shape `, :doc:`torch.map ` Support Level: SUPPORTED @@ -561,7 +561,7 @@ dynamic_shape_round .. note:: - Tags: :doc:`python.builtin `, :doc:`torch.dynamic-shape ` + Tags: :doc:`torch.dynamic-shape `, :doc:`python.builtin ` Support Level: NOT_SUPPORTED_YET diff --git a/2.4/_sources/generated/exportdb/torch.dynamic-value.rst.txt b/2.4/_sources/generated/exportdb/torch.dynamic-value.rst.txt index c05ca55dc2..2e8ca3dd8c 100644 --- a/2.4/_sources/generated/exportdb/torch.dynamic-value.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.dynamic-value.rst.txt @@ -5,7 +5,7 @@ constrain_as_size_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED @@ -69,7 +69,7 @@ constrain_as_value_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.escape-hatch.rst.txt b/2.4/_sources/generated/exportdb/torch.escape-hatch.rst.txt index 8db8d539b9..13733bb1b1 100644 --- a/2.4/_sources/generated/exportdb/torch.escape-hatch.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.escape-hatch.rst.txt @@ -55,7 +55,7 @@ constrain_as_size_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED @@ -119,7 +119,7 @@ constrain_as_value_example .. note:: - Tags: :doc:`torch.escape-hatch `, :doc:`torch.dynamic-value ` + Tags: :doc:`torch.dynamic-value `, :doc:`torch.escape-hatch ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.map.rst.txt b/2.4/_sources/generated/exportdb/torch.map.rst.txt index b235a5bb5e..e81e92a4f8 100644 --- a/2.4/_sources/generated/exportdb/torch.map.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.map.rst.txt @@ -5,7 +5,7 @@ dynamic_shape_map .. note:: - Tags: :doc:`torch.map `, :doc:`torch.dynamic-shape ` + Tags: :doc:`torch.dynamic-shape `, :doc:`torch.map ` Support Level: SUPPORTED diff --git a/2.4/_sources/generated/exportdb/torch.operator.rst.txt b/2.4/_sources/generated/exportdb/torch.operator.rst.txt index 76429e6587..27709f7a75 100644 --- a/2.4/_sources/generated/exportdb/torch.operator.rst.txt +++ b/2.4/_sources/generated/exportdb/torch.operator.rst.txt @@ -31,4 +31,4 @@ Result: .. code-block:: - Unsupported: torch.* op returned non-Tensor int call_function + Unsupported: torch.* op returned non-Tensor int call_function diff --git a/2.4/generated/exportdb/index.html b/2.4/generated/exportdb/index.html index 0b69a31b79..2c3fec5a31 100644 --- a/2.4/generated/exportdb/index.html +++ b/2.4/generated/exportdb/index.html @@ -930,7 +930,7 @@

 cond_branch_nonlocal_variables¶
 Note
-Tags: python.closure, torch.cond
+Tags: torch.cond, python.closure
 Support Level: SUPPORTED
 Original source code:

@@ -1108,7 +1108,7 @@

 cond_predicate¶
 Note
-Tags: torch.escape-hatch, torch.dynamic-value
+Tags: torch.dynamic-value, torch.escape-hatch
 Support Level: SUPPORTED
 Original source code:

@@ -1164,7 +1164,7 @@

 constrain_as_size_example¶
 Note
-Tags: torch.escape-hatch, torch.dynamic-value
+Tags: torch.dynamic-value, torch.escape-hatch
 Support Level: SUPPORTED
 Original source code:

@@ -1423,7 +1423,7 @@

 dynamic_shape_if_guard
 dynamic_shape_map¶
 Note
-Tags: torch.map, torch.dynamic-shape
+Tags: torch.dynamic-shape, torch.map
 Support Level: SUPPORTED
 Original source code:

@@ -2142,7 +2142,7 @@

 Not Supported Yet¶
 Note
-Tags: python.builtin, torch.dynamic-shape
+Tags: torch.dynamic-shape, python.builtin
 Support Level: NOT_SUPPORTED_YET
 Original source code:

@@ -2258,7 +2258,7 @@

 torch_sym_min
-Unsupported: torch.* op returned non-Tensor int call_function <function sym_min at 0x7fa2943d9b80>
+Unsupported: torch.* op returned non-Tensor int call_function <function sym_min at 0x7fe9d8bd4b80>
 
diff --git a/2.4/generated/exportdb/python.builtin.html b/2.4/generated/exportdb/python.builtin.html index abb52598c7..80068b24c4 100644 --- a/2.4/generated/exportdb/python.builtin.html +++ b/2.4/generated/exportdb/python.builtin.html @@ -520,7 +520,7 @@

python.builtin¶

Note

-

Tags: python.builtin, torch.dynamic-shape

+

Tags: torch.dynamic-shape, python.builtin

Support Level: NOT_SUPPORTED_YET

Original source code:

diff --git a/2.4/generated/exportdb/python.closure.html b/2.4/generated/exportdb/python.closure.html index d5de34236e..70d9fb68be 100644 --- a/2.4/generated/exportdb/python.closure.html +++ b/2.4/generated/exportdb/python.closure.html @@ -520,7 +520,7 @@

python.closure¶

Note

-

Tags: python.closure, torch.cond

+

Tags: torch.cond, python.closure

Support Level: SUPPORTED

Original source code:

diff --git a/2.4/generated/exportdb/torch.cond.html b/2.4/generated/exportdb/torch.cond.html index edc80f09ef..8470ebf5f1 100644 --- a/2.4/generated/exportdb/torch.cond.html +++ b/2.4/generated/exportdb/torch.cond.html @@ -764,7 +764,7 @@

cond_branch_nonlocal_variables¶

Note

-

Tags: python.closure, torch.cond

+

Tags: torch.cond, python.closure

Support Level: SUPPORTED

Original source code:

diff --git a/2.4/generated/exportdb/torch.dynamic-shape.html b/2.4/generated/exportdb/torch.dynamic-shape.html index 8b8ac9d429..fd30a66f3f 100644 --- a/2.4/generated/exportdb/torch.dynamic-shape.html +++ b/2.4/generated/exportdb/torch.dynamic-shape.html @@ -964,7 +964,7 @@

dynamic_shape_if_guard

dynamic_shape_map¶

Note

-

Tags: torch.map, torch.dynamic-shape

+

Tags: torch.dynamic-shape, torch.map

Support Level: SUPPORTED

Original source code:

@@ -1012,7 +1012,7 @@

dynamic_shape_map¶

Note

-

Tags: python.builtin, torch.dynamic-shape

+

Tags: torch.dynamic-shape, python.builtin

Support Level: NOT_SUPPORTED_YET

Original source code:

diff --git a/2.4/generated/exportdb/torch.dynamic-value.html b/2.4/generated/exportdb/torch.dynamic-value.html index e1dab2c20d..40eb09f7a4 100644 --- a/2.4/generated/exportdb/torch.dynamic-value.html +++ b/2.4/generated/exportdb/torch.dynamic-value.html @@ -520,7 +520,7 @@

torch.dynamic-value¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

@@ -576,7 +576,7 @@

constrain_as_size_example¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

diff --git a/2.4/generated/exportdb/torch.escape-hatch.html b/2.4/generated/exportdb/torch.escape-hatch.html index 74654611ca..341d2ec086 100644 --- a/2.4/generated/exportdb/torch.escape-hatch.html +++ b/2.4/generated/exportdb/torch.escape-hatch.html @@ -562,7 +562,7 @@

assume_constant_result

constrain_as_size_example¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

@@ -618,7 +618,7 @@

constrain_as_size_example¶

Note

-

Tags: torch.escape-hatch, torch.dynamic-value

+

Tags: torch.dynamic-value, torch.escape-hatch

Support Level: SUPPORTED

Original source code:

diff --git a/2.4/generated/exportdb/torch.map.html b/2.4/generated/exportdb/torch.map.html index ba12da133f..c4a545fcba 100644 --- a/2.4/generated/exportdb/torch.map.html +++ b/2.4/generated/exportdb/torch.map.html @@ -520,7 +520,7 @@

torch.map¶

Note

-

Tags: torch.map, torch.dynamic-shape

+

Tags: torch.dynamic-shape, torch.map

Support Level: SUPPORTED

Original source code:

diff --git a/2.4/generated/exportdb/torch.operator.html b/2.4/generated/exportdb/torch.operator.html index 5bccf3836a..72a9d82325 100644 --- a/2.4/generated/exportdb/torch.operator.html +++ b/2.4/generated/exportdb/torch.operator.html @@ -539,7 +539,7 @@

torch_sym_min
-
Unsupported: torch.* op returned non-Tensor int call_function <function sym_min at 0x7fa2943d9b80>
+
Unsupported: torch.* op returned non-Tensor int call_function <function sym_min at 0x7fe9d8bd4b80>
 
diff --git a/2.4/quantization-backend-configuration.html b/2.4/quantization-backend-configuration.html index 885b9ffbd7..191513c347 100644 --- a/2.4/quantization-backend-configuration.html +++ b/2.4/quantization-backend-configuration.html @@ -535,7 +535,7 @@

Default values for native configurations
"autocast (class in torch.cuda.amp)": [[0, "torch.cuda.amp.autocast"]], "custom_bwd() (in module torch.amp)": [[0, "torch.amp.custom_bwd"]], "custom_bwd() (in module torch.cuda.amp)": [[0, "torch.cuda.amp.custom_bwd"]], "custom_fwd() (in module torch.amp)": [[0, "torch.amp.custom_fwd"]], "custom_fwd() (in module torch.cuda.amp)": [[0, "torch.cuda.amp.custom_fwd"]], "is_autocast_available() (in module torch.amp.autocast_mode)": [[0, "torch.amp.autocast_mode.is_autocast_available"]], "module": [[0, "module-torch.amp"], [0, "module-torch.amp.autocast_mode"], [0, "module-torch.amp.grad_scaler"], [0, "module-torch.cpu.amp"], [0, "module-torch.cpu.amp.autocast_mode"], [0, "module-torch.cpu.amp.grad_scaler"], [0, "module-torch.cuda.amp"], [0, "module-torch.cuda.amp.autocast_mode"], [0, "module-torch.cuda.amp.common"], [0, "module-torch.cuda.amp.grad_scaler"], [1, "module-torch.autograd"], [1, "module-torch.autograd.anomaly_mode"], [1, "module-torch.autograd.forward_ad"], [1, "module-torch.autograd.function"], [1, "module-torch.autograd.functional"], [1, "module-torch.autograd.grad_mode"], [1, "module-torch.autograd.gradcheck"], [1, "module-torch.autograd.graph"], [1, "module-torch.autograd.profiler"], [1, "module-torch.autograd.profiler_legacy"], [1, "module-torch.autograd.profiler_util"], [1, "module-torch.autograd.variable"], [2, "module-torch.backends"], [2, "module-torch.backends.cpu"], [2, "module-torch.backends.cuda"], [2, "module-torch.backends.cudnn"], [2, "module-torch.backends.cudnn.rnn"], [2, "module-torch.backends.mha"], [2, "module-torch.backends.mkl"], [2, "module-torch.backends.mkldnn"], [2, "module-torch.backends.mps"], [2, "module-torch.backends.nnpack"], [2, "module-torch.backends.openmp"], [2, "module-torch.backends.opt_einsum"], [2, "module-torch.backends.quantized"], [2, "module-torch.backends.xeon"], [2, "module-torch.backends.xeon.run_cpu"], [2, "module-torch.backends.xnnpack"], [3, "module-torch.utils.benchmark"], [3, "module-torch.utils.benchmark.examples"], [3, "module-torch.utils.benchmark.op_fuzzers"], [3, "module-torch.utils.benchmark.utils"], [3, "module-torch.utils.benchmark.utils.valgrind_wrapper"], [4, "module-torch.utils.bottleneck"], [13, "module-torch.__config__"], [16, "module-torch.cpu"], [17, "module-torch.cuda"], [17, "module-torch.cuda.comm"], [17, "module-torch.cuda.error"], [17, "module-torch.cuda.graphs"], [17, "module-torch.cuda.jiterator"], [17, "module-torch.cuda.memory"], [17, "module-torch.cuda.nccl"], [17, "module-torch.cuda.nvtx"], [17, "module-torch.cuda.profiler"], [17, "module-torch.cuda.random"], [17, "module-torch.cuda.sparse"], [17, "module-torch.cuda.streams"], [18, "module-torch.cuda._sanitizer"], [19, "module-torch.cuda.tunable"], [23, "module-torch.utils.data"], [23, "module-torch.utils.data.datapipes"], [23, "module-torch.utils.data.datapipes.dataframe"], [23, "module-torch.utils.data.datapipes.iter"], [23, "module-torch.utils.data.datapipes.map"], [23, "module-torch.utils.data.datapipes.utils"], [27, "module-torch.utils.deterministic"], [28, "module-torch.distributed"], [28, "module-torch.distributed.algorithms"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.default_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks"], [28, 
"module-torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.post_localSGD_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks"], [28, "module-torch.distributed.algorithms.join"], [28, "module-torch.distributed.algorithms.model_averaging"], [28, "module-torch.distributed.algorithms.model_averaging.averagers"], [28, "module-torch.distributed.algorithms.model_averaging.hierarchical_model_averager"], [28, "module-torch.distributed.algorithms.model_averaging.utils"], [28, "module-torch.distributed.argparse_util"], [28, "module-torch.distributed.c10d_logger"], [28, "module-torch.distributed.checkpoint.api"], [28, "module-torch.distributed.checkpoint.default_planner"], [28, "module-torch.distributed.checkpoint.filesystem"], [28, "module-torch.distributed.checkpoint.metadata"], [28, "module-torch.distributed.checkpoint.optimizer"], [28, "module-torch.distributed.checkpoint.planner"], [28, "module-torch.distributed.checkpoint.planner_helpers"], [28, "module-torch.distributed.checkpoint.resharding"], [28, "module-torch.distributed.checkpoint.state_dict"], [28, "module-torch.distributed.checkpoint.state_dict_loader"], [28, "module-torch.distributed.checkpoint.state_dict_saver"], [28, "module-torch.distributed.checkpoint.stateful"], [28, "module-torch.distributed.checkpoint.storage"], [28, "module-torch.distributed.checkpoint.utils"], [28, "module-torch.distributed.collective_utils"], [28, "module-torch.distributed.constants"], [28, "module-torch.distributed.device_mesh"], [28, "module-torch.distributed.distributed_c10d"], [28, "module-torch.distributed.elastic"], [28, "module-torch.distributed.elastic.agent.server.api"], [28, "module-torch.distributed.elastic.agent.server.local_elastic_agent"], [28, "module-torch.distributed.elastic.events.api"], [28, "module-torch.distributed.elastic.events.handlers"], [28, "module-torch.distributed.elastic.metrics.api"], [28, "module-torch.distributed.elastic.multiprocessing.api"], [28, "module-torch.distributed.elastic.multiprocessing.errors.error_handler"], [28, "module-torch.distributed.elastic.multiprocessing.errors.handlers"], [28, "module-torch.distributed.elastic.multiprocessing.redirects"], [28, "module-torch.distributed.elastic.multiprocessing.tail_log"], [28, "module-torch.distributed.elastic.rendezvous.api"], [28, "module-torch.distributed.elastic.rendezvous.c10d_rendezvous_backend"], [28, "module-torch.distributed.elastic.rendezvous.dynamic_rendezvous"], [28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous"], [28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous_backend"], [28, "module-torch.distributed.elastic.rendezvous.etcd_server"], [28, "module-torch.distributed.elastic.rendezvous.etcd_store"], [28, "module-torch.distributed.elastic.rendezvous.static_tcp_rendezvous"], [28, "module-torch.distributed.elastic.rendezvous.utils"], [28, "module-torch.distributed.elastic.timer.api"], [28, "module-torch.distributed.elastic.timer.file_based_local_timer"], [28, "module-torch.distributed.elastic.timer.local_timer"], [28, "module-torch.distributed.elastic.utils"], [28, "module-torch.distributed.elastic.utils.api"], [28, "module-torch.distributed.elastic.utils.data"], [28, "module-torch.distributed.elastic.utils.data.cycling_iterator"], [28, "module-torch.distributed.elastic.utils.data.elastic_distributed_sampler"], [28, 
"module-torch.distributed.elastic.utils.distributed"], [28, "module-torch.distributed.elastic.utils.log_level"], [28, "module-torch.distributed.elastic.utils.logging"], [28, "module-torch.distributed.elastic.utils.store"], [28, "module-torch.distributed.fsdp.api"], [28, "module-torch.distributed.fsdp.fully_sharded_data_parallel"], [28, "module-torch.distributed.fsdp.sharded_grad_scaler"], [28, "module-torch.distributed.fsdp.wrap"], [28, "module-torch.distributed.launch"], [28, "module-torch.distributed.launcher"], [28, "module-torch.distributed.launcher.api"], [28, "module-torch.distributed.logging_handlers"], [28, "module-torch.distributed.nn"], [28, "module-torch.distributed.nn.api"], [28, "module-torch.distributed.nn.api.remote_module"], [28, "module-torch.distributed.nn.functional"], [28, "module-torch.distributed.nn.jit"], [28, "module-torch.distributed.nn.jit.instantiator"], [28, "module-torch.distributed.nn.jit.templates"], [28, "module-torch.distributed.nn.jit.templates.remote_module_template"], [28, "module-torch.distributed.optim.apply_optimizer_in_backward"], [28, "module-torch.distributed.optim.functional_adadelta"], [28, "module-torch.distributed.optim.functional_adagrad"], [28, "module-torch.distributed.optim.functional_adam"], [28, "module-torch.distributed.optim.functional_adamax"], [28, "module-torch.distributed.optim.functional_adamw"], [28, "module-torch.distributed.optim.functional_rmsprop"], [28, "module-torch.distributed.optim.functional_rprop"], [28, "module-torch.distributed.optim.functional_sgd"], [28, "module-torch.distributed.optim.named_optimizer"], [28, "module-torch.distributed.optim.optimizer"], [28, "module-torch.distributed.optim.post_localSGD_optimizer"], [28, "module-torch.distributed.optim.utils"], [28, "module-torch.distributed.optim.zero_redundancy_optimizer"], [28, "module-torch.distributed.remote_device"], [28, "module-torch.distributed.rendezvous"], [28, "module-torch.distributed.rpc.api"], [28, "module-torch.distributed.rpc.backend_registry"], [28, "module-torch.distributed.rpc.constants"], [28, "module-torch.distributed.rpc.functions"], [28, "module-torch.distributed.rpc.internal"], [28, "module-torch.distributed.rpc.options"], [28, "module-torch.distributed.rpc.rref_proxy"], [28, "module-torch.distributed.rpc.server_process_global_profiler"], [28, "module-torch.distributed.tensor"], [28, "module-torch.distributed.tensor.parallel.api"], [28, "module-torch.distributed.tensor.parallel.ddp"], [28, "module-torch.distributed.tensor.parallel.fsdp"], [28, "module-torch.distributed.tensor.parallel.input_reshard"], [28, "module-torch.distributed.tensor.parallel.loss"], [28, "module-torch.distributed.tensor.parallel.style"], [28, "module-torch.distributed.utils"], [30, "module-torch.distributed.checkpoint"], [30, "module-torch.distributed.checkpoint.format_utils"], [30, "module-torch.distributed.checkpoint.logger"], [30, "module-torch.distributed.checkpoint.logging_handlers"], [30, "module-torch.distributed.checkpoint.staging"], [32, "module-torch.distributed.optim"], [33, "module-torch.distributed.pipelining"], [33, "module-torch.distributed.pipelining.microbatch"], [33, "module-torch.distributed.pipelining.schedules"], [33, "module-torch.distributed.pipelining.stage"], [34, "module-torch.distributed.tensor.parallel"], [35, "module-torch.distributions"], [35, "module-torch.distributions.bernoulli"], [35, "module-torch.distributions.beta"], [35, "module-torch.distributions.binomial"], [35, "module-torch.distributions.categorical"], [35, 
"module-torch.distributions.cauchy"], [35, "module-torch.distributions.chi2"], [35, "module-torch.distributions.constraint_registry"], [35, "module-torch.distributions.constraints"], [35, "module-torch.distributions.continuous_bernoulli"], [35, "module-torch.distributions.dirichlet"], [35, "module-torch.distributions.distribution"], [35, "module-torch.distributions.exp_family"], [35, "module-torch.distributions.exponential"], [35, "module-torch.distributions.fishersnedecor"], [35, "module-torch.distributions.gamma"], [35, "module-torch.distributions.geometric"], [35, "module-torch.distributions.gumbel"], [35, "module-torch.distributions.half_cauchy"], [35, "module-torch.distributions.half_normal"], [35, "module-torch.distributions.independent"], [35, "module-torch.distributions.inverse_gamma"], [35, "module-torch.distributions.kl"], [35, "module-torch.distributions.kumaraswamy"], [35, "module-torch.distributions.laplace"], [35, "module-torch.distributions.lkj_cholesky"], [35, "module-torch.distributions.log_normal"], [35, "module-torch.distributions.logistic_normal"], [35, "module-torch.distributions.lowrank_multivariate_normal"], [35, "module-torch.distributions.mixture_same_family"], [35, "module-torch.distributions.multinomial"], [35, "module-torch.distributions.multivariate_normal"], [35, "module-torch.distributions.negative_binomial"], [35, "module-torch.distributions.normal"], [35, "module-torch.distributions.one_hot_categorical"], [35, "module-torch.distributions.pareto"], [35, "module-torch.distributions.poisson"], [35, "module-torch.distributions.relaxed_bernoulli"], [35, "module-torch.distributions.relaxed_categorical"], [35, "module-torch.distributions.studentT"], [35, "module-torch.distributions.transformed_distribution"], [35, "module-torch.distributions.transforms"], [35, "module-torch.distributions.uniform"], [35, "module-torch.distributions.utils"], [35, "module-torch.distributions.von_mises"], [35, "module-torch.distributions.weibull"], [35, "module-torch.distributions.wishart"], [37, "module-torch.distributed.elastic.agent"], [37, "module-torch.distributed.elastic.agent.server"], [37, "module-torch.distributed.elastic.agent.server.health_check_server"], [38, "module-torch.distributed.elastic.control_plane"], [40, "module-torch.distributed.elastic.multiprocessing.errors"], [41, "module-torch.distributed.elastic.events"], [44, "module-torch.distributed.elastic.metrics"], [45, "module-torch.distributed.elastic.multiprocessing"], [47, "module-torch.distributed.elastic.rendezvous"], [47, "module-torch.distributed.elastic.rendezvous.registry"], [48, "module-torch.distributed.run"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.handlers"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler"], [50, "module-torch.distributed.elastic.timer"], [50, "module-torch.distributed.elastic.timer.debug_info_logging"], [52, "module-torch.export"], [52, "module-torch.export.custom_obj"], [52, "module-torch.export.dynamic_shapes"], [52, "module-torch.export.exported_program"], [52, "module-torch.export.graph_signature"], [52, "module-torch.export.unflatten"], [54, "module-torch.fft"], [55, "module-torch.distributed.fsdp"], [57, "module-torch.func"], [62, "module-torch.__future__"], [63, "module-torch.futures"], [64, "module-torch.fx"], [64, "module-torch.fx.annotate"], [64, "module-torch.fx.config"], [64, "module-torch.fx.experimental"], [64, 
"module-torch.fx.experimental.accelerator_partitioner"], [64, "module-torch.fx.experimental.const_fold"], [64, "module-torch.fx.experimental.debug"], [64, "module-torch.fx.experimental.graph_gradual_typechecker"], [64, "module-torch.fx.experimental.merge_matmul"], [64, "module-torch.fx.experimental.meta_tracer"], [64, "module-torch.fx.experimental.migrate_gradual_types"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint_generator"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint_transformation"], [64, "module-torch.fx.experimental.migrate_gradual_types.operation"], [64, "module-torch.fx.experimental.migrate_gradual_types.transform_to_z3"], [64, "module-torch.fx.experimental.migrate_gradual_types.util"], [64, "module-torch.fx.experimental.migrate_gradual_types.z3_types"], [64, "module-torch.fx.experimental.normalize"], [64, "module-torch.fx.experimental.optimization"], [64, "module-torch.fx.experimental.partitioner_utils"], [64, "module-torch.fx.experimental.proxy_tensor"], [64, "module-torch.fx.experimental.recording"], [64, "module-torch.fx.experimental.refinement_types"], [64, "module-torch.fx.experimental.rewriter"], [64, "module-torch.fx.experimental.schema_type_annotation"], [64, "module-torch.fx.experimental.sym_node"], [64, "module-torch.fx.experimental.unification"], [64, "module-torch.fx.experimental.unification.core"], [64, "module-torch.fx.experimental.unification.dispatch"], [64, "module-torch.fx.experimental.unification.match"], [64, "module-torch.fx.experimental.unification.more"], [64, "module-torch.fx.experimental.unification.multipledispatch"], [64, "module-torch.fx.experimental.unification.multipledispatch.conflict"], [64, "module-torch.fx.experimental.unification.multipledispatch.core"], [64, "module-torch.fx.experimental.unification.multipledispatch.dispatcher"], [64, "module-torch.fx.experimental.unification.multipledispatch.utils"], [64, "module-torch.fx.experimental.unification.multipledispatch.variadic"], [64, "module-torch.fx.experimental.unification.unification_tools"], [64, "module-torch.fx.experimental.unification.utils"], [64, "module-torch.fx.experimental.unification.variable"], [64, "module-torch.fx.experimental.unify_refinements"], [64, "module-torch.fx.experimental.validator"], [64, "module-torch.fx.graph"], [64, "module-torch.fx.graph_module"], [64, "module-torch.fx.immutable_collections"], [64, "module-torch.fx.interpreter"], [64, "module-torch.fx.node"], [64, "module-torch.fx.operator_schemas"], [64, "module-torch.fx.passes"], [64, "module-torch.fx.passes.annotate_getitem_nodes"], [64, "module-torch.fx.passes.backends"], [64, "module-torch.fx.passes.backends.cudagraphs"], [64, "module-torch.fx.passes.dialect"], [64, "module-torch.fx.passes.dialect.common"], [64, "module-torch.fx.passes.dialect.common.cse_pass"], [64, "module-torch.fx.passes.fake_tensor_prop"], [64, "module-torch.fx.passes.graph_drawer"], [64, "module-torch.fx.passes.graph_manipulation"], [64, "module-torch.fx.passes.graph_transform_observer"], [64, "module-torch.fx.passes.infra"], [64, "module-torch.fx.passes.infra.partitioner"], [64, "module-torch.fx.passes.infra.pass_base"], [64, "module-torch.fx.passes.infra.pass_manager"], [64, "module-torch.fx.passes.net_min_base"], [64, "module-torch.fx.passes.operator_support"], [64, "module-torch.fx.passes.param_fetch"], [64, "module-torch.fx.passes.pass_manager"], [64, "module-torch.fx.passes.reinplace"], [64, 
"module-torch.fx.passes.runtime_assert"], [64, "module-torch.fx.passes.shape_prop"], [64, "module-torch.fx.passes.split_module"], [64, "module-torch.fx.passes.split_utils"], [64, "module-torch.fx.passes.splitter_base"], [64, "module-torch.fx.passes.tests"], [64, "module-torch.fx.passes.tests.test_pass_manager"], [64, "module-torch.fx.passes.tools_common"], [64, "module-torch.fx.passes.utils"], [64, "module-torch.fx.passes.utils.common"], [64, "module-torch.fx.passes.utils.fuser_utils"], [64, "module-torch.fx.passes.utils.matcher_utils"], [64, "module-torch.fx.passes.utils.matcher_with_name_node_map_utils"], [64, "module-torch.fx.passes.utils.source_matcher_utils"], [64, "module-torch.fx.proxy"], [64, "module-torch.fx.subgraph_rewriter"], [64, "module-torch.fx.tensor_type"], [64, "module-torch.fx.traceback"], [65, "module-torch.fx.experimental.symbolic_shapes"], [2012, "module-torch.hub"], [2014, "module-torch.jit"], [2014, "module-torch.jit.annotations"], [2014, "module-torch.jit.frontend"], [2014, "module-torch.jit.generate_bytecode"], [2014, "module-torch.jit.mobile"], [2014, "module-torch.jit.quantized"], [2015, "module-torch.jit.supported_ops"], [2019, "module-torch.jit.unsupported_tensor_ops"], [2020, "module-torch.utils.jit"], [2021, "module-torch.library"], [2022, "module-torch.linalg"], [2023, "module-torch._logging"], [2024, "module-torch.masked"], [2024, "module-torch.masked.maskedtensor"], [2024, "module-torch.masked.maskedtensor.binary"], [2024, "module-torch.masked.maskedtensor.core"], [2024, "module-torch.masked.maskedtensor.creation"], [2024, "module-torch.masked.maskedtensor.passthrough"], [2024, "module-torch.masked.maskedtensor.reductions"], [2024, "module-torch.masked.maskedtensor.unary"], [2028, "module-torch.utils.model_zoo"], [2029, "module-torch.utils.module_tracker"], [2030, "module-torch.monitor"], [2031, "module-torch.mps"], [2031, "module-torch.mps.event"], [2031, "module-torch.mps.profiler"], [2032, "module-torch.mtia"], [2033, "module-torch.multiprocessing"], [2033, "module-torch.multiprocessing.pool"], [2033, "module-torch.multiprocessing.queue"], [2033, "module-torch.multiprocessing.reductions"], [2033, "module-torch.multiprocessing.spawn"], [2036, "module-torch.nested"], [2037, "module-torch.nn"], [2037, "module-torch.nn.backends"], [2037, "module-torch.nn.backends.thnn"], [2037, "module-torch.nn.common_types"], [2037, "module-torch.nn.cpp"], [2037, "module-torch.nn.functional"], [2037, "module-torch.nn.grad"], [2037, "module-torch.nn.init"], [2037, "module-torch.nn.modules"], [2037, "module-torch.nn.modules.activation"], [2037, "module-torch.nn.modules.adaptive"], [2037, "module-torch.nn.modules.batchnorm"], [2037, "module-torch.nn.modules.channelshuffle"], [2037, "module-torch.nn.modules.container"], [2037, "module-torch.nn.modules.conv"], [2037, "module-torch.nn.modules.distance"], [2037, "module-torch.nn.modules.dropout"], [2037, "module-torch.nn.modules.flatten"], [2037, "module-torch.nn.modules.fold"], [2037, "module-torch.nn.modules.instancenorm"], [2037, "module-torch.nn.modules.lazy"], [2037, "module-torch.nn.modules.linear"], [2037, "module-torch.nn.modules.loss"], [2037, "module-torch.nn.modules.module"], [2037, "module-torch.nn.modules.normalization"], [2037, "module-torch.nn.modules.padding"], [2037, "module-torch.nn.modules.pixelshuffle"], [2037, "module-torch.nn.modules.pooling"], [2037, "module-torch.nn.modules.rnn"], [2037, "module-torch.nn.modules.sparse"], [2037, "module-torch.nn.modules.transformer"], [2037, 
"module-torch.nn.modules.upsampling"], [2037, "module-torch.nn.modules.utils"], [2037, "module-torch.nn.parallel"], [2037, "module-torch.nn.parallel.comm"], [2037, "module-torch.nn.parallel.distributed"], [2037, "module-torch.nn.parallel.parallel_apply"], [2037, "module-torch.nn.parallel.replicate"], [2037, "module-torch.nn.parallel.scatter_gather"], [2037, "module-torch.nn.parameter"], [2037, "module-torch.nn.utils"], [2037, "module-torch.nn.utils.clip_grad"], [2037, "module-torch.nn.utils.convert_parameters"], [2037, "module-torch.nn.utils.fusion"], [2037, "module-torch.nn.utils.init"], [2037, "module-torch.nn.utils.memory_format"], [2037, "module-torch.nn.utils.parametrizations"], [2037, "module-torch.nn.utils.parametrize"], [2037, "module-torch.nn.utils.prune"], [2037, "module-torch.nn.utils.rnn"], [2037, "module-torch.nn.utils.stateless"], [2038, "module-torch.nn.attention"], [2039, "module-torch.nn.attention.bias"], [2064, "module-torch.onnx.errors"], [2064, "module-torch.onnx.operators"], [2064, "module-torch.onnx.symbolic_caffe2"], [2064, "module-torch.onnx.symbolic_helper"], [2064, "module-torch.onnx.symbolic_opset10"], [2064, "module-torch.onnx.symbolic_opset11"], [2064, "module-torch.onnx.symbolic_opset12"], [2064, "module-torch.onnx.symbolic_opset13"], [2064, "module-torch.onnx.symbolic_opset14"], [2064, "module-torch.onnx.symbolic_opset15"], [2064, "module-torch.onnx.symbolic_opset16"], [2064, "module-torch.onnx.symbolic_opset17"], [2064, "module-torch.onnx.symbolic_opset18"], [2064, "module-torch.onnx.symbolic_opset19"], [2064, "module-torch.onnx.symbolic_opset20"], [2064, "module-torch.onnx.symbolic_opset7"], [2064, "module-torch.onnx.symbolic_opset8"], [2064, "module-torch.onnx.symbolic_opset9"], [2064, "module-torch.onnx.utils"], [2064, "module-torch.onnx.verification"], [2067, "module-torch.onnx"], [2069, "module-torch.optim"], [2069, "module-torch.optim.adadelta"], [2069, "module-torch.optim.adagrad"], [2069, "module-torch.optim.adam"], [2069, "module-torch.optim.adamax"], [2069, "module-torch.optim.adamw"], [2069, "module-torch.optim.asgd"], [2069, "module-torch.optim.lbfgs"], [2069, "module-torch.optim.lr_scheduler"], [2069, "module-torch.optim.nadam"], [2069, "module-torch.optim.optimizer"], [2069, "module-torch.optim.radam"], [2069, "module-torch.optim.rmsprop"], [2069, "module-torch.optim.rprop"], [2069, "module-torch.optim.sgd"], [2069, "module-torch.optim.sparse_adam"], [2069, "module-torch.optim.swa_utils"], [2070, "module-torch.package"], [2070, "module-torch.package.analyze"], [2070, "module-torch.package.analyze.find_first_use_of_broken_modules"], [2070, "module-torch.package.analyze.is_from_package"], [2070, "module-torch.package.analyze.trace_dependencies"], [2070, "module-torch.package.file_structure_representation"], [2070, "module-torch.package.find_file_dependencies"], [2070, "module-torch.package.glob_group"], [2070, "module-torch.package.importer"], [2070, "module-torch.package.package_exporter"], [2070, "module-torch.package.package_importer"], [2071, "module-torch.profiler"], [2071, "module-torch.profiler.itt"], [2071, "module-torch.profiler.profiler"], [2071, "module-torch.profiler.python_tracer"], [2072, "module-torch.ao"], [2072, "module-torch.ao.nn"], [2072, "module-torch.ao.nn.intrinsic.modules.fused"], [2072, "module-torch.ao.nn.intrinsic.qat.modules.conv_fused"], [2072, "module-torch.ao.nn.intrinsic.qat.modules.linear_fused"], [2072, "module-torch.ao.nn.intrinsic.qat.modules.linear_relu"], [2072, 
"module-torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu"], [2072, "module-torch.ao.nn.intrinsic.quantized.modules.bn_relu"], [2072, "module-torch.ao.nn.intrinsic.quantized.modules.conv_add"], [2072, "module-torch.ao.nn.intrinsic.quantized.modules.conv_relu"], [2072, "module-torch.ao.nn.intrinsic.quantized.modules.linear_relu"], [2072, "module-torch.ao.nn.qat.dynamic.modules.linear"], [2072, "module-torch.ao.nn.qat.modules.conv"], [2072, "module-torch.ao.nn.qat.modules.embedding_ops"], [2072, "module-torch.ao.nn.qat.modules.linear"], [2072, "module-torch.ao.nn.quantizable"], [2072, "module-torch.ao.nn.quantizable.modules"], [2072, "module-torch.ao.nn.quantizable.modules.activation"], [2072, "module-torch.ao.nn.quantizable.modules.rnn"], [2072, "module-torch.ao.nn.quantized"], [2072, "module-torch.ao.nn.quantized.dynamic.modules.conv"], [2072, "module-torch.ao.nn.quantized.dynamic.modules.linear"], [2072, "module-torch.ao.nn.quantized.dynamic.modules.rnn"], [2072, "module-torch.ao.nn.quantized.modules.activation"], [2072, "module-torch.ao.nn.quantized.modules.batchnorm"], [2072, "module-torch.ao.nn.quantized.modules.conv"], [2072, "module-torch.ao.nn.quantized.modules.dropout"], [2072, "module-torch.ao.nn.quantized.modules.embedding_ops"], [2072, "module-torch.ao.nn.quantized.modules.functional_modules"], [2072, "module-torch.ao.nn.quantized.modules.linear"], [2072, "module-torch.ao.nn.quantized.modules.normalization"], [2072, "module-torch.ao.nn.quantized.modules.rnn"], [2072, "module-torch.ao.nn.quantized.modules.utils"], [2072, "module-torch.ao.nn.quantized.reference"], [2072, "module-torch.ao.nn.quantized.reference.modules"], [2072, "module-torch.ao.nn.quantized.reference.modules.conv"], [2072, "module-torch.ao.nn.quantized.reference.modules.linear"], [2072, "module-torch.ao.nn.quantized.reference.modules.rnn"], [2072, "module-torch.ao.nn.quantized.reference.modules.sparse"], [2072, "module-torch.ao.nn.quantized.reference.modules.utils"], [2072, "module-torch.ao.nn.sparse"], [2072, "module-torch.ao.nn.sparse.quantized"], [2072, "module-torch.ao.nn.sparse.quantized.dynamic"], [2072, "module-torch.ao.nn.sparse.quantized.dynamic.linear"], [2072, "module-torch.ao.nn.sparse.quantized.linear"], [2072, "module-torch.ao.nn.sparse.quantized.utils"], [2072, "module-torch.ao.ns"], [2072, "module-torch.ao.ns.fx"], [2072, "module-torch.ao.ns.fx.graph_matcher"], [2072, "module-torch.ao.ns.fx.graph_passes"], [2072, "module-torch.ao.ns.fx.mappings"], [2072, "module-torch.ao.ns.fx.n_shadows_utils"], [2072, "module-torch.ao.ns.fx.ns_types"], [2072, "module-torch.ao.ns.fx.pattern_utils"], [2072, "module-torch.ao.ns.fx.qconfig_multi_mapping"], [2072, "module-torch.ao.ns.fx.utils"], [2072, "module-torch.ao.ns.fx.weight_utils"], [2072, "module-torch.ao.pruning"], [2072, "module-torch.ao.pruning.scheduler"], [2072, "module-torch.ao.pruning.scheduler.base_scheduler"], [2072, "module-torch.ao.pruning.scheduler.cubic_scheduler"], [2072, "module-torch.ao.pruning.scheduler.lambda_scheduler"], [2072, "module-torch.ao.pruning.sparsifier"], [2072, "module-torch.ao.pruning.sparsifier.base_sparsifier"], [2072, "module-torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier"], [2072, "module-torch.ao.pruning.sparsifier.utils"], [2072, "module-torch.ao.pruning.sparsifier.weight_norm_sparsifier"], [2072, "module-torch.ao.quantization"], [2072, "module-torch.ao.quantization.backend_config"], [2072, "module-torch.ao.quantization.backend_config.backend_config"], [2072, 
"module-torch.ao.quantization.backend_config.executorch"], [2072, "module-torch.ao.quantization.backend_config.fbgemm"], [2072, "module-torch.ao.quantization.backend_config.native"], [2072, "module-torch.ao.quantization.backend_config.observation_type"], [2072, "module-torch.ao.quantization.backend_config.onednn"], [2072, "module-torch.ao.quantization.backend_config.qnnpack"], [2072, "module-torch.ao.quantization.backend_config.tensorrt"], [2072, "module-torch.ao.quantization.backend_config.utils"], [2072, "module-torch.ao.quantization.backend_config.x86"], [2072, "module-torch.ao.quantization.fake_quantize"], [2072, "module-torch.ao.quantization.fuse_modules"], [2072, "module-torch.ao.quantization.fuser_method_mappings"], [2072, "module-torch.ao.quantization.fx"], [2072, "module-torch.ao.quantization.fx.convert"], [2072, "module-torch.ao.quantization.fx.custom_config"], [2072, "module-torch.ao.quantization.fx.fuse"], [2072, "module-torch.ao.quantization.fx.fuse_handler"], [2072, "module-torch.ao.quantization.fx.graph_module"], [2072, "module-torch.ao.quantization.fx.lower_to_fbgemm"], [2072, "module-torch.ao.quantization.fx.lower_to_qnnpack"], [2072, "module-torch.ao.quantization.fx.lstm_utils"], [2072, "module-torch.ao.quantization.fx.match_utils"], [2072, "module-torch.ao.quantization.fx.pattern_utils"], [2072, "module-torch.ao.quantization.fx.prepare"], [2072, "module-torch.ao.quantization.fx.qconfig_mapping_utils"], [2072, "module-torch.ao.quantization.fx.quantize_handler"], [2072, "module-torch.ao.quantization.fx.tracer"], [2072, "module-torch.ao.quantization.fx.utils"], [2072, "module-torch.ao.quantization.observer"], [2072, "module-torch.ao.quantization.pt2e.duplicate_dq_pass"], [2072, "module-torch.ao.quantization.pt2e.export_utils"], [2072, "module-torch.ao.quantization.pt2e.graph_utils"], [2072, "module-torch.ao.quantization.pt2e.port_metadata_pass"], [2072, "module-torch.ao.quantization.pt2e.prepare"], [2072, "module-torch.ao.quantization.pt2e.qat_utils"], [2072, "module-torch.ao.quantization.pt2e.representation.rewrite"], [2072, "module-torch.ao.quantization.pt2e.utils"], [2072, "module-torch.ao.quantization.qconfig"], [2072, "module-torch.ao.quantization.qconfig_mapping"], [2072, "module-torch.ao.quantization.quant_type"], [2072, "module-torch.ao.quantization.quantization_mappings"], [2072, "module-torch.ao.quantization.quantize_fx"], [2072, "module-torch.ao.quantization.quantize_jit"], [2072, "module-torch.ao.quantization.quantize_pt2e"], [2072, "module-torch.ao.quantization.quantizer.composable_quantizer"], [2072, "module-torch.ao.quantization.quantizer.embedding_quantizer"], [2072, "module-torch.ao.quantization.quantizer.quantizer"], [2072, "module-torch.ao.quantization.quantizer.utils"], [2072, "module-torch.ao.quantization.quantizer.x86_inductor_quantizer"], [2072, "module-torch.ao.quantization.quantizer.xnnpack_quantizer"], [2072, "module-torch.ao.quantization.quantizer.xnnpack_quantizer_utils"], [2072, "module-torch.ao.quantization.stubs"], [2072, "module-torch.ao.quantization.utils"], [2072, "module-torch.nn.intrinsic.modules.fused"], [2072, "module-torch.nn.intrinsic.qat.modules.conv_fused"], [2072, "module-torch.nn.intrinsic.qat.modules.linear_fused"], [2072, "module-torch.nn.intrinsic.qat.modules.linear_relu"], [2072, "module-torch.nn.intrinsic.quantized.dynamic.modules.linear_relu"], [2072, "module-torch.nn.intrinsic.quantized.modules.bn_relu"], [2072, "module-torch.nn.intrinsic.quantized.modules.conv_relu"], [2072, 
"module-torch.nn.intrinsic.quantized.modules.linear_relu"], [2072, "module-torch.nn.qat.dynamic.modules.linear"], [2072, "module-torch.nn.qat.modules.conv"], [2072, "module-torch.nn.qat.modules.embedding_ops"], [2072, "module-torch.nn.qat.modules.linear"], [2072, "module-torch.nn.quantizable.modules.activation"], [2072, "module-torch.nn.quantizable.modules.rnn"], [2072, "module-torch.nn.quantized.dynamic.modules.conv"], [2072, "module-torch.nn.quantized.dynamic.modules.linear"], [2072, "module-torch.nn.quantized.dynamic.modules.rnn"], [2072, "module-torch.nn.quantized.functional"], [2072, "module-torch.nn.quantized.modules.activation"], [2072, "module-torch.nn.quantized.modules.batchnorm"], [2072, "module-torch.nn.quantized.modules.conv"], [2072, "module-torch.nn.quantized.modules.dropout"], [2072, "module-torch.nn.quantized.modules.embedding_ops"], [2072, "module-torch.nn.quantized.modules.functional_modules"], [2072, "module-torch.nn.quantized.modules.linear"], [2072, "module-torch.nn.quantized.modules.normalization"], [2072, "module-torch.nn.quantized.modules.rnn"], [2072, "module-torch.nn.quantized.modules.utils"], [2072, "module-torch.quantization.fake_quantize"], [2072, "module-torch.quantization.fuse_modules"], [2072, "module-torch.quantization.fuser_method_mappings"], [2072, "module-torch.quantization.fx.convert"], [2072, "module-torch.quantization.fx.fuse"], [2072, "module-torch.quantization.fx.fusion_patterns"], [2072, "module-torch.quantization.fx.graph_module"], [2072, "module-torch.quantization.fx.match_utils"], [2072, "module-torch.quantization.fx.pattern_utils"], [2072, "module-torch.quantization.fx.prepare"], [2072, "module-torch.quantization.fx.quantization_patterns"], [2072, "module-torch.quantization.fx.quantization_types"], [2072, "module-torch.quantization.fx.utils"], [2072, "module-torch.quantization.observer"], [2072, "module-torch.quantization.qconfig"], [2072, "module-torch.quantization.quant_type"], [2072, "module-torch.quantization.quantization_mappings"], [2072, "module-torch.quantization.quantize"], [2072, "module-torch.quantization.quantize_fx"], [2072, "module-torch.quantization.quantize_jit"], [2072, "module-torch.quantization.stubs"], [2072, "module-torch.quantization.utils"], [2075, "module-torch.ao.nn.intrinsic"], [2075, "module-torch.ao.nn.intrinsic.modules"], [2075, "module-torch.ao.nn.intrinsic.qat"], [2075, "module-torch.ao.nn.intrinsic.qat.modules"], [2075, "module-torch.ao.nn.intrinsic.quantized"], [2075, "module-torch.ao.nn.intrinsic.quantized.dynamic"], [2075, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules"], [2075, "module-torch.ao.nn.intrinsic.quantized.modules"], [2075, "module-torch.ao.nn.qat"], [2075, "module-torch.ao.nn.qat.dynamic"], [2075, "module-torch.ao.nn.qat.dynamic.modules"], [2075, "module-torch.ao.nn.qat.modules"], [2075, "module-torch.ao.nn.quantized.dynamic"], [2075, "module-torch.ao.nn.quantized.dynamic.modules"], [2075, "module-torch.ao.nn.quantized.functional"], [2075, "module-torch.ao.nn.quantized.modules"], [2075, "module-torch.ao.quantization.pt2e"], [2075, "module-torch.ao.quantization.pt2e.generate_numeric_debug_handle"], [2075, "module-torch.ao.quantization.pt2e.representation"], [2075, "module-torch.ao.quantization.quantizer"], [2075, "module-torch.nn.intrinsic"], [2075, "module-torch.nn.intrinsic.modules"], [2075, "module-torch.nn.intrinsic.qat"], [2075, "module-torch.nn.intrinsic.qat.modules"], [2075, "module-torch.nn.intrinsic.quantized"], [2075, "module-torch.nn.intrinsic.quantized.dynamic"], [2075, 
"module-torch.nn.intrinsic.quantized.dynamic.modules"], [2075, "module-torch.nn.intrinsic.quantized.modules"], [2075, "module-torch.nn.qat"], [2075, "module-torch.nn.qat.dynamic"], [2075, "module-torch.nn.qat.dynamic.modules"], [2075, "module-torch.nn.qat.modules"], [2075, "module-torch.nn.quantizable"], [2075, "module-torch.nn.quantizable.modules"], [2075, "module-torch.nn.quantized"], [2075, "module-torch.nn.quantized.dynamic"], [2075, "module-torch.nn.quantized.dynamic.modules"], [2075, "module-torch.nn.quantized.modules"], [2075, "module-torch.quantization"], [2075, "module-torch.quantization.fx"], [2076, "module-torch.random"], [2077, "module-torch.distributed.autograd"], [2077, "module-torch.distributed.rpc"], [2080, "module-torch.signal"], [2080, "module-torch.signal.windows"], [2082, "module-torch.sparse"], [2083, "module-torch.special"], [2087, "module-torch.utils.tensorboard"], [2089, "module-torch.testing"], [2091, "module-torch"], [2091, "module-torch.contrib"], [2091, "module-torch.functional"], [2091, "module-torch.quasirandom"], [2091, "module-torch.return_types"], [2091, "module-torch.serialization"], [2091, "module-torch.signal.windows.windows"], [2091, "module-torch.sparse.semi_structured"], [2091, "module-torch.storage"], [2091, "module-torch.torch_version"], [2091, "module-torch.types"], [2091, "module-torch.utils.backcompat"], [2091, "module-torch.utils.hipify"], [2091, "module-torch.utils.model_dump"], [2091, "module-torch.utils.viz"], [2091, "module-torch.version"], [2092, "module-torch.ao.ns._numeric_suite"], [2093, "module-torch.ao.ns._numeric_suite_fx"], [2096, "module-torch.compiler"], [2114, "module-torch.overrides"], [2119, "module-torch.utils"], [2119, "module-torch.utils.backend_registration"], [2119, "module-torch.utils.benchmark.examples.blas_compare_setup"], [2119, "module-torch.utils.benchmark.examples.compare"], [2119, "module-torch.utils.benchmark.examples.fuzzer"], [2119, "module-torch.utils.benchmark.examples.op_benchmark"], [2119, "module-torch.utils.benchmark.examples.simple_timeit"], [2119, "module-torch.utils.benchmark.examples.spectral_ops_fuzz_test"], [2119, "module-torch.utils.benchmark.op_fuzzers.binary"], [2119, "module-torch.utils.benchmark.op_fuzzers.sparse_binary"], [2119, "module-torch.utils.benchmark.op_fuzzers.sparse_unary"], [2119, "module-torch.utils.benchmark.op_fuzzers.spectral"], [2119, "module-torch.utils.benchmark.op_fuzzers.unary"], [2119, "module-torch.utils.benchmark.utils.common"], [2119, "module-torch.utils.benchmark.utils.compare"], [2119, "module-torch.utils.benchmark.utils.compile"], [2119, "module-torch.utils.benchmark.utils.cpp_jit"], [2119, "module-torch.utils.benchmark.utils.fuzzer"], [2119, "module-torch.utils.benchmark.utils.sparse_fuzzer"], [2119, "module-torch.utils.benchmark.utils.timer"], [2119, "module-torch.utils.benchmark.utils.valgrind_wrapper.timer_interface"], [2119, "module-torch.utils.bundled_inputs"], [2119, "module-torch.utils.checkpoint"], [2119, "module-torch.utils.collect_env"], [2119, "module-torch.utils.cpp_backtrace"], [2119, "module-torch.utils.cpp_extension"], [2119, "module-torch.utils.data.backward_compatibility"], [2119, "module-torch.utils.data.dataloader"], [2119, "module-torch.utils.data.datapipes.dataframe.dataframe_wrapper"], [2119, "module-torch.utils.data.datapipes.dataframe.dataframes"], [2119, "module-torch.utils.data.datapipes.dataframe.datapipes"], [2119, "module-torch.utils.data.datapipes.dataframe.structures"], [2119, "module-torch.utils.data.datapipes.datapipe"], [2119, 
"module-torch.utils.data.datapipes.gen_pyi"], [2119, "module-torch.utils.data.datapipes.iter.callable"], [2119, "module-torch.utils.data.datapipes.iter.combinatorics"], [2119, "module-torch.utils.data.datapipes.iter.combining"], [2119, "module-torch.utils.data.datapipes.iter.filelister"], [2119, "module-torch.utils.data.datapipes.iter.fileopener"], [2119, "module-torch.utils.data.datapipes.iter.grouping"], [2119, "module-torch.utils.data.datapipes.iter.routeddecoder"], [2119, "module-torch.utils.data.datapipes.iter.selecting"], [2119, "module-torch.utils.data.datapipes.iter.sharding"], [2119, "module-torch.utils.data.datapipes.iter.streamreader"], [2119, "module-torch.utils.data.datapipes.iter.utils"], [2119, "module-torch.utils.data.datapipes.map.callable"], [2119, "module-torch.utils.data.datapipes.map.combinatorics"], [2119, "module-torch.utils.data.datapipes.map.combining"], [2119, "module-torch.utils.data.datapipes.map.grouping"], [2119, "module-torch.utils.data.datapipes.map.utils"], [2119, "module-torch.utils.data.datapipes.utils.common"], [2119, "module-torch.utils.data.datapipes.utils.decoder"], [2119, "module-torch.utils.data.datapipes.utils.snapshot"], [2119, "module-torch.utils.data.dataset"], [2119, "module-torch.utils.data.distributed"], [2119, "module-torch.utils.data.graph"], [2119, "module-torch.utils.data.graph_settings"], [2119, "module-torch.utils.data.sampler"], [2119, "module-torch.utils.dlpack"], [2119, "module-torch.utils.file_baton"], [2119, "module-torch.utils.flop_counter"], [2119, "module-torch.utils.hipify.constants"], [2119, "module-torch.utils.hipify.cuda_to_hip_mappings"], [2119, "module-torch.utils.hipify.hipify_python"], [2119, "module-torch.utils.hipify.version"], [2119, "module-torch.utils.hooks"], [2119, "module-torch.utils.jit.log_extract"], [2119, "module-torch.utils.mkldnn"], [2119, "module-torch.utils.mobile_optimizer"], [2119, "module-torch.utils.show_pickle"], [2119, "module-torch.utils.tensorboard.summary"], [2119, "module-torch.utils.tensorboard.writer"], [2119, "module-torch.utils.throughput_benchmark"], [2119, "module-torch.utils.weak"], [2120, "module-torch.xpu"], [2120, "module-torch.xpu.random"], [2120, "module-torch.xpu.streams"]], "torch.amp": [[0, "module-torch.amp"]], "torch.amp.autocast_mode": [[0, "module-torch.amp.autocast_mode"]], "torch.amp.grad_scaler": [[0, "module-torch.amp.grad_scaler"]], "torch.cpu.amp": [[0, "module-torch.cpu.amp"]], "torch.cpu.amp.autocast_mode": [[0, "module-torch.cpu.amp.autocast_mode"]], "torch.cpu.amp.grad_scaler": [[0, "module-torch.cpu.amp.grad_scaler"]], "torch.cuda.amp": [[0, "module-torch.cuda.amp"]], "torch.cuda.amp.autocast_mode": [[0, "module-torch.cuda.amp.autocast_mode"]], "torch.cuda.amp.common": [[0, "module-torch.cuda.amp.common"]], "torch.cuda.amp.grad_scaler": [[0, "module-torch.cuda.amp.grad_scaler"]], "function (class in torch.autograd)": [[1, "torch.autograd.Function"]], "gradientedge (class in torch.autograd.graph)": [[1, "torch.autograd.graph.GradientEdge"]], "allow_mutation_on_saved_tensors (class in torch.autograd.graph)": [[1, "torch.autograd.graph.allow_mutation_on_saved_tensors"]], "detect_anomaly (class in torch.autograd)": [[1, "torch.autograd.detect_anomaly"]], "disable_saved_tensors_hooks (class in torch.autograd.graph)": [[1, "torch.autograd.graph.disable_saved_tensors_hooks"]], "emit_itt (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.emit_itt"]], "emit_nvtx (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.emit_nvtx"]], 
"get_gradient_edge() (in module torch.autograd.graph)": [[1, "torch.autograd.graph.get_gradient_edge"]], "profile (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.profile"]], "register_multi_grad_hook (class in torch.autograd.graph)": [[1, "torch.autograd.graph.register_multi_grad_hook"]], "save_on_cpu (class in torch.autograd.graph)": [[1, "torch.autograd.graph.save_on_cpu"]], "saved_tensors_hooks (class in torch.autograd.graph)": [[1, "torch.autograd.graph.saved_tensors_hooks"]], "set_detect_anomaly (class in torch.autograd)": [[1, "torch.autograd.set_detect_anomaly"]], "torch.autograd": [[1, "module-torch.autograd"]], "torch.autograd.anomaly_mode": [[1, "module-torch.autograd.anomaly_mode"]], "torch.autograd.forward_ad": [[1, "module-torch.autograd.forward_ad"]], "torch.autograd.function": [[1, "module-torch.autograd.function"]], "torch.autograd.functional": [[1, "module-torch.autograd.functional"]], "torch.autograd.grad_mode": [[1, "module-torch.autograd.grad_mode"]], "torch.autograd.gradcheck": [[1, "module-torch.autograd.gradcheck"]], "torch.autograd.graph": [[1, "module-torch.autograd.graph"]], "torch.autograd.profiler": [[1, "module-torch.autograd.profiler"]], "torch.autograd.profiler_legacy": [[1, "module-torch.autograd.profiler_legacy"]], "torch.autograd.profiler_util": [[1, "module-torch.autograd.profiler_util"]], "torch.autograd.variable": [[1, "module-torch.autograd.variable"]], "sdpaparams (class in torch.backends.cuda)": [[2, "torch.backends.cuda.SDPAParams"]], "allow_bf16_reduced_precision_reduction (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_bf16_reduced_precision_reduction"]], "allow_fp16_reduced_precision_reduction (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction"]], "allow_tf32 (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_tf32"]], "allow_tf32 (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.allow_tf32"]], "benchmark (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.benchmark"]], "benchmark_limit (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.benchmark_limit"]], "can_use_efficient_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_efficient_attention"]], "can_use_flash_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_flash_attention"]], "clear() (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.clear"]], "cudnn_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.cudnn_sdp_enabled"]], "cufft_plan_cache (in module torch.backends.cuda)": [[2, "torch.backends.cuda.cufft_plan_cache"]], "deterministic (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.deterministic"]], "enable_cudnn_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_cudnn_sdp"]], "enable_flash_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_flash_sdp"]], "enable_math_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_math_sdp"]], "enable_mem_efficient_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_mem_efficient_sdp"]], "enabled (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.enabled"]], "enabled (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.enabled"]], "flags() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.flags"]], "flash_sdp_enabled() 
(in module torch.backends.cuda)": [[2, "torch.backends.cuda.flash_sdp_enabled"]], "get_cpu_capability() (in module torch.backends.cpu)": [[2, "torch.backends.cpu.get_cpu_capability"]], "get_fastpath_enabled() (in module torch.backends.mha)": [[2, "torch.backends.mha.get_fastpath_enabled"]], "get_opt_einsum() (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.get_opt_einsum"]], "is_available() (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.is_available"]], "is_available() (in module torch.backends.mkl)": [[2, "torch.backends.mkl.is_available"]], "is_available() (in module torch.backends.mkldnn)": [[2, "torch.backends.mkldnn.is_available"]], "is_available() (in module torch.backends.mps)": [[2, "torch.backends.mps.is_available"]], "is_available() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.is_available"]], "is_available() (in module torch.backends.openmp)": [[2, "torch.backends.openmp.is_available"]], "is_available() (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.is_available"]], "is_built() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.is_built"]], "is_built() (in module torch.backends.mps)": [[2, "torch.backends.mps.is_built"]], "math_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.math_sdp_enabled"]], "max_size (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.max_size"]], "mem_efficient_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.mem_efficient_sdp_enabled"]], "preferred_blas_library() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.preferred_blas_library"]], "preferred_linalg_library() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.preferred_linalg_library"]], "sdp_kernel() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.sdp_kernel"]], "set_fastpath_enabled() (in module torch.backends.mha)": [[2, "torch.backends.mha.set_fastpath_enabled"]], "set_flags() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.set_flags"]], "size (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.size"]], "strategy (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.strategy"]], "torch.backends": [[2, "module-torch.backends"]], "torch.backends.cpu": [[2, "module-torch.backends.cpu"]], "torch.backends.cuda": [[2, "module-torch.backends.cuda"]], "torch.backends.cudnn": [[2, "module-torch.backends.cudnn"]], "torch.backends.cudnn.rnn": [[2, "module-torch.backends.cudnn.rnn"]], "torch.backends.mha": [[2, "module-torch.backends.mha"]], "torch.backends.mkl": [[2, "module-torch.backends.mkl"]], "torch.backends.mkldnn": [[2, "module-torch.backends.mkldnn"]], "torch.backends.mps": [[2, "module-torch.backends.mps"]], "torch.backends.nnpack": [[2, "module-torch.backends.nnpack"]], "torch.backends.openmp": [[2, "module-torch.backends.openmp"]], "torch.backends.opt_einsum": [[2, "module-torch.backends.opt_einsum"]], "torch.backends.quantized": [[2, "module-torch.backends.quantized"]], "torch.backends.xeon": [[2, "module-torch.backends.xeon"]], "torch.backends.xeon.run_cpu": [[2, "module-torch.backends.xeon.run_cpu"]], "torch.backends.xnnpack": [[2, "module-torch.backends.xnnpack"]], "verbose (class in torch.backends.mkl)": [[2, "torch.backends.mkl.verbose"]], "verbose (class in torch.backends.mkldnn)": [[2, "torch.backends.mkldnn.verbose"]], "version() (in module torch.backends.cudnn)": [[2, 
"torch.backends.cudnn.version"]], "callgrindstats (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.CallgrindStats"]], "compare (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Compare"]], "functioncounts (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.FunctionCounts"]], "measurement (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Measurement"]], "timer (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Timer"]], "adaptive_autorange() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.adaptive_autorange"]], "as_standardized() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.as_standardized"]], "blocked_autorange() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.blocked_autorange"]], "collect_callgrind() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.collect_callgrind"]], "colorize() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.colorize"]], "counts() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.counts"]], "delta() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.delta"]], "denoise() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.denoise"]], "extend_results() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.extend_results"]], "filter() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.filter"]], "highlight_warnings() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.highlight_warnings"]], "merge() (torch.utils.benchmark.measurement static method)": [[3, "torch.utils.benchmark.Measurement.merge"]], "print() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.print"]], "significant_figures (torch.utils.benchmark.measurement property)": [[3, "torch.utils.benchmark.Measurement.significant_figures"]], "stats() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.stats"]], "timeit() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.timeit"]], "torch.utils.benchmark": [[3, "module-torch.utils.benchmark"]], "torch.utils.benchmark.examples": [[3, "module-torch.utils.benchmark.examples"]], "torch.utils.benchmark.op_fuzzers": [[3, "module-torch.utils.benchmark.op_fuzzers"]], "torch.utils.benchmark.utils": [[3, "module-torch.utils.benchmark.utils"]], "torch.utils.benchmark.utils.valgrind_wrapper": [[3, "module-torch.utils.benchmark.utils.valgrind_wrapper"]], "transform() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.transform"]], "trim_significant_figures() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.trim_significant_figures"]], "torch.utils.bottleneck": [[4, "module-torch.utils.bottleneck"]], "checkpoint() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.checkpoint"]], "checkpoint_sequential() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.checkpoint_sequential"]], "set_checkpoint_debug_enabled() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.set_checkpoint_debug_enabled"]], "cond() (in module torch._higher_order_ops.cond)": [[12, "torch._higher_order_ops.cond.cond"]], "parallel_info() (in module torch.__config__)": [[13, 
"torch.__config__.parallel_info"]], "show() (in module torch.__config__)": [[13, "torch.__config__.show"]], "torch.__config__": [[13, "module-torch.__config__"]], "buildextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.BuildExtension"]], "cudaextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.CUDAExtension"]], "cppextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.CppExtension"]], "get_compiler_abi_compatibility_and_version() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.get_compiler_abi_compatibility_and_version"]], "include_paths() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.include_paths"]], "is_ninja_available() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.is_ninja_available"]], "load() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.load"]], "load_inline() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.load_inline"]], "verify_ninja_availability() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.verify_ninja_availability"]], "torch.cpu": [[16, "module-torch.cpu"]], "torch.cuda": [[17, "module-torch.cuda"]], "torch.cuda.comm": [[17, "module-torch.cuda.comm"]], "torch.cuda.error": [[17, "module-torch.cuda.error"]], "torch.cuda.graphs": [[17, "module-torch.cuda.graphs"]], "torch.cuda.jiterator": [[17, "module-torch.cuda.jiterator"]], "torch.cuda.memory": [[17, "module-torch.cuda.memory"]], "torch.cuda.nccl": [[17, "module-torch.cuda.nccl"]], "torch.cuda.nvtx": [[17, "module-torch.cuda.nvtx"]], "torch.cuda.profiler": [[17, "module-torch.cuda.profiler"]], "torch.cuda.random": [[17, "module-torch.cuda.random"]], "torch.cuda.sparse": [[17, "module-torch.cuda.sparse"]], "torch.cuda.streams": [[17, "module-torch.cuda.streams"]], "enable_cuda_sanitizer() (in module torch.cuda._sanitizer)": [[18, "torch.cuda._sanitizer.enable_cuda_sanitizer"]], "torch.cuda._sanitizer": [[18, "module-torch.cuda._sanitizer"]], "enable() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.enable"]], "get_filename() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_filename"]], "get_max_tuning_duration() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_max_tuning_duration"]], "get_max_tuning_iterations() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_max_tuning_iterations"]], "get_results() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_results"]], "get_validators() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_validators"]], "is_enabled() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.is_enabled"]], "read_file() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.read_file"]], "set_filename() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_filename"]], "set_max_tuning_duration() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_max_tuning_duration"]], "set_max_tuning_iterations() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_max_tuning_iterations"]], "torch.cuda.tunable": [[19, "module-torch.cuda.tunable"]], "tuning_enable() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.tuning_enable"]], "tuning_is_enabled() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.tuning_is_enabled"]], "write_file() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.write_file"]], "write_file_on_exit() (in 
module torch.cuda.tunable)": [[19, "torch.cuda.tunable.write_file_on_exit"]], "batchsampler (class in torch.utils.data)": [[23, "torch.utils.data.BatchSampler"]], "chaindataset (class in torch.utils.data)": [[23, "torch.utils.data.ChainDataset"]], "concatdataset (class in torch.utils.data)": [[23, "torch.utils.data.ConcatDataset"]], "dataloader (class in torch.utils.data)": [[23, "torch.utils.data.DataLoader"]], "dataset (class in torch.utils.data)": [[23, "torch.utils.data.Dataset"]], "distributedsampler (class in torch.utils.data.distributed)": [[23, "torch.utils.data.distributed.DistributedSampler"]], "iterabledataset (class in torch.utils.data)": [[23, "torch.utils.data.IterableDataset"]], "randomsampler (class in torch.utils.data)": [[23, "torch.utils.data.RandomSampler"]], "sampler (class in torch.utils.data)": [[23, "torch.utils.data.Sampler"]], "sequentialsampler (class in torch.utils.data)": [[23, "torch.utils.data.SequentialSampler"]], "stackdataset (class in torch.utils.data)": [[23, "torch.utils.data.StackDataset"]], "subset (class in torch.utils.data)": [[23, "torch.utils.data.Subset"]], "subsetrandomsampler (class in torch.utils.data)": [[23, "torch.utils.data.SubsetRandomSampler"]], "tensordataset (class in torch.utils.data)": [[23, "torch.utils.data.TensorDataset"]], "weightedrandomsampler (class in torch.utils.data)": [[23, "torch.utils.data.WeightedRandomSampler"]], "collate() (in module torch.utils.data._utils.collate)": [[23, "torch.utils.data._utils.collate.collate"]], "default_collate() (in module torch.utils.data)": [[23, "torch.utils.data.default_collate"]], "default_convert() (in module torch.utils.data)": [[23, "torch.utils.data.default_convert"]], "get_worker_info() (in module torch.utils.data)": [[23, "torch.utils.data.get_worker_info"]], "random_split() (in module torch.utils.data)": [[23, "torch.utils.data.random_split"]], "torch.utils.data": [[23, "module-torch.utils.data"]], "torch.utils.data.datapipes": [[23, "module-torch.utils.data.datapipes"]], "torch.utils.data.datapipes.dataframe": [[23, "module-torch.utils.data.datapipes.dataframe"]], "torch.utils.data.datapipes.iter": [[23, "module-torch.utils.data.datapipes.iter"]], "torch.utils.data.datapipes.map": [[23, "module-torch.utils.data.datapipes.map"]], "torch.utils.data.datapipes.utils": [[23, "module-torch.utils.data.datapipes.utils"]], "gradbucket (class in torch.distributed)": [[24, "torch.distributed.GradBucket"]], "powersgdstate (class in torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState"]], "__getstate__() (torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook.powersgdstate method)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState.__getstate__"]], "__setstate__() (torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook.powersgdstate method)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState.__setstate__"]], "allreduce_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.allreduce_hook"]], "batched_powersgd_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.batched_powerSGD_hook"]], "bf16_compress_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.bf16_compress_hook"]], 
"bf16_compress_wrapper() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.bf16_compress_wrapper"]], "buffer() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.buffer"]], "fp16_compress_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_hook"]], "fp16_compress_wrapper() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_wrapper"]], "gradients() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.gradients"]], "index() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.index"]], "is_last() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.is_last"]], "noop_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks.noop_hook"]], "parameters() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.parameters"]], "powersgd_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.powerSGD_hook"]], "set_buffer() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.set_buffer"]], "fill_uninitialized_memory (in module torch.utils.deterministic)": [[27, "torch.utils.deterministic.fill_uninitialized_memory"]], "torch.utils.deterministic": [[27, "module-torch.utils.deterministic"]], "backend (class in torch.distributed)": [[28, "torch.distributed.Backend"]], "devicemesh (class in torch.distributed.device_mesh)": [[28, "torch.distributed.device_mesh.DeviceMesh"]], "distbackenderror (class in torch.distributed)": [[28, "torch.distributed.DistBackendError"]], "disterror (class in torch.distributed)": [[28, "torch.distributed.DistError"]], "distnetworkerror (class in torch.distributed)": [[28, "torch.distributed.DistNetworkError"]], "diststoreerror (class in torch.distributed)": [[28, "torch.distributed.DistStoreError"]], "filestore (class in torch.distributed)": [[28, "torch.distributed.FileStore"]], "hashstore (class in torch.distributed)": [[28, "torch.distributed.HashStore"]], "p2pop (class in torch.distributed)": [[28, "torch.distributed.P2POp"]], "prefixstore (class in torch.distributed)": [[28, "torch.distributed.PrefixStore"]], "reduceop (class in torch.distributed)": [[28, "torch.distributed.ReduceOp"]], "store (class in torch.distributed)": [[28, "torch.distributed.Store"]], "tcpstore (class in torch.distributed)": [[28, "torch.distributed.TCPStore"]], "work (class in torch.distributed)": [[28, "torch.distributed.Work"]], "add() (in module torch.distributed.store)": [[28, "torch.distributed.Store.add"]], "all_gather() (in module torch.distributed)": [[28, "torch.distributed.all_gather"]], "all_gather_into_tensor() (in module torch.distributed)": [[28, "torch.distributed.all_gather_into_tensor"]], "all_gather_object() (in module torch.distributed)": [[28, "torch.distributed.all_gather_object"]], "all_reduce() (in module torch.distributed)": [[28, "torch.distributed.all_reduce"]], "all_to_all() (in module torch.distributed)": [[28, "torch.distributed.all_to_all"]], "all_to_all_single() (in module torch.distributed)": [[28, "torch.distributed.all_to_all_single"]], "barrier() 
(in module torch.distributed)": [[28, "torch.distributed.barrier"]], "batch_isend_irecv() (in module torch.distributed)": [[28, "torch.distributed.batch_isend_irecv"]], "breakpoint() (in module torch.distributed)": [[28, "torch.distributed.breakpoint"]], "broadcast() (in module torch.distributed)": [[28, "torch.distributed.broadcast"]], "broadcast_object_list() (in module torch.distributed)": [[28, "torch.distributed.broadcast_object_list"]], "compare_set() (in module torch.distributed.store)": [[28, "torch.distributed.Store.compare_set"]], "delete_key() (in module torch.distributed.store)": [[28, "torch.distributed.Store.delete_key"]], "gather() (in module torch.distributed)": [[28, "torch.distributed.gather"]], "gather_object() (in module torch.distributed)": [[28, "torch.distributed.gather_object"]], "get() (in module torch.distributed.store)": [[28, "torch.distributed.Store.get"]], "get_backend() (in module torch.distributed)": [[28, "torch.distributed.get_backend"]], "get_global_rank() (in module torch.distributed)": [[28, "torch.distributed.get_global_rank"]], "get_group_rank() (in module torch.distributed)": [[28, "torch.distributed.get_group_rank"]], "get_process_group_ranks() (in module torch.distributed)": [[28, "torch.distributed.get_process_group_ranks"]], "get_rank() (in module torch.distributed)": [[28, "torch.distributed.get_rank"]], "get_world_size() (in module torch.distributed)": [[28, "torch.distributed.get_world_size"]], "init_device_mesh() (in module torch.distributed.device_mesh)": [[28, "torch.distributed.device_mesh.init_device_mesh"]], "init_process_group() (in module torch.distributed)": [[28, "torch.distributed.init_process_group"]], "irecv() (in module torch.distributed)": [[28, "torch.distributed.irecv"]], "is_available() (in module torch.distributed)": [[28, "torch.distributed.is_available"]], "is_gloo_available() (in module torch.distributed)": [[28, "torch.distributed.is_gloo_available"]], "is_initialized() (in module torch.distributed)": [[28, "torch.distributed.is_initialized"]], "is_mpi_available() (in module torch.distributed)": [[28, "torch.distributed.is_mpi_available"]], "is_nccl_available() (in module torch.distributed)": [[28, "torch.distributed.is_nccl_available"]], "is_torchelastic_launched() (in module torch.distributed)": [[28, "torch.distributed.is_torchelastic_launched"]], "isend() (in module torch.distributed)": [[28, "torch.distributed.isend"]], "monitored_barrier() (in module torch.distributed)": [[28, "torch.distributed.monitored_barrier"]], "new_group() (in module torch.distributed)": [[28, "torch.distributed.new_group"]], "num_keys() (in module torch.distributed.store)": [[28, "torch.distributed.Store.num_keys"]], "recv() (in module torch.distributed)": [[28, "torch.distributed.recv"]], "recv_object_list() (in module torch.distributed)": [[28, "torch.distributed.recv_object_list"]], "reduce() (in module torch.distributed)": [[28, "torch.distributed.reduce"]], "reduce_op (class in torch.distributed)": [[28, "torch.distributed.reduce_op"]], "reduce_scatter() (in module torch.distributed)": [[28, "torch.distributed.reduce_scatter"]], "reduce_scatter_tensor() (in module torch.distributed)": [[28, "torch.distributed.reduce_scatter_tensor"]], "register_backend() (torch.distributed.backend class method)": [[28, "torch.distributed.Backend.register_backend"]], "scatter() (in module torch.distributed)": [[28, "torch.distributed.scatter"]], "scatter_object_list() (in module torch.distributed)": [[28, "torch.distributed.scatter_object_list"]], 
"send() (in module torch.distributed)": [[28, "torch.distributed.send"]], "send_object_list() (in module torch.distributed)": [[28, "torch.distributed.send_object_list"]], "set() (in module torch.distributed.store)": [[28, "torch.distributed.Store.set"]], "set_timeout() (in module torch.distributed.store)": [[28, "torch.distributed.Store.set_timeout"]], "torch.distributed": [[28, "module-torch.distributed"]], "torch.distributed.algorithms": [[28, "module-torch.distributed.algorithms"]], "torch.distributed.algorithms.ddp_comm_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.default_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.default_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.post_localsgd_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.post_localSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks"]], "torch.distributed.algorithms.join": [[28, "module-torch.distributed.algorithms.join"]], "torch.distributed.algorithms.model_averaging": [[28, "module-torch.distributed.algorithms.model_averaging"]], "torch.distributed.algorithms.model_averaging.averagers": [[28, "module-torch.distributed.algorithms.model_averaging.averagers"]], "torch.distributed.algorithms.model_averaging.hierarchical_model_averager": [[28, "module-torch.distributed.algorithms.model_averaging.hierarchical_model_averager"]], "torch.distributed.algorithms.model_averaging.utils": [[28, "module-torch.distributed.algorithms.model_averaging.utils"]], "torch.distributed.argparse_util": [[28, "module-torch.distributed.argparse_util"]], "torch.distributed.c10d_logger": [[28, "module-torch.distributed.c10d_logger"]], "torch.distributed.checkpoint.api": [[28, "module-torch.distributed.checkpoint.api"]], "torch.distributed.checkpoint.default_planner": [[28, "module-torch.distributed.checkpoint.default_planner"]], "torch.distributed.checkpoint.filesystem": [[28, "module-torch.distributed.checkpoint.filesystem"]], "torch.distributed.checkpoint.metadata": [[28, "module-torch.distributed.checkpoint.metadata"]], "torch.distributed.checkpoint.optimizer": [[28, "module-torch.distributed.checkpoint.optimizer"]], "torch.distributed.checkpoint.planner": [[28, "module-torch.distributed.checkpoint.planner"]], "torch.distributed.checkpoint.planner_helpers": [[28, "module-torch.distributed.checkpoint.planner_helpers"]], "torch.distributed.checkpoint.resharding": [[28, "module-torch.distributed.checkpoint.resharding"]], "torch.distributed.checkpoint.state_dict": [[28, "module-torch.distributed.checkpoint.state_dict"]], "torch.distributed.checkpoint.state_dict_loader": [[28, 
"module-torch.distributed.checkpoint.state_dict_loader"]], "torch.distributed.checkpoint.state_dict_saver": [[28, "module-torch.distributed.checkpoint.state_dict_saver"]], "torch.distributed.checkpoint.stateful": [[28, "module-torch.distributed.checkpoint.stateful"]], "torch.distributed.checkpoint.storage": [[28, "module-torch.distributed.checkpoint.storage"]], "torch.distributed.checkpoint.utils": [[28, "module-torch.distributed.checkpoint.utils"]], "torch.distributed.collective_utils": [[28, "module-torch.distributed.collective_utils"]], "torch.distributed.constants": [[28, "module-torch.distributed.constants"]], "torch.distributed.device_mesh": [[28, "module-torch.distributed.device_mesh"]], "torch.distributed.distributed_c10d": [[28, "module-torch.distributed.distributed_c10d"]], "torch.distributed.elastic": [[28, "module-torch.distributed.elastic"]], "torch.distributed.elastic.agent.server.api": [[28, "module-torch.distributed.elastic.agent.server.api"]], "torch.distributed.elastic.agent.server.local_elastic_agent": [[28, "module-torch.distributed.elastic.agent.server.local_elastic_agent"]], "torch.distributed.elastic.events.api": [[28, "module-torch.distributed.elastic.events.api"]], "torch.distributed.elastic.events.handlers": [[28, "module-torch.distributed.elastic.events.handlers"]], "torch.distributed.elastic.metrics.api": [[28, "module-torch.distributed.elastic.metrics.api"]], "torch.distributed.elastic.multiprocessing.api": [[28, "module-torch.distributed.elastic.multiprocessing.api"]], "torch.distributed.elastic.multiprocessing.errors.error_handler": [[28, "module-torch.distributed.elastic.multiprocessing.errors.error_handler"]], "torch.distributed.elastic.multiprocessing.errors.handlers": [[28, "module-torch.distributed.elastic.multiprocessing.errors.handlers"]], "torch.distributed.elastic.multiprocessing.redirects": [[28, "module-torch.distributed.elastic.multiprocessing.redirects"]], "torch.distributed.elastic.multiprocessing.tail_log": [[28, "module-torch.distributed.elastic.multiprocessing.tail_log"]], "torch.distributed.elastic.rendezvous.api": [[28, "module-torch.distributed.elastic.rendezvous.api"]], "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend": [[28, "module-torch.distributed.elastic.rendezvous.c10d_rendezvous_backend"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.dynamic_rendezvous"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend": [[28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous_backend"]], "torch.distributed.elastic.rendezvous.etcd_server": [[28, "module-torch.distributed.elastic.rendezvous.etcd_server"]], "torch.distributed.elastic.rendezvous.etcd_store": [[28, "module-torch.distributed.elastic.rendezvous.etcd_store"]], "torch.distributed.elastic.rendezvous.static_tcp_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.static_tcp_rendezvous"]], "torch.distributed.elastic.rendezvous.utils": [[28, "module-torch.distributed.elastic.rendezvous.utils"]], "torch.distributed.elastic.timer.api": [[28, "module-torch.distributed.elastic.timer.api"]], "torch.distributed.elastic.timer.file_based_local_timer": [[28, "module-torch.distributed.elastic.timer.file_based_local_timer"]], "torch.distributed.elastic.timer.local_timer": [[28, "module-torch.distributed.elastic.timer.local_timer"]], "torch.distributed.elastic.utils": 
[[28, "module-torch.distributed.elastic.utils"]], "torch.distributed.elastic.utils.api": [[28, "module-torch.distributed.elastic.utils.api"]], "torch.distributed.elastic.utils.data": [[28, "module-torch.distributed.elastic.utils.data"]], "torch.distributed.elastic.utils.data.cycling_iterator": [[28, "module-torch.distributed.elastic.utils.data.cycling_iterator"]], "torch.distributed.elastic.utils.data.elastic_distributed_sampler": [[28, "module-torch.distributed.elastic.utils.data.elastic_distributed_sampler"]], "torch.distributed.elastic.utils.distributed": [[28, "module-torch.distributed.elastic.utils.distributed"]], "torch.distributed.elastic.utils.log_level": [[28, "module-torch.distributed.elastic.utils.log_level"]], "torch.distributed.elastic.utils.logging": [[28, "module-torch.distributed.elastic.utils.logging"]], "torch.distributed.elastic.utils.store": [[28, "module-torch.distributed.elastic.utils.store"]], "torch.distributed.fsdp.api": [[28, "module-torch.distributed.fsdp.api"]], "torch.distributed.fsdp.fully_sharded_data_parallel": [[28, "module-torch.distributed.fsdp.fully_sharded_data_parallel"]], "torch.distributed.fsdp.sharded_grad_scaler": [[28, "module-torch.distributed.fsdp.sharded_grad_scaler"]], "torch.distributed.fsdp.wrap": [[28, "module-torch.distributed.fsdp.wrap"]], "torch.distributed.launch": [[28, "module-torch.distributed.launch"]], "torch.distributed.launcher": [[28, "module-torch.distributed.launcher"]], "torch.distributed.launcher.api": [[28, "module-torch.distributed.launcher.api"]], "torch.distributed.logging_handlers": [[28, "module-torch.distributed.logging_handlers"]], "torch.distributed.nn": [[28, "module-torch.distributed.nn"]], "torch.distributed.nn.api": [[28, "module-torch.distributed.nn.api"]], "torch.distributed.nn.api.remote_module": [[28, "module-torch.distributed.nn.api.remote_module"]], "torch.distributed.nn.functional": [[28, "module-torch.distributed.nn.functional"]], "torch.distributed.nn.jit": [[28, "module-torch.distributed.nn.jit"]], "torch.distributed.nn.jit.instantiator": [[28, "module-torch.distributed.nn.jit.instantiator"]], "torch.distributed.nn.jit.templates": [[28, "module-torch.distributed.nn.jit.templates"]], "torch.distributed.nn.jit.templates.remote_module_template": [[28, "module-torch.distributed.nn.jit.templates.remote_module_template"]], "torch.distributed.optim.apply_optimizer_in_backward": [[28, "module-torch.distributed.optim.apply_optimizer_in_backward"]], "torch.distributed.optim.functional_adadelta": [[28, "module-torch.distributed.optim.functional_adadelta"]], "torch.distributed.optim.functional_adagrad": [[28, "module-torch.distributed.optim.functional_adagrad"]], "torch.distributed.optim.functional_adam": [[28, "module-torch.distributed.optim.functional_adam"]], "torch.distributed.optim.functional_adamax": [[28, "module-torch.distributed.optim.functional_adamax"]], "torch.distributed.optim.functional_adamw": [[28, "module-torch.distributed.optim.functional_adamw"]], "torch.distributed.optim.functional_rmsprop": [[28, "module-torch.distributed.optim.functional_rmsprop"]], "torch.distributed.optim.functional_rprop": [[28, "module-torch.distributed.optim.functional_rprop"]], "torch.distributed.optim.functional_sgd": [[28, "module-torch.distributed.optim.functional_sgd"]], "torch.distributed.optim.named_optimizer": [[28, "module-torch.distributed.optim.named_optimizer"]], "torch.distributed.optim.optimizer": [[28, "module-torch.distributed.optim.optimizer"]], "torch.distributed.optim.post_localsgd_optimizer": [[28, 
"module-torch.distributed.optim.post_localSGD_optimizer"]], "torch.distributed.optim.utils": [[28, "module-torch.distributed.optim.utils"]], "torch.distributed.optim.zero_redundancy_optimizer": [[28, "module-torch.distributed.optim.zero_redundancy_optimizer"]], "torch.distributed.remote_device": [[28, "module-torch.distributed.remote_device"]], "torch.distributed.rendezvous": [[28, "module-torch.distributed.rendezvous"]], "torch.distributed.rpc.api": [[28, "module-torch.distributed.rpc.api"]], "torch.distributed.rpc.backend_registry": [[28, "module-torch.distributed.rpc.backend_registry"]], "torch.distributed.rpc.constants": [[28, "module-torch.distributed.rpc.constants"]], "torch.distributed.rpc.functions": [[28, "module-torch.distributed.rpc.functions"]], "torch.distributed.rpc.internal": [[28, "module-torch.distributed.rpc.internal"]], "torch.distributed.rpc.options": [[28, "module-torch.distributed.rpc.options"]], "torch.distributed.rpc.rref_proxy": [[28, "module-torch.distributed.rpc.rref_proxy"]], "torch.distributed.rpc.server_process_global_profiler": [[28, "module-torch.distributed.rpc.server_process_global_profiler"]], "torch.distributed.tensor": [[28, "module-torch.distributed.tensor"]], "torch.distributed.tensor.parallel.api": [[28, "module-torch.distributed.tensor.parallel.api"]], "torch.distributed.tensor.parallel.ddp": [[28, "module-torch.distributed.tensor.parallel.ddp"]], "torch.distributed.tensor.parallel.fsdp": [[28, "module-torch.distributed.tensor.parallel.fsdp"]], "torch.distributed.tensor.parallel.input_reshard": [[28, "module-torch.distributed.tensor.parallel.input_reshard"]], "torch.distributed.tensor.parallel.loss": [[28, "module-torch.distributed.tensor.parallel.loss"]], "torch.distributed.tensor.parallel.style": [[28, "module-torch.distributed.tensor.parallel.style"]], "torch.distributed.utils": [[28, "module-torch.distributed.utils"]], "wait() (in module torch.distributed.store)": [[28, "torch.distributed.Store.wait"]], "join (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.Join"]], "joinhook (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.JoinHook"]], "joinable (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.Joinable"]], "join_device (torch.distributed.algorithms.joinable property)": [[29, "torch.distributed.algorithms.Joinable.join_device"]], "join_hook() (torch.distributed.algorithms.joinable method)": [[29, "torch.distributed.algorithms.Joinable.join_hook"]], "join_process_group (torch.distributed.algorithms.joinable property)": [[29, "torch.distributed.algorithms.Joinable.join_process_group"]], "main_hook() (torch.distributed.algorithms.joinhook method)": [[29, "torch.distributed.algorithms.JoinHook.main_hook"]], "notify_join_context() (torch.distributed.algorithms.join static method)": [[29, "torch.distributed.algorithms.Join.notify_join_context"]], "post_hook() (torch.distributed.algorithms.joinhook method)": [[29, "torch.distributed.algorithms.JoinHook.post_hook"]], "asyncstager (class in torch.distributed.checkpoint.staging)": [[30, "torch.distributed.checkpoint.staging.AsyncStager"]], "blockingasyncstager (class in torch.distributed.checkpoint.staging)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager"]], "broadcastingtorchsavereader (class in torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader"]], "defaultloadplanner (class in torch.distributed.checkpoint)": [[30, 
"torch.distributed.checkpoint.DefaultLoadPlanner"]], "defaultsaveplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner"]], "dynamicmetaloadplanner (class in torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner"]], "filesystemreader (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.FileSystemReader"]], "filesystemwriter (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.FileSystemWriter"]], "loadplan (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.LoadPlan"]], "loadplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.LoadPlanner"]], "readitem (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.ReadItem"]], "saveplan (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.SavePlan"]], "saveplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.SavePlanner"]], "statedictoptions (class in torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.StateDictOptions"]], "stateful (class in torch.distributed.checkpoint.stateful)": [[30, "torch.distributed.checkpoint.stateful.Stateful"]], "storagereader (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.StorageReader"]], "storagewriter (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.StorageWriter"]], "writeitem (class in torch.distributed.checkpoint.planner)": [[30, "torch.distributed.checkpoint.planner.WriteItem"]], "async_save() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.async_save"]], "checkpoint_id (torch.distributed.checkpoint.filesystemreader property)": [[30, "torch.distributed.checkpoint.FileSystemReader.checkpoint_id"]], "commit_tensor() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.commit_tensor"]], "create_global_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.create_global_plan"]], "create_global_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.create_global_plan"]], "create_local_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.create_local_plan"]], "create_local_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.create_local_plan"]], "dcp_to_torch_save() (in module torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.dcp_to_torch_save"]], "finish() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.finish"]], "finish_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.finish_plan"]], "finish_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.finish_plan"]], "get_model_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_model_state_dict"]], "get_optimizer_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_optimizer_state_dict"]], "get_state_dict() (in module 
torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_state_dict"]], "load() (in module torch.distributed.checkpoint.state_dict_loader)": [[30, "torch.distributed.checkpoint.state_dict_loader.load"]], "load_bytes() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.load_bytes"]], "load_state_dict() (in module torch.distributed.checkpoint.state_dict_loader)": [[30, "torch.distributed.checkpoint.state_dict_loader.load_state_dict"]], "load_state_dict() (torch.distributed.checkpoint.stateful.stateful method)": [[30, "torch.distributed.checkpoint.stateful.Stateful.load_state_dict"]], "lookup_object() (torch.distributed.checkpoint.defaultsaveplanner method)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner.lookup_object"]], "lookup_tensor() (torch.distributed.checkpoint.defaultloadplanner method)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner.lookup_tensor"]], "prepare_global_plan() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.prepare_global_plan"]], "prepare_global_plan() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.prepare_global_plan"]], "prepare_global_plan() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.prepare_global_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.prepare_local_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.prepare_local_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.prepare_local_plan"]], "read_data() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.read_data"]], "read_data() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.read_data"]], "read_metadata() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.read_metadata"]], "read_metadata() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.read_metadata"]], "reset() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.reset"]], "reset() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.reset"]], "reset() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.reset"]], "resolve_bytes() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.resolve_bytes"]], "resolve_data() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.resolve_data"]], "resolve_tensor() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.resolve_tensor"]], "save() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, 
"torch.distributed.checkpoint.state_dict_saver.save"]], "save_state_dict() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.save_state_dict"]], "set_model_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_model_state_dict"]], "set_optimizer_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_optimizer_state_dict"]], "set_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_state_dict"]], "set_up_planner() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.set_up_planner"]], "set_up_planner() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.set_up_planner"]], "set_up_planner() (torch.distributed.checkpoint.format_utils.dynamicmetaloadplanner method)": [[30, "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner.set_up_planner"]], "set_up_storage_reader() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.set_up_storage_reader"]], "set_up_storage_reader() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.set_up_storage_reader"]], "set_up_storage_writer() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.set_up_storage_writer"]], "should_synchronize_after_execute (torch.distributed.checkpoint.staging.asyncstager property)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.should_synchronize_after_execute"]], "stage() (torch.distributed.checkpoint.filesystemwriter method)": [[30, "torch.distributed.checkpoint.FileSystemWriter.stage"]], "stage() (torch.distributed.checkpoint.staging.asyncstager method)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.stage"]], "stage() (torch.distributed.checkpoint.staging.blockingasyncstager method)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager.stage"]], "state_dict() (torch.distributed.checkpoint.stateful.stateful method)": [[30, "torch.distributed.checkpoint.stateful.Stateful.state_dict"]], "storage_meta() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.storage_meta"]], "synchronize_staging() (torch.distributed.checkpoint.staging.asyncstager method)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.synchronize_staging"]], "synchronize_staging() (torch.distributed.checkpoint.staging.blockingasyncstager method)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager.synchronize_staging"]], "tensor_storage_size() (torch.distributed.checkpoint.planner.writeitem method)": [[30, "torch.distributed.checkpoint.planner.WriteItem.tensor_storage_size"]], "torch.distributed.checkpoint": [[30, "module-torch.distributed.checkpoint"]], "torch.distributed.checkpoint.format_utils": [[30, "module-torch.distributed.checkpoint.format_utils"]], "torch.distributed.checkpoint.logger": [[30, "module-torch.distributed.checkpoint.logger"]], "torch.distributed.checkpoint.logging_handlers": [[30, "module-torch.distributed.checkpoint.logging_handlers"]], "torch.distributed.checkpoint.staging": [[30, "module-torch.distributed.checkpoint.staging"]], "torch_save_to_dcp() (in module 
torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.torch_save_to_dcp"]], "transform_object() (torch.distributed.checkpoint.defaultsaveplanner method)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner.transform_object"]], "transform_tensor() (torch.distributed.checkpoint.defaultloadplanner method)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner.transform_tensor"]], "validate_checkpoint_id() (torch.distributed.checkpoint.storagereader class method)": [[30, "torch.distributed.checkpoint.StorageReader.validate_checkpoint_id"]], "validate_checkpoint_id() (torch.distributed.checkpoint.storagewriter class method)": [[30, "torch.distributed.checkpoint.StorageWriter.validate_checkpoint_id"]], "validate_checkpoint_id() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader class method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.validate_checkpoint_id"]], "write_data() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.write_data"]], "distributedoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.DistributedOptimizer"]], "postlocalsgdoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer"]], "zeroredundancyoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer"]], "add_param_group() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.add_param_group"]], "consolidate_state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.consolidate_state_dict"]], "join_device (torch.distributed.optim.zeroredundancyoptimizer property)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_device"]], "join_hook() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_hook"]], "join_process_group (torch.distributed.optim.zeroredundancyoptimizer property)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_process_group"]], "load_state_dict() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.load_state_dict"]], "load_state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.load_state_dict"]], "state_dict() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.state_dict"]], "state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.state_dict"]], "step() (torch.distributed.optim.distributedoptimizer method)": [[32, "torch.distributed.optim.DistributedOptimizer.step"]], "step() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.step"]], "step() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.step"]], "torch.distributed.optim": [[32, "module-torch.distributed.optim"]], "pipe (class in torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.Pipe"]], "pipelineschedulemulti (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleMulti"]], "pipelineschedulesingle (class in 
torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleSingle"]], "pipelinestage (class in torch.distributed.pipelining.stage)": [[33, "torch.distributed.pipelining.stage.PipelineStage"]], "schedule1f1b (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.Schedule1F1B"]], "schedulegpipe (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleGPipe"]], "scheduleinterleaved1f1b (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleInterleaved1F1B"]], "scheduleloopedbfs (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleLoopedBFS"]], "splitpoint (class in torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.SplitPoint"]], "tensorchunkspec (class in torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.TensorChunkSpec"]], "build_stage() (in module torch.distributed.pipelining.stage)": [[33, "torch.distributed.pipelining.stage.build_stage"]], "merge_chunks() (in module torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.merge_chunks"]], "pipe_split() (in module torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.pipe_split"]], "pipeline() (in module torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.pipeline"]], "split_args_kwargs_into_chunks() (in module torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.split_args_kwargs_into_chunks"]], "step() (torch.distributed.pipelining.schedules.pipelineschedulemulti method)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleMulti.step"]], "step() (torch.distributed.pipelining.schedules.pipelineschedulesingle method)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleSingle.step"]], "torch.distributed.pipelining": [[33, "module-torch.distributed.pipelining"]], "torch.distributed.pipelining.microbatch": [[33, "module-torch.distributed.pipelining.microbatch"]], "torch.distributed.pipelining.schedules": [[33, "module-torch.distributed.pipelining.schedules"]], "torch.distributed.pipelining.stage": [[33, "module-torch.distributed.pipelining.stage"]], "colwiseparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.ColwiseParallel"]], "preparemoduleinput (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.PrepareModuleInput"]], "preparemoduleoutput (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.PrepareModuleOutput"]], "rowwiseparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.RowwiseParallel"]], "sequenceparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.SequenceParallel"]], "loss_parallel() (in module torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.loss_parallel"]], "parallelize_module() (in module torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.parallelize_module"]], "torch.distributed.tensor.parallel": [[34, "module-torch.distributed.tensor.parallel"]], "abstransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.AbsTransform"]], "affinetransform (class in torch.distributions.transforms)": [[35, 
"torch.distributions.transforms.AffineTransform"]], "bernoulli (class in torch.distributions.bernoulli)": [[35, "torch.distributions.bernoulli.Bernoulli"]], "beta (class in torch.distributions.beta)": [[35, "torch.distributions.beta.Beta"]], "binomial (class in torch.distributions.binomial)": [[35, "torch.distributions.binomial.Binomial"]], "cattransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CatTransform"]], "categorical (class in torch.distributions.categorical)": [[35, "torch.distributions.categorical.Categorical"]], "cauchy (class in torch.distributions.cauchy)": [[35, "torch.distributions.cauchy.Cauchy"]], "chi2 (class in torch.distributions.chi2)": [[35, "torch.distributions.chi2.Chi2"]], "composetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ComposeTransform"]], "constraint (class in torch.distributions.constraints)": [[35, "torch.distributions.constraints.Constraint"]], "constraintregistry (class in torch.distributions.constraint_registry)": [[35, "torch.distributions.constraint_registry.ConstraintRegistry"]], "continuousbernoulli (class in torch.distributions.continuous_bernoulli)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli"]], "corrcholeskytransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CorrCholeskyTransform"]], "cumulativedistributiontransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CumulativeDistributionTransform"]], "dirichlet (class in torch.distributions.dirichlet)": [[35, "torch.distributions.dirichlet.Dirichlet"]], "distribution (class in torch.distributions.distribution)": [[35, "torch.distributions.distribution.Distribution"]], "exptransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ExpTransform"]], "exponential (class in torch.distributions.exponential)": [[35, "torch.distributions.exponential.Exponential"]], "exponentialfamily (class in torch.distributions.exp_family)": [[35, "torch.distributions.exp_family.ExponentialFamily"]], "fishersnedecor (class in torch.distributions.fishersnedecor)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor"]], "gamma (class in torch.distributions.gamma)": [[35, "torch.distributions.gamma.Gamma"]], "geometric (class in torch.distributions.geometric)": [[35, "torch.distributions.geometric.Geometric"]], "gumbel (class in torch.distributions.gumbel)": [[35, "torch.distributions.gumbel.Gumbel"]], "halfcauchy (class in torch.distributions.half_cauchy)": [[35, "torch.distributions.half_cauchy.HalfCauchy"]], "halfnormal (class in torch.distributions.half_normal)": [[35, "torch.distributions.half_normal.HalfNormal"]], "independent (class in torch.distributions.independent)": [[35, "torch.distributions.independent.Independent"]], "independenttransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.IndependentTransform"]], "inversegamma (class in torch.distributions.inverse_gamma)": [[35, "torch.distributions.inverse_gamma.InverseGamma"]], "kumaraswamy (class in torch.distributions.kumaraswamy)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy"]], "lkjcholesky (class in torch.distributions.lkj_cholesky)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky"]], "laplace (class in torch.distributions.laplace)": [[35, "torch.distributions.laplace.Laplace"]], "lognormal (class in torch.distributions.log_normal)": [[35, "torch.distributions.log_normal.LogNormal"]], 
"logitrelaxedbernoulli (class in torch.distributions.relaxed_bernoulli)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli"]], "lowrankmultivariatenormal (class in torch.distributions.lowrank_multivariate_normal)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal"]], "lowercholeskytransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.LowerCholeskyTransform"]], "mixturesamefamily (class in torch.distributions.mixture_same_family)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily"]], "multinomial (class in torch.distributions.multinomial)": [[35, "torch.distributions.multinomial.Multinomial"]], "multivariatenormal (class in torch.distributions.multivariate_normal)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal"]], "negativebinomial (class in torch.distributions.negative_binomial)": [[35, "torch.distributions.negative_binomial.NegativeBinomial"]], "normal (class in torch.distributions.normal)": [[35, "torch.distributions.normal.Normal"]], "onehotcategorical (class in torch.distributions.one_hot_categorical)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical"]], "pareto (class in torch.distributions.pareto)": [[35, "torch.distributions.pareto.Pareto"]], "poisson (class in torch.distributions.poisson)": [[35, "torch.distributions.poisson.Poisson"]], "positivedefinitetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.PositiveDefiniteTransform"]], "powertransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.PowerTransform"]], "relaxedbernoulli (class in torch.distributions.relaxed_bernoulli)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli"]], "relaxedonehotcategorical (class in torch.distributions.relaxed_categorical)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical"]], "reshapetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ReshapeTransform"]], "sigmoidtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SigmoidTransform"]], "softmaxtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SoftmaxTransform"]], "softplustransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SoftplusTransform"]], "stacktransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.StackTransform"]], "stickbreakingtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.StickBreakingTransform"]], "studentt (class in torch.distributions.studentt)": [[35, "torch.distributions.studentT.StudentT"]], "tanhtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.TanhTransform"]], "transform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.Transform"]], "transformeddistribution (class in torch.distributions.transformed_distribution)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution"]], "uniform (class in torch.distributions.uniform)": [[35, "torch.distributions.uniform.Uniform"]], "vonmises (class in torch.distributions.von_mises)": [[35, "torch.distributions.von_mises.VonMises"]], "weibull (class in torch.distributions.weibull)": [[35, "torch.distributions.weibull.Weibull"]], "wishart (class in torch.distributions.wishart)": [[35, 
"torch.distributions.wishart.Wishart"]], "arg_constraints (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.arg_constraints"]], "arg_constraints (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.arg_constraints"]], "arg_constraints (torch.distributions.binomial.binomial attribute)": [[35, "torch.distributions.binomial.Binomial.arg_constraints"]], "arg_constraints (torch.distributions.categorical.categorical attribute)": [[35, "torch.distributions.categorical.Categorical.arg_constraints"]], "arg_constraints (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.arg_constraints"]], "arg_constraints (torch.distributions.chi2.chi2 attribute)": [[35, "torch.distributions.chi2.Chi2.arg_constraints"]], "arg_constraints (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.arg_constraints"]], "arg_constraints (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.arg_constraints"]], "arg_constraints (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.arg_constraints"]], "arg_constraints (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.arg_constraints"]], "arg_constraints (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.arg_constraints"]], "arg_constraints (torch.distributions.geometric.geometric attribute)": [[35, "torch.distributions.geometric.Geometric.arg_constraints"]], "arg_constraints (torch.distributions.gumbel.gumbel attribute)": [[35, "torch.distributions.gumbel.Gumbel.arg_constraints"]], "arg_constraints (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.arg_constraints"]], "arg_constraints (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.arg_constraints"]], "arg_constraints (torch.distributions.independent.independent attribute)": [[35, "torch.distributions.independent.Independent.arg_constraints"]], "arg_constraints (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.arg_constraints"]], "arg_constraints (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.arg_constraints"]], "arg_constraints (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.arg_constraints"]], "arg_constraints (torch.distributions.lkj_cholesky.lkjcholesky attribute)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.arg_constraints"]], "arg_constraints (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.arg_constraints"]], "arg_constraints (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.arg_constraints"]], "arg_constraints (torch.distributions.mixture_same_family.mixturesamefamily attribute)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.arg_constraints"]], "arg_constraints 
(torch.distributions.multinomial.multinomial attribute)": [[35, "torch.distributions.multinomial.Multinomial.arg_constraints"]], "arg_constraints (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.arg_constraints"]], "arg_constraints (torch.distributions.negative_binomial.negativebinomial attribute)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.arg_constraints"]], "arg_constraints (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.arg_constraints"]], "arg_constraints (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.arg_constraints"]], "arg_constraints (torch.distributions.pareto.pareto attribute)": [[35, "torch.distributions.pareto.Pareto.arg_constraints"]], "arg_constraints (torch.distributions.poisson.poisson attribute)": [[35, "torch.distributions.poisson.Poisson.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.arg_constraints"]], "arg_constraints (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.arg_constraints"]], "arg_constraints (torch.distributions.transformed_distribution.transformeddistribution attribute)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.arg_constraints"]], "arg_constraints (torch.distributions.uniform.uniform attribute)": [[35, "torch.distributions.uniform.Uniform.arg_constraints"]], "arg_constraints (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.arg_constraints"]], "arg_constraints (torch.distributions.weibull.weibull attribute)": [[35, "torch.distributions.weibull.Weibull.arg_constraints"]], "arg_constraints (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.arg_constraints"]], "batch_shape (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.batch_shape"]], "cat (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.cat"]], "cdf() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.cdf"]], "cdf() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.cdf"]], "cdf() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.cdf"]], "cdf() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.cdf"]], "cdf() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.cdf"]], "cdf() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.cdf"]], "cdf() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.cdf"]], "cdf() (torch.distributions.laplace.laplace method)": [[35, 
"torch.distributions.laplace.Laplace.cdf"]], "cdf() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.cdf"]], "cdf() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.cdf"]], "cdf() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.cdf"]], "cdf() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.cdf"]], "check() (torch.distributions.constraints.constraint method)": [[35, "torch.distributions.constraints.Constraint.check"]], "component_distribution (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.component_distribution"]], "concentration (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.concentration"]], "concentration0 (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.concentration0"]], "concentration1 (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.concentration1"]], "covariance_matrix (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.covariance_matrix"]], "covariance_matrix (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.covariance_matrix"]], "covariance_matrix (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.covariance_matrix"]], "dependent_property (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.dependent_property"]], "df (torch.distributions.chi2.chi2 property)": [[35, "torch.distributions.chi2.Chi2.df"]], "entropy() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.entropy"]], "entropy() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.entropy"]], "entropy() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.entropy"]], "entropy() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.entropy"]], "entropy() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.entropy"]], "entropy() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.entropy"]], "entropy() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.entropy"]], "entropy() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.entropy"]], "entropy() (torch.distributions.exp_family.exponentialfamily method)": [[35, "torch.distributions.exp_family.ExponentialFamily.entropy"]], "entropy() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.entropy"]], "entropy() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.entropy"]], "entropy() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.entropy"]], "entropy() (torch.distributions.gumbel.gumbel method)": [[35, 
"torch.distributions.gumbel.Gumbel.entropy"]], "entropy() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.entropy"]], "entropy() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.entropy"]], "entropy() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.entropy"]], "entropy() (torch.distributions.inverse_gamma.inversegamma method)": [[35, "torch.distributions.inverse_gamma.InverseGamma.entropy"]], "entropy() (torch.distributions.kumaraswamy.kumaraswamy method)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.entropy"]], "entropy() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.entropy"]], "entropy() (torch.distributions.log_normal.lognormal method)": [[35, "torch.distributions.log_normal.LogNormal.entropy"]], "entropy() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.entropy"]], "entropy() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.entropy"]], "entropy() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.entropy"]], "entropy() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.entropy"]], "entropy() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.entropy"]], "entropy() (torch.distributions.pareto.pareto method)": [[35, "torch.distributions.pareto.Pareto.entropy"]], "entropy() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.entropy"]], "entropy() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.entropy"]], "entropy() (torch.distributions.weibull.weibull method)": [[35, "torch.distributions.weibull.Weibull.entropy"]], "entropy() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.entropy"]], "enumerate_support() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.enumerate_support"]], "enumerate_support() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.enumerate_support"]], "enumerate_support() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.enumerate_support"]], "enumerate_support() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.enumerate_support"]], "enumerate_support() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.enumerate_support"]], "enumerate_support() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.enumerate_support"]], "event_shape (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.event_shape"]], "expand() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.expand"]], "expand() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.expand"]], "expand() (torch.distributions.binomial.binomial 
method)": [[35, "torch.distributions.binomial.Binomial.expand"]], "expand() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.expand"]], "expand() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.expand"]], "expand() (torch.distributions.chi2.chi2 method)": [[35, "torch.distributions.chi2.Chi2.expand"]], "expand() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.expand"]], "expand() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.expand"]], "expand() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.expand"]], "expand() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.expand"]], "expand() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.expand"]], "expand() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.expand"]], "expand() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.expand"]], "expand() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.expand"]], "expand() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.expand"]], "expand() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.expand"]], "expand() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.expand"]], "expand() (torch.distributions.inverse_gamma.inversegamma method)": [[35, "torch.distributions.inverse_gamma.InverseGamma.expand"]], "expand() (torch.distributions.kumaraswamy.kumaraswamy method)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.expand"]], "expand() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.expand"]], "expand() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.expand"]], "expand() (torch.distributions.log_normal.lognormal method)": [[35, "torch.distributions.log_normal.LogNormal.expand"]], "expand() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.expand"]], "expand() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.expand"]], "expand() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.expand"]], "expand() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.expand"]], "expand() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.expand"]], "expand() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.expand"]], "expand() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.expand"]], "expand() (torch.distributions.pareto.pareto method)": [[35, "torch.distributions.pareto.Pareto.expand"]], 
"expand() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.expand"]], "expand() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.expand"]], "expand() (torch.distributions.relaxed_bernoulli.relaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.expand"]], "expand() (torch.distributions.relaxed_categorical.relaxedonehotcategorical method)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.expand"]], "expand() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.expand"]], "expand() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.expand"]], "expand() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.expand"]], "expand() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.expand"]], "expand() (torch.distributions.weibull.weibull method)": [[35, "torch.distributions.weibull.Weibull.expand"]], "expand() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.expand"]], "forward_shape() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.forward_shape"]], "greater_than (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.greater_than"]], "greater_than_eq (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.greater_than_eq"]], "half_open_interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.half_open_interval"]], "has_enumerate_support (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.has_enumerate_support"]], "has_enumerate_support (torch.distributions.binomial.binomial attribute)": [[35, "torch.distributions.binomial.Binomial.has_enumerate_support"]], "has_enumerate_support (torch.distributions.categorical.categorical attribute)": [[35, "torch.distributions.categorical.Categorical.has_enumerate_support"]], "has_enumerate_support (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.has_enumerate_support"]], "has_enumerate_support (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.has_enumerate_support"]], "has_rsample (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.has_rsample"]], "has_rsample (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.has_rsample"]], "has_rsample (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.has_rsample"]], "has_rsample (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.has_rsample"]], "has_rsample (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.has_rsample"]], "has_rsample (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.has_rsample"]], "has_rsample (torch.distributions.gamma.gamma attribute)": [[35, 
"torch.distributions.gamma.Gamma.has_rsample"]], "has_rsample (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.has_rsample"]], "has_rsample (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.has_rsample"]], "has_rsample (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.has_rsample"]], "has_rsample (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.has_rsample"]], "has_rsample (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.has_rsample"]], "has_rsample (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.has_rsample"]], "has_rsample (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.has_rsample"]], "has_rsample (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.has_rsample"]], "has_rsample (torch.distributions.mixture_same_family.mixturesamefamily attribute)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.has_rsample"]], "has_rsample (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.has_rsample"]], "has_rsample (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.has_rsample"]], "has_rsample (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.has_rsample"]], "has_rsample (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.has_rsample"]], "has_rsample (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.has_rsample"]], "has_rsample (torch.distributions.transformed_distribution.transformeddistribution property)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.has_rsample"]], "has_rsample (torch.distributions.uniform.uniform attribute)": [[35, "torch.distributions.uniform.Uniform.has_rsample"]], "has_rsample (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.has_rsample"]], "has_rsample (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.has_rsample"]], "icdf() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.icdf"]], "icdf() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.icdf"]], "icdf() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.icdf"]], "icdf() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.icdf"]], "icdf() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.icdf"]], "icdf() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.icdf"]], "icdf() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.icdf"]], "icdf() 
(torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.icdf"]], "icdf() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.icdf"]], "icdf() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.icdf"]], "independent (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.independent"]], "integer_interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.integer_interval"]], "interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.interval"]], "inv (torch.distributions.transforms.transform property)": [[35, "torch.distributions.transforms.Transform.inv"]], "inverse_shape() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.inverse_shape"]], "kl_divergence() (in module torch.distributions.kl)": [[35, "torch.distributions.kl.kl_divergence"]], "less_than (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.less_than"]], "loc (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.loc"]], "log_abs_det_jacobian() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.log_abs_det_jacobian"]], "log_prob() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.log_prob"]], "log_prob() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.log_prob"]], "log_prob() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.log_prob"]], "log_prob() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.log_prob"]], "log_prob() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.log_prob"]], "log_prob() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.log_prob"]], "log_prob() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.log_prob"]], "log_prob() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.log_prob"]], "log_prob() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.log_prob"]], "log_prob() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.log_prob"]], "log_prob() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.log_prob"]], "log_prob() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.log_prob"]], "log_prob() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.log_prob"]], "log_prob() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.log_prob"]], "log_prob() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.log_prob"]], "log_prob() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.log_prob"]], "log_prob() (torch.distributions.laplace.laplace method)": [[35, 
"torch.distributions.laplace.Laplace.log_prob"]], "log_prob() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.log_prob"]], "log_prob() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.log_prob"]], "log_prob() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.log_prob"]], "log_prob() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.log_prob"]], "log_prob() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.log_prob"]], "log_prob() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.log_prob"]], "log_prob() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.log_prob"]], "log_prob() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.log_prob"]], "log_prob() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.log_prob"]], "log_prob() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.log_prob"]], "log_prob() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.log_prob"]], "log_prob() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.log_prob"]], "log_prob() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.log_prob"]], "log_prob() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.log_prob"]], "log_prob() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.log_prob"]], "logits (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.logits"]], "logits (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.logits"]], "logits (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.logits"]], "logits (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.logits"]], "logits (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.logits"]], "logits (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.logits"]], "logits (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.logits"]], "logits (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.logits"]], "logits (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.logits"]], "logits (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, 
"torch.distributions.relaxed_bernoulli.RelaxedBernoulli.logits"]], "logits (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.logits"]], "mean (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.mean"]], "mean (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.mean"]], "mean (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.mean"]], "mean (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.mean"]], "mean (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.mean"]], "mean (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.mean"]], "mean (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.mean"]], "mean (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.mean"]], "mean (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.mean"]], "mean (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.mean"]], "mean (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.mean"]], "mean (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.mean"]], "mean (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.mean"]], "mean (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.mean"]], "mean (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.mean"]], "mean (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.mean"]], "mean (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.mean"]], "mean (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.mean"]], "mean (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.mean"]], "mean (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.mean"]], "mean (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mean"]], "mean (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.mean"]], "mean (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.mean"]], "mean (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.mean"]], "mean (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.mean"]], "mean (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.mean"]], "mean (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, 
"torch.distributions.one_hot_categorical.OneHotCategorical.mean"]], "mean (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.mean"]], "mean (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.mean"]], "mean (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.mean"]], "mean (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.mean"]], "mean (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.mean"]], "mean (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.mean"]], "mean (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.mean"]], "mixture_distribution (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.mixture_distribution"]], "mode (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.mode"]], "mode (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.mode"]], "mode (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.mode"]], "mode (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.mode"]], "mode (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.mode"]], "mode (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.mode"]], "mode (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.mode"]], "mode (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.mode"]], "mode (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.mode"]], "mode (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.mode"]], "mode (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.mode"]], "mode (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.mode"]], "mode (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.mode"]], "mode (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.mode"]], "mode (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.mode"]], "mode (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.mode"]], "mode (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.mode"]], "mode (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.mode"]], "mode (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.mode"]], "mode (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mode"]], "mode (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.mode"]], "mode 
(torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.mode"]], "mode (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.mode"]], "mode (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.mode"]], "mode (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.mode"]], "mode (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.mode"]], "mode (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.mode"]], "mode (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.mode"]], "mode (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.mode"]], "mode (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.mode"]], "mode (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.mode"]], "multinomial (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.multinomial"]], "param_shape (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.param_shape"]], "param_shape (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.param_shape"]], "param_shape (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.param_shape"]], "param_shape (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.param_shape"]], "param_shape (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.param_shape"]], "param_shape (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.param_shape"]], "param_shape (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.param_shape"]], "param_shape (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.param_shape"]], "perplexity() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.perplexity"]], "precision_matrix (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.precision_matrix"]], "precision_matrix (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.precision_matrix"]], "precision_matrix (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.precision_matrix"]], "probs (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.probs"]], "probs (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.probs"]], "probs (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.probs"]], "probs (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, 
"torch.distributions.continuous_bernoulli.ContinuousBernoulli.probs"]], "probs (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.probs"]], "probs (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.probs"]], "probs (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.probs"]], "probs (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.probs"]], "probs (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.probs"]], "probs (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.probs"]], "probs (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.probs"]], "rate (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.rate"]], "register() (torch.distributions.constraint_registry.constraintregistry method)": [[35, "torch.distributions.constraint_registry.ConstraintRegistry.register"]], "register_kl() (in module torch.distributions.kl)": [[35, "torch.distributions.kl.register_kl"]], "rsample() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.rsample"]], "rsample() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.rsample"]], "rsample() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.rsample"]], "rsample() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.rsample"]], "rsample() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.rsample"]], "rsample() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.rsample"]], "rsample() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.rsample"]], "rsample() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.rsample"]], "rsample() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.rsample"]], "rsample() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.rsample"]], "rsample() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.rsample"]], "rsample() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.rsample"]], "rsample() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.rsample"]], "rsample() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.rsample"]], "rsample() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.rsample"]], "rsample() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, 
"torch.distributions.transformed_distribution.TransformedDistribution.rsample"]], "rsample() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.rsample"]], "rsample() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.rsample"]], "sample() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.sample"]], "sample() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.sample"]], "sample() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.sample"]], "sample() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.sample"]], "sample() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.sample"]], "sample() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.sample"]], "sample() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.sample"]], "sample() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.sample"]], "sample() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.sample"]], "sample() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.sample"]], "sample() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.sample"]], "sample() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.sample"]], "sample() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.sample"]], "sample() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.sample"]], "sample() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.sample"]], "sample() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.sample"]], "sample_n() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.sample_n"]], "scale (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.scale"]], "scale (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.scale"]], "scale (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.scale"]], "scale_tril (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.scale_tril"]], "scale_tril (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.scale_tril"]], "scale_tril (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.scale_tril"]], "set_default_validate_args() (torch.distributions.distribution.distribution static method)": [[35, 
"torch.distributions.distribution.Distribution.set_default_validate_args"]], "sign (torch.distributions.transforms.transform property)": [[35, "torch.distributions.transforms.Transform.sign"]], "stack (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.stack"]], "stddev (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.stddev"]], "stddev (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.stddev"]], "stddev (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.stddev"]], "stddev (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.stddev"]], "stddev (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.stddev"]], "stddev (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.stddev"]], "stddev (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.stddev"]], "support (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.support"]], "support (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.support"]], "support (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.support"]], "support (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.support"]], "support (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.support"]], "support (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.support"]], "support (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.support"]], "support (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.support"]], "support (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.support"]], "support (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.support"]], "support (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.support"]], "support (torch.distributions.geometric.geometric attribute)": [[35, "torch.distributions.geometric.Geometric.support"]], "support (torch.distributions.gumbel.gumbel attribute)": [[35, "torch.distributions.gumbel.Gumbel.support"]], "support (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.support"]], "support (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.support"]], "support (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.support"]], "support (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.support"]], "support (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.support"]], "support (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.support"]], "support 
(torch.distributions.lkj_cholesky.lkjcholesky attribute)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.support"]], "support (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.support"]], "support (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.support"]], "support (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.support"]], "support (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.support"]], "support (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.support"]], "support (torch.distributions.negative_binomial.negativebinomial attribute)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.support"]], "support (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.support"]], "support (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.support"]], "support (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.support"]], "support (torch.distributions.poisson.poisson attribute)": [[35, "torch.distributions.poisson.Poisson.support"]], "support (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.support"]], "support (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.support"]], "support (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.support"]], "support (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.support"]], "support (torch.distributions.transformed_distribution.transformeddistribution property)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.support"]], "support (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.support"]], "support (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.support"]], "support (torch.distributions.weibull.weibull attribute)": [[35, "torch.distributions.weibull.Weibull.support"]], "support (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.support"]], "temperature (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.temperature"]], "temperature (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.temperature"]], "torch.distributions": [[35, "module-torch.distributions"]], "torch.distributions.bernoulli": [[35, "module-torch.distributions.bernoulli"]], "torch.distributions.beta": [[35, "module-torch.distributions.beta"]], "torch.distributions.binomial": [[35, "module-torch.distributions.binomial"]], "torch.distributions.categorical": [[35, "module-torch.distributions.categorical"]], "torch.distributions.cauchy": [[35, 
"module-torch.distributions.cauchy"]], "torch.distributions.chi2": [[35, "module-torch.distributions.chi2"]], "torch.distributions.constraint_registry": [[35, "module-torch.distributions.constraint_registry"]], "torch.distributions.constraints": [[35, "module-torch.distributions.constraints"]], "torch.distributions.continuous_bernoulli": [[35, "module-torch.distributions.continuous_bernoulli"]], "torch.distributions.dirichlet": [[35, "module-torch.distributions.dirichlet"]], "torch.distributions.distribution": [[35, "module-torch.distributions.distribution"]], "torch.distributions.exp_family": [[35, "module-torch.distributions.exp_family"]], "torch.distributions.exponential": [[35, "module-torch.distributions.exponential"]], "torch.distributions.fishersnedecor": [[35, "module-torch.distributions.fishersnedecor"]], "torch.distributions.gamma": [[35, "module-torch.distributions.gamma"]], "torch.distributions.geometric": [[35, "module-torch.distributions.geometric"]], "torch.distributions.gumbel": [[35, "module-torch.distributions.gumbel"]], "torch.distributions.half_cauchy": [[35, "module-torch.distributions.half_cauchy"]], "torch.distributions.half_normal": [[35, "module-torch.distributions.half_normal"]], "torch.distributions.independent": [[35, "module-torch.distributions.independent"]], "torch.distributions.inverse_gamma": [[35, "module-torch.distributions.inverse_gamma"]], "torch.distributions.kl": [[35, "module-torch.distributions.kl"]], "torch.distributions.kumaraswamy": [[35, "module-torch.distributions.kumaraswamy"]], "torch.distributions.laplace": [[35, "module-torch.distributions.laplace"]], "torch.distributions.lkj_cholesky": [[35, "module-torch.distributions.lkj_cholesky"]], "torch.distributions.log_normal": [[35, "module-torch.distributions.log_normal"]], "torch.distributions.logistic_normal": [[35, "module-torch.distributions.logistic_normal"]], "torch.distributions.lowrank_multivariate_normal": [[35, "module-torch.distributions.lowrank_multivariate_normal"]], "torch.distributions.mixture_same_family": [[35, "module-torch.distributions.mixture_same_family"]], "torch.distributions.multinomial": [[35, "module-torch.distributions.multinomial"]], "torch.distributions.multivariate_normal": [[35, "module-torch.distributions.multivariate_normal"]], "torch.distributions.negative_binomial": [[35, "module-torch.distributions.negative_binomial"]], "torch.distributions.normal": [[35, "module-torch.distributions.normal"]], "torch.distributions.one_hot_categorical": [[35, "module-torch.distributions.one_hot_categorical"]], "torch.distributions.pareto": [[35, "module-torch.distributions.pareto"]], "torch.distributions.poisson": [[35, "module-torch.distributions.poisson"]], "torch.distributions.relaxed_bernoulli": [[35, "module-torch.distributions.relaxed_bernoulli"]], "torch.distributions.relaxed_categorical": [[35, "module-torch.distributions.relaxed_categorical"]], "torch.distributions.studentt": [[35, "module-torch.distributions.studentT"]], "torch.distributions.transformed_distribution": [[35, "module-torch.distributions.transformed_distribution"]], "torch.distributions.transforms": [[35, "module-torch.distributions.transforms"]], "torch.distributions.uniform": [[35, "module-torch.distributions.uniform"]], "torch.distributions.utils": [[35, "module-torch.distributions.utils"]], "torch.distributions.von_mises": [[35, "module-torch.distributions.von_mises"]], "torch.distributions.weibull": [[35, "module-torch.distributions.weibull"]], "torch.distributions.wishart": [[35, 
"module-torch.distributions.wishart"]], "total_count (torch.distributions.multinomial.multinomial attribute)": [[35, "torch.distributions.multinomial.Multinomial.total_count"]], "variance (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.variance"]], "variance (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.variance"]], "variance (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.variance"]], "variance (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.variance"]], "variance (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.variance"]], "variance (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.variance"]], "variance (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.variance"]], "variance (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.variance"]], "variance (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.variance"]], "variance (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.variance"]], "variance (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.variance"]], "variance (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.variance"]], "variance (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.variance"]], "variance (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.variance"]], "variance (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.variance"]], "variance (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.variance"]], "variance (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.variance"]], "variance (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.variance"]], "variance (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.variance"]], "variance (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.variance"]], "variance (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.variance"]], "variance (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.variance"]], "variance (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.variance"]], "variance (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.variance"]], "variance (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.variance"]], "variance (torch.distributions.normal.normal property)": 
[[35, "torch.distributions.normal.Normal.variance"]], "variance (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.variance"]], "variance (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.variance"]], "variance (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.variance"]], "variance (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.variance"]], "variance (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.variance"]], "variance (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.variance"]], "variance (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.variance"]], "variance (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.variance"]], "from_dlpack() (in module torch.utils.dlpack)": [[36, "torch.utils.dlpack.from_dlpack"]], "to_dlpack() (in module torch.utils.dlpack)": [[36, "torch.utils.dlpack.to_dlpack"]], "elasticagent (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent"]], "healthcheckserver (class in torch.distributed.elastic.agent.server.health_check_server)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer"]], "localelasticagent (class in torch.distributed.elastic.agent.server.local_elastic_agent)": [[37, "torch.distributed.elastic.agent.server.local_elastic_agent.LocalElasticAgent"]], "runresult (class in torch.distributed.elastic.agent.server.api)": [[37, "torch.distributed.elastic.agent.server.api.RunResult"]], "simpleelasticagent (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent"]], "worker (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.Worker"]], "workergroup (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerGroup"]], "workerspec (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerSpec"]], "workerstate (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerState"]], "_assign_worker_ranks() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._assign_worker_ranks"]], "_exit_barrier() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._exit_barrier"]], "_initialize_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._initialize_workers"]], "_monitor_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._monitor_workers"]], "_rendezvous() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._rendezvous"]], "_restart_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._restart_workers"]], "_shutdown() 
(torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._shutdown"]], "_start_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._start_workers"]], "_stop_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._stop_workers"]], "create_healthcheck_server() (in module torch.distributed.elastic.agent.server.health_check_server)": [[37, "torch.distributed.elastic.agent.server.health_check_server.create_healthcheck_server"]], "get_entrypoint_name() (torch.distributed.elastic.agent.server.workerspec method)": [[37, "torch.distributed.elastic.agent.server.WorkerSpec.get_entrypoint_name"]], "get_worker_group() (torch.distributed.elastic.agent.server.elasticagent method)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent.get_worker_group"]], "is_running() (torch.distributed.elastic.agent.server.workerstate static method)": [[37, "torch.distributed.elastic.agent.server.WorkerState.is_running"]], "run() (torch.distributed.elastic.agent.server.elasticagent method)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent.run"]], "start() (torch.distributed.elastic.agent.server.health_check_server.healthcheckserver method)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer.start"]], "stop() (torch.distributed.elastic.agent.server.health_check_server.healthcheckserver method)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer.stop"]], "torch.distributed.elastic.agent": [[37, "module-torch.distributed.elastic.agent"]], "torch.distributed.elastic.agent.server": [[37, "module-torch.distributed.elastic.agent.server"]], "torch.distributed.elastic.agent.server.health_check_server": [[37, "module-torch.distributed.elastic.agent.server.health_check_server"]], "torch.distributed.elastic.control_plane": [[38, "module-torch.distributed.elastic.control_plane"]], "worker_main() (in module torch.distributed.elastic.control_plane)": [[38, "torch.distributed.elastic.control_plane.worker_main"]], "childfailederror (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ChildFailedError"]], "errorhandler (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ErrorHandler"]], "processfailure (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ProcessFailure"]], "record() (in module torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.record"]], "torch.distributed.elastic.multiprocessing.errors": [[40, "module-torch.distributed.elastic.multiprocessing.errors"]], "event (class in torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.Event"]], "eventmetadatavalue (in module torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.EventMetadataValue"]], "eventsource (class in torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.EventSource"]], "get_logging_handler() (in module torch.distributed.elastic.events)": [[41, "torch.distributed.elastic.events.get_logging_handler"]], "record() (in module torch.distributed.elastic.events)": [[41, 
"torch.distributed.elastic.events.record"]], "torch.distributed.elastic.events": [[41, "module-torch.distributed.elastic.events"]], "consolemetrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.ConsoleMetricHandler"]], "metrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.MetricHandler"]], "nullmetrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.NullMetricHandler"]], "configure() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.configure"]], "prof() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.prof"]], "put_metric() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.put_metric"]], "torch.distributed.elastic.metrics": [[44, "module-torch.distributed.elastic.metrics"]], "defaultlogsspecs (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs"]], "logsdest (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsDest"]], "logsspecs (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsSpecs"]], "multiprocesscontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.MultiprocessContext"]], "pcontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.PContext"]], "runprocsresult (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.RunProcsResult"]], "subprocesscontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.SubprocessContext"]], "reify() (torch.distributed.elastic.multiprocessing.api.defaultlogsspecs method)": [[45, "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs.reify"]], "reify() (torch.distributed.elastic.multiprocessing.api.logsspecs method)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsSpecs.reify"]], "start_processes() (in module torch.distributed.elastic.multiprocessing)": [[45, "torch.distributed.elastic.multiprocessing.start_processes"]], "torch.distributed.elastic.multiprocessing": [[45, "module-torch.distributed.elastic.multiprocessing"]], "c10drendezvousbackend (class in torch.distributed.elastic.rendezvous.c10d_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend"]], "dynamicrendezvoushandler (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler"]], "etcdrendezvousbackend (class in torch.distributed.elastic.rendezvous.etcd_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend"]], "etcdrendezvoushandler (class in torch.distributed.elastic.rendezvous.etcd_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous.EtcdRendezvousHandler"]], "etcdserver (class in torch.distributed.elastic.rendezvous.etcd_server)": [[47, "torch.distributed.elastic.rendezvous.etcd_server.EtcdServer"]], "etcdstore (class in torch.distributed.elastic.rendezvous.etcd_store)": [[47, 
"torch.distributed.elastic.rendezvous.etcd_store.EtcdStore"]], "rendezvousbackend (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend"]], "rendezvousclosederror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousClosedError"]], "rendezvousconnectionerror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousConnectionError"]], "rendezvouserror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousError"]], "rendezvousgracefulexiterror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousGracefulExitError"]], "rendezvoushandler (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler"]], "rendezvoushandlerregistry (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandlerRegistry"]], "rendezvousinfo (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousInfo"]], "rendezvousparameters (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters"]], "rendezvousstateerror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStateError"]], "rendezvousstoreinfo (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo"]], "rendezvoustimeout (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout"]], "rendezvoustimeouterror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousTimeoutError"]], "add() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.add"]], "build() (torch.distributed.elastic.rendezvous.api.rendezvousstoreinfo static method)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo.build"]], "check() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.check"]], "close (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.close"]], "create_backend() (in module torch.distributed.elastic.rendezvous.c10d_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.create_backend"]], "create_backend() (in module torch.distributed.elastic.rendezvous.etcd_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.create_backend"]], "create_handler() (in module torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.create_handler"]], "from_backend() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.dynamicrendezvoushandler class method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler.from_backend"]], "get() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, 
"torch.distributed.elastic.rendezvous.RendezvousParameters.get"]], "get() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.get"]], "get_as_bool() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get_as_bool"]], "get_as_int() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get_as_int"]], "get_backend() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.get_backend"]], "get_run_id() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.get_run_id"]], "get_state() (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.get_state"]], "get_state() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.get_state"]], "get_state() (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.get_state"]], "heartbeat (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.heartbeat"]], "is_closed() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.is_closed"]], "join (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.join"]], "last_call (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.last_call"]], "name (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.name"]], "name (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.name"]], "name (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.name"]], "next_rendezvous() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.next_rendezvous"]], "num_nodes_waiting() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.num_nodes_waiting"]], "set() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.set"]], "set_closed() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.set_closed"]], "set_state() (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend method)": [[47, 
"torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.set_state"]], "set_state() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.set_state"]], "set_state() (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.set_state"]], "shutdown() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.shutdown"]], "torch.distributed.elastic.rendezvous": [[47, "module-torch.distributed.elastic.rendezvous"]], "torch.distributed.elastic.rendezvous.registry": [[47, "module-torch.distributed.elastic.rendezvous.registry"]], "use_agent_store (torch.distributed.elastic.rendezvous.rendezvoushandler property)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.use_agent_store"]], "wait() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.wait"]], "torch.distributed.run": [[48, "module-torch.distributed.run"]], "subprocesshandler (class in torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler)": [[49, "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler.SubprocessHandler"]], "get_subprocess_handler() (in module torch.distributed.elastic.multiprocessing.subprocess_handler.handlers)": [[49, "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers.get_subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.handlers"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler"]], "filetimerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.FileTimerClient"]], "filetimerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.FileTimerServer"]], "localtimerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.LocalTimerClient"]], "localtimerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.LocalTimerServer"]], "timerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerClient"]], "timerrequest (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerRequest"]], "timerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerServer"]], "acquire() (torch.distributed.elastic.timer.timerclient method)": [[50, "torch.distributed.elastic.timer.TimerClient.acquire"]], "clear_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.clear_timers"]], "configure() (in module torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.configure"]], "expires() (in module torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.expires"]], "get_expired_timers() (torch.distributed.elastic.timer.timerserver 
method)": [[50, "torch.distributed.elastic.timer.TimerServer.get_expired_timers"]], "log_debug_info_for_expired_timers() (in module torch.distributed.elastic.timer.debug_info_logging)": [[50, "torch.distributed.elastic.timer.debug_info_logging.log_debug_info_for_expired_timers"]], "register_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.register_timers"]], "release() (torch.distributed.elastic.timer.timerclient method)": [[50, "torch.distributed.elastic.timer.TimerClient.release"]], "torch.distributed.elastic.timer": [[50, "module-torch.distributed.elastic.timer"]], "torch.distributed.elastic.timer.debug_info_logging": [[50, "module-torch.distributed.elastic.timer.debug_info_logging"]], "constraint (in module torch.export)": [[52, "torch.export.Constraint"]], "customobjargument (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.CustomObjArgument"]], "dim() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.Dim"]], "exportbackwardsignature (class in torch.export)": [[52, "torch.export.ExportBackwardSignature"]], "exportgraphsignature (class in torch.export)": [[52, "torch.export.ExportGraphSignature"]], "exportgraphsignature (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.ExportGraphSignature"]], "exportedprogram (class in torch.export)": [[52, "torch.export.ExportedProgram"]], "flatargsadapter (class in torch.export.unflatten)": [[52, "torch.export.unflatten.FlatArgsAdapter"]], "inputkind (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.InputKind"]], "inputspec (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.InputSpec"]], "interpretermodule (class in torch.export.unflatten)": [[52, "torch.export.unflatten.InterpreterModule"]], "modulecallentry (class in torch.export)": [[52, "torch.export.ModuleCallEntry"]], "modulecallsignature (class in torch.export)": [[52, "torch.export.ModuleCallSignature"]], "outputkind (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.OutputKind"]], "outputspec (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.OutputSpec"]], "shapescollection (class in torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.ShapesCollection"]], "adapt() (torch.export.unflatten.flatargsadapter method)": [[52, "torch.export.unflatten.FlatArgsAdapter.adapt"]], "buffers() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.buffers"]], "dims() (in module torch.export)": [[52, "torch.export.dims"]], "dynamic_dim() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.dynamic_dim"]], "dynamic_shapes() (torch.export.dynamic_shapes.shapescollection method)": [[52, "torch.export.dynamic_shapes.ShapesCollection.dynamic_shapes"]], "export() (in module torch.export)": [[52, "torch.export.export"]], "get_replace_hook() (torch.export.graph_signature.exportgraphsignature method)": [[52, "torch.export.graph_signature.ExportGraphSignature.get_replace_hook"]], "load() (in module torch.export)": [[52, "torch.export.load"]], "module() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.module"]], "named_buffers() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.named_buffers"]], "named_parameters() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.named_parameters"]], "parameters() (torch.export.exportedprogram 
method)": [[52, "torch.export.ExportedProgram.parameters"]], "refine_dynamic_shapes_from_suggested_fixes() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.refine_dynamic_shapes_from_suggested_fixes"]], "register_dataclass() (in module torch.export)": [[52, "torch.export.register_dataclass"]], "replace_all_uses() (torch.export.graph_signature.exportgraphsignature method)": [[52, "torch.export.graph_signature.ExportGraphSignature.replace_all_uses"]], "run_decompositions() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.run_decompositions"]], "save() (in module torch.export)": [[52, "torch.export.save"]], "torch.export": [[52, "module-torch.export"]], "torch.export.custom_obj": [[52, "module-torch.export.custom_obj"]], "torch.export.dynamic_shapes": [[52, "module-torch.export.dynamic_shapes"]], "torch.export.exported_program": [[52, "module-torch.export.exported_program"]], "torch.export.graph_signature": [[52, "module-torch.export.graph_signature"]], "torch.export.unflatten": [[52, "module-torch.export.unflatten"]], "unflatten() (in module torch.export.unflatten)": [[52, "torch.export.unflatten.unflatten"]], "torch.fft": [[54, "module-torch.fft"]], "backwardprefetch (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.BackwardPrefetch"]], "cpuoffload (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.CPUOffload"]], "fulloptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullOptimStateDictConfig"]], "fullstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullStateDictConfig"]], "fullyshardeddataparallel (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel"]], "localoptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.LocalOptimStateDictConfig"]], "localstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.LocalStateDictConfig"]], "mixedprecision (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.MixedPrecision"]], "optimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.OptimStateDictConfig"]], "shardedoptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardedOptimStateDictConfig"]], "shardedstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardedStateDictConfig"]], "shardingstrategy (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardingStrategy"]], "statedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.StateDictConfig"]], "statedictsettings (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.StateDictSettings"]], "apply() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.apply"]], "check_is_root() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.check_is_root"]], "clip_grad_norm_() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.clip_grad_norm_"]], "flatten_sharded_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.flatten_sharded_optim_state_dict"]], "forward() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.forward"]], 
"fsdp_modules() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.fsdp_modules"]], "full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.full_optim_state_dict"]], "get_state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.get_state_dict_type"]], "module (torch.distributed.fsdp.fullyshardeddataparallel property)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.module"]], "named_buffers() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.named_buffers"]], "named_parameters() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.named_parameters"]], "no_sync() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.no_sync"]], "optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.optim_state_dict"]], "optim_state_dict_to_load() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.optim_state_dict_to_load"]], "register_comm_hook() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.register_comm_hook"]], "rekey_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.rekey_optim_state_dict"]], "scatter_full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.scatter_full_optim_state_dict"]], "set_state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.set_state_dict_type"]], "shard_full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.shard_full_optim_state_dict"]], "sharded_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.sharded_optim_state_dict"]], "state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.state_dict_type"]], "summon_full_params() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.summon_full_params"]], "torch.distributed.fsdp": [[55, "module-torch.distributed.fsdp"]], "torch.func": [[57, "module-torch.func"]], "get_overwrite_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.get_overwrite_module_params_on_conversion"]], "get_swap_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.get_swap_module_params_on_conversion"]], "set_overwrite_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.set_overwrite_module_params_on_conversion"]], "set_swap_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.set_swap_module_params_on_conversion"]], "torch.__future__": [[62, "module-torch.__future__"]], "future (class in torch.futures)": 
[[63, "torch.futures.Future"]], "add_done_callback() (torch.futures.future method)": [[63, "torch.futures.Future.add_done_callback"]], "collect_all() (in module torch.futures)": [[63, "torch.futures.collect_all"]], "done() (torch.futures.future method)": [[63, "torch.futures.Future.done"]], "set_exception() (torch.futures.future method)": [[63, "torch.futures.Future.set_exception"]], "set_result() (torch.futures.future method)": [[63, "torch.futures.Future.set_result"]], "then() (torch.futures.future method)": [[63, "torch.futures.Future.then"]], "torch.futures": [[63, "module-torch.futures"]], "value() (torch.futures.future method)": [[63, "torch.futures.Future.value"]], "wait() (torch.futures.future method)": [[63, "torch.futures.Future.wait"]], "wait_all() (in module torch.futures)": [[63, "torch.futures.wait_all"]], "graph (class in torch.fx)": [[64, "torch.fx.Graph"]], "graphmodule (class in torch.fx)": [[64, "torch.fx.GraphModule"]], "interpreter (class in torch.fx)": [[64, "torch.fx.Interpreter"]], "node (class in torch.fx)": [[64, "torch.fx.Node"]], "proxy (class in torch.fx)": [[64, "torch.fx.Proxy"]], "tracer (class in torch.fx)": [[64, "torch.fx.Tracer"]], "transformer (class in torch.fx)": [[64, "torch.fx.Transformer"]], "__init__() (torch.fx.graph method)": [[64, "torch.fx.Graph.__init__"]], "__init__() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.__init__"]], "add_submodule() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.add_submodule"]], "all_input_nodes (torch.fx.node property)": [[64, "torch.fx.Node.all_input_nodes"]], "append() (torch.fx.node method)": [[64, "torch.fx.Node.append"]], "args (torch.fx.node property)": [[64, "torch.fx.Node.args"]], "boxed_run() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.boxed_run"]], "call_function() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_function"]], "call_function() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_function"]], "call_function() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.call_function"]], "call_method() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_method"]], "call_method() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_method"]], "call_module() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_module"]], "call_module() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_module"]], "call_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.call_module"]], "call_module() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.call_module"]], "code (torch.fx.graphmodule property)": [[64, "torch.fx.GraphModule.code"]], "create_arg() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_arg"]], "create_args_for_root() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_args_for_root"]], "create_node() (torch.fx.graph method)": [[64, "torch.fx.Graph.create_node"]], "create_node() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_node"]], "create_proxy() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_proxy"]], "delete_all_unused_submodules() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.delete_all_unused_submodules"]], "delete_submodule() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.delete_submodule"]], "eliminate_dead_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.eliminate_dead_code"]], "erase_node() (torch.fx.graph method)": [[64, "torch.fx.Graph.erase_node"]], "fetch_args_kwargs_from_env() (torch.fx.interpreter method)": 
[[64, "torch.fx.Interpreter.fetch_args_kwargs_from_env"]], "fetch_attr() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.fetch_attr"]], "find_nodes() (torch.fx.graph method)": [[64, "torch.fx.Graph.find_nodes"]], "format_node() (torch.fx.node method)": [[64, "torch.fx.Node.format_node"]], "get_attr() (torch.fx.graph method)": [[64, "torch.fx.Graph.get_attr"]], "get_attr() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.get_attr"]], "get_attr() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.get_attr"]], "get_fresh_qualname() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.get_fresh_qualname"]], "getattr() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.getattr"]], "graph (torch.fx.graphmodule property)": [[64, "torch.fx.GraphModule.graph"]], "graph_copy() (torch.fx.graph method)": [[64, "torch.fx.Graph.graph_copy"]], "insert_arg() (torch.fx.node method)": [[64, "torch.fx.Node.insert_arg"]], "inserting_after() (torch.fx.graph method)": [[64, "torch.fx.Graph.inserting_after"]], "inserting_before() (torch.fx.graph method)": [[64, "torch.fx.Graph.inserting_before"]], "is_impure() (torch.fx.node method)": [[64, "torch.fx.Node.is_impure"]], "is_leaf_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.is_leaf_module"]], "iter() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.iter"]], "keys() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.keys"]], "kwargs (torch.fx.node property)": [[64, "torch.fx.Node.kwargs"]], "lint() (torch.fx.graph method)": [[64, "torch.fx.Graph.lint"]], "map_nodes_to_values() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.map_nodes_to_values"]], "next (torch.fx.node property)": [[64, "torch.fx.Node.next"]], "node_copy() (torch.fx.graph method)": [[64, "torch.fx.Graph.node_copy"]], "nodes (torch.fx.graph property)": [[64, "torch.fx.Graph.nodes"]], "normalized_arguments() (torch.fx.node method)": [[64, "torch.fx.Node.normalized_arguments"]], "on_generate_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.on_generate_code"]], "output() (torch.fx.graph method)": [[64, "torch.fx.Graph.output"]], "output() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.output"]], "path_of_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.path_of_module"]], "placeholder() (torch.fx.graph method)": [[64, "torch.fx.Graph.placeholder"]], "placeholder() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.placeholder"]], "placeholder() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.placeholder"]], "prepend() (torch.fx.node method)": [[64, "torch.fx.Node.prepend"]], "prev (torch.fx.node property)": [[64, "torch.fx.Node.prev"]], "print_readable() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.print_readable"]], "print_tabular() (torch.fx.graph method)": [[64, "torch.fx.Graph.print_tabular"]], "process_inputs() (torch.fx.graph method)": [[64, "torch.fx.Graph.process_inputs"]], "process_outputs() (torch.fx.graph method)": [[64, "torch.fx.Graph.process_outputs"]], "proxy() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.proxy"]], "python_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.python_code"]], "recompile() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.recompile"]], "replace_all_uses_with() (torch.fx.node method)": [[64, "torch.fx.Node.replace_all_uses_with"]], "replace_input_with() (torch.fx.node method)": [[64, "torch.fx.Node.replace_input_with"]], "replace_pattern() (in module torch.fx)": [[64, "torch.fx.replace_pattern"]], "run() (torch.fx.interpreter 
method)": [[64, "torch.fx.Interpreter.run"]], "run_node() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.run_node"]], "set_codegen() (torch.fx.graph method)": [[64, "torch.fx.Graph.set_codegen"]], "stack_trace (torch.fx.node property)": [[64, "torch.fx.Node.stack_trace"]], "symbolic_trace() (in module torch.fx)": [[64, "torch.fx.symbolic_trace"]], "to_bool() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.to_bool"]], "to_folder() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.to_folder"]], "torch.fx": [[64, "module-torch.fx"]], "torch.fx.annotate": [[64, "module-torch.fx.annotate"]], "torch.fx.config": [[64, "module-torch.fx.config"]], "torch.fx.experimental": [[64, "module-torch.fx.experimental"]], "torch.fx.experimental.accelerator_partitioner": [[64, "module-torch.fx.experimental.accelerator_partitioner"]], "torch.fx.experimental.const_fold": [[64, "module-torch.fx.experimental.const_fold"]], "torch.fx.experimental.debug": [[64, "module-torch.fx.experimental.debug"]], "torch.fx.experimental.graph_gradual_typechecker": [[64, "module-torch.fx.experimental.graph_gradual_typechecker"]], "torch.fx.experimental.merge_matmul": [[64, "module-torch.fx.experimental.merge_matmul"]], "torch.fx.experimental.meta_tracer": [[64, "module-torch.fx.experimental.meta_tracer"]], "torch.fx.experimental.migrate_gradual_types": [[64, "module-torch.fx.experimental.migrate_gradual_types"]], "torch.fx.experimental.migrate_gradual_types.constraint": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint"]], "torch.fx.experimental.migrate_gradual_types.constraint_generator": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint_generator"]], "torch.fx.experimental.migrate_gradual_types.constraint_transformation": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint_transformation"]], "torch.fx.experimental.migrate_gradual_types.operation": [[64, "module-torch.fx.experimental.migrate_gradual_types.operation"]], "torch.fx.experimental.migrate_gradual_types.transform_to_z3": [[64, "module-torch.fx.experimental.migrate_gradual_types.transform_to_z3"]], "torch.fx.experimental.migrate_gradual_types.util": [[64, "module-torch.fx.experimental.migrate_gradual_types.util"]], "torch.fx.experimental.migrate_gradual_types.z3_types": [[64, "module-torch.fx.experimental.migrate_gradual_types.z3_types"]], "torch.fx.experimental.normalize": [[64, "module-torch.fx.experimental.normalize"]], "torch.fx.experimental.optimization": [[64, "module-torch.fx.experimental.optimization"]], "torch.fx.experimental.partitioner_utils": [[64, "module-torch.fx.experimental.partitioner_utils"]], "torch.fx.experimental.proxy_tensor": [[64, "module-torch.fx.experimental.proxy_tensor"]], "torch.fx.experimental.recording": [[64, "module-torch.fx.experimental.recording"]], "torch.fx.experimental.refinement_types": [[64, "module-torch.fx.experimental.refinement_types"]], "torch.fx.experimental.rewriter": [[64, "module-torch.fx.experimental.rewriter"]], "torch.fx.experimental.schema_type_annotation": [[64, "module-torch.fx.experimental.schema_type_annotation"]], "torch.fx.experimental.sym_node": [[64, "module-torch.fx.experimental.sym_node"]], "torch.fx.experimental.unification": [[64, "module-torch.fx.experimental.unification"]], "torch.fx.experimental.unification.core": [[64, "module-torch.fx.experimental.unification.core"]], "torch.fx.experimental.unification.dispatch": [[64, "module-torch.fx.experimental.unification.dispatch"]], "torch.fx.experimental.unification.match": [[64, 
"module-torch.fx.experimental.unification.match"]], "torch.fx.experimental.unification.more": [[64, "module-torch.fx.experimental.unification.more"]], "torch.fx.experimental.unification.multipledispatch": [[64, "module-torch.fx.experimental.unification.multipledispatch"]], "torch.fx.experimental.unification.multipledispatch.conflict": [[64, "module-torch.fx.experimental.unification.multipledispatch.conflict"]], "torch.fx.experimental.unification.multipledispatch.core": [[64, "module-torch.fx.experimental.unification.multipledispatch.core"]], "torch.fx.experimental.unification.multipledispatch.dispatcher": [[64, "module-torch.fx.experimental.unification.multipledispatch.dispatcher"]], "torch.fx.experimental.unification.multipledispatch.utils": [[64, "module-torch.fx.experimental.unification.multipledispatch.utils"]], "torch.fx.experimental.unification.multipledispatch.variadic": [[64, "module-torch.fx.experimental.unification.multipledispatch.variadic"]], "torch.fx.experimental.unification.unification_tools": [[64, "module-torch.fx.experimental.unification.unification_tools"]], "torch.fx.experimental.unification.utils": [[64, "module-torch.fx.experimental.unification.utils"]], "torch.fx.experimental.unification.variable": [[64, "module-torch.fx.experimental.unification.variable"]], "torch.fx.experimental.unify_refinements": [[64, "module-torch.fx.experimental.unify_refinements"]], "torch.fx.experimental.validator": [[64, "module-torch.fx.experimental.validator"]], "torch.fx.graph": [[64, "module-torch.fx.graph"]], "torch.fx.graph_module": [[64, "module-torch.fx.graph_module"]], "torch.fx.immutable_collections": [[64, "module-torch.fx.immutable_collections"]], "torch.fx.interpreter": [[64, "module-torch.fx.interpreter"]], "torch.fx.node": [[64, "module-torch.fx.node"]], "torch.fx.operator_schemas": [[64, "module-torch.fx.operator_schemas"]], "torch.fx.passes": [[64, "module-torch.fx.passes"]], "torch.fx.passes.annotate_getitem_nodes": [[64, "module-torch.fx.passes.annotate_getitem_nodes"]], "torch.fx.passes.backends": [[64, "module-torch.fx.passes.backends"]], "torch.fx.passes.backends.cudagraphs": [[64, "module-torch.fx.passes.backends.cudagraphs"]], "torch.fx.passes.dialect": [[64, "module-torch.fx.passes.dialect"]], "torch.fx.passes.dialect.common": [[64, "module-torch.fx.passes.dialect.common"]], "torch.fx.passes.dialect.common.cse_pass": [[64, "module-torch.fx.passes.dialect.common.cse_pass"]], "torch.fx.passes.fake_tensor_prop": [[64, "module-torch.fx.passes.fake_tensor_prop"]], "torch.fx.passes.graph_drawer": [[64, "module-torch.fx.passes.graph_drawer"]], "torch.fx.passes.graph_manipulation": [[64, "module-torch.fx.passes.graph_manipulation"]], "torch.fx.passes.graph_transform_observer": [[64, "module-torch.fx.passes.graph_transform_observer"]], "torch.fx.passes.infra": [[64, "module-torch.fx.passes.infra"]], "torch.fx.passes.infra.partitioner": [[64, "module-torch.fx.passes.infra.partitioner"]], "torch.fx.passes.infra.pass_base": [[64, "module-torch.fx.passes.infra.pass_base"]], "torch.fx.passes.infra.pass_manager": [[64, "module-torch.fx.passes.infra.pass_manager"]], "torch.fx.passes.net_min_base": [[64, "module-torch.fx.passes.net_min_base"]], "torch.fx.passes.operator_support": [[64, "module-torch.fx.passes.operator_support"]], "torch.fx.passes.param_fetch": [[64, "module-torch.fx.passes.param_fetch"]], "torch.fx.passes.pass_manager": [[64, "module-torch.fx.passes.pass_manager"]], "torch.fx.passes.reinplace": [[64, "module-torch.fx.passes.reinplace"]], 
"torch.fx.passes.runtime_assert": [[64, "module-torch.fx.passes.runtime_assert"]], "torch.fx.passes.shape_prop": [[64, "module-torch.fx.passes.shape_prop"]], "torch.fx.passes.split_module": [[64, "module-torch.fx.passes.split_module"]], "torch.fx.passes.split_utils": [[64, "module-torch.fx.passes.split_utils"]], "torch.fx.passes.splitter_base": [[64, "module-torch.fx.passes.splitter_base"]], "torch.fx.passes.tests": [[64, "module-torch.fx.passes.tests"]], "torch.fx.passes.tests.test_pass_manager": [[64, "module-torch.fx.passes.tests.test_pass_manager"]], "torch.fx.passes.tools_common": [[64, "module-torch.fx.passes.tools_common"]], "torch.fx.passes.utils": [[64, "module-torch.fx.passes.utils"]], "torch.fx.passes.utils.common": [[64, "module-torch.fx.passes.utils.common"]], "torch.fx.passes.utils.fuser_utils": [[64, "module-torch.fx.passes.utils.fuser_utils"]], "torch.fx.passes.utils.matcher_utils": [[64, "module-torch.fx.passes.utils.matcher_utils"]], "torch.fx.passes.utils.matcher_with_name_node_map_utils": [[64, "module-torch.fx.passes.utils.matcher_with_name_node_map_utils"]], "torch.fx.passes.utils.source_matcher_utils": [[64, "module-torch.fx.passes.utils.source_matcher_utils"]], "torch.fx.proxy": [[64, "module-torch.fx.proxy"]], "torch.fx.subgraph_rewriter": [[64, "module-torch.fx.subgraph_rewriter"]], "torch.fx.tensor_type": [[64, "module-torch.fx.tensor_type"]], "torch.fx.traceback": [[64, "module-torch.fx.traceback"]], "trace() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.trace"]], "transform() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.transform"]], "update_arg() (torch.fx.node method)": [[64, "torch.fx.Node.update_arg"]], "update_kwarg() (torch.fx.node method)": [[64, "torch.fx.Node.update_kwarg"]], "wrap() (in module torch.fx)": [[64, "torch.fx.wrap"]], "torch.fx.experimental.symbolic_shapes": [[65, "module-torch.fx.experimental.symbolic_shapes"]], "generator (class in torch)": [[90, "torch.Generator"]], "clone_state() (torch.generator method)": [[90, "torch.Generator.clone_state"]], "device (torch.generator attribute)": [[90, "torch.Generator.device"]], "get_state() (torch.generator method)": [[90, "torch.Generator.get_state"]], "graphsafe_get_state() (torch.generator method)": [[90, "torch.Generator.graphsafe_get_state"]], "graphsafe_set_state() (torch.generator method)": [[90, "torch.Generator.graphsafe_set_state"]], "initial_seed() (torch.generator method)": [[90, "torch.Generator.initial_seed"]], "manual_seed() (torch.generator method)": [[90, "torch.Generator.manual_seed"]], "seed() (torch.generator method)": [[90, "torch.Generator.seed"]], "set_state() (torch.generator method)": [[90, "torch.Generator.set_state"]], "abs() (torch.tensor method)": [[91, "torch.Tensor.abs"]], "abs_() (torch.tensor method)": [[92, "torch.Tensor.abs_"]], "absolute() (torch.tensor method)": [[93, "torch.Tensor.absolute"]], "absolute_() (torch.tensor method)": [[94, "torch.Tensor.absolute_"]], "acos() (torch.tensor method)": [[95, "torch.Tensor.acos"]], "acos_() (torch.tensor method)": [[96, "torch.Tensor.acos_"]], "acosh() (torch.tensor method)": [[97, "torch.Tensor.acosh"]], "acosh_() (torch.tensor method)": [[98, "torch.Tensor.acosh_"]], "add() (torch.tensor method)": [[99, "torch.Tensor.add"]], "add_() (torch.tensor method)": [[100, "torch.Tensor.add_"]], "addbmm() (torch.tensor method)": [[101, "torch.Tensor.addbmm"]], "addbmm_() (torch.tensor method)": [[102, "torch.Tensor.addbmm_"]], "addcdiv() (torch.tensor method)": [[103, "torch.Tensor.addcdiv"]], "addcdiv_() 
(torch.tensor method)": [[104, "torch.Tensor.addcdiv_"]], "addcmul() (torch.tensor method)": [[105, "torch.Tensor.addcmul"]], "addcmul_() (torch.tensor method)": [[106, "torch.Tensor.addcmul_"]], "addmm() (torch.tensor method)": [[107, "torch.Tensor.addmm"]], "addmm_() (torch.tensor method)": [[108, "torch.Tensor.addmm_"]], "addmv() (torch.tensor method)": [[109, "torch.Tensor.addmv"]], "addmv_() (torch.tensor method)": [[110, "torch.Tensor.addmv_"]], "addr() (torch.tensor method)": [[111, "torch.Tensor.addr"]], "addr_() (torch.tensor method)": [[112, "torch.Tensor.addr_"]], "adjoint() (torch.tensor method)": [[113, "torch.Tensor.adjoint"]], "all() (torch.tensor method)": [[114, "torch.Tensor.all"]], "allclose() (torch.tensor method)": [[115, "torch.Tensor.allclose"]], "amax() (torch.tensor method)": [[116, "torch.Tensor.amax"]], "amin() (torch.tensor method)": [[117, "torch.Tensor.amin"]], "aminmax() (torch.tensor method)": [[118, "torch.Tensor.aminmax"]], "angle() (torch.tensor method)": [[119, "torch.Tensor.angle"]], "any() (torch.tensor method)": [[120, "torch.Tensor.any"]], "apply_() (torch.tensor method)": [[121, "torch.Tensor.apply_"]], "arccos() (torch.tensor method)": [[122, "torch.Tensor.arccos"]], "arccos_() (torch.tensor method)": [[123, "torch.Tensor.arccos_"]], "arccosh() (torch.tensor method)": [[124, "torch.Tensor.arccosh"]], "arccosh_() (torch.tensor method)": [[125, "torch.Tensor.arccosh_"]], "arcsin() (torch.tensor method)": [[126, "torch.Tensor.arcsin"]], "arcsin_() (torch.tensor method)": [[127, "torch.Tensor.arcsin_"]], "arcsinh() (torch.tensor method)": [[128, "torch.Tensor.arcsinh"]], "arcsinh_() (torch.tensor method)": [[129, "torch.Tensor.arcsinh_"]], "arctan() (torch.tensor method)": [[130, "torch.Tensor.arctan"]], "arctan2() (torch.tensor method)": [[131, "torch.Tensor.arctan2"]], "arctan2_() (torch.tensor method)": [[132, "torch.Tensor.arctan2_"]], "arctan_() (torch.tensor method)": [[133, "torch.Tensor.arctan_"]], "arctanh() (torch.tensor method)": [[134, "torch.Tensor.arctanh"]], "arctanh_() (torch.tensor method)": [[135, "torch.Tensor.arctanh_"]], "argmax() (torch.tensor method)": [[136, "torch.Tensor.argmax"]], "argmin() (torch.tensor method)": [[137, "torch.Tensor.argmin"]], "argsort() (torch.tensor method)": [[138, "torch.Tensor.argsort"]], "argwhere() (torch.tensor method)": [[139, "torch.Tensor.argwhere"]], "as_strided() (torch.tensor method)": [[140, "torch.Tensor.as_strided"]], "as_subclass() (torch.tensor method)": [[141, "torch.Tensor.as_subclass"]], "asin() (torch.tensor method)": [[142, "torch.Tensor.asin"]], "asin_() (torch.tensor method)": [[143, "torch.Tensor.asin_"]], "asinh() (torch.tensor method)": [[144, "torch.Tensor.asinh"]], "asinh_() (torch.tensor method)": [[145, "torch.Tensor.asinh_"]], "atan() (torch.tensor method)": [[146, "torch.Tensor.atan"]], "atan2() (torch.tensor method)": [[147, "torch.Tensor.atan2"]], "atan2_() (torch.tensor method)": [[148, "torch.Tensor.atan2_"]], "atan_() (torch.tensor method)": [[149, "torch.Tensor.atan_"]], "atanh() (torch.tensor method)": [[150, "torch.Tensor.atanh"]], "atanh_() (torch.tensor method)": [[151, "torch.Tensor.atanh_"]], "backward() (torch.tensor method)": [[152, "torch.Tensor.backward"]], "baddbmm() (torch.tensor method)": [[153, "torch.Tensor.baddbmm"]], "baddbmm_() (torch.tensor method)": [[154, "torch.Tensor.baddbmm_"]], "bernoulli() (torch.tensor method)": [[155, "torch.Tensor.bernoulli"]], "bernoulli_() (torch.tensor method)": [[156, "torch.Tensor.bernoulli_"]], "bfloat16() 
(torch.tensor method)": [[157, "torch.Tensor.bfloat16"]], "bincount() (torch.tensor method)": [[158, "torch.Tensor.bincount"]], "bitwise_and() (torch.tensor method)": [[159, "torch.Tensor.bitwise_and"]], "bitwise_and_() (torch.tensor method)": [[160, "torch.Tensor.bitwise_and_"]], "bitwise_left_shift() (torch.tensor method)": [[161, "torch.Tensor.bitwise_left_shift"]], "bitwise_left_shift_() (torch.tensor method)": [[162, "torch.Tensor.bitwise_left_shift_"]], "bitwise_not() (torch.tensor method)": [[163, "torch.Tensor.bitwise_not"]], "bitwise_not_() (torch.tensor method)": [[164, "torch.Tensor.bitwise_not_"]], "bitwise_or() (torch.tensor method)": [[165, "torch.Tensor.bitwise_or"]], "bitwise_or_() (torch.tensor method)": [[166, "torch.Tensor.bitwise_or_"]], "bitwise_right_shift() (torch.tensor method)": [[167, "torch.Tensor.bitwise_right_shift"]], "bitwise_right_shift_() (torch.tensor method)": [[168, "torch.Tensor.bitwise_right_shift_"]], "bitwise_xor() (torch.tensor method)": [[169, "torch.Tensor.bitwise_xor"]], "bitwise_xor_() (torch.tensor method)": [[170, "torch.Tensor.bitwise_xor_"]], "bmm() (torch.tensor method)": [[171, "torch.Tensor.bmm"]], "bool() (torch.tensor method)": [[172, "torch.Tensor.bool"]], "broadcast_to() (torch.tensor method)": [[173, "torch.Tensor.broadcast_to"]], "byte() (torch.tensor method)": [[174, "torch.Tensor.byte"]], "cauchy_() (torch.tensor method)": [[175, "torch.Tensor.cauchy_"]], "ccol_indices() (torch.tensor method)": [[176, "torch.Tensor.ccol_indices"]], "cdouble() (torch.tensor method)": [[177, "torch.Tensor.cdouble"]], "ceil() (torch.tensor method)": [[178, "torch.Tensor.ceil"]], "ceil_() (torch.tensor method)": [[179, "torch.Tensor.ceil_"]], "cfloat() (torch.tensor method)": [[180, "torch.Tensor.cfloat"]], "chalf() (torch.tensor method)": [[181, "torch.Tensor.chalf"]], "char() (torch.tensor method)": [[182, "torch.Tensor.char"]], "cholesky() (torch.tensor method)": [[183, "torch.Tensor.cholesky"]], "cholesky_inverse() (torch.tensor method)": [[184, "torch.Tensor.cholesky_inverse"]], "cholesky_solve() (torch.tensor method)": [[185, "torch.Tensor.cholesky_solve"]], "chunk() (torch.tensor method)": [[186, "torch.Tensor.chunk"]], "clamp() (torch.tensor method)": [[187, "torch.Tensor.clamp"]], "clamp_() (torch.tensor method)": [[188, "torch.Tensor.clamp_"]], "clip() (torch.tensor method)": [[189, "torch.Tensor.clip"]], "clip_() (torch.tensor method)": [[190, "torch.Tensor.clip_"]], "clone() (torch.tensor method)": [[191, "torch.Tensor.clone"]], "coalesce() (torch.tensor method)": [[192, "torch.Tensor.coalesce"]], "col_indices() (torch.tensor method)": [[193, "torch.Tensor.col_indices"]], "conj() (torch.tensor method)": [[194, "torch.Tensor.conj"]], "conj_physical() (torch.tensor method)": [[195, "torch.Tensor.conj_physical"]], "conj_physical_() (torch.tensor method)": [[196, "torch.Tensor.conj_physical_"]], "contiguous() (torch.tensor method)": [[197, "torch.Tensor.contiguous"]], "copy_() (torch.tensor method)": [[198, "torch.Tensor.copy_"]], "copysign() (torch.tensor method)": [[199, "torch.Tensor.copysign"]], "copysign_() (torch.tensor method)": [[200, "torch.Tensor.copysign_"]], "corrcoef() (torch.tensor method)": [[201, "torch.Tensor.corrcoef"]], "cos() (torch.tensor method)": [[202, "torch.Tensor.cos"]], "cos_() (torch.tensor method)": [[203, "torch.Tensor.cos_"]], "cosh() (torch.tensor method)": [[204, "torch.Tensor.cosh"]], "cosh_() (torch.tensor method)": [[205, "torch.Tensor.cosh_"]], "count_nonzero() (torch.tensor method)": [[206, 
"torch.Tensor.count_nonzero"]], "cov() (torch.tensor method)": [[207, "torch.Tensor.cov"]], "cpu() (torch.tensor method)": [[208, "torch.Tensor.cpu"]], "cross() (torch.tensor method)": [[209, "torch.Tensor.cross"]], "crow_indices() (torch.tensor method)": [[210, "torch.Tensor.crow_indices"]], "cuda() (torch.tensor method)": [[211, "torch.Tensor.cuda"]], "cummax() (torch.tensor method)": [[212, "torch.Tensor.cummax"]], "cummin() (torch.tensor method)": [[213, "torch.Tensor.cummin"]], "cumprod() (torch.tensor method)": [[214, "torch.Tensor.cumprod"]], "cumprod_() (torch.tensor method)": [[215, "torch.Tensor.cumprod_"]], "cumsum() (torch.tensor method)": [[216, "torch.Tensor.cumsum"]], "cumsum_() (torch.tensor method)": [[217, "torch.Tensor.cumsum_"]], "data_ptr() (torch.tensor method)": [[218, "torch.Tensor.data_ptr"]], "deg2rad() (torch.tensor method)": [[219, "torch.Tensor.deg2rad"]], "dense_dim() (torch.tensor method)": [[220, "torch.Tensor.dense_dim"]], "dequantize() (torch.tensor method)": [[221, "torch.Tensor.dequantize"]], "det() (torch.tensor method)": [[222, "torch.Tensor.det"]], "detach() (torch.tensor method)": [[223, "torch.Tensor.detach"]], "detach_() (torch.tensor method)": [[224, "torch.Tensor.detach_"]], "device (torch.tensor attribute)": [[225, "torch.Tensor.device"]], "diag() (torch.tensor method)": [[226, "torch.Tensor.diag"]], "diag_embed() (torch.tensor method)": [[227, "torch.Tensor.diag_embed"]], "diagflat() (torch.tensor method)": [[228, "torch.Tensor.diagflat"]], "diagonal() (torch.tensor method)": [[229, "torch.Tensor.diagonal"]], "diagonal_scatter() (torch.tensor method)": [[230, "torch.Tensor.diagonal_scatter"]], "diff() (torch.tensor method)": [[231, "torch.Tensor.diff"]], "digamma() (torch.tensor method)": [[232, "torch.Tensor.digamma"]], "digamma_() (torch.tensor method)": [[233, "torch.Tensor.digamma_"]], "dim() (torch.tensor method)": [[234, "torch.Tensor.dim"]], "dim_order() (torch.tensor method)": [[235, "torch.Tensor.dim_order"]], "dist() (torch.tensor method)": [[236, "torch.Tensor.dist"]], "div() (torch.tensor method)": [[237, "torch.Tensor.div"]], "div_() (torch.tensor method)": [[238, "torch.Tensor.div_"]], "divide() (torch.tensor method)": [[239, "torch.Tensor.divide"]], "divide_() (torch.tensor method)": [[240, "torch.Tensor.divide_"]], "dot() (torch.tensor method)": [[241, "torch.Tensor.dot"]], "double() (torch.tensor method)": [[242, "torch.Tensor.double"]], "dsplit() (torch.tensor method)": [[243, "torch.Tensor.dsplit"]], "element_size() (torch.tensor method)": [[244, "torch.Tensor.element_size"]], "eq() (torch.tensor method)": [[245, "torch.Tensor.eq"]], "eq_() (torch.tensor method)": [[246, "torch.Tensor.eq_"]], "equal() (torch.tensor method)": [[247, "torch.Tensor.equal"]], "erf() (torch.tensor method)": [[248, "torch.Tensor.erf"]], "erf_() (torch.tensor method)": [[249, "torch.Tensor.erf_"]], "erfc() (torch.tensor method)": [[250, "torch.Tensor.erfc"]], "erfc_() (torch.tensor method)": [[251, "torch.Tensor.erfc_"]], "erfinv() (torch.tensor method)": [[252, "torch.Tensor.erfinv"]], "erfinv_() (torch.tensor method)": [[253, "torch.Tensor.erfinv_"]], "exp() (torch.tensor method)": [[254, "torch.Tensor.exp"]], "exp_() (torch.tensor method)": [[255, "torch.Tensor.exp_"]], "expand() (torch.tensor method)": [[256, "torch.Tensor.expand"]], "expand_as() (torch.tensor method)": [[257, "torch.Tensor.expand_as"]], "expm1() (torch.tensor method)": [[258, "torch.Tensor.expm1"]], "expm1_() (torch.tensor method)": [[259, "torch.Tensor.expm1_"]], 
"exponential_() (torch.tensor method)": [[260, "torch.Tensor.exponential_"]], "fill_() (torch.tensor method)": [[261, "torch.Tensor.fill_"]], "fill_diagonal_() (torch.tensor method)": [[262, "torch.Tensor.fill_diagonal_"]], "fix() (torch.tensor method)": [[263, "torch.Tensor.fix"]], "fix_() (torch.tensor method)": [[264, "torch.Tensor.fix_"]], "flatten() (torch.tensor method)": [[265, "torch.Tensor.flatten"]], "flip() (torch.tensor method)": [[266, "torch.Tensor.flip"]], "fliplr() (torch.tensor method)": [[267, "torch.Tensor.fliplr"]], "flipud() (torch.tensor method)": [[268, "torch.Tensor.flipud"]], "float() (torch.tensor method)": [[269, "torch.Tensor.float"]], "float_power() (torch.tensor method)": [[270, "torch.Tensor.float_power"]], "float_power_() (torch.tensor method)": [[271, "torch.Tensor.float_power_"]], "floor() (torch.tensor method)": [[272, "torch.Tensor.floor"]], "floor_() (torch.tensor method)": [[273, "torch.Tensor.floor_"]], "floor_divide() (torch.tensor method)": [[274, "torch.Tensor.floor_divide"]], "floor_divide_() (torch.tensor method)": [[275, "torch.Tensor.floor_divide_"]], "fmax() (torch.tensor method)": [[276, "torch.Tensor.fmax"]], "fmin() (torch.tensor method)": [[277, "torch.Tensor.fmin"]], "fmod() (torch.tensor method)": [[278, "torch.Tensor.fmod"]], "fmod_() (torch.tensor method)": [[279, "torch.Tensor.fmod_"]], "frac() (torch.tensor method)": [[280, "torch.Tensor.frac"]], "frac_() (torch.tensor method)": [[281, "torch.Tensor.frac_"]], "frexp() (torch.tensor method)": [[282, "torch.Tensor.frexp"]], "gather() (torch.tensor method)": [[283, "torch.Tensor.gather"]], "gcd() (torch.tensor method)": [[284, "torch.Tensor.gcd"]], "gcd_() (torch.tensor method)": [[285, "torch.Tensor.gcd_"]], "ge() (torch.tensor method)": [[286, "torch.Tensor.ge"]], "ge_() (torch.tensor method)": [[287, "torch.Tensor.ge_"]], "geometric_() (torch.tensor method)": [[288, "torch.Tensor.geometric_"]], "geqrf() (torch.tensor method)": [[289, "torch.Tensor.geqrf"]], "ger() (torch.tensor method)": [[290, "torch.Tensor.ger"]], "get_device() (torch.tensor method)": [[291, "torch.Tensor.get_device"]], "grad (torch.tensor attribute)": [[292, "torch.Tensor.grad"]], "greater() (torch.tensor method)": [[293, "torch.Tensor.greater"]], "greater_() (torch.tensor method)": [[294, "torch.Tensor.greater_"]], "greater_equal() (torch.tensor method)": [[295, "torch.Tensor.greater_equal"]], "greater_equal_() (torch.tensor method)": [[296, "torch.Tensor.greater_equal_"]], "gt() (torch.tensor method)": [[297, "torch.Tensor.gt"]], "gt_() (torch.tensor method)": [[298, "torch.Tensor.gt_"]], "half() (torch.tensor method)": [[299, "torch.Tensor.half"]], "hardshrink() (torch.tensor method)": [[300, "torch.Tensor.hardshrink"]], "heaviside() (torch.tensor method)": [[301, "torch.Tensor.heaviside"]], "histc() (torch.tensor method)": [[302, "torch.Tensor.histc"]], "histogram() (torch.tensor method)": [[303, "torch.Tensor.histogram"]], "hsplit() (torch.tensor method)": [[304, "torch.Tensor.hsplit"]], "hypot() (torch.tensor method)": [[305, "torch.Tensor.hypot"]], "hypot_() (torch.tensor method)": [[306, "torch.Tensor.hypot_"]], "i0() (torch.tensor method)": [[307, "torch.Tensor.i0"]], "i0_() (torch.tensor method)": [[308, "torch.Tensor.i0_"]], "igamma() (torch.tensor method)": [[309, "torch.Tensor.igamma"]], "igamma_() (torch.tensor method)": [[310, "torch.Tensor.igamma_"]], "igammac() (torch.tensor method)": [[311, "torch.Tensor.igammac"]], "igammac_() (torch.tensor method)": [[312, "torch.Tensor.igammac_"]], "imag 
(torch.tensor attribute)": [[313, "torch.Tensor.imag"]], "index_add() (torch.tensor method)": [[314, "torch.Tensor.index_add"]], "index_add_() (torch.tensor method)": [[315, "torch.Tensor.index_add_"]], "index_copy() (torch.tensor method)": [[316, "torch.Tensor.index_copy"]], "index_copy_() (torch.tensor method)": [[317, "torch.Tensor.index_copy_"]], "index_fill() (torch.tensor method)": [[318, "torch.Tensor.index_fill"]], "index_fill_() (torch.tensor method)": [[319, "torch.Tensor.index_fill_"]], "index_put() (torch.tensor method)": [[320, "torch.Tensor.index_put"]], "index_put_() (torch.tensor method)": [[321, "torch.Tensor.index_put_"]], "index_reduce() (torch.tensor method)": [[322, "torch.Tensor.index_reduce"]], "index_reduce_() (torch.tensor method)": [[323, "torch.Tensor.index_reduce_"]], "index_select() (torch.tensor method)": [[324, "torch.Tensor.index_select"]], "indices() (torch.tensor method)": [[325, "torch.Tensor.indices"]], "inner() (torch.tensor method)": [[326, "torch.Tensor.inner"]], "int() (torch.tensor method)": [[327, "torch.Tensor.int"]], "int_repr() (torch.tensor method)": [[328, "torch.Tensor.int_repr"]], "inverse() (torch.tensor method)": [[329, "torch.Tensor.inverse"]], "is_coalesced() (torch.tensor method)": [[330, "torch.Tensor.is_coalesced"]], "is_complex() (torch.tensor method)": [[331, "torch.Tensor.is_complex"]], "is_conj() (torch.tensor method)": [[332, "torch.Tensor.is_conj"]], "is_contiguous() (torch.tensor method)": [[333, "torch.Tensor.is_contiguous"]], "is_cuda (torch.tensor attribute)": [[334, "torch.Tensor.is_cuda"]], "is_floating_point() (torch.tensor method)": [[335, "torch.Tensor.is_floating_point"]], "is_inference() (torch.tensor method)": [[336, "torch.Tensor.is_inference"]], "is_leaf (torch.tensor attribute)": [[337, "torch.Tensor.is_leaf"]], "is_meta (torch.tensor attribute)": [[338, "torch.Tensor.is_meta"]], "is_pinned() (torch.tensor method)": [[339, "torch.Tensor.is_pinned"]], "is_quantized (torch.tensor attribute)": [[340, "torch.Tensor.is_quantized"]], "is_set_to() (torch.tensor method)": [[341, "torch.Tensor.is_set_to"]], "is_shared() (torch.tensor method)": [[342, "torch.Tensor.is_shared"]], "is_signed() (torch.tensor method)": [[343, "torch.Tensor.is_signed"]], "is_sparse (torch.tensor attribute)": [[344, "torch.Tensor.is_sparse"]], "is_sparse_csr (torch.tensor attribute)": [[345, "torch.Tensor.is_sparse_csr"]], "isclose() (torch.tensor method)": [[346, "torch.Tensor.isclose"]], "isfinite() (torch.tensor method)": [[347, "torch.Tensor.isfinite"]], "isinf() (torch.tensor method)": [[348, "torch.Tensor.isinf"]], "isnan() (torch.tensor method)": [[349, "torch.Tensor.isnan"]], "isneginf() (torch.tensor method)": [[350, "torch.Tensor.isneginf"]], "isposinf() (torch.tensor method)": [[351, "torch.Tensor.isposinf"]], "isreal() (torch.tensor method)": [[352, "torch.Tensor.isreal"]], "istft() (torch.tensor method)": [[353, "torch.Tensor.istft"]], "item() (torch.tensor method)": [[354, "torch.Tensor.item"]], "itemsize (torch.tensor attribute)": [[355, "torch.Tensor.itemsize"]], "kthvalue() (torch.tensor method)": [[356, "torch.Tensor.kthvalue"]], "lcm() (torch.tensor method)": [[357, "torch.Tensor.lcm"]], "lcm_() (torch.tensor method)": [[358, "torch.Tensor.lcm_"]], "ldexp() (torch.tensor method)": [[359, "torch.Tensor.ldexp"]], "ldexp_() (torch.tensor method)": [[360, "torch.Tensor.ldexp_"]], "le() (torch.tensor method)": [[361, "torch.Tensor.le"]], "le_() (torch.tensor method)": [[362, "torch.Tensor.le_"]], "lerp() (torch.tensor method)": 
[[363, "torch.Tensor.lerp"]], "lerp_() (torch.tensor method)": [[364, "torch.Tensor.lerp_"]], "less() (torch.tensor method)": [[365, "torch.Tensor.less"]], "less_() (torch.tensor method)": [[366, "torch.Tensor.less_"]], "less_equal() (torch.tensor method)": [[367, "torch.Tensor.less_equal"]], "less_equal_() (torch.tensor method)": [[368, "torch.Tensor.less_equal_"]], "lgamma() (torch.tensor method)": [[369, "torch.Tensor.lgamma"]], "lgamma_() (torch.tensor method)": [[370, "torch.Tensor.lgamma_"]], "log() (torch.tensor method)": [[371, "torch.Tensor.log"]], "log10() (torch.tensor method)": [[372, "torch.Tensor.log10"]], "log10_() (torch.tensor method)": [[373, "torch.Tensor.log10_"]], "log1p() (torch.tensor method)": [[374, "torch.Tensor.log1p"]], "log1p_() (torch.tensor method)": [[375, "torch.Tensor.log1p_"]], "log2() (torch.tensor method)": [[376, "torch.Tensor.log2"]], "log2_() (torch.tensor method)": [[377, "torch.Tensor.log2_"]], "log_() (torch.tensor method)": [[378, "torch.Tensor.log_"]], "log_normal_() (torch.tensor method)": [[379, "torch.Tensor.log_normal_"]], "logaddexp() (torch.tensor method)": [[380, "torch.Tensor.logaddexp"]], "logaddexp2() (torch.tensor method)": [[381, "torch.Tensor.logaddexp2"]], "logcumsumexp() (torch.tensor method)": [[382, "torch.Tensor.logcumsumexp"]], "logdet() (torch.tensor method)": [[383, "torch.Tensor.logdet"]], "logical_and() (torch.tensor method)": [[384, "torch.Tensor.logical_and"]], "logical_and_() (torch.tensor method)": [[385, "torch.Tensor.logical_and_"]], "logical_not() (torch.tensor method)": [[386, "torch.Tensor.logical_not"]], "logical_not_() (torch.tensor method)": [[387, "torch.Tensor.logical_not_"]], "logical_or() (torch.tensor method)": [[388, "torch.Tensor.logical_or"]], "logical_or_() (torch.tensor method)": [[389, "torch.Tensor.logical_or_"]], "logical_xor() (torch.tensor method)": [[390, "torch.Tensor.logical_xor"]], "logical_xor_() (torch.tensor method)": [[391, "torch.Tensor.logical_xor_"]], "logit() (torch.tensor method)": [[392, "torch.Tensor.logit"]], "logit_() (torch.tensor method)": [[393, "torch.Tensor.logit_"]], "logsumexp() (torch.tensor method)": [[394, "torch.Tensor.logsumexp"]], "long() (torch.tensor method)": [[395, "torch.Tensor.long"]], "lt() (torch.tensor method)": [[396, "torch.Tensor.lt"]], "lt_() (torch.tensor method)": [[397, "torch.Tensor.lt_"]], "lu() (torch.tensor method)": [[398, "torch.Tensor.lu"]], "lu_solve() (torch.tensor method)": [[399, "torch.Tensor.lu_solve"]], "map_() (torch.tensor method)": [[400, "torch.Tensor.map_"]], "masked_fill() (torch.tensor method)": [[401, "torch.Tensor.masked_fill"]], "masked_fill_() (torch.tensor method)": [[402, "torch.Tensor.masked_fill_"]], "masked_scatter() (torch.tensor method)": [[403, "torch.Tensor.masked_scatter"]], "masked_scatter_() (torch.tensor method)": [[404, "torch.Tensor.masked_scatter_"]], "masked_select() (torch.tensor method)": [[405, "torch.Tensor.masked_select"]], "matmul() (torch.tensor method)": [[406, "torch.Tensor.matmul"]], "matrix_exp() (torch.tensor method)": [[407, "torch.Tensor.matrix_exp"]], "matrix_power() (torch.tensor method)": [[408, "torch.Tensor.matrix_power"]], "max() (torch.tensor method)": [[409, "torch.Tensor.max"]], "maximum() (torch.tensor method)": [[410, "torch.Tensor.maximum"]], "mean() (torch.tensor method)": [[411, "torch.Tensor.mean"]], "median() (torch.tensor method)": [[412, "torch.Tensor.median"]], "min() (torch.tensor method)": [[413, "torch.Tensor.min"]], "minimum() (torch.tensor method)": [[414, 
"torch.Tensor.minimum"]], "mm() (torch.tensor method)": [[415, "torch.Tensor.mm"]], "mode() (torch.tensor method)": [[416, "torch.Tensor.mode"]], "module_load() (torch.tensor method)": [[417, "torch.Tensor.module_load"]], "moveaxis() (torch.tensor method)": [[418, "torch.Tensor.moveaxis"]], "movedim() (torch.tensor method)": [[419, "torch.Tensor.movedim"]], "msort() (torch.tensor method)": [[420, "torch.Tensor.msort"]], "mul() (torch.tensor method)": [[421, "torch.Tensor.mul"]], "mul_() (torch.tensor method)": [[422, "torch.Tensor.mul_"]], "multinomial() (torch.tensor method)": [[423, "torch.Tensor.multinomial"]], "multiply() (torch.tensor method)": [[424, "torch.Tensor.multiply"]], "multiply_() (torch.tensor method)": [[425, "torch.Tensor.multiply_"]], "mv() (torch.tensor method)": [[426, "torch.Tensor.mv"]], "mvlgamma() (torch.tensor method)": [[427, "torch.Tensor.mvlgamma"]], "mvlgamma_() (torch.tensor method)": [[428, "torch.Tensor.mvlgamma_"]], "nan_to_num() (torch.tensor method)": [[429, "torch.Tensor.nan_to_num"]], "nan_to_num_() (torch.tensor method)": [[430, "torch.Tensor.nan_to_num_"]], "nanmean() (torch.tensor method)": [[431, "torch.Tensor.nanmean"]], "nanmedian() (torch.tensor method)": [[432, "torch.Tensor.nanmedian"]], "nanquantile() (torch.tensor method)": [[433, "torch.Tensor.nanquantile"]], "nansum() (torch.tensor method)": [[434, "torch.Tensor.nansum"]], "narrow() (torch.tensor method)": [[435, "torch.Tensor.narrow"]], "narrow_copy() (torch.tensor method)": [[436, "torch.Tensor.narrow_copy"]], "nbytes (torch.tensor attribute)": [[437, "torch.Tensor.nbytes"]], "ndim (torch.tensor attribute)": [[438, "torch.Tensor.ndim"]], "ndimension() (torch.tensor method)": [[439, "torch.Tensor.ndimension"]], "ne() (torch.tensor method)": [[440, "torch.Tensor.ne"]], "ne_() (torch.tensor method)": [[441, "torch.Tensor.ne_"]], "neg() (torch.tensor method)": [[442, "torch.Tensor.neg"]], "neg_() (torch.tensor method)": [[443, "torch.Tensor.neg_"]], "negative() (torch.tensor method)": [[444, "torch.Tensor.negative"]], "negative_() (torch.tensor method)": [[445, "torch.Tensor.negative_"]], "nelement() (torch.tensor method)": [[446, "torch.Tensor.nelement"]], "new_empty() (torch.tensor method)": [[447, "torch.Tensor.new_empty"]], "new_full() (torch.tensor method)": [[448, "torch.Tensor.new_full"]], "new_ones() (torch.tensor method)": [[449, "torch.Tensor.new_ones"]], "new_tensor() (torch.tensor method)": [[450, "torch.Tensor.new_tensor"]], "new_zeros() (torch.tensor method)": [[451, "torch.Tensor.new_zeros"]], "nextafter() (torch.tensor method)": [[452, "torch.Tensor.nextafter"]], "nextafter_() (torch.tensor method)": [[453, "torch.Tensor.nextafter_"]], "nonzero() (torch.tensor method)": [[454, "torch.Tensor.nonzero"]], "norm() (torch.tensor method)": [[455, "torch.Tensor.norm"]], "normal_() (torch.tensor method)": [[456, "torch.Tensor.normal_"]], "not_equal() (torch.tensor method)": [[457, "torch.Tensor.not_equal"]], "not_equal_() (torch.tensor method)": [[458, "torch.Tensor.not_equal_"]], "numel() (torch.tensor method)": [[459, "torch.Tensor.numel"]], "numpy() (torch.tensor method)": [[460, "torch.Tensor.numpy"]], "orgqr() (torch.tensor method)": [[461, "torch.Tensor.orgqr"]], "ormqr() (torch.tensor method)": [[462, "torch.Tensor.ormqr"]], "outer() (torch.tensor method)": [[463, "torch.Tensor.outer"]], "permute() (torch.tensor method)": [[464, "torch.Tensor.permute"]], "pin_memory() (torch.tensor method)": [[465, "torch.Tensor.pin_memory"]], "pinverse() (torch.tensor method)": [[466, 
"torch.Tensor.pinverse"]], "polygamma() (torch.tensor method)": [[467, "torch.Tensor.polygamma"]], "polygamma_() (torch.tensor method)": [[468, "torch.Tensor.polygamma_"]], "positive() (torch.tensor method)": [[469, "torch.Tensor.positive"]], "pow() (torch.tensor method)": [[470, "torch.Tensor.pow"]], "pow_() (torch.tensor method)": [[471, "torch.Tensor.pow_"]], "prod() (torch.tensor method)": [[472, "torch.Tensor.prod"]], "put_() (torch.tensor method)": [[473, "torch.Tensor.put_"]], "q_per_channel_axis() (torch.tensor method)": [[474, "torch.Tensor.q_per_channel_axis"]], "q_per_channel_scales() (torch.tensor method)": [[475, "torch.Tensor.q_per_channel_scales"]], "q_per_channel_zero_points() (torch.tensor method)": [[476, "torch.Tensor.q_per_channel_zero_points"]], "q_scale() (torch.tensor method)": [[477, "torch.Tensor.q_scale"]], "q_zero_point() (torch.tensor method)": [[478, "torch.Tensor.q_zero_point"]], "qr() (torch.tensor method)": [[479, "torch.Tensor.qr"]], "qscheme() (torch.tensor method)": [[480, "torch.Tensor.qscheme"]], "quantile() (torch.tensor method)": [[481, "torch.Tensor.quantile"]], "rad2deg() (torch.tensor method)": [[482, "torch.Tensor.rad2deg"]], "random_() (torch.tensor method)": [[483, "torch.Tensor.random_"]], "ravel() (torch.tensor method)": [[484, "torch.Tensor.ravel"]], "real (torch.tensor attribute)": [[485, "torch.Tensor.real"]], "reciprocal() (torch.tensor method)": [[486, "torch.Tensor.reciprocal"]], "reciprocal_() (torch.tensor method)": [[487, "torch.Tensor.reciprocal_"]], "record_stream() (torch.tensor method)": [[488, "torch.Tensor.record_stream"]], "register_hook() (torch.tensor method)": [[489, "torch.Tensor.register_hook"]], "register_post_accumulate_grad_hook() (torch.tensor method)": [[490, "torch.Tensor.register_post_accumulate_grad_hook"]], "remainder() (torch.tensor method)": [[491, "torch.Tensor.remainder"]], "remainder_() (torch.tensor method)": [[492, "torch.Tensor.remainder_"]], "renorm() (torch.tensor method)": [[493, "torch.Tensor.renorm"]], "renorm_() (torch.tensor method)": [[494, "torch.Tensor.renorm_"]], "repeat() (torch.tensor method)": [[495, "torch.Tensor.repeat"]], "repeat_interleave() (torch.tensor method)": [[496, "torch.Tensor.repeat_interleave"]], "requires_grad (torch.tensor attribute)": [[497, "torch.Tensor.requires_grad"]], "requires_grad_() (torch.tensor method)": [[498, "torch.Tensor.requires_grad_"]], "reshape() (torch.tensor method)": [[499, "torch.Tensor.reshape"]], "reshape_as() (torch.tensor method)": [[500, "torch.Tensor.reshape_as"]], "resize_() (torch.tensor method)": [[501, "torch.Tensor.resize_"]], "resize_as_() (torch.tensor method)": [[502, "torch.Tensor.resize_as_"]], "resolve_conj() (torch.tensor method)": [[503, "torch.Tensor.resolve_conj"]], "resolve_neg() (torch.tensor method)": [[504, "torch.Tensor.resolve_neg"]], "retain_grad() (torch.tensor method)": [[505, "torch.Tensor.retain_grad"]], "retains_grad (torch.tensor attribute)": [[506, "torch.Tensor.retains_grad"]], "roll() (torch.tensor method)": [[507, "torch.Tensor.roll"]], "rot90() (torch.tensor method)": [[508, "torch.Tensor.rot90"]], "round() (torch.tensor method)": [[509, "torch.Tensor.round"]], "round_() (torch.tensor method)": [[510, "torch.Tensor.round_"]], "row_indices() (torch.tensor method)": [[511, "torch.Tensor.row_indices"]], "rsqrt() (torch.tensor method)": [[512, "torch.Tensor.rsqrt"]], "rsqrt_() (torch.tensor method)": [[513, "torch.Tensor.rsqrt_"]], "scatter() (torch.tensor method)": [[514, "torch.Tensor.scatter"]], "scatter_() 
(torch.tensor method)": [[515, "torch.Tensor.scatter_"]], "scatter_add() (torch.tensor method)": [[516, "torch.Tensor.scatter_add"]], "scatter_add_() (torch.tensor method)": [[517, "torch.Tensor.scatter_add_"]], "scatter_reduce() (torch.tensor method)": [[518, "torch.Tensor.scatter_reduce"]], "scatter_reduce_() (torch.tensor method)": [[519, "torch.Tensor.scatter_reduce_"]], "select() (torch.tensor method)": [[520, "torch.Tensor.select"]], "select_scatter() (torch.tensor method)": [[521, "torch.Tensor.select_scatter"]], "set_() (torch.tensor method)": [[522, "torch.Tensor.set_"]], "sgn() (torch.tensor method)": [[523, "torch.Tensor.sgn"]], "sgn_() (torch.tensor method)": [[524, "torch.Tensor.sgn_"]], "shape (torch.tensor attribute)": [[525, "torch.Tensor.shape"]], "share_memory_() (torch.tensor method)": [[526, "torch.Tensor.share_memory_"]], "short() (torch.tensor method)": [[527, "torch.Tensor.short"]], "sigmoid() (torch.tensor method)": [[528, "torch.Tensor.sigmoid"]], "sigmoid_() (torch.tensor method)": [[529, "torch.Tensor.sigmoid_"]], "sign() (torch.tensor method)": [[530, "torch.Tensor.sign"]], "sign_() (torch.tensor method)": [[531, "torch.Tensor.sign_"]], "signbit() (torch.tensor method)": [[532, "torch.Tensor.signbit"]], "sin() (torch.tensor method)": [[533, "torch.Tensor.sin"]], "sin_() (torch.tensor method)": [[534, "torch.Tensor.sin_"]], "sinc() (torch.tensor method)": [[535, "torch.Tensor.sinc"]], "sinc_() (torch.tensor method)": [[536, "torch.Tensor.sinc_"]], "sinh() (torch.tensor method)": [[537, "torch.Tensor.sinh"]], "sinh_() (torch.tensor method)": [[538, "torch.Tensor.sinh_"]], "size() (torch.tensor method)": [[539, "torch.Tensor.size"]], "slice_scatter() (torch.tensor method)": [[540, "torch.Tensor.slice_scatter"]], "slogdet() (torch.tensor method)": [[541, "torch.Tensor.slogdet"]], "smm() (torch.tensor method)": [[542, "torch.Tensor.smm"]], "softmax() (torch.tensor method)": [[543, "torch.Tensor.softmax"]], "sort() (torch.tensor method)": [[544, "torch.Tensor.sort"]], "sparse_dim() (torch.tensor method)": [[545, "torch.Tensor.sparse_dim"]], "sparse_mask() (torch.tensor method)": [[546, "torch.Tensor.sparse_mask"]], "sparse_resize_() (torch.tensor method)": [[547, "torch.Tensor.sparse_resize_"]], "sparse_resize_and_clear_() (torch.tensor method)": [[548, "torch.Tensor.sparse_resize_and_clear_"]], "split() (torch.tensor method)": [[549, "torch.Tensor.split"]], "sqrt() (torch.tensor method)": [[550, "torch.Tensor.sqrt"]], "sqrt_() (torch.tensor method)": [[551, "torch.Tensor.sqrt_"]], "square() (torch.tensor method)": [[552, "torch.Tensor.square"]], "square_() (torch.tensor method)": [[553, "torch.Tensor.square_"]], "squeeze() (torch.tensor method)": [[554, "torch.Tensor.squeeze"]], "squeeze_() (torch.tensor method)": [[555, "torch.Tensor.squeeze_"]], "sspaddmm() (torch.tensor method)": [[556, "torch.Tensor.sspaddmm"]], "std() (torch.tensor method)": [[557, "torch.Tensor.std"]], "stft() (torch.tensor method)": [[558, "torch.Tensor.stft"]], "storage() (torch.tensor method)": [[559, "torch.Tensor.storage"]], "storage_offset() (torch.tensor method)": [[560, "torch.Tensor.storage_offset"]], "storage_type() (torch.tensor method)": [[561, "torch.Tensor.storage_type"]], "stride() (torch.tensor method)": [[562, "torch.Tensor.stride"]], "sub() (torch.tensor method)": [[563, "torch.Tensor.sub"]], "sub_() (torch.tensor method)": [[564, "torch.Tensor.sub_"]], "subtract() (torch.tensor method)": [[565, "torch.Tensor.subtract"]], "subtract_() (torch.tensor method)": [[566, 
"torch.Tensor.subtract_"]], "sum() (torch.tensor method)": [[567, "torch.Tensor.sum"]], "sum_to_size() (torch.tensor method)": [[568, "torch.Tensor.sum_to_size"]], "svd() (torch.tensor method)": [[569, "torch.Tensor.svd"]], "swapaxes() (torch.tensor method)": [[570, "torch.Tensor.swapaxes"]], "swapdims() (torch.tensor method)": [[571, "torch.Tensor.swapdims"]], "t() (torch.tensor method)": [[572, "torch.Tensor.t"]], "t_() (torch.tensor method)": [[573, "torch.Tensor.t_"]], "take() (torch.tensor method)": [[574, "torch.Tensor.take"]], "take_along_dim() (torch.tensor method)": [[575, "torch.Tensor.take_along_dim"]], "tan() (torch.tensor method)": [[576, "torch.Tensor.tan"]], "tan_() (torch.tensor method)": [[577, "torch.Tensor.tan_"]], "tanh() (torch.tensor method)": [[578, "torch.Tensor.tanh"]], "tanh_() (torch.tensor method)": [[579, "torch.Tensor.tanh_"]], "tensor_split() (torch.tensor method)": [[580, "torch.Tensor.tensor_split"]], "tile() (torch.tensor method)": [[581, "torch.Tensor.tile"]], "to() (torch.tensor method)": [[582, "torch.Tensor.to"]], "to_dense() (torch.tensor method)": [[583, "torch.Tensor.to_dense"]], "to_mkldnn() (torch.tensor method)": [[584, "torch.Tensor.to_mkldnn"]], "to_sparse() (torch.tensor method)": [[585, "torch.Tensor.to_sparse"]], "to_sparse_bsc() (torch.tensor method)": [[586, "torch.Tensor.to_sparse_bsc"]], "to_sparse_bsr() (torch.tensor method)": [[587, "torch.Tensor.to_sparse_bsr"]], "to_sparse_coo() (torch.tensor method)": [[588, "torch.Tensor.to_sparse_coo"]], "to_sparse_csc() (torch.tensor method)": [[589, "torch.Tensor.to_sparse_csc"]], "to_sparse_csr() (torch.tensor method)": [[590, "torch.Tensor.to_sparse_csr"]], "tolist() (torch.tensor method)": [[591, "torch.Tensor.tolist"]], "topk() (torch.tensor method)": [[592, "torch.Tensor.topk"]], "trace() (torch.tensor method)": [[593, "torch.Tensor.trace"]], "transpose() (torch.tensor method)": [[594, "torch.Tensor.transpose"]], "transpose_() (torch.tensor method)": [[595, "torch.Tensor.transpose_"]], "triangular_solve() (torch.tensor method)": [[596, "torch.Tensor.triangular_solve"]], "tril() (torch.tensor method)": [[597, "torch.Tensor.tril"]], "tril_() (torch.tensor method)": [[598, "torch.Tensor.tril_"]], "triu() (torch.tensor method)": [[599, "torch.Tensor.triu"]], "triu_() (torch.tensor method)": [[600, "torch.Tensor.triu_"]], "true_divide() (torch.tensor method)": [[601, "torch.Tensor.true_divide"]], "true_divide_() (torch.tensor method)": [[602, "torch.Tensor.true_divide_"]], "trunc() (torch.tensor method)": [[603, "torch.Tensor.trunc"]], "trunc_() (torch.tensor method)": [[604, "torch.Tensor.trunc_"]], "type() (torch.tensor method)": [[605, "torch.Tensor.type"]], "type_as() (torch.tensor method)": [[606, "torch.Tensor.type_as"]], "unbind() (torch.tensor method)": [[607, "torch.Tensor.unbind"]], "unflatten() (torch.tensor method)": [[608, "torch.Tensor.unflatten"]], "unfold() (torch.tensor method)": [[609, "torch.Tensor.unfold"]], "uniform_() (torch.tensor method)": [[610, "torch.Tensor.uniform_"]], "unique() (torch.tensor method)": [[611, "torch.Tensor.unique"]], "unique_consecutive() (torch.tensor method)": [[612, "torch.Tensor.unique_consecutive"]], "unsqueeze() (torch.tensor method)": [[613, "torch.Tensor.unsqueeze"]], "unsqueeze_() (torch.tensor method)": [[614, "torch.Tensor.unsqueeze_"]], "untyped_storage() (torch.tensor method)": [[615, "torch.Tensor.untyped_storage"]], "values() (torch.tensor method)": [[616, "torch.Tensor.values"]], "var() (torch.tensor method)": [[617, 
"torch.Tensor.var"]], "vdot() (torch.tensor method)": [[618, "torch.Tensor.vdot"]], "view() (torch.tensor method)": [[619, "torch.Tensor.view"]], "view_as() (torch.tensor method)": [[620, "torch.Tensor.view_as"]], "vsplit() (torch.tensor method)": [[621, "torch.Tensor.vsplit"]], "where() (torch.tensor method)": [[622, "torch.Tensor.where"]], "xlogy() (torch.tensor method)": [[623, "torch.Tensor.xlogy"]], "xlogy_() (torch.tensor method)": [[624, "torch.Tensor.xlogy_"]], "xpu() (torch.tensor method)": [[625, "torch.Tensor.xpu"]], "zero_() (torch.tensor method)": [[626, "torch.Tensor.zero_"]], "_assert() (in module torch)": [[627, "torch._assert"]], "_foreach_abs() (in module torch)": [[628, "torch._foreach_abs"]], "_foreach_abs_() (in module torch)": [[629, "torch._foreach_abs_"]], "_foreach_acos() (in module torch)": [[630, "torch._foreach_acos"]], "_foreach_acos_() (in module torch)": [[631, "torch._foreach_acos_"]], "_foreach_asin() (in module torch)": [[632, "torch._foreach_asin"]], "_foreach_asin_() (in module torch)": [[633, "torch._foreach_asin_"]], "_foreach_atan() (in module torch)": [[634, "torch._foreach_atan"]], "_foreach_atan_() (in module torch)": [[635, "torch._foreach_atan_"]], "_foreach_ceil() (in module torch)": [[636, "torch._foreach_ceil"]], "_foreach_ceil_() (in module torch)": [[637, "torch._foreach_ceil_"]], "_foreach_cos() (in module torch)": [[638, "torch._foreach_cos"]], "_foreach_cos_() (in module torch)": [[639, "torch._foreach_cos_"]], "_foreach_cosh() (in module torch)": [[640, "torch._foreach_cosh"]], "_foreach_cosh_() (in module torch)": [[641, "torch._foreach_cosh_"]], "_foreach_erf() (in module torch)": [[642, "torch._foreach_erf"]], "_foreach_erf_() (in module torch)": [[643, "torch._foreach_erf_"]], "_foreach_erfc() (in module torch)": [[644, "torch._foreach_erfc"]], "_foreach_erfc_() (in module torch)": [[645, "torch._foreach_erfc_"]], "_foreach_exp() (in module torch)": [[646, "torch._foreach_exp"]], "_foreach_exp_() (in module torch)": [[647, "torch._foreach_exp_"]], "_foreach_expm1() (in module torch)": [[648, "torch._foreach_expm1"]], "_foreach_expm1_() (in module torch)": [[649, "torch._foreach_expm1_"]], "_foreach_floor() (in module torch)": [[650, "torch._foreach_floor"]], "_foreach_floor_() (in module torch)": [[651, "torch._foreach_floor_"]], "_foreach_frac() (in module torch)": [[652, "torch._foreach_frac"]], "_foreach_frac_() (in module torch)": [[653, "torch._foreach_frac_"]], "_foreach_lgamma() (in module torch)": [[654, "torch._foreach_lgamma"]], "_foreach_lgamma_() (in module torch)": [[655, "torch._foreach_lgamma_"]], "_foreach_log() (in module torch)": [[656, "torch._foreach_log"]], "_foreach_log10() (in module torch)": [[657, "torch._foreach_log10"]], "_foreach_log10_() (in module torch)": [[658, "torch._foreach_log10_"]], "_foreach_log1p() (in module torch)": [[659, "torch._foreach_log1p"]], "_foreach_log1p_() (in module torch)": [[660, "torch._foreach_log1p_"]], "_foreach_log2() (in module torch)": [[661, "torch._foreach_log2"]], "_foreach_log2_() (in module torch)": [[662, "torch._foreach_log2_"]], "_foreach_log_() (in module torch)": [[663, "torch._foreach_log_"]], "_foreach_neg() (in module torch)": [[664, "torch._foreach_neg"]], "_foreach_neg_() (in module torch)": [[665, "torch._foreach_neg_"]], "_foreach_reciprocal() (in module torch)": [[666, "torch._foreach_reciprocal"]], "_foreach_reciprocal_() (in module torch)": [[667, "torch._foreach_reciprocal_"]], "_foreach_round() (in module torch)": [[668, "torch._foreach_round"]], 
"_foreach_round_() (in module torch)": [[669, "torch._foreach_round_"]], "_foreach_sigmoid() (in module torch)": [[670, "torch._foreach_sigmoid"]], "_foreach_sigmoid_() (in module torch)": [[671, "torch._foreach_sigmoid_"]], "_foreach_sin() (in module torch)": [[672, "torch._foreach_sin"]], "_foreach_sin_() (in module torch)": [[673, "torch._foreach_sin_"]], "_foreach_sinh() (in module torch)": [[674, "torch._foreach_sinh"]], "_foreach_sinh_() (in module torch)": [[675, "torch._foreach_sinh_"]], "_foreach_sqrt() (in module torch)": [[676, "torch._foreach_sqrt"]], "_foreach_sqrt_() (in module torch)": [[677, "torch._foreach_sqrt_"]], "_foreach_tan() (in module torch)": [[678, "torch._foreach_tan"]], "_foreach_tan_() (in module torch)": [[679, "torch._foreach_tan_"]], "_foreach_trunc() (in module torch)": [[680, "torch._foreach_trunc"]], "_foreach_trunc_() (in module torch)": [[681, "torch._foreach_trunc_"]], "_foreach_zero_() (in module torch)": [[682, "torch._foreach_zero_"]], "set_logs() (in module torch._logging)": [[683, "torch._logging.set_logs"]], "abs() (in module torch)": [[684, "torch.abs"]], "absolute() (in module torch)": [[685, "torch.absolute"]], "acos() (in module torch)": [[686, "torch.acos"]], "acosh() (in module torch)": [[687, "torch.acosh"]], "add() (in module torch)": [[688, "torch.add"]], "addbmm() (in module torch)": [[689, "torch.addbmm"]], "addcdiv() (in module torch)": [[690, "torch.addcdiv"]], "addcmul() (in module torch)": [[691, "torch.addcmul"]], "addmm() (in module torch)": [[692, "torch.addmm"]], "addmv() (in module torch)": [[693, "torch.addmv"]], "addr() (in module torch)": [[694, "torch.addr"]], "adjoint() (in module torch)": [[695, "torch.adjoint"]], "all() (in module torch)": [[696, "torch.all"]], "allclose() (in module torch)": [[697, "torch.allclose"]], "amax() (in module torch)": [[698, "torch.amax"]], "amin() (in module torch)": [[699, "torch.amin"]], "aminmax() (in module torch)": [[700, "torch.aminmax"]], "angle() (in module torch)": [[701, "torch.angle"]], "any() (in module torch)": [[702, "torch.any"]], "bnrelu2d (class in torch.ao.nn.intrinsic)": [[703, "torch.ao.nn.intrinsic.BNReLU2d"]], "bnrelu3d (class in torch.ao.nn.intrinsic)": [[704, "torch.ao.nn.intrinsic.BNReLU3d"]], "convbn1d (class in torch.ao.nn.intrinsic)": [[705, "torch.ao.nn.intrinsic.ConvBn1d"]], "convbn2d (class in torch.ao.nn.intrinsic)": [[706, "torch.ao.nn.intrinsic.ConvBn2d"]], "convbn3d (class in torch.ao.nn.intrinsic)": [[707, "torch.ao.nn.intrinsic.ConvBn3d"]], "convbnrelu1d (class in torch.ao.nn.intrinsic)": [[708, "torch.ao.nn.intrinsic.ConvBnReLU1d"]], "convbnrelu2d (class in torch.ao.nn.intrinsic)": [[709, "torch.ao.nn.intrinsic.ConvBnReLU2d"]], "convbnrelu3d (class in torch.ao.nn.intrinsic)": [[710, "torch.ao.nn.intrinsic.ConvBnReLU3d"]], "convrelu1d (class in torch.ao.nn.intrinsic)": [[711, "torch.ao.nn.intrinsic.ConvReLU1d"]], "convrelu2d (class in torch.ao.nn.intrinsic)": [[712, "torch.ao.nn.intrinsic.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic)": [[713, "torch.ao.nn.intrinsic.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic)": [[714, "torch.ao.nn.intrinsic.LinearReLU"]], "convbn1d (class in torch.ao.nn.intrinsic.qat)": [[715, "torch.ao.nn.intrinsic.qat.ConvBn1d"]], "convbn2d (class in torch.ao.nn.intrinsic.qat)": [[716, "torch.ao.nn.intrinsic.qat.ConvBn2d"]], "convbn3d (class in torch.ao.nn.intrinsic.qat)": [[717, "torch.ao.nn.intrinsic.qat.ConvBn3d"]], "convbnrelu1d (class in torch.ao.nn.intrinsic.qat)": [[718, 
"torch.ao.nn.intrinsic.qat.ConvBnReLU1d"]], "convbnrelu2d (class in torch.ao.nn.intrinsic.qat)": [[719, "torch.ao.nn.intrinsic.qat.ConvBnReLU2d"]], "convbnrelu3d (class in torch.ao.nn.intrinsic.qat)": [[720, "torch.ao.nn.intrinsic.qat.ConvBnReLU3d"]], "convrelu2d (class in torch.ao.nn.intrinsic.qat)": [[721, "torch.ao.nn.intrinsic.qat.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic.qat)": [[722, "torch.ao.nn.intrinsic.qat.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic.qat)": [[723, "torch.ao.nn.intrinsic.qat.LinearReLU"]], "freeze_bn_stats (class in torch.ao.nn.intrinsic.qat)": [[724, "torch.ao.nn.intrinsic.qat.freeze_bn_stats"]], "update_bn_stats (class in torch.ao.nn.intrinsic.qat)": [[725, "torch.ao.nn.intrinsic.qat.update_bn_stats"]], "bnrelu2d (class in torch.ao.nn.intrinsic.quantized)": [[726, "torch.ao.nn.intrinsic.quantized.BNReLU2d"]], "bnrelu3d (class in torch.ao.nn.intrinsic.quantized)": [[727, "torch.ao.nn.intrinsic.quantized.BNReLU3d"]], "convrelu1d (class in torch.ao.nn.intrinsic.quantized)": [[728, "torch.ao.nn.intrinsic.quantized.ConvReLU1d"]], "convrelu2d (class in torch.ao.nn.intrinsic.quantized)": [[729, "torch.ao.nn.intrinsic.quantized.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic.quantized)": [[730, "torch.ao.nn.intrinsic.quantized.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic.quantized)": [[731, "torch.ao.nn.intrinsic.quantized.LinearReLU"]], "linearrelu (class in torch.ao.nn.intrinsic.quantized.dynamic)": [[732, "torch.ao.nn.intrinsic.quantized.dynamic.LinearReLU"]], "conv2d (class in torch.ao.nn.qat)": [[733, "torch.ao.nn.qat.Conv2d"]], "conv3d (class in torch.ao.nn.qat)": [[734, "torch.ao.nn.qat.Conv3d"]], "linear (class in torch.ao.nn.qat)": [[735, "torch.ao.nn.qat.Linear"]], "from_float() (torch.ao.nn.qat.linear class method)": [[735, "torch.ao.nn.qat.Linear.from_float"]], "linear (class in torch.ao.nn.qat.dynamic)": [[736, "torch.ao.nn.qat.dynamic.Linear"]], "lstm (class in torch.ao.nn.quantizable)": [[737, "torch.ao.nn.quantizable.LSTM"]], "multiheadattention (class in torch.ao.nn.quantizable)": [[738, "torch.ao.nn.quantizable.MultiheadAttention"]], "dequantize() (torch.ao.nn.quantizable.multiheadattention method)": [[738, "torch.ao.nn.quantizable.MultiheadAttention.dequantize"]], "forward() (torch.ao.nn.quantizable.multiheadattention method)": [[738, "torch.ao.nn.quantizable.MultiheadAttention.forward"]], "batchnorm2d (class in torch.ao.nn.quantized)": [[739, "torch.ao.nn.quantized.BatchNorm2d"]], "batchnorm3d (class in torch.ao.nn.quantized)": [[740, "torch.ao.nn.quantized.BatchNorm3d"]], "conv1d (class in torch.ao.nn.quantized)": [[741, "torch.ao.nn.quantized.Conv1d"]], "from_float() (torch.ao.nn.quantized.conv1d class method)": [[741, "torch.ao.nn.quantized.Conv1d.from_float"]], "conv2d (class in torch.ao.nn.quantized)": [[742, "torch.ao.nn.quantized.Conv2d"]], "from_float() (torch.ao.nn.quantized.conv2d class method)": [[742, "torch.ao.nn.quantized.Conv2d.from_float"]], "conv3d (class in torch.ao.nn.quantized)": [[743, "torch.ao.nn.quantized.Conv3d"]], "from_float() (torch.ao.nn.quantized.conv3d class method)": [[743, "torch.ao.nn.quantized.Conv3d.from_float"]], "convtranspose1d (class in torch.ao.nn.quantized)": [[744, "torch.ao.nn.quantized.ConvTranspose1d"]], "convtranspose2d (class in torch.ao.nn.quantized)": [[745, "torch.ao.nn.quantized.ConvTranspose2d"]], "convtranspose3d (class in torch.ao.nn.quantized)": [[746, "torch.ao.nn.quantized.ConvTranspose3d"]], "elu (class in torch.ao.nn.quantized)": [[747, 
"torch.ao.nn.quantized.ELU"]], "embedding (class in torch.ao.nn.quantized)": [[748, "torch.ao.nn.quantized.Embedding"]], "from_float() (torch.ao.nn.quantized.embedding class method)": [[748, "torch.ao.nn.quantized.Embedding.from_float"]], "embeddingbag (class in torch.ao.nn.quantized)": [[749, "torch.ao.nn.quantized.EmbeddingBag"]], "from_float() (torch.ao.nn.quantized.embeddingbag class method)": [[749, "torch.ao.nn.quantized.EmbeddingBag.from_float"]], "fxfloatfunctional (class in torch.ao.nn.quantized)": [[750, "torch.ao.nn.quantized.FXFloatFunctional"]], "floatfunctional (class in torch.ao.nn.quantized)": [[751, "torch.ao.nn.quantized.FloatFunctional"]], "groupnorm (class in torch.ao.nn.quantized)": [[752, "torch.ao.nn.quantized.GroupNorm"]], "hardswish (class in torch.ao.nn.quantized)": [[753, "torch.ao.nn.quantized.Hardswish"]], "instancenorm1d (class in torch.ao.nn.quantized)": [[754, "torch.ao.nn.quantized.InstanceNorm1d"]], "instancenorm2d (class in torch.ao.nn.quantized)": [[755, "torch.ao.nn.quantized.InstanceNorm2d"]], "instancenorm3d (class in torch.ao.nn.quantized)": [[756, "torch.ao.nn.quantized.InstanceNorm3d"]], "layernorm (class in torch.ao.nn.quantized)": [[757, "torch.ao.nn.quantized.LayerNorm"]], "leakyrelu (class in torch.ao.nn.quantized)": [[758, "torch.ao.nn.quantized.LeakyReLU"]], "linear (class in torch.ao.nn.quantized)": [[759, "torch.ao.nn.quantized.Linear"]], "from_float() (torch.ao.nn.quantized.linear class method)": [[759, "torch.ao.nn.quantized.Linear.from_float"]], "from_reference() (torch.ao.nn.quantized.linear class method)": [[759, "torch.ao.nn.quantized.Linear.from_reference"]], "qfunctional (class in torch.ao.nn.quantized)": [[760, "torch.ao.nn.quantized.QFunctional"]], "relu6 (class in torch.ao.nn.quantized)": [[761, "torch.ao.nn.quantized.ReLU6"]], "sigmoid (class in torch.ao.nn.quantized)": [[762, "torch.ao.nn.quantized.Sigmoid"]], "gru (class in torch.ao.nn.quantized.dynamic)": [[763, "torch.ao.nn.quantized.dynamic.GRU"]], "grucell (class in torch.ao.nn.quantized.dynamic)": [[764, "torch.ao.nn.quantized.dynamic.GRUCell"]], "lstm (class in torch.ao.nn.quantized.dynamic)": [[765, "torch.ao.nn.quantized.dynamic.LSTM"]], "lstmcell (class in torch.ao.nn.quantized.dynamic)": [[766, "torch.ao.nn.quantized.dynamic.LSTMCell"]], "linear (class in torch.ao.nn.quantized.dynamic)": [[767, "torch.ao.nn.quantized.dynamic.Linear"]], "from_float() (torch.ao.nn.quantized.dynamic.linear class method)": [[767, "torch.ao.nn.quantized.dynamic.Linear.from_float"]], "from_reference() (torch.ao.nn.quantized.dynamic.linear class method)": [[767, "torch.ao.nn.quantized.dynamic.Linear.from_reference"]], "rnncell (class in torch.ao.nn.quantized.dynamic)": [[768, "torch.ao.nn.quantized.dynamic.RNNCell"]], "adaptive_avg_pool2d (class in torch.ao.nn.quantized.functional)": [[769, "torch.ao.nn.quantized.functional.adaptive_avg_pool2d"]], "adaptive_avg_pool3d (class in torch.ao.nn.quantized.functional)": [[770, "torch.ao.nn.quantized.functional.adaptive_avg_pool3d"]], "avg_pool2d (class in torch.ao.nn.quantized.functional)": [[771, "torch.ao.nn.quantized.functional.avg_pool2d"]], "avg_pool3d (class in torch.ao.nn.quantized.functional)": [[772, "torch.ao.nn.quantized.functional.avg_pool3d"]], "celu (class in torch.ao.nn.quantized.functional)": [[773, "torch.ao.nn.quantized.functional.celu"]], "clamp (class in torch.ao.nn.quantized.functional)": [[774, "torch.ao.nn.quantized.functional.clamp"]], "conv1d (class in torch.ao.nn.quantized.functional)": [[775, 
"torch.ao.nn.quantized.functional.conv1d"]], "conv2d (class in torch.ao.nn.quantized.functional)": [[776, "torch.ao.nn.quantized.functional.conv2d"]], "conv3d (class in torch.ao.nn.quantized.functional)": [[777, "torch.ao.nn.quantized.functional.conv3d"]], "elu (class in torch.ao.nn.quantized.functional)": [[778, "torch.ao.nn.quantized.functional.elu"]], "hardsigmoid (class in torch.ao.nn.quantized.functional)": [[779, "torch.ao.nn.quantized.functional.hardsigmoid"]], "hardswish (class in torch.ao.nn.quantized.functional)": [[780, "torch.ao.nn.quantized.functional.hardswish"]], "hardtanh (class in torch.ao.nn.quantized.functional)": [[781, "torch.ao.nn.quantized.functional.hardtanh"]], "interpolate (class in torch.ao.nn.quantized.functional)": [[782, "torch.ao.nn.quantized.functional.interpolate"]], "leaky_relu (class in torch.ao.nn.quantized.functional)": [[783, "torch.ao.nn.quantized.functional.leaky_relu"]], "linear (class in torch.ao.nn.quantized.functional)": [[784, "torch.ao.nn.quantized.functional.linear"]], "max_pool1d (class in torch.ao.nn.quantized.functional)": [[785, "torch.ao.nn.quantized.functional.max_pool1d"]], "max_pool2d (class in torch.ao.nn.quantized.functional)": [[786, "torch.ao.nn.quantized.functional.max_pool2d"]], "threshold (class in torch.ao.nn.quantized.functional)": [[787, "torch.ao.nn.quantized.functional.threshold"]], "upsample (class in torch.ao.nn.quantized.functional)": [[788, "torch.ao.nn.quantized.functional.upsample"]], "upsample_bilinear (class in torch.ao.nn.quantized.functional)": [[789, "torch.ao.nn.quantized.functional.upsample_bilinear"]], "upsample_nearest (class in torch.ao.nn.quantized.functional)": [[790, "torch.ao.nn.quantized.functional.upsample_nearest"]], "dequantstub (class in torch.ao.quantization)": [[791, "torch.ao.quantization.DeQuantStub"]], "quantstub (class in torch.ao.quantization)": [[792, "torch.ao.quantization.QuantStub"]], "quantwrapper (class in torch.ao.quantization)": [[793, "torch.ao.quantization.QuantWrapper"]], "add_quant_dequant (class in torch.ao.quantization)": [[794, "torch.ao.quantization.add_quant_dequant"]], "backendconfig (class in torch.ao.quantization.backend_config)": [[795, "torch.ao.quantization.backend_config.BackendConfig"]], "configs (torch.ao.quantization.backend_config.backendconfig property)": [[795, "torch.ao.quantization.backend_config.BackendConfig.configs"]], "from_dict() (torch.ao.quantization.backend_config.backendconfig class method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.from_dict"]], "set_backend_pattern_config() (torch.ao.quantization.backend_config.backendconfig method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.set_backend_pattern_config"]], "set_backend_pattern_configs() (torch.ao.quantization.backend_config.backendconfig method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.set_backend_pattern_configs"]], "set_name() (torch.ao.quantization.backend_config.backendconfig method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.set_name"]], "to_dict() (torch.ao.quantization.backend_config.backendconfig method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.to_dict"]], "backendpatternconfig (class in torch.ao.quantization.backend_config)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig"]], "add_dtype_config() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.add_dtype_config"]], "from_dict() 
(torch.ao.quantization.backend_config.backendpatternconfig class method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.from_dict"]], "set_dtype_configs() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_dtype_configs"]], "set_fused_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_fused_module"]], "set_fuser_method() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_fuser_method"]], "set_observation_type() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_observation_type"]], "set_pattern() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_pattern"]], "set_qat_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_qat_module"]], "set_reference_quantized_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_reference_quantized_module"]], "set_root_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_root_module"]], "to_dict() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.to_dict"]], "dtypeconfig (class in torch.ao.quantization.backend_config)": [[797, "torch.ao.quantization.backend_config.DTypeConfig"]], "from_dict() (torch.ao.quantization.backend_config.dtypeconfig class method)": [[797, "torch.ao.quantization.backend_config.DTypeConfig.from_dict"]], "to_dict() (torch.ao.quantization.backend_config.dtypeconfig method)": [[797, "torch.ao.quantization.backend_config.DTypeConfig.to_dict"]], "dtypewithconstraints (class in torch.ao.quantization.backend_config)": [[798, "torch.ao.quantization.backend_config.DTypeWithConstraints"]], "input_output_not_observed (torch.ao.quantization.backend_config.observationtype attribute)": [[799, "torch.ao.quantization.backend_config.ObservationType.INPUT_OUTPUT_NOT_OBSERVED"]], "output_share_observer_with_input (torch.ao.quantization.backend_config.observationtype attribute)": [[799, "torch.ao.quantization.backend_config.ObservationType.OUTPUT_SHARE_OBSERVER_WITH_INPUT"]], "output_use_different_observer_as_input (torch.ao.quantization.backend_config.observationtype attribute)": [[799, "torch.ao.quantization.backend_config.ObservationType.OUTPUT_USE_DIFFERENT_OBSERVER_AS_INPUT"]], "observationtype (class in torch.ao.quantization.backend_config)": [[799, "torch.ao.quantization.backend_config.ObservationType"]], "convert (class in torch.ao.quantization)": [[800, "torch.ao.quantization.convert"]], "default_eval_fn (class in torch.ao.quantization)": [[801, "torch.ao.quantization.default_eval_fn"]], "fakequantize (class in torch.ao.quantization.fake_quantize)": [[802, "torch.ao.quantization.fake_quantize.FakeQuantize"]], "fakequantizebase (class in torch.ao.quantization.fake_quantize)": [[803, "torch.ao.quantization.fake_quantize.FakeQuantizeBase"]], "fixedqparamsfakequantize (class in torch.ao.quantization.fake_quantize)": [[804, 
"torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize"]], "extra_repr() (torch.ao.quantization.fake_quantize.fixedqparamsfakequantize method)": [[804, "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize.extra_repr"]], "fusedmovingavgobsfakequantize (class in torch.ao.quantization.fake_quantize)": [[805, "torch.ao.quantization.fake_quantize.FusedMovingAvgObsFakeQuantize"]], "default_fake_quant (in module torch.ao.quantization.fake_quantize)": [[806, "torch.ao.quantization.fake_quantize.default_fake_quant"]], "default_fused_act_fake_quant (in module torch.ao.quantization.fake_quantize)": [[807, "torch.ao.quantization.fake_quantize.default_fused_act_fake_quant"]], "default_fused_per_channel_wt_fake_quant (in module torch.ao.quantization.fake_quantize)": [[808, "torch.ao.quantization.fake_quantize.default_fused_per_channel_wt_fake_quant"]], "default_fused_wt_fake_quant (in module torch.ao.quantization.fake_quantize)": [[809, "torch.ao.quantization.fake_quantize.default_fused_wt_fake_quant"]], "default_histogram_fake_quant (in module torch.ao.quantization.fake_quantize)": [[810, "torch.ao.quantization.fake_quantize.default_histogram_fake_quant"]], "default_per_channel_weight_fake_quant (in module torch.ao.quantization.fake_quantize)": [[811, "torch.ao.quantization.fake_quantize.default_per_channel_weight_fake_quant"]], "default_weight_fake_quant (in module torch.ao.quantization.fake_quantize)": [[812, "torch.ao.quantization.fake_quantize.default_weight_fake_quant"]], "disable_fake_quant (class in torch.ao.quantization.fake_quantize)": [[813, "torch.ao.quantization.fake_quantize.disable_fake_quant"]], "disable_observer (class in torch.ao.quantization.fake_quantize)": [[814, "torch.ao.quantization.fake_quantize.disable_observer"]], "enable_fake_quant (class in torch.ao.quantization.fake_quantize)": [[815, "torch.ao.quantization.fake_quantize.enable_fake_quant"]], "enable_observer (class in torch.ao.quantization.fake_quantize)": [[816, "torch.ao.quantization.fake_quantize.enable_observer"]], "fuse_modules (class in torch.ao.quantization.fuse_modules)": [[817, "torch.ao.quantization.fuse_modules.fuse_modules"]], "convertcustomconfig (class in torch.ao.quantization.fx.custom_config)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.convertcustomconfig class method)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.from_dict"]], "set_observed_to_quantized_mapping() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.set_observed_to_quantized_mapping"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.set_preserved_attributes"]], "to_dict() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.to_dict"]], "fusecustomconfig (class in torch.ao.quantization.fx.custom_config)": [[819, "torch.ao.quantization.fx.custom_config.FuseCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.fusecustomconfig class method)": [[819, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.from_dict"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.fusecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.set_preserved_attributes"]], "to_dict() 
(torch.ao.quantization.fx.custom_config.fusecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.to_dict"]], "preparecustomconfig (class in torch.ao.quantization.fx.custom_config)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.preparecustomconfig class method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.from_dict"]], "set_float_to_observed_mapping() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_float_to_observed_mapping"]], "set_input_quantized_indexes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_input_quantized_indexes"]], "set_non_traceable_module_classes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_non_traceable_module_classes"]], "set_non_traceable_module_names() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_non_traceable_module_names"]], "set_output_quantized_indexes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_output_quantized_indexes"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_preserved_attributes"]], "set_standalone_module_class() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_standalone_module_class"]], "set_standalone_module_name() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_standalone_module_name"]], "to_dict() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.to_dict"]], "standalonemoduleconfigentry (class in torch.ao.quantization.fx.custom_config)": [[821, "torch.ao.quantization.fx.custom_config.StandaloneModuleConfigEntry"]], "histogramobserver (class in torch.ao.quantization.observer)": [[822, "torch.ao.quantization.observer.HistogramObserver"]], "minmaxobserver (class in torch.ao.quantization.observer)": [[823, "torch.ao.quantization.observer.MinMaxObserver"]], "calculate_qparams() (torch.ao.quantization.observer.minmaxobserver method)": [[823, "torch.ao.quantization.observer.MinMaxObserver.calculate_qparams"]], "forward() (torch.ao.quantization.observer.minmaxobserver method)": [[823, "torch.ao.quantization.observer.MinMaxObserver.forward"]], "reset_min_max_vals() (torch.ao.quantization.observer.minmaxobserver method)": [[823, "torch.ao.quantization.observer.MinMaxObserver.reset_min_max_vals"]], "movingaverageminmaxobserver (class in torch.ao.quantization.observer)": [[824, "torch.ao.quantization.observer.MovingAverageMinMaxObserver"]], "movingaverageperchannelminmaxobserver (class in torch.ao.quantization.observer)": [[825, "torch.ao.quantization.observer.MovingAveragePerChannelMinMaxObserver"]], "noopobserver (class in torch.ao.quantization.observer)": [[826, "torch.ao.quantization.observer.NoopObserver"]], "observerbase (class in 
torch.ao.quantization.observer)": [[827, "torch.ao.quantization.observer.ObserverBase"]], "with_args() (torch.ao.quantization.observer.observerbase class method)": [[827, "torch.ao.quantization.observer.ObserverBase.with_args"]], "with_callable_args() (torch.ao.quantization.observer.observerbase class method)": [[827, "torch.ao.quantization.observer.ObserverBase.with_callable_args"]], "perchannelminmaxobserver (class in torch.ao.quantization.observer)": [[828, "torch.ao.quantization.observer.PerChannelMinMaxObserver"]], "reset_min_max_vals() (torch.ao.quantization.observer.perchannelminmaxobserver method)": [[828, "torch.ao.quantization.observer.PerChannelMinMaxObserver.reset_min_max_vals"]], "placeholderobserver (class in torch.ao.quantization.observer)": [[829, "torch.ao.quantization.observer.PlaceholderObserver"]], "recordingobserver (class in torch.ao.quantization.observer)": [[830, "torch.ao.quantization.observer.RecordingObserver"]], "default_debug_observer (in module torch.ao.quantization.observer)": [[831, "torch.ao.quantization.observer.default_debug_observer"]], "default_dynamic_quant_observer (in module torch.ao.quantization.observer)": [[832, "torch.ao.quantization.observer.default_dynamic_quant_observer"]], "default_float_qparams_observer (in module torch.ao.quantization.observer)": [[833, "torch.ao.quantization.observer.default_float_qparams_observer"]], "default_histogram_observer (in module torch.ao.quantization.observer)": [[834, "torch.ao.quantization.observer.default_histogram_observer"]], "default_observer (in module torch.ao.quantization.observer)": [[835, "torch.ao.quantization.observer.default_observer"]], "default_per_channel_weight_observer (in module torch.ao.quantization.observer)": [[836, "torch.ao.quantization.observer.default_per_channel_weight_observer"]], "default_placeholder_observer (in module torch.ao.quantization.observer)": [[837, "torch.ao.quantization.observer.default_placeholder_observer"]], "default_weight_observer (in module torch.ao.quantization.observer)": [[838, "torch.ao.quantization.observer.default_weight_observer"]], "get_observer_state_dict (class in torch.ao.quantization.observer)": [[839, "torch.ao.quantization.observer.get_observer_state_dict"]], "load_observer_state_dict (class in torch.ao.quantization.observer)": [[840, "torch.ao.quantization.observer.load_observer_state_dict"]], "prepare (class in torch.ao.quantization)": [[841, "torch.ao.quantization.prepare"]], "prepare_qat (class in torch.ao.quantization)": [[842, "torch.ao.quantization.prepare_qat"]], "propagate_qconfig_ (class in torch.ao.quantization)": [[843, "torch.ao.quantization.propagate_qconfig_"]], "model_is_exported (class in torch.ao.quantization.pt2e.export_utils)": [[844, "torch.ao.quantization.pt2e.export_utils.model_is_exported"]], "qconfig (class in torch.ao.quantization.qconfig)": [[845, "torch.ao.quantization.qconfig.QConfig"]], "default_activation_only_qconfig (in module torch.ao.quantization.qconfig)": [[846, "torch.ao.quantization.qconfig.default_activation_only_qconfig"]], "default_debug_qconfig (in module torch.ao.quantization.qconfig)": [[847, "torch.ao.quantization.qconfig.default_debug_qconfig"]], "default_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[848, "torch.ao.quantization.qconfig.default_dynamic_qconfig"]], "default_per_channel_qconfig (in module torch.ao.quantization.qconfig)": [[849, "torch.ao.quantization.qconfig.default_per_channel_qconfig"]], "default_qat_qconfig (in module torch.ao.quantization.qconfig)": [[850, 
"torch.ao.quantization.qconfig.default_qat_qconfig"]], "default_qat_qconfig_v2 (in module torch.ao.quantization.qconfig)": [[851, "torch.ao.quantization.qconfig.default_qat_qconfig_v2"]], "default_qconfig (in module torch.ao.quantization.qconfig)": [[852, "torch.ao.quantization.qconfig.default_qconfig"]], "default_weight_only_qconfig (in module torch.ao.quantization.qconfig)": [[853, "torch.ao.quantization.qconfig.default_weight_only_qconfig"]], "float16_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[854, "torch.ao.quantization.qconfig.float16_dynamic_qconfig"]], "float16_static_qconfig (in module torch.ao.quantization.qconfig)": [[855, "torch.ao.quantization.qconfig.float16_static_qconfig"]], "float_qparams_weight_only_qconfig (in module torch.ao.quantization.qconfig)": [[856, "torch.ao.quantization.qconfig.float_qparams_weight_only_qconfig"]], "per_channel_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[857, "torch.ao.quantization.qconfig.per_channel_dynamic_qconfig"]], "qconfigmapping (class in torch.ao.quantization.qconfig_mapping)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping"]], "from_dict() (torch.ao.quantization.qconfig_mapping.qconfigmapping class method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.from_dict"]], "set_global() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_global"]], "set_module_name() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name"]], "set_module_name_object_type_order() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name_object_type_order"]], "set_module_name_regex() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name_regex"]], "set_object_type() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_object_type"]], "to_dict() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.to_dict"]], "get_default_qat_qconfig_mapping (class in torch.ao.quantization.qconfig_mapping)": [[859, "torch.ao.quantization.qconfig_mapping.get_default_qat_qconfig_mapping"]], "get_default_qconfig_mapping (class in torch.ao.quantization.qconfig_mapping)": [[860, "torch.ao.quantization.qconfig_mapping.get_default_qconfig_mapping"]], "quantize (class in torch.ao.quantization)": [[861, "torch.ao.quantization.quantize"]], "quantize_dynamic (class in torch.ao.quantization)": [[862, "torch.ao.quantization.quantize_dynamic"]], "convert_fx (class in torch.ao.quantization.quantize_fx)": [[863, "torch.ao.quantization.quantize_fx.convert_fx"]], "fuse_fx (class in torch.ao.quantization.quantize_fx)": [[864, "torch.ao.quantization.quantize_fx.fuse_fx"]], "prepare_fx (class in torch.ao.quantization.quantize_fx)": [[865, "torch.ao.quantization.quantize_fx.prepare_fx"]], "prepare_qat_fx (class in torch.ao.quantization.quantize_fx)": [[866, "torch.ao.quantization.quantize_fx.prepare_qat_fx"]], "quantize_qat (class in torch.ao.quantization)": [[867, "torch.ao.quantization.quantize_qat"]], "swap_module (class in torch.ao.quantization)": [[868, "torch.ao.quantization.swap_module"]], "arange() (in module torch)": [[869, "torch.arange"]], "arccos() (in 
module torch)": [[870, "torch.arccos"]], "arccosh() (in module torch)": [[871, "torch.arccosh"]], "arcsin() (in module torch)": [[872, "torch.arcsin"]], "arcsinh() (in module torch)": [[873, "torch.arcsinh"]], "arctan() (in module torch)": [[874, "torch.arctan"]], "arctan2() (in module torch)": [[875, "torch.arctan2"]], "arctanh() (in module torch)": [[876, "torch.arctanh"]], "are_deterministic_algorithms_enabled() (in module torch)": [[877, "torch.are_deterministic_algorithms_enabled"]], "argmax() (in module torch)": [[878, "torch.argmax"]], "argmin() (in module torch)": [[879, "torch.argmin"]], "argsort() (in module torch)": [[880, "torch.argsort"]], "argwhere() (in module torch)": [[881, "torch.argwhere"]], "as_strided() (in module torch)": [[882, "torch.as_strided"]], "as_tensor() (in module torch)": [[883, "torch.as_tensor"]], "asarray() (in module torch)": [[884, "torch.asarray"]], "asin() (in module torch)": [[885, "torch.asin"]], "asinh() (in module torch)": [[886, "torch.asinh"]], "atan() (in module torch)": [[887, "torch.atan"]], "atan2() (in module torch)": [[888, "torch.atan2"]], "atanh() (in module torch)": [[889, "torch.atanh"]], "atleast_1d() (in module torch)": [[890, "torch.atleast_1d"]], "atleast_2d() (in module torch)": [[891, "torch.atleast_2d"]], "atleast_3d() (in module torch)": [[892, "torch.atleast_3d"]], "backward() (torch.autograd.function static method)": [[893, "torch.autograd.Function.backward"]], "forward() (torch.autograd.function static method)": [[894, "torch.autograd.Function.forward"]], "jvp() (torch.autograd.function static method)": [[895, "torch.autograd.Function.jvp"]], "vmap() (torch.autograd.function static method)": [[896, "torch.autograd.Function.vmap"]], "backward() (in module torch.autograd)": [[897, "torch.autograd.backward"]], "unpackeddualtensor (class in torch.autograd.forward_ad)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor"]], "count() (torch.autograd.forward_ad.unpackeddualtensor method)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor.count"]], "index() (torch.autograd.forward_ad.unpackeddualtensor method)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor.index"]], "primal (torch.autograd.forward_ad.unpackeddualtensor attribute)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor.primal"]], "tangent (torch.autograd.forward_ad.unpackeddualtensor attribute)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor.tangent"]], "dual_level (class in torch.autograd.forward_ad)": [[899, "torch.autograd.forward_ad.dual_level"]], "enter_dual_level() (in module torch.autograd.forward_ad)": [[900, "torch.autograd.forward_ad.enter_dual_level"]], "exit_dual_level() (in module torch.autograd.forward_ad)": [[901, "torch.autograd.forward_ad.exit_dual_level"]], "make_dual() (in module torch.autograd.forward_ad)": [[902, "torch.autograd.forward_ad.make_dual"]], "unpack_dual() (in module torch.autograd.forward_ad)": [[903, "torch.autograd.forward_ad.unpack_dual"]], "backwardcfunction (class in torch.autograd.function)": [[904, "torch.autograd.function.BackwardCFunction"]], "apply() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.apply"]], "apply_jvp() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.apply_jvp"]], "mark_dirty() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.backwardcfunction method)": [[904, 
"torch.autograd.function.BackwardCFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.save_for_forward"]], "set_materialize_grads() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.set_materialize_grads"]], "mark_dirty() (torch.autograd.function.functionctx method)": [[905, "torch.autograd.function.FunctionCtx.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.functionctx method)": [[906, "torch.autograd.function.FunctionCtx.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.functionctx method)": [[907, "torch.autograd.function.FunctionCtx.save_for_backward"]], "set_materialize_grads() (torch.autograd.function.functionctx method)": [[908, "torch.autograd.function.FunctionCtx.set_materialize_grads"]], "inplacefunction (class in torch.autograd.function)": [[909, "torch.autograd.function.InplaceFunction"]], "backward() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.backward"]], "forward() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.forward"]], "jvp() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.jvp"]], "mark_dirty() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.save_for_forward"]], "set_materialize_grads() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.set_materialize_grads"]], "setup_context() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.setup_context"]], "vjp() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.vjp"]], "vmap() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.vmap"]], "nestediofunction (class in torch.autograd.function)": [[910, "torch.autograd.function.NestedIOFunction"]], "backward() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.backward"]], "backward_extended() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.backward_extended"]], "forward() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.forward"]], "forward_extended() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.forward_extended"]], "jvp() (torch.autograd.function.nestediofunction static method)": [[910, "torch.autograd.function.NestedIOFunction.jvp"]], "mark_dirty() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.mark_dirty"]], 
"mark_non_differentiable() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.save_for_forward"]], "saved_tensors (torch.autograd.function.nestediofunction property)": [[910, "torch.autograd.function.NestedIOFunction.saved_tensors"]], "set_materialize_grads() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.set_materialize_grads"]], "setup_context() (torch.autograd.function.nestediofunction static method)": [[910, "torch.autograd.function.NestedIOFunction.setup_context"]], "vjp() (torch.autograd.function.nestediofunction static method)": [[910, "torch.autograd.function.NestedIOFunction.vjp"]], "vmap() (torch.autograd.function.nestediofunction static method)": [[910, "torch.autograd.function.NestedIOFunction.vmap"]], "once_differentiable() (in module torch.autograd.function)": [[911, "torch.autograd.function.once_differentiable"]], "hessian() (in module torch.autograd.functional)": [[912, "torch.autograd.functional.hessian"]], "hvp() (in module torch.autograd.functional)": [[913, "torch.autograd.functional.hvp"]], "jacobian() (in module torch.autograd.functional)": [[914, "torch.autograd.functional.jacobian"]], "jvp() (in module torch.autograd.functional)": [[915, "torch.autograd.functional.jvp"]], "vhp() (in module torch.autograd.functional)": [[916, "torch.autograd.functional.vhp"]], "vjp() (in module torch.autograd.functional)": [[917, "torch.autograd.functional.vjp"]], "grad() (in module torch.autograd)": [[918, "torch.autograd.grad"]], "clone() (torch.autograd.grad_mode.inference_mode method)": [[919, "torch.autograd.grad_mode.inference_mode.clone"]], "inference_mode (class in torch.autograd.grad_mode)": [[919, "torch.autograd.grad_mode.inference_mode"]], "clone() (torch.autograd.grad_mode.set_grad_enabled method)": [[920, "torch.autograd.grad_mode.set_grad_enabled.clone"]], "set_grad_enabled (class in torch.autograd.grad_mode)": [[920, "torch.autograd.grad_mode.set_grad_enabled"]], "clone() (torch.autograd.grad_mode.set_multithreading_enabled method)": [[921, "torch.autograd.grad_mode.set_multithreading_enabled.clone"]], "set_multithreading_enabled (class in torch.autograd.grad_mode)": [[921, "torch.autograd.grad_mode.set_multithreading_enabled"]], "gradcheckerror": [[922, "torch.autograd.gradcheck.GradcheckError"]], "gradcheck() (in module torch.autograd.gradcheck)": [[923, "torch.autograd.gradcheck.gradcheck"]], "gradgradcheck() (in module torch.autograd.gradcheck)": [[924, "torch.autograd.gradcheck.gradgradcheck"]], "metadata() (torch.autograd.graph.node method)": [[925, "torch.autograd.graph.Node.metadata"]], "name() (torch.autograd.graph.node method)": [[926, "torch.autograd.graph.Node.name"]], "next_functions (torch.autograd.graph.node property)": [[927, "torch.autograd.graph.Node.next_functions"]], "register_hook() (torch.autograd.graph.node method)": [[928, "torch.autograd.graph.Node.register_hook"]], "register_prehook() (torch.autograd.graph.node method)": [[929, "torch.autograd.graph.Node.register_prehook"]], "increment_version() (in module torch.autograd.graph)": [[930, "torch.autograd.graph.increment_version"]], "enforceunique (class in torch.autograd.profiler)": [[931, 
"torch.autograd.profiler.EnforceUnique"]], "see() (torch.autograd.profiler.enforceunique method)": [[931, "torch.autograd.profiler.EnforceUnique.see"]], "kinetosteptracker (class in torch.autograd.profiler)": [[932, "torch.autograd.profiler.KinetoStepTracker"]], "current_step() (torch.autograd.profiler.kinetosteptracker class method)": [[932, "torch.autograd.profiler.KinetoStepTracker.current_step"]], "erase_step_count() (torch.autograd.profiler.kinetosteptracker class method)": [[932, "torch.autograd.profiler.KinetoStepTracker.erase_step_count"]], "increment_step() (torch.autograd.profiler.kinetosteptracker class method)": [[932, "torch.autograd.profiler.KinetoStepTracker.increment_step"]], "init_step_count() (torch.autograd.profiler.kinetosteptracker class method)": [[932, "torch.autograd.profiler.KinetoStepTracker.init_step_count"]], "load_nvprof() (in module torch.autograd.profiler)": [[933, "torch.autograd.profiler.load_nvprof"]], "parse_nvprof_trace() (in module torch.autograd.profiler)": [[934, "torch.autograd.profiler.parse_nvprof_trace"]], "export_chrome_trace() (torch.autograd.profiler.profile method)": [[935, "torch.autograd.profiler.profile.export_chrome_trace"]], "key_averages() (torch.autograd.profiler.profile method)": [[936, "torch.autograd.profiler.profile.key_averages"]], "self_cpu_time_total (torch.autograd.profiler.profile property)": [[937, "torch.autograd.profiler.profile.self_cpu_time_total"]], "total_average() (torch.autograd.profiler.profile method)": [[938, "torch.autograd.profiler.profile.total_average"]], "record_function (class in torch.autograd.profiler)": [[939, "torch.autograd.profiler.record_function"]], "interval (class in torch.autograd.profiler_util)": [[940, "torch.autograd.profiler_util.Interval"]], "elapsed_us() (torch.autograd.profiler_util.interval method)": [[940, "torch.autograd.profiler_util.Interval.elapsed_us"]], "kernel (class in torch.autograd.profiler_util)": [[941, "torch.autograd.profiler_util.Kernel"]], "count() (torch.autograd.profiler_util.kernel method)": [[941, "torch.autograd.profiler_util.Kernel.count"]], "device (torch.autograd.profiler_util.kernel attribute)": [[941, "torch.autograd.profiler_util.Kernel.device"]], "duration (torch.autograd.profiler_util.kernel attribute)": [[941, "torch.autograd.profiler_util.Kernel.duration"]], "index() (torch.autograd.profiler_util.kernel method)": [[941, "torch.autograd.profiler_util.Kernel.index"]], "name (torch.autograd.profiler_util.kernel attribute)": [[941, "torch.autograd.profiler_util.Kernel.name"]], "memrecordsacc (class in torch.autograd.profiler_util)": [[942, "torch.autograd.profiler_util.MemRecordsAcc"]], "in_interval() (torch.autograd.profiler_util.memrecordsacc method)": [[942, "torch.autograd.profiler_util.MemRecordsAcc.in_interval"]], "stringtable (class in torch.autograd.profiler_util)": [[943, "torch.autograd.profiler_util.StringTable"]], "clear() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.clear"]], "copy() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.copy"]], "default_factory (torch.autograd.profiler_util.stringtable attribute)": [[943, "torch.autograd.profiler_util.StringTable.default_factory"]], "fromkeys() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.fromkeys"]], "get() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.get"]], "items() 
(torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.items"]], "keys() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.keys"]], "pop() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.pop"]], "popitem() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.popitem"]], "setdefault() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.setdefault"]], "update() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.update"]], "values() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.values"]], "baddbmm() (in module torch)": [[944, "torch.baddbmm"]], "bartlett_window() (in module torch)": [[945, "torch.bartlett_window"]], "bernoulli() (in module torch)": [[946, "torch.bernoulli"]], "bincount() (in module torch)": [[947, "torch.bincount"]], "bitwise_and() (in module torch)": [[948, "torch.bitwise_and"]], "bitwise_left_shift() (in module torch)": [[949, "torch.bitwise_left_shift"]], "bitwise_not() (in module torch)": [[950, "torch.bitwise_not"]], "bitwise_or() (in module torch)": [[951, "torch.bitwise_or"]], "bitwise_right_shift() (in module torch)": [[952, "torch.bitwise_right_shift"]], "bitwise_xor() (in module torch)": [[953, "torch.bitwise_xor"]], "blackman_window() (in module torch)": [[954, "torch.blackman_window"]], "block_diag() (in module torch)": [[955, "torch.block_diag"]], "bmm() (in module torch)": [[956, "torch.bmm"]], "broadcast_shapes() (in module torch)": [[957, "torch.broadcast_shapes"]], "broadcast_tensors() (in module torch)": [[958, "torch.broadcast_tensors"]], "broadcast_to() (in module torch)": [[959, "torch.broadcast_to"]], "bucketize() (in module torch)": [[960, "torch.bucketize"]], "can_cast() (in module torch)": [[961, "torch.can_cast"]], "cartesian_prod() (in module torch)": [[962, "torch.cartesian_prod"]], "cat() (in module torch)": [[963, "torch.cat"]], "cdist() (in module torch)": [[964, "torch.cdist"]], "ceil() (in module torch)": [[965, "torch.ceil"]], "chain_matmul() (in module torch)": [[966, "torch.chain_matmul"]], "cholesky() (in module torch)": [[967, "torch.cholesky"]], "cholesky_inverse() (in module torch)": [[968, "torch.cholesky_inverse"]], "cholesky_solve() (in module torch)": [[969, "torch.cholesky_solve"]], "chunk() (in module torch)": [[970, "torch.chunk"]], "clamp() (in module torch)": [[971, "torch.clamp"]], "clip() (in module torch)": [[972, "torch.clip"]], "clone() (in module torch)": [[973, "torch.clone"]], "column_stack() (in module torch)": [[974, "torch.column_stack"]], "combinations() (in module torch)": [[975, "torch.combinations"]], "compile() (in module torch)": [[976, "torch.compile"]], "compiled_with_cxx11_abi() (in module torch)": [[977, "torch.compiled_with_cxx11_abi"]], "allow_in_graph() (in module torch.compiler)": [[978, "torch.compiler.allow_in_graph"]], "assume_constant_result() (in module torch.compiler)": [[979, "torch.compiler.assume_constant_result"]], "compile() (in module torch.compiler)": [[980, "torch.compiler.compile"]], "cudagraph_mark_step_begin() (in module torch.compiler)": [[981, "torch.compiler.cudagraph_mark_step_begin"]], "disable() (in module torch.compiler)": [[982, "torch.compiler.disable"]], "is_compiling() (in module torch.compiler)": [[983, 
"torch.compiler.is_compiling"]], "is_dynamo_compiling() (in module torch.compiler)": [[984, "torch.compiler.is_dynamo_compiling"]], "list_backends() (in module torch.compiler)": [[985, "torch.compiler.list_backends"]], "reset() (in module torch.compiler)": [[986, "torch.compiler.reset"]], "complex() (in module torch)": [[987, "torch.complex"]], "concat() (in module torch)": [[988, "torch.concat"]], "concatenate() (in module torch)": [[989, "torch.concatenate"]], "cond() (in module torch)": [[990, "torch.cond"]], "conj() (in module torch)": [[991, "torch.conj"]], "conj_physical() (in module torch)": [[992, "torch.conj_physical"]], "copysign() (in module torch)": [[993, "torch.copysign"]], "corrcoef() (in module torch)": [[994, "torch.corrcoef"]], "cos() (in module torch)": [[995, "torch.cos"]], "cosh() (in module torch)": [[996, "torch.cosh"]], "count_nonzero() (in module torch)": [[997, "torch.count_nonzero"]], "cov() (in module torch)": [[998, "torch.cov"]], "stream (class in torch.cpu)": [[999, "torch.cpu.Stream"]], "streamcontext (class in torch.cpu)": [[1000, "torch.cpu.StreamContext"]], "current_device() (in module torch.cpu)": [[1001, "torch.cpu.current_device"]], "current_stream() (in module torch.cpu)": [[1002, "torch.cpu.current_stream"]], "device_count() (in module torch.cpu)": [[1003, "torch.cpu.device_count"]], "is_available() (in module torch.cpu)": [[1004, "torch.cpu.is_available"]], "set_device() (in module torch.cpu)": [[1005, "torch.cpu.set_device"]], "stream() (in module torch.cpu)": [[1006, "torch.cpu.stream"]], "synchronize() (in module torch.cpu)": [[1007, "torch.cpu.synchronize"]], "cross() (in module torch)": [[1008, "torch.cross"]], "cudagraph (class in torch.cuda)": [[1009, "torch.cuda.CUDAGraph"]], "capture_begin() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.capture_begin"]], "capture_end() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.capture_end"]], "debug_dump() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.debug_dump"]], "enable_debug_mode() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.enable_debug_mode"]], "pool() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.pool"]], "replay() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.replay"]], "reset() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.reset"]], "cudapluggableallocator (class in torch.cuda)": [[1010, "torch.cuda.CUDAPluggableAllocator"]], "event (class in torch.cuda)": [[1011, "torch.cuda.Event"]], "elapsed_time() (torch.cuda.event method)": [[1011, "torch.cuda.Event.elapsed_time"]], "from_ipc_handle() (torch.cuda.event class method)": [[1011, "torch.cuda.Event.from_ipc_handle"]], "ipc_handle() (torch.cuda.event method)": [[1011, "torch.cuda.Event.ipc_handle"]], "query() (torch.cuda.event method)": [[1011, "torch.cuda.Event.query"]], "record() (torch.cuda.event method)": [[1011, "torch.cuda.Event.record"]], "synchronize() (torch.cuda.event method)": [[1011, "torch.cuda.Event.synchronize"]], "wait() (torch.cuda.event method)": [[1011, "torch.cuda.Event.wait"]], "externalstream (class in torch.cuda)": [[1012, "torch.cuda.ExternalStream"]], "query() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.query"]], "record_event() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.record_event"]], "synchronize() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.synchronize"]], "wait_event() (torch.cuda.externalstream method)": [[1012, 
"torch.cuda.ExternalStream.wait_event"]], "wait_stream() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.wait_stream"]], "outofmemoryerror": [[1013, "torch.cuda.OutOfMemoryError"]], "stream (class in torch.cuda)": [[1014, "torch.cuda.Stream"]], "query() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.query"]], "record_event() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.record_event"]], "synchronize() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.synchronize"]], "wait_event() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.wait_event"]], "wait_stream() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.wait_stream"]], "streamcontext (class in torch.cuda)": [[1015, "torch.cuda.StreamContext"]], "caching_allocator_alloc() (in module torch.cuda)": [[1016, "torch.cuda.caching_allocator_alloc"]], "caching_allocator_delete() (in module torch.cuda)": [[1017, "torch.cuda.caching_allocator_delete"]], "can_device_access_peer() (in module torch.cuda)": [[1018, "torch.cuda.can_device_access_peer"]], "change_current_allocator() (in module torch.cuda)": [[1019, "torch.cuda.change_current_allocator"]], "clock_rate() (in module torch.cuda)": [[1020, "torch.cuda.clock_rate"]], "broadcast() (in module torch.cuda.comm)": [[1021, "torch.cuda.comm.broadcast"]], "broadcast_coalesced() (in module torch.cuda.comm)": [[1022, "torch.cuda.comm.broadcast_coalesced"]], "gather() (in module torch.cuda.comm)": [[1023, "torch.cuda.comm.gather"]], "reduce_add() (in module torch.cuda.comm)": [[1024, "torch.cuda.comm.reduce_add"]], "scatter() (in module torch.cuda.comm)": [[1025, "torch.cuda.comm.scatter"]], "current_blas_handle() (in module torch.cuda)": [[1026, "torch.cuda.current_blas_handle"]], "current_device() (in module torch.cuda)": [[1027, "torch.cuda.current_device"]], "current_stream() (in module torch.cuda)": [[1028, "torch.cuda.current_stream"]], "default_stream() (in module torch.cuda)": [[1029, "torch.cuda.default_stream"]], "device (class in torch.cuda)": [[1030, "torch.cuda.device"]], "device_count() (in module torch.cuda)": [[1031, "torch.cuda.device_count"]], "device_of (class in torch.cuda)": [[1032, "torch.cuda.device_of"]], "empty_cache() (in module torch.cuda)": [[1033, "torch.cuda.empty_cache"]], "get_allocator_backend() (in module torch.cuda)": [[1034, "torch.cuda.get_allocator_backend"]], "get_arch_list() (in module torch.cuda)": [[1035, "torch.cuda.get_arch_list"]], "get_device_capability() (in module torch.cuda)": [[1036, "torch.cuda.get_device_capability"]], "get_device_name() (in module torch.cuda)": [[1037, "torch.cuda.get_device_name"]], "get_device_properties() (in module torch.cuda)": [[1038, "torch.cuda.get_device_properties"]], "get_gencode_flags() (in module torch.cuda)": [[1039, "torch.cuda.get_gencode_flags"]], "get_rng_state() (in module torch.cuda)": [[1040, "torch.cuda.get_rng_state"]], "get_rng_state_all() (in module torch.cuda)": [[1041, "torch.cuda.get_rng_state_all"]], "get_sync_debug_mode() (in module torch.cuda)": [[1042, "torch.cuda.get_sync_debug_mode"]], "graph (class in torch.cuda)": [[1043, "torch.cuda.graph"]], "graph_pool_handle() (in module torch.cuda)": [[1044, "torch.cuda.graph_pool_handle"]], "init() (in module torch.cuda)": [[1045, "torch.cuda.init"]], "initial_seed() (in module torch.cuda)": [[1046, "torch.cuda.initial_seed"]], "ipc_collect() (in module torch.cuda)": [[1047, "torch.cuda.ipc_collect"]], "is_available() (in module torch.cuda)": [[1048, "torch.cuda.is_available"]], 
"is_current_stream_capturing() (in module torch.cuda)": [[1049, "torch.cuda.is_current_stream_capturing"]], "is_initialized() (in module torch.cuda)": [[1050, "torch.cuda.is_initialized"]], "_create_jit_fn() (in module torch.cuda.jiterator)": [[1051, "torch.cuda.jiterator._create_jit_fn"]], "_create_multi_output_jit_fn() (in module torch.cuda.jiterator)": [[1052, "torch.cuda.jiterator._create_multi_output_jit_fn"]], "list_gpu_processes() (in module torch.cuda)": [[1053, "torch.cuda.list_gpu_processes"]], "make_graphed_callables() (in module torch.cuda)": [[1054, "torch.cuda.make_graphed_callables"]], "manual_seed() (in module torch.cuda)": [[1055, "torch.cuda.manual_seed"]], "manual_seed_all() (in module torch.cuda)": [[1056, "torch.cuda.manual_seed_all"]], "max_memory_allocated() (in module torch.cuda)": [[1057, "torch.cuda.max_memory_allocated"]], "max_memory_cached() (in module torch.cuda)": [[1058, "torch.cuda.max_memory_cached"]], "max_memory_reserved() (in module torch.cuda)": [[1059, "torch.cuda.max_memory_reserved"]], "mem_get_info() (in module torch.cuda)": [[1060, "torch.cuda.mem_get_info"]], "memory_allocated() (in module torch.cuda)": [[1061, "torch.cuda.memory_allocated"]], "memory_cached() (in module torch.cuda)": [[1062, "torch.cuda.memory_cached"]], "memory_reserved() (in module torch.cuda)": [[1063, "torch.cuda.memory_reserved"]], "memory_snapshot() (in module torch.cuda)": [[1064, "torch.cuda.memory_snapshot"]], "memory_stats() (in module torch.cuda)": [[1065, "torch.cuda.memory_stats"]], "memory_summary() (in module torch.cuda)": [[1066, "torch.cuda.memory_summary"]], "memory_usage() (in module torch.cuda)": [[1067, "torch.cuda.memory_usage"]], "mark() (in module torch.cuda.nvtx)": [[1068, "torch.cuda.nvtx.mark"]], "range() (in module torch.cuda.nvtx)": [[1069, "torch.cuda.nvtx.range"]], "range_pop() (in module torch.cuda.nvtx)": [[1070, "torch.cuda.nvtx.range_pop"]], "range_push() (in module torch.cuda.nvtx)": [[1071, "torch.cuda.nvtx.range_push"]], "power_draw() (in module torch.cuda)": [[1072, "torch.cuda.power_draw"]], "reset_max_memory_allocated() (in module torch.cuda)": [[1073, "torch.cuda.reset_max_memory_allocated"]], "reset_max_memory_cached() (in module torch.cuda)": [[1074, "torch.cuda.reset_max_memory_cached"]], "reset_peak_memory_stats() (in module torch.cuda)": [[1075, "torch.cuda.reset_peak_memory_stats"]], "seed() (in module torch.cuda)": [[1076, "torch.cuda.seed"]], "seed_all() (in module torch.cuda)": [[1077, "torch.cuda.seed_all"]], "set_device() (in module torch.cuda)": [[1078, "torch.cuda.set_device"]], "set_per_process_memory_fraction() (in module torch.cuda)": [[1079, "torch.cuda.set_per_process_memory_fraction"]], "set_rng_state() (in module torch.cuda)": [[1080, "torch.cuda.set_rng_state"]], "set_rng_state_all() (in module torch.cuda)": [[1081, "torch.cuda.set_rng_state_all"]], "set_stream() (in module torch.cuda)": [[1082, "torch.cuda.set_stream"]], "set_sync_debug_mode() (in module torch.cuda)": [[1083, "torch.cuda.set_sync_debug_mode"]], "stream() (in module torch.cuda)": [[1084, "torch.cuda.stream"]], "synchronize() (in module torch.cuda)": [[1085, "torch.cuda.synchronize"]], "temperature() (in module torch.cuda)": [[1086, "torch.cuda.temperature"]], "utilization() (in module torch.cuda)": [[1087, "torch.cuda.utilization"]], "cummax() (in module torch)": [[1088, "torch.cummax"]], "cummin() (in module torch)": [[1089, "torch.cummin"]], "cumprod() (in module torch)": [[1090, "torch.cumprod"]], "cumsum() (in module torch)": [[1091, 
"torch.cumsum"]], "cumulative_trapezoid() (in module torch)": [[1092, "torch.cumulative_trapezoid"]], "deg2rad() (in module torch)": [[1093, "torch.deg2rad"]], "dequantize() (in module torch)": [[1094, "torch.dequantize"]], "det() (in module torch)": [[1095, "torch.det"]], "diag() (in module torch)": [[1096, "torch.diag"]], "diag_embed() (in module torch)": [[1097, "torch.diag_embed"]], "diagflat() (in module torch)": [[1098, "torch.diagflat"]], "diagonal() (in module torch)": [[1099, "torch.diagonal"]], "diagonal_scatter() (in module torch)": [[1100, "torch.diagonal_scatter"]], "diff() (in module torch)": [[1101, "torch.diff"]], "digamma() (in module torch)": [[1102, "torch.digamma"]], "dist() (in module torch)": [[1103, "torch.dist"]], "div() (in module torch)": [[1104, "torch.div"]], "divide() (in module torch)": [[1105, "torch.divide"]], "dot() (in module torch)": [[1106, "torch.dot"]], "dsplit() (in module torch)": [[1107, "torch.dsplit"]], "dstack() (in module torch)": [[1108, "torch.dstack"]], "einsum() (in module torch)": [[1109, "torch.einsum"]], "empty() (in module torch)": [[1110, "torch.empty"]], "empty_like() (in module torch)": [[1111, "torch.empty_like"]], "empty_strided() (in module torch)": [[1112, "torch.empty_strided"]], "enable_grad (class in torch)": [[1113, "torch.enable_grad"]], "eq() (in module torch)": [[1114, "torch.eq"]], "equal() (in module torch)": [[1115, "torch.equal"]], "erf() (in module torch)": [[1116, "torch.erf"]], "erfc() (in module torch)": [[1117, "torch.erfc"]], "erfinv() (in module torch)": [[1118, "torch.erfinv"]], "exp() (in module torch)": [[1119, "torch.exp"]], "exp2() (in module torch)": [[1120, "torch.exp2"]], "expm1() (in module torch)": [[1121, "torch.expm1"]], "eye() (in module torch)": [[1122, "torch.eye"]], "fake_quantize_per_channel_affine() (in module torch)": [[1123, "torch.fake_quantize_per_channel_affine"]], "fake_quantize_per_tensor_affine() (in module torch)": [[1124, "torch.fake_quantize_per_tensor_affine"]], "fft() (in module torch.fft)": [[1125, "torch.fft.fft"]], "fft2() (in module torch.fft)": [[1126, "torch.fft.fft2"]], "fftfreq() (in module torch.fft)": [[1127, "torch.fft.fftfreq"]], "fftn() (in module torch.fft)": [[1128, "torch.fft.fftn"]], "fftshift() (in module torch.fft)": [[1129, "torch.fft.fftshift"]], "hfft() (in module torch.fft)": [[1130, "torch.fft.hfft"]], "hfft2() (in module torch.fft)": [[1131, "torch.fft.hfft2"]], "hfftn() (in module torch.fft)": [[1132, "torch.fft.hfftn"]], "ifft() (in module torch.fft)": [[1133, "torch.fft.ifft"]], "ifft2() (in module torch.fft)": [[1134, "torch.fft.ifft2"]], "ifftn() (in module torch.fft)": [[1135, "torch.fft.ifftn"]], "ifftshift() (in module torch.fft)": [[1136, "torch.fft.ifftshift"]], "ihfft() (in module torch.fft)": [[1137, "torch.fft.ihfft"]], "ihfft2() (in module torch.fft)": [[1138, "torch.fft.ihfft2"]], "ihfftn() (in module torch.fft)": [[1139, "torch.fft.ihfftn"]], "irfft() (in module torch.fft)": [[1140, "torch.fft.irfft"]], "irfft2() (in module torch.fft)": [[1141, "torch.fft.irfft2"]], "irfftn() (in module torch.fft)": [[1142, "torch.fft.irfftn"]], "rfft() (in module torch.fft)": [[1143, "torch.fft.rfft"]], "rfft2() (in module torch.fft)": [[1144, "torch.fft.rfft2"]], "rfftfreq() (in module torch.fft)": [[1145, "torch.fft.rfftfreq"]], "rfftn() (in module torch.fft)": [[1146, "torch.fft.rfftn"]], "fix() (in module torch)": [[1147, "torch.fix"]], "flatten() (in module torch)": [[1148, "torch.flatten"]], "flip() (in module torch)": [[1149, "torch.flip"]], "fliplr() 
(in module torch)": [[1150, "torch.fliplr"]], "flipud() (in module torch)": [[1151, "torch.flipud"]], "float_power() (in module torch)": [[1152, "torch.float_power"]], "floor() (in module torch)": [[1153, "torch.floor"]], "floor_divide() (in module torch)": [[1154, "torch.floor_divide"]], "fmax() (in module torch)": [[1155, "torch.fmax"]], "fmin() (in module torch)": [[1156, "torch.fmin"]], "fmod() (in module torch)": [[1157, "torch.fmod"]], "frac() (in module torch)": [[1158, "torch.frac"]], "frexp() (in module torch)": [[1159, "torch.frexp"]], "from_dlpack() (in module torch)": [[1160, "torch.from_dlpack"]], "from_file() (in module torch)": [[1161, "torch.from_file"]], "from_numpy() (in module torch)": [[1162, "torch.from_numpy"]], "frombuffer() (in module torch)": [[1163, "torch.frombuffer"]], "full() (in module torch)": [[1164, "torch.full"]], "full_like() (in module torch)": [[1165, "torch.full_like"]], "functional_call() (in module torch.func)": [[1166, "torch.func.functional_call"]], "functionalize() (in module torch.func)": [[1167, "torch.func.functionalize"]], "grad() (in module torch.func)": [[1168, "torch.func.grad"]], "grad_and_value() (in module torch.func)": [[1169, "torch.func.grad_and_value"]], "hessian() (in module torch.func)": [[1170, "torch.func.hessian"]], "jacfwd() (in module torch.func)": [[1171, "torch.func.jacfwd"]], "jacrev() (in module torch.func)": [[1172, "torch.func.jacrev"]], "jvp() (in module torch.func)": [[1173, "torch.func.jvp"]], "linearize() (in module torch.func)": [[1174, "torch.func.linearize"]], "replace_all_batch_norm_modules_() (in module torch.func)": [[1175, "torch.func.replace_all_batch_norm_modules_"]], "stack_module_state() (in module torch.func)": [[1176, "torch.func.stack_module_state"]], "vjp() (in module torch.func)": [[1177, "torch.func.vjp"]], "vmap() (in module torch.func)": [[1178, "torch.func.vmap"]], "callmethodkey (class in torch.fx.experimental.symbolic_shapes)": [[1179, "torch.fx.experimental.symbolic_shapes.CallMethodKey"]], "get() (torch.fx.experimental.symbolic_shapes.callmethodkey method)": [[1179, "torch.fx.experimental.symbolic_shapes.CallMethodKey.get"]], "convertintkey (class in torch.fx.experimental.symbolic_shapes)": [[1180, "torch.fx.experimental.symbolic_shapes.ConvertIntKey"]], "get() (torch.fx.experimental.symbolic_shapes.convertintkey method)": [[1180, "torch.fx.experimental.symbolic_shapes.ConvertIntKey.get"]], "dimconstraints (class in torch.fx.experimental.symbolic_shapes)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints"]], "add() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.add"]], "add_equality() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.add_equality"]], "forced_specializations() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.forced_specializations"]], "prettify_results() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.prettify_results"]], "remove_redundant_dynamic_results() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.remove_redundant_dynamic_results"]], "rewrite_with_congruences() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, 
"torch.fx.experimental.symbolic_shapes.DimConstraints.rewrite_with_congruences"]], "solve() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.solve"]], "dimdynamic (class in torch.fx.experimental.symbolic_shapes)": [[1182, "torch.fx.experimental.symbolic_shapes.DimDynamic"]], "dividebykey (class in torch.fx.experimental.symbolic_shapes)": [[1183, "torch.fx.experimental.symbolic_shapes.DivideByKey"]], "get() (torch.fx.experimental.symbolic_shapes.dividebykey method)": [[1183, "torch.fx.experimental.symbolic_shapes.DivideByKey.get"]], "equalityconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1184, "torch.fx.experimental.symbolic_shapes.EqualityConstraint"]], "innertensorkey (class in torch.fx.experimental.symbolic_shapes)": [[1185, "torch.fx.experimental.symbolic_shapes.InnerTensorKey"]], "get() (torch.fx.experimental.symbolic_shapes.innertensorkey method)": [[1185, "torch.fx.experimental.symbolic_shapes.InnerTensorKey.get"]], "propagateunbackedsymints (class in torch.fx.experimental.symbolic_shapes)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts"]], "boxed_run() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.boxed_run"]], "call_function() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_function"]], "call_method() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_method"]], "call_module() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_module"]], "fetch_args_kwargs_from_env() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.fetch_args_kwargs_from_env"]], "fetch_attr() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.fetch_attr"]], "get_attr() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.get_attr"]], "map_nodes_to_values() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.map_nodes_to_values"]], "output() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.output"]], "placeholder() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.placeholder"]], "run() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.run"]], "run_node() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.run_node"]], "relaxedunspecconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1187, "torch.fx.experimental.symbolic_shapes.RelaxedUnspecConstraint"]], "shapeenv (class in torch.fx.experimental.symbolic_shapes)": [[1188, 
"torch.fx.experimental.symbolic_shapes.ShapeEnv"]], "add_var_to_val() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.add_var_to_val"]], "bind_symbols() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.bind_symbols"]], "bound_sympy() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.bound_sympy"]], "check_equal() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.check_equal"]], "cleanup() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.cleanup"]], "create_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symbol"]], "create_symbolic_sizes_strides_storage_offset() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symbolic_sizes_strides_storage_offset"]], "create_symboolnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symboolnode"]], "create_symfloatnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symfloatnode"]], "create_symintnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symintnode"]], "create_unbacked_symbool() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symbool"]], "create_unbacked_symfloat() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symfloat"]], "create_unbacked_symint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symint"]], "create_unspecified_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unspecified_symbol"]], "create_unspecified_symint_and_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unspecified_symint_and_symbol"]], "defer_runtime_assert() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.defer_runtime_assert"]], "evaluate_expr() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_expr"]], "evaluate_guards_expression() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_guards_expression"]], "evaluate_guards_for_args() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_guards_for_args"]], "format_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.format_guards"]], "freeze() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.freeze"]], "freeze_runtime_asserts() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, 
"torch.fx.experimental.symbolic_shapes.ShapeEnv.freeze_runtime_asserts"]], "get_axioms() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_axioms"]], "get_implications() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_implications"]], "get_nontrivial_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_nontrivial_guards"]], "get_pruned_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_pruned_guards"]], "ignore_fresh_unbacked_symbols() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.ignore_fresh_unbacked_symbols"]], "is_unbacked_symint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.is_unbacked_symint"]], "produce_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.produce_guards"]], "produce_guards_expression() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.produce_guards_expression"]], "replace() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.replace"]], "set_unbacked_var_to_val() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.set_unbacked_var_to_val"]], "simplify() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.simplify"]], "size_hint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.size_hint"]], "suppress_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.suppress_guards"]], "shapeenvsettings (class in torch.fx.experimental.symbolic_shapes)": [[1189, "torch.fx.experimental.symbolic_shapes.ShapeEnvSettings"]], "statefulsymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1190, "torch.fx.experimental.symbolic_shapes.StatefulSymbolicContext"]], "statelesssymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1191, "torch.fx.experimental.symbolic_shapes.StatelessSymbolicContext"]], "strictminmaxconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1192, "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint"]], "render() (torch.fx.experimental.symbolic_shapes.strictminmaxconstraint method)": [[1192, "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint.render"]], "subclasssymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1193, "torch.fx.experimental.symbolic_shapes.SubclassSymbolicContext"]], "symboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1194, "torch.fx.experimental.symbolic_shapes.SymbolicContext"]], "canonicalize_bool_expr() (in module torch.fx.experimental.symbolic_shapes)": [[1195, "torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr"]], "check_consistent() (in module torch.fx.experimental.symbolic_shapes)": [[1196, "torch.fx.experimental.symbolic_shapes.check_consistent"]], "compute_unbacked_bindings() (in module torch.fx.experimental.symbolic_shapes)": [[1197, 
"torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings"]], "constrain_range() (in module torch.fx.experimental.symbolic_shapes)": [[1198, "torch.fx.experimental.symbolic_shapes.constrain_range"]], "constrain_unify() (in module torch.fx.experimental.symbolic_shapes)": [[1199, "torch.fx.experimental.symbolic_shapes.constrain_unify"]], "definitely_false() (in module torch.fx.experimental.symbolic_shapes)": [[1200, "torch.fx.experimental.symbolic_shapes.definitely_false"]], "definitely_true() (in module torch.fx.experimental.symbolic_shapes)": [[1201, "torch.fx.experimental.symbolic_shapes.definitely_true"]], "guard_size_oblivious() (in module torch.fx.experimental.symbolic_shapes)": [[1202, "torch.fx.experimental.symbolic_shapes.guard_size_oblivious"]], "has_free_symbols() (in module torch.fx.experimental.symbolic_shapes)": [[1203, "torch.fx.experimental.symbolic_shapes.has_free_symbols"]], "hint_int() (in module torch.fx.experimental.symbolic_shapes)": [[1204, "torch.fx.experimental.symbolic_shapes.hint_int"]], "is_concrete_bool() (in module torch.fx.experimental.symbolic_shapes)": [[1205, "torch.fx.experimental.symbolic_shapes.is_concrete_bool"]], "is_concrete_int() (in module torch.fx.experimental.symbolic_shapes)": [[1206, "torch.fx.experimental.symbolic_shapes.is_concrete_int"]], "lru_cache() (in module torch.fx.experimental.symbolic_shapes)": [[1207, "torch.fx.experimental.symbolic_shapes.lru_cache"]], "parallel_and() (in module torch.fx.experimental.symbolic_shapes)": [[1208, "torch.fx.experimental.symbolic_shapes.parallel_and"]], "parallel_or() (in module torch.fx.experimental.symbolic_shapes)": [[1209, "torch.fx.experimental.symbolic_shapes.parallel_or"]], "rebind_unbacked() (in module torch.fx.experimental.symbolic_shapes)": [[1210, "torch.fx.experimental.symbolic_shapes.rebind_unbacked"]], "resolve_unbacked_bindings() (in module torch.fx.experimental.symbolic_shapes)": [[1211, "torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings"]], "statically_known_true() (in module torch.fx.experimental.symbolic_shapes)": [[1212, "torch.fx.experimental.symbolic_shapes.statically_known_true"]], "sym_eq() (in module torch.fx.experimental.symbolic_shapes)": [[1213, "torch.fx.experimental.symbolic_shapes.sym_eq"]], "gather() (in module torch)": [[1214, "torch.gather"]], "gcd() (in module torch)": [[1215, "torch.gcd"]], "ge() (in module torch)": [[1216, "torch.ge"]], "geqrf() (in module torch)": [[1217, "torch.geqrf"]], "ger() (in module torch)": [[1218, "torch.ger"]], "get_default_device() (in module torch)": [[1219, "torch.get_default_device"]], "get_default_dtype() (in module torch)": [[1220, "torch.get_default_dtype"]], "get_deterministic_debug_mode() (in module torch)": [[1221, "torch.get_deterministic_debug_mode"]], "get_device_module() (in module torch)": [[1222, "torch.get_device_module"]], "get_float32_matmul_precision() (in module torch)": [[1223, "torch.get_float32_matmul_precision"]], "get_num_interop_threads() (in module torch)": [[1224, "torch.get_num_interop_threads"]], "get_num_threads() (in module torch)": [[1225, "torch.get_num_threads"]], "get_rng_state() (in module torch)": [[1226, "torch.get_rng_state"]], "gradient() (in module torch)": [[1227, "torch.gradient"]], "greater() (in module torch)": [[1228, "torch.greater"]], "greater_equal() (in module torch)": [[1229, "torch.greater_equal"]], "gt() (in module torch)": [[1230, "torch.gt"]], "hamming_window() (in module torch)": [[1231, "torch.hamming_window"]], "hann_window() (in module torch)": [[1232, 
"torch.hann_window"]], "heaviside() (in module torch)": [[1233, "torch.heaviside"]], "histc() (in module torch)": [[1234, "torch.histc"]], "histogram() (in module torch)": [[1235, "torch.histogram"]], "histogramdd() (in module torch)": [[1236, "torch.histogramdd"]], "hsplit() (in module torch)": [[1237, "torch.hsplit"]], "hspmm() (in module torch)": [[1238, "torch.hspmm"]], "hstack() (in module torch)": [[1239, "torch.hstack"]], "hypot() (in module torch)": [[1240, "torch.hypot"]], "i0() (in module torch)": [[1241, "torch.i0"]], "igamma() (in module torch)": [[1242, "torch.igamma"]], "igammac() (in module torch)": [[1243, "torch.igammac"]], "imag() (in module torch)": [[1244, "torch.imag"]], "index_add() (in module torch)": [[1245, "torch.index_add"]], "index_copy() (in module torch)": [[1246, "torch.index_copy"]], "index_reduce() (in module torch)": [[1247, "torch.index_reduce"]], "index_select() (in module torch)": [[1248, "torch.index_select"]], "initial_seed() (in module torch)": [[1249, "torch.initial_seed"]], "inner() (in module torch)": [[1250, "torch.inner"]], "inverse() (in module torch)": [[1251, "torch.inverse"]], "is_complex() (in module torch)": [[1252, "torch.is_complex"]], "is_conj() (in module torch)": [[1253, "torch.is_conj"]], "is_deterministic_algorithms_warn_only_enabled() (in module torch)": [[1254, "torch.is_deterministic_algorithms_warn_only_enabled"]], "is_floating_point() (in module torch)": [[1255, "torch.is_floating_point"]], "is_grad_enabled() (in module torch)": [[1256, "torch.is_grad_enabled"]], "is_inference_mode_enabled() (in module torch)": [[1257, "torch.is_inference_mode_enabled"]], "is_nonzero() (in module torch)": [[1258, "torch.is_nonzero"]], "is_storage() (in module torch)": [[1259, "torch.is_storage"]], "is_tensor() (in module torch)": [[1260, "torch.is_tensor"]], "is_warn_always_enabled() (in module torch)": [[1261, "torch.is_warn_always_enabled"]], "isclose() (in module torch)": [[1262, "torch.isclose"]], "isfinite() (in module torch)": [[1263, "torch.isfinite"]], "isin() (in module torch)": [[1264, "torch.isin"]], "isinf() (in module torch)": [[1265, "torch.isinf"]], "isnan() (in module torch)": [[1266, "torch.isnan"]], "isneginf() (in module torch)": [[1267, "torch.isneginf"]], "isposinf() (in module torch)": [[1268, "torch.isposinf"]], "isreal() (in module torch)": [[1269, "torch.isreal"]], "istft() (in module torch)": [[1270, "torch.istft"]], "attribute (class in torch.jit)": [[1271, "torch.jit.Attribute"]], "count() (torch.jit.attribute method)": [[1271, "torch.jit.Attribute.count"]], "index() (torch.jit.attribute method)": [[1271, "torch.jit.Attribute.index"]], "type (torch.jit.attribute attribute)": [[1271, "torch.jit.Attribute.type"]], "value (torch.jit.attribute attribute)": [[1271, "torch.jit.Attribute.value"]], "scriptfunction (class in torch.jit)": [[1272, "torch.jit.ScriptFunction"]], "get_debug_state() (torch.jit.scriptfunction method)": [[1272, "torch.jit.ScriptFunction.get_debug_state"]], "save() (torch.jit.scriptfunction method)": [[1272, "torch.jit.ScriptFunction.save"]], "save_to_buffer() (torch.jit.scriptfunction method)": [[1272, "torch.jit.ScriptFunction.save_to_buffer"]], "scriptmodule (class in torch.jit)": [[1273, "torch.jit.ScriptModule"]], "add_module() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.add_module"]], "apply() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.apply"]], "bfloat16() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.bfloat16"]], "buffers() 
(torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.buffers"]], "children() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.children"]], "code (torch.jit.scriptmodule property)": [[1273, "torch.jit.ScriptModule.code"]], "code_with_constants (torch.jit.scriptmodule property)": [[1273, "torch.jit.ScriptModule.code_with_constants"]], "compile() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.compile"]], "cpu() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.cpu"]], "cuda() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.cuda"]], "double() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.double"]], "eval() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.eval"]], "extra_repr() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.extra_repr"]], "float() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.float"]], "get_buffer() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.get_buffer"]], "get_extra_state() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.get_extra_state"]], "get_parameter() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.get_parameter"]], "get_submodule() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.get_submodule"]], "graph (torch.jit.scriptmodule property)": [[1273, "torch.jit.ScriptModule.graph"]], "half() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.half"]], "inlined_graph (torch.jit.scriptmodule property)": [[1273, "torch.jit.ScriptModule.inlined_graph"]], "ipu() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.ipu"]], "load_state_dict() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.load_state_dict"]], "modules() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.modules"]], "named_buffers() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.named_buffers"]], "named_children() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.named_children"]], "named_modules() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.named_modules"]], "named_parameters() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.named_parameters"]], "parameters() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.parameters"]], "register_backward_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_backward_hook"]], "register_buffer() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_buffer"]], "register_forward_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_forward_hook"]], "register_forward_pre_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_forward_pre_hook"]], "register_full_backward_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_full_backward_hook"]], "register_full_backward_pre_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_full_backward_pre_hook"]], "register_load_state_dict_post_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_load_state_dict_post_hook"]], "register_module() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_module"]], "register_parameter() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_parameter"]], "register_state_dict_pre_hook() 
(torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_state_dict_pre_hook"]], "requires_grad_() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.requires_grad_"]], "save() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.save"]], "set_extra_state() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.set_extra_state"]], "share_memory() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.share_memory"]], "state_dict() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.state_dict"]], "to() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.to"]], "to_empty() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.to_empty"]], "train() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.train"]], "type() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.type"]], "xpu() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.xpu"]], "zero_grad() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.zero_grad"]], "annotate() (in module torch.jit)": [[1274, "torch.jit.annotate"]], "enable_onednn_fusion() (in module torch.jit)": [[1275, "torch.jit.enable_onednn_fusion"]], "fork() (in module torch.jit)": [[1276, "torch.jit.fork"]], "freeze() (in module torch.jit)": [[1277, "torch.jit.freeze"]], "ignore() (in module torch.jit)": [[1278, "torch.jit.ignore"]], "interface() (in module torch.jit)": [[1279, "torch.jit.interface"]], "isinstance() (in module torch.jit)": [[1280, "torch.jit.isinstance"]], "load() (in module torch.jit)": [[1281, "torch.jit.load"]], "onednn_fusion_enabled() (in module torch.jit)": [[1282, "torch.jit.onednn_fusion_enabled"]], "optimize_for_inference() (in module torch.jit)": [[1283, "torch.jit.optimize_for_inference"]], "save() (in module torch.jit)": [[1284, "torch.jit.save"]], "script() (in module torch.jit)": [[1285, "torch.jit.script"]], "script_if_tracing() (in module torch.jit)": [[1286, "torch.jit.script_if_tracing"]], "set_fusion_strategy() (in module torch.jit)": [[1287, "torch.jit.set_fusion_strategy"]], "strict_fusion (class in torch.jit)": [[1288, "torch.jit.strict_fusion"]], "trace() (in module torch.jit)": [[1289, "torch.jit.trace"]], "trace_module() (in module torch.jit)": [[1290, "torch.jit.trace_module"]], "unused() (in module torch.jit)": [[1291, "torch.jit.unused"]], "wait() (in module torch.jit)": [[1292, "torch.jit.wait"]], "kaiser_window() (in module torch)": [[1293, "torch.kaiser_window"]], "kron() (in module torch)": [[1294, "torch.kron"]], "kthvalue() (in module torch)": [[1295, "torch.kthvalue"]], "lcm() (in module torch)": [[1296, "torch.lcm"]], "ldexp() (in module torch)": [[1297, "torch.ldexp"]], "le() (in module torch)": [[1298, "torch.le"]], "lerp() (in module torch)": [[1299, "torch.lerp"]], "less() (in module torch)": [[1300, "torch.less"]], "less_equal() (in module torch)": [[1301, "torch.less_equal"]], "lgamma() (in module torch)": [[1302, "torch.lgamma"]], "cholesky() (in module torch.linalg)": [[1303, "torch.linalg.cholesky"]], "cholesky_ex() (in module torch.linalg)": [[1304, "torch.linalg.cholesky_ex"]], "cond() (in module torch.linalg)": [[1305, "torch.linalg.cond"]], "cross() (in module torch.linalg)": [[1306, "torch.linalg.cross"]], "det() (in module torch.linalg)": [[1307, "torch.linalg.det"]], "diagonal() (in module torch.linalg)": [[1308, "torch.linalg.diagonal"]], "eig() (in module torch.linalg)": [[1309, "torch.linalg.eig"]], "eigh() (in module 
torch.linalg)": [[1310, "torch.linalg.eigh"]], "eigvals() (in module torch.linalg)": [[1311, "torch.linalg.eigvals"]], "eigvalsh() (in module torch.linalg)": [[1312, "torch.linalg.eigvalsh"]], "householder_product() (in module torch.linalg)": [[1313, "torch.linalg.householder_product"]], "inv() (in module torch.linalg)": [[1314, "torch.linalg.inv"]], "inv_ex() (in module torch.linalg)": [[1315, "torch.linalg.inv_ex"]], "ldl_factor() (in module torch.linalg)": [[1316, "torch.linalg.ldl_factor"]], "ldl_factor_ex() (in module torch.linalg)": [[1317, "torch.linalg.ldl_factor_ex"]], "ldl_solve() (in module torch.linalg)": [[1318, "torch.linalg.ldl_solve"]], "lstsq() (in module torch.linalg)": [[1319, "torch.linalg.lstsq"]], "lu() (in module torch.linalg)": [[1320, "torch.linalg.lu"]], "lu_factor() (in module torch.linalg)": [[1321, "torch.linalg.lu_factor"]], "lu_factor_ex() (in module torch.linalg)": [[1322, "torch.linalg.lu_factor_ex"]], "lu_solve() (in module torch.linalg)": [[1323, "torch.linalg.lu_solve"]], "matmul() (in module torch.linalg)": [[1324, "torch.linalg.matmul"]], "matrix_exp() (in module torch.linalg)": [[1325, "torch.linalg.matrix_exp"]], "matrix_norm() (in module torch.linalg)": [[1326, "torch.linalg.matrix_norm"]], "matrix_power() (in module torch.linalg)": [[1327, "torch.linalg.matrix_power"]], "matrix_rank() (in module torch.linalg)": [[1328, "torch.linalg.matrix_rank"]], "multi_dot() (in module torch.linalg)": [[1329, "torch.linalg.multi_dot"]], "norm() (in module torch.linalg)": [[1330, "torch.linalg.norm"]], "pinv() (in module torch.linalg)": [[1331, "torch.linalg.pinv"]], "qr() (in module torch.linalg)": [[1332, "torch.linalg.qr"]], "slogdet() (in module torch.linalg)": [[1333, "torch.linalg.slogdet"]], "solve() (in module torch.linalg)": [[1334, "torch.linalg.solve"]], "solve_ex() (in module torch.linalg)": [[1335, "torch.linalg.solve_ex"]], "solve_triangular() (in module torch.linalg)": [[1336, "torch.linalg.solve_triangular"]], "svd() (in module torch.linalg)": [[1337, "torch.linalg.svd"]], "svdvals() (in module torch.linalg)": [[1338, "torch.linalg.svdvals"]], "tensorinv() (in module torch.linalg)": [[1339, "torch.linalg.tensorinv"]], "tensorsolve() (in module torch.linalg)": [[1340, "torch.linalg.tensorsolve"]], "vander() (in module torch.linalg)": [[1341, "torch.linalg.vander"]], "vecdot() (in module torch.linalg)": [[1342, "torch.linalg.vecdot"]], "vector_norm() (in module torch.linalg)": [[1343, "torch.linalg.vector_norm"]], "linspace() (in module torch)": [[1344, "torch.linspace"]], "load() (in module torch)": [[1345, "torch.load"]], "lobpcg() (in module torch)": [[1346, "torch.lobpcg"]], "log() (in module torch)": [[1347, "torch.log"]], "log10() (in module torch)": [[1348, "torch.log10"]], "log1p() (in module torch)": [[1349, "torch.log1p"]], "log2() (in module torch)": [[1350, "torch.log2"]], "logaddexp() (in module torch)": [[1351, "torch.logaddexp"]], "logaddexp2() (in module torch)": [[1352, "torch.logaddexp2"]], "logcumsumexp() (in module torch)": [[1353, "torch.logcumsumexp"]], "logdet() (in module torch)": [[1354, "torch.logdet"]], "logical_and() (in module torch)": [[1355, "torch.logical_and"]], "logical_not() (in module torch)": [[1356, "torch.logical_not"]], "logical_or() (in module torch)": [[1357, "torch.logical_or"]], "logical_xor() (in module torch)": [[1358, "torch.logical_xor"]], "logit() (in module torch)": [[1359, "torch.logit"]], "logspace() (in module torch)": [[1360, "torch.logspace"]], "logsumexp() (in module torch)": [[1361, 
"torch.logsumexp"]], "lt() (in module torch)": [[1362, "torch.lt"]], "lu() (in module torch)": [[1363, "torch.lu"]], "lu_solve() (in module torch)": [[1364, "torch.lu_solve"]], "lu_unpack() (in module torch)": [[1365, "torch.lu_unpack"]], "manual_seed() (in module torch)": [[1366, "torch.manual_seed"]], "masked_select() (in module torch)": [[1367, "torch.masked_select"]], "matmul() (in module torch)": [[1368, "torch.matmul"]], "matrix_exp() (in module torch)": [[1369, "torch.matrix_exp"]], "matrix_power() (in module torch)": [[1370, "torch.matrix_power"]], "max() (in module torch)": [[1371, "torch.max"]], "maximum() (in module torch)": [[1372, "torch.maximum"]], "mean() (in module torch)": [[1373, "torch.mean"]], "median() (in module torch)": [[1374, "torch.median"]], "meshgrid() (in module torch)": [[1375, "torch.meshgrid"]], "min() (in module torch)": [[1376, "torch.min"]], "minimum() (in module torch)": [[1377, "torch.minimum"]], "mm() (in module torch)": [[1378, "torch.mm"]], "mode() (in module torch)": [[1379, "torch.mode"]], "moveaxis() (in module torch)": [[1380, "torch.moveaxis"]], "movedim() (in module torch)": [[1381, "torch.movedim"]], "current_allocated_memory() (in module torch.mps)": [[1382, "torch.mps.current_allocated_memory"]], "device_count() (in module torch.mps)": [[1383, "torch.mps.device_count"]], "driver_allocated_memory() (in module torch.mps)": [[1384, "torch.mps.driver_allocated_memory"]], "empty_cache() (in module torch.mps)": [[1385, "torch.mps.empty_cache"]], "event (class in torch.mps.event)": [[1386, "torch.mps.event.Event"]], "elapsed_time() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.elapsed_time"]], "query() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.query"]], "record() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.record"]], "synchronize() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.synchronize"]], "wait() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.wait"]], "get_rng_state() (in module torch.mps)": [[1387, "torch.mps.get_rng_state"]], "manual_seed() (in module torch.mps)": [[1388, "torch.mps.manual_seed"]], "profile() (in module torch.mps.profiler)": [[1389, "torch.mps.profiler.profile"]], "start() (in module torch.mps.profiler)": [[1390, "torch.mps.profiler.start"]], "stop() (in module torch.mps.profiler)": [[1391, "torch.mps.profiler.stop"]], "seed() (in module torch.mps)": [[1392, "torch.mps.seed"]], "set_per_process_memory_fraction() (in module torch.mps)": [[1393, "torch.mps.set_per_process_memory_fraction"]], "set_rng_state() (in module torch.mps)": [[1394, "torch.mps.set_rng_state"]], "synchronize() (in module torch.mps)": [[1395, "torch.mps.synchronize"]], "msort() (in module torch)": [[1396, "torch.msort"]], "deferredmtiacallerror": [[1397, "torch.mtia.DeferredMtiaCallError"]], "event (class in torch.mtia)": [[1398, "torch.mtia.Event"]], "stream (class in torch.mtia)": [[1399, "torch.mtia.Stream"]], "streamcontext (class in torch.mtia)": [[1400, "torch.mtia.StreamContext"]], "current_device() (in module torch.mtia)": [[1401, "torch.mtia.current_device"]], "current_stream() (in module torch.mtia)": [[1402, "torch.mtia.current_stream"]], "default_stream() (in module torch.mtia)": [[1403, "torch.mtia.default_stream"]], "device (class in torch.mtia)": [[1404, "torch.mtia.device"]], "device_count() (in module torch.mtia)": [[1405, "torch.mtia.device_count"]], "init() (in module torch.mtia)": [[1406, "torch.mtia.init"]], "is_available() (in module 
torch.mtia)": [[1407, "torch.mtia.is_available"]], "is_initialized() (in module torch.mtia)": [[1408, "torch.mtia.is_initialized"]], "set_stream() (in module torch.mtia)": [[1409, "torch.mtia.set_stream"]], "stream() (in module torch.mtia)": [[1410, "torch.mtia.stream"]], "synchronize() (in module torch.mtia)": [[1411, "torch.mtia.synchronize"]], "mul() (in module torch)": [[1412, "torch.mul"]], "multinomial() (in module torch)": [[1413, "torch.multinomial"]], "multiply() (in module torch)": [[1414, "torch.multiply"]], "mv() (in module torch)": [[1415, "torch.mv"]], "mvlgamma() (in module torch)": [[1416, "torch.mvlgamma"]], "nan_to_num() (in module torch)": [[1417, "torch.nan_to_num"]], "nanmean() (in module torch)": [[1418, "torch.nanmean"]], "nanmedian() (in module torch)": [[1419, "torch.nanmedian"]], "nanquantile() (in module torch)": [[1420, "torch.nanquantile"]], "nansum() (in module torch)": [[1421, "torch.nansum"]], "narrow() (in module torch)": [[1422, "torch.narrow"]], "narrow_copy() (in module torch)": [[1423, "torch.narrow_copy"]], "ne() (in module torch)": [[1424, "torch.ne"]], "neg() (in module torch)": [[1425, "torch.neg"]], "negative() (in module torch)": [[1426, "torch.negative"]], "nextafter() (in module torch)": [[1427, "torch.nextafter"]], "adaptiveavgpool1d (class in torch.nn)": [[1428, "torch.nn.AdaptiveAvgPool1d"]], "adaptiveavgpool2d (class in torch.nn)": [[1429, "torch.nn.AdaptiveAvgPool2d"]], "adaptiveavgpool3d (class in torch.nn)": [[1430, "torch.nn.AdaptiveAvgPool3d"]], "adaptivelogsoftmaxwithloss (class in torch.nn)": [[1431, "torch.nn.AdaptiveLogSoftmaxWithLoss"]], "log_prob() (torch.nn.adaptivelogsoftmaxwithloss method)": [[1431, "torch.nn.AdaptiveLogSoftmaxWithLoss.log_prob"]], "predict() (torch.nn.adaptivelogsoftmaxwithloss method)": [[1431, "torch.nn.AdaptiveLogSoftmaxWithLoss.predict"]], "adaptivemaxpool1d (class in torch.nn)": [[1432, "torch.nn.AdaptiveMaxPool1d"]], "adaptivemaxpool2d (class in torch.nn)": [[1433, "torch.nn.AdaptiveMaxPool2d"]], "adaptivemaxpool3d (class in torch.nn)": [[1434, "torch.nn.AdaptiveMaxPool3d"]], "alphadropout (class in torch.nn)": [[1435, "torch.nn.AlphaDropout"]], "avgpool1d (class in torch.nn)": [[1436, "torch.nn.AvgPool1d"]], "avgpool2d (class in torch.nn)": [[1437, "torch.nn.AvgPool2d"]], "avgpool3d (class in torch.nn)": [[1438, "torch.nn.AvgPool3d"]], "bceloss (class in torch.nn)": [[1439, "torch.nn.BCELoss"]], "bcewithlogitsloss (class in torch.nn)": [[1440, "torch.nn.BCEWithLogitsLoss"]], "batchnorm1d (class in torch.nn)": [[1441, "torch.nn.BatchNorm1d"]], "batchnorm2d (class in torch.nn)": [[1442, "torch.nn.BatchNorm2d"]], "batchnorm3d (class in torch.nn)": [[1443, "torch.nn.BatchNorm3d"]], "bilinear (class in torch.nn)": [[1444, "torch.nn.Bilinear"]], "celu (class in torch.nn)": [[1445, "torch.nn.CELU"]], "ctcloss (class in torch.nn)": [[1446, "torch.nn.CTCLoss"]], "channelshuffle (class in torch.nn)": [[1447, "torch.nn.ChannelShuffle"]], "circularpad1d (class in torch.nn)": [[1448, "torch.nn.CircularPad1d"]], "circularpad2d (class in torch.nn)": [[1449, "torch.nn.CircularPad2d"]], "circularpad3d (class in torch.nn)": [[1450, "torch.nn.CircularPad3d"]], "constantpad1d (class in torch.nn)": [[1451, "torch.nn.ConstantPad1d"]], "constantpad2d (class in torch.nn)": [[1452, "torch.nn.ConstantPad2d"]], "constantpad3d (class in torch.nn)": [[1453, "torch.nn.ConstantPad3d"]], "conv1d (class in torch.nn)": [[1454, "torch.nn.Conv1d"]], "conv2d (class in torch.nn)": [[1455, "torch.nn.Conv2d"]], "conv3d (class in torch.nn)": 
[[1456, "torch.nn.Conv3d"]], "convtranspose1d (class in torch.nn)": [[1457, "torch.nn.ConvTranspose1d"]], "convtranspose2d (class in torch.nn)": [[1458, "torch.nn.ConvTranspose2d"]], "convtranspose3d (class in torch.nn)": [[1459, "torch.nn.ConvTranspose3d"]], "cosineembeddingloss (class in torch.nn)": [[1460, "torch.nn.CosineEmbeddingLoss"]], "cosinesimilarity (class in torch.nn)": [[1461, "torch.nn.CosineSimilarity"]], "crossentropyloss (class in torch.nn)": [[1462, "torch.nn.CrossEntropyLoss"]], "dataparallel (class in torch.nn)": [[1463, "torch.nn.DataParallel"]], "dropout (class in torch.nn)": [[1464, "torch.nn.Dropout"]], "dropout1d (class in torch.nn)": [[1465, "torch.nn.Dropout1d"]], "dropout2d (class in torch.nn)": [[1466, "torch.nn.Dropout2d"]], "dropout3d (class in torch.nn)": [[1467, "torch.nn.Dropout3d"]], "elu (class in torch.nn)": [[1468, "torch.nn.ELU"]], "embedding (class in torch.nn)": [[1469, "torch.nn.Embedding"]], "from_pretrained() (torch.nn.embedding class method)": [[1469, "torch.nn.Embedding.from_pretrained"]], "embeddingbag (class in torch.nn)": [[1470, "torch.nn.EmbeddingBag"]], "forward() (torch.nn.embeddingbag method)": [[1470, "torch.nn.EmbeddingBag.forward"]], "from_pretrained() (torch.nn.embeddingbag class method)": [[1470, "torch.nn.EmbeddingBag.from_pretrained"]], "featurealphadropout (class in torch.nn)": [[1471, "torch.nn.FeatureAlphaDropout"]], "flatten (class in torch.nn)": [[1472, "torch.nn.Flatten"]], "fold (class in torch.nn)": [[1473, "torch.nn.Fold"]], "fractionalmaxpool2d (class in torch.nn)": [[1474, "torch.nn.FractionalMaxPool2d"]], "fractionalmaxpool3d (class in torch.nn)": [[1475, "torch.nn.FractionalMaxPool3d"]], "gelu (class in torch.nn)": [[1476, "torch.nn.GELU"]], "glu (class in torch.nn)": [[1477, "torch.nn.GLU"]], "gru (class in torch.nn)": [[1478, "torch.nn.GRU"]], "grucell (class in torch.nn)": [[1479, "torch.nn.GRUCell"]], "gaussiannllloss (class in torch.nn)": [[1480, "torch.nn.GaussianNLLLoss"]], "groupnorm (class in torch.nn)": [[1481, "torch.nn.GroupNorm"]], "hardshrink (class in torch.nn)": [[1482, "torch.nn.Hardshrink"]], "hardsigmoid (class in torch.nn)": [[1483, "torch.nn.Hardsigmoid"]], "hardswish (class in torch.nn)": [[1484, "torch.nn.Hardswish"]], "hardtanh (class in torch.nn)": [[1485, "torch.nn.Hardtanh"]], "hingeembeddingloss (class in torch.nn)": [[1486, "torch.nn.HingeEmbeddingLoss"]], "huberloss (class in torch.nn)": [[1487, "torch.nn.HuberLoss"]], "identity (class in torch.nn)": [[1488, "torch.nn.Identity"]], "instancenorm1d (class in torch.nn)": [[1489, "torch.nn.InstanceNorm1d"]], "instancenorm2d (class in torch.nn)": [[1490, "torch.nn.InstanceNorm2d"]], "instancenorm3d (class in torch.nn)": [[1491, "torch.nn.InstanceNorm3d"]], "kldivloss (class in torch.nn)": [[1492, "torch.nn.KLDivLoss"]], "l1loss (class in torch.nn)": [[1493, "torch.nn.L1Loss"]], "lppool1d (class in torch.nn)": [[1494, "torch.nn.LPPool1d"]], "lppool2d (class in torch.nn)": [[1495, "torch.nn.LPPool2d"]], "lppool3d (class in torch.nn)": [[1496, "torch.nn.LPPool3d"]], "lstm (class in torch.nn)": [[1497, "torch.nn.LSTM"]], "lstmcell (class in torch.nn)": [[1498, "torch.nn.LSTMCell"]], "layernorm (class in torch.nn)": [[1499, "torch.nn.LayerNorm"]], "lazybatchnorm1d (class in torch.nn)": [[1500, "torch.nn.LazyBatchNorm1d"]], "cls_to_become (torch.nn.lazybatchnorm1d attribute)": [[1500, "torch.nn.LazyBatchNorm1d.cls_to_become"]], "lazybatchnorm2d (class in torch.nn)": [[1501, "torch.nn.LazyBatchNorm2d"]], "cls_to_become (torch.nn.lazybatchnorm2d 
attribute)": [[1501, "torch.nn.LazyBatchNorm2d.cls_to_become"]], "lazybatchnorm3d (class in torch.nn)": [[1502, "torch.nn.LazyBatchNorm3d"]], "cls_to_become (torch.nn.lazybatchnorm3d attribute)": [[1502, "torch.nn.LazyBatchNorm3d.cls_to_become"]], "lazyconv1d (class in torch.nn)": [[1503, "torch.nn.LazyConv1d"]], "cls_to_become (torch.nn.lazyconv1d attribute)": [[1503, "torch.nn.LazyConv1d.cls_to_become"]], "lazyconv2d (class in torch.nn)": [[1504, "torch.nn.LazyConv2d"]], "cls_to_become (torch.nn.lazyconv2d attribute)": [[1504, "torch.nn.LazyConv2d.cls_to_become"]], "lazyconv3d (class in torch.nn)": [[1505, "torch.nn.LazyConv3d"]], "cls_to_become (torch.nn.lazyconv3d attribute)": [[1505, "torch.nn.LazyConv3d.cls_to_become"]], "lazyconvtranspose1d (class in torch.nn)": [[1506, "torch.nn.LazyConvTranspose1d"]], "cls_to_become (torch.nn.lazyconvtranspose1d attribute)": [[1506, "torch.nn.LazyConvTranspose1d.cls_to_become"]], "lazyconvtranspose2d (class in torch.nn)": [[1507, "torch.nn.LazyConvTranspose2d"]], "cls_to_become (torch.nn.lazyconvtranspose2d attribute)": [[1507, "torch.nn.LazyConvTranspose2d.cls_to_become"]], "lazyconvtranspose3d (class in torch.nn)": [[1508, "torch.nn.LazyConvTranspose3d"]], "cls_to_become (torch.nn.lazyconvtranspose3d attribute)": [[1508, "torch.nn.LazyConvTranspose3d.cls_to_become"]], "lazyinstancenorm1d (class in torch.nn)": [[1509, "torch.nn.LazyInstanceNorm1d"]], "cls_to_become (torch.nn.lazyinstancenorm1d attribute)": [[1509, "torch.nn.LazyInstanceNorm1d.cls_to_become"]], "lazyinstancenorm2d (class in torch.nn)": [[1510, "torch.nn.LazyInstanceNorm2d"]], "cls_to_become (torch.nn.lazyinstancenorm2d attribute)": [[1510, "torch.nn.LazyInstanceNorm2d.cls_to_become"]], "lazyinstancenorm3d (class in torch.nn)": [[1511, "torch.nn.LazyInstanceNorm3d"]], "cls_to_become (torch.nn.lazyinstancenorm3d attribute)": [[1511, "torch.nn.LazyInstanceNorm3d.cls_to_become"]], "lazylinear (class in torch.nn)": [[1512, "torch.nn.LazyLinear"]], "cls_to_become (torch.nn.lazylinear attribute)": [[1512, "torch.nn.LazyLinear.cls_to_become"]], "leakyrelu (class in torch.nn)": [[1513, "torch.nn.LeakyReLU"]], "linear (class in torch.nn)": [[1514, "torch.nn.Linear"]], "localresponsenorm (class in torch.nn)": [[1515, "torch.nn.LocalResponseNorm"]], "logsigmoid (class in torch.nn)": [[1516, "torch.nn.LogSigmoid"]], "logsoftmax (class in torch.nn)": [[1517, "torch.nn.LogSoftmax"]], "mseloss (class in torch.nn)": [[1518, "torch.nn.MSELoss"]], "marginrankingloss (class in torch.nn)": [[1519, "torch.nn.MarginRankingLoss"]], "maxpool1d (class in torch.nn)": [[1520, "torch.nn.MaxPool1d"]], "maxpool2d (class in torch.nn)": [[1521, "torch.nn.MaxPool2d"]], "maxpool3d (class in torch.nn)": [[1522, "torch.nn.MaxPool3d"]], "maxunpool1d (class in torch.nn)": [[1523, "torch.nn.MaxUnpool1d"]], "maxunpool2d (class in torch.nn)": [[1524, "torch.nn.MaxUnpool2d"]], "maxunpool3d (class in torch.nn)": [[1525, "torch.nn.MaxUnpool3d"]], "mish (class in torch.nn)": [[1526, "torch.nn.Mish"]], "module (class in torch.nn)": [[1527, "torch.nn.Module"]], "add_module() (torch.nn.module method)": [[1527, "torch.nn.Module.add_module"]], "apply() (torch.nn.module method)": [[1527, "torch.nn.Module.apply"]], "bfloat16() (torch.nn.module method)": [[1527, "torch.nn.Module.bfloat16"]], "buffers() (torch.nn.module method)": [[1527, "torch.nn.Module.buffers"]], "children() (torch.nn.module method)": [[1527, "torch.nn.Module.children"]], "compile() (torch.nn.module method)": [[1527, "torch.nn.Module.compile"]], "cpu() 
(torch.nn.module method)": [[1527, "torch.nn.Module.cpu"]], "cuda() (torch.nn.module method)": [[1527, "torch.nn.Module.cuda"]], "double() (torch.nn.module method)": [[1527, "torch.nn.Module.double"]], "eval() (torch.nn.module method)": [[1527, "torch.nn.Module.eval"]], "extra_repr() (torch.nn.module method)": [[1527, "torch.nn.Module.extra_repr"]], "float() (torch.nn.module method)": [[1527, "torch.nn.Module.float"]], "forward() (torch.nn.module method)": [[1527, "torch.nn.Module.forward"]], "get_buffer() (torch.nn.module method)": [[1527, "torch.nn.Module.get_buffer"]], "get_extra_state() (torch.nn.module method)": [[1527, "torch.nn.Module.get_extra_state"]], "get_parameter() (torch.nn.module method)": [[1527, "torch.nn.Module.get_parameter"]], "get_submodule() (torch.nn.module method)": [[1527, "torch.nn.Module.get_submodule"]], "half() (torch.nn.module method)": [[1527, "torch.nn.Module.half"]], "ipu() (torch.nn.module method)": [[1527, "torch.nn.Module.ipu"]], "load_state_dict() (torch.nn.module method)": [[1527, "torch.nn.Module.load_state_dict"]], "modules() (torch.nn.module method)": [[1527, "torch.nn.Module.modules"]], "named_buffers() (torch.nn.module method)": [[1527, "torch.nn.Module.named_buffers"]], "named_children() (torch.nn.module method)": [[1527, "torch.nn.Module.named_children"]], "named_modules() (torch.nn.module method)": [[1527, "torch.nn.Module.named_modules"]], "named_parameters() (torch.nn.module method)": [[1527, "torch.nn.Module.named_parameters"]], "parameters() (torch.nn.module method)": [[1527, "torch.nn.Module.parameters"]], "register_backward_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_backward_hook"]], "register_buffer() (torch.nn.module method)": [[1527, "torch.nn.Module.register_buffer"]], "register_forward_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_forward_hook"]], "register_forward_pre_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_forward_pre_hook"]], "register_full_backward_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_full_backward_hook"]], "register_full_backward_pre_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_full_backward_pre_hook"]], "register_load_state_dict_post_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_load_state_dict_post_hook"]], "register_module() (torch.nn.module method)": [[1527, "torch.nn.Module.register_module"]], "register_parameter() (torch.nn.module method)": [[1527, "torch.nn.Module.register_parameter"]], "register_state_dict_pre_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_state_dict_pre_hook"]], "requires_grad_() (torch.nn.module method)": [[1527, "torch.nn.Module.requires_grad_"]], "set_extra_state() (torch.nn.module method)": [[1527, "torch.nn.Module.set_extra_state"]], "share_memory() (torch.nn.module method)": [[1527, "torch.nn.Module.share_memory"]], "state_dict() (torch.nn.module method)": [[1527, "torch.nn.Module.state_dict"]], "to() (torch.nn.module method)": [[1527, "torch.nn.Module.to"]], "to_empty() (torch.nn.module method)": [[1527, "torch.nn.Module.to_empty"]], "train() (torch.nn.module method)": [[1527, "torch.nn.Module.train"]], "type() (torch.nn.module method)": [[1527, "torch.nn.Module.type"]], "xpu() (torch.nn.module method)": [[1527, "torch.nn.Module.xpu"]], "zero_grad() (torch.nn.module method)": [[1527, "torch.nn.Module.zero_grad"]], "moduledict (class in torch.nn)": [[1528, "torch.nn.ModuleDict"]], "clear() (torch.nn.moduledict method)": [[1528, 
"torch.nn.ModuleDict.clear"]], "items() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.items"]], "keys() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.keys"]], "pop() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.pop"]], "update() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.update"]], "values() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.values"]], "modulelist (class in torch.nn)": [[1529, "torch.nn.ModuleList"]], "append() (torch.nn.modulelist method)": [[1529, "torch.nn.ModuleList.append"]], "extend() (torch.nn.modulelist method)": [[1529, "torch.nn.ModuleList.extend"]], "insert() (torch.nn.modulelist method)": [[1529, "torch.nn.ModuleList.insert"]], "multilabelmarginloss (class in torch.nn)": [[1530, "torch.nn.MultiLabelMarginLoss"]], "multilabelsoftmarginloss (class in torch.nn)": [[1531, "torch.nn.MultiLabelSoftMarginLoss"]], "multimarginloss (class in torch.nn)": [[1532, "torch.nn.MultiMarginLoss"]], "multiheadattention (class in torch.nn)": [[1533, "torch.nn.MultiheadAttention"]], "forward() (torch.nn.multiheadattention method)": [[1533, "torch.nn.MultiheadAttention.forward"]], "merge_masks() (torch.nn.multiheadattention method)": [[1533, "torch.nn.MultiheadAttention.merge_masks"]], "nllloss (class in torch.nn)": [[1534, "torch.nn.NLLLoss"]], "prelu (class in torch.nn)": [[1535, "torch.nn.PReLU"]], "pairwisedistance (class in torch.nn)": [[1536, "torch.nn.PairwiseDistance"]], "parameterdict (class in torch.nn)": [[1537, "torch.nn.ParameterDict"]], "clear() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.clear"]], "copy() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.copy"]], "fromkeys() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.fromkeys"]], "get() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.get"]], "items() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.items"]], "keys() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.keys"]], "pop() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.pop"]], "popitem() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.popitem"]], "setdefault() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.setdefault"]], "update() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.update"]], "values() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.values"]], "parameterlist (class in torch.nn)": [[1538, "torch.nn.ParameterList"]], "append() (torch.nn.parameterlist method)": [[1538, "torch.nn.ParameterList.append"]], "extend() (torch.nn.parameterlist method)": [[1538, "torch.nn.ParameterList.extend"]], "pixelshuffle (class in torch.nn)": [[1539, "torch.nn.PixelShuffle"]], "pixelunshuffle (class in torch.nn)": [[1540, "torch.nn.PixelUnshuffle"]], "poissonnllloss (class in torch.nn)": [[1541, "torch.nn.PoissonNLLLoss"]], "rmsnorm (class in torch.nn)": [[1542, "torch.nn.RMSNorm"]], "extra_repr() (torch.nn.rmsnorm method)": [[1542, "torch.nn.RMSNorm.extra_repr"]], "forward() (torch.nn.rmsnorm method)": [[1542, "torch.nn.RMSNorm.forward"]], "reset_parameters() (torch.nn.rmsnorm method)": [[1542, "torch.nn.RMSNorm.reset_parameters"]], "rnn (class in torch.nn)": [[1543, "torch.nn.RNN"]], "rnnbase (class in torch.nn)": [[1544, "torch.nn.RNNBase"]], "flatten_parameters() (torch.nn.rnnbase method)": [[1544, "torch.nn.RNNBase.flatten_parameters"]], "rnncell (class in torch.nn)": [[1545, 
"torch.nn.RNNCell"]], "rrelu (class in torch.nn)": [[1546, "torch.nn.RReLU"]], "relu (class in torch.nn)": [[1547, "torch.nn.ReLU"]], "relu6 (class in torch.nn)": [[1548, "torch.nn.ReLU6"]], "reflectionpad1d (class in torch.nn)": [[1549, "torch.nn.ReflectionPad1d"]], "reflectionpad2d (class in torch.nn)": [[1550, "torch.nn.ReflectionPad2d"]], "reflectionpad3d (class in torch.nn)": [[1551, "torch.nn.ReflectionPad3d"]], "replicationpad1d (class in torch.nn)": [[1552, "torch.nn.ReplicationPad1d"]], "replicationpad2d (class in torch.nn)": [[1553, "torch.nn.ReplicationPad2d"]], "replicationpad3d (class in torch.nn)": [[1554, "torch.nn.ReplicationPad3d"]], "selu (class in torch.nn)": [[1555, "torch.nn.SELU"]], "sequential (class in torch.nn)": [[1556, "torch.nn.Sequential"]], "append() (torch.nn.sequential method)": [[1556, "torch.nn.Sequential.append"]], "silu (class in torch.nn)": [[1557, "torch.nn.SiLU"]], "sigmoid (class in torch.nn)": [[1558, "torch.nn.Sigmoid"]], "smoothl1loss (class in torch.nn)": [[1559, "torch.nn.SmoothL1Loss"]], "softmarginloss (class in torch.nn)": [[1560, "torch.nn.SoftMarginLoss"]], "softmax (class in torch.nn)": [[1561, "torch.nn.Softmax"]], "softmax2d (class in torch.nn)": [[1562, "torch.nn.Softmax2d"]], "softmin (class in torch.nn)": [[1563, "torch.nn.Softmin"]], "softplus (class in torch.nn)": [[1564, "torch.nn.Softplus"]], "softshrink (class in torch.nn)": [[1565, "torch.nn.Softshrink"]], "softsign (class in torch.nn)": [[1566, "torch.nn.Softsign"]], "syncbatchnorm (class in torch.nn)": [[1567, "torch.nn.SyncBatchNorm"]], "convert_sync_batchnorm() (torch.nn.syncbatchnorm class method)": [[1567, "torch.nn.SyncBatchNorm.convert_sync_batchnorm"]], "tanh (class in torch.nn)": [[1568, "torch.nn.Tanh"]], "tanhshrink (class in torch.nn)": [[1569, "torch.nn.Tanhshrink"]], "threshold (class in torch.nn)": [[1570, "torch.nn.Threshold"]], "transformer (class in torch.nn)": [[1571, "torch.nn.Transformer"]], "forward() (torch.nn.transformer method)": [[1571, "torch.nn.Transformer.forward"]], "generate_square_subsequent_mask() (torch.nn.transformer static method)": [[1571, "torch.nn.Transformer.generate_square_subsequent_mask"]], "transformerdecoder (class in torch.nn)": [[1572, "torch.nn.TransformerDecoder"]], "forward() (torch.nn.transformerdecoder method)": [[1572, "torch.nn.TransformerDecoder.forward"]], "transformerdecoderlayer (class in torch.nn)": [[1573, "torch.nn.TransformerDecoderLayer"]], "forward() (torch.nn.transformerdecoderlayer method)": [[1573, "torch.nn.TransformerDecoderLayer.forward"]], "transformerencoder (class in torch.nn)": [[1574, "torch.nn.TransformerEncoder"]], "forward() (torch.nn.transformerencoder method)": [[1574, "torch.nn.TransformerEncoder.forward"]], "transformerencoderlayer (class in torch.nn)": [[1575, "torch.nn.TransformerEncoderLayer"]], "forward() (torch.nn.transformerencoderlayer method)": [[1575, "torch.nn.TransformerEncoderLayer.forward"]], "tripletmarginloss (class in torch.nn)": [[1576, "torch.nn.TripletMarginLoss"]], "tripletmarginwithdistanceloss (class in torch.nn)": [[1577, "torch.nn.TripletMarginWithDistanceLoss"]], "unflatten (class in torch.nn)": [[1578, "torch.nn.Unflatten"]], "unfold (class in torch.nn)": [[1579, "torch.nn.Unfold"]], "upsample (class in torch.nn)": [[1580, "torch.nn.Upsample"]], "upsamplingbilinear2d (class in torch.nn)": [[1581, "torch.nn.UpsamplingBilinear2d"]], "upsamplingnearest2d (class in torch.nn)": [[1582, "torch.nn.UpsamplingNearest2d"]], "zeropad1d (class in torch.nn)": [[1583, 
"torch.nn.ZeroPad1d"]], "zeropad2d (class in torch.nn)": [[1584, "torch.nn.ZeroPad2d"]], "zeropad3d (class in torch.nn)": [[1585, "torch.nn.ZeroPad3d"]], "sdpbackend (class in torch.nn.attention)": [[1586, "torch.nn.attention.SDPBackend"]], "name (torch.nn.attention.sdpbackend property)": [[1586, "torch.nn.attention.SDPBackend.name"]], "causalbias (class in torch.nn.attention.bias)": [[1587, "torch.nn.attention.bias.CausalBias"]], "causalvariant (class in torch.nn.attention.bias)": [[1588, "torch.nn.attention.bias.CausalVariant"]], "causal_lower_right() (in module torch.nn.attention.bias)": [[1589, "torch.nn.attention.bias.causal_lower_right"]], "causal_upper_left() (in module torch.nn.attention.bias)": [[1590, "torch.nn.attention.bias.causal_upper_left"]], "sdpa_kernel() (in module torch.nn.attention)": [[1591, "torch.nn.attention.sdpa_kernel"]], "adaptive_avg_pool1d() (in module torch.nn.functional)": [[1592, "torch.nn.functional.adaptive_avg_pool1d"]], "adaptive_avg_pool2d() (in module torch.nn.functional)": [[1593, "torch.nn.functional.adaptive_avg_pool2d"]], "adaptive_avg_pool3d() (in module torch.nn.functional)": [[1594, "torch.nn.functional.adaptive_avg_pool3d"]], "adaptive_max_pool1d() (in module torch.nn.functional)": [[1595, "torch.nn.functional.adaptive_max_pool1d"]], "adaptive_max_pool2d() (in module torch.nn.functional)": [[1596, "torch.nn.functional.adaptive_max_pool2d"]], "adaptive_max_pool3d() (in module torch.nn.functional)": [[1597, "torch.nn.functional.adaptive_max_pool3d"]], "affine_grid() (in module torch.nn.functional)": [[1598, "torch.nn.functional.affine_grid"]], "alpha_dropout() (in module torch.nn.functional)": [[1599, "torch.nn.functional.alpha_dropout"]], "avg_pool1d() (in module torch.nn.functional)": [[1600, "torch.nn.functional.avg_pool1d"]], "avg_pool2d() (in module torch.nn.functional)": [[1601, "torch.nn.functional.avg_pool2d"]], "avg_pool3d() (in module torch.nn.functional)": [[1602, "torch.nn.functional.avg_pool3d"]], "batch_norm() (in module torch.nn.functional)": [[1603, "torch.nn.functional.batch_norm"]], "bilinear() (in module torch.nn.functional)": [[1604, "torch.nn.functional.bilinear"]], "binary_cross_entropy() (in module torch.nn.functional)": [[1605, "torch.nn.functional.binary_cross_entropy"]], "binary_cross_entropy_with_logits() (in module torch.nn.functional)": [[1606, "torch.nn.functional.binary_cross_entropy_with_logits"]], "celu() (in module torch.nn.functional)": [[1607, "torch.nn.functional.celu"]], "conv1d() (in module torch.nn.functional)": [[1608, "torch.nn.functional.conv1d"]], "conv2d() (in module torch.nn.functional)": [[1609, "torch.nn.functional.conv2d"]], "conv3d() (in module torch.nn.functional)": [[1610, "torch.nn.functional.conv3d"]], "conv_transpose1d() (in module torch.nn.functional)": [[1611, "torch.nn.functional.conv_transpose1d"]], "conv_transpose2d() (in module torch.nn.functional)": [[1612, "torch.nn.functional.conv_transpose2d"]], "conv_transpose3d() (in module torch.nn.functional)": [[1613, "torch.nn.functional.conv_transpose3d"]], "cosine_embedding_loss() (in module torch.nn.functional)": [[1614, "torch.nn.functional.cosine_embedding_loss"]], "cosine_similarity() (in module torch.nn.functional)": [[1615, "torch.nn.functional.cosine_similarity"]], "cross_entropy() (in module torch.nn.functional)": [[1616, "torch.nn.functional.cross_entropy"]], "ctc_loss() (in module torch.nn.functional)": [[1617, "torch.nn.functional.ctc_loss"]], "dropout() (in module torch.nn.functional)": [[1618, "torch.nn.functional.dropout"]], 
"dropout1d() (in module torch.nn.functional)": [[1619, "torch.nn.functional.dropout1d"]], "dropout2d() (in module torch.nn.functional)": [[1620, "torch.nn.functional.dropout2d"]], "dropout3d() (in module torch.nn.functional)": [[1621, "torch.nn.functional.dropout3d"]], "elu() (in module torch.nn.functional)": [[1622, "torch.nn.functional.elu"]], "elu_() (in module torch.nn.functional)": [[1623, "torch.nn.functional.elu_"]], "embedding() (in module torch.nn.functional)": [[1624, "torch.nn.functional.embedding"]], "embedding_bag() (in module torch.nn.functional)": [[1625, "torch.nn.functional.embedding_bag"]], "feature_alpha_dropout() (in module torch.nn.functional)": [[1626, "torch.nn.functional.feature_alpha_dropout"]], "fold() (in module torch.nn.functional)": [[1627, "torch.nn.functional.fold"]], "fractional_max_pool2d() (in module torch.nn.functional)": [[1628, "torch.nn.functional.fractional_max_pool2d"]], "fractional_max_pool3d() (in module torch.nn.functional)": [[1629, "torch.nn.functional.fractional_max_pool3d"]], "gaussian_nll_loss() (in module torch.nn.functional)": [[1630, "torch.nn.functional.gaussian_nll_loss"]], "gelu() (in module torch.nn.functional)": [[1631, "torch.nn.functional.gelu"]], "glu() (in module torch.nn.functional)": [[1632, "torch.nn.functional.glu"]], "grid_sample() (in module torch.nn.functional)": [[1633, "torch.nn.functional.grid_sample"]], "group_norm() (in module torch.nn.functional)": [[1634, "torch.nn.functional.group_norm"]], "gumbel_softmax() (in module torch.nn.functional)": [[1635, "torch.nn.functional.gumbel_softmax"]], "hardshrink() (in module torch.nn.functional)": [[1636, "torch.nn.functional.hardshrink"]], "hardsigmoid() (in module torch.nn.functional)": [[1637, "torch.nn.functional.hardsigmoid"]], "hardswish() (in module torch.nn.functional)": [[1638, "torch.nn.functional.hardswish"]], "hardtanh() (in module torch.nn.functional)": [[1639, "torch.nn.functional.hardtanh"]], "hardtanh_() (in module torch.nn.functional)": [[1640, "torch.nn.functional.hardtanh_"]], "hinge_embedding_loss() (in module torch.nn.functional)": [[1641, "torch.nn.functional.hinge_embedding_loss"]], "huber_loss() (in module torch.nn.functional)": [[1642, "torch.nn.functional.huber_loss"]], "instance_norm() (in module torch.nn.functional)": [[1643, "torch.nn.functional.instance_norm"]], "interpolate() (in module torch.nn.functional)": [[1644, "torch.nn.functional.interpolate"]], "kl_div() (in module torch.nn.functional)": [[1645, "torch.nn.functional.kl_div"]], "l1_loss() (in module torch.nn.functional)": [[1646, "torch.nn.functional.l1_loss"]], "layer_norm() (in module torch.nn.functional)": [[1647, "torch.nn.functional.layer_norm"]], "leaky_relu() (in module torch.nn.functional)": [[1648, "torch.nn.functional.leaky_relu"]], "leaky_relu_() (in module torch.nn.functional)": [[1649, "torch.nn.functional.leaky_relu_"]], "linear() (in module torch.nn.functional)": [[1650, "torch.nn.functional.linear"]], "local_response_norm() (in module torch.nn.functional)": [[1651, "torch.nn.functional.local_response_norm"]], "log_softmax() (in module torch.nn.functional)": [[1652, "torch.nn.functional.log_softmax"]], "logsigmoid() (in module torch.nn.functional)": [[1653, "torch.nn.functional.logsigmoid"]], "lp_pool1d() (in module torch.nn.functional)": [[1654, "torch.nn.functional.lp_pool1d"]], "lp_pool2d() (in module torch.nn.functional)": [[1655, "torch.nn.functional.lp_pool2d"]], "lp_pool3d() (in module torch.nn.functional)": [[1656, "torch.nn.functional.lp_pool3d"]], 
"margin_ranking_loss() (in module torch.nn.functional)": [[1657, "torch.nn.functional.margin_ranking_loss"]], "max_pool1d() (in module torch.nn.functional)": [[1658, "torch.nn.functional.max_pool1d"]], "max_pool2d() (in module torch.nn.functional)": [[1659, "torch.nn.functional.max_pool2d"]], "max_pool3d() (in module torch.nn.functional)": [[1660, "torch.nn.functional.max_pool3d"]], "max_unpool1d() (in module torch.nn.functional)": [[1661, "torch.nn.functional.max_unpool1d"]], "max_unpool2d() (in module torch.nn.functional)": [[1662, "torch.nn.functional.max_unpool2d"]], "max_unpool3d() (in module torch.nn.functional)": [[1663, "torch.nn.functional.max_unpool3d"]], "mish() (in module torch.nn.functional)": [[1664, "torch.nn.functional.mish"]], "mse_loss() (in module torch.nn.functional)": [[1665, "torch.nn.functional.mse_loss"]], "multi_margin_loss() (in module torch.nn.functional)": [[1666, "torch.nn.functional.multi_margin_loss"]], "multilabel_margin_loss() (in module torch.nn.functional)": [[1667, "torch.nn.functional.multilabel_margin_loss"]], "multilabel_soft_margin_loss() (in module torch.nn.functional)": [[1668, "torch.nn.functional.multilabel_soft_margin_loss"]], "nll_loss() (in module torch.nn.functional)": [[1669, "torch.nn.functional.nll_loss"]], "normalize() (in module torch.nn.functional)": [[1670, "torch.nn.functional.normalize"]], "one_hot() (in module torch.nn.functional)": [[1671, "torch.nn.functional.one_hot"]], "pad() (in module torch.nn.functional)": [[1672, "torch.nn.functional.pad"]], "pairwise_distance() (in module torch.nn.functional)": [[1673, "torch.nn.functional.pairwise_distance"]], "pdist() (in module torch.nn.functional)": [[1674, "torch.nn.functional.pdist"]], "pixel_shuffle() (in module torch.nn.functional)": [[1675, "torch.nn.functional.pixel_shuffle"]], "pixel_unshuffle() (in module torch.nn.functional)": [[1676, "torch.nn.functional.pixel_unshuffle"]], "poisson_nll_loss() (in module torch.nn.functional)": [[1677, "torch.nn.functional.poisson_nll_loss"]], "prelu() (in module torch.nn.functional)": [[1678, "torch.nn.functional.prelu"]], "relu() (in module torch.nn.functional)": [[1679, "torch.nn.functional.relu"]], "relu6() (in module torch.nn.functional)": [[1680, "torch.nn.functional.relu6"]], "relu_() (in module torch.nn.functional)": [[1681, "torch.nn.functional.relu_"]], "rms_norm() (in module torch.nn.functional)": [[1682, "torch.nn.functional.rms_norm"]], "rrelu() (in module torch.nn.functional)": [[1683, "torch.nn.functional.rrelu"]], "rrelu_() (in module torch.nn.functional)": [[1684, "torch.nn.functional.rrelu_"]], "scaled_dot_product_attention() (in module torch.nn.functional)": [[1685, "torch.nn.functional.scaled_dot_product_attention"]], "selu() (in module torch.nn.functional)": [[1686, "torch.nn.functional.selu"]], "sigmoid() (in module torch.nn.functional)": [[1687, "torch.nn.functional.sigmoid"]], "silu() (in module torch.nn.functional)": [[1688, "torch.nn.functional.silu"]], "smooth_l1_loss() (in module torch.nn.functional)": [[1689, "torch.nn.functional.smooth_l1_loss"]], "soft_margin_loss() (in module torch.nn.functional)": [[1690, "torch.nn.functional.soft_margin_loss"]], "softmax() (in module torch.nn.functional)": [[1691, "torch.nn.functional.softmax"]], "softmin() (in module torch.nn.functional)": [[1692, "torch.nn.functional.softmin"]], "softplus() (in module torch.nn.functional)": [[1693, "torch.nn.functional.softplus"]], "softshrink() (in module torch.nn.functional)": [[1694, "torch.nn.functional.softshrink"]], "softsign() (in 
module torch.nn.functional)": [[1695, "torch.nn.functional.softsign"]], "tanh() (in module torch.nn.functional)": [[1696, "torch.nn.functional.tanh"]], "tanhshrink() (in module torch.nn.functional)": [[1697, "torch.nn.functional.tanhshrink"]], "threshold() (in module torch.nn.functional)": [[1698, "torch.nn.functional.threshold"]], "threshold_() (in module torch.nn.functional)": [[1699, "torch.nn.functional.threshold_"]], "data_parallel() (in module torch.nn.parallel)": [[1700, "torch.nn.parallel.data_parallel"]], "triplet_margin_loss() (in module torch.nn.functional)": [[1701, "torch.nn.functional.triplet_margin_loss"]], "triplet_margin_with_distance_loss() (in module torch.nn.functional)": [[1702, "torch.nn.functional.triplet_margin_with_distance_loss"]], "unfold() (in module torch.nn.functional)": [[1703, "torch.nn.functional.unfold"]], "upsample() (in module torch.nn.functional)": [[1704, "torch.nn.functional.upsample"]], "upsample_bilinear() (in module torch.nn.functional)": [[1705, "torch.nn.functional.upsample_bilinear"]], "upsample_nearest() (in module torch.nn.functional)": [[1706, "torch.nn.functional.upsample_nearest"]], "lazymodulemixin (class in torch.nn.modules.lazy)": [[1707, "torch.nn.modules.lazy.LazyModuleMixin"]], "has_uninitialized_params() (torch.nn.modules.lazy.lazymodulemixin method)": [[1707, "torch.nn.modules.lazy.LazyModuleMixin.has_uninitialized_params"]], "initialize_parameters() (torch.nn.modules.lazy.lazymodulemixin method)": [[1707, "torch.nn.modules.lazy.LazyModuleMixin.initialize_parameters"]], "register_module_backward_hook() (in module torch.nn.modules.module)": [[1708, "torch.nn.modules.module.register_module_backward_hook"]], "register_module_buffer_registration_hook() (in module torch.nn.modules.module)": [[1709, "torch.nn.modules.module.register_module_buffer_registration_hook"]], "register_module_forward_hook() (in module torch.nn.modules.module)": [[1710, "torch.nn.modules.module.register_module_forward_hook"]], "register_module_forward_pre_hook() (in module torch.nn.modules.module)": [[1711, "torch.nn.modules.module.register_module_forward_pre_hook"]], "register_module_full_backward_hook() (in module torch.nn.modules.module)": [[1712, "torch.nn.modules.module.register_module_full_backward_hook"]], "register_module_full_backward_pre_hook() (in module torch.nn.modules.module)": [[1713, "torch.nn.modules.module.register_module_full_backward_pre_hook"]], "register_module_module_registration_hook() (in module torch.nn.modules.module)": [[1714, "torch.nn.modules.module.register_module_module_registration_hook"]], "register_module_parameter_registration_hook() (in module torch.nn.modules.module)": [[1715, "torch.nn.modules.module.register_module_parameter_registration_hook"]], "rmsnorm (class in torch.nn.modules.normalization)": [[1716, "torch.nn.modules.normalization.RMSNorm"]], "extra_repr() (torch.nn.modules.normalization.rmsnorm method)": [[1716, "torch.nn.modules.normalization.RMSNorm.extra_repr"]], "forward() (torch.nn.modules.normalization.rmsnorm method)": [[1716, "torch.nn.modules.normalization.RMSNorm.forward"]], "reset_parameters() (torch.nn.modules.normalization.rmsnorm method)": [[1716, "torch.nn.modules.normalization.RMSNorm.reset_parameters"]], "distributeddataparallel (class in torch.nn.parallel)": [[1717, "torch.nn.parallel.DistributedDataParallel"]], "join() (torch.nn.parallel.distributeddataparallel method)": [[1717, "torch.nn.parallel.DistributedDataParallel.join"]], "join_hook() (torch.nn.parallel.distributeddataparallel method)": 
[[1717, "torch.nn.parallel.DistributedDataParallel.join_hook"]], "no_sync() (torch.nn.parallel.distributeddataparallel method)": [[1717, "torch.nn.parallel.DistributedDataParallel.no_sync"]], "register_comm_hook() (torch.nn.parallel.distributeddataparallel method)": [[1717, "torch.nn.parallel.DistributedDataParallel.register_comm_hook"]], "parameter (class in torch.nn.parameter)": [[1718, "torch.nn.parameter.Parameter"]], "uninitializedbuffer (class in torch.nn.parameter)": [[1719, "torch.nn.parameter.UninitializedBuffer"]], "uninitializedparameter (class in torch.nn.parameter)": [[1720, "torch.nn.parameter.UninitializedParameter"]], "cls_to_become (torch.nn.parameter.uninitializedparameter attribute)": [[1720, "torch.nn.parameter.UninitializedParameter.cls_to_become"]], "clip_grad_norm() (in module torch.nn.utils)": [[1721, "torch.nn.utils.clip_grad_norm"]], "clip_grad_norm_() (in module torch.nn.utils)": [[1722, "torch.nn.utils.clip_grad_norm_"]], "clip_grad_value_() (in module torch.nn.utils)": [[1723, "torch.nn.utils.clip_grad_value_"]], "convert_conv2d_weight_memory_format() (in module torch.nn.utils)": [[1724, "torch.nn.utils.convert_conv2d_weight_memory_format"]], "convert_conv3d_weight_memory_format() (in module torch.nn.utils)": [[1725, "torch.nn.utils.convert_conv3d_weight_memory_format"]], "fuse_conv_bn_eval() (in module torch.nn.utils)": [[1726, "torch.nn.utils.fuse_conv_bn_eval"]], "fuse_conv_bn_weights() (in module torch.nn.utils)": [[1727, "torch.nn.utils.fuse_conv_bn_weights"]], "fuse_linear_bn_eval() (in module torch.nn.utils)": [[1728, "torch.nn.utils.fuse_linear_bn_eval"]], "fuse_linear_bn_weights() (in module torch.nn.utils)": [[1729, "torch.nn.utils.fuse_linear_bn_weights"]], "parameters_to_vector() (in module torch.nn.utils)": [[1730, "torch.nn.utils.parameters_to_vector"]], "orthogonal() (in module torch.nn.utils.parametrizations)": [[1731, "torch.nn.utils.parametrizations.orthogonal"]], "spectral_norm() (in module torch.nn.utils.parametrizations)": [[1732, "torch.nn.utils.parametrizations.spectral_norm"]], "weight_norm() (in module torch.nn.utils.parametrizations)": [[1733, "torch.nn.utils.parametrizations.weight_norm"]], "parametrizationlist (class in torch.nn.utils.parametrize)": [[1734, "torch.nn.utils.parametrize.ParametrizationList"]], "right_inverse() (torch.nn.utils.parametrize.parametrizationlist method)": [[1734, "torch.nn.utils.parametrize.ParametrizationList.right_inverse"]], "cached() (in module torch.nn.utils.parametrize)": [[1735, "torch.nn.utils.parametrize.cached"]], "is_parametrized() (in module torch.nn.utils.parametrize)": [[1736, "torch.nn.utils.parametrize.is_parametrized"]], "register_parametrization() (in module torch.nn.utils.parametrize)": [[1737, "torch.nn.utils.parametrize.register_parametrization"]], "remove_parametrizations() (in module torch.nn.utils.parametrize)": [[1738, "torch.nn.utils.parametrize.remove_parametrizations"]], "basepruningmethod (class in torch.nn.utils.prune)": [[1739, "torch.nn.utils.prune.BasePruningMethod"]], "apply() (torch.nn.utils.prune.basepruningmethod class method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.apply"]], "apply_mask() (torch.nn.utils.prune.basepruningmethod method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.apply_mask"]], "compute_mask() (torch.nn.utils.prune.basepruningmethod method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.compute_mask"]], "prune() (torch.nn.utils.prune.basepruningmethod method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.prune"]], "remove() 
(torch.nn.utils.prune.basepruningmethod method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.remove"]], "customfrommask (class in torch.nn.utils.prune)": [[1740, "torch.nn.utils.prune.CustomFromMask"]], "apply() (torch.nn.utils.prune.customfrommask class method)": [[1740, "torch.nn.utils.prune.CustomFromMask.apply"]], "apply_mask() (torch.nn.utils.prune.customfrommask method)": [[1740, "torch.nn.utils.prune.CustomFromMask.apply_mask"]], "prune() (torch.nn.utils.prune.customfrommask method)": [[1740, "torch.nn.utils.prune.CustomFromMask.prune"]], "remove() (torch.nn.utils.prune.customfrommask method)": [[1740, "torch.nn.utils.prune.CustomFromMask.remove"]], "identity (class in torch.nn.utils.prune)": [[1741, "torch.nn.utils.prune.Identity"]], "apply() (torch.nn.utils.prune.identity class method)": [[1741, "torch.nn.utils.prune.Identity.apply"]], "apply_mask() (torch.nn.utils.prune.identity method)": [[1741, "torch.nn.utils.prune.Identity.apply_mask"]], "prune() (torch.nn.utils.prune.identity method)": [[1741, "torch.nn.utils.prune.Identity.prune"]], "remove() (torch.nn.utils.prune.identity method)": [[1741, "torch.nn.utils.prune.Identity.remove"]], "l1unstructured (class in torch.nn.utils.prune)": [[1742, "torch.nn.utils.prune.L1Unstructured"]], "apply() (torch.nn.utils.prune.l1unstructured class method)": [[1742, "torch.nn.utils.prune.L1Unstructured.apply"]], "apply_mask() (torch.nn.utils.prune.l1unstructured method)": [[1742, "torch.nn.utils.prune.L1Unstructured.apply_mask"]], "prune() (torch.nn.utils.prune.l1unstructured method)": [[1742, "torch.nn.utils.prune.L1Unstructured.prune"]], "remove() (torch.nn.utils.prune.l1unstructured method)": [[1742, "torch.nn.utils.prune.L1Unstructured.remove"]], "lnstructured (class in torch.nn.utils.prune)": [[1743, "torch.nn.utils.prune.LnStructured"]], "apply() (torch.nn.utils.prune.lnstructured class method)": [[1743, "torch.nn.utils.prune.LnStructured.apply"]], "apply_mask() (torch.nn.utils.prune.lnstructured method)": [[1743, "torch.nn.utils.prune.LnStructured.apply_mask"]], "compute_mask() (torch.nn.utils.prune.lnstructured method)": [[1743, "torch.nn.utils.prune.LnStructured.compute_mask"]], "prune() (torch.nn.utils.prune.lnstructured method)": [[1743, "torch.nn.utils.prune.LnStructured.prune"]], "remove() (torch.nn.utils.prune.lnstructured method)": [[1743, "torch.nn.utils.prune.LnStructured.remove"]], "pruningcontainer (class in torch.nn.utils.prune)": [[1744, "torch.nn.utils.prune.PruningContainer"]], "add_pruning_method() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.add_pruning_method"]], "apply() (torch.nn.utils.prune.pruningcontainer class method)": [[1744, "torch.nn.utils.prune.PruningContainer.apply"]], "apply_mask() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.apply_mask"]], "compute_mask() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.compute_mask"]], "prune() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.prune"]], "remove() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.remove"]], "randomstructured (class in torch.nn.utils.prune)": [[1745, "torch.nn.utils.prune.RandomStructured"]], "apply() (torch.nn.utils.prune.randomstructured class method)": [[1745, "torch.nn.utils.prune.RandomStructured.apply"]], "apply_mask() (torch.nn.utils.prune.randomstructured method)": [[1745, 
"torch.nn.utils.prune.RandomStructured.apply_mask"]], "compute_mask() (torch.nn.utils.prune.randomstructured method)": [[1745, "torch.nn.utils.prune.RandomStructured.compute_mask"]], "prune() (torch.nn.utils.prune.randomstructured method)": [[1745, "torch.nn.utils.prune.RandomStructured.prune"]], "remove() (torch.nn.utils.prune.randomstructured method)": [[1745, "torch.nn.utils.prune.RandomStructured.remove"]], "randomunstructured (class in torch.nn.utils.prune)": [[1746, "torch.nn.utils.prune.RandomUnstructured"]], "apply() (torch.nn.utils.prune.randomunstructured class method)": [[1746, "torch.nn.utils.prune.RandomUnstructured.apply"]], "apply_mask() (torch.nn.utils.prune.randomunstructured method)": [[1746, "torch.nn.utils.prune.RandomUnstructured.apply_mask"]], "prune() (torch.nn.utils.prune.randomunstructured method)": [[1746, "torch.nn.utils.prune.RandomUnstructured.prune"]], "remove() (torch.nn.utils.prune.randomunstructured method)": [[1746, "torch.nn.utils.prune.RandomUnstructured.remove"]], "custom_from_mask() (in module torch.nn.utils.prune)": [[1747, "torch.nn.utils.prune.custom_from_mask"]], "global_unstructured() (in module torch.nn.utils.prune)": [[1748, "torch.nn.utils.prune.global_unstructured"]], "identity() (in module torch.nn.utils.prune)": [[1749, "torch.nn.utils.prune.identity"]], "is_pruned() (in module torch.nn.utils.prune)": [[1750, "torch.nn.utils.prune.is_pruned"]], "l1_unstructured() (in module torch.nn.utils.prune)": [[1751, "torch.nn.utils.prune.l1_unstructured"]], "ln_structured() (in module torch.nn.utils.prune)": [[1752, "torch.nn.utils.prune.ln_structured"]], "random_structured() (in module torch.nn.utils.prune)": [[1753, "torch.nn.utils.prune.random_structured"]], "random_unstructured() (in module torch.nn.utils.prune)": [[1754, "torch.nn.utils.prune.random_unstructured"]], "remove() (in module torch.nn.utils.prune)": [[1755, "torch.nn.utils.prune.remove"]], "remove_spectral_norm() (in module torch.nn.utils)": [[1756, "torch.nn.utils.remove_spectral_norm"]], "remove_weight_norm() (in module torch.nn.utils)": [[1757, "torch.nn.utils.remove_weight_norm"]], "packedsequence (class in torch.nn.utils.rnn)": [[1758, "torch.nn.utils.rnn.PackedSequence"]], "batch_sizes (torch.nn.utils.rnn.packedsequence attribute)": [[1758, "torch.nn.utils.rnn.PackedSequence.batch_sizes"]], "count() (torch.nn.utils.rnn.packedsequence method)": [[1758, "torch.nn.utils.rnn.PackedSequence.count"]], "data (torch.nn.utils.rnn.packedsequence attribute)": [[1758, "torch.nn.utils.rnn.PackedSequence.data"]], "index() (torch.nn.utils.rnn.packedsequence method)": [[1758, "torch.nn.utils.rnn.PackedSequence.index"]], "is_cuda (torch.nn.utils.rnn.packedsequence property)": [[1758, "torch.nn.utils.rnn.PackedSequence.is_cuda"]], "is_pinned() (torch.nn.utils.rnn.packedsequence method)": [[1758, "torch.nn.utils.rnn.PackedSequence.is_pinned"]], "sorted_indices (torch.nn.utils.rnn.packedsequence attribute)": [[1758, "torch.nn.utils.rnn.PackedSequence.sorted_indices"]], "to() (torch.nn.utils.rnn.packedsequence method)": [[1758, "torch.nn.utils.rnn.PackedSequence.to"]], "unsorted_indices (torch.nn.utils.rnn.packedsequence attribute)": [[1758, "torch.nn.utils.rnn.PackedSequence.unsorted_indices"]], "pack_padded_sequence() (in module torch.nn.utils.rnn)": [[1759, "torch.nn.utils.rnn.pack_padded_sequence"]], "pack_sequence() (in module torch.nn.utils.rnn)": [[1760, "torch.nn.utils.rnn.pack_sequence"]], "pad_packed_sequence() (in module torch.nn.utils.rnn)": [[1761, 
"torch.nn.utils.rnn.pad_packed_sequence"]], "pad_sequence() (in module torch.nn.utils.rnn)": [[1762, "torch.nn.utils.rnn.pad_sequence"]], "unpack_sequence() (in module torch.nn.utils.rnn)": [[1763, "torch.nn.utils.rnn.unpack_sequence"]], "unpad_sequence() (in module torch.nn.utils.rnn)": [[1764, "torch.nn.utils.rnn.unpad_sequence"]], "skip_init() (in module torch.nn.utils)": [[1765, "torch.nn.utils.skip_init"]], "spectral_norm() (in module torch.nn.utils)": [[1766, "torch.nn.utils.spectral_norm"]], "functional_call() (in module torch.nn.utils.stateless)": [[1767, "torch.nn.utils.stateless.functional_call"]], "vector_to_parameters() (in module torch.nn.utils)": [[1768, "torch.nn.utils.vector_to_parameters"]], "weight_norm() (in module torch.nn.utils)": [[1769, "torch.nn.utils.weight_norm"]], "no_grad (class in torch)": [[1770, "torch.no_grad"]], "nonzero() (in module torch)": [[1771, "torch.nonzero"]], "norm() (in module torch)": [[1772, "torch.norm"]], "normal() (in module torch)": [[1773, "torch.normal"]], "not_equal() (in module torch)": [[1774, "torch.not_equal"]], "numel() (in module torch)": [[1775, "torch.numel"]], "ones() (in module torch)": [[1776, "torch.ones"]], "ones_like() (in module torch)": [[1777, "torch.ones_like"]], "jitscalartype (class in torch.onnx)": [[1778, "torch.onnx.JitScalarType"]], "dtype() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.dtype"]], "from_dtype() (torch.onnx.jitscalartype class method)": [[1778, "torch.onnx.JitScalarType.from_dtype"]], "from_onnx_type() (torch.onnx.jitscalartype class method)": [[1778, "torch.onnx.JitScalarType.from_onnx_type"]], "from_value() (torch.onnx.jitscalartype class method)": [[1778, "torch.onnx.JitScalarType.from_value"]], "onnx_compatible() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.onnx_compatible"]], "onnx_type() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.onnx_type"]], "scalar_name() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.scalar_name"]], "torch_name() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.torch_name"]], "graphinfo (class in torch.onnx.verification)": [[1779, "torch.onnx.verification.GraphInfo"]], "all_mismatch_leaf_graph_info() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.all_mismatch_leaf_graph_info"]], "clear() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.clear"]], "essential_node_count() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.essential_node_count"]], "essential_node_kinds() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.essential_node_kinds"]], "export_repro() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.export_repro"]], "find_mismatch() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.find_mismatch"]], "find_partition() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.find_partition"]], "has_mismatch() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.has_mismatch"]], "pretty_print_mismatch() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.pretty_print_mismatch"]], "pretty_print_tree() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.pretty_print_tree"]], "verify_export() 
(torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.verify_export"]], "verificationoptions (class in torch.onnx.verification)": [[1780, "torch.onnx.verification.VerificationOptions"]], "asgd (class in torch.optim)": [[1781, "torch.optim.ASGD"]], "add_param_group() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.add_param_group"]], "load_state_dict() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_step_pre_hook"]], "state_dict() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.state_dict"]], "step() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.step"]], "zero_grad() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.zero_grad"]], "adadelta (class in torch.optim)": [[1782, "torch.optim.Adadelta"]], "add_param_group() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.add_param_group"]], "load_state_dict() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_step_pre_hook"]], "state_dict() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.state_dict"]], "step() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.step"]], "zero_grad() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.zero_grad"]], "adagrad (class in torch.optim)": [[1783, "torch.optim.Adagrad"]], "add_param_group() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.add_param_group"]], "load_state_dict() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adagrad 
method)": [[1783, "torch.optim.Adagrad.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_step_pre_hook"]], "state_dict() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.state_dict"]], "step() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.step"]], "zero_grad() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.zero_grad"]], "adam (class in torch.optim)": [[1784, "torch.optim.Adam"]], "add_param_group() (torch.optim.adam method)": [[1784, "torch.optim.Adam.add_param_group"]], "load_state_dict() (torch.optim.adam method)": [[1784, "torch.optim.Adam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_step_pre_hook"]], "state_dict() (torch.optim.adam method)": [[1784, "torch.optim.Adam.state_dict"]], "step() (torch.optim.adam method)": [[1784, "torch.optim.Adam.step"]], "zero_grad() (torch.optim.adam method)": [[1784, "torch.optim.Adam.zero_grad"]], "adamw (class in torch.optim)": [[1785, "torch.optim.AdamW"]], "add_param_group() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.add_param_group"]], "load_state_dict() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_step_pre_hook"]], "state_dict() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.state_dict"]], "step() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.step"]], "zero_grad() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.zero_grad"]], "adamax (class in torch.optim)": [[1786, "torch.optim.Adamax"]], "add_param_group() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.add_param_group"]], "load_state_dict() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_state_dict_post_hook"]], "register_state_dict_pre_hook() 
(torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_step_pre_hook"]], "state_dict() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.state_dict"]], "step() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.step"]], "zero_grad() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.zero_grad"]], "lbfgs (class in torch.optim)": [[1787, "torch.optim.LBFGS"]], "add_param_group() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.add_param_group"]], "load_state_dict() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_step_pre_hook"]], "state_dict() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.state_dict"]], "step() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.step"]], "zero_grad() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.zero_grad"]], "nadam (class in torch.optim)": [[1788, "torch.optim.NAdam"]], "add_param_group() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.add_param_group"]], "load_state_dict() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_step_pre_hook"]], "state_dict() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.state_dict"]], "step() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.step"]], "zero_grad() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.zero_grad"]], "add_param_group() (torch.optim.optimizer method)": [[1789, "torch.optim.Optimizer.add_param_group"]], "load_state_dict() (torch.optim.optimizer method)": [[1790, "torch.optim.Optimizer.load_state_dict"]], "state_dict() (torch.optim.optimizer method)": [[1791, "torch.optim.Optimizer.state_dict"]], "step() (torch.optim.optimizer method)": [[1792, "torch.optim.Optimizer.step"]], "zero_grad() (torch.optim.optimizer method)": [[1793, "torch.optim.Optimizer.zero_grad"]], "radam (class in torch.optim)": [[1794, "torch.optim.RAdam"]], 
"add_param_group() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.add_param_group"]], "load_state_dict() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_step_pre_hook"]], "state_dict() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.state_dict"]], "step() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.step"]], "zero_grad() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.zero_grad"]], "rmsprop (class in torch.optim)": [[1795, "torch.optim.RMSprop"]], "add_param_group() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.add_param_group"]], "load_state_dict() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_step_pre_hook"]], "state_dict() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.state_dict"]], "step() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.step"]], "zero_grad() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.zero_grad"]], "rprop (class in torch.optim)": [[1796, "torch.optim.Rprop"]], "add_param_group() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.add_param_group"]], "load_state_dict() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_step_pre_hook"]], "state_dict() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.state_dict"]], "step() (torch.optim.rprop method)": [[1796, 
"torch.optim.Rprop.step"]], "zero_grad() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.zero_grad"]], "sgd (class in torch.optim)": [[1797, "torch.optim.SGD"]], "add_param_group() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.add_param_group"]], "load_state_dict() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_step_pre_hook"]], "state_dict() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.state_dict"]], "step() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.step"]], "zero_grad() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.zero_grad"]], "sparseadam (class in torch.optim)": [[1798, "torch.optim.SparseAdam"]], "add_param_group() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.add_param_group"]], "load_state_dict() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_step_pre_hook"]], "state_dict() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.state_dict"]], "step() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.step"]], "zero_grad() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.zero_grad"]], "chainedscheduler (class in torch.optim.lr_scheduler)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler"]], "get_last_lr() (torch.optim.lr_scheduler.chainedscheduler method)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.chainedscheduler method)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.chainedscheduler method)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler.print_lr"]], "state_dict() (torch.optim.lr_scheduler.chainedscheduler method)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler.state_dict"]], "constantlr (class in torch.optim.lr_scheduler)": [[1800, "torch.optim.lr_scheduler.ConstantLR"]], "get_last_lr() (torch.optim.lr_scheduler.constantlr method)": [[1800, "torch.optim.lr_scheduler.ConstantLR.get_last_lr"]], "load_state_dict() 
(torch.optim.lr_scheduler.constantlr method)": [[1800, "torch.optim.lr_scheduler.ConstantLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.constantlr method)": [[1800, "torch.optim.lr_scheduler.ConstantLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.constantlr method)": [[1800, "torch.optim.lr_scheduler.ConstantLR.state_dict"]], "cosineannealinglr (class in torch.optim.lr_scheduler)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR"]], "get_last_lr() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR.state_dict"]], "cosineannealingwarmrestarts (class in torch.optim.lr_scheduler)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts"]], "get_last_lr() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.print_lr"]], "state_dict() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.state_dict"]], "step() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.step"]], "cycliclr (class in torch.optim.lr_scheduler)": [[1803, "torch.optim.lr_scheduler.CyclicLR"]], "get_last_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1803, "torch.optim.lr_scheduler.CyclicLR.get_last_lr"]], "get_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1803, "torch.optim.lr_scheduler.CyclicLR.get_lr"]], "print_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1803, "torch.optim.lr_scheduler.CyclicLR.print_lr"]], "exponentiallr (class in torch.optim.lr_scheduler)": [[1804, "torch.optim.lr_scheduler.ExponentialLR"]], "get_last_lr() (torch.optim.lr_scheduler.exponentiallr method)": [[1804, "torch.optim.lr_scheduler.ExponentialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.exponentiallr method)": [[1804, "torch.optim.lr_scheduler.ExponentialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.exponentiallr method)": [[1804, "torch.optim.lr_scheduler.ExponentialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.exponentiallr method)": [[1804, "torch.optim.lr_scheduler.ExponentialLR.state_dict"]], "lambdalr (class in torch.optim.lr_scheduler)": [[1805, "torch.optim.lr_scheduler.LambdaLR"]], "get_last_lr() (torch.optim.lr_scheduler.lambdalr method)": [[1805, "torch.optim.lr_scheduler.LambdaLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.lambdalr method)": [[1805, "torch.optim.lr_scheduler.LambdaLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.lambdalr method)": [[1805, "torch.optim.lr_scheduler.LambdaLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.lambdalr method)": [[1805, "torch.optim.lr_scheduler.LambdaLR.state_dict"]], "linearlr 
(class in torch.optim.lr_scheduler)": [[1806, "torch.optim.lr_scheduler.LinearLR"]], "get_last_lr() (torch.optim.lr_scheduler.linearlr method)": [[1806, "torch.optim.lr_scheduler.LinearLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.linearlr method)": [[1806, "torch.optim.lr_scheduler.LinearLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.linearlr method)": [[1806, "torch.optim.lr_scheduler.LinearLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.linearlr method)": [[1806, "torch.optim.lr_scheduler.LinearLR.state_dict"]], "multisteplr (class in torch.optim.lr_scheduler)": [[1807, "torch.optim.lr_scheduler.MultiStepLR"]], "get_last_lr() (torch.optim.lr_scheduler.multisteplr method)": [[1807, "torch.optim.lr_scheduler.MultiStepLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.multisteplr method)": [[1807, "torch.optim.lr_scheduler.MultiStepLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.multisteplr method)": [[1807, "torch.optim.lr_scheduler.MultiStepLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.multisteplr method)": [[1807, "torch.optim.lr_scheduler.MultiStepLR.state_dict"]], "multiplicativelr (class in torch.optim.lr_scheduler)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR"]], "get_last_lr() (torch.optim.lr_scheduler.multiplicativelr method)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.multiplicativelr method)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.multiplicativelr method)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.multiplicativelr method)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR.state_dict"]], "onecyclelr (class in torch.optim.lr_scheduler)": [[1809, "torch.optim.lr_scheduler.OneCycleLR"]], "get_last_lr() (torch.optim.lr_scheduler.onecyclelr method)": [[1809, "torch.optim.lr_scheduler.OneCycleLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.onecyclelr method)": [[1809, "torch.optim.lr_scheduler.OneCycleLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.onecyclelr method)": [[1809, "torch.optim.lr_scheduler.OneCycleLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.onecyclelr method)": [[1809, "torch.optim.lr_scheduler.OneCycleLR.state_dict"]], "polynomiallr (class in torch.optim.lr_scheduler)": [[1810, "torch.optim.lr_scheduler.PolynomialLR"]], "get_last_lr() (torch.optim.lr_scheduler.polynomiallr method)": [[1810, "torch.optim.lr_scheduler.PolynomialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.polynomiallr method)": [[1810, "torch.optim.lr_scheduler.PolynomialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.polynomiallr method)": [[1810, "torch.optim.lr_scheduler.PolynomialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.polynomiallr method)": [[1810, "torch.optim.lr_scheduler.PolynomialLR.state_dict"]], "reducelronplateau (class in torch.optim.lr_scheduler)": [[1811, "torch.optim.lr_scheduler.ReduceLROnPlateau"]], "get_last_lr() (torch.optim.lr_scheduler.reducelronplateau method)": [[1811, "torch.optim.lr_scheduler.ReduceLROnPlateau.get_last_lr"]], "print_lr() (torch.optim.lr_scheduler.reducelronplateau method)": [[1811, "torch.optim.lr_scheduler.ReduceLROnPlateau.print_lr"]], "sequentiallr (class in torch.optim.lr_scheduler)": [[1812, "torch.optim.lr_scheduler.SequentialLR"]], "get_last_lr() (torch.optim.lr_scheduler.sequentiallr 
method)": [[1812, "torch.optim.lr_scheduler.SequentialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.sequentiallr method)": [[1812, "torch.optim.lr_scheduler.SequentialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.sequentiallr method)": [[1812, "torch.optim.lr_scheduler.SequentialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.sequentiallr method)": [[1812, "torch.optim.lr_scheduler.SequentialLR.state_dict"]], "steplr (class in torch.optim.lr_scheduler)": [[1813, "torch.optim.lr_scheduler.StepLR"]], "get_last_lr() (torch.optim.lr_scheduler.steplr method)": [[1813, "torch.optim.lr_scheduler.StepLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.steplr method)": [[1813, "torch.optim.lr_scheduler.StepLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.steplr method)": [[1813, "torch.optim.lr_scheduler.StepLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.steplr method)": [[1813, "torch.optim.lr_scheduler.StepLR.state_dict"]], "orgqr() (in module torch)": [[1814, "torch.orgqr"]], "ormqr() (in module torch)": [[1815, "torch.ormqr"]], "outer() (in module torch)": [[1816, "torch.outer"]], "pca_lowrank() (in module torch)": [[1817, "torch.pca_lowrank"]], "permute() (in module torch)": [[1818, "torch.permute"]], "pinverse() (in module torch)": [[1819, "torch.pinverse"]], "poisson() (in module torch)": [[1820, "torch.poisson"]], "polar() (in module torch)": [[1821, "torch.polar"]], "polygamma() (in module torch)": [[1822, "torch.polygamma"]], "positive() (in module torch)": [[1823, "torch.positive"]], "pow() (in module torch)": [[1824, "torch.pow"]], "prod() (in module torch)": [[1825, "torch.prod"]], "promote_types() (in module torch)": [[1826, "torch.promote_types"]], "qr() (in module torch)": [[1827, "torch.qr"]], "quantile() (in module torch)": [[1828, "torch.quantile"]], "quantize_per_channel() (in module torch)": [[1829, "torch.quantize_per_channel"]], "quantize_per_tensor() (in module torch)": [[1830, "torch.quantize_per_tensor"]], "quantized_batch_norm() (in module torch)": [[1831, "torch.quantized_batch_norm"]], "quantized_max_pool1d() (in module torch)": [[1832, "torch.quantized_max_pool1d"]], "quantized_max_pool2d() (in module torch)": [[1833, "torch.quantized_max_pool2d"]], "sobolengine (class in torch.quasirandom)": [[1834, "torch.quasirandom.SobolEngine"]], "draw() (torch.quasirandom.sobolengine method)": [[1834, "torch.quasirandom.SobolEngine.draw"]], "draw_base2() (torch.quasirandom.sobolengine method)": [[1834, "torch.quasirandom.SobolEngine.draw_base2"]], "fast_forward() (torch.quasirandom.sobolengine method)": [[1834, "torch.quasirandom.SobolEngine.fast_forward"]], "reset() (torch.quasirandom.sobolengine method)": [[1834, "torch.quasirandom.SobolEngine.reset"]], "rad2deg() (in module torch)": [[1835, "torch.rad2deg"]], "rand() (in module torch)": [[1836, "torch.rand"]], "rand_like() (in module torch)": [[1837, "torch.rand_like"]], "randint() (in module torch)": [[1838, "torch.randint"]], "randint_like() (in module torch)": [[1839, "torch.randint_like"]], "randn() (in module torch)": [[1840, "torch.randn"]], "randn_like() (in module torch)": [[1841, "torch.randn_like"]], "randperm() (in module torch)": [[1842, "torch.randperm"]], "range() (in module torch)": [[1843, "torch.range"]], "ravel() (in module torch)": [[1844, "torch.ravel"]], "real() (in module torch)": [[1845, "torch.real"]], "reciprocal() (in module torch)": [[1846, "torch.reciprocal"]], "remainder() (in module torch)": [[1847, "torch.remainder"]], 
"renorm() (in module torch)": [[1848, "torch.renorm"]], "repeat_interleave() (in module torch)": [[1849, "torch.repeat_interleave"]], "reshape() (in module torch)": [[1850, "torch.reshape"]], "resolve_conj() (in module torch)": [[1851, "torch.resolve_conj"]], "resolve_neg() (in module torch)": [[1852, "torch.resolve_neg"]], "result_type() (in module torch)": [[1853, "torch.result_type"]], "roll() (in module torch)": [[1854, "torch.roll"]], "rot90() (in module torch)": [[1855, "torch.rot90"]], "round() (in module torch)": [[1856, "torch.round"]], "row_stack() (in module torch)": [[1857, "torch.row_stack"]], "rsqrt() (in module torch)": [[1858, "torch.rsqrt"]], "save() (in module torch)": [[1859, "torch.save"]], "scatter() (in module torch)": [[1860, "torch.scatter"]], "scatter_add() (in module torch)": [[1861, "torch.scatter_add"]], "scatter_reduce() (in module torch)": [[1862, "torch.scatter_reduce"]], "searchsorted() (in module torch)": [[1863, "torch.searchsorted"]], "seed() (in module torch)": [[1864, "torch.seed"]], "select() (in module torch)": [[1865, "torch.select"]], "select_scatter() (in module torch)": [[1866, "torch.select_scatter"]], "set_default_device() (in module torch)": [[1867, "torch.set_default_device"]], "set_default_dtype() (in module torch)": [[1868, "torch.set_default_dtype"]], "set_default_tensor_type() (in module torch)": [[1869, "torch.set_default_tensor_type"]], "set_deterministic_debug_mode() (in module torch)": [[1870, "torch.set_deterministic_debug_mode"]], "set_float32_matmul_precision() (in module torch)": [[1871, "torch.set_float32_matmul_precision"]], "set_flush_denormal() (in module torch)": [[1872, "torch.set_flush_denormal"]], "set_num_interop_threads() (in module torch)": [[1873, "torch.set_num_interop_threads"]], "set_num_threads() (in module torch)": [[1874, "torch.set_num_threads"]], "set_printoptions() (in module torch)": [[1875, "torch.set_printoptions"]], "set_rng_state() (in module torch)": [[1876, "torch.set_rng_state"]], "set_warn_always() (in module torch)": [[1877, "torch.set_warn_always"]], "sgn() (in module torch)": [[1878, "torch.sgn"]], "sigmoid() (in module torch)": [[1879, "torch.sigmoid"]], "sign() (in module torch)": [[1880, "torch.sign"]], "bartlett() (in module torch.signal.windows)": [[1881, "torch.signal.windows.bartlett"]], "blackman() (in module torch.signal.windows)": [[1882, "torch.signal.windows.blackman"]], "cosine() (in module torch.signal.windows)": [[1883, "torch.signal.windows.cosine"]], "exponential() (in module torch.signal.windows)": [[1884, "torch.signal.windows.exponential"]], "gaussian() (in module torch.signal.windows)": [[1885, "torch.signal.windows.gaussian"]], "general_cosine() (in module torch.signal.windows)": [[1886, "torch.signal.windows.general_cosine"]], "general_hamming() (in module torch.signal.windows)": [[1887, "torch.signal.windows.general_hamming"]], "hamming() (in module torch.signal.windows)": [[1888, "torch.signal.windows.hamming"]], "hann() (in module torch.signal.windows)": [[1889, "torch.signal.windows.hann"]], "kaiser() (in module torch.signal.windows)": [[1890, "torch.signal.windows.kaiser"]], "nuttall() (in module torch.signal.windows)": [[1891, "torch.signal.windows.nuttall"]], "signbit() (in module torch)": [[1892, "torch.signbit"]], "sin() (in module torch)": [[1893, "torch.sin"]], "sinc() (in module torch)": [[1894, "torch.sinc"]], "sinh() (in module torch)": [[1895, "torch.sinh"]], "slice_scatter() (in module torch)": [[1896, "torch.slice_scatter"]], "slogdet() (in module torch)": 
[[1897, "torch.slogdet"]], "smm() (in module torch)": [[1898, "torch.smm"]], "softmax() (in module torch)": [[1899, "torch.softmax"]], "sort() (in module torch)": [[1900, "torch.sort"]], "addmm() (in module torch.sparse)": [[1901, "torch.sparse.addmm"]], "as_sparse_gradcheck() (in module torch.sparse)": [[1902, "torch.sparse.as_sparse_gradcheck"]], "check_sparse_tensor_invariants (class in torch.sparse)": [[1903, "torch.sparse.check_sparse_tensor_invariants"]], "disable() (torch.sparse.check_sparse_tensor_invariants static method)": [[1903, "torch.sparse.check_sparse_tensor_invariants.disable"]], "enable() (torch.sparse.check_sparse_tensor_invariants static method)": [[1903, "torch.sparse.check_sparse_tensor_invariants.enable"]], "is_enabled() (torch.sparse.check_sparse_tensor_invariants static method)": [[1903, "torch.sparse.check_sparse_tensor_invariants.is_enabled"]], "log_softmax() (in module torch.sparse)": [[1904, "torch.sparse.log_softmax"]], "mm() (in module torch.sparse)": [[1905, "torch.sparse.mm"]], "sampled_addmm() (in module torch.sparse)": [[1906, "torch.sparse.sampled_addmm"]], "softmax() (in module torch.sparse)": [[1907, "torch.sparse.softmax"]], "spdiags() (in module torch.sparse)": [[1908, "torch.sparse.spdiags"]], "sum() (in module torch.sparse)": [[1909, "torch.sparse.sum"]], "sparse_bsc_tensor() (in module torch)": [[1910, "torch.sparse_bsc_tensor"]], "sparse_bsr_tensor() (in module torch)": [[1911, "torch.sparse_bsr_tensor"]], "sparse_compressed_tensor() (in module torch)": [[1912, "torch.sparse_compressed_tensor"]], "sparse_coo_tensor() (in module torch)": [[1913, "torch.sparse_coo_tensor"]], "sparse_csc_tensor() (in module torch)": [[1914, "torch.sparse_csc_tensor"]], "sparse_csr_tensor() (in module torch)": [[1915, "torch.sparse_csr_tensor"]], "split() (in module torch)": [[1916, "torch.split"]], "sqrt() (in module torch)": [[1917, "torch.sqrt"]], "square() (in module torch)": [[1918, "torch.square"]], "squeeze() (in module torch)": [[1919, "torch.squeeze"]], "sspaddmm() (in module torch)": [[1920, "torch.sspaddmm"]], "stack() (in module torch)": [[1921, "torch.stack"]], "std() (in module torch)": [[1922, "torch.std"]], "std_mean() (in module torch)": [[1923, "torch.std_mean"]], "stft() (in module torch)": [[1924, "torch.stft"]], "sub() (in module torch)": [[1925, "torch.sub"]], "subtract() (in module torch)": [[1926, "torch.subtract"]], "sum() (in module torch)": [[1927, "torch.sum"]], "svd() (in module torch)": [[1928, "torch.svd"]], "svd_lowrank() (in module torch)": [[1929, "torch.svd_lowrank"]], "swapaxes() (in module torch)": [[1930, "torch.swapaxes"]], "swapdims() (in module torch)": [[1931, "torch.swapdims"]], "sym_float() (in module torch)": [[1932, "torch.sym_float"]], "sym_int() (in module torch)": [[1933, "torch.sym_int"]], "sym_ite() (in module torch)": [[1934, "torch.sym_ite"]], "sym_max() (in module torch)": [[1935, "torch.sym_max"]], "sym_min() (in module torch)": [[1936, "torch.sym_min"]], "sym_not() (in module torch)": [[1937, "torch.sym_not"]], "t() (in module torch)": [[1938, "torch.t"]], "take() (in module torch)": [[1939, "torch.take"]], "take_along_dim() (in module torch)": [[1940, "torch.take_along_dim"]], "tan() (in module torch)": [[1941, "torch.tan"]], "tanh() (in module torch)": [[1942, "torch.tanh"]], "tensor() (in module torch)": [[1943, "torch.tensor"]], "tensor_split() (in module torch)": [[1944, "torch.tensor_split"]], "tensordot() (in module torch)": [[1945, "torch.tensordot"]], "tile() (in module torch)": [[1946, "torch.tile"]], 
"topk() (in module torch)": [[1947, "torch.topk"]], "trace() (in module torch)": [[1948, "torch.trace"]], "transpose() (in module torch)": [[1949, "torch.transpose"]], "trapezoid() (in module torch)": [[1950, "torch.trapezoid"]], "trapz() (in module torch)": [[1951, "torch.trapz"]], "triangular_solve() (in module torch)": [[1952, "torch.triangular_solve"]], "tril() (in module torch)": [[1953, "torch.tril"]], "tril_indices() (in module torch)": [[1954, "torch.tril_indices"]], "triu() (in module torch)": [[1955, "torch.triu"]], "triu_indices() (in module torch)": [[1956, "torch.triu_indices"]], "true_divide() (in module torch)": [[1957, "torch.true_divide"]], "trunc() (in module torch)": [[1958, "torch.trunc"]], "unbind() (in module torch)": [[1959, "torch.unbind"]], "unflatten() (in module torch)": [[1960, "torch.unflatten"]], "unique() (in module torch)": [[1961, "torch.unique"]], "unique_consecutive() (in module torch)": [[1962, "torch.unique_consecutive"]], "unravel_index() (in module torch)": [[1963, "torch.unravel_index"]], "unsqueeze() (in module torch)": [[1964, "torch.unsqueeze"]], "use_deterministic_algorithms() (in module torch)": [[1965, "torch.use_deterministic_algorithms"]], "generate_methods_for_privateuse1_backend() (in module torch.utils)": [[1966, "torch.utils.generate_methods_for_privateuse1_backend"]], "get_cpp_backtrace() (in module torch.utils)": [[1967, "torch.utils.get_cpp_backtrace"]], "rename_privateuse1_backend() (in module torch.utils)": [[1968, "torch.utils.rename_privateuse1_backend"]], "set_module() (in module torch.utils)": [[1969, "torch.utils.set_module"]], "swap_tensors() (in module torch.utils)": [[1970, "torch.utils.swap_tensors"]], "vander() (in module torch)": [[1971, "torch.vander"]], "var() (in module torch)": [[1972, "torch.var"]], "var_mean() (in module torch)": [[1973, "torch.var_mean"]], "vdot() (in module torch)": [[1974, "torch.vdot"]], "view_as_complex() (in module torch)": [[1975, "torch.view_as_complex"]], "view_as_real() (in module torch)": [[1976, "torch.view_as_real"]], "vmap() (in module torch)": [[1977, "torch.vmap"]], "vsplit() (in module torch)": [[1978, "torch.vsplit"]], "vstack() (in module torch)": [[1979, "torch.vstack"]], "where() (in module torch)": [[1980, "torch.where"]], "xlogy() (in module torch)": [[1981, "torch.xlogy"]], "event (class in torch.xpu)": [[1982, "torch.xpu.Event"]], "elapsed_time() (torch.xpu.event method)": [[1982, "torch.xpu.Event.elapsed_time"]], "query() (torch.xpu.event method)": [[1982, "torch.xpu.Event.query"]], "record() (torch.xpu.event method)": [[1982, "torch.xpu.Event.record"]], "synchronize() (torch.xpu.event method)": [[1982, "torch.xpu.Event.synchronize"]], "wait() (torch.xpu.event method)": [[1982, "torch.xpu.Event.wait"]], "stream (class in torch.xpu)": [[1983, "torch.xpu.Stream"]], "query() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.query"]], "record_event() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.record_event"]], "synchronize() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.synchronize"]], "wait_event() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.wait_event"]], "wait_stream() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.wait_stream"]], "streamcontext (class in torch.xpu)": [[1984, "torch.xpu.StreamContext"]], "current_device() (in module torch.xpu)": [[1985, "torch.xpu.current_device"]], "current_stream() (in module torch.xpu)": [[1986, "torch.xpu.current_stream"]], "device (class in torch.xpu)": [[1987, "torch.xpu.device"]], "device_count() 
(in module torch.xpu)": [[1988, "torch.xpu.device_count"]], "device_of (class in torch.xpu)": [[1989, "torch.xpu.device_of"]], "empty_cache() (in module torch.xpu)": [[1990, "torch.xpu.empty_cache"]], "get_device_capability() (in module torch.xpu)": [[1991, "torch.xpu.get_device_capability"]], "get_device_name() (in module torch.xpu)": [[1992, "torch.xpu.get_device_name"]], "get_device_properties() (in module torch.xpu)": [[1993, "torch.xpu.get_device_properties"]], "get_rng_state() (in module torch.xpu)": [[1994, "torch.xpu.get_rng_state"]], "get_rng_state_all() (in module torch.xpu)": [[1995, "torch.xpu.get_rng_state_all"]], "init() (in module torch.xpu)": [[1996, "torch.xpu.init"]], "initial_seed() (in module torch.xpu)": [[1997, "torch.xpu.initial_seed"]], "is_available() (in module torch.xpu)": [[1998, "torch.xpu.is_available"]], "is_initialized() (in module torch.xpu)": [[1999, "torch.xpu.is_initialized"]], "manual_seed() (in module torch.xpu)": [[2000, "torch.xpu.manual_seed"]], "manual_seed_all() (in module torch.xpu)": [[2001, "torch.xpu.manual_seed_all"]], "seed() (in module torch.xpu)": [[2002, "torch.xpu.seed"]], "seed_all() (in module torch.xpu)": [[2003, "torch.xpu.seed_all"]], "set_device() (in module torch.xpu)": [[2004, "torch.xpu.set_device"]], "set_rng_state() (in module torch.xpu)": [[2005, "torch.xpu.set_rng_state"]], "set_rng_state_all() (in module torch.xpu)": [[2006, "torch.xpu.set_rng_state_all"]], "set_stream() (in module torch.xpu)": [[2007, "torch.xpu.set_stream"]], "stream() (in module torch.xpu)": [[2008, "torch.xpu.stream"]], "synchronize() (in module torch.xpu)": [[2009, "torch.xpu.synchronize"]], "zeros() (in module torch)": [[2010, "torch.zeros"]], "zeros_like() (in module torch)": [[2011, "torch.zeros_like"]], "download_url_to_file() (in module torch.hub)": [[2012, "torch.hub.download_url_to_file"]], "get_dir() (in module torch.hub)": [[2012, "torch.hub.get_dir"]], "help() (in module torch.hub)": [[2012, "torch.hub.help"]], "list() (in module torch.hub)": [[2012, "torch.hub.list"]], "load() (in module torch.hub)": [[2012, "torch.hub.load"]], "load_state_dict_from_url() (in module torch.hub)": [[2012, "torch.hub.load_state_dict_from_url"]], "set_dir() (in module torch.hub)": [[2012, "torch.hub.set_dir"]], "torch.hub": [[2012, "module-torch.hub"]], "pytorch_jit": [[2014, "envvar-PYTORCH_JIT"]], "environment variable": [[2014, "envvar-PYTORCH_JIT"]], "export() (in module torch.jit)": [[2014, "torch.jit.export"]], "torch.jit": [[2014, "module-torch.jit"]], "torch.jit.annotations": [[2014, "module-torch.jit.annotations"]], "torch.jit.frontend": [[2014, "module-torch.jit.frontend"]], "torch.jit.generate_bytecode": [[2014, "module-torch.jit.generate_bytecode"]], "torch.jit.mobile": [[2014, "module-torch.jit.mobile"]], "torch.jit.quantized": [[2014, "module-torch.jit.quantized"]], "torch.jit.supported_ops": [[2015, "module-torch.jit.supported_ops"]], "is_scripting() (in module torch.jit)": [[2016, "torch.jit.is_scripting"]], "is_tracing() (in module torch.jit)": [[2016, "torch.jit.is_tracing"]], "torch.jit.unsupported_tensor_ops": [[2019, "module-torch.jit.unsupported_tensor_ops"]], "torch.utils.jit": [[2020, "module-torch.utils.jit"]], "library (class in torch.library)": [[2021, "torch.library.Library"]], "custom_op() (in module torch.library)": [[2021, "torch.library.custom_op"]], "define() (in module torch.library)": [[2021, "torch.library.define"]], "define() (torch.library.library method)": [[2021, "torch.library.Library.define"]], "fallthrough_kernel() (in 
module torch.library)": [[2021, "torch.library.fallthrough_kernel"]], "get_ctx() (in module torch.library)": [[2021, "torch.library.get_ctx"]], "impl() (in module torch.library)": [[2021, "torch.library.impl"]], "impl() (torch.library.library method)": [[2021, "torch.library.Library.impl"]], "impl_abstract() (in module torch.library)": [[2021, "torch.library.impl_abstract"]], "opcheck() (in module torch.library)": [[2021, "torch.library.opcheck"]], "register_autograd() (in module torch.library)": [[2021, "torch.library.register_autograd"]], "register_fake() (in module torch.library)": [[2021, "torch.library.register_fake"]], "register_kernel() (in module torch.library)": [[2021, "torch.library.register_kernel"]], "torch.library": [[2021, "module-torch.library"]], "torch.linalg": [[2022, "module-torch.linalg"]], "torch._logging": [[2023, "module-torch._logging"]], "torch.masked": [[2024, "module-torch.masked"]], "torch.masked.maskedtensor": [[2024, "module-torch.masked.maskedtensor"]], "torch.masked.maskedtensor.binary": [[2024, "module-torch.masked.maskedtensor.binary"]], "torch.masked.maskedtensor.core": [[2024, "module-torch.masked.maskedtensor.core"]], "torch.masked.maskedtensor.creation": [[2024, "module-torch.masked.maskedtensor.creation"]], "torch.masked.maskedtensor.passthrough": [[2024, "module-torch.masked.maskedtensor.passthrough"]], "torch.masked.maskedtensor.reductions": [[2024, "module-torch.masked.maskedtensor.reductions"]], "torch.masked.maskedtensor.unary": [[2024, "module-torch.masked.maskedtensor.unary"]], "optimize_for_mobile() (in module torch.utils.mobile_optimizer)": [[2027, "torch.utils.mobile_optimizer.optimize_for_mobile"]], "load_url() (in module torch.utils.model_zoo)": [[2028, "torch.utils.model_zoo.load_url"]], "torch.utils.model_zoo": [[2028, "module-torch.utils.model_zoo"]], "moduletracker (class in torch.utils.module_tracker)": [[2029, "torch.utils.module_tracker.ModuleTracker"]], "torch.utils.module_tracker": [[2029, "module-torch.utils.module_tracker"]], "aggregation (class in torch.monitor)": [[2030, "torch.monitor.Aggregation"]], "event (class in torch.monitor)": [[2030, "torch.monitor.Event"]], "eventhandlerhandle (class in torch.monitor)": [[2030, "torch.monitor.EventHandlerHandle"]], "stat (class in torch.monitor)": [[2030, "torch.monitor.Stat"]], "tensorboardeventhandler (class in torch.monitor)": [[2030, "torch.monitor.TensorboardEventHandler"]], "__init__() (torch.monitor.event method)": [[2030, "torch.monitor.Event.__init__"]], "__init__() (torch.monitor.stat method)": [[2030, "torch.monitor.Stat.__init__"]], "__init__() (torch.monitor.tensorboardeventhandler method)": [[2030, "torch.monitor.TensorboardEventHandler.__init__"]], "add() (torch.monitor.stat method)": [[2030, "torch.monitor.Stat.add"]], "count (torch.monitor.stat property)": [[2030, "torch.monitor.Stat.count"]], "data (torch.monitor.event property)": [[2030, "torch.monitor.Event.data"]], "data_value_t (class in torch.monitor)": [[2030, "torch.monitor.data_value_t"]], "get() (torch.monitor.stat method)": [[2030, "torch.monitor.Stat.get"]], "log_event() (in module torch.monitor)": [[2030, "torch.monitor.log_event"]], "name (torch.monitor.aggregation property)": [[2030, "torch.monitor.Aggregation.name"]], "name (torch.monitor.event property)": [[2030, "torch.monitor.Event.name"]], "name (torch.monitor.stat property)": [[2030, "torch.monitor.Stat.name"]], "register_event_handler() (in module torch.monitor)": [[2030, "torch.monitor.register_event_handler"]], "timestamp (torch.monitor.event 
property)": [[2030, "torch.monitor.Event.timestamp"]], "torch.monitor": [[2030, "module-torch.monitor"]], "unregister_event_handler() (in module torch.monitor)": [[2030, "torch.monitor.unregister_event_handler"]], "torch.mps": [[2031, "module-torch.mps"]], "torch.mps.event": [[2031, "module-torch.mps.event"]], "torch.mps.profiler": [[2031, "module-torch.mps.profiler"]], "torch.mtia": [[2032, "module-torch.mtia"]], "spawncontext (class in torch.multiprocessing)": [[2033, "torch.multiprocessing.SpawnContext"]], "get_all_sharing_strategies() (in module torch.multiprocessing)": [[2033, "torch.multiprocessing.get_all_sharing_strategies"]], "get_sharing_strategy() (in module torch.multiprocessing)": [[2033, "torch.multiprocessing.get_sharing_strategy"]], "join() (torch.multiprocessing.spawncontext method)": [[2033, "torch.multiprocessing.SpawnContext.join"]], "set_sharing_strategy() (in module torch.multiprocessing)": [[2033, "torch.multiprocessing.set_sharing_strategy"]], "spawn() (in module torch.multiprocessing.spawn)": [[2033, "torch.multiprocessing.spawn.spawn"]], "torch.multiprocessing": [[2033, "module-torch.multiprocessing"]], "torch.multiprocessing.pool": [[2033, "module-torch.multiprocessing.pool"]], "torch.multiprocessing.queue": [[2033, "module-torch.multiprocessing.queue"]], "torch.multiprocessing.reductions": [[2033, "module-torch.multiprocessing.reductions"]], "torch.multiprocessing.spawn": [[2033, "module-torch.multiprocessing.spawn"]], "align_as() (torch.tensor method)": [[2035, "torch.Tensor.align_as"]], "align_to() (torch.tensor method)": [[2035, "torch.Tensor.align_to"]], "names (torch.tensor attribute)": [[2035, "torch.Tensor.names"]], "refine_names() (torch.tensor method)": [[2035, "torch.Tensor.refine_names"]], "rename() (torch.tensor method)": [[2035, "torch.Tensor.rename"]], "rename_() (torch.tensor method)": [[2035, "torch.Tensor.rename_"]], "as_nested_tensor() (in module torch.nested)": [[2036, "torch.nested.as_nested_tensor"]], "nested_tensor() (in module torch.nested)": [[2036, "torch.nested.nested_tensor"]], "to_padded_tensor() (in module torch.nested)": [[2036, "torch.nested.to_padded_tensor"]], "torch.nested": [[2036, "module-torch.nested"]], "torch.nn": [[2037, "module-torch.nn"]], "torch.nn.backends": [[2037, "module-torch.nn.backends"]], "torch.nn.backends.thnn": [[2037, "module-torch.nn.backends.thnn"]], "torch.nn.common_types": [[2037, "module-torch.nn.common_types"]], "torch.nn.cpp": [[2037, "module-torch.nn.cpp"]], "torch.nn.functional": [[2037, "module-torch.nn.functional"]], "torch.nn.grad": [[2037, "module-torch.nn.grad"]], "torch.nn.init": [[2037, "module-torch.nn.init"]], "torch.nn.modules": [[2037, "module-torch.nn.modules"]], "torch.nn.modules.activation": [[2037, "module-torch.nn.modules.activation"]], "torch.nn.modules.adaptive": [[2037, "module-torch.nn.modules.adaptive"]], "torch.nn.modules.batchnorm": [[2037, "module-torch.nn.modules.batchnorm"]], "torch.nn.modules.channelshuffle": [[2037, "module-torch.nn.modules.channelshuffle"]], "torch.nn.modules.container": [[2037, "module-torch.nn.modules.container"]], "torch.nn.modules.conv": [[2037, "module-torch.nn.modules.conv"]], "torch.nn.modules.distance": [[2037, "module-torch.nn.modules.distance"]], "torch.nn.modules.dropout": [[2037, "module-torch.nn.modules.dropout"]], "torch.nn.modules.flatten": [[2037, "module-torch.nn.modules.flatten"]], "torch.nn.modules.fold": [[2037, "module-torch.nn.modules.fold"]], "torch.nn.modules.instancenorm": [[2037, "module-torch.nn.modules.instancenorm"]], 
"torch.nn.modules.lazy": [[2037, "module-torch.nn.modules.lazy"]], "torch.nn.modules.linear": [[2037, "module-torch.nn.modules.linear"]], "torch.nn.modules.loss": [[2037, "module-torch.nn.modules.loss"]], "torch.nn.modules.module": [[2037, "module-torch.nn.modules.module"]], "torch.nn.modules.normalization": [[2037, "module-torch.nn.modules.normalization"]], "torch.nn.modules.padding": [[2037, "module-torch.nn.modules.padding"]], "torch.nn.modules.pixelshuffle": [[2037, "module-torch.nn.modules.pixelshuffle"]], "torch.nn.modules.pooling": [[2037, "module-torch.nn.modules.pooling"]], "torch.nn.modules.rnn": [[2037, "module-torch.nn.modules.rnn"]], "torch.nn.modules.sparse": [[2037, "module-torch.nn.modules.sparse"]], "torch.nn.modules.transformer": [[2037, "module-torch.nn.modules.transformer"]], "torch.nn.modules.upsampling": [[2037, "module-torch.nn.modules.upsampling"]], "torch.nn.modules.utils": [[2037, "module-torch.nn.modules.utils"]], "torch.nn.parallel": [[2037, "module-torch.nn.parallel"]], "torch.nn.parallel.comm": [[2037, "module-torch.nn.parallel.comm"]], "torch.nn.parallel.distributed": [[2037, "module-torch.nn.parallel.distributed"]], "torch.nn.parallel.parallel_apply": [[2037, "module-torch.nn.parallel.parallel_apply"]], "torch.nn.parallel.replicate": [[2037, "module-torch.nn.parallel.replicate"]], "torch.nn.parallel.scatter_gather": [[2037, "module-torch.nn.parallel.scatter_gather"]], "torch.nn.parameter": [[2037, "module-torch.nn.parameter"]], "torch.nn.utils": [[2037, "module-torch.nn.utils"]], "torch.nn.utils.clip_grad": [[2037, "module-torch.nn.utils.clip_grad"]], "torch.nn.utils.convert_parameters": [[2037, "module-torch.nn.utils.convert_parameters"]], "torch.nn.utils.fusion": [[2037, "module-torch.nn.utils.fusion"]], "torch.nn.utils.init": [[2037, "module-torch.nn.utils.init"]], "torch.nn.utils.memory_format": [[2037, "module-torch.nn.utils.memory_format"]], "torch.nn.utils.parametrizations": [[2037, "module-torch.nn.utils.parametrizations"]], "torch.nn.utils.parametrize": [[2037, "module-torch.nn.utils.parametrize"]], "torch.nn.utils.prune": [[2037, "module-torch.nn.utils.prune"]], "torch.nn.utils.rnn": [[2037, "module-torch.nn.utils.rnn"]], "torch.nn.utils.stateless": [[2037, "module-torch.nn.utils.stateless"]], "torch.nn.attention": [[2038, "module-torch.nn.attention"]], "torch.nn.attention.bias": [[2039, "module-torch.nn.attention.bias"]], "calculate_gain() (in module torch.nn.init)": [[2041, "torch.nn.init.calculate_gain"]], "constant_() (in module torch.nn.init)": [[2041, "torch.nn.init.constant_"]], "dirac_() (in module torch.nn.init)": [[2041, "torch.nn.init.dirac_"]], "eye_() (in module torch.nn.init)": [[2041, "torch.nn.init.eye_"]], "kaiming_normal_() (in module torch.nn.init)": [[2041, "torch.nn.init.kaiming_normal_"]], "kaiming_uniform_() (in module torch.nn.init)": [[2041, "torch.nn.init.kaiming_uniform_"]], "normal_() (in module torch.nn.init)": [[2041, "torch.nn.init.normal_"]], "ones_() (in module torch.nn.init)": [[2041, "torch.nn.init.ones_"]], "orthogonal_() (in module torch.nn.init)": [[2041, "torch.nn.init.orthogonal_"]], "sparse_() (in module torch.nn.init)": [[2041, "torch.nn.init.sparse_"]], "trunc_normal_() (in module torch.nn.init)": [[2041, "torch.nn.init.trunc_normal_"]], "uniform_() (in module torch.nn.init)": [[2041, "torch.nn.init.uniform_"]], "xavier_normal_() (in module torch.nn.init)": [[2041, "torch.nn.init.xavier_normal_"]], "xavier_uniform_() (in module torch.nn.init)": [[2041, "torch.nn.init.xavier_uniform_"]], "zeros_() (in 
module torch.nn.init)": [[2041, "torch.nn.init.zeros_"]], "add_safe_globals() (in module torch.serialization)": [[2062, "torch.serialization.add_safe_globals"]], "clear_safe_globals() (in module torch.serialization)": [[2062, "torch.serialization.clear_safe_globals"]], "get_default_load_endianness() (in module torch.serialization)": [[2062, "torch.serialization.get_default_load_endianness"]], "get_default_mmap_options() (in module torch.serialization)": [[2062, "torch.serialization.get_default_mmap_options"]], "get_safe_globals() (in module torch.serialization)": [[2062, "torch.serialization.get_safe_globals"]], "register_package() (in module torch.serialization)": [[2062, "torch.serialization.register_package"]], "set_default_load_endianness() (in module torch.serialization)": [[2062, "torch.serialization.set_default_load_endianness"]], "set_default_mmap_options() (in module torch.serialization)": [[2062, "torch.serialization.set_default_mmap_options"]], "torch.onnx.errors": [[2064, "module-torch.onnx.errors"]], "torch.onnx.operators": [[2064, "module-torch.onnx.operators"]], "torch.onnx.symbolic_caffe2": [[2064, "module-torch.onnx.symbolic_caffe2"]], "torch.onnx.symbolic_helper": [[2064, "module-torch.onnx.symbolic_helper"]], "torch.onnx.symbolic_opset10": [[2064, "module-torch.onnx.symbolic_opset10"]], "torch.onnx.symbolic_opset11": [[2064, "module-torch.onnx.symbolic_opset11"]], "torch.onnx.symbolic_opset12": [[2064, "module-torch.onnx.symbolic_opset12"]], "torch.onnx.symbolic_opset13": [[2064, "module-torch.onnx.symbolic_opset13"]], "torch.onnx.symbolic_opset14": [[2064, "module-torch.onnx.symbolic_opset14"]], "torch.onnx.symbolic_opset15": [[2064, "module-torch.onnx.symbolic_opset15"]], "torch.onnx.symbolic_opset16": [[2064, "module-torch.onnx.symbolic_opset16"]], "torch.onnx.symbolic_opset17": [[2064, "module-torch.onnx.symbolic_opset17"]], "torch.onnx.symbolic_opset18": [[2064, "module-torch.onnx.symbolic_opset18"]], "torch.onnx.symbolic_opset19": [[2064, "module-torch.onnx.symbolic_opset19"]], "torch.onnx.symbolic_opset20": [[2064, "module-torch.onnx.symbolic_opset20"]], "torch.onnx.symbolic_opset7": [[2064, "module-torch.onnx.symbolic_opset7"]], "torch.onnx.symbolic_opset8": [[2064, "module-torch.onnx.symbolic_opset8"]], "torch.onnx.symbolic_opset9": [[2064, "module-torch.onnx.symbolic_opset9"]], "torch.onnx.utils": [[2064, "module-torch.onnx.utils"]], "torch.onnx.verification": [[2064, "module-torch.onnx.verification"]], "diagnosticoptions (class in torch.onnx)": [[2065, "torch.onnx.DiagnosticOptions"]], "exportoptions (class in torch.onnx)": [[2065, "torch.onnx.ExportOptions"]], "invalidexportoptionserror (class in torch.onnx)": [[2065, "torch.onnx.InvalidExportOptionsError"]], "onnxprogram (class in torch.onnx)": [[2065, "torch.onnx.ONNXProgram"]], "onnxprogramserializer (class in torch.onnx)": [[2065, "torch.onnx.ONNXProgramSerializer"]], "onnxruntimeoptions (class in torch.onnx)": [[2065, "torch.onnx.ONNXRuntimeOptions"]], "onnxexportererror (class in torch.onnx)": [[2065, "torch.onnx.OnnxExporterError"]], "onnxregistry (class in torch.onnx)": [[2065, "torch.onnx.OnnxRegistry"]], "adapt_torch_inputs_to_onnx() (torch.onnx.onnxprogram method)": [[2065, "torch.onnx.ONNXProgram.adapt_torch_inputs_to_onnx"]], "adapt_torch_outputs_to_onnx() (torch.onnx.onnxprogram method)": [[2065, "torch.onnx.ONNXProgram.adapt_torch_outputs_to_onnx"]], "diagnostic_context (torch.onnx.onnxprogram property)": [[2065, "torch.onnx.ONNXProgram.diagnostic_context"]], "dynamo_export() (in module 
torch.onnx)": [[2065, "torch.onnx.dynamo_export"]], "enable_fake_mode() (in module torch.onnx)": [[2065, "torch.onnx.enable_fake_mode"]], "fake_context (torch.onnx.onnxprogram property)": [[2065, "torch.onnx.ONNXProgram.fake_context"]], "get_op_functions() (torch.onnx.onnxregistry method)": [[2065, "torch.onnx.OnnxRegistry.get_op_functions"]], "is_registered_op() (torch.onnx.onnxregistry method)": [[2065, "torch.onnx.OnnxRegistry.is_registered_op"]], "model_proto (torch.onnx.onnxprogram property)": [[2065, "torch.onnx.ONNXProgram.model_proto"]], "model_signature (torch.onnx.onnxprogram property)": [[2065, "torch.onnx.ONNXProgram.model_signature"]], "opset_version (torch.onnx.onnxregistry property)": [[2065, "torch.onnx.OnnxRegistry.opset_version"]], "register_op() (torch.onnx.onnxregistry method)": [[2065, "torch.onnx.OnnxRegistry.register_op"]], "save() (torch.onnx.onnxprogram method)": [[2065, "torch.onnx.ONNXProgram.save"]], "save_diagnostics() (torch.onnx.onnxprogram method)": [[2065, "torch.onnx.ONNXProgram.save_diagnostics"]], "serialize() (torch.onnx.onnxprogramserializer method)": [[2065, "torch.onnx.ONNXProgramSerializer.serialize"]], "is_onnxrt_backend_supported() (in module torch.onnx)": [[2066, "torch.onnx.is_onnxrt_backend_supported"]], "disable_log() (in module torch.onnx)": [[2067, "torch.onnx.disable_log"]], "enable_log() (in module torch.onnx)": [[2067, "torch.onnx.enable_log"]], "export() (in module torch.onnx)": [[2067, "torch.onnx.export"]], "export_to_pretty_string() (in module torch.onnx)": [[2067, "torch.onnx.export_to_pretty_string"]], "find_mismatch() (in module torch.onnx.verification)": [[2067, "torch.onnx.verification.find_mismatch"]], "is_in_onnx_export() (in module torch.onnx)": [[2067, "torch.onnx.is_in_onnx_export"]], "register_custom_op_symbolic() (in module torch.onnx)": [[2067, "torch.onnx.register_custom_op_symbolic"]], "select_model_mode_for_export() (in module torch.onnx)": [[2067, "torch.onnx.select_model_mode_for_export"]], "torch.onnx": [[2067, "module-torch.onnx"]], "unregister_custom_op_symbolic() (in module torch.onnx)": [[2067, "torch.onnx.unregister_custom_op_symbolic"]], "optimizer (class in torch.optim)": [[2069, "torch.optim.Optimizer"]], "torch.optim": [[2069, "module-torch.optim"]], "torch.optim.adadelta": [[2069, "module-torch.optim.adadelta"]], "torch.optim.adagrad": [[2069, "module-torch.optim.adagrad"]], "torch.optim.adam": [[2069, "module-torch.optim.adam"]], "torch.optim.adamax": [[2069, "module-torch.optim.adamax"]], "torch.optim.adamw": [[2069, "module-torch.optim.adamw"]], "torch.optim.asgd": [[2069, "module-torch.optim.asgd"]], "torch.optim.lbfgs": [[2069, "module-torch.optim.lbfgs"]], "torch.optim.lr_scheduler": [[2069, "module-torch.optim.lr_scheduler"]], "torch.optim.nadam": [[2069, "module-torch.optim.nadam"]], "torch.optim.optimizer": [[2069, "module-torch.optim.optimizer"]], "torch.optim.radam": [[2069, "module-torch.optim.radam"]], "torch.optim.rmsprop": [[2069, "module-torch.optim.rmsprop"]], "torch.optim.rprop": [[2069, "module-torch.optim.rprop"]], "torch.optim.sgd": [[2069, "module-torch.optim.sgd"]], "torch.optim.sparse_adam": [[2069, "module-torch.optim.sparse_adam"]], "torch.optim.swa_utils": [[2069, "module-torch.optim.swa_utils"]], "directory (class in torch.package)": [[2070, "torch.package.Directory"]], "emptymatcherror (class in torch.package)": [[2070, "torch.package.EmptyMatchError"]], "packageexporter (class in torch.package)": [[2070, "torch.package.PackageExporter"]], "packageimporter (class in 
torch.package)": [[2070, "torch.package.PackageImporter"]], "packagingerror (class in torch.package)": [[2070, "torch.package.PackagingError"]], "__init__() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.__init__"]], "__init__() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.__init__"]], "add_dependency() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.add_dependency"]], "all_paths() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.all_paths"]], "close() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.close"]], "denied_modules() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.denied_modules"]], "deny() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.deny"]], "dependency_graph_string() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.dependency_graph_string"]], "extern() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.extern"]], "externed_modules() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.externed_modules"]], "file_structure() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.file_structure"]], "get_rdeps() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.get_rdeps"]], "get_unique_id() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.get_unique_id"]], "has_file() (torch.package.directory method)": [[2070, "torch.package.Directory.has_file"]], "id() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.id"]], "import_module() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.import_module"]], "intern() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.intern"]], "interned_modules() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.interned_modules"]], "load_binary() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.load_binary"]], "load_pickle() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.load_pickle"]], "load_text() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.load_text"]], "mock() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.mock"]], "mocked_modules() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.mocked_modules"]], "python_version() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.python_version"]], "register_extern_hook() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.register_extern_hook"]], "register_intern_hook() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.register_intern_hook"]], "register_mock_hook() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.register_mock_hook"]], "save_binary() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_binary"]], "save_module() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_module"]], "save_pickle() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_pickle"]], "save_source_file() (torch.package.packageexporter method)": 
[[2070, "torch.package.PackageExporter.save_source_file"]], "save_source_string() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_source_string"]], "save_text() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_text"]], "torch.package": [[2070, "module-torch.package"]], "torch.package.analyze": [[2070, "module-torch.package.analyze"]], "torch.package.analyze.find_first_use_of_broken_modules": [[2070, "module-torch.package.analyze.find_first_use_of_broken_modules"]], "torch.package.analyze.is_from_package": [[2070, "module-torch.package.analyze.is_from_package"]], "torch.package.analyze.trace_dependencies": [[2070, "module-torch.package.analyze.trace_dependencies"]], "torch.package.file_structure_representation": [[2070, "module-torch.package.file_structure_representation"]], "torch.package.find_file_dependencies": [[2070, "module-torch.package.find_file_dependencies"]], "torch.package.glob_group": [[2070, "module-torch.package.glob_group"]], "torch.package.importer": [[2070, "module-torch.package.importer"]], "torch.package.package_exporter": [[2070, "module-torch.package.package_exporter"]], "torch.package.package_importer": [[2070, "module-torch.package.package_importer"]], "profileraction (class in torch.profiler)": [[2071, "torch.profiler.ProfilerAction"]], "profileractivity (class in torch.profiler)": [[2071, "torch.profiler.ProfilerActivity"]], "_kinetoprofile (class in torch.profiler)": [[2071, "torch.profiler._KinetoProfile"]], "add_metadata() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.add_metadata"]], "add_metadata_json() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.add_metadata_json"]], "events() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.events"]], "export_chrome_trace() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.export_chrome_trace"]], "export_memory_timeline() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.export_memory_timeline"]], "export_stacks() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.export_stacks"]], "is_available() (in module torch.profiler.itt)": [[2071, "torch.profiler.itt.is_available"]], "key_averages() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.key_averages"]], "mark() (in module torch.profiler.itt)": [[2071, "torch.profiler.itt.mark"]], "name (torch.profiler.profileractivity property)": [[2071, "torch.profiler.ProfilerActivity.name"]], "preset_metadata_json() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.preset_metadata_json"]], "profile (class in torch.profiler)": [[2071, "torch.profiler.profile"]], "range_pop() (in module torch.profiler.itt)": [[2071, "torch.profiler.itt.range_pop"]], "range_push() (in module torch.profiler.itt)": [[2071, "torch.profiler.itt.range_push"]], "schedule() (in module torch.profiler)": [[2071, "torch.profiler.schedule"]], "step() (torch.profiler.profile method)": [[2071, "torch.profiler.profile.step"]], "tensorboard_trace_handler() (in module torch.profiler)": [[2071, "torch.profiler.tensorboard_trace_handler"]], "torch.profiler": [[2071, "module-torch.profiler"]], "torch.profiler.itt": [[2071, "module-torch.profiler.itt"]], "torch.profiler.profiler": [[2071, "module-torch.profiler.profiler"]], "torch.profiler.python_tracer": [[2071, "module-torch.profiler.python_tracer"]], 
"torch.ao": [[2072, "module-torch.ao"]], "torch.ao.nn": [[2072, "module-torch.ao.nn"]], "torch.ao.nn.intrinsic.modules.fused": [[2072, "module-torch.ao.nn.intrinsic.modules.fused"]], "torch.ao.nn.intrinsic.qat.modules.conv_fused": [[2072, "module-torch.ao.nn.intrinsic.qat.modules.conv_fused"]], "torch.ao.nn.intrinsic.qat.modules.linear_fused": [[2072, "module-torch.ao.nn.intrinsic.qat.modules.linear_fused"]], "torch.ao.nn.intrinsic.qat.modules.linear_relu": [[2072, "module-torch.ao.nn.intrinsic.qat.modules.linear_relu"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu": [[2072, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu"]], "torch.ao.nn.intrinsic.quantized.modules.bn_relu": [[2072, "module-torch.ao.nn.intrinsic.quantized.modules.bn_relu"]], "torch.ao.nn.intrinsic.quantized.modules.conv_add": [[2072, "module-torch.ao.nn.intrinsic.quantized.modules.conv_add"]], "torch.ao.nn.intrinsic.quantized.modules.conv_relu": [[2072, "module-torch.ao.nn.intrinsic.quantized.modules.conv_relu"]], "torch.ao.nn.intrinsic.quantized.modules.linear_relu": [[2072, "module-torch.ao.nn.intrinsic.quantized.modules.linear_relu"]], "torch.ao.nn.qat.dynamic.modules.linear": [[2072, "module-torch.ao.nn.qat.dynamic.modules.linear"]], "torch.ao.nn.qat.modules.conv": [[2072, "module-torch.ao.nn.qat.modules.conv"]], "torch.ao.nn.qat.modules.embedding_ops": [[2072, "module-torch.ao.nn.qat.modules.embedding_ops"]], "torch.ao.nn.qat.modules.linear": [[2072, "module-torch.ao.nn.qat.modules.linear"]], "torch.ao.nn.quantizable": [[2072, "module-torch.ao.nn.quantizable"]], "torch.ao.nn.quantizable.modules": [[2072, "module-torch.ao.nn.quantizable.modules"]], "torch.ao.nn.quantizable.modules.activation": [[2072, "module-torch.ao.nn.quantizable.modules.activation"]], "torch.ao.nn.quantizable.modules.rnn": [[2072, "module-torch.ao.nn.quantizable.modules.rnn"]], "torch.ao.nn.quantized": [[2072, "module-torch.ao.nn.quantized"]], "torch.ao.nn.quantized.dynamic.modules.conv": [[2072, "module-torch.ao.nn.quantized.dynamic.modules.conv"]], "torch.ao.nn.quantized.dynamic.modules.linear": [[2072, "module-torch.ao.nn.quantized.dynamic.modules.linear"]], "torch.ao.nn.quantized.dynamic.modules.rnn": [[2072, "module-torch.ao.nn.quantized.dynamic.modules.rnn"]], "torch.ao.nn.quantized.modules.activation": [[2072, "module-torch.ao.nn.quantized.modules.activation"]], "torch.ao.nn.quantized.modules.batchnorm": [[2072, "module-torch.ao.nn.quantized.modules.batchnorm"]], "torch.ao.nn.quantized.modules.conv": [[2072, "module-torch.ao.nn.quantized.modules.conv"]], "torch.ao.nn.quantized.modules.dropout": [[2072, "module-torch.ao.nn.quantized.modules.dropout"]], "torch.ao.nn.quantized.modules.embedding_ops": [[2072, "module-torch.ao.nn.quantized.modules.embedding_ops"]], "torch.ao.nn.quantized.modules.functional_modules": [[2072, "module-torch.ao.nn.quantized.modules.functional_modules"]], "torch.ao.nn.quantized.modules.linear": [[2072, "module-torch.ao.nn.quantized.modules.linear"]], "torch.ao.nn.quantized.modules.normalization": [[2072, "module-torch.ao.nn.quantized.modules.normalization"]], "torch.ao.nn.quantized.modules.rnn": [[2072, "module-torch.ao.nn.quantized.modules.rnn"]], "torch.ao.nn.quantized.modules.utils": [[2072, "module-torch.ao.nn.quantized.modules.utils"]], "torch.ao.nn.quantized.reference": [[2072, "module-torch.ao.nn.quantized.reference"]], "torch.ao.nn.quantized.reference.modules": [[2072, "module-torch.ao.nn.quantized.reference.modules"]], "torch.ao.nn.quantized.reference.modules.conv": 
[[2072, "module-torch.ao.nn.quantized.reference.modules.conv"]], "torch.ao.nn.quantized.reference.modules.linear": [[2072, "module-torch.ao.nn.quantized.reference.modules.linear"]], "torch.ao.nn.quantized.reference.modules.rnn": [[2072, "module-torch.ao.nn.quantized.reference.modules.rnn"]], "torch.ao.nn.quantized.reference.modules.sparse": [[2072, "module-torch.ao.nn.quantized.reference.modules.sparse"]], "torch.ao.nn.quantized.reference.modules.utils": [[2072, "module-torch.ao.nn.quantized.reference.modules.utils"]], "torch.ao.nn.sparse": [[2072, "module-torch.ao.nn.sparse"]], "torch.ao.nn.sparse.quantized": [[2072, "module-torch.ao.nn.sparse.quantized"]], "torch.ao.nn.sparse.quantized.dynamic": [[2072, "module-torch.ao.nn.sparse.quantized.dynamic"]], "torch.ao.nn.sparse.quantized.dynamic.linear": [[2072, "module-torch.ao.nn.sparse.quantized.dynamic.linear"]], "torch.ao.nn.sparse.quantized.linear": [[2072, "module-torch.ao.nn.sparse.quantized.linear"]], "torch.ao.nn.sparse.quantized.utils": [[2072, "module-torch.ao.nn.sparse.quantized.utils"]], "torch.ao.ns": [[2072, "module-torch.ao.ns"]], "torch.ao.ns.fx": [[2072, "module-torch.ao.ns.fx"]], "torch.ao.ns.fx.graph_matcher": [[2072, "module-torch.ao.ns.fx.graph_matcher"]], "torch.ao.ns.fx.graph_passes": [[2072, "module-torch.ao.ns.fx.graph_passes"]], "torch.ao.ns.fx.mappings": [[2072, "module-torch.ao.ns.fx.mappings"]], "torch.ao.ns.fx.n_shadows_utils": [[2072, "module-torch.ao.ns.fx.n_shadows_utils"]], "torch.ao.ns.fx.ns_types": [[2072, "module-torch.ao.ns.fx.ns_types"]], "torch.ao.ns.fx.pattern_utils": [[2072, "module-torch.ao.ns.fx.pattern_utils"]], "torch.ao.ns.fx.qconfig_multi_mapping": [[2072, "module-torch.ao.ns.fx.qconfig_multi_mapping"]], "torch.ao.ns.fx.utils": [[2072, "module-torch.ao.ns.fx.utils"]], "torch.ao.ns.fx.weight_utils": [[2072, "module-torch.ao.ns.fx.weight_utils"]], "torch.ao.pruning": [[2072, "module-torch.ao.pruning"]], "torch.ao.pruning.scheduler": [[2072, "module-torch.ao.pruning.scheduler"]], "torch.ao.pruning.scheduler.base_scheduler": [[2072, "module-torch.ao.pruning.scheduler.base_scheduler"]], "torch.ao.pruning.scheduler.cubic_scheduler": [[2072, "module-torch.ao.pruning.scheduler.cubic_scheduler"]], "torch.ao.pruning.scheduler.lambda_scheduler": [[2072, "module-torch.ao.pruning.scheduler.lambda_scheduler"]], "torch.ao.pruning.sparsifier": [[2072, "module-torch.ao.pruning.sparsifier"]], "torch.ao.pruning.sparsifier.base_sparsifier": [[2072, "module-torch.ao.pruning.sparsifier.base_sparsifier"]], "torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier": [[2072, "module-torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier"]], "torch.ao.pruning.sparsifier.utils": [[2072, "module-torch.ao.pruning.sparsifier.utils"]], "torch.ao.pruning.sparsifier.weight_norm_sparsifier": [[2072, "module-torch.ao.pruning.sparsifier.weight_norm_sparsifier"]], "torch.ao.quantization": [[2072, "module-torch.ao.quantization"]], "torch.ao.quantization.backend_config": [[2072, "module-torch.ao.quantization.backend_config"]], "torch.ao.quantization.backend_config.backend_config": [[2072, "module-torch.ao.quantization.backend_config.backend_config"]], "torch.ao.quantization.backend_config.executorch": [[2072, "module-torch.ao.quantization.backend_config.executorch"]], "torch.ao.quantization.backend_config.fbgemm": [[2072, "module-torch.ao.quantization.backend_config.fbgemm"]], "torch.ao.quantization.backend_config.native": [[2072, "module-torch.ao.quantization.backend_config.native"]], 
"torch.ao.quantization.backend_config.observation_type": [[2072, "module-torch.ao.quantization.backend_config.observation_type"]], "torch.ao.quantization.backend_config.onednn": [[2072, "module-torch.ao.quantization.backend_config.onednn"]], "torch.ao.quantization.backend_config.qnnpack": [[2072, "module-torch.ao.quantization.backend_config.qnnpack"]], "torch.ao.quantization.backend_config.tensorrt": [[2072, "module-torch.ao.quantization.backend_config.tensorrt"]], "torch.ao.quantization.backend_config.utils": [[2072, "module-torch.ao.quantization.backend_config.utils"]], "torch.ao.quantization.backend_config.x86": [[2072, "module-torch.ao.quantization.backend_config.x86"]], "torch.ao.quantization.fake_quantize": [[2072, "module-torch.ao.quantization.fake_quantize"]], "torch.ao.quantization.fuse_modules": [[2072, "module-torch.ao.quantization.fuse_modules"]], "torch.ao.quantization.fuser_method_mappings": [[2072, "module-torch.ao.quantization.fuser_method_mappings"]], "torch.ao.quantization.fx": [[2072, "module-torch.ao.quantization.fx"]], "torch.ao.quantization.fx.convert": [[2072, "module-torch.ao.quantization.fx.convert"]], "torch.ao.quantization.fx.custom_config": [[2072, "module-torch.ao.quantization.fx.custom_config"]], "torch.ao.quantization.fx.fuse": [[2072, "module-torch.ao.quantization.fx.fuse"]], "torch.ao.quantization.fx.fuse_handler": [[2072, "module-torch.ao.quantization.fx.fuse_handler"]], "torch.ao.quantization.fx.graph_module": [[2072, "module-torch.ao.quantization.fx.graph_module"]], "torch.ao.quantization.fx.lower_to_fbgemm": [[2072, "module-torch.ao.quantization.fx.lower_to_fbgemm"]], "torch.ao.quantization.fx.lower_to_qnnpack": [[2072, "module-torch.ao.quantization.fx.lower_to_qnnpack"]], "torch.ao.quantization.fx.lstm_utils": [[2072, "module-torch.ao.quantization.fx.lstm_utils"]], "torch.ao.quantization.fx.match_utils": [[2072, "module-torch.ao.quantization.fx.match_utils"]], "torch.ao.quantization.fx.pattern_utils": [[2072, "module-torch.ao.quantization.fx.pattern_utils"]], "torch.ao.quantization.fx.prepare": [[2072, "module-torch.ao.quantization.fx.prepare"]], "torch.ao.quantization.fx.qconfig_mapping_utils": [[2072, "module-torch.ao.quantization.fx.qconfig_mapping_utils"]], "torch.ao.quantization.fx.quantize_handler": [[2072, "module-torch.ao.quantization.fx.quantize_handler"]], "torch.ao.quantization.fx.tracer": [[2072, "module-torch.ao.quantization.fx.tracer"]], "torch.ao.quantization.fx.utils": [[2072, "module-torch.ao.quantization.fx.utils"]], "torch.ao.quantization.observer": [[2072, "module-torch.ao.quantization.observer"]], "torch.ao.quantization.pt2e.duplicate_dq_pass": [[2072, "module-torch.ao.quantization.pt2e.duplicate_dq_pass"]], "torch.ao.quantization.pt2e.export_utils": [[2072, "module-torch.ao.quantization.pt2e.export_utils"]], "torch.ao.quantization.pt2e.graph_utils": [[2072, "module-torch.ao.quantization.pt2e.graph_utils"]], "torch.ao.quantization.pt2e.port_metadata_pass": [[2072, "module-torch.ao.quantization.pt2e.port_metadata_pass"]], "torch.ao.quantization.pt2e.prepare": [[2072, "module-torch.ao.quantization.pt2e.prepare"]], "torch.ao.quantization.pt2e.qat_utils": [[2072, "module-torch.ao.quantization.pt2e.qat_utils"]], "torch.ao.quantization.pt2e.representation.rewrite": [[2072, "module-torch.ao.quantization.pt2e.representation.rewrite"]], "torch.ao.quantization.pt2e.utils": [[2072, "module-torch.ao.quantization.pt2e.utils"]], "torch.ao.quantization.qconfig": [[2072, "module-torch.ao.quantization.qconfig"]], 
"torch.ao.quantization.qconfig_mapping": [[2072, "module-torch.ao.quantization.qconfig_mapping"]], "torch.ao.quantization.quant_type": [[2072, "module-torch.ao.quantization.quant_type"]], "torch.ao.quantization.quantization_mappings": [[2072, "module-torch.ao.quantization.quantization_mappings"]], "torch.ao.quantization.quantize_fx": [[2072, "module-torch.ao.quantization.quantize_fx"]], "torch.ao.quantization.quantize_jit": [[2072, "module-torch.ao.quantization.quantize_jit"]], "torch.ao.quantization.quantize_pt2e": [[2072, "module-torch.ao.quantization.quantize_pt2e"]], "torch.ao.quantization.quantizer.composable_quantizer": [[2072, "module-torch.ao.quantization.quantizer.composable_quantizer"]], "torch.ao.quantization.quantizer.embedding_quantizer": [[2072, "module-torch.ao.quantization.quantizer.embedding_quantizer"]], "torch.ao.quantization.quantizer.quantizer": [[2072, "module-torch.ao.quantization.quantizer.quantizer"]], "torch.ao.quantization.quantizer.utils": [[2072, "module-torch.ao.quantization.quantizer.utils"]], "torch.ao.quantization.quantizer.x86_inductor_quantizer": [[2072, "module-torch.ao.quantization.quantizer.x86_inductor_quantizer"]], "torch.ao.quantization.quantizer.xnnpack_quantizer": [[2072, "module-torch.ao.quantization.quantizer.xnnpack_quantizer"]], "torch.ao.quantization.quantizer.xnnpack_quantizer_utils": [[2072, "module-torch.ao.quantization.quantizer.xnnpack_quantizer_utils"]], "torch.ao.quantization.stubs": [[2072, "module-torch.ao.quantization.stubs"]], "torch.ao.quantization.utils": [[2072, "module-torch.ao.quantization.utils"]], "torch.nn.intrinsic.modules.fused": [[2072, "module-torch.nn.intrinsic.modules.fused"]], "torch.nn.intrinsic.qat.modules.conv_fused": [[2072, "module-torch.nn.intrinsic.qat.modules.conv_fused"]], "torch.nn.intrinsic.qat.modules.linear_fused": [[2072, "module-torch.nn.intrinsic.qat.modules.linear_fused"]], "torch.nn.intrinsic.qat.modules.linear_relu": [[2072, "module-torch.nn.intrinsic.qat.modules.linear_relu"]], "torch.nn.intrinsic.quantized.dynamic.modules.linear_relu": [[2072, "module-torch.nn.intrinsic.quantized.dynamic.modules.linear_relu"]], "torch.nn.intrinsic.quantized.modules.bn_relu": [[2072, "module-torch.nn.intrinsic.quantized.modules.bn_relu"]], "torch.nn.intrinsic.quantized.modules.conv_relu": [[2072, "module-torch.nn.intrinsic.quantized.modules.conv_relu"]], "torch.nn.intrinsic.quantized.modules.linear_relu": [[2072, "module-torch.nn.intrinsic.quantized.modules.linear_relu"]], "torch.nn.qat.dynamic.modules.linear": [[2072, "module-torch.nn.qat.dynamic.modules.linear"]], "torch.nn.qat.modules.conv": [[2072, "module-torch.nn.qat.modules.conv"]], "torch.nn.qat.modules.embedding_ops": [[2072, "module-torch.nn.qat.modules.embedding_ops"]], "torch.nn.qat.modules.linear": [[2072, "module-torch.nn.qat.modules.linear"]], "torch.nn.quantizable.modules.activation": [[2072, "module-torch.nn.quantizable.modules.activation"]], "torch.nn.quantizable.modules.rnn": [[2072, "module-torch.nn.quantizable.modules.rnn"]], "torch.nn.quantized.dynamic.modules.conv": [[2072, "module-torch.nn.quantized.dynamic.modules.conv"]], "torch.nn.quantized.dynamic.modules.linear": [[2072, "module-torch.nn.quantized.dynamic.modules.linear"]], "torch.nn.quantized.dynamic.modules.rnn": [[2072, "module-torch.nn.quantized.dynamic.modules.rnn"]], "torch.nn.quantized.functional": [[2072, "module-torch.nn.quantized.functional"]], "torch.nn.quantized.modules.activation": [[2072, "module-torch.nn.quantized.modules.activation"]], 
"torch.nn.quantized.modules.batchnorm": [[2072, "module-torch.nn.quantized.modules.batchnorm"]], "torch.nn.quantized.modules.conv": [[2072, "module-torch.nn.quantized.modules.conv"]], "torch.nn.quantized.modules.dropout": [[2072, "module-torch.nn.quantized.modules.dropout"]], "torch.nn.quantized.modules.embedding_ops": [[2072, "module-torch.nn.quantized.modules.embedding_ops"]], "torch.nn.quantized.modules.functional_modules": [[2072, "module-torch.nn.quantized.modules.functional_modules"]], "torch.nn.quantized.modules.linear": [[2072, "module-torch.nn.quantized.modules.linear"]], "torch.nn.quantized.modules.normalization": [[2072, "module-torch.nn.quantized.modules.normalization"]], "torch.nn.quantized.modules.rnn": [[2072, "module-torch.nn.quantized.modules.rnn"]], "torch.nn.quantized.modules.utils": [[2072, "module-torch.nn.quantized.modules.utils"]], "torch.quantization.fake_quantize": [[2072, "module-torch.quantization.fake_quantize"]], "torch.quantization.fuse_modules": [[2072, "module-torch.quantization.fuse_modules"]], "torch.quantization.fuser_method_mappings": [[2072, "module-torch.quantization.fuser_method_mappings"]], "torch.quantization.fx.convert": [[2072, "module-torch.quantization.fx.convert"]], "torch.quantization.fx.fuse": [[2072, "module-torch.quantization.fx.fuse"]], "torch.quantization.fx.fusion_patterns": [[2072, "module-torch.quantization.fx.fusion_patterns"]], "torch.quantization.fx.graph_module": [[2072, "module-torch.quantization.fx.graph_module"]], "torch.quantization.fx.match_utils": [[2072, "module-torch.quantization.fx.match_utils"]], "torch.quantization.fx.pattern_utils": [[2072, "module-torch.quantization.fx.pattern_utils"]], "torch.quantization.fx.prepare": [[2072, "module-torch.quantization.fx.prepare"]], "torch.quantization.fx.quantization_patterns": [[2072, "module-torch.quantization.fx.quantization_patterns"]], "torch.quantization.fx.quantization_types": [[2072, "module-torch.quantization.fx.quantization_types"]], "torch.quantization.fx.utils": [[2072, "module-torch.quantization.fx.utils"]], "torch.quantization.observer": [[2072, "module-torch.quantization.observer"]], "torch.quantization.qconfig": [[2072, "module-torch.quantization.qconfig"]], "torch.quantization.quant_type": [[2072, "module-torch.quantization.quant_type"]], "torch.quantization.quantization_mappings": [[2072, "module-torch.quantization.quantization_mappings"]], "torch.quantization.quantize": [[2072, "module-torch.quantization.quantize"]], "torch.quantization.quantize_fx": [[2072, "module-torch.quantization.quantize_fx"]], "torch.quantization.quantize_jit": [[2072, "module-torch.quantization.quantize_jit"]], "torch.quantization.stubs": [[2072, "module-torch.quantization.stubs"]], "torch.quantization.utils": [[2072, "module-torch.quantization.utils"]], "torch.ao.nn.intrinsic": [[2075, "module-torch.ao.nn.intrinsic"]], "torch.ao.nn.intrinsic.modules": [[2075, "module-torch.ao.nn.intrinsic.modules"]], "torch.ao.nn.intrinsic.qat": [[2075, "module-torch.ao.nn.intrinsic.qat"]], "torch.ao.nn.intrinsic.qat.modules": [[2075, "module-torch.ao.nn.intrinsic.qat.modules"]], "torch.ao.nn.intrinsic.quantized": [[2075, "module-torch.ao.nn.intrinsic.quantized"]], "torch.ao.nn.intrinsic.quantized.dynamic": [[2075, "module-torch.ao.nn.intrinsic.quantized.dynamic"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules": [[2075, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules"]], "torch.ao.nn.intrinsic.quantized.modules": [[2075, "module-torch.ao.nn.intrinsic.quantized.modules"]], "torch.ao.nn.qat": 
[[2075, "module-torch.ao.nn.qat"]], "torch.ao.nn.qat.dynamic": [[2075, "module-torch.ao.nn.qat.dynamic"]], "torch.ao.nn.qat.dynamic.modules": [[2075, "module-torch.ao.nn.qat.dynamic.modules"]], "torch.ao.nn.qat.modules": [[2075, "module-torch.ao.nn.qat.modules"]], "torch.ao.nn.quantized.dynamic": [[2075, "module-torch.ao.nn.quantized.dynamic"]], "torch.ao.nn.quantized.dynamic.modules": [[2075, "module-torch.ao.nn.quantized.dynamic.modules"]], "torch.ao.nn.quantized.functional": [[2075, "module-torch.ao.nn.quantized.functional"]], "torch.ao.nn.quantized.modules": [[2075, "module-torch.ao.nn.quantized.modules"]], "torch.ao.quantization.pt2e": [[2075, "module-torch.ao.quantization.pt2e"]], "torch.ao.quantization.pt2e.generate_numeric_debug_handle": [[2075, "module-torch.ao.quantization.pt2e.generate_numeric_debug_handle"]], "torch.ao.quantization.pt2e.representation": [[2075, "module-torch.ao.quantization.pt2e.representation"]], "torch.ao.quantization.quantizer": [[2075, "module-torch.ao.quantization.quantizer"]], "torch.nn.intrinsic": [[2075, "module-torch.nn.intrinsic"]], "torch.nn.intrinsic.modules": [[2075, "module-torch.nn.intrinsic.modules"]], "torch.nn.intrinsic.qat": [[2075, "module-torch.nn.intrinsic.qat"]], "torch.nn.intrinsic.qat.modules": [[2075, "module-torch.nn.intrinsic.qat.modules"]], "torch.nn.intrinsic.quantized": [[2075, "module-torch.nn.intrinsic.quantized"]], "torch.nn.intrinsic.quantized.dynamic": [[2075, "module-torch.nn.intrinsic.quantized.dynamic"]], "torch.nn.intrinsic.quantized.dynamic.modules": [[2075, "module-torch.nn.intrinsic.quantized.dynamic.modules"]], "torch.nn.intrinsic.quantized.modules": [[2075, "module-torch.nn.intrinsic.quantized.modules"]], "torch.nn.qat": [[2075, "module-torch.nn.qat"]], "torch.nn.qat.dynamic": [[2075, "module-torch.nn.qat.dynamic"]], "torch.nn.qat.dynamic.modules": [[2075, "module-torch.nn.qat.dynamic.modules"]], "torch.nn.qat.modules": [[2075, "module-torch.nn.qat.modules"]], "torch.nn.quantizable": [[2075, "module-torch.nn.quantizable"]], "torch.nn.quantizable.modules": [[2075, "module-torch.nn.quantizable.modules"]], "torch.nn.quantized": [[2075, "module-torch.nn.quantized"]], "torch.nn.quantized.dynamic": [[2075, "module-torch.nn.quantized.dynamic"]], "torch.nn.quantized.dynamic.modules": [[2075, "module-torch.nn.quantized.dynamic.modules"]], "torch.nn.quantized.modules": [[2075, "module-torch.nn.quantized.modules"]], "torch.quantization": [[2075, "module-torch.quantization"]], "torch.quantization.fx": [[2075, "module-torch.quantization.fx"]], "fork_rng() (in module torch.random)": [[2076, "torch.random.fork_rng"]], "get_rng_state() (in module torch.random)": [[2076, "torch.random.get_rng_state"]], "initial_seed() (in module torch.random)": [[2076, "torch.random.initial_seed"]], "manual_seed() (in module torch.random)": [[2076, "torch.random.manual_seed"]], "seed() (in module torch.random)": [[2076, "torch.random.seed"]], "set_rng_state() (in module torch.random)": [[2076, "torch.random.set_rng_state"]], "torch.random": [[2076, "module-torch.random"]], "backendtype (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.BackendType"]], "pyrref (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.PyRRef"]], "remotemodule (class in torch.distributed.nn.api.remote_module)": [[2077, "torch.distributed.nn.api.remote_module.RemoteModule"]], "rpcbackendoptions (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.RpcBackendOptions"]], "tensorpiperpcbackendoptions (class in torch.distributed.rpc)": 
[[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions"]], "workerinfo (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.WorkerInfo"]], "async_execution() (in module torch.distributed.rpc.functions)": [[2077, "torch.distributed.rpc.functions.async_execution"]], "backward() (in module torch.distributed.autograd)": [[2077, "torch.distributed.autograd.backward"]], "backward() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.backward"]], "confirmed_by_owner() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.confirmed_by_owner"]], "context (class in torch.distributed.autograd)": [[2077, "torch.distributed.autograd.context"]], "device_maps (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.device_maps"]], "devices (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.devices"]], "get_gradients() (in module torch.distributed.autograd)": [[2077, "torch.distributed.autograd.get_gradients"]], "get_module_rref() (torch.distributed.nn.api.remote_module.remotemodule method)": [[2077, "torch.distributed.nn.api.remote_module.RemoteModule.get_module_rref"]], "get_worker_info() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.get_worker_info"]], "id (torch.distributed.rpc.workerinfo property)": [[2077, "torch.distributed.rpc.WorkerInfo.id"]], "init_method (torch.distributed.rpc.rpcbackendoptions property)": [[2077, "torch.distributed.rpc.RpcBackendOptions.init_method"]], "init_method (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.init_method"]], "init_rpc() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.init_rpc"]], "is_owner() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.is_owner"]], "local_value() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.local_value"]], "name (torch.distributed.rpc.workerinfo property)": [[2077, "torch.distributed.rpc.WorkerInfo.name"]], "num_worker_threads (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.num_worker_threads"]], "owner() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.owner"]], "owner_name() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.owner_name"]], "remote() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.remote"]], "remote() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.remote"]], "remote_parameters() (torch.distributed.nn.api.remote_module.remotemodule method)": [[2077, "torch.distributed.nn.api.remote_module.RemoteModule.remote_parameters"]], "rpc_async() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.rpc_async"]], "rpc_async() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.rpc_async"]], "rpc_sync() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.rpc_sync"]], "rpc_sync() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.rpc_sync"]], "rpc_timeout (torch.distributed.rpc.rpcbackendoptions property)": [[2077, "torch.distributed.rpc.RpcBackendOptions.rpc_timeout"]], "rpc_timeout (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, 
"torch.distributed.rpc.TensorPipeRpcBackendOptions.rpc_timeout"]], "set_device_map() (torch.distributed.rpc.tensorpiperpcbackendoptions method)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.set_device_map"]], "set_devices() (torch.distributed.rpc.tensorpiperpcbackendoptions method)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.set_devices"]], "shutdown() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.shutdown"]], "to_here() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.to_here"]], "torch.distributed.autograd": [[2077, "module-torch.distributed.autograd"]], "torch.distributed.rpc": [[2077, "module-torch.distributed.rpc"]], "torch.signal": [[2080, "module-torch.signal"]], "torch.signal.windows": [[2080, "module-torch.signal.windows"]], "size (class in torch)": [[2081, "torch.Size"]], "count() (torch.size method)": [[2081, "torch.Size.count"]], "index() (torch.size method)": [[2081, "torch.Size.index"]], "numel() (torch.size method)": [[2081, "torch.Size.numel"]], "torch.sparse": [[2082, "module-torch.sparse"]], "airy_ai() (in module torch.special)": [[2083, "torch.special.airy_ai"]], "bessel_j0() (in module torch.special)": [[2083, "torch.special.bessel_j0"]], "bessel_j1() (in module torch.special)": [[2083, "torch.special.bessel_j1"]], "digamma() (in module torch.special)": [[2083, "torch.special.digamma"]], "entr() (in module torch.special)": [[2083, "torch.special.entr"]], "erf() (in module torch.special)": [[2083, "torch.special.erf"]], "erfc() (in module torch.special)": [[2083, "torch.special.erfc"]], "erfcx() (in module torch.special)": [[2083, "torch.special.erfcx"]], "erfinv() (in module torch.special)": [[2083, "torch.special.erfinv"]], "exp2() (in module torch.special)": [[2083, "torch.special.exp2"]], "expit() (in module torch.special)": [[2083, "torch.special.expit"]], "expm1() (in module torch.special)": [[2083, "torch.special.expm1"]], "gammainc() (in module torch.special)": [[2083, "torch.special.gammainc"]], "gammaincc() (in module torch.special)": [[2083, "torch.special.gammaincc"]], "gammaln() (in module torch.special)": [[2083, "torch.special.gammaln"]], "i0() (in module torch.special)": [[2083, "torch.special.i0"]], "i0e() (in module torch.special)": [[2083, "torch.special.i0e"]], "i1() (in module torch.special)": [[2083, "torch.special.i1"]], "i1e() (in module torch.special)": [[2083, "torch.special.i1e"]], "log1p() (in module torch.special)": [[2083, "torch.special.log1p"]], "log_ndtr() (in module torch.special)": [[2083, "torch.special.log_ndtr"]], "log_softmax() (in module torch.special)": [[2083, "torch.special.log_softmax"]], "logit() (in module torch.special)": [[2083, "torch.special.logit"]], "logsumexp() (in module torch.special)": [[2083, "torch.special.logsumexp"]], "multigammaln() (in module torch.special)": [[2083, "torch.special.multigammaln"]], "ndtr() (in module torch.special)": [[2083, "torch.special.ndtr"]], "ndtri() (in module torch.special)": [[2083, "torch.special.ndtri"]], "polygamma() (in module torch.special)": [[2083, "torch.special.polygamma"]], "psi() (in module torch.special)": [[2083, "torch.special.psi"]], "round() (in module torch.special)": [[2083, "torch.special.round"]], "scaled_modified_bessel_k0() (in module torch.special)": [[2083, "torch.special.scaled_modified_bessel_k0"]], "scaled_modified_bessel_k1() (in module torch.special)": [[2083, "torch.special.scaled_modified_bessel_k1"]], "sinc() (in module torch.special)": [[2083, "torch.special.sinc"]], 
"softmax() (in module torch.special)": [[2083, "torch.special.softmax"]], "spherical_bessel_j0() (in module torch.special)": [[2083, "torch.special.spherical_bessel_j0"]], "torch.special": [[2083, "module-torch.special"]], "xlog1py() (in module torch.special)": [[2083, "torch.special.xlog1py"]], "xlogy() (in module torch.special)": [[2083, "torch.special.xlogy"]], "zeta() (in module torch.special)": [[2083, "torch.special.zeta"]], "bfloat16storage (class in torch)": [[2084, "torch.BFloat16Storage"]], "boolstorage (class in torch)": [[2084, "torch.BoolStorage"]], "bytestorage (class in torch)": [[2084, "torch.ByteStorage"]], "charstorage (class in torch)": [[2084, "torch.CharStorage"]], "complexdoublestorage (class in torch)": [[2084, "torch.ComplexDoubleStorage"]], "complexfloatstorage (class in torch)": [[2084, "torch.ComplexFloatStorage"]], "doublestorage (class in torch)": [[2084, "torch.DoubleStorage"]], "floatstorage (class in torch)": [[2084, "torch.FloatStorage"]], "halfstorage (class in torch)": [[2084, "torch.HalfStorage"]], "intstorage (class in torch)": [[2084, "torch.IntStorage"]], "longstorage (class in torch)": [[2084, "torch.LongStorage"]], "qint32storage (class in torch)": [[2084, "torch.QInt32Storage"]], "qint8storage (class in torch)": [[2084, "torch.QInt8Storage"]], "quint2x4storage (class in torch)": [[2084, "torch.QUInt2x4Storage"]], "quint4x2storage (class in torch)": [[2084, "torch.QUInt4x2Storage"]], "quint8storage (class in torch)": [[2084, "torch.QUInt8Storage"]], "shortstorage (class in torch)": [[2084, "torch.ShortStorage"]], "typedstorage (class in torch)": [[2084, "torch.TypedStorage"]], "untypedstorage (class in torch)": [[2084, "torch.UntypedStorage"]], "bfloat16() (torch.typedstorage method)": [[2084, "torch.TypedStorage.bfloat16"]], "bfloat16() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.bfloat16"]], "bool() (torch.typedstorage method)": [[2084, "torch.TypedStorage.bool"]], "bool() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.bool"]], "byte() (torch.typedstorage method)": [[2084, "torch.TypedStorage.byte"]], "byte() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.byte"]], "byteswap() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.byteswap"]], "char() (torch.typedstorage method)": [[2084, "torch.TypedStorage.char"]], "char() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.char"]], "clone() (torch.typedstorage method)": [[2084, "torch.TypedStorage.clone"]], "clone() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.clone"]], "complex_double() (torch.typedstorage method)": [[2084, "torch.TypedStorage.complex_double"]], "complex_double() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.complex_double"]], "complex_float() (torch.typedstorage method)": [[2084, "torch.TypedStorage.complex_float"]], "complex_float() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.complex_float"]], "copy_() (torch.typedstorage method)": [[2084, "torch.TypedStorage.copy_"]], "copy_() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.copy_"]], "cpu() (torch.typedstorage method)": [[2084, "torch.TypedStorage.cpu"]], "cpu() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.cpu"]], "cuda() (torch.typedstorage method)": [[2084, "torch.TypedStorage.cuda"]], "cuda() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.cuda"]], "data_ptr() (torch.typedstorage method)": [[2084, "torch.TypedStorage.data_ptr"]], "data_ptr() (torch.untypedstorage 
method)": [[2084, "torch.UntypedStorage.data_ptr"]], "device (torch.typedstorage property)": [[2084, "torch.TypedStorage.device"]], "device (torch.untypedstorage attribute)": [[2084, "torch.UntypedStorage.device"]], "double() (torch.typedstorage method)": [[2084, "torch.TypedStorage.double"]], "double() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.double"]], "dtype (torch.bfloat16storage attribute)": [[2084, "torch.BFloat16Storage.dtype"]], "dtype (torch.boolstorage attribute)": [[2084, "torch.BoolStorage.dtype"]], "dtype (torch.bytestorage attribute)": [[2084, "torch.ByteStorage.dtype"]], "dtype (torch.charstorage attribute)": [[2084, "torch.CharStorage.dtype"]], "dtype (torch.complexdoublestorage attribute)": [[2084, "torch.ComplexDoubleStorage.dtype"]], "dtype (torch.complexfloatstorage attribute)": [[2084, "torch.ComplexFloatStorage.dtype"]], "dtype (torch.doublestorage attribute)": [[2084, "torch.DoubleStorage.dtype"]], "dtype (torch.floatstorage attribute)": [[2084, "torch.FloatStorage.dtype"]], "dtype (torch.halfstorage attribute)": [[2084, "torch.HalfStorage.dtype"]], "dtype (torch.intstorage attribute)": [[2084, "torch.IntStorage.dtype"]], "dtype (torch.longstorage attribute)": [[2084, "torch.LongStorage.dtype"]], "dtype (torch.qint32storage attribute)": [[2084, "torch.QInt32Storage.dtype"]], "dtype (torch.qint8storage attribute)": [[2084, "torch.QInt8Storage.dtype"]], "dtype (torch.quint2x4storage attribute)": [[2084, "torch.QUInt2x4Storage.dtype"]], "dtype (torch.quint4x2storage attribute)": [[2084, "torch.QUInt4x2Storage.dtype"]], "dtype (torch.quint8storage attribute)": [[2084, "torch.QUInt8Storage.dtype"]], "dtype (torch.shortstorage attribute)": [[2084, "torch.ShortStorage.dtype"]], "dtype (torch.typedstorage attribute)": [[2084, "torch.TypedStorage.dtype"]], "element_size() (torch.typedstorage method)": [[2084, "torch.TypedStorage.element_size"]], "element_size() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.element_size"]], "filename (torch.typedstorage property)": [[2084, "torch.TypedStorage.filename"]], "filename (torch.untypedstorage property)": [[2084, "torch.UntypedStorage.filename"]], "fill_() (torch.typedstorage method)": [[2084, "torch.TypedStorage.fill_"]], "fill_() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.fill_"]], "float() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float"]], "float() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float"]], "float8_e4m3fn() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float8_e4m3fn"]], "float8_e4m3fn() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float8_e4m3fn"]], "float8_e4m3fnuz() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float8_e4m3fnuz"]], "float8_e4m3fnuz() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float8_e4m3fnuz"]], "float8_e5m2() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float8_e5m2"]], "float8_e5m2() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float8_e5m2"]], "float8_e5m2fnuz() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float8_e5m2fnuz"]], "float8_e5m2fnuz() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float8_e5m2fnuz"]], "from_buffer() (torch.typedstorage class method)": [[2084, "torch.TypedStorage.from_buffer"]], "from_buffer() (torch.untypedstorage static method)": [[2084, "torch.UntypedStorage.from_buffer"]], "from_file() (torch.typedstorage class method)": [[2084, "torch.TypedStorage.from_file"]], 
"from_file() (torch.untypedstorage static method)": [[2084, "torch.UntypedStorage.from_file"]], "get_device() (torch.typedstorage method)": [[2084, "torch.TypedStorage.get_device"]], "get_device() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.get_device"]], "half() (torch.typedstorage method)": [[2084, "torch.TypedStorage.half"]], "half() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.half"]], "hpu() (torch.typedstorage method)": [[2084, "torch.TypedStorage.hpu"]], "hpu() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.hpu"]], "int() (torch.typedstorage method)": [[2084, "torch.TypedStorage.int"]], "int() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.int"]], "is_cuda (torch.typedstorage property)": [[2084, "torch.TypedStorage.is_cuda"]], "is_cuda (torch.untypedstorage property)": [[2084, "torch.UntypedStorage.is_cuda"]], "is_hpu (torch.typedstorage property)": [[2084, "torch.TypedStorage.is_hpu"]], "is_hpu (torch.untypedstorage property)": [[2084, "torch.UntypedStorage.is_hpu"]], "is_pinned() (torch.typedstorage method)": [[2084, "torch.TypedStorage.is_pinned"]], "is_pinned() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.is_pinned"]], "is_shared() (torch.typedstorage method)": [[2084, "torch.TypedStorage.is_shared"]], "is_shared() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.is_shared"]], "is_sparse (torch.typedstorage attribute)": [[2084, "torch.TypedStorage.is_sparse"]], "is_sparse (torch.untypedstorage attribute)": [[2084, "torch.UntypedStorage.is_sparse"]], "is_sparse_csr (torch.untypedstorage attribute)": [[2084, "torch.UntypedStorage.is_sparse_csr"]], "long() (torch.typedstorage method)": [[2084, "torch.TypedStorage.long"]], "long() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.long"]], "mps() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.mps"]], "nbytes() (torch.typedstorage method)": [[2084, "torch.TypedStorage.nbytes"]], "nbytes() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.nbytes"]], "new() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.new"]], "pickle_storage_type() (torch.typedstorage method)": [[2084, "torch.TypedStorage.pickle_storage_type"]], "pin_memory() (torch.typedstorage method)": [[2084, "torch.TypedStorage.pin_memory"]], "pin_memory() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.pin_memory"]], "resizable() (torch.typedstorage method)": [[2084, "torch.TypedStorage.resizable"]], "resizable() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.resizable"]], "resize_() (torch.typedstorage method)": [[2084, "torch.TypedStorage.resize_"]], "resize_() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.resize_"]], "share_memory_() (torch.typedstorage method)": [[2084, "torch.TypedStorage.share_memory_"]], "share_memory_() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.share_memory_"]], "short() (torch.typedstorage method)": [[2084, "torch.TypedStorage.short"]], "short() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.short"]], "size() (torch.typedstorage method)": [[2084, "torch.TypedStorage.size"]], "size() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.size"]], "to() (torch.typedstorage method)": [[2084, "torch.TypedStorage.to"]], "to() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.to"]], "tolist() (torch.typedstorage method)": [[2084, "torch.TypedStorage.tolist"]], "tolist() (torch.untypedstorage method)": [[2084, 
"torch.UntypedStorage.tolist"]], "type() (torch.typedstorage method)": [[2084, "torch.TypedStorage.type"]], "type() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.type"]], "untyped() (torch.typedstorage method)": [[2084, "torch.TypedStorage.untyped"]], "untyped() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.untyped"]], "device (class in torch)": [[2085, "torch.device"]], "dtype (class in torch)": [[2085, "torch.dtype"]], "layout (class in torch)": [[2085, "torch.layout"]], "memory_format (class in torch)": [[2085, "torch.memory_format"]], "summarywriter (class in torch.utils.tensorboard.writer)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter"]], "__init__() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.__init__"]], "add_audio() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_audio"]], "add_custom_scalars() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_custom_scalars"]], "add_embedding() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_embedding"]], "add_figure() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_figure"]], "add_graph() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_graph"]], "add_histogram() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_histogram"]], "add_hparams() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_hparams"]], "add_image() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_image"]], "add_images() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_images"]], "add_mesh() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_mesh"]], "add_pr_curve() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_pr_curve"]], "add_scalar() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_scalar"]], "add_scalars() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_scalars"]], "add_text() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_text"]], "add_video() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_video"]], "close() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.close"]], "flush() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.flush"]], "torch.utils.tensorboard": [[2087, "module-torch.utils.tensorboard"]], "h (torch.tensor attribute)": [[2088, "torch.Tensor.H"]], "t (torch.tensor attribute)": [[2088, "torch.Tensor.T"]], "tensor (class in torch)": [[2088, "torch.Tensor"]], "__init__() (torch.tensor method)": [[2088, "torch.Tensor.__init__"]], "mh (torch.tensor attribute)": [[2088, 
"torch.Tensor.mH"]], "mt (torch.tensor attribute)": [[2088, "torch.Tensor.mT"]], "assert_allclose() (in module torch.testing)": [[2089, "torch.testing.assert_allclose"]], "assert_close() (in module torch.testing)": [[2089, "torch.testing.assert_close"]], "make_tensor() (in module torch.testing)": [[2089, "torch.testing.make_tensor"]], "torch.testing": [[2089, "module-torch.testing"]], "symbool (class in torch)": [[2091, "torch.SymBool"]], "symfloat (class in torch)": [[2091, "torch.SymFloat"]], "symint (class in torch)": [[2091, "torch.SymInt"]], "tag (class in torch)": [[2091, "torch.Tag"]], "default_generator (torch.torch attribute)": [[2091, "torch.torch.default_generator"]], "is_integer() (torch.symfloat method)": [[2091, "torch.SymFloat.is_integer"]], "name (torch.tag property)": [[2091, "torch.Tag.name"]], "torch": [[2091, "module-torch"]], "torch.contrib": [[2091, "module-torch.contrib"]], "torch.functional": [[2091, "module-torch.functional"]], "torch.quasirandom": [[2091, "module-torch.quasirandom"]], "torch.return_types": [[2091, "module-torch.return_types"]], "torch.serialization": [[2091, "module-torch.serialization"]], "torch.signal.windows.windows": [[2091, "module-torch.signal.windows.windows"]], "torch.sparse.semi_structured": [[2091, "module-torch.sparse.semi_structured"]], "torch.storage": [[2091, "module-torch.storage"]], "torch.torch_version": [[2091, "module-torch.torch_version"]], "torch.types": [[2091, "module-torch.types"]], "torch.utils.backcompat": [[2091, "module-torch.utils.backcompat"]], "torch.utils.hipify": [[2091, "module-torch.utils.hipify"]], "torch.utils.model_dump": [[2091, "module-torch.utils.model_dump"]], "torch.utils.viz": [[2091, "module-torch.utils.viz"]], "torch.version": [[2091, "module-torch.version"]], "logger (class in torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.Logger"]], "outputlogger (class in torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.OutputLogger"]], "shadow (class in torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.Shadow"]], "shadowlogger (class in torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.ShadowLogger"]], "add() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.add"]], "add_relu() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.add_relu"]], "add_scalar() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.add_scalar"]], "cat() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.cat"]], "compare_model_outputs() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.compare_model_outputs"]], "compare_model_stub() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.compare_model_stub"]], "compare_weights() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.compare_weights"]], "forward() (torch.ao.ns._numeric_suite.logger method)": [[2092, "torch.ao.ns._numeric_suite.Logger.forward"]], "forward() (torch.ao.ns._numeric_suite.outputlogger method)": [[2092, "torch.ao.ns._numeric_suite.OutputLogger.forward"]], "forward() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.forward"]], "forward() (torch.ao.ns._numeric_suite.shadowlogger method)": [[2092, "torch.ao.ns._numeric_suite.ShadowLogger.forward"]], "get_logger_dict() (in module torch.ao.ns._numeric_suite)": [[2092, 
"torch.ao.ns._numeric_suite.get_logger_dict"]], "get_matching_activations() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.get_matching_activations"]], "mul() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.mul"]], "mul_scalar() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.mul_scalar"]], "prepare_model_outputs() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.prepare_model_outputs"]], "prepare_model_with_stubs() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.prepare_model_with_stubs"]], "torch.ao.ns._numeric_suite": [[2092, "module-torch.ao.ns._numeric_suite"]], "nstracer (class in torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.NSTracer"]], "outputcomparisonlogger (class in torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger"]], "outputlogger (class in torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.OutputLogger"]], "add_loggers() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.add_loggers"]], "add_shadow_loggers() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.add_shadow_loggers"]], "compute_cosine_similarity() (in module torch.ao.ns.fx.utils)": [[2093, "torch.ao.ns.fx.utils.compute_cosine_similarity"]], "compute_normalized_l2_error() (in module torch.ao.ns.fx.utils)": [[2093, "torch.ao.ns.fx.utils.compute_normalized_l2_error"]], "compute_sqnr() (in module torch.ao.ns.fx.utils)": [[2093, "torch.ao.ns.fx.utils.compute_sqnr"]], "convert_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.convert_n_shadows_model"]], "extend_logger_results_with_comparison() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extend_logger_results_with_comparison"]], "extract_logger_info() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extract_logger_info"]], "extract_results_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extract_results_n_shadows_model"]], "extract_shadow_logger_info() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extract_shadow_logger_info"]], "extract_weights() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extract_weights"]], "forward() (torch.ao.ns._numeric_suite_fx.outputcomparisonlogger method)": [[2093, "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger.forward"]], "forward() (torch.ao.ns._numeric_suite_fx.outputlogger method)": [[2093, "torch.ao.ns._numeric_suite_fx.OutputLogger.forward"]], "is_leaf_module() (torch.ao.ns._numeric_suite_fx.nstracer method)": [[2093, "torch.ao.ns._numeric_suite_fx.NSTracer.is_leaf_module"]], "loggers_set_enabled() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.loggers_set_enabled"]], "loggers_set_save_activations() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.loggers_set_save_activations"]], "prepare_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.prepare_n_shadows_model"]], "print_comparisons_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.print_comparisons_n_shadows_model"]], 
"torch.ao.ns._numeric_suite_fx": [[2093, "module-torch.ao.ns._numeric_suite_fx"]], "torch.compiler": [[2096, "module-torch.compiler"]], "get_ignored_functions() (in module torch.overrides)": [[2114, "torch.overrides.get_ignored_functions"]], "get_overridable_functions() (in module torch.overrides)": [[2114, "torch.overrides.get_overridable_functions"]], "get_testing_overrides() (in module torch.overrides)": [[2114, "torch.overrides.get_testing_overrides"]], "handle_torch_function() (in module torch.overrides)": [[2114, "torch.overrides.handle_torch_function"]], "has_torch_function() (in module torch.overrides)": [[2114, "torch.overrides.has_torch_function"]], "is_tensor_like() (in module torch.overrides)": [[2114, "torch.overrides.is_tensor_like"]], "is_tensor_method_or_property() (in module torch.overrides)": [[2114, "torch.overrides.is_tensor_method_or_property"]], "resolve_name() (in module torch.overrides)": [[2114, "torch.overrides.resolve_name"]], "torch.overrides": [[2114, "module-torch.overrides"]], "wrap_torch_function() (in module torch.overrides)": [[2114, "torch.overrides.wrap_torch_function"]], "_dump_snapshot() (in module torch.cuda.memory)": [[2115, "torch.cuda.memory._dump_snapshot"]], "_record_memory_history() (in module torch.cuda.memory)": [[2115, "torch.cuda.memory._record_memory_history"]], "_snapshot() (in module torch.cuda.memory)": [[2115, "torch.cuda.memory._snapshot"]], "torch.finfo (class in torch)": [[2118, "torch.torch.finfo"]], "torch.iinfo (class in torch)": [[2118, "torch.torch.iinfo"]], "torch.utils": [[2119, "module-torch.utils"]], "torch.utils.backend_registration": [[2119, "module-torch.utils.backend_registration"]], "torch.utils.benchmark.examples.blas_compare_setup": [[2119, "module-torch.utils.benchmark.examples.blas_compare_setup"]], "torch.utils.benchmark.examples.compare": [[2119, "module-torch.utils.benchmark.examples.compare"]], "torch.utils.benchmark.examples.fuzzer": [[2119, "module-torch.utils.benchmark.examples.fuzzer"]], "torch.utils.benchmark.examples.op_benchmark": [[2119, "module-torch.utils.benchmark.examples.op_benchmark"]], "torch.utils.benchmark.examples.simple_timeit": [[2119, "module-torch.utils.benchmark.examples.simple_timeit"]], "torch.utils.benchmark.examples.spectral_ops_fuzz_test": [[2119, "module-torch.utils.benchmark.examples.spectral_ops_fuzz_test"]], "torch.utils.benchmark.op_fuzzers.binary": [[2119, "module-torch.utils.benchmark.op_fuzzers.binary"]], "torch.utils.benchmark.op_fuzzers.sparse_binary": [[2119, "module-torch.utils.benchmark.op_fuzzers.sparse_binary"]], "torch.utils.benchmark.op_fuzzers.sparse_unary": [[2119, "module-torch.utils.benchmark.op_fuzzers.sparse_unary"]], "torch.utils.benchmark.op_fuzzers.spectral": [[2119, "module-torch.utils.benchmark.op_fuzzers.spectral"]], "torch.utils.benchmark.op_fuzzers.unary": [[2119, "module-torch.utils.benchmark.op_fuzzers.unary"]], "torch.utils.benchmark.utils.common": [[2119, "module-torch.utils.benchmark.utils.common"]], "torch.utils.benchmark.utils.compare": [[2119, "module-torch.utils.benchmark.utils.compare"]], "torch.utils.benchmark.utils.compile": [[2119, "module-torch.utils.benchmark.utils.compile"]], "torch.utils.benchmark.utils.cpp_jit": [[2119, "module-torch.utils.benchmark.utils.cpp_jit"]], "torch.utils.benchmark.utils.fuzzer": [[2119, "module-torch.utils.benchmark.utils.fuzzer"]], "torch.utils.benchmark.utils.sparse_fuzzer": [[2119, "module-torch.utils.benchmark.utils.sparse_fuzzer"]], "torch.utils.benchmark.utils.timer": [[2119, 
"module-torch.utils.benchmark.utils.timer"]], "torch.utils.benchmark.utils.valgrind_wrapper.timer_interface": [[2119, "module-torch.utils.benchmark.utils.valgrind_wrapper.timer_interface"]], "torch.utils.bundled_inputs": [[2119, "module-torch.utils.bundled_inputs"]], "torch.utils.checkpoint": [[2119, "module-torch.utils.checkpoint"]], "torch.utils.collect_env": [[2119, "module-torch.utils.collect_env"]], "torch.utils.cpp_backtrace": [[2119, "module-torch.utils.cpp_backtrace"]], "torch.utils.cpp_extension": [[2119, "module-torch.utils.cpp_extension"]], "torch.utils.data.backward_compatibility": [[2119, "module-torch.utils.data.backward_compatibility"]], "torch.utils.data.dataloader": [[2119, "module-torch.utils.data.dataloader"]], "torch.utils.data.datapipes.dataframe.dataframe_wrapper": [[2119, "module-torch.utils.data.datapipes.dataframe.dataframe_wrapper"]], "torch.utils.data.datapipes.dataframe.dataframes": [[2119, "module-torch.utils.data.datapipes.dataframe.dataframes"]], "torch.utils.data.datapipes.dataframe.datapipes": [[2119, "module-torch.utils.data.datapipes.dataframe.datapipes"]], "torch.utils.data.datapipes.dataframe.structures": [[2119, "module-torch.utils.data.datapipes.dataframe.structures"]], "torch.utils.data.datapipes.datapipe": [[2119, "module-torch.utils.data.datapipes.datapipe"]], "torch.utils.data.datapipes.gen_pyi": [[2119, "module-torch.utils.data.datapipes.gen_pyi"]], "torch.utils.data.datapipes.iter.callable": [[2119, "module-torch.utils.data.datapipes.iter.callable"]], "torch.utils.data.datapipes.iter.combinatorics": [[2119, "module-torch.utils.data.datapipes.iter.combinatorics"]], "torch.utils.data.datapipes.iter.combining": [[2119, "module-torch.utils.data.datapipes.iter.combining"]], "torch.utils.data.datapipes.iter.filelister": [[2119, "module-torch.utils.data.datapipes.iter.filelister"]], "torch.utils.data.datapipes.iter.fileopener": [[2119, "module-torch.utils.data.datapipes.iter.fileopener"]], "torch.utils.data.datapipes.iter.grouping": [[2119, "module-torch.utils.data.datapipes.iter.grouping"]], "torch.utils.data.datapipes.iter.routeddecoder": [[2119, "module-torch.utils.data.datapipes.iter.routeddecoder"]], "torch.utils.data.datapipes.iter.selecting": [[2119, "module-torch.utils.data.datapipes.iter.selecting"]], "torch.utils.data.datapipes.iter.sharding": [[2119, "module-torch.utils.data.datapipes.iter.sharding"]], "torch.utils.data.datapipes.iter.streamreader": [[2119, "module-torch.utils.data.datapipes.iter.streamreader"]], "torch.utils.data.datapipes.iter.utils": [[2119, "module-torch.utils.data.datapipes.iter.utils"]], "torch.utils.data.datapipes.map.callable": [[2119, "module-torch.utils.data.datapipes.map.callable"]], "torch.utils.data.datapipes.map.combinatorics": [[2119, "module-torch.utils.data.datapipes.map.combinatorics"]], "torch.utils.data.datapipes.map.combining": [[2119, "module-torch.utils.data.datapipes.map.combining"]], "torch.utils.data.datapipes.map.grouping": [[2119, "module-torch.utils.data.datapipes.map.grouping"]], "torch.utils.data.datapipes.map.utils": [[2119, "module-torch.utils.data.datapipes.map.utils"]], "torch.utils.data.datapipes.utils.common": [[2119, "module-torch.utils.data.datapipes.utils.common"]], "torch.utils.data.datapipes.utils.decoder": [[2119, "module-torch.utils.data.datapipes.utils.decoder"]], "torch.utils.data.datapipes.utils.snapshot": [[2119, "module-torch.utils.data.datapipes.utils.snapshot"]], "torch.utils.data.dataset": [[2119, "module-torch.utils.data.dataset"]], "torch.utils.data.distributed": [[2119, 
"module-torch.utils.data.distributed"]], "torch.utils.data.graph": [[2119, "module-torch.utils.data.graph"]], "torch.utils.data.graph_settings": [[2119, "module-torch.utils.data.graph_settings"]], "torch.utils.data.sampler": [[2119, "module-torch.utils.data.sampler"]], "torch.utils.dlpack": [[2119, "module-torch.utils.dlpack"]], "torch.utils.file_baton": [[2119, "module-torch.utils.file_baton"]], "torch.utils.flop_counter": [[2119, "module-torch.utils.flop_counter"]], "torch.utils.hipify.constants": [[2119, "module-torch.utils.hipify.constants"]], "torch.utils.hipify.cuda_to_hip_mappings": [[2119, "module-torch.utils.hipify.cuda_to_hip_mappings"]], "torch.utils.hipify.hipify_python": [[2119, "module-torch.utils.hipify.hipify_python"]], "torch.utils.hipify.version": [[2119, "module-torch.utils.hipify.version"]], "torch.utils.hooks": [[2119, "module-torch.utils.hooks"]], "torch.utils.jit.log_extract": [[2119, "module-torch.utils.jit.log_extract"]], "torch.utils.mkldnn": [[2119, "module-torch.utils.mkldnn"]], "torch.utils.mobile_optimizer": [[2119, "module-torch.utils.mobile_optimizer"]], "torch.utils.show_pickle": [[2119, "module-torch.utils.show_pickle"]], "torch.utils.tensorboard.summary": [[2119, "module-torch.utils.tensorboard.summary"]], "torch.utils.tensorboard.writer": [[2119, "module-torch.utils.tensorboard.writer"]], "torch.utils.throughput_benchmark": [[2119, "module-torch.utils.throughput_benchmark"]], "torch.utils.weak": [[2119, "module-torch.utils.weak"]], "torch.xpu": [[2120, "module-torch.xpu"]], "torch.xpu.random": [[2120, "module-torch.xpu.random"]], "torch.xpu.streams": [[2120, "module-torch.xpu.streams"]]}}) \ No newline at end of file +Search.setIndex({"docnames": ["amp", "autograd", "backends", "benchmark_utils", "bottleneck", "checkpoint", "community/build_ci_governance", "community/contribution_guide", "community/design", "community/governance", "community/persons_of_interest", "complex_numbers", "cond", "config_mod", "cpp_extension", "cpp_index", "cpu", "cuda", "cuda._sanitizer", "cuda.tunable", "cuda_environment_variables", "cudnn_persistent_rnn", "cudnn_rnn_determinism", "data", "ddp_comm_hooks", "debugging_environment_variables", "deploy", "deterministic", "distributed", "distributed.algorithms.join", "distributed.checkpoint", "distributed.elastic", "distributed.optim", "distributed.pipelining", "distributed.tensor.parallel", "distributions", "dlpack", "elastic/agent", "elastic/control_plane", "elastic/customization", "elastic/errors", "elastic/events", "elastic/examples", "elastic/kubernetes", "elastic/metrics", "elastic/multiprocessing", "elastic/quickstart", "elastic/rendezvous", "elastic/run", "elastic/subprocess_handler", "elastic/timer", "elastic/train_script", "export", "export.ir_spec", "fft", "fsdp", "func", "func.api", "func.batch_norm", "func.migrating", "func.ux_limitations", "func.whirlwind_tour", "future_mod", "futures", "fx", "fx.experimental", "generated/exportdb/index", "generated/exportdb/python.assert", "generated/exportdb/python.builtin", "generated/exportdb/python.closure", "generated/exportdb/python.context-manager", "generated/exportdb/python.control-flow", "generated/exportdb/python.data-structure", "generated/exportdb/python.object-model", "generated/exportdb/torch.cond", "generated/exportdb/torch.dynamic-shape", "generated/exportdb/torch.dynamic-value", "generated/exportdb/torch.escape-hatch", "generated/exportdb/torch.map", "generated/exportdb/torch.mutation", "generated/exportdb/torch.operator", 
"generated/onnx_dynamo_diagnostics_rules/FXE0007:fx-graph-to-onnx", "generated/onnx_dynamo_diagnostics_rules/FXE0008:fx-node-to-onnx", "generated/onnx_dynamo_diagnostics_rules/FXE0010:fx-pass", "generated/onnx_dynamo_diagnostics_rules/FXE0011:no-symbolic-function-for-call-function", "generated/onnx_dynamo_diagnostics_rules/FXE0012:unsupported-fx-node-analysis", "generated/onnx_dynamo_diagnostics_rules/FXE0013:op-level-debugging", "generated/onnx_dynamo_diagnostics_rules/FXE0014:find-opschema-matched-symbolic-function", "generated/onnx_dynamo_diagnostics_rules/FXE0015:fx-node-insert-type-promotion", "generated/onnx_dynamo_diagnostics_rules/FXE0016:find-operator-overloads-in-onnx-registry", "generated/torch.Generator", "generated/torch.Tensor.abs", "generated/torch.Tensor.abs_", "generated/torch.Tensor.absolute", "generated/torch.Tensor.absolute_", "generated/torch.Tensor.acos", "generated/torch.Tensor.acos_", "generated/torch.Tensor.acosh", "generated/torch.Tensor.acosh_", "generated/torch.Tensor.add", "generated/torch.Tensor.add_", "generated/torch.Tensor.addbmm", "generated/torch.Tensor.addbmm_", "generated/torch.Tensor.addcdiv", "generated/torch.Tensor.addcdiv_", "generated/torch.Tensor.addcmul", "generated/torch.Tensor.addcmul_", "generated/torch.Tensor.addmm", "generated/torch.Tensor.addmm_", "generated/torch.Tensor.addmv", "generated/torch.Tensor.addmv_", "generated/torch.Tensor.addr", "generated/torch.Tensor.addr_", "generated/torch.Tensor.adjoint", "generated/torch.Tensor.all", "generated/torch.Tensor.allclose", "generated/torch.Tensor.amax", "generated/torch.Tensor.amin", "generated/torch.Tensor.aminmax", "generated/torch.Tensor.angle", "generated/torch.Tensor.any", "generated/torch.Tensor.apply_", "generated/torch.Tensor.arccos", "generated/torch.Tensor.arccos_", "generated/torch.Tensor.arccosh", "generated/torch.Tensor.arccosh_", "generated/torch.Tensor.arcsin", "generated/torch.Tensor.arcsin_", "generated/torch.Tensor.arcsinh", "generated/torch.Tensor.arcsinh_", "generated/torch.Tensor.arctan", "generated/torch.Tensor.arctan2", "generated/torch.Tensor.arctan2_", "generated/torch.Tensor.arctan_", "generated/torch.Tensor.arctanh", "generated/torch.Tensor.arctanh_", "generated/torch.Tensor.argmax", "generated/torch.Tensor.argmin", "generated/torch.Tensor.argsort", "generated/torch.Tensor.argwhere", "generated/torch.Tensor.as_strided", "generated/torch.Tensor.as_subclass", "generated/torch.Tensor.asin", "generated/torch.Tensor.asin_", "generated/torch.Tensor.asinh", "generated/torch.Tensor.asinh_", "generated/torch.Tensor.atan", "generated/torch.Tensor.atan2", "generated/torch.Tensor.atan2_", "generated/torch.Tensor.atan_", "generated/torch.Tensor.atanh", "generated/torch.Tensor.atanh_", "generated/torch.Tensor.backward", "generated/torch.Tensor.baddbmm", "generated/torch.Tensor.baddbmm_", "generated/torch.Tensor.bernoulli", "generated/torch.Tensor.bernoulli_", "generated/torch.Tensor.bfloat16", "generated/torch.Tensor.bincount", "generated/torch.Tensor.bitwise_and", "generated/torch.Tensor.bitwise_and_", "generated/torch.Tensor.bitwise_left_shift", "generated/torch.Tensor.bitwise_left_shift_", "generated/torch.Tensor.bitwise_not", "generated/torch.Tensor.bitwise_not_", "generated/torch.Tensor.bitwise_or", "generated/torch.Tensor.bitwise_or_", "generated/torch.Tensor.bitwise_right_shift", "generated/torch.Tensor.bitwise_right_shift_", "generated/torch.Tensor.bitwise_xor", "generated/torch.Tensor.bitwise_xor_", "generated/torch.Tensor.bmm", "generated/torch.Tensor.bool", 
"generated/torch.Tensor.broadcast_to", "generated/torch.Tensor.byte", "generated/torch.Tensor.cauchy_", "generated/torch.Tensor.ccol_indices", "generated/torch.Tensor.cdouble", "generated/torch.Tensor.ceil", "generated/torch.Tensor.ceil_", "generated/torch.Tensor.cfloat", "generated/torch.Tensor.chalf", "generated/torch.Tensor.char", "generated/torch.Tensor.cholesky", "generated/torch.Tensor.cholesky_inverse", "generated/torch.Tensor.cholesky_solve", "generated/torch.Tensor.chunk", "generated/torch.Tensor.clamp", "generated/torch.Tensor.clamp_", "generated/torch.Tensor.clip", "generated/torch.Tensor.clip_", "generated/torch.Tensor.clone", "generated/torch.Tensor.coalesce", "generated/torch.Tensor.col_indices", "generated/torch.Tensor.conj", "generated/torch.Tensor.conj_physical", "generated/torch.Tensor.conj_physical_", "generated/torch.Tensor.contiguous", "generated/torch.Tensor.copy_", "generated/torch.Tensor.copysign", "generated/torch.Tensor.copysign_", "generated/torch.Tensor.corrcoef", "generated/torch.Tensor.cos", "generated/torch.Tensor.cos_", "generated/torch.Tensor.cosh", "generated/torch.Tensor.cosh_", "generated/torch.Tensor.count_nonzero", "generated/torch.Tensor.cov", "generated/torch.Tensor.cpu", "generated/torch.Tensor.cross", "generated/torch.Tensor.crow_indices", "generated/torch.Tensor.cuda", "generated/torch.Tensor.cummax", "generated/torch.Tensor.cummin", "generated/torch.Tensor.cumprod", "generated/torch.Tensor.cumprod_", "generated/torch.Tensor.cumsum", "generated/torch.Tensor.cumsum_", "generated/torch.Tensor.data_ptr", "generated/torch.Tensor.deg2rad", "generated/torch.Tensor.dense_dim", "generated/torch.Tensor.dequantize", "generated/torch.Tensor.det", "generated/torch.Tensor.detach", "generated/torch.Tensor.detach_", "generated/torch.Tensor.device", "generated/torch.Tensor.diag", "generated/torch.Tensor.diag_embed", "generated/torch.Tensor.diagflat", "generated/torch.Tensor.diagonal", "generated/torch.Tensor.diagonal_scatter", "generated/torch.Tensor.diff", "generated/torch.Tensor.digamma", "generated/torch.Tensor.digamma_", "generated/torch.Tensor.dim", "generated/torch.Tensor.dim_order", "generated/torch.Tensor.dist", "generated/torch.Tensor.div", "generated/torch.Tensor.div_", "generated/torch.Tensor.divide", "generated/torch.Tensor.divide_", "generated/torch.Tensor.dot", "generated/torch.Tensor.double", "generated/torch.Tensor.dsplit", "generated/torch.Tensor.element_size", "generated/torch.Tensor.eq", "generated/torch.Tensor.eq_", "generated/torch.Tensor.equal", "generated/torch.Tensor.erf", "generated/torch.Tensor.erf_", "generated/torch.Tensor.erfc", "generated/torch.Tensor.erfc_", "generated/torch.Tensor.erfinv", "generated/torch.Tensor.erfinv_", "generated/torch.Tensor.exp", "generated/torch.Tensor.exp_", "generated/torch.Tensor.expand", "generated/torch.Tensor.expand_as", "generated/torch.Tensor.expm1", "generated/torch.Tensor.expm1_", "generated/torch.Tensor.exponential_", "generated/torch.Tensor.fill_", "generated/torch.Tensor.fill_diagonal_", "generated/torch.Tensor.fix", "generated/torch.Tensor.fix_", "generated/torch.Tensor.flatten", "generated/torch.Tensor.flip", "generated/torch.Tensor.fliplr", "generated/torch.Tensor.flipud", "generated/torch.Tensor.float", "generated/torch.Tensor.float_power", "generated/torch.Tensor.float_power_", "generated/torch.Tensor.floor", "generated/torch.Tensor.floor_", "generated/torch.Tensor.floor_divide", "generated/torch.Tensor.floor_divide_", "generated/torch.Tensor.fmax", "generated/torch.Tensor.fmin", 
"generated/torch.Tensor.fmod", "generated/torch.Tensor.fmod_", "generated/torch.Tensor.frac", "generated/torch.Tensor.frac_", "generated/torch.Tensor.frexp", "generated/torch.Tensor.gather", "generated/torch.Tensor.gcd", "generated/torch.Tensor.gcd_", "generated/torch.Tensor.ge", "generated/torch.Tensor.ge_", "generated/torch.Tensor.geometric_", "generated/torch.Tensor.geqrf", "generated/torch.Tensor.ger", "generated/torch.Tensor.get_device", "generated/torch.Tensor.grad", "generated/torch.Tensor.greater", "generated/torch.Tensor.greater_", "generated/torch.Tensor.greater_equal", "generated/torch.Tensor.greater_equal_", "generated/torch.Tensor.gt", "generated/torch.Tensor.gt_", "generated/torch.Tensor.half", "generated/torch.Tensor.hardshrink", "generated/torch.Tensor.heaviside", "generated/torch.Tensor.histc", "generated/torch.Tensor.histogram", "generated/torch.Tensor.hsplit", "generated/torch.Tensor.hypot", "generated/torch.Tensor.hypot_", "generated/torch.Tensor.i0", "generated/torch.Tensor.i0_", "generated/torch.Tensor.igamma", "generated/torch.Tensor.igamma_", "generated/torch.Tensor.igammac", "generated/torch.Tensor.igammac_", "generated/torch.Tensor.imag", "generated/torch.Tensor.index_add", "generated/torch.Tensor.index_add_", "generated/torch.Tensor.index_copy", "generated/torch.Tensor.index_copy_", "generated/torch.Tensor.index_fill", "generated/torch.Tensor.index_fill_", "generated/torch.Tensor.index_put", "generated/torch.Tensor.index_put_", "generated/torch.Tensor.index_reduce", "generated/torch.Tensor.index_reduce_", "generated/torch.Tensor.index_select", "generated/torch.Tensor.indices", "generated/torch.Tensor.inner", "generated/torch.Tensor.int", "generated/torch.Tensor.int_repr", "generated/torch.Tensor.inverse", "generated/torch.Tensor.is_coalesced", "generated/torch.Tensor.is_complex", "generated/torch.Tensor.is_conj", "generated/torch.Tensor.is_contiguous", "generated/torch.Tensor.is_cuda", "generated/torch.Tensor.is_floating_point", "generated/torch.Tensor.is_inference", "generated/torch.Tensor.is_leaf", "generated/torch.Tensor.is_meta", "generated/torch.Tensor.is_pinned", "generated/torch.Tensor.is_quantized", "generated/torch.Tensor.is_set_to", "generated/torch.Tensor.is_shared", "generated/torch.Tensor.is_signed", "generated/torch.Tensor.is_sparse", "generated/torch.Tensor.is_sparse_csr", "generated/torch.Tensor.isclose", "generated/torch.Tensor.isfinite", "generated/torch.Tensor.isinf", "generated/torch.Tensor.isnan", "generated/torch.Tensor.isneginf", "generated/torch.Tensor.isposinf", "generated/torch.Tensor.isreal", "generated/torch.Tensor.istft", "generated/torch.Tensor.item", "generated/torch.Tensor.itemsize", "generated/torch.Tensor.kthvalue", "generated/torch.Tensor.lcm", "generated/torch.Tensor.lcm_", "generated/torch.Tensor.ldexp", "generated/torch.Tensor.ldexp_", "generated/torch.Tensor.le", "generated/torch.Tensor.le_", "generated/torch.Tensor.lerp", "generated/torch.Tensor.lerp_", "generated/torch.Tensor.less", "generated/torch.Tensor.less_", "generated/torch.Tensor.less_equal", "generated/torch.Tensor.less_equal_", "generated/torch.Tensor.lgamma", "generated/torch.Tensor.lgamma_", "generated/torch.Tensor.log", "generated/torch.Tensor.log10", "generated/torch.Tensor.log10_", "generated/torch.Tensor.log1p", "generated/torch.Tensor.log1p_", "generated/torch.Tensor.log2", "generated/torch.Tensor.log2_", "generated/torch.Tensor.log_", "generated/torch.Tensor.log_normal_", "generated/torch.Tensor.logaddexp", "generated/torch.Tensor.logaddexp2", 
"generated/torch.Tensor.logcumsumexp", "generated/torch.Tensor.logdet", "generated/torch.Tensor.logical_and", "generated/torch.Tensor.logical_and_", "generated/torch.Tensor.logical_not", "generated/torch.Tensor.logical_not_", "generated/torch.Tensor.logical_or", "generated/torch.Tensor.logical_or_", "generated/torch.Tensor.logical_xor", "generated/torch.Tensor.logical_xor_", "generated/torch.Tensor.logit", "generated/torch.Tensor.logit_", "generated/torch.Tensor.logsumexp", "generated/torch.Tensor.long", "generated/torch.Tensor.lt", "generated/torch.Tensor.lt_", "generated/torch.Tensor.lu", "generated/torch.Tensor.lu_solve", "generated/torch.Tensor.map_", "generated/torch.Tensor.masked_fill", "generated/torch.Tensor.masked_fill_", "generated/torch.Tensor.masked_scatter", "generated/torch.Tensor.masked_scatter_", "generated/torch.Tensor.masked_select", "generated/torch.Tensor.matmul", "generated/torch.Tensor.matrix_exp", "generated/torch.Tensor.matrix_power", "generated/torch.Tensor.max", "generated/torch.Tensor.maximum", "generated/torch.Tensor.mean", "generated/torch.Tensor.median", "generated/torch.Tensor.min", "generated/torch.Tensor.minimum", "generated/torch.Tensor.mm", "generated/torch.Tensor.mode", "generated/torch.Tensor.module_load", "generated/torch.Tensor.moveaxis", "generated/torch.Tensor.movedim", "generated/torch.Tensor.msort", "generated/torch.Tensor.mul", "generated/torch.Tensor.mul_", "generated/torch.Tensor.multinomial", "generated/torch.Tensor.multiply", "generated/torch.Tensor.multiply_", "generated/torch.Tensor.mv", "generated/torch.Tensor.mvlgamma", "generated/torch.Tensor.mvlgamma_", "generated/torch.Tensor.nan_to_num", "generated/torch.Tensor.nan_to_num_", "generated/torch.Tensor.nanmean", "generated/torch.Tensor.nanmedian", "generated/torch.Tensor.nanquantile", "generated/torch.Tensor.nansum", "generated/torch.Tensor.narrow", "generated/torch.Tensor.narrow_copy", "generated/torch.Tensor.nbytes", "generated/torch.Tensor.ndim", "generated/torch.Tensor.ndimension", "generated/torch.Tensor.ne", "generated/torch.Tensor.ne_", "generated/torch.Tensor.neg", "generated/torch.Tensor.neg_", "generated/torch.Tensor.negative", "generated/torch.Tensor.negative_", "generated/torch.Tensor.nelement", "generated/torch.Tensor.new_empty", "generated/torch.Tensor.new_full", "generated/torch.Tensor.new_ones", "generated/torch.Tensor.new_tensor", "generated/torch.Tensor.new_zeros", "generated/torch.Tensor.nextafter", "generated/torch.Tensor.nextafter_", "generated/torch.Tensor.nonzero", "generated/torch.Tensor.norm", "generated/torch.Tensor.normal_", "generated/torch.Tensor.not_equal", "generated/torch.Tensor.not_equal_", "generated/torch.Tensor.numel", "generated/torch.Tensor.numpy", "generated/torch.Tensor.orgqr", "generated/torch.Tensor.ormqr", "generated/torch.Tensor.outer", "generated/torch.Tensor.permute", "generated/torch.Tensor.pin_memory", "generated/torch.Tensor.pinverse", "generated/torch.Tensor.polygamma", "generated/torch.Tensor.polygamma_", "generated/torch.Tensor.positive", "generated/torch.Tensor.pow", "generated/torch.Tensor.pow_", "generated/torch.Tensor.prod", "generated/torch.Tensor.put_", "generated/torch.Tensor.q_per_channel_axis", "generated/torch.Tensor.q_per_channel_scales", "generated/torch.Tensor.q_per_channel_zero_points", "generated/torch.Tensor.q_scale", "generated/torch.Tensor.q_zero_point", "generated/torch.Tensor.qr", "generated/torch.Tensor.qscheme", "generated/torch.Tensor.quantile", "generated/torch.Tensor.rad2deg", "generated/torch.Tensor.random_", 
"generated/torch.Tensor.ravel", "generated/torch.Tensor.real", "generated/torch.Tensor.reciprocal", "generated/torch.Tensor.reciprocal_", "generated/torch.Tensor.record_stream", "generated/torch.Tensor.register_hook", "generated/torch.Tensor.register_post_accumulate_grad_hook", "generated/torch.Tensor.remainder", "generated/torch.Tensor.remainder_", "generated/torch.Tensor.renorm", "generated/torch.Tensor.renorm_", "generated/torch.Tensor.repeat", "generated/torch.Tensor.repeat_interleave", "generated/torch.Tensor.requires_grad", "generated/torch.Tensor.requires_grad_", "generated/torch.Tensor.reshape", "generated/torch.Tensor.reshape_as", "generated/torch.Tensor.resize_", "generated/torch.Tensor.resize_as_", "generated/torch.Tensor.resolve_conj", "generated/torch.Tensor.resolve_neg", "generated/torch.Tensor.retain_grad", "generated/torch.Tensor.retains_grad", "generated/torch.Tensor.roll", "generated/torch.Tensor.rot90", "generated/torch.Tensor.round", "generated/torch.Tensor.round_", "generated/torch.Tensor.row_indices", "generated/torch.Tensor.rsqrt", "generated/torch.Tensor.rsqrt_", "generated/torch.Tensor.scatter", "generated/torch.Tensor.scatter_", "generated/torch.Tensor.scatter_add", "generated/torch.Tensor.scatter_add_", "generated/torch.Tensor.scatter_reduce", "generated/torch.Tensor.scatter_reduce_", "generated/torch.Tensor.select", "generated/torch.Tensor.select_scatter", "generated/torch.Tensor.set_", "generated/torch.Tensor.sgn", "generated/torch.Tensor.sgn_", "generated/torch.Tensor.shape", "generated/torch.Tensor.share_memory_", "generated/torch.Tensor.short", "generated/torch.Tensor.sigmoid", "generated/torch.Tensor.sigmoid_", "generated/torch.Tensor.sign", "generated/torch.Tensor.sign_", "generated/torch.Tensor.signbit", "generated/torch.Tensor.sin", "generated/torch.Tensor.sin_", "generated/torch.Tensor.sinc", "generated/torch.Tensor.sinc_", "generated/torch.Tensor.sinh", "generated/torch.Tensor.sinh_", "generated/torch.Tensor.size", "generated/torch.Tensor.slice_scatter", "generated/torch.Tensor.slogdet", "generated/torch.Tensor.smm", "generated/torch.Tensor.softmax", "generated/torch.Tensor.sort", "generated/torch.Tensor.sparse_dim", "generated/torch.Tensor.sparse_mask", "generated/torch.Tensor.sparse_resize_", "generated/torch.Tensor.sparse_resize_and_clear_", "generated/torch.Tensor.split", "generated/torch.Tensor.sqrt", "generated/torch.Tensor.sqrt_", "generated/torch.Tensor.square", "generated/torch.Tensor.square_", "generated/torch.Tensor.squeeze", "generated/torch.Tensor.squeeze_", "generated/torch.Tensor.sspaddmm", "generated/torch.Tensor.std", "generated/torch.Tensor.stft", "generated/torch.Tensor.storage", "generated/torch.Tensor.storage_offset", "generated/torch.Tensor.storage_type", "generated/torch.Tensor.stride", "generated/torch.Tensor.sub", "generated/torch.Tensor.sub_", "generated/torch.Tensor.subtract", "generated/torch.Tensor.subtract_", "generated/torch.Tensor.sum", "generated/torch.Tensor.sum_to_size", "generated/torch.Tensor.svd", "generated/torch.Tensor.swapaxes", "generated/torch.Tensor.swapdims", "generated/torch.Tensor.t", "generated/torch.Tensor.t_", "generated/torch.Tensor.take", "generated/torch.Tensor.take_along_dim", "generated/torch.Tensor.tan", "generated/torch.Tensor.tan_", "generated/torch.Tensor.tanh", "generated/torch.Tensor.tanh_", "generated/torch.Tensor.tensor_split", "generated/torch.Tensor.tile", "generated/torch.Tensor.to", "generated/torch.Tensor.to_dense", "generated/torch.Tensor.to_mkldnn", 
"generated/torch.Tensor.to_sparse", "generated/torch.Tensor.to_sparse_bsc", "generated/torch.Tensor.to_sparse_bsr", "generated/torch.Tensor.to_sparse_coo", "generated/torch.Tensor.to_sparse_csc", "generated/torch.Tensor.to_sparse_csr", "generated/torch.Tensor.tolist", "generated/torch.Tensor.topk", "generated/torch.Tensor.trace", "generated/torch.Tensor.transpose", "generated/torch.Tensor.transpose_", "generated/torch.Tensor.triangular_solve", "generated/torch.Tensor.tril", "generated/torch.Tensor.tril_", "generated/torch.Tensor.triu", "generated/torch.Tensor.triu_", "generated/torch.Tensor.true_divide", "generated/torch.Tensor.true_divide_", "generated/torch.Tensor.trunc", "generated/torch.Tensor.trunc_", "generated/torch.Tensor.type", "generated/torch.Tensor.type_as", "generated/torch.Tensor.unbind", "generated/torch.Tensor.unflatten", "generated/torch.Tensor.unfold", "generated/torch.Tensor.uniform_", "generated/torch.Tensor.unique", "generated/torch.Tensor.unique_consecutive", "generated/torch.Tensor.unsqueeze", "generated/torch.Tensor.unsqueeze_", "generated/torch.Tensor.untyped_storage", "generated/torch.Tensor.values", "generated/torch.Tensor.var", "generated/torch.Tensor.vdot", "generated/torch.Tensor.view", "generated/torch.Tensor.view_as", "generated/torch.Tensor.vsplit", "generated/torch.Tensor.where", "generated/torch.Tensor.xlogy", "generated/torch.Tensor.xlogy_", "generated/torch.Tensor.xpu", "generated/torch.Tensor.zero_", "generated/torch._assert", "generated/torch._foreach_abs", "generated/torch._foreach_abs_", "generated/torch._foreach_acos", "generated/torch._foreach_acos_", "generated/torch._foreach_asin", "generated/torch._foreach_asin_", "generated/torch._foreach_atan", "generated/torch._foreach_atan_", "generated/torch._foreach_ceil", "generated/torch._foreach_ceil_", "generated/torch._foreach_cos", "generated/torch._foreach_cos_", "generated/torch._foreach_cosh", "generated/torch._foreach_cosh_", "generated/torch._foreach_erf", "generated/torch._foreach_erf_", "generated/torch._foreach_erfc", "generated/torch._foreach_erfc_", "generated/torch._foreach_exp", "generated/torch._foreach_exp_", "generated/torch._foreach_expm1", "generated/torch._foreach_expm1_", "generated/torch._foreach_floor", "generated/torch._foreach_floor_", "generated/torch._foreach_frac", "generated/torch._foreach_frac_", "generated/torch._foreach_lgamma", "generated/torch._foreach_lgamma_", "generated/torch._foreach_log", "generated/torch._foreach_log10", "generated/torch._foreach_log10_", "generated/torch._foreach_log1p", "generated/torch._foreach_log1p_", "generated/torch._foreach_log2", "generated/torch._foreach_log2_", "generated/torch._foreach_log_", "generated/torch._foreach_neg", "generated/torch._foreach_neg_", "generated/torch._foreach_reciprocal", "generated/torch._foreach_reciprocal_", "generated/torch._foreach_round", "generated/torch._foreach_round_", "generated/torch._foreach_sigmoid", "generated/torch._foreach_sigmoid_", "generated/torch._foreach_sin", "generated/torch._foreach_sin_", "generated/torch._foreach_sinh", "generated/torch._foreach_sinh_", "generated/torch._foreach_sqrt", "generated/torch._foreach_sqrt_", "generated/torch._foreach_tan", "generated/torch._foreach_tan_", "generated/torch._foreach_trunc", "generated/torch._foreach_trunc_", "generated/torch._foreach_zero_", "generated/torch._logging.set_logs", "generated/torch.abs", "generated/torch.absolute", "generated/torch.acos", "generated/torch.acosh", "generated/torch.add", "generated/torch.addbmm", 
"generated/torch.addcdiv", "generated/torch.addcmul", "generated/torch.addmm", "generated/torch.addmv", "generated/torch.addr", "generated/torch.adjoint", "generated/torch.all", "generated/torch.allclose", "generated/torch.amax", "generated/torch.amin", "generated/torch.aminmax", "generated/torch.angle", "generated/torch.any", "generated/torch.ao.nn.intrinsic.BNReLU2d", "generated/torch.ao.nn.intrinsic.BNReLU3d", "generated/torch.ao.nn.intrinsic.ConvBn1d", "generated/torch.ao.nn.intrinsic.ConvBn2d", "generated/torch.ao.nn.intrinsic.ConvBn3d", "generated/torch.ao.nn.intrinsic.ConvBnReLU1d", "generated/torch.ao.nn.intrinsic.ConvBnReLU2d", "generated/torch.ao.nn.intrinsic.ConvBnReLU3d", "generated/torch.ao.nn.intrinsic.ConvReLU1d", "generated/torch.ao.nn.intrinsic.ConvReLU2d", "generated/torch.ao.nn.intrinsic.ConvReLU3d", "generated/torch.ao.nn.intrinsic.LinearReLU", "generated/torch.ao.nn.intrinsic.qat.ConvBn1d", "generated/torch.ao.nn.intrinsic.qat.ConvBn2d", "generated/torch.ao.nn.intrinsic.qat.ConvBn3d", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU1d", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU2d", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU3d", "generated/torch.ao.nn.intrinsic.qat.ConvReLU2d", "generated/torch.ao.nn.intrinsic.qat.ConvReLU3d", "generated/torch.ao.nn.intrinsic.qat.LinearReLU", "generated/torch.ao.nn.intrinsic.qat.freeze_bn_stats", "generated/torch.ao.nn.intrinsic.qat.update_bn_stats", "generated/torch.ao.nn.intrinsic.quantized.BNReLU2d", "generated/torch.ao.nn.intrinsic.quantized.BNReLU3d", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU1d", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU2d", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU3d", "generated/torch.ao.nn.intrinsic.quantized.LinearReLU", "generated/torch.ao.nn.intrinsic.quantized.dynamic.LinearReLU", "generated/torch.ao.nn.qat.Conv2d", "generated/torch.ao.nn.qat.Conv3d", "generated/torch.ao.nn.qat.Linear", "generated/torch.ao.nn.qat.dynamic.Linear", "generated/torch.ao.nn.quantizable.LSTM", "generated/torch.ao.nn.quantizable.MultiheadAttention", "generated/torch.ao.nn.quantized.BatchNorm2d", "generated/torch.ao.nn.quantized.BatchNorm3d", "generated/torch.ao.nn.quantized.Conv1d", "generated/torch.ao.nn.quantized.Conv2d", "generated/torch.ao.nn.quantized.Conv3d", "generated/torch.ao.nn.quantized.ConvTranspose1d", "generated/torch.ao.nn.quantized.ConvTranspose2d", "generated/torch.ao.nn.quantized.ConvTranspose3d", "generated/torch.ao.nn.quantized.ELU", "generated/torch.ao.nn.quantized.Embedding", "generated/torch.ao.nn.quantized.EmbeddingBag", "generated/torch.ao.nn.quantized.FXFloatFunctional", "generated/torch.ao.nn.quantized.FloatFunctional", "generated/torch.ao.nn.quantized.GroupNorm", "generated/torch.ao.nn.quantized.Hardswish", "generated/torch.ao.nn.quantized.InstanceNorm1d", "generated/torch.ao.nn.quantized.InstanceNorm2d", "generated/torch.ao.nn.quantized.InstanceNorm3d", "generated/torch.ao.nn.quantized.LayerNorm", "generated/torch.ao.nn.quantized.LeakyReLU", "generated/torch.ao.nn.quantized.Linear", "generated/torch.ao.nn.quantized.QFunctional", "generated/torch.ao.nn.quantized.ReLU6", "generated/torch.ao.nn.quantized.Sigmoid", "generated/torch.ao.nn.quantized.dynamic.GRU", "generated/torch.ao.nn.quantized.dynamic.GRUCell", "generated/torch.ao.nn.quantized.dynamic.LSTM", "generated/torch.ao.nn.quantized.dynamic.LSTMCell", "generated/torch.ao.nn.quantized.dynamic.Linear", "generated/torch.ao.nn.quantized.dynamic.RNNCell", 
"generated/torch.ao.nn.quantized.functional.adaptive_avg_pool2d", "generated/torch.ao.nn.quantized.functional.adaptive_avg_pool3d", "generated/torch.ao.nn.quantized.functional.avg_pool2d", "generated/torch.ao.nn.quantized.functional.avg_pool3d", "generated/torch.ao.nn.quantized.functional.celu", "generated/torch.ao.nn.quantized.functional.clamp", "generated/torch.ao.nn.quantized.functional.conv1d", "generated/torch.ao.nn.quantized.functional.conv2d", "generated/torch.ao.nn.quantized.functional.conv3d", "generated/torch.ao.nn.quantized.functional.elu", "generated/torch.ao.nn.quantized.functional.hardsigmoid", "generated/torch.ao.nn.quantized.functional.hardswish", "generated/torch.ao.nn.quantized.functional.hardtanh", "generated/torch.ao.nn.quantized.functional.interpolate", "generated/torch.ao.nn.quantized.functional.leaky_relu", "generated/torch.ao.nn.quantized.functional.linear", "generated/torch.ao.nn.quantized.functional.max_pool1d", "generated/torch.ao.nn.quantized.functional.max_pool2d", "generated/torch.ao.nn.quantized.functional.threshold", "generated/torch.ao.nn.quantized.functional.upsample", "generated/torch.ao.nn.quantized.functional.upsample_bilinear", "generated/torch.ao.nn.quantized.functional.upsample_nearest", "generated/torch.ao.quantization.DeQuantStub", "generated/torch.ao.quantization.QuantStub", "generated/torch.ao.quantization.QuantWrapper", "generated/torch.ao.quantization.add_quant_dequant", "generated/torch.ao.quantization.backend_config.BackendConfig", "generated/torch.ao.quantization.backend_config.BackendPatternConfig", "generated/torch.ao.quantization.backend_config.DTypeConfig", "generated/torch.ao.quantization.backend_config.DTypeWithConstraints", "generated/torch.ao.quantization.backend_config.ObservationType", "generated/torch.ao.quantization.convert", "generated/torch.ao.quantization.default_eval_fn", "generated/torch.ao.quantization.fake_quantize.FakeQuantize", "generated/torch.ao.quantization.fake_quantize.FakeQuantizeBase", "generated/torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize", "generated/torch.ao.quantization.fake_quantize.FusedMovingAvgObsFakeQuantize", "generated/torch.ao.quantization.fake_quantize.default_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_fused_act_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_fused_per_channel_wt_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_fused_wt_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_histogram_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_per_channel_weight_fake_quant", "generated/torch.ao.quantization.fake_quantize.default_weight_fake_quant", "generated/torch.ao.quantization.fake_quantize.disable_fake_quant", "generated/torch.ao.quantization.fake_quantize.disable_observer", "generated/torch.ao.quantization.fake_quantize.enable_fake_quant", "generated/torch.ao.quantization.fake_quantize.enable_observer", "generated/torch.ao.quantization.fuse_modules.fuse_modules", "generated/torch.ao.quantization.fx.custom_config.ConvertCustomConfig", "generated/torch.ao.quantization.fx.custom_config.FuseCustomConfig", "generated/torch.ao.quantization.fx.custom_config.PrepareCustomConfig", "generated/torch.ao.quantization.fx.custom_config.StandaloneModuleConfigEntry", "generated/torch.ao.quantization.observer.HistogramObserver", "generated/torch.ao.quantization.observer.MinMaxObserver", "generated/torch.ao.quantization.observer.MovingAverageMinMaxObserver", 
"generated/torch.ao.quantization.observer.MovingAveragePerChannelMinMaxObserver", "generated/torch.ao.quantization.observer.NoopObserver", "generated/torch.ao.quantization.observer.ObserverBase", "generated/torch.ao.quantization.observer.PerChannelMinMaxObserver", "generated/torch.ao.quantization.observer.PlaceholderObserver", "generated/torch.ao.quantization.observer.RecordingObserver", "generated/torch.ao.quantization.observer.default_debug_observer", "generated/torch.ao.quantization.observer.default_dynamic_quant_observer", "generated/torch.ao.quantization.observer.default_float_qparams_observer", "generated/torch.ao.quantization.observer.default_histogram_observer", "generated/torch.ao.quantization.observer.default_observer", "generated/torch.ao.quantization.observer.default_per_channel_weight_observer", "generated/torch.ao.quantization.observer.default_placeholder_observer", "generated/torch.ao.quantization.observer.default_weight_observer", "generated/torch.ao.quantization.observer.get_observer_state_dict", "generated/torch.ao.quantization.observer.load_observer_state_dict", "generated/torch.ao.quantization.prepare", "generated/torch.ao.quantization.prepare_qat", "generated/torch.ao.quantization.propagate_qconfig_", "generated/torch.ao.quantization.pt2e.export_utils.model_is_exported", "generated/torch.ao.quantization.qconfig.QConfig", "generated/torch.ao.quantization.qconfig.default_activation_only_qconfig", "generated/torch.ao.quantization.qconfig.default_debug_qconfig", "generated/torch.ao.quantization.qconfig.default_dynamic_qconfig", "generated/torch.ao.quantization.qconfig.default_per_channel_qconfig", "generated/torch.ao.quantization.qconfig.default_qat_qconfig", "generated/torch.ao.quantization.qconfig.default_qat_qconfig_v2", "generated/torch.ao.quantization.qconfig.default_qconfig", "generated/torch.ao.quantization.qconfig.default_weight_only_qconfig", "generated/torch.ao.quantization.qconfig.float16_dynamic_qconfig", "generated/torch.ao.quantization.qconfig.float16_static_qconfig", "generated/torch.ao.quantization.qconfig.float_qparams_weight_only_qconfig", "generated/torch.ao.quantization.qconfig.per_channel_dynamic_qconfig", "generated/torch.ao.quantization.qconfig_mapping.QConfigMapping", "generated/torch.ao.quantization.qconfig_mapping.get_default_qat_qconfig_mapping", "generated/torch.ao.quantization.qconfig_mapping.get_default_qconfig_mapping", "generated/torch.ao.quantization.quantize", "generated/torch.ao.quantization.quantize_dynamic", "generated/torch.ao.quantization.quantize_fx.convert_fx", "generated/torch.ao.quantization.quantize_fx.fuse_fx", "generated/torch.ao.quantization.quantize_fx.prepare_fx", "generated/torch.ao.quantization.quantize_fx.prepare_qat_fx", "generated/torch.ao.quantization.quantize_qat", "generated/torch.ao.quantization.swap_module", "generated/torch.arange", "generated/torch.arccos", "generated/torch.arccosh", "generated/torch.arcsin", "generated/torch.arcsinh", "generated/torch.arctan", "generated/torch.arctan2", "generated/torch.arctanh", "generated/torch.are_deterministic_algorithms_enabled", "generated/torch.argmax", "generated/torch.argmin", "generated/torch.argsort", "generated/torch.argwhere", "generated/torch.as_strided", "generated/torch.as_tensor", "generated/torch.asarray", "generated/torch.asin", "generated/torch.asinh", "generated/torch.atan", "generated/torch.atan2", "generated/torch.atanh", "generated/torch.atleast_1d", "generated/torch.atleast_2d", "generated/torch.atleast_3d", "generated/torch.autograd.Function.backward", 
"generated/torch.autograd.Function.forward", "generated/torch.autograd.Function.jvp", "generated/torch.autograd.Function.vmap", "generated/torch.autograd.backward", "generated/torch.autograd.forward_ad.UnpackedDualTensor", "generated/torch.autograd.forward_ad.dual_level", "generated/torch.autograd.forward_ad.enter_dual_level", "generated/torch.autograd.forward_ad.exit_dual_level", "generated/torch.autograd.forward_ad.make_dual", "generated/torch.autograd.forward_ad.unpack_dual", "generated/torch.autograd.function.BackwardCFunction", "generated/torch.autograd.function.FunctionCtx.mark_dirty", "generated/torch.autograd.function.FunctionCtx.mark_non_differentiable", "generated/torch.autograd.function.FunctionCtx.save_for_backward", "generated/torch.autograd.function.FunctionCtx.set_materialize_grads", "generated/torch.autograd.function.InplaceFunction", "generated/torch.autograd.function.NestedIOFunction", "generated/torch.autograd.function.once_differentiable", "generated/torch.autograd.functional.hessian", "generated/torch.autograd.functional.hvp", "generated/torch.autograd.functional.jacobian", "generated/torch.autograd.functional.jvp", "generated/torch.autograd.functional.vhp", "generated/torch.autograd.functional.vjp", "generated/torch.autograd.grad", "generated/torch.autograd.grad_mode.inference_mode", "generated/torch.autograd.grad_mode.set_grad_enabled", "generated/torch.autograd.grad_mode.set_multithreading_enabled", "generated/torch.autograd.gradcheck.GradcheckError", "generated/torch.autograd.gradcheck.gradcheck", "generated/torch.autograd.gradcheck.gradgradcheck", "generated/torch.autograd.graph.Node.metadata", "generated/torch.autograd.graph.Node.name", "generated/torch.autograd.graph.Node.next_functions", "generated/torch.autograd.graph.Node.register_hook", "generated/torch.autograd.graph.Node.register_prehook", "generated/torch.autograd.graph.increment_version", "generated/torch.autograd.profiler.EnforceUnique", "generated/torch.autograd.profiler.KinetoStepTracker", "generated/torch.autograd.profiler.load_nvprof", "generated/torch.autograd.profiler.parse_nvprof_trace", "generated/torch.autograd.profiler.profile.export_chrome_trace", "generated/torch.autograd.profiler.profile.key_averages", "generated/torch.autograd.profiler.profile.self_cpu_time_total", "generated/torch.autograd.profiler.profile.total_average", "generated/torch.autograd.profiler.record_function", "generated/torch.autograd.profiler_util.Interval", "generated/torch.autograd.profiler_util.Kernel", "generated/torch.autograd.profiler_util.MemRecordsAcc", "generated/torch.autograd.profiler_util.StringTable", "generated/torch.baddbmm", "generated/torch.bartlett_window", "generated/torch.bernoulli", "generated/torch.bincount", "generated/torch.bitwise_and", "generated/torch.bitwise_left_shift", "generated/torch.bitwise_not", "generated/torch.bitwise_or", "generated/torch.bitwise_right_shift", "generated/torch.bitwise_xor", "generated/torch.blackman_window", "generated/torch.block_diag", "generated/torch.bmm", "generated/torch.broadcast_shapes", "generated/torch.broadcast_tensors", "generated/torch.broadcast_to", "generated/torch.bucketize", "generated/torch.can_cast", "generated/torch.cartesian_prod", "generated/torch.cat", "generated/torch.cdist", "generated/torch.ceil", "generated/torch.chain_matmul", "generated/torch.cholesky", "generated/torch.cholesky_inverse", "generated/torch.cholesky_solve", "generated/torch.chunk", "generated/torch.clamp", "generated/torch.clip", "generated/torch.clone", 
"generated/torch.column_stack", "generated/torch.combinations", "generated/torch.compile", "generated/torch.compiled_with_cxx11_abi", "generated/torch.compiler.allow_in_graph", "generated/torch.compiler.assume_constant_result", "generated/torch.compiler.compile", "generated/torch.compiler.cudagraph_mark_step_begin", "generated/torch.compiler.disable", "generated/torch.compiler.is_compiling", "generated/torch.compiler.is_dynamo_compiling", "generated/torch.compiler.list_backends", "generated/torch.compiler.reset", "generated/torch.complex", "generated/torch.concat", "generated/torch.concatenate", "generated/torch.cond", "generated/torch.conj", "generated/torch.conj_physical", "generated/torch.copysign", "generated/torch.corrcoef", "generated/torch.cos", "generated/torch.cosh", "generated/torch.count_nonzero", "generated/torch.cov", "generated/torch.cpu.Stream", "generated/torch.cpu.StreamContext", "generated/torch.cpu.current_device", "generated/torch.cpu.current_stream", "generated/torch.cpu.device_count", "generated/torch.cpu.is_available", "generated/torch.cpu.set_device", "generated/torch.cpu.stream", "generated/torch.cpu.synchronize", "generated/torch.cross", "generated/torch.cuda.CUDAGraph", "generated/torch.cuda.CUDAPluggableAllocator", "generated/torch.cuda.Event", "generated/torch.cuda.ExternalStream", "generated/torch.cuda.OutOfMemoryError", "generated/torch.cuda.Stream", "generated/torch.cuda.StreamContext", "generated/torch.cuda.caching_allocator_alloc", "generated/torch.cuda.caching_allocator_delete", "generated/torch.cuda.can_device_access_peer", "generated/torch.cuda.change_current_allocator", "generated/torch.cuda.clock_rate", "generated/torch.cuda.comm.broadcast", "generated/torch.cuda.comm.broadcast_coalesced", "generated/torch.cuda.comm.gather", "generated/torch.cuda.comm.reduce_add", "generated/torch.cuda.comm.scatter", "generated/torch.cuda.current_blas_handle", "generated/torch.cuda.current_device", "generated/torch.cuda.current_stream", "generated/torch.cuda.default_stream", "generated/torch.cuda.device", "generated/torch.cuda.device_count", "generated/torch.cuda.device_of", "generated/torch.cuda.empty_cache", "generated/torch.cuda.get_allocator_backend", "generated/torch.cuda.get_arch_list", "generated/torch.cuda.get_device_capability", "generated/torch.cuda.get_device_name", "generated/torch.cuda.get_device_properties", "generated/torch.cuda.get_gencode_flags", "generated/torch.cuda.get_rng_state", "generated/torch.cuda.get_rng_state_all", "generated/torch.cuda.get_sync_debug_mode", "generated/torch.cuda.graph", "generated/torch.cuda.graph_pool_handle", "generated/torch.cuda.init", "generated/torch.cuda.initial_seed", "generated/torch.cuda.ipc_collect", "generated/torch.cuda.is_available", "generated/torch.cuda.is_current_stream_capturing", "generated/torch.cuda.is_initialized", "generated/torch.cuda.jiterator._create_jit_fn", "generated/torch.cuda.jiterator._create_multi_output_jit_fn", "generated/torch.cuda.list_gpu_processes", "generated/torch.cuda.make_graphed_callables", "generated/torch.cuda.manual_seed", "generated/torch.cuda.manual_seed_all", "generated/torch.cuda.max_memory_allocated", "generated/torch.cuda.max_memory_cached", "generated/torch.cuda.max_memory_reserved", "generated/torch.cuda.mem_get_info", "generated/torch.cuda.memory_allocated", "generated/torch.cuda.memory_cached", "generated/torch.cuda.memory_reserved", "generated/torch.cuda.memory_snapshot", "generated/torch.cuda.memory_stats", "generated/torch.cuda.memory_summary", 
"generated/torch.cuda.memory_usage", "generated/torch.cuda.nvtx.mark", "generated/torch.cuda.nvtx.range", "generated/torch.cuda.nvtx.range_pop", "generated/torch.cuda.nvtx.range_push", "generated/torch.cuda.power_draw", "generated/torch.cuda.reset_max_memory_allocated", "generated/torch.cuda.reset_max_memory_cached", "generated/torch.cuda.reset_peak_memory_stats", "generated/torch.cuda.seed", "generated/torch.cuda.seed_all", "generated/torch.cuda.set_device", "generated/torch.cuda.set_per_process_memory_fraction", "generated/torch.cuda.set_rng_state", "generated/torch.cuda.set_rng_state_all", "generated/torch.cuda.set_stream", "generated/torch.cuda.set_sync_debug_mode", "generated/torch.cuda.stream", "generated/torch.cuda.synchronize", "generated/torch.cuda.temperature", "generated/torch.cuda.utilization", "generated/torch.cummax", "generated/torch.cummin", "generated/torch.cumprod", "generated/torch.cumsum", "generated/torch.cumulative_trapezoid", "generated/torch.deg2rad", "generated/torch.dequantize", "generated/torch.det", "generated/torch.diag", "generated/torch.diag_embed", "generated/torch.diagflat", "generated/torch.diagonal", "generated/torch.diagonal_scatter", "generated/torch.diff", "generated/torch.digamma", "generated/torch.dist", "generated/torch.div", "generated/torch.divide", "generated/torch.dot", "generated/torch.dsplit", "generated/torch.dstack", "generated/torch.einsum", "generated/torch.empty", "generated/torch.empty_like", "generated/torch.empty_strided", "generated/torch.enable_grad", "generated/torch.eq", "generated/torch.equal", "generated/torch.erf", "generated/torch.erfc", "generated/torch.erfinv", "generated/torch.exp", "generated/torch.exp2", "generated/torch.expm1", "generated/torch.eye", "generated/torch.fake_quantize_per_channel_affine", "generated/torch.fake_quantize_per_tensor_affine", "generated/torch.fft.fft", "generated/torch.fft.fft2", "generated/torch.fft.fftfreq", "generated/torch.fft.fftn", "generated/torch.fft.fftshift", "generated/torch.fft.hfft", "generated/torch.fft.hfft2", "generated/torch.fft.hfftn", "generated/torch.fft.ifft", "generated/torch.fft.ifft2", "generated/torch.fft.ifftn", "generated/torch.fft.ifftshift", "generated/torch.fft.ihfft", "generated/torch.fft.ihfft2", "generated/torch.fft.ihfftn", "generated/torch.fft.irfft", "generated/torch.fft.irfft2", "generated/torch.fft.irfftn", "generated/torch.fft.rfft", "generated/torch.fft.rfft2", "generated/torch.fft.rfftfreq", "generated/torch.fft.rfftn", "generated/torch.fix", "generated/torch.flatten", "generated/torch.flip", "generated/torch.fliplr", "generated/torch.flipud", "generated/torch.float_power", "generated/torch.floor", "generated/torch.floor_divide", "generated/torch.fmax", "generated/torch.fmin", "generated/torch.fmod", "generated/torch.frac", "generated/torch.frexp", "generated/torch.from_dlpack", "generated/torch.from_file", "generated/torch.from_numpy", "generated/torch.frombuffer", "generated/torch.full", "generated/torch.full_like", "generated/torch.func.functional_call", "generated/torch.func.functionalize", "generated/torch.func.grad", "generated/torch.func.grad_and_value", "generated/torch.func.hessian", "generated/torch.func.jacfwd", "generated/torch.func.jacrev", "generated/torch.func.jvp", "generated/torch.func.linearize", "generated/torch.func.replace_all_batch_norm_modules_", "generated/torch.func.stack_module_state", "generated/torch.func.vjp", "generated/torch.func.vmap", "generated/torch.fx.experimental.symbolic_shapes.CallMethodKey", 
"generated/torch.fx.experimental.symbolic_shapes.ConvertIntKey", "generated/torch.fx.experimental.symbolic_shapes.DimConstraints", "generated/torch.fx.experimental.symbolic_shapes.DimDynamic", "generated/torch.fx.experimental.symbolic_shapes.DivideByKey", "generated/torch.fx.experimental.symbolic_shapes.EqualityConstraint", "generated/torch.fx.experimental.symbolic_shapes.InnerTensorKey", "generated/torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts", "generated/torch.fx.experimental.symbolic_shapes.RelaxedUnspecConstraint", "generated/torch.fx.experimental.symbolic_shapes.ShapeEnv", "generated/torch.fx.experimental.symbolic_shapes.ShapeEnvSettings", "generated/torch.fx.experimental.symbolic_shapes.StatefulSymbolicContext", "generated/torch.fx.experimental.symbolic_shapes.StatelessSymbolicContext", "generated/torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint", "generated/torch.fx.experimental.symbolic_shapes.SubclassSymbolicContext", "generated/torch.fx.experimental.symbolic_shapes.SymbolicContext", "generated/torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr", "generated/torch.fx.experimental.symbolic_shapes.check_consistent", "generated/torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings", "generated/torch.fx.experimental.symbolic_shapes.constrain_range", "generated/torch.fx.experimental.symbolic_shapes.constrain_unify", "generated/torch.fx.experimental.symbolic_shapes.definitely_false", "generated/torch.fx.experimental.symbolic_shapes.definitely_true", "generated/torch.fx.experimental.symbolic_shapes.guard_size_oblivious", "generated/torch.fx.experimental.symbolic_shapes.has_free_symbols", "generated/torch.fx.experimental.symbolic_shapes.hint_int", "generated/torch.fx.experimental.symbolic_shapes.is_concrete_bool", "generated/torch.fx.experimental.symbolic_shapes.is_concrete_int", "generated/torch.fx.experimental.symbolic_shapes.lru_cache", "generated/torch.fx.experimental.symbolic_shapes.parallel_and", "generated/torch.fx.experimental.symbolic_shapes.parallel_or", "generated/torch.fx.experimental.symbolic_shapes.rebind_unbacked", "generated/torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings", "generated/torch.fx.experimental.symbolic_shapes.statically_known_true", "generated/torch.fx.experimental.symbolic_shapes.sym_eq", "generated/torch.gather", "generated/torch.gcd", "generated/torch.ge", "generated/torch.geqrf", "generated/torch.ger", "generated/torch.get_default_device", "generated/torch.get_default_dtype", "generated/torch.get_deterministic_debug_mode", "generated/torch.get_device_module", "generated/torch.get_float32_matmul_precision", "generated/torch.get_num_interop_threads", "generated/torch.get_num_threads", "generated/torch.get_rng_state", "generated/torch.gradient", "generated/torch.greater", "generated/torch.greater_equal", "generated/torch.gt", "generated/torch.hamming_window", "generated/torch.hann_window", "generated/torch.heaviside", "generated/torch.histc", "generated/torch.histogram", "generated/torch.histogramdd", "generated/torch.hsplit", "generated/torch.hspmm", "generated/torch.hstack", "generated/torch.hypot", "generated/torch.i0", "generated/torch.igamma", "generated/torch.igammac", "generated/torch.imag", "generated/torch.index_add", "generated/torch.index_copy", "generated/torch.index_reduce", "generated/torch.index_select", "generated/torch.initial_seed", "generated/torch.inner", "generated/torch.inverse", "generated/torch.is_complex", "generated/torch.is_conj", 
"generated/torch.is_deterministic_algorithms_warn_only_enabled", "generated/torch.is_floating_point", "generated/torch.is_grad_enabled", "generated/torch.is_inference_mode_enabled", "generated/torch.is_nonzero", "generated/torch.is_storage", "generated/torch.is_tensor", "generated/torch.is_warn_always_enabled", "generated/torch.isclose", "generated/torch.isfinite", "generated/torch.isin", "generated/torch.isinf", "generated/torch.isnan", "generated/torch.isneginf", "generated/torch.isposinf", "generated/torch.isreal", "generated/torch.istft", "generated/torch.jit.Attribute", "generated/torch.jit.ScriptFunction", "generated/torch.jit.ScriptModule", "generated/torch.jit.annotate", "generated/torch.jit.enable_onednn_fusion", "generated/torch.jit.fork", "generated/torch.jit.freeze", "generated/torch.jit.ignore", "generated/torch.jit.interface", "generated/torch.jit.isinstance", "generated/torch.jit.load", "generated/torch.jit.onednn_fusion_enabled", "generated/torch.jit.optimize_for_inference", "generated/torch.jit.save", "generated/torch.jit.script", "generated/torch.jit.script_if_tracing", "generated/torch.jit.set_fusion_strategy", "generated/torch.jit.strict_fusion", "generated/torch.jit.trace", "generated/torch.jit.trace_module", "generated/torch.jit.unused", "generated/torch.jit.wait", "generated/torch.kaiser_window", "generated/torch.kron", "generated/torch.kthvalue", "generated/torch.lcm", "generated/torch.ldexp", "generated/torch.le", "generated/torch.lerp", "generated/torch.less", "generated/torch.less_equal", "generated/torch.lgamma", "generated/torch.linalg.cholesky", "generated/torch.linalg.cholesky_ex", "generated/torch.linalg.cond", "generated/torch.linalg.cross", "generated/torch.linalg.det", "generated/torch.linalg.diagonal", "generated/torch.linalg.eig", "generated/torch.linalg.eigh", "generated/torch.linalg.eigvals", "generated/torch.linalg.eigvalsh", "generated/torch.linalg.householder_product", "generated/torch.linalg.inv", "generated/torch.linalg.inv_ex", "generated/torch.linalg.ldl_factor", "generated/torch.linalg.ldl_factor_ex", "generated/torch.linalg.ldl_solve", "generated/torch.linalg.lstsq", "generated/torch.linalg.lu", "generated/torch.linalg.lu_factor", "generated/torch.linalg.lu_factor_ex", "generated/torch.linalg.lu_solve", "generated/torch.linalg.matmul", "generated/torch.linalg.matrix_exp", "generated/torch.linalg.matrix_norm", "generated/torch.linalg.matrix_power", "generated/torch.linalg.matrix_rank", "generated/torch.linalg.multi_dot", "generated/torch.linalg.norm", "generated/torch.linalg.pinv", "generated/torch.linalg.qr", "generated/torch.linalg.slogdet", "generated/torch.linalg.solve", "generated/torch.linalg.solve_ex", "generated/torch.linalg.solve_triangular", "generated/torch.linalg.svd", "generated/torch.linalg.svdvals", "generated/torch.linalg.tensorinv", "generated/torch.linalg.tensorsolve", "generated/torch.linalg.vander", "generated/torch.linalg.vecdot", "generated/torch.linalg.vector_norm", "generated/torch.linspace", "generated/torch.load", "generated/torch.lobpcg", "generated/torch.log", "generated/torch.log10", "generated/torch.log1p", "generated/torch.log2", "generated/torch.logaddexp", "generated/torch.logaddexp2", "generated/torch.logcumsumexp", "generated/torch.logdet", "generated/torch.logical_and", "generated/torch.logical_not", "generated/torch.logical_or", "generated/torch.logical_xor", "generated/torch.logit", "generated/torch.logspace", "generated/torch.logsumexp", "generated/torch.lt", "generated/torch.lu", 
"generated/torch.lu_solve", "generated/torch.lu_unpack", "generated/torch.manual_seed", "generated/torch.masked_select", "generated/torch.matmul", "generated/torch.matrix_exp", "generated/torch.matrix_power", "generated/torch.max", "generated/torch.maximum", "generated/torch.mean", "generated/torch.median", "generated/torch.meshgrid", "generated/torch.min", "generated/torch.minimum", "generated/torch.mm", "generated/torch.mode", "generated/torch.moveaxis", "generated/torch.movedim", "generated/torch.mps.current_allocated_memory", "generated/torch.mps.device_count", "generated/torch.mps.driver_allocated_memory", "generated/torch.mps.empty_cache", "generated/torch.mps.event.Event", "generated/torch.mps.get_rng_state", "generated/torch.mps.manual_seed", "generated/torch.mps.profiler.profile", "generated/torch.mps.profiler.start", "generated/torch.mps.profiler.stop", "generated/torch.mps.seed", "generated/torch.mps.set_per_process_memory_fraction", "generated/torch.mps.set_rng_state", "generated/torch.mps.synchronize", "generated/torch.msort", "generated/torch.mtia.DeferredMtiaCallError", "generated/torch.mtia.Event", "generated/torch.mtia.Stream", "generated/torch.mtia.StreamContext", "generated/torch.mtia.current_device", "generated/torch.mtia.current_stream", "generated/torch.mtia.default_stream", "generated/torch.mtia.device", "generated/torch.mtia.device_count", "generated/torch.mtia.init", "generated/torch.mtia.is_available", "generated/torch.mtia.is_initialized", "generated/torch.mtia.set_stream", "generated/torch.mtia.stream", "generated/torch.mtia.synchronize", "generated/torch.mul", "generated/torch.multinomial", "generated/torch.multiply", "generated/torch.mv", "generated/torch.mvlgamma", "generated/torch.nan_to_num", "generated/torch.nanmean", "generated/torch.nanmedian", "generated/torch.nanquantile", "generated/torch.nansum", "generated/torch.narrow", "generated/torch.narrow_copy", "generated/torch.ne", "generated/torch.neg", "generated/torch.negative", "generated/torch.nextafter", "generated/torch.nn.AdaptiveAvgPool1d", "generated/torch.nn.AdaptiveAvgPool2d", "generated/torch.nn.AdaptiveAvgPool3d", "generated/torch.nn.AdaptiveLogSoftmaxWithLoss", "generated/torch.nn.AdaptiveMaxPool1d", "generated/torch.nn.AdaptiveMaxPool2d", "generated/torch.nn.AdaptiveMaxPool3d", "generated/torch.nn.AlphaDropout", "generated/torch.nn.AvgPool1d", "generated/torch.nn.AvgPool2d", "generated/torch.nn.AvgPool3d", "generated/torch.nn.BCELoss", "generated/torch.nn.BCEWithLogitsLoss", "generated/torch.nn.BatchNorm1d", "generated/torch.nn.BatchNorm2d", "generated/torch.nn.BatchNorm3d", "generated/torch.nn.Bilinear", "generated/torch.nn.CELU", "generated/torch.nn.CTCLoss", "generated/torch.nn.ChannelShuffle", "generated/torch.nn.CircularPad1d", "generated/torch.nn.CircularPad2d", "generated/torch.nn.CircularPad3d", "generated/torch.nn.ConstantPad1d", "generated/torch.nn.ConstantPad2d", "generated/torch.nn.ConstantPad3d", "generated/torch.nn.Conv1d", "generated/torch.nn.Conv2d", "generated/torch.nn.Conv3d", "generated/torch.nn.ConvTranspose1d", "generated/torch.nn.ConvTranspose2d", "generated/torch.nn.ConvTranspose3d", "generated/torch.nn.CosineEmbeddingLoss", "generated/torch.nn.CosineSimilarity", "generated/torch.nn.CrossEntropyLoss", "generated/torch.nn.DataParallel", "generated/torch.nn.Dropout", "generated/torch.nn.Dropout1d", "generated/torch.nn.Dropout2d", "generated/torch.nn.Dropout3d", "generated/torch.nn.ELU", "generated/torch.nn.Embedding", "generated/torch.nn.EmbeddingBag", 
"generated/torch.nn.FeatureAlphaDropout", "generated/torch.nn.Flatten", "generated/torch.nn.Fold", "generated/torch.nn.FractionalMaxPool2d", "generated/torch.nn.FractionalMaxPool3d", "generated/torch.nn.GELU", "generated/torch.nn.GLU", "generated/torch.nn.GRU", "generated/torch.nn.GRUCell", "generated/torch.nn.GaussianNLLLoss", "generated/torch.nn.GroupNorm", "generated/torch.nn.Hardshrink", "generated/torch.nn.Hardsigmoid", "generated/torch.nn.Hardswish", "generated/torch.nn.Hardtanh", "generated/torch.nn.HingeEmbeddingLoss", "generated/torch.nn.HuberLoss", "generated/torch.nn.Identity", "generated/torch.nn.InstanceNorm1d", "generated/torch.nn.InstanceNorm2d", "generated/torch.nn.InstanceNorm3d", "generated/torch.nn.KLDivLoss", "generated/torch.nn.L1Loss", "generated/torch.nn.LPPool1d", "generated/torch.nn.LPPool2d", "generated/torch.nn.LPPool3d", "generated/torch.nn.LSTM", "generated/torch.nn.LSTMCell", "generated/torch.nn.LayerNorm", "generated/torch.nn.LazyBatchNorm1d", "generated/torch.nn.LazyBatchNorm2d", "generated/torch.nn.LazyBatchNorm3d", "generated/torch.nn.LazyConv1d", "generated/torch.nn.LazyConv2d", "generated/torch.nn.LazyConv3d", "generated/torch.nn.LazyConvTranspose1d", "generated/torch.nn.LazyConvTranspose2d", "generated/torch.nn.LazyConvTranspose3d", "generated/torch.nn.LazyInstanceNorm1d", "generated/torch.nn.LazyInstanceNorm2d", "generated/torch.nn.LazyInstanceNorm3d", "generated/torch.nn.LazyLinear", "generated/torch.nn.LeakyReLU", "generated/torch.nn.Linear", "generated/torch.nn.LocalResponseNorm", "generated/torch.nn.LogSigmoid", "generated/torch.nn.LogSoftmax", "generated/torch.nn.MSELoss", "generated/torch.nn.MarginRankingLoss", "generated/torch.nn.MaxPool1d", "generated/torch.nn.MaxPool2d", "generated/torch.nn.MaxPool3d", "generated/torch.nn.MaxUnpool1d", "generated/torch.nn.MaxUnpool2d", "generated/torch.nn.MaxUnpool3d", "generated/torch.nn.Mish", "generated/torch.nn.Module", "generated/torch.nn.ModuleDict", "generated/torch.nn.ModuleList", "generated/torch.nn.MultiLabelMarginLoss", "generated/torch.nn.MultiLabelSoftMarginLoss", "generated/torch.nn.MultiMarginLoss", "generated/torch.nn.MultiheadAttention", "generated/torch.nn.NLLLoss", "generated/torch.nn.PReLU", "generated/torch.nn.PairwiseDistance", "generated/torch.nn.ParameterDict", "generated/torch.nn.ParameterList", "generated/torch.nn.PixelShuffle", "generated/torch.nn.PixelUnshuffle", "generated/torch.nn.PoissonNLLLoss", "generated/torch.nn.RMSNorm", "generated/torch.nn.RNN", "generated/torch.nn.RNNBase", "generated/torch.nn.RNNCell", "generated/torch.nn.RReLU", "generated/torch.nn.ReLU", "generated/torch.nn.ReLU6", "generated/torch.nn.ReflectionPad1d", "generated/torch.nn.ReflectionPad2d", "generated/torch.nn.ReflectionPad3d", "generated/torch.nn.ReplicationPad1d", "generated/torch.nn.ReplicationPad2d", "generated/torch.nn.ReplicationPad3d", "generated/torch.nn.SELU", "generated/torch.nn.Sequential", "generated/torch.nn.SiLU", "generated/torch.nn.Sigmoid", "generated/torch.nn.SmoothL1Loss", "generated/torch.nn.SoftMarginLoss", "generated/torch.nn.Softmax", "generated/torch.nn.Softmax2d", "generated/torch.nn.Softmin", "generated/torch.nn.Softplus", "generated/torch.nn.Softshrink", "generated/torch.nn.Softsign", "generated/torch.nn.SyncBatchNorm", "generated/torch.nn.Tanh", "generated/torch.nn.Tanhshrink", "generated/torch.nn.Threshold", "generated/torch.nn.Transformer", "generated/torch.nn.TransformerDecoder", "generated/torch.nn.TransformerDecoderLayer", "generated/torch.nn.TransformerEncoder", 
"generated/torch.nn.TransformerEncoderLayer", "generated/torch.nn.TripletMarginLoss", "generated/torch.nn.TripletMarginWithDistanceLoss", "generated/torch.nn.Unflatten", "generated/torch.nn.Unfold", "generated/torch.nn.Upsample", "generated/torch.nn.UpsamplingBilinear2d", "generated/torch.nn.UpsamplingNearest2d", "generated/torch.nn.ZeroPad1d", "generated/torch.nn.ZeroPad2d", "generated/torch.nn.ZeroPad3d", "generated/torch.nn.attention.SDPBackend", "generated/torch.nn.attention.bias.CausalBias", "generated/torch.nn.attention.bias.CausalVariant", "generated/torch.nn.attention.bias.causal_lower_right", "generated/torch.nn.attention.bias.causal_upper_left", "generated/torch.nn.attention.sdpa_kernel", "generated/torch.nn.functional.adaptive_avg_pool1d", "generated/torch.nn.functional.adaptive_avg_pool2d", "generated/torch.nn.functional.adaptive_avg_pool3d", "generated/torch.nn.functional.adaptive_max_pool1d", "generated/torch.nn.functional.adaptive_max_pool2d", "generated/torch.nn.functional.adaptive_max_pool3d", "generated/torch.nn.functional.affine_grid", "generated/torch.nn.functional.alpha_dropout", "generated/torch.nn.functional.avg_pool1d", "generated/torch.nn.functional.avg_pool2d", "generated/torch.nn.functional.avg_pool3d", "generated/torch.nn.functional.batch_norm", "generated/torch.nn.functional.bilinear", "generated/torch.nn.functional.binary_cross_entropy", "generated/torch.nn.functional.binary_cross_entropy_with_logits", "generated/torch.nn.functional.celu", "generated/torch.nn.functional.conv1d", "generated/torch.nn.functional.conv2d", "generated/torch.nn.functional.conv3d", "generated/torch.nn.functional.conv_transpose1d", "generated/torch.nn.functional.conv_transpose2d", "generated/torch.nn.functional.conv_transpose3d", "generated/torch.nn.functional.cosine_embedding_loss", "generated/torch.nn.functional.cosine_similarity", "generated/torch.nn.functional.cross_entropy", "generated/torch.nn.functional.ctc_loss", "generated/torch.nn.functional.dropout", "generated/torch.nn.functional.dropout1d", "generated/torch.nn.functional.dropout2d", "generated/torch.nn.functional.dropout3d", "generated/torch.nn.functional.elu", "generated/torch.nn.functional.elu_", "generated/torch.nn.functional.embedding", "generated/torch.nn.functional.embedding_bag", "generated/torch.nn.functional.feature_alpha_dropout", "generated/torch.nn.functional.fold", "generated/torch.nn.functional.fractional_max_pool2d", "generated/torch.nn.functional.fractional_max_pool3d", "generated/torch.nn.functional.gaussian_nll_loss", "generated/torch.nn.functional.gelu", "generated/torch.nn.functional.glu", "generated/torch.nn.functional.grid_sample", "generated/torch.nn.functional.group_norm", "generated/torch.nn.functional.gumbel_softmax", "generated/torch.nn.functional.hardshrink", "generated/torch.nn.functional.hardsigmoid", "generated/torch.nn.functional.hardswish", "generated/torch.nn.functional.hardtanh", "generated/torch.nn.functional.hardtanh_", "generated/torch.nn.functional.hinge_embedding_loss", "generated/torch.nn.functional.huber_loss", "generated/torch.nn.functional.instance_norm", "generated/torch.nn.functional.interpolate", "generated/torch.nn.functional.kl_div", "generated/torch.nn.functional.l1_loss", "generated/torch.nn.functional.layer_norm", "generated/torch.nn.functional.leaky_relu", "generated/torch.nn.functional.leaky_relu_", "generated/torch.nn.functional.linear", "generated/torch.nn.functional.local_response_norm", "generated/torch.nn.functional.log_softmax", 
"generated/torch.nn.functional.logsigmoid", "generated/torch.nn.functional.lp_pool1d", "generated/torch.nn.functional.lp_pool2d", "generated/torch.nn.functional.lp_pool3d", "generated/torch.nn.functional.margin_ranking_loss", "generated/torch.nn.functional.max_pool1d", "generated/torch.nn.functional.max_pool2d", "generated/torch.nn.functional.max_pool3d", "generated/torch.nn.functional.max_unpool1d", "generated/torch.nn.functional.max_unpool2d", "generated/torch.nn.functional.max_unpool3d", "generated/torch.nn.functional.mish", "generated/torch.nn.functional.mse_loss", "generated/torch.nn.functional.multi_margin_loss", "generated/torch.nn.functional.multilabel_margin_loss", "generated/torch.nn.functional.multilabel_soft_margin_loss", "generated/torch.nn.functional.nll_loss", "generated/torch.nn.functional.normalize", "generated/torch.nn.functional.one_hot", "generated/torch.nn.functional.pad", "generated/torch.nn.functional.pairwise_distance", "generated/torch.nn.functional.pdist", "generated/torch.nn.functional.pixel_shuffle", "generated/torch.nn.functional.pixel_unshuffle", "generated/torch.nn.functional.poisson_nll_loss", "generated/torch.nn.functional.prelu", "generated/torch.nn.functional.relu", "generated/torch.nn.functional.relu6", "generated/torch.nn.functional.relu_", "generated/torch.nn.functional.rms_norm", "generated/torch.nn.functional.rrelu", "generated/torch.nn.functional.rrelu_", "generated/torch.nn.functional.scaled_dot_product_attention", "generated/torch.nn.functional.selu", "generated/torch.nn.functional.sigmoid", "generated/torch.nn.functional.silu", "generated/torch.nn.functional.smooth_l1_loss", "generated/torch.nn.functional.soft_margin_loss", "generated/torch.nn.functional.softmax", "generated/torch.nn.functional.softmin", "generated/torch.nn.functional.softplus", "generated/torch.nn.functional.softshrink", "generated/torch.nn.functional.softsign", "generated/torch.nn.functional.tanh", "generated/torch.nn.functional.tanhshrink", "generated/torch.nn.functional.threshold", "generated/torch.nn.functional.threshold_", "generated/torch.nn.functional.torch.nn.parallel.data_parallel", "generated/torch.nn.functional.triplet_margin_loss", "generated/torch.nn.functional.triplet_margin_with_distance_loss", "generated/torch.nn.functional.unfold", "generated/torch.nn.functional.upsample", "generated/torch.nn.functional.upsample_bilinear", "generated/torch.nn.functional.upsample_nearest", "generated/torch.nn.modules.lazy.LazyModuleMixin", "generated/torch.nn.modules.module.register_module_backward_hook", "generated/torch.nn.modules.module.register_module_buffer_registration_hook", "generated/torch.nn.modules.module.register_module_forward_hook", "generated/torch.nn.modules.module.register_module_forward_pre_hook", "generated/torch.nn.modules.module.register_module_full_backward_hook", "generated/torch.nn.modules.module.register_module_full_backward_pre_hook", "generated/torch.nn.modules.module.register_module_module_registration_hook", "generated/torch.nn.modules.module.register_module_parameter_registration_hook", "generated/torch.nn.modules.normalization.RMSNorm", "generated/torch.nn.parallel.DistributedDataParallel", "generated/torch.nn.parameter.Parameter", "generated/torch.nn.parameter.UninitializedBuffer", "generated/torch.nn.parameter.UninitializedParameter", "generated/torch.nn.utils.clip_grad_norm", "generated/torch.nn.utils.clip_grad_norm_", "generated/torch.nn.utils.clip_grad_value_", "generated/torch.nn.utils.convert_conv2d_weight_memory_format", 
"generated/torch.nn.utils.convert_conv3d_weight_memory_format", "generated/torch.nn.utils.fuse_conv_bn_eval", "generated/torch.nn.utils.fuse_conv_bn_weights", "generated/torch.nn.utils.fuse_linear_bn_eval", "generated/torch.nn.utils.fuse_linear_bn_weights", "generated/torch.nn.utils.parameters_to_vector", "generated/torch.nn.utils.parametrizations.orthogonal", "generated/torch.nn.utils.parametrizations.spectral_norm", "generated/torch.nn.utils.parametrizations.weight_norm", "generated/torch.nn.utils.parametrize.ParametrizationList", "generated/torch.nn.utils.parametrize.cached", "generated/torch.nn.utils.parametrize.is_parametrized", "generated/torch.nn.utils.parametrize.register_parametrization", "generated/torch.nn.utils.parametrize.remove_parametrizations", "generated/torch.nn.utils.prune.BasePruningMethod", "generated/torch.nn.utils.prune.CustomFromMask", "generated/torch.nn.utils.prune.Identity", "generated/torch.nn.utils.prune.L1Unstructured", "generated/torch.nn.utils.prune.LnStructured", "generated/torch.nn.utils.prune.PruningContainer", "generated/torch.nn.utils.prune.RandomStructured", "generated/torch.nn.utils.prune.RandomUnstructured", "generated/torch.nn.utils.prune.custom_from_mask", "generated/torch.nn.utils.prune.global_unstructured", "generated/torch.nn.utils.prune.identity", "generated/torch.nn.utils.prune.is_pruned", "generated/torch.nn.utils.prune.l1_unstructured", "generated/torch.nn.utils.prune.ln_structured", "generated/torch.nn.utils.prune.random_structured", "generated/torch.nn.utils.prune.random_unstructured", "generated/torch.nn.utils.prune.remove", "generated/torch.nn.utils.remove_spectral_norm", "generated/torch.nn.utils.remove_weight_norm", "generated/torch.nn.utils.rnn.PackedSequence", "generated/torch.nn.utils.rnn.pack_padded_sequence", "generated/torch.nn.utils.rnn.pack_sequence", "generated/torch.nn.utils.rnn.pad_packed_sequence", "generated/torch.nn.utils.rnn.pad_sequence", "generated/torch.nn.utils.rnn.unpack_sequence", "generated/torch.nn.utils.rnn.unpad_sequence", "generated/torch.nn.utils.skip_init", "generated/torch.nn.utils.spectral_norm", "generated/torch.nn.utils.stateless.functional_call", "generated/torch.nn.utils.vector_to_parameters", "generated/torch.nn.utils.weight_norm", "generated/torch.no_grad", "generated/torch.nonzero", "generated/torch.norm", "generated/torch.normal", "generated/torch.not_equal", "generated/torch.numel", "generated/torch.ones", "generated/torch.ones_like", "generated/torch.onnx.JitScalarType", "generated/torch.onnx.verification.GraphInfo", "generated/torch.onnx.verification.VerificationOptions", "generated/torch.optim.ASGD", "generated/torch.optim.Adadelta", "generated/torch.optim.Adagrad", "generated/torch.optim.Adam", "generated/torch.optim.AdamW", "generated/torch.optim.Adamax", "generated/torch.optim.LBFGS", "generated/torch.optim.NAdam", "generated/torch.optim.Optimizer.add_param_group", "generated/torch.optim.Optimizer.load_state_dict", "generated/torch.optim.Optimizer.state_dict", "generated/torch.optim.Optimizer.step", "generated/torch.optim.Optimizer.zero_grad", "generated/torch.optim.RAdam", "generated/torch.optim.RMSprop", "generated/torch.optim.Rprop", "generated/torch.optim.SGD", "generated/torch.optim.SparseAdam", "generated/torch.optim.lr_scheduler.ChainedScheduler", "generated/torch.optim.lr_scheduler.ConstantLR", "generated/torch.optim.lr_scheduler.CosineAnnealingLR", "generated/torch.optim.lr_scheduler.CosineAnnealingWarmRestarts", "generated/torch.optim.lr_scheduler.CyclicLR", 
"generated/torch.optim.lr_scheduler.ExponentialLR", "generated/torch.optim.lr_scheduler.LambdaLR", "generated/torch.optim.lr_scheduler.LinearLR", "generated/torch.optim.lr_scheduler.MultiStepLR", "generated/torch.optim.lr_scheduler.MultiplicativeLR", "generated/torch.optim.lr_scheduler.OneCycleLR", "generated/torch.optim.lr_scheduler.PolynomialLR", "generated/torch.optim.lr_scheduler.ReduceLROnPlateau", "generated/torch.optim.lr_scheduler.SequentialLR", "generated/torch.optim.lr_scheduler.StepLR", "generated/torch.orgqr", "generated/torch.ormqr", "generated/torch.outer", "generated/torch.pca_lowrank", "generated/torch.permute", "generated/torch.pinverse", "generated/torch.poisson", "generated/torch.polar", "generated/torch.polygamma", "generated/torch.positive", "generated/torch.pow", "generated/torch.prod", "generated/torch.promote_types", "generated/torch.qr", "generated/torch.quantile", "generated/torch.quantize_per_channel", "generated/torch.quantize_per_tensor", "generated/torch.quantized_batch_norm", "generated/torch.quantized_max_pool1d", "generated/torch.quantized_max_pool2d", "generated/torch.quasirandom.SobolEngine", "generated/torch.rad2deg", "generated/torch.rand", "generated/torch.rand_like", "generated/torch.randint", "generated/torch.randint_like", "generated/torch.randn", "generated/torch.randn_like", "generated/torch.randperm", "generated/torch.range", "generated/torch.ravel", "generated/torch.real", "generated/torch.reciprocal", "generated/torch.remainder", "generated/torch.renorm", "generated/torch.repeat_interleave", "generated/torch.reshape", "generated/torch.resolve_conj", "generated/torch.resolve_neg", "generated/torch.result_type", "generated/torch.roll", "generated/torch.rot90", "generated/torch.round", "generated/torch.row_stack", "generated/torch.rsqrt", "generated/torch.save", "generated/torch.scatter", "generated/torch.scatter_add", "generated/torch.scatter_reduce", "generated/torch.searchsorted", "generated/torch.seed", "generated/torch.select", "generated/torch.select_scatter", "generated/torch.set_default_device", "generated/torch.set_default_dtype", "generated/torch.set_default_tensor_type", "generated/torch.set_deterministic_debug_mode", "generated/torch.set_float32_matmul_precision", "generated/torch.set_flush_denormal", "generated/torch.set_num_interop_threads", "generated/torch.set_num_threads", "generated/torch.set_printoptions", "generated/torch.set_rng_state", "generated/torch.set_warn_always", "generated/torch.sgn", "generated/torch.sigmoid", "generated/torch.sign", "generated/torch.signal.windows.bartlett", "generated/torch.signal.windows.blackman", "generated/torch.signal.windows.cosine", "generated/torch.signal.windows.exponential", "generated/torch.signal.windows.gaussian", "generated/torch.signal.windows.general_cosine", "generated/torch.signal.windows.general_hamming", "generated/torch.signal.windows.hamming", "generated/torch.signal.windows.hann", "generated/torch.signal.windows.kaiser", "generated/torch.signal.windows.nuttall", "generated/torch.signbit", "generated/torch.sin", "generated/torch.sinc", "generated/torch.sinh", "generated/torch.slice_scatter", "generated/torch.slogdet", "generated/torch.smm", "generated/torch.softmax", "generated/torch.sort", "generated/torch.sparse.addmm", "generated/torch.sparse.as_sparse_gradcheck", "generated/torch.sparse.check_sparse_tensor_invariants", "generated/torch.sparse.log_softmax", "generated/torch.sparse.mm", "generated/torch.sparse.sampled_addmm", "generated/torch.sparse.softmax", 
"generated/torch.sparse.spdiags", "generated/torch.sparse.sum", "generated/torch.sparse_bsc_tensor", "generated/torch.sparse_bsr_tensor", "generated/torch.sparse_compressed_tensor", "generated/torch.sparse_coo_tensor", "generated/torch.sparse_csc_tensor", "generated/torch.sparse_csr_tensor", "generated/torch.split", "generated/torch.sqrt", "generated/torch.square", "generated/torch.squeeze", "generated/torch.sspaddmm", "generated/torch.stack", "generated/torch.std", "generated/torch.std_mean", "generated/torch.stft", "generated/torch.sub", "generated/torch.subtract", "generated/torch.sum", "generated/torch.svd", "generated/torch.svd_lowrank", "generated/torch.swapaxes", "generated/torch.swapdims", "generated/torch.sym_float", "generated/torch.sym_int", "generated/torch.sym_ite", "generated/torch.sym_max", "generated/torch.sym_min", "generated/torch.sym_not", "generated/torch.t", "generated/torch.take", "generated/torch.take_along_dim", "generated/torch.tan", "generated/torch.tanh", "generated/torch.tensor", "generated/torch.tensor_split", "generated/torch.tensordot", "generated/torch.tile", "generated/torch.topk", "generated/torch.trace", "generated/torch.transpose", "generated/torch.trapezoid", "generated/torch.trapz", "generated/torch.triangular_solve", "generated/torch.tril", "generated/torch.tril_indices", "generated/torch.triu", "generated/torch.triu_indices", "generated/torch.true_divide", "generated/torch.trunc", "generated/torch.unbind", "generated/torch.unflatten", "generated/torch.unique", "generated/torch.unique_consecutive", "generated/torch.unravel_index", "generated/torch.unsqueeze", "generated/torch.use_deterministic_algorithms", "generated/torch.utils.generate_methods_for_privateuse1_backend", "generated/torch.utils.get_cpp_backtrace", "generated/torch.utils.rename_privateuse1_backend", "generated/torch.utils.set_module", "generated/torch.utils.swap_tensors", "generated/torch.vander", "generated/torch.var", "generated/torch.var_mean", "generated/torch.vdot", "generated/torch.view_as_complex", "generated/torch.view_as_real", "generated/torch.vmap", "generated/torch.vsplit", "generated/torch.vstack", "generated/torch.where", "generated/torch.xlogy", "generated/torch.xpu.Event", "generated/torch.xpu.Stream", "generated/torch.xpu.StreamContext", "generated/torch.xpu.current_device", "generated/torch.xpu.current_stream", "generated/torch.xpu.device", "generated/torch.xpu.device_count", "generated/torch.xpu.device_of", "generated/torch.xpu.empty_cache", "generated/torch.xpu.get_device_capability", "generated/torch.xpu.get_device_name", "generated/torch.xpu.get_device_properties", "generated/torch.xpu.get_rng_state", "generated/torch.xpu.get_rng_state_all", "generated/torch.xpu.init", "generated/torch.xpu.initial_seed", "generated/torch.xpu.is_available", "generated/torch.xpu.is_initialized", "generated/torch.xpu.manual_seed", "generated/torch.xpu.manual_seed_all", "generated/torch.xpu.seed", "generated/torch.xpu.seed_all", "generated/torch.xpu.set_device", "generated/torch.xpu.set_rng_state", "generated/torch.xpu.set_rng_state_all", "generated/torch.xpu.set_stream", "generated/torch.xpu.stream", "generated/torch.xpu.synchronize", "generated/torch.zeros", "generated/torch.zeros_like", "hub", "index", "jit", "jit_builtin_functions", "jit_language_reference", "jit_language_reference_v2", "jit_python_reference", "jit_unsupported", "jit_utils", "library", "linalg", "logging", "masked", "meta", "miscellaneous_environment_variables", "mobile_optimizer", "model_zoo", "module_tracker", 
"monitor", "mps", "mtia", "multiprocessing", "name_inference", "named_tensor", "nested", "nn", "nn.attention", "nn.attention.bias", "nn.functional", "nn.init", "notes/amp_examples", "notes/autograd", "notes/broadcasting", "notes/cpu_threading_torchscript_inference", "notes/cuda", "notes/custom_operators", "notes/ddp", "notes/extending", "notes/extending.func", "notes/faq", "notes/fsdp", "notes/get_start_xpu", "notes/gradcheck", "notes/hip", "notes/large_scale_deployments", "notes/modules", "notes/mps", "notes/multiprocessing", "notes/numerical_accuracy", "notes/randomness", "notes/serialization", "notes/windows", "onnx", "onnx_dynamo", "onnx_dynamo_onnxruntime_backend", "onnx_torchscript", "onnx_torchscript_supported_aten_ops", "optim", "package", "profiler", "quantization", "quantization-accuracy-debugging", "quantization-backend-configuration", "quantization-support", "random", "rpc", "rpc/distributed_autograd", "rpc/rref", "signal", "size", "sparse", "special", "storage", "tensor_attributes", "tensor_view", "tensorboard", "tensors", "testing", "threading_environment_variables", "torch", "torch.ao.ns._numeric_suite", "torch.ao.ns._numeric_suite_fx", "torch.compiler", "torch.compiler_aot_inductor", "torch.compiler_api", "torch.compiler_best_practices_for_backends", "torch.compiler_cudagraph_trees", "torch.compiler_custom_backends", "torch.compiler_dynamic_shapes", "torch.compiler_dynamo_deepdive", "torch.compiler_dynamo_overview", "torch.compiler_fake_tensor", "torch.compiler_faq", "torch.compiler_fine_grain_apis", "torch.compiler_get_started", "torch.compiler_inductor_profiling", "torch.compiler_ir", "torch.compiler_nn_module", "torch.compiler_performance_dashboard", "torch.compiler_profiling_torch_compile", "torch.compiler_transformations", "torch.compiler_troubleshooting", "torch.overrides", "torch_cuda_memory", "torch_environment_variables", "torch_nccl_environment_variables", "type_info", "utils", "xpu"], "filenames": ["amp.rst", "autograd.rst", "backends.rst", "benchmark_utils.rst", "bottleneck.rst", "checkpoint.rst", "community/build_ci_governance.rst", "community/contribution_guide.rst", "community/design.rst", "community/governance.rst", "community/persons_of_interest.rst", "complex_numbers.rst", "cond.rst", "config_mod.rst", "cpp_extension.rst", "cpp_index.rst", "cpu.rst", "cuda.rst", "cuda._sanitizer.rst", "cuda.tunable.rst", "cuda_environment_variables.rst", "cudnn_persistent_rnn.rst", "cudnn_rnn_determinism.rst", "data.rst", "ddp_comm_hooks.rst", "debugging_environment_variables.rst", "deploy.rst", "deterministic.rst", "distributed.rst", "distributed.algorithms.join.rst", "distributed.checkpoint.rst", "distributed.elastic.rst", "distributed.optim.rst", "distributed.pipelining.rst", "distributed.tensor.parallel.rst", "distributions.rst", "dlpack.rst", "elastic/agent.rst", "elastic/control_plane.rst", "elastic/customization.rst", "elastic/errors.rst", "elastic/events.rst", "elastic/examples.rst", "elastic/kubernetes.rst", "elastic/metrics.rst", "elastic/multiprocessing.rst", "elastic/quickstart.rst", "elastic/rendezvous.rst", "elastic/run.rst", "elastic/subprocess_handler.rst", "elastic/timer.rst", "elastic/train_script.rst", "export.rst", "export.ir_spec.rst", "fft.rst", "fsdp.rst", "func.rst", "func.api.rst", "func.batch_norm.rst", "func.migrating.rst", "func.ux_limitations.rst", "func.whirlwind_tour.rst", "future_mod.rst", "futures.rst", "fx.rst", "fx.experimental.rst", "generated/exportdb/index.rst", "generated/exportdb/python.assert.rst", 
"generated/exportdb/python.builtin.rst", "generated/exportdb/python.closure.rst", "generated/exportdb/python.context-manager.rst", "generated/exportdb/python.control-flow.rst", "generated/exportdb/python.data-structure.rst", "generated/exportdb/python.object-model.rst", "generated/exportdb/torch.cond.rst", "generated/exportdb/torch.dynamic-shape.rst", "generated/exportdb/torch.dynamic-value.rst", "generated/exportdb/torch.escape-hatch.rst", "generated/exportdb/torch.map.rst", "generated/exportdb/torch.mutation.rst", "generated/exportdb/torch.operator.rst", "generated/onnx_dynamo_diagnostics_rules/FXE0007:fx-graph-to-onnx.md", "generated/onnx_dynamo_diagnostics_rules/FXE0008:fx-node-to-onnx.md", "generated/onnx_dynamo_diagnostics_rules/FXE0010:fx-pass.md", "generated/onnx_dynamo_diagnostics_rules/FXE0011:no-symbolic-function-for-call-function.md", "generated/onnx_dynamo_diagnostics_rules/FXE0012:unsupported-fx-node-analysis.md", "generated/onnx_dynamo_diagnostics_rules/FXE0013:op-level-debugging.md", "generated/onnx_dynamo_diagnostics_rules/FXE0014:find-opschema-matched-symbolic-function.md", "generated/onnx_dynamo_diagnostics_rules/FXE0015:fx-node-insert-type-promotion.md", "generated/onnx_dynamo_diagnostics_rules/FXE0016:find-operator-overloads-in-onnx-registry.md", "generated/torch.Generator.rst", "generated/torch.Tensor.abs.rst", "generated/torch.Tensor.abs_.rst", "generated/torch.Tensor.absolute.rst", "generated/torch.Tensor.absolute_.rst", "generated/torch.Tensor.acos.rst", "generated/torch.Tensor.acos_.rst", "generated/torch.Tensor.acosh.rst", "generated/torch.Tensor.acosh_.rst", "generated/torch.Tensor.add.rst", "generated/torch.Tensor.add_.rst", "generated/torch.Tensor.addbmm.rst", "generated/torch.Tensor.addbmm_.rst", "generated/torch.Tensor.addcdiv.rst", "generated/torch.Tensor.addcdiv_.rst", "generated/torch.Tensor.addcmul.rst", "generated/torch.Tensor.addcmul_.rst", "generated/torch.Tensor.addmm.rst", "generated/torch.Tensor.addmm_.rst", "generated/torch.Tensor.addmv.rst", "generated/torch.Tensor.addmv_.rst", "generated/torch.Tensor.addr.rst", "generated/torch.Tensor.addr_.rst", "generated/torch.Tensor.adjoint.rst", "generated/torch.Tensor.all.rst", "generated/torch.Tensor.allclose.rst", "generated/torch.Tensor.amax.rst", "generated/torch.Tensor.amin.rst", "generated/torch.Tensor.aminmax.rst", "generated/torch.Tensor.angle.rst", "generated/torch.Tensor.any.rst", "generated/torch.Tensor.apply_.rst", "generated/torch.Tensor.arccos.rst", "generated/torch.Tensor.arccos_.rst", "generated/torch.Tensor.arccosh.rst", "generated/torch.Tensor.arccosh_.rst", "generated/torch.Tensor.arcsin.rst", "generated/torch.Tensor.arcsin_.rst", "generated/torch.Tensor.arcsinh.rst", "generated/torch.Tensor.arcsinh_.rst", "generated/torch.Tensor.arctan.rst", "generated/torch.Tensor.arctan2.rst", "generated/torch.Tensor.arctan2_.rst", "generated/torch.Tensor.arctan_.rst", "generated/torch.Tensor.arctanh.rst", "generated/torch.Tensor.arctanh_.rst", "generated/torch.Tensor.argmax.rst", "generated/torch.Tensor.argmin.rst", "generated/torch.Tensor.argsort.rst", "generated/torch.Tensor.argwhere.rst", "generated/torch.Tensor.as_strided.rst", "generated/torch.Tensor.as_subclass.rst", "generated/torch.Tensor.asin.rst", "generated/torch.Tensor.asin_.rst", "generated/torch.Tensor.asinh.rst", "generated/torch.Tensor.asinh_.rst", "generated/torch.Tensor.atan.rst", "generated/torch.Tensor.atan2.rst", "generated/torch.Tensor.atan2_.rst", "generated/torch.Tensor.atan_.rst", "generated/torch.Tensor.atanh.rst", 
"generated/torch.Tensor.atanh_.rst", "generated/torch.Tensor.backward.rst", "generated/torch.Tensor.baddbmm.rst", "generated/torch.Tensor.baddbmm_.rst", "generated/torch.Tensor.bernoulli.rst", "generated/torch.Tensor.bernoulli_.rst", "generated/torch.Tensor.bfloat16.rst", "generated/torch.Tensor.bincount.rst", "generated/torch.Tensor.bitwise_and.rst", "generated/torch.Tensor.bitwise_and_.rst", "generated/torch.Tensor.bitwise_left_shift.rst", "generated/torch.Tensor.bitwise_left_shift_.rst", "generated/torch.Tensor.bitwise_not.rst", "generated/torch.Tensor.bitwise_not_.rst", "generated/torch.Tensor.bitwise_or.rst", "generated/torch.Tensor.bitwise_or_.rst", "generated/torch.Tensor.bitwise_right_shift.rst", "generated/torch.Tensor.bitwise_right_shift_.rst", "generated/torch.Tensor.bitwise_xor.rst", "generated/torch.Tensor.bitwise_xor_.rst", "generated/torch.Tensor.bmm.rst", "generated/torch.Tensor.bool.rst", "generated/torch.Tensor.broadcast_to.rst", "generated/torch.Tensor.byte.rst", "generated/torch.Tensor.cauchy_.rst", "generated/torch.Tensor.ccol_indices.rst", "generated/torch.Tensor.cdouble.rst", "generated/torch.Tensor.ceil.rst", "generated/torch.Tensor.ceil_.rst", "generated/torch.Tensor.cfloat.rst", "generated/torch.Tensor.chalf.rst", "generated/torch.Tensor.char.rst", "generated/torch.Tensor.cholesky.rst", "generated/torch.Tensor.cholesky_inverse.rst", "generated/torch.Tensor.cholesky_solve.rst", "generated/torch.Tensor.chunk.rst", "generated/torch.Tensor.clamp.rst", "generated/torch.Tensor.clamp_.rst", "generated/torch.Tensor.clip.rst", "generated/torch.Tensor.clip_.rst", "generated/torch.Tensor.clone.rst", "generated/torch.Tensor.coalesce.rst", "generated/torch.Tensor.col_indices.rst", "generated/torch.Tensor.conj.rst", "generated/torch.Tensor.conj_physical.rst", "generated/torch.Tensor.conj_physical_.rst", "generated/torch.Tensor.contiguous.rst", "generated/torch.Tensor.copy_.rst", "generated/torch.Tensor.copysign.rst", "generated/torch.Tensor.copysign_.rst", "generated/torch.Tensor.corrcoef.rst", "generated/torch.Tensor.cos.rst", "generated/torch.Tensor.cos_.rst", "generated/torch.Tensor.cosh.rst", "generated/torch.Tensor.cosh_.rst", "generated/torch.Tensor.count_nonzero.rst", "generated/torch.Tensor.cov.rst", "generated/torch.Tensor.cpu.rst", "generated/torch.Tensor.cross.rst", "generated/torch.Tensor.crow_indices.rst", "generated/torch.Tensor.cuda.rst", "generated/torch.Tensor.cummax.rst", "generated/torch.Tensor.cummin.rst", "generated/torch.Tensor.cumprod.rst", "generated/torch.Tensor.cumprod_.rst", "generated/torch.Tensor.cumsum.rst", "generated/torch.Tensor.cumsum_.rst", "generated/torch.Tensor.data_ptr.rst", "generated/torch.Tensor.deg2rad.rst", "generated/torch.Tensor.dense_dim.rst", "generated/torch.Tensor.dequantize.rst", "generated/torch.Tensor.det.rst", "generated/torch.Tensor.detach.rst", "generated/torch.Tensor.detach_.rst", "generated/torch.Tensor.device.rst", "generated/torch.Tensor.diag.rst", "generated/torch.Tensor.diag_embed.rst", "generated/torch.Tensor.diagflat.rst", "generated/torch.Tensor.diagonal.rst", "generated/torch.Tensor.diagonal_scatter.rst", "generated/torch.Tensor.diff.rst", "generated/torch.Tensor.digamma.rst", "generated/torch.Tensor.digamma_.rst", "generated/torch.Tensor.dim.rst", "generated/torch.Tensor.dim_order.rst", "generated/torch.Tensor.dist.rst", "generated/torch.Tensor.div.rst", "generated/torch.Tensor.div_.rst", "generated/torch.Tensor.divide.rst", "generated/torch.Tensor.divide_.rst", "generated/torch.Tensor.dot.rst", 
"generated/torch.Tensor.double.rst", "generated/torch.Tensor.dsplit.rst", "generated/torch.Tensor.element_size.rst", "generated/torch.Tensor.eq.rst", "generated/torch.Tensor.eq_.rst", "generated/torch.Tensor.equal.rst", "generated/torch.Tensor.erf.rst", "generated/torch.Tensor.erf_.rst", "generated/torch.Tensor.erfc.rst", "generated/torch.Tensor.erfc_.rst", "generated/torch.Tensor.erfinv.rst", "generated/torch.Tensor.erfinv_.rst", "generated/torch.Tensor.exp.rst", "generated/torch.Tensor.exp_.rst", "generated/torch.Tensor.expand.rst", "generated/torch.Tensor.expand_as.rst", "generated/torch.Tensor.expm1.rst", "generated/torch.Tensor.expm1_.rst", "generated/torch.Tensor.exponential_.rst", "generated/torch.Tensor.fill_.rst", "generated/torch.Tensor.fill_diagonal_.rst", "generated/torch.Tensor.fix.rst", "generated/torch.Tensor.fix_.rst", "generated/torch.Tensor.flatten.rst", "generated/torch.Tensor.flip.rst", "generated/torch.Tensor.fliplr.rst", "generated/torch.Tensor.flipud.rst", "generated/torch.Tensor.float.rst", "generated/torch.Tensor.float_power.rst", "generated/torch.Tensor.float_power_.rst", "generated/torch.Tensor.floor.rst", "generated/torch.Tensor.floor_.rst", "generated/torch.Tensor.floor_divide.rst", "generated/torch.Tensor.floor_divide_.rst", "generated/torch.Tensor.fmax.rst", "generated/torch.Tensor.fmin.rst", "generated/torch.Tensor.fmod.rst", "generated/torch.Tensor.fmod_.rst", "generated/torch.Tensor.frac.rst", "generated/torch.Tensor.frac_.rst", "generated/torch.Tensor.frexp.rst", "generated/torch.Tensor.gather.rst", "generated/torch.Tensor.gcd.rst", "generated/torch.Tensor.gcd_.rst", "generated/torch.Tensor.ge.rst", "generated/torch.Tensor.ge_.rst", "generated/torch.Tensor.geometric_.rst", "generated/torch.Tensor.geqrf.rst", "generated/torch.Tensor.ger.rst", "generated/torch.Tensor.get_device.rst", "generated/torch.Tensor.grad.rst", "generated/torch.Tensor.greater.rst", "generated/torch.Tensor.greater_.rst", "generated/torch.Tensor.greater_equal.rst", "generated/torch.Tensor.greater_equal_.rst", "generated/torch.Tensor.gt.rst", "generated/torch.Tensor.gt_.rst", "generated/torch.Tensor.half.rst", "generated/torch.Tensor.hardshrink.rst", "generated/torch.Tensor.heaviside.rst", "generated/torch.Tensor.histc.rst", "generated/torch.Tensor.histogram.rst", "generated/torch.Tensor.hsplit.rst", "generated/torch.Tensor.hypot.rst", "generated/torch.Tensor.hypot_.rst", "generated/torch.Tensor.i0.rst", "generated/torch.Tensor.i0_.rst", "generated/torch.Tensor.igamma.rst", "generated/torch.Tensor.igamma_.rst", "generated/torch.Tensor.igammac.rst", "generated/torch.Tensor.igammac_.rst", "generated/torch.Tensor.imag.rst", "generated/torch.Tensor.index_add.rst", "generated/torch.Tensor.index_add_.rst", "generated/torch.Tensor.index_copy.rst", "generated/torch.Tensor.index_copy_.rst", "generated/torch.Tensor.index_fill.rst", "generated/torch.Tensor.index_fill_.rst", "generated/torch.Tensor.index_put.rst", "generated/torch.Tensor.index_put_.rst", "generated/torch.Tensor.index_reduce.rst", "generated/torch.Tensor.index_reduce_.rst", "generated/torch.Tensor.index_select.rst", "generated/torch.Tensor.indices.rst", "generated/torch.Tensor.inner.rst", "generated/torch.Tensor.int.rst", "generated/torch.Tensor.int_repr.rst", "generated/torch.Tensor.inverse.rst", "generated/torch.Tensor.is_coalesced.rst", "generated/torch.Tensor.is_complex.rst", "generated/torch.Tensor.is_conj.rst", "generated/torch.Tensor.is_contiguous.rst", "generated/torch.Tensor.is_cuda.rst", 
"generated/torch.Tensor.is_floating_point.rst", "generated/torch.Tensor.is_inference.rst", "generated/torch.Tensor.is_leaf.rst", "generated/torch.Tensor.is_meta.rst", "generated/torch.Tensor.is_pinned.rst", "generated/torch.Tensor.is_quantized.rst", "generated/torch.Tensor.is_set_to.rst", "generated/torch.Tensor.is_shared.rst", "generated/torch.Tensor.is_signed.rst", "generated/torch.Tensor.is_sparse.rst", "generated/torch.Tensor.is_sparse_csr.rst", "generated/torch.Tensor.isclose.rst", "generated/torch.Tensor.isfinite.rst", "generated/torch.Tensor.isinf.rst", "generated/torch.Tensor.isnan.rst", "generated/torch.Tensor.isneginf.rst", "generated/torch.Tensor.isposinf.rst", "generated/torch.Tensor.isreal.rst", "generated/torch.Tensor.istft.rst", "generated/torch.Tensor.item.rst", "generated/torch.Tensor.itemsize.rst", "generated/torch.Tensor.kthvalue.rst", "generated/torch.Tensor.lcm.rst", "generated/torch.Tensor.lcm_.rst", "generated/torch.Tensor.ldexp.rst", "generated/torch.Tensor.ldexp_.rst", "generated/torch.Tensor.le.rst", "generated/torch.Tensor.le_.rst", "generated/torch.Tensor.lerp.rst", "generated/torch.Tensor.lerp_.rst", "generated/torch.Tensor.less.rst", "generated/torch.Tensor.less_.rst", "generated/torch.Tensor.less_equal.rst", "generated/torch.Tensor.less_equal_.rst", "generated/torch.Tensor.lgamma.rst", "generated/torch.Tensor.lgamma_.rst", "generated/torch.Tensor.log.rst", "generated/torch.Tensor.log10.rst", "generated/torch.Tensor.log10_.rst", "generated/torch.Tensor.log1p.rst", "generated/torch.Tensor.log1p_.rst", "generated/torch.Tensor.log2.rst", "generated/torch.Tensor.log2_.rst", "generated/torch.Tensor.log_.rst", "generated/torch.Tensor.log_normal_.rst", "generated/torch.Tensor.logaddexp.rst", "generated/torch.Tensor.logaddexp2.rst", "generated/torch.Tensor.logcumsumexp.rst", "generated/torch.Tensor.logdet.rst", "generated/torch.Tensor.logical_and.rst", "generated/torch.Tensor.logical_and_.rst", "generated/torch.Tensor.logical_not.rst", "generated/torch.Tensor.logical_not_.rst", "generated/torch.Tensor.logical_or.rst", "generated/torch.Tensor.logical_or_.rst", "generated/torch.Tensor.logical_xor.rst", "generated/torch.Tensor.logical_xor_.rst", "generated/torch.Tensor.logit.rst", "generated/torch.Tensor.logit_.rst", "generated/torch.Tensor.logsumexp.rst", "generated/torch.Tensor.long.rst", "generated/torch.Tensor.lt.rst", "generated/torch.Tensor.lt_.rst", "generated/torch.Tensor.lu.rst", "generated/torch.Tensor.lu_solve.rst", "generated/torch.Tensor.map_.rst", "generated/torch.Tensor.masked_fill.rst", "generated/torch.Tensor.masked_fill_.rst", "generated/torch.Tensor.masked_scatter.rst", "generated/torch.Tensor.masked_scatter_.rst", "generated/torch.Tensor.masked_select.rst", "generated/torch.Tensor.matmul.rst", "generated/torch.Tensor.matrix_exp.rst", "generated/torch.Tensor.matrix_power.rst", "generated/torch.Tensor.max.rst", "generated/torch.Tensor.maximum.rst", "generated/torch.Tensor.mean.rst", "generated/torch.Tensor.median.rst", "generated/torch.Tensor.min.rst", "generated/torch.Tensor.minimum.rst", "generated/torch.Tensor.mm.rst", "generated/torch.Tensor.mode.rst", "generated/torch.Tensor.module_load.rst", "generated/torch.Tensor.moveaxis.rst", "generated/torch.Tensor.movedim.rst", "generated/torch.Tensor.msort.rst", "generated/torch.Tensor.mul.rst", "generated/torch.Tensor.mul_.rst", "generated/torch.Tensor.multinomial.rst", "generated/torch.Tensor.multiply.rst", "generated/torch.Tensor.multiply_.rst", "generated/torch.Tensor.mv.rst", 
"generated/torch.Tensor.mvlgamma.rst", "generated/torch.Tensor.mvlgamma_.rst", "generated/torch.Tensor.nan_to_num.rst", "generated/torch.Tensor.nan_to_num_.rst", "generated/torch.Tensor.nanmean.rst", "generated/torch.Tensor.nanmedian.rst", "generated/torch.Tensor.nanquantile.rst", "generated/torch.Tensor.nansum.rst", "generated/torch.Tensor.narrow.rst", "generated/torch.Tensor.narrow_copy.rst", "generated/torch.Tensor.nbytes.rst", "generated/torch.Tensor.ndim.rst", "generated/torch.Tensor.ndimension.rst", "generated/torch.Tensor.ne.rst", "generated/torch.Tensor.ne_.rst", "generated/torch.Tensor.neg.rst", "generated/torch.Tensor.neg_.rst", "generated/torch.Tensor.negative.rst", "generated/torch.Tensor.negative_.rst", "generated/torch.Tensor.nelement.rst", "generated/torch.Tensor.new_empty.rst", "generated/torch.Tensor.new_full.rst", "generated/torch.Tensor.new_ones.rst", "generated/torch.Tensor.new_tensor.rst", "generated/torch.Tensor.new_zeros.rst", "generated/torch.Tensor.nextafter.rst", "generated/torch.Tensor.nextafter_.rst", "generated/torch.Tensor.nonzero.rst", "generated/torch.Tensor.norm.rst", "generated/torch.Tensor.normal_.rst", "generated/torch.Tensor.not_equal.rst", "generated/torch.Tensor.not_equal_.rst", "generated/torch.Tensor.numel.rst", "generated/torch.Tensor.numpy.rst", "generated/torch.Tensor.orgqr.rst", "generated/torch.Tensor.ormqr.rst", "generated/torch.Tensor.outer.rst", "generated/torch.Tensor.permute.rst", "generated/torch.Tensor.pin_memory.rst", "generated/torch.Tensor.pinverse.rst", "generated/torch.Tensor.polygamma.rst", "generated/torch.Tensor.polygamma_.rst", "generated/torch.Tensor.positive.rst", "generated/torch.Tensor.pow.rst", "generated/torch.Tensor.pow_.rst", "generated/torch.Tensor.prod.rst", "generated/torch.Tensor.put_.rst", "generated/torch.Tensor.q_per_channel_axis.rst", "generated/torch.Tensor.q_per_channel_scales.rst", "generated/torch.Tensor.q_per_channel_zero_points.rst", "generated/torch.Tensor.q_scale.rst", "generated/torch.Tensor.q_zero_point.rst", "generated/torch.Tensor.qr.rst", "generated/torch.Tensor.qscheme.rst", "generated/torch.Tensor.quantile.rst", "generated/torch.Tensor.rad2deg.rst", "generated/torch.Tensor.random_.rst", "generated/torch.Tensor.ravel.rst", "generated/torch.Tensor.real.rst", "generated/torch.Tensor.reciprocal.rst", "generated/torch.Tensor.reciprocal_.rst", "generated/torch.Tensor.record_stream.rst", "generated/torch.Tensor.register_hook.rst", "generated/torch.Tensor.register_post_accumulate_grad_hook.rst", "generated/torch.Tensor.remainder.rst", "generated/torch.Tensor.remainder_.rst", "generated/torch.Tensor.renorm.rst", "generated/torch.Tensor.renorm_.rst", "generated/torch.Tensor.repeat.rst", "generated/torch.Tensor.repeat_interleave.rst", "generated/torch.Tensor.requires_grad.rst", "generated/torch.Tensor.requires_grad_.rst", "generated/torch.Tensor.reshape.rst", "generated/torch.Tensor.reshape_as.rst", "generated/torch.Tensor.resize_.rst", "generated/torch.Tensor.resize_as_.rst", "generated/torch.Tensor.resolve_conj.rst", "generated/torch.Tensor.resolve_neg.rst", "generated/torch.Tensor.retain_grad.rst", "generated/torch.Tensor.retains_grad.rst", "generated/torch.Tensor.roll.rst", "generated/torch.Tensor.rot90.rst", "generated/torch.Tensor.round.rst", "generated/torch.Tensor.round_.rst", "generated/torch.Tensor.row_indices.rst", "generated/torch.Tensor.rsqrt.rst", "generated/torch.Tensor.rsqrt_.rst", "generated/torch.Tensor.scatter.rst", "generated/torch.Tensor.scatter_.rst", 
"generated/torch.Tensor.scatter_add.rst", "generated/torch.Tensor.scatter_add_.rst", "generated/torch.Tensor.scatter_reduce.rst", "generated/torch.Tensor.scatter_reduce_.rst", "generated/torch.Tensor.select.rst", "generated/torch.Tensor.select_scatter.rst", "generated/torch.Tensor.set_.rst", "generated/torch.Tensor.sgn.rst", "generated/torch.Tensor.sgn_.rst", "generated/torch.Tensor.shape.rst", "generated/torch.Tensor.share_memory_.rst", "generated/torch.Tensor.short.rst", "generated/torch.Tensor.sigmoid.rst", "generated/torch.Tensor.sigmoid_.rst", "generated/torch.Tensor.sign.rst", "generated/torch.Tensor.sign_.rst", "generated/torch.Tensor.signbit.rst", "generated/torch.Tensor.sin.rst", "generated/torch.Tensor.sin_.rst", "generated/torch.Tensor.sinc.rst", "generated/torch.Tensor.sinc_.rst", "generated/torch.Tensor.sinh.rst", "generated/torch.Tensor.sinh_.rst", "generated/torch.Tensor.size.rst", "generated/torch.Tensor.slice_scatter.rst", "generated/torch.Tensor.slogdet.rst", "generated/torch.Tensor.smm.rst", "generated/torch.Tensor.softmax.rst", "generated/torch.Tensor.sort.rst", "generated/torch.Tensor.sparse_dim.rst", "generated/torch.Tensor.sparse_mask.rst", "generated/torch.Tensor.sparse_resize_.rst", "generated/torch.Tensor.sparse_resize_and_clear_.rst", "generated/torch.Tensor.split.rst", "generated/torch.Tensor.sqrt.rst", "generated/torch.Tensor.sqrt_.rst", "generated/torch.Tensor.square.rst", "generated/torch.Tensor.square_.rst", "generated/torch.Tensor.squeeze.rst", "generated/torch.Tensor.squeeze_.rst", "generated/torch.Tensor.sspaddmm.rst", "generated/torch.Tensor.std.rst", "generated/torch.Tensor.stft.rst", "generated/torch.Tensor.storage.rst", "generated/torch.Tensor.storage_offset.rst", "generated/torch.Tensor.storage_type.rst", "generated/torch.Tensor.stride.rst", "generated/torch.Tensor.sub.rst", "generated/torch.Tensor.sub_.rst", "generated/torch.Tensor.subtract.rst", "generated/torch.Tensor.subtract_.rst", "generated/torch.Tensor.sum.rst", "generated/torch.Tensor.sum_to_size.rst", "generated/torch.Tensor.svd.rst", "generated/torch.Tensor.swapaxes.rst", "generated/torch.Tensor.swapdims.rst", "generated/torch.Tensor.t.rst", "generated/torch.Tensor.t_.rst", "generated/torch.Tensor.take.rst", "generated/torch.Tensor.take_along_dim.rst", "generated/torch.Tensor.tan.rst", "generated/torch.Tensor.tan_.rst", "generated/torch.Tensor.tanh.rst", "generated/torch.Tensor.tanh_.rst", "generated/torch.Tensor.tensor_split.rst", "generated/torch.Tensor.tile.rst", "generated/torch.Tensor.to.rst", "generated/torch.Tensor.to_dense.rst", "generated/torch.Tensor.to_mkldnn.rst", "generated/torch.Tensor.to_sparse.rst", "generated/torch.Tensor.to_sparse_bsc.rst", "generated/torch.Tensor.to_sparse_bsr.rst", "generated/torch.Tensor.to_sparse_coo.rst", "generated/torch.Tensor.to_sparse_csc.rst", "generated/torch.Tensor.to_sparse_csr.rst", "generated/torch.Tensor.tolist.rst", "generated/torch.Tensor.topk.rst", "generated/torch.Tensor.trace.rst", "generated/torch.Tensor.transpose.rst", "generated/torch.Tensor.transpose_.rst", "generated/torch.Tensor.triangular_solve.rst", "generated/torch.Tensor.tril.rst", "generated/torch.Tensor.tril_.rst", "generated/torch.Tensor.triu.rst", "generated/torch.Tensor.triu_.rst", "generated/torch.Tensor.true_divide.rst", "generated/torch.Tensor.true_divide_.rst", "generated/torch.Tensor.trunc.rst", "generated/torch.Tensor.trunc_.rst", "generated/torch.Tensor.type.rst", "generated/torch.Tensor.type_as.rst", "generated/torch.Tensor.unbind.rst", 
"generated/torch.Tensor.unflatten.rst", "generated/torch.Tensor.unfold.rst", "generated/torch.Tensor.uniform_.rst", "generated/torch.Tensor.unique.rst", "generated/torch.Tensor.unique_consecutive.rst", "generated/torch.Tensor.unsqueeze.rst", "generated/torch.Tensor.unsqueeze_.rst", "generated/torch.Tensor.untyped_storage.rst", "generated/torch.Tensor.values.rst", "generated/torch.Tensor.var.rst", "generated/torch.Tensor.vdot.rst", "generated/torch.Tensor.view.rst", "generated/torch.Tensor.view_as.rst", "generated/torch.Tensor.vsplit.rst", "generated/torch.Tensor.where.rst", "generated/torch.Tensor.xlogy.rst", "generated/torch.Tensor.xlogy_.rst", "generated/torch.Tensor.xpu.rst", "generated/torch.Tensor.zero_.rst", "generated/torch._assert.rst", "generated/torch._foreach_abs.rst", "generated/torch._foreach_abs_.rst", "generated/torch._foreach_acos.rst", "generated/torch._foreach_acos_.rst", "generated/torch._foreach_asin.rst", "generated/torch._foreach_asin_.rst", "generated/torch._foreach_atan.rst", "generated/torch._foreach_atan_.rst", "generated/torch._foreach_ceil.rst", "generated/torch._foreach_ceil_.rst", "generated/torch._foreach_cos.rst", "generated/torch._foreach_cos_.rst", "generated/torch._foreach_cosh.rst", "generated/torch._foreach_cosh_.rst", "generated/torch._foreach_erf.rst", "generated/torch._foreach_erf_.rst", "generated/torch._foreach_erfc.rst", "generated/torch._foreach_erfc_.rst", "generated/torch._foreach_exp.rst", "generated/torch._foreach_exp_.rst", "generated/torch._foreach_expm1.rst", "generated/torch._foreach_expm1_.rst", "generated/torch._foreach_floor.rst", "generated/torch._foreach_floor_.rst", "generated/torch._foreach_frac.rst", "generated/torch._foreach_frac_.rst", "generated/torch._foreach_lgamma.rst", "generated/torch._foreach_lgamma_.rst", "generated/torch._foreach_log.rst", "generated/torch._foreach_log10.rst", "generated/torch._foreach_log10_.rst", "generated/torch._foreach_log1p.rst", "generated/torch._foreach_log1p_.rst", "generated/torch._foreach_log2.rst", "generated/torch._foreach_log2_.rst", "generated/torch._foreach_log_.rst", "generated/torch._foreach_neg.rst", "generated/torch._foreach_neg_.rst", "generated/torch._foreach_reciprocal.rst", "generated/torch._foreach_reciprocal_.rst", "generated/torch._foreach_round.rst", "generated/torch._foreach_round_.rst", "generated/torch._foreach_sigmoid.rst", "generated/torch._foreach_sigmoid_.rst", "generated/torch._foreach_sin.rst", "generated/torch._foreach_sin_.rst", "generated/torch._foreach_sinh.rst", "generated/torch._foreach_sinh_.rst", "generated/torch._foreach_sqrt.rst", "generated/torch._foreach_sqrt_.rst", "generated/torch._foreach_tan.rst", "generated/torch._foreach_tan_.rst", "generated/torch._foreach_trunc.rst", "generated/torch._foreach_trunc_.rst", "generated/torch._foreach_zero_.rst", "generated/torch._logging.set_logs.rst", "generated/torch.abs.rst", "generated/torch.absolute.rst", "generated/torch.acos.rst", "generated/torch.acosh.rst", "generated/torch.add.rst", "generated/torch.addbmm.rst", "generated/torch.addcdiv.rst", "generated/torch.addcmul.rst", "generated/torch.addmm.rst", "generated/torch.addmv.rst", "generated/torch.addr.rst", "generated/torch.adjoint.rst", "generated/torch.all.rst", "generated/torch.allclose.rst", "generated/torch.amax.rst", "generated/torch.amin.rst", "generated/torch.aminmax.rst", "generated/torch.angle.rst", "generated/torch.any.rst", "generated/torch.ao.nn.intrinsic.BNReLU2d.rst", "generated/torch.ao.nn.intrinsic.BNReLU3d.rst", 
"generated/torch.ao.nn.intrinsic.ConvBn1d.rst", "generated/torch.ao.nn.intrinsic.ConvBn2d.rst", "generated/torch.ao.nn.intrinsic.ConvBn3d.rst", "generated/torch.ao.nn.intrinsic.ConvBnReLU1d.rst", "generated/torch.ao.nn.intrinsic.ConvBnReLU2d.rst", "generated/torch.ao.nn.intrinsic.ConvBnReLU3d.rst", "generated/torch.ao.nn.intrinsic.ConvReLU1d.rst", "generated/torch.ao.nn.intrinsic.ConvReLU2d.rst", "generated/torch.ao.nn.intrinsic.ConvReLU3d.rst", "generated/torch.ao.nn.intrinsic.LinearReLU.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBn1d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBn2d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBn3d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU1d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU2d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvBnReLU3d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvReLU2d.rst", "generated/torch.ao.nn.intrinsic.qat.ConvReLU3d.rst", "generated/torch.ao.nn.intrinsic.qat.LinearReLU.rst", "generated/torch.ao.nn.intrinsic.qat.freeze_bn_stats.rst", "generated/torch.ao.nn.intrinsic.qat.update_bn_stats.rst", "generated/torch.ao.nn.intrinsic.quantized.BNReLU2d.rst", "generated/torch.ao.nn.intrinsic.quantized.BNReLU3d.rst", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU1d.rst", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU2d.rst", "generated/torch.ao.nn.intrinsic.quantized.ConvReLU3d.rst", "generated/torch.ao.nn.intrinsic.quantized.LinearReLU.rst", "generated/torch.ao.nn.intrinsic.quantized.dynamic.LinearReLU.rst", "generated/torch.ao.nn.qat.Conv2d.rst", "generated/torch.ao.nn.qat.Conv3d.rst", "generated/torch.ao.nn.qat.Linear.rst", "generated/torch.ao.nn.qat.dynamic.Linear.rst", "generated/torch.ao.nn.quantizable.LSTM.rst", "generated/torch.ao.nn.quantizable.MultiheadAttention.rst", "generated/torch.ao.nn.quantized.BatchNorm2d.rst", "generated/torch.ao.nn.quantized.BatchNorm3d.rst", "generated/torch.ao.nn.quantized.Conv1d.rst", "generated/torch.ao.nn.quantized.Conv2d.rst", "generated/torch.ao.nn.quantized.Conv3d.rst", "generated/torch.ao.nn.quantized.ConvTranspose1d.rst", "generated/torch.ao.nn.quantized.ConvTranspose2d.rst", "generated/torch.ao.nn.quantized.ConvTranspose3d.rst", "generated/torch.ao.nn.quantized.ELU.rst", "generated/torch.ao.nn.quantized.Embedding.rst", "generated/torch.ao.nn.quantized.EmbeddingBag.rst", "generated/torch.ao.nn.quantized.FXFloatFunctional.rst", "generated/torch.ao.nn.quantized.FloatFunctional.rst", "generated/torch.ao.nn.quantized.GroupNorm.rst", "generated/torch.ao.nn.quantized.Hardswish.rst", "generated/torch.ao.nn.quantized.InstanceNorm1d.rst", "generated/torch.ao.nn.quantized.InstanceNorm2d.rst", "generated/torch.ao.nn.quantized.InstanceNorm3d.rst", "generated/torch.ao.nn.quantized.LayerNorm.rst", "generated/torch.ao.nn.quantized.LeakyReLU.rst", "generated/torch.ao.nn.quantized.Linear.rst", "generated/torch.ao.nn.quantized.QFunctional.rst", "generated/torch.ao.nn.quantized.ReLU6.rst", "generated/torch.ao.nn.quantized.Sigmoid.rst", "generated/torch.ao.nn.quantized.dynamic.GRU.rst", "generated/torch.ao.nn.quantized.dynamic.GRUCell.rst", "generated/torch.ao.nn.quantized.dynamic.LSTM.rst", "generated/torch.ao.nn.quantized.dynamic.LSTMCell.rst", "generated/torch.ao.nn.quantized.dynamic.Linear.rst", "generated/torch.ao.nn.quantized.dynamic.RNNCell.rst", "generated/torch.ao.nn.quantized.functional.adaptive_avg_pool2d.rst", "generated/torch.ao.nn.quantized.functional.adaptive_avg_pool3d.rst", "generated/torch.ao.nn.quantized.functional.avg_pool2d.rst", 
"generated/torch.ao.nn.quantized.functional.avg_pool3d.rst", "generated/torch.ao.nn.quantized.functional.celu.rst", "generated/torch.ao.nn.quantized.functional.clamp.rst", "generated/torch.ao.nn.quantized.functional.conv1d.rst", "generated/torch.ao.nn.quantized.functional.conv2d.rst", "generated/torch.ao.nn.quantized.functional.conv3d.rst", "generated/torch.ao.nn.quantized.functional.elu.rst", "generated/torch.ao.nn.quantized.functional.hardsigmoid.rst", "generated/torch.ao.nn.quantized.functional.hardswish.rst", "generated/torch.ao.nn.quantized.functional.hardtanh.rst", "generated/torch.ao.nn.quantized.functional.interpolate.rst", "generated/torch.ao.nn.quantized.functional.leaky_relu.rst", "generated/torch.ao.nn.quantized.functional.linear.rst", "generated/torch.ao.nn.quantized.functional.max_pool1d.rst", "generated/torch.ao.nn.quantized.functional.max_pool2d.rst", "generated/torch.ao.nn.quantized.functional.threshold.rst", "generated/torch.ao.nn.quantized.functional.upsample.rst", "generated/torch.ao.nn.quantized.functional.upsample_bilinear.rst", "generated/torch.ao.nn.quantized.functional.upsample_nearest.rst", "generated/torch.ao.quantization.DeQuantStub.rst", "generated/torch.ao.quantization.QuantStub.rst", "generated/torch.ao.quantization.QuantWrapper.rst", "generated/torch.ao.quantization.add_quant_dequant.rst", "generated/torch.ao.quantization.backend_config.BackendConfig.rst", "generated/torch.ao.quantization.backend_config.BackendPatternConfig.rst", "generated/torch.ao.quantization.backend_config.DTypeConfig.rst", "generated/torch.ao.quantization.backend_config.DTypeWithConstraints.rst", "generated/torch.ao.quantization.backend_config.ObservationType.rst", "generated/torch.ao.quantization.convert.rst", "generated/torch.ao.quantization.default_eval_fn.rst", "generated/torch.ao.quantization.fake_quantize.FakeQuantize.rst", "generated/torch.ao.quantization.fake_quantize.FakeQuantizeBase.rst", "generated/torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize.rst", "generated/torch.ao.quantization.fake_quantize.FusedMovingAvgObsFakeQuantize.rst", "generated/torch.ao.quantization.fake_quantize.default_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_fused_act_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_fused_per_channel_wt_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_fused_wt_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_histogram_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_per_channel_weight_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.default_weight_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.disable_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.disable_observer.rst", "generated/torch.ao.quantization.fake_quantize.enable_fake_quant.rst", "generated/torch.ao.quantization.fake_quantize.enable_observer.rst", "generated/torch.ao.quantization.fuse_modules.fuse_modules.rst", "generated/torch.ao.quantization.fx.custom_config.ConvertCustomConfig.rst", "generated/torch.ao.quantization.fx.custom_config.FuseCustomConfig.rst", "generated/torch.ao.quantization.fx.custom_config.PrepareCustomConfig.rst", "generated/torch.ao.quantization.fx.custom_config.StandaloneModuleConfigEntry.rst", "generated/torch.ao.quantization.observer.HistogramObserver.rst", "generated/torch.ao.quantization.observer.MinMaxObserver.rst", "generated/torch.ao.quantization.observer.MovingAverageMinMaxObserver.rst", 
"generated/torch.ao.quantization.observer.MovingAveragePerChannelMinMaxObserver.rst", "generated/torch.ao.quantization.observer.NoopObserver.rst", "generated/torch.ao.quantization.observer.ObserverBase.rst", "generated/torch.ao.quantization.observer.PerChannelMinMaxObserver.rst", "generated/torch.ao.quantization.observer.PlaceholderObserver.rst", "generated/torch.ao.quantization.observer.RecordingObserver.rst", "generated/torch.ao.quantization.observer.default_debug_observer.rst", "generated/torch.ao.quantization.observer.default_dynamic_quant_observer.rst", "generated/torch.ao.quantization.observer.default_float_qparams_observer.rst", "generated/torch.ao.quantization.observer.default_histogram_observer.rst", "generated/torch.ao.quantization.observer.default_observer.rst", "generated/torch.ao.quantization.observer.default_per_channel_weight_observer.rst", "generated/torch.ao.quantization.observer.default_placeholder_observer.rst", "generated/torch.ao.quantization.observer.default_weight_observer.rst", "generated/torch.ao.quantization.observer.get_observer_state_dict.rst", "generated/torch.ao.quantization.observer.load_observer_state_dict.rst", "generated/torch.ao.quantization.prepare.rst", "generated/torch.ao.quantization.prepare_qat.rst", "generated/torch.ao.quantization.propagate_qconfig_.rst", "generated/torch.ao.quantization.pt2e.export_utils.model_is_exported.rst", "generated/torch.ao.quantization.qconfig.QConfig.rst", "generated/torch.ao.quantization.qconfig.default_activation_only_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_debug_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_dynamic_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_per_channel_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_qat_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_qat_qconfig_v2.rst", "generated/torch.ao.quantization.qconfig.default_qconfig.rst", "generated/torch.ao.quantization.qconfig.default_weight_only_qconfig.rst", "generated/torch.ao.quantization.qconfig.float16_dynamic_qconfig.rst", "generated/torch.ao.quantization.qconfig.float16_static_qconfig.rst", "generated/torch.ao.quantization.qconfig.float_qparams_weight_only_qconfig.rst", "generated/torch.ao.quantization.qconfig.per_channel_dynamic_qconfig.rst", "generated/torch.ao.quantization.qconfig_mapping.QConfigMapping.rst", "generated/torch.ao.quantization.qconfig_mapping.get_default_qat_qconfig_mapping.rst", "generated/torch.ao.quantization.qconfig_mapping.get_default_qconfig_mapping.rst", "generated/torch.ao.quantization.quantize.rst", "generated/torch.ao.quantization.quantize_dynamic.rst", "generated/torch.ao.quantization.quantize_fx.convert_fx.rst", "generated/torch.ao.quantization.quantize_fx.fuse_fx.rst", "generated/torch.ao.quantization.quantize_fx.prepare_fx.rst", "generated/torch.ao.quantization.quantize_fx.prepare_qat_fx.rst", "generated/torch.ao.quantization.quantize_qat.rst", "generated/torch.ao.quantization.swap_module.rst", "generated/torch.arange.rst", "generated/torch.arccos.rst", "generated/torch.arccosh.rst", "generated/torch.arcsin.rst", "generated/torch.arcsinh.rst", "generated/torch.arctan.rst", "generated/torch.arctan2.rst", "generated/torch.arctanh.rst", "generated/torch.are_deterministic_algorithms_enabled.rst", "generated/torch.argmax.rst", "generated/torch.argmin.rst", "generated/torch.argsort.rst", "generated/torch.argwhere.rst", "generated/torch.as_strided.rst", "generated/torch.as_tensor.rst", "generated/torch.asarray.rst", 
"generated/torch.asin.rst", "generated/torch.asinh.rst", "generated/torch.atan.rst", "generated/torch.atan2.rst", "generated/torch.atanh.rst", "generated/torch.atleast_1d.rst", "generated/torch.atleast_2d.rst", "generated/torch.atleast_3d.rst", "generated/torch.autograd.Function.backward.rst", "generated/torch.autograd.Function.forward.rst", "generated/torch.autograd.Function.jvp.rst", "generated/torch.autograd.Function.vmap.rst", "generated/torch.autograd.backward.rst", "generated/torch.autograd.forward_ad.UnpackedDualTensor.rst", "generated/torch.autograd.forward_ad.dual_level.rst", "generated/torch.autograd.forward_ad.enter_dual_level.rst", "generated/torch.autograd.forward_ad.exit_dual_level.rst", "generated/torch.autograd.forward_ad.make_dual.rst", "generated/torch.autograd.forward_ad.unpack_dual.rst", "generated/torch.autograd.function.BackwardCFunction.rst", "generated/torch.autograd.function.FunctionCtx.mark_dirty.rst", "generated/torch.autograd.function.FunctionCtx.mark_non_differentiable.rst", "generated/torch.autograd.function.FunctionCtx.save_for_backward.rst", "generated/torch.autograd.function.FunctionCtx.set_materialize_grads.rst", "generated/torch.autograd.function.InplaceFunction.rst", "generated/torch.autograd.function.NestedIOFunction.rst", "generated/torch.autograd.function.once_differentiable.rst", "generated/torch.autograd.functional.hessian.rst", "generated/torch.autograd.functional.hvp.rst", "generated/torch.autograd.functional.jacobian.rst", "generated/torch.autograd.functional.jvp.rst", "generated/torch.autograd.functional.vhp.rst", "generated/torch.autograd.functional.vjp.rst", "generated/torch.autograd.grad.rst", "generated/torch.autograd.grad_mode.inference_mode.rst", "generated/torch.autograd.grad_mode.set_grad_enabled.rst", "generated/torch.autograd.grad_mode.set_multithreading_enabled.rst", "generated/torch.autograd.gradcheck.GradcheckError.rst", "generated/torch.autograd.gradcheck.gradcheck.rst", "generated/torch.autograd.gradcheck.gradgradcheck.rst", "generated/torch.autograd.graph.Node.metadata.rst", "generated/torch.autograd.graph.Node.name.rst", "generated/torch.autograd.graph.Node.next_functions.rst", "generated/torch.autograd.graph.Node.register_hook.rst", "generated/torch.autograd.graph.Node.register_prehook.rst", "generated/torch.autograd.graph.increment_version.rst", "generated/torch.autograd.profiler.EnforceUnique.rst", "generated/torch.autograd.profiler.KinetoStepTracker.rst", "generated/torch.autograd.profiler.load_nvprof.rst", "generated/torch.autograd.profiler.parse_nvprof_trace.rst", "generated/torch.autograd.profiler.profile.export_chrome_trace.rst", "generated/torch.autograd.profiler.profile.key_averages.rst", "generated/torch.autograd.profiler.profile.self_cpu_time_total.rst", "generated/torch.autograd.profiler.profile.total_average.rst", "generated/torch.autograd.profiler.record_function.rst", "generated/torch.autograd.profiler_util.Interval.rst", "generated/torch.autograd.profiler_util.Kernel.rst", "generated/torch.autograd.profiler_util.MemRecordsAcc.rst", "generated/torch.autograd.profiler_util.StringTable.rst", "generated/torch.baddbmm.rst", "generated/torch.bartlett_window.rst", "generated/torch.bernoulli.rst", "generated/torch.bincount.rst", "generated/torch.bitwise_and.rst", "generated/torch.bitwise_left_shift.rst", "generated/torch.bitwise_not.rst", "generated/torch.bitwise_or.rst", "generated/torch.bitwise_right_shift.rst", "generated/torch.bitwise_xor.rst", "generated/torch.blackman_window.rst", "generated/torch.block_diag.rst", 
"generated/torch.bmm.rst", "generated/torch.broadcast_shapes.rst", "generated/torch.broadcast_tensors.rst", "generated/torch.broadcast_to.rst", "generated/torch.bucketize.rst", "generated/torch.can_cast.rst", "generated/torch.cartesian_prod.rst", "generated/torch.cat.rst", "generated/torch.cdist.rst", "generated/torch.ceil.rst", "generated/torch.chain_matmul.rst", "generated/torch.cholesky.rst", "generated/torch.cholesky_inverse.rst", "generated/torch.cholesky_solve.rst", "generated/torch.chunk.rst", "generated/torch.clamp.rst", "generated/torch.clip.rst", "generated/torch.clone.rst", "generated/torch.column_stack.rst", "generated/torch.combinations.rst", "generated/torch.compile.rst", "generated/torch.compiled_with_cxx11_abi.rst", "generated/torch.compiler.allow_in_graph.rst", "generated/torch.compiler.assume_constant_result.rst", "generated/torch.compiler.compile.rst", "generated/torch.compiler.cudagraph_mark_step_begin.rst", "generated/torch.compiler.disable.rst", "generated/torch.compiler.is_compiling.rst", "generated/torch.compiler.is_dynamo_compiling.rst", "generated/torch.compiler.list_backends.rst", "generated/torch.compiler.reset.rst", "generated/torch.complex.rst", "generated/torch.concat.rst", "generated/torch.concatenate.rst", "generated/torch.cond.rst", "generated/torch.conj.rst", "generated/torch.conj_physical.rst", "generated/torch.copysign.rst", "generated/torch.corrcoef.rst", "generated/torch.cos.rst", "generated/torch.cosh.rst", "generated/torch.count_nonzero.rst", "generated/torch.cov.rst", "generated/torch.cpu.Stream.rst", "generated/torch.cpu.StreamContext.rst", "generated/torch.cpu.current_device.rst", "generated/torch.cpu.current_stream.rst", "generated/torch.cpu.device_count.rst", "generated/torch.cpu.is_available.rst", "generated/torch.cpu.set_device.rst", "generated/torch.cpu.stream.rst", "generated/torch.cpu.synchronize.rst", "generated/torch.cross.rst", "generated/torch.cuda.CUDAGraph.rst", "generated/torch.cuda.CUDAPluggableAllocator.rst", "generated/torch.cuda.Event.rst", "generated/torch.cuda.ExternalStream.rst", "generated/torch.cuda.OutOfMemoryError.rst", "generated/torch.cuda.Stream.rst", "generated/torch.cuda.StreamContext.rst", "generated/torch.cuda.caching_allocator_alloc.rst", "generated/torch.cuda.caching_allocator_delete.rst", "generated/torch.cuda.can_device_access_peer.rst", "generated/torch.cuda.change_current_allocator.rst", "generated/torch.cuda.clock_rate.rst", "generated/torch.cuda.comm.broadcast.rst", "generated/torch.cuda.comm.broadcast_coalesced.rst", "generated/torch.cuda.comm.gather.rst", "generated/torch.cuda.comm.reduce_add.rst", "generated/torch.cuda.comm.scatter.rst", "generated/torch.cuda.current_blas_handle.rst", "generated/torch.cuda.current_device.rst", "generated/torch.cuda.current_stream.rst", "generated/torch.cuda.default_stream.rst", "generated/torch.cuda.device.rst", "generated/torch.cuda.device_count.rst", "generated/torch.cuda.device_of.rst", "generated/torch.cuda.empty_cache.rst", "generated/torch.cuda.get_allocator_backend.rst", "generated/torch.cuda.get_arch_list.rst", "generated/torch.cuda.get_device_capability.rst", "generated/torch.cuda.get_device_name.rst", "generated/torch.cuda.get_device_properties.rst", "generated/torch.cuda.get_gencode_flags.rst", "generated/torch.cuda.get_rng_state.rst", "generated/torch.cuda.get_rng_state_all.rst", "generated/torch.cuda.get_sync_debug_mode.rst", "generated/torch.cuda.graph.rst", "generated/torch.cuda.graph_pool_handle.rst", "generated/torch.cuda.init.rst", 
"generated/torch.cuda.initial_seed.rst", "generated/torch.cuda.ipc_collect.rst", "generated/torch.cuda.is_available.rst", "generated/torch.cuda.is_current_stream_capturing.rst", "generated/torch.cuda.is_initialized.rst", "generated/torch.cuda.jiterator._create_jit_fn.rst", "generated/torch.cuda.jiterator._create_multi_output_jit_fn.rst", "generated/torch.cuda.list_gpu_processes.rst", "generated/torch.cuda.make_graphed_callables.rst", "generated/torch.cuda.manual_seed.rst", "generated/torch.cuda.manual_seed_all.rst", "generated/torch.cuda.max_memory_allocated.rst", "generated/torch.cuda.max_memory_cached.rst", "generated/torch.cuda.max_memory_reserved.rst", "generated/torch.cuda.mem_get_info.rst", "generated/torch.cuda.memory_allocated.rst", "generated/torch.cuda.memory_cached.rst", "generated/torch.cuda.memory_reserved.rst", "generated/torch.cuda.memory_snapshot.rst", "generated/torch.cuda.memory_stats.rst", "generated/torch.cuda.memory_summary.rst", "generated/torch.cuda.memory_usage.rst", "generated/torch.cuda.nvtx.mark.rst", "generated/torch.cuda.nvtx.range.rst", "generated/torch.cuda.nvtx.range_pop.rst", "generated/torch.cuda.nvtx.range_push.rst", "generated/torch.cuda.power_draw.rst", "generated/torch.cuda.reset_max_memory_allocated.rst", "generated/torch.cuda.reset_max_memory_cached.rst", "generated/torch.cuda.reset_peak_memory_stats.rst", "generated/torch.cuda.seed.rst", "generated/torch.cuda.seed_all.rst", "generated/torch.cuda.set_device.rst", "generated/torch.cuda.set_per_process_memory_fraction.rst", "generated/torch.cuda.set_rng_state.rst", "generated/torch.cuda.set_rng_state_all.rst", "generated/torch.cuda.set_stream.rst", "generated/torch.cuda.set_sync_debug_mode.rst", "generated/torch.cuda.stream.rst", "generated/torch.cuda.synchronize.rst", "generated/torch.cuda.temperature.rst", "generated/torch.cuda.utilization.rst", "generated/torch.cummax.rst", "generated/torch.cummin.rst", "generated/torch.cumprod.rst", "generated/torch.cumsum.rst", "generated/torch.cumulative_trapezoid.rst", "generated/torch.deg2rad.rst", "generated/torch.dequantize.rst", "generated/torch.det.rst", "generated/torch.diag.rst", "generated/torch.diag_embed.rst", "generated/torch.diagflat.rst", "generated/torch.diagonal.rst", "generated/torch.diagonal_scatter.rst", "generated/torch.diff.rst", "generated/torch.digamma.rst", "generated/torch.dist.rst", "generated/torch.div.rst", "generated/torch.divide.rst", "generated/torch.dot.rst", "generated/torch.dsplit.rst", "generated/torch.dstack.rst", "generated/torch.einsum.rst", "generated/torch.empty.rst", "generated/torch.empty_like.rst", "generated/torch.empty_strided.rst", "generated/torch.enable_grad.rst", "generated/torch.eq.rst", "generated/torch.equal.rst", "generated/torch.erf.rst", "generated/torch.erfc.rst", "generated/torch.erfinv.rst", "generated/torch.exp.rst", "generated/torch.exp2.rst", "generated/torch.expm1.rst", "generated/torch.eye.rst", "generated/torch.fake_quantize_per_channel_affine.rst", "generated/torch.fake_quantize_per_tensor_affine.rst", "generated/torch.fft.fft.rst", "generated/torch.fft.fft2.rst", "generated/torch.fft.fftfreq.rst", "generated/torch.fft.fftn.rst", "generated/torch.fft.fftshift.rst", "generated/torch.fft.hfft.rst", "generated/torch.fft.hfft2.rst", "generated/torch.fft.hfftn.rst", "generated/torch.fft.ifft.rst", "generated/torch.fft.ifft2.rst", "generated/torch.fft.ifftn.rst", "generated/torch.fft.ifftshift.rst", "generated/torch.fft.ihfft.rst", "generated/torch.fft.ihfft2.rst", "generated/torch.fft.ihfftn.rst", 
"generated/torch.fft.irfft.rst", "generated/torch.fft.irfft2.rst", "generated/torch.fft.irfftn.rst", "generated/torch.fft.rfft.rst", "generated/torch.fft.rfft2.rst", "generated/torch.fft.rfftfreq.rst", "generated/torch.fft.rfftn.rst", "generated/torch.fix.rst", "generated/torch.flatten.rst", "generated/torch.flip.rst", "generated/torch.fliplr.rst", "generated/torch.flipud.rst", "generated/torch.float_power.rst", "generated/torch.floor.rst", "generated/torch.floor_divide.rst", "generated/torch.fmax.rst", "generated/torch.fmin.rst", "generated/torch.fmod.rst", "generated/torch.frac.rst", "generated/torch.frexp.rst", "generated/torch.from_dlpack.rst", "generated/torch.from_file.rst", "generated/torch.from_numpy.rst", "generated/torch.frombuffer.rst", "generated/torch.full.rst", "generated/torch.full_like.rst", "generated/torch.func.functional_call.rst", "generated/torch.func.functionalize.rst", "generated/torch.func.grad.rst", "generated/torch.func.grad_and_value.rst", "generated/torch.func.hessian.rst", "generated/torch.func.jacfwd.rst", "generated/torch.func.jacrev.rst", "generated/torch.func.jvp.rst", "generated/torch.func.linearize.rst", "generated/torch.func.replace_all_batch_norm_modules_.rst", "generated/torch.func.stack_module_state.rst", "generated/torch.func.vjp.rst", "generated/torch.func.vmap.rst", "generated/torch.fx.experimental.symbolic_shapes.CallMethodKey.rst", "generated/torch.fx.experimental.symbolic_shapes.ConvertIntKey.rst", "generated/torch.fx.experimental.symbolic_shapes.DimConstraints.rst", "generated/torch.fx.experimental.symbolic_shapes.DimDynamic.rst", "generated/torch.fx.experimental.symbolic_shapes.DivideByKey.rst", "generated/torch.fx.experimental.symbolic_shapes.EqualityConstraint.rst", "generated/torch.fx.experimental.symbolic_shapes.InnerTensorKey.rst", "generated/torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.rst", "generated/torch.fx.experimental.symbolic_shapes.RelaxedUnspecConstraint.rst", "generated/torch.fx.experimental.symbolic_shapes.ShapeEnv.rst", "generated/torch.fx.experimental.symbolic_shapes.ShapeEnvSettings.rst", "generated/torch.fx.experimental.symbolic_shapes.StatefulSymbolicContext.rst", "generated/torch.fx.experimental.symbolic_shapes.StatelessSymbolicContext.rst", "generated/torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint.rst", "generated/torch.fx.experimental.symbolic_shapes.SubclassSymbolicContext.rst", "generated/torch.fx.experimental.symbolic_shapes.SymbolicContext.rst", "generated/torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr.rst", "generated/torch.fx.experimental.symbolic_shapes.check_consistent.rst", "generated/torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings.rst", "generated/torch.fx.experimental.symbolic_shapes.constrain_range.rst", "generated/torch.fx.experimental.symbolic_shapes.constrain_unify.rst", "generated/torch.fx.experimental.symbolic_shapes.definitely_false.rst", "generated/torch.fx.experimental.symbolic_shapes.definitely_true.rst", "generated/torch.fx.experimental.symbolic_shapes.guard_size_oblivious.rst", "generated/torch.fx.experimental.symbolic_shapes.has_free_symbols.rst", "generated/torch.fx.experimental.symbolic_shapes.hint_int.rst", "generated/torch.fx.experimental.symbolic_shapes.is_concrete_bool.rst", "generated/torch.fx.experimental.symbolic_shapes.is_concrete_int.rst", "generated/torch.fx.experimental.symbolic_shapes.lru_cache.rst", "generated/torch.fx.experimental.symbolic_shapes.parallel_and.rst", 
"generated/torch.fx.experimental.symbolic_shapes.parallel_or.rst", "generated/torch.fx.experimental.symbolic_shapes.rebind_unbacked.rst", "generated/torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings.rst", "generated/torch.fx.experimental.symbolic_shapes.statically_known_true.rst", "generated/torch.fx.experimental.symbolic_shapes.sym_eq.rst", "generated/torch.gather.rst", "generated/torch.gcd.rst", "generated/torch.ge.rst", "generated/torch.geqrf.rst", "generated/torch.ger.rst", "generated/torch.get_default_device.rst", "generated/torch.get_default_dtype.rst", "generated/torch.get_deterministic_debug_mode.rst", "generated/torch.get_device_module.rst", "generated/torch.get_float32_matmul_precision.rst", "generated/torch.get_num_interop_threads.rst", "generated/torch.get_num_threads.rst", "generated/torch.get_rng_state.rst", "generated/torch.gradient.rst", "generated/torch.greater.rst", "generated/torch.greater_equal.rst", "generated/torch.gt.rst", "generated/torch.hamming_window.rst", "generated/torch.hann_window.rst", "generated/torch.heaviside.rst", "generated/torch.histc.rst", "generated/torch.histogram.rst", "generated/torch.histogramdd.rst", "generated/torch.hsplit.rst", "generated/torch.hspmm.rst", "generated/torch.hstack.rst", "generated/torch.hypot.rst", "generated/torch.i0.rst", "generated/torch.igamma.rst", "generated/torch.igammac.rst", "generated/torch.imag.rst", "generated/torch.index_add.rst", "generated/torch.index_copy.rst", "generated/torch.index_reduce.rst", "generated/torch.index_select.rst", "generated/torch.initial_seed.rst", "generated/torch.inner.rst", "generated/torch.inverse.rst", "generated/torch.is_complex.rst", "generated/torch.is_conj.rst", "generated/torch.is_deterministic_algorithms_warn_only_enabled.rst", "generated/torch.is_floating_point.rst", "generated/torch.is_grad_enabled.rst", "generated/torch.is_inference_mode_enabled.rst", "generated/torch.is_nonzero.rst", "generated/torch.is_storage.rst", "generated/torch.is_tensor.rst", "generated/torch.is_warn_always_enabled.rst", "generated/torch.isclose.rst", "generated/torch.isfinite.rst", "generated/torch.isin.rst", "generated/torch.isinf.rst", "generated/torch.isnan.rst", "generated/torch.isneginf.rst", "generated/torch.isposinf.rst", "generated/torch.isreal.rst", "generated/torch.istft.rst", "generated/torch.jit.Attribute.rst", "generated/torch.jit.ScriptFunction.rst", "generated/torch.jit.ScriptModule.rst", "generated/torch.jit.annotate.rst", "generated/torch.jit.enable_onednn_fusion.rst", "generated/torch.jit.fork.rst", "generated/torch.jit.freeze.rst", "generated/torch.jit.ignore.rst", "generated/torch.jit.interface.rst", "generated/torch.jit.isinstance.rst", "generated/torch.jit.load.rst", "generated/torch.jit.onednn_fusion_enabled.rst", "generated/torch.jit.optimize_for_inference.rst", "generated/torch.jit.save.rst", "generated/torch.jit.script.rst", "generated/torch.jit.script_if_tracing.rst", "generated/torch.jit.set_fusion_strategy.rst", "generated/torch.jit.strict_fusion.rst", "generated/torch.jit.trace.rst", "generated/torch.jit.trace_module.rst", "generated/torch.jit.unused.rst", "generated/torch.jit.wait.rst", "generated/torch.kaiser_window.rst", "generated/torch.kron.rst", "generated/torch.kthvalue.rst", "generated/torch.lcm.rst", "generated/torch.ldexp.rst", "generated/torch.le.rst", "generated/torch.lerp.rst", "generated/torch.less.rst", "generated/torch.less_equal.rst", "generated/torch.lgamma.rst", "generated/torch.linalg.cholesky.rst", "generated/torch.linalg.cholesky_ex.rst", 
"generated/torch.linalg.cond.rst", "generated/torch.linalg.cross.rst", "generated/torch.linalg.det.rst", "generated/torch.linalg.diagonal.rst", "generated/torch.linalg.eig.rst", "generated/torch.linalg.eigh.rst", "generated/torch.linalg.eigvals.rst", "generated/torch.linalg.eigvalsh.rst", "generated/torch.linalg.householder_product.rst", "generated/torch.linalg.inv.rst", "generated/torch.linalg.inv_ex.rst", "generated/torch.linalg.ldl_factor.rst", "generated/torch.linalg.ldl_factor_ex.rst", "generated/torch.linalg.ldl_solve.rst", "generated/torch.linalg.lstsq.rst", "generated/torch.linalg.lu.rst", "generated/torch.linalg.lu_factor.rst", "generated/torch.linalg.lu_factor_ex.rst", "generated/torch.linalg.lu_solve.rst", "generated/torch.linalg.matmul.rst", "generated/torch.linalg.matrix_exp.rst", "generated/torch.linalg.matrix_norm.rst", "generated/torch.linalg.matrix_power.rst", "generated/torch.linalg.matrix_rank.rst", "generated/torch.linalg.multi_dot.rst", "generated/torch.linalg.norm.rst", "generated/torch.linalg.pinv.rst", "generated/torch.linalg.qr.rst", "generated/torch.linalg.slogdet.rst", "generated/torch.linalg.solve.rst", "generated/torch.linalg.solve_ex.rst", "generated/torch.linalg.solve_triangular.rst", "generated/torch.linalg.svd.rst", "generated/torch.linalg.svdvals.rst", "generated/torch.linalg.tensorinv.rst", "generated/torch.linalg.tensorsolve.rst", "generated/torch.linalg.vander.rst", "generated/torch.linalg.vecdot.rst", "generated/torch.linalg.vector_norm.rst", "generated/torch.linspace.rst", "generated/torch.load.rst", "generated/torch.lobpcg.rst", "generated/torch.log.rst", "generated/torch.log10.rst", "generated/torch.log1p.rst", "generated/torch.log2.rst", "generated/torch.logaddexp.rst", "generated/torch.logaddexp2.rst", "generated/torch.logcumsumexp.rst", "generated/torch.logdet.rst", "generated/torch.logical_and.rst", "generated/torch.logical_not.rst", "generated/torch.logical_or.rst", "generated/torch.logical_xor.rst", "generated/torch.logit.rst", "generated/torch.logspace.rst", "generated/torch.logsumexp.rst", "generated/torch.lt.rst", "generated/torch.lu.rst", "generated/torch.lu_solve.rst", "generated/torch.lu_unpack.rst", "generated/torch.manual_seed.rst", "generated/torch.masked_select.rst", "generated/torch.matmul.rst", "generated/torch.matrix_exp.rst", "generated/torch.matrix_power.rst", "generated/torch.max.rst", "generated/torch.maximum.rst", "generated/torch.mean.rst", "generated/torch.median.rst", "generated/torch.meshgrid.rst", "generated/torch.min.rst", "generated/torch.minimum.rst", "generated/torch.mm.rst", "generated/torch.mode.rst", "generated/torch.moveaxis.rst", "generated/torch.movedim.rst", "generated/torch.mps.current_allocated_memory.rst", "generated/torch.mps.device_count.rst", "generated/torch.mps.driver_allocated_memory.rst", "generated/torch.mps.empty_cache.rst", "generated/torch.mps.event.Event.rst", "generated/torch.mps.get_rng_state.rst", "generated/torch.mps.manual_seed.rst", "generated/torch.mps.profiler.profile.rst", "generated/torch.mps.profiler.start.rst", "generated/torch.mps.profiler.stop.rst", "generated/torch.mps.seed.rst", "generated/torch.mps.set_per_process_memory_fraction.rst", "generated/torch.mps.set_rng_state.rst", "generated/torch.mps.synchronize.rst", "generated/torch.msort.rst", "generated/torch.mtia.DeferredMtiaCallError.rst", "generated/torch.mtia.Event.rst", "generated/torch.mtia.Stream.rst", "generated/torch.mtia.StreamContext.rst", "generated/torch.mtia.current_device.rst", 
"generated/torch.mtia.current_stream.rst", "generated/torch.mtia.default_stream.rst", "generated/torch.mtia.device.rst", "generated/torch.mtia.device_count.rst", "generated/torch.mtia.init.rst", "generated/torch.mtia.is_available.rst", "generated/torch.mtia.is_initialized.rst", "generated/torch.mtia.set_stream.rst", "generated/torch.mtia.stream.rst", "generated/torch.mtia.synchronize.rst", "generated/torch.mul.rst", "generated/torch.multinomial.rst", "generated/torch.multiply.rst", "generated/torch.mv.rst", "generated/torch.mvlgamma.rst", "generated/torch.nan_to_num.rst", "generated/torch.nanmean.rst", "generated/torch.nanmedian.rst", "generated/torch.nanquantile.rst", "generated/torch.nansum.rst", "generated/torch.narrow.rst", "generated/torch.narrow_copy.rst", "generated/torch.ne.rst", "generated/torch.neg.rst", "generated/torch.negative.rst", "generated/torch.nextafter.rst", "generated/torch.nn.AdaptiveAvgPool1d.rst", "generated/torch.nn.AdaptiveAvgPool2d.rst", "generated/torch.nn.AdaptiveAvgPool3d.rst", "generated/torch.nn.AdaptiveLogSoftmaxWithLoss.rst", "generated/torch.nn.AdaptiveMaxPool1d.rst", "generated/torch.nn.AdaptiveMaxPool2d.rst", "generated/torch.nn.AdaptiveMaxPool3d.rst", "generated/torch.nn.AlphaDropout.rst", "generated/torch.nn.AvgPool1d.rst", "generated/torch.nn.AvgPool2d.rst", "generated/torch.nn.AvgPool3d.rst", "generated/torch.nn.BCELoss.rst", "generated/torch.nn.BCEWithLogitsLoss.rst", "generated/torch.nn.BatchNorm1d.rst", "generated/torch.nn.BatchNorm2d.rst", "generated/torch.nn.BatchNorm3d.rst", "generated/torch.nn.Bilinear.rst", "generated/torch.nn.CELU.rst", "generated/torch.nn.CTCLoss.rst", "generated/torch.nn.ChannelShuffle.rst", "generated/torch.nn.CircularPad1d.rst", "generated/torch.nn.CircularPad2d.rst", "generated/torch.nn.CircularPad3d.rst", "generated/torch.nn.ConstantPad1d.rst", "generated/torch.nn.ConstantPad2d.rst", "generated/torch.nn.ConstantPad3d.rst", "generated/torch.nn.Conv1d.rst", "generated/torch.nn.Conv2d.rst", "generated/torch.nn.Conv3d.rst", "generated/torch.nn.ConvTranspose1d.rst", "generated/torch.nn.ConvTranspose2d.rst", "generated/torch.nn.ConvTranspose3d.rst", "generated/torch.nn.CosineEmbeddingLoss.rst", "generated/torch.nn.CosineSimilarity.rst", "generated/torch.nn.CrossEntropyLoss.rst", "generated/torch.nn.DataParallel.rst", "generated/torch.nn.Dropout.rst", "generated/torch.nn.Dropout1d.rst", "generated/torch.nn.Dropout2d.rst", "generated/torch.nn.Dropout3d.rst", "generated/torch.nn.ELU.rst", "generated/torch.nn.Embedding.rst", "generated/torch.nn.EmbeddingBag.rst", "generated/torch.nn.FeatureAlphaDropout.rst", "generated/torch.nn.Flatten.rst", "generated/torch.nn.Fold.rst", "generated/torch.nn.FractionalMaxPool2d.rst", "generated/torch.nn.FractionalMaxPool3d.rst", "generated/torch.nn.GELU.rst", "generated/torch.nn.GLU.rst", "generated/torch.nn.GRU.rst", "generated/torch.nn.GRUCell.rst", "generated/torch.nn.GaussianNLLLoss.rst", "generated/torch.nn.GroupNorm.rst", "generated/torch.nn.Hardshrink.rst", "generated/torch.nn.Hardsigmoid.rst", "generated/torch.nn.Hardswish.rst", "generated/torch.nn.Hardtanh.rst", "generated/torch.nn.HingeEmbeddingLoss.rst", "generated/torch.nn.HuberLoss.rst", "generated/torch.nn.Identity.rst", "generated/torch.nn.InstanceNorm1d.rst", "generated/torch.nn.InstanceNorm2d.rst", "generated/torch.nn.InstanceNorm3d.rst", "generated/torch.nn.KLDivLoss.rst", "generated/torch.nn.L1Loss.rst", "generated/torch.nn.LPPool1d.rst", "generated/torch.nn.LPPool2d.rst", "generated/torch.nn.LPPool3d.rst", 
"generated/torch.nn.LSTM.rst", "generated/torch.nn.LSTMCell.rst", "generated/torch.nn.LayerNorm.rst", "generated/torch.nn.LazyBatchNorm1d.rst", "generated/torch.nn.LazyBatchNorm2d.rst", "generated/torch.nn.LazyBatchNorm3d.rst", "generated/torch.nn.LazyConv1d.rst", "generated/torch.nn.LazyConv2d.rst", "generated/torch.nn.LazyConv3d.rst", "generated/torch.nn.LazyConvTranspose1d.rst", "generated/torch.nn.LazyConvTranspose2d.rst", "generated/torch.nn.LazyConvTranspose3d.rst", "generated/torch.nn.LazyInstanceNorm1d.rst", "generated/torch.nn.LazyInstanceNorm2d.rst", "generated/torch.nn.LazyInstanceNorm3d.rst", "generated/torch.nn.LazyLinear.rst", "generated/torch.nn.LeakyReLU.rst", "generated/torch.nn.Linear.rst", "generated/torch.nn.LocalResponseNorm.rst", "generated/torch.nn.LogSigmoid.rst", "generated/torch.nn.LogSoftmax.rst", "generated/torch.nn.MSELoss.rst", "generated/torch.nn.MarginRankingLoss.rst", "generated/torch.nn.MaxPool1d.rst", "generated/torch.nn.MaxPool2d.rst", "generated/torch.nn.MaxPool3d.rst", "generated/torch.nn.MaxUnpool1d.rst", "generated/torch.nn.MaxUnpool2d.rst", "generated/torch.nn.MaxUnpool3d.rst", "generated/torch.nn.Mish.rst", "generated/torch.nn.Module.rst", "generated/torch.nn.ModuleDict.rst", "generated/torch.nn.ModuleList.rst", "generated/torch.nn.MultiLabelMarginLoss.rst", "generated/torch.nn.MultiLabelSoftMarginLoss.rst", "generated/torch.nn.MultiMarginLoss.rst", "generated/torch.nn.MultiheadAttention.rst", "generated/torch.nn.NLLLoss.rst", "generated/torch.nn.PReLU.rst", "generated/torch.nn.PairwiseDistance.rst", "generated/torch.nn.ParameterDict.rst", "generated/torch.nn.ParameterList.rst", "generated/torch.nn.PixelShuffle.rst", "generated/torch.nn.PixelUnshuffle.rst", "generated/torch.nn.PoissonNLLLoss.rst", "generated/torch.nn.RMSNorm.rst", "generated/torch.nn.RNN.rst", "generated/torch.nn.RNNBase.rst", "generated/torch.nn.RNNCell.rst", "generated/torch.nn.RReLU.rst", "generated/torch.nn.ReLU.rst", "generated/torch.nn.ReLU6.rst", "generated/torch.nn.ReflectionPad1d.rst", "generated/torch.nn.ReflectionPad2d.rst", "generated/torch.nn.ReflectionPad3d.rst", "generated/torch.nn.ReplicationPad1d.rst", "generated/torch.nn.ReplicationPad2d.rst", "generated/torch.nn.ReplicationPad3d.rst", "generated/torch.nn.SELU.rst", "generated/torch.nn.Sequential.rst", "generated/torch.nn.SiLU.rst", "generated/torch.nn.Sigmoid.rst", "generated/torch.nn.SmoothL1Loss.rst", "generated/torch.nn.SoftMarginLoss.rst", "generated/torch.nn.Softmax.rst", "generated/torch.nn.Softmax2d.rst", "generated/torch.nn.Softmin.rst", "generated/torch.nn.Softplus.rst", "generated/torch.nn.Softshrink.rst", "generated/torch.nn.Softsign.rst", "generated/torch.nn.SyncBatchNorm.rst", "generated/torch.nn.Tanh.rst", "generated/torch.nn.Tanhshrink.rst", "generated/torch.nn.Threshold.rst", "generated/torch.nn.Transformer.rst", "generated/torch.nn.TransformerDecoder.rst", "generated/torch.nn.TransformerDecoderLayer.rst", "generated/torch.nn.TransformerEncoder.rst", "generated/torch.nn.TransformerEncoderLayer.rst", "generated/torch.nn.TripletMarginLoss.rst", "generated/torch.nn.TripletMarginWithDistanceLoss.rst", "generated/torch.nn.Unflatten.rst", "generated/torch.nn.Unfold.rst", "generated/torch.nn.Upsample.rst", "generated/torch.nn.UpsamplingBilinear2d.rst", "generated/torch.nn.UpsamplingNearest2d.rst", "generated/torch.nn.ZeroPad1d.rst", "generated/torch.nn.ZeroPad2d.rst", "generated/torch.nn.ZeroPad3d.rst", "generated/torch.nn.attention.SDPBackend.rst", "generated/torch.nn.attention.bias.CausalBias.rst", 
"generated/torch.nn.attention.bias.CausalVariant.rst", "generated/torch.nn.attention.bias.causal_lower_right.rst", "generated/torch.nn.attention.bias.causal_upper_left.rst", "generated/torch.nn.attention.sdpa_kernel.rst", "generated/torch.nn.functional.adaptive_avg_pool1d.rst", "generated/torch.nn.functional.adaptive_avg_pool2d.rst", "generated/torch.nn.functional.adaptive_avg_pool3d.rst", "generated/torch.nn.functional.adaptive_max_pool1d.rst", "generated/torch.nn.functional.adaptive_max_pool2d.rst", "generated/torch.nn.functional.adaptive_max_pool3d.rst", "generated/torch.nn.functional.affine_grid.rst", "generated/torch.nn.functional.alpha_dropout.rst", "generated/torch.nn.functional.avg_pool1d.rst", "generated/torch.nn.functional.avg_pool2d.rst", "generated/torch.nn.functional.avg_pool3d.rst", "generated/torch.nn.functional.batch_norm.rst", "generated/torch.nn.functional.bilinear.rst", "generated/torch.nn.functional.binary_cross_entropy.rst", "generated/torch.nn.functional.binary_cross_entropy_with_logits.rst", "generated/torch.nn.functional.celu.rst", "generated/torch.nn.functional.conv1d.rst", "generated/torch.nn.functional.conv2d.rst", "generated/torch.nn.functional.conv3d.rst", "generated/torch.nn.functional.conv_transpose1d.rst", "generated/torch.nn.functional.conv_transpose2d.rst", "generated/torch.nn.functional.conv_transpose3d.rst", "generated/torch.nn.functional.cosine_embedding_loss.rst", "generated/torch.nn.functional.cosine_similarity.rst", "generated/torch.nn.functional.cross_entropy.rst", "generated/torch.nn.functional.ctc_loss.rst", "generated/torch.nn.functional.dropout.rst", "generated/torch.nn.functional.dropout1d.rst", "generated/torch.nn.functional.dropout2d.rst", "generated/torch.nn.functional.dropout3d.rst", "generated/torch.nn.functional.elu.rst", "generated/torch.nn.functional.elu_.rst", "generated/torch.nn.functional.embedding.rst", "generated/torch.nn.functional.embedding_bag.rst", "generated/torch.nn.functional.feature_alpha_dropout.rst", "generated/torch.nn.functional.fold.rst", "generated/torch.nn.functional.fractional_max_pool2d.rst", "generated/torch.nn.functional.fractional_max_pool3d.rst", "generated/torch.nn.functional.gaussian_nll_loss.rst", "generated/torch.nn.functional.gelu.rst", "generated/torch.nn.functional.glu.rst", "generated/torch.nn.functional.grid_sample.rst", "generated/torch.nn.functional.group_norm.rst", "generated/torch.nn.functional.gumbel_softmax.rst", "generated/torch.nn.functional.hardshrink.rst", "generated/torch.nn.functional.hardsigmoid.rst", "generated/torch.nn.functional.hardswish.rst", "generated/torch.nn.functional.hardtanh.rst", "generated/torch.nn.functional.hardtanh_.rst", "generated/torch.nn.functional.hinge_embedding_loss.rst", "generated/torch.nn.functional.huber_loss.rst", "generated/torch.nn.functional.instance_norm.rst", "generated/torch.nn.functional.interpolate.rst", "generated/torch.nn.functional.kl_div.rst", "generated/torch.nn.functional.l1_loss.rst", "generated/torch.nn.functional.layer_norm.rst", "generated/torch.nn.functional.leaky_relu.rst", "generated/torch.nn.functional.leaky_relu_.rst", "generated/torch.nn.functional.linear.rst", "generated/torch.nn.functional.local_response_norm.rst", "generated/torch.nn.functional.log_softmax.rst", "generated/torch.nn.functional.logsigmoid.rst", "generated/torch.nn.functional.lp_pool1d.rst", "generated/torch.nn.functional.lp_pool2d.rst", "generated/torch.nn.functional.lp_pool3d.rst", "generated/torch.nn.functional.margin_ranking_loss.rst", 
"generated/torch.nn.functional.max_pool1d.rst", "generated/torch.nn.functional.max_pool2d.rst", "generated/torch.nn.functional.max_pool3d.rst", "generated/torch.nn.functional.max_unpool1d.rst", "generated/torch.nn.functional.max_unpool2d.rst", "generated/torch.nn.functional.max_unpool3d.rst", "generated/torch.nn.functional.mish.rst", "generated/torch.nn.functional.mse_loss.rst", "generated/torch.nn.functional.multi_margin_loss.rst", "generated/torch.nn.functional.multilabel_margin_loss.rst", "generated/torch.nn.functional.multilabel_soft_margin_loss.rst", "generated/torch.nn.functional.nll_loss.rst", "generated/torch.nn.functional.normalize.rst", "generated/torch.nn.functional.one_hot.rst", "generated/torch.nn.functional.pad.rst", "generated/torch.nn.functional.pairwise_distance.rst", "generated/torch.nn.functional.pdist.rst", "generated/torch.nn.functional.pixel_shuffle.rst", "generated/torch.nn.functional.pixel_unshuffle.rst", "generated/torch.nn.functional.poisson_nll_loss.rst", "generated/torch.nn.functional.prelu.rst", "generated/torch.nn.functional.relu.rst", "generated/torch.nn.functional.relu6.rst", "generated/torch.nn.functional.relu_.rst", "generated/torch.nn.functional.rms_norm.rst", "generated/torch.nn.functional.rrelu.rst", "generated/torch.nn.functional.rrelu_.rst", "generated/torch.nn.functional.scaled_dot_product_attention.rst", "generated/torch.nn.functional.selu.rst", "generated/torch.nn.functional.sigmoid.rst", "generated/torch.nn.functional.silu.rst", "generated/torch.nn.functional.smooth_l1_loss.rst", "generated/torch.nn.functional.soft_margin_loss.rst", "generated/torch.nn.functional.softmax.rst", "generated/torch.nn.functional.softmin.rst", "generated/torch.nn.functional.softplus.rst", "generated/torch.nn.functional.softshrink.rst", "generated/torch.nn.functional.softsign.rst", "generated/torch.nn.functional.tanh.rst", "generated/torch.nn.functional.tanhshrink.rst", "generated/torch.nn.functional.threshold.rst", "generated/torch.nn.functional.threshold_.rst", "generated/torch.nn.functional.torch.nn.parallel.data_parallel.rst", "generated/torch.nn.functional.triplet_margin_loss.rst", "generated/torch.nn.functional.triplet_margin_with_distance_loss.rst", "generated/torch.nn.functional.unfold.rst", "generated/torch.nn.functional.upsample.rst", "generated/torch.nn.functional.upsample_bilinear.rst", "generated/torch.nn.functional.upsample_nearest.rst", "generated/torch.nn.modules.lazy.LazyModuleMixin.rst", "generated/torch.nn.modules.module.register_module_backward_hook.rst", "generated/torch.nn.modules.module.register_module_buffer_registration_hook.rst", "generated/torch.nn.modules.module.register_module_forward_hook.rst", "generated/torch.nn.modules.module.register_module_forward_pre_hook.rst", "generated/torch.nn.modules.module.register_module_full_backward_hook.rst", "generated/torch.nn.modules.module.register_module_full_backward_pre_hook.rst", "generated/torch.nn.modules.module.register_module_module_registration_hook.rst", "generated/torch.nn.modules.module.register_module_parameter_registration_hook.rst", "generated/torch.nn.modules.normalization.RMSNorm.rst", "generated/torch.nn.parallel.DistributedDataParallel.rst", "generated/torch.nn.parameter.Parameter.rst", "generated/torch.nn.parameter.UninitializedBuffer.rst", "generated/torch.nn.parameter.UninitializedParameter.rst", "generated/torch.nn.utils.clip_grad_norm.rst", "generated/torch.nn.utils.clip_grad_norm_.rst", "generated/torch.nn.utils.clip_grad_value_.rst", 
"generated/torch.nn.utils.convert_conv2d_weight_memory_format.rst", "generated/torch.nn.utils.convert_conv3d_weight_memory_format.rst", "generated/torch.nn.utils.fuse_conv_bn_eval.rst", "generated/torch.nn.utils.fuse_conv_bn_weights.rst", "generated/torch.nn.utils.fuse_linear_bn_eval.rst", "generated/torch.nn.utils.fuse_linear_bn_weights.rst", "generated/torch.nn.utils.parameters_to_vector.rst", "generated/torch.nn.utils.parametrizations.orthogonal.rst", "generated/torch.nn.utils.parametrizations.spectral_norm.rst", "generated/torch.nn.utils.parametrizations.weight_norm.rst", "generated/torch.nn.utils.parametrize.ParametrizationList.rst", "generated/torch.nn.utils.parametrize.cached.rst", "generated/torch.nn.utils.parametrize.is_parametrized.rst", "generated/torch.nn.utils.parametrize.register_parametrization.rst", "generated/torch.nn.utils.parametrize.remove_parametrizations.rst", "generated/torch.nn.utils.prune.BasePruningMethod.rst", "generated/torch.nn.utils.prune.CustomFromMask.rst", "generated/torch.nn.utils.prune.Identity.rst", "generated/torch.nn.utils.prune.L1Unstructured.rst", "generated/torch.nn.utils.prune.LnStructured.rst", "generated/torch.nn.utils.prune.PruningContainer.rst", "generated/torch.nn.utils.prune.RandomStructured.rst", "generated/torch.nn.utils.prune.RandomUnstructured.rst", "generated/torch.nn.utils.prune.custom_from_mask.rst", "generated/torch.nn.utils.prune.global_unstructured.rst", "generated/torch.nn.utils.prune.identity.rst", "generated/torch.nn.utils.prune.is_pruned.rst", "generated/torch.nn.utils.prune.l1_unstructured.rst", "generated/torch.nn.utils.prune.ln_structured.rst", "generated/torch.nn.utils.prune.random_structured.rst", "generated/torch.nn.utils.prune.random_unstructured.rst", "generated/torch.nn.utils.prune.remove.rst", "generated/torch.nn.utils.remove_spectral_norm.rst", "generated/torch.nn.utils.remove_weight_norm.rst", "generated/torch.nn.utils.rnn.PackedSequence.rst", "generated/torch.nn.utils.rnn.pack_padded_sequence.rst", "generated/torch.nn.utils.rnn.pack_sequence.rst", "generated/torch.nn.utils.rnn.pad_packed_sequence.rst", "generated/torch.nn.utils.rnn.pad_sequence.rst", "generated/torch.nn.utils.rnn.unpack_sequence.rst", "generated/torch.nn.utils.rnn.unpad_sequence.rst", "generated/torch.nn.utils.skip_init.rst", "generated/torch.nn.utils.spectral_norm.rst", "generated/torch.nn.utils.stateless.functional_call.rst", "generated/torch.nn.utils.vector_to_parameters.rst", "generated/torch.nn.utils.weight_norm.rst", "generated/torch.no_grad.rst", "generated/torch.nonzero.rst", "generated/torch.norm.rst", "generated/torch.normal.rst", "generated/torch.not_equal.rst", "generated/torch.numel.rst", "generated/torch.ones.rst", "generated/torch.ones_like.rst", "generated/torch.onnx.JitScalarType.rst", "generated/torch.onnx.verification.GraphInfo.rst", "generated/torch.onnx.verification.VerificationOptions.rst", "generated/torch.optim.ASGD.rst", "generated/torch.optim.Adadelta.rst", "generated/torch.optim.Adagrad.rst", "generated/torch.optim.Adam.rst", "generated/torch.optim.AdamW.rst", "generated/torch.optim.Adamax.rst", "generated/torch.optim.LBFGS.rst", "generated/torch.optim.NAdam.rst", "generated/torch.optim.Optimizer.add_param_group.rst", "generated/torch.optim.Optimizer.load_state_dict.rst", "generated/torch.optim.Optimizer.state_dict.rst", "generated/torch.optim.Optimizer.step.rst", "generated/torch.optim.Optimizer.zero_grad.rst", "generated/torch.optim.RAdam.rst", "generated/torch.optim.RMSprop.rst", "generated/torch.optim.Rprop.rst", 
"generated/torch.optim.SGD.rst", "generated/torch.optim.SparseAdam.rst", "generated/torch.optim.lr_scheduler.ChainedScheduler.rst", "generated/torch.optim.lr_scheduler.ConstantLR.rst", "generated/torch.optim.lr_scheduler.CosineAnnealingLR.rst", "generated/torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.rst", "generated/torch.optim.lr_scheduler.CyclicLR.rst", "generated/torch.optim.lr_scheduler.ExponentialLR.rst", "generated/torch.optim.lr_scheduler.LambdaLR.rst", "generated/torch.optim.lr_scheduler.LinearLR.rst", "generated/torch.optim.lr_scheduler.MultiStepLR.rst", "generated/torch.optim.lr_scheduler.MultiplicativeLR.rst", "generated/torch.optim.lr_scheduler.OneCycleLR.rst", "generated/torch.optim.lr_scheduler.PolynomialLR.rst", "generated/torch.optim.lr_scheduler.ReduceLROnPlateau.rst", "generated/torch.optim.lr_scheduler.SequentialLR.rst", "generated/torch.optim.lr_scheduler.StepLR.rst", "generated/torch.orgqr.rst", "generated/torch.ormqr.rst", "generated/torch.outer.rst", "generated/torch.pca_lowrank.rst", "generated/torch.permute.rst", "generated/torch.pinverse.rst", "generated/torch.poisson.rst", "generated/torch.polar.rst", "generated/torch.polygamma.rst", "generated/torch.positive.rst", "generated/torch.pow.rst", "generated/torch.prod.rst", "generated/torch.promote_types.rst", "generated/torch.qr.rst", "generated/torch.quantile.rst", "generated/torch.quantize_per_channel.rst", "generated/torch.quantize_per_tensor.rst", "generated/torch.quantized_batch_norm.rst", "generated/torch.quantized_max_pool1d.rst", "generated/torch.quantized_max_pool2d.rst", "generated/torch.quasirandom.SobolEngine.rst", "generated/torch.rad2deg.rst", "generated/torch.rand.rst", "generated/torch.rand_like.rst", "generated/torch.randint.rst", "generated/torch.randint_like.rst", "generated/torch.randn.rst", "generated/torch.randn_like.rst", "generated/torch.randperm.rst", "generated/torch.range.rst", "generated/torch.ravel.rst", "generated/torch.real.rst", "generated/torch.reciprocal.rst", "generated/torch.remainder.rst", "generated/torch.renorm.rst", "generated/torch.repeat_interleave.rst", "generated/torch.reshape.rst", "generated/torch.resolve_conj.rst", "generated/torch.resolve_neg.rst", "generated/torch.result_type.rst", "generated/torch.roll.rst", "generated/torch.rot90.rst", "generated/torch.round.rst", "generated/torch.row_stack.rst", "generated/torch.rsqrt.rst", "generated/torch.save.rst", "generated/torch.scatter.rst", "generated/torch.scatter_add.rst", "generated/torch.scatter_reduce.rst", "generated/torch.searchsorted.rst", "generated/torch.seed.rst", "generated/torch.select.rst", "generated/torch.select_scatter.rst", "generated/torch.set_default_device.rst", "generated/torch.set_default_dtype.rst", "generated/torch.set_default_tensor_type.rst", "generated/torch.set_deterministic_debug_mode.rst", "generated/torch.set_float32_matmul_precision.rst", "generated/torch.set_flush_denormal.rst", "generated/torch.set_num_interop_threads.rst", "generated/torch.set_num_threads.rst", "generated/torch.set_printoptions.rst", "generated/torch.set_rng_state.rst", "generated/torch.set_warn_always.rst", "generated/torch.sgn.rst", "generated/torch.sigmoid.rst", "generated/torch.sign.rst", "generated/torch.signal.windows.bartlett.rst", "generated/torch.signal.windows.blackman.rst", "generated/torch.signal.windows.cosine.rst", "generated/torch.signal.windows.exponential.rst", "generated/torch.signal.windows.gaussian.rst", "generated/torch.signal.windows.general_cosine.rst", 
"generated/torch.signal.windows.general_hamming.rst", "generated/torch.signal.windows.hamming.rst", "generated/torch.signal.windows.hann.rst", "generated/torch.signal.windows.kaiser.rst", "generated/torch.signal.windows.nuttall.rst", "generated/torch.signbit.rst", "generated/torch.sin.rst", "generated/torch.sinc.rst", "generated/torch.sinh.rst", "generated/torch.slice_scatter.rst", "generated/torch.slogdet.rst", "generated/torch.smm.rst", "generated/torch.softmax.rst", "generated/torch.sort.rst", "generated/torch.sparse.addmm.rst", "generated/torch.sparse.as_sparse_gradcheck.rst", "generated/torch.sparse.check_sparse_tensor_invariants.rst", "generated/torch.sparse.log_softmax.rst", "generated/torch.sparse.mm.rst", "generated/torch.sparse.sampled_addmm.rst", "generated/torch.sparse.softmax.rst", "generated/torch.sparse.spdiags.rst", "generated/torch.sparse.sum.rst", "generated/torch.sparse_bsc_tensor.rst", "generated/torch.sparse_bsr_tensor.rst", "generated/torch.sparse_compressed_tensor.rst", "generated/torch.sparse_coo_tensor.rst", "generated/torch.sparse_csc_tensor.rst", "generated/torch.sparse_csr_tensor.rst", "generated/torch.split.rst", "generated/torch.sqrt.rst", "generated/torch.square.rst", "generated/torch.squeeze.rst", "generated/torch.sspaddmm.rst", "generated/torch.stack.rst", "generated/torch.std.rst", "generated/torch.std_mean.rst", "generated/torch.stft.rst", "generated/torch.sub.rst", "generated/torch.subtract.rst", "generated/torch.sum.rst", "generated/torch.svd.rst", "generated/torch.svd_lowrank.rst", "generated/torch.swapaxes.rst", "generated/torch.swapdims.rst", "generated/torch.sym_float.rst", "generated/torch.sym_int.rst", "generated/torch.sym_ite.rst", "generated/torch.sym_max.rst", "generated/torch.sym_min.rst", "generated/torch.sym_not.rst", "generated/torch.t.rst", "generated/torch.take.rst", "generated/torch.take_along_dim.rst", "generated/torch.tan.rst", "generated/torch.tanh.rst", "generated/torch.tensor.rst", "generated/torch.tensor_split.rst", "generated/torch.tensordot.rst", "generated/torch.tile.rst", "generated/torch.topk.rst", "generated/torch.trace.rst", "generated/torch.transpose.rst", "generated/torch.trapezoid.rst", "generated/torch.trapz.rst", "generated/torch.triangular_solve.rst", "generated/torch.tril.rst", "generated/torch.tril_indices.rst", "generated/torch.triu.rst", "generated/torch.triu_indices.rst", "generated/torch.true_divide.rst", "generated/torch.trunc.rst", "generated/torch.unbind.rst", "generated/torch.unflatten.rst", "generated/torch.unique.rst", "generated/torch.unique_consecutive.rst", "generated/torch.unravel_index.rst", "generated/torch.unsqueeze.rst", "generated/torch.use_deterministic_algorithms.rst", "generated/torch.utils.generate_methods_for_privateuse1_backend.rst", "generated/torch.utils.get_cpp_backtrace.rst", "generated/torch.utils.rename_privateuse1_backend.rst", "generated/torch.utils.set_module.rst", "generated/torch.utils.swap_tensors.rst", "generated/torch.vander.rst", "generated/torch.var.rst", "generated/torch.var_mean.rst", "generated/torch.vdot.rst", "generated/torch.view_as_complex.rst", "generated/torch.view_as_real.rst", "generated/torch.vmap.rst", "generated/torch.vsplit.rst", "generated/torch.vstack.rst", "generated/torch.where.rst", "generated/torch.xlogy.rst", "generated/torch.xpu.Event.rst", "generated/torch.xpu.Stream.rst", "generated/torch.xpu.StreamContext.rst", "generated/torch.xpu.current_device.rst", "generated/torch.xpu.current_stream.rst", "generated/torch.xpu.device.rst", 
"generated/torch.xpu.device_count.rst", "generated/torch.xpu.device_of.rst", "generated/torch.xpu.empty_cache.rst", "generated/torch.xpu.get_device_capability.rst", "generated/torch.xpu.get_device_name.rst", "generated/torch.xpu.get_device_properties.rst", "generated/torch.xpu.get_rng_state.rst", "generated/torch.xpu.get_rng_state_all.rst", "generated/torch.xpu.init.rst", "generated/torch.xpu.initial_seed.rst", "generated/torch.xpu.is_available.rst", "generated/torch.xpu.is_initialized.rst", "generated/torch.xpu.manual_seed.rst", "generated/torch.xpu.manual_seed_all.rst", "generated/torch.xpu.seed.rst", "generated/torch.xpu.seed_all.rst", "generated/torch.xpu.set_device.rst", "generated/torch.xpu.set_rng_state.rst", "generated/torch.xpu.set_rng_state_all.rst", "generated/torch.xpu.set_stream.rst", "generated/torch.xpu.stream.rst", "generated/torch.xpu.synchronize.rst", "generated/torch.zeros.rst", "generated/torch.zeros_like.rst", "hub.rst", "index.rst", "jit.rst", "jit_builtin_functions.rst", "jit_language_reference.rst", "jit_language_reference_v2.rst", "jit_python_reference.rst", "jit_unsupported.rst", "jit_utils.rst", "library.rst", "linalg.rst", "logging.rst", "masked.rst", "meta.rst", "miscellaneous_environment_variables.rst", "mobile_optimizer.rst", "model_zoo.rst", "module_tracker.rst", "monitor.rst", "mps.rst", "mtia.rst", "multiprocessing.rst", "name_inference.rst", "named_tensor.rst", "nested.rst", "nn.rst", "nn.attention.rst", "nn.attention.bias.rst", "nn.functional.rst", "nn.init.rst", "notes/amp_examples.rst", "notes/autograd.rst", "notes/broadcasting.rst", "notes/cpu_threading_torchscript_inference.rst", "notes/cuda.rst", "notes/custom_operators.rst", "notes/ddp.rst", "notes/extending.rst", "notes/extending.func.rst", "notes/faq.rst", "notes/fsdp.rst", "notes/get_start_xpu.rst", "notes/gradcheck.rst", "notes/hip.rst", "notes/large_scale_deployments.rst", "notes/modules.rst", "notes/mps.rst", "notes/multiprocessing.rst", "notes/numerical_accuracy.rst", "notes/randomness.rst", "notes/serialization.rst", "notes/windows.rst", "onnx.rst", "onnx_dynamo.rst", "onnx_dynamo_onnxruntime_backend.rst", "onnx_torchscript.rst", "onnx_torchscript_supported_aten_ops.rst", "optim.rst", "package.rst", "profiler.rst", "quantization.rst", "quantization-accuracy-debugging.rst", "quantization-backend-configuration.rst", "quantization-support.rst", "random.rst", "rpc.rst", "rpc/distributed_autograd.rst", "rpc/rref.rst", "signal.rst", "size.rst", "sparse.rst", "special.rst", "storage.rst", "tensor_attributes.rst", "tensor_view.rst", "tensorboard.rst", "tensors.rst", "testing.rst", "threading_environment_variables.rst", "torch.rst", "torch.ao.ns._numeric_suite.rst", "torch.ao.ns._numeric_suite_fx.rst", "torch.compiler.rst", "torch.compiler_aot_inductor.rst", "torch.compiler_api.rst", "torch.compiler_best_practices_for_backends.rst", "torch.compiler_cudagraph_trees.rst", "torch.compiler_custom_backends.rst", "torch.compiler_dynamic_shapes.rst", "torch.compiler_dynamo_deepdive.rst", "torch.compiler_dynamo_overview.rst", "torch.compiler_fake_tensor.rst", "torch.compiler_faq.rst", "torch.compiler_fine_grain_apis.rst", "torch.compiler_get_started.rst", "torch.compiler_inductor_profiling.rst", "torch.compiler_ir.rst", "torch.compiler_nn_module.rst", "torch.compiler_performance_dashboard.rst", "torch.compiler_profiling_torch_compile.rst", "torch.compiler_transformations.rst", "torch.compiler_troubleshooting.rst", "torch.overrides.rst", "torch_cuda_memory.rst", "torch_environment_variables.rst", 
"torch_nccl_environment_variables.rst", "type_info.rst", "utils.rst", "xpu.rst"], "titles": ["Automatic Mixed Precision package - torch.amp", "Automatic differentiation package - torch.autograd", "torch.backends", "Benchmark Utils - torch.utils.benchmark", "torch.utils.bottleneck", "torch.utils.checkpoint", "PyTorch Governance | Build + CI", "PyTorch Contribution Guide", "PyTorch Design Philosophy", "PyTorch Governance | Mechanics", "PyTorch Governance | Maintainers", "Complex Numbers", "Control Flow - Cond", "torch.__config__", "torch.utils.cpp_extension", "C++", "torch.cpu", "torch.cuda", "CUDA Stream Sanitizer", "TunableOp", "CUDA Environment Variables", "<no title>", "<no title>", "torch.utils.data", "DDP Communication Hooks", "Debugging Environment Variables", "torch::deploy has been moved to pytorch/multipy", "torch.utils.deterministic", "Distributed communication package - torch.distributed", "Generic Join Context Manager", "Distributed Checkpoint - torch.distributed.checkpoint", "Torch Distributed Elastic", "Distributed Optimizers", "Pipeline Parallelism", "Tensor Parallelism - torch.distributed.tensor.parallel", "Probability distributions - torch.distributions", "torch.utils.dlpack", "Elastic Agent", "Control Plane", "Customization", "Error Propagation", "Events", "Examples", "TorchElastic Kubernetes", "Metrics", "Multiprocessing", "Quickstart", "Rendezvous", "torchrun (Elastic Launch)", "Subprocess Handling", "Expiration Timers", "Train script", "torch.export", "torch.export IR Specification", "torch.fft", "FullyShardedDataParallel", "torch.func", "torch.func API Reference", "Patching Batch Norm", "Migrating from functorch to torch.func", "UX Limitations", "torch.func Whirlwind Tour", "torch.__future__", "torch.futures", "torch.fx", "torch.fx.experimental", "ExportDB", "python.assert", "python.builtin", "python.closure", "python.context-manager", "python.control-flow", "python.data-structure", "python.object-model", "torch.cond", "torch.dynamic-shape", "torch.dynamic-value", "torch.escape-hatch", "torch.map", "torch.mutation", "torch.operator", "FXE0007:fx-graph-to-onnx", "FXE0008:fx-node-to-onnx", "FXE0010:fx-pass", "FXE0011:no-symbolic-function-for-call-function", "FXE0012:unsupported-fx-node-analysis", "FXE0013:op-level-debugging", "FXE0014:find-opschema-matched-symbolic-function", "FXE0015:fx-node-insert-type-promotion", "FXE0016:find-operator-overloads-in-onnx-registry", "Generator", "torch.Tensor.abs", "torch.Tensor.abs_", "torch.Tensor.absolute", "torch.Tensor.absolute_", "torch.Tensor.acos", "torch.Tensor.acos_", "torch.Tensor.acosh", "torch.Tensor.acosh_", "torch.Tensor.add", "torch.Tensor.add_", "torch.Tensor.addbmm", "torch.Tensor.addbmm_", "torch.Tensor.addcdiv", "torch.Tensor.addcdiv_", "torch.Tensor.addcmul", "torch.Tensor.addcmul_", "torch.Tensor.addmm", "torch.Tensor.addmm_", "torch.Tensor.addmv", "torch.Tensor.addmv_", "torch.Tensor.addr", "torch.Tensor.addr_", "torch.Tensor.adjoint", "torch.Tensor.all", "torch.Tensor.allclose", "torch.Tensor.amax", "torch.Tensor.amin", "torch.Tensor.aminmax", "torch.Tensor.angle", "torch.Tensor.any", "torch.Tensor.apply_", "torch.Tensor.arccos", "torch.Tensor.arccos_", "torch.Tensor.arccosh", "torch.Tensor.arccosh_", "torch.Tensor.arcsin", "torch.Tensor.arcsin_", "torch.Tensor.arcsinh", "torch.Tensor.arcsinh_", "torch.Tensor.arctan", "torch.Tensor.arctan2", "torch.Tensor.arctan2_", "torch.Tensor.arctan_", "torch.Tensor.arctanh", "torch.Tensor.arctanh_", "torch.Tensor.argmax", "torch.Tensor.argmin", "torch.Tensor.argsort", 
"torch.Tensor.argwhere", "torch.Tensor.as_strided", "torch.Tensor.as_subclass", "torch.Tensor.asin", "torch.Tensor.asin_", "torch.Tensor.asinh", "torch.Tensor.asinh_", "torch.Tensor.atan", "torch.Tensor.atan2", "torch.Tensor.atan2_", "torch.Tensor.atan_", "torch.Tensor.atanh", "torch.Tensor.atanh_", "torch.Tensor.backward", "torch.Tensor.baddbmm", "torch.Tensor.baddbmm_", "torch.Tensor.bernoulli", "torch.Tensor.bernoulli_", "torch.Tensor.bfloat16", "torch.Tensor.bincount", "torch.Tensor.bitwise_and", "torch.Tensor.bitwise_and_", "torch.Tensor.bitwise_left_shift", "torch.Tensor.bitwise_left_shift_", "torch.Tensor.bitwise_not", "torch.Tensor.bitwise_not_", "torch.Tensor.bitwise_or", "torch.Tensor.bitwise_or_", "torch.Tensor.bitwise_right_shift", "torch.Tensor.bitwise_right_shift_", "torch.Tensor.bitwise_xor", "torch.Tensor.bitwise_xor_", "torch.Tensor.bmm", "torch.Tensor.bool", "torch.Tensor.broadcast_to", "torch.Tensor.byte", "torch.Tensor.cauchy_", "torch.Tensor.ccol_indices", "torch.Tensor.cdouble", "torch.Tensor.ceil", "torch.Tensor.ceil_", "torch.Tensor.cfloat", "torch.Tensor.chalf", "torch.Tensor.char", "torch.Tensor.cholesky", "torch.Tensor.cholesky_inverse", "torch.Tensor.cholesky_solve", "torch.Tensor.chunk", "torch.Tensor.clamp", "torch.Tensor.clamp_", "torch.Tensor.clip", "torch.Tensor.clip_", "torch.Tensor.clone", "torch.Tensor.coalesce", "torch.Tensor.col_indices", "torch.Tensor.conj", "torch.Tensor.conj_physical", "torch.Tensor.conj_physical_", "torch.Tensor.contiguous", "torch.Tensor.copy_", "torch.Tensor.copysign", "torch.Tensor.copysign_", "torch.Tensor.corrcoef", "torch.Tensor.cos", "torch.Tensor.cos_", "torch.Tensor.cosh", "torch.Tensor.cosh_", "torch.Tensor.count_nonzero", "torch.Tensor.cov", "torch.Tensor.cpu", "torch.Tensor.cross", "torch.Tensor.crow_indices", "torch.Tensor.cuda", "torch.Tensor.cummax", "torch.Tensor.cummin", "torch.Tensor.cumprod", "torch.Tensor.cumprod_", "torch.Tensor.cumsum", "torch.Tensor.cumsum_", "torch.Tensor.data_ptr", "torch.Tensor.deg2rad", "torch.Tensor.dense_dim", "torch.Tensor.dequantize", "torch.Tensor.det", "torch.Tensor.detach", "torch.Tensor.detach_", "torch.Tensor.device", "torch.Tensor.diag", "torch.Tensor.diag_embed", "torch.Tensor.diagflat", "torch.Tensor.diagonal", "torch.Tensor.diagonal_scatter", "torch.Tensor.diff", "torch.Tensor.digamma", "torch.Tensor.digamma_", "torch.Tensor.dim", "torch.Tensor.dim_order", "torch.Tensor.dist", "torch.Tensor.div", "torch.Tensor.div_", "torch.Tensor.divide", "torch.Tensor.divide_", "torch.Tensor.dot", "torch.Tensor.double", "torch.Tensor.dsplit", "torch.Tensor.element_size", "torch.Tensor.eq", "torch.Tensor.eq_", "torch.Tensor.equal", "torch.Tensor.erf", "torch.Tensor.erf_", "torch.Tensor.erfc", "torch.Tensor.erfc_", "torch.Tensor.erfinv", "torch.Tensor.erfinv_", "torch.Tensor.exp", "torch.Tensor.exp_", "torch.Tensor.expand", "torch.Tensor.expand_as", "torch.Tensor.expm1", "torch.Tensor.expm1_", "torch.Tensor.exponential_", "torch.Tensor.fill_", "torch.Tensor.fill_diagonal_", "torch.Tensor.fix", "torch.Tensor.fix_", "torch.Tensor.flatten", "torch.Tensor.flip", "torch.Tensor.fliplr", "torch.Tensor.flipud", "torch.Tensor.float", "torch.Tensor.float_power", "torch.Tensor.float_power_", "torch.Tensor.floor", "torch.Tensor.floor_", "torch.Tensor.floor_divide", "torch.Tensor.floor_divide_", "torch.Tensor.fmax", "torch.Tensor.fmin", "torch.Tensor.fmod", "torch.Tensor.fmod_", "torch.Tensor.frac", "torch.Tensor.frac_", "torch.Tensor.frexp", "torch.Tensor.gather", "torch.Tensor.gcd", "torch.Tensor.gcd_", 
"torch.Tensor.ge", "torch.Tensor.ge_", "torch.Tensor.geometric_", "torch.Tensor.geqrf", "torch.Tensor.ger", "torch.Tensor.get_device", "torch.Tensor.grad", "torch.Tensor.greater", "torch.Tensor.greater_", "torch.Tensor.greater_equal", "torch.Tensor.greater_equal_", "torch.Tensor.gt", "torch.Tensor.gt_", "torch.Tensor.half", "torch.Tensor.hardshrink", "torch.Tensor.heaviside", "torch.Tensor.histc", "torch.Tensor.histogram", "torch.Tensor.hsplit", "torch.Tensor.hypot", "torch.Tensor.hypot_", "torch.Tensor.i0", "torch.Tensor.i0_", "torch.Tensor.igamma", "torch.Tensor.igamma_", "torch.Tensor.igammac", "torch.Tensor.igammac_", "torch.Tensor.imag", "torch.Tensor.index_add", "torch.Tensor.index_add_", "torch.Tensor.index_copy", "torch.Tensor.index_copy_", "torch.Tensor.index_fill", "torch.Tensor.index_fill_", "torch.Tensor.index_put", "torch.Tensor.index_put_", "torch.Tensor.index_reduce", "torch.Tensor.index_reduce_", "torch.Tensor.index_select", "torch.Tensor.indices", "torch.Tensor.inner", "torch.Tensor.int", "torch.Tensor.int_repr", "torch.Tensor.inverse", "torch.Tensor.is_coalesced", "torch.Tensor.is_complex", "torch.Tensor.is_conj", "torch.Tensor.is_contiguous", "torch.Tensor.is_cuda", "torch.Tensor.is_floating_point", "torch.Tensor.is_inference", "torch.Tensor.is_leaf", "torch.Tensor.is_meta", "torch.Tensor.is_pinned", "torch.Tensor.is_quantized", "torch.Tensor.is_set_to", "torch.Tensor.is_shared", "torch.Tensor.is_signed", "torch.Tensor.is_sparse", "torch.Tensor.is_sparse_csr", "torch.Tensor.isclose", "torch.Tensor.isfinite", "torch.Tensor.isinf", "torch.Tensor.isnan", "torch.Tensor.isneginf", "torch.Tensor.isposinf", "torch.Tensor.isreal", "torch.Tensor.istft", "torch.Tensor.item", "torch.Tensor.itemsize", "torch.Tensor.kthvalue", "torch.Tensor.lcm", "torch.Tensor.lcm_", "torch.Tensor.ldexp", "torch.Tensor.ldexp_", "torch.Tensor.le", "torch.Tensor.le_", "torch.Tensor.lerp", "torch.Tensor.lerp_", "torch.Tensor.less", "torch.Tensor.less_", "torch.Tensor.less_equal", "torch.Tensor.less_equal_", "torch.Tensor.lgamma", "torch.Tensor.lgamma_", "torch.Tensor.log", "torch.Tensor.log10", "torch.Tensor.log10_", "torch.Tensor.log1p", "torch.Tensor.log1p_", "torch.Tensor.log2", "torch.Tensor.log2_", "torch.Tensor.log_", "torch.Tensor.log_normal_", "torch.Tensor.logaddexp", "torch.Tensor.logaddexp2", "torch.Tensor.logcumsumexp", "torch.Tensor.logdet", "torch.Tensor.logical_and", "torch.Tensor.logical_and_", "torch.Tensor.logical_not", "torch.Tensor.logical_not_", "torch.Tensor.logical_or", "torch.Tensor.logical_or_", "torch.Tensor.logical_xor", "torch.Tensor.logical_xor_", "torch.Tensor.logit", "torch.Tensor.logit_", "torch.Tensor.logsumexp", "torch.Tensor.long", "torch.Tensor.lt", "torch.Tensor.lt_", "torch.Tensor.lu", "torch.Tensor.lu_solve", "torch.Tensor.map_", "torch.Tensor.masked_fill", "torch.Tensor.masked_fill_", "torch.Tensor.masked_scatter", "torch.Tensor.masked_scatter_", "torch.Tensor.masked_select", "torch.Tensor.matmul", "torch.Tensor.matrix_exp", "torch.Tensor.matrix_power", "torch.Tensor.max", "torch.Tensor.maximum", "torch.Tensor.mean", "torch.Tensor.median", "torch.Tensor.min", "torch.Tensor.minimum", "torch.Tensor.mm", "torch.Tensor.mode", "torch.Tensor.module_load", "torch.Tensor.moveaxis", "torch.Tensor.movedim", "torch.Tensor.msort", "torch.Tensor.mul", "torch.Tensor.mul_", "torch.Tensor.multinomial", "torch.Tensor.multiply", "torch.Tensor.multiply_", "torch.Tensor.mv", "torch.Tensor.mvlgamma", "torch.Tensor.mvlgamma_", "torch.Tensor.nan_to_num", "torch.Tensor.nan_to_num_", 
"torch.Tensor.nanmean", "torch.Tensor.nanmedian", "torch.Tensor.nanquantile", "torch.Tensor.nansum", "torch.Tensor.narrow", "torch.Tensor.narrow_copy", "torch.Tensor.nbytes", "torch.Tensor.ndim", "torch.Tensor.ndimension", "torch.Tensor.ne", "torch.Tensor.ne_", "torch.Tensor.neg", "torch.Tensor.neg_", "torch.Tensor.negative", "torch.Tensor.negative_", "torch.Tensor.nelement", "torch.Tensor.new_empty", "torch.Tensor.new_full", "torch.Tensor.new_ones", "torch.Tensor.new_tensor", "torch.Tensor.new_zeros", "torch.Tensor.nextafter", "torch.Tensor.nextafter_", "torch.Tensor.nonzero", "torch.Tensor.norm", "torch.Tensor.normal_", "torch.Tensor.not_equal", "torch.Tensor.not_equal_", "torch.Tensor.numel", "torch.Tensor.numpy", "torch.Tensor.orgqr", "torch.Tensor.ormqr", "torch.Tensor.outer", "torch.Tensor.permute", "torch.Tensor.pin_memory", "torch.Tensor.pinverse", "torch.Tensor.polygamma", "torch.Tensor.polygamma_", "torch.Tensor.positive", "torch.Tensor.pow", "torch.Tensor.pow_", "torch.Tensor.prod", "torch.Tensor.put_", "torch.Tensor.q_per_channel_axis", "torch.Tensor.q_per_channel_scales", "torch.Tensor.q_per_channel_zero_points", "torch.Tensor.q_scale", "torch.Tensor.q_zero_point", "torch.Tensor.qr", "torch.Tensor.qscheme", "torch.Tensor.quantile", "torch.Tensor.rad2deg", "torch.Tensor.random_", "torch.Tensor.ravel", "torch.Tensor.real", "torch.Tensor.reciprocal", "torch.Tensor.reciprocal_", "torch.Tensor.record_stream", "torch.Tensor.register_hook", "torch.Tensor.register_post_accumulate_grad_hook", "torch.Tensor.remainder", "torch.Tensor.remainder_", "torch.Tensor.renorm", "torch.Tensor.renorm_", "torch.Tensor.repeat", "torch.Tensor.repeat_interleave", "torch.Tensor.requires_grad", "torch.Tensor.requires_grad_", "torch.Tensor.reshape", "torch.Tensor.reshape_as", "torch.Tensor.resize_", "torch.Tensor.resize_as_", "torch.Tensor.resolve_conj", "torch.Tensor.resolve_neg", "torch.Tensor.retain_grad", "torch.Tensor.retains_grad", "torch.Tensor.roll", "torch.Tensor.rot90", "torch.Tensor.round", "torch.Tensor.round_", "torch.Tensor.row_indices", "torch.Tensor.rsqrt", "torch.Tensor.rsqrt_", "torch.Tensor.scatter", "torch.Tensor.scatter_", "torch.Tensor.scatter_add", "torch.Tensor.scatter_add_", "torch.Tensor.scatter_reduce", "torch.Tensor.scatter_reduce_", "torch.Tensor.select", "torch.Tensor.select_scatter", "torch.Tensor.set_", "torch.Tensor.sgn", "torch.Tensor.sgn_", "torch.Tensor.shape", "torch.Tensor.share_memory_", "torch.Tensor.short", "torch.Tensor.sigmoid", "torch.Tensor.sigmoid_", "torch.Tensor.sign", "torch.Tensor.sign_", "torch.Tensor.signbit", "torch.Tensor.sin", "torch.Tensor.sin_", "torch.Tensor.sinc", "torch.Tensor.sinc_", "torch.Tensor.sinh", "torch.Tensor.sinh_", "torch.Tensor.size", "torch.Tensor.slice_scatter", "torch.Tensor.slogdet", "torch.Tensor.smm", "torch.Tensor.softmax", "torch.Tensor.sort", "torch.Tensor.sparse_dim", "torch.Tensor.sparse_mask", "torch.Tensor.sparse_resize_", "torch.Tensor.sparse_resize_and_clear_", "torch.Tensor.split", "torch.Tensor.sqrt", "torch.Tensor.sqrt_", "torch.Tensor.square", "torch.Tensor.square_", "torch.Tensor.squeeze", "torch.Tensor.squeeze_", "torch.Tensor.sspaddmm", "torch.Tensor.std", "torch.Tensor.stft", "torch.Tensor.storage", "torch.Tensor.storage_offset", "torch.Tensor.storage_type", "torch.Tensor.stride", "torch.Tensor.sub", "torch.Tensor.sub_", "torch.Tensor.subtract", "torch.Tensor.subtract_", "torch.Tensor.sum", "torch.Tensor.sum_to_size", "torch.Tensor.svd", "torch.Tensor.swapaxes", "torch.Tensor.swapdims", "torch.Tensor.t", 
"torch.Tensor.t_", "torch.Tensor.take", "torch.Tensor.take_along_dim", "torch.Tensor.tan", "torch.Tensor.tan_", "torch.Tensor.tanh", "torch.Tensor.tanh_", "torch.Tensor.tensor_split", "torch.Tensor.tile", "torch.Tensor.to", "torch.Tensor.to_dense", "torch.Tensor.to_mkldnn", "torch.Tensor.to_sparse", "torch.Tensor.to_sparse_bsc", "torch.Tensor.to_sparse_bsr", "torch.Tensor.to_sparse_coo", "torch.Tensor.to_sparse_csc", "torch.Tensor.to_sparse_csr", "torch.Tensor.tolist", "torch.Tensor.topk", "torch.Tensor.trace", "torch.Tensor.transpose", "torch.Tensor.transpose_", "torch.Tensor.triangular_solve", "torch.Tensor.tril", "torch.Tensor.tril_", "torch.Tensor.triu", "torch.Tensor.triu_", "torch.Tensor.true_divide", "torch.Tensor.true_divide_", "torch.Tensor.trunc", "torch.Tensor.trunc_", "torch.Tensor.type", "torch.Tensor.type_as", "torch.Tensor.unbind", "torch.Tensor.unflatten", "torch.Tensor.unfold", "torch.Tensor.uniform_", "torch.Tensor.unique", "torch.Tensor.unique_consecutive", "torch.Tensor.unsqueeze", "torch.Tensor.unsqueeze_", "torch.Tensor.untyped_storage", "torch.Tensor.values", "torch.Tensor.var", "torch.Tensor.vdot", "torch.Tensor.view", "torch.Tensor.view_as", "torch.Tensor.vsplit", "torch.Tensor.where", "torch.Tensor.xlogy", "torch.Tensor.xlogy_", "torch.Tensor.xpu", "torch.Tensor.zero_", "torch._assert", "torch._foreach_abs", "torch._foreach_abs_", "torch._foreach_acos", "torch._foreach_acos_", "torch._foreach_asin", "torch._foreach_asin_", "torch._foreach_atan", "torch._foreach_atan_", "torch._foreach_ceil", "torch._foreach_ceil_", "torch._foreach_cos", "torch._foreach_cos_", "torch._foreach_cosh", "torch._foreach_cosh_", "torch._foreach_erf", "torch._foreach_erf_", "torch._foreach_erfc", "torch._foreach_erfc_", "torch._foreach_exp", "torch._foreach_exp_", "torch._foreach_expm1", "torch._foreach_expm1_", "torch._foreach_floor", "torch._foreach_floor_", "torch._foreach_frac", "torch._foreach_frac_", "torch._foreach_lgamma", "torch._foreach_lgamma_", "torch._foreach_log", "torch._foreach_log10", "torch._foreach_log10_", "torch._foreach_log1p", "torch._foreach_log1p_", "torch._foreach_log2", "torch._foreach_log2_", "torch._foreach_log_", "torch._foreach_neg", "torch._foreach_neg_", "torch._foreach_reciprocal", "torch._foreach_reciprocal_", "torch._foreach_round", "torch._foreach_round_", "torch._foreach_sigmoid", "torch._foreach_sigmoid_", "torch._foreach_sin", "torch._foreach_sin_", "torch._foreach_sinh", "torch._foreach_sinh_", "torch._foreach_sqrt", "torch._foreach_sqrt_", "torch._foreach_tan", "torch._foreach_tan_", "torch._foreach_trunc", "torch._foreach_trunc_", "torch._foreach_zero_", "torch._logging.set_logs", "torch.abs", "torch.absolute", "torch.acos", "torch.acosh", "torch.add", "torch.addbmm", "torch.addcdiv", "torch.addcmul", "torch.addmm", "torch.addmv", "torch.addr", "torch.adjoint", "torch.all", "torch.allclose", "torch.amax", "torch.amin", "torch.aminmax", "torch.angle", "torch.any", "BNReLU2d", "BNReLU3d", "ConvBn1d", "ConvBn2d", "ConvBn3d", "ConvBnReLU1d", "ConvBnReLU2d", "ConvBnReLU3d", "ConvReLU1d", "ConvReLU2d", "ConvReLU3d", "LinearReLU", "ConvBn1d", "ConvBn2d", "ConvBn3d", "ConvBnReLU1d", "ConvBnReLU2d", "ConvBnReLU3d", "ConvReLU2d", "ConvReLU3d", "LinearReLU", "freeze_bn_stats", "update_bn_stats", "BNReLU2d", "BNReLU3d", "ConvReLU1d", "ConvReLU2d", "ConvReLU3d", "LinearReLU", "LinearReLU", "Conv2d", "Conv3d", "Linear", "Linear", "LSTM", "MultiheadAttention", "BatchNorm2d", "BatchNorm3d", "Conv1d", "Conv2d", "Conv3d", "ConvTranspose1d", "ConvTranspose2d", 
"ConvTranspose3d", "ELU", "Embedding", "EmbeddingBag", "FXFloatFunctional", "FloatFunctional", "GroupNorm", "Hardswish", "InstanceNorm1d", "InstanceNorm2d", "InstanceNorm3d", "LayerNorm", "LeakyReLU", "Linear", "QFunctional", "ReLU6", "Sigmoid", "GRU", "GRUCell", "LSTM", "LSTMCell", "Linear", "RNNCell", "adaptive_avg_pool2d", "adaptive_avg_pool3d", "avg_pool2d", "avg_pool3d", "celu", "clamp", "conv1d", "conv2d", "conv3d", "elu", "hardsigmoid", "hardswish", "hardtanh", "interpolate", "leaky_relu", "linear", "max_pool1d", "max_pool2d", "threshold", "upsample", "upsample_bilinear", "upsample_nearest", "DeQuantStub", "QuantStub", "QuantWrapper", "add_quant_dequant", "BackendConfig", "BackendPatternConfig", "DTypeConfig", "DTypeWithConstraints", "ObservationType", "convert", "default_eval_fn", "FakeQuantize", "FakeQuantizeBase", "FixedQParamsFakeQuantize", "FusedMovingAvgObsFakeQuantize", "default_fake_quant", "default_fused_act_fake_quant", "default_fused_per_channel_wt_fake_quant", "default_fused_wt_fake_quant", "default_histogram_fake_quant", "default_per_channel_weight_fake_quant", "default_weight_fake_quant", "disable_fake_quant", "disable_observer", "enable_fake_quant", "enable_observer", "fuse_modules", "ConvertCustomConfig", "FuseCustomConfig", "PrepareCustomConfig", "StandaloneModuleConfigEntry", "HistogramObserver", "MinMaxObserver", "MovingAverageMinMaxObserver", "MovingAveragePerChannelMinMaxObserver", "NoopObserver", "ObserverBase", "PerChannelMinMaxObserver", "PlaceholderObserver", "RecordingObserver", "default_debug_observer", "default_dynamic_quant_observer", "default_float_qparams_observer", "default_histogram_observer", "default_observer", "default_per_channel_weight_observer", "default_placeholder_observer", "default_weight_observer", "get_observer_state_dict", "load_observer_state_dict", "prepare", "prepare_qat", "propagate_qconfig", "model_is_exported", "QConfig", "default_activation_only_qconfig", "default_debug_qconfig", "default_dynamic_qconfig", "default_per_channel_qconfig", "default_qat_qconfig", "default_qat_qconfig_v2", "default_qconfig", "default_weight_only_qconfig", "float16_dynamic_qconfig", "float16_static_qconfig", "float_qparams_weight_only_qconfig", "per_channel_dynamic_qconfig", "QConfigMapping", "get_default_qat_qconfig_mapping", "get_default_qconfig_mapping", "quantize", "quantize_dynamic", "convert_fx", "fuse_fx", "prepare_fx", "prepare_qat_fx", "quantize_qat", "swap_module", "torch.arange", "torch.arccos", "torch.arccosh", "torch.arcsin", "torch.arcsinh", "torch.arctan", "torch.arctan2", "torch.arctanh", "torch.are_deterministic_algorithms_enabled", "torch.argmax", "torch.argmin", "torch.argsort", "torch.argwhere", "torch.as_strided", "torch.as_tensor", "torch.asarray", "torch.asin", "torch.asinh", "torch.atan", "torch.atan2", "torch.atanh", "torch.atleast_1d", "torch.atleast_2d", "torch.atleast_3d", "torch.autograd.Function.backward", "torch.autograd.Function.forward", "torch.autograd.Function.jvp", "torch.autograd.Function.vmap", "torch.autograd.backward", "UnpackedDualTensor", "dual_level", "torch.autograd.forward_ad.enter_dual_level", "torch.autograd.forward_ad.exit_dual_level", "torch.autograd.forward_ad.make_dual", "torch.autograd.forward_ad.unpack_dual", "BackwardCFunction", "torch.autograd.function.FunctionCtx.mark_dirty", "torch.autograd.function.FunctionCtx.mark_non_differentiable", "torch.autograd.function.FunctionCtx.save_for_backward", "torch.autograd.function.FunctionCtx.set_materialize_grads", "InplaceFunction", "NestedIOFunction", 
"torch.autograd.function.once_differentiable", "torch.autograd.functional.hessian", "torch.autograd.functional.hvp", "torch.autograd.functional.jacobian", "torch.autograd.functional.jvp", "torch.autograd.functional.vhp", "torch.autograd.functional.vjp", "torch.autograd.grad", "inference_mode", "set_grad_enabled", "set_multithreading_enabled", "torch.autograd.gradcheck.GradcheckError", "torch.autograd.gradcheck.gradcheck", "torch.autograd.gradcheck.gradgradcheck", "torch.autograd.graph.Node.metadata", "torch.autograd.graph.Node.name", "torch.autograd.graph.Node.next_functions", "torch.autograd.graph.Node.register_hook", "torch.autograd.graph.Node.register_prehook", "torch.autograd.graph.increment_version", "EnforceUnique", "KinetoStepTracker", "torch.autograd.profiler.load_nvprof", "torch.autograd.profiler.parse_nvprof_trace", "torch.autograd.profiler.profile.export_chrome_trace", "torch.autograd.profiler.profile.key_averages", "torch.autograd.profiler.profile.self_cpu_time_total", "torch.autograd.profiler.profile.total_average", "record_function", "Interval", "Kernel", "MemRecordsAcc", "StringTable", "torch.baddbmm", "torch.bartlett_window", "torch.bernoulli", "torch.bincount", "torch.bitwise_and", "torch.bitwise_left_shift", "torch.bitwise_not", "torch.bitwise_or", "torch.bitwise_right_shift", "torch.bitwise_xor", "torch.blackman_window", "torch.block_diag", "torch.bmm", "torch.broadcast_shapes", "torch.broadcast_tensors", "torch.broadcast_to", "torch.bucketize", "torch.can_cast", "torch.cartesian_prod", "torch.cat", "torch.cdist", "torch.ceil", "torch.chain_matmul", "torch.cholesky", "torch.cholesky_inverse", "torch.cholesky_solve", "torch.chunk", "torch.clamp", "torch.clip", "torch.clone", "torch.column_stack", "torch.combinations", "torch.compile", "torch.compiled_with_cxx11_abi", "torch.compiler.allow_in_graph", "torch.compiler.assume_constant_result", "torch.compiler.compile", "torch.compiler.cudagraph_mark_step_begin", "torch.compiler.disable", "torch.compiler.is_compiling", "torch.compiler.is_dynamo_compiling", "torch.compiler.list_backends", "torch.compiler.reset", "torch.complex", "torch.concat", "torch.concatenate", "torch.cond", "torch.conj", "torch.conj_physical", "torch.copysign", "torch.corrcoef", "torch.cos", "torch.cosh", "torch.count_nonzero", "torch.cov", "Stream", "StreamContext", "torch.cpu.current_device", "torch.cpu.current_stream", "torch.cpu.device_count", "torch.cpu.is_available", "torch.cpu.set_device", "torch.cpu.stream", "torch.cpu.synchronize", "torch.cross", "CUDAGraph", "CUDAPluggableAllocator", "Event", "ExternalStream", "torch.cuda.OutOfMemoryError", "Stream", "StreamContext", "torch.cuda.caching_allocator_alloc", "torch.cuda.caching_allocator_delete", "torch.cuda.can_device_access_peer", "torch.cuda.change_current_allocator", "torch.cuda.clock_rate", "torch.cuda.comm.broadcast", "torch.cuda.comm.broadcast_coalesced", "torch.cuda.comm.gather", "torch.cuda.comm.reduce_add", "torch.cuda.comm.scatter", "torch.cuda.current_blas_handle", "torch.cuda.current_device", "torch.cuda.current_stream", "torch.cuda.default_stream", "device", "torch.cuda.device_count", "device_of", "torch.cuda.empty_cache", "torch.cuda.get_allocator_backend", "torch.cuda.get_arch_list", "torch.cuda.get_device_capability", "torch.cuda.get_device_name", "torch.cuda.get_device_properties", "torch.cuda.get_gencode_flags", "torch.cuda.get_rng_state", "torch.cuda.get_rng_state_all", "torch.cuda.get_sync_debug_mode", "graph", "torch.cuda.graph_pool_handle", "torch.cuda.init", 
"torch.cuda.initial_seed", "torch.cuda.ipc_collect", "torch.cuda.is_available", "torch.cuda.is_current_stream_capturing", "torch.cuda.is_initialized", "torch.cuda.jiterator._create_jit_fn", "torch.cuda.jiterator._create_multi_output_jit_fn", "torch.cuda.list_gpu_processes", "torch.cuda.make_graphed_callables", "torch.cuda.manual_seed", "torch.cuda.manual_seed_all", "torch.cuda.max_memory_allocated", "torch.cuda.max_memory_cached", "torch.cuda.max_memory_reserved", "torch.cuda.mem_get_info", "torch.cuda.memory_allocated", "torch.cuda.memory_cached", "torch.cuda.memory_reserved", "torch.cuda.memory_snapshot", "torch.cuda.memory_stats", "torch.cuda.memory_summary", "torch.cuda.memory_usage", "torch.cuda.nvtx.mark", "torch.cuda.nvtx.range", "torch.cuda.nvtx.range_pop", "torch.cuda.nvtx.range_push", "torch.cuda.power_draw", "torch.cuda.reset_max_memory_allocated", "torch.cuda.reset_max_memory_cached", "torch.cuda.reset_peak_memory_stats", "torch.cuda.seed", "torch.cuda.seed_all", "torch.cuda.set_device", "torch.cuda.set_per_process_memory_fraction", "torch.cuda.set_rng_state", "torch.cuda.set_rng_state_all", "torch.cuda.set_stream", "torch.cuda.set_sync_debug_mode", "torch.cuda.stream", "torch.cuda.synchronize", "torch.cuda.temperature", "torch.cuda.utilization", "torch.cummax", "torch.cummin", "torch.cumprod", "torch.cumsum", "torch.cumulative_trapezoid", "torch.deg2rad", "torch.dequantize", "torch.det", "torch.diag", "torch.diag_embed", "torch.diagflat", "torch.diagonal", "torch.diagonal_scatter", "torch.diff", "torch.digamma", "torch.dist", "torch.div", "torch.divide", "torch.dot", "torch.dsplit", "torch.dstack", "torch.einsum", "torch.empty", "torch.empty_like", "torch.empty_strided", "enable_grad", "torch.eq", "torch.equal", "torch.erf", "torch.erfc", "torch.erfinv", "torch.exp", "torch.exp2", "torch.expm1", "torch.eye", "torch.fake_quantize_per_channel_affine", "torch.fake_quantize_per_tensor_affine", "torch.fft.fft", "torch.fft.fft2", "torch.fft.fftfreq", "torch.fft.fftn", "torch.fft.fftshift", "torch.fft.hfft", "torch.fft.hfft2", "torch.fft.hfftn", "torch.fft.ifft", "torch.fft.ifft2", "torch.fft.ifftn", "torch.fft.ifftshift", "torch.fft.ihfft", "torch.fft.ihfft2", "torch.fft.ihfftn", "torch.fft.irfft", "torch.fft.irfft2", "torch.fft.irfftn", "torch.fft.rfft", "torch.fft.rfft2", "torch.fft.rfftfreq", "torch.fft.rfftn", "torch.fix", "torch.flatten", "torch.flip", "torch.fliplr", "torch.flipud", "torch.float_power", "torch.floor", "torch.floor_divide", "torch.fmax", "torch.fmin", "torch.fmod", "torch.frac", "torch.frexp", "torch.from_dlpack", "torch.from_file", "torch.from_numpy", "torch.frombuffer", "torch.full", "torch.full_like", "torch.func.functional_call", "torch.func.functionalize", "torch.func.grad", "torch.func.grad_and_value", "torch.func.hessian", "torch.func.jacfwd", "torch.func.jacrev", "torch.func.jvp", "torch.func.linearize", "torch.func.replace_all_batch_norm_modules_", "torch.func.stack_module_state", "torch.func.vjp", "torch.func.vmap", "CallMethodKey", "ConvertIntKey", "DimConstraints", "DimDynamic", "DivideByKey", "EqualityConstraint", "InnerTensorKey", "PropagateUnbackedSymInts", "RelaxedUnspecConstraint", "ShapeEnv", "ShapeEnvSettings", "StatefulSymbolicContext", "StatelessSymbolicContext", "StrictMinMaxConstraint", "SubclassSymbolicContext", "SymbolicContext", "torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr", "torch.fx.experimental.symbolic_shapes.check_consistent", "torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings", 
"torch.fx.experimental.symbolic_shapes.constrain_range", "torch.fx.experimental.symbolic_shapes.constrain_unify", "torch.fx.experimental.symbolic_shapes.definitely_false", "torch.fx.experimental.symbolic_shapes.definitely_true", "torch.fx.experimental.symbolic_shapes.guard_size_oblivious", "torch.fx.experimental.symbolic_shapes.has_free_symbols", "torch.fx.experimental.symbolic_shapes.hint_int", "torch.fx.experimental.symbolic_shapes.is_concrete_bool", "torch.fx.experimental.symbolic_shapes.is_concrete_int", "torch.fx.experimental.symbolic_shapes.lru_cache", "torch.fx.experimental.symbolic_shapes.parallel_and", "torch.fx.experimental.symbolic_shapes.parallel_or", "torch.fx.experimental.symbolic_shapes.rebind_unbacked", "torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings", "torch.fx.experimental.symbolic_shapes.statically_known_true", "torch.fx.experimental.symbolic_shapes.sym_eq", "torch.gather", "torch.gcd", "torch.ge", "torch.geqrf", "torch.ger", "torch.get_default_device", "torch.get_default_dtype", "torch.get_deterministic_debug_mode", "torch.get_device_module", "torch.get_float32_matmul_precision", "torch.get_num_interop_threads", "torch.get_num_threads", "torch.get_rng_state", "torch.gradient", "torch.greater", "torch.greater_equal", "torch.gt", "torch.hamming_window", "torch.hann_window", "torch.heaviside", "torch.histc", "torch.histogram", "torch.histogramdd", "torch.hsplit", "torch.hspmm", "torch.hstack", "torch.hypot", "torch.i0", "torch.igamma", "torch.igammac", "torch.imag", "torch.index_add", "torch.index_copy", "torch.index_reduce", "torch.index_select", "torch.initial_seed", "torch.inner", "torch.inverse", "torch.is_complex", "torch.is_conj", "torch.is_deterministic_algorithms_warn_only_enabled", "torch.is_floating_point", "torch.is_grad_enabled", "torch.is_inference_mode_enabled", "torch.is_nonzero", "torch.is_storage", "torch.is_tensor", "torch.is_warn_always_enabled", "torch.isclose", "torch.isfinite", "torch.isin", "torch.isinf", "torch.isnan", "torch.isneginf", "torch.isposinf", "torch.isreal", "torch.istft", "Attribute", "ScriptFunction", "ScriptModule", "torch.jit.annotate", "torch.jit.enable_onednn_fusion", "torch.jit.fork", "torch.jit.freeze", "torch.jit.ignore", "torch.jit.interface", "torch.jit.isinstance", "torch.jit.load", "torch.jit.onednn_fusion_enabled", "torch.jit.optimize_for_inference", "torch.jit.save", "torch.jit.script", "torch.jit.script_if_tracing", "torch.jit.set_fusion_strategy", "strict_fusion", "torch.jit.trace", "torch.jit.trace_module", "torch.jit.unused", "torch.jit.wait", "torch.kaiser_window", "torch.kron", "torch.kthvalue", "torch.lcm", "torch.ldexp", "torch.le", "torch.lerp", "torch.less", "torch.less_equal", "torch.lgamma", "torch.linalg.cholesky", "torch.linalg.cholesky_ex", "torch.linalg.cond", "torch.linalg.cross", "torch.linalg.det", "torch.linalg.diagonal", "torch.linalg.eig", "torch.linalg.eigh", "torch.linalg.eigvals", "torch.linalg.eigvalsh", "torch.linalg.householder_product", "torch.linalg.inv", "torch.linalg.inv_ex", "torch.linalg.ldl_factor", "torch.linalg.ldl_factor_ex", "torch.linalg.ldl_solve", "torch.linalg.lstsq", "torch.linalg.lu", "torch.linalg.lu_factor", "torch.linalg.lu_factor_ex", "torch.linalg.lu_solve", "torch.linalg.matmul", "torch.linalg.matrix_exp", "torch.linalg.matrix_norm", "torch.linalg.matrix_power", "torch.linalg.matrix_rank", "torch.linalg.multi_dot", "torch.linalg.norm", "torch.linalg.pinv", "torch.linalg.qr", "torch.linalg.slogdet", "torch.linalg.solve", "torch.linalg.solve_ex", 
"torch.linalg.solve_triangular", "torch.linalg.svd", "torch.linalg.svdvals", "torch.linalg.tensorinv", "torch.linalg.tensorsolve", "torch.linalg.vander", "torch.linalg.vecdot", "torch.linalg.vector_norm", "torch.linspace", "torch.load", "torch.lobpcg", "torch.log", "torch.log10", "torch.log1p", "torch.log2", "torch.logaddexp", "torch.logaddexp2", "torch.logcumsumexp", "torch.logdet", "torch.logical_and", "torch.logical_not", "torch.logical_or", "torch.logical_xor", "torch.logit", "torch.logspace", "torch.logsumexp", "torch.lt", "torch.lu", "torch.lu_solve", "torch.lu_unpack", "torch.manual_seed", "torch.masked_select", "torch.matmul", "torch.matrix_exp", "torch.matrix_power", "torch.max", "torch.maximum", "torch.mean", "torch.median", "torch.meshgrid", "torch.min", "torch.minimum", "torch.mm", "torch.mode", "torch.moveaxis", "torch.movedim", "torch.mps.current_allocated_memory", "torch.mps.device_count", "torch.mps.driver_allocated_memory", "torch.mps.empty_cache", "Event", "torch.mps.get_rng_state", "torch.mps.manual_seed", "torch.mps.profiler.profile", "torch.mps.profiler.start", "torch.mps.profiler.stop", "torch.mps.seed", "torch.mps.set_per_process_memory_fraction", "torch.mps.set_rng_state", "torch.mps.synchronize", "torch.msort", "torch.mtia.DeferredMtiaCallError", "Event", "Stream", "StreamContext", "torch.mtia.current_device", "torch.mtia.current_stream", "torch.mtia.default_stream", "device", "torch.mtia.device_count", "torch.mtia.init", "torch.mtia.is_available", "torch.mtia.is_initialized", "torch.mtia.set_stream", "torch.mtia.stream", "torch.mtia.synchronize", "torch.mul", "torch.multinomial", "torch.multiply", "torch.mv", "torch.mvlgamma", "torch.nan_to_num", "torch.nanmean", "torch.nanmedian", "torch.nanquantile", "torch.nansum", "torch.narrow", "torch.narrow_copy", "torch.ne", "torch.neg", "torch.negative", "torch.nextafter", "AdaptiveAvgPool1d", "AdaptiveAvgPool2d", "AdaptiveAvgPool3d", "AdaptiveLogSoftmaxWithLoss", "AdaptiveMaxPool1d", "AdaptiveMaxPool2d", "AdaptiveMaxPool3d", "AlphaDropout", "AvgPool1d", "AvgPool2d", "AvgPool3d", "BCELoss", "BCEWithLogitsLoss", "BatchNorm1d", "BatchNorm2d", "BatchNorm3d", "Bilinear", "CELU", "CTCLoss", "ChannelShuffle", "CircularPad1d", "CircularPad2d", "CircularPad3d", "ConstantPad1d", "ConstantPad2d", "ConstantPad3d", "Conv1d", "Conv2d", "Conv3d", "ConvTranspose1d", "ConvTranspose2d", "ConvTranspose3d", "CosineEmbeddingLoss", "CosineSimilarity", "CrossEntropyLoss", "DataParallel", "Dropout", "Dropout1d", "Dropout2d", "Dropout3d", "ELU", "Embedding", "EmbeddingBag", "FeatureAlphaDropout", "Flatten", "Fold", "FractionalMaxPool2d", "FractionalMaxPool3d", "GELU", "GLU", "GRU", "GRUCell", "GaussianNLLLoss", "GroupNorm", "Hardshrink", "Hardsigmoid", "Hardswish", "Hardtanh", "HingeEmbeddingLoss", "HuberLoss", "Identity", "InstanceNorm1d", "InstanceNorm2d", "InstanceNorm3d", "KLDivLoss", "L1Loss", "LPPool1d", "LPPool2d", "LPPool3d", "LSTM", "LSTMCell", "LayerNorm", "LazyBatchNorm1d", "LazyBatchNorm2d", "LazyBatchNorm3d", "LazyConv1d", "LazyConv2d", "LazyConv3d", "LazyConvTranspose1d", "LazyConvTranspose2d", "LazyConvTranspose3d", "LazyInstanceNorm1d", "LazyInstanceNorm2d", "LazyInstanceNorm3d", "LazyLinear", "LeakyReLU", "Linear", "LocalResponseNorm", "LogSigmoid", "LogSoftmax", "MSELoss", "MarginRankingLoss", "MaxPool1d", "MaxPool2d", "MaxPool3d", "MaxUnpool1d", "MaxUnpool2d", "MaxUnpool3d", "Mish", "Module", "ModuleDict", "ModuleList", "MultiLabelMarginLoss", "MultiLabelSoftMarginLoss", "MultiMarginLoss", "MultiheadAttention", "NLLLoss", 
"PReLU", "PairwiseDistance", "ParameterDict", "ParameterList", "PixelShuffle", "PixelUnshuffle", "PoissonNLLLoss", "RMSNorm", "RNN", "RNNBase", "RNNCell", "RReLU", "ReLU", "ReLU6", "ReflectionPad1d", "ReflectionPad2d", "ReflectionPad3d", "ReplicationPad1d", "ReplicationPad2d", "ReplicationPad3d", "SELU", "Sequential", "SiLU", "Sigmoid", "SmoothL1Loss", "SoftMarginLoss", "Softmax", "Softmax2d", "Softmin", "Softplus", "Softshrink", "Softsign", "SyncBatchNorm", "Tanh", "Tanhshrink", "Threshold", "Transformer", "TransformerDecoder", "TransformerDecoderLayer", "TransformerEncoder", "TransformerEncoderLayer", "TripletMarginLoss", "TripletMarginWithDistanceLoss", "Unflatten", "Unfold", "Upsample", "UpsamplingBilinear2d", "UpsamplingNearest2d", "ZeroPad1d", "ZeroPad2d", "ZeroPad3d", "SDPBackend", "torch.nn.attention.bias.CausalBias", "CausalVariant", "torch.nn.attention.bias.causal_lower_right", "torch.nn.attention.bias.causal_upper_left", "torch.nn.attention.sdpa_kernel", "torch.nn.functional.adaptive_avg_pool1d", "torch.nn.functional.adaptive_avg_pool2d", "torch.nn.functional.adaptive_avg_pool3d", "torch.nn.functional.adaptive_max_pool1d", "torch.nn.functional.adaptive_max_pool2d", "torch.nn.functional.adaptive_max_pool3d", "torch.nn.functional.affine_grid", "torch.nn.functional.alpha_dropout", "torch.nn.functional.avg_pool1d", "torch.nn.functional.avg_pool2d", "torch.nn.functional.avg_pool3d", "torch.nn.functional.batch_norm", "torch.nn.functional.bilinear", "torch.nn.functional.binary_cross_entropy", "torch.nn.functional.binary_cross_entropy_with_logits", "torch.nn.functional.celu", "torch.nn.functional.conv1d", "torch.nn.functional.conv2d", "torch.nn.functional.conv3d", "torch.nn.functional.conv_transpose1d", "torch.nn.functional.conv_transpose2d", "torch.nn.functional.conv_transpose3d", "torch.nn.functional.cosine_embedding_loss", "torch.nn.functional.cosine_similarity", "torch.nn.functional.cross_entropy", "torch.nn.functional.ctc_loss", "torch.nn.functional.dropout", "torch.nn.functional.dropout1d", "torch.nn.functional.dropout2d", "torch.nn.functional.dropout3d", "torch.nn.functional.elu", "torch.nn.functional.elu_", "torch.nn.functional.embedding", "torch.nn.functional.embedding_bag", "torch.nn.functional.feature_alpha_dropout", "torch.nn.functional.fold", "torch.nn.functional.fractional_max_pool2d", "torch.nn.functional.fractional_max_pool3d", "torch.nn.functional.gaussian_nll_loss", "torch.nn.functional.gelu", "torch.nn.functional.glu", "torch.nn.functional.grid_sample", "torch.nn.functional.group_norm", "torch.nn.functional.gumbel_softmax", "torch.nn.functional.hardshrink", "torch.nn.functional.hardsigmoid", "torch.nn.functional.hardswish", "torch.nn.functional.hardtanh", "torch.nn.functional.hardtanh_", "torch.nn.functional.hinge_embedding_loss", "torch.nn.functional.huber_loss", "torch.nn.functional.instance_norm", "torch.nn.functional.interpolate", "torch.nn.functional.kl_div", "torch.nn.functional.l1_loss", "torch.nn.functional.layer_norm", "torch.nn.functional.leaky_relu", "torch.nn.functional.leaky_relu_", "torch.nn.functional.linear", "torch.nn.functional.local_response_norm", "torch.nn.functional.log_softmax", "torch.nn.functional.logsigmoid", "torch.nn.functional.lp_pool1d", "torch.nn.functional.lp_pool2d", "torch.nn.functional.lp_pool3d", "torch.nn.functional.margin_ranking_loss", "torch.nn.functional.max_pool1d", "torch.nn.functional.max_pool2d", "torch.nn.functional.max_pool3d", "torch.nn.functional.max_unpool1d", "torch.nn.functional.max_unpool2d", 
"torch.nn.functional.max_unpool3d", "torch.nn.functional.mish", "torch.nn.functional.mse_loss", "torch.nn.functional.multi_margin_loss", "torch.nn.functional.multilabel_margin_loss", "torch.nn.functional.multilabel_soft_margin_loss", "torch.nn.functional.nll_loss", "torch.nn.functional.normalize", "torch.nn.functional.one_hot", "torch.nn.functional.pad", "torch.nn.functional.pairwise_distance", "torch.nn.functional.pdist", "torch.nn.functional.pixel_shuffle", "torch.nn.functional.pixel_unshuffle", "torch.nn.functional.poisson_nll_loss", "torch.nn.functional.prelu", "torch.nn.functional.relu", "torch.nn.functional.relu6", "torch.nn.functional.relu_", "torch.nn.functional.rms_norm", "torch.nn.functional.rrelu", "torch.nn.functional.rrelu_", "torch.nn.functional.scaled_dot_product_attention", "torch.nn.functional.selu", "torch.nn.functional.sigmoid", "torch.nn.functional.silu", "torch.nn.functional.smooth_l1_loss", "torch.nn.functional.soft_margin_loss", "torch.nn.functional.softmax", "torch.nn.functional.softmin", "torch.nn.functional.softplus", "torch.nn.functional.softshrink", "torch.nn.functional.softsign", "torch.nn.functional.tanh", "torch.nn.functional.tanhshrink", "torch.nn.functional.threshold", "torch.nn.functional.threshold_", "torch.nn.functional.torch.nn.parallel.data_parallel", "torch.nn.functional.triplet_margin_loss", "torch.nn.functional.triplet_margin_with_distance_loss", "torch.nn.functional.unfold", "torch.nn.functional.upsample", "torch.nn.functional.upsample_bilinear", "torch.nn.functional.upsample_nearest", "LazyModuleMixin", "torch.nn.modules.module.register_module_backward_hook", "torch.nn.modules.module.register_module_buffer_registration_hook", "torch.nn.modules.module.register_module_forward_hook", "torch.nn.modules.module.register_module_forward_pre_hook", "torch.nn.modules.module.register_module_full_backward_hook", "torch.nn.modules.module.register_module_full_backward_pre_hook", "torch.nn.modules.module.register_module_module_registration_hook", "torch.nn.modules.module.register_module_parameter_registration_hook", "RMSNorm", "DistributedDataParallel", "Parameter", "UninitializedBuffer", "UninitializedParameter", "torch.nn.utils.clip_grad_norm", "torch.nn.utils.clip_grad_norm_", "torch.nn.utils.clip_grad_value_", "torch.nn.utils.convert_conv2d_weight_memory_format", "torch.nn.utils.convert_conv3d_weight_memory_format", "torch.nn.utils.fuse_conv_bn_eval", "torch.nn.utils.fuse_conv_bn_weights", "torch.nn.utils.fuse_linear_bn_eval", "torch.nn.utils.fuse_linear_bn_weights", "torch.nn.utils.parameters_to_vector", "torch.nn.utils.parametrizations.orthogonal", "torch.nn.utils.parametrizations.spectral_norm", "torch.nn.utils.parametrizations.weight_norm", "ParametrizationList", "torch.nn.utils.parametrize.cached", "torch.nn.utils.parametrize.is_parametrized", "torch.nn.utils.parametrize.register_parametrization", "torch.nn.utils.parametrize.remove_parametrizations", "BasePruningMethod", "CustomFromMask", "Identity", "L1Unstructured", "LnStructured", "PruningContainer", "RandomStructured", "RandomUnstructured", "torch.nn.utils.prune.custom_from_mask", "torch.nn.utils.prune.global_unstructured", "torch.nn.utils.prune.identity", "torch.nn.utils.prune.is_pruned", "torch.nn.utils.prune.l1_unstructured", "torch.nn.utils.prune.ln_structured", "torch.nn.utils.prune.random_structured", "torch.nn.utils.prune.random_unstructured", "torch.nn.utils.prune.remove", "torch.nn.utils.remove_spectral_norm", "torch.nn.utils.remove_weight_norm", "PackedSequence", 
"torch.nn.utils.rnn.pack_padded_sequence", "torch.nn.utils.rnn.pack_sequence", "torch.nn.utils.rnn.pad_packed_sequence", "torch.nn.utils.rnn.pad_sequence", "torch.nn.utils.rnn.unpack_sequence", "torch.nn.utils.rnn.unpad_sequence", "torch.nn.utils.skip_init", "torch.nn.utils.spectral_norm", "torch.nn.utils.stateless.functional_call", "torch.nn.utils.vector_to_parameters", "torch.nn.utils.weight_norm", "no_grad", "torch.nonzero", "torch.norm", "torch.normal", "torch.not_equal", "torch.numel", "torch.ones", "torch.ones_like", "JitScalarType", "GraphInfo", "VerificationOptions", "ASGD", "Adadelta", "Adagrad", "Adam", "AdamW", "Adamax", "LBFGS", "NAdam", "torch.optim.Optimizer.add_param_group", "torch.optim.Optimizer.load_state_dict", "torch.optim.Optimizer.state_dict", "torch.optim.Optimizer.step", "torch.optim.Optimizer.zero_grad", "RAdam", "RMSprop", "Rprop", "SGD", "SparseAdam", "ChainedScheduler", "ConstantLR", "CosineAnnealingLR", "CosineAnnealingWarmRestarts", "CyclicLR", "ExponentialLR", "LambdaLR", "LinearLR", "MultiStepLR", "MultiplicativeLR", "OneCycleLR", "PolynomialLR", "ReduceLROnPlateau", "SequentialLR", "StepLR", "torch.orgqr", "torch.ormqr", "torch.outer", "torch.pca_lowrank", "torch.permute", "torch.pinverse", "torch.poisson", "torch.polar", "torch.polygamma", "torch.positive", "torch.pow", "torch.prod", "torch.promote_types", "torch.qr", "torch.quantile", "torch.quantize_per_channel", "torch.quantize_per_tensor", "torch.quantized_batch_norm", "torch.quantized_max_pool1d", "torch.quantized_max_pool2d", "SobolEngine", "torch.rad2deg", "torch.rand", "torch.rand_like", "torch.randint", "torch.randint_like", "torch.randn", "torch.randn_like", "torch.randperm", "torch.range", "torch.ravel", "torch.real", "torch.reciprocal", "torch.remainder", "torch.renorm", "torch.repeat_interleave", "torch.reshape", "torch.resolve_conj", "torch.resolve_neg", "torch.result_type", "torch.roll", "torch.rot90", "torch.round", "torch.row_stack", "torch.rsqrt", "torch.save", "torch.scatter", "torch.scatter_add", "torch.scatter_reduce", "torch.searchsorted", "torch.seed", "torch.select", "torch.select_scatter", "torch.set_default_device", "torch.set_default_dtype", "torch.set_default_tensor_type", "torch.set_deterministic_debug_mode", "torch.set_float32_matmul_precision", "torch.set_flush_denormal", "torch.set_num_interop_threads", "torch.set_num_threads", "torch.set_printoptions", "torch.set_rng_state", "torch.set_warn_always", "torch.sgn", "torch.sigmoid", "torch.sign", "torch.signal.windows.bartlett", "torch.signal.windows.blackman", "torch.signal.windows.cosine", "torch.signal.windows.exponential", "torch.signal.windows.gaussian", "torch.signal.windows.general_cosine", "torch.signal.windows.general_hamming", "torch.signal.windows.hamming", "torch.signal.windows.hann", "torch.signal.windows.kaiser", "torch.signal.windows.nuttall", "torch.signbit", "torch.sin", "torch.sinc", "torch.sinh", "torch.slice_scatter", "torch.slogdet", "torch.smm", "torch.softmax", "torch.sort", "torch.sparse.addmm", "torch.sparse.as_sparse_gradcheck", "check_sparse_tensor_invariants", "torch.sparse.log_softmax", "torch.sparse.mm", "torch.sparse.sampled_addmm", "torch.sparse.softmax", "torch.sparse.spdiags", "torch.sparse.sum", "torch.sparse_bsc_tensor", "torch.sparse_bsr_tensor", "torch.sparse_compressed_tensor", "torch.sparse_coo_tensor", "torch.sparse_csc_tensor", "torch.sparse_csr_tensor", "torch.split", "torch.sqrt", "torch.square", "torch.squeeze", "torch.sspaddmm", "torch.stack", "torch.std", "torch.std_mean", 
"torch.stft", "torch.sub", "torch.subtract", "torch.sum", "torch.svd", "torch.svd_lowrank", "torch.swapaxes", "torch.swapdims", "torch.sym_float", "torch.sym_int", "torch.sym_ite", "torch.sym_max", "torch.sym_min", "torch.sym_not", "torch.t", "torch.take", "torch.take_along_dim", "torch.tan", "torch.tanh", "torch.tensor", "torch.tensor_split", "torch.tensordot", "torch.tile", "torch.topk", "torch.trace", "torch.transpose", "torch.trapezoid", "torch.trapz", "torch.triangular_solve", "torch.tril", "torch.tril_indices", "torch.triu", "torch.triu_indices", "torch.true_divide", "torch.trunc", "torch.unbind", "torch.unflatten", "torch.unique", "torch.unique_consecutive", "torch.unravel_index", "torch.unsqueeze", "torch.use_deterministic_algorithms", "torch.utils.generate_methods_for_privateuse1_backend", "torch.utils.get_cpp_backtrace", "torch.utils.rename_privateuse1_backend", "torch.utils.set_module", "torch.utils.swap_tensors", "torch.vander", "torch.var", "torch.var_mean", "torch.vdot", "torch.view_as_complex", "torch.view_as_real", "torch.vmap", "torch.vsplit", "torch.vstack", "torch.where", "torch.xlogy", "Event", "Stream", "StreamContext", "torch.xpu.current_device", "torch.xpu.current_stream", "device", "torch.xpu.device_count", "device_of", "torch.xpu.empty_cache", "torch.xpu.get_device_capability", "torch.xpu.get_device_name", "torch.xpu.get_device_properties", "torch.xpu.get_rng_state", "torch.xpu.get_rng_state_all", "torch.xpu.init", "torch.xpu.initial_seed", "torch.xpu.is_available", "torch.xpu.is_initialized", "torch.xpu.manual_seed", "torch.xpu.manual_seed_all", "torch.xpu.seed", "torch.xpu.seed_all", "torch.xpu.set_device", "torch.xpu.set_rng_state", "torch.xpu.set_rng_state_all", "torch.xpu.set_stream", "torch.xpu.stream", "torch.xpu.synchronize", "torch.zeros", "torch.zeros_like", "torch.hub", "PyTorch documentation", "TorchScript", "TorchScript Builtins", "TorchScript Language Reference", "TorchScript Language Reference", "Python Language Reference Coverage", "TorchScript Unsupported PyTorch Constructs", "JIT Utils - torch.utils.jit", "torch.library", "torch.linalg", "torch._logging", "torch.masked", "Meta device", "Miscellaneous Environment Variables", "torch.utils.mobile_optimizer", "torch.utils.model_zoo", "torch.utils.module_tracker", "torch.monitor", "torch.mps", "torch.mtia", "Multiprocessing package - torch.multiprocessing", "Named Tensors operator coverage", "Named Tensors", "torch.nested", "torch.nn", "torch.nn.attention", "torch.nn.attention.bias", "torch.nn.functional", "torch.nn.init", "Automatic Mixed Precision examples", "Autograd mechanics", "Broadcasting semantics", "CPU threading and TorchScript inference", "CUDA semantics", "PyTorch Custom Operators Landing Page", "Distributed Data Parallel", "Extending PyTorch", "Extending torch.func with autograd.Function", "Frequently Asked Questions", "FSDP Notes", "Pytorch 2.4: Getting Started on Intel GPU", "Gradcheck mechanics", "HIP (ROCm) semantics", "Features for large-scale deployments", "Modules", "MPS backend", "Multiprocessing best practices", "Numerical accuracy", "Reproducibility", "Serialization semantics", "Windows FAQ", "torch.onnx", "TorchDynamo-based ONNX Exporter", "ONNX Backend for TorchDynamo", "TorchScript-based ONNX Exporter", "ONNX supported TorchScript operators", "torch.optim", "torch.package", "torch.profiler", "Quantization", "Quantization Accuracy Debugging", "Quantization Backend Configuration", "Quantization API Reference", "torch.random", "Distributed RPC Framework", "Distributed Autograd 
Design", "Remote Reference Protocol", "torch.signal", "torch.Size", "torch.sparse", "torch.special", "torch.Storage", "Tensor Attributes", "Tensor Views", "torch.utils.tensorboard", "torch.Tensor", "torch.testing", "Threading Environment Variables", "torch", "torch.ao.ns._numeric_suite", "torch.ao.ns._numeric_suite_fx", "torch.compiler", "AOTInductor: Ahead-Of-Time Compilation for Torch.Export-ed Models", "torch.compiler API reference", "Best Practices for Backends", "CUDAGraph Trees", "Custom Backends", "Dynamic shapes", "Dynamo Deep-Dive", "Dynamo Overview", "Fake tensor", "Frequently Asked Questions", "TorchDynamo APIs for fine-grained tracing", "Getting Started", "TorchInductor GPU Profiling", "IRs", "PyTorch 2.0 NNModule Support", "PyTorch 2.0 Performance Dashboard", "Profiling to understand torch.compile performance", "Writing Graph Transformations on ATen IR", "PyTorch 2.0 Troubleshooting", "torch.overrides", "Understanding CUDA Memory Usage", "Torch Environment Variables", "PYTORCH ProcessGroupNCCL Environment Variables", "Type Info", "torch.utils", "torch.xpu"], "terms": {"provid": [0, 1, 3, 7, 8, 9, 11, 14, 15, 17, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 45, 47, 48, 50, 51, 52, 53, 55, 59, 63, 64, 65, 66, 76, 77, 152, 488, 605, 683, 692, 738, 741, 742, 743, 748, 749, 751, 759, 760, 763, 767, 791, 792, 802, 803, 817, 827, 845, 862, 897, 913, 915, 916, 917, 932, 943, 955, 960, 963, 982, 998, 1019, 1065, 1109, 1129, 1166, 1178, 1188, 1198, 1227, 1235, 1236, 1270, 1273, 1280, 1281, 1285, 1287, 1289, 1304, 1321, 1346, 1378, 1457, 1458, 1459, 1462, 1478, 1479, 1487, 1492, 1497, 1498, 1523, 1524, 1525, 1527, 1533, 1534, 1537, 1543, 1545, 1556, 1571, 1572, 1573, 1574, 1575, 1605, 1606, 1685, 1704, 1717, 1724, 1725, 1739, 1759, 1767, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1809, 1812, 1817, 1853, 1863, 1910, 1911, 1912, 1913, 1914, 1915, 1924, 1954, 1956, 1966, 1977, 2012, 2014, 2016, 2017, 2021, 2023, 2024, 2027, 2030, 2033, 2034, 2035, 2036, 2046, 2048, 2049, 2050, 2053, 2054, 2057, 2058, 2060, 2065, 2067, 2069, 2070, 2072, 2073, 2075, 2077, 2078, 2079, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2091, 2095, 2097, 2098, 2099, 2100, 2101, 2102, 2104, 2106, 2107, 2111, 2112, 2113, 2115, 2118], "conveni": [0, 3, 7, 14, 28, 44, 47, 48, 49, 55, 61, 1178, 1188, 1331, 1707, 1871, 1966, 1968, 1977, 2012, 2016, 2025, 2042, 2043, 2046, 2049, 2056, 2057, 2070, 2076, 2098, 2100, 2101, 2102, 2107], "method": [0, 3, 7, 9, 14, 15, 18, 23, 28, 29, 30, 32, 33, 35, 36, 37, 39, 45, 47, 52, 55, 62, 63, 64, 66, 68, 74, 75, 82, 90, 223, 224, 325, 417, 488, 489, 490, 499, 500, 501, 522, 616, 796, 800, 802, 818, 819, 820, 841, 845, 858, 878, 879, 880, 896, 897, 904, 905, 906, 907, 908, 909, 910, 928, 929, 943, 1045, 1160, 1179, 1186, 1227, 1271, 1273, 1274, 1276, 1277, 1278, 1284, 1285, 1289, 1290, 1291, 1319, 1337, 1338, 1345, 1346, 1420, 1423, 1431, 1439, 1441, 1442, 1468, 1469, 1484, 1527, 1528, 1529, 1533, 1537, 1538, 1544, 1546, 1556, 1633, 1707, 1717, 1721, 1732, 1734, 1737, 1739, 1741, 1744, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1755, 1761, 1766, 1782, 1783, 1784, 1786, 1796, 1828, 1909, 1924, 1928, 1929, 1966, 2012, 2013, 2014, 2017, 2018, 2021, 2025, 2027, 2030, 2033, 2034, 2035, 2041, 2042, 2043, 2046, 2049, 2050, 2051, 2054, 2057, 2059, 2062, 2065, 2069, 2070, 2072, 2074, 2077, 2079, 2084, 2085, 2087, 2088, 2091, 2094, 2099, 2101, 2102, 2104, 2111, 2113, 2114], "where": [0, 1, 2, 3, 4, 7, 8, 9, 11, 14, 15, 23, 24, 28, 32, 33, 34, 35, 37, 
40, 45, 47, 48, 50, 51, 52, 53, 55, 57, 58, 60, 64, 88, 155, 225, 256, 402, 404, 488, 683, 696, 698, 699, 702, 738, 761, 763, 784, 823, 824, 836, 881, 899, 903, 912, 914, 935, 945, 954, 960, 967, 968, 969, 976, 994, 998, 1012, 1014, 1023, 1025, 1088, 1089, 1092, 1096, 1114, 1125, 1126, 1128, 1131, 1132, 1134, 1135, 1137, 1138, 1139, 1141, 1142, 1144, 1146, 1171, 1172, 1173, 1177, 1178, 1186, 1187, 1216, 1230, 1231, 1232, 1233, 1262, 1263, 1265, 1266, 1269, 1270, 1271, 1274, 1287, 1293, 1294, 1295, 1298, 1303, 1304, 1305, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1341, 1342, 1343, 1345, 1346, 1351, 1354, 1361, 1362, 1363, 1364, 1368, 1371, 1373, 1374, 1375, 1376, 1379, 1413, 1418, 1419, 1421, 1424, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1468, 1469, 1470, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1539, 1540, 1541, 1542, 1543, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1557, 1558, 1559, 1560, 1561, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1604, 1606, 1616, 1617, 1624, 1629, 1631, 1632, 1650, 1651, 1669, 1671, 1672, 1675, 1676, 1678, 1685, 1688, 1716, 1717, 1719, 1720, 1724, 1725, 1731, 1759, 1760, 1761, 1762, 1771, 1778, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1801, 1802, 1803, 1815, 1825, 1827, 1828, 1848, 1849, 1884, 1890, 1891, 1896, 1900, 1905, 1906, 1907, 1910, 1911, 1912, 1913, 1914, 1915, 1922, 1923, 1924, 1927, 1928, 1929, 1943, 1952, 1953, 1954, 1955, 1956, 1961, 1962, 1972, 1973, 1974, 1975, 1976, 1977, 1983, 2014, 2015, 2017, 2018, 2023, 2024, 2025, 2028, 2033, 2036, 2041, 2042, 2043, 2044, 2046, 2049, 2050, 2051, 2052, 2054, 2057, 2059, 2060, 2061, 2062, 2067, 2068, 2069, 2070, 2071, 2072, 2075, 2077, 2079, 2082, 2083, 2085, 2087, 2093, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2108, 2111, 2113, 2114, 2115], "some": [0, 1, 3, 7, 8, 9, 14, 17, 19, 22, 23, 24, 28, 30, 33, 34, 35, 37, 40, 48, 50, 52, 53, 55, 58, 60, 61, 63, 64, 87, 89, 479, 488, 498, 569, 692, 751, 919, 939, 976, 1009, 1043, 1061, 1065, 1068, 1109, 1130, 1132, 1140, 1141, 1142, 1157, 1167, 1178, 1184, 1188, 1200, 1201, 1270, 1271, 1273, 1274, 1284, 1287, 1289, 1319, 1337, 1343, 1368, 1375, 1378, 1431, 1435, 1439, 1440, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1464, 1486, 1489, 1490, 1491, 1492, 1493, 1497, 1518, 1519, 1527, 1530, 1531, 1532, 1534, 1541, 1543, 1544, 1559, 1560, 1576, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1617, 1618, 1645, 1650, 1669, 1672, 1677, 1685, 1707, 1717, 1718, 1719, 1720, 1731, 1765, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1803, 1809, 1827, 1847, 1871, 1877, 1891, 1928, 1968, 1977, 2012, 2014, 2015, 2016, 2017, 2018, 2021, 2022, 2023, 2024, 2025, 2026, 2027, 2033, 2034, 2035, 2042, 2043, 2046, 2049, 2050, 2051, 2052, 2054, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2067, 2069, 2070, 2071, 2072, 2073, 2075, 2077, 2078, 2079, 2082, 2087, 
2094, 2100, 2101, 2102, 2103, 2105, 2106, 2109, 2110, 2111, 2112, 2113], "oper": [0, 2, 4, 5, 7, 8, 11, 12, 15, 17, 18, 23, 27, 30, 33, 34, 35, 36, 44, 45, 47, 48, 53, 56, 61, 63, 64, 65, 66, 81, 82, 84, 85, 86, 87, 88, 256, 315, 323, 337, 354, 404, 447, 448, 449, 450, 451, 488, 490, 495, 498, 501, 515, 517, 519, 591, 683, 689, 692, 744, 745, 746, 750, 751, 760, 761, 771, 772, 782, 788, 795, 796, 798, 799, 802, 826, 829, 863, 865, 866, 869, 893, 895, 904, 905, 907, 909, 910, 919, 930, 944, 945, 947, 949, 952, 954, 956, 958, 963, 966, 973, 976, 978, 986, 990, 992, 1012, 1014, 1042, 1051, 1083, 1088, 1089, 1090, 1091, 1104, 1109, 1110, 1111, 1112, 1122, 1127, 1145, 1157, 1160, 1163, 1164, 1165, 1166, 1167, 1168, 1170, 1171, 1172, 1173, 1178, 1181, 1221, 1225, 1231, 1232, 1273, 1284, 1285, 1289, 1290, 1293, 1303, 1316, 1326, 1329, 1330, 1343, 1344, 1353, 1360, 1368, 1373, 1378, 1389, 1390, 1418, 1421, 1435, 1440, 1445, 1446, 1447, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1473, 1474, 1475, 1481, 1483, 1484, 1485, 1486, 1488, 1489, 1490, 1491, 1493, 1499, 1513, 1518, 1523, 1524, 1525, 1527, 1540, 1542, 1546, 1547, 1548, 1555, 1570, 1571, 1573, 1575, 1579, 1601, 1602, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1618, 1619, 1620, 1621, 1625, 1626, 1628, 1629, 1633, 1637, 1644, 1650, 1652, 1659, 1660, 1670, 1672, 1676, 1685, 1691, 1692, 1703, 1704, 1705, 1706, 1716, 1717, 1719, 1720, 1761, 1767, 1772, 1776, 1777, 1795, 1801, 1811, 1815, 1824, 1825, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1847, 1853, 1870, 1871, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1902, 1904, 1905, 1906, 1907, 1910, 1911, 1912, 1913, 1914, 1915, 1919, 1927, 1943, 1961, 1965, 1966, 1968, 1977, 1980, 2010, 2011, 2013, 2014, 2018, 2021, 2025, 2042, 2044, 2045, 2046, 2048, 2050, 2051, 2057, 2058, 2059, 2060, 2061, 2064, 2065, 2069, 2071, 2073, 2075, 2076, 2077, 2078, 2081, 2083, 2085, 2086, 2089, 2098, 2100, 2101, 2102, 2104, 2105, 2106, 2107, 2108, 2112, 2113], "us": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 17, 19, 20, 21, 23, 27, 29, 30, 32, 34, 35, 36, 37, 39, 40, 44, 45, 46, 47, 48, 50, 52, 53, 55, 56, 58, 59, 60, 61, 62, 63, 65, 66, 74, 75, 76, 77, 84, 85, 86, 88, 90, 99, 121, 152, 156, 175, 193, 210, 321, 323, 337, 344, 345, 408, 417, 437, 450, 460, 488, 489, 490, 498, 501, 515, 519, 522, 546, 559, 585, 586, 587, 589, 590, 619, 683, 689, 692, 715, 716, 717, 718, 719, 720, 723, 732, 733, 734, 735, 736, 738, 751, 760, 763, 771, 772, 775, 776, 777, 782, 784, 788, 789, 790, 793, 796, 797, 798, 799, 800, 802, 805, 810, 818, 819, 820, 822, 823, 824, 825, 826, 827, 828, 829, 830, 834, 835, 836, 837, 840, 858, 862, 863, 865, 866, 869, 882, 883, 884, 893, 894, 895, 897, 899, 900, 901, 902, 903, 904, 906, 907, 909, 910, 912, 913, 914, 915, 918, 919, 920, 921, 923, 924, 928, 929, 932, 936, 939, 944, 945, 946, 954, 956, 957, 964, 966, 976, 978, 979, 984, 986, 990, 991, 994, 996, 1009, 1011, 1012, 1014, 1016, 1017, 1019, 1022, 1032, 1033, 1036, 1037, 1043, 1047, 1051, 1053, 1054, 1055, 1057, 1059, 1060, 1065, 1066, 1076, 1078, 1079, 1085, 1090, 1091, 1092, 1101, 1109, 1110, 1111, 1112, 1122, 1123, 1124, 1127, 1129, 1137, 1139, 1143, 1145, 1152, 1154, 1157, 1161, 1164, 1166, 1167, 1168, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1181, 1186, 1187, 1188, 1189, 1194, 1197, 1198, 1201, 1202, 1204, 1213, 1217, 1218, 1224, 1225, 1227, 1231, 1232, 1233, 1234, 1248, 1260, 1270, 1271, 1273, 1274, 1277, 1278, 1279, 1280, 1281, 1283, 1284, 1285, 1286, 1287, 1289, 
1290, 1291, 1293, 1295, 1297, 1305, 1309, 1310, 1312, 1313, 1314, 1316, 1317, 1318, 1319, 1320, 1321, 1326, 1327, 1328, 1330, 1331, 1337, 1338, 1339, 1343, 1344, 1345, 1346, 1351, 1354, 1360, 1363, 1364, 1367, 1368, 1373, 1374, 1375, 1378, 1385, 1386, 1413, 1418, 1420, 1421, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1465, 1466, 1467, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1478, 1479, 1480, 1481, 1486, 1487, 1489, 1490, 1491, 1492, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1513, 1514, 1515, 1520, 1521, 1522, 1523, 1524, 1527, 1529, 1533, 1534, 1535, 1536, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1559, 1561, 1564, 1567, 1575, 1576, 1577, 1578, 1580, 1583, 1584, 1585, 1586, 1587, 1588, 1591, 1598, 1600, 1601, 1602, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1628, 1629, 1633, 1635, 1642, 1644, 1645, 1652, 1658, 1659, 1660, 1670, 1672, 1678, 1685, 1689, 1691, 1692, 1700, 1702, 1704, 1705, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1718, 1719, 1720, 1722, 1723, 1724, 1725, 1731, 1732, 1733, 1734, 1735, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1759, 1760, 1761, 1765, 1766, 1767, 1769, 1770, 1772, 1773, 1776, 1778, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1815, 1825, 1827, 1828, 1829, 1832, 1833, 1834, 1836, 1838, 1840, 1842, 1843, 1849, 1856, 1859, 1864, 1865, 1867, 1868, 1869, 1871, 1873, 1874, 1876, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1895, 1900, 1903, 1904, 1906, 1907, 1910, 1911, 1912, 1913, 1914, 1915, 1924, 1927, 1928, 1929, 1943, 1947, 1950, 1954, 1956, 1961, 1964, 1965, 1966, 1968, 1974, 1977, 1982, 1983, 1989, 1990, 1991, 1992, 2000, 2002, 2009, 2010, 2012, 2013, 2014, 2015, 2017, 2018, 2019, 2021, 2024, 2025, 2026, 2027, 2028, 2029, 2030, 2031, 2033, 2034, 2035, 2036, 2037, 2040, 2041, 2042, 2045, 2048, 2050, 2051, 2052, 2053, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2064, 2071, 2073, 2075, 2076, 2077, 2078, 2079, 2083, 2084, 2085, 2087, 2088, 2089, 2090, 2091, 2092, 2093, 2094, 2095, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2107, 2108, 2109, 2110, 2112, 2114, 2117, 2120], "float": [0, 1, 3, 11, 19, 23, 24, 27, 28, 32, 33, 35, 37, 41, 50, 52, 53, 55, 62, 64, 155, 156, 221, 315, 317, 319, 323, 335, 402, 477, 483, 501, 688, 690, 695, 697, 701, 735, 738, 741, 742, 743, 747, 748, 749, 751, 758, 759, 764, 765, 766, 767, 768, 773, 774, 775, 776, 777, 778, 780, 782, 783, 784, 788, 795, 796, 797, 802, 820, 822, 833, 841, 842, 856, 861, 862, 865, 866, 869, 884, 923, 924, 945, 946, 954, 961, 964, 968, 969, 978, 987, 993, 994, 998, 1008, 1079, 1092, 1103, 1104, 1110, 1111, 1112, 1114, 1127, 1145, 1154, 1155, 1156, 1157, 1159, 1186, 1188, 1216, 1220, 1230, 1231, 1232, 1235, 1236, 1255, 1262, 1263, 1265, 1266, 1267, 1268, 1271, 1273, 1289, 1290, 1293, 1297, 1299, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1346, 1351, 1360, 1362, 1364, 1373, 1393, 1412, 1413, 1417, 1419, 1420, 1421, 1424, 1427, 1431, 1435, 1441, 1442, 1443, 
1445, 1448, 1449, 1460, 1461, 1462, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1474, 1475, 1480, 1481, 1482, 1485, 1486, 1487, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1513, 1515, 1519, 1527, 1532, 1533, 1535, 1536, 1541, 1542, 1546, 1549, 1550, 1551, 1552, 1553, 1559, 1564, 1565, 1567, 1570, 1571, 1573, 1575, 1576, 1577, 1580, 1581, 1582, 1615, 1616, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1630, 1635, 1644, 1670, 1672, 1677, 1685, 1704, 1716, 1717, 1722, 1723, 1727, 1729, 1732, 1742, 1743, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1761, 1762, 1766, 1772, 1773, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1815, 1821, 1824, 1828, 1829, 1830, 1831, 1843, 1847, 1848, 1868, 1869, 1872, 1875, 1884, 1885, 1887, 1888, 1890, 1925, 1928, 1932, 1935, 1950, 1952, 1965, 2014, 2015, 2016, 2017, 2018, 2021, 2024, 2030, 2033, 2034, 2036, 2037, 2041, 2042, 2046, 2049, 2051, 2057, 2060, 2065, 2067, 2068, 2070, 2071, 2072, 2073, 2077, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2091, 2092, 2100, 2104, 2106, 2108, 2112, 2118], "datatyp": [0, 19, 64, 884, 1270, 1273, 1527, 1719, 1720, 1871, 2060, 2067, 2082], "other": [0, 1, 2, 3, 4, 5, 7, 8, 9, 14, 15, 18, 19, 23, 24, 29, 30, 32, 33, 35, 36, 37, 40, 44, 45, 47, 48, 52, 53, 55, 56, 58, 60, 61, 63, 64, 99, 100, 115, 131, 132, 135, 147, 148, 151, 152, 161, 162, 167, 168, 198, 199, 200, 209, 236, 241, 245, 246, 247, 257, 276, 277, 284, 285, 286, 287, 293, 294, 295, 296, 297, 298, 305, 306, 309, 310, 311, 312, 315, 317, 323, 326, 346, 354, 357, 358, 359, 360, 361, 362, 365, 366, 367, 368, 380, 381, 396, 397, 410, 414, 417, 440, 441, 452, 453, 457, 458, 489, 490, 500, 522, 563, 564, 565, 566, 582, 585, 618, 619, 620, 623, 624, 683, 688, 697, 741, 742, 743, 744, 745, 746, 763, 817, 829, 865, 869, 875, 882, 883, 888, 894, 897, 899, 900, 901, 909, 918, 919, 920, 921, 923, 924, 928, 929, 948, 949, 951, 952, 953, 955, 960, 976, 983, 993, 1008, 1009, 1012, 1014, 1016, 1023, 1033, 1043, 1054, 1097, 1103, 1104, 1105, 1113, 1114, 1115, 1129, 1136, 1154, 1155, 1156, 1157, 1160, 1167, 1168, 1169, 1170, 1173, 1177, 1187, 1188, 1190, 1214, 1215, 1216, 1228, 1229, 1230, 1239, 1240, 1242, 1243, 1248, 1250, 1262, 1271, 1273, 1277, 1285, 1294, 1296, 1297, 1298, 1300, 1301, 1306, 1310, 1324, 1326, 1329, 1330, 1343, 1351, 1352, 1353, 1355, 1357, 1358, 1361, 1362, 1368, 1371, 1372, 1376, 1377, 1381, 1385, 1412, 1414, 1424, 1427, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1463, 1473, 1478, 1480, 1492, 1527, 1528, 1537, 1556, 1571, 1575, 1579, 1588, 1608, 1609, 1610, 1635, 1644, 1685, 1704, 1707, 1717, 1722, 1723, 1724, 1725, 1748, 1770, 1774, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1800, 1801, 1806, 1807, 1813, 1815, 1843, 1847, 1863, 1868, 1871, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1910, 1911, 1912, 1913, 1914, 1915, 1919, 1925, 1926, 1943, 1949, 1953, 1955, 1961, 1974, 1975, 1980, 1981, 1983, 1990, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2021, 2027, 2029, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2057, 2058, 2059, 2060, 2062, 2065, 2067, 2069, 2072, 2073, 2077, 2078, 2079, 2083, 2084, 2087, 2088, 2089, 2095, 2098, 2100, 2101, 2102, 2104, 2105, 2106, 2108, 2111, 2112, 2113, 2114], "lower": [0, 1, 8, 12, 24, 28, 35, 52, 53, 796, 798, 863, 955, 960, 967, 968, 969, 971, 1123, 1124, 1181, 1188, 
1192, 1234, 1235, 1303, 1310, 1312, 1320, 1321, 1328, 1331, 1336, 1374, 1413, 1420, 1431, 1546, 1587, 1588, 1589, 1608, 1609, 1610, 1683, 1684, 1685, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1803, 1809, 1811, 1826, 1828, 1848, 1863, 1871, 1908, 1952, 1953, 1954, 2014, 2015, 2017, 2025, 2037, 2041, 2043, 2057, 2062, 2068, 2072, 2083, 2089, 2108, 2113], "point": [0, 1, 7, 8, 9, 11, 23, 24, 27, 29, 30, 32, 33, 37, 44, 47, 48, 53, 55, 64, 90, 155, 156, 323, 335, 341, 483, 488, 501, 701, 741, 742, 743, 744, 745, 746, 747, 752, 753, 754, 755, 756, 757, 758, 759, 762, 764, 765, 766, 767, 768, 778, 780, 782, 783, 784, 788, 798, 802, 822, 823, 824, 825, 828, 833, 856, 865, 866, 869, 884, 913, 915, 916, 917, 923, 924, 945, 946, 954, 976, 993, 994, 998, 1057, 1059, 1068, 1073, 1074, 1110, 1111, 1112, 1155, 1156, 1157, 1220, 1227, 1231, 1232, 1236, 1255, 1270, 1273, 1289, 1290, 1297, 1299, 1320, 1321, 1344, 1346, 1351, 1360, 1373, 1420, 1427, 1436, 1437, 1438, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1473, 1520, 1521, 1522, 1527, 1579, 1580, 1598, 1624, 1633, 1644, 1685, 1704, 1717, 1724, 1725, 1772, 1781, 1812, 1828, 1829, 1830, 1834, 1843, 1856, 1868, 1869, 1875, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1965, 1966, 2013, 2014, 2016, 2017, 2018, 2021, 2030, 2037, 2041, 2042, 2043, 2046, 2048, 2052, 2053, 2054, 2057, 2060, 2071, 2072, 2073, 2075, 2077, 2078, 2082, 2085, 2087, 2088, 2089, 2092, 2099, 2101, 2102, 2104, 2106, 2109, 2112, 2115, 2118], "lower_precision_fp": 0, "half": [0, 1, 10, 24, 35, 619, 987, 1123, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1273, 1336, 1342, 1454, 1455, 1456, 1457, 1458, 1459, 1474, 1475, 1477, 1527, 1628, 1629, 1632, 1724, 1725, 1778, 1803, 1856, 1924, 2034, 2057, 2060, 2065, 2082, 2084, 2085, 2088], "like": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 19, 23, 24, 28, 30, 33, 34, 35, 45, 47, 48, 52, 53, 55, 56, 57, 58, 59, 60, 61, 64, 66, 68, 338, 488, 591, 619, 763, 796, 845, 864, 882, 904, 907, 909, 923, 924, 943, 945, 954, 986, 990, 1051, 1052, 1061, 1104, 1109, 1110, 1130, 1152, 1155, 1156, 1163, 1167, 1170, 1171, 1172, 1178, 1190, 1198, 1201, 1213, 1231, 1232, 1271, 1273, 1274, 1281, 1284, 1289, 1293, 1320, 1321, 1332, 1345, 1431, 1439, 1470, 1473, 1489, 1490, 1491, 1527, 1528, 1529, 1537, 1538, 1556, 1579, 1586, 1627, 1645, 1703, 1707, 1717, 1718, 1719, 1720, 1732, 1758, 1776, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1821, 1836, 1840, 1859, 1867, 1868, 1871, 1928, 1940, 1966, 1977, 2010, 2012, 2013, 2014, 2016, 2017, 2021, 2024, 2025, 2033, 2034, 2035, 2042, 2043, 2046, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2067, 2069, 2071, 2072, 2075, 2077, 2078, 2081, 2082, 2084, 2085, 2089, 2091, 2093, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2110, 2111, 2112, 2113, 2114, 2115], "linear": [0, 2, 9, 24, 28, 29, 32, 33, 34, 35, 52, 53, 55, 57, 59, 61, 64, 66, 433, 474, 475, 476, 477, 478, 481, 714, 723, 731, 732, 768, 795, 796, 797, 799, 817, 858, 862, 863, 865, 866, 914, 969, 1014, 1109, 1166, 1168, 1176, 1178, 1227, 1273, 1277, 1285, 1299, 1305, 1310, 1316, 1317, 1318, 1319, 1320, 1321, 1323, 1334, 1336, 1364, 1420, 1439, 1445, 1468, 1476, 1477, 1485, 1512, 1527, 1529, 1543, 1545, 1546, 1547, 1555, 1557, 1564, 1567, 1571, 1573, 1575, 1578, 1580, 1622, 1631, 1632, 1644, 1679, 1688, 1693, 1704, 1707, 1717, 1728, 1729, 
[... 2.4/searchindex.js: regenerated Sphinx search index, a single minified JSON line mapping stemmed search terms (for example "layer", "convolut", "train", "devic") to lists of document indices ...]
1650, 1685, 1700, 1704, 1705, 1706, 1707, 1716, 1717, 1719, 1720, 1722, 1723, 1724, 1725, 1758, 1765, 1771, 1773, 1776, 1777, 1787, 1827, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1864, 1867, 1871, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1905, 1906, 1910, 1911, 1912, 1913, 1914, 1915, 1928, 1943, 1945, 1954, 1956, 1965, 1966, 1968, 1982, 1983, 1984, 1985, 1986, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 2004, 2005, 2006, 2009, 2010, 2011, 2013, 2014, 2015, 2016, 2017, 2019, 2021, 2027, 2034, 2036, 2042, 2048, 2049, 2050, 2051, 2052, 2055, 2057, 2058, 2059, 2061, 2062, 2065, 2067, 2068, 2069, 2070, 2071, 2072, 2076, 2077, 2082, 2084, 2088, 2089, 2095, 2098, 2101, 2102, 2103, 2104, 2106, 2108, 2110, 2111, 2113, 2115], "possibl": [0, 2, 9, 14, 15, 18, 19, 28, 30, 33, 35, 52, 59, 60, 64, 499, 500, 582, 619, 862, 883, 884, 970, 976, 1018, 1110, 1111, 1112, 1157, 1170, 1187, 1198, 1200, 1201, 1273, 1287, 1314, 1327, 1330, 1331, 1334, 1339, 1345, 1440, 1446, 1527, 1533, 1624, 1625, 1644, 1678, 1704, 1737, 1738, 1847, 1850, 1865, 1916, 1924, 1943, 2014, 2016, 2017, 2021, 2029, 2033, 2034, 2041, 2043, 2045, 2046, 2048, 2049, 2052, 2054, 2059, 2060, 2061, 2063, 2070, 2072, 2077, 2079, 2085, 2089, 2094, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2109, 2113], "valu": [0, 1, 2, 3, 5, 7, 8, 9, 11, 12, 14, 18, 19, 20, 23, 24, 25, 27, 29, 30, 32, 33, 35, 37, 39, 41, 44, 45, 47, 48, 52, 53, 55, 57, 60, 62, 63, 64, 66, 68, 71, 75, 77, 90, 103, 104, 105, 106, 121, 152, 156, 237, 238, 239, 240, 256, 261, 262, 274, 275, 301, 313, 315, 317, 318, 319, 320, 321, 323, 325, 328, 354, 401, 402, 417, 421, 422, 424, 425, 473, 483, 485, 501, 515, 517, 519, 539, 546, 562, 585, 586, 587, 589, 590, 601, 602, 683, 684, 687, 690, 691, 694, 698, 699, 700, 738, 759, 767, 773, 774, 782, 787, 788, 790, 796, 798, 799, 802, 805, 822, 823, 824, 825, 828, 829, 830, 858, 863, 869, 878, 879, 880, 884, 889, 893, 895, 897, 898, 902, 903, 904, 908, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 932, 941, 943, 945, 946, 947, 952, 954, 960, 964, 968, 969, 971, 982, 993, 994, 997, 1012, 1042, 1051, 1052, 1065, 1079, 1088, 1089, 1092, 1100, 1101, 1110, 1111, 1112, 1114, 1123, 1124, 1126, 1128, 1130, 1131, 1132, 1137, 1139, 1140, 1141, 1142, 1143, 1152, 1157, 1164, 1166, 1167, 1168, 1169, 1173, 1174, 1176, 1177, 1180, 1181, 1182, 1186, 1187, 1188, 1190, 1192, 1196, 1198, 1200, 1201, 1202, 1204, 1205, 1206, 1214, 1216, 1221, 1223, 1227, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1244, 1258, 1263, 1264, 1265, 1266, 1269, 1270, 1271, 1273, 1274, 1276, 1280, 1281, 1289, 1292, 1294, 1295, 1298, 1302, 1303, 1305, 1307, 1309, 1310, 1311, 1312, 1313, 1316, 1317, 1318, 1319, 1323, 1326, 1328, 1330, 1331, 1333, 1337, 1338, 1343, 1344, 1345, 1346, 1349, 1354, 1360, 1362, 1363, 1366, 1371, 1373, 1374, 1376, 1379, 1393, 1396, 1413, 1417, 1418, 1419, 1420, 1423, 1424, 1427, 1431, 1436, 1437, 1438, 1439, 1441, 1442, 1443, 1444, 1445, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1468, 1469, 1470, 1471, 1473, 1480, 1481, 1482, 1485, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1512, 1513, 1514, 1517, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1528, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1541, 1542, 1556, 1559, 1561, 1562, 1563, 1564, 1565, 1567, 1570, 1571, 1573, 1575, 1576, 1577, 1579, 1580, 1588, 1589, 1590, 1605, 1606, 1608, 1609, 1610, 1615, 1616, 1625, 1626, 1630, 1633, 1635, 1644, 1645, 
1646, 1658, 1659, 1660, 1669, 1670, 1671, 1672, 1674, 1677, 1685, 1698, 1699, 1704, 1706, 1707, 1709, 1711, 1714, 1715, 1716, 1717, 1723, 1731, 1732, 1734, 1735, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1751, 1752, 1758, 1761, 1762, 1765, 1767, 1771, 1772, 1776, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1803, 1809, 1817, 1821, 1824, 1828, 1830, 1831, 1843, 1845, 1847, 1848, 1849, 1854, 1855, 1856, 1863, 1866, 1875, 1878, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1896, 1900, 1905, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1924, 1928, 1929, 1940, 1944, 1947, 1950, 1953, 1954, 1955, 1956, 1958, 1961, 1962, 1964, 1980, 2010, 2011, 2012, 2013, 2014, 2015, 2018, 2021, 2024, 2025, 2028, 2030, 2033, 2036, 2041, 2042, 2043, 2045, 2046, 2049, 2050, 2051, 2054, 2055, 2057, 2059, 2061, 2062, 2065, 2067, 2068, 2069, 2070, 2071, 2072, 2075, 2076, 2077, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2093, 2098, 2100, 2101, 2102, 2104, 2107, 2108, 2112, 2113], "so": [0, 1, 2, 3, 7, 9, 14, 17, 19, 23, 24, 27, 28, 29, 30, 32, 33, 35, 36, 39, 40, 47, 48, 51, 52, 53, 55, 58, 59, 60, 63, 64, 66, 76, 77, 337, 460, 488, 498, 683, 863, 899, 904, 907, 909, 912, 913, 918, 930, 973, 976, 978, 981, 1010, 1033, 1097, 1099, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1134, 1135, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1160, 1163, 1166, 1170, 1171, 1173, 1174, 1184, 1186, 1188, 1199, 1201, 1212, 1260, 1270, 1273, 1276, 1277, 1283, 1285, 1287, 1329, 1331, 1341, 1346, 1351, 1363, 1385, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1470, 1473, 1480, 1527, 1544, 1561, 1563, 1579, 1589, 1590, 1598, 1608, 1609, 1610, 1633, 1691, 1692, 1707, 1717, 1771, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1892, 1907, 1919, 1924, 1928, 1949, 1961, 1966, 1971, 1990, 2014, 2016, 2017, 2023, 2025, 2027, 2029, 2030, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2046, 2048, 2049, 2050, 2052, 2054, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2064, 2067, 2069, 2070, 2072, 2076, 2077, 2082, 2083, 2085, 2087, 2088, 2091, 2095, 2098, 2100, 2101, 2103, 2104, 2105, 2107, 2109, 2111, 2112, 2113, 2115, 2120], "The": [0, 1, 2, 3, 5, 7, 8, 11, 12, 14, 15, 18, 19, 20, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 41, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 59, 60, 61, 62, 63, 66, 74, 75, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 152, 193, 198, 210, 211, 223, 235, 257, 292, 313, 315, 317, 321, 323, 400, 402, 403, 404, 450, 460, 485, 488, 489, 490, 495, 497, 500, 501, 515, 517, 519, 539, 546, 585, 605, 619, 620, 625, 683, 687, 690, 691, 692, 693, 697, 698, 699, 700, 738, 751, 760, 763, 769, 770, 771, 772, 775, 776, 777, 782, 783, 785, 786, 788, 789, 790, 795, 796, 797, 798, 802, 805, 818, 820, 822, 823, 824, 825, 828, 830, 840, 841, 858, 863, 868, 881, 882, 884, 888, 889, 893, 894, 896, 897, 899, 902, 903, 909, 910, 912, 913, 915, 916, 917, 918, 923, 924, 928, 929, 932, 935, 937, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 961, 962, 975, 978, 979, 982, 987, 990, 994, 998, 1011, 1017, 1024, 1040, 1051, 1052, 1054, 1055, 1056, 1065, 1079, 1080, 1081, 1086, 1092, 1096, 1097, 1098, 1099, 1100, 1101, 1103, 1109, 1110, 1114, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1148, 1157, 1159, 1160, 1162, 1163, 1164, 1167, 1170, 1173, 1176, 1177, 1178, 1182, 1184, 1186, 1192, 1193, 1197, 1212, 1216, 1217, 1226, 1227, 
1230, 1231, 1232, 1233, 1234, 1235, 1236, 1238, 1240, 1244, 1248, 1249, 1250, 1270, 1273, 1276, 1278, 1281, 1284, 1285, 1287, 1289, 1290, 1293, 1294, 1298, 1299, 1303, 1304, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1319, 1320, 1321, 1323, 1326, 1328, 1329, 1330, 1331, 1332, 1333, 1336, 1337, 1338, 1340, 1343, 1345, 1346, 1361, 1362, 1363, 1364, 1366, 1367, 1368, 1374, 1382, 1384, 1387, 1388, 1389, 1390, 1393, 1394, 1413, 1419, 1422, 1424, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1464, 1469, 1470, 1471, 1473, 1474, 1475, 1478, 1479, 1480, 1481, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1515, 1517, 1518, 1519, 1520, 1521, 1522, 1527, 1530, 1532, 1534, 1541, 1542, 1543, 1544, 1545, 1546, 1556, 1557, 1559, 1567, 1570, 1571, 1576, 1577, 1579, 1580, 1586, 1588, 1589, 1590, 1598, 1601, 1602, 1616, 1617, 1624, 1625, 1626, 1628, 1629, 1632, 1633, 1635, 1644, 1645, 1651, 1658, 1659, 1660, 1672, 1685, 1688, 1704, 1707, 1709, 1710, 1711, 1714, 1715, 1716, 1717, 1719, 1720, 1722, 1723, 1724, 1725, 1726, 1728, 1730, 1731, 1732, 1733, 1735, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1749, 1751, 1752, 1755, 1761, 1765, 1766, 1769, 1771, 1772, 1773, 1778, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1817, 1818, 1821, 1824, 1827, 1832, 1833, 1834, 1836, 1838, 1840, 1845, 1847, 1849, 1851, 1852, 1854, 1856, 1859, 1866, 1867, 1868, 1869, 1871, 1875, 1876, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1896, 1902, 1903, 1905, 1906, 1908, 1910, 1911, 1912, 1913, 1914, 1915, 1919, 1922, 1923, 1924, 1928, 1929, 1939, 1945, 1946, 1947, 1949, 1950, 1953, 1954, 1955, 1956, 1963, 1964, 1965, 1968, 1971, 1972, 1973, 1975, 1977, 1980, 1982, 1994, 2000, 2001, 2005, 2006, 2012, 2014, 2015, 2016, 2018, 2019, 2021, 2023, 2024, 2025, 2027, 2028, 2029, 2030, 2032, 2033, 2034, 2035, 2036, 2037, 2040, 2041, 2042, 2044, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2066, 2067, 2069, 2070, 2071, 2072, 2075, 2076, 2077, 2078, 2079, 2080, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2091, 2092, 2094, 2095, 2097, 2098, 2099, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2110, 2111, 2112, 2113, 2115, 2117, 2118], "same": [0, 1, 3, 7, 11, 12, 14, 15, 17, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 44, 45, 47, 48, 50, 51, 52, 53, 55, 58, 60, 63, 64, 66, 74, 75, 141, 155, 197, 223, 257, 313, 315, 317, 321, 323, 341, 447, 448, 449, 450, 451, 473, 485, 488, 499, 500, 502, 515, 517, 519, 522, 546, 582, 619, 620, 689, 692, 696, 698, 699, 700, 702, 723, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 748, 749, 759, 761, 764, 765, 766, 767, 768, 782, 788, 791, 792, 796, 797, 799, 803, 805, 817, 822, 825, 827, 828, 863, 865, 882, 883, 884, 896, 902, 904, 906, 909, 910, 912, 913, 914, 915, 916, 917, 923, 924, 944, 946, 947, 956, 958, 960, 963, 968, 969, 970, 976, 979, 987, 990, 1008, 1009, 1011, 1022, 1024, 1043, 1054, 1092, 1097, 1099, 1100, 1106, 1109, 1111, 1112, 1115, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1157, 1162, 1163, 1165, 1166, 1167, 1170, 1171, 1172, 1173, 
1176, 1177, 1178, 1196, 1214, 1217, 1227, 1235, 1236, 1244, 1248, 1264, 1270, 1273, 1279, 1284, 1285, 1289, 1290, 1294, 1295, 1303, 1304, 1305, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1323, 1325, 1326, 1327, 1328, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1341, 1361, 1365, 1367, 1368, 1371, 1373, 1374, 1375, 1376, 1379, 1418, 1421, 1422, 1423, 1429, 1430, 1433, 1434, 1435, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1464, 1465, 1466, 1467, 1468, 1470, 1471, 1473, 1476, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1495, 1496, 1499, 1509, 1510, 1511, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1530, 1531, 1532, 1533, 1535, 1536, 1541, 1542, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1579, 1580, 1583, 1584, 1585, 1598, 1604, 1605, 1606, 1608, 1609, 1610, 1616, 1625, 1633, 1635, 1644, 1645, 1685, 1704, 1712, 1713, 1716, 1717, 1724, 1725, 1731, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1758, 1762, 1767, 1772, 1773, 1777, 1780, 1811, 1815, 1820, 1821, 1824, 1825, 1837, 1839, 1841, 1845, 1847, 1849, 1850, 1854, 1856, 1863, 1866, 1867, 1871, 1878, 1883, 1901, 1910, 1911, 1912, 1914, 1915, 1921, 1922, 1923, 1927, 1928, 1939, 1950, 1961, 1962, 1963, 1964, 1965, 1972, 1973, 1974, 1977, 1982, 2011, 2012, 2014, 2016, 2017, 2021, 2025, 2030, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2059, 2060, 2061, 2062, 2065, 2067, 2069, 2070, 2071, 2072, 2075, 2077, 2078, 2079, 2082, 2084, 2085, 2086, 2087, 2088, 2089, 2092, 2093, 2097, 2098, 2099, 2100, 2101, 2103, 2104, 2109, 2111, 2112, 2113, 2114], "attribut": [0, 1, 11, 12, 23, 24, 28, 29, 36, 53, 64, 66, 68, 73, 82, 87, 152, 292, 497, 498, 730, 741, 742, 743, 744, 745, 746, 748, 749, 759, 767, 794, 804, 805, 818, 819, 820, 841, 842, 843, 862, 863, 893, 897, 902, 904, 907, 909, 910, 1160, 1185, 1186, 1270, 1272, 1273, 1274, 1276, 1277, 1279, 1284, 1285, 1346, 1463, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1527, 1571, 1717, 1718, 1737, 1738, 1759, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1924, 1965, 1966, 1969, 2013, 2015, 2018, 2021, 2029, 2036, 2042, 2043, 2046, 2049, 2050, 2057, 2062, 2067, 2070, 2072, 2077, 2088, 2089, 2104, 2112, 2114, 2118], "thu": [0, 1, 11, 23, 28, 33, 35, 48, 53, 55, 64, 788, 825, 828, 858, 1109, 1130, 1337, 1368, 1466, 1580, 1704, 1717, 1784, 1785, 1795, 1797, 1913, 2014, 2017, 2024, 2027, 2043, 2045, 2049, 2051, 2054, 2056, 2057, 2067, 2072, 2077, 2082, 2086, 2088, 2089], "you": [0, 1, 3, 4, 5, 7, 8, 9, 11, 14, 15, 17, 19, 22, 23, 24, 28, 32, 33, 34, 35, 37, 39, 44, 46, 47, 48, 50, 51, 52, 55, 56, 57, 58, 59, 60, 63, 64, 65, 66, 68, 76, 77, 84, 85, 152, 256, 337, 450, 488, 490, 501, 683, 692, 737, 738, 751, 760, 784, 893, 894, 895, 896, 897, 904, 906, 907, 909, 910, 912, 913, 914, 918, 919, 930, 958, 976, 978, 1009, 1032, 1043, 1045, 1054, 1055, 1076, 1109, 1166, 1167, 1170, 1171, 1172, 1173, 1178, 1182, 1186, 1187, 1188, 1197, 1201, 1273, 1277, 1278, 1286, 1289, 1290, 1291, 1319, 1337, 1345, 1368, 1375, 1378, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1497, 1523, 1524, 1525, 1527, 1532, 1533, 1534, 1543, 1562, 1571, 1573, 1575, 1580, 1608, 
1609, 1610, 1611, 1612, 1613, 1617, 1644, 1650, 1685, 1703, 1704, 1707, 1717, 1732, 1759, 1767, 1769, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1809, 1827, 1850, 1859, 1867, 1910, 1911, 1912, 1914, 1915, 1919, 1966, 1968, 1977, 1989, 2000, 2002, 2012, 2014, 2016, 2017, 2019, 2021, 2024, 2025, 2027, 2029, 2030, 2033, 2034, 2036, 2041, 2042, 2043, 2044, 2046, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2061, 2062, 2063, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2075, 2076, 2077, 2078, 2079, 2082, 2085, 2086, 2087, 2088, 2089, 2091, 2094, 2095, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2106, 2107, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116, 2120], "obtain": [0, 3, 23, 28, 35, 40, 63, 64, 90, 498, 1217, 1393, 1446, 1534, 1617, 1717, 1788, 1794, 1817, 1834, 1929, 2017, 2033, 2034, 2045, 2061, 2067, 2069, 2071, 2072, 2111], "tensor": [0, 2, 5, 7, 8, 12, 14, 17, 18, 23, 24, 27, 28, 29, 30, 32, 33, 35, 36, 53, 55, 58, 59, 60, 61, 62, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 86, 90, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 723, 735, 737, 738, 741, 742, 743, 744, 745, 746, 747, 748, 749, 751, 753, 758, 759, 760, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 787, 788, 789, 790, 791, 792, 793, 797, 801, 802, 803, 804, 805, 822, 823, 824, 825, 827, 828, 830, 869, 870, 871, 872, 873, 874, 875, 876, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 898, 899, 900, 902, 903, 904, 905, 906, 907, 908, 909, 910, 912, 913, 914, 915, 916, 917, 918, 919, 920, 923, 924, 926, 928, 929, 930, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 976, 978, 981, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 1008, 1021, 1022, 1023, 1024, 1025, 1032, 1040, 1041, 1047, 1051, 1054, 1057, 1061, 1073, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1185, 1186, 1188, 1193, 1196, 1197, 1210, 1214, 1215, 1216, 1217, 1218, 1219, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1250, 1251, 1252, 1253, 1255, 1258, 1260, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1271, 1273, 1274, 1276, 1277, 1279, 1280, 1281, 1284, 1285, 1287, 1289, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 
1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1367, 1368, 1369, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1380, 1381, 1382, 1387, 1396, 1412, 1413, 1415, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1425, 1426, 1427, 1431, 1435, 1436, 1439, 1440, 1444, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1472, 1473, 1478, 1479, 1480, 1486, 1492, 1493, 1497, 1498, 1514, 1517, 1518, 1519, 1520, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1535, 1537, 1538, 1539, 1540, 1542, 1543, 1545, 1549, 1550, 1551, 1552, 1553, 1554, 1560, 1561, 1562, 1563, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1588, 1589, 1590, 1592, 1593, 1594, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1707, 1716, 1717, 1718, 1719, 1721, 1722, 1723, 1724, 1725, 1727, 1729, 1730, 1731, 1732, 1733, 1734, 1735, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1755, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 1769, 1770, 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1814, 1815, 1816, 1817, 1818, 1819, 1820, 1821, 1822, 1823, 1824, 1825, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1856, 1857, 1858, 1859, 1860, 1861, 1862, 1863, 1865, 1866, 1867, 1868, 1869, 1872, 1875, 1878, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1894, 1895, 1896, 1898, 1899, 1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1931, 1938, 1939, 1940, 1941, 1942, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1955, 1956, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1965, 1966, 1968, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1989, 1994, 1995, 2010, 2011, 2013, 2014, 2016, 2017, 2021, 2024, 2037, 2041, 2042, 2044, 2045, 2046, 2048, 2050, 2051, 2052, 2053, 2055, 2057, 2058, 2059, 2060, 2061, 2063, 2065, 2068, 2069, 2070, 2071, 2073, 2076, 2077, 2078, 2081, 2083, 2084, 2087, 2089, 2092, 2093, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2104, 2106, 2108, 2111, 2112, 2113, 2114, 2115], "class": [0, 1, 2, 3, 12, 15, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 39, 41, 44, 45, 47, 49, 50, 52, 53, 55, 60, 63, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 90, 559, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 
721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 839, 840, 841, 842, 843, 844, 845, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 898, 899, 904, 905, 906, 907, 908, 909, 910, 919, 920, 921, 931, 932, 939, 940, 941, 942, 943, 999, 1000, 1009, 1010, 1011, 1012, 1014, 1015, 1030, 1032, 1043, 1084, 1113, 1176, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1271, 1272, 1273, 1276, 1278, 1279, 1280, 1284, 1285, 1288, 1289, 1290, 1291, 1386, 1398, 1399, 1400, 1404, 1410, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1586, 1587, 1588, 1606, 1616, 1624, 1669, 1671, 1685, 1707, 1716, 1717, 1718, 1719, 1720, 1734, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1758, 1765, 1770, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1834, 1903, 1966, 1982, 1983, 1984, 1987, 1989, 2013, 2015, 2018, 2021, 2024, 2029, 2030, 2033, 2035, 2037, 2038, 2042, 2043, 2046, 2049, 2050, 2051, 2052, 2057, 2059, 2062, 2065, 2071, 2072, 2074, 2075, 2077, 2078, 2081, 2082, 2084, 2085, 2087, 2089, 2091, 2092, 2093, 2095, 2099, 2100, 2101, 2102, 2104, 2109, 2111, 2112, 2113, 2114, 2115, 2118], "dtype": [0, 1, 5, 11, 12, 18, 21, 23, 24, 28, 30, 33, 52, 53, 55, 62, 64, 66, 74, 75, 87, 155, 156, 193, 210, 214, 215, 216, 217, 244, 313, 315, 317, 319, 321, 323, 403, 404, 411, 431, 434, 447, 448, 449, 450, 451, 455, 460, 472, 483, 485, 515, 517, 567, 582, 583, 605, 619, 690, 692, 695, 696, 700, 702, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 748, 749, 752, 753, 754, 755, 756, 757, 758, 759, 761, 763, 764, 767, 768, 775, 776, 777, 797, 798, 802, 822, 823, 824, 825, 826, 827, 828, 829, 830, 845, 862, 863, 865, 866, 869, 883, 884, 904, 905, 907, 909, 910, 912, 914, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 961, 968, 969, 978, 987, 990, 991, 992, 998, 1008, 1024, 1090, 1091, 1110, 1111, 1112, 1122, 1126, 1127, 1128, 1134, 1135, 1145, 1152, 1157, 1159, 1161, 1162, 1163, 1164, 1165, 1186, 1220, 1231, 1232, 1244, 1273, 1277, 
1287, 1293, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1355, 1356, 1357, 1358, 1360, 1363, 1364, 1368, 1372, 1373, 1374, 1377, 1378, 1413, 1417, 1418, 1419, 1421, 1431, 1440, 1441, 1442, 1443, 1444, 1446, 1447, 1448, 1449, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1469, 1470, 1473, 1478, 1479, 1481, 1489, 1490, 1491, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1527, 1533, 1534, 1535, 1542, 1543, 1544, 1545, 1549, 1550, 1551, 1552, 1553, 1567, 1571, 1573, 1575, 1579, 1580, 1581, 1582, 1587, 1588, 1589, 1590, 1600, 1616, 1617, 1644, 1650, 1652, 1685, 1691, 1692, 1707, 1716, 1719, 1720, 1724, 1725, 1734, 1737, 1738, 1758, 1772, 1776, 1777, 1778, 1780, 1815, 1821, 1825, 1826, 1829, 1830, 1831, 1832, 1833, 1834, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1845, 1853, 1856, 1868, 1869, 1871, 1872, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1899, 1902, 1904, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1927, 1928, 1940, 1943, 1954, 1956, 1961, 1965, 1966, 1968, 1975, 1976, 1980, 2010, 2011, 2013, 2014, 2015, 2016, 2017, 2019, 2021, 2024, 2035, 2036, 2046, 2049, 2053, 2057, 2060, 2065, 2067, 2068, 2072, 2073, 2074, 2082, 2083, 2084, 2087, 2088, 2089, 2102, 2103, 2104, 2108, 2113, 2118], "none": [0, 1, 2, 3, 5, 12, 14, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 38, 40, 44, 45, 47, 50, 52, 53, 55, 59, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 90, 114, 116, 117, 118, 120, 136, 137, 140, 152, 155, 156, 158, 175, 187, 188, 189, 190, 206, 207, 209, 211, 214, 215, 216, 217, 231, 235, 237, 238, 239, 240, 260, 288, 292, 303, 337, 353, 356, 379, 409, 411, 412, 413, 416, 423, 429, 430, 431, 432, 433, 434, 447, 448, 449, 450, 451, 455, 456, 472, 481, 483, 489, 490, 496, 505, 515, 522, 539, 540, 554, 555, 557, 558, 567, 582, 583, 585, 590, 592, 605, 610, 611, 612, 617, 625, 629, 631, 633, 635, 637, 639, 641, 643, 645, 647, 649, 651, 653, 655, 658, 660, 662, 663, 665, 667, 669, 671, 673, 675, 677, 679, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 696, 698, 699, 700, 701, 702, 715, 716, 717, 718, 719, 720, 721, 722, 723, 726, 727, 728, 729, 730, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 748, 749, 752, 753, 754, 755, 756, 757, 758, 769, 770, 771, 772, 782, 783, 784, 785, 786, 788, 789, 790, 791, 792, 796, 797, 798, 800, 802, 817, 820, 822, 823, 824, 825, 828, 829, 841, 842, 843, 861, 862, 863, 864, 865, 866, 869, 870, 871, 872, 873, 874, 875, 876, 878, 879, 882, 883, 884, 885, 886, 887, 888, 889, 893, 894, 895, 896, 897, 899, 901, 902, 903, 904, 907, 908, 909, 910, 913, 915, 916, 917, 918, 923, 924, 928, 929, 939, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 956, 960, 963, 965, 966, 967, 968, 969, 971, 972, 974, 976, 978, 982, 987, 988, 989, 992, 993, 995, 996, 997, 998, 1002, 1007, 1008, 1009, 1011, 1012, 1014, 1015, 1016, 1020, 1021, 1023, 1024, 1025, 1028, 1029, 1030, 1036, 1037, 1043, 1053, 1054, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1065, 1066, 1067, 1072, 1073, 1074, 1075, 1079, 1082, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1096, 1101, 1102, 1104, 1105, 1106, 1108, 1110, 1111, 1112, 1113, 1114, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 
1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1161, 1164, 1165, 1166, 1167, 1168, 1172, 1175, 1176, 1178, 1181, 1186, 1188, 1190, 1191, 1193, 1197, 1198, 1204, 1214, 1215, 1216, 1217, 1218, 1222, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1238, 1239, 1240, 1241, 1242, 1243, 1245, 1246, 1247, 1248, 1250, 1251, 1267, 1268, 1270, 1272, 1273, 1277, 1281, 1283, 1284, 1285, 1287, 1289, 1290, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1355, 1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1367, 1368, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1396, 1400, 1402, 1403, 1404, 1409, 1410, 1412, 1413, 1414, 1415, 1416, 1417, 1418, 1419, 1420, 1421, 1423, 1424, 1425, 1426, 1427, 1429, 1430, 1431, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1469, 1470, 1472, 1474, 1475, 1476, 1478, 1479, 1480, 1481, 1485, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1537, 1538, 1541, 1542, 1543, 1544, 1545, 1559, 1560, 1561, 1562, 1563, 1567, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1580, 1581, 1582, 1593, 1594, 1598, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1616, 1617, 1624, 1625, 1628, 1629, 1630, 1631, 1633, 1634, 1641, 1643, 1644, 1645, 1646, 1647, 1650, 1652, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1665, 1666, 1667, 1668, 1669, 1670, 1672, 1674, 1677, 1682, 1685, 1689, 1690, 1691, 1692, 1700, 1701, 1702, 1704, 1705, 1706, 1709, 1710, 1711, 1714, 1715, 1716, 1717, 1718, 1719, 1720, 1721, 1722, 1723, 1731, 1732, 1733, 1736, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1758, 1760, 1761, 1763, 1766, 1767, 1769, 1770, 1771, 1772, 1773, 1774, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1815, 1816, 1817, 1820, 1821, 1822, 1824, 1825, 1827, 1828, 1831, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1846, 1847, 1848, 1849, 1854, 1856, 1857, 1858, 1863, 1875, 1878, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1894, 1895, 1896, 1899, 1900, 1904, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1929, 1940, 1941, 1942, 1943, 1945, 1947, 1950, 1952, 1953, 1954, 1955, 1956, 1957, 1958, 1961, 1962, 1966, 1968, 1971, 1972, 1973, 1974, 1977, 1979, 1980, 1981, 1982, 1983, 1984, 1986, 1987, 1991, 1992, 1993, 2007, 2008, 2009, 2010, 2011, 2012, 2014, 2015, 2016, 2017, 2018, 2021, 2027, 2028, 2030, 2033, 2034, 2035, 2036, 2041, 2043, 2046, 2049, 2050, 2057, 2059, 2062, 2065, 2067, 2070, 2071, 2072, 2074, 2076, 2077, 2083, 2084, 2085, 2087, 2088, 2089, 2092, 2093, 2099, 2101, 2102, 2104, 2108, 2113, 2114, 2115], "enabl": [0, 1, 2, 
3, 5, 8, 12, 18, 20, 21, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 48, 52, 55, 62, 81, 87, 89, 490, 505, 506, 683, 763, 800, 815, 816, 904, 907, 909, 919, 920, 921, 930, 976, 990, 1009, 1054, 1109, 1113, 1256, 1257, 1275, 1282, 1389, 1390, 1478, 1497, 1543, 1544, 1574, 1591, 1685, 1717, 1734, 1735, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1770, 1797, 1872, 1875, 1903, 1965, 2017, 2023, 2024, 2027, 2031, 2032, 2036, 2042, 2043, 2044, 2045, 2046, 2049, 2052, 2053, 2056, 2058, 2060, 2061, 2065, 2067, 2071, 2076, 2077, 2078, 2082, 2091, 2093, 2094, 2095, 2097, 2099, 2101, 2102, 2103, 2104, 2107, 2111, 2113, 2114, 2115, 2117], "true": [0, 1, 2, 3, 5, 12, 14, 19, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 37, 50, 52, 55, 60, 61, 63, 64, 66, 74, 75, 152, 198, 211, 262, 321, 323, 330, 331, 332, 333, 334, 335, 337, 338, 339, 340, 341, 342, 343, 344, 345, 353, 398, 402, 404, 417, 450, 460, 462, 473, 479, 489, 490, 497, 498, 501, 506, 518, 519, 558, 569, 582, 583, 592, 596, 605, 611, 619, 625, 683, 690, 695, 696, 697, 698, 699, 700, 702, 721, 722, 723, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 741, 742, 743, 744, 745, 746, 749, 752, 757, 759, 763, 764, 767, 768, 771, 772, 782, 788, 789, 800, 817, 829, 844, 863, 877, 879, 880, 884, 893, 896, 897, 899, 904, 905, 907, 908, 909, 910, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 923, 924, 926, 928, 929, 939, 945, 948, 951, 953, 954, 960, 961, 967, 975, 976, 979, 982, 984, 990, 991, 1011, 1049, 1104, 1110, 1111, 1112, 1113, 1114, 1115, 1131, 1132, 1138, 1139, 1161, 1163, 1166, 1167, 1168, 1169, 1171, 1172, 1173, 1177, 1178, 1186, 1188, 1198, 1200, 1201, 1205, 1206, 1209, 1212, 1214, 1216, 1230, 1231, 1232, 1235, 1236, 1247, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1273, 1277, 1278, 1280, 1283, 1289, 1290, 1291, 1293, 1295, 1298, 1303, 1304, 1315, 1316, 1317, 1320, 1321, 1322, 1323, 1326, 1328, 1330, 1331, 1332, 1334, 1335, 1336, 1337, 1339, 1340, 1343, 1345, 1346, 1355, 1356, 1357, 1358, 1361, 1362, 1363, 1365, 1367, 1371, 1373, 1374, 1375, 1376, 1379, 1386, 1407, 1413, 1418, 1421, 1424, 1427, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1474, 1475, 1478, 1479, 1480, 1481, 1486, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1536, 1541, 1542, 1543, 1544, 1545, 1559, 1560, 1567, 1571, 1573, 1574, 1575, 1576, 1577, 1580, 1581, 1590, 1598, 1600, 1601, 1602, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1617, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1628, 1629, 1633, 1635, 1637, 1643, 1644, 1645, 1658, 1659, 1660, 1669, 1677, 1685, 1704, 1705, 1707, 1710, 1716, 1717, 1718, 1720, 1722, 1726, 1727, 1729, 1731, 1732, 1733, 1736, 1737, 1738, 1750, 1758, 1759, 1760, 1761, 1762, 1764, 1765, 1766, 1767, 1769, 1770, 1771, 1772, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1813, 1815, 1817, 1825, 1827, 1828, 1832, 1833, 1834, 1851, 1852, 1859, 1862, 1863, 1871, 1872, 1875, 1877, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1900, 1902, 1903, 1905, 1913, 1922, 1923, 1924, 1927, 1928, 1943, 1947, 1952, 
1961, 1962, 1965, 1966, 1968, 1971, 1972, 1973, 1977, 1980, 2012, 2015, 2016, 2017, 2021, 2024, 2025, 2026, 2028, 2029, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2060, 2061, 2062, 2065, 2066, 2067, 2070, 2071, 2072, 2073, 2074, 2076, 2077, 2078, 2081, 2082, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2100, 2101, 2102, 2104, 2105, 2106, 2108, 2109, 2111, 2112, 2113, 2114], "cache_en": [0, 1054], "instanc": [0, 2, 3, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 44, 47, 48, 50, 52, 55, 60, 64, 141, 619, 683, 737, 751, 760, 796, 799, 827, 829, 845, 862, 1009, 1109, 1160, 1271, 1273, 1285, 1289, 1346, 1469, 1470, 1473, 1489, 1490, 1491, 1499, 1527, 1537, 1572, 1574, 1575, 1579, 1588, 1589, 1590, 1643, 1717, 1732, 1744, 1758, 1766, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1913, 1944, 2014, 2016, 2018, 2023, 2027, 2029, 2033, 2035, 2042, 2043, 2046, 2048, 2049, 2051, 2052, 2057, 2061, 2065, 2070, 2072, 2077, 2078, 2079, 2082, 2084, 2089, 2098, 2112, 2114], "serv": [0, 7, 8, 15, 28, 89, 2024, 2065, 2069, 2077, 2078, 2099, 2104, 2108], "context": [0, 2, 5, 23, 28, 30, 32, 34, 37, 38, 47, 48, 52, 55, 64, 66, 152, 795, 893, 894, 895, 897, 899, 909, 910, 918, 919, 920, 921, 923, 939, 982, 1000, 1006, 1015, 1030, 1032, 1043, 1049, 1054, 1061, 1069, 1082, 1084, 1113, 1168, 1172, 1177, 1188, 1193, 1273, 1389, 1400, 1404, 1409, 1410, 1527, 1586, 1591, 1685, 1707, 1717, 1718, 1735, 1737, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1903, 1984, 1987, 1989, 2007, 2008, 2013, 2017, 2018, 2025, 2029, 2033, 2042, 2043, 2046, 2049, 2055, 2056, 2059, 2065, 2067, 2070, 2071, 2076, 2077, 2079, 2082, 2085, 2091, 2100, 2101, 2103, 2104, 2111, 2115], "manag": [0, 1, 2, 5, 7, 20, 28, 30, 32, 33, 34, 35, 37, 38, 45, 47, 48, 50, 52, 55, 64, 66, 90, 488, 899, 919, 920, 921, 939, 982, 1000, 1006, 1009, 1012, 1015, 1016, 1017, 1019, 1030, 1032, 1033, 1034, 1043, 1044, 1054, 1057, 1059, 1060, 1061, 1063, 1064, 1065, 1066, 1069, 1073, 1074, 1075, 1082, 1084, 1113, 1168, 1172, 1177, 1188, 1389, 1400, 1404, 1409, 1410, 1544, 1586, 1591, 1685, 1717, 1734, 1735, 1737, 1770, 1903, 1984, 1987, 1989, 2007, 2008, 2013, 2017, 2018, 2025, 2029, 2043, 2049, 2051, 2052, 2056, 2065, 2067, 2071, 2076, 2077, 2078, 2082, 2085, 2091, 2100, 2103, 2104, 2116], "decor": [0, 1, 35, 40, 44, 48, 64, 77, 904, 907, 909, 919, 939, 978, 982, 1069, 1113, 1278, 1279, 1285, 1291, 1770, 1902, 1903, 2014, 2016, 2017, 2021, 2042, 2043, 2049, 2077, 2099, 2101, 2102, 2104, 2105, 2114], "allow": [0, 1, 2, 3, 5, 7, 8, 9, 11, 12, 14, 15, 19, 23, 24, 28, 29, 30, 33, 35, 37, 39, 47, 48, 52, 55, 56, 58, 59, 60, 61, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 152, 515, 683, 738, 796, 800, 802, 827, 897, 901, 902, 918, 961, 975, 979, 990, 1051, 1054, 1079, 1109, 1171, 1188, 1193, 1273, 1278, 1291, 1329, 1351, 1393, 1422, 1436, 1437, 1438, 1462, 1463, 1480, 1520, 1521, 1522, 1527, 1530, 1533, 1556, 1571, 1717, 1723, 1771, 1780, 1796, 1811, 1905, 1965, 2012, 2016, 2017, 2024, 2025, 2034, 2035, 2036, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2050, 2052, 2054, 2055, 2056, 2057, 2059, 2060, 2062, 2065, 2067, 2069, 2070, 2071, 2072, 2074, 2077, 2082, 2085, 2086, 2087, 2089, 2100, 2101, 2102, 2103, 2104, 2109, 2112, 2113, 2116], "region": [0, 3, 5, 35, 44, 771, 772, 976, 1436, 1437, 1438, 1474, 1475, 1485, 1487, 1520, 1521, 1522, 1601, 1602, 1628, 1629, 1659, 1660, 2014, 2033, 2042, 2046, 2090, 2103, 2104], "your": [0, 1, 2, 4, 7, 
8, 9, 11, 14, 15, 17, 19, 23, 27, 28, 32, 34, 35, 38, 39, 40, 44, 46, 47, 48, 50, 51, 52, 55, 59, 60, 63, 64, 488, 897, 904, 907, 909, 912, 913, 918, 919, 976, 978, 979, 1182, 1187, 1188, 1273, 1277, 1278, 1283, 1286, 1289, 1290, 1291, 1527, 1534, 1575, 1717, 1734, 1737, 1770, 1798, 1872, 1961, 1966, 1968, 2013, 2014, 2016, 2017, 2021, 2023, 2025, 2033, 2034, 2035, 2037, 2042, 2043, 2044, 2046, 2049, 2050, 2051, 2053, 2055, 2056, 2057, 2058, 2060, 2061, 2063, 2067, 2069, 2073, 2076, 2078, 2082, 2087, 2088, 2091, 2094, 2095, 2097, 2098, 2099, 2102, 2103, 2104, 2105, 2106, 2107, 2109, 2110, 2111, 2113, 2114, 2115, 2120], "script": [0, 2, 4, 18, 23, 28, 31, 37, 40, 46, 48, 50, 52, 1274, 1276, 1277, 1278, 1279, 1280, 1283, 1284, 1286, 1288, 1289, 1291, 2012, 2013, 2016, 2017, 2027, 2045, 2053, 2056, 2058, 2062, 2064, 2065, 2070, 2072, 2077, 2094, 2095, 2097, 2099, 2101, 2106, 2107, 2110], "run": [0, 1, 2, 3, 4, 5, 7, 9, 10, 12, 14, 15, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 39, 40, 45, 46, 47, 48, 50, 51, 52, 53, 55, 56, 57, 58, 60, 61, 63, 64, 66, 152, 488, 490, 683, 795, 801, 820, 822, 823, 824, 825, 828, 861, 865, 866, 867, 897, 918, 919, 923, 924, 939, 976, 978, 1053, 1054, 1167, 1172, 1178, 1186, 1197, 1213, 1273, 1274, 1276, 1277, 1279, 1281, 1283, 1285, 1287, 1289, 1290, 1310, 1319, 1345, 1346, 1374, 1441, 1442, 1443, 1463, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1527, 1542, 1556, 1567, 1685, 1707, 1710, 1716, 1717, 1724, 1725, 1726, 1727, 1728, 1729, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1871, 1874, 1907, 1954, 1956, 1965, 1977, 2013, 2014, 2017, 2021, 2027, 2029, 2030, 2031, 2033, 2041, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2069, 2070, 2072, 2075, 2076, 2077, 2078, 2079, 2082, 2087, 2091, 2092, 2093, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2109, 2111, 2112, 2113, 2115], "In": [0, 2, 3, 4, 5, 7, 9, 15, 18, 19, 23, 28, 30, 32, 33, 34, 35, 40, 47, 48, 50, 52, 53, 55, 57, 58, 59, 60, 63, 64, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 123, 125, 127, 129, 132, 133, 135, 143, 145, 148, 149, 151, 154, 156, 160, 162, 164, 166, 168, 170, 179, 188, 196, 200, 203, 205, 215, 217, 223, 233, 238, 240, 246, 249, 251, 253, 255, 259, 260, 264, 271, 273, 275, 279, 281, 285, 287, 294, 296, 298, 306, 308, 310, 312, 358, 360, 362, 364, 366, 368, 370, 373, 375, 377, 378, 385, 387, 389, 391, 393, 397, 422, 425, 428, 430, 441, 443, 445, 453, 458, 468, 471, 487, 488, 492, 494, 510, 513, 524, 529, 531, 534, 536, 538, 551, 553, 555, 564, 566, 573, 577, 579, 595, 598, 600, 602, 604, 614, 624, 683, 763, 803, 827, 858, 904, 907, 909, 910, 943, 952, 960, 991, 992, 1008, 1054, 1065, 1078, 1079, 1083, 1084, 1109, 1130, 1132, 1140, 1141, 1142, 1157, 1168, 1172, 1175, 1177, 1182, 1184, 1187, 1198, 1270, 1271, 1273, 1283, 1286, 1287, 1289, 1305, 1314, 1319, 1320, 1321, 1329, 1332, 1337, 1342, 1346, 1351, 1354, 1363, 1368, 1374, 1375, 1410, 1418, 1436, 1437, 1438, 1440, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1465, 1466, 1467, 1471, 1473, 1478, 1487, 1497, 1512, 1520, 1521, 1522, 1527, 1533, 1571, 1573, 1575, 1579, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1623, 1633, 1640, 1642, 1649, 1678, 1681, 1684, 1685, 1699, 1717, 1724, 1725, 1731, 1737, 1738, 1770, 1798, 1809, 1811, 1847, 1863, 1865, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1924, 1928, 1929, 1952, 1965, 1966, 1968, 1974, 1975, 2012, 
2013, 2014, 2016, 2017, 2021, 2023, 2024, 2025, 2033, 2034, 2035, 2036, 2041, 2042, 2045, 2046, 2049, 2050, 2051, 2052, 2054, 2056, 2057, 2059, 2060, 2061, 2062, 2067, 2069, 2070, 2071, 2072, 2073, 2074, 2077, 2078, 2079, 2082, 2087, 2089, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2108, 2110, 2111, 2112, 2113, 2115], "an": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 18, 19, 20, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 44, 45, 46, 47, 48, 50, 51, 53, 55, 56, 57, 58, 59, 60, 62, 63, 64, 66, 81, 82, 83, 85, 87, 88, 90, 152, 156, 192, 244, 256, 315, 317, 323, 325, 330, 337, 417, 488, 501, 515, 517, 519, 539, 547, 562, 609, 616, 619, 683, 690, 691, 738, 744, 745, 746, 759, 763, 768, 782, 788, 790, 792, 795, 799, 818, 820, 826, 829, 858, 865, 866, 868, 882, 883, 884, 893, 895, 896, 897, 902, 904, 905, 907, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 931, 932, 933, 935, 936, 943, 947, 963, 973, 976, 978, 991, 992, 994, 1009, 1011, 1012, 1014, 1021, 1022, 1023, 1024, 1025, 1043, 1044, 1051, 1052, 1054, 1066, 1068, 1069, 1079, 1083, 1084, 1094, 1107, 1109, 1110, 1111, 1112, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1149, 1152, 1160, 1163, 1166, 1167, 1170, 1171, 1172, 1174, 1176, 1178, 1181, 1182, 1184, 1186, 1187, 1188, 1192, 1194, 1197, 1198, 1204, 1214, 1217, 1227, 1235, 1236, 1237, 1270, 1271, 1273, 1276, 1277, 1279, 1281, 1284, 1285, 1287, 1289, 1290, 1291, 1292, 1303, 1304, 1309, 1310, 1315, 1317, 1319, 1322, 1323, 1327, 1330, 1332, 1335, 1336, 1343, 1345, 1346, 1363, 1365, 1368, 1374, 1386, 1393, 1413, 1422, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1442, 1443, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1473, 1474, 1475, 1478, 1486, 1490, 1491, 1494, 1495, 1496, 1497, 1499, 1509, 1510, 1511, 1512, 1514, 1515, 1517, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1528, 1529, 1533, 1534, 1535, 1537, 1538, 1539, 1540, 1542, 1543, 1545, 1547, 1556, 1559, 1561, 1562, 1563, 1567, 1571, 1572, 1573, 1574, 1575, 1576, 1579, 1581, 1582, 1586, 1587, 1590, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1600, 1608, 1609, 1610, 1611, 1612, 1613, 1618, 1624, 1625, 1627, 1628, 1629, 1633, 1644, 1651, 1652, 1654, 1655, 1656, 1658, 1659, 1660, 1685, 1689, 1704, 1706, 1707, 1710, 1716, 1717, 1721, 1722, 1723, 1724, 1725, 1730, 1731, 1735, 1737, 1744, 1761, 1768, 1770, 1772, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1827, 1832, 1833, 1834, 1853, 1855, 1859, 1867, 1868, 1870, 1878, 1884, 1905, 1908, 1909, 1913, 1916, 1921, 1924, 1928, 1929, 1943, 1944, 1960, 1961, 1962, 1963, 1965, 1968, 1975, 1976, 1977, 1978, 1983, 2011, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2021, 2023, 2024, 2025, 2027, 2029, 2030, 2031, 2032, 2033, 2034, 2035, 2036, 2037, 2041, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2071, 2072, 2073, 2074, 2075, 2077, 2078, 2079, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2092, 2093, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2110, 2111, 2112, 2113, 2114, 2115, 2116, 2118], "chosen": [0, 17, 19, 60, 1097, 1295, 1464, 1685, 1875, 1900, 1947, 2036, 2042, 2046, 2049, 2059, 2067, 2072, 2113], "improv": [0, 1, 3, 9, 14, 21, 24, 28, 30, 
52, 55, 64, 763, 807, 808, 809, 912, 914, 918, 994, 1227, 1464, 1478, 1497, 1543, 1574, 1685, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1811, 2013, 2014, 2042, 2048, 2049, 2050, 2060, 2061, 2065, 2072, 2073, 2077, 2107, 2109, 2111, 2113, 2115, 2116], "perform": [0, 1, 2, 3, 4, 5, 11, 14, 21, 22, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 47, 48, 52, 55, 59, 63, 64, 83, 121, 208, 211, 460, 488, 582, 605, 619, 625, 689, 690, 691, 692, 693, 694, 763, 784, 807, 808, 809, 851, 862, 865, 904, 907, 909, 912, 914, 915, 918, 919, 923, 944, 956, 976, 991, 992, 1016, 1065, 1090, 1091, 1104, 1129, 1152, 1154, 1166, 1167, 1170, 1182, 1202, 1238, 1273, 1283, 1284, 1289, 1317, 1319, 1322, 1326, 1329, 1330, 1334, 1335, 1343, 1344, 1346, 1351, 1360, 1373, 1378, 1389, 1390, 1415, 1418, 1421, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1466, 1469, 1470, 1478, 1497, 1527, 1533, 1535, 1543, 1556, 1571, 1574, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1652, 1670, 1685, 1691, 1692, 1707, 1717, 1719, 1720, 1732, 1737, 1758, 1765, 1767, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1811, 1817, 1825, 1853, 1867, 1871, 1898, 1904, 1905, 1906, 1907, 1927, 1929, 1965, 2013, 2014, 2017, 2021, 2025, 2030, 2031, 2033, 2034, 2035, 2037, 2041, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2052, 2054, 2055, 2058, 2059, 2060, 2061, 2067, 2069, 2070, 2071, 2072, 2073, 2075, 2077, 2078, 2079, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2094, 2095, 2097, 2099, 2100, 2101, 2102, 2104, 2105, 2106, 2107, 2109, 2114, 2115, 2116], "while": [0, 2, 3, 5, 7, 8, 11, 14, 23, 24, 28, 29, 30, 33, 35, 50, 52, 53, 55, 60, 63, 64, 87, 698, 699, 738, 858, 888, 896, 909, 910, 1012, 1084, 1167, 1181, 1236, 1273, 1276, 1289, 1290, 1346, 1410, 1419, 1431, 1447, 1487, 1489, 1490, 1491, 1497, 1527, 1533, 1546, 1559, 1567, 1571, 1575, 1652, 1710, 1717, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1772, 1863, 1871, 1970, 2018, 2021, 2023, 2024, 2035, 2036, 2042, 2043, 2046, 2049, 2050, 2051, 2052, 2054, 2057, 2059, 2061, 2062, 2067, 2069, 2072, 2075, 2077, 2079, 2082, 2083, 2084, 2086, 2087, 2088, 2101, 2103, 2104, 2105, 2107, 2113, 2116], "maintain": [0, 7, 8, 23, 24, 28, 35, 52, 55, 59, 64, 932, 942, 1186, 1190, 1435, 1466, 1471, 1626, 1717, 1758, 1772, 2013, 2036, 2042, 2043, 2046, 2049, 2057, 2065, 2100, 2103], "accuraci": [0, 19, 24, 976, 1188, 1337, 1685, 1871, 2013, 2030, 2042, 2057, 2087, 2092, 2104], "see": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 14, 15, 17, 19, 20, 22, 23, 25, 28, 29, 30, 33, 35, 39, 47, 48, 51, 52, 55, 60, 61, 62, 63, 64, 91, 95, 97, 99, 101, 103, 105, 107, 109, 111, 114, 115, 116, 117, 118, 119, 120, 122, 124, 126, 128, 130, 131, 134, 136, 137, 138, 139, 140, 142, 144, 146, 147, 150, 152, 153, 155, 156, 157, 158, 159, 161, 163, 165, 167, 169, 171, 172, 173, 174, 177, 178, 180, 181, 182, 183, 184, 185, 186, 187, 191, 194, 195, 199, 201, 202, 204, 206, 207, 209, 212, 213, 214, 216, 219, 220, 222, 226, 227, 228, 229, 230, 231, 232, 236, 237, 239, 241, 242, 243, 245, 247, 248, 250, 252, 254, 257, 258, 263, 265, 266, 267, 268, 269, 270, 272, 274, 276, 277, 278, 280, 282, 283, 284, 286, 289, 290, 293, 295, 297, 299, 300, 301, 302, 303, 304, 305, 307, 309, 311, 315, 323, 324, 325, 326, 327, 329, 330, 336, 346, 347, 348, 349, 350, 351, 352, 353, 354, 356, 357, 359, 361, 363, 365, 367, 369, 371, 372, 374, 376, 380, 381, 382, 383, 384, 386, 388, 390, 392, 394, 395, 396, 398, 399, 405, 406, 407, 409, 410, 411, 412, 413, 414, 415, 416, 418, 
419, 420, 421, 423, 424, 426, 427, 429, 431, 432, 433, 434, 435, 436, 440, 442, 444, 452, 454, 455, 457, 459, 461, 462, 463, 464, 466, 467, 469, 470, 472, 479, 481, 482, 484, 486, 488, 489, 490, 491, 493, 495, 496, 497, 499, 500, 501, 503, 504, 507, 508, 509, 512, 517, 519, 520, 521, 523, 525, 526, 527, 528, 530, 532, 533, 535, 537, 540, 541, 542, 544, 545, 549, 550, 552, 554, 556, 557, 558, 563, 565, 567, 569, 570, 571, 572, 574, 575, 576, 578, 580, 581, 592, 593, 594, 596, 597, 599, 601, 603, 607, 608, 611, 612, 613, 616, 617, 618, 620, 621, 622, 623, 683, 696, 698, 699, 702, 733, 734, 735, 736, 737, 738, 741, 742, 743, 744, 745, 746, 748, 749, 751, 759, 760, 763, 764, 765, 766, 767, 768, 769, 770, 771, 774, 775, 776, 777, 782, 783, 784, 785, 786, 787, 788, 796, 843, 863, 864, 865, 866, 869, 878, 879, 880, 883, 894, 896, 897, 898, 899, 902, 903, 904, 906, 907, 909, 910, 914, 918, 919, 920, 923, 928, 929, 931, 936, 945, 947, 954, 956, 959, 973, 976, 978, 980, 981, 996, 1009, 1011, 1012, 1014, 1016, 1017, 1019, 1033, 1034, 1043, 1044, 1054, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1073, 1074, 1075, 1104, 1107, 1109, 1110, 1112, 1113, 1122, 1127, 1145, 1148, 1161, 1164, 1169, 1170, 1171, 1173, 1181, 1186, 1188, 1192, 1200, 1202, 1217, 1226, 1227, 1231, 1232, 1236, 1237, 1245, 1246, 1247, 1273, 1277, 1285, 1289, 1290, 1292, 1293, 1295, 1303, 1305, 1310, 1313, 1314, 1316, 1319, 1321, 1328, 1330, 1331, 1334, 1339, 1343, 1344, 1346, 1352, 1354, 1360, 1361, 1363, 1371, 1373, 1374, 1375, 1376, 1378, 1379, 1389, 1390, 1396, 1418, 1420, 1421, 1431, 1439, 1440, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1469, 1470, 1472, 1478, 1480, 1486, 1487, 1492, 1493, 1497, 1518, 1519, 1523, 1524, 1525, 1526, 1527, 1530, 1531, 1532, 1533, 1534, 1539, 1540, 1541, 1543, 1555, 1557, 1559, 1560, 1572, 1573, 1574, 1575, 1576, 1577, 1580, 1586, 1587, 1592, 1593, 1594, 1595, 1596, 1597, 1599, 1600, 1601, 1602, 1603, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1624, 1625, 1626, 1627, 1630, 1631, 1632, 1633, 1634, 1636, 1637, 1638, 1639, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1671, 1672, 1673, 1675, 1676, 1677, 1678, 1679, 1680, 1682, 1683, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1701, 1702, 1703, 1704, 1705, 1706, 1717, 1718, 1731, 1732, 1733, 1737, 1743, 1752, 1761, 1766, 1769, 1770, 1771, 1772, 1776, 1815, 1825, 1826, 1836, 1838, 1840, 1842, 1843, 1847, 1850, 1853, 1859, 1868, 1871, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1895, 1903, 1904, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1922, 1923, 1924, 1927, 1938, 1940, 1943, 1949, 1950, 1954, 1956, 1961, 1965, 1968, 1972, 1973, 1978, 1980, 2010, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2021, 2022, 2023, 2024, 2026, 2028, 2031, 2033, 2034, 2035, 2036, 2037, 2042, 2043, 2044, 2046, 2048, 2049, 2050, 2051, 2054, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2065, 2066, 2067, 2071, 2072, 2073, 2076, 2077, 2078, 2082, 2083, 2084, 2085, 2086, 2088, 2089, 2091, 2094, 2096, 2099, 2100, 2101, 2102, 2105, 2106, 2107, 2111, 2112, 2113, 2114, 2115, 2117, 2118], "detail": [0, 1, 2, 3, 7, 8, 11, 13, 14, 15, 17, 18, 23, 28, 29, 30, 33, 35, 37, 47, 48, 52, 53, 55, 61, 64, 81, 82, 83, 152, 325, 497, 526, 616, 683, 738, 741, 742, 743, 744, 745, 746, 763, 769, 770, 771, 774, 775, 776, 777, 782, 783, 785, 
786, 787, 788, 796, 863, 864, 865, 866, 877, 894, 896, 897, 898, 899, 902, 903, 904, 907, 909, 910, 923, 959, 980, 981, 996, 1014, 1016, 1017, 1019, 1033, 1034, 1043, 1054, 1057, 1059, 1060, 1061, 1063, 1064, 1065, 1066, 1073, 1074, 1075, 1092, 1109, 1148, 1171, 1186, 1217, 1221, 1223, 1227, 1254, 1261, 1273, 1277, 1289, 1310, 1313, 1331, 1338, 1346, 1352, 1354, 1374, 1413, 1431, 1435, 1445, 1457, 1458, 1459, 1463, 1469, 1470, 1471, 1472, 1474, 1475, 1478, 1497, 1527, 1533, 1539, 1540, 1543, 1555, 1576, 1586, 1592, 1593, 1594, 1595, 1596, 1597, 1599, 1600, 1601, 1602, 1603, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1634, 1636, 1637, 1638, 1639, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1673, 1675, 1676, 1677, 1678, 1679, 1680, 1682, 1683, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1701, 1702, 1703, 1707, 1712, 1713, 1717, 1718, 1761, 1771, 1779, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1798, 1803, 1815, 1827, 1859, 1870, 1895, 1904, 1950, 1961, 1965, 1968, 2012, 2014, 2015, 2016, 2017, 2021, 2024, 2028, 2031, 2043, 2044, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2056, 2057, 2060, 2061, 2062, 2067, 2070, 2071, 2072, 2074, 2076, 2077, 2078, 2079, 2082, 2084, 2086, 2087, 2088, 2089, 2091, 2095, 2101, 2110, 2113, 2114, 2115], "when": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 14, 18, 19, 23, 24, 25, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 41, 45, 46, 47, 48, 50, 51, 52, 53, 55, 58, 60, 61, 62, 63, 64, 65, 84, 86, 87, 99, 152, 193, 210, 262, 315, 323, 417, 450, 488, 489, 490, 499, 500, 515, 517, 519, 546, 562, 582, 619, 683, 689, 692, 738, 771, 772, 782, 788, 794, 796, 827, 843, 862, 869, 881, 884, 897, 904, 912, 913, 914, 915, 916, 917, 918, 919, 923, 924, 928, 929, 930, 932, 939, 944, 947, 956, 964, 967, 968, 969, 975, 976, 978, 984, 991, 992, 996, 1011, 1013, 1023, 1025, 1047, 1053, 1054, 1066, 1092, 1109, 1148, 1152, 1157, 1163, 1167, 1168, 1173, 1176, 1177, 1178, 1188, 1189, 1197, 1198, 1201, 1202, 1210, 1213, 1227, 1262, 1263, 1265, 1266, 1269, 1273, 1274, 1276, 1285, 1286, 1287, 1289, 1290, 1294, 1295, 1303, 1304, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1319, 1320, 1321, 1322, 1323, 1326, 1327, 1328, 1330, 1331, 1332, 1333, 1334, 1335, 1337, 1338, 1339, 1343, 1344, 1345, 1346, 1354, 1360, 1363, 1368, 1374, 1375, 1378, 1413, 1418, 1419, 1420, 1431, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1469, 1470, 1473, 1476, 1478, 1479, 1481, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1514, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1535, 1541, 1542, 1543, 1555, 1556, 1559, 1560, 1561, 1562, 1564, 1567, 1573, 1574, 1575, 1576, 1578, 1579, 1580, 1581, 1582, 1586, 1588, 1598, 1600, 1601, 1602, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1617, 1625, 1631, 1633, 1642, 1644, 1645, 1669, 1672, 1674, 1677, 1678, 1685, 1693, 1704, 1705, 1706, 1707, 1716, 1717, 1718, 1719, 1720, 1724, 1725, 1731, 1732, 1734, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1761, 1766, 1770, 1771, 1772, 1773, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1800, 1801, 1802, 
1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1813, 1815, 1817, 1824, 1827, 1828, 1850, 1856, 1863, 1868, 1871, 1877, 1895, 1901, 1905, 1907, 1908, 1909, 1913, 1919, 1924, 1928, 1938, 1943, 1945, 1950, 1954, 1956, 1965, 1966, 1977, 1980, 1982, 2012, 2013, 2014, 2016, 2021, 2023, 2027, 2030, 2033, 2034, 2035, 2041, 2044, 2045, 2046, 2048, 2051, 2052, 2054, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2065, 2069, 2070, 2071, 2076, 2077, 2078, 2079, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2095, 2099, 2100, 2101, 2102, 2103, 2105, 2107, 2109, 2111, 2112, 2113, 2115, 2117], "enter": [0, 28, 899, 900, 901, 919, 2049, 2101], "ani": [0, 1, 2, 3, 4, 5, 7, 9, 11, 14, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 41, 45, 46, 47, 48, 51, 52, 53, 55, 60, 63, 64, 90, 152, 256, 488, 501, 547, 619, 700, 761, 784, 795, 796, 797, 803, 813, 814, 815, 816, 818, 819, 820, 821, 826, 827, 829, 858, 869, 893, 894, 895, 897, 904, 907, 909, 910, 912, 918, 923, 924, 932, 952, 958, 962, 963, 991, 1011, 1047, 1054, 1065, 1110, 1111, 1112, 1125, 1126, 1128, 1129, 1130, 1132, 1136, 1140, 1141, 1142, 1157, 1166, 1167, 1174, 1175, 1176, 1178, 1179, 1184, 1185, 1186, 1188, 1192, 1198, 1236, 1272, 1273, 1277, 1280, 1284, 1285, 1287, 1289, 1295, 1303, 1305, 1309, 1310, 1311, 1314, 1319, 1320, 1321, 1326, 1327, 1330, 1334, 1337, 1343, 1345, 1428, 1429, 1430, 1432, 1433, 1434, 1435, 1439, 1440, 1444, 1445, 1454, 1455, 1456, 1463, 1464, 1468, 1472, 1473, 1476, 1477, 1480, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1492, 1493, 1513, 1514, 1516, 1517, 1518, 1526, 1527, 1535, 1537, 1538, 1541, 1546, 1547, 1548, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1577, 1578, 1579, 1604, 1608, 1609, 1610, 1650, 1670, 1671, 1685, 1707, 1717, 1724, 1725, 1736, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1749, 1759, 1760, 1762, 1765, 1767, 1772, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1803, 1843, 1859, 1873, 1875, 1928, 1935, 1977, 1982, 1991, 2012, 2014, 2015, 2016, 2021, 2024, 2025, 2027, 2028, 2030, 2033, 2034, 2035, 2036, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2052, 2054, 2055, 2056, 2057, 2058, 2059, 2061, 2062, 2064, 2065, 2067, 2068, 2069, 2070, 2071, 2072, 2077, 2078, 2079, 2082, 2084, 2087, 2089, 2092, 2093, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2106, 2108, 2109, 2111, 2112, 2113, 2114, 2115], "should": [0, 1, 3, 4, 5, 9, 14, 15, 17, 18, 19, 20, 23, 24, 28, 29, 30, 32, 33, 35, 36, 37, 39, 40, 45, 46, 47, 48, 50, 52, 53, 55, 59, 60, 63, 64, 66, 71, 72, 75, 86, 121, 152, 156, 315, 323, 400, 404, 417, 447, 448, 449, 450, 451, 489, 490, 498, 515, 517, 519, 585, 586, 587, 589, 590, 689, 692, 693, 738, 763, 775, 776, 777, 796, 799, 803, 820, 827, 841, 842, 858, 859, 860, 863, 869, 893, 894, 895, 896, 897, 904, 905, 906, 907, 908, 909, 910, 912, 913, 914, 918, 928, 929, 944, 945, 946, 947, 954, 967, 982, 983, 998, 1011, 1014, 1024, 1025, 1043, 1045, 1054, 1100, 1110, 1111, 1112, 1122, 1127, 1130, 1131, 1132, 1140, 1141, 1142, 1145, 1160, 1163, 1164, 1165, 1167, 1168, 1172, 1176, 1177, 1178, 1186, 1187, 1192, 1194, 1231, 1232, 1235, 1236, 1270, 1271, 1273, 1274, 1278, 1283, 1285, 1286, 1289, 1290, 1291, 1293, 1318, 1343, 1344, 1345, 1351, 1360, 1363, 1364, 1365, 1386, 1431, 1439, 1440, 1444, 1460, 1462, 1463, 1465, 1466, 1467, 1470, 1471, 1492, 1497, 1519, 1527, 1533, 1534, 1535, 1550, 1555, 1576, 1580, 1598, 1604, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1625, 1633, 1685, 1707, 1709, 1710, 1711, 1714, 1715, 
1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1758, 1759, 1760, 1763, 1765, 1776, 1777, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1790, 1792, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1827, 1829, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1850, 1866, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1910, 1911, 1912, 1913, 1914, 1915, 1928, 1943, 1952, 1977, 1982, 1983, 2010, 2011, 2012, 2013, 2014, 2016, 2017, 2021, 2023, 2024, 2025, 2028, 2030, 2033, 2035, 2036, 2041, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2057, 2059, 2060, 2061, 2063, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2077, 2079, 2082, 2086, 2087, 2089, 2095, 2097, 2099, 2101, 2102, 2103, 2104, 2106, 2107, 2109, 2111, 2112, 2113, 2114], "call": [0, 1, 2, 3, 8, 11, 14, 15, 18, 19, 20, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 36, 37, 47, 50, 52, 53, 55, 56, 57, 59, 60, 61, 63, 64, 66, 68, 75, 82, 88, 152, 292, 325, 337, 460, 488, 489, 490, 558, 582, 616, 619, 683, 700, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 784, 793, 800, 827, 861, 865, 866, 897, 904, 905, 906, 907, 908, 909, 910, 912, 914, 915, 918, 919, 928, 929, 930, 932, 939, 943, 959, 976, 981, 982, 986, 1009, 1012, 1014, 1043, 1045, 1055, 1056, 1065, 1073, 1074, 1076, 1077, 1107, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1161, 1166, 1167, 1178, 1179, 1186, 1197, 1217, 1237, 1258, 1270, 1273, 1276, 1278, 1284, 1285, 1286, 1293, 1329, 1337, 1341, 1343, 1345, 1346, 1375, 1435, 1441, 1442, 1443, 1463, 1464, 1465, 1466, 1467, 1469, 1471, 1473, 1512, 1523, 1524, 1525, 1527, 1534, 1535, 1556, 1567, 1579, 1611, 1612, 1613, 1619, 1620, 1621, 1626, 1644, 1685, 1704, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1724, 1725, 1734, 1737, 1744, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1760, 1766, 1767, 1769, 1770, 1778, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1840, 1867, 1873, 1874, 1903, 1922, 1923, 1924, 1945, 1961, 1965, 1966, 1968, 1972, 1973, 1977, 1978, 1983, 2000, 2001, 2002, 2003, 2012, 2014, 2018, 2021, 2025, 2030, 2033, 2035, 2036, 2037, 2042, 2043, 2045, 2046, 2048, 2049, 2051, 2052, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2069, 2070, 2071, 2072, 2077, 2078, 2079, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2092, 2094, 2099, 2100, 2101, 2102, 2103, 2105, 2106, 2107, 2109, 2111, 2112, 2113, 2114, 2115, 2117, 2118], "model": [0, 1, 2, 3, 4, 5, 8, 9, 12, 24, 28, 29, 30, 32, 34, 35, 46, 48, 51, 53, 55, 56, 57, 58, 59, 60, 61, 64, 66, 84, 85, 795, 796, 797, 800, 801, 813, 814, 815, 816, 817, 818, 819, 820, 822, 823, 824, 827, 828, 829, 839, 840, 841, 842, 844, 858, 861, 862, 863, 864, 865, 866, 867, 919, 932, 976, 978, 1055, 1076, 1166, 1167, 1168, 1176, 1178, 1273, 1277, 1278, 1283, 1285, 1289, 1291, 1345, 1431, 1463, 1480, 1491, 1492, 1527, 1533, 1556, 1567, 1571, 1573, 1574, 1575, 1632, 1717, 1718, 1724, 1725, 1730, 1732, 1735, 1748, 1767, 1768, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 1803, 1809, 1811, 1924, 1977, 2000, 2002, 2013, 2014, 2016, 2017, 2018, 2025, 2027, 2028, 2036, 2043, 2045, 2046, 2048, 2049, 2052, 2053, 2055, 2057, 2058, 2059, 2060, 2061, 2062, 2064, 2067, 2071, 2073, 
2077, 2078, 2080, 2082, 2083, 2087, 2092, 2093, 2094, 2097, 2098, 2099, 2101, 2103, 2104, 2105, 2109, 2111, 2113], "": [0, 1, 2, 3, 4, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 23, 24, 28, 29, 30, 32, 34, 35, 36, 37, 40, 43, 44, 45, 47, 48, 49, 52, 53, 55, 57, 59, 60, 63, 64, 82, 83, 88, 89, 90, 465, 483, 495, 498, 515, 546, 560, 583, 627, 683, 692, 696, 697, 698, 699, 702, 738, 763, 796, 797, 798, 804, 818, 819, 820, 823, 826, 829, 858, 865, 866, 879, 881, 882, 883, 884, 903, 904, 905, 906, 909, 913, 914, 916, 924, 943, 960, 962, 975, 984, 990, 991, 992, 993, 998, 1007, 1009, 1011, 1015, 1030, 1034, 1043, 1045, 1050, 1051, 1054, 1055, 1056, 1076, 1077, 1078, 1084, 1101, 1104, 1106, 1107, 1109, 1123, 1124, 1126, 1127, 1128, 1131, 1132, 1134, 1135, 1138, 1139, 1141, 1142, 1143, 1144, 1146, 1148, 1149, 1150, 1151, 1152, 1155, 1156, 1157, 1163, 1164, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1176, 1178, 1181, 1186, 1188, 1217, 1227, 1235, 1236, 1237, 1260, 1262, 1270, 1271, 1273, 1277, 1284, 1285, 1289, 1304, 1305, 1315, 1316, 1317, 1319, 1322, 1326, 1328, 1330, 1331, 1335, 1336, 1337, 1338, 1343, 1345, 1346, 1361, 1368, 1373, 1375, 1378, 1380, 1381, 1386, 1389, 1390, 1393, 1400, 1404, 1408, 1410, 1417, 1418, 1421, 1423, 1429, 1430, 1431, 1439, 1440, 1441, 1442, 1443, 1446, 1463, 1469, 1480, 1527, 1528, 1533, 1537, 1544, 1556, 1559, 1561, 1567, 1571, 1573, 1575, 1581, 1582, 1605, 1606, 1617, 1630, 1633, 1644, 1645, 1650, 1685, 1691, 1704, 1707, 1717, 1718, 1724, 1725, 1737, 1748, 1761, 1765, 1772, 1773, 1778, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1803, 1811, 1815, 1817, 1821, 1828, 1843, 1846, 1847, 1850, 1851, 1852, 1854, 1856, 1863, 1870, 1871, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1919, 1922, 1923, 1924, 1927, 1928, 1929, 1930, 1931, 1940, 1944, 1946, 1952, 1968, 1972, 1973, 1974, 1977, 1978, 1982, 1984, 1987, 1996, 1999, 2000, 2001, 2002, 2003, 2008, 2012, 2014, 2015, 2016, 2017, 2018, 2021, 2023, 2024, 2028, 2030, 2031, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2057, 2059, 2060, 2062, 2063, 2064, 2065, 2067, 2068, 2069, 2071, 2072, 2074, 2075, 2077, 2078, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2093, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2106, 2107, 2108, 2109, 2111, 2112, 2113, 2114, 2117], "wrap": [0, 1, 15, 19, 23, 24, 28, 32, 33, 38, 40, 50, 55, 59, 60, 62, 64, 66, 262, 793, 794, 1012, 1084, 1167, 1188, 1273, 1289, 1410, 1463, 1527, 1567, 1711, 1717, 1735, 1761, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 2008, 2014, 2015, 2017, 2021, 2024, 2042, 2043, 2046, 2048, 2049, 2050, 2057, 2063, 2067, 2071, 2072, 2077, 2078, 2091, 2093, 2099, 2101, 2102, 2104, 2113, 2114], "forward": [0, 5, 7, 8, 12, 14, 24, 28, 29, 30, 32, 33, 35, 52, 53, 55, 56, 57, 59, 61, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 152, 223, 224, 683, 738, 751, 760, 763, 803, 818, 819, 820, 823, 827, 865, 866, 893, 895, 896, 897, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 910, 912, 914, 915, 918, 919, 920, 921, 923, 983, 984, 990, 1054, 1101, 1113, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1167, 1169, 1170, 1171, 1173, 1176, 1273, 1276, 1277, 1278, 1280, 1284, 1285, 1289, 1290, 1291, 1435, 1439, 1441, 1442, 1443, 1463, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1478, 1497, 1512, 1523, 1524, 1525, 1527, 1528, 1529, 
1533, 1534, 1537, 1538, 1542, 1543, 1544, 1556, 1571, 1572, 1573, 1574, 1575, 1619, 1620, 1621, 1626, 1672, 1685, 1707, 1710, 1711, 1716, 1717, 1735, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755, 1766, 1769, 1770, 1834, 1901, 1965, 2013, 2014, 2016, 2017, 2021, 2023, 2027, 2029, 2033, 2035, 2041, 2042, 2043, 2045, 2046, 2048, 2050, 2051, 2052, 2054, 2056, 2057, 2060, 2062, 2065, 2067, 2069, 2070, 2071, 2072, 2077, 2091, 2092, 2093, 2094, 2095, 2098, 2099, 2101, 2102, 2104, 2107, 2108, 2109, 2111, 2112, 2113], "pass": [0, 1, 3, 5, 6, 7, 14, 20, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 38, 39, 40, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 57, 59, 60, 63, 64, 66, 74, 75, 81, 88, 152, 256, 417, 450, 490, 515, 517, 519, 546, 562, 683, 796, 797, 826, 829, 863, 865, 884, 893, 894, 895, 896, 897, 904, 907, 909, 910, 918, 960, 976, 978, 983, 984, 985, 1009, 1043, 1054, 1069, 1099, 1130, 1131, 1132, 1140, 1141, 1142, 1148, 1161, 1163, 1166, 1167, 1171, 1172, 1173, 1176, 1177, 1186, 1188, 1198, 1205, 1206, 1210, 1236, 1271, 1273, 1274, 1277, 1279, 1283, 1285, 1289, 1290, 1318, 1334, 1336, 1345, 1393, 1431, 1432, 1433, 1434, 1441, 1442, 1443, 1463, 1469, 1470, 1474, 1475, 1527, 1532, 1533, 1542, 1556, 1572, 1573, 1574, 1575, 1580, 1598, 1628, 1629, 1633, 1644, 1645, 1672, 1685, 1707, 1710, 1711, 1716, 1717, 1735, 1737, 1739, 1744, 1758, 1761, 1765, 1767, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1859, 1863, 1867, 1928, 2012, 2014, 2018, 2021, 2025, 2027, 2029, 2030, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2045, 2048, 2049, 2050, 2052, 2054, 2056, 2057, 2060, 2062, 2065, 2067, 2069, 2070, 2071, 2077, 2079, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2093, 2094, 2099, 2100, 2101, 2102, 2103, 2104, 2110, 2111, 2113, 2114], "e": [0, 1, 2, 3, 5, 7, 11, 12, 14, 15, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 41, 45, 46, 47, 48, 49, 51, 52, 53, 55, 58, 60, 62, 63, 64, 65, 66, 74, 75, 152, 260, 337, 379, 488, 582, 619, 738, 751, 760, 763, 796, 797, 818, 820, 844, 858, 862, 863, 865, 866, 884, 893, 897, 904, 906, 909, 910, 919, 923, 924, 943, 955, 957, 960, 978, 990, 1040, 1054, 1080, 1109, 1119, 1160, 1167, 1172, 1176, 1181, 1188, 1194, 1196, 1201, 1204, 1222, 1224, 1252, 1253, 1255, 1258, 1270, 1273, 1280, 1281, 1289, 1309, 1310, 1325, 1337, 1345, 1346, 1347, 1349, 1351, 1368, 1379, 1387, 1394, 1439, 1440, 1441, 1442, 1443, 1446, 1454, 1455, 1456, 1462, 1463, 1465, 1466, 1467, 1469, 1470, 1471, 1473, 1478, 1481, 1486, 1489, 1490, 1491, 1492, 1493, 1497, 1499, 1500, 1501, 1502, 1518, 1527, 1528, 1533, 1534, 1536, 1537, 1541, 1543, 1559, 1567, 1571, 1575, 1576, 1577, 1579, 1608, 1609, 1610, 1617, 1624, 1625, 1633, 1644, 1677, 1685, 1704, 1706, 1707, 1717, 1718, 1719, 1720, 1724, 1725, 1732, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1758, 1759, 1765, 1769, 1772, 1784, 1785, 1797, 1820, 1831, 1849, 1856, 1863, 1867, 1871, 1873, 1878, 1913, 1924, 1928, 1994, 2005, 2012, 2014, 2016, 2017, 2018, 2021, 2024, 2025, 2027, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2045, 2046, 2049, 2050, 2051, 2052, 2055, 2056, 2057, 2060, 2062, 2065, 2067, 2069, 2070, 2071, 2072, 2077, 2078, 2079, 2082, 2083, 2085, 2086, 2087, 2089, 2098, 2100, 2101, 2103, 2104, 2111, 2113, 2114, 2118], "network": [0, 1, 7, 8, 15, 32, 35, 47, 845, 1054, 1273, 1289, 1290, 1435, 1441, 1442, 1443, 1446, 1457, 1458, 1459, 1463, 1464, 1465, 1466, 1467, 1468, 1471, 1480, 1492, 1497, 1527, 1534, 1539, 1540, 1546, 1555, 1557, 1567, 1571, 1573, 1575, 1598, 1632, 1633, 1688, 1707, 1732, 
1735, 1761, 1766, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 1803, 1809, 2014, 2016, 2017, 2041, 2042, 2049, 2060, 2061, 2064, 2067, 2069, 2072, 2077, 2078, 2079, 2100], "includ": [0, 1, 2, 3, 4, 5, 7, 9, 14, 15, 23, 24, 28, 30, 33, 44, 47, 48, 52, 53, 55, 64, 83, 85, 323, 490, 519, 585, 771, 772, 863, 865, 978, 1065, 1137, 1139, 1235, 1271, 1273, 1274, 1303, 1382, 1384, 1436, 1437, 1438, 1444, 1446, 1463, 1472, 1480, 1514, 1523, 1524, 1525, 1527, 1533, 1571, 1572, 1573, 1574, 1575, 1578, 1588, 1600, 1601, 1602, 1617, 1630, 1650, 1717, 1724, 1725, 1759, 1760, 1762, 1891, 1953, 1954, 1955, 1956, 2012, 2014, 2016, 2017, 2021, 2024, 2027, 2030, 2033, 2043, 2045, 2046, 2051, 2052, 2053, 2056, 2057, 2062, 2064, 2065, 2067, 2069, 2071, 2072, 2077, 2079, 2087, 2091, 2094, 2095, 2097, 2099, 2102, 2103, 2104, 2105, 2109, 2110, 2111, 2113, 2114, 2115], "loss": [0, 1, 24, 28, 29, 32, 33, 34, 35, 55, 1168, 1270, 1309, 1310, 1319, 1337, 1431, 1439, 1440, 1446, 1460, 1462, 1469, 1480, 1486, 1487, 1492, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1577, 1605, 1606, 1616, 1617, 1630, 1642, 1645, 1669, 1677, 1689, 1701, 1702, 1717, 1759, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 1802, 1871, 2012, 2013, 2030, 2035, 2043, 2046, 2048, 2051, 2053, 2054, 2057, 2069, 2072, 2073, 2077, 2078, 2087], "comput": [0, 3, 5, 7, 8, 11, 14, 17, 23, 24, 28, 30, 33, 34, 35, 37, 40, 52, 53, 55, 56, 57, 59, 152, 292, 488, 489, 497, 683, 684, 686, 700, 701, 763, 771, 772, 803, 805, 822, 823, 824, 825, 827, 828, 893, 897, 899, 900, 902, 904, 905, 906, 909, 910, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 923, 924, 928, 929, 948, 949, 950, 951, 952, 953, 964, 966, 967, 968, 969, 975, 992, 994, 1008, 1051, 1054, 1086, 1092, 1101, 1103, 1106, 1109, 1113, 1114, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1152, 1154, 1155, 1156, 1158, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1188, 1215, 1216, 1217, 1227, 1230, 1233, 1234, 1235, 1236, 1250, 1273, 1276, 1285, 1293, 1294, 1296, 1298, 1302, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1339, 1340, 1342, 1343, 1344, 1346, 1354, 1355, 1356, 1357, 1358, 1360, 1361, 1362, 1363, 1372, 1373, 1374, 1377, 1418, 1420, 1424, 1431, 1435, 1436, 1437, 1438, 1441, 1442, 1443, 1457, 1458, 1459, 1461, 1462, 1464, 1469, 1470, 1478, 1481, 1489, 1490, 1491, 1492, 1494, 1495, 1496, 1497, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1517, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1533, 1534, 1536, 1541, 1542, 1543, 1561, 1563, 1567, 1576, 1577, 1580, 1600, 1601, 1602, 1615, 1616, 1624, 1625, 1632, 1633, 1635, 1642, 1644, 1645, 1652, 1658, 1659, 1660, 1661, 1662, 1663, 1669, 1674, 1677, 1685, 1689, 1691, 1692, 1701, 1702, 1707, 1710, 1716, 1717, 1718, 1722, 1724, 1725, 1726, 1728, 1731, 1733, 1735, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1751, 1752, 1759, 1765, 1769, 1770, 1772, 1782, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1815, 1821, 1827, 1828, 1832, 1833, 1847, 1848, 1871, 1878, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1901, 1904, 1905, 1906, 1907, 1924, 1928, 1929, 1945, 1950, 1974, 1977, 2013, 2014, 2017, 2021, 2024, 
2025, 2030, 2034, 2035, 2037, 2042, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2057, 2058, 2064, 2065, 2067, 2069, 2072, 2075, 2077, 2082, 2083, 2088, 2092, 2095, 2097, 2098, 2100, 2101, 2102, 2103, 2106, 2112, 2115, 2116, 2117], "backward": [0, 1, 5, 9, 28, 29, 30, 32, 33, 34, 35, 48, 52, 55, 59, 60, 64, 66, 292, 337, 489, 490, 498, 505, 506, 515, 517, 519, 583, 683, 689, 692, 763, 818, 819, 820, 894, 902, 903, 904, 905, 906, 907, 908, 909, 910, 913, 915, 918, 921, 923, 928, 929, 939, 942, 944, 956, 981, 1054, 1113, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1166, 1167, 1186, 1273, 1346, 1354, 1368, 1378, 1439, 1440, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1469, 1478, 1479, 1480, 1493, 1497, 1498, 1514, 1518, 1519, 1527, 1533, 1534, 1541, 1543, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1605, 1606, 1616, 1617, 1633, 1644, 1669, 1672, 1708, 1712, 1713, 1717, 1737, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1802, 1815, 1827, 1901, 1905, 1909, 1928, 1965, 2012, 2013, 2015, 2017, 2021, 2023, 2029, 2035, 2036, 2041, 2042, 2048, 2049, 2050, 2051, 2052, 2053, 2056, 2057, 2059, 2060, 2065, 2069, 2070, 2071, 2072, 2077, 2082, 2083, 2088, 2094, 2095, 2098, 2099, 2104, 2107, 2109, 2111, 2112, 2113], "under": [0, 1, 3, 4, 5, 9, 23, 28, 33, 34, 47, 52, 55, 56, 58, 60, 63, 83, 919, 923, 935, 961, 1187, 1273, 1446, 1527, 1598, 1624, 1625, 1731, 1734, 1737, 1779, 1848, 2021, 2026, 2033, 2042, 2043, 2046, 2048, 2050, 2052, 2054, 2059, 2065, 2070, 2072, 2075, 2076, 2079, 2083, 2087, 2091, 2093, 2100, 2102, 2103, 2108, 2111, 2113], "recommend": [0, 1, 5, 19, 23, 24, 28, 30, 34, 35, 37, 47, 48, 55, 59, 63, 64, 66, 86, 87, 89, 450, 897, 986, 991, 992, 1130, 1131, 1132, 1140, 1141, 1142, 1260, 1276, 1319, 1346, 1393, 1431, 1463, 1645, 1717, 1834, 1961, 1966, 2012, 2014, 2021, 2028, 2033, 2035, 2041, 2042, 2043, 2045, 2046, 2049, 2050, 2057, 2059, 2060, 2062, 2065, 2067, 2070, 2072, 2088, 2105], "correspond": [0, 1, 7, 18, 20, 23, 24, 28, 30, 32, 34, 35, 47, 52, 55, 62, 64, 84, 417, 475, 476, 515, 517, 519, 546, 547, 683, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 738, 796, 800, 839, 841, 861, 862, 863, 868, 884, 893, 895, 896, 897, 904, 906, 909, 910, 912, 914, 923, 943, 993, 1054, 1075, 1104, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1184, 1188, 1227, 1236, 1262, 1273, 1290, 1304, 1309, 1310, 1315, 1318, 1337, 1344, 1346, 1360, 1375, 1413, 1440, 1463, 1469, 1497, 1527, 1533, 1550, 1574, 1624, 1633, 1671, 1717, 1732, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1766, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1803, 1820, 1821, 1831, 1854, 1863, 1868, 1878, 1913, 1922, 1923, 1928, 1960, 1963, 1964, 1972, 1973, 2012, 2014, 2015, 2017, 2021, 2030, 2034, 2035, 2042, 2043, 2046, 2048, 2049, 2050, 2052, 2054, 2056, 2057, 2061, 2065, 2067, 2070, 2071, 2072, 2077, 2078, 2082, 2083, 2084, 2087, 2089, 2092, 2093, 2101, 2105, 2110, 2111, 2113], "creat": [0, 1, 3, 5, 6, 7, 9, 14, 19, 23, 24, 28, 30, 32, 33, 34, 35, 37, 39, 45, 47, 48, 50, 51, 52, 53, 55, 63, 64, 66, 86, 87, 89, 90, 152, 224, 256, 337, 488, 582, 583, 585, 586, 587, 589, 590, 683, 735, 741, 742, 743, 748, 749, 759, 767, 795, 796, 797, 817, 818, 819, 820, 822, 827, 858, 882, 883, 884, 897, 902, 918, 919, 920, 921, 943, 955, 957, 968, 969, 973, 
974, 976, 978, 993, 1019, 1051, 1052, 1061, 1097, 1112, 1161, 1162, 1163, 1164, 1188, 1189, 1190, 1191, 1194, 1270, 1273, 1276, 1286, 1292, 1297, 1303, 1304, 1310, 1312, 1331, 1344, 1360, 1375, 1413, 1439, 1460, 1469, 1470, 1473, 1487, 1493, 1518, 1519, 1527, 1530, 1531, 1532, 1556, 1559, 1560, 1567, 1576, 1577, 1579, 1587, 1589, 1590, 1717, 1737, 1758, 1765, 1770, 1778, 1779, 1798, 1866, 1867, 1896, 1908, 1913, 1943, 1950, 2013, 2017, 2018, 2027, 2033, 2042, 2043, 2046, 2048, 2049, 2050, 2052, 2054, 2057, 2058, 2059, 2062, 2067, 2069, 2070, 2071, 2072, 2077, 2078, 2079, 2082, 2084, 2086, 2087, 2088, 2089, 2091, 2092, 2093, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2112, 2113, 2115, 2118], "optim": [0, 1, 2, 7, 8, 14, 15, 24, 28, 29, 30, 35, 53, 55, 64, 822, 923, 932, 936, 976, 979, 1109, 1176, 1188, 1273, 1277, 1283, 1285, 1289, 1290, 1329, 1346, 1441, 1442, 1443, 1462, 1469, 1489, 1490, 1491, 1527, 1530, 1531, 1532, 1533, 1560, 1567, 1575, 1685, 1707, 1717, 1724, 1725, 1731, 1737, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1817, 2013, 2014, 2016, 2027, 2035, 2041, 2045, 2048, 2049, 2051, 2053, 2054, 2057, 2059, 2067, 2071, 2072, 2082, 2094, 2095, 2097, 2098, 2099, 2100, 2101, 2102, 2104, 2106, 2113, 2116, 2120], "default": [0, 2, 3, 5, 9, 11, 12, 14, 18, 19, 20, 27, 28, 29, 30, 32, 33, 34, 35, 37, 39, 44, 45, 46, 47, 48, 51, 52, 53, 55, 58, 59, 60, 61, 62, 64, 66, 70, 71, 74, 75, 76, 77, 79, 83, 86, 87, 89, 152, 157, 172, 174, 177, 180, 181, 182, 197, 208, 211, 242, 269, 292, 299, 327, 333, 395, 417, 447, 448, 449, 450, 451, 460, 498, 501, 502, 522, 527, 582, 583, 585, 625, 683, 697, 700, 715, 716, 717, 718, 719, 720, 723, 733, 734, 735, 736, 738, 758, 761, 763, 771, 772, 773, 775, 776, 777, 782, 788, 796, 801, 806, 811, 812, 817, 820, 822, 823, 824, 825, 828, 831, 832, 833, 834, 835, 836, 837, 838, 845, 846, 847, 848, 849, 850, 852, 853, 858, 859, 860, 862, 869, 883, 884, 897, 899, 900, 901, 904, 908, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 943, 945, 954, 960, 964, 967, 968, 969, 973, 976, 978, 998, 1008, 1011, 1014, 1016, 1020, 1023, 1024, 1025, 1028, 1029, 1036, 1037, 1040, 1051, 1053, 1054, 1057, 1059, 1060, 1061, 1063, 1065, 1066, 1067, 1072, 1073, 1074, 1075, 1079, 1080, 1083, 1085, 1086, 1087, 1090, 1091, 1092, 1097, 1098, 1099, 1100, 1101, 1104, 1109, 1110, 1111, 1112, 1122, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1161, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1177, 1178, 1181, 1182, 1217, 1219, 1220, 1226, 1227, 1231, 1232, 1235, 1236, 1249, 1262, 1264, 1270, 1273, 1274, 1277, 1285, 1289, 1290, 1293, 1294, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1360, 1363, 1365, 1371, 1373, 1374, 1375, 1379, 1386, 1387, 1394, 1402, 1403, 1417, 1418, 1420, 1421, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 
1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1541, 1542, 1543, 1545, 1546, 1547, 1548, 1555, 1559, 1560, 1564, 1565, 1567, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1579, 1580, 1595, 1596, 1597, 1598, 1600, 1601, 1602, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1630, 1632, 1633, 1635, 1637, 1644, 1645, 1652, 1658, 1659, 1660, 1669, 1670, 1672, 1677, 1685, 1691, 1692, 1700, 1704, 1710, 1716, 1717, 1718, 1719, 1720, 1722, 1723, 1726, 1727, 1729, 1731, 1732, 1733, 1734, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1759, 1760, 1762, 1764, 1765, 1766, 1767, 1769, 1771, 1772, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1815, 1817, 1825, 1827, 1828, 1832, 1833, 1834, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1846, 1849, 1855, 1856, 1859, 1863, 1867, 1868, 1869, 1870, 1871, 1875, 1877, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1921, 1922, 1923, 1924, 1927, 1928, 1929, 1943, 1944, 1950, 1952, 1954, 1956, 1961, 1962, 1965, 1966, 1971, 1972, 1973, 1977, 1982, 1983, 1986, 1991, 1992, 1994, 2005, 2009, 2010, 2011, 2012, 2013, 2014, 2017, 2021, 2023, 2026, 2027, 2028, 2033, 2036, 2041, 2042, 2045, 2048, 2049, 2050, 2051, 2052, 2053, 2055, 2057, 2060, 2061, 2062, 2063, 2065, 2067, 2069, 2070, 2071, 2072, 2076, 2077, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2091, 2092, 2094, 2099, 2100, 2103, 2104, 2107, 2109, 2110, 2112, 2113, 2115, 2118], "net": [0, 6, 15, 37, 58, 64, 1273, 1289, 1290, 1463, 1527, 1717, 1748, 1802, 2016, 2017, 2042, 2043, 2046, 2057, 2087], "sgd": [0, 23, 24, 32, 490, 932, 1469, 1707, 1717, 1803, 1809, 1811, 2042, 2046, 2048, 2053, 2057, 2059, 2069, 2077, 2078], "target": [0, 14, 32, 33, 34, 52, 53, 55, 59, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 82, 501, 738, 769, 770, 795, 800, 863, 865, 866, 961, 1168, 1186, 1273, 1283, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1439, 1440, 1446, 1460, 1462, 1474, 1475, 1480, 1486, 1487, 1492, 1493, 1518, 1519, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1541, 1559, 1560, 1571, 1580, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1605, 1606, 1614, 1616, 1617, 1628, 1629, 1630, 1641, 1642, 1645, 1646, 1657, 1665, 1666, 1667, 1668, 1669, 1677, 1685, 1689, 1690, 1717, 1797, 1965, 2015, 2017, 2042, 2043, 2046, 2053, 2059, 2065, 2067, 2069, 2070, 2072, 2073, 2077, 2087, 2092, 2095, 2099, 2100, 2102, 2112, 2113], "data": [0, 1, 2, 3, 7, 11, 12, 18, 21, 24, 28, 30, 32, 33, 35, 36, 37, 41, 44, 47, 50, 53, 55, 59, 62, 64, 66, 67, 71, 75, 141, 152, 197, 198, 328, 331, 335, 338, 343, 447, 450, 483, 495, 499, 501, 619, 741, 742, 743, 763, 775, 776, 777, 784, 795, 796, 797, 801, 822, 823, 824, 825, 826, 828, 830, 869, 883, 884, 894, 897, 909, 930, 945, 954, 960, 1012, 1090, 1091, 1110, 1111, 1112, 1122, 1123, 1124, 1127, 1129, 1130, 1132, 1140, 1141, 1142, 1145, 1148, 1149, 1150, 1151, 1160, 1161, 1163, 1164, 1165, 1167, 1176, 1186, 1194, 1198, 1202, 1204, 1231, 1232, 1234, 1252, 1255, 1281, 1285, 1289, 1293, 1344, 1345, 1346, 1360, 1365, 1373, 1375, 1389, 1390, 1418, 1420, 1421, 1444, 1446, 1454, 1455, 1456, 1463, 1478, 1481, 1489, 1490, 1491, 1497, 1499, 1514, 
1534, 1543, 1544, 1580, 1598, 1603, 1604, 1608, 1609, 1610, 1643, 1650, 1652, 1691, 1692, 1717, 1718, 1719, 1720, 1724, 1725, 1758, 1759, 1760, 1761, 1763, 1772, 1776, 1777, 1778, 1779, 1803, 1809, 1817, 1825, 1828, 1829, 1830, 1834, 1836, 1837, 1839, 1840, 1841, 1842, 1843, 1850, 1863, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1904, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1927, 1928, 1943, 1952, 1954, 1956, 1964, 1965, 1966, 2010, 2011, 2013, 2014, 2015, 2018, 2021, 2024, 2025, 2027, 2030, 2033, 2036, 2042, 2043, 2044, 2046, 2049, 2050, 2053, 2056, 2059, 2061, 2062, 2063, 2064, 2065, 2068, 2069, 2070, 2072, 2075, 2077, 2079, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2091, 2092, 2093, 2097, 2100, 2101, 2103, 2104, 2106, 2111, 2115], "zero_grad": [0, 1, 32, 1273, 1527, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1802, 2042, 2046, 2051, 2053, 2057, 2059, 2069], "output": [0, 1, 4, 5, 7, 12, 18, 23, 24, 28, 30, 32, 33, 34, 35, 37, 44, 45, 52, 59, 60, 61, 64, 66, 75, 82, 141, 315, 323, 447, 448, 449, 451, 515, 519, 568, 619, 683, 684, 686, 687, 688, 689, 690, 691, 692, 693, 694, 696, 698, 699, 700, 701, 702, 723, 731, 732, 737, 738, 741, 742, 743, 744, 745, 746, 747, 748, 749, 752, 753, 754, 755, 756, 757, 758, 759, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 775, 776, 777, 778, 780, 782, 783, 784, 785, 786, 788, 789, 790, 795, 796, 797, 799, 802, 805, 817, 820, 862, 865, 866, 867, 869, 878, 879, 882, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 904, 906, 907, 908, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 944, 946, 947, 948, 949, 950, 951, 952, 953, 956, 960, 963, 964, 965, 966, 967, 968, 969, 971, 974, 978, 990, 992, 993, 995, 996, 1008, 1021, 1023, 1024, 1025, 1051, 1052, 1054, 1064, 1065, 1088, 1089, 1090, 1091, 1093, 1096, 1097, 1099, 1101, 1104, 1106, 1108, 1109, 1110, 1111, 1112, 1114, 1119, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1152, 1153, 1154, 1155, 1156, 1157, 1159, 1164, 1165, 1167, 1168, 1169, 1171, 1172, 1173, 1174, 1176, 1177, 1178, 1186, 1188, 1197, 1214, 1215, 1216, 1217, 1227, 1230, 1233, 1234, 1235, 1236, 1238, 1239, 1240, 1248, 1250, 1267, 1268, 1270, 1273, 1277, 1285, 1289, 1290, 1294, 1295, 1296, 1297, 1298, 1299, 1302, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1355, 1356, 1357, 1358, 1360, 1361, 1362, 1363, 1364, 1365, 1367, 1368, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1396, 1412, 1413, 1415, 1417, 1418, 1419, 1420, 1421, 1423, 1424, 1425, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1539, 1540, 1541, 1542, 1543, 1545, 1546, 1547, 
1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1600, 1601, 1602, 1604, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1617, 1624, 1625, 1627, 1628, 1629, 1630, 1633, 1635, 1644, 1645, 1650, 1652, 1658, 1659, 1660, 1669, 1670, 1674, 1675, 1676, 1677, 1685, 1700, 1704, 1705, 1706, 1710, 1716, 1717, 1732, 1733, 1734, 1735, 1738, 1759, 1761, 1762, 1766, 1769, 1771, 1772, 1773, 1776, 1777, 1779, 1780, 1798, 1802, 1815, 1816, 1824, 1825, 1828, 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1846, 1847, 1848, 1849, 1851, 1852, 1856, 1858, 1863, 1871, 1875, 1878, 1880, 1892, 1893, 1895, 1900, 1905, 1906, 1908, 1909, 1917, 1918, 1920, 1921, 1922, 1923, 1924, 1925, 1927, 1928, 1940, 1941, 1942, 1947, 1950, 1952, 1953, 1955, 1958, 1960, 1961, 1962, 1963, 1965, 1971, 1972, 1973, 1974, 1977, 1979, 1980, 2010, 2011, 2012, 2014, 2015, 2017, 2021, 2023, 2025, 2027, 2034, 2035, 2036, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2057, 2060, 2061, 2065, 2067, 2069, 2070, 2071, 2072, 2074, 2078, 2082, 2083, 2085, 2086, 2087, 2089, 2092, 2093, 2095, 2098, 2099, 2100, 2102, 2103, 2104, 2106, 2107, 2112, 2113], "loss_fn": [0, 32, 33, 1534, 1797, 2042, 2046, 2048, 2059, 2069], "exit": [0, 1, 2, 4, 18, 28, 37, 47, 55, 63, 64, 899, 901, 1591, 1717, 2017, 2033, 2043, 2049, 2059, 2067, 2079, 2084, 2117], "befor": [0, 1, 3, 6, 7, 14, 18, 20, 23, 24, 28, 29, 30, 32, 33, 34, 35, 39, 44, 46, 47, 48, 50, 52, 53, 55, 64, 99, 152, 417, 488, 750, 763, 791, 792, 793, 897, 904, 905, 907, 909, 932, 974, 986, 1011, 1090, 1091, 1092, 1101, 1109, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1150, 1151, 1154, 1163, 1166, 1186, 1273, 1285, 1287, 1310, 1326, 1330, 1343, 1373, 1386, 1418, 1421, 1463, 1469, 1470, 1473, 1478, 1527, 1528, 1529, 1567, 1571, 1579, 1633, 1652, 1691, 1692, 1707, 1710, 1711, 1712, 1713, 1717, 1724, 1725, 1732, 1766, 1769, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1811, 1825, 1828, 1854, 1863, 1873, 1874, 1904, 1907, 1910, 1911, 1912, 1914, 1915, 1924, 1927, 1950, 1961, 1982, 2014, 2016, 2017, 2021, 2025, 2033, 2042, 2043, 2046, 2048, 2049, 2052, 2053, 2054, 2056, 2057, 2060, 2062, 2063, 2064, 2065, 2069, 2070, 2072, 2077, 2078, 2079, 2082, 2083, 2087, 2089, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2106, 2109, 2117], "step": [0, 1, 2, 4, 9, 11, 14, 19, 23, 24, 28, 29, 30, 32, 35, 48, 52, 55, 59, 64, 81, 540, 609, 689, 771, 772, 861, 869, 899, 902, 903, 932, 947, 971, 1233, 1321, 1344, 1345, 1346, 1360, 1363, 1375, 1474, 1475, 1497, 1601, 1602, 1628, 1629, 1717, 1758, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1834, 1843, 1871, 1896, 1968, 2013, 2014, 2015, 2021, 2035, 2042, 2043, 2046, 2048, 2049, 2051, 2053, 2057, 2059, 2061, 2063, 2065, 2067, 2070, 2071, 2072, 2077, 2078, 2082, 2087, 2095, 2104, 2107, 2108, 2111, 2113], "usag": [0, 1, 4, 7, 9, 12, 15, 23, 28, 30, 32, 33, 34, 35, 36, 37, 41, 44, 45, 47, 50, 55, 64, 66, 67, 751, 760, 795, 796, 797, 813, 814, 815, 816, 818, 819, 820, 858, 862, 894, 909, 976, 978, 1057, 1078, 1082, 1109, 1160, 1166, 1170, 1171, 1172, 1200, 1287, 1346, 1409, 1717, 2007, 2013, 2014, 
2017, 2021, 2029, 2030, 2035, 2042, 2043, 2049, 2051, 2052, 2053, 2065, 2067, 2071, 2079, 2087, 2088, 2091, 2092, 2093, 2100, 2103, 2104, 2105, 2113], "along": [0, 14, 18, 23, 28, 35, 40, 47, 52, 66, 72, 315, 317, 319, 323, 495, 515, 517, 519, 689, 700, 822, 879, 880, 963, 970, 997, 1008, 1023, 1025, 1092, 1100, 1101, 1108, 1109, 1125, 1130, 1133, 1137, 1140, 1143, 1149, 1178, 1214, 1239, 1248, 1250, 1295, 1306, 1342, 1396, 1418, 1422, 1423, 1432, 1433, 1434, 1440, 1461, 1474, 1475, 1517, 1520, 1521, 1522, 1561, 1563, 1598, 1606, 1615, 1628, 1629, 1632, 1635, 1652, 1658, 1659, 1660, 1670, 1691, 1692, 1743, 1745, 1752, 1753, 1762, 1771, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 1809, 1828, 1848, 1849, 1854, 1865, 1900, 1904, 1907, 1908, 1916, 1921, 1940, 1944, 1947, 1950, 1959, 1974, 1977, 1979, 2012, 2016, 2021, 2036, 2044, 2049, 2051, 2056, 2057, 2065, 2070, 2072, 2083], "more": [0, 1, 2, 3, 4, 5, 8, 9, 11, 12, 14, 15, 17, 19, 20, 22, 23, 24, 25, 28, 29, 30, 33, 34, 35, 36, 37, 39, 43, 46, 47, 48, 50, 53, 55, 58, 59, 60, 61, 62, 64, 83, 85, 87, 88, 152, 256, 257, 315, 323, 488, 489, 490, 495, 497, 500, 515, 517, 519, 526, 547, 620, 683, 738, 774, 783, 787, 796, 858, 863, 864, 865, 877, 890, 891, 892, 894, 896, 897, 898, 904, 907, 909, 910, 912, 914, 918, 919, 920, 923, 924, 928, 929, 930, 931, 932, 947, 955, 958, 960, 966, 967, 968, 969, 976, 981, 990, 1011, 1012, 1014, 1016, 1017, 1033, 1052, 1057, 1059, 1060, 1061, 1063, 1064, 1065, 1066, 1073, 1074, 1075, 1079, 1087, 1092, 1098, 1107, 1109, 1113, 1125, 1126, 1128, 1130, 1149, 1150, 1151, 1152, 1163, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1182, 1200, 1201, 1202, 1221, 1223, 1227, 1236, 1237, 1254, 1258, 1260, 1261, 1273, 1287, 1290, 1303, 1304, 1305, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1341, 1343, 1346, 1349, 1352, 1354, 1364, 1365, 1389, 1390, 1393, 1413, 1419, 1431, 1435, 1440, 1445, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1469, 1470, 1471, 1487, 1497, 1523, 1524, 1525, 1527, 1533, 1539, 1540, 1541, 1543, 1555, 1567, 1575, 1586, 1598, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1622, 1624, 1625, 1633, 1636, 1637, 1638, 1639, 1644, 1648, 1652, 1653, 1664, 1671, 1678, 1679, 1680, 1683, 1685, 1686, 1687, 1688, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1703, 1704, 1705, 1706, 1707, 1712, 1713, 1717, 1718, 1724, 1725, 1731, 1734, 1735, 1770, 1771, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1815, 1826, 1827, 1853, 1859, 1863, 1871, 1903, 1904, 1928, 1950, 1952, 1961, 1965, 1968, 1977, 1978, 2012, 2013, 2014, 2016, 2017, 2021, 2023, 2024, 2026, 2028, 2029, 2030, 2031, 2033, 2034, 2035, 2036, 2037, 2041, 2043, 2045, 2046, 2048, 2049, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2060, 2062, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2078, 2079, 2081, 2082, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2095, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2108, 2111, 2113, 2114, 2115, 2116, 2117, 2118], "complex": [0, 1, 3, 7, 8, 23, 24, 27, 28, 33, 34, 37, 64, 313, 331, 485, 501, 688, 695, 923, 924, 968, 969, 991, 992, 1104, 1110, 1111, 1112, 1130, 1152, 1157, 1186, 1227, 1244, 1252, 1263, 1265, 1266, 1269, 1270, 1273, 1294, 1303, 1304, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1316, 1323, 1326, 1328, 1330, 1331, 1332, 1333, 1337, 1338, 1342, 1343, 1344, 1346, 1354, 1360, 1372, 
1373, 1377, 1412, 1454, 1455, 1456, 1493, 1527, 1608, 1609, 1610, 1731, 1772, 1783, 1821, 1834, 1840, 1841, 1847, 1868, 1878, 1924, 1925, 1928, 1965, 1974, 1975, 1976, 2012, 2013, 2015, 2017, 2018, 2028, 2059, 2065, 2068, 2084, 2085, 2088, 2089, 2101, 2104, 2105, 2106], "scenario": [0, 23, 28, 33, 47, 1440, 1778, 1966, 2046, 2054, 2059, 2065, 2067, 2071, 2077, 2104, 2105], "g": [0, 1, 2, 3, 5, 7, 11, 12, 14, 15, 23, 24, 28, 30, 32, 33, 35, 36, 37, 39, 40, 41, 45, 46, 47, 48, 49, 51, 52, 55, 58, 60, 62, 63, 64, 65, 66, 74, 75, 488, 582, 619, 751, 760, 763, 796, 797, 818, 820, 844, 858, 863, 865, 866, 893, 904, 906, 909, 910, 919, 923, 924, 957, 978, 990, 1054, 1109, 1160, 1167, 1171, 1172, 1194, 1196, 1201, 1204, 1222, 1224, 1227, 1270, 1273, 1280, 1281, 1289, 1345, 1446, 1447, 1463, 1465, 1466, 1467, 1471, 1473, 1478, 1486, 1492, 1497, 1498, 1527, 1528, 1537, 1543, 1559, 1617, 1633, 1717, 1718, 1719, 1720, 1724, 1725, 1733, 1769, 1772, 1782, 1783, 1784, 1785, 1788, 1794, 1795, 1796, 1797, 1849, 1856, 1867, 1871, 1873, 1891, 1913, 1924, 2012, 2014, 2016, 2017, 2018, 2021, 2024, 2025, 2033, 2042, 2043, 2046, 2049, 2050, 2051, 2052, 2054, 2056, 2057, 2060, 2061, 2065, 2067, 2069, 2070, 2071, 2072, 2077, 2082, 2085, 2086, 2087, 2099, 2100, 2103, 2104, 2111, 2113, 2114], "penalti": [0, 784, 1065, 1781, 1782, 1783, 1784, 1786, 1788, 1794, 1795, 1797, 2073, 2109], "multipl": [0, 1, 2, 3, 5, 17, 19, 23, 24, 28, 29, 30, 33, 35, 36, 40, 47, 48, 50, 52, 55, 60, 63, 64, 81, 193, 210, 317, 515, 689, 691, 692, 698, 699, 763, 845, 878, 879, 882, 918, 930, 931, 932, 964, 966, 976, 994, 998, 1023, 1024, 1025, 1051, 1107, 1109, 1112, 1163, 1166, 1171, 1172, 1173, 1174, 1177, 1178, 1217, 1223, 1237, 1238, 1287, 1290, 1295, 1296, 1311, 1312, 1313, 1329, 1334, 1339, 1340, 1368, 1371, 1376, 1378, 1439, 1440, 1457, 1458, 1459, 1460, 1462, 1470, 1473, 1478, 1486, 1492, 1493, 1515, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1579, 1605, 1606, 1616, 1625, 1645, 1669, 1677, 1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1796, 1800, 1804, 1805, 1806, 1807, 1808, 1813, 1815, 1871, 1898, 1905, 1906, 1910, 1911, 1912, 1914, 1915, 1944, 1945, 1952, 1960, 1965, 1977, 1978, 2012, 2016, 2017, 2021, 2033, 2034, 2035, 2036, 2043, 2045, 2048, 2050, 2052, 2054, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2067, 2069, 2070, 2071, 2072, 2073, 2077, 2078, 2079, 2082, 2084, 2093, 2094, 2097, 2098, 2101, 2103, 2104, 2112], "custom": [0, 3, 5, 14, 15, 23, 28, 29, 30, 31, 32, 33, 34, 37, 40, 43, 44, 46, 52, 53, 55, 65, 66, 68, 82, 84, 85, 86, 87, 89, 501, 796, 800, 817, 818, 819, 820, 841, 843, 863, 864, 865, 866, 894, 904, 907, 909, 930, 976, 978, 1019, 1084, 1181, 1273, 1527, 1571, 1575, 1576, 1577, 1702, 1739, 1748, 1765, 1803, 1966, 1968, 2013, 2018, 2023, 2033, 2034, 2043, 2049, 2056, 2061, 2062, 2063, 2065, 2075, 2076, 2092, 2094, 2101, 2103, 2105, 2111], "autograd": [0, 4, 5, 7, 8, 12, 28, 32, 35, 52, 55, 56, 59, 66, 141, 337, 447, 448, 449, 450, 451, 488, 490, 498, 683, 692, 869, 883, 884, 898, 899, 904, 909, 910, 919, 920, 921, 931, 932, 939, 940, 941, 942, 943, 945, 954, 973, 990, 1054, 1110, 1111, 1112, 1122, 1127, 1145, 1163, 1164, 1165, 1166, 1167, 1176, 1178, 1231, 1232, 1273, 1293, 1313, 1344, 1360, 1368, 1378, 1480, 1527, 1533, 1575, 1635, 1650, 1717, 1776, 1777, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1874, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1902, 1910, 1911, 1912, 1913, 1914, 1915, 1943, 1977, 
2010, 2011, 2013, 2015, 2019, 2021, 2024, 2026, 2036, 2041, 2046, 2048, 2051, 2052, 2054, 2056, 2057, 2071, 2088, 2089, 2094, 2104, 2105, 2109, 2111, 2113, 2114], "function": [0, 2, 3, 5, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 20, 22, 23, 29, 30, 32, 33, 36, 37, 38, 40, 44, 45, 47, 48, 52, 53, 55, 58, 60, 62, 63, 66, 69, 74, 75, 78, 80, 82, 83, 85, 86, 89, 90, 121, 152, 197, 260, 262, 291, 300, 323, 488, 489, 490, 495, 498, 519, 543, 558, 683, 692, 696, 697, 701, 702, 751, 760, 761, 763, 767, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 793, 794, 795, 796, 800, 801, 803, 817, 818, 819, 820, 827, 829, 841, 845, 858, 861, 863, 865, 866, 867, 881, 882, 897, 899, 900, 901, 902, 903, 904, 909, 910, 918, 919, 920, 921, 923, 924, 928, 929, 930, 932, 936, 939, 942, 945, 954, 956, 964, 966, 970, 973, 976, 978, 979, 980, 982, 983, 984, 986, 990, 991, 992, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1009, 1012, 1014, 1016, 1019, 1036, 1037, 1040, 1045, 1046, 1051, 1052, 1054, 1055, 1056, 1057, 1059, 1064, 1065, 1073, 1074, 1076, 1077, 1078, 1082, 1092, 1097, 1099, 1100, 1107, 1109, 1113, 1125, 1126, 1128, 1148, 1152, 1155, 1156, 1157, 1160, 1163, 1166, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1186, 1188, 1197, 1212, 1217, 1218, 1227, 1231, 1232, 1233, 1235, 1237, 1245, 1246, 1247, 1260, 1270, 1271, 1272, 1273, 1274, 1276, 1278, 1279, 1284, 1285, 1286, 1287, 1289, 1291, 1293, 1294, 1295, 1297, 1302, 1303, 1304, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1321, 1322, 1323, 1325, 1328, 1329, 1330, 1331, 1332, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1345, 1346, 1349, 1351, 1360, 1363, 1364, 1368, 1371, 1374, 1376, 1378, 1379, 1380, 1409, 1415, 1418, 1419, 1435, 1439, 1440, 1445, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1460, 1464, 1468, 1471, 1476, 1477, 1478, 1479, 1480, 1482, 1483, 1484, 1485, 1486, 1492, 1494, 1495, 1496, 1497, 1498, 1513, 1516, 1517, 1519, 1526, 1527, 1532, 1533, 1535, 1543, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1561, 1563, 1564, 1565, 1566, 1568, 1569, 1571, 1573, 1575, 1576, 1577, 1579, 1581, 1583, 1584, 1585, 1587, 1589, 1590, 1591, 1708, 1712, 1713, 1717, 1724, 1725, 1731, 1732, 1737, 1748, 1758, 1759, 1760, 1762, 1765, 1766, 1767, 1769, 1770, 1772, 1773, 1777, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1799, 1802, 1803, 1805, 1808, 1809, 1810, 1815, 1816, 1817, 1821, 1827, 1834, 1838, 1843, 1856, 1865, 1866, 1867, 1869, 1870, 1876, 1878, 1886, 1890, 1891, 1896, 1899, 1901, 1902, 1903, 1904, 1905, 1907, 1913, 1920, 1924, 1929, 1930, 1931, 1940, 1944, 1946, 1950, 1960, 1961, 1962, 1965, 1966, 1970, 1974, 1975, 1976, 1977, 1978, 1991, 1992, 1994, 1997, 2000, 2001, 2002, 2003, 2004, 2007, 2011, 2012, 2013, 2018, 2021, 2023, 2025, 2028, 2029, 2030, 2033, 2035, 2038, 2041, 2044, 2046, 2048, 2049, 2051, 2056, 2057, 2059, 2060, 2065, 2069, 2070, 2071, 2072, 2074, 2076, 2077, 2078, 2079, 2081, 2084, 2085, 2087, 2088, 2089, 2091, 2092, 2093, 2094, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2108, 2109, 2110, 2111, 2112, 2113, 2116], "also": [0, 1, 2, 3, 5, 7, 8, 9, 11, 12, 14, 15, 18, 19, 23, 24, 28, 30, 32, 33, 34, 35, 36, 37, 44, 47, 52, 53, 55, 59, 60, 61, 63, 64, 66, 74, 75, 89, 156, 220, 223, 224, 256, 325, 515, 517, 519, 525, 545, 616, 744, 745, 746, 763, 803, 817, 865, 884, 893, 894, 900, 901, 904, 907, 909, 910, 919, 932, 945, 954, 968, 969, 976, 982, 1008, 1051, 1054, 1065, 1090, 
1091, 1109, 1113, 1129, 1160, 1167, 1173, 1177, 1178, 1188, 1198, 1205, 1206, 1214, 1217, 1226, 1231, 1232, 1236, 1273, 1276, 1277, 1280, 1283, 1287, 1289, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1341, 1342, 1375, 1381, 1396, 1419, 1431, 1439, 1441, 1442, 1443, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1470, 1473, 1478, 1492, 1497, 1527, 1534, 1543, 1557, 1559, 1567, 1576, 1577, 1579, 1611, 1612, 1613, 1633, 1671, 1688, 1707, 1717, 1735, 1737, 1747, 1749, 1751, 1752, 1753, 1754, 1770, 1784, 1785, 1799, 1805, 1809, 1812, 1815, 1840, 1847, 1859, 1869, 1883, 1884, 1901, 1905, 1919, 1928, 1938, 1940, 1943, 1949, 1950, 1961, 1962, 1977, 1980, 2012, 2013, 2014, 2016, 2017, 2021, 2023, 2024, 2027, 2029, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2076, 2077, 2078, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2093, 2094, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116], "autocastmodel": 0, "nn": [0, 3, 5, 12, 15, 23, 24, 28, 29, 30, 32, 33, 34, 48, 52, 53, 55, 56, 62, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 300, 417, 543, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 795, 796, 817, 844, 858, 863, 864, 865, 866, 868, 976, 1051, 1054, 1109, 1166, 1175, 1176, 1271, 1273, 1274, 1276, 1277, 1278, 1279, 1280, 1283, 1284, 1285, 1289, 1290, 1291, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1586, 1588, 1707, 1716, 1717, 1718, 1719, 1720, 1734, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1758, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1899, 1924, 1965, 1966, 2012, 2013, 2014, 2015, 2019, 2025, 2029, 2035, 2036, 2042, 2048, 2051, 2052, 2053, 2057, 2059, 2060, 2061, 2064, 2065, 2067, 2069, 2070, 2072, 2074, 2077, 2087, 2089, 2093, 2095, 2099, 2103, 2104, 2111, 2112, 2113], "modul": [0, 1, 3, 5, 6, 8, 11, 12, 14, 18, 19, 20, 24, 28, 30, 32, 33, 34, 38, 39, 41, 44, 47, 48, 52, 53, 55, 56, 58, 62, 66, 67, 68, 69, 
70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 82, 417, 489, 490, 683, 689, 692, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 741, 742, 743, 748, 749, 750, 759, 764, 765, 766, 767, 768, 791, 792, 793, 794, 795, 796, 800, 802, 803, 805, 813, 814, 815, 816, 817, 818, 819, 820, 822, 823, 824, 825, 826, 827, 828, 829, 830, 841, 842, 843, 844, 858, 861, 862, 863, 864, 865, 866, 868, 919, 928, 929, 932, 944, 956, 976, 1054, 1166, 1175, 1176, 1178, 1186, 1222, 1271, 1273, 1274, 1276, 1277, 1278, 1279, 1280, 1281, 1283, 1284, 1285, 1289, 1290, 1291, 1345, 1368, 1378, 1431, 1435, 1441, 1442, 1443, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1479, 1481, 1489, 1490, 1491, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1528, 1529, 1537, 1538, 1542, 1544, 1556, 1561, 1567, 1571, 1572, 1574, 1575, 1624, 1625, 1685, 1700, 1707, 1716, 1717, 1718, 1724, 1725, 1726, 1727, 1728, 1729, 1731, 1732, 1733, 1734, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1758, 1761, 1765, 1766, 1767, 1769, 1798, 1859, 1903, 1966, 1968, 1969, 1977, 2012, 2013, 2018, 2021, 2023, 2025, 2027, 2029, 2030, 2033, 2035, 2036, 2038, 2040, 2041, 2045, 2046, 2048, 2051, 2052, 2056, 2058, 2059, 2061, 2063, 2064, 2065, 2067, 2069, 2071, 2074, 2075, 2077, 2080, 2082, 2083, 2087, 2091, 2092, 2093, 2095, 2099, 2101, 2102, 2103, 2104, 2107, 2111, 2112, 2113, 2114], "def": [0, 1, 12, 23, 24, 28, 29, 30, 33, 35, 37, 38, 39, 40, 44, 45, 48, 50, 51, 52, 53, 55, 57, 59, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 400, 795, 796, 865, 866, 894, 904, 905, 906, 907, 908, 909, 910, 912, 913, 914, 915, 916, 917, 919, 976, 978, 981, 983, 984, 990, 1113, 1166, 1167, 1168, 1170, 1171, 1172, 1174, 1176, 1177, 1178, 1271, 1273, 1274, 1276, 1277, 1278, 1279, 1280, 1284, 1285, 1288, 1289, 1290, 1291, 1527, 1528, 1529, 1537, 1538, 1543, 1577, 1685, 1707, 1717, 1737, 1770, 1903, 1977, 2012, 2014, 2016, 2017, 2021, 2029, 2035, 2042, 2043, 2045, 2048, 2049, 2050, 2051, 2057, 2059, 2061, 2062, 2063, 2065, 2066, 2067, 2069, 2070, 2071, 2072, 2077, 2078, 2079, 2095, 2098, 2099, 2100, 2101, 2102, 2104, 2106, 2111, 2112, 2113, 2114], "self": [0, 1, 9, 12, 18, 23, 24, 28, 30, 33, 37, 39, 44, 50, 52, 53, 55, 60, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 99, 141, 152, 155, 156, 157, 172, 174, 177, 180, 181, 182, 192, 193, 197, 198, 210, 218, 220, 234, 235, 242, 256, 257, 260, 261, 269, 288, 292, 299, 313, 315, 317, 319, 321, 323, 325, 327, 328, 330, 331, 332, 333, 335, 343, 379, 395, 400, 402, 403, 404, 417, 456, 473, 483, 485, 499, 500, 501, 502, 515, 517, 519, 522, 525, 527, 539, 545, 546, 547, 548, 560, 562, 582, 583, 585, 586, 587, 589, 590, 606, 609, 610, 616, 619, 620, 622, 626, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 865, 866, 937, 939, 983, 984, 1166, 1167, 1176, 1186, 1188, 1244, 1271, 1272, 1273, 1276, 1277, 1278, 1279, 1280, 1284, 1285, 1289, 1290, 1291, 1431, 1435, 1471, 1526, 1527, 1528, 1529, 1533, 1537, 1538, 1555, 1557, 1567, 1573, 1575, 1664, 1685, 1688, 1707, 1734, 1735, 1737, 
[2.4/searchindex.js hunk: regenerated Sphinx search index (a single minified mapping of stemmed search terms to document indices); machine-generated content omitted]
1222, 1240, 1270, 1273, 1281, 1289, 1290, 1295, 1299, 1309, 1311, 1312, 1321, 1323, 1329, 1337, 1346, 1353, 1355, 1356, 1357, 1358, 1361, 1371, 1373, 1375, 1376, 1379, 1400, 1402, 1403, 1410, 1421, 1431, 1439, 1440, 1446, 1454, 1455, 1456, 1458, 1459, 1460, 1462, 1463, 1469, 1470, 1474, 1475, 1478, 1486, 1497, 1519, 1523, 1524, 1525, 1527, 1529, 1531, 1532, 1534, 1536, 1538, 1543, 1556, 1562, 1576, 1577, 1580, 1581, 1582, 1598, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1617, 1624, 1625, 1628, 1629, 1633, 1644, 1669, 1685, 1700, 1701, 1704, 1705, 1706, 1710, 1711, 1732, 1733, 1758, 1765, 1766, 1769, 1772, 1773, 1779, 1805, 1808, 1810, 1812, 1815, 1820, 1825, 1828, 1829, 1830, 1843, 1849, 1854, 1865, 1866, 1871, 1896, 1900, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1919, 1921, 1924, 1927, 1929, 1939, 1940, 1945, 1947, 1949, 1959, 1961, 1963, 1965, 1969, 1982, 1983, 1984, 1986, 1989, 1991, 1992, 2008, 2009, 2012, 2013, 2014, 2017, 2021, 2023, 2024, 2028, 2033, 2036, 2037, 2041, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2052, 2054, 2056, 2057, 2061, 2062, 2065, 2067, 2069, 2071, 2072, 2077, 2078, 2081, 2082, 2083, 2084, 2087, 2089, 2093, 2097, 2101, 2103, 2104, 2112, 2113, 2114], "get_autocast_dtyp": [0, 2015, 2068], "weight": [0, 23, 28, 30, 33, 34, 35, 52, 53, 61, 64, 66, 158, 303, 363, 364, 498, 715, 716, 717, 718, 719, 720, 721, 722, 723, 733, 734, 735, 736, 737, 738, 741, 742, 743, 744, 745, 746, 748, 749, 752, 754, 755, 756, 757, 759, 763, 764, 766, 767, 768, 775, 776, 777, 784, 795, 796, 797, 811, 812, 836, 838, 845, 849, 853, 854, 855, 856, 857, 862, 863, 865, 866, 947, 998, 1166, 1168, 1176, 1178, 1235, 1236, 1273, 1277, 1283, 1285, 1290, 1299, 1413, 1439, 1440, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1469, 1470, 1478, 1479, 1481, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1527, 1531, 1532, 1533, 1534, 1535, 1542, 1543, 1545, 1557, 1571, 1603, 1604, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1624, 1625, 1634, 1643, 1647, 1650, 1666, 1668, 1669, 1678, 1682, 1685, 1688, 1707, 1716, 1717, 1724, 1725, 1726, 1727, 1729, 1731, 1732, 1733, 1735, 1737, 1748, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1765, 1766, 1767, 1769, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1831, 1965, 1977, 2012, 2013, 2015, 2016, 2017, 2021, 2027, 2028, 2035, 2036, 2041, 2049, 2051, 2052, 2053, 2057, 2062, 2065, 2072, 2073, 2074, 2075, 2082, 2087, 2092, 2093, 2104, 2108], "cach": [0, 2, 3, 20, 35, 64, 488, 976, 986, 1033, 1034, 1051, 1054, 1059, 1061, 1063, 1065, 1074, 1079, 1189, 1190, 1382, 1384, 1385, 1718, 1737, 1769, 1990, 2033, 2051, 2098, 2101, 2102, 2104, 2107, 2111, 2115], "insid": [0, 1, 7, 23, 47, 53, 55, 60, 63, 64, 909, 910, 978, 1017, 1054, 1168, 1172, 1177, 1188, 1285, 1717, 2014, 2016, 2017, 2021, 2027, 2029, 2042, 2046, 2049, 2050, 2056, 2067, 2082, 2092, 2101, 2103, 2113], "custom_fwd": [0, 2042], "fwd": [0, 2107, 2111], "cast_input": [0, 2042], "helper": [0, 3, 28, 33, 38, 59, 64, 2012, 2013, 2016, 2046, 2048, 2067, 2070, 2077, 2112, 2114], "subclass": [0, 1, 14, 23, 30, 33, 35, 40, 60, 64, 141, 539, 893, 894, 895, 909, 910, 1193, 1271, 1274, 1284, 1527, 1718, 1739, 1744, 1765, 2014, 2017, 2018, 2024, 2039, 2042, 2057, 2067, 2077, 2081, 2082, 2101, 2102, 2104, 2114], "page": [0, 6, 7, 9, 23, 29, 48, 51, 978, 1346, 1571, 1573, 1575, 2013, 2021, 2046, 2048, 2049, 2057, 2068, 2077, 2110], "incom": [0, 28, 50, 784, 822, 823, 824, 825, 828, 1444, 1514, 1604, 1650, 
2033, 2043], "non": [0, 1, 2, 3, 5, 14, 20, 22, 24, 28, 29, 30, 33, 35, 37, 40, 47, 50, 51, 53, 55, 63, 66, 77, 80, 86, 90, 152, 337, 488, 490, 506, 515, 519, 547, 701, 737, 742, 743, 744, 745, 746, 748, 749, 759, 763, 767, 768, 775, 776, 777, 841, 869, 881, 893, 895, 897, 904, 906, 908, 909, 910, 923, 924, 939, 947, 960, 963, 976, 991, 992, 997, 1065, 1155, 1156, 1163, 1167, 1172, 1178, 1192, 1195, 1202, 1250, 1273, 1276, 1286, 1289, 1290, 1309, 1310, 1317, 1321, 1322, 1335, 1337, 1346, 1356, 1363, 1368, 1373, 1413, 1418, 1419, 1423, 1436, 1437, 1438, 1455, 1456, 1458, 1459, 1462, 1472, 1478, 1495, 1496, 1497, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1530, 1532, 1534, 1543, 1545, 1559, 1616, 1635, 1664, 1669, 1685, 1717, 1771, 1798, 1820, 1863, 1864, 1878, 1883, 1890, 1905, 1906, 1910, 1911, 1912, 1913, 1914, 1915, 1945, 1961, 1963, 1977, 2012, 2013, 2014, 2016, 2017, 2019, 2021, 2033, 2036, 2041, 2044, 2049, 2050, 2051, 2052, 2057, 2061, 2067, 2069, 2071, 2076, 2077, 2079, 2082, 2083, 2085, 2086, 2087, 2089, 2095, 2098, 2100, 2101, 2103, 2104, 2105, 2111, 2112, 2114, 2117], "intern": [0, 3, 8, 9, 14, 19, 23, 24, 28, 30, 35, 37, 44, 52, 55, 60, 64, 904, 1009, 1043, 1064, 1167, 1186, 1273, 1310, 1312, 1328, 1331, 1354, 1441, 1442, 1443, 1480, 1567, 1608, 1609, 1610, 1633, 1734, 1778, 1871, 1913, 1965, 2018, 2042, 2043, 2045, 2046, 2052, 2054, 2060, 2061, 2065, 2078, 2079, 2084, 2086, 2094, 2101, 2104, 2113], "current": [0, 1, 2, 3, 5, 6, 7, 9, 11, 12, 14, 23, 28, 30, 32, 33, 34, 35, 40, 47, 48, 52, 53, 55, 56, 63, 64, 82, 90, 152, 211, 223, 499, 500, 501, 625, 744, 746, 784, 796, 798, 820, 863, 865, 866, 869, 883, 884, 894, 897, 899, 900, 901, 909, 912, 913, 914, 923, 945, 954, 990, 1001, 1002, 1004, 1005, 1009, 1011, 1012, 1014, 1019, 1020, 1023, 1024, 1026, 1027, 1028, 1029, 1032, 1033, 1034, 1036, 1037, 1040, 1042, 1043, 1046, 1048, 1049, 1053, 1054, 1055, 1057, 1059, 1060, 1061, 1063, 1065, 1066, 1067, 1072, 1073, 1074, 1075, 1076, 1078, 1080, 1082, 1085, 1086, 1087, 1110, 1112, 1122, 1127, 1145, 1161, 1162, 1164, 1182, 1186, 1188, 1198, 1220, 1221, 1222, 1223, 1231, 1232, 1256, 1257, 1273, 1277, 1289, 1293, 1344, 1346, 1360, 1375, 1382, 1385, 1386, 1387, 1394, 1401, 1402, 1403, 1409, 1466, 1469, 1473, 1527, 1567, 1575, 1579, 1627, 1633, 1644, 1685, 1703, 1704, 1706, 1717, 1738, 1742, 1743, 1744, 1745, 1746, 1751, 1752, 1753, 1754, 1776, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1827, 1836, 1838, 1840, 1842, 1843, 1867, 1871, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1903, 1910, 1911, 1912, 1913, 1914, 1915, 1924, 1943, 1954, 1956, 1961, 1967, 1968, 1982, 1983, 1985, 1986, 1989, 1990, 1991, 1992, 1994, 1997, 1998, 2000, 2002, 2004, 2005, 2007, 2009, 2010, 2013, 2014, 2016, 2017, 2018, 2019, 2021, 2024, 2029, 2030, 2033, 2036, 2042, 2043, 2046, 2052, 2055, 2056, 2057, 2058, 2059, 2062, 2063, 2067, 2069, 2070, 2072, 2075, 2077, 2078, 2082, 2084, 2085, 2087, 2088, 2092, 2101, 2103, 2104, 2109, 2110, 2111, 2113, 2114, 2115], "outsid": [0, 5, 9, 23, 40, 52, 53, 55, 60, 687, 798, 889, 1166, 1168, 1172, 1177, 1274, 1633, 1717, 1800, 1801, 1806, 1807, 1813, 2016, 2017, 2041, 2043, 2046, 2050, 2051, 2089, 2098, 2104, 2109], "ha": [0, 1, 5, 6, 7, 8, 9, 11, 12, 14, 15, 17, 19, 21, 23, 24, 28, 29, 30, 32, 34, 35, 36, 37, 40, 44, 46, 47, 48, 50, 51, 52, 53, 55, 56, 58, 59, 60, 61, 63, 64, 152, 198, 211, 257, 262, 337, 447, 448, 449, 
450, 451, 475, 476, 488, 490, 498, 500, 546, 582, 583, 605, 606, 620, 625, 683, 692, 696, 698, 699, 700, 702, 738, 763, 782, 784, 788, 790, 794, 800, 841, 843, 851, 862, 868, 878, 879, 881, 893, 896, 897, 902, 904, 905, 909, 910, 914, 923, 924, 932, 943, 946, 964, 967, 968, 969, 976, 981, 990, 991, 992, 993, 1008, 1011, 1012, 1014, 1019, 1047, 1050, 1051, 1097, 1099, 1113, 1145, 1157, 1166, 1167, 1170, 1171, 1174, 1178, 1210, 1227, 1236, 1237, 1248, 1270, 1273, 1277, 1281, 1284, 1285, 1286, 1289, 1294, 1295, 1303, 1304, 1305, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1323, 1325, 1327, 1328, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1340, 1341, 1345, 1354, 1361, 1363, 1368, 1371, 1373, 1374, 1375, 1376, 1378, 1379, 1386, 1408, 1418, 1419, 1420, 1421, 1439, 1440, 1441, 1442, 1443, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1463, 1464, 1470, 1473, 1474, 1475, 1478, 1481, 1486, 1489, 1490, 1491, 1497, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1531, 1532, 1533, 1534, 1535, 1542, 1543, 1559, 1561, 1567, 1575, 1579, 1580, 1598, 1608, 1609, 1610, 1616, 1625, 1628, 1629, 1633, 1644, 1669, 1671, 1672, 1674, 1678, 1685, 1691, 1704, 1706, 1707, 1710, 1711, 1716, 1717, 1731, 1734, 1736, 1739, 1743, 1745, 1748, 1758, 1759, 1766, 1767, 1771, 1773, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1801, 1802, 1803, 1805, 1809, 1811, 1815, 1825, 1827, 1828, 1829, 1830, 1847, 1849, 1859, 1871, 1892, 1906, 1907, 1908, 1913, 1919, 1921, 1922, 1923, 1924, 1927, 1928, 1940, 1946, 1952, 1963, 1972, 1973, 1977, 1982, 1983, 1999, 2014, 2016, 2017, 2021, 2023, 2024, 2030, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2055, 2056, 2057, 2059, 2060, 2062, 2063, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2076, 2077, 2078, 2079, 2082, 2084, 2085, 2087, 2088, 2089, 2091, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2107, 2109, 2113], "effect": [0, 3, 5, 7, 14, 23, 24, 28, 33, 35, 50, 52, 53, 55, 60, 64, 66, 69, 198, 211, 605, 625, 738, 782, 788, 1043, 1092, 1178, 1273, 1316, 1317, 1318, 1319, 1375, 1431, 1446, 1457, 1458, 1459, 1464, 1465, 1466, 1467, 1471, 1527, 1533, 1580, 1644, 1672, 1704, 1710, 1717, 1718, 1744, 1795, 1803, 1903, 1908, 1928, 1950, 1966, 1977, 2012, 2014, 2021, 2027, 2041, 2042, 2043, 2046, 2049, 2052, 2072, 2075, 2084, 2085, 2087, 2098, 2101], "custom_bwd": [0, 2042], "bwd": [0, 2111], "small": [0, 3, 7, 9, 23, 24, 28, 35, 47, 869, 923, 924, 976, 1022, 1065, 1336, 1337, 1349, 1351, 1431, 1461, 1536, 1541, 1556, 1576, 1580, 1615, 1670, 1677, 1717, 1800, 1806, 1815, 1928, 1929, 2014, 2016, 2017, 2025, 2046, 2049, 2051, 2057, 2060, 2062, 2067, 2070, 2072, 2073, 2082, 2083, 2089, 2098, 2100, 2101, 2103, 2104, 2105, 2107, 2111, 2113, 2115], "magnitud": [0, 993, 1733, 1769, 1878, 2041, 2042, 2046], "represent": [0, 3, 14, 23, 28, 30, 33, 45, 52, 53, 64, 83, 483, 761, 804, 829, 1012, 1125, 1126, 1128, 1152, 1217, 1273, 1313, 1316, 1318, 1321, 1417, 1527, 1533, 1815, 2013, 2014, 2017, 2025, 2036, 2049, 2060, 2065, 2067, 2070, 2071, 2072, 2082, 2089, 2100, 2104, 2113, 2118], "These": [0, 1, 2, 3, 8, 14, 15, 23, 28, 29, 35, 52, 56, 58, 61, 64, 65, 488, 797, 884, 998, 1109, 1174, 1270, 1273, 1316, 1381, 1431, 1527, 1707, 1732, 2013, 2014, 2015, 2016, 2017, 2025, 2030, 2034, 2035, 2037, 2042, 2043, 2046, 2048, 2049, 2057, 2060, 2062, 2067, 2070, 2075, 2077, 2078, 2085, 2091, 2095, 2098, 2100, 2101, 2102, 2104, 2109, 2113, 2114, 2116], 
"flush": [0, 1, 19, 30, 52, 1065, 1284, 1859, 1872, 2060, 2087], "zero": [0, 1, 2, 24, 28, 29, 32, 33, 35, 50, 53, 55, 60, 64, 66, 73, 75, 76, 77, 152, 260, 262, 317, 488, 515, 517, 546, 547, 586, 587, 589, 590, 626, 682, 694, 701, 715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 741, 742, 743, 744, 745, 746, 747, 752, 753, 754, 755, 756, 757, 758, 759, 762, 763, 767, 771, 772, 775, 776, 777, 778, 780, 783, 784, 798, 802, 822, 823, 824, 825, 828, 833, 881, 890, 891, 892, 897, 904, 906, 908, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 946, 947, 967, 968, 969, 974, 993, 997, 1054, 1065, 1070, 1071, 1100, 1104, 1122, 1123, 1124, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1148, 1157, 1163, 1166, 1174, 1188, 1233, 1234, 1237, 1258, 1270, 1273, 1285, 1303, 1304, 1305, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1341, 1346, 1354, 1355, 1356, 1357, 1358, 1363, 1364, 1413, 1417, 1421, 1435, 1436, 1437, 1438, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1473, 1478, 1479, 1481, 1494, 1495, 1496, 1497, 1498, 1499, 1503, 1504, 1505, 1506, 1507, 1508, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1533, 1536, 1539, 1540, 1541, 1542, 1543, 1545, 1565, 1579, 1580, 1583, 1584, 1585, 1600, 1601, 1602, 1611, 1612, 1613, 1615, 1617, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1633, 1654, 1655, 1656, 1670, 1671, 1672, 1685, 1716, 1742, 1743, 1744, 1745, 1760, 1767, 1771, 1775, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1815, 1827, 1829, 1830, 1831, 1840, 1847, 1856, 1866, 1883, 1892, 1896, 1906, 1910, 1911, 1912, 1913, 1914, 1915, 1919, 1928, 1943, 1944, 1946, 1952, 2011, 2014, 2015, 2017, 2019, 2030, 2033, 2034, 2035, 2036, 2041, 2043, 2046, 2049, 2057, 2060, 2063, 2067, 2068, 2070, 2071, 2072, 2075, 2077, 2082, 2083, 2085, 2087, 2088, 2089, 2091, 2101, 2103, 2106], "underflow": [0, 1492, 2042], "updat": [0, 9, 11, 23, 24, 30, 32, 37, 47, 52, 55, 58, 59, 64, 88, 490, 515, 517, 763, 802, 803, 827, 900, 901, 930, 943, 1166, 1175, 1273, 1310, 1441, 1442, 1443, 1463, 1469, 1470, 1478, 1489, 1490, 1491, 1527, 1528, 1537, 1567, 1624, 1625, 1724, 1725, 1732, 1737, 1767, 1781, 1787, 1792, 1797, 1798, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1813, 2012, 2014, 2027, 2042, 2043, 2046, 2048, 2049, 2053, 2057, 2059, 2062, 2063, 2065, 2067, 2068, 2069, 2072, 2077, 2078, 2079, 2087, 2098, 2112], "lost": [0, 37, 48, 51, 1463, 1523, 1524, 1525, 2104], "To": [0, 1, 2, 3, 4, 5, 6, 9, 14, 15, 19, 20, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 39, 45, 46, 48, 50, 52, 53, 55, 60, 64, 81, 84, 85, 88, 337, 501, 559, 737, 899, 918, 932, 942, 973, 976, 1055, 1076, 1097, 1099, 1109, 1137, 1139, 1143, 1154, 1273, 1274, 1276, 1277, 1284, 1285, 1319, 1341, 1346, 1374, 1431, 1440, 1466, 1492, 1523, 1524, 1525, 1527, 1581, 1582, 1606, 1685, 1717, 1733, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1769, 1809, 1817, 1828, 1867, 1871, 1874, 1913, 1929, 2000, 2002, 2012, 2014, 2016, 2017, 2021, 2033, 2034, 2035, 2037, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2058, 2059, 2062, 2067, 2069, 2070, 2071, 2072, 2077, 2078, 2079, 2082, 2085, 2086, 2087, 2088, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2111, 2112, 2113, 2115], "prevent": [0, 7, 8, 23, 24, 28, 29, 30, 37, 
55, 64, 488, 501, 738, 904, 907, 909, 978, 1011, 1090, 1091, 1110, 1111, 1112, 1270, 1373, 1386, 1418, 1421, 1464, 1533, 1559, 1652, 1691, 1692, 1707, 1717, 1782, 1825, 1904, 1907, 1927, 1954, 1956, 1965, 1982, 2021, 2033, 2042, 2043, 2046, 2048, 2051, 2052, 2057, 2061, 2070, 2071, 2079, 2082, 2083, 2098, 2103, 2104, 2117], "multipli": [0, 28, 315, 323, 425, 515, 688, 689, 690, 691, 692, 693, 694, 763, 782, 788, 789, 790, 944, 956, 1079, 1092, 1109, 1227, 1238, 1297, 1309, 1310, 1314, 1327, 1329, 1331, 1337, 1339, 1368, 1378, 1393, 1412, 1415, 1439, 1454, 1455, 1456, 1459, 1478, 1497, 1580, 1581, 1582, 1644, 1704, 1705, 1706, 1737, 1800, 1806, 1808, 1815, 1871, 1898, 1901, 1905, 1906, 1920, 1924, 1925, 1928, 1950, 2015, 2034, 2046, 2054, 2057, 2060, 2068, 2082, 2083, 2104], "factor": [0, 3, 24, 35, 64, 692, 693, 694, 802, 822, 944, 967, 1316, 1317, 1318, 1321, 1323, 1363, 1364, 1365, 1464, 1487, 1515, 1539, 1540, 1642, 1675, 1676, 1685, 1795, 1796, 1797, 1799, 1800, 1802, 1804, 1805, 1806, 1807, 1808, 1811, 1812, 1813, 1827, 1884, 1906, 1928, 2041, 2042, 2072, 2082, 2104], "flow": [0, 33, 53, 66, 72, 75, 973, 990, 1286, 1289, 1598, 1633, 1717, 2013, 2014, 2041, 2043, 2046, 2049, 2062, 2064, 2067, 2079, 2098, 2099, 2100, 2101, 2104, 2111], "through": [0, 5, 7, 9, 11, 15, 19, 23, 28, 30, 33, 35, 40, 52, 53, 55, 60, 63, 64, 66, 69, 71, 73, 81, 498, 795, 843, 858, 865, 866, 904, 907, 909, 910, 923, 924, 978, 1016, 1054, 1136, 1170, 1178, 1186, 1271, 1274, 1276, 1289, 1290, 1292, 1309, 1310, 1331, 1337, 1354, 1534, 1572, 1573, 1574, 1575, 1635, 1707, 1717, 1724, 1725, 1737, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1827, 1977, 2012, 2013, 2016, 2017, 2021, 2023, 2030, 2033, 2034, 2035, 2043, 2046, 2049, 2051, 2054, 2056, 2057, 2063, 2064, 2065, 2067, 2070, 2072, 2075, 2077, 2078, 2079, 2082, 2086, 2091, 2093, 2094, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2112, 2113, 2118], "word": [0, 1, 8, 28, 47, 48, 53, 58, 63, 64, 960, 1187, 1431, 1454, 1455, 1456, 1469, 1470, 1571, 1624, 1625, 1644, 1704, 1717, 1731, 1863, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2016, 2043, 2051, 2052, 2070, 2078, 2101], "have": [0, 1, 3, 5, 6, 7, 8, 9, 11, 12, 14, 17, 23, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 56, 58, 59, 60, 64, 66, 74, 75, 86, 155, 156, 223, 224, 315, 317, 323, 337, 400, 404, 450, 460, 473, 488, 489, 490, 505, 515, 517, 519, 522, 546, 619, 683, 692, 696, 698, 699, 700, 702, 818, 820, 845, 884, 893, 897, 909, 910, 912, 914, 919, 928, 929, 932, 945, 946, 954, 963, 964, 976, 979, 990, 998, 1014, 1023, 1024, 1054, 1065, 1100, 1108, 1115, 1129, 1139, 1144, 1146, 1160, 1166, 1167, 1178, 1188, 1196, 1197, 1198, 1214, 1215, 1231, 1232, 1235, 1236, 1248, 1270, 1272, 1273, 1276, 1277, 1279, 1281, 1283, 1285, 1286, 1287, 1288, 1289, 1290, 1295, 1296, 1305, 1309, 1310, 1326, 1333, 1334, 1337, 1339, 1345, 1354, 1356, 1361, 1368, 1371, 1373, 1374, 1375, 1376, 1378, 1379, 1413, 1418, 1419, 1421, 1423, 1431, 1435, 1439, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1463, 1469, 1470, 1474, 1475, 1480, 1485, 1489, 1490, 1491, 1519, 1527, 1530, 1531, 1532, 1533, 1534, 1575, 1577, 1615, 1624, 1625, 1628, 1629, 1633, 1650, 1669, 1671, 1674, 1709, 1710, 1711, 1714, 1715, 1717, 1718, 1722, 1723, 1724, 1725, 1726, 1728, 1731, 1734, 1737, 1761, 1765, 1770, 1772, 1778, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1811, 1825, 1851, 1852, 1866, 1871, 1878, 1901, 1905, 1909, 1910, 
1911, 1912, 1914, 1915, 1922, 1923, 1924, 1927, 1928, 1940, 1944, 1950, 1952, 1965, 1968, 1970, 1972, 1973, 1975, 1977, 1979, 1983, 2012, 2014, 2015, 2016, 2017, 2019, 2021, 2023, 2024, 2025, 2027, 2030, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2044, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2069, 2070, 2072, 2073, 2076, 2077, 2078, 2079, 2082, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2095, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2109, 2110, 2111, 2112, 2113, 2114, 2115], "larger": [0, 9, 28, 35, 64, 256, 501, 924, 947, 1065, 1167, 1328, 1331, 1431, 1463, 1469, 1470, 1519, 1577, 1580, 1624, 1625, 2043, 2046, 2051, 2054, 2056, 2060, 2062, 2082, 2087, 2107, 2110, 2111, 2112, 2117], "thei": [0, 1, 3, 5, 7, 9, 11, 12, 17, 18, 23, 28, 30, 34, 35, 36, 47, 52, 53, 55, 58, 59, 63, 64, 86, 323, 337, 338, 488, 689, 692, 693, 700, 803, 818, 819, 820, 827, 845, 858, 863, 894, 904, 907, 909, 919, 920, 924, 944, 976, 1054, 1069, 1113, 1160, 1166, 1176, 1182, 1194, 1199, 1201, 1262, 1263, 1270, 1273, 1281, 1286, 1295, 1309, 1310, 1316, 1337, 1345, 1367, 1371, 1374, 1376, 1379, 1413, 1436, 1437, 1438, 1457, 1458, 1459, 1473, 1512, 1520, 1521, 1522, 1527, 1533, 1544, 1556, 1579, 1633, 1635, 1707, 1717, 1718, 1722, 1731, 1735, 1758, 1761, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1805, 1808, 1867, 1924, 1928, 1965, 1966, 1971, 2012, 2014, 2016, 2017, 2019, 2021, 2023, 2024, 2027, 2030, 2033, 2034, 2035, 2036, 2037, 2041, 2042, 2043, 2046, 2048, 2049, 2050, 2052, 2055, 2057, 2059, 2060, 2062, 2063, 2065, 2067, 2069, 2070, 2072, 2077, 2082, 2085, 2087, 2088, 2089, 2091, 2092, 2098, 2100, 2101, 2103, 2104, 2105, 2106, 2109, 2113, 2114], "don": [0, 1, 4, 7, 9, 11, 28, 30, 46, 55, 56, 58, 60, 64, 66, 76, 77, 788, 798, 897, 899, 918, 978, 1083, 1166, 1187, 1188, 1194, 1196, 1198, 1202, 1289, 1367, 1489, 1490, 1491, 1580, 1704, 1707, 1717, 1773, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1870, 2012, 2014, 2018, 2033, 2034, 2035, 2043, 2046, 2049, 2051, 2057, 2059, 2063, 2067, 2069, 2070, 2072, 2077, 2078, 2082, 2098, 2100, 2102, 2103, 2104, 2105, 2111, 2114], "t": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 19, 23, 24, 28, 30, 35, 36, 37, 40, 44, 45, 46, 47, 52, 53, 55, 56, 58, 60, 61, 63, 64, 66, 76, 77, 83, 86, 152, 315, 317, 323, 460, 488, 525, 539, 573, 690, 691, 700, 763, 784, 788, 798, 826, 829, 881, 882, 883, 884, 893, 895, 897, 899, 904, 905, 907, 909, 910, 913, 918, 930, 967, 968, 969, 974, 978, 990, 998, 1009, 1012, 1033, 1051, 1052, 1083, 1107, 1125, 1130, 1131, 1132, 1133, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1148, 1160, 1161, 1162, 1163, 1166, 1167, 1168, 1178, 1187, 1188, 1190, 1194, 1196, 1198, 1201, 1202, 1212, 1214, 1227, 1237, 1270, 1271, 1273, 1274, 1276, 1280, 1281, 1285, 1287, 1289, 1292, 1294, 1303, 1304, 1310, 1312, 1313, 1316, 1323, 1331, 1332, 1337, 1345, 1346, 1354, 1367, 1380, 1381, 1396, 1410, 1420, 1440, 1444, 1446, 1454, 1455, 1456, 1463, 1469, 1470, 1478, 1487, 1489, 1490, 1491, 1492, 1497, 1514, 1527, 1543, 1559, 1561, 1571, 1579, 1580, 1604, 1608, 1609, 1610, 1617, 1624, 1625, 1645, 1650, 1670, 1691, 1704, 1707, 1710, 1711, 1717, 1718, 1731, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1759, 1761, 1762, 1773, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1801, 1817, 1823, 1827, 1844, 1867, 1869, 1870, 1878, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 
1891, 1901, 1905, 1924, 1928, 1934, 1940, 1943, 1949, 1952, 1977, 1978, 1990, 2012, 2014, 2015, 2016, 2018, 2021, 2029, 2030, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2045, 2046, 2049, 2050, 2052, 2054, 2056, 2057, 2059, 2062, 2063, 2067, 2068, 2069, 2070, 2072, 2077, 2078, 2079, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2098, 2100, 2102, 2103, 2105, 2108, 2111, 2113, 2114], "grad": [0, 1, 5, 28, 35, 56, 59, 60, 64, 152, 337, 460, 489, 490, 497, 498, 505, 506, 583, 683, 884, 893, 897, 899, 900, 901, 904, 908, 909, 910, 912, 914, 919, 920, 923, 924, 928, 929, 1054, 1113, 1166, 1167, 1169, 1173, 1177, 1178, 1256, 1273, 1346, 1527, 1717, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1905, 1965, 1977, 2013, 2014, 2015, 2021, 2034, 2035, 2036, 2042, 2048, 2049, 2050, 2054, 2057, 2059, 2068, 2077, 2078, 2082, 2088, 2109, 2111], "unscal": 0, "doe": [0, 1, 3, 4, 5, 7, 8, 9, 14, 17, 19, 27, 28, 32, 35, 37, 40, 44, 46, 47, 50, 52, 53, 55, 58, 59, 60, 63, 64, 66, 256, 260, 337, 437, 460, 585, 619, 683, 698, 699, 751, 760, 763, 793, 797, 865, 884, 920, 921, 943, 956, 978, 1045, 1049, 1100, 1109, 1113, 1163, 1166, 1178, 1184, 1188, 1248, 1272, 1274, 1277, 1284, 1289, 1299, 1303, 1304, 1314, 1316, 1317, 1321, 1322, 1329, 1334, 1335, 1337, 1343, 1346, 1363, 1367, 1368, 1374, 1378, 1382, 1415, 1441, 1442, 1443, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1466, 1469, 1470, 1473, 1478, 1479, 1489, 1490, 1491, 1497, 1498, 1500, 1501, 1502, 1509, 1510, 1511, 1521, 1522, 1528, 1534, 1537, 1543, 1545, 1567, 1579, 1616, 1669, 1717, 1718, 1724, 1725, 1734, 1737, 1741, 1767, 1770, 1772, 1777, 1778, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1812, 1816, 1821, 1863, 1866, 1867, 1871, 1896, 1901, 1928, 1949, 1952, 1965, 1966, 1977, 1996, 2011, 2012, 2013, 2014, 2016, 2017, 2019, 2021, 2027, 2033, 2034, 2035, 2036, 2042, 2044, 2045, 2046, 2048, 2049, 2050, 2052, 2054, 2057, 2059, 2060, 2061, 2064, 2065, 2067, 2070, 2072, 2077, 2079, 2081, 2082, 2084, 2085, 2088, 2089, 2098, 2100, 2101, 2113, 2115], "interfer": [0, 2030, 2046, 2067, 2104], "learn": [0, 7, 8, 15, 33, 35, 46, 52, 55, 64, 88, 1444, 1460, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1486, 1499, 1512, 1514, 1535, 1557, 1571, 1573, 1575, 1576, 1577, 1688, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 2013, 2021, 2037, 2041, 2046, 2055, 2057, 2058, 2064, 2070, 2072, 2077, 2079, 2094, 2097, 2099, 2100, 2101, 2102], "rate": [0, 2, 8, 24, 35, 55, 1465, 1466, 1467, 1471, 1574, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1820, 2013, 2056, 2087, 2104, 2110], "fp16": [0, 2, 732, 1717, 1724, 1725, 2053, 2072, 2073], "everi": [0, 1, 2, 8, 9, 19, 23, 24, 28, 30, 32, 35, 37, 53, 55, 60, 64, 483, 489, 612, 683, 784, 822, 904, 905, 909, 928, 929, 932, 976, 1092, 1109, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1167, 1227, 1236, 1273, 1305, 1319, 1329, 1332, 1363, 1435, 1464, 1465, 1466, 1467, 1471, 1520, 1527, 1561, 1563, 1567, 1619, 1620, 1621, 1626, 1643, 1658, 1659, 1660, 1674, 1692, 1709, 1710, 1711, 1714, 1715, 1717, 1732, 1766, 1769, 1770, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813, 1867, 1928, 1962, 
2017, 2023, 2030, 2042, 2043, 2046, 2048, 2049, 2050, 2052, 2054, 2058, 2067, 2069, 2070, 2071, 2072, 2073, 2077, 2078, 2079, 2082, 2084, 2085, 2087, 2089, 2100, 2101, 2103, 2110, 2111, 2112, 2113], "most": [0, 1, 3, 4, 7, 8, 15, 23, 28, 30, 32, 35, 36, 37, 39, 46, 47, 50, 51, 52, 55, 60, 61, 63, 64, 66, 488, 501, 796, 904, 905, 906, 907, 909, 910, 915, 919, 923, 936, 976, 1078, 1092, 1109, 1167, 1258, 1271, 1274, 1379, 1431, 1575, 1633, 1685, 1717, 1724, 1725, 1737, 1792, 1871, 1903, 1950, 2012, 2014, 2016, 2017, 2019, 2024, 2025, 2033, 2035, 2036, 2043, 2046, 2049, 2052, 2059, 2060, 2061, 2065, 2069, 2071, 2072, 2078, 2079, 2082, 2085, 2087, 2089, 2094, 2097, 2100, 2102, 2103, 2104, 2105, 2106, 2107, 2112, 2113], "bf16": [0, 2, 2053], "pretrain": [0, 30, 866, 1469, 1470, 2012, 2043, 2067], "cannot": [0, 3, 8, 9, 12, 23, 24, 28, 30, 33, 35, 36, 40, 47, 52, 55, 56, 60, 61, 63, 64, 224, 256, 526, 547, 884, 978, 990, 1130, 1132, 1140, 1141, 1142, 1148, 1161, 1166, 1197, 1270, 1278, 1413, 1446, 1469, 1580, 1617, 1685, 1717, 1778, 1929, 2012, 2014, 2015, 2016, 2017, 2018, 2019, 2021, 2025, 2030, 2035, 2036, 2043, 2048, 2049, 2062, 2063, 2065, 2067, 2072, 2077, 2079, 2082, 2084, 2085, 2087, 2098, 2101, 2102, 2104, 2105, 2114], "numer": [0, 11, 23, 25, 35, 53, 56, 61, 690, 796, 923, 924, 960, 1277, 1289, 1290, 1305, 1309, 1310, 1314, 1319, 1327, 1328, 1331, 1334, 1337, 1339, 1361, 1363, 1440, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1542, 1561, 1564, 1567, 1576, 1624, 1645, 1652, 1685, 1691, 1693, 1716, 1732, 1766, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1831, 1863, 1883, 1928, 2013, 2017, 2018, 2022, 2027, 2046, 2049, 2050, 2067, 2072, 2083, 2087, 2088, 2104, 2113, 2118], "max": [0, 19, 23, 28, 37, 40, 46, 47, 50, 52, 55, 64, 66, 76, 77, 118, 187, 188, 189, 190, 302, 698, 699, 700, 759, 761, 773, 774, 783, 785, 786, 805, 822, 823, 824, 825, 828, 878, 932, 947, 964, 971, 972, 976, 998, 1088, 1123, 1124, 1198, 1234, 1285, 1305, 1319, 1326, 1328, 1330, 1331, 1343, 1432, 1433, 1434, 1445, 1446, 1460, 1461, 1470, 1474, 1475, 1480, 1485, 1486, 1494, 1495, 1496, 1513, 1515, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1530, 1531, 1532, 1535, 1547, 1548, 1555, 1576, 1577, 1579, 1595, 1596, 1597, 1607, 1615, 1625, 1628, 1629, 1644, 1648, 1658, 1659, 1660, 1670, 1674, 1678, 1680, 1686, 1704, 1722, 1761, 1784, 1785, 1786, 1796, 1801, 1802, 1811, 1832, 1833, 1905, 1922, 1923, 1935, 1965, 1972, 1973, 2014, 2015, 2024, 2030, 2036, 2044, 2046, 2051, 2053, 2068, 2072, 2075, 2095, 2100, 2103, 2107, 2108, 2118], "65504": 0, "overflow": [0, 1090, 1091, 1373, 1418, 1421, 1652, 1691, 1692, 1825, 1856, 1904, 1907, 1927, 1954, 1956, 2046, 2060, 2083], "case": [0, 1, 3, 4, 8, 9, 11, 14, 15, 19, 23, 24, 28, 30, 32, 33, 34, 35, 37, 39, 40, 47, 48, 50, 51, 52, 53, 55, 56, 58, 59, 60, 61, 63, 64, 66, 86, 152, 156, 198, 354, 488, 498, 501, 683, 763, 787, 794, 797, 823, 824, 826, 829, 862, 869, 897, 909, 910, 912, 914, 918, 939, 943, 945, 947, 952, 978, 993, 1008, 1033, 1055, 1056, 1076, 1077, 1078, 1109, 1130, 1132, 1140, 1141, 1142, 1157, 1168, 1172, 1177, 1188, 1198, 1201, 1233, 1258, 1271, 1274, 1283, 1286, 1287, 1289, 1303, 1305, 1309, 1310, 1314, 1319, 1320, 1321, 1323, 1329, 1332, 1337, 1339, 1343, 1345, 1346, 1351, 1354, 1363, 1374, 1413, 1431, 1436, 1437, 1438, 1439, 1440, 1454, 1455, 1456, 1458, 1459, 1460, 1462, 1465, 1466, 1467, 1468, 1470, 1471, 1473, 1482, 1483, 1484, 1485, 1486, 1487, 1493, 1494, 1495, 1496, 1513, 1518, 1520, 1521, 1522, 1533, 1534, 1535, 
1546, 1559, 1565, 1570, 1575, 1577, 1579, 1598, 1616, 1625, 1633, 1637, 1638, 1669, 1671, 1678, 1717, 1719, 1720, 1724, 1725, 1731, 1734, 1737, 1738, 1771, 1772, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1809, 1811, 1847, 1850, 1863, 1865, 1878, 1881, 1913, 1928, 1929, 1960, 1961, 1965, 1980, 1990, 2000, 2001, 2002, 2003, 2012, 2017, 2022, 2025, 2033, 2034, 2035, 2041, 2042, 2043, 2044, 2045, 2046, 2049, 2050, 2051, 2052, 2054, 2057, 2059, 2060, 2062, 2067, 2069, 2070, 2072, 2073, 2075, 2076, 2077, 2078, 2079, 2082, 2083, 2084, 2086, 2087, 2088, 2089, 2094, 2100, 2101, 2103, 2104, 2105, 2109, 2111, 2112, 2113, 2114, 2118], "decreas": [0, 35, 1065, 1319, 1465, 1466, 1467, 1471, 1540, 1759, 1760, 1796, 1803, 1811, 2023, 2027, 2059, 2061, 2082, 2085], "attempt": [0, 1, 8, 14, 19, 28, 30, 45, 47, 48, 60, 87, 970, 976, 979, 994, 1277, 1284, 1363, 1685, 1719, 1720, 1784, 1785, 1797, 1965, 1968, 2014, 2017, 2033, 2034, 2035, 2042, 2046, 2049, 2063, 2064, 2069, 2070, 2077, 2100, 2103, 2104, 2113], "bring": [0, 56, 64, 1130, 1598, 1633, 2049, 2107, 2110], "number": [0, 1, 2, 3, 4, 5, 7, 14, 19, 23, 24, 28, 30, 32, 33, 35, 37, 45, 46, 47, 51, 52, 56, 58, 61, 64, 66, 71, 87, 90, 156, 175, 220, 234, 256, 315, 354, 379, 400, 404, 437, 448, 473, 475, 476, 483, 495, 499, 501, 515, 517, 519, 545, 547, 548, 560, 585, 586, 587, 589, 590, 591, 610, 619, 688, 689, 690, 691, 692, 693, 694, 701, 761, 763, 771, 772, 775, 776, 777, 784, 822, 869, 881, 894, 898, 909, 923, 939, 941, 944, 946, 947, 952, 956, 958, 962, 970, 971, 975, 993, 997, 998, 1003, 1014, 1016, 1022, 1031, 1040, 1041, 1052, 1054, 1055, 1056, 1065, 1076, 1077, 1080, 1081, 1101, 1104, 1106, 1109, 1110, 1114, 1122, 1152, 1154, 1157, 1161, 1163, 1165, 1182, 1214, 1216, 1224, 1225, 1226, 1230, 1234, 1235, 1236, 1248, 1249, 1270, 1271, 1273, 1287, 1294, 1297, 1298, 1305, 1319, 1328, 1337, 1341, 1346, 1351, 1362, 1366, 1374, 1383, 1387, 1388, 1392, 1394, 1405, 1412, 1413, 1417, 1421, 1424, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1444, 1445, 1446, 1447, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1468, 1469, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1492, 1493, 1497, 1498, 1503, 1504, 1505, 1506, 1507, 1508, 1513, 1514, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1526, 1527, 1530, 1531, 1532, 1533, 1534, 1535, 1541, 1543, 1545, 1546, 1547, 1548, 1555, 1557, 1558, 1559, 1560, 1561, 1563, 1564, 1565, 1566, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1600, 1601, 1602, 1604, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1617, 1624, 1625, 1628, 1629, 1634, 1644, 1645, 1647, 1650, 1658, 1659, 1660, 1669, 1671, 1677, 1678, 1685, 1707, 1717, 1732, 1737, 1742, 1743, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1758, 1759, 1760, 1762, 1766, 1771, 1772, 1773, 1775, 1776, 1779, 1787, 1800, 1801, 1802, 1803, 1806, 1807, 1809, 1810, 1811, 1817, 1820, 1824, 1834, 1836, 1837, 1838, 1840, 1841, 1842, 1847, 1849, 1850, 1853, 1854, 1855, 1856, 1864, 1868, 1871, 1872, 1873, 1874, 1875, 1876, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1901, 1906, 1908, 1910, 1911, 1912, 1913, 1914, 1915, 1920, 1921, 1922, 1923, 1924, 1925, 1929, 1944, 1945, 1946, 1954, 1956, 1961, 1962, 1967, 1968, 1971, 1972, 1973, 1974, 1975, 1976, 1983, 1988, 1994, 1995, 2000, 2001, 2002, 2003, 2005, 2006, 2010, 2013, 2015, 2016, 2017, 2018, 2019, 2021, 2024, 2030, 2033, 2036, 
2041, 2044, 2046, 2049, 2054, 2057, 2059, 2060, 2062, 2067, 2069, 2070, 2071, 2073, 2076, 2077, 2078, 2081, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2090, 2098, 2100, 2103, 2104, 2110, 2113, 2115, 2116, 2117, 2118], "expect": [0, 1, 3, 5, 7, 9, 12, 23, 24, 28, 30, 32, 33, 37, 45, 47, 50, 52, 53, 55, 60, 61, 64, 82, 417, 488, 683, 700, 763, 858, 912, 913, 914, 915, 916, 917, 1054, 1130, 1132, 1149, 1150, 1151, 1171, 1172, 1188, 1198, 1248, 1270, 1273, 1289, 1290, 1318, 1339, 1340, 1374, 1442, 1443, 1462, 1478, 1479, 1480, 1481, 1490, 1491, 1492, 1497, 1498, 1499, 1509, 1510, 1511, 1527, 1533, 1534, 1542, 1543, 1545, 1561, 1567, 1571, 1573, 1575, 1580, 1624, 1630, 1644, 1669, 1677, 1678, 1691, 1704, 1705, 1706, 1707, 1716, 1717, 1759, 1812, 1938, 1975, 2012, 2013, 2019, 2021, 2025, 2043, 2048, 2051, 2052, 2054, 2057, 2067, 2069, 2070, 2072, 2073, 2074, 2077, 2082, 2087, 2089, 2099, 2101, 2103, 2104, 2107, 2109, 2111], "alwai": [0, 5, 7, 14, 17, 19, 23, 24, 28, 35, 45, 50, 52, 53, 55, 59, 64, 342, 417, 450, 460, 797, 884, 904, 906, 909, 912, 918, 945, 954, 964, 970, 976, 1001, 1003, 1054, 1065, 1096, 1104, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1140, 1141, 1142, 1143, 1145, 1148, 1152, 1182, 1201, 1231, 1232, 1273, 1284, 1289, 1304, 1309, 1310, 1311, 1312, 1314, 1315, 1327, 1330, 1331, 1332, 1333, 1337, 1339, 1419, 1439, 1441, 1442, 1443, 1463, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1527, 1564, 1567, 1685, 1707, 1717, 1758, 1811, 1851, 1852, 1877, 1924, 1928, 1935, 1961, 1965, 2017, 2018, 2025, 2029, 2033, 2036, 2043, 2044, 2045, 2046, 2048, 2049, 2052, 2054, 2056, 2057, 2070, 2076, 2077, 2079, 2082, 2085, 2088, 2089, 2098, 2100, 2103, 2111, 2120], "abov": [0, 1, 3, 12, 15, 28, 30, 33, 34, 35, 40, 47, 50, 52, 53, 55, 56, 61, 64, 66, 68, 619, 683, 795, 796, 884, 945, 954, 1065, 1092, 1096, 1097, 1098, 1099, 1100, 1109, 1217, 1231, 1232, 1273, 1274, 1294, 1305, 1309, 1310, 1328, 1330, 1331, 1334, 1337, 1340, 1343, 1346, 1436, 1437, 1438, 1439, 1440, 1473, 1527, 1556, 1564, 1579, 1685, 1707, 1773, 1803, 1827, 1838, 1839, 1871, 1875, 1888, 1908, 1950, 1953, 1954, 1955, 1956, 2012, 2014, 2016, 2017, 2021, 2024, 2043, 2044, 2045, 2046, 2049, 2050, 2054, 2057, 2059, 2061, 2062, 2065, 2067, 2070, 2072, 2077, 2078, 2079, 2082, 2083, 2089, 2094, 2099, 2100, 2101, 2102, 2104, 2105, 2106, 2107, 2113], "our": [0, 3, 7, 8, 11, 33, 43, 46, 47, 48, 51, 59, 60, 61, 64, 65, 904, 905, 909, 1129, 1167, 1181, 1188, 1439, 1724, 1725, 1784, 1785, 1871, 2021, 2043, 2049, 2052, 2054, 2059, 2064, 2067, 2069, 2072, 2078, 2082, 2094, 2098, 2100, 2101, 2103, 2104, 2106, 2107, 2112, 2113], "NOT": [0, 23, 28, 37, 47, 48, 50, 52, 64, 950, 1009, 1043, 1273, 1356, 1466, 1717, 1718, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755, 1770, 1778, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 2043, 2065, 2079, 2082, 2084, 2111], "make": [0, 1, 2, 3, 4, 5, 8, 14, 15, 20, 23, 24, 25, 28, 30, 31, 33, 35, 37, 39, 44, 47, 48, 50, 51, 52, 59, 60, 64, 65, 66, 77, 141, 224, 488, 498, 782, 788, 865, 866, 897, 900, 967, 978, 1011, 1012, 1014, 1097, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1149, 1150, 1151, 1182, 1187, 1188, 1273, 1277, 1283, 1284, 1303, 1304, 1316, 1317, 1318, 1346, 1386, 1439, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1487, 1527, 1537, 1575, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1633, 1635, 1644, 1685, 1704, 1717, 1731, 1748, 1765, 1805, 1868, 1900, 1910, 1911, 1912, 1914, 1915, 1947, 1965, 
1968, 1970, 1982, 1983, 2012, 2014, 2016, 2017, 2021, 2025, 2030, 2033, 2034, 2036, 2042, 2043, 2044, 2046, 2048, 2049, 2050, 2051, 2053, 2054, 2057, 2059, 2061, 2062, 2063, 2067, 2069, 2070, 2071, 2072, 2073, 2077, 2078, 2079, 2082, 2084, 2085, 2087, 2092, 2094, 2095, 2100, 2102, 2103, 2104, 2106, 2107, 2109, 2112, 2113, 2114], "guarante": [0, 1, 5, 9, 23, 28, 30, 32, 35, 47, 50, 52, 53, 60, 63, 64, 66, 488, 880, 976, 1186, 1188, 1198, 1273, 1283, 1309, 1311, 1463, 1527, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1900, 1902, 1965, 2043, 2045, 2046, 2060, 2061, 2070, 2071, 2073, 2077, 2079, 2100], "encount": [0, 5, 19, 28, 52, 60, 63, 64, 683, 978, 1717, 1724, 1725, 2014, 2017, 2019, 2023, 2045, 2060, 2070, 2072, 2099, 2101, 2104, 2109, 2111, 2113, 2116], "nan": [0, 1, 27, 35, 429, 430, 501, 687, 689, 692, 693, 694, 697, 700, 701, 885, 889, 944, 960, 1110, 1111, 1112, 1155, 1156, 1157, 1234, 1262, 1263, 1265, 1266, 1313, 1320, 1333, 1336, 1349, 1354, 1372, 1373, 1377, 1417, 1418, 1419, 1420, 1421, 1633, 1722, 1821, 1858, 1863, 1917, 1952, 2015, 2024, 2042, 2043, 2050, 2060, 2083, 2089, 2117], "verifi": [0, 28, 52, 64, 89, 923, 1092, 1779, 1780, 1798, 1950, 2012, 2014, 2028, 2049, 2054, 2067, 2106], "compat": [0, 1, 14, 23, 28, 30, 34, 35, 37, 48, 52, 55, 59, 60, 64, 66, 499, 500, 605, 619, 683, 818, 819, 820, 894, 909, 910, 942, 957, 991, 992, 1186, 1273, 1278, 1291, 1304, 1315, 1328, 1331, 1527, 1533, 1571, 1572, 1573, 1574, 1575, 1644, 1724, 1725, 1769, 1778, 1850, 2012, 2013, 2016, 2017, 2023, 2033, 2034, 2042, 2049, 2053, 2062, 2065, 2070, 2071, 2072, 2075, 2077, 2082, 2084, 2088, 2091, 2095], "init_scal": 0, "65536": 0, "0": [0, 1, 3, 11, 12, 14, 18, 19, 20, 23, 24, 25, 28, 29, 30, 32, 33, 34, 35, 36, 37, 39, 40, 41, 45, 47, 48, 50, 52, 53, 55, 58, 59, 60, 61, 63, 64, 66, 67, 68, 69, 71, 72, 73, 74, 75, 76, 77, 78, 80, 156, 158, 175, 186, 193, 210, 226, 227, 228, 229, 230, 235, 256, 260, 262, 265, 288, 291, 300, 302, 313, 315, 317, 319, 323, 354, 403, 404, 429, 430, 447, 450, 451, 456, 483, 485, 489, 490, 498, 509, 510, 515, 517, 519, 522, 540, 545, 546, 549, 558, 560, 562, 580, 582, 583, 585, 586, 587, 589, 590, 591, 597, 598, 599, 600, 607, 609, 610, 619, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 702, 715, 716, 717, 718, 719, 720, 721, 722, 726, 727, 728, 729, 730, 733, 734, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 751, 754, 755, 756, 758, 759, 760, 761, 763, 771, 772, 773, 775, 776, 777, 778, 781, 783, 785, 786, 788, 797, 799, 802, 805, 820, 823, 824, 825, 828, 858, 860, 869, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 898, 904, 906, 909, 910, 912, 913, 914, 916, 923, 924, 926, 928, 929, 936, 939, 941, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 957, 958, 960, 963, 964, 965, 966, 967, 968, 970, 971, 974, 988, 989, 990, 993, 994, 995, 996, 997, 998, 1008, 1014, 1023, 1025, 1051, 1052, 1079, 1083, 1088, 1089, 1090, 1091, 1092, 1093, 1096, 1097, 1098, 1099, 1100, 1101, 1103, 1104, 1106, 1107, 1109, 1111, 1112, 1119, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1143, 1144, 1145, 1146, 1148, 1149, 1150, 1151, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1166, 1168, 1169, 1170, 1171, 1172, 1176, 1177, 1178, 1188, 1192, 1198, 1214, 1215, 1222, 1227, 1231, 1233, 1234, 1235, 1236, 1237, 1240, 1244, 1248, 1250, 1258, 1269, 1270, 1271, 1273, 1277, 1279, 1280, 
1281, 1285, 1293, 1294, 1295, 1296, 1297, 1299, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1315, 1316, 1317, 1318, 1319, 1320, 1325, 1326, 1327, 1328, 1330, 1331, 1332, 1333, 1335, 1340, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1353, 1354, 1355, 1356, 1357, 1358, 1360, 1363, 1367, 1371, 1372, 1373, 1374, 1376, 1377, 1378, 1379, 1380, 1381, 1393, 1396, 1412, 1413, 1415, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1425, 1427, 1429, 1430, 1431, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1446, 1448, 1449, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1478, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1513, 1515, 1517, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1541, 1542, 1543, 1544, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1555, 1559, 1561, 1562, 1563, 1564, 1565, 1567, 1570, 1571, 1573, 1575, 1576, 1577, 1579, 1580, 1583, 1584, 1585, 1588, 1589, 1590, 1598, 1599, 1600, 1601, 1602, 1603, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1624, 1625, 1626, 1627, 1628, 1629, 1631, 1633, 1636, 1637, 1638, 1641, 1642, 1643, 1644, 1648, 1649, 1651, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1669, 1670, 1671, 1672, 1673, 1674, 1677, 1678, 1680, 1685, 1686, 1689, 1691, 1694, 1700, 1701, 1702, 1703, 1704, 1707, 1716, 1717, 1721, 1722, 1724, 1725, 1731, 1732, 1733, 1737, 1742, 1743, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1753, 1754, 1755, 1758, 1759, 1761, 1762, 1764, 1765, 1766, 1767, 1769, 1771, 1772, 1773, 1777, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1818, 1820, 1821, 1823, 1824, 1825, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1845, 1846, 1847, 1848, 1849, 1850, 1853, 1854, 1855, 1856, 1858, 1859, 1863, 1865, 1866, 1867, 1870, 1872, 1875, 1878, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1895, 1896, 1900, 1902, 1903, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1921, 1922, 1923, 1924, 1925, 1927, 1928, 1929, 1930, 1931, 1938, 1939, 1941, 1942, 1943, 1944, 1945, 1949, 1950, 1952, 1953, 1954, 1955, 1956, 1958, 1959, 1961, 1962, 1963, 1964, 1967, 1971, 1972, 1973, 1975, 1976, 1977, 1978, 1980, 1983, 2010, 2011, 2012, 2014, 2015, 2016, 2017, 2021, 2024, 2034, 2035, 2036, 2041, 2042, 2043, 2044, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2060, 2061, 2062, 2063, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2074, 2077, 2078, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2092, 2094, 2095, 2098, 2099, 2100, 2102, 2104, 2106, 2107, 2108, 2111, 2112, 2114, 2117, 2118], "growth_factor": 0, "backoff_factor": 0, "5": [0, 1, 10, 11, 12, 14, 18, 21, 23, 24, 28, 33, 35, 45, 52, 59, 60, 61, 63, 64, 66, 67, 71, 72, 74, 75, 76, 77, 156, 193, 210, 235, 262, 291, 300, 315, 317, 319, 323, 403, 404, 447, 473, 501, 515, 517, 519, 525, 539, 546, 560, 562, 586, 587, 588, 589, 590, 609, 688, 689, 693, 700, 737, 742, 743, 744, 745, 746, 748, 749, 763, 765, 776, 777, 865, 866, 869, 884, 890, 891, 892, 912, 915, 917, 944, 
947, 954, 955, 956, 960, 962, 966, 968, 969, 970, 971, 974, 981, 993, 997, 1088, 1090, 1092, 1099, 1101, 1103, 1104, 1107, 1108, 1109, 1112, 1125, 1127, 1129, 1130, 1136, 1137, 1140, 1145, 1148, 1149, 1152, 1155, 1157, 1158, 1159, 1161, 1168, 1170, 1171, 1172, 1173, 1176, 1177, 1178, 1215, 1227, 1233, 1235, 1237, 1239, 1240, 1250, 1258, 1262, 1280, 1284, 1285, 1294, 1295, 1296, 1299, 1302, 1303, 1320, 1323, 1326, 1329, 1330, 1331, 1332, 1337, 1338, 1341, 1343, 1344, 1346, 1347, 1348, 1349, 1350, 1356, 1360, 1367, 1368, 1374, 1375, 1413, 1420, 1422, 1423, 1425, 1428, 1429, 1430, 1432, 1433, 1434, 1435, 1436, 1440, 1441, 1442, 1443, 1447, 1448, 1449, 1451, 1452, 1453, 1455, 1456, 1458, 1459, 1460, 1462, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1478, 1480, 1481, 1482, 1487, 1489, 1490, 1491, 1492, 1493, 1497, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1515, 1518, 1523, 1524, 1527, 1534, 1537, 1541, 1542, 1543, 1549, 1550, 1551, 1552, 1553, 1556, 1559, 1565, 1567, 1571, 1573, 1575, 1577, 1578, 1579, 1580, 1581, 1582, 1599, 1600, 1608, 1609, 1611, 1612, 1616, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1628, 1629, 1631, 1633, 1636, 1644, 1669, 1671, 1677, 1685, 1694, 1704, 1705, 1706, 1716, 1717, 1737, 1747, 1750, 1752, 1753, 1755, 1760, 1761, 1763, 1765, 1771, 1772, 1773, 1775, 1776, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1800, 1806, 1810, 1816, 1818, 1820, 1821, 1823, 1824, 1827, 1828, 1831, 1832, 1833, 1834, 1838, 1843, 1844, 1847, 1848, 1854, 1855, 1856, 1863, 1875, 1882, 1883, 1884, 1885, 1886, 1887, 1890, 1891, 1900, 1906, 1908, 1909, 1910, 1911, 1913, 1916, 1924, 1927, 1928, 1929, 1930, 1931, 1939, 1941, 1943, 1944, 1945, 1947, 1948, 1950, 1959, 1960, 1963, 1965, 1971, 1977, 1978, 1979, 2010, 2014, 2015, 2016, 2017, 2018, 2024, 2025, 2035, 2036, 2041, 2042, 2043, 2044, 2046, 2049, 2051, 2053, 2054, 2057, 2058, 2062, 2063, 2065, 2067, 2069, 2072, 2077, 2082, 2083, 2085, 2087, 2088, 2089, 2101, 2102, 2104, 2111, 2112, 2113], "growth_interv": 0, "2000": [0, 28, 32, 1127, 1129, 1136, 1145, 1158, 1351, 1580, 1803, 1831, 1880, 1881, 1943], "float64": [0, 11, 242, 448, 451, 582, 884, 987, 1152, 1161, 1162, 1220, 1255, 1273, 1303, 1309, 1310, 1312, 1314, 1330, 1527, 1685, 1783, 1784, 1785, 1797, 1821, 1829, 1868, 1869, 1872, 1902, 1910, 1911, 1912, 1913, 1914, 1915, 1943, 1975, 1980, 2057, 2060, 2082, 2084, 2085, 2088, 2089, 2118], "out": [0, 1, 2, 3, 7, 8, 9, 11, 15, 18, 19, 23, 25, 28, 30, 35, 44, 45, 48, 52, 56, 59, 64, 66, 70, 72, 235, 314, 316, 318, 320, 401, 403, 450, 498, 514, 516, 518, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 696, 698, 699, 700, 701, 702, 759, 763, 767, 775, 776, 777, 782, 784, 788, 800, 839, 841, 842, 861, 862, 869, 870, 871, 872, 873, 874, 875, 876, 881, 885, 886, 887, 888, 889, 899, 902, 903, 904, 907, 909, 919, 923, 932, 943, 944, 946, 947, 948, 949, 950, 951, 952, 953, 956, 960, 963, 965, 966, 967, 968, 969, 971, 972, 974, 976, 987, 988, 989, 992, 993, 995, 996, 1008, 1013, 1021, 1023, 1025, 1052, 1053, 1065, 1066, 1079, 1083, 1088, 1089, 1090, 1091, 1093, 1096, 1101, 1102, 1104, 1105, 1106, 1108, 1109, 1110, 1114, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1163, 1164, 1167, 1168, 1170, 1171, 1173, 1178, 1197, 1210, 1214, 1215, 1216, 1217, 1218, 1228, 1229, 1230, 1233, 1234, 1235, 1236, 1238, 1239, 1240, 
1241, 1242, 1243, 1245, 1246, 1247, 1248, 1250, 1251, 1267, 1268, 1273, 1279, 1285, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1320, 1321, 1322, 1323, 1324, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1342, 1343, 1344, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1355, 1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1367, 1368, 1370, 1371, 1372, 1373, 1374, 1376, 1377, 1378, 1379, 1393, 1396, 1412, 1413, 1414, 1415, 1416, 1417, 1418, 1419, 1420, 1423, 1424, 1425, 1426, 1427, 1428, 1432, 1433, 1434, 1436, 1437, 1438, 1444, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1464, 1465, 1466, 1467, 1469, 1471, 1474, 1475, 1478, 1479, 1494, 1495, 1496, 1497, 1512, 1514, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1539, 1540, 1543, 1545, 1549, 1550, 1551, 1552, 1553, 1554, 1571, 1572, 1573, 1574, 1575, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1587, 1604, 1608, 1609, 1610, 1611, 1612, 1613, 1619, 1620, 1621, 1626, 1629, 1633, 1644, 1650, 1670, 1672, 1704, 1717, 1724, 1725, 1742, 1743, 1744, 1745, 1771, 1772, 1773, 1774, 1776, 1777, 1798, 1815, 1816, 1820, 1821, 1822, 1824, 1827, 1828, 1834, 1835, 1836, 1838, 1840, 1842, 1843, 1846, 1847, 1848, 1852, 1856, 1857, 1858, 1860, 1861, 1862, 1863, 1878, 1879, 1880, 1892, 1893, 1894, 1895, 1900, 1905, 1906, 1917, 1918, 1920, 1921, 1922, 1923, 1925, 1926, 1928, 1940, 1941, 1942, 1945, 1947, 1949, 1952, 1953, 1955, 1957, 1958, 1965, 1972, 1973, 1974, 1977, 1979, 1980, 1981, 2010, 2011, 2012, 2013, 2014, 2015, 2017, 2021, 2024, 2025, 2030, 2032, 2033, 2035, 2036, 2043, 2044, 2046, 2048, 2049, 2050, 2052, 2057, 2059, 2065, 2067, 2069, 2070, 2072, 2073, 2075, 2077, 2078, 2079, 2082, 2083, 2085, 2087, 2088, 2093, 2100, 2101, 2102, 2103, 2104, 2106, 2107, 2108, 2111, 2112, 2113, 2114, 2115, 2117], "place": [0, 3, 7, 11, 12, 19, 23, 24, 28, 30, 36, 37, 52, 55, 58, 59, 62, 64, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 123, 125, 127, 129, 132, 133, 135, 143, 145, 148, 149, 151, 154, 160, 162, 164, 166, 168, 170, 179, 188, 196, 200, 203, 205, 215, 217, 223, 224, 233, 238, 240, 246, 249, 251, 253, 255, 256, 259, 262, 264, 271, 273, 275, 279, 281, 285, 287, 294, 296, 298, 306, 308, 310, 312, 314, 316, 318, 320, 358, 360, 362, 364, 366, 368, 370, 373, 375, 377, 378, 385, 387, 389, 391, 393, 397, 401, 403, 422, 425, 428, 430, 441, 443, 445, 453, 458, 468, 471, 487, 488, 489, 490, 492, 494, 498, 501, 510, 513, 514, 516, 518, 524, 529, 531, 534, 536, 538, 551, 553, 555, 564, 566, 573, 577, 579, 595, 598, 600, 602, 604, 605, 614, 624, 761, 796, 800, 817, 841, 842, 861, 862, 904, 905, 907, 909, 928, 929, 930, 932, 958, 990, 1021, 1022, 1024, 1025, 1045, 1109, 1160, 1166, 1167, 1175, 1273, 1318, 1336, 1413, 1435, 1445, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1471, 1483, 1484, 1485, 1513, 1527, 1545, 1546, 1547, 1548, 1555, 1570, 1618, 1619, 1620, 1621, 1623, 1624, 1625, 1626, 1637, 1640, 1649, 1681, 1684, 1699, 1703, 1707, 1717, 1722, 1723, 1724, 1725, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1767, 1798, 1854, 1856, 1860, 1861, 1862, 1908, 1945, 2013, 2014, 2016, 2017, 2024, 2035, 2042, 2046, 2049, 2056, 2064, 2065, 2069, 2070, 2077, 2078, 2084, 2086, 2088, 2100, 2101, 2103, 2104, 2110, 2112], "variant": [0, 5, 24, 52, 862, 1304, 1315, 1328, 1331, 1420, 1423, 1587, 1588, 1589, 1590, 1784, 1785, 1786, 1798, 1902, 1952, 2013, 2056, 2075, 
2088, 2108, 2109, 2112], "explicitli": [0, 5, 8, 14, 28, 40, 55, 88, 930, 1045, 1097, 1099, 1109, 1236, 1314, 1327, 1331, 1339, 1381, 1644, 1704, 1809, 1871, 1924, 2014, 2016, 2017, 2018, 2023, 2025, 2046, 2049, 2052, 2053, 2054, 2056, 2062, 2067, 2070, 2076, 2077, 2082, 2088, 2104], "suppli": [0, 5, 7, 14, 15, 28, 1043, 1533, 1780, 2017, 2046, 2067, 2082, 2104, 2113], "won": [0, 8, 24, 30, 47, 52, 58, 460, 1188, 1273, 1285, 1527, 1670, 1710, 1711, 2012, 2043, 2049, 2077, 2091, 2103, 2104, 2111, 2113], "go": [0, 1, 7, 15, 23, 28, 30, 33, 44, 50, 52, 59, 64, 501, 502, 562, 904, 906, 909, 928, 932, 1194, 1346, 1436, 1437, 1438, 1520, 1521, 1522, 1724, 1725, 2016, 2017, 2024, 2033, 2034, 2035, 2043, 2045, 2046, 2049, 2050, 2056, 2057, 2059, 2063, 2067, 2070, 2085, 2087, 2099, 2100, 2101, 2102, 2103, 2104], "addmm": [0, 52, 53, 108, 1920, 2015, 2034, 2060, 2068, 2082, 2108, 2112], "b": [0, 1, 3, 11, 12, 23, 28, 30, 35, 45, 52, 64, 66, 69, 87, 89, 262, 337, 400, 619, 688, 689, 751, 760, 784, 827, 884, 904, 905, 907, 908, 909, 910, 926, 928, 929, 944, 955, 956, 958, 962, 964, 966, 969, 974, 990, 993, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1051, 1052, 1101, 1104, 1108, 1109, 1154, 1155, 1156, 1157, 1163, 1167, 1180, 1181, 1199, 1215, 1239, 1250, 1270, 1276, 1284, 1285, 1293, 1294, 1296, 1305, 1306, 1314, 1318, 1319, 1323, 1326, 1327, 1328, 1329, 1330, 1331, 1334, 1335, 1336, 1339, 1340, 1343, 1346, 1355, 1357, 1358, 1364, 1372, 1377, 1379, 1412, 1440, 1444, 1469, 1470, 1477, 1514, 1604, 1606, 1625, 1632, 1650, 1731, 1759, 1760, 1761, 1762, 1763, 1764, 1772, 1795, 1797, 1828, 1834, 1847, 1850, 1866, 1877, 1896, 1905, 1910, 1911, 1912, 1914, 1915, 1919, 1924, 1925, 1927, 1934, 1935, 1936, 1945, 1952, 1953, 1955, 1974, 1979, 2014, 2015, 2016, 2017, 2021, 2034, 2035, 2036, 2041, 2042, 2044, 2046, 2049, 2051, 2054, 2055, 2060, 2062, 2065, 2071, 2072, 2078, 2079, 2082, 2083, 2086, 2087, 2093, 2099, 2101, 2102, 2104, 2106, 2108, 2112, 2113, 2115], "c": [0, 1, 3, 8, 9, 14, 19, 23, 25, 28, 35, 45, 53, 58, 64, 87, 89, 262, 337, 501, 522, 619, 688, 824, 881, 884, 904, 907, 909, 910, 955, 966, 978, 994, 1045, 1051, 1086, 1101, 1104, 1109, 1155, 1156, 1157, 1167, 1168, 1172, 1177, 1215, 1227, 1273, 1284, 1296, 1303, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1319, 1320, 1323, 1325, 1329, 1330, 1332, 1334, 1336, 1337, 1412, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1436, 1437, 1438, 1440, 1441, 1442, 1443, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1462, 1465, 1466, 1467, 1471, 1473, 1474, 1475, 1481, 1489, 1490, 1491, 1494, 1495, 1496, 1498, 1499, 1509, 1510, 1511, 1515, 1520, 1521, 1522, 1523, 1524, 1525, 1530, 1531, 1532, 1534, 1539, 1540, 1549, 1550, 1551, 1552, 1553, 1554, 1562, 1567, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1598, 1606, 1616, 1617, 1629, 1633, 1669, 1675, 1676, 1685, 1731, 1760, 1762, 1763, 1764, 1771, 1772, 1815, 1831, 1847, 1884, 1905, 1919, 1924, 1945, 1962, 1967, 1968, 2013, 2014, 2015, 2016, 2017, 2025, 2034, 2035, 2036, 2045, 2046, 2048, 2049, 2050, 2053, 2054, 2056, 2063, 2070, 2078, 2079, 2082, 2083, 2086, 2087, 2094, 2100, 2101, 2102, 2103, 2104, 2106, 2108, 2112, 2113, 2114, 2115, 2116], "addmm_": [0, 2015, 2034, 2082], "d": [0, 1, 11, 23, 24, 28, 34, 35, 56, 64, 315, 323, 337, 473, 515, 517, 519, 546, 585, 619, 689, 693, 869, 884, 904, 907, 909, 910, 943, 944, 945, 947, 954, 955, 956, 960, 963, 966, 1096, 1098, 1108, 1109, 1122, 1126, 1127, 1128, 1129, 1134, 1135, 1138, 1139, 1144, 1145, 1146, 1149, 1150, 1151, 1178, 1181, 1214, 1231, 
1232, 1239, 1248, 1270, 1274, 1278, 1316, 1317, 1367, 1378, 1415, 1430, 1438, 1439, 1443, 1456, 1460, 1461, 1465, 1466, 1467, 1469, 1471, 1473, 1478, 1480, 1491, 1497, 1499, 1511, 1522, 1536, 1542, 1543, 1567, 1576, 1577, 1579, 1598, 1633, 1644, 1650, 1678, 1703, 1704, 1716, 1732, 1766, 1771, 1772, 1796, 1816, 1834, 1840, 1843, 1855, 1863, 1868, 1919, 1924, 1938, 1939, 1945, 1948, 1953, 1954, 1955, 1956, 1971, 1977, 1979, 2012, 2015, 2024, 2034, 2035, 2036, 2041, 2043, 2046, 2049, 2050, 2052, 2053, 2054, 2055, 2062, 2067, 2077, 2078, 2082, 2083, 2087, 2088, 2103, 2104, 2106, 2111, 2113, 2115], "best": [0, 1, 7, 15, 18, 23, 28, 35, 48, 55, 58, 865, 866, 936, 963, 976, 1188, 1289, 1290, 1319, 1533, 1798, 1811, 2013, 2014, 2016, 2021, 2033, 2041, 2042, 2043, 2049, 2051, 2070, 2077, 2082, 2084, 2094, 2101, 2102, 2104, 2107], "stabil": [0, 1305, 1310, 1361, 1440, 1441, 1442, 1443, 1480, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1542, 1564, 1567, 1576, 1630, 1693, 1716, 1732, 1766, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1831, 2050, 2104, 2113], "argument": [0, 1, 3, 4, 5, 8, 9, 12, 14, 18, 19, 23, 28, 29, 30, 32, 33, 34, 35, 37, 45, 48, 51, 52, 53, 55, 59, 60, 63, 64, 66, 72, 74, 75, 86, 90, 152, 198, 211, 315, 323, 417, 447, 448, 449, 450, 451, 489, 490, 515, 519, 562, 582, 583, 585, 586, 587, 589, 590, 605, 625, 683, 684, 686, 687, 688, 689, 690, 691, 692, 693, 694, 696, 698, 699, 700, 701, 702, 737, 741, 742, 743, 744, 745, 746, 796, 797, 802, 822, 823, 824, 827, 828, 829, 845, 861, 862, 865, 867, 869, 884, 885, 886, 887, 888, 889, 893, 894, 895, 896, 897, 904, 905, 906, 907, 909, 910, 913, 915, 916, 917, 918, 920, 921, 928, 929, 944, 945, 946, 948, 949, 950, 951, 952, 953, 954, 956, 960, 963, 965, 966, 967, 968, 969, 971, 973, 974, 980, 987, 990, 992, 993, 995, 996, 998, 1008, 1030, 1032, 1036, 1037, 1043, 1051, 1052, 1054, 1069, 1078, 1082, 1088, 1089, 1090, 1091, 1092, 1093, 1096, 1097, 1098, 1099, 1100, 1101, 1104, 1106, 1107, 1108, 1110, 1111, 1112, 1114, 1119, 1122, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1152, 1153, 1154, 1155, 1156, 1157, 1159, 1161, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1186, 1188, 1208, 1209, 1214, 1215, 1216, 1217, 1227, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1248, 1250, 1267, 1268, 1270, 1273, 1276, 1285, 1289, 1290, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1302, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1355, 1356, 1357, 1358, 1360, 1361, 1362, 1364, 1365, 1367, 1368, 1371, 1372, 1373, 1374, 1376, 1377, 1378, 1379, 1396, 1404, 1409, 1412, 1413, 1415, 1417, 1418, 1419, 1420, 1421, 1423, 1424, 1425, 1427, 1441, 1442, 1443, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1470, 1473, 1476, 1478, 1485, 1488, 1489, 1490, 1491, 1492, 1497, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1523, 1524, 1525, 1527, 1528, 1533, 1534, 1535, 1543, 1567, 1571, 1575, 1579, 1581, 1582, 1590, 1631, 1633, 1670, 1685, 1707, 1710, 1711, 1717, 1737, 1739, 1743, 1744, 1748, 1752, 1758, 1767, 1771, 1773, 1776, 1777, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1803, 1815, 1816, 1820, 1821, 1824, 
1825, 1827, 1828, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1846, 1847, 1848, 1849, 1856, 1858, 1863, 1867, 1878, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1895, 1900, 1903, 1905, 1906, 1908, 1910, 1911, 1912, 1913, 1914, 1915, 1917, 1918, 1920, 1921, 1922, 1923, 1924, 1925, 1927, 1928, 1935, 1940, 1941, 1942, 1943, 1945, 1946, 1947, 1949, 1950, 1952, 1953, 1954, 1955, 1956, 1958, 1961, 1965, 1972, 1973, 1977, 1978, 1979, 1980, 1987, 1989, 1991, 1992, 2004, 2007, 2010, 2011, 2012, 2015, 2016, 2017, 2019, 2021, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2046, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2061, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2075, 2076, 2077, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2098, 2101, 2102, 2103, 2107, 2112, 2114, 2118], "respect": [0, 1, 5, 8, 17, 28, 29, 32, 33, 35, 37, 45, 47, 55, 61, 63, 64, 198, 211, 489, 582, 605, 625, 692, 693, 694, 763, 798, 822, 845, 889, 895, 897, 909, 910, 918, 924, 928, 929, 971, 998, 1097, 1099, 1100, 1166, 1168, 1169, 1170, 1171, 1172, 1173, 1177, 1227, 1273, 1305, 1309, 1310, 1319, 1329, 1332, 1334, 1337, 1346, 1378, 1417, 1439, 1440, 1446, 1457, 1458, 1459, 1463, 1478, 1480, 1497, 1527, 1528, 1543, 1573, 1575, 1576, 1577, 1578, 1580, 1624, 1633, 1739, 1743, 1745, 1767, 1769, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1811, 1901, 1905, 1928, 1945, 2035, 2042, 2043, 2046, 2049, 2054, 2057, 2058, 2075, 2082, 2083, 2084, 2089, 2103, 2107], "follow": [0, 1, 2, 3, 5, 7, 9, 11, 12, 14, 15, 18, 21, 22, 23, 24, 27, 28, 30, 32, 33, 34, 35, 37, 44, 45, 46, 47, 48, 52, 53, 55, 57, 59, 60, 62, 64, 66, 68, 74, 75, 76, 77, 84, 85, 86, 489, 490, 619, 683, 763, 782, 788, 795, 796, 797, 803, 817, 818, 819, 820, 822, 823, 824, 825, 827, 828, 858, 865, 866, 893, 894, 895, 902, 909, 910, 928, 929, 943, 960, 965, 969, 978, 981, 990, 1051, 1065, 1109, 1127, 1129, 1153, 1163, 1176, 1188, 1198, 1227, 1273, 1274, 1283, 1287, 1294, 1305, 1316, 1326, 1329, 1330, 1343, 1346, 1365, 1368, 1431, 1440, 1446, 1462, 1470, 1473, 1478, 1497, 1527, 1543, 1559, 1575, 1579, 1586, 1638, 1652, 1685, 1709, 1710, 1711, 1714, 1715, 1717, 1724, 1725, 1731, 1772, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1806, 1809, 1817, 1828, 1856, 1863, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1903, 1904, 1905, 1906, 1913, 1924, 1929, 1950, 1958, 1965, 1968, 2012, 2014, 2015, 2016, 2017, 2018, 2019, 2021, 2023, 2024, 2027, 2028, 2033, 2034, 2035, 2036, 2037, 2041, 2042, 2043, 2044, 2045, 2046, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2075, 2077, 2078, 2079, 2082, 2083, 2085, 2086, 2088, 2089, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2111, 2113, 2114, 2115, 2118], "describ": [0, 5, 7, 8, 9, 23, 24, 28, 30, 32, 34, 37, 39, 45, 47, 52, 53, 64, 235, 515, 795, 796, 797, 818, 819, 820, 845, 863, 961, 1034, 1051, 1068, 1109, 1227, 1294, 1334, 1431, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1464, 1465, 1466, 1467, 1468, 1471, 1473, 1474, 1475, 1481, 1484, 1487, 1489, 1490, 1491, 1493, 1499, 1518, 1520, 1521, 1522, 1533, 1534, 1541, 1542, 1546, 1559, 1567, 1575, 1576, 1577, 1579, 1616, 1628, 1629, 1638, 1672, 1716, 1809, 1950, 2013, 2014, 2016, 2017, 2025, 2041, 2042, 2043, 2046, 2048, 2049, 2051, 2054, 2056, 2057, 2062, 2067, 2070, 2071, 2072, 2075, 2078, 2079, 2081, 2089, 2101, 
2104, 2105, 2109], "part": [0, 1, 3, 4, 5, 6, 7, 9, 14, 15, 18, 23, 24, 28, 30, 33, 35, 47, 48, 52, 53, 55, 59, 60, 64, 84, 85, 845, 918, 983, 987, 1109, 1263, 1265, 1266, 1269, 1273, 1284, 1286, 1289, 1290, 1304, 1310, 1312, 1321, 1328, 1331, 1527, 1567, 1685, 1717, 1735, 1801, 1828, 1840, 1909, 1950, 1953, 1954, 1955, 1956, 2012, 2013, 2014, 2016, 2017, 2027, 2028, 2042, 2043, 2046, 2049, 2051, 2052, 2054, 2057, 2062, 2067, 2070, 2072, 2077, 2078, 2079, 2082, 2087, 2089, 2099, 2101, 2103, 2104, 2105, 2111, 2112, 2113], "expos": [0, 1, 8, 19, 28, 32, 38, 55, 63, 64, 798, 1163, 2043, 2046, 2056, 2070, 2072, 2101, 2114], "namespac": [0, 64, 1083, 2014, 2018, 2021, 2049, 2057, 2065, 2067, 2075, 2094, 2102, 2114], "below": [0, 1, 5, 9, 12, 14, 23, 24, 28, 30, 33, 34, 35, 37, 39, 44, 47, 48, 50, 51, 53, 64, 66, 74, 75, 683, 737, 751, 760, 798, 817, 1051, 1096, 1097, 1098, 1099, 1100, 1109, 1137, 1139, 1143, 1181, 1192, 1217, 1227, 1273, 1290, 1330, 1331, 1336, 1343, 1375, 1457, 1458, 1459, 1478, 1480, 1487, 1497, 1499, 1523, 1524, 1525, 1527, 1543, 1559, 1580, 1616, 1642, 1689, 1717, 1737, 1771, 1798, 1868, 1871, 1908, 1940, 1950, 1953, 1954, 1955, 1956, 2014, 2016, 2017, 2019, 2023, 2024, 2033, 2034, 2042, 2043, 2046, 2048, 2049, 2054, 2055, 2057, 2059, 2061, 2062, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2074, 2077, 2078, 2079, 2083, 2086, 2089, 2095, 2099, 2100, 2102, 2104, 2105, 2106, 2107, 2109, 2111, 2113], "do": [0, 1, 4, 7, 8, 9, 11, 14, 15, 23, 24, 28, 30, 32, 33, 36, 37, 40, 47, 48, 50, 52, 55, 56, 57, 60, 61, 63, 64, 86, 497, 515, 517, 519, 761, 826, 829, 867, 896, 899, 904, 907, 909, 910, 919, 923, 930, 932, 962, 975, 976, 978, 1005, 1009, 1043, 1054, 1088, 1089, 1090, 1091, 1160, 1161, 1163, 1172, 1186, 1188, 1198, 1214, 1236, 1260, 1270, 1273, 1276, 1285, 1289, 1319, 1337, 1346, 1353, 1363, 1365, 1374, 1413, 1423, 1435, 1445, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1483, 1484, 1485, 1513, 1546, 1547, 1548, 1555, 1570, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1635, 1637, 1652, 1707, 1717, 1724, 1725, 1773, 1778, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1815, 1821, 1871, 1965, 2012, 2013, 2014, 2015, 2017, 2019, 2021, 2025, 2026, 2033, 2034, 2035, 2042, 2043, 2044, 2046, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2069, 2071, 2072, 2073, 2077, 2079, 2082, 2083, 2084, 2085, 2086, 2087, 2089, 2098, 2100, 2101, 2103, 2106, 2109, 2110, 2112, 2113, 2115], "defin": [0, 1, 3, 5, 9, 11, 14, 15, 23, 24, 28, 29, 30, 33, 34, 35, 37, 39, 45, 47, 48, 52, 53, 55, 60, 64, 417, 437, 447, 449, 451, 519, 568, 795, 800, 801, 802, 804, 805, 841, 863, 864, 865, 866, 893, 894, 895, 896, 909, 910, 1065, 1092, 1109, 1110, 1129, 1157, 1164, 1188, 1215, 1217, 1233, 1235, 1236, 1262, 1273, 1279, 1296, 1303, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1319, 1320, 1323, 1325, 1326, 1330, 1331, 1332, 1334, 1336, 1337, 1343, 1379, 1463, 1468, 1474, 1475, 1482, 1483, 1484, 1485, 1492, 1494, 1495, 1496, 1527, 1533, 1546, 1561, 1563, 1568, 1570, 1587, 1588, 1598, 1633, 1685, 1691, 1707, 1710, 1717, 1743, 1745, 1752, 1753, 1773, 1776, 1778, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1800, 1801, 1803, 1806, 1809, 1827, 1836, 1838, 1840, 1847, 1875, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1905, 1907, 1950, 1953, 1954, 1955, 1956, 1980, 2010, 2012, 2014, 2017, 2019, 2021, 2032, 2033, 2034, 2036, 2039, 2043, 2046, 2048, 2052, 2054, 2055, 2057, 2059, 2063, 2067, 2069, 
2070, 2071, 2072, 2075, 2077, 2082, 2083, 2088, 2089, 2091, 2098, 2099, 2101, 2103, 2105, 2110, 2114], "still": [0, 1, 2, 7, 8, 23, 27, 28, 33, 35, 37, 47, 52, 55, 63, 64, 488, 797, 802, 904, 906, 909, 914, 978, 1187, 1188, 1277, 1283, 1291, 1493, 1518, 1717, 1718, 1719, 1720, 1859, 2014, 2017, 2033, 2034, 2035, 2036, 2042, 2043, 2046, 2048, 2049, 2051, 2052, 2055, 2062, 2063, 2067, 2069, 2070, 2072, 2077, 2078, 2079, 2082, 2095, 2101, 2103, 2108, 2109, 2113, 2115], "chang": [0, 1, 2, 3, 7, 11, 12, 18, 19, 24, 28, 30, 32, 33, 34, 35, 37, 46, 52, 53, 55, 56, 59, 60, 62, 63, 64, 65, 141, 235, 256, 323, 460, 498, 501, 519, 522, 558, 619, 683, 822, 881, 923, 924, 976, 978, 990, 991, 992, 1008, 1009, 1019, 1030, 1032, 1043, 1044, 1051, 1052, 1054, 1097, 1145, 1161, 1166, 1182, 1197, 1201, 1220, 1227, 1248, 1270, 1273, 1284, 1285, 1304, 1315, 1317, 1318, 1319, 1322, 1335, 1375, 1404, 1466, 1469, 1487, 1497, 1527, 1559, 1580, 1586, 1587, 1588, 1591, 1598, 1633, 1658, 1659, 1660, 1685, 1707, 1708, 1717, 1719, 1720, 1724, 1725, 1734, 1737, 1738, 1767, 1771, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1800, 1803, 1806, 1807, 1809, 1811, 1813, 1827, 1867, 1871, 1908, 1919, 1922, 1923, 1924, 1949, 1968, 1972, 1973, 1987, 1989, 2012, 2013, 2014, 2021, 2023, 2024, 2025, 2027, 2030, 2033, 2034, 2035, 2036, 2042, 2043, 2044, 2045, 2046, 2049, 2050, 2052, 2054, 2055, 2057, 2059, 2060, 2062, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2077, 2079, 2082, 2084, 2085, 2086, 2087, 2088, 2091, 2092, 2093, 2095, 2098, 2100, 2101, 2103, 2105, 2106, 2109, 2110, 2113], "which": [0, 1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 20, 23, 24, 29, 30, 32, 33, 34, 35, 36, 37, 40, 45, 46, 47, 48, 50, 52, 53, 55, 59, 60, 63, 64, 66, 74, 75, 90, 152, 260, 291, 315, 317, 319, 321, 323, 337, 474, 488, 489, 501, 515, 517, 519, 539, 562, 606, 609, 619, 627, 687, 700, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 763, 767, 784, 793, 794, 800, 802, 822, 826, 829, 841, 858, 861, 862, 863, 865, 880, 884, 889, 896, 897, 899, 909, 910, 912, 913, 914, 915, 916, 917, 918, 928, 929, 936, 947, 960, 963, 966, 970, 976, 978, 990, 996, 997, 998, 1008, 1009, 1014, 1021, 1022, 1023, 1024, 1025, 1036, 1037, 1038, 1065, 1067, 1073, 1074, 1085, 1087, 1092, 1096, 1097, 1098, 1099, 1100, 1109, 1125, 1130, 1133, 1137, 1140, 1143, 1148, 1149, 1150, 1151, 1152, 1157, 1160, 1166, 1167, 1170, 1171, 1172, 1173, 1174, 1178, 1186, 1187, 1188, 1192, 1198, 1202, 1210, 1214, 1217, 1236, 1248, 1258, 1264, 1270, 1271, 1273, 1274, 1276, 1279, 1283, 1284, 1285, 1289, 1306, 1309, 1310, 1319, 1323, 1325, 1326, 1329, 1330, 1334, 1336, 1337, 1339, 1342, 1343, 1345, 1351, 1367, 1374, 1379, 1413, 1419, 1422, 1423, 1429, 1430, 1431, 1433, 1434, 1435, 1437, 1438, 1441, 1442, 1443, 1446, 1455, 1456, 1458, 1459, 1462, 1463, 1470, 1477, 1478, 1487, 1489, 1490, 1491, 1492, 1494, 1495, 1496, 1497, 1499, 1513, 1517, 1521, 1522, 1523, 1524, 1525, 1527, 1530, 1532, 1533, 1542, 1556, 1559, 1561, 1563, 1567, 1571, 1576, 1577, 1591, 1615, 1632, 1633, 1635, 1645, 1652, 1671, 1672, 1685, 1691, 1692, 1700, 1707, 1716, 1717, 1724, 1725, 1731, 1733, 1734, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1755, 1767, 1769, 1772, 1778, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1817, 1829, 1843, 1847, 1849, 1850, 1854, 1856, 1863, 1867, 1871, 1875, 
1876, 1877, 1883, 1895, 1900, 1904, 1905, 1907, 1908, 1916, 1919, 1924, 1929, 1935, 1944, 1949, 1950, 1953, 1954, 1955, 1956, 1960, 1961, 1964, 1965, 1977, 1983, 1991, 1992, 1993, 2009, 2012, 2014, 2016, 2017, 2019, 2023, 2024, 2025, 2027, 2028, 2029, 2030, 2033, 2035, 2036, 2041, 2042, 2044, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2059, 2061, 2063, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2075, 2076, 2077, 2078, 2079, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2092, 2093, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2106, 2107, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116, 2117, 2118], "unlist": 0, "downstream": [0, 3, 978, 2030, 2104, 2105, 2113], "stabl": [0, 1, 2, 11, 12, 28, 35, 47, 48, 733, 734, 735, 736, 748, 749, 759, 764, 765, 766, 767, 768, 880, 990, 1109, 1309, 1310, 1314, 1319, 1327, 1331, 1334, 1337, 1339, 1346, 1440, 1900, 2013, 2015, 2021, 2041, 2044, 2061, 2077], "believ": [0, 8, 2104], "unstabl": [0, 35, 1309, 1310, 1337, 1354, 1363, 1652, 1928, 2083], "__matmul__": 0, "addbmm": [0, 102, 944, 2015, 2060, 2068], "addmv": [0, 110, 2015, 2034, 2068], "addr": [0, 28, 47, 112, 2015, 2068, 2115], "baddbmm": [0, 154, 2015, 2060, 2068], "bmm": [0, 1965, 2015, 2034, 2036, 2060, 2061, 2068, 2082, 2108], "chain_matmul": [0, 2015, 2068], "multi_dot": [0, 966], "conv1d": [0, 711, 715, 718, 728, 744, 1457, 1465, 1503, 1965, 2015, 2068, 2072, 2074], "conv2d": [0, 52, 712, 716, 719, 721, 729, 745, 795, 817, 1273, 1283, 1285, 1289, 1290, 1458, 1466, 1504, 1527, 1528, 1534, 1556, 1579, 1724, 1752, 1965, 2014, 2015, 2027, 2065, 2067, 2068, 2072, 2074, 2075, 2087, 2093], "conv3d": [0, 713, 717, 720, 722, 730, 746, 1459, 1467, 1505, 1725, 1965, 2015, 2068, 2072, 2074], "conv_transpose1d": [0, 2015, 2068, 2074], "conv_transpose2d": [0, 2015, 2068, 2074], "conv_transpose3d": [0, 2015, 2068, 2074], "grucel": [0, 2060, 2072, 2074, 2075], "lstmcell": [0, 2060, 2072, 2074, 2075], "matmul": [0, 2, 11, 956, 976, 1177, 1378, 1579, 1817, 1827, 1871, 1928, 2015, 2034, 2036, 2046, 2060, 2068, 2074, 2082, 2098], "mv": [0, 11, 1285, 1965, 2015, 2034, 2068, 2082], "prelu": [0, 1528, 2015, 2068, 2074], "rnncell": [0, 2072, 2074, 2075], "__pow__": 0, "__rdiv__": 0, "__rpow__": 0, "__rtruediv__": 0, "aco": [0, 96, 630, 631, 870, 2015, 2034, 2068, 2089, 2108], "asin": [0, 143, 632, 633, 872, 2015, 2034, 2068, 2082, 2108], "cosh": [0, 205, 640, 641, 687, 2015, 2034, 2068, 2108], "cosine_embedding_loss": [0, 2015, 2068], "cdist": [0, 2046, 2068], "cosine_similar": [0, 1577, 2015, 2068], "cross_entropi": [0, 34, 2015], "cumprod": [0, 215, 2015, 2034, 2068], "cumsum": [0, 217, 1092, 1965, 2015, 2034, 2068, 2108], "dist": [0, 24, 28, 29, 32, 33, 35, 48, 55, 967, 968, 969, 1303, 1309, 1310, 1311, 1313, 1314, 1315, 1319, 1320, 1331, 1332, 1335, 1337, 1338, 1361, 1364, 1536, 1567, 1717, 1731, 1928, 2015, 2048, 2068, 2077, 2079], "erfinv": [0, 253, 2015, 2034, 2068, 2082, 2083], "exp": [0, 1, 35, 255, 646, 647, 773, 914, 915, 917, 1152, 1333, 1353, 1361, 1440, 1445, 1462, 1468, 1492, 1516, 1517, 1531, 1541, 1555, 1558, 1560, 1561, 1563, 1564, 1568, 1607, 1653, 1677, 1686, 1687, 1691, 1693, 1696, 1731, 1824, 1884, 1885, 1907, 1924, 2015, 2034, 2043, 2067, 2068, 2083, 2108], "expm1": [0, 259, 648, 649, 2015, 2034, 2068, 2082, 2083, 2108], "group_norm": [0, 2015, 2068, 2074], "hinge_embedding_loss": [0, 2015, 2068], "kl_div": [0, 2015, 2068], "l1_loss": [0, 2015, 2068], "layer_norm": [0, 1499, 2015, 2068, 2074], "log": [0, 2, 14, 23, 24, 25, 35, 37, 40, 41, 44, 45, 81, 83, 
378, 379, 656, 663, 683, 1119, 1349, 1351, 1353, 1354, 1361, 1390, 1431, 1439, 1440, 1462, 1480, 1492, 1516, 1517, 1531, 1534, 1541, 1560, 1561, 1564, 1630, 1635, 1645, 1652, 1653, 1669, 1677, 1691, 1693, 1717, 2013, 2015, 2023, 2030, 2034, 2043, 2046, 2048, 2049, 2065, 2067, 2068, 2071, 2083, 2087, 2091, 2092, 2093, 2106, 2107, 2108, 2110, 2111, 2113], "log_softmax": [0, 1446, 1492, 1534, 1617, 1669, 1691, 2015, 2035, 2065, 2068, 2083], "log10": [0, 373, 657, 658, 2015, 2034, 2068, 2108], "log1p": [0, 375, 659, 660, 2015, 2034, 2068, 2082, 2083, 2108], "log2": [0, 377, 661, 662, 2015, 2034, 2068, 2083, 2108], "margin_ranking_loss": [0, 2015, 2068], "mse_loss": [0, 59, 1166, 2015, 2068], "multilabel_margin_loss": [0, 2015, 2068], "multi_margin_loss": [0, 2015, 2068], "nll_loss": [0, 2015, 2068], "norm": [0, 33, 34, 35, 55, 57, 64, 705, 706, 707, 708, 709, 710, 964, 1103, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1305, 1309, 1318, 1319, 1326, 1331, 1343, 1469, 1470, 1518, 1536, 1542, 1572, 1573, 1574, 1575, 1576, 1624, 1625, 1670, 1674, 1716, 1721, 1722, 1732, 1733, 1742, 1743, 1748, 1751, 1752, 1766, 1769, 1786, 1848, 2015, 2019, 2042, 2043, 2054, 2057, 2060, 2068], "normal": [0, 1, 19, 24, 28, 32, 47, 52, 55, 64, 84, 338, 353, 379, 456, 558, 998, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1188, 1235, 1270, 1273, 1278, 1309, 1351, 1435, 1441, 1442, 1443, 1465, 1466, 1467, 1471, 1481, 1489, 1490, 1491, 1499, 1509, 1510, 1511, 1515, 1542, 1555, 1567, 1571, 1572, 1573, 1574, 1575, 1603, 1633, 1634, 1643, 1647, 1651, 1678, 1682, 1716, 1722, 1723, 1732, 1733, 1748, 1756, 1757, 1766, 1769, 1795, 1811, 1831, 1840, 1841, 1848, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1924, 1965, 2012, 2013, 2014, 2015, 2019, 2034, 2036, 2041, 2043, 2046, 2050, 2053, 2067, 2068, 2070, 2074, 2077, 2083, 2087, 2089, 2091, 2097, 2108, 2118], "pdist": [0, 1536, 2015, 2068], "poisson_nll_loss": [0, 2015, 2068], "pow": [0, 1, 471, 498, 912, 913, 916, 939, 1152, 2015, 2017, 2034, 2042, 2043, 2068, 2082, 2088, 2108], "prod": [0, 44, 323, 519, 1126, 1128, 1131, 1132, 1134, 1135, 1138, 1139, 1141, 1142, 1144, 1146, 1339, 1340, 1469, 1473, 1579, 1963, 1965, 2015, 2034, 2068, 2082, 2104, 2108], "reciproc": [0, 487, 666, 667, 1858, 2015, 2034, 2068, 2108], "rsqrt": [0, 513, 2015, 2034, 2068, 2108], "sinh": [0, 538, 674, 675, 886, 2015, 2034, 2068, 2082, 2108], "smooth_l1_loss": [0, 2015, 2068], "soft_margin_loss": [0, 2015, 2068], "softmax": [0, 35, 799, 1431, 1462, 1492, 1517, 1562, 1616, 1635, 1645, 1652, 1685, 1692, 1904, 2015, 2034, 2035, 2036, 2046, 2068, 2074, 2082, 2083], "softmin": [0, 2015], "softplu": [0, 35, 1526, 1664, 2015, 2068], "sum": [0, 1, 3, 12, 23, 28, 29, 32, 35, 37, 55, 60, 61, 64, 66, 73, 80, 498, 519, 568, 749, 897, 912, 913, 914, 915, 916, 917, 918, 928, 929, 937, 998, 1023, 1024, 1025, 1091, 1092, 1109, 1170, 1171, 1172, 1177, 1250, 1305, 1326, 1330, 1343, 1351, 1352, 1353, 1361, 1413, 1421, 1439, 1440, 1446, 1459, 1460, 1462, 1463, 1470, 1473, 1480, 1486, 1487, 1492, 1493, 1494, 1495, 1496, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1561, 1563, 1576, 1577, 1579, 1605, 1606, 1616, 1617, 1625, 1630, 1635, 1645, 1654, 1655, 1656, 1669, 1677, 1691, 1692, 1717, 1748, 1753, 1754, 1772, 1783, 1803, 1849, 1871, 1886, 1905, 1907, 1948, 1950, 1965, 2013, 2015, 2017, 2021, 2030, 2034, 2042, 2043, 2046, 2048, 2049, 2050, 2057, 2060, 2067, 2068, 
2077, 2078, 2082, 2083, 2088, 2098, 2099, 2101, 2102, 2104, 2108, 2111, 2113, 2115], "renorm": [0, 494, 1469, 1470, 1624, 1625, 2015, 2068], "tan": [0, 577, 678, 679, 887, 2015, 2034, 2068, 2082, 2087, 2108], "triplet_margin_loss": [0, 2015, 2068], "take": [0, 1, 2, 3, 4, 5, 7, 9, 14, 19, 23, 24, 28, 30, 32, 33, 35, 37, 46, 47, 48, 50, 52, 55, 57, 58, 60, 61, 62, 63, 64, 66, 74, 75, 89, 763, 796, 801, 817, 823, 829, 866, 912, 913, 914, 915, 916, 917, 923, 924, 932, 1008, 1045, 1097, 1099, 1100, 1125, 1127, 1130, 1133, 1137, 1138, 1140, 1143, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1198, 1306, 1328, 1331, 1431, 1440, 1470, 1474, 1475, 1478, 1497, 1521, 1522, 1523, 1524, 1525, 1527, 1535, 1543, 1571, 1581, 1582, 1628, 1629, 1646, 1671, 1685, 1717, 1770, 1795, 1799, 1824, 1929, 1952, 1966, 1977, 2012, 2014, 2015, 2017, 2024, 2025, 2027, 2033, 2034, 2035, 2036, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2055, 2057, 2061, 2062, 2063, 2067, 2068, 2070, 2071, 2072, 2077, 2078, 2079, 2082, 2085, 2086, 2087, 2090, 2092, 2095, 2098, 2099, 2100, 2102, 2103, 2104, 2105, 2107, 2111, 2112], "all": [0, 1, 2, 3, 5, 6, 7, 9, 11, 12, 14, 15, 17, 18, 19, 20, 23, 24, 28, 29, 30, 32, 33, 35, 37, 40, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 58, 60, 61, 63, 64, 90, 152, 262, 315, 317, 323, 337, 488, 490, 515, 517, 519, 548, 562, 609, 612, 619, 683, 689, 690, 695, 697, 701, 702, 738, 763, 794, 796, 797, 817, 843, 844, 858, 862, 865, 878, 881, 893, 894, 895, 897, 899, 901, 904, 905, 906, 907, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 932, 936, 937, 938, 942, 943, 946, 955, 957, 962, 963, 967, 970, 971, 975, 976, 978, 986, 990, 997, 1007, 1011, 1012, 1014, 1015, 1023, 1024, 1033, 1041, 1045, 1051, 1055, 1056, 1064, 1065, 1073, 1074, 1076, 1077, 1081, 1083, 1085, 1126, 1127, 1128, 1129, 1132, 1135, 1136, 1139, 1142, 1144, 1146, 1161, 1163, 1166, 1167, 1173, 1174, 1176, 1177, 1188, 1189, 1195, 1214, 1236, 1239, 1269, 1270, 1273, 1281, 1284, 1285, 1288, 1289, 1309, 1336, 1345, 1361, 1363, 1366, 1371, 1373, 1376, 1385, 1386, 1395, 1400, 1411, 1418, 1419, 1420, 1421, 1431, 1438, 1440, 1444, 1446, 1448, 1449, 1450, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1469, 1470, 1473, 1478, 1479, 1480, 1481, 1486, 1492, 1493, 1497, 1498, 1514, 1518, 1522, 1523, 1524, 1525, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1537, 1538, 1543, 1545, 1549, 1550, 1551, 1552, 1553, 1554, 1567, 1571, 1573, 1575, 1576, 1579, 1584, 1585, 1591, 1598, 1604, 1606, 1624, 1625, 1630, 1635, 1654, 1655, 1656, 1685, 1691, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1722, 1731, 1737, 1744, 1748, 1758, 1762, 1770, 1771, 1772, 1773, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1803, 1806, 1807, 1810, 1811, 1812, 1813, 1825, 1864, 1871, 1903, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1919, 1921, 1922, 1923, 1924, 1927, 1944, 1946, 1953, 1954, 1955, 1956, 1959, 1961, 1962, 1963, 1968, 1972, 1973, 1975, 1979, 1982, 1983, 1984, 1990, 1995, 2000, 2001, 2002, 2003, 2006, 2009, 2012, 2014, 2015, 2016, 2017, 2021, 2023, 2024, 2025, 2027, 2029, 2030, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2045, 2046, 2048, 2050, 2051, 2052, 2053, 2054, 2055, 2057, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2068, 2070, 2071, 2072, 2076, 2077, 2078, 2079, 2081, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2091, 2092, 2093, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2117], "addcdiv": [0, 
104, 2015, 2068], "addcmul": [0, 106, 2015, 2068], "atan2": [0, 148, 875, 2015, 2034, 2068, 2108], "bilinear": [0, 782, 788, 789, 1109, 1580, 1581, 1633, 1644, 1704, 1705, 1965, 2015, 2068], "cross": [0, 7, 8, 28, 30, 33, 34, 35, 37, 1439, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1605, 1606, 1616, 2015, 2046, 2063, 2068], "dot": [0, 2, 14, 60, 61, 619, 913, 915, 916, 917, 1088, 1089, 1090, 1091, 1168, 1178, 1250, 1294, 1341, 1342, 1368, 1439, 1440, 1462, 1473, 1486, 1493, 1518, 1533, 1534, 1577, 1586, 1591, 1685, 1924, 1974, 1975, 1976, 1977, 2015, 2034, 2054, 2068, 2070], "grid_sampl": [0, 1598, 1965, 2015, 2068], "index_put": [0, 1965, 2015, 2068, 2108], "scatter_add": [0, 2015, 2068, 2108], "tensordot": [0, 1250, 1339, 1340, 2019, 2046, 2068], "binari": [0, 2, 14, 15, 24, 35, 37, 40, 45, 47, 64, 156, 738, 946, 1367, 1439, 1440, 1533, 1605, 1606, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1779, 2013, 2018, 2034, 2035, 2056, 2065, 2067, 2070, 2087, 2095], "add": [0, 1, 3, 7, 12, 15, 17, 23, 28, 30, 32, 33, 38, 44, 47, 52, 53, 55, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 78, 84, 88, 100, 292, 315, 515, 517, 689, 690, 691, 694, 750, 751, 760, 793, 796, 841, 863, 865, 866, 932, 939, 978, 1024, 1054, 1065, 1109, 1167, 1181, 1187, 1188, 1194, 1273, 1285, 1431, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1503, 1504, 1505, 1506, 1507, 1508, 1527, 1529, 1533, 1534, 1538, 1541, 1635, 1677, 1707, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 1871, 1920, 2012, 2014, 2015, 2016, 2017, 2021, 2027, 2030, 2034, 2036, 2042, 2044, 2045, 2048, 2049, 2050, 2055, 2056, 2057, 2059, 2064, 2065, 2067, 2068, 2070, 2071, 2072, 2073, 2074, 2075, 2077, 2078, 2079, 2082, 2085, 2086, 2087, 2092, 2093, 2098, 2099, 2100, 2101, 2102, 2104, 2105, 2107, 2108, 2111, 2112, 2113, 2114, 2115], "nativ": [0, 11, 24, 55, 64, 1034, 1167, 1284, 1722, 1723, 1871, 2014, 2017, 2033, 2046, 2062, 2064, 2069, 2070, 2077, 2104], "without": [0, 1, 3, 5, 7, 8, 9, 14, 18, 23, 24, 28, 29, 30, 32, 33, 34, 35, 40, 47, 52, 55, 56, 61, 63, 64, 65, 66, 67, 256, 488, 490, 619, 973, 976, 1012, 1014, 1049, 1130, 1131, 1132, 1140, 1141, 1142, 1188, 1197, 1198, 1213, 1270, 1271, 1273, 1274, 1320, 1321, 1336, 1343, 1413, 1441, 1442, 1443, 1466, 1470, 1489, 1490, 1491, 1524, 1527, 1535, 1567, 1575, 1625, 1707, 1717, 1749, 1765, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1803, 1834, 1850, 1867, 1959, 2012, 2016, 2017, 2021, 2024, 2025, 2027, 2030, 2033, 2035, 2042, 2043, 2044, 2046, 2048, 2049, 2054, 2055, 2057, 2059, 2061, 2062, 2065, 2067, 2070, 2072, 2076, 2077, 2082, 2084, 2087, 2093, 2098, 2100, 2101, 2102, 2103, 2104, 2113, 2118], "intervent": [0, 8, 33, 2077], "mixtur": [0, 35, 1462, 1616], "bceloss": [0, 1440, 1605], "aren": [0, 8, 52, 60, 64, 83, 1167, 1188, 2034, 2043, 2052, 2078, 2114], "mean": [0, 2, 3, 5, 7, 8, 12, 15, 18, 19, 23, 24, 28, 30, 32, 34, 35, 47, 48, 50, 52, 53, 55, 56, 58, 61, 63, 64, 256, 260, 323, 337, 379, 456, 490, 497, 519, 761, 763, 784, 797, 799, 865, 866, 932, 944, 957, 990, 998, 1166, 1168, 1197, 1198, 1284, 1374, 1393, 1413, 1418, 1422, 1423, 1429, 1430, 1431, 1433, 1434, 1435, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1460, 1462, 1464, 1468, 1470, 1471, 1472, 1476, 1477, 1478, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1497, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1513, 1514, 1516, 
1517, 1518, 1519, 1526, 1530, 1531, 1532, 1534, 1535, 1541, 1542, 1543, 1546, 1547, 1548, 1555, 1557, 1558, 1559, 1560, 1561, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1576, 1577, 1578, 1604, 1605, 1606, 1614, 1616, 1617, 1625, 1626, 1630, 1641, 1642, 1645, 1646, 1650, 1657, 1665, 1666, 1667, 1668, 1669, 1677, 1682, 1689, 1690, 1701, 1702, 1716, 1717, 1727, 1729, 1773, 1798, 1831, 1840, 1841, 1871, 1884, 1905, 1922, 1923, 1929, 1965, 1972, 1973, 2012, 2014, 2015, 2016, 2030, 2031, 2033, 2034, 2035, 2036, 2041, 2042, 2043, 2046, 2048, 2049, 2051, 2052, 2054, 2057, 2063, 2067, 2068, 2069, 2070, 2071, 2072, 2073, 2074, 2077, 2078, 2079, 2082, 2100, 2101, 2103, 2104, 2107, 2108, 2109, 2110], "doesn": [0, 1, 2, 5, 7, 8, 11, 12, 19, 23, 28, 30, 37, 45, 53, 58, 60, 63, 64, 86, 826, 829, 884, 904, 905, 909, 918, 930, 990, 1012, 1033, 1167, 1178, 1187, 1188, 1198, 1201, 1212, 1281, 1287, 1313, 1337, 1345, 1354, 1410, 1454, 1455, 1456, 1463, 1492, 1561, 1608, 1609, 1610, 1645, 1691, 1717, 1718, 1748, 1787, 1867, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1901, 1905, 1977, 1990, 2014, 2017, 2018, 2034, 2036, 2043, 2044, 2046, 2049, 2050, 2056, 2059, 2063, 2069, 2078, 2082, 2103, 2108, 2111, 2113], "help": [0, 1, 4, 7, 8, 11, 14, 20, 23, 24, 28, 33, 44, 47, 55, 60, 64, 923, 924, 936, 1033, 1065, 1167, 1168, 1178, 1187, 1188, 1273, 1274, 1293, 1375, 1389, 1390, 1465, 1466, 1467, 1471, 1527, 1731, 1782, 1877, 1977, 1990, 2012, 2017, 2024, 2025, 2034, 2035, 2042, 2043, 2044, 2046, 2048, 2050, 2055, 2057, 2060, 2065, 2067, 2070, 2077, 2078, 2082, 2091, 2101, 2103, 2106, 2107, 2111, 2113, 2117], "revers": [0, 35, 61, 64, 515, 737, 796, 912, 914, 1127, 1129, 1149, 1170, 1172, 1177, 1341, 1497, 1540, 1676, 1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755, 1952, 1971, 2015, 2016, 2017, 2043, 2048, 2050, 2068, 2088], "therefor": [0, 3, 5, 23, 24, 28, 35, 36, 45, 55, 58, 66, 71, 72, 88, 450, 546, 918, 945, 954, 978, 1054, 1143, 1160, 1163, 1166, 1210, 1231, 1232, 1286, 1289, 1319, 1332, 1469, 1470, 1579, 1624, 1625, 1633, 1767, 1803, 2017, 2026, 2042, 2043, 2046, 2049, 2051, 2052, 2056, 2067, 2079, 2082, 2102, 2105], "rais": [0, 1, 5, 7, 14, 28, 30, 32, 33, 35, 37, 40, 47, 52, 55, 60, 63, 64, 66, 90, 315, 317, 323, 700, 898, 904, 907, 909, 912, 913, 914, 915, 916, 917, 918, 922, 923, 924, 931, 941, 943, 957, 966, 976, 1013, 1079, 1152, 1157, 1204, 1271, 1273, 1278, 1281, 1289, 1291, 1303, 1305, 1313, 1314, 1317, 1321, 1322, 1327, 1334, 1335, 1339, 1340, 1345, 1366, 1393, 1527, 1685, 1710, 1737, 1738, 1743, 1745, 1748, 1758, 1778, 1779, 1865, 1868, 1913, 1965, 2012, 2018, 2021, 2024, 2033, 2043, 2046, 2049, 2051, 2060, 2065, 2067, 2070, 2076, 2077, 2081, 2089, 2100, 2103, 2104, 2112, 2113, 2114], "mani": [0, 3, 7, 11, 14, 19, 23, 24, 28, 35, 47, 55, 59, 60, 64, 90, 404, 683, 893, 895, 909, 910, 912, 914, 1109, 1198, 1286, 1446, 1798, 1896, 1946, 1953, 1954, 1955, 1956, 2014, 2016, 2025, 2034, 2041, 2043, 2044, 2045, 2046, 2049, 2050, 2056, 2057, 2060, 2064, 2067, 2069, 2072, 2079, 2082, 2085, 2087, 2088, 2091, 2099, 2100, 2101, 2103, 2104, 2106, 2107, 2109, 2112, 2113, 2115, 2116], "sigmoid": [0, 35, 64, 529, 670, 671, 763, 798, 1439, 1440, 1469, 1478, 1479, 1497, 1498, 1557, 1605, 1632, 1688, 2015, 2034, 2035, 2041, 2065, 2068, 2074, 2083, 2095, 2108], "right": [0, 2, 7, 9, 12, 28, 30, 35, 52, 55, 64, 823, 869, 945, 952, 954, 955, 960, 965, 969, 990, 1103, 1109, 1150, 1153, 1154, 1158, 1227, 1231, 1232, 1236, 1240, 1270, 1293, 1318, 1323, 1334, 1336, 1337, 1345, 1351, 
1352, 1413, 1431, 1436, 1437, 1438, 1439, 1440, 1454, 1455, 1456, 1473, 1480, 1493, 1494, 1495, 1496, 1515, 1516, 1517, 1518, 1520, 1521, 1522, 1530, 1531, 1532, 1536, 1537, 1544, 1576, 1579, 1580, 1581, 1582, 1587, 1588, 1589, 1633, 1653, 1672, 1723, 1737, 1787, 1801, 1802, 1843, 1863, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1924, 1945, 1950, 1952, 1968, 1971, 2012, 2015, 2017, 2033, 2034, 2035, 2043, 2049, 2054, 2065, 2067, 2069, 2075, 2079, 2083, 2101, 2102, 2103, 2111], "entropi": [0, 34, 35, 1439, 1462, 1531, 1605, 1606, 1616, 2083], "combin": [0, 3, 23, 24, 28, 30, 38, 47, 61, 63, 619, 692, 715, 716, 717, 718, 719, 720, 721, 722, 796, 858, 894, 909, 1065, 1138, 1139, 1144, 1146, 1201, 1236, 1368, 1378, 1440, 1473, 1487, 1533, 1575, 1579, 1627, 1650, 1744, 2014, 2015, 2016, 2042, 2046, 2050, 2065, 2067, 2068, 2069, 2072, 2075, 2077, 2099], "two": [0, 1, 3, 4, 5, 6, 8, 11, 12, 14, 15, 18, 19, 23, 24, 28, 29, 30, 32, 33, 35, 44, 45, 47, 48, 52, 53, 55, 59, 60, 64, 86, 87, 585, 586, 587, 589, 590, 609, 619, 683, 695, 697, 763, 796, 891, 894, 909, 910, 914, 923, 964, 966, 1018, 1057, 1059, 1088, 1089, 1097, 1106, 1109, 1115, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1155, 1156, 1166, 1176, 1184, 1187, 1196, 1199, 1235, 1237, 1262, 1271, 1273, 1284, 1287, 1294, 1297, 1299, 1304, 1306, 1309, 1310, 1315, 1316, 1321, 1326, 1329, 1330, 1332, 1333, 1335, 1337, 1342, 1345, 1368, 1371, 1374, 1376, 1379, 1420, 1437, 1439, 1440, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1473, 1478, 1486, 1487, 1493, 1495, 1497, 1518, 1519, 1521, 1527, 1530, 1531, 1532, 1534, 1535, 1541, 1543, 1559, 1560, 1575, 1576, 1577, 1579, 1587, 1588, 1605, 1606, 1616, 1635, 1645, 1652, 1669, 1677, 1685, 1731, 1733, 1737, 1759, 1769, 1771, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1802, 1803, 1805, 1809, 1828, 1843, 1856, 1871, 1908, 1913, 1928, 1945, 1949, 1950, 1952, 1961, 1970, 1974, 1978, 2012, 2014, 2016, 2017, 2021, 2023, 2024, 2025, 2034, 2035, 2036, 2043, 2044, 2045, 2046, 2048, 2049, 2050, 2052, 2054, 2057, 2062, 2063, 2064, 2067, 2069, 2070, 2072, 2073, 2077, 2078, 2079, 2082, 2083, 2085, 2087, 2088, 2089, 2092, 2093, 2098, 2099, 2100, 2101, 2103, 2108, 2111, 2113], "bcewithlogitsloss": [0, 1606], "bcewithlogit": 0, "safe": [0, 28, 30, 47, 52, 63, 64, 87, 89, 90, 488, 1055, 1056, 1076, 1077, 1201, 1717, 1781, 1782, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 2000, 2001, 2002, 2003, 2014, 2017, 2042, 2043, 2046, 2050, 2052, 2056, 2062, 2077, 2084, 2094, 2098, 2104, 2105, 2113], "_convolut": [0, 2068], "avg_pool3d": [0, 2015, 2068, 2074, 2108], "grid_sampler_2d": [0, 2015, 2068, 2108], "_grid_sampler_2d_cpu_fallback": [0, 2068], "grid_sampler_3d": [0, 2015, 2068], "polar": [0, 35, 1333, 2015, 2068], "quantil": [0, 1374, 1420, 2015, 2068, 2083], "nanquantil": [0, 2015, 2068], "stft": [0, 945, 954, 1231, 1232, 1270, 1293, 2015, 2068], "view_as_complex": [0, 11, 2015, 2068], "choleski": [0, 2, 35, 968, 969, 1304, 1310, 1346, 2015, 2068], "cholesky_invers": [0, 2, 2015, 2068], "cholesky_solv": [0, 2, 2015, 2068], "invers": [0, 35, 686, 687, 886, 889, 963, 968, 969, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1270, 1314, 1315, 1327, 1331, 1334, 1339, 1340, 1457, 1458, 1459, 1469, 1470, 1473, 1523, 1524, 1525, 1579, 1624, 1625, 1661, 1662, 1663, 1734, 1737, 1761, 1803, 1809, 1960, 2013, 2015, 2017, 2060, 2068, 
2083], "lu_solv": [0, 2, 1321, 2015, 2068], "orgqr": [0, 2015, 2068], "ormqr": [0, 1217, 1313, 2015, 2068], "pinvers": [0, 1319, 2015, 2068], "max_pool3d": [0, 2015, 2068, 2074], "max_unpool2d": [0, 1628, 1659, 2015, 2068], "max_unpool3d": [0, 1629, 1660, 2015, 2068], "adaptive_avg_pool3d": [0, 2015, 2049, 2068, 2074], "reflection_pad1d": [0, 2015, 2068, 2108], "reflection_pad2d": [0, 2015, 2068, 2108], "replication_pad1d": [0, 2015, 2068], "replication_pad2d": [0, 2015, 2068, 2108], "replication_pad3d": [0, 2015, 2068, 2108], "ctc_loss": [0, 1446, 2015, 2068], "fft_fft": [0, 2015, 2068], "fft_ifft": [0, 2015, 2068], "fft_fft2": [0, 2015, 2068], "fft_ifft2": [0, 2015, 2068], "fft_fftn": [0, 2015, 2068], "fft_ifftn": [0, 2015, 2068], "fft_rfft": [0, 2015, 2068], "fft_irfft": [0, 2015, 2068], "fft_rfft2": [0, 2015, 2068], "fft_irfft2": [0, 2015, 2068], "fft_rfftn": [0, 2015, 2068], "fft_irfftn": [0, 2015, 2068], "fft_hfft": [0, 2015, 2068], "fft_ihfft": [0, 2015, 2068], "linalg_matrix_norm": [0, 2015, 2068], "linalg_cond": [0, 2015, 2068], "linalg_matrix_rank": [0, 2015, 2068], "linalg_solv": [0, 2015, 2068], "linalg_choleski": [0, 2015, 2068], "linalg_svdv": [0, 2015, 2068], "linalg_eigv": [0, 2015, 2068], "linalg_eigvalsh": [0, 2015, 2068], "linalg_inv": [0, 2015, 2068], "linalg_householder_product": [0, 2015, 2068], "linalg_tensorinv": [0, 2015, 2068], "linalg_tensorsolv": [0, 2015, 2068], "fake_quantize_per_tensor_affin": [0, 2015, 2068], "eig": [0, 1310, 1311, 1337, 2060], "geqrf": [0, 1313, 1815, 2015, 2068], "lstsq": [0, 1217, 1305, 1331], "_lu_with_info": [0, 2068], "qr": [0, 2, 1217, 1309, 1310, 1313, 1319, 1337, 1731, 1815, 2015, 2068], "solv": [0, 7, 11, 52, 1181, 1217, 1227, 1305, 1314, 1316, 1317, 1320, 1321, 1323, 1327, 1335, 1336, 1340, 1346, 1364, 1952, 2043, 2054, 2060, 2063, 2094, 2101, 2104], "svd": [0, 2, 11, 1309, 1310, 1319, 1331, 1338, 1354, 1737, 1817, 1929, 2015, 2060, 2068, 2082, 2108], "symeig": 0, "triangular_solv": [0, 2015, 2068], "fractional_max_pool2d": [0, 2015, 2068], "fractional_max_pool3d": [0, 2015, 2068], "adaptive_max_pool3d": [0, 2015, 2068], "multilabel_margin_loss_forward": [0, 2068], "linalg_qr": [0, 2015, 2068], "linalg_cholesky_ex": [0, 2015, 2068], "linalg_svd": [0, 2015, 2068], "linalg_eig": [0, 2015, 2068], "linalg_eigh": [0, 2015, 2068], "linalg_lstsq": [0, 2015, 2068], "linalg_inv_ex": [0, 2015, 2068], "cat": [0, 28, 35, 64, 546, 750, 751, 760, 796, 799, 988, 989, 1375, 1547, 1909, 1921, 2014, 2015, 2034, 2067, 2068, 2070, 2072, 2074, 2082, 2092, 2100, 2108], "stack": [0, 8, 18, 23, 24, 25, 28, 35, 40, 47, 52, 53, 59, 60, 64, 683, 763, 936, 963, 974, 978, 1070, 1071, 1108, 1176, 1178, 1188, 1239, 1375, 1446, 1478, 1497, 1498, 1543, 1572, 1574, 1762, 1967, 1977, 1979, 2013, 2015, 2021, 2036, 2046, 2048, 2051, 2068, 2070, 2071, 2074, 2082, 2101, 2102, 2103, 2104, 2111, 2113, 2115, 2116], "index_copi": [0, 1965, 2015, 2068], "implement": [1, 2, 5, 8, 11, 12, 16, 17, 19, 23, 24, 28, 29, 30, 32, 34, 35, 39, 44, 45, 48, 52, 55, 60, 64, 65, 82, 86, 152, 417, 515, 517, 519, 690, 741, 742, 743, 744, 745, 746, 763, 782, 784, 788, 796, 803, 822, 823, 824, 827, 828, 829, 882, 884, 897, 913, 923, 924, 990, 996, 1109, 1152, 1157, 1163, 1170, 1171, 1173, 1174, 1273, 1279, 1281, 1283, 1284, 1287, 1329, 1332, 1345, 1346, 1374, 1431, 1446, 1463, 1478, 1481, 1494, 1495, 1496, 1499, 1527, 1533, 1539, 1542, 1543, 1544, 1547, 1564, 1573, 1575, 1633, 1638, 1672, 1685, 1693, 1716, 1717, 1722, 1723, 1731, 1732, 1737, 1748, 1761, 1765, 1766, 1769, 1781, 1782, 1783, 
1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1801, 1803, 1809, 1827, 1834, 1847, 1856, 1859, 1883, 1895, 1905, 1928, 1929, 1945, 1961, 1965, 1966, 1968, 2013, 2016, 2017, 2018, 2021, 2024, 2025, 2032, 2033, 2036, 2037, 2041, 2042, 2043, 2045, 2046, 2049, 2050, 2051, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2065, 2069, 2070, 2072, 2077, 2078, 2082, 2083, 2086, 2088, 2098, 2100, 2102, 2104, 2109, 2111, 2114], "arbitrari": [1, 3, 28, 32, 33, 52, 66, 69, 74, 256, 894, 909, 1092, 1188, 1337, 1345, 1463, 1469, 1493, 1518, 1579, 1605, 1606, 1624, 1645, 1672, 1758, 1928, 1950, 1963, 2017, 2023, 2025, 2043, 2050, 2056, 2057, 2062, 2069, 2072, 2082, 2091, 2101, 2104, 2109, 2113, 2114], "scalar": [1, 12, 28, 35, 53, 66, 75, 99, 152, 156, 262, 315, 448, 515, 591, 690, 691, 741, 742, 743, 744, 745, 746, 883, 884, 897, 912, 913, 916, 918, 949, 952, 960, 994, 998, 1104, 1109, 1124, 1157, 1164, 1227, 1234, 1250, 1264, 1298, 1299, 1368, 1375, 1420, 1431, 1439, 1440, 1446, 1460, 1462, 1463, 1480, 1486, 1487, 1492, 1493, 1499, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1577, 1635, 1678, 1771, 1776, 1777, 1778, 1811, 1824, 1826, 1828, 1846, 1847, 1863, 1911, 1912, 1913, 1914, 1915, 1943, 1961, 1962, 1980, 2010, 2011, 2015, 2016, 2017, 2030, 2036, 2041, 2043, 2049, 2054, 2067, 2077, 2082, 2085, 2087, 2089, 2101, 2103, 2104, 2108, 2112], "minim": [1, 7, 8, 822, 879, 1376, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1811, 2012, 2030, 2042, 2046, 2057, 2059, 2062, 2067, 2072, 2104, 2111, 2113], "exist": [1, 7, 8, 9, 11, 14, 23, 28, 29, 30, 35, 37, 38, 40, 45, 47, 48, 53, 55, 59, 62, 64, 66, 84, 85, 256, 501, 795, 858, 882, 912, 913, 914, 915, 916, 917, 918, 923, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1049, 1187, 1200, 1201, 1210, 1273, 1277, 1289, 1309, 1314, 1320, 1321, 1346, 1420, 1527, 1528, 1537, 1587, 1903, 1921, 1968, 2012, 2014, 2021, 2024, 2033, 2034, 2036, 2037, 2043, 2044, 2046, 2049, 2050, 2052, 2055, 2056, 2057, 2058, 2060, 2062, 2067, 2070, 2071, 2072, 2077, 2079, 2082, 2086, 2088, 2098, 2099, 2101, 2103, 2105, 2108, 2111, 2115], "code": [1, 3, 4, 8, 9, 11, 14, 15, 16, 19, 23, 24, 28, 30, 33, 35, 37, 48, 50, 52, 53, 60, 61, 63, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 121, 683, 919, 939, 976, 978, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1051, 1052, 1065, 1167, 1188, 1273, 1277, 1278, 1284, 1285, 1286, 1289, 1290, 1291, 1304, 1315, 1317, 1345, 1544, 1556, 1588, 1589, 1590, 1717, 1874, 2012, 2013, 2016, 2017, 2023, 2025, 2035, 2043, 2044, 2048, 2049, 2050, 2051, 2052, 2054, 2055, 2056, 2057, 2059, 2061, 2062, 2063, 2065, 2067, 2069, 2071, 2076, 2077, 2078, 2079, 2082, 2085, 2086, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2106, 2107, 2109, 2111, 2112, 2113, 2114, 2115], "need": [1, 3, 5, 6, 7, 8, 9, 14, 15, 23, 24, 28, 29, 30, 32, 33, 34, 35, 39, 40, 45, 47, 48, 51, 52, 53, 55, 57, 58, 60, 64, 65, 66, 76, 77, 87, 88, 152, 256, 473, 490, 497, 501, 585, 683, 737, 738, 822, 823, 824, 827, 828, 829, 845, 862, 893, 897, 904, 905, 906, 909, 910, 918, 930, 957, 958, 966, 983, 984, 1045, 1054, 1061, 1097, 1099, 1109, 1130, 1132, 1152, 1161, 1166, 1188, 1210, 1273, 1367, 1413, 1462, 1469, 1523, 1524, 1525, 1527, 1533, 1571, 1573, 1575, 1608, 1609, 1610, 1703, 1717, 1724, 1725, 1737, 1739, 1743, 1745, 1773, 1798, 1844, 1849, 1863, 1921, 1950, 1966, 1968, 2013, 2014, 2016, 2017, 2021, 2024, 2025, 2027, 2029, 2033, 2035, 2036, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2056, 
2059, 2060, 2061, 2062, 2063, 2065, 2067, 2069, 2070, 2072, 2075, 2077, 2078, 2079, 2082, 2084, 2085, 2088, 2089, 2093, 2094, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2106, 2109, 2111, 2113, 2114], "declar": [1, 9, 14, 23, 48, 1198, 2016, 2017, 2018, 2049, 2067, 2070], "requires_grad": [1, 5, 30, 34, 35, 61, 337, 447, 448, 449, 450, 451, 489, 490, 498, 869, 884, 904, 905, 907, 908, 909, 910, 919, 920, 923, 924, 926, 928, 929, 939, 945, 954, 1054, 1110, 1111, 1112, 1113, 1122, 1127, 1145, 1163, 1164, 1165, 1168, 1178, 1231, 1232, 1273, 1293, 1344, 1360, 1439, 1440, 1460, 1462, 1469, 1470, 1480, 1492, 1493, 1518, 1519, 1527, 1533, 1534, 1541, 1575, 1576, 1605, 1606, 1616, 1669, 1717, 1718, 1719, 1720, 1765, 1770, 1776, 1777, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1905, 1910, 1911, 1912, 1913, 1914, 1915, 1943, 1965, 1977, 2010, 2011, 2015, 2019, 2021, 2034, 2035, 2036, 2046, 2049, 2050, 2057, 2062, 2067, 2068, 2077, 2078, 2088, 2089, 2091, 2101, 2102, 2103, 2104, 2108, 2111], "keyword": [1, 5, 23, 28, 29, 32, 33, 34, 52, 53, 60, 64, 66, 72, 86, 90, 315, 323, 447, 448, 449, 450, 451, 515, 583, 683, 684, 686, 687, 688, 689, 690, 691, 692, 693, 694, 696, 698, 699, 700, 701, 702, 865, 869, 884, 885, 886, 887, 888, 889, 944, 945, 946, 948, 949, 950, 951, 952, 953, 954, 956, 960, 963, 965, 967, 968, 969, 971, 973, 974, 987, 992, 993, 995, 996, 998, 1008, 1021, 1023, 1025, 1051, 1052, 1088, 1089, 1090, 1091, 1092, 1093, 1096, 1101, 1104, 1106, 1108, 1110, 1111, 1112, 1114, 1119, 1122, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1152, 1153, 1154, 1155, 1156, 1157, 1159, 1161, 1163, 1164, 1165, 1166, 1186, 1214, 1215, 1216, 1217, 1227, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1238, 1239, 1240, 1248, 1250, 1267, 1268, 1273, 1276, 1289, 1290, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1302, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1355, 1356, 1357, 1358, 1360, 1361, 1362, 1364, 1365, 1367, 1368, 1371, 1372, 1373, 1374, 1376, 1377, 1378, 1379, 1396, 1412, 1413, 1415, 1417, 1418, 1419, 1420, 1421, 1423, 1424, 1425, 1427, 1463, 1485, 1488, 1527, 1685, 1710, 1711, 1717, 1737, 1739, 1744, 1748, 1767, 1771, 1773, 1776, 1777, 1815, 1816, 1820, 1821, 1824, 1825, 1827, 1828, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1846, 1847, 1848, 1849, 1856, 1858, 1863, 1878, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1895, 1900, 1903, 1906, 1908, 1910, 1911, 1912, 1913, 1914, 1915, 1917, 1918, 1920, 1921, 1922, 1923, 1925, 1927, 1928, 1940, 1941, 1942, 1943, 1947, 1950, 1952, 1953, 1954, 1955, 1956, 1958, 1965, 1972, 1973, 1974, 1979, 1980, 2010, 2011, 2012, 2017, 2018, 2021, 2035, 2036, 2049, 2060, 2065, 2067, 2069, 2077, 2082, 2083, 2087, 2088, 2114], "support": [1, 2, 3, 5, 6, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 23, 24, 25, 28, 30, 32, 33, 34, 35, 36, 37, 39, 40, 47, 48, 52, 53, 55, 58, 59, 60, 63, 64, 65, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 84, 85, 88, 89, 260, 313, 323, 460, 585, 619, 688, 689, 690, 692, 696, 698, 699, 702, 732, 741, 742, 743, 775, 776, 777, 782, 788, 789, 790, 795, 796, 797, 798, 804, 820, 836, 863, 865, 866, 896, 899, 904, 907, 909, 910, 
923, 924, 944, 945, 949, 952, 954, 956, 968, 969, 976, 990, 993, 1008, 1051, 1052, 1054, 1072, 1104, 1106, 1109, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1154, 1155, 1156, 1157, 1159, 1160, 1162, 1173, 1177, 1231, 1232, 1244, 1270, 1277, 1285, 1289, 1293, 1294, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1346, 1364, 1368, 1372, 1377, 1378, 1410, 1412, 1454, 1455, 1456, 1457, 1458, 1459, 1466, 1469, 1470, 1473, 1493, 1514, 1532, 1533, 1567, 1575, 1577, 1579, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1625, 1627, 1633, 1644, 1650, 1685, 1703, 1704, 1706, 1717, 1724, 1725, 1772, 1777, 1783, 1784, 1785, 1787, 1797, 1815, 1827, 1846, 1847, 1868, 1871, 1872, 1901, 1902, 1905, 1908, 1925, 1928, 1952, 1954, 1956, 1966, 1968, 1974, 1975, 1976, 2011, 2012, 2013, 2014, 2016, 2018, 2019, 2021, 2027, 2030, 2033, 2037, 2043, 2044, 2045, 2046, 2049, 2053, 2055, 2057, 2059, 2060, 2062, 2063, 2064, 2065, 2066, 2069, 2070, 2071, 2075, 2076, 2077, 2081, 2083, 2085, 2086, 2087, 2088, 2089, 2091, 2092, 2094, 2095, 2097, 2098, 2099, 2100, 2102, 2103, 2105, 2106, 2112, 2113, 2114, 2120], "type": [1, 2, 3, 5, 12, 14, 15, 17, 19, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 38, 40, 41, 45, 47, 48, 50, 52, 55, 60, 62, 63, 64, 66, 68, 71, 72, 75, 76, 77, 82, 83, 87, 89, 90, 152, 193, 198, 210, 328, 331, 335, 343, 447, 448, 449, 450, 451, 483, 561, 606, 683, 688, 689, 690, 691, 692, 693, 737, 738, 741, 742, 743, 752, 754, 755, 756, 757, 759, 767, 769, 770, 773, 774, 775, 776, 777, 778, 779, 780, 781, 784, 787, 795, 796, 797, 800, 818, 819, 820, 822, 823, 824, 825, 826, 828, 830, 843, 844, 858, 859, 860, 861, 862, 863, 864, 865, 866, 869, 877, 883, 893, 894, 895, 909, 910, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 923, 924, 925, 926, 928, 929, 932, 944, 945, 947, 948, 949, 950, 951, 952, 953, 954, 955, 957, 958, 960, 961, 962, 963, 964, 966, 975, 976, 977, 978, 983, 984, 985, 990, 1001, 1002, 1003, 1004, 1006, 1012, 1014, 1018, 1020, 1027, 1028, 1029, 1031, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1046, 1048, 1051, 1052, 1053, 1054, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1065, 1066, 1067, 1072, 1084, 1086, 1087, 1090, 1091, 1104, 1109, 1110, 1111, 1112, 1122, 1123, 1124, 1127, 1145, 1152, 1154, 1155, 1156, 1157, 1160, 1161, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1171, 1174, 1175, 1176, 1178, 1179, 1180, 1181, 1183, 1185, 1186, 1188, 1195, 1202, 1203, 1204, 1205, 1206, 1212, 1215, 1219, 1221, 1223, 1226, 1231, 1232, 1234, 1235, 1236, 1249, 1252, 1254, 1255, 1258, 1261, 1269, 1270, 1271, 1273, 1274, 1276, 1279, 1280, 1283, 1285, 1287, 1289, 1290, 1292, 1293, 1296, 1305, 1309, 1326, 1330, 1337, 1343, 1344, 1345, 1346, 1360, 1363, 1366, 1373, 1375, 1382, 1383, 1384, 1387, 1401, 1402, 1403, 1405, 1407, 1410, 1412, 1418, 1421, 1431, 1435, 1454, 1455, 1456, 1463, 1470, 1517, 1527, 1528, 1529, 1533, 1537, 1538, 1542, 1556, 1561, 1562, 1563, 1571, 1572, 1573, 1574, 1575, 1588, 1589, 1590, 1593, 1594, 1598, 1599, 1603, 1605, 1606, 1607, 1608, 1609, 1610, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1624, 1625, 1626, 1627, 1630, 1632, 1633, 1634, 1635, 1637, 1638, 1639, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1651, 1652, 1654, 1655, 1656, 1657, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1672, 1677, 1679, 1680, 1682, 1683, 1685, 
1686, 1688, 1689, 1690, 1691, 1692, 1698, 1700, 1701, 1702, 1703, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1721, 1722, 1723, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1734, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1756, 1757, 1759, 1760, 1761, 1762, 1763, 1764, 1766, 1767, 1769, 1771, 1772, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1817, 1825, 1826, 1827, 1829, 1830, 1831, 1832, 1833, 1834, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1846, 1847, 1849, 1853, 1856, 1863, 1864, 1867, 1868, 1869, 1871, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1904, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1924, 1925, 1927, 1928, 1929, 1935, 1943, 1952, 1954, 1956, 1961, 1962, 1963, 1965, 1966, 1967, 1971, 1977, 1980, 1982, 1983, 1985, 1986, 1988, 1991, 1992, 1993, 1994, 1995, 1997, 1998, 2008, 2010, 2011, 2012, 2013, 2014, 2015, 2018, 2021, 2025, 2027, 2028, 2030, 2034, 2036, 2041, 2042, 2043, 2045, 2046, 2053, 2054, 2055, 2056, 2057, 2060, 2062, 2063, 2065, 2066, 2068, 2071, 2072, 2074, 2075, 2076, 2077, 2081, 2082, 2083, 2084, 2085, 2087, 2089, 2091, 2092, 2093, 2099, 2100, 2101, 2102, 2104, 2108, 2111, 2112, 2114], "doubl": [1, 3, 35, 37, 52, 53, 55, 483, 489, 619, 752, 754, 755, 756, 757, 759, 769, 770, 784, 904, 905, 907, 909, 910, 915, 923, 924, 932, 961, 968, 969, 987, 1008, 1054, 1124, 1152, 1227, 1273, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1354, 1355, 1356, 1357, 1358, 1364, 1470, 1527, 1593, 1596, 1625, 1707, 1778, 1815, 1821, 1910, 1911, 1912, 1914, 1915, 1928, 1943, 1952, 1980, 2017, 2034, 2046, 2049, 2050, 2052, 2060, 2067, 2070, 2084, 2085, 2088], "bfloat16": [1, 24, 55, 1255, 1273, 1325, 1342, 1527, 1778, 1783, 1784, 1785, 1797, 1868, 1871, 2034, 2046, 2053, 2082, 2084, 2085, 2088, 2089, 2118], "cfloat": [1, 11, 28, 313, 485, 619, 968, 969, 1008, 1244, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1364, 1815, 1845, 1928, 1952, 1976, 2085, 2088], "cdoubl": [1, 11, 968, 969, 1008, 1273, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1364, 1527, 1815, 1928, 1952, 2085, 2088], "beta": [1, 2, 11, 56, 63, 101, 102, 107, 108, 109, 110, 111, 112, 153, 154, 323, 519, 556, 689, 692, 693, 694, 944, 1009, 1043, 1044, 1051, 1052, 1054, 1231, 1293, 1368, 1378, 1441, 1442, 1443, 1481, 1487, 1489, 1490, 1491, 1499, 1515, 1559, 1564, 1567, 1586, 1591, 1642, 1650, 1651, 1685, 1689, 1693, 1784, 1785, 1786, 1788, 1794, 1798, 1831, 1888, 1890, 1901, 1906, 1920, 2013, 2015, 2023, 2027, 2046, 2064, 2065, 2066, 2067, 2072, 2077, 2082, 2085, 2091, 2108], "even": [1, 2, 8, 19, 23, 24, 28, 30, 37, 52, 55, 63, 64, 89, 488, 546, 582, 683, 818, 819, 820, 897, 998, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1140, 1141, 1142, 1145, 1177, 1187, 1192, 1198, 1200, 1201, 1212, 1258, 1286, 1303, 1305, 1309, 1310, 1311, 1312, 1326, 
1330, 1332, 1333, 1337, 1338, 1343, 1368, 1374, 1463, 1567, 1608, 1609, 1610, 1672, 1717, 1724, 1725, 1731, 1770, 1772, 1809, 1856, 1867, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1928, 1975, 2014, 2017, 2018, 2030, 2036, 2042, 2043, 2046, 2049, 2051, 2052, 2055, 2057, 2059, 2060, 2061, 2062, 2069, 2070, 2079, 2084, 2085, 2088, 2098, 2100, 2101, 2103, 2104, 2106, 2115], "though": [1, 11, 28, 64, 66, 69, 152, 488, 797, 894, 897, 909, 914, 1125, 1126, 1128, 1188, 1192, 1198, 1200, 1201, 1271, 1274, 1368, 1772, 2014, 2018, 2035, 2036, 2043, 2046, 2049, 2050, 2052, 2059, 2060, 2069, 2072, 2084, 2101, 2103, 2115], "signatur": [1, 12, 23, 30, 40, 52, 53, 55, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 86, 400, 489, 490, 558, 909, 910, 928, 929, 990, 1273, 1527, 1709, 1710, 1711, 1714, 1715, 1717, 1737, 1758, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1924, 2021, 2046, 2049, 2050, 2065, 2067, 2070, 2077, 2082, 2102, 2106, 2114], "veri": [1, 4, 7, 8, 18, 23, 24, 59, 61, 64, 996, 1168, 1176, 1178, 1187, 1336, 1489, 1490, 1491, 1717, 1718, 1731, 1787, 1809, 1815, 1891, 1895, 1952, 1977, 2016, 2033, 2043, 2049, 2051, 2054, 2055, 2057, 2059, 2060, 2063, 2069, 2070, 2076, 2077, 2078, 2082, 2088, 2101, 2103, 2104, 2106, 2112, 2113], "unlik": [1, 3, 7, 35, 53, 55, 61, 490, 495, 919, 1106, 1145, 1148, 1152, 1309, 1310, 1329, 1332, 1337, 1341, 1371, 1374, 1376, 1499, 1719, 1720, 1846, 1935, 1974, 2016, 2017, 2033, 2036, 2046, 2059, 2061, 2085, 2088, 2091, 2100, 2104], "coverag": [1, 7, 52, 56, 64, 1170, 1171, 2013, 2014, 2019, 2035, 2072, 2082, 2114], "plan": [1, 2, 7, 9, 28, 30, 34, 899, 1717, 1827, 2036, 2043, 2049, 2070, 2082, 2088, 2112, 2113], "consid": [1, 5, 8, 24, 28, 33, 37, 47, 52, 53, 55, 60, 63, 64, 65, 488, 697, 782, 788, 797, 912, 914, 915, 923, 998, 1096, 1097, 1098, 1099, 1100, 1130, 1166, 1262, 1266, 1269, 1273, 1314, 1316, 1317, 1318, 1319, 1327, 1328, 1331, 1336, 1339, 1462, 1473, 1527, 1530, 1579, 1598, 1633, 1644, 1704, 1718, 1731, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1767, 1797, 1811, 1865, 1884, 1919, 1924, 1953, 1954, 1955, 1956, 2012, 2014, 2017, 2024, 2042, 2043, 2044, 2049, 2051, 2052, 2054, 2057, 2060, 2065, 2069, 2070, 2071, 2073, 2078, 2079, 2082, 2085, 2088, 2089, 2100, 2101, 2104, 2112, 2114, 2115], "ad": [1, 3, 9, 14, 19, 23, 24, 28, 32, 33, 35, 37, 44, 55, 58, 60, 61, 63, 64, 223, 224, 315, 321, 473, 517, 683, 689, 690, 691, 692, 693, 694, 738, 899, 902, 903, 904, 912, 913, 914, 915, 919, 920, 921, 923, 932, 944, 1109, 1113, 1170, 1171, 1173, 1181, 1188, 1273, 1287, 1345, 1351, 1436, 1437, 1438, 1440, 1441, 1442, 1443, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1533, 1534, 1536, 1541, 1542, 1556, 1567, 1571, 1576, 1579, 1611, 1612, 1613, 1630, 1658, 1659, 1660, 1685, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1718, 1737, 1744, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 1831, 1832, 1833, 1901, 1906, 1920, 2012, 2014, 2016, 2029, 2030, 2035, 2042, 2050, 2052, 2054, 2056, 2057, 2062, 2070, 2071, 2072, 2075, 2082, 2087, 2091, 2099, 2100, 2103, 2104, 2108, 2109, 2111, 2112, 2115], "tutori": [1, 3, 9, 15, 28, 29, 32, 55, 64, 899, 902, 903, 904, 907, 909, 1968, 2013, 2014, 2021, 2024, 2037, 2049, 2056, 2057, 2059, 2062, 2067, 2072, 2095, 2103, 2104, 
2106], "how": [1, 3, 5, 7, 8, 9, 12, 15, 17, 19, 23, 28, 30, 34, 39, 47, 48, 50, 55, 56, 57, 59, 60, 64, 66, 74, 75, 82, 235, 417, 488, 489, 490, 788, 795, 796, 799, 845, 863, 865, 866, 899, 902, 903, 904, 907, 909, 910, 919, 920, 928, 929, 1092, 1113, 1167, 1176, 1182, 1194, 1202, 1227, 1273, 1284, 1345, 1473, 1527, 1579, 1580, 1672, 1704, 1717, 1758, 1770, 1847, 1871, 1885, 1896, 2013, 2014, 2016, 2017, 2021, 2024, 2025, 2028, 2033, 2034, 2035, 2037, 2042, 2045, 2046, 2048, 2050, 2051, 2052, 2054, 2057, 2059, 2061, 2062, 2067, 2071, 2072, 2075, 2077, 2078, 2079, 2082, 2087, 2095, 2100, 2101, 2106, 2107, 2111, 2115, 2117], "major": [1, 6, 7, 8, 9, 1036, 1928, 2013, 2017, 2052, 2069, 2100, 2104, 2106, 2107], "contain": [1, 2, 3, 5, 11, 14, 15, 23, 28, 29, 30, 32, 34, 35, 37, 38, 40, 41, 45, 48, 53, 55, 63, 64, 66, 67, 72, 75, 85, 90, 156, 193, 197, 210, 292, 313, 315, 317, 321, 323, 473, 485, 546, 605, 609, 689, 700, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 763, 817, 840, 845, 881, 896, 897, 898, 909, 910, 912, 913, 914, 915, 916, 917, 918, 936, 944, 945, 946, 954, 956, 960, 978, 993, 994, 998, 1021, 1022, 1023, 1024, 1025, 1054, 1109, 1143, 1144, 1146, 1152, 1161, 1173, 1174, 1177, 1178, 1231, 1232, 1235, 1236, 1244, 1248, 1264, 1271, 1273, 1274, 1280, 1281, 1284, 1289, 1290, 1293, 1304, 1310, 1311, 1312, 1313, 1315, 1317, 1319, 1320, 1336, 1345, 1363, 1367, 1374, 1413, 1419, 1431, 1440, 1462, 1463, 1469, 1470, 1473, 1478, 1479, 1481, 1486, 1497, 1498, 1519, 1527, 1528, 1529, 1534, 1537, 1543, 1545, 1556, 1560, 1567, 1579, 1586, 1616, 1624, 1625, 1627, 1700, 1707, 1710, 1711, 1717, 1724, 1725, 1732, 1733, 1734, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1749, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1758, 1759, 1760, 1761, 1765, 1766, 1769, 1771, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813, 1817, 1820, 1845, 1859, 1863, 1923, 1924, 1928, 1945, 1952, 1954, 1956, 1961, 1962, 1963, 1967, 1973, 1977, 2013, 2014, 2016, 2017, 2018, 2021, 2024, 2025, 2029, 2030, 2035, 2036, 2038, 2040, 2042, 2043, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2056, 2057, 2060, 2061, 2062, 2065, 2067, 2069, 2070, 2071, 2072, 2074, 2075, 2077, 2079, 2081, 2082, 2084, 2085, 2087, 2088, 2091, 2092, 2093, 2097, 2099, 2101, 2102, 2103, 2104, 2106, 2107, 2110, 2111, 2112, 2113, 2114, 2115], "build": [1, 2, 3, 8, 9, 14, 15, 19, 28, 30, 35, 45, 47, 52, 64, 1273, 1283, 1527, 1574, 1598, 1633, 2013, 2014, 2024, 2037, 2043, 2067, 2072, 2078, 2087, 2088, 2094, 2095, 2098, 2101, 2113], "basic": [1, 3, 7, 9, 12, 30, 47, 64, 66, 67, 1170, 1171, 1172, 1280, 1346, 1717, 1803, 2013, 2018, 2037, 2044, 2046, 2048, 2054, 2070, 2078, 2086, 2087, 2102, 2104, 2106], "jacobian": [1, 35, 56, 57, 59, 60, 897, 902, 912, 915, 917, 918, 923, 924, 1171, 1172, 1173, 1177, 1178, 1977, 2043, 2049, 2054], "hessian": [1, 56, 59, 60, 913, 916, 1171, 1172, 2041, 2050], "etc": [1, 3, 5, 11, 12, 23, 24, 28, 33, 35, 37, 47, 48, 52, 53, 55, 863, 864, 865, 932, 978, 990, 1190, 1270, 1273, 1462, 1527, 1717, 1849, 2012, 2016, 2017, 2021, 2024, 2049, 2050, 2051, 2057, 2059, 2065, 2067, 2069, 2070, 2072, 2077, 2082, 2084, 2087, 2091, 2098, 2102, 2111], "user": [1, 2, 5, 7, 8, 9, 10, 11, 12, 15, 19, 23, 24, 28, 29, 30, 33, 34, 35, 36, 37, 40, 44, 47, 48, 51, 53, 55, 56, 59, 60, 63, 64, 66, 79, 86, 152, 337, 488, 735, 741, 742, 743, 748, 749, 759, 767, 797, 800, 802, 841, 858, 865, 866, 897, 904, 907, 909, 910, 
918, 978, 1012, 1045, 1160, 1166, 1167, 1188, 1273, 1310, 1345, 1527, 1571, 1573, 1574, 1575, 1685, 1710, 1711, 1717, 1724, 1725, 1734, 1748, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 2012, 2013, 2014, 2016, 2017, 2021, 2024, 2029, 2033, 2035, 2036, 2042, 2043, 2044, 2046, 2049, 2054, 2055, 2056, 2057, 2059, 2062, 2065, 2067, 2069, 2071, 2072, 2074, 2077, 2078, 2082, 2084, 2085, 2086, 2087, 2089, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2109, 2111, 2113, 2114], "input": [1, 2, 3, 5, 9, 11, 12, 15, 21, 23, 24, 27, 28, 29, 32, 33, 34, 35, 36, 40, 53, 55, 56, 57, 58, 59, 60, 61, 64, 66, 73, 74, 75, 79, 82, 84, 86, 87, 88, 90, 152, 262, 282, 303, 403, 485, 501, 519, 611, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 696, 697, 698, 699, 700, 701, 702, 723, 731, 732, 737, 738, 741, 742, 743, 744, 745, 746, 748, 749, 759, 761, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 793, 794, 795, 796, 797, 799, 800, 801, 802, 803, 805, 817, 820, 822, 840, 841, 842, 843, 861, 862, 865, 866, 867, 868, 869, 870, 871, 872, 873, 874, 875, 876, 878, 879, 880, 881, 882, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 897, 904, 905, 907, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 936, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 959, 960, 962, 964, 965, 967, 968, 969, 970, 971, 972, 973, 975, 976, 978, 987, 990, 991, 992, 993, 994, 995, 996, 997, 998, 1008, 1024, 1051, 1052, 1054, 1088, 1089, 1090, 1091, 1093, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1109, 1110, 1111, 1112, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1178, 1184, 1186, 1188, 1214, 1215, 1216, 1217, 1218, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1250, 1251, 1252, 1253, 1255, 1258, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1273, 1276, 1277, 1280, 1281, 1285, 1287, 1289, 1290, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358, 1359, 1361, 1362, 1364, 1366, 1367, 1368, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1380, 1381, 1396, 1412, 1413, 1414, 1415, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1425, 1426, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 
1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1539, 1540, 1541, 1542, 1543, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1707, 1709, 1710, 1711, 1714, 1715, 1716, 1717, 1724, 1725, 1735, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1749, 1751, 1752, 1753, 1754, 1758, 1759, 1760, 1767, 1770, 1771, 1772, 1774, 1775, 1777, 1779, 1780, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1802, 1814, 1815, 1816, 1817, 1818, 1819, 1820, 1821, 1822, 1823, 1824, 1825, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1835, 1837, 1839, 1841, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1856, 1858, 1860, 1861, 1862, 1865, 1866, 1867, 1868, 1871, 1878, 1879, 1880, 1892, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1900, 1901, 1902, 1904, 1905, 1906, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1917, 1918, 1919, 1920, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1931, 1935, 1938, 1939, 1940, 1941, 1942, 1944, 1946, 1947, 1948, 1949, 1952, 1953, 1955, 1958, 1959, 1960, 1961, 1962, 1964, 1965, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1980, 1981, 2011, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2021, 2024, 2027, 2035, 2036, 2037, 2041, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2052, 2056, 2057, 2060, 2061, 2062, 2064, 2065, 2067, 2069, 2071, 2072, 2073, 2075, 2076, 2077, 2078, 2082, 2083, 2085, 2086, 2088, 2089, 2092, 2093, 2095, 2098, 2100, 2101, 2102, 2103, 2104, 2108, 2111, 2112, 2113, 2114, 2117], "set": [1, 2, 3, 5, 8, 9, 13, 14, 19, 20, 22, 23, 24, 25, 27, 28, 29, 30, 32, 33, 35, 37, 40, 45, 47, 48, 50, 51, 52, 53, 55, 58, 59, 62, 63, 64, 66, 81, 83, 90, 152, 156, 256, 332, 447, 448, 449, 450, 451, 460, 498, 501, 522, 582, 583, 683, 738, 744, 746, 763, 782, 788, 795, 796, 797, 800, 818, 819, 820, 823, 824, 825, 828, 829, 845, 858, 862, 863, 865, 866, 869, 882, 894, 896, 897, 904, 908, 909, 910, 912, 913, 914, 915, 916, 917, 918, 920, 921, 943, 955, 960, 971, 975, 976, 984, 992, 1005, 1009, 1019, 1034, 1043, 1055, 1056, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1110, 1111, 1112, 1161, 1166, 1175, 1181, 1189, 1236, 1253, 1254, 1273, 1277, 1281, 1283, 1285, 1287, 1289, 1290, 1309, 1310, 1313, 1319, 1326, 1328, 1330, 1331, 1343, 1344, 1345, 1346, 1360, 1363, 1366, 1388, 1392, 1393, 1394, 1409, 1431, 1435, 1439, 1440, 1441, 1442, 1443, 1444, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1463, 1464, 1465, 1466, 
1467, 1471, 1478, 1481, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1512, 1514, 1518, 1519, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1537, 1541, 1542, 1543, 1559, 1560, 1567, 1571, 1573, 1575, 1576, 1577, 1589, 1590, 1598, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1617, 1618, 1619, 1620, 1621, 1626, 1633, 1637, 1644, 1645, 1654, 1655, 1656, 1669, 1671, 1677, 1685, 1704, 1716, 1717, 1719, 1720, 1732, 1738, 1767, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1813, 1827, 1834, 1836, 1840, 1842, 1843, 1851, 1852, 1863, 1864, 1867, 1868, 1869, 1870, 1871, 1873, 1874, 1875, 1876, 1877, 1892, 1908, 1913, 1922, 1923, 1929, 1943, 1953, 1954, 1955, 1956, 1965, 1968, 1969, 1972, 1973, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2012, 2014, 2016, 2017, 2018, 2021, 2023, 2026, 2027, 2028, 2029, 2030, 2033, 2036, 2041, 2042, 2044, 2045, 2046, 2048, 2049, 2050, 2051, 2054, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2065, 2068, 2069, 2070, 2071, 2072, 2073, 2075, 2076, 2077, 2087, 2089, 2090, 2092, 2093, 2095, 2097, 2101, 2103, 2104, 2105, 2106, 2108, 2109, 2110, 2112, 2113, 2114, 2116, 2117], "can": [1, 2, 3, 4, 5, 7, 8, 9, 11, 12, 14, 15, 17, 18, 19, 20, 21, 22, 23, 24, 27, 28, 30, 32, 33, 34, 35, 37, 39, 40, 41, 44, 45, 46, 47, 48, 50, 51, 52, 53, 55, 56, 57, 58, 59, 60, 61, 63, 64, 66, 67, 68, 72, 74, 75, 76, 77, 84, 85, 86, 87, 88, 89, 90, 152, 156, 256, 260, 325, 337, 460, 488, 489, 490, 515, 517, 547, 616, 619, 683, 690, 732, 744, 745, 746, 751, 760, 761, 763, 771, 772, 775, 776, 777, 788, 794, 795, 796, 797, 800, 802, 817, 823, 826, 827, 829, 840, 845, 858, 862, 863, 865, 866, 867, 883, 884, 893, 894, 895, 897, 899, 900, 902, 903, 904, 907, 909, 910, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 923, 924, 928, 929, 932, 935, 946, 963, 976, 978, 979, 985, 991, 998, 1009, 1011, 1021, 1023, 1025, 1032, 1033, 1043, 1051, 1053, 1057, 1059, 1061, 1065, 1066, 1092, 1109, 1110, 1113, 1114, 1127, 1129, 1130, 1131, 1132, 1145, 1148, 1161, 1166, 1167, 1168, 1169, 1171, 1172, 1173, 1176, 1177, 1178, 1181, 1182, 1184, 1186, 1187, 1188, 1192, 1198, 1200, 1201, 1210, 1212, 1216, 1217, 1227, 1230, 1235, 1236, 1264, 1270, 1271, 1273, 1274, 1276, 1277, 1279, 1280, 1284, 1285, 1287, 1289, 1290, 1295, 1298, 1305, 1313, 1316, 1320, 1321, 1326, 1329, 1332, 1333, 1337, 1343, 1345, 1362, 1363, 1365, 1375, 1385, 1386, 1420, 1422, 1423, 1424, 1429, 1430, 1431, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1445, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1463, 1464, 1468, 1469, 1470, 1471, 1474, 1475, 1478, 1483, 1484, 1485, 1487, 1493, 1495, 1496, 1497, 1500, 1501, 1502, 1513, 1517, 1518, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1528, 1529, 1532, 1533, 1534, 1536, 1537, 1538, 1541, 1543, 1544, 1545, 1546, 1547, 1548, 1555, 1556, 1559, 1564, 1567, 1570, 1571, 1572, 1573, 1574, 1575, 1577, 1578, 1580, 1591, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1628, 1629, 1644, 1658, 1659, 1660, 1678, 1685, 1704, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1719, 1720, 1722, 1737, 1758, 1759, 1762, 1765, 1770, 1772, 1776, 1778, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1793, 1794, 1795, 1796, 1797, 1798, 1800, 1801, 1802, 1803, 1806, 1807, 1809, 1813, 1815, 1824, 1828, 1836, 1840, 1850, 1856, 1859, 1871, 1873, 1875, 1900, 1903, 1910, 1911, 1912, 
1913, 1914, 1915, 1919, 1922, 1923, 1924, 1928, 1929, 1943, 1947, 1950, 1952, 1960, 1961, 1964, 1966, 1968, 1972, 1973, 1977, 1982, 1989, 1990, 2010, 2012, 2013, 2014, 2015, 2016, 2017, 2021, 2023, 2024, 2025, 2026, 2027, 2029, 2030, 2031, 2033, 2034, 2035, 2036, 2037, 2042, 2044, 2045, 2046, 2048, 2049, 2050, 2052, 2053, 2054, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2063, 2064, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2075, 2077, 2078, 2079, 2082, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2092, 2093, 2094, 2095, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2105, 2106, 2107, 2108, 2111, 2112, 2113, 2114, 2115, 2116, 2117, 2118, 2120], "lambda": [1, 12, 23, 30, 35, 58, 61, 63, 64, 66, 69, 74, 75, 78, 260, 489, 490, 928, 929, 964, 1168, 1173, 1177, 1178, 1188, 1309, 1310, 1311, 1312, 1345, 1482, 1565, 1577, 1674, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1797, 1803, 1805, 1808, 1902, 1977, 2018, 2043, 2049, 2050, 2069, 2070, 2077, 2089, 2104, 2114], "captur": [1, 11, 12, 15, 33, 52, 53, 64, 66, 69, 74, 75, 90, 683, 976, 978, 990, 1009, 1011, 1043, 1049, 1054, 1276, 1386, 1781, 1782, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1982, 2014, 2018, 2023, 2049, 2050, 2055, 2064, 2065, 2067, 2070, 2072, 2093, 2094, 2095, 2098, 2100, 2102, 2104, 2111, 2113], "f": [1, 10, 24, 28, 34, 35, 37, 39, 44, 45, 52, 53, 56, 57, 60, 61, 63, 64, 66, 67, 175, 260, 337, 379, 610, 902, 903, 943, 998, 1127, 1129, 1136, 1145, 1167, 1168, 1170, 1171, 1172, 1173, 1177, 1178, 1227, 1273, 1281, 1284, 1285, 1345, 1492, 1494, 1495, 1496, 1498, 1527, 1577, 1587, 1600, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1617, 1624, 1625, 1628, 1629, 1635, 1669, 1671, 1672, 1685, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1859, 1934, 1977, 2014, 2016, 2017, 2021, 2029, 2034, 2035, 2043, 2046, 2049, 2051, 2054, 2055, 2057, 2062, 2063, 2065, 2066, 2067, 2070, 2077, 2082, 2087, 2089, 2099, 2100, 2104, 2112, 2114], "three": [1, 2, 6, 9, 28, 32, 55, 57, 60, 64, 89, 892, 1107, 1109, 1227, 1236, 1317, 1320, 1321, 1322, 1337, 1365, 1438, 1456, 1459, 1496, 1499, 1522, 1685, 1731, 1772, 1803, 1871, 2017, 2043, 2046, 2048, 2049, 2067, 2070, 2072, 2077, 2079, 2082, 2087, 2101, 2110], "anoth": [1, 5, 7, 12, 23, 24, 28, 30, 33, 35, 36, 37, 50, 52, 55, 64, 488, 986, 1009, 1011, 1012, 1014, 1160, 1184, 1188, 1194, 1208, 1209, 1283, 1309, 1310, 1313, 1337, 1469, 1470, 1528, 1537, 1707, 1724, 1725, 1732, 1867, 1983, 2014, 2016, 2017, 2023, 2025, 2029, 2042, 2043, 2045, 2046, 2049, 2052, 2057, 2059, 2061, 2063, 2070, 2078, 2079, 2082, 2088, 2091, 2098, 2099, 2101, 2104, 2106, 2109, 2112, 2115], "constant": [1, 12, 23, 52, 53, 64, 66, 67, 71, 72, 75, 77, 747, 778, 824, 825, 979, 1092, 1149, 1150, 1151, 1195, 1212, 1270, 1273, 1277, 1289, 1321, 1346, 1451, 1452, 1453, 1470, 1473, 1480, 1536, 1559, 1576, 1579, 1630, 1633, 1672, 1732, 1779, 1795, 1800, 1803, 1950, 2015, 2017, 2042, 2049, 2057, 2061, 2067, 2069, 2082, 2101, 2103, 2104, 2106, 2113], "boolean": [1, 12, 14, 35, 47, 64, 66, 74, 75, 402, 404, 893, 909, 910, 919, 948, 950, 951, 953, 982, 990, 1011, 1012, 1014, 1114, 1188, 1195, 1202, 1216, 1230, 1262, 1263, 1264, 1265, 1266, 1269, 1298, 1346, 1362, 1367, 1424, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1527, 1542, 1567, 1571, 1685, 1716, 1734, 1737, 1827, 1922, 1923, 1947, 1972, 1973, 1982, 1983, 2016, 2018, 2046, 2049, 2084, 2085, 2087, 2088, 2089, 2091, 2101, 2104], "flag": [1, 2, 5, 14, 28, 29, 36, 48, 51, 52, 55, 58, 60, 64, 738, 800, 877, 912, 914, 919, 920, 921, 
945, 954, 967, 968, 969, 976, 983, 984, 1039, 1160, 1166, 1168, 1169, 1171, 1172, 1173, 1177, 1178, 1231, 1232, 1254, 1261, 1345, 1365, 1533, 1591, 1644, 1645, 1717, 1734, 1737, 1767, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1871, 1877, 1903, 1913, 1952, 1965, 1977, 2013, 2014, 2017, 2029, 2033, 2043, 2046, 2049, 2060, 2061, 2062, 2067, 2070, 2072, 2087, 2088, 2104, 2105, 2112, 2113], "inform": [1, 2, 3, 4, 5, 7, 8, 9, 17, 18, 20, 22, 23, 24, 28, 29, 30, 32, 33, 34, 37, 39, 40, 43, 47, 48, 52, 53, 55, 57, 62, 64, 81, 193, 210, 257, 315, 323, 489, 490, 500, 517, 519, 620, 683, 738, 914, 919, 920, 923, 924, 928, 929, 947, 1113, 1144, 1146, 1167, 1202, 1210, 1270, 1273, 1290, 1303, 1310, 1319, 1454, 1455, 1456, 1457, 1458, 1459, 1487, 1497, 1523, 1524, 1525, 1527, 1533, 1542, 1543, 1555, 1571, 1573, 1575, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1625, 1644, 1685, 1704, 1705, 1706, 1716, 1717, 1758, 1770, 1779, 1794, 1826, 1853, 1871, 1877, 1903, 1965, 2014, 2017, 2021, 2023, 2024, 2036, 2037, 2043, 2045, 2046, 2048, 2049, 2056, 2057, 2060, 2065, 2067, 2070, 2071, 2072, 2085, 2087, 2088, 2089, 2093, 2097, 2100, 2101, 2103, 2104, 2107, 2111, 2113, 2115, 2117, 2118], "between": [1, 2, 3, 7, 11, 17, 18, 24, 28, 33, 35, 36, 47, 48, 50, 52, 53, 55, 62, 63, 64, 198, 488, 515, 585, 586, 587, 589, 590, 609, 619, 692, 693, 694, 698, 699, 763, 775, 776, 777, 805, 861, 869, 888, 897, 902, 913, 915, 916, 917, 923, 924, 964, 976, 998, 1011, 1018, 1020, 1067, 1072, 1086, 1087, 1092, 1109, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1160, 1161, 1184, 1187, 1198, 1227, 1234, 1270, 1273, 1285, 1309, 1310, 1332, 1337, 1420, 1439, 1440, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1465, 1466, 1467, 1471, 1473, 1478, 1487, 1493, 1503, 1504, 1505, 1506, 1507, 1508, 1518, 1520, 1521, 1522, 1527, 1530, 1531, 1532, 1536, 1556, 1559, 1560, 1561, 1576, 1577, 1579, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1632, 1658, 1659, 1660, 1674, 1691, 1701, 1717, 1724, 1725, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1802, 1803, 1809, 1811, 1820, 1828, 1832, 1833, 1838, 1839, 1843, 1868, 1921, 1922, 1923, 1924, 1928, 1949, 1950, 1972, 1973, 2014, 2016, 2017, 2018, 2024, 2033, 2036, 2042, 2043, 2045, 2046, 2048, 2054, 2055, 2057, 2059, 2061, 2062, 2064, 2065, 2067, 2069, 2072, 2073, 2077, 2079, 2082, 2084, 2085, 2087, 2092, 2098, 2101, 2111, 2113], "well": [1, 3, 5, 7, 9, 14, 19, 24, 28, 38, 47, 52, 55, 60, 61, 64, 65, 619, 794, 825, 828, 865, 1051, 1171, 1172, 1273, 1277, 1284, 1289, 1309, 1310, 1311, 1313, 1319, 1332, 1337, 1384, 1441, 1442, 1443, 1462, 1497, 1527, 1567, 1654, 1655, 1656, 1717, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1827, 1961, 2014, 2017, 2024, 2034, 2036, 2042, 2043, 2046, 2049, 2050, 2052, 2054, 2057, 2059, 2061, 2065, 2067, 2070, 2072, 2075, 2077, 2079, 2082, 2086, 2087, 2091, 2095, 2099, 2101, 2103, 2104, 2105, 2109, 2113], "relat": [1, 6, 7, 9, 23, 28, 33, 37, 52, 53, 54, 55, 60, 66, 67, 72, 75, 81, 683, 983, 1184, 1227, 1313, 1473, 1559, 1579, 1717, 1817, 1966, 2036, 2043, 2062, 2069, 2070, 2077, 2082, 2083, 2089, 2095, 2104, 2114], "mechan": [1, 8, 28, 30, 41, 45, 47, 53, 64, 883, 919, 920, 978, 1113, 1273, 1527, 1588, 1685, 1770, 1943, 2013, 2033, 2036, 2049, 2052, 2056, 2057, 2075, 2077, 2078, 
2098, 2101], "confus": [1, 8, 66, 1273, 1527, 2043, 2046, 2070, 2082, 2105], "spars": [1, 11, 192, 193, 210, 220, 325, 330, 344, 345, 437, 545, 546, 547, 548, 583, 585, 586, 587, 588, 589, 590, 616, 692, 748, 749, 923, 924, 1083, 1214, 1238, 1258, 1346, 1368, 1378, 1423, 1469, 1470, 1561, 1624, 1625, 1650, 1783, 1798, 1817, 1865, 1898, 1903, 1910, 1911, 1912, 1913, 1914, 1915, 1920, 1929, 1949, 1965, 1966, 2013, 2015, 2019, 2024, 2041, 2060, 2061, 2068, 2074, 2085, 2086, 2089, 2100, 2108], "param": [1, 2, 11, 30, 32, 35, 39, 47, 55, 57, 59, 62, 64, 490, 767, 798, 1166, 1176, 1205, 1273, 1292, 1527, 1533, 1537, 1538, 1715, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1791, 1793, 1794, 1795, 1796, 1797, 1798, 1811, 2015, 2027, 2041, 2042, 2046, 2048, 2069, 2114], "receiv": [1, 7, 9, 23, 28, 30, 32, 33, 35, 53, 60, 63, 1065, 1273, 1527, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1812, 2017, 2030, 2033, 2042, 2043, 2059, 2077, 2078, 2079, 2082, 2100, 2113, 2115], "dure": [1, 5, 14, 18, 19, 24, 28, 30, 33, 37, 39, 41, 47, 52, 55, 63, 64, 66, 73, 83, 85, 86, 337, 490, 505, 506, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 796, 830, 893, 894, 904, 907, 909, 910, 1009, 1043, 1053, 1054, 1066, 1067, 1087, 1284, 1286, 1287, 1289, 1345, 1435, 1441, 1442, 1443, 1463, 1464, 1469, 1470, 1489, 1490, 1491, 1546, 1567, 1573, 1575, 1618, 1624, 1625, 1685, 1707, 1717, 1719, 1720, 1765, 1779, 1812, 1909, 1954, 1956, 2016, 2021, 2023, 2024, 2027, 2029, 2030, 2034, 2036, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2052, 2054, 2056, 2057, 2060, 2061, 2062, 2065, 2067, 2070, 2071, 2072, 2075, 2077, 2079, 2089, 2095, 2098, 2099, 2101, 2104, 2110, 2111, 2112, 2113, 2115], "accumul": [1, 2, 55, 152, 292, 315, 320, 321, 323, 473, 490, 689, 897, 918, 1188, 1343, 1717, 1782, 1783, 1965, 2014, 2015, 2030, 2043, 2046, 2048, 2051, 2060, 2069, 2073, 2077, 2078, 2082, 2100, 2101, 2104, 2108, 2113], "initi": [1, 2, 3, 4, 8, 17, 18, 20, 23, 24, 30, 32, 33, 34, 37, 40, 47, 48, 51, 52, 55, 82, 90, 498, 501, 582, 715, 716, 717, 718, 719, 720, 723, 733, 734, 735, 736, 748, 749, 759, 763, 767, 865, 866, 883, 932, 986, 1011, 1019, 1040, 1045, 1046, 1049, 1050, 1076, 1110, 1111, 1112, 1220, 1249, 1271, 1273, 1286, 1287, 1345, 1346, 1363, 1408, 1441, 1442, 1443, 1444, 1446, 1457, 1458, 1459, 1463, 1469, 1470, 1478, 1479, 1481, 1489, 1490, 1491, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1527, 1535, 1542, 1543, 1544, 1545, 1567, 1577, 1624, 1707, 1716, 1717, 1719, 1720, 1731, 1734, 1737, 1765, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1813, 1867, 1868, 1869, 1910, 1911, 1912, 1913, 1914, 1915, 1943, 1982, 1994, 1996, 1997, 1999, 2002, 2013, 2015, 2016, 2017, 2025, 2030, 2036, 2041, 2043, 2046, 2048, 2049, 2052, 2056, 2063, 2065, 2067, 2069, 2070, 2072, 2076, 2077, 2078, 2098, 2101, 2111, 2112, 2120], "memori": [1, 2, 3, 5, 11, 20, 24, 27, 30, 32, 35, 36, 53, 55, 59, 64, 152, 157, 172, 174, 177, 180, 181, 182, 197, 208, 211, 235, 242, 256, 269, 299, 327, 333, 339, 341, 342, 395, 447, 448, 449, 450, 451, 460, 465, 488, 501, 502, 526, 527, 582, 605, 619, 625, 737, 766, 882, 884, 897, 904, 907, 909, 923, 924, 930, 958, 973, 976, 1009, 1010, 1013, 1016, 1017, 1019, 1033, 1034, 1043, 1044, 1047, 1051, 1053, 1054, 1057, 1059, 1060, 1061, 1063, 1064, 1065, 1066, 1067, 1073, 1074, 1075, 1079, 1109, 1110, 1111, 1112, 1160, 1161, 
1162, 1163, 1165, 1166, 1167, 1172, 1174, 1178, 1273, 1291, 1319, 1345, 1382, 1384, 1385, 1393, 1470, 1497, 1498, 1527, 1571, 1572, 1573, 1575, 1685, 1703, 1717, 1724, 1725, 1731, 1758, 1770, 1777, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1836, 1837, 1839, 1840, 1841, 1842, 1867, 1943, 1965, 1977, 1990, 2011, 2013, 2014, 2025, 2027, 2033, 2035, 2036, 2037, 2043, 2049, 2052, 2059, 2065, 2067, 2071, 2072, 2077, 2082, 2084, 2085, 2086, 2088, 2089, 2098, 2103, 2104, 2106, 2110, 2112], "overlap": [1, 23, 24, 28, 30, 32, 55, 64, 488, 683, 882, 923, 924, 1112, 1167, 1270, 1473, 1579, 1717, 1924, 1965, 2046, 2048, 2052, 2085, 2100, 2104, 2112], "dens": [1, 32, 220, 547, 548, 585, 586, 587, 588, 589, 590, 945, 954, 1231, 1232, 1293, 1346, 1798, 1898, 1901, 1905, 1906, 1909, 1910, 1911, 1912, 1914, 1915, 1920, 1929, 1965, 2036, 2061, 2082, 2085, 2100], "stride": [1, 11, 53, 140, 256, 341, 447, 448, 449, 450, 451, 501, 522, 546, 583, 585, 586, 587, 589, 590, 619, 715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 741, 742, 743, 744, 745, 746, 771, 772, 775, 776, 777, 785, 786, 869, 882, 945, 954, 1110, 1112, 1122, 1127, 1145, 1161, 1164, 1165, 1188, 1201, 1231, 1232, 1238, 1273, 1283, 1287, 1293, 1344, 1360, 1378, 1436, 1437, 1438, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1494, 1495, 1496, 1503, 1504, 1505, 1506, 1507, 1508, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1539, 1579, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1627, 1654, 1655, 1656, 1658, 1659, 1660, 1661, 1662, 1663, 1703, 1717, 1776, 1832, 1833, 1836, 1838, 1839, 1840, 1842, 1843, 1850, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1902, 1905, 1949, 1954, 1956, 1975, 2010, 2015, 2018, 2021, 2034, 2036, 2067, 2068, 2077, 2082, 2084, 2085, 2087, 2088, 2089, 2100, 2101, 2102, 2103, 2104, 2108, 2113], "otherwis": [1, 2, 3, 5, 7, 9, 11, 14, 19, 23, 28, 29, 36, 40, 52, 55, 56, 60, 64, 90, 211, 323, 330, 334, 338, 340, 344, 345, 497, 506, 562, 582, 583, 585, 605, 619, 625, 683, 689, 690, 691, 692, 693, 696, 698, 699, 700, 702, 738, 763, 771, 772, 787, 797, 820, 823, 824, 844, 865, 866, 869, 896, 902, 909, 910, 914, 923, 943, 944, 960, 1049, 1109, 1115, 1148, 1160, 1187, 1188, 1201, 1204, 1264, 1270, 1273, 1276, 1280, 1286, 1295, 1319, 1334, 1343, 1345, 1346, 1361, 1366, 1371, 1373, 1374, 1376, 1378, 1379, 1418, 1421, 1437, 1438, 1460, 1462, 1465, 1466, 1467, 1471, 1478, 1482, 1483, 1484, 1485, 1487, 1497, 1513, 1527, 1531, 1532, 1533, 1534, 1535, 1537, 1543, 1544, 1546, 1559, 1565, 1567, 1570, 1571, 1573, 1575, 1576, 1577, 1601, 1602, 1635, 1637, 1638, 1642, 1678, 1689, 1717, 1731, 1736, 1737, 1758, 1759, 1761, 1762, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 1815, 1817, 1825, 1827, 1834, 1843, 1850, 1863, 1871, 1878, 1922, 1923, 1924, 1927, 1960, 1961, 1962, 1972, 1973, 1980, 2012, 2014, 2016, 2017, 2021, 2024, 2027, 2034, 2036, 2042, 2049, 2050, 2059, 2065, 2066, 2067, 2072, 2073, 2075, 2076, 2077, 2079, 2083, 2084, 2085, 2086, 2087, 2089, 2095, 2114], "rowmajor": [1, 1717], "contigu": [1, 11, 24, 30, 333, 501, 522, 619, 1472, 1530, 1674, 1717, 1724, 1725, 1844, 1850, 1928, 2015, 2035, 2036, 2068, 2074, 2082, 2084, 2086, 2101], "create_graph": [1, 152, 490, 897, 912, 913, 914, 915, 916, 917, 918, 2015, 2042, 2049], "preserv": [1, 5, 12, 23, 33, 35, 52, 55, 64, 90, 501, 782, 788, 858, 880, 883, 1150, 1151, 1167, 1201, 1273, 1277, 1284, 1527, 1528, 1537, 1580, 1644, 1704, 1780, 1859, 1863, 1900, 1935, 1943, 
1970, 2014, 2027, 2034, 2036, 2041, 2046, 2050, 2061, 2064, 2065, 2067, 2077, 2082, 2085, 2098, 2103], "replac": [1, 14, 19, 23, 24, 28, 30, 46, 48, 52, 55, 57, 58, 59, 60, 61, 64, 86, 87, 89, 121, 423, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 750, 817, 842, 862, 967, 1054, 1109, 1166, 1167, 1188, 1278, 1281, 1291, 1363, 1364, 1413, 1417, 1559, 1570, 1707, 1733, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1767, 1769, 1827, 1924, 1928, 1952, 2014, 2015, 2017, 2027, 2046, 2049, 2050, 2054, 2056, 2059, 2063, 2067, 2068, 2070, 2072, 2089, 2099, 2100, 2104, 2106, 2112], "preexist": [1, 2071, 2100, 2103], "behavior": [1, 2, 7, 11, 14, 20, 22, 24, 28, 29, 30, 32, 33, 35, 36, 40, 45, 48, 52, 53, 55, 59, 60, 63, 64, 88, 256, 321, 473, 501, 515, 546, 619, 690, 788, 796, 829, 882, 896, 909, 910, 952, 958, 960, 962, 975, 978, 1008, 1104, 1109, 1110, 1111, 1112, 1154, 1162, 1163, 1167, 1202, 1273, 1284, 1287, 1289, 1319, 1326, 1330, 1343, 1344, 1345, 1360, 1368, 1375, 1463, 1466, 1487, 1497, 1527, 1543, 1567, 1580, 1598, 1633, 1703, 1704, 1707, 1708, 1717, 1718, 1769, 1771, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1821, 1827, 1843, 1850, 1891, 1928, 1950, 1965, 2012, 2014, 2018, 2019, 2021, 2023, 2025, 2034, 2036, 2038, 2044, 2046, 2049, 2050, 2060, 2061, 2062, 2067, 2069, 2070, 2073, 2074, 2077, 2083, 2086, 2100, 2101, 2103, 2105, 2116], "let": [1, 7, 8, 23, 24, 33, 35, 48, 60, 64, 488, 498, 968, 969, 971, 1188, 1197, 1227, 1273, 1293, 1303, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1319, 1320, 1323, 1325, 1332, 1334, 1336, 1337, 1527, 1717, 1731, 1943, 2021, 2034, 2035, 2043, 2046, 2048, 2049, 2050, 2052, 2057, 2059, 2060, 2061, 2062, 2063, 2070, 2078, 2079, 2082, 2087, 2098, 2099, 2101, 2103, 2104, 2105, 2106, 2107, 2112, 2113, 2116], "first": [1, 4, 5, 7, 9, 14, 18, 19, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 40, 44, 47, 48, 52, 53, 55, 61, 64, 66, 75, 78, 218, 256, 288, 292, 591, 612, 689, 692, 694, 697, 763, 796, 797, 817, 858, 861, 863, 878, 879, 888, 893, 894, 895, 896, 898, 909, 910, 918, 923, 932, 941, 943, 944, 948, 949, 951, 952, 953, 956, 958, 960, 974, 1008, 1011, 1022, 1097, 1099, 1100, 1101, 1106, 1109, 1114, 1127, 1129, 1148, 1163, 1168, 1169, 1171, 1172, 1173, 1177, 1178, 1186, 1201, 1216, 1227, 1230, 1238, 1239, 1240, 1250, 1262, 1271, 1281, 1286, 1287, 1293, 1298, 1303, 1306, 1313, 1329, 1332, 1339, 1340, 1342, 1345, 1346, 1362, 1368, 1371, 1374, 1375, 1376, 1378, 1396, 1413, 1419, 1424, 1427, 1431, 1437, 1438, 1444, 1455, 1456, 1458, 1459, 1469, 1470, 1472, 1477, 1478, 1492, 1495, 1496, 1497, 1512, 1519, 1521, 1522, 1543, 1556, 1615, 1703, 1707, 1717, 1724, 1725, 1734, 1735, 1737, 1748, 1758, 1764, 1784, 1785, 1786, 1787, 1788, 1794, 1797, 1798, 1802, 1806, 1809, 1811, 1817, 1827, 1828, 1854, 1855, 1863, 1871, 1890, 1905, 1908, 1913, 1944, 1945, 1949, 1954, 1956, 1962, 1971, 1974, 1977, 1979, 1982, 1996, 2012, 2014, 2016, 2017, 2021, 2024, 2028, 2033, 2034, 2036, 2042, 2043, 2046, 2049, 2051, 2052, 2053, 2054, 2057, 2059, 2060, 2061, 2062, 2063, 2065, 2067, 2069, 2071, 2072, 2077, 2078, 2079, 2081, 2082, 2083, 2087, 2092, 2095, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2111, 2112, 2113, 2114], "accord": [1, 9, 30, 33, 34, 37, 52, 800, 865, 866, 884, 946, 958, 1107, 1237, 1334, 1367, 1413, 1431, 1470, 1539, 1685, 1707, 1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1803, 1809, 1828, 1891, 1916, 1929, 1944, 1978, 2017, 2030, 2035, 2036, 2057, 2061, 2063, 2070, 2079, 2089], "retain": [1, 9, 28, 696, 698, 699, 
702, 878, 879, 884, 1277, 1295, 1326, 1330, 1343, 1361, 1371, 1373, 1374, 1376, 1379, 1418, 1419, 1420, 1421, 1447, 1772, 1825, 1828, 1859, 1922, 1923, 1927, 1953, 1954, 1955, 1956, 1972, 1973, 2033, 2059, 2082], "over": [1, 9, 11, 12, 19, 23, 24, 28, 29, 33, 35, 37, 45, 48, 52, 55, 56, 57, 58, 59, 60, 61, 62, 64, 66, 69, 74, 75, 78, 483, 683, 700, 741, 742, 743, 744, 745, 746, 769, 770, 775, 776, 777, 785, 786, 896, 909, 910, 932, 936, 963, 1020, 1065, 1067, 1072, 1087, 1088, 1089, 1090, 1091, 1109, 1166, 1170, 1172, 1176, 1178, 1184, 1186, 1227, 1235, 1273, 1289, 1306, 1326, 1330, 1342, 1343, 1345, 1353, 1373, 1375, 1421, 1428, 1429, 1430, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1470, 1473, 1474, 1475, 1481, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1499, 1515, 1518, 1519, 1520, 1521, 1522, 1527, 1530, 1531, 1532, 1534, 1541, 1542, 1556, 1559, 1560, 1562, 1567, 1576, 1579, 1592, 1593, 1594, 1595, 1596, 1597, 1600, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1617, 1628, 1629, 1645, 1651, 1654, 1655, 1656, 1658, 1659, 1660, 1669, 1670, 1677, 1685, 1711, 1716, 1722, 1733, 1769, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1832, 1833, 1848, 1907, 1909, 1922, 1923, 1924, 1927, 1945, 1960, 1972, 1973, 1977, 2017, 2030, 2033, 2034, 2035, 2042, 2044, 2045, 2046, 2048, 2049, 2050, 2052, 2054, 2057, 2059, 2060, 2065, 2069, 2072, 2077, 2078, 2082, 2090, 2091, 2098, 2100, 2101, 2102, 2103, 2104, 2113, 2115, 2116], "time": [1, 3, 4, 7, 8, 9, 14, 19, 23, 24, 28, 29, 30, 32, 33, 35, 36, 37, 44, 47, 48, 50, 52, 53, 55, 60, 63, 64, 66, 76, 77, 90, 292, 315, 488, 489, 495, 619, 683, 688, 689, 690, 691, 692, 693, 694, 697, 748, 749, 759, 763, 767, 771, 772, 802, 804, 827, 845, 881, 918, 928, 929, 930, 931, 937, 939, 944, 956, 964, 966, 976, 991, 998, 1011, 1012, 1014, 1054, 1067, 1087, 1090, 1101, 1123, 1124, 1130, 1131, 1132, 1138, 1149, 1150, 1151, 1159, 1163, 1172, 1173, 1174, 1177, 1178, 1182, 1192, 1202, 1262, 1270, 1273, 1281, 1286, 1289, 1290, 1294, 1299, 1303, 1304, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1318, 1319, 1320, 1323, 1325, 1329, 1332, 1334, 1336, 1337, 1345, 1346, 1368, 1378, 1386, 1412, 1413, 1415, 1425, 1433, 1434, 1436, 1437, 1438, 1440, 1441, 1442, 1443, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1470, 1473, 1474, 1475, 1478, 1489, 1490, 1491, 1497, 1499, 1513, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1539, 1540, 1542, 1543, 1564, 1567, 1579, 1580, 1581, 1582, 1598, 1601, 1602, 1628, 1629, 1674, 1675, 1676, 1693, 1709, 1710, 1711, 1714, 1715, 1716, 1717, 1731, 1732, 1735, 1737, 1771, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1805, 1815, 1816, 1827, 1849, 1855, 1905, 1919, 1924, 1925, 1945, 1977, 1982, 1983, 1996, 2013, 2016, 2017, 2018, 2021, 2023, 2025, 2027, 2033, 2041, 2043, 2045, 2048, 2049, 2051, 2052, 2054, 2056, 2057, 2059, 2061, 2062, 2063, 2065, 2067, 2069, 2070, 2071, 2072, 2073, 2077, 2078, 2079, 2082, 2087, 2093, 2094, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2109, 2110, 2112, 2115, 2117], "4": [1, 3, 5, 11, 12, 18, 19, 21, 23, 24, 28, 30, 32, 33, 34, 35, 36, 37, 45, 47, 52, 53, 60, 64, 66, 67, 68, 71, 72, 74, 75, 77, 193, 210, 244, 256, 291, 313, 315, 317, 319, 323, 403, 404, 447, 448, 473, 485, 489, 495, 498, 501, 515, 519, 525, 539, 546, 558, 560, 562, 586, 587, 609, 619, 686, 687, 688, 689, 692, 694, 695, 696, 698, 699, 700, 702, 742, 744, 745, 746, 749, 751, 760, 763, 776, 777, 827, 869, 
878, 879, 880, 884, 885, 886, 887, 888, 889, 891, 892, 904, 907, 909, 910, 912, 915, 916, 917, 944, 947, 950, 954, 955, 956, 960, 962, 965, 966, 969, 970, 971, 974, 981, 987, 990, 993, 994, 995, 996, 1008, 1089, 1092, 1099, 1101, 1103, 1104, 1107, 1108, 1109, 1112, 1114, 1124, 1125, 1127, 1129, 1137, 1143, 1145, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1157, 1159, 1160, 1163, 1166, 1168, 1176, 1177, 1178, 1188, 1214, 1215, 1216, 1227, 1230, 1234, 1235, 1236, 1237, 1239, 1240, 1244, 1248, 1250, 1262, 1264, 1270, 1279, 1280, 1285, 1287, 1294, 1295, 1296, 1297, 1298, 1299, 1303, 1305, 1306, 1312, 1314, 1316, 1317, 1318, 1321, 1323, 1326, 1328, 1330, 1332, 1334, 1336, 1339, 1340, 1341, 1343, 1344, 1347, 1349, 1350, 1355, 1357, 1358, 1360, 1362, 1367, 1368, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1379, 1396, 1412, 1413, 1421, 1422, 1423, 1424, 1431, 1436, 1447, 1448, 1449, 1451, 1452, 1455, 1456, 1458, 1459, 1467, 1469, 1470, 1471, 1473, 1477, 1478, 1497, 1498, 1523, 1524, 1530, 1532, 1534, 1539, 1540, 1543, 1549, 1550, 1551, 1552, 1553, 1567, 1579, 1580, 1581, 1582, 1583, 1584, 1587, 1588, 1589, 1590, 1598, 1600, 1609, 1612, 1624, 1625, 1633, 1644, 1669, 1672, 1675, 1676, 1703, 1704, 1705, 1706, 1707, 1717, 1724, 1725, 1731, 1737, 1748, 1760, 1761, 1763, 1765, 1771, 1772, 1773, 1775, 1777, 1781, 1794, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1816, 1820, 1821, 1824, 1825, 1827, 1828, 1830, 1834, 1836, 1838, 1840, 1842, 1843, 1844, 1845, 1846, 1847, 1849, 1850, 1854, 1855, 1856, 1858, 1859, 1863, 1875, 1882, 1884, 1885, 1890, 1891, 1893, 1895, 1900, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1924, 1927, 1930, 1931, 1939, 1941, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1950, 1953, 1954, 1955, 1956, 1958, 1959, 1960, 1962, 1963, 1964, 1965, 1971, 1974, 1975, 1976, 1977, 1978, 1979, 2011, 2013, 2014, 2015, 2016, 2017, 2018, 2021, 2024, 2033, 2035, 2036, 2041, 2043, 2044, 2046, 2048, 2049, 2052, 2057, 2059, 2062, 2063, 2065, 2067, 2070, 2072, 2074, 2077, 2082, 2083, 2085, 2086, 2087, 2088, 2089, 2098, 2100, 2101, 2102, 2104, 2108, 2111, 2113], "fact": [1, 3, 8, 55, 497, 945, 954, 992, 1198, 1227, 1231, 1232, 1309, 1310, 1337, 2014, 2049, 2052, 2054, 2067, 2078, 2082, 2098, 2101, 2103, 2107], "reset": [1, 28, 30, 763, 823, 828, 897, 1009, 1057, 1059, 1073, 1074, 1075, 1273, 1478, 1527, 1542, 1544, 1716, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1817, 1834, 2030, 2067, 2076], "phase": [1, 19, 24, 796, 1309, 1310, 1809, 1928, 2015, 2063, 2104, 2113], "iter": [1, 2, 3, 7, 19, 24, 28, 29, 30, 32, 33, 35, 48, 51, 52, 55, 64, 66, 71, 932, 943, 981, 1021, 1022, 1023, 1024, 1025, 1054, 1057, 1059, 1081, 1186, 1273, 1346, 1527, 1528, 1529, 1537, 1538, 1717, 1718, 1721, 1722, 1723, 1730, 1732, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1766, 1768, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1801, 1802, 1803, 1806, 1817, 1886, 1929, 2006, 2015, 2017, 2018, 2021, 2033, 2035, 2042, 2043, 2044, 2046, 2048, 2049, 2052, 2053, 2054, 2057, 2069, 2071, 2076, 2081, 2087, 2098, 2101, 2104, 2112, 2113, 2114], "recreat": [1, 2043, 2112], "valid": [1, 19, 25, 27, 28, 30, 33, 35, 45, 47, 52, 53, 64, 86, 90, 750, 751, 760, 794, 967, 978, 979, 985, 1051, 1109, 1188, 1198, 1271, 1273, 1274, 1295, 1309, 1310, 1319, 1320, 1321, 1332, 1337, 1368, 1454, 1455, 1456, 1608, 1609, 1610, 1633, 1707, 1743, 1748, 1752, 1778, 1779, 1799, 1800, 1805, 1806, 1807, 1808, 1810, 1811, 
1812, 1813, 1827, 2014, 2015, 2016, 2017, 2021, 2035, 2036, 2043, 2046, 2049, 2055, 2061, 2067, 2069, 2071, 2077, 2078, 2100, 2101, 2102, 2109, 2113], "altern": [1, 9, 23, 28, 33, 64, 683, 817, 1170, 1171, 1236, 1281, 1345, 1556, 1573, 1575, 1652, 1717, 1777, 1798, 1869, 1870, 1965, 2011, 2012, 2018, 2043, 2055, 2057, 2060, 2061, 2063, 2088, 2104, 2111], "assign": [1, 7, 9, 10, 23, 28, 33, 37, 47, 48, 52, 60, 62, 64, 417, 683, 841, 842, 843, 939, 1271, 1273, 1274, 1431, 1462, 1527, 1534, 1537, 1538, 1718, 1737, 2014, 2018, 2021, 2042, 2049, 2050, 2051, 2052, 2067, 2072, 2078, 2079, 2086, 2087], "never": [1, 5, 7, 24, 28, 47, 48, 52, 53, 223, 224, 799, 883, 964, 976, 1270, 1320, 1321, 1332, 1345, 1717, 1758, 2029, 2043, 2046, 2049, 2070, 2077, 2085, 2089, 2100], "long": [1, 7, 9, 23, 47, 52, 63, 737, 752, 754, 755, 756, 757, 759, 766, 784, 981, 1167, 1201, 1249, 1374, 1419, 1446, 1462, 1470, 1497, 1498, 1534, 1617, 1778, 1826, 1940, 1944, 1954, 1956, 1961, 2013, 2017, 2024, 2033, 2034, 2035, 2043, 2044, 2046, 2049, 2051, 2059, 2061, 2067, 2070, 2071, 2076, 2084, 2085, 2087, 2088, 2098, 2101, 2104, 2108, 2111, 2113], "hard": [1, 7, 8, 28, 33, 48, 52, 1109, 1346, 1482, 1635, 1636, 2014, 2015, 2016, 2043, 2052, 2067, 2070, 2104, 2105], "matter": [1, 4, 28, 55, 904, 905, 909, 1097, 1109, 1201, 1281, 1284, 1289, 1717, 2036, 2043, 2070], "discourag": [1, 1078, 1082, 1409, 2007, 2043, 2077, 2088], "aggress": [1, 55, 1270, 1724, 1725, 2043, 2077, 2104], "buffer": [1, 4, 23, 24, 28, 33, 52, 53, 55, 59, 62, 417, 884, 1022, 1054, 1163, 1166, 1176, 1273, 1281, 1284, 1295, 1345, 1441, 1442, 1443, 1463, 1500, 1501, 1502, 1527, 1567, 1709, 1717, 1719, 1726, 1728, 1734, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1755, 1765, 1767, 1795, 1797, 1859, 1900, 1947, 2016, 2037, 2043, 2048, 2049, 2057, 2062, 2065, 2067, 2104, 2117], "free": [1, 7, 28, 30, 35, 37, 47, 48, 55, 64, 66, 74, 75, 981, 1060, 1065, 1079, 1276, 1289, 1346, 2027, 2041, 2043, 2046, 2049, 2051, 2052, 2059, 2063, 2067, 2082, 2085, 2098, 2100, 2104, 2115], "reus": [1, 19, 28, 64, 488, 759, 1190, 2043, 2046, 2077, 2098, 2100, 2101, 2104, 2108, 2113, 2115], "effici": [1, 2, 3, 8, 11, 23, 34, 35, 37, 55, 56, 61, 152, 763, 897, 904, 906, 909, 918, 966, 1174, 1184, 1217, 1291, 1329, 1346, 1365, 1431, 1465, 1466, 1467, 1470, 1471, 1478, 1533, 1539, 1540, 1543, 1575, 1586, 1591, 1685, 2027, 2036, 2043, 2048, 2049, 2054, 2058, 2059, 2060, 2069, 2072, 2077, 2078, 2082, 2085, 2086, 2091, 2101, 2116], "few": [1, 7, 8, 24, 30, 37, 1109, 1167, 1470, 1717, 2012, 2017, 2043, 2046, 2049, 2051, 2053, 2055, 2060, 2063, 2067, 2069, 2072, 2075, 2082, 2086, 2088, 2091, 2100, 2101, 2104, 2106, 2107, 2111, 2113], "occas": [1, 7, 2043], "actual": [1, 8, 30, 37, 40, 52, 53, 55, 60, 64, 260, 683, 793, 991, 1127, 1145, 1198, 1285, 1289, 1457, 1458, 1459, 1633, 1717, 1749, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1803, 2012, 2016, 2017, 2025, 2034, 2043, 2046, 2048, 2049, 2052, 2054, 2059, 2061, 2063, 2065, 2072, 2077, 2084, 2089, 2100, 2101, 2103, 2104, 2106], "signific": [1, 3, 30, 1811, 1871, 2043, 2046, 2082, 2106, 2109], "amount": [1, 2, 3, 4, 7, 23, 28, 30, 37, 47, 64, 924, 1033, 1059, 1061, 1065, 1270, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1473, 1515, 1530, 1579, 1616, 1742, 1743, 1745, 1746, 1748, 1750, 1751, 1752, 1753, 1754, 1755, 1990, 2035, 2043, 2045, 2046, 2050, 2051, 2055, 2065, 2077, 2082, 2100, 2107, 2111, 2113, 2115], "unless": [1, 2, 4, 7, 19, 24, 28, 50, 
52, 55, 56, 64, 490, 501, 843, 862, 904, 905, 909, 947, 1009, 1043, 1166, 1273, 1317, 1322, 1335, 1345, 1374, 1480, 1527, 1575, 1711, 1748, 1767, 1792, 1798, 1863, 1965, 2017, 2021, 2023, 2036, 2043, 2046, 2052, 2061, 2062, 2067, 2070, 2073, 2100, 2111], "heavi": [1, 28, 2043, 2063, 2103], "pressur": [1, 55, 2043], "might": [1, 2, 3, 4, 9, 14, 15, 18, 19, 28, 30, 32, 34, 35, 47, 52, 60, 63, 64, 87, 89, 152, 488, 546, 897, 1008, 1289, 1290, 1633, 1717, 1718, 1724, 1725, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 2012, 2014, 2030, 2036, 2043, 2045, 2046, 2048, 2049, 2054, 2056, 2061, 2062, 2064, 2067, 2070, 2072, 2077, 2078, 2079, 2082, 2086, 2088, 2089, 2094, 2098, 2100, 2101, 2103, 2104, 2105, 2106, 2112, 2113], "keep": [1, 3, 5, 7, 23, 24, 30, 32, 34, 37, 47, 49, 52, 53, 55, 66, 488, 1012, 1188, 1345, 1441, 1442, 1443, 1469, 1489, 1490, 1491, 1536, 1567, 1633, 1644, 1717, 1744, 1780, 1848, 1871, 2012, 2013, 2014, 2033, 2035, 2036, 2043, 2046, 2048, 2050, 2051, 2054, 2059, 2065, 2069, 2072, 2077, 2078, 2079, 2100, 2101, 2102, 2103, 2104, 2111, 2115], "track": [1, 34, 37, 49, 52, 53, 66, 81, 82, 83, 88, 337, 919, 930, 1017, 1057, 1059, 1073, 1074, 1075, 1166, 1167, 1188, 1375, 1441, 1442, 1443, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1567, 1717, 1744, 2029, 2030, 2033, 2035, 2036, 2043, 2046, 2049, 2050, 2051, 2056, 2057, 2065, 2069, 2071, 2078, 2079, 2082, 2088, 2100, 2101, 2103, 2104, 2110], "appli": [1, 3, 5, 8, 12, 24, 30, 32, 33, 34, 35, 37, 40, 47, 55, 59, 60, 61, 64, 66, 77, 83, 84, 85, 121, 323, 400, 474, 488, 515, 519, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 738, 741, 742, 743, 744, 745, 746, 761, 763, 769, 770, 771, 772, 773, 774, 775, 776, 777, 783, 784, 785, 786, 787, 813, 814, 815, 816, 843, 862, 904, 905, 907, 908, 909, 910, 920, 921, 923, 949, 952, 978, 990, 993, 1097, 1099, 1104, 1113, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1157, 1166, 1167, 1174, 1177, 1178, 1182, 1188, 1198, 1273, 1277, 1323, 1346, 1363, 1428, 1429, 1430, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1448, 1449, 1450, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1468, 1474, 1475, 1476, 1477, 1478, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1499, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1526, 1527, 1530, 1531, 1532, 1533, 1534, 1535, 1541, 1542, 1543, 1546, 1547, 1548, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1581, 1582, 1592, 1593, 1594, 1595, 1596, 1597, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1626, 1628, 1629, 1630, 1631, 1634, 1636, 1637, 1638, 1639, 1643, 1644, 1645, 1647, 1648, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1658, 1659, 1660, 1664, 1669, 1677, 1678, 1679, 1680, 1682, 1685, 1686, 1687, 1688, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1707, 1716, 1717, 1724, 1725, 1731, 1732, 1733, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1758, 1759, 1766, 1767, 1769, 1770, 1772, 1782, 1794, 
1798, 1811, 1824, 1829, 1830, 1831, 1832, 1833, 1904, 1905, 1907, 1924, 1961, 1962, 1964, 1977, 2014, 2017, 2021, 2024, 2033, 2035, 2037, 2042, 2043, 2046, 2048, 2049, 2050, 2052, 2054, 2057, 2060, 2067, 2069, 2070, 2072, 2075, 2078, 2082, 2083, 2097, 2098, 2102, 2104, 2105, 2111, 2112], "save": [1, 5, 7, 11, 15, 24, 28, 30, 32, 33, 52, 55, 498, 840, 893, 894, 904, 907, 909, 910, 1174, 1272, 1273, 1278, 1281, 1289, 1291, 1345, 1527, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1805, 1808, 1812, 2013, 2014, 2015, 2021, 2025, 2028, 2035, 2049, 2050, 2052, 2053, 2057, 2059, 2061, 2065, 2068, 2069, 2070, 2071, 2077, 2082, 2087, 2092, 2095, 2098, 2101, 2104, 2106, 2115], "modifi": [1, 18, 19, 23, 24, 28, 29, 30, 32, 33, 55, 64, 66, 262, 489, 490, 794, 841, 842, 843, 904, 905, 907, 908, 909, 910, 928, 929, 930, 978, 991, 992, 1227, 1270, 1273, 1276, 1277, 1293, 1469, 1527, 1571, 1573, 1575, 1624, 1625, 1709, 1710, 1711, 1714, 1715, 1717, 1722, 1723, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 1801, 1809, 1890, 2014, 2017, 2034, 2042, 2046, 2049, 2050, 2057, 2062, 2070, 2072, 2077, 2083, 2086, 2088, 2093, 2100, 2101, 2102, 2103, 2104, 2109, 2113, 2116], "afterward": [1, 30, 52, 1527, 1707, 1717, 2052, 2082], "onc": [1, 7, 9, 15, 23, 24, 28, 29, 30, 32, 33, 35, 36, 39, 40, 47, 55, 58, 60, 63, 64, 904, 905, 906, 907, 909, 910, 912, 914, 931, 1109, 1273, 1277, 1345, 1463, 1527, 1717, 1735, 1737, 1769, 1807, 1811, 1827, 1873, 1877, 1968, 2014, 2017, 2021, 2027, 2030, 2033, 2042, 2043, 2045, 2046, 2049, 2052, 2054, 2056, 2057, 2069, 2070, 2082, 2087, 2098, 2101, 2102, 2104, 2110], "start": [1, 3, 4, 8, 9, 23, 24, 28, 30, 35, 37, 44, 46, 47, 48, 50, 55, 59, 63, 64, 235, 404, 435, 436, 498, 540, 701, 869, 898, 940, 941, 981, 1057, 1059, 1071, 1073, 1074, 1131, 1132, 1148, 1163, 1186, 1271, 1286, 1299, 1344, 1360, 1422, 1423, 1431, 1436, 1437, 1438, 1470, 1472, 1520, 1521, 1522, 1528, 1530, 1625, 1672, 1717, 1739, 1743, 1745, 1758, 1781, 1803, 1809, 1843, 1873, 1883, 1896, 1910, 1911, 1912, 1914, 1915, 2012, 2013, 2015, 2016, 2021, 2024, 2033, 2035, 2043, 2044, 2046, 2048, 2049, 2051, 2052, 2057, 2058, 2059, 2063, 2067, 2069, 2071, 2072, 2077, 2078, 2079, 2081, 2082, 2097, 2098, 2100, 2101, 2104, 2108, 2110, 2117], "sure": [1, 7, 9, 23, 28, 30, 33, 36, 39, 48, 51, 58, 64, 865, 866, 897, 1160, 1277, 1289, 1290, 1346, 1685, 1717, 1770, 1805, 1947, 2017, 2034, 2043, 2048, 2051, 2053, 2054, 2062, 2063, 2067, 2071, 2072, 2077, 2078, 2079, 2082, 2084, 2087, 2104, 2106, 2113], "been": [1, 5, 7, 9, 11, 17, 19, 23, 24, 28, 29, 32, 33, 34, 35, 37, 40, 47, 51, 52, 58, 59, 64, 86, 488, 490, 683, 763, 904, 905, 909, 981, 1012, 1014, 1019, 1047, 1050, 1108, 1113, 1167, 1276, 1277, 1283, 1288, 1345, 1408, 1478, 1485, 1497, 1543, 1598, 1633, 1717, 1724, 1725, 1734, 1739, 1743, 1745, 1765, 1766, 1781, 1801, 1802, 1803, 1809, 1811, 1827, 1924, 1940, 1968, 1979, 1983, 1999, 2021, 2023, 2025, 2030, 2033, 2034, 2035, 2042, 2043, 2045, 2046, 2052, 2054, 2055, 2057, 2059, 2061, 2063, 2065, 2067, 2069, 2070, 2077, 2079, 2082, 2087, 2095, 2098, 2099, 2101, 2104, 2109, 2113], "longer": [1, 28, 50, 59, 60, 488, 690, 894, 909, 923, 924, 1270, 1277, 1717, 1769, 1772, 2030, 2043, 2046, 2065, 2077, 2079, 2098, 2113], "find": [1, 7, 14, 28, 47, 60, 64, 84, 1109, 1181, 1184, 1227, 1295, 1346, 1457, 1458, 1459, 1497, 1779, 1817, 1828, 1863, 1929, 2012, 2025, 2027, 2033, 2035, 2043, 2045, 2046, 2048, 2049, 
2051, 2054, 2059, 2061, 2065, 2067, 2068, 2072, 2077, 2082, 2085, 2087, 2089, 2092, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2112, 2113, 2114, 2116], "quick": [1, 7, 58, 2024, 2057, 2066, 2096, 2111], "guid": [1, 8, 23, 82, 84, 85, 1285, 1769, 2013, 2021, 2046, 2050, 2053, 2070, 2072, 2111], "var": [1, 40, 45, 47, 51, 1441, 1442, 1443, 1480, 1481, 1489, 1490, 1491, 1499, 1567, 1630, 1831, 1973, 2015, 2017, 2034, 2068, 2100, 2108, 2113], "thing": [1, 3, 7, 8, 28, 33, 52, 60, 64, 1167, 1283, 1375, 1439, 1635, 1901, 2016, 2021, 2043, 2046, 2049, 2050, 2051, 2054, 2059, 2067, 2070, 2072, 2073, 2079, 2098, 2101, 2103, 2104, 2111, 2113], "detach": [1, 5, 66, 74, 75, 224, 417, 450, 460, 973, 1166, 1273, 1446, 1527, 1617, 1635, 1905, 1943, 2014, 2015, 2034, 2036, 2049, 2051, 2067, 2068, 2074, 2082, 2086, 2088, 2112], "register_hook": [1, 2034, 2043], "name": [1, 2, 3, 14, 19, 24, 28, 30, 33, 34, 35, 37, 40, 41, 44, 45, 47, 50, 52, 53, 55, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 89, 683, 700, 750, 751, 760, 795, 817, 818, 819, 820, 827, 843, 858, 862, 936, 939, 941, 985, 1037, 1161, 1166, 1176, 1179, 1181, 1186, 1188, 1271, 1273, 1274, 1281, 1284, 1289, 1290, 1304, 1309, 1310, 1316, 1317, 1319, 1320, 1321, 1322, 1332, 1333, 1335, 1337, 1338, 1345, 1527, 1578, 1586, 1709, 1714, 1715, 1717, 1731, 1732, 1733, 1734, 1736, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1766, 1767, 1769, 1778, 1779, 1859, 1869, 1966, 1968, 1971, 1992, 2012, 2013, 2014, 2015, 2017, 2018, 2021, 2023, 2028, 2029, 2030, 2033, 2041, 2043, 2046, 2048, 2049, 2054, 2056, 2057, 2062, 2065, 2067, 2068, 2069, 2070, 2071, 2077, 2078, 2084, 2087, 2088, 2089, 2091, 2092, 2093, 2095, 2099, 2101, 2102, 2104, 2107, 2110, 2111, 2112, 2113, 2114, 2115, 2118], "factori": [1, 2, 11, 35, 37, 41, 45, 47, 60, 827, 943, 1109, 1587, 1770, 1779, 1867, 2013, 2015, 2025, 2035, 2046, 2049, 2068, 2085, 2088, 2100, 2103], "ones": [1, 3, 19, 23, 28, 32, 34, 35, 52, 55, 60, 61, 63, 64, 66, 71, 256, 315, 404, 447, 448, 450, 517, 797, 863, 897, 913, 915, 916, 917, 918, 919, 923, 946, 958, 976, 1092, 1100, 1122, 1166, 1173, 1174, 1177, 1270, 1273, 1285, 1294, 1320, 1336, 1345, 1440, 1460, 1469, 1473, 1480, 1481, 1499, 1527, 1531, 1532, 1534, 1536, 1542, 1579, 1588, 1589, 1590, 1624, 1685, 1703, 1707, 1716, 1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1749, 1762, 1764, 1767, 1777, 1778, 1831, 1848, 1866, 1871, 1896, 1946, 1950, 1968, 1980, 2015, 2017, 2019, 2025, 2034, 2035, 2043, 2044, 2046, 2049, 2058, 2060, 2061, 2067, 2068, 2069, 2070, 2072, 2077, 2079, 2081, 2083, 2085, 2088, 2107, 2112, 2113], "autograd_tensor": 1, "kwarg": [1, 5, 14, 23, 28, 29, 30, 32, 33, 34, 47, 52, 53, 55, 64, 66, 582, 605, 750, 763, 765, 766, 822, 823, 824, 825, 827, 828, 894, 909, 910, 980, 1012, 1014, 1051, 1052, 1069, 1166, 1177, 1178, 1182, 1186, 1188, 1273, 1276, 1278, 1337, 1363, 1488, 1516, 1527, 1558, 1562, 1566, 1568, 1569, 1707, 1717, 1739, 1744, 1748, 1758, 1765, 1767, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1859, 1962, 1977, 1983, 2012, 2017, 2018, 2021, 2049, 2050, 2057, 2065, 2069, 2077, 2084, 2088, 2093, 2099, 2102, 2112, 2113, 2114], "base": [1, 3, 7, 9, 12, 14, 15, 20, 23, 28, 29, 30, 32, 33, 34, 35, 37, 40, 45, 47, 48, 50, 52, 53, 55, 60, 64, 87, 88, 89, 683, 794, 799, 803, 805, 823, 824, 825, 827, 828, 865, 866, 904, 907, 909, 920, 921, 930, 976, 998, 1070, 1071, 1086, 1107, 1109, 1152, 1194, 1204, 
1227, 1237, 1275, 1287, 1299, 1337, 1348, 1350, 1352, 1360, 1440, 1463, 1500, 1527, 1530, 1531, 1532, 1536, 1542, 1544, 1571, 1573, 1575, 1650, 1685, 1716, 1717, 1722, 1723, 1731, 1739, 1743, 1745, 1786, 1797, 1798, 1824, 1834, 1859, 1871, 1928, 1929, 1944, 1954, 1956, 1978, 2013, 2015, 2017, 2045, 2046, 2048, 2049, 2052, 2057, 2071, 2072, 2077, 2078, 2082, 2083, 2086, 2087, 2089, 2092, 2093, 2100, 2101, 2102, 2104, 2106], "static": [1, 3, 8, 14, 29, 33, 35, 37, 47, 52, 53, 55, 66, 71, 72, 75, 797, 802, 818, 820, 827, 829, 835, 861, 863, 893, 894, 895, 896, 909, 910, 1181, 1182, 1188, 1194, 1212, 1287, 1571, 1717, 1903, 2014, 2015, 2016, 2017, 2046, 2052, 2053, 2056, 2064, 2065, 2073, 2077, 2084, 2098, 2100, 2102], "Then": [1, 29, 33, 48, 64, 932, 1473, 1579, 1734, 1779, 1966, 2043, 2044, 2046, 2048, 2049, 2050, 2053, 2060, 2061, 2067, 2069, 2070, 2077, 2078, 2092, 2098, 2101, 2102, 2105], "op": [1, 4, 24, 28, 30, 33, 52, 53, 55, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 80, 84, 88, 89, 152, 505, 526, 606, 760, 795, 796, 797, 826, 829, 858, 865, 866, 897, 904, 907, 909, 910, 918, 966, 976, 978, 1015, 1030, 1032, 1036, 1037, 1051, 1052, 1078, 1082, 1084, 1167, 1224, 1274, 1287, 1289, 1290, 1345, 1351, 1400, 1404, 1409, 1410, 1544, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1815, 1873, 1952, 1984, 1987, 1989, 1991, 1992, 2004, 2007, 2008, 2013, 2017, 2023, 2024, 2027, 2030, 2034, 2035, 2045, 2046, 2048, 2049, 2059, 2065, 2070, 2071, 2072, 2073, 2074, 2075, 2082, 2084, 2086, 2088, 2093, 2098, 2099, 2103, 2104, 2105, 2106, 2108, 2111, 2112, 2113], "directli": [1, 3, 7, 9, 14, 15, 23, 24, 28, 30, 33, 34, 35, 36, 37, 46, 52, 53, 55, 59, 64, 66, 79, 559, 735, 750, 894, 904, 907, 909, 978, 1092, 1160, 1167, 1174, 1176, 1184, 1217, 1304, 1533, 1561, 1580, 1644, 1691, 1759, 1950, 2014, 2016, 2017, 2021, 2025, 2030, 2036, 2043, 2046, 2048, 2049, 2050, 2052, 2054, 2056, 2057, 2058, 2059, 2062, 2070, 2071, 2072, 2077, 2082, 2087, 2089, 2095, 2100, 2102, 2103, 2104, 2106, 2107, 2112, 2113], "ctx": [1, 45, 66, 70, 893, 894, 895, 904, 905, 906, 907, 908, 909, 910, 2021, 2042, 2049, 2050, 2067], "gradcheck": [1, 1902, 2013, 2021, 2049, 2082], "extend": [1, 23, 28, 30, 33, 35, 39, 50, 64, 865, 894, 896, 904, 907, 909, 910, 1529, 1538, 1902, 1966, 2013, 2036, 2043, 2056, 2058, 2059, 2067, 2068, 2070, 2075, 2082, 2100, 2101, 2114], "staticmethod": [1, 66, 894, 896, 904, 905, 906, 907, 908, 909, 910, 2017, 2042, 2049, 2067, 2077], "result": [1, 3, 4, 5, 7, 8, 9, 12, 14, 18, 19, 23, 24, 28, 30, 33, 35, 37, 40, 45, 48, 52, 53, 55, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 86, 87, 89, 155, 223, 224, 256, 257, 317, 337, 400, 498, 500, 501, 558, 585, 586, 587, 589, 590, 620, 689, 690, 691, 692, 693, 696, 698, 699, 700, 702, 763, 796, 881, 883, 902, 903, 912, 913, 914, 915, 916, 917, 923, 924, 932, 944, 947, 958, 962, 967, 973, 975, 976, 978, 979, 990, 994, 996, 1021, 1023, 1025, 1051, 1052, 1065, 1088, 1089, 1090, 1091, 1092, 1104, 1109, 1127, 1130, 1136, 1145, 1152, 1154, 1157, 1162, 1163, 1166, 1167, 1168, 1171, 1172, 1177, 1178, 1182, 1186, 1197, 1202, 1210, 1213, 1217, 1235, 1236, 1238, 1250, 1264, 1273, 1276, 1277, 1285, 1289, 1290, 1292, 1294, 1295, 1299, 1313, 1315, 1317, 1318, 1326, 1330, 1332, 1334, 1335, 1336, 1343, 1353, 1354, 1361, 1371, 1373, 1374, 1375, 1376, 1378, 1379, 1418, 1421, 1436, 1437, 1438, 1463, 1465, 1466, 1467, 1471, 1473, 1478, 1492, 1497, 1527, 1533, 1543, 1571, 1572, 1573, 1574, 1575, 1579, 1615, 
1633, 1644, 1700, 1703, 1704, 1717, 1731, 1734, 1767, 1770, 1771, 1772, 1773, 1779, 1782, 1809, 1815, 1817, 1824, 1825, 1828, 1834, 1847, 1853, 1868, 1883, 1895, 1906, 1908, 1909, 1913, 1920, 1922, 1923, 1924, 1927, 1928, 1929, 1939, 1943, 1944, 1949, 1950, 1952, 1953, 1955, 1965, 1968, 1972, 1973, 1977, 2014, 2015, 2016, 2017, 2021, 2024, 2025, 2027, 2034, 2035, 2036, 2041, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2050, 2051, 2054, 2055, 2057, 2059, 2060, 2061, 2062, 2064, 2065, 2067, 2069, 2071, 2072, 2073, 2074, 2077, 2078, 2081, 2082, 2085, 2087, 2088, 2089, 2093, 2094, 2095, 2099, 2100, 2101, 2103, 2107, 2110, 2111, 2112, 2113, 2114], "save_for_backward": [1, 894, 904, 906, 908, 909, 910, 2021, 2042, 2043, 2049, 2050, 2067], "grad_output": [1, 66, 893, 904, 905, 909, 910, 918, 924, 928, 929, 1273, 1527, 2015, 2043, 2046, 2049, 2050, 2057, 2108], "saved_tensor": [1, 904, 906, 907, 908, 909, 910, 2021, 2042, 2043, 2049, 2050], "inspect": [1, 28, 52, 55, 64, 935, 1273, 1285, 2042, 2049, 2056, 2070, 2073, 2085, 2104, 2106, 2107, 2114], "cost": [1, 3, 4, 8, 9, 24, 30, 33, 55, 488, 966, 976, 1329, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1685, 1867, 2046, 2052, 2061, 2073, 2078, 2085, 2109, 2113], "both": [1, 2, 3, 14, 19, 23, 24, 25, 28, 29, 30, 34, 35, 40, 45, 46, 47, 48, 52, 53, 55, 63, 64, 66, 74, 75, 99, 341, 501, 732, 771, 772, 775, 776, 777, 796, 855, 896, 903, 909, 910, 913, 914, 915, 916, 917, 944, 982, 998, 1032, 1065, 1104, 1110, 1111, 1112, 1125, 1155, 1156, 1157, 1166, 1215, 1217, 1227, 1234, 1250, 1263, 1264, 1270, 1273, 1287, 1295, 1296, 1344, 1360, 1368, 1374, 1389, 1390, 1436, 1437, 1441, 1442, 1443, 1451, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1481, 1487, 1489, 1490, 1491, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1520, 1521, 1522, 1527, 1533, 1567, 1575, 1579, 1580, 1583, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1658, 1659, 1660, 1685, 1717, 1726, 1728, 1737, 1767, 1784, 1785, 1797, 1809, 1832, 1833, 1871, 1876, 1901, 1903, 1905, 1924, 1928, 1949, 1950, 1989, 2014, 2016, 2017, 2021, 2024, 2034, 2035, 2037, 2042, 2043, 2044, 2049, 2050, 2052, 2053, 2054, 2057, 2060, 2061, 2062, 2067, 2070, 2072, 2075, 2076, 2077, 2079, 2082, 2083, 2084, 2089, 2092, 2093, 2094, 2098, 2100, 2102, 2103, 2104, 2105, 2106, 2110, 2113], "cpu": [1, 4, 5, 14, 17, 23, 28, 30, 53, 55, 66, 71, 75, 76, 77, 90, 121, 198, 291, 328, 337, 447, 448, 449, 450, 451, 460, 488, 582, 591, 869, 884, 937, 939, 945, 954, 996, 999, 1000, 1011, 1021, 1022, 1023, 1025, 1110, 1112, 1122, 1127, 1145, 1157, 1161, 1163, 1164, 1222, 1224, 1225, 1226, 1231, 1232, 1249, 1273, 1281, 1283, 1284, 1293, 1303, 1305, 1309, 1310, 1311, 1312, 1314, 1316, 1319, 1321, 1322, 1328, 1331, 1334, 1337, 1338, 1344, 1345, 1360, 1363, 1374, 1386, 1463, 1469, 1527, 1700, 1717, 1722, 1723, 1758, 1759, 1773, 1776, 1783, 1827, 1836, 1838, 1840, 1842, 1843, 1867, 1872, 1873, 1874, 1876, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1895, 1905, 1910, 1911, 1912, 1913, 1914, 1915, 1928, 1943, 1944, 1945, 1954, 1956, 1961, 1965, 1982, 2010, 2013, 2014, 2015, 2017, 2021, 2025, 2027, 2033, 2034, 2036, 2046, 2049, 2050, 2052, 2055, 2057, 2060, 2061, 2063, 2067, 2068, 2071, 2073, 2076, 2077, 2082, 2084, 2085, 2088, 2089, 2091, 2094, 2095, 2098, 2102, 2103, 2104, 2106, 2107, 2110, 2111, 2113], "There": [1, 5, 6, 7, 9, 12, 14, 19, 22, 28, 30, 47, 52, 53, 55, 56, 60, 61, 64, 86, 87, 89, 683, 737, 796, 894, 909, 910, 976, 1167, 1463, 
1497, 1543, 1685, 1717, 1724, 1725, 1765, 1770, 2012, 2014, 2016, 2017, 2023, 2024, 2035, 2043, 2046, 2049, 2050, 2051, 2056, 2059, 2063, 2064, 2067, 2070, 2071, 2072, 2073, 2077, 2079, 2088, 2091, 2098, 2100, 2101, 2103, 2104, 2111, 2112, 2113], "moment": [1, 66, 72, 775, 776, 777, 994, 1784, 1785, 1786, 1788, 1794, 1798, 2027, 2033, 2036, 2071, 2077, 2110], "nvprof": [1, 4, 933, 2046], "regist": [1, 15, 19, 24, 28, 32, 35, 47, 50, 52, 53, 55, 64, 82, 84, 85, 86, 87, 89, 489, 490, 683, 795, 796, 858, 928, 929, 976, 1054, 1188, 1273, 1345, 1527, 1528, 1529, 1537, 1538, 1556, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1718, 1731, 1732, 1734, 1735, 1737, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1966, 1968, 2021, 2030, 2033, 2046, 2048, 2049, 2056, 2057, 2062, 2065, 2067, 2070, 2077, 2094, 2101, 2103, 2106, 2109], "activ": [1, 5, 7, 9, 33, 34, 50, 52, 55, 61, 81, 83, 795, 797, 799, 806, 810, 845, 846, 855, 865, 866, 1019, 1034, 1047, 1065, 1166, 1168, 1178, 1346, 1435, 1465, 1466, 1467, 1471, 1481, 1499, 1526, 1528, 1546, 1557, 1571, 1573, 1575, 1626, 1664, 1688, 1717, 1735, 1737, 1767, 1772, 1977, 2013, 2027, 2033, 2046, 2049, 2050, 2052, 2057, 2067, 2069, 2071, 2072, 2073, 2074, 2077, 2082, 2092, 2093, 2098, 2103, 2108, 2113], "emit_nvtx": [1, 4], "vtune": [1, 4], "emit_itt": [1, 4], "use_cuda": [1, 2071], "use_devic": 1, "record_shap": [1, 2071], "with_flop": [1, 2071], "profile_memori": [1, 2071], "with_stack": [1, 2071], "with_modul": [1, 2071], "use_kineto": 1, "use_cpu": 1, "use_mtia": 1, "experimental_config": [1, 2071], "hold": [1, 28, 30, 45, 47, 50, 51, 52, 55, 59, 63, 64, 539, 1346, 1473, 1528, 1529, 1537, 1538, 1579, 1707, 1717, 1719, 1720, 1734, 1744, 1758, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1910, 1911, 1912, 1913, 1914, 1915, 2044, 2046, 2049, 2051, 2056, 2059, 2069, 2071, 2077, 2078, 2079, 2081, 2082, 2084, 2085, 2087, 2088, 2100, 2101, 2104], "summari": [1, 3, 48, 1066, 1875, 2013, 2030, 2052, 2087, 2093, 2101, 2107, 2113, 2115], "hood": [1, 52, 63, 2033, 2043, 2046, 2048, 2059, 2070, 2079, 2102, 2103], "just": [1, 2, 7, 14, 24, 28, 35, 37, 45, 52, 64, 591, 690, 700, 793, 826, 829, 893, 895, 909, 910, 932, 936, 991, 992, 1186, 1188, 1196, 1289, 1290, 1328, 1331, 1336, 1337, 1363, 1465, 1466, 1467, 1471, 1717, 1737, 1738, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1867, 1953, 1954, 1955, 1956, 1966, 2012, 2014, 2033, 2042, 2043, 2049, 2050, 2054, 2056, 2057, 2058, 2062, 2070, 2077, 2078, 2082, 2085, 2086, 2088, 2093, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2112], "record": [1, 5, 19, 20, 32, 38, 39, 40, 41, 45, 48, 52, 63, 64, 447, 448, 449, 450, 451, 488, 498, 822, 823, 824, 825, 828, 830, 869, 942, 945, 954, 1011, 1012, 1014, 1110, 1111, 1112, 1122, 1127, 1145, 1163, 1164, 1165, 1197, 1231, 1232, 1273, 1289, 1293, 1344, 1360, 1386, 1389, 1390, 1463, 1527, 1776, 1777, 1779, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1910, 1911, 1912, 1913, 1914, 1915, 1943, 1982, 1983, 2010, 2011, 2014, 2016, 2025, 2036, 2042, 2043, 2046, 2049, 2052, 2064, 2065, 2067, 2071, 2077, 2087, 2088, 2089, 2091, 2092, 2093, 2098, 2100, 2101, 2104, 2113, 2115, 2117], "event": [1, 31, 35, 37, 46, 50, 63, 488, 936, 937, 938, 1012, 1014, 1065, 1068, 1289, 1290, 1351, 1389, 1390, 1685, 1983, 2013, 2030, 2033, 2046, 2071, 2087, 2111, 2115, 2117], "being": [1, 3, 5, 9, 11, 12, 19, 20, 23, 28, 
30, 32, 33, 35, 37, 40, 47, 48, 50, 52, 53, 55, 59, 60, 63, 64, 66, 75, 83, 99, 152, 404, 417, 763, 796, 802, 896, 909, 910, 912, 918, 974, 978, 990, 1012, 1020, 1067, 1072, 1086, 1087, 1122, 1155, 1156, 1167, 1176, 1177, 1187, 1188, 1262, 1273, 1277, 1363, 1372, 1377, 1436, 1437, 1438, 1439, 1440, 1460, 1462, 1469, 1470, 1474, 1475, 1480, 1486, 1493, 1518, 1519, 1527, 1530, 1531, 1532, 1533, 1534, 1541, 1559, 1560, 1576, 1598, 1605, 1606, 1616, 1628, 1629, 1633, 1645, 1669, 1677, 1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1751, 1752, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1827, 1828, 1922, 1923, 1924, 1972, 1973, 2014, 2017, 2024, 2029, 2030, 2034, 2035, 2036, 2042, 2043, 2046, 2049, 2050, 2051, 2052, 2054, 2057, 2059, 2060, 2061, 2065, 2067, 2070, 2072, 2075, 2077, 2078, 2082, 2089, 2092, 2095, 2098, 2100, 2101, 2103, 2104, 2111, 2113, 2114, 2117], "those": [1, 2, 4, 5, 14, 23, 24, 28, 30, 32, 33, 34, 35, 52, 55, 60, 63, 64, 83, 683, 827, 923, 1023, 1033, 1097, 1099, 1178, 1188, 1201, 1289, 1331, 1337, 1345, 1385, 1431, 1439, 1440, 1460, 1462, 1470, 1486, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1580, 1605, 1606, 1616, 1625, 1645, 1669, 1677, 1712, 1713, 1779, 1798, 1928, 1950, 1977, 1990, 2016, 2034, 2035, 2036, 2042, 2043, 2046, 2049, 2050, 2052, 2055, 2057, 2061, 2062, 2065, 2067, 2069, 2075, 2077, 2079, 2098, 2101, 2102, 2103, 2104, 2106, 2107, 2110, 2113], "python": [1, 3, 4, 9, 12, 14, 15, 18, 19, 23, 32, 34, 35, 36, 37, 40, 41, 44, 45, 47, 48, 49, 53, 55, 64, 66, 74, 75, 354, 591, 627, 683, 784, 884, 912, 913, 914, 915, 916, 917, 919, 923, 924, 962, 963, 975, 976, 978, 990, 1045, 1051, 1052, 1054, 1104, 1109, 1127, 1129, 1157, 1163, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1249, 1271, 1273, 1274, 1276, 1277, 1278, 1284, 1285, 1289, 1345, 1528, 1529, 1537, 1538, 1685, 1717, 1821, 1843, 1847, 1867, 1868, 1903, 1968, 1969, 1977, 1996, 2012, 2023, 2025, 2031, 2032, 2033, 2035, 2043, 2044, 2045, 2046, 2048, 2050, 2051, 2053, 2056, 2059, 2062, 2063, 2064, 2065, 2070, 2072, 2076, 2077, 2078, 2079, 2085, 2088, 2089, 2094, 2095, 2097, 2099, 2100, 2102, 2103, 2104, 2105, 2106, 2107, 2109, 2110, 2113, 2114, 2115], "report": [1, 3, 4, 18, 28, 40, 48, 63, 64, 86, 87, 89, 1011, 1065, 1170, 1171, 1173, 1186, 1982, 2042, 2046, 2082, 2104, 2107, 2110, 2113, 2115], "runtim": [1, 3, 5, 14, 17, 19, 20, 28, 30, 33, 34, 37, 52, 53, 64, 66, 76, 77, 830, 882, 936, 1107, 1109, 1188, 1198, 1204, 1212, 1237, 1273, 1279, 1289, 1527, 1719, 1720, 1780, 1823, 1965, 1978, 2017, 2034, 2035, 2043, 2046, 2049, 2059, 2064, 2065, 2066, 2067, 2072, 2094, 2100, 2101, 2104, 2109, 2116], "note": [1, 2, 3, 5, 8, 11, 12, 14, 15, 18, 22, 23, 24, 28, 30, 32, 33, 34, 35, 36, 37, 45, 47, 50, 52, 53, 55, 59, 61, 63, 64, 66, 74, 75, 86, 87, 88, 89, 152, 260, 379, 488, 490, 501, 502, 515, 517, 519, 738, 744, 745, 746, 794, 796, 797, 798, 802, 845, 865, 869, 888, 897, 904, 907, 909, 912, 913, 914, 915, 916, 917, 918, 919, 924, 930, 932, 939, 960, 966, 976, 983, 990, 1008, 1084, 1092, 1097, 1109, 1130, 1160, 1163, 1178, 1184, 1186, 1214, 1227, 1260, 1270, 1271, 1273, 1274, 1280, 1346, 1368, 1389, 1390, 1410, 1436, 1437, 1438, 1439, 1440, 1446, 1457, 1458, 1459, 1460, 1462, 1469, 1470, 1474, 1478, 1480, 1486, 1492, 1493, 1497, 1518, 1519, 1527, 1528, 1530, 1531, 1532, 1533, 1534, 1537, 1538, 1541, 1543, 1550, 1559, 1560, 1567, 1571, 1576, 1580, 1588, 1605, 1606, 1616, 1624, 1625, 1633, 1644, 1645, 1669, 1672, 1677, 1678, 1692, 1707, 1717, 
1718, 1724, 1725, 1772, 1778, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1801, 1803, 1809, 1811, 1827, 1834, 1909, 1910, 1911, 1912, 1914, 1915, 1920, 1924, 1928, 1950, 1965, 1966, 1968, 1977, 2012, 2014, 2015, 2018, 2024, 2029, 2033, 2036, 2037, 2043, 2044, 2045, 2048, 2049, 2050, 2054, 2055, 2056, 2057, 2059, 2060, 2061, 2062, 2065, 2067, 2069, 2070, 2071, 2073, 2075, 2076, 2078, 2079, 2082, 2084, 2086, 2087, 2090, 2095, 2100, 2101, 2102, 2105, 2111, 2112, 2113, 2116, 2118], "propag": [1, 5, 30, 31, 33, 35, 37, 47, 64, 66, 75, 515, 689, 692, 693, 694, 698, 699, 700, 701, 769, 770, 771, 772, 782, 785, 786, 788, 789, 790, 841, 843, 944, 1155, 1156, 1186, 1197, 1210, 1418, 1724, 1725, 1909, 2013, 2033, 2034, 2042, 2046, 2049, 2054, 2056, 2077, 2088, 2092, 2093, 2100, 2103, 2104, 2112], "async": [1, 28, 29, 30, 63, 605, 1717, 2018, 2046, 2056, 2084, 2101], "task": [1, 3, 7, 24, 56, 61, 1276, 1292, 1489, 1490, 1491, 2017, 2045, 2056, 2057, 2059, 2063, 2101, 2104, 2116], "cuda": [1, 3, 4, 5, 14, 16, 19, 22, 23, 24, 28, 30, 32, 34, 55, 62, 90, 152, 291, 315, 323, 337, 342, 488, 517, 519, 526, 582, 869, 881, 883, 897, 918, 939, 945, 947, 954, 976, 981, 1009, 1010, 1011, 1012, 1014, 1015, 1030, 1032, 1043, 1110, 1111, 1112, 1122, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1161, 1164, 1222, 1231, 1232, 1273, 1293, 1295, 1303, 1304, 1305, 1309, 1310, 1311, 1312, 1314, 1315, 1316, 1317, 1319, 1320, 1321, 1322, 1328, 1331, 1334, 1335, 1337, 1338, 1344, 1345, 1360, 1363, 1379, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1469, 1497, 1527, 1543, 1567, 1587, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1625, 1633, 1644, 1672, 1685, 1704, 1705, 1706, 1707, 1717, 1719, 1720, 1722, 1723, 1724, 1725, 1771, 1773, 1776, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1827, 1836, 1838, 1840, 1842, 1843, 1867, 1871, 1876, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1906, 1910, 1911, 1912, 1913, 1914, 1915, 1928, 1943, 1945, 1954, 1956, 1961, 1965, 2010, 2013, 2015, 2021, 2025, 2034, 2036, 2042, 2049, 2050, 2052, 2053, 2057, 2060, 2067, 2068, 2069, 2071, 2076, 2077, 2082, 2084, 2085, 2088, 2089, 2091, 2094, 2095, 2102, 2103, 2106, 2110, 2113, 2116, 2117], "cudaev": 1, "approxim": [1, 3, 24, 32, 47, 64, 1174, 1227, 1337, 1346, 1431, 1476, 1541, 1557, 1564, 1631, 1677, 1688, 1732, 1781, 1798, 1817, 1871, 1929, 1950, 2015, 2017, 2046, 2048, 2049, 2054, 2067, 2108, 2118], "4u": 1, "xpu": [1, 1222, 1273, 1527, 1982, 1983, 1984, 1987, 1989, 2013, 2021, 2042, 2053, 2071, 2085, 2106], "privateuseon": 1, "shape": [1, 5, 8, 11, 12, 19, 24, 28, 33, 34, 35, 53, 55, 57, 59, 61, 64, 65, 66, 67, 68, 71, 72, 74, 78, 99, 152, 173, 193, 210, 220, 402, 404, 447, 449, 451, 473, 499, 500, 515, 517, 519, 546, 568, 619, 683, 688, 690, 691, 700, 738, 748, 749, 759, 761, 763, 767, 769, 770, 771, 772, 775, 776, 777, 784, 799, 882, 888, 904, 906, 909, 913, 915, 916, 917, 936, 946, 947, 949, 952, 957, 959, 963, 964, 968, 969, 976, 990, 993, 1024, 1099, 1100, 1101, 1103, 1104, 1109, 1110, 1112, 1114, 1129, 1130, 1131, 1132, 1140, 1141, 1142, 1148, 1154, 1155, 1156, 1157, 1164, 1171, 1172, 1176, 1177, 1178, 1188, 1189, 1204, 1210, 1214, 1216, 1230, 1235, 1236, 1240, 1248, 1250, 1264, 1270, 1287, 1289, 1290, 1293, 1298, 1299, 1303, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 
1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1362, 1363, 1367, 1375, 1380, 1381, 1412, 1413, 1424, 1427, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1539, 1540, 1541, 1542, 1543, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1588, 1589, 1590, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1600, 1601, 1602, 1604, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1624, 1625, 1629, 1633, 1635, 1644, 1645, 1650, 1658, 1659, 1660, 1670, 1671, 1674, 1675, 1676, 1678, 1685, 1704, 1707, 1716, 1717, 1719, 1720, 1731, 1734, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1773, 1776, 1778, 1780, 1815, 1824, 1828, 1832, 1833, 1836, 1838, 1839, 1840, 1847, 1849, 1850, 1854, 1863, 1866, 1890, 1905, 1906, 1908, 1911, 1913, 1919, 1924, 1925, 1928, 1939, 1946, 1950, 1960, 1961, 1962, 1963, 1977, 1980, 2010, 2014, 2015, 2017, 2021, 2034, 2035, 2036, 2043, 2044, 2046, 2049, 2050, 2051, 2065, 2068, 2071, 2074, 2077, 2082, 2083, 2087, 2088, 2089, 2091, 2094, 2102, 2104, 2108, 2111, 2112, 2113], "about": [1, 8, 9, 12, 17, 23, 28, 29, 30, 32, 33, 37, 40, 44, 46, 47, 48, 52, 56, 60, 64, 88, 257, 488, 500, 620, 923, 924, 930, 981, 990, 1016, 1017, 1033, 1057, 1059, 1060, 1061, 1063, 1064, 1065, 1066, 1073, 1074, 1075, 1167, 1187, 1188, 1198, 1210, 1303, 1310, 1470, 1542, 1716, 1758, 1794, 1798, 1809, 1870, 1996, 2012, 2014, 2016, 2017, 2021, 2024, 2045, 2046, 2048, 2049, 2051, 2052, 2056, 2059, 2060, 2062, 2064, 2070, 2071, 2072, 2075, 2078, 2079, 2082, 2086, 2088, 2099, 2100, 2101, 2102, 2104, 2109, 2113], "dimens": [1, 11, 23, 28, 34, 35, 52, 55, 60, 61, 64, 66, 71, 75, 78, 86, 220, 234, 235, 256, 262, 315, 317, 319, 323, 435, 436, 474, 475, 476, 495, 515, 517, 519, 539, 545, 547, 548, 562, 585, 586, 587, 589, 590, 609, 619, 689, 695, 696, 698, 699, 700, 702, 738, 761, 782, 784, 788, 878, 879, 880, 881, 890, 891, 892, 896, 909, 910, 918, 955, 963, 966, 967, 968, 969, 970, 1008, 1023, 1025, 1088, 1089, 1090, 1091, 1092, 1097, 1098, 1099, 1100, 1101, 1107, 1109, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1148, 1178, 1181, 1182, 1184, 1187, 1192, 1198, 1214, 1227, 1236, 1237, 1248, 1250, 1270, 1294, 1295, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1353, 1354, 1361, 1364, 1368, 1371, 1373, 1374, 1375, 1376, 1379, 1381, 1396, 1418, 1419, 1420, 1421, 1422, 1423, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1448, 1449, 1450, 1455, 1456, 1458, 1459, 1460, 1461, 1462, 1463, 1466, 1468, 1469, 1470, 1472, 1473, 1476, 1477, 1480, 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489, 
1490, 1491, 1492, 1493, 1495, 1496, 1497, 1499, 1507, 1508, 1513, 1514, 1515, 1516, 1517, 1518, 1521, 1522, 1526, 1533, 1534, 1535, 1536, 1539, 1540, 1541, 1542, 1546, 1547, 1548, 1550, 1555, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1573, 1575, 1576, 1577, 1578, 1579, 1598, 1604, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1632, 1633, 1634, 1635, 1644, 1647, 1650, 1651, 1652, 1670, 1671, 1672, 1685, 1691, 1692, 1704, 1716, 1724, 1725, 1731, 1732, 1733, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1752, 1753, 1759, 1760, 1762, 1764, 1766, 1769, 1771, 1772, 1815, 1818, 1825, 1827, 1828, 1829, 1834, 1848, 1849, 1850, 1854, 1863, 1865, 1866, 1875, 1896, 1900, 1904, 1907, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1919, 1921, 1922, 1923, 1924, 1927, 1928, 1938, 1940, 1944, 1945, 1946, 1947, 1949, 1950, 1952, 1953, 1954, 1955, 1956, 1959, 1960, 1961, 1962, 1963, 1964, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 2013, 2015, 2016, 2017, 2036, 2041, 2044, 2046, 2050, 2051, 2054, 2060, 2065, 2072, 2081, 2082, 2083, 2085, 2087, 2088, 2095, 2100, 2101, 2108], "collect": [1, 3, 7, 23, 29, 30, 32, 37, 47, 55, 63, 64, 802, 803, 827, 865, 866, 964, 1047, 1110, 1236, 1375, 1717, 1748, 1776, 1836, 1840, 2010, 2013, 2015, 2016, 2017, 2021, 2030, 2046, 2048, 2050, 2069, 2071, 2072, 2075, 2079, 2082, 2087, 2089, 2104, 2110, 2112, 2113, 2115, 2117], "further": [1, 4, 9, 12, 14, 19, 24, 28, 30, 55, 63, 66, 76, 77, 488, 1188, 1217, 1277, 1313, 1431, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1717, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1815, 2017, 2024, 2052, 2057, 2059, 2070, 2071, 2079, 2087, 2093, 2097, 2100, 2108, 2111, 2113], "group": [1, 3, 9, 23, 24, 29, 30, 32, 33, 37, 44, 47, 48, 50, 51, 55, 58, 64, 612, 683, 715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 741, 742, 743, 744, 745, 746, 775, 776, 777, 884, 936, 1447, 1454, 1455, 1456, 1457, 1458, 1459, 1481, 1503, 1504, 1505, 1506, 1507, 1508, 1567, 1608, 1609, 1610, 1611, 1612, 1613, 1634, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1962, 2012, 2013, 2015, 2017, 2033, 2041, 2046, 2048, 2052, 2067, 2069, 2070, 2071, 2077, 2087, 2099, 2101, 2104, 2108, 2113], "prof": [1, 44, 939, 2071, 2104, 2111, 2113], "key_averag": [1, 939, 2071], "group_by_input_shap": [1, 936, 2071], "skew": [1, 3, 4, 1325, 1731], "neglig": [1, 1286, 1871], "bottom": [1, 55, 1437, 1633, 2024, 2111], "But": [1, 7, 55, 63, 488, 1289, 1319, 1717, 2043, 2049, 2051, 2054, 2061, 2070, 2082, 2086, 2103, 2104, 2107, 2114], "total": [1, 3, 4, 7, 20, 23, 24, 28, 33, 44, 46, 47, 48, 55, 881, 937, 939, 1025, 1060, 1065, 1079, 1163, 1235, 1236, 1384, 1446, 1473, 1486, 1493, 1518, 1533, 1579, 1671, 1717, 1722, 1771, 1773, 1775, 1803, 1809, 1849, 1875, 1924, 2012, 2016, 2030, 2046, 2052, 2055, 2059, 2069, 2082], "artifici": [1, 2082], "increas": [1, 3, 7, 24, 28, 35, 44, 55, 683, 858, 904, 906, 909, 960, 1033, 1065, 1109, 1235, 1236, 1431, 1440, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1539, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1675, 1676, 1685, 1796, 1802, 1803, 1807, 1809, 1811, 1863, 1871, 1950, 1971, 1990, 2015, 2023, 2031, 2045, 2046, 2055, 2059, 2073, 2082, 2098, 2104, 2107], "estim": [1, 3, 23, 35, 994, 998, 1227, 1270, 1441, 1442, 1443, 1476, 1480, 1481, 1489, 1490, 1491, 1499, 1567, 1631, 1732, 1795, 1891, 1929, 2071], 
"flop": [1, 2071], "hardwar": [1, 8, 863, 865, 866, 1309, 1310, 1337, 1965, 2046, 2059, 2061, 2073, 2082, 2104, 2106, 2113], "matrix": [1, 2, 24, 28, 35, 193, 210, 689, 692, 693, 694, 763, 944, 946, 955, 956, 964, 966, 967, 968, 969, 976, 994, 998, 1092, 1096, 1097, 1099, 1109, 1178, 1217, 1223, 1238, 1294, 1303, 1304, 1305, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1318, 1319, 1320, 1321, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1339, 1341, 1343, 1346, 1354, 1363, 1365, 1368, 1378, 1413, 1415, 1469, 1470, 1478, 1497, 1579, 1588, 1589, 1590, 1624, 1625, 1685, 1731, 1737, 1766, 1772, 1815, 1816, 1817, 1827, 1871, 1898, 1901, 1905, 1906, 1908, 1910, 1911, 1912, 1913, 1914, 1915, 1920, 1928, 1929, 1945, 1948, 1950, 1952, 1953, 1954, 1955, 1956, 1971, 1977, 2013, 2017, 2034, 2035, 2036, 2041, 2045, 2046, 2049, 2054, 2057, 2060, 2071, 2082, 2087, 2088, 2104], "2d": [1, 30, 35, 55, 589, 590, 703, 706, 709, 738, 742, 745, 769, 771, 776, 782, 786, 788, 789, 790, 994, 998, 1097, 1109, 1329, 1330, 1429, 1433, 1437, 1440, 1441, 1442, 1455, 1458, 1462, 1466, 1470, 1474, 1489, 1490, 1495, 1521, 1530, 1532, 1533, 1534, 1580, 1581, 1582, 1593, 1596, 1598, 1601, 1606, 1609, 1612, 1620, 1625, 1628, 1655, 1659, 1669, 1672, 1732, 1766, 1833, 1908, 1913, 1952, 2027, 2041, 2049, 2071, 2072, 2082], "alloc": [1, 4, 11, 18, 20, 24, 30, 33, 35, 37, 47, 55, 256, 333, 447, 448, 449, 450, 451, 488, 1010, 1012, 1014, 1016, 1017, 1019, 1032, 1033, 1034, 1057, 1059, 1061, 1063, 1064, 1065, 1066, 1074, 1075, 1079, 1110, 1112, 1161, 1182, 1188, 1191, 1194, 1197, 1198, 1219, 1382, 1384, 1385, 1393, 1836, 1840, 1842, 1867, 1943, 1983, 1989, 1990, 2013, 2033, 2036, 2043, 2048, 2052, 2055, 2059, 2065, 2071, 2085, 2097, 2098, 2100, 2103, 2111, 2113], "dealloc": [1, 64, 488, 1163, 1186, 2033, 2046, 2051, 2055, 2071], "line": [1, 4, 18, 19, 28, 37, 52, 53, 64, 683, 919, 936, 1109, 1167, 1273, 1284, 1527, 1598, 1633, 1875, 1903, 2014, 2017, 2018, 2025, 2036, 2044, 2049, 2054, 2055, 2061, 2063, 2067, 2071, 2082, 2097, 2101, 2102, 2104, 2106, 2107, 2110, 2113, 2115], "hierarchi": [1, 30, 33, 52, 64, 843, 1186, 1277, 1784, 1785, 1797, 2018, 2029, 2049, 2071, 2072, 2100], "callstack": [1, 28, 2071], "A": [1, 2, 3, 5, 7, 8, 9, 12, 14, 18, 23, 24, 27, 28, 29, 30, 32, 33, 34, 35, 37, 44, 47, 48, 50, 52, 53, 55, 56, 61, 63, 66, 67, 68, 71, 87, 90, 562, 582, 585, 586, 587, 596, 627, 683, 695, 700, 715, 716, 717, 718, 719, 720, 721, 722, 723, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 748, 749, 759, 764, 765, 766, 767, 768, 793, 795, 817, 862, 863, 865, 866, 938, 945, 954, 955, 957, 962, 967, 968, 969, 975, 976, 978, 981, 982, 985, 990, 994, 998, 1011, 1012, 1014, 1022, 1024, 1025, 1094, 1109, 1114, 1122, 1123, 1124, 1136, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1177, 1178, 1184, 1188, 1190, 1216, 1227, 1230, 1231, 1232, 1236, 1263, 1264, 1265, 1266, 1269, 1271, 1273, 1276, 1281, 1284, 1286, 1289, 1290, 1294, 1298, 1303, 1304, 1305, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1343, 1346, 1354, 1362, 1363, 1364, 1365, 1369, 1424, 1440, 1444, 1446, 1462, 1465, 1466, 1467, 1469, 1470, 1471, 1479, 1480, 1488, 1498, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1517, 1526, 1527, 1533, 1556, 1561, 1563, 1571, 1576, 1577, 1587, 1591, 1598, 1604, 1616, 1635, 1645, 1652, 1664, 1685, 
1691, 1692, 1707, 1718, 1719, 1720, 1726, 1728, 1731, 1734, 1737, 1759, 1760, 1763, 1778, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1792, 1794, 1795, 1796, 1797, 1798, 1802, 1803, 1805, 1808, 1809, 1811, 1817, 1827, 1829, 1830, 1831, 1832, 1833, 1844, 1850, 1856, 1859, 1891, 1900, 1903, 1904, 1907, 1919, 1923, 1924, 1928, 1929, 1947, 1952, 1953, 1954, 1955, 1956, 1960, 1961, 1962, 1964, 1965, 1973, 1977, 1980, 1982, 1983, 2012, 2014, 2015, 2016, 2017, 2021, 2024, 2027, 2034, 2035, 2036, 2041, 2042, 2045, 2046, 2049, 2050, 2051, 2052, 2059, 2060, 2062, 2067, 2069, 2070, 2071, 2072, 2077, 2078, 2079, 2082, 2083, 2084, 2085, 2087, 2088, 2093, 2098, 2099, 2102, 2103, 2104, 2106, 2108, 2111, 2112, 2113, 2114, 2115, 2118], "aten": [1, 3, 12, 14, 18, 52, 53, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 82, 84, 87, 1051, 1167, 1778, 1779, 1903, 2014, 2015, 2021, 2045, 2049, 2063, 2065, 2068, 2071, 2072, 2082, 2094, 2099, 2105, 2107, 2111, 2113], "torchscript": [1, 3, 8, 32, 52, 53, 64, 619, 1271, 1274, 1276, 1277, 1278, 1280, 1285, 1289, 1291, 1779, 2013, 2018, 2062, 2071, 2077, 2115], "eager": [1, 8, 52, 976, 1084, 1182, 1187, 1271, 1274, 1285, 1410, 1874, 2017, 2021, 2046, 2052, 2053, 2071, 2073, 2075, 2088, 2098, 2104, 2105, 2106, 2110, 2113], "experiment": [1, 2, 24, 28, 30, 32, 33, 34, 52, 53, 55, 64, 66, 69, 74, 75, 78, 235, 912, 914, 918, 976, 985, 1083, 1167, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1304, 1315, 1317, 1318, 1322, 1335, 1717, 1798, 2013, 2014, 2016, 2021, 2025, 2034, 2035, 2046, 2048, 2061, 2063, 2065, 2067, 2070, 2071, 2100, 2103, 2113], "kineto": [1, 932, 2071], "_experimentalconfig": [1, 2071], "librari": [1, 2, 3, 4, 8, 9, 11, 14, 15, 17, 19, 20, 23, 28, 36, 45, 50, 52, 56, 59, 60, 61, 64, 683, 932, 996, 1012, 1035, 1039, 1051, 1160, 1286, 1317, 1363, 1895, 2030, 2045, 2046, 2048, 2049, 2050, 2051, 2056, 2057, 2059, 2060, 2063, 2067, 2070, 2071, 2072, 2077, 2090, 2095, 2101, 2104, 2114, 2116], "100": [1, 19, 23, 28, 32, 35, 64, 66, 74, 75, 80, 302, 741, 742, 745, 746, 932, 1234, 1273, 1285, 1291, 1329, 1344, 1351, 1360, 1375, 1412, 1431, 1439, 1440, 1441, 1442, 1443, 1455, 1456, 1458, 1459, 1461, 1462, 1489, 1490, 1491, 1527, 1534, 1536, 1567, 1576, 1615, 1616, 1669, 1787, 1799, 1800, 1805, 1806, 1807, 1808, 1810, 1812, 1813, 1829, 1884, 1963, 2015, 2017, 2033, 2045, 2046, 2067, 2069, 2073, 2079, 2081, 2082, 2087, 2099, 2102, 2107], "realli": [1, 7, 64, 1186, 1188, 2017, 2043, 2070, 2103], "y": [1, 11, 14, 23, 35, 44, 52, 53, 55, 60, 61, 64, 66, 69, 71, 72, 73, 74, 75, 76, 77, 78, 488, 619, 622, 784, 888, 890, 891, 892, 902, 903, 904, 907, 909, 910, 912, 913, 914, 915, 916, 917, 919, 920, 939, 958, 964, 991, 998, 1051, 1052, 1092, 1103, 1109, 1113, 1166, 1168, 1171, 1172, 1173, 1177, 1178, 1213, 1280, 1285, 1289, 1342, 1351, 1352, 1375, 1439, 1440, 1441, 1442, 1443, 1444, 1460, 1462, 1481, 1486, 1487, 1489, 1490, 1491, 1493, 1499, 1514, 1518, 1519, 1530, 1531, 1532, 1534, 1536, 1542, 1559, 1560, 1567, 1570, 1576, 1577, 1604, 1633, 1650, 1674, 1707, 1716, 1737, 1770, 1831, 1849, 1851, 1852, 1905, 1919, 1946, 1950, 1951, 1977, 1980, 2014, 2015, 2016, 2017, 2021, 2026, 2034, 2035, 2041, 2043, 2044, 2045, 2046, 2050, 2054, 2055, 2058, 2065, 2067, 2070, 2072, 2077, 2079, 2083, 2087, 2091, 2092, 2093, 2098, 2099, 2100, 2101, 2104, 2112, 2113], "column": [1, 3, 24, 28, 34, 193, 262, 586, 589, 939, 974, 994, 998, 1092, 1122, 1150, 1151, 1239, 1309, 1310, 1313, 1329, 1332, 1337, 1341, 1346, 1413, 
1536, 1579, 1624, 1625, 1731, 1817, 1827, 1908, 1910, 1911, 1912, 1914, 1915, 1928, 1929, 1950, 1954, 1956, 1971, 2041, 2054, 2082, 2087], "were": [1, 2, 3, 18, 19, 28, 37, 47, 51, 55, 60, 63, 64, 152, 323, 337, 473, 488, 858, 893, 895, 897, 909, 910, 918, 939, 1054, 1167, 1178, 1281, 1345, 1598, 1717, 1722, 1744, 1761, 1863, 1867, 1939, 1946, 1977, 2016, 2024, 2042, 2046, 2049, 2062, 2067, 2070, 2098, 2101, 2103, 2104, 2109, 2112, 2113], "remov": [1, 3, 23, 24, 28, 30, 35, 48, 51, 55, 64, 489, 490, 515, 548, 559, 700, 800, 818, 819, 820, 863, 928, 929, 932, 939, 943, 966, 967, 1054, 1167, 1181, 1218, 1270, 1273, 1277, 1363, 1364, 1368, 1448, 1449, 1450, 1527, 1528, 1537, 1635, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1732, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1751, 1752, 1753, 1754, 1756, 1757, 1767, 1769, 1772, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1827, 1843, 1865, 1919, 1928, 1952, 1959, 1961, 2012, 2013, 2027, 2037, 2043, 2049, 2057, 2062, 2067, 2068, 2070, 2079, 2084, 2089, 2101, 2104, 2106, 2109, 2112], "breviti": [1, 64, 939, 998, 2067], "print": [1, 3, 5, 12, 18, 23, 25, 28, 33, 37, 39, 44, 45, 48, 52, 53, 55, 60, 63, 723, 731, 732, 737, 748, 749, 759, 767, 926, 928, 929, 939, 1166, 1167, 1176, 1188, 1273, 1277, 1280, 1281, 1285, 1363, 1444, 1488, 1514, 1527, 1539, 1540, 1615, 1672, 1675, 1676, 1737, 1747, 1748, 1749, 1750, 1753, 1763, 1767, 1779, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1813, 1875, 1969, 2012, 2014, 2015, 2025, 2029, 2043, 2045, 2046, 2049, 2053, 2057, 2058, 2065, 2066, 2067, 2068, 2070, 2071, 2077, 2087, 2088, 2092, 2093, 2098, 2099, 2101, 2102, 2104, 2106, 2113], "tabl": [1, 3, 28, 64, 939, 978, 1469, 1624, 2015, 2017, 2049, 2069, 2070, 2071, 2072, 2077, 2082, 2089, 2103, 2105, 2107, 2110, 2113], "sort_bi": [1, 939, 2071], "self_cpu_time_tot": [1, 939, 2071], "avg": [1, 28, 939, 1782], "mul": [1, 18, 52, 64, 66, 69, 72, 74, 76, 77, 79, 422, 750, 751, 760, 939, 1250, 1414, 2014, 2015, 2017, 2034, 2036, 2049, 2067, 2068, 2074, 2078, 2082, 2085, 2092, 2099, 2100, 2101, 2108, 2112], "32": [1, 2, 24, 28, 52, 90, 749, 1163, 1270, 1283, 1363, 1437, 1465, 1466, 1467, 1471, 1472, 1474, 1475, 1495, 1515, 1521, 1532, 1571, 1572, 1573, 1574, 1575, 1587, 1598, 1628, 1629, 1635, 1685, 2035, 2036, 2057, 2061, 2065, 2067, 2075, 2082, 2085, 2087, 2088, 2101, 2102, 2107], "048m": 1, "200": [1, 32, 1273, 1351, 1527, 1829, 2017, 2082, 2101, 2113], "27": [1, 619, 1341, 1802, 1824, 1971, 2067], "041m": 1, "powbackward0": [1, 939], "9": [1, 7, 23, 24, 28, 36, 64, 66, 71, 315, 317, 319, 323, 403, 404, 473, 515, 562, 583, 585, 700, 748, 749, 858, 960, 966, 970, 974, 998, 1091, 1092, 1107, 1110, 1131, 1132, 1141, 1142, 1152, 1155, 1156, 1159, 1160, 1227, 1237, 1303, 1305, 1326, 1328, 1330, 1341, 1343, 1422, 1423, 1429, 1430, 1433, 1434, 1447, 1449, 1469, 1470, 1523, 1524, 1539, 1540, 1550, 1553, 1624, 1625, 1672, 1675, 1676, 1772, 1773, 1782, 1784, 1785, 1786, 1787, 1788, 1794, 1797, 1798, 1799, 1803, 1809, 1811, 1812, 1816, 1820, 1856, 1863, 1872, 1875, 1885, 1890, 1900, 1908, 1916, 1943, 1944, 1948, 1950, 1959, 1971, 1978, 2014, 2018, 2024, 2051, 2053, 2057, 2062, 2063, 2067, 2068, 2069, 2070, 2077, 2082, 2083, 2085, 2089, 2101], "727m": 1, "55": [1, 1480, 2067], "483m": 1, "accumulategrad": [1, 939, 2043], "148m": 1, "graphroot": [1, 939], "691": 1, "816u": 1, "emit": [1, 14, 39, 44, 64, 683, 1188, 1289, 1877, 2017, 2023, 2065, 2076, 2082], "nvtx": [1, 4, 2013], "program": [1, 
3, 4, 8, 12, 18, 23, 27, 28, 39, 48, 51, 52, 53, 55, 64, 619, 932, 991, 992, 1057, 1059, 1167, 1201, 1871, 2014, 2016, 2018, 2031, 2043, 2046, 2051, 2056, 2058, 2059, 2061, 2062, 2063, 2065, 2072, 2087, 2094, 2098, 2100, 2101, 2102, 2103, 2104, 2106, 2109, 2111, 2113, 2115], "off": [1, 5, 7, 8, 14, 19, 27, 28, 37, 55, 64, 83, 920, 921, 945, 954, 1070, 1167, 1231, 1232, 1270, 1289, 1337, 1436, 1437, 1438, 1440, 1520, 1521, 1522, 1633, 1672, 2045, 2046, 2048, 2051, 2056, 2060, 2061, 2071, 2072, 2073, 2077, 2078, 2100, 2103, 2110], "o": [1, 24, 28, 29, 30, 35, 37, 48, 50, 51, 52, 64, 1179, 1183, 1185, 1273, 1345, 1389, 1390, 1391, 1498, 1527, 1533, 1717, 1859, 2012, 2018, 2033, 2043, 2046, 2048, 2051, 2053, 2063, 2070, 2077, 2095], "trace_nam": 1, "regular": [1, 3, 4, 28, 37, 48, 55, 58, 64, 978, 1051, 1052, 1166, 1188, 1202, 1321, 1322, 1446, 1464, 1465, 1466, 1467, 1471, 1512, 1526, 1527, 1528, 1529, 1537, 1538, 1626, 1664, 1707, 1719, 1720, 1767, 1785, 1794, 2017, 2021, 2035, 2036, 2046, 2049, 2050, 2056, 2057, 2065, 2067, 2070, 2072, 2075, 2077, 2082, 2083, 2091, 2093], "command": [1, 4, 28, 37, 45, 48, 64, 2046, 2053, 2055, 2059, 2063, 2071, 2078, 2095, 2107, 2110], "unfortun": [1, 9, 23, 52, 1717, 2043, 2049, 2102, 2103], "wai": [1, 3, 5, 7, 8, 9, 14, 23, 24, 28, 30, 32, 35, 40, 44, 52, 55, 57, 58, 63, 64, 152, 582, 683, 799, 822, 825, 828, 865, 866, 894, 897, 909, 910, 913, 915, 916, 917, 918, 923, 930, 1130, 1132, 1167, 1188, 1201, 1202, 1227, 1273, 1303, 1304, 1319, 1334, 1439, 1470, 1489, 1490, 1491, 1497, 1509, 1510, 1511, 1527, 1556, 1573, 1575, 1625, 1678, 1712, 1713, 1717, 1735, 1765, 1798, 1802, 1809, 2014, 2016, 2017, 2023, 2024, 2025, 2030, 2033, 2034, 2035, 2043, 2045, 2046, 2049, 2050, 2051, 2052, 2054, 2057, 2059, 2060, 2063, 2065, 2067, 2069, 2070, 2072, 2077, 2078, 2082, 2086, 2088, 2093, 2098, 2099, 2100, 2101, 2103, 2104, 2111, 2112, 2113, 2115], "disk": [1, 19, 23, 30, 1345, 1859, 2043, 2057, 2070, 2087], "annot": [1, 33, 34, 40, 45, 64, 933, 1271, 1279, 1285, 2014, 2016, 2018, 2021, 2067, 2077, 2104, 2105, 2111], "wait": [1, 18, 28, 37, 45, 47, 63, 488, 827, 1007, 1011, 1012, 1014, 1085, 1197, 1276, 1386, 1389, 1390, 1395, 1411, 1811, 1982, 1983, 2009, 2015, 2017, 2026, 2033, 2045, 2046, 2048, 2068, 2071, 2077, 2104, 2111, 2115, 2117], "either": [1, 8, 9, 14, 17, 18, 19, 23, 24, 28, 30, 32, 34, 35, 37, 40, 45, 47, 48, 50, 52, 53, 55, 60, 63, 64, 156, 223, 315, 323, 515, 517, 619, 683, 735, 741, 742, 743, 748, 749, 759, 767, 782, 788, 794, 862, 894, 896, 904, 905, 906, 907, 908, 909, 910, 919, 923, 924, 943, 963, 976, 1022, 1109, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1163, 1167, 1196, 1227, 1250, 1266, 1273, 1289, 1329, 1344, 1345, 1360, 1373, 1375, 1429, 1430, 1433, 1434, 1437, 1438, 1439, 1440, 1454, 1455, 1456, 1458, 1459, 1460, 1462, 1470, 1480, 1486, 1493, 1495, 1496, 1518, 1519, 1521, 1522, 1527, 1530, 1531, 1532, 1533, 1534, 1541, 1543, 1545, 1559, 1560, 1575, 1576, 1578, 1580, 1581, 1582, 1605, 1606, 1616, 1644, 1645, 1669, 1677, 1704, 1711, 1717, 1750, 1772, 1784, 1785, 1787, 1794, 1797, 1809, 1824, 1826, 1868, 1871, 1924, 1928, 1980, 2012, 2014, 2016, 2017, 2019, 2021, 2023, 2035, 2041, 2043, 2044, 2046, 2049, 2050, 2051, 2054, 2056, 2057, 2059, 2061, 2062, 2065, 2067, 2069, 2070, 2072, 2078, 2079, 2082, 2083, 2086, 2089, 2100, 2105, 2118], "nvidia": [1, 14, 28, 1020, 1033, 1061, 1067, 1072, 1086, 1087, 1965, 2013, 2046, 2051, 2055, 2061, 2063, 2072, 2082, 2091, 2094, 
2110, 2111, 2113], "visual": [1, 64, 1375, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1520, 1521, 1522, 1579, 2013, 2046, 2057, 2063, 2065, 2071, 2087, 2104, 2111, 2113], "nvvp": 1, "timelin": [1, 4, 1389, 1390, 2013, 2071], "load_nvprof": 1, "load": [1, 11, 14, 15, 24, 30, 32, 33, 52, 55, 62, 417, 840, 866, 935, 976, 1010, 1273, 1277, 1284, 1289, 1470, 1527, 1707, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813, 1859, 2013, 2014, 2025, 2026, 2027, 2028, 2035, 2043, 2046, 2056, 2057, 2061, 2063, 2065, 2067, 2077, 2087, 2095, 2106, 2107, 2111, 2113], "repl": [1, 2106], "append": [1, 3, 30, 63, 64, 66, 71, 231, 256, 609, 764, 766, 768, 1054, 1099, 1101, 1368, 1479, 1498, 1529, 1538, 1543, 1545, 1556, 1717, 2015, 2016, 2017, 2043, 2045, 2046, 2059, 2063, 2067, 2068, 2082, 2087, 2112], "size": [1, 2, 3, 7, 11, 18, 20, 23, 24, 28, 30, 34, 35, 37, 47, 52, 53, 55, 58, 64, 66, 67, 72, 74, 75, 80, 140, 210, 244, 256, 257, 315, 317, 323, 341, 447, 448, 449, 451, 489, 495, 500, 501, 502, 515, 517, 519, 522, 525, 546, 547, 548, 568, 583, 585, 586, 587, 589, 590, 608, 609, 619, 620, 683, 693, 694, 696, 698, 699, 700, 702, 723, 731, 732, 738, 744, 745, 746, 748, 749, 759, 767, 769, 770, 771, 772, 782, 788, 789, 790, 799, 862, 869, 881, 882, 884, 896, 909, 910, 912, 913, 914, 915, 916, 917, 936, 944, 945, 947, 954, 956, 957, 958, 959, 960, 963, 967, 970, 976, 998, 1008, 1016, 1022, 1023, 1025, 1065, 1090, 1091, 1097, 1100, 1107, 1109, 1110, 1111, 1112, 1115, 1126, 1127, 1128, 1130, 1131, 1132, 1134, 1135, 1138, 1139, 1140, 1141, 1142, 1144, 1145, 1146, 1161, 1163, 1164, 1165, 1172, 1173, 1178, 1187, 1188, 1192, 1198, 1202, 1214, 1231, 1232, 1236, 1237, 1248, 1250, 1270, 1273, 1295, 1304, 1313, 1318, 1326, 1330, 1334, 1337, 1339, 1340, 1341, 1343, 1344, 1346, 1354, 1360, 1361, 1363, 1364, 1368, 1371, 1373, 1374, 1375, 1376, 1379, 1380, 1381, 1382, 1384, 1413, 1415, 1418, 1421, 1423, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1469, 1470, 1472, 1473, 1474, 1475, 1478, 1480, 1481, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1515, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1539, 1540, 1542, 1543, 1549, 1550, 1551, 1552, 1553, 1554, 1559, 1567, 1571, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1588, 1589, 1590, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1600, 1601, 1602, 1606, 1611, 1612, 1613, 1616, 1617, 1624, 1625, 1628, 1629, 1633, 1644, 1651, 1658, 1659, 1660, 1669, 1670, 1672, 1675, 1676, 1678, 1685, 1704, 1705, 1706, 1707, 1716, 1717, 1733, 1748, 1758, 1759, 1760, 1761, 1762, 1764, 1766, 1769, 1771, 1773, 1776, 1777, 1787, 1796, 1815, 1816, 1817, 1818, 1820, 1825, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1836, 1837, 1838, 1839, 1840, 1841, 1843, 1849, 1854, 1863, 1866, 1884, 1905, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1919, 1921, 1922, 1923, 1924, 1927, 1928, 1929, 1943, 1944, 1945, 1950, 1952, 1960, 1961, 1962, 1963, 1964, 1972, 1973, 1975, 1976, 1977, 1978, 2010, 2011, 2013, 2014, 2015, 2017, 2021, 2025, 2027, 2030, 2034, 2035, 2042, 2043, 2044, 2046, 2048, 2049, 2050, 2051, 2054, 2055, 2056, 2061, 2062, 2067, 2068, 2071, 2072, 
2074, 2077, 2082, 2084, 2085, 2087, 2088, 2095, 2098, 2100, 2101, 2102, 2103, 2104, 2107, 2108, 2111, 2113, 2115], "format": [1, 3, 20, 21, 24, 28, 30, 40, 50, 53, 64, 83, 157, 172, 174, 177, 180, 181, 182, 197, 208, 211, 242, 269, 299, 327, 333, 395, 501, 502, 527, 582, 585, 586, 587, 588, 589, 590, 625, 683, 738, 763, 795, 796, 973, 1054, 1069, 1109, 1110, 1111, 1165, 1181, 1188, 1192, 1270, 1273, 1287, 1316, 1446, 1470, 1478, 1497, 1527, 1543, 1575, 1717, 1724, 1725, 1759, 1761, 1762, 1777, 1779, 1837, 1839, 1841, 1859, 1901, 1905, 1910, 1911, 1912, 1913, 1914, 1915, 1924, 2011, 2012, 2014, 2017, 2018, 2023, 2037, 2046, 2049, 2057, 2064, 2065, 2067, 2068, 2071, 2072, 2077, 2078, 2082, 2085, 2087, 2089, 2111, 2113], "arg0": [1, 28, 2065], "arg1": [1, 28, 45, 46, 48, 53, 2065], "repres": [1, 8, 11, 23, 24, 28, 30, 33, 34, 35, 36, 37, 40, 41, 47, 50, 52, 53, 55, 64, 82, 84, 152, 235, 763, 795, 796, 799, 893, 909, 910, 978, 985, 994, 998, 1009, 1041, 1044, 1109, 1130, 1132, 1137, 1139, 1140, 1141, 1142, 1167, 1184, 1199, 1227, 1234, 1262, 1263, 1266, 1269, 1272, 1289, 1290, 1321, 1363, 1431, 1446, 1527, 1533, 1575, 1577, 1579, 1587, 1588, 1589, 1590, 1717, 1730, 1734, 1739, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1758, 1768, 1779, 1803, 1809, 1815, 1817, 1828, 1871, 1910, 1911, 1912, 1914, 1915, 1924, 1928, 1961, 1962, 1968, 1975, 1976, 1995, 2014, 2016, 2017, 2021, 2024, 2025, 2030, 2036, 2043, 2049, 2050, 2054, 2056, 2057, 2060, 2064, 2065, 2067, 2072, 2073, 2075, 2077, 2082, 2085, 2100, 2101, 2103, 2115, 2118], "order": [1, 3, 5, 28, 29, 30, 32, 33, 35, 48, 52, 55, 56, 61, 63, 64, 152, 193, 210, 235, 315, 317, 319, 323, 333, 404, 489, 490, 683, 795, 796, 805, 858, 880, 897, 899, 918, 928, 929, 943, 955, 966, 1012, 1054, 1065, 1097, 1100, 1101, 1109, 1127, 1129, 1130, 1136, 1148, 1149, 1150, 1151, 1168, 1178, 1186, 1227, 1273, 1293, 1304, 1309, 1310, 1311, 1312, 1319, 1326, 1329, 1330, 1337, 1338, 1341, 1343, 1346, 1375, 1381, 1396, 1413, 1431, 1440, 1446, 1463, 1527, 1528, 1537, 1555, 1556, 1598, 1606, 1633, 1658, 1659, 1660, 1685, 1707, 1717, 1734, 1744, 1758, 1759, 1760, 1761, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1809, 1815, 1818, 1828, 1863, 1866, 1890, 1900, 1928, 1947, 1954, 1956, 1961, 1965, 1971, 1977, 2012, 2013, 2014, 2015, 2017, 2018, 2021, 2033, 2034, 2035, 2036, 2041, 2044, 2046, 2048, 2049, 2050, 2052, 2054, 2057, 2060, 2062, 2065, 2067, 2069, 2070, 2072, 2074, 2077, 2078, 2079, 2082, 2083, 2085, 2089, 2093, 2095, 2098, 2099, 2100, 2101, 2104, 2107, 2111, 2113], "backend": [1, 14, 46, 51, 53, 55, 744, 745, 746, 795, 796, 836, 859, 860, 863, 865, 866, 912, 918, 976, 978, 985, 1034, 1065, 1109, 1167, 1187, 1192, 1287, 1310, 1317, 1319, 1337, 1389, 1390, 1391, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1586, 1591, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1633, 1672, 1685, 1717, 1779, 1780, 1871, 1966, 1968, 2013, 2015, 2016, 2027, 2031, 2032, 2045, 2046, 2049, 2060, 2061, 2067, 2073, 2075, 2082, 2102, 2104, 2105, 2106, 2107, 2108, 2110, 2120], "side": [1, 14, 28, 47, 50, 52, 53, 60, 64, 66, 69, 488, 771, 772, 775, 776, 777, 969, 1043, 1103, 1125, 1126, 1128, 1131, 1132, 1137, 1139, 1140, 1141, 1142, 1145, 1270, 1271, 1318, 1323, 1334, 1336, 1436, 1437, 1438, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1503, 1504, 1505, 1506, 1507, 1508, 1520, 1521, 1522, 1549, 1550, 1552, 1553, 1554, 1579, 1583, 1584, 1585, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1658, 1659, 
1660, 1672, 1803, 1832, 1833, 1863, 1924, 1952, 2012, 2014, 2015, 2017, 2042, 2043, 2046, 2049, 2054, 2077, 2095, 2098, 2111], "creation": [1, 2, 23, 28, 30, 64, 488, 748, 749, 759, 767, 827, 1011, 1167, 1717, 1718, 1739, 1982, 2014, 2018, 2030, 2033, 2043, 2046, 2077, 2079, 2082, 2088, 2100], "warmup": [1, 3, 1054, 2046, 2071, 2098, 2104, 2111], "correl": [1, 35, 48, 994, 1454, 1455, 1456, 1457, 1458, 1459, 1465, 1466, 1467, 1471], "view": [1, 7, 8, 11, 18, 23, 24, 30, 32, 37, 53, 55, 64, 66, 75, 81, 224, 256, 437, 499, 500, 501, 609, 620, 695, 700, 763, 882, 890, 891, 892, 903, 919, 943, 958, 970, 991, 992, 1099, 1100, 1107, 1148, 1149, 1150, 1151, 1167, 1237, 1273, 1329, 1340, 1390, 1447, 1470, 1478, 1497, 1527, 1543, 1579, 1580, 1581, 1582, 1625, 1671, 1717, 1722, 1818, 1850, 1854, 1855, 1859, 1865, 1866, 1896, 1916, 1927, 1939, 1944, 1948, 1960, 1975, 1976, 1978, 2013, 2014, 2015, 2033, 2035, 2036, 2044, 2049, 2052, 2065, 2068, 2074, 2077, 2079, 2082, 2084, 2085, 2088, 2103, 2108, 2110, 2112, 2113], "difficult": [1, 7, 9, 33, 52, 60, 978, 1178, 1977, 2103, 2104, 2109, 2111, 2113], "eas": [1, 64, 2045, 2049, 2055, 2100, 2112], "sequenc": [1, 23, 30, 32, 33, 34, 35, 52, 55, 152, 568, 738, 763, 817, 884, 897, 918, 960, 962, 963, 966, 974, 1014, 1021, 1022, 1025, 1094, 1108, 1110, 1178, 1235, 1236, 1239, 1289, 1329, 1375, 1431, 1441, 1446, 1454, 1463, 1470, 1478, 1497, 1533, 1543, 1556, 1571, 1572, 1573, 1574, 1575, 1588, 1625, 1685, 1734, 1737, 1744, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1773, 1776, 1780, 1795, 1799, 1834, 1836, 1840, 1863, 1921, 1924, 1963, 1977, 1979, 1983, 2010, 2014, 2015, 2016, 2021, 2036, 2046, 2051, 2052, 2065, 2067, 2070, 2077, 2081, 2088, 2089, 2098, 2100, 2101, 2102, 2104, 2112, 2113], "gener": [1, 2, 3, 7, 8, 14, 23, 24, 28, 33, 35, 38, 40, 41, 45, 48, 52, 55, 56, 57, 65, 66, 71, 75, 86, 155, 156, 175, 260, 288, 379, 423, 456, 483, 610, 683, 924, 946, 976, 1040, 1043, 1051, 1052, 1055, 1056, 1076, 1077, 1079, 1080, 1081, 1109, 1131, 1132, 1167, 1178, 1182, 1188, 1198, 1226, 1231, 1249, 1277, 1283, 1294, 1305, 1310, 1319, 1336, 1337, 1341, 1346, 1366, 1374, 1387, 1388, 1389, 1390, 1391, 1392, 1394, 1413, 1462, 1473, 1487, 1571, 1579, 1580, 1589, 1590, 1598, 1624, 1642, 1707, 1732, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1766, 1769, 1773, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1815, 1817, 1820, 1834, 1836, 1838, 1839, 1840, 1842, 1864, 1876, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1929, 1945, 1949, 1966, 1968, 1971, 1977, 1994, 2000, 2001, 2002, 2003, 2005, 2006, 2013, 2014, 2015, 2016, 2017, 2018, 2023, 2027, 2034, 2041, 2043, 2046, 2049, 2051, 2052, 2054, 2056, 2057, 2059, 2063, 2065, 2067, 2068, 2069, 2070, 2071, 2076, 2077, 2078, 2082, 2085, 2087, 2089, 2092, 2094, 2095, 2099, 2100, 2103, 2104, 2105, 2106, 2107, 2108, 2109, 2111, 2112, 2113, 2114], "seq": [1, 607, 763, 963, 975, 1375, 1478, 1497, 1533, 1543, 1571, 1573, 1575, 1761, 1959], "n": [1, 3, 19, 28, 30, 34, 35, 37, 40, 45, 48, 53, 64, 231, 262, 408, 467, 468, 488, 689, 692, 693, 694, 738, 761, 763, 784, 881, 936, 942, 944, 945, 947, 954, 956, 960, 966, 967, 968, 969, 998, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1090, 1091, 1101, 1122, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1149, 1170, 1173, 1178, 1186, 1192, 1198, 1210, 1227, 1231, 1232, 1236, 1270, 1273, 1285, 1289, 1290, 1293, 1294, 1303, 1304, 1305, 1307, 1309, 1310, 
1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1346, 1354, 1363, 1368, 1370, 1375, 1378, 1415, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1465, 1466, 1467, 1469, 1470, 1471, 1473, 1474, 1475, 1477, 1478, 1479, 1480, 1481, 1486, 1487, 1489, 1490, 1491, 1493, 1494, 1495, 1496, 1497, 1499, 1509, 1510, 1511, 1515, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1530, 1531, 1532, 1533, 1534, 1536, 1542, 1543, 1545, 1549, 1550, 1551, 1552, 1553, 1554, 1559, 1561, 1562, 1563, 1567, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1598, 1604, 1616, 1617, 1625, 1629, 1633, 1669, 1672, 1674, 1685, 1716, 1717, 1731, 1743, 1752, 1771, 1815, 1816, 1817, 1822, 1827, 1828, 1834, 1840, 1842, 1855, 1863, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1905, 1906, 1922, 1923, 1924, 1928, 1929, 1944, 1945, 1950, 1954, 1956, 1971, 1972, 1973, 1974, 1977, 2015, 2017, 2021, 2034, 2035, 2041, 2043, 2046, 2049, 2051, 2054, 2059, 2070, 2071, 2072, 2073, 2082, 2083, 2087, 2088, 2089, 2101, 2104, 2108], "counter": [1, 28, 919, 930, 1047, 1065, 1463, 2030, 2033, 2043], "increment": [1, 28, 47, 52, 930, 932, 1163, 1236, 1277, 1463, 2014, 2016, 2043, 2065, 2077, 2101], "object": [1, 3, 5, 6, 8, 14, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 37, 40, 47, 49, 50, 52, 53, 55, 63, 64, 66, 68, 90, 208, 211, 417, 605, 625, 796, 797, 804, 858, 865, 884, 894, 895, 896, 904, 909, 910, 936, 938, 943, 976, 1032, 1043, 1084, 1109, 1148, 1160, 1163, 1168, 1169, 1171, 1172, 1173, 1177, 1179, 1183, 1188, 1205, 1206, 1259, 1260, 1273, 1280, 1281, 1284, 1285, 1289, 1290, 1305, 1326, 1330, 1343, 1345, 1346, 1366, 1463, 1465, 1466, 1467, 1471, 1489, 1490, 1491, 1527, 1537, 1567, 1717, 1735, 1750, 1759, 1760, 1763, 1764, 1765, 1778, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813, 1859, 1932, 1933, 1937, 1966, 1969, 1970, 1989, 2012, 2016, 2017, 2018, 2021, 2025, 2027, 2028, 2033, 2037, 2043, 2045, 2046, 2049, 2050, 2051, 2054, 2056, 2059, 2061, 2062, 2063, 2065, 2067, 2069, 2071, 2072, 2074, 2075, 2076, 2077, 2079, 2084, 2085, 2087, 2089, 2099, 2101, 2102, 2103, 2109, 2112, 2114, 2115, 2118], "stash": [1, 5, 2046, 2049], "associ": [1, 8, 9, 28, 30, 41, 49, 52, 53, 55, 901, 902, 976, 1017, 1068, 1069, 1071, 1222, 1235, 1236, 1273, 1323, 1334, 1336, 1345, 1446, 1527, 1537, 1617, 1717, 1732, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1884, 1886, 2014, 2017, 2023, 2029, 2034, 2035, 2043, 2054, 2057, 2060, 2062, 2065, 2070, 2071, 2077, 2078, 2084, 2085, 2088, 2101, 2103, 2104, 2113, 2115, 2116], "tell": [1, 7, 64, 498, 978, 1200, 1201, 1210, 1274, 1281, 1345, 1780, 2014, 2021, 2024, 2043, 2049, 2050, 2070, 2101, 2102, 2115], "top": [1, 3, 7, 8, 12, 23, 30, 34, 35, 40, 53, 55, 59, 64, 750, 936, 1439, 1440, 1462, 1486, 1493, 1518, 1534, 1577, 1580, 1633, 1739, 1743, 1745, 1891, 1947, 1967, 2018, 2021, 2024, 2033, 2049, 2065, 2069, 2101, 2102, 2110, 2111], "m": [1, 4, 8, 19, 24, 28, 34, 35, 44, 48, 52, 64, 689, 692, 693, 694, 723, 731, 732, 741, 742, 743, 744, 745, 746, 748, 749, 759, 761, 767, 817, 844, 864, 865, 866, 944, 956, 960, 964, 1122, 1176, 
1236, 1271, 1273, 1278, 1280, 1284, 1285, 1291, 1294, 1305, 1313, 1319, 1320, 1321, 1322, 1326, 1327, 1328, 1330, 1331, 1332, 1337, 1338, 1339, 1340, 1346, 1363, 1364, 1368, 1378, 1413, 1415, 1428, 1429, 1430, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1441, 1442, 1443, 1444, 1445, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1458, 1459, 1464, 1465, 1466, 1467, 1468, 1469, 1471, 1472, 1474, 1475, 1476, 1477, 1481, 1482, 1483, 1484, 1485, 1488, 1489, 1490, 1491, 1494, 1495, 1496, 1513, 1514, 1516, 1517, 1520, 1521, 1522, 1526, 1527, 1535, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1557, 1558, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1578, 1580, 1581, 1582, 1583, 1584, 1585, 1672, 1674, 1717, 1731, 1733, 1737, 1747, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1765, 1766, 1769, 1815, 1816, 1817, 1827, 1834, 1863, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1905, 1906, 1924, 1928, 1929, 1945, 1952, 2014, 2015, 2016, 2017, 2025, 2046, 2049, 2051, 2054, 2057, 2059, 2062, 2072, 2082, 2093, 2094, 2097, 2104], "By": [1, 2, 3, 5, 14, 19, 23, 28, 34, 39, 44, 52, 55, 64, 447, 448, 449, 450, 451, 884, 976, 998, 1014, 1057, 1059, 1092, 1104, 1127, 1129, 1130, 1131, 1132, 1140, 1141, 1142, 1168, 1169, 1171, 1172, 1178, 1227, 1235, 1236, 1277, 1326, 1337, 1345, 1374, 1379, 1417, 1439, 1440, 1441, 1442, 1443, 1460, 1462, 1480, 1486, 1489, 1490, 1491, 1492, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1567, 1576, 1605, 1606, 1616, 1645, 1669, 1677, 1733, 1769, 1817, 1828, 1849, 1903, 1950, 1977, 1983, 2012, 2016, 2021, 2024, 2043, 2046, 2049, 2051, 2052, 2057, 2059, 2060, 2062, 2065, 2067, 2069, 2076, 2077, 2082, 2089, 2101, 2102, 2104, 2107, 2109, 2113], "compar": [1, 3, 5, 14, 23, 52, 55, 64, 87, 697, 869, 880, 918, 919, 920, 978, 1065, 1113, 1114, 1137, 1138, 1139, 1143, 1144, 1145, 1146, 1155, 1156, 1188, 1216, 1230, 1262, 1298, 1362, 1372, 1377, 1424, 1533, 1717, 1770, 1929, 1961, 2013, 2017, 2030, 2046, 2049, 2054, 2060, 2063, 2072, 2073, 2077, 2082, 2087, 2089, 2092, 2093, 2098, 2105, 2110], "down": [1, 7, 14, 23, 35, 37, 45, 46, 48, 64, 782, 1065, 1104, 1151, 1157, 1283, 1319, 1644, 1828, 1856, 2049, 2056, 2059, 2067, 2077, 2079, 2082, 2087, 2107, 2113], "irrelev": [1, 3, 2018], "simpli": [1, 3, 14, 23, 28, 30, 34, 35, 40, 48, 52, 63, 64, 867, 1167, 1260, 1271, 1435, 1464, 1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1871, 2014, 2016, 2043, 2050, 2054, 2057, 2058, 2071, 2082, 2100, 2101, 2102, 2104, 2109], "earlier": [1, 6, 33, 55, 1724, 1725, 1928, 2043, 2046, 2051, 2052, 2056, 2062, 2067, 2071], "hand": [1, 4, 33, 55, 60, 64, 969, 1103, 1109, 1271, 1318, 1323, 1334, 1336, 1435, 1537, 1556, 1952, 1965, 2014, 2016, 2017, 2043, 2054, 2057, 2059, 2062, 2070, 2082, 2101, 2103, 2104], "underwai": [1, 1049, 2046], "up": [1, 6, 7, 8, 9, 14, 19, 23, 24, 28, 32, 33, 35, 37, 40, 44, 46, 47, 50, 51, 52, 55, 58, 64, 782, 788, 894, 909, 910, 932, 976, 1051, 1052, 1054, 1092, 1109, 1138, 1139, 1144, 1146, 1151, 1167, 1186, 1212, 1264, 1277, 1283, 1286, 1287, 1332, 1431, 1446, 1473, 1533, 1573, 1575, 1579, 1580, 1598, 1624, 1633, 1644, 1704, 1717, 1724, 1725, 1798, 1834, 1856, 1961, 1962, 2012, 2014, 2016, 2024, 2030, 2033, 2034, 2035, 2043, 2044, 2045, 2046, 2051, 2052, 2054, 2055, 2057, 2060, 2067, 2070, 2071, 2072, 2077, 2078, 2089, 2098, 2101, 2102, 2103, 2109, 2111, 2113, 2115, 2117], "nonzero": [1, 55, 1198, 1270, 1355, 1357, 1358, 1363, 1980, 2015, 2021, 2025, 2068, 2100, 2103, 2108], "themselv": [1, 
9, 35, 47, 55, 796, 845, 1947, 2046, 2070, 2114], "later": [1, 2, 3, 7, 22, 24, 28, 30, 33, 55, 63, 64, 90, 488, 748, 749, 759, 767, 935, 1043, 1197, 1345, 1497, 1520, 1521, 1522, 1543, 1557, 1658, 1659, 1660, 1688, 1717, 1928, 2014, 2043, 2045, 2048, 2053, 2057, 2062, 2071, 2078, 2079, 2095, 2101, 2109], "origin": [1, 5, 11, 12, 18, 19, 23, 24, 28, 30, 33, 40, 52, 53, 55, 58, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 208, 211, 223, 488, 547, 605, 609, 619, 625, 683, 763, 800, 841, 842, 858, 861, 862, 961, 966, 1012, 1129, 1130, 1132, 1136, 1140, 1141, 1142, 1148, 1166, 1176, 1248, 1270, 1281, 1285, 1286, 1289, 1290, 1367, 1381, 1431, 1435, 1447, 1462, 1463, 1478, 1557, 1567, 1616, 1688, 1724, 1725, 1731, 1732, 1733, 1734, 1737, 1738, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1758, 1761, 1764, 1766, 1767, 1769, 1779, 1794, 1809, 1818, 1854, 1865, 1900, 1916, 1950, 1961, 1962, 2014, 2017, 2023, 2033, 2035, 2043, 2046, 2049, 2051, 2052, 2056, 2059, 2060, 2062, 2067, 2070, 2072, 2073, 2081, 2082, 2084, 2088, 2092, 2093, 2099, 2101, 2102, 2103, 2104, 2105, 2107, 2111, 2112, 2113, 2114], "did": [1, 7, 8, 28, 47, 1201, 1420, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 2016, 2017, 2054, 2062, 2070, 2101, 2106, 2115], "relationship": [1, 9, 33, 44, 52, 64, 973, 1227, 1577, 2043, 2046, 2062, 2070, 2103], "conceptu": [1, 3, 2043, 2050, 2079, 2102], "tag": [1, 3, 7, 28, 52, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 985, 1345, 2012, 2013, 2021, 2056, 2062, 2070, 2072, 2087], "eventu": [1, 7, 47, 55, 983, 2012, 2103], "itt": [1, 2071], "intel": [1, 4, 2013, 2063, 2090, 2094, 2106, 2110, 2120], "r": [1, 35, 61, 152, 893, 895, 897, 909, 910, 918, 923, 964, 975, 978, 994, 1109, 1168, 1170, 1173, 1178, 1190, 1214, 1217, 1227, 1285, 1303, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1319, 1320, 1323, 1325, 1332, 1334, 1336, 1337, 1346, 1469, 1470, 1479, 1539, 1540, 1559, 1624, 1625, 1675, 1676, 1731, 1796, 1827, 1977, 2014, 2015, 2016, 2043, 2045, 2049, 2053, 2054, 2063, 2082, 2085, 2087, 2113], "instrument": [1, 3, 24, 1390, 2013, 2056, 2093], "technologi": [1, 52, 2013, 2065, 2066, 2094], "applic": [1, 2, 9, 19, 33, 35, 38, 47, 490, 802, 813, 814, 815, 816, 904, 907, 909, 976, 1033, 1172, 1188, 1198, 1385, 1462, 1463, 1573, 1575, 1616, 1717, 1965, 1990, 2013, 2021, 2024, 2036, 2043, 2045, 2046, 2048, 2049, 2055, 2056, 2057, 2061, 2067, 2069, 2072, 2077, 2078, 2079, 2082, 2088, 2104, 2115], "across": [1, 8, 14, 19, 20, 23, 24, 28, 30, 32, 33, 34, 37, 48, 55, 60, 64, 619, 738, 822, 878, 937, 1025, 1064, 1065, 1109, 1123, 1178, 1284, 1289, 1375, 1440, 1463, 1473, 1515, 1533, 1535, 1567, 1579, 1603, 1606, 1635, 1651, 1700, 1717, 1744, 1748, 1772, 1859, 1977, 2013, 2014, 2030, 2034, 2036, 2043, 2048, 2051, 2056, 2057, 2059, 2060, 2061, 2069, 2073, 2077, 2078, 2084, 2087, 2093, 2098, 2100, 2104, 2113], "tool": [1, 4, 8, 9, 18, 28, 33, 48, 53, 64, 865, 935, 1390, 1903, 2012, 2013, 2014, 2016, 2029, 2045, 2046, 2063, 2067, 2070, 2071, 2093, 2101, 2102, 2104, 2107, 2111, 2113, 2115], "With": [1, 18, 23, 28, 35, 55, 63, 742, 743, 744, 745, 746, 788, 1065, 1130, 1131, 1132, 1140, 1141, 1142, 1290, 1441, 1442, 1443, 1455, 1456, 1458, 1459, 1472, 1489, 1490, 1491, 1567, 1578, 1580, 1609, 1612, 1644, 1670, 1704, 1795, 1838, 2018, 2043, 2046, 2049, 2072, 2077, 2082, 2087, 2097, 2101, 2105, 2110, 2113], "abl": [1, 2, 7, 8, 18, 28, 30, 33, 47, 52, 60, 488, 978, 1167, 1187, 1271, 1284, 1571, 2014, 
2025, 2029, 2034, 2043, 2049, 2050, 2052, 2062, 2067, 2070, 2072, 2077, 2082, 2089, 2099, 2100, 2101, 2103, 2104, 2105, 2109, 2113, 2115], "labl": 1, "gui": 1, "detect_anomali": 1, "check_nan": 1, "engin": [1, 8, 9, 11, 15, 20, 52, 337, 744, 745, 746, 904, 905, 909, 918, 930, 1178, 1834, 1977, 2043, 2046, 2048, 2049, 2064, 2065, 2077, 2078, 2094, 2104], "traceback": [1, 18, 40, 48, 52, 63, 64, 919, 1188, 1258, 1903, 2016, 2017, 2018, 2025, 2033, 2036, 2049, 2061, 2082, 2089, 2113, 2115], "fail": [1, 7, 19, 20, 28, 30, 37, 39, 40, 45, 46, 47, 48, 51, 52, 63, 64, 66, 71, 75, 76, 77, 86, 488, 923, 924, 1012, 1065, 1198, 1274, 1281, 1284, 1303, 1332, 1337, 1345, 1346, 1363, 1778, 2017, 2019, 2033, 2043, 2046, 2049, 2059, 2060, 2062, 2063, 2067, 2070, 2077, 2085, 2099, 2100, 2101, 2102, 2104, 2105, 2113, 2115], "test": [1, 3, 14, 18, 19, 28, 45, 47, 48, 64, 66, 74, 75, 696, 702, 976, 1126, 1128, 1129, 1134, 1135, 1140, 1141, 1142, 1144, 1146, 1188, 1196, 1202, 1205, 1206, 1213, 1259, 1260, 1264, 1265, 1267, 1268, 1779, 1892, 2013, 2014, 2018, 2030, 2033, 2043, 2052, 2054, 2061, 2067, 2069, 2071, 2087, 2093, 2103, 2113, 2114], "slow": [1, 923, 1283, 1287, 1303, 1304, 1722, 1723, 1765, 1961, 2054, 2059, 2087, 2100, 2103, 2113], "import": [1, 2, 3, 5, 7, 9, 12, 14, 17, 18, 20, 23, 24, 28, 29, 30, 32, 33, 34, 35, 36, 39, 41, 44, 45, 50, 52, 53, 55, 58, 59, 60, 61, 63, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 86, 683, 737, 744, 745, 746, 775, 776, 777, 795, 864, 865, 866, 919, 926, 928, 955, 962, 998, 1160, 1163, 1166, 1167, 1168, 1170, 1171, 1172, 1173, 1174, 1176, 1270, 1271, 1274, 1276, 1278, 1279, 1280, 1281, 1283, 1284, 1285, 1289, 1290, 1291, 1325, 1326, 1329, 1330, 1343, 1375, 1527, 1587, 1591, 1717, 1735, 1737, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1750, 1751, 1752, 1760, 1761, 1762, 1763, 1764, 1765, 1772, 1797, 1798, 1821, 1963, 2014, 2016, 2017, 2018, 2021, 2025, 2030, 2033, 2036, 2042, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2053, 2056, 2057, 2059, 2060, 2061, 2062, 2065, 2066, 2067, 2072, 2075, 2077, 2078, 2079, 2082, 2085, 2087, 2088, 2089, 2093, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2104, 2106, 2111, 2112, 2113, 2114, 2120], "myfunc": [1, 2050], "inp": [1, 12, 23, 28, 64, 899, 902, 903, 1579, 1717, 2108, 2111, 2114], "clone": [1, 15, 23, 55, 66, 90, 256, 450, 904, 905, 908, 909, 910, 919, 920, 921, 926, 928, 929, 958, 1277, 1469, 1703, 1943, 1952, 2015, 2033, 2036, 2053, 2062, 2068, 2082, 2084, 2085, 2089, 2098, 2108], "runtimeerror": [1, 14, 28, 32, 60, 64, 86, 90, 585, 700, 904, 905, 909, 919, 957, 1157, 1258, 1273, 1303, 1304, 1305, 1313, 1314, 1315, 1317, 1321, 1327, 1334, 1339, 1340, 1366, 1413, 1527, 1778, 1865, 1903, 1965, 2014, 2016, 2017, 2034, 2036, 2042, 2044, 2051, 2061, 2063, 2067, 2072, 2076, 2082, 2085, 2098], "run_fn": [1, 5, 861, 867], "recent": [1, 7, 8, 51, 63, 919, 1258, 1903, 2016, 2017, 2025, 2036, 2049, 2060, 2061, 2082, 2089, 2110, 2113], "last": [1, 5, 6, 11, 23, 24, 29, 32, 34, 35, 37, 47, 53, 63, 64, 317, 619, 695, 763, 881, 919, 943, 945, 954, 960, 970, 1051, 1092, 1097, 1101, 1126, 1128, 1131, 1132, 1134, 1135, 1138, 1139, 1141, 1142, 1144, 1146, 1148, 1188, 1231, 1232, 1236, 1250, 1258, 1270, 1295, 1329, 1332, 1337, 1374, 1379, 1431, 1436, 1437, 1438, 1444, 1462, 1470, 1472, 1478, 1497, 1499, 1514, 1534, 1537, 1541, 1542, 1543, 1556, 1572, 1573, 1579, 1604, 1625, 1634, 1647, 1671, 1672, 1716, 1717, 1718, 1724, 1725, 1771, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 
1813, 1854, 1863, 1871, 1900, 1903, 1910, 1911, 1912, 1914, 1915, 1916, 1924, 1928, 1945, 1947, 1950, 1975, 1976, 2016, 2017, 2025, 2030, 2034, 2036, 2043, 2046, 2049, 2054, 2061, 2065, 2067, 2070, 2071, 2082, 2088, 2089, 2099, 2101, 2107, 2113], "stdin": [1, 28, 919, 1903, 2025, 2036, 2049, 2061, 2082], "instal": [1, 3, 14, 15, 28, 64, 1188, 2012, 2053, 2057, 2058, 2065, 2066, 2067, 2070, 2077, 2087, 2095, 2099, 2100, 2101, 2102, 2109], "_tensor": [1, 156], "py": [1, 4, 14, 18, 28, 32, 33, 35, 39, 46, 48, 53, 55, 64, 88, 864, 1717, 2012, 2014, 2017, 2023, 2048, 2053, 2054, 2056, 2059, 2067, 2070, 2071, 2072, 2078, 2095, 2097, 2099, 2100, 2101, 2102, 2103, 2104, 2106, 2107, 2110, 2112, 2113, 2115], "93": [1, 619, 1091], "retain_graph": [1, 152, 897, 918, 928, 929, 1178, 1977, 2015, 2042, 2043, 2049, 2077], "90": [1, 1093, 1813, 1855, 2021], "allow_unreach": 1, "76": 1, "_forward_cl": 1, "tmp": [1, 3, 14, 28, 45, 47, 2012, 2046, 2071, 2107], "53": [1, 483], "44": [1, 323, 447, 1112, 1438, 1496, 1522, 1765, 2102], "set_detect_anomali": 1, "behaviour": [1, 696, 697, 702, 1633, 1672, 1809, 1877, 2012, 2060], "interpos": [1, 2049], "grad_fn": [1, 152, 337, 490, 884, 897, 912, 913, 914, 915, 916, 917, 926, 928, 929, 1166, 1732, 1905, 2043, 2049, 2057, 2062], "node": [1, 28, 32, 37, 40, 46, 47, 52, 55, 64, 66, 75, 76, 77, 81, 84, 683, 822, 823, 824, 827, 828, 829, 904, 939, 1054, 1176, 1186, 1188, 1210, 1288, 1446, 1463, 1717, 1779, 2027, 2046, 2065, 2067, 2070, 2077, 2078, 2079, 2091, 2093, 2100, 2101, 2103, 2104, 2110, 2112, 2113], "grad_mod": [1, 919, 920, 921, 2015], "least": [1, 5, 6, 8, 24, 30, 35, 46, 47, 55, 262, 404, 700, 947, 1097, 1099, 1100, 1109, 1150, 1151, 1161, 1187, 1227, 1235, 1236, 1270, 1296, 1319, 1346, 1368, 1417, 1431, 1717, 1759, 2012, 2017, 2035, 2041, 2043, 2044, 2046, 2051, 2052, 2073, 2079, 2082, 2083, 2084, 2089, 2104, 2106, 2113], "intermediari": [1, 14, 35, 904, 907, 909, 2043, 2054, 2101, 2104], "access": [1, 9, 18, 23, 28, 30, 33, 52, 53, 63, 66, 68, 82, 490, 559, 737, 904, 907, 909, 910, 942, 1018, 1273, 1276, 1289, 1336, 1431, 1527, 1707, 1719, 1720, 1732, 1737, 1759, 1769, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1996, 2013, 2014, 2015, 2017, 2018, 2021, 2027, 2029, 2031, 2032, 2033, 2035, 2036, 2043, 2046, 2051, 2054, 2056, 2057, 2065, 2077, 2082, 2085, 2086, 2088, 2095, 2098, 2100, 2101, 2102, 2103, 2104, 2106, 2109, 2118], "isinst": [1, 23, 35, 64, 926, 928, 929, 1260, 1271, 2015, 2017, 2043, 2049, 2057, 2070, 2082, 2104, 2112, 2113], "dir": [1, 1051, 1779, 2012, 2017, 2070], "__call__": [1, 1273, 1527, 2101], "__class__": [1, 66], "__delattr__": 1, "__dir__": 1, "__doc__": 1, "__eq__": 1, "__format__": [1, 2018], "__ge__": 1, "__getattribute__": 1, "__gt__": 1, "__hash__": [1, 2018], "__init_subclass__": 1, "__le__": 1, "__lt__": [1, 2017], "__ne__": 1, "__new__": [1, 2016, 2018], "__reduce__": [1, 2070], "__reduce_ex__": 1, "__repr__": [1, 3, 2049], "__setattr__": 1, "__sizeof__": 1, "__str__": [1, 64, 2015, 2017], "__subclasshook__": 1, "_raw_saved_result": 1, "_register_hook_dict": 1, "_saved_result": [1, 2043], "metadata": [1, 3, 12, 30, 40, 41, 52, 66, 74, 75, 930, 990, 1345, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1859, 2021, 2025, 2036, 2043, 2049, 2050, 2062, 2065, 2070, 2071, 2077, 2078, 2082, 2087, 2103, 2112], "next_funct": 1, "register_prehook": [1, 2043], "allclos": [1, 
60, 61, 64, 923, 924, 1051, 1131, 1132, 1138, 1139, 1167, 1168, 1170, 1171, 1172, 1173, 1177, 1178, 1321, 1323, 1334, 1336, 1339, 1340, 1365, 1737, 1764, 1827, 1977, 2015, 2021, 2050, 2068, 2082], "pack": [1, 32, 741, 742, 743, 744, 745, 746, 748, 749, 763, 784, 1289, 1290, 1365, 1463, 1478, 1497, 1543, 1758, 1759, 1760, 1761, 2015, 2027, 2036, 2043, 2051, 2063, 2072, 2101], "unpack": [1, 66, 71, 72, 763, 900, 903, 1289, 1321, 1365, 1463, 1761, 1763, 1770, 1780, 2017, 2018, 2043, 2049, 2051], "hook": [1, 29, 32, 55, 489, 490, 751, 760, 904, 907, 909, 928, 929, 932, 1054, 1273, 1463, 1527, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1733, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1750, 1755, 1766, 1769, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 2013, 2037, 2048, 2049, 2056, 2064, 2065, 2070, 2072, 2102, 2103, 2104, 2105], "common": [1, 3, 8, 23, 33, 37, 47, 60, 66, 83, 88, 688, 796, 869, 949, 952, 957, 978, 993, 1104, 1109, 1154, 1155, 1156, 1157, 1198, 1215, 1296, 1345, 1412, 1441, 1442, 1443, 1446, 1567, 1615, 1708, 1709, 1711, 1712, 1713, 1714, 1715, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1847, 1859, 1925, 1968, 2013, 2017, 2022, 2035, 2043, 2046, 2049, 2050, 2051, 2057, 2059, 2062, 2067, 2070, 2081, 2082, 2083, 2086, 2089, 2099, 2100, 2104, 2109, 2111, 2112, 2113, 2115], "trade": [1, 5, 8, 55, 1337, 1440, 1871, 2045, 2051, 2073], "leav": [1, 8, 37, 48, 152, 897, 1181, 1278, 1291, 1735, 1738, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1919, 2014, 2016, 2017, 2025, 2043, 2046, 2065, 2070, 2073, 2101], "especi": [1, 9, 11, 23, 28, 53, 64, 256, 958, 976, 986, 1703, 2016, 2025, 2043, 2049, 2050, 2060, 2062, 2072, 2077, 2082, 2098, 2103, 2104, 2106], "notic": [1, 12, 28, 53, 65, 692, 1129, 1143, 1182, 1368, 1378, 1439, 1580, 1650, 1800, 1801, 1806, 1807, 1813, 1961, 2013, 2014, 2043, 2082, 2101, 2104, 2107, 2109, 2111], "fit": [1, 9, 39, 59, 60, 501, 936, 1270, 1787, 1849, 2046, 2065, 2089, 2113], "evalu": [1, 4, 8, 9, 32, 35, 52, 58, 64, 696, 702, 801, 867, 923, 1065, 1173, 1174, 1186, 1188, 1208, 1209, 1212, 1273, 1431, 1435, 1441, 1442, 1443, 1464, 1481, 1489, 1490, 1491, 1499, 1527, 1541, 1546, 1567, 1677, 1685, 1700, 1735, 1787, 1803, 1834, 2017, 2018, 2049, 2057, 2064, 2065, 2082, 2091, 2094, 2100, 2102, 2103], "saved_tensors_hook": [1, 904, 907, 909, 2043], "pack_hook": [1, 2043], "unpack_hook": [1, 2043], "pair": [1, 28, 30, 34, 35, 47, 50, 52, 619, 738, 869, 943, 964, 998, 1143, 1184, 1188, 1287, 1337, 1375, 1519, 1528, 1533, 1537, 1674, 1796, 1843, 2016, 2017, 2023, 2034, 2043, 2046, 2077, 2078, 2079, 2087, 2089, 2093, 2104, 2111], "retriev": [1, 23, 24, 28, 32, 33, 47, 64, 90, 539, 893, 894, 909, 910, 1186, 1204, 1469, 1473, 1579, 1624, 1717, 1759, 1779, 1903, 2036, 2043, 2056, 2067, 2070, 2077, 2078, 2079, 2095, 2102, 2115], "everytim": 1, "store": [1, 3, 5, 14, 18, 24, 30, 33, 37, 48, 52, 53, 55, 64, 328, 334, 400, 689, 825, 828, 894, 909, 956, 1021, 1023, 1025, 1190, 1217, 1273, 1279, 1281, 1284, 1304, 1315, 1316, 1317, 1346, 1351, 1363, 1441, 1442, 1443, 1469, 1527, 1556, 1717, 1731, 1734, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1758, 1871, 1908, 2012, 2013, 2014, 2025, 2035, 2036, 2037, 2048, 2049, 2050, 2051, 2056, 2065, 2070, 2072, 2077, 2078, 2079, 2082, 2084, 2087, 2088, 2093, 2095, 2101, 2102, 2103, 2106, 2117], "content": [1, 3, 7, 19, 30, 40, 52, 53, 64, 904, 907, 909, 1281, 1284, 1304, 1315, 1317, 1322, 1335, 1781, 1782, 1783, 1784, 1785, 1786, 
1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1919, 1949, 1970, 2012, 2017, 2028, 2033, 2043, 2046, 2059, 2077, 2086, 2087, 2088, 2109, 2113], "equal": [1, 12, 24, 28, 35, 47, 48, 50, 52, 64, 262, 501, 547, 619, 683, 697, 698, 699, 742, 743, 744, 745, 746, 763, 771, 772, 823, 824, 825, 828, 865, 880, 945, 952, 954, 965, 966, 987, 998, 1023, 1025, 1079, 1098, 1114, 1153, 1168, 1169, 1181, 1184, 1188, 1198, 1199, 1213, 1216, 1231, 1232, 1234, 1235, 1236, 1258, 1262, 1298, 1319, 1332, 1336, 1339, 1340, 1363, 1364, 1375, 1393, 1424, 1428, 1429, 1430, 1432, 1433, 1434, 1439, 1440, 1446, 1455, 1456, 1458, 1459, 1463, 1470, 1473, 1474, 1475, 1478, 1480, 1497, 1532, 1533, 1541, 1543, 1579, 1588, 1601, 1602, 1606, 1609, 1612, 1624, 1625, 1628, 1629, 1635, 1642, 1717, 1762, 1815, 1828, 1884, 1885, 1890, 1916, 1924, 1944, 1960, 1966, 1980, 2015, 2018, 2024, 2034, 2035, 2036, 2043, 2044, 2054, 2060, 2068, 2069, 2083, 2087, 2089, 2101, 2112], "term": [1, 8, 9, 35, 47, 53, 64, 560, 737, 766, 795, 966, 1125, 1126, 1127, 1128, 1129, 1130, 1132, 1140, 1141, 1142, 1145, 1157, 1188, 1195, 1305, 1431, 1439, 1480, 1487, 1497, 1498, 1532, 1541, 1559, 1630, 1642, 1677, 1689, 1717, 1731, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1847, 1883, 1891, 2013, 2017, 2042, 2043, 2049, 2050, 2051, 2054, 2059, 2067, 2069, 2070, 2072, 2078, 2082, 2094, 2100, 2101, 2104], "mulbackward0": [1, 913, 916, 917, 2049], "inplac": [1, 30, 52, 58, 60, 64, 66, 74, 75, 758, 761, 779, 781, 783, 794, 800, 817, 841, 842, 843, 861, 862, 867, 904, 905, 909, 930, 976, 1167, 1273, 1435, 1445, 1464, 1465, 1466, 1467, 1468, 1471, 1483, 1484, 1485, 1513, 1526, 1527, 1546, 1547, 1548, 1555, 1557, 1570, 1599, 1607, 1618, 1619, 1620, 1621, 1622, 1626, 1637, 1638, 1639, 1648, 1664, 1679, 1680, 1683, 1686, 1688, 1698, 1710, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1965, 2015, 2024, 2043, 2049, 2067, 2093, 2108, 2112], "lead": [1, 5, 7, 22, 28, 36, 52, 53, 55, 61, 64, 904, 905, 909, 912, 914, 918, 924, 978, 1160, 1178, 1188, 1304, 1497, 1543, 1559, 1913, 1919, 1977, 2017, 2035, 2036, 2043, 2045, 2048, 2049, 2050, 2052, 2059, 2060, 2063, 2069, 2070, 2072, 2077, 2082, 2087, 2088, 2098, 2105], "undefin": [1, 28, 36, 40, 321, 473, 619, 882, 904, 908, 909, 910, 923, 924, 952, 960, 978, 1112, 1162, 1163, 1439, 1778, 1821, 2021, 2043, 2046, 2049, 2050, 2061, 2077], "recurs": [1, 35, 52, 55, 64, 978, 982, 1101, 1186, 1195, 1213, 1273, 1285, 1287, 1527, 1724, 1725, 1801, 2016, 2049, 2053, 2057, 2070, 2077, 2101, 2102, 2104, 2105], "inner": [1, 3, 55, 56, 61, 818, 820, 912, 1092, 1168, 1172, 1177, 1185, 1193, 1950, 2015, 2049, 2068, 2077, 2093], "save_on_cpu": 1, "pin_memori": [1, 23, 66, 71, 75, 76, 77, 447, 448, 449, 450, 451, 1110, 1112, 1161, 1836, 1840, 1842, 1943, 2014, 2015, 2036, 2046, 2049, 2068, 2084, 2108], "within": [1, 5, 9, 12, 19, 23, 24, 28, 32, 33, 34, 35, 47, 48, 50, 52, 53, 55, 63, 64, 81, 82, 90, 490, 882, 899, 923, 924, 930, 976, 990, 1015, 1092, 1186, 1192, 1273, 1277, 1366, 1400, 1436, 1437, 1438, 1446, 1465, 1466, 1467, 1471, 1473, 1520, 1521, 1522, 1527, 1533, 1567, 1579, 1633, 1643, 1658, 1659, 1660, 1717, 1735, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1749, 1751, 1752, 1753, 1754, 1755, 1832, 1833, 1863, 1884, 1964, 1968, 1984, 2014, 2016, 2017, 2029, 2030, 2033, 2041, 2045, 2046, 2049, 2055, 2056, 2057, 2060, 2067, 2069, 2070, 2072, 2075, 2076, 2077, 2078, 2087, 2095, 2100, 2101, 2104, 2106, 2112, 2113], "move": [1, 5, 
7, 8, 9, 14, 28, 30, 55, 62, 64, 526, 591, 824, 1129, 1195, 1273, 1281, 1340, 1345, 1381, 1441, 1442, 1443, 1500, 1501, 1502, 1527, 1567, 1672, 1719, 1720, 1795, 2016, 2027, 2028, 2033, 2035, 2046, 2047, 2050, 2051, 2057, 2058, 2059, 2062, 2069, 2070, 2072, 2077, 2084, 2085, 2089, 2104, 2113], "copi": [1, 7, 11, 23, 24, 28, 30, 37, 45, 52, 55, 59, 60, 64, 192, 198, 208, 211, 317, 404, 450, 460, 465, 473, 495, 501, 582, 583, 584, 585, 605, 619, 625, 795, 817, 841, 842, 883, 884, 902, 919, 920, 921, 943, 965, 973, 976, 1021, 1022, 1148, 1149, 1150, 1151, 1153, 1167, 1188, 1273, 1285, 1345, 1346, 1423, 1463, 1473, 1527, 1537, 1579, 1717, 1758, 1768, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1844, 1850, 1856, 1943, 1952, 1958, 2014, 2015, 2025, 2033, 2036, 2043, 2044, 2046, 2049, 2052, 2059, 2068, 2070, 2071, 2072, 2077, 2082, 2084, 2085, 2086, 2088, 2093, 2103, 2104, 2106, 2108, 2112], "pin": [1, 30, 211, 339, 447, 448, 449, 450, 451, 465, 582, 605, 625, 1110, 1112, 1161, 1273, 1527, 1758, 1836, 1840, 1842, 1943, 2013, 2036, 2084], "asynchron": [1, 3, 4, 30, 63, 198, 211, 582, 605, 625, 1034, 1273, 1276, 1292, 1527, 2013, 2018, 2045, 2048, 2077, 2084, 2087, 2111], "prod_1": 1, "prod_2": 1, "del": [1, 33, 488, 2018, 2033, 2049, 2051], "illustr": [1, 2017, 2042, 2049, 2082, 2095, 2098, 2107], "aliv": [1, 5, 23, 37, 47, 1012, 2043, 2046, 2051, 2059, 2077, 2078, 2079], "live": [1, 32, 488, 1051, 1054, 1273, 1527, 2014, 2046, 2051, 2052, 2077, 2079, 2098, 2101, 2103, 2115], "releas": [1, 7, 22, 28, 47, 50, 59, 64, 515, 690, 966, 967, 1008, 1009, 1016, 1033, 1043, 1044, 1047, 1051, 1052, 1054, 1065, 1218, 1270, 1273, 1304, 1315, 1317, 1318, 1319, 1322, 1335, 1363, 1364, 1385, 1466, 1497, 1527, 1543, 1658, 1659, 1660, 1772, 1827, 1843, 1859, 1924, 1928, 1952, 1990, 2012, 2013, 2016, 2030, 2033, 2043, 2045, 2046, 2053, 2055, 2060, 2061, 2062, 2063, 2067, 2072, 2077, 2088, 2089, 2095, 2101, 2104, 2113], "delet": [1, 28, 33, 64, 899, 901, 1009, 1017, 2012, 2027, 2033, 2043, 2068, 2076, 2077, 2079, 2084, 2112], "disable_saved_tensors_hook": 1, "error_messag": 1, "featur": [1, 5, 8, 9, 11, 12, 15, 18, 19, 28, 30, 32, 33, 48, 52, 55, 56, 63, 66, 83, 683, 692, 763, 771, 772, 912, 914, 918, 990, 1083, 1182, 1285, 1368, 1378, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1441, 1459, 1464, 1465, 1466, 1467, 1471, 1474, 1475, 1478, 1479, 1489, 1497, 1498, 1533, 1543, 1545, 1562, 1571, 1573, 1575, 1576, 1577, 1578, 1601, 1602, 1619, 1620, 1621, 1626, 1628, 1629, 1650, 1717, 1817, 1965, 2013, 2014, 2016, 2017, 2018, 2023, 2035, 2036, 2043, 2049, 2051, 2054, 2061, 2062, 2064, 2067, 2071, 2072, 2077, 2082, 2087, 2091, 2094, 2095, 2101, 2105, 2106], "messag": [1, 2, 5, 18, 19, 28, 40, 50, 52, 60, 64, 66, 67, 85, 86, 627, 683, 1068, 1069, 1071, 1181, 1188, 1303, 1304, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1813, 2012, 2014, 2015, 2017, 2018, 2023, 2051, 2065, 2067, 2071, 2077, 2079, 2089, 2106, 2113], "get": [1, 8, 12, 14, 19, 23, 28, 29, 30, 37, 47, 48, 50, 51, 52, 55, 60, 63, 64, 90, 152, 337, 689, 737, 791, 792, 865, 866, 897, 903, 919, 932, 943, 960, 1036, 1037, 1038, 1055, 1096, 1167, 1170, 1171, 1172, 1179, 1180, 1183, 1185, 1188, 1219, 1220, 1289, 1341, 1448, 1449, 1450, 1469, 1470, 1494, 1495, 1496, 1523, 1524, 1525, 1537, 1555, 1717, 1718, 1732, 1759, 1766, 1848, 1863, 1968, 1991, 1992, 1993, 2000, 2012, 2013, 2014, 2017, 2025, 2029, 2030, 2033, 2034, 2043, 2046, 2049, 2050, 2051, 2052, 2054, 2056, 2057, 2058, 2062, 2065, 2068, 2070, 2072, 
2077, 2079, 2082, 2086, 2087, 2088, 2098, 2100, 2101, 2102, 2103, 2107, 2113, 2114, 2117], "register_multi_grad_hook": [1, 2043], "fn": [1, 37, 39, 40, 44, 50, 52, 55, 64, 911, 928, 929, 978, 979, 982, 1168, 1174, 1178, 1273, 1274, 1286, 1291, 1527, 1977, 2014, 2016, 2017, 2021, 2033, 2043, 2056, 2077, 2099, 2101, 2105, 2106, 2111, 2112, 2113], "multi": [1, 4, 33, 37, 47, 763, 1012, 1055, 1076, 1109, 1129, 1197, 1236, 1273, 1440, 1463, 1478, 1497, 1527, 1530, 1531, 1532, 1533, 1543, 1571, 1573, 1580, 1606, 1717, 2000, 2002, 2013, 2014, 2017, 2043, 2045, 2046, 2057, 2061, 2069, 2077, 2082, 2085, 2087, 2088, 2091], "specifi": [1, 2, 3, 5, 8, 12, 14, 19, 20, 23, 24, 28, 30, 32, 33, 34, 35, 37, 38, 40, 45, 46, 47, 48, 52, 53, 55, 64, 90, 99, 152, 197, 261, 321, 333, 473, 483, 495, 499, 501, 502, 515, 517, 519, 539, 547, 548, 562, 582, 585, 586, 587, 589, 590, 605, 683, 738, 744, 745, 746, 771, 772, 796, 797, 798, 802, 817, 826, 829, 843, 858, 862, 863, 865, 882, 884, 896, 897, 909, 910, 913, 916, 918, 943, 947, 970, 976, 997, 998, 1009, 1011, 1012, 1021, 1022, 1023, 1025, 1040, 1043, 1054, 1080, 1090, 1091, 1092, 1096, 1097, 1099, 1109, 1112, 1123, 1126, 1128, 1129, 1131, 1132, 1134, 1135, 1136, 1138, 1139, 1140, 1141, 1142, 1144, 1146, 1168, 1169, 1172, 1178, 1184, 1192, 1194, 1214, 1227, 1235, 1236, 1270, 1273, 1277, 1289, 1290, 1316, 1326, 1328, 1330, 1331, 1340, 1343, 1345, 1346, 1356, 1373, 1375, 1381, 1417, 1418, 1421, 1437, 1438, 1439, 1440, 1446, 1458, 1460, 1462, 1463, 1469, 1470, 1473, 1480, 1486, 1487, 1492, 1493, 1497, 1518, 1519, 1524, 1527, 1530, 1531, 1532, 1533, 1534, 1541, 1559, 1560, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1581, 1582, 1601, 1602, 1605, 1606, 1616, 1617, 1624, 1625, 1630, 1633, 1645, 1652, 1669, 1670, 1677, 1685, 1691, 1692, 1717, 1723, 1724, 1725, 1731, 1732, 1733, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1769, 1772, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1792, 1794, 1795, 1796, 1797, 1798, 1803, 1808, 1809, 1817, 1825, 1834, 1850, 1855, 1856, 1859, 1875, 1902, 1904, 1906, 1907, 1908, 1910, 1911, 1912, 1913, 1914, 1915, 1919, 1922, 1923, 1924, 1927, 1944, 1946, 1950, 1960, 1961, 1962, 1963, 1964, 1968, 1971, 1972, 1973, 1977, 1982, 1994, 2005, 2012, 2014, 2016, 2017, 2021, 2023, 2024, 2025, 2028, 2030, 2034, 2035, 2043, 2046, 2049, 2055, 2057, 2059, 2063, 2065, 2067, 2069, 2070, 2071, 2072, 2076, 2077, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2095, 2099, 2100, 2112, 2116], "ignor": [1, 5, 7, 28, 32, 39, 45, 55, 64, 152, 501, 546, 689, 692, 693, 694, 738, 797, 798, 862, 865, 866, 897, 918, 923, 924, 944, 966, 968, 969, 998, 1002, 1007, 1055, 1056, 1076, 1077, 1130, 1132, 1140, 1141, 1142, 1188, 1217, 1234, 1273, 1285, 1291, 1294, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1320, 1321, 1322, 1323, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1342, 1343, 1365, 1418, 1419, 1420, 1436, 1437, 1438, 1439, 1440, 1460, 1462, 1470, 1478, 1480, 1486, 1492, 1493, 1497, 1518, 1519, 1520, 1521, 1522, 1527, 1530, 1531, 1532, 1533, 1534, 1541, 1543, 1559, 1560, 1571, 1576, 1605, 1606, 1616, 1625, 1645, 1669, 1677, 1717, 1772, 1780, 1803, 1811, 1815, 1875, 1906, 1907, 1924, 1928, 1952, 2000, 2001, 2002, 2003, 2014, 2017, 2018, 2024, 2035, 2043, 2046, 2049, 2067, 2082, 2089, 2102, 2109, 2112], "rel": [1, 8, 9, 14, 24, 28, 35, 55, 64, 489, 490, 697, 880, 923, 924, 928, 929, 998, 1195, 1262, 1328, 1331, 1576, 1577, 
1598, 1633, 1780, 1784, 1785, 1797, 1811, 1929, 2024, 2030, 2045, 2046, 2052, 2056, 2067, 2070, 2089], "allow_mutation_on_saved_tensor": 1, "mutat": [1, 12, 52, 53, 64, 66, 73, 74, 75, 800, 841, 842, 861, 862, 976, 990, 1167, 1277, 2021, 2065, 2070, 2088, 2103, 2104], "_allowmutationonsavedcontext": 1, "purpos": [1, 19, 24, 28, 30, 64, 89, 473, 501, 763, 923, 1283, 1368, 1446, 1478, 1533, 1710, 1711, 1712, 1713, 1875, 2030, 2036, 2043, 2049, 2070, 2078, 2099, 2101, 2104, 2107, 2113], "clear": [1, 2, 8, 9, 50, 64, 943, 986, 1186, 1190, 1273, 1527, 1528, 1537, 1779, 2012, 2043, 2046, 2057, 2062, 2068, 2069, 2101], "upon": [1, 2, 23, 29, 37, 40, 64, 976, 1591, 1717, 1734, 1737, 1961, 2033, 2043, 2046, 2067, 2072, 2079, 2116], "sin_": [1, 2015, 2034], "8415": [1, 2049, 2082], "sinbackward0": 1, "gradientedg": [1, 897, 918], "output_nr": [1, 2015, 2068], "edg": [1, 53, 782, 788, 1227, 1235, 1236, 1644, 1704, 2022, 2050, 2078, 2109], "get_gradient_edg": 1, "equival": [1, 3, 5, 11, 12, 23, 24, 25, 32, 35, 40, 47, 48, 52, 59, 60, 64, 84, 157, 172, 174, 177, 180, 181, 182, 242, 257, 269, 299, 321, 327, 395, 450, 460, 488, 500, 502, 515, 527, 606, 612, 619, 620, 622, 695, 747, 751, 758, 760, 762, 788, 789, 790, 880, 893, 894, 909, 910, 957, 959, 962, 964, 967, 974, 975, 990, 1097, 1101, 1104, 1107, 1108, 1109, 1111, 1126, 1128, 1131, 1134, 1135, 1138, 1139, 1141, 1144, 1146, 1165, 1167, 1170, 1172, 1178, 1188, 1201, 1210, 1237, 1239, 1250, 1272, 1273, 1284, 1289, 1290, 1293, 1305, 1326, 1330, 1338, 1343, 1375, 1380, 1396, 1418, 1431, 1441, 1442, 1443, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1469, 1470, 1481, 1487, 1489, 1490, 1491, 1492, 1497, 1499, 1527, 1543, 1559, 1567, 1579, 1581, 1588, 1589, 1590, 1642, 1652, 1674, 1685, 1704, 1705, 1706, 1717, 1777, 1798, 1834, 1837, 1840, 1841, 1865, 1871, 1878, 1900, 1907, 1920, 1930, 1931, 1938, 1943, 1962, 1977, 1978, 1979, 2011, 2014, 2016, 2017, 2025, 2043, 2065, 2067, 2070, 2073, 2082, 2083, 2085, 2088, 2089, 2099, 2102, 2114, 2118], "variou": [2, 5, 14, 23, 28, 30, 52, 64, 683, 1184, 1346, 1966, 1968, 2021, 2024, 2033, 2036, 2049, 2057, 2059, 2069, 2072, 2074, 2082, 2094, 2103, 2114, 2116], "get_cpu_cap": 2, "capabl": [2, 8, 14, 15, 28, 1036, 1724, 1725, 1834, 1991, 2046, 2055, 2056, 2058, 2091], "string": [2, 3, 5, 13, 14, 23, 28, 37, 44, 45, 47, 52, 53, 64, 605, 804, 817, 858, 985, 1034, 1051, 1052, 1109, 1167, 1188, 1273, 1281, 1284, 1345, 1454, 1455, 1456, 1527, 1528, 1537, 1571, 1573, 1575, 1608, 1609, 1610, 1748, 1827, 1859, 1867, 1869, 1967, 1968, 2012, 2015, 2016, 2017, 2018, 2021, 2035, 2043, 2049, 2056, 2057, 2062, 2065, 2067, 2070, 2071, 2077, 2084, 2085, 2087, 2093, 2099, 2101, 2104, 2113, 2114, 2115], "vsx": 2, "z": [2, 3, 10, 35, 55, 60, 66, 69, 74, 75, 619, 823, 881, 904, 907, 909, 910, 939, 967, 987, 1109, 1113, 1375, 1479, 1633, 1737, 1770, 1771, 1821, 1851, 1852, 2014, 2015, 2016, 2035, 2043, 2045, 2046, 2054, 2055, 2065, 2067, 2070, 2075, 2077, 2079, 2098, 2100, 2101, 2104, 2112, 2113], "vector": [2, 11, 24, 35, 55, 56, 60, 256, 315, 317, 323, 692, 693, 694, 888, 897, 902, 912, 913, 914, 915, 916, 917, 918, 957, 958, 964, 975, 994, 998, 1008, 1090, 1091, 1096, 1098, 1173, 1177, 1178, 1217, 1306, 1313, 1321, 1329, 1330, 1334, 1337, 1341, 1342, 1343, 1363, 1368, 1375, 1413, 1415, 1441, 1442, 1443, 1446, 1463, 1469, 1470, 1473, 1481, 1489, 1490, 1491, 1536, 1567, 1576, 1579, 1624, 1625, 1633, 1635, 1670, 1674, 1703, 1722, 1730, 1732, 1737, 1768, 1772, 1816, 1817, 1908, 1928, 1971, 1974, 1977, 2037, 2043, 2054, 2072, 2082, 2085, 2087, 
2095, 2106], "NO": [2, 87, 89, 1187], "avx": [2, 2097], "avx2": [2, 2072, 2097], "avx512": [2, 2097], "is_built": [2, 2058], "built": [2, 3, 7, 8, 14, 24, 28, 34, 40, 58, 64, 977, 1034, 1283, 1577, 1803, 2013, 2021, 2036, 2043, 2045, 2046, 2049, 2055, 2057, 2058, 2059, 2072, 2074, 2099, 2101, 2102, 2113, 2114], "necessarili": [2, 24, 28, 35, 37, 47, 52, 86, 473, 923, 1310, 1332, 1343, 1374, 1462, 1534, 1724, 1725, 2046, 2049], "machin": [2, 28, 37, 47, 55, 56, 61, 1277, 1283, 1319, 1564, 2055, 2056, 2057, 2058, 2059, 2061, 2064, 2065, 2070, 2076, 2077, 2078, 2094, 2095, 2097, 2101, 2106], "driver": [2, 20, 1217, 1319, 1337, 1338, 1384, 2015, 2046, 2053, 2060, 2077, 2111, 2113], "would": [2, 3, 5, 8, 9, 11, 14, 23, 28, 33, 35, 40, 47, 48, 52, 53, 55, 57, 60, 64, 447, 448, 449, 450, 451, 488, 701, 763, 797, 858, 897, 904, 905, 909, 918, 966, 984, 1110, 1112, 1130, 1161, 1171, 1172, 1187, 1188, 1200, 1201, 1202, 1271, 1273, 1274, 1278, 1281, 1289, 1290, 1389, 1390, 1413, 1436, 1437, 1438, 1439, 1440, 1478, 1492, 1497, 1520, 1521, 1522, 1527, 1537, 1543, 1633, 1644, 1707, 1717, 1718, 1724, 1725, 1758, 1770, 1798, 1836, 1840, 1842, 1853, 1863, 1943, 1944, 1949, 2014, 2016, 2017, 2025, 2034, 2035, 2036, 2037, 2042, 2043, 2044, 2045, 2046, 2048, 2049, 2050, 2052, 2053, 2054, 2059, 2062, 2067, 2069, 2070, 2072, 2077, 2078, 2079, 2081, 2082, 2098, 2099, 2100, 2101, 2103, 2104, 2105, 2106, 2107, 2109, 2112, 2113, 2117], "allow_tf32": [2, 1871, 2015, 2046, 2060], "tensorfloat": 2, "core": [2, 3, 7, 8, 52, 59, 82, 976, 1003, 1065, 1778, 2017, 2021, 2045, 2046, 2048, 2059, 2060, 2070, 2091, 2099, 2100, 2104, 2110], "amper": [2, 2082], "newer": [2, 14, 52, 1072, 1766, 2045, 2046, 2062, 2064, 2069, 2070, 2073, 2106], "tf32": [2, 20], "allow_fp16_reduced_precision_reduct": [2, 2046, 2060], "reduc": [2, 3, 14, 24, 28, 29, 30, 32, 34, 55, 323, 515, 518, 519, 689, 696, 698, 699, 700, 702, 822, 823, 824, 825, 828, 830, 878, 879, 976, 981, 1022, 1033, 1065, 1182, 1184, 1247, 1326, 1330, 1332, 1337, 1343, 1361, 1371, 1373, 1374, 1376, 1379, 1418, 1419, 1420, 1421, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1460, 1462, 1470, 1486, 1492, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1567, 1576, 1605, 1606, 1614, 1616, 1625, 1641, 1644, 1645, 1646, 1657, 1665, 1666, 1667, 1668, 1669, 1670, 1677, 1689, 1690, 1701, 1704, 1717, 1732, 1770, 1787, 1811, 1825, 1827, 1828, 1862, 1905, 1909, 1922, 1923, 1927, 1928, 1950, 1965, 1972, 1973, 1990, 2015, 2033, 2034, 2043, 2045, 2048, 2049, 2052, 2057, 2059, 2061, 2062, 2063, 2068, 2069, 2070, 2072, 2082, 2098, 2100, 2104, 2108, 2109, 2113, 2114], "precis": [2, 3, 8, 11, 14, 24, 33, 35, 55, 689, 692, 923, 924, 944, 956, 1054, 1152, 1223, 1319, 1337, 1343, 1368, 1378, 1436, 1437, 1438, 1440, 1454, 1455, 1456, 1457, 1458, 1459, 1479, 1498, 1514, 1520, 1521, 1522, 1580, 1644, 1685, 1717, 1856, 1871, 1875, 2013, 2017, 2030, 2037, 2043, 2053, 2057, 2070, 2072, 2073, 2075, 2083, 2085, 2087, 2088, 2110, 2118], "gemm": [2, 17, 19, 2045, 2067, 2104], "allow_bf16_reduced_precision_reduct": [2, 2046, 2060], "cufft_plan_cach": [2, 2046], "cufft": 2, "queri": [2, 19, 28, 47, 64, 738, 1011, 1012, 1014, 1020, 1067, 1072, 1086, 1087, 1198, 1273, 1386, 1527, 1533, 1588, 1685, 1736, 1982, 1983, 2015, 2029, 2046, 2070, 2100, 2103], "specif": [2, 3, 7, 8, 9, 11, 14, 19, 20, 28, 30, 32, 33, 34, 35, 37, 45, 47, 50, 52, 55, 60, 64, 66, 81, 83, 88, 90, 515, 858, 865, 936, 1014, 1100, 1129, 1236, 1270, 1277, 1283, 1287, 1289, 1290, 1309, 1311, 1374, 1473, 1499, 1542, 1685, 1710, 
1711, 1716, 1739, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1791, 1794, 1795, 1796, 1797, 1798, 1866, 1871, 1905, 1966, 1983, 2012, 2014, 2016, 2017, 2019, 2021, 2023, 2030, 2035, 2037, 2043, 2046, 2049, 2053, 2055, 2059, 2061, 2065, 2069, 2070, 2071, 2072, 2073, 2075, 2077, 2079, 2087, 2088, 2095, 2097, 2100, 2101, 2103, 2104, 2106, 2110, 2112, 2113, 2120], "via": [2, 7, 14, 15, 23, 28, 30, 34, 35, 38, 45, 52, 53, 55, 59, 62, 64, 417, 488, 519, 619, 923, 924, 932, 963, 984, 1009, 1045, 1065, 1113, 1170, 1186, 1190, 1191, 1193, 1195, 1345, 1346, 1441, 1442, 1443, 1463, 1481, 1489, 1490, 1491, 1499, 1567, 1731, 1766, 1769, 1809, 2014, 2016, 2017, 2021, 2029, 2030, 2033, 2036, 2041, 2043, 2046, 2049, 2050, 2051, 2055, 2057, 2059, 2060, 2067, 2070, 2072, 2077, 2078, 2082, 2085, 2086, 2099, 2100, 2101, 2103, 2105, 2109, 2114], "readonli": 2, "int": [2, 3, 12, 18, 19, 23, 24, 28, 30, 32, 33, 34, 35, 37, 41, 44, 45, 47, 48, 51, 52, 53, 55, 64, 66, 74, 75, 77, 80, 90, 218, 220, 234, 235, 244, 256, 315, 317, 319, 323, 439, 446, 447, 449, 451, 459, 474, 478, 495, 499, 501, 515, 517, 519, 522, 539, 545, 547, 548, 560, 562, 568, 585, 586, 587, 589, 590, 609, 619, 683, 696, 698, 699, 700, 702, 758, 759, 778, 780, 782, 783, 788, 789, 790, 822, 878, 879, 880, 882, 896, 904, 907, 909, 910, 927, 932, 939, 945, 947, 954, 961, 963, 970, 975, 978, 997, 998, 1002, 1003, 1007, 1008, 1012, 1014, 1016, 1017, 1020, 1021, 1022, 1023, 1024, 1025, 1027, 1028, 1029, 1030, 1031, 1036, 1037, 1038, 1040, 1042, 1046, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1065, 1066, 1067, 1072, 1073, 1074, 1075, 1078, 1079, 1080, 1083, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1096, 1097, 1098, 1099, 1100, 1101, 1107, 1110, 1112, 1122, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1148, 1161, 1163, 1164, 1168, 1169, 1170, 1171, 1172, 1178, 1180, 1183, 1186, 1204, 1206, 1214, 1221, 1224, 1225, 1227, 1231, 1232, 1234, 1235, 1236, 1237, 1248, 1249, 1270, 1271, 1273, 1274, 1276, 1279, 1280, 1285, 1293, 1295, 1305, 1306, 1326, 1327, 1330, 1339, 1340, 1341, 1342, 1343, 1344, 1346, 1353, 1360, 1361, 1366, 1371, 1373, 1374, 1376, 1379, 1381, 1382, 1383, 1384, 1387, 1388, 1394, 1401, 1402, 1403, 1404, 1405, 1413, 1418, 1419, 1420, 1421, 1422, 1423, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1436, 1437, 1438, 1441, 1442, 1443, 1444, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1462, 1463, 1469, 1470, 1472, 1473, 1474, 1475, 1477, 1479, 1481, 1489, 1490, 1491, 1494, 1495, 1496, 1498, 1499, 1503, 1504, 1505, 1506, 1507, 1508, 1512, 1514, 1515, 1517, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1529, 1532, 1534, 1535, 1538, 1539, 1540, 1542, 1545, 1549, 1550, 1551, 1552, 1553, 1554, 1561, 1563, 1567, 1571, 1572, 1573, 1574, 1575, 1576, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1615, 1616, 1617, 1624, 1625, 1632, 1635, 1644, 1652, 1669, 1670, 1671, 1675, 1676, 1691, 1692, 1700, 1704, 1705, 1706, 1716, 1717, 1732, 1733, 1742, 1743, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1759, 1761, 1766, 1769, 1772, 1773, 1775, 1776, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1817, 1818, 1825, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1836, 1838, 1839, 1840, 1842, 1848, 1849, 1850, 1853, 1854, 1855, 1856, 1859, 1864, 1865, 1866, 1870, 1873, 
1874, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1896, 1900, 1904, 1907, 1908, 1909, 1916, 1919, 1921, 1922, 1923, 1924, 1927, 1929, 1933, 1935, 1940, 1944, 1945, 1947, 1949, 1950, 1953, 1954, 1955, 1956, 1959, 1960, 1961, 1962, 1963, 1964, 1967, 1968, 1971, 1972, 1973, 1977, 1978, 1983, 1985, 1986, 1987, 1988, 1991, 1992, 1993, 1994, 1997, 2000, 2001, 2004, 2005, 2009, 2010, 2014, 2015, 2016, 2017, 2018, 2021, 2030, 2033, 2034, 2036, 2041, 2046, 2050, 2059, 2062, 2065, 2067, 2068, 2071, 2072, 2076, 2077, 2081, 2083, 2084, 2085, 2087, 2088, 2089, 2091, 2095, 2100, 2104, 2108, 2112, 2115, 2118], "show": [2, 4, 7, 13, 18, 23, 24, 28, 30, 33, 36, 52, 55, 64, 82, 912, 918, 976, 1160, 1273, 1375, 1527, 1707, 1779, 2012, 2013, 2017, 2034, 2045, 2046, 2048, 2049, 2054, 2055, 2057, 2065, 2067, 2069, 2070, 2071, 2077, 2079, 2101, 2106, 2107, 2110, 2111, 2113, 2115], "max_siz": [2, 46, 48, 2046], "capac": [2, 1079, 2046, 2059], "preferred_blas_librari": 2, "overrid": [2, 5, 14, 19, 20, 24, 28, 29, 30, 35, 40, 48, 55, 60, 64, 795, 796, 858, 894, 896, 909, 910, 1051, 1439, 1440, 1460, 1462, 1486, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1544, 1559, 1560, 1576, 1605, 1606, 1616, 1645, 1669, 1677, 1739, 1798, 1859, 1875, 2013, 2017, 2021, 2025, 2062, 2069, 2070, 2072, 2077, 2082, 2087, 2093, 2112, 2113], "bla": [2, 19, 2045], "choos": [2, 9, 15, 19, 64, 896, 909, 910, 936, 1034, 1319, 1332, 1337, 1439, 1929, 2041, 2042, 2045, 2070, 2073, 2087], "cubla": [2, 11, 17, 19, 20, 1026, 1965, 2061, 2098], "cublaslt": [2, 17, 20], "subject": [2, 3, 11, 18, 19, 28, 30, 32, 34, 55, 63, 64, 65, 235, 869, 1586, 1587, 1588, 1591, 1685, 1717, 2017, 2034, 2035, 2043, 2049, 2067, 2071, 2072, 2073, 2077, 2082, 2085, 2091, 2092, 2093, 2095, 2100, 2110], "rocm": [2, 17, 689, 692, 944, 956, 1368, 1378, 1454, 1455, 1456, 1457, 1458, 1459, 1479, 1498, 1514, 2013], "hipbla": [2, 17, 19], "hipblaslt": [2, 17, 19], "offer": [2, 28, 30, 55, 58, 1717, 1965, 2046, 2049, 2055, 2070, 2071, 2077, 2082, 2099, 2104, 2108, 2116], "wherev": [2, 9, 2034], "prefer": [2, 9, 23, 28, 30, 37, 48, 55, 866, 882, 914, 1201, 1271, 1314, 1327, 1331, 1339, 1346, 1534, 1685, 1863, 1924, 1943, 2014, 2025, 2043, 2046, 2069, 2070, 2082, 2104], "environ": [2, 3, 7, 14, 15, 18, 19, 22, 24, 30, 33, 35, 37, 40, 45, 51, 52, 64, 81, 83, 683, 1186, 1188, 1210, 1283, 1289, 1497, 1543, 1965, 2012, 2013, 2014, 2023, 2043, 2045, 2046, 2048, 2055, 2060, 2061, 2063, 2077, 2095, 2097, 2099, 2104, 2113], "variabl": [2, 3, 5, 12, 14, 18, 19, 22, 24, 35, 37, 40, 45, 47, 51, 53, 55, 60, 64, 66, 69, 74, 75, 81, 83, 450, 683, 715, 716, 717, 718, 719, 720, 721, 722, 723, 726, 727, 728, 729, 731, 732, 733, 734, 735, 737, 741, 742, 743, 744, 745, 746, 748, 749, 759, 763, 767, 802, 904, 905, 909, 978, 990, 994, 998, 1078, 1110, 1178, 1188, 1287, 1289, 1346, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1469, 1470, 1478, 1479, 1497, 1498, 1499, 1512, 1514, 1527, 1530, 1535, 1543, 1545, 1717, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1776, 1780, 1782, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813, 1836, 1838, 1840, 1965, 1977, 2010, 2012, 2013, 2023, 2024, 2035, 2036, 2043, 2045, 2046, 2048, 2051, 2054, 2060, 2061, 2063, 2065, 2067, 2069, 2072, 2077, 2084, 2087, 2095, 2100, 2101, 2102, 2104, 2106, 2113], "torch_blas_prefer_cublaslt": 2, "global": [2, 3, 5, 8, 11, 12, 20, 23, 28, 30, 32, 35, 37, 48, 55, 60, 63, 64, 66, 69, 74, 75, 858, 865, 869, 877, 932, 945, 954, 990, 1009, 1043, 1060, 1067, 1110, 1112, 1122, 1127, 
1145, 1161, 1164, 1167, 1231, 1232, 1254, 1261, 1273, 1276, 1289, 1293, 1344, 1360, 1527, 1685, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1744, 1748, 1776, 1836, 1838, 1840, 1843, 1867, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2010, 2014, 2017, 2018, 2021, 2029, 2037, 2042, 2049, 2056, 2057, 2059, 2061, 2062, 2072, 2077, 2078, 2079, 2082, 2085, 2087, 2101, 2102, 2103, 2104, 2105, 2112, 2113], "overridden": [2, 14, 64, 893, 894, 895, 909, 910, 1051, 1527, 1903, 2017, 2043, 2049, 2060, 2113, 2114], "achiev": [2, 19, 23, 24, 28, 30, 34, 35, 48, 55, 1109, 1174, 1273, 1440, 1446, 1527, 1533, 1534, 1606, 1635, 1717, 2031, 2046, 2056, 2059, 2070, 2077, 2079, 2113], "better": [2, 3, 7, 8, 9, 14, 23, 27, 28, 37, 52, 59, 919, 976, 1078, 1166, 1170, 1171, 1182, 1260, 1289, 1462, 1561, 1685, 1691, 1717, 1724, 1725, 1809, 1834, 2017, 2042, 2043, 2045, 2046, 2052, 2054, 2061, 2063, 2067, 2069, 2071, 2072, 2073, 2082, 2087, 2093, 2095, 2099, 2101, 2102, 2110], "select": [2, 5, 15, 17, 19, 21, 23, 28, 30, 35, 37, 45, 53, 315, 317, 319, 323, 763, 966, 1000, 1002, 1006, 1015, 1016, 1020, 1027, 1028, 1029, 1030, 1032, 1053, 1057, 1059, 1060, 1061, 1063, 1065, 1066, 1067, 1072, 1073, 1074, 1075, 1078, 1079, 1082, 1084, 1086, 1087, 1129, 1346, 1375, 1400, 1401, 1402, 1403, 1404, 1409, 1410, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1478, 1497, 1543, 1591, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1685, 1753, 1754, 1866, 1940, 1968, 1980, 1984, 1985, 1986, 1987, 1989, 2004, 2007, 2008, 2014, 2015, 2017, 2033, 2034, 2035, 2036, 2043, 2045, 2046, 2054, 2061, 2068, 2072, 2082, 2086, 2089, 2102, 2108, 2110, 2115], "incorrect": [2, 4, 5, 55, 64, 86, 87, 89, 256, 515, 558, 904, 907, 909, 923, 932, 958, 978, 1289, 1332, 1345, 1533, 1571, 1572, 1573, 1574, 1575, 1703, 1772, 1913, 1924, 2014, 2017, 2021, 2046, 2060, 2067, 2101, 2116], "_blasbackend": 2, "preferred_linalg_librari": [2, 1310], "heurist": [2, 14, 23, 47, 48, 64, 981, 1109, 2052, 2098, 2107], "cusolv": [2, 1337, 1338, 1928], "magma": [2, 1319, 1363, 1827, 1928, 2063, 2082], "algebra": [2, 9, 1109, 1310, 1331, 2013, 2022], "decid": [2, 4, 7, 28, 37, 47, 59, 488, 1184, 1748, 2024, 2067, 2082, 2103, 2104, 2107], "pick": [2, 28, 46, 48, 515, 976, 2043, 2077, 2100, 2102, 2107], "torch_linalg_prefer_cusolv": 2, "linalg": [2, 11, 408, 966, 967, 968, 969, 1008, 1095, 1217, 1251, 1354, 1363, 1364, 1365, 1369, 1370, 1731, 1732, 1737, 1772, 1814, 1819, 1821, 1827, 1897, 1928, 1929, 1952, 1974, 2013], "inv": [2, 35, 1251, 1305, 1309, 1315, 1331, 1335, 1339], "inv_ex": [2, 1314], "cholesky_ex": [2, 1303], "lu_factor": [2, 1322, 1323, 1363, 1364, 1365], "lu": [2, 10, 1315, 1321, 1322, 1323, 1364, 1365, 2015], "eigh": [2, 1303, 1309, 1312, 1331, 1337, 2060], "eighval": 2, "svdval": [2, 1305, 1319, 1328, 1337, 1928, 2060], "_linalgbackend": 2, "sdpaparam": 2, "flash_sdp_en": 2, "flash": [2, 1586, 1591], "scale": [2, 7, 23, 33, 35, 37, 46, 48, 99, 175, 475, 477, 619, 688, 692, 693, 694, 741, 742, 743, 744, 745, 746, 747, 752, 753, 754, 755, 756, 757, 758, 759, 762, 773, 775, 776, 777, 778, 780, 783, 784, 798, 802, 805, 822, 823, 824, 825, 828, 944, 1123, 1124, 1127, 1145, 1177, 1178, 1360, 1435, 1464, 1469, 1470, 1471, 1487, 1499, 1555, 1580, 1581, 1582, 1586, 1591, 1624, 1625, 1626, 1642, 1644, 1685, 1686, 1691, 1782, 1803, 1829, 1830, 1831, 1832, 1833, 1906, 1907, 1925, 1977, 2013, 2015, 2035, 2041, 2046, 2051, 2053, 2057, 2065, 2072, 2073, 2075, 2083, 2104], "product": [2, 15, 28, 30, 35, 47, 152, 689, 692, 693, 694, 763, 897, 902, 913, 
915, 916, 917, 918, 944, 956, 962, 966, 994, 1008, 1020, 1067, 1072, 1086, 1087, 1090, 1106, 1109, 1173, 1177, 1178, 1250, 1294, 1306, 1313, 1339, 1340, 1342, 1368, 1375, 1378, 1415, 1478, 1479, 1497, 1498, 1586, 1591, 1632, 1685, 1731, 1815, 1816, 1825, 1871, 1945, 1960, 1974, 1977, 2013, 2014, 2034, 2043, 2054, 2056, 2060, 2070, 2072, 2082, 2101, 2115], "attent": [2, 7, 34, 738, 1440, 1533, 1571, 1573, 1575, 1586, 1588, 1606, 1685, 2013, 2063, 2086, 2101], "enable_mem_efficient_sdp": [2, 1685], "mem_efficient_sdp_en": 2, "enable_flash_sdp": [2, 1685], "math_sdp_en": 2, "math": [2, 23, 64, 1119, 1325, 1586, 1591, 1645, 1685, 1817, 1834, 1929, 2013, 2014, 2016, 2017, 2052, 2060, 2082, 2083, 2089, 2113], "enable_math_sdp": [2, 1685], "cudnn_sdp_en": 2, "enable_cudnn_sdp": 2, "can_use_flash_attent": 2, "debug": [2, 4, 5, 8, 19, 20, 23, 38, 44, 52, 81, 83, 683, 830, 831, 835, 847, 923, 924, 976, 978, 985, 1009, 1042, 1083, 1221, 1303, 1710, 1711, 1712, 1713, 1870, 1877, 2013, 2016, 2023, 2043, 2045, 2046, 2048, 2055, 2056, 2057, 2061, 2063, 2065, 2067, 2070, 2092, 2100, 2101, 2102, 2105, 2106, 2109, 2115, 2116, 2117], "check": [2, 3, 4, 5, 11, 14, 19, 20, 23, 28, 29, 30, 35, 47, 55, 66, 67, 72, 75, 223, 342, 501, 697, 865, 904, 905, 907, 908, 909, 910, 923, 924, 930, 978, 1011, 1012, 1014, 1018, 1047, 1065, 1184, 1188, 1196, 1197, 1205, 1206, 1260, 1270, 1273, 1289, 1290, 1303, 1304, 1309, 1310, 1312, 1315, 1317, 1322, 1328, 1331, 1335, 1337, 1338, 1363, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1527, 1707, 1717, 1734, 1737, 1750, 1760, 1779, 1780, 1903, 1910, 1911, 1912, 1913, 1914, 1915, 1966, 1982, 1983, 2012, 2013, 2016, 2017, 2018, 2021, 2034, 2035, 2042, 2046, 2049, 2050, 2051, 2052, 2056, 2057, 2058, 2061, 2062, 2065, 2067, 2069, 2070, 2071, 2072, 2077, 2082, 2087, 2089, 2097, 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2112, 2113, 2114, 2117], "flashattent": [2, 1575, 1685], "util": [2, 7, 17, 30, 48, 52, 53, 55, 56, 63, 64, 66, 501, 735, 738, 741, 742, 743, 748, 749, 759, 763, 767, 793, 801, 910, 1025, 1110, 1111, 1112, 1160, 1205, 1206, 1273, 1478, 1497, 1527, 1543, 1544, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1717, 1734, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1758, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1803, 1809, 1932, 1933, 1935, 1936, 1937, 1965, 2013, 2017, 2021, 2025, 2030, 2042, 2044, 2045, 2046, 2049, 2051, 2053, 2054, 2056, 2057, 2059, 2061, 2067, 2069, 2070, 2100, 2101, 2104, 2111, 2113, 2116], "scaled_dot_product_attent": [2, 1533, 1571, 1587, 1590, 1591, 2015, 2038, 2039, 2040, 2068], "_sdpaparam": 2, "kei": [2, 3, 23, 30, 33, 37, 45, 47, 48, 52, 53, 55, 62, 64, 417, 605, 683, 738, 858, 863, 931, 936, 943, 1075, 1166, 1176, 1189, 1273, 1289, 1290, 1345, 1527, 1528, 1533, 1537, 1571, 1572, 1573, 1574, 1575, 1588, 1685, 1751, 1767, 1966, 1968, 2013, 2015, 2016, 2017, 2021, 2029, 2046, 2049, 2056, 2057, 2062, 2067, 2068, 2069, 2071, 2072, 2075, 2077, 2078, 2084, 2087, 2089, 2092, 2094, 2105, 2110, 2111, 2112, 2116], "mask": [2, 45, 401, 402, 403, 404, 405, 546, 738, 923, 924, 1367, 1435, 1446, 1471, 1533, 1571, 1572, 1573, 1574, 1575, 1626, 1685, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1798, 2013, 2015, 2034, 2035, 2082, 2104], "dropout": [2, 5, 34, 64, 737, 738, 763, 1273, 1435, 1465, 1466, 1467, 1471, 1478, 1497, 1527, 1533, 1543, 1544, 1571, 1573, 1575, 1599, 1619, 1620, 1621, 1626, 1685, 2013, 2015, 2027, 2035, 2036, 2043, 
2046, 2068, 2074], "causal": [2, 738, 1533, 1571, 1572, 1573, 1574, 1575, 1587, 1588, 1589, 1590, 1685], "warn": [2, 3, 14, 18, 19, 24, 25, 28, 32, 52, 86, 87, 89, 683, 912, 918, 966, 1020, 1067, 1072, 1083, 1086, 1087, 1178, 1188, 1254, 1270, 1289, 1331, 1345, 1375, 1533, 1571, 1572, 1573, 1574, 1575, 1685, 1734, 1737, 1870, 1877, 1924, 1965, 1977, 2012, 2023, 2030, 2044, 2049, 2065, 2068, 2070, 2076, 2101, 2113], "why": [2, 3, 7, 23, 60, 64, 1109, 1277, 1685, 2013, 2021, 2024, 2050, 2052, 2100, 2101, 2103, 2111, 2115], "could": [2, 4, 5, 7, 8, 17, 19, 23, 28, 34, 35, 37, 47, 60, 63, 64, 585, 586, 587, 589, 590, 932, 1047, 1130, 1132, 1140, 1141, 1142, 1189, 1270, 1279, 1304, 1345, 1363, 1389, 1390, 1724, 1725, 1802, 1905, 1929, 1961, 2016, 2017, 2024, 2033, 2043, 2046, 2048, 2062, 2063, 2067, 2070, 2072, 2073, 2077, 2078, 2079, 2082, 2086, 2087, 2099, 2101, 2104, 2105, 2109, 2113, 2116, 2117], "depend": [2, 4, 5, 12, 14, 23, 28, 30, 32, 33, 35, 37, 47, 48, 50, 53, 55, 64, 317, 788, 882, 1020, 1067, 1072, 1086, 1087, 1130, 1132, 1140, 1141, 1142, 1168, 1172, 1177, 1188, 1198, 1200, 1201, 1202, 1204, 1285, 1289, 1309, 1310, 1337, 1363, 1368, 1439, 1440, 1460, 1462, 1470, 1473, 1486, 1492, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1579, 1580, 1598, 1605, 1606, 1616, 1625, 1633, 1645, 1669, 1677, 1685, 1704, 1707, 1717, 1737, 1738, 1744, 1803, 1815, 1817, 1834, 1850, 1910, 1911, 1912, 1914, 1915, 1928, 1952, 1980, 2012, 2014, 2016, 2017, 2021, 2024, 2025, 2033, 2043, 2045, 2046, 2049, 2050, 2052, 2053, 2054, 2057, 2059, 2064, 2066, 2067, 2071, 2072, 2077, 2082, 2088, 2089, 2094, 2095, 2098, 2099, 2100, 2101, 2103, 2104, 2106, 2109, 2111, 2112, 2115], "can_use_efficient_attent": 2, "efficient_attent": [2, 1586, 1591], "sdp_kernel": [2, 1685], "enable_flash": 2, "enable_math": [2, 1685], "enable_mem_effici": 2, "enable_cudnn": 2, "temporarili": [2, 37, 1867, 2043, 2067, 2071, 2107], "previou": [2, 18, 28, 30, 47, 52, 55, 64, 515, 558, 763, 976, 978, 1043, 1154, 1227, 1284, 1344, 1360, 1478, 1497, 1543, 1591, 1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1779, 1811, 1834, 1924, 2027, 2043, 2046, 2049, 2057, 2063, 2065, 2072, 2079, 2095, 2104], "restor": [2, 5, 30, 32, 64, 90, 986, 1154, 1344, 1360, 1591, 1769, 1854, 1875, 2057, 2062, 2102], "is_avail": [2, 17, 28, 1968, 2013, 2046, 2053, 2055, 2058, 2062, 2071, 2095, 2120], "determinist": [2, 3, 5, 22, 28, 35, 64, 86, 90, 488, 501, 515, 877, 1110, 1111, 1112, 1221, 1254, 1276, 1289, 1290, 1371, 1374, 1376, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1497, 1543, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1685, 1864, 1870, 1965, 2013, 2015, 2043, 2052, 2061, 2069, 2076], "algorithm": [2, 3, 7, 11, 19, 21, 24, 29, 32, 35, 37, 55, 90, 763, 782, 788, 966, 1270, 1314, 1321, 1327, 1331, 1337, 1346, 1363, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1476, 1478, 1497, 1543, 1579, 1580, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1633, 1644, 1685, 1704, 1717, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1798, 1817, 1856, 1871, 1928, 1929, 1965, 2013, 2043, 2046, 2060, 2077, 2082], "are_deterministic_algorithms_en": 2, "use_deterministic_algorithm": [2, 27, 501, 877, 1110, 1111, 1112, 1254, 1870, 2061], "benchmark": [2, 19, 2013, 2015, 2046, 2055, 2071, 2098, 2104, 2110], "fastest": [2, 17, 19, 881, 1717, 1771, 1784, 1785, 1797, 2054, 2061, 2069], "benchmark_limit": 2, "maximum": [2, 27, 35, 47, 48, 52, 501, 698, 700, 774, 798, 823, 824, 825, 828, 829, 878, 1022, 1057, 1059, 1065, 1073, 1074, 1088, 1089, 1110, 
1111, 1112, 1155, 1172, 1234, 1235, 1236, 1346, 1371, 1393, 1485, 1624, 1625, 1723, 1801, 1809, 1834, 1848, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1967, 2015, 2023, 2041, 2042, 2046, 2059, 2068, 2075, 2089, 2090, 2100, 2108, 2115, 2117], "try": [2, 3, 4, 7, 8, 28, 30, 39, 40, 44, 47, 52, 59, 60, 84, 85, 976, 978, 1079, 1163, 1172, 1178, 1188, 1212, 1278, 1280, 1289, 1310, 1393, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1571, 1572, 1574, 1580, 1586, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1685, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1977, 2012, 2014, 2017, 2018, 2021, 2043, 2046, 2049, 2051, 2054, 2059, 2067, 2069, 2070, 2072, 2073, 2077, 2082, 2099, 2100, 2103, 2104, 2106, 2113, 2115], "dispatch": [2, 28, 55, 64, 86, 87, 89, 1189, 1278, 1389, 1390, 1966, 1968, 2017, 2021, 2046, 2049, 2065, 2067, 2104, 2105, 2114], "v8": [2, 20], "api": [2, 3, 5, 8, 9, 14, 20, 24, 28, 29, 30, 34, 37, 39, 40, 44, 45, 46, 47, 48, 55, 56, 59, 63, 65, 66, 71, 75, 76, 77, 235, 803, 827, 865, 899, 900, 901, 902, 903, 915, 920, 921, 965, 978, 981, 1009, 1043, 1044, 1045, 1051, 1052, 1054, 1082, 1113, 1153, 1167, 1170, 1171, 1173, 1178, 1186, 1198, 1284, 1285, 1287, 1393, 1409, 1717, 1767, 1769, 1770, 1778, 1856, 1867, 1958, 1968, 1977, 1996, 2007, 2012, 2023, 2024, 2025, 2027, 2031, 2033, 2034, 2036, 2043, 2048, 2053, 2062, 2064, 2073, 2077, 2078, 2082, 2085, 2087, 2091, 2094, 2095, 2102, 2106, 2114, 2117], "get_fastpath_en": 2, "fast": [2, 7, 8, 23, 28, 64, 923, 1468, 1489, 1490, 1491, 1559, 1575, 1724, 1725, 1809, 1815, 1834, 1856, 1871, 2013, 2045, 2046, 2049, 2055, 2069, 2077, 2082, 2085, 2086, 2094, 2101, 2103, 2106, 2107, 2115], "path": [2, 3, 4, 8, 14, 20, 28, 30, 45, 47, 50, 51, 52, 55, 64, 737, 933, 934, 935, 1009, 1109, 1197, 1273, 1527, 1544, 1575, 1779, 2012, 2013, 2015, 2016, 2043, 2046, 2049, 2052, 2055, 2062, 2065, 2070, 2071, 2082, 2092, 2095, 2098, 2106, 2107], "transformerencod": 2, "multiheadattent": [2, 1571, 1573, 1575, 2072], "fastpath": [2, 1533, 2103], "condit": [2, 5, 12, 21, 23, 52, 53, 60, 64, 66, 69, 74, 75, 619, 622, 627, 697, 763, 913, 923, 924, 990, 1270, 1289, 1303, 1305, 1310, 1313, 1319, 1320, 1321, 1332, 1337, 1478, 1497, 1543, 1575, 1760, 1765, 1952, 1980, 2014, 2015, 2016, 2018, 2026, 2036, 2043, 2049, 2060, 2087, 2100, 2101, 2102, 2103, 2108], "met": [2, 12, 619, 990, 1313, 1320, 1321, 1332, 1346, 1575, 1913], "set_fastpath_en": 2, "verbos": [2, 14, 19, 28, 64, 683, 1188, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 2012, 2023, 2035, 2067, 2087, 2113], "On": [2, 14, 22, 23, 28, 29, 35, 48, 55, 60, 64, 689, 692, 944, 956, 1321, 1368, 1378, 1454, 1455, 1456, 1457, 1458, 1459, 1479, 1494, 1495, 1496, 1497, 1498, 1514, 1537, 1543, 1556, 1717, 1784, 1785, 1794, 1797, 2013, 2014, 2024, 2043, 2045, 2046, 2049, 2060, 2070, 2077, 2078, 2079, 2082, 2098, 2100, 2101, 2103, 2104], "demand": [2, 23, 1045, 2016, 2056, 2059, 2077], "onemkl": 2, "easier": [2, 7, 23, 52, 64, 2014, 2016, 2036, 2043, 2044, 2049, 2050, 2052, 2082, 2104, 2113], "dump": [2, 64, 1009, 2048, 2063, 2104, 2113, 2117], "durat": [2, 19, 28, 44, 47, 941, 1389, 1390, 2030, 2065, 2071, 2104, 2113], "kernel": [2, 3, 4, 11, 14, 18, 20, 28, 55, 63, 488, 683, 742, 743, 744, 745, 746, 775, 776, 777, 930, 976, 1007, 1012, 1014, 1015, 1051, 1052, 1085, 1087, 1395, 1400, 1436, 1437, 1438, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1474, 1475, 1494, 1495, 1496, 1503, 1504, 1505, 1506, 1507, 1508, 1520, 1521, 1522, 1523, 1524, 
1525, 1579, 1608, 1609, 1610, 1611, 1612, 1613, 1628, 1629, 1685, 1724, 1725, 1735, 1966, 1968, 1983, 1984, 2009, 2014, 2021, 2027, 2036, 2046, 2049, 2050, 2052, 2058, 2060, 2067, 2069, 2071, 2073, 2082, 2098, 2101, 2102, 2103, 2104, 2105, 2106, 2113, 2116], "mkl_verbos": 2, "methodologi": 2, "larg": [2, 3, 7, 8, 23, 28, 33, 34, 64, 683, 862, 996, 998, 1065, 1310, 1319, 1337, 1431, 1473, 1579, 1627, 1717, 1809, 1895, 1908, 1929, 2013, 2033, 2035, 2036, 2045, 2046, 2049, 2051, 2057, 2060, 2062, 2065, 2067, 2070, 2073, 2077, 2082, 2085, 2088, 2098, 2104, 2111, 2113, 2115], "moreov": [2, 30, 83, 515, 1717, 1797, 2116], "investig": [2, 7, 28, 60, 2111], "singl": [2, 3, 5, 12, 14, 19, 24, 28, 30, 32, 33, 35, 37, 40, 45, 46, 47, 55, 56, 58, 59, 60, 61, 63, 64, 66, 74, 75, 256, 683, 698, 699, 769, 770, 771, 772, 775, 776, 777, 817, 899, 912, 913, 914, 915, 916, 917, 918, 930, 945, 954, 958, 976, 978, 990, 994, 998, 1051, 1054, 1166, 1168, 1169, 1172, 1174, 1178, 1231, 1232, 1236, 1258, 1272, 1273, 1289, 1290, 1293, 1351, 1375, 1389, 1390, 1429, 1430, 1433, 1434, 1437, 1438, 1440, 1454, 1455, 1456, 1458, 1459, 1462, 1463, 1474, 1475, 1481, 1494, 1495, 1496, 1499, 1521, 1522, 1527, 1535, 1539, 1540, 1542, 1556, 1567, 1592, 1593, 1594, 1595, 1596, 1597, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1628, 1629, 1630, 1658, 1659, 1660, 1703, 1709, 1711, 1714, 1715, 1716, 1717, 1722, 1723, 1726, 1728, 1730, 1767, 1768, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 1803, 1824, 1850, 1871, 1916, 1922, 1923, 1961, 1962, 1965, 1972, 1973, 1977, 2013, 2014, 2016, 2017, 2024, 2027, 2033, 2035, 2036, 2037, 2043, 2045, 2046, 2049, 2050, 2052, 2054, 2057, 2059, 2060, 2061, 2062, 2067, 2069, 2070, 2071, 2072, 2078, 2079, 2082, 2085, 2088, 2089, 2093, 2097, 2098, 2100, 2102, 2103, 2104, 2106, 2112, 2113], "enough": [2, 8, 24, 64, 1188, 1195, 1285, 1577, 1910, 1911, 1912, 1913, 1914, 1915, 1965, 2016, 2033, 2036, 2046, 2049, 2054, 2069, 2070, 2085, 2101, 2104, 2114], "scope": [2, 7, 12, 50, 64, 83, 990, 1069, 1163, 1567, 1748, 2014, 2016, 2017, 2046, 2051, 2067, 2070, 2079, 2103, 2112], "second": [2, 5, 14, 18, 28, 32, 33, 37, 40, 47, 50, 52, 61, 64, 689, 692, 694, 697, 763, 797, 878, 879, 880, 888, 896, 909, 910, 923, 924, 944, 948, 949, 951, 952, 953, 956, 1008, 1020, 1067, 1072, 1086, 1087, 1097, 1099, 1100, 1106, 1114, 1155, 1156, 1168, 1171, 1172, 1173, 1177, 1178, 1201, 1215, 1216, 1227, 1230, 1238, 1239, 1240, 1250, 1262, 1296, 1298, 1306, 1342, 1345, 1351, 1352, 1362, 1368, 1372, 1374, 1375, 1377, 1378, 1419, 1424, 1427, 1431, 1437, 1438, 1444, 1455, 1456, 1458, 1459, 1469, 1470, 1477, 1478, 1492, 1495, 1496, 1497, 1515, 1519, 1521, 1522, 1543, 1556, 1615, 1617, 1651, 1707, 1748, 1784, 1785, 1788, 1794, 1798, 1809, 1811, 1855, 1905, 1913, 1949, 1954, 1956, 1971, 1974, 1977, 2016, 2017, 2021, 2026, 2043, 2046, 2049, 2050, 2051, 2052, 2054, 2057, 2061, 2065, 2067, 2071, 2077, 2083, 2087, 2095, 2098, 2101], "verbose_on": 2, "level": [2, 3, 7, 8, 9, 12, 15, 18, 23, 25, 28, 30, 33, 37, 39, 40, 44, 52, 53, 55, 59, 64, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 87, 88, 89, 501, 683, 750, 899, 900, 901, 902, 903, 915, 1167, 1182, 1198, 1201, 1217, 1440, 1463, 1717, 1811, 1970, 2013, 2014, 2017, 2018, 2023, 2030, 2033, 2036, 2041, 2043, 2045, 2049, 2052, 2054, 2065, 2070, 2071, 2072, 2073, 2077, 2082, 2087, 2092, 2093, 2094, 2100, 2101, 2102, 2104, 2105, 2108, 2111, 2112, 2113, 2114], "verbose_off": 2, "dnn": [2, 2045], "onednn": [2, 859, 860, 863, 
1275, 1282, 2068, 2072], "former": [2, 55, 1497, 1527, 2043], "dnnl_verbos": 2, "verbose_on_cr": 2, "set_flag": 2, "_enabl": 2, "get_opt_einsum": 2, "packag": [2, 7, 8, 16, 17, 24, 33, 35, 52, 59, 63, 1633, 2012, 2013, 2031, 2032, 2049, 2053, 2057, 2065, 2069, 2075, 2077, 2087, 2091, 2095, 2099, 2120], "els": [2, 7, 12, 23, 28, 30, 33, 35, 37, 39, 47, 52, 64, 66, 605, 763, 943, 947, 990, 1285, 1291, 1492, 1567, 1671, 1685, 1717, 1784, 1785, 1788, 1794, 1795, 1796, 1797, 1851, 1852, 1884, 1890, 1968, 2014, 2016, 2018, 2019, 2024, 2033, 2036, 2046, 2049, 2050, 2052, 2057, 2058, 2065, 2066, 2067, 2069, 2070, 2084, 2095, 2098, 2099, 2100, 2101, 2112], "einsum": [2, 2015, 2068], "readthedoc": [2, 1109], "io": [2, 7, 14, 23, 30, 52, 1109, 1281, 1284, 1345, 1497, 1498, 1575, 1859, 2063, 2065, 2072], "en": [2, 14, 24, 1109, 2055, 2087, 2118], "path_find": [2, 1109], "html": [2, 3, 4, 7, 11, 14, 15, 48, 55, 683, 733, 734, 735, 736, 748, 749, 759, 764, 765, 766, 767, 768, 976, 1109, 1577, 1965, 1968, 2021, 2044, 2055, 2057, 2061, 2070, 2071, 2087], "calcul": [2, 19, 23, 28, 30, 37, 44, 763, 771, 772, 802, 805, 822, 823, 827, 920, 964, 998, 1097, 1101, 1109, 1113, 1264, 1270, 1329, 1351, 1352, 1354, 1436, 1437, 1438, 1440, 1441, 1442, 1443, 1446, 1457, 1458, 1459, 1473, 1478, 1480, 1481, 1489, 1490, 1491, 1499, 1567, 1576, 1579, 1580, 1600, 1601, 1602, 1606, 1625, 1630, 1633, 1644, 1674, 1732, 1744, 1766, 1770, 1772, 1803, 1849, 1922, 1923, 1954, 1956, 1972, 1973, 2043, 2044, 2048, 2052, 2054, 2060, 2063, 2072, 2082, 2093], "contract": [2, 53, 1109, 1945, 2013, 2035, 2070, 2099], "fall": [2, 7, 14, 19, 20, 23, 52, 798, 923, 976, 1201, 1236, 1287, 1345, 1487, 1559, 1642, 1689, 1722, 1723, 2012, 2067, 2072, 2104, 2111], "left": [2, 23, 64, 462, 501, 817, 823, 869, 945, 949, 952, 954, 955, 960, 965, 1109, 1129, 1136, 1150, 1153, 1154, 1158, 1227, 1231, 1232, 1236, 1270, 1271, 1278, 1293, 1314, 1323, 1327, 1331, 1334, 1335, 1336, 1337, 1339, 1351, 1352, 1413, 1431, 1436, 1437, 1438, 1439, 1440, 1454, 1455, 1456, 1473, 1480, 1493, 1494, 1495, 1496, 1515, 1516, 1517, 1518, 1520, 1521, 1522, 1530, 1531, 1532, 1536, 1537, 1576, 1579, 1580, 1581, 1582, 1588, 1590, 1633, 1653, 1672, 1685, 1723, 1801, 1802, 1815, 1843, 1856, 1863, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1924, 1945, 1950, 1971, 2014, 2015, 2017, 2043, 2049, 2052, 2057, 2075, 2083, 2101, 2111, 2115], "strategi": [2, 3, 7, 17, 23, 24, 28, 32, 40, 55, 912, 914, 1109, 1170, 1287, 1431, 1717, 1724, 1725, 1809, 2016, 2050, 2054, 2073, 2101, 2104], "auto": [2, 28, 52, 56, 84, 85, 1109, 1439, 1440, 2017, 2067, 2085, 2087, 2103], "greedi": [2, 32, 1109], "doc": [2, 3, 4, 9, 11, 15, 47, 48, 53, 88, 683, 733, 734, 735, 736, 748, 749, 759, 764, 765, 766, 767, 768, 843, 976, 1178, 1292, 1572, 1573, 1574, 1575, 1965, 1977, 2018, 2021, 2033, 2044, 2048, 2049, 2057, 2061, 2070, 2072, 2077, 2082, 2087, 2095, 2101, 2105, 2109], "timer": [3, 31, 2013], "stmt": [3, 2045], "setup": [3, 14, 24, 30, 33, 47, 48, 55, 65, 823, 824, 825, 828, 1178, 1717, 1977, 2045, 2046, 2053, 2058, 2071, 2077, 2078, 2099], "global_setup": 3, "perf_count": 3, "label": [3, 6, 7, 23, 28, 32, 939, 1109, 1431, 1440, 1446, 1460, 1462, 1486, 1519, 1530, 1531, 1617, 1759, 1802, 2048, 2052, 2059, 2061, 2087], "sub_label": 3, "descript": [3, 7, 13, 14, 20, 23, 25, 40, 47, 64, 737, 1245, 1246, 1247, 1319, 1346, 1598, 1827, 1871, 2016, 2017, 2026, 2046, 2049, 2054, 2056, 2057, 2067, 2090, 2094, 2105, 2111, 2117, 2118], "env": [3, 28, 35, 40, 45, 47, 48, 49, 51, 64, 1065, 1188, 
1189, 2048, 2060, 2065, 2066, 2077, 2104, 2113], "num_thread": [3, 2059], "languag": [3, 14, 40, 52, 1285, 1431, 1571, 1632, 2051, 2064, 2101], "measur": [3, 35, 44, 1011, 1057, 1059, 1305, 1386, 1439, 1440, 1460, 1486, 1493, 1518, 1519, 1576, 1577, 1605, 1665, 1811, 1982, 2013, 2029, 2046, 2056, 2057, 2069, 2082, 2113], "statement": [3, 12, 35, 53, 60, 64, 66, 71, 74, 75, 990, 1274, 1289, 2018, 2043, 2049, 2059, 2062, 2064, 2067, 2070, 2075, 2077, 2099, 2101], "full": [3, 7, 8, 14, 15, 23, 24, 28, 30, 32, 33, 35, 47, 51, 52, 55, 56, 60, 64, 488, 515, 904, 908, 909, 910, 945, 954, 976, 1137, 1138, 1139, 1143, 1144, 1146, 1165, 1177, 1178, 1231, 1232, 1277, 1311, 1312, 1319, 1320, 1321, 1332, 1337, 1338, 1346, 1363, 1440, 1446, 1480, 1541, 1571, 1608, 1609, 1610, 1617, 1630, 1677, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1875, 1928, 1929, 1977, 2012, 2014, 2015, 2016, 2017, 2019, 2035, 2042, 2046, 2048, 2049, 2052, 2054, 2057, 2060, 2061, 2065, 2068, 2070, 2072, 2075, 2078, 2086, 2089, 2102, 2104, 2107, 2108, 2113], "org": [3, 4, 7, 9, 10, 11, 12, 15, 24, 33, 35, 48, 55, 683, 733, 734, 735, 736, 748, 749, 759, 764, 765, 766, 767, 768, 976, 990, 1346, 1497, 1547, 1574, 1577, 1733, 1769, 1817, 1871, 1891, 1968, 2012, 2018, 2021, 2043, 2044, 2057, 2061, 2063, 2067, 2070, 2087, 2088, 2115, 2118], "timeit": [3, 2045], "sever": [3, 12, 15, 23, 28, 35, 55, 64, 741, 742, 743, 744, 745, 746, 769, 770, 775, 776, 777, 785, 786, 919, 920, 1054, 1113, 1208, 1209, 1273, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1454, 1455, 1456, 1457, 1458, 1459, 1474, 1475, 1494, 1495, 1496, 1515, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1581, 1582, 1592, 1593, 1594, 1595, 1596, 1597, 1600, 1608, 1609, 1610, 1611, 1612, 1613, 1628, 1629, 1651, 1654, 1655, 1656, 1658, 1659, 1660, 1717, 1734, 1735, 1737, 1738, 1770, 1778, 1832, 1833, 1960, 1965, 2014, 2042, 2043, 2045, 2046, 2056, 2057, 2059, 2067, 2069, 2072, 2077, 2101, 2102, 2104, 2116], "awar": [3, 7, 55, 86, 488, 715, 716, 717, 718, 719, 720, 721, 722, 723, 733, 734, 735, 736, 841, 842, 859, 866, 867, 1575, 1717, 1932, 1933, 1935, 1936, 1937, 2035, 2043, 2046, 2070, 2075, 2078, 2082, 2110], "element": [3, 12, 23, 28, 35, 53, 60, 61, 64, 66, 72, 74, 75, 99, 121, 152, 156, 198, 218, 244, 256, 260, 288, 315, 317, 319, 321, 323, 354, 400, 402, 404, 437, 456, 473, 475, 476, 499, 501, 515, 517, 519, 522, 547, 548, 560, 562, 611, 612, 619, 684, 686, 687, 690, 691, 696, 697, 701, 702, 738, 761, 763, 773, 774, 775, 776, 777, 783, 787, 796, 878, 880, 881, 882, 885, 886, 887, 888, 889, 897, 912, 913, 915, 916, 917, 923, 924, 946, 955, 958, 965, 971, 975, 990, 992, 994, 995, 996, 1051, 1088, 1089, 1090, 1091, 1092, 1093, 1096, 1098, 1099, 1100, 1104, 1106, 1109, 1112, 1114, 1115, 1119, 1139, 1143, 1144, 1146, 1148, 1153, 1155, 1156, 1158, 1161, 1163, 1168, 1169, 1171, 1172, 1173, 1177, 1178, 1214, 1215, 1216, 1217, 1227, 1230, 1233, 1234, 1235, 1236, 1250, 1258, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1293, 1295, 1296, 1298, 1315, 1317, 1321, 1332, 1336, 1347, 1348, 1350, 1353, 1355, 1356, 1357, 1358, 1362, 1363, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1396, 1413, 1418, 1419, 1421, 1422, 1423, 1424, 1425, 1435, 1436, 1439, 1440, 1441, 1442, 1443, 1445, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1464, 1465, 1466, 1467, 1468, 1470, 1471, 1473, 1478, 1479, 1482, 1483, 1484, 1485, 1486, 1487, 1492, 1493, 1497, 1499, 1503, 1504, 1505, 1506, 1507, 1508, 1513, 1516, 1518, 1519, 1520, 1521, 
1522, 1526, 1528, 1530, 1531, 1532, 1533, 1534, 1535, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1545, 1546, 1547, 1548, 1555, 1557, 1558, 1559, 1560, 1561, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1576, 1577, 1579, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1618, 1622, 1625, 1626, 1631, 1632, 1636, 1637, 1638, 1639, 1642, 1645, 1646, 1648, 1653, 1658, 1659, 1660, 1664, 1665, 1669, 1670, 1672, 1675, 1676, 1677, 1678, 1679, 1680, 1685, 1686, 1687, 1688, 1689, 1691, 1693, 1695, 1696, 1697, 1698, 1703, 1716, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1751, 1752, 1758, 1759, 1761, 1762, 1771, 1772, 1773, 1775, 1780, 1820, 1821, 1824, 1825, 1832, 1833, 1835, 1846, 1849, 1850, 1854, 1856, 1858, 1875, 1878, 1880, 1892, 1893, 1895, 1896, 1900, 1907, 1910, 1911, 1912, 1913, 1914, 1915, 1917, 1918, 1922, 1923, 1927, 1939, 1941, 1942, 1946, 1947, 1948, 1950, 1952, 1953, 1954, 1955, 1956, 1958, 1960, 1961, 1962, 1963, 1972, 1973, 1974, 1977, 1980, 2015, 2017, 2021, 2024, 2036, 2041, 2044, 2045, 2049, 2054, 2060, 2062, 2067, 2069, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2091, 2101, 2103, 2106, 2114], "lazili": [3, 17, 30, 1011, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1707, 1982, 2014, 2049, 2111, 2120], "threadpool": 3, "comparison": [3, 24, 28, 52, 64, 1273, 1289, 1290, 1527, 1780, 2018, 2049, 2089, 2093, 2102], "appl": [3, 2031], "synchron": [3, 4, 17, 18, 20, 24, 30, 32, 37, 47, 55, 63, 488, 881, 1011, 1012, 1014, 1022, 1042, 1083, 1303, 1304, 1305, 1309, 1310, 1311, 1312, 1314, 1315, 1316, 1317, 1321, 1322, 1328, 1331, 1334, 1335, 1337, 1338, 1346, 1386, 1567, 1717, 1771, 1773, 1849, 1982, 1983, 2013, 2045, 2046, 2048, 2055, 2059, 2077, 2084, 2104, 2116], "focu": [3, 53, 1811], "replic": [3, 23, 30, 34, 55, 88, 1227, 1284, 1454, 1455, 1456, 1463, 1473, 1503, 1504, 1505, 1552, 1553, 1554, 1579, 1672, 1700], "particularli": [3, 23, 24, 50, 1462, 1463, 1534, 2014, 2046, 2100, 2101, 2111], "variat": [3, 35, 2017, 2049, 2069, 2073, 2113], "confound": 3, "quantifi": [3, 1577], "nois": [3, 2015, 2061, 2113], "median": [3, 35, 175, 1419, 1965, 2015, 2034, 2068], "robust": [3, 1346, 2033, 2057], "deviat": [3, 35, 55, 379, 1435, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1567, 1773, 1885, 1890, 1922, 1923, 2041, 2104], "merg": [3, 6, 7, 9, 23, 28, 30, 33, 48, 1528, 1533, 1537], "repeat": [3, 35, 52, 60, 496, 998, 1109, 1198, 1337, 1363, 1523, 1524, 1525, 1605, 1606, 1817, 1849, 1908, 1928, 1929, 1946, 2015, 2017, 2046, 2068, 2071, 2074, 2082, 2108], "autorang": 3, "exact": [3, 14, 23, 37, 50, 53, 87, 341, 744, 745, 746, 798, 878, 879, 880, 923, 924, 976, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1374, 1458, 1497, 1575, 1644, 1717, 1812, 1901, 2041, 2046, 2048, 2059, 2082, 2103, 2104, 2110, 2113, 2114], "discuss": [3, 5, 8, 9, 10, 35, 52, 64, 1487, 1644, 2024, 2043, 2049, 2052, 2057, 2061, 2077, 2079, 2082, 2101, 2102], "docstr": [3, 14, 64, 865, 866, 1273, 1527, 2012, 2046], "adapt": [3, 52, 769, 770, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1464, 1592, 1593, 1594, 1595, 1596, 1597, 1782, 1783, 1794, 1796, 1803, 2046, 2065, 2100], "field": [3, 7, 19, 28, 30, 32, 37, 40, 44, 45, 53, 55, 64, 490, 829, 897, 898, 941, 1271, 1273, 1431, 1439, 1440, 1460, 1462, 1486, 1492, 1493, 1518, 1519, 1527, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1598, 1605, 1606, 1616, 1633, 1645, 1669, 1677, 1758, 1792, 1798, 2030, 2043, 2048, 2059, 2067, 2077, 2078, 2087, 2101, 
2112, 2113], "displai": [3, 18, 1053, 1066, 1188, 1644, 1704, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 2012, 2018, 2023, 2028, 2067, 2087, 2089, 2095, 2104, 2113, 2116], "instruct": [3, 4, 11, 14, 50, 63, 683, 1871, 2014, 2017, 2046, 2060, 2067, 2072, 2089, 2097, 2101, 2103, 2113], "count": [3, 20, 23, 35, 44, 52, 64, 898, 932, 941, 947, 997, 1047, 1163, 1235, 1236, 1271, 1311, 1312, 1758, 1811, 1961, 1962, 2015, 2030, 2046, 2052, 2068, 2071, 2077, 2079, 2081, 2082, 2104, 2113], "wall": [3, 2107], "callgrind": 3, "analog": [3, 52, 64, 488, 697, 919, 1092, 1130, 1132, 1323, 1497, 1797, 1928, 1946, 2036, 2057, 2100], "constructor": [3, 14, 23, 24, 32, 47, 55, 64, 66, 75, 827, 865, 866, 1431, 1532, 1537, 1538, 1556, 1581, 1582, 1717, 1765, 1903, 2013, 2014, 2017, 2018, 2046, 2048, 2057, 2077, 2082, 2084, 2085, 2088, 2118], "snippet": [3, 48, 2012, 2057, 2062, 2095, 2106], "loop": [3, 19, 24, 30, 33, 50, 52, 53, 56, 60, 61, 64, 66, 71, 866, 867, 918, 1054, 1057, 1059, 1172, 1178, 1289, 1717, 1735, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1977, 2014, 2015, 2017, 2030, 2045, 2046, 2051, 2054, 2057, 2060, 2064, 2067, 2069, 2071, 2072, 2087, 2098, 2101, 2103, 2112], "callabl": [3, 5, 12, 23, 24, 28, 32, 33, 35, 37, 40, 45, 50, 52, 53, 55, 63, 64, 121, 400, 845, 976, 978, 990, 1051, 1052, 1054, 1167, 1168, 1169, 1174, 1177, 1178, 1273, 1276, 1279, 1285, 1289, 1345, 1346, 1527, 1571, 1573, 1575, 1577, 1710, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 1805, 1808, 1977, 2012, 2014, 2016, 2017, 2018, 2021, 2030, 2046, 2049, 2057, 2062, 2065, 2067, 2070, 2071, 2077, 2089, 2092, 2093, 2099, 2102, 2105, 2112, 2114], "present": [3, 9, 23, 28, 30, 47, 53, 55, 898, 941, 943, 1222, 1271, 1273, 1363, 1431, 1497, 1527, 1537, 1758, 1778, 1924, 2012, 2028, 2033, 2034, 2035, 2042, 2043, 2046, 2049, 2054, 2057, 2062, 2067, 2070, 2078, 2081, 2082, 2085, 2101, 2105, 2109, 2115], "default_tim": 3, "dict": [3, 5, 12, 23, 24, 28, 29, 30, 32, 33, 34, 35, 37, 41, 45, 50, 52, 53, 55, 57, 59, 62, 64, 417, 683, 795, 796, 797, 818, 819, 820, 839, 858, 925, 932, 943, 976, 990, 1051, 1052, 1065, 1075, 1166, 1176, 1178, 1186, 1271, 1272, 1273, 1274, 1280, 1285, 1289, 1290, 1345, 1346, 1463, 1473, 1527, 1528, 1537, 1579, 1707, 1717, 1748, 1767, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1790, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813, 1977, 1991, 2012, 2014, 2015, 2017, 2018, 2021, 2028, 2030, 2049, 2057, 2062, 2065, 2067, 2068, 2069, 2077, 2078, 2087, 2092, 2093, 2102, 2109, 2112, 2114], "summar": [3, 4, 48, 1875, 2017, 2036, 2052, 2082, 2101, 2102, 2113], "relu": [3, 24, 33, 52, 60, 61, 64, 703, 704, 708, 709, 710, 711, 712, 713, 714, 718, 719, 720, 721, 722, 723, 726, 727, 728, 729, 730, 731, 732, 768, 795, 796, 817, 858, 864, 1051, 1168, 1178, 1279, 1285, 1527, 1543, 1545, 1556, 1564, 1571, 1573, 1575, 1681, 1683, 1707, 1779, 1977, 2014, 2015, 2027, 2035, 2036, 2041, 2043, 2057, 2062, 2067, 2068, 2072, 2074, 2075, 2095, 2099, 2106, 2108, 2111, 2112, 2113], "readabl": [3, 13, 23, 45, 52, 64, 1053, 1066, 1943, 2067, 2069, 2102, 2113, 2114], "supplement": 3, "disambigu": [3, 45, 64, 1351, 2113], "ident": [3, 14, 23, 28, 30, 35, 64, 791, 817, 923, 924, 1097, 1201, 1311, 1312, 1313, 1314, 1327, 1339, 1342, 1346, 1363, 1418, 1419, 1435, 1464, 1674, 1724, 1725, 1731, 1734, 1970, 1974, 1980, 2018, 2041, 2060, 2061, 2070, 2074, 
2082, 2103, 2113], "easi": [3, 23, 28, 30, 33, 47, 52, 1184, 2014, 2043, 2051, 2056, 2057, 2059, 2070, 2072, 2077, 2078, 2082, 2099, 2102, 2104, 2106], "differenti": [3, 35, 56, 61, 152, 354, 591, 805, 893, 895, 897, 902, 903, 904, 906, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 973, 1054, 1171, 1172, 1173, 1177, 1288, 1320, 1321, 1332, 1363, 1445, 1446, 1469, 1635, 1670, 1717, 1737, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1965, 2013, 2024, 2049, 2051, 2054, 2067, 2069, 2077, 2088, 2104], "distinguish": [3, 1187, 2072, 2082], "princip": [3, 1320, 1817], "signal": [3, 11, 30, 37, 47, 50, 741, 742, 743, 769, 770, 785, 786, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1270, 1336, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1454, 1455, 1456, 1474, 1475, 1494, 1495, 1496, 1515, 1520, 1521, 1522, 1581, 1582, 1592, 1593, 1594, 1595, 1596, 1597, 1600, 1608, 1611, 1628, 1629, 1651, 1654, 1655, 1656, 1658, 1659, 1660, 1924, 2013, 2033, 2059, 2071, 2077], "form": [3, 7, 9, 11, 23, 28, 33, 35, 46, 47, 48, 52, 53, 55, 60, 64, 763, 782, 788, 967, 1097, 1137, 1139, 1181, 1184, 1273, 1313, 1316, 1321, 1332, 1337, 1429, 1430, 1433, 1434, 1446, 1474, 1475, 1478, 1497, 1527, 1543, 1579, 1580, 1617, 1628, 1629, 1632, 1644, 1672, 1685, 1704, 1737, 1797, 1815, 1945, 2012, 2014, 2018, 2036, 2043, 2049, 2057, 2065, 2067, 2070, 2072, 2082, 2087, 2101, 2102, 2106, 2112], "treat": [3, 35, 46, 52, 58, 64, 66, 71, 72, 323, 473, 796, 923, 924, 978, 1051, 1166, 1202, 1273, 1326, 1329, 1331, 1343, 1345, 1355, 1356, 1357, 1358, 1375, 1421, 1470, 1480, 1499, 1531, 1532, 1533, 1534, 1537, 1542, 1556, 1561, 1625, 1716, 1717, 1767, 1771, 1803, 1856, 1871, 1924, 1939, 1940, 1946, 1961, 2016, 2017, 2034, 2036, 2043, 2065, 2067, 2077, 2082, 2085, 2093, 2101, 2104, 2105, 2109, 2112], "distinct": [3, 11, 82, 1166, 1309, 1310, 1354, 1440, 2017, 2049, 2062, 2067, 2069, 2077, 2078, 2100], "workload": [3, 8, 19, 23, 28, 55, 1054, 2046, 2052, 2056, 2060, 2071, 2077, 2097, 2111], "good": [3, 7, 8, 14, 64, 976, 1170, 1195, 1535, 1811, 1891, 2012, 2021, 2033, 2046, 2049, 2052, 2056, 2057, 2059, 2067, 2070, 2072, 2100, 2101, 2102, 2103, 2110], "intrins": [3, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 795, 796, 2073, 2074], "contrast": [3, 35, 37, 52, 763, 1184, 1478, 1797, 2041, 2043, 2050, 2052, 2082, 2108], "adaptive_autorang": 3, "threshold": [3, 24, 64, 1328, 1331, 1487, 1559, 1564, 1693, 1699, 1811, 1875, 2015, 2042, 2046, 2068, 2087], "min_run_tim": 3, "01": [3, 29, 32, 50, 490, 758, 783, 824, 825, 1110, 1152, 1513, 1648, 1649, 1707, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1803, 1809, 1829, 1882, 1884, 1885, 1890, 1891, 2015, 2041, 2056, 2067, 2069, 2108], "max_run_tim": 3, "callback": [3, 24, 37, 50, 63, 64, 1717, 2030, 2049, 2056, 2071, 2077], "similar": [3, 7, 9, 11, 20, 23, 28, 35, 44, 53, 55, 60, 63, 64, 495, 517, 715, 716, 717, 718, 719, 720, 723, 733, 734, 735, 736, 748, 749, 759, 767, 805, 881, 896, 909, 910, 957, 962, 975, 976, 1155, 1156, 1273, 1460, 1461, 1473, 1486, 1489, 1490, 1491, 1527, 1576, 1615, 1758, 1772, 1773, 1798, 1821, 1849, 1856, 1905, 1940, 1946, 1962, 2016, 2017, 2033, 2034, 2036, 2043, 2045, 2046, 2049, 2050, 2054, 2060, 2067, 2070, 2072, 2077, 2078, 2082, 2083, 2088, 2100, 2103, 2104, 2106, 2109, 2113, 2118], "blocked_autorang": 
3, "variablil": 3, "until": [3, 5, 7, 23, 24, 28, 30, 32, 37, 47, 55, 63, 64, 488, 1011, 1012, 1014, 1045, 1163, 1294, 1346, 1386, 1389, 1390, 1633, 1707, 1717, 1800, 1802, 1806, 1946, 1982, 1983, 1996, 2029, 2033, 2041, 2046, 2051, 2052, 2053, 2062, 2065, 2067, 2071, 2077, 2079, 2084, 2101, 2104], "iqr": 3, "smaller": [3, 19, 23, 64, 501, 547, 998, 1497, 1717, 1811, 1826, 1871, 1916, 2046, 2062, 2070, 2099, 2115, 2118], "reach": [3, 7, 8, 9, 23, 24, 28, 29, 37, 47, 52, 56, 1346, 1717, 1800, 1803, 1806, 1807, 2043, 2049, 2059, 2073, 2077, 2102], "At": [3, 5, 6, 7, 15, 17, 19, 23, 33, 1143, 1441, 1442, 1443, 1454, 1455, 1456, 1457, 1458, 1459, 1494, 1495, 1496, 1966, 1970, 2027, 2036, 2045, 2054, 2072, 2077, 2083, 2101, 2102, 2104, 2113], "high": [3, 4, 7, 8, 9, 10, 15, 24, 28, 35, 37, 44, 46, 48, 64, 121, 1446, 1574, 1838, 1839, 1871, 1970, 2015, 2025, 2030, 2033, 2049, 2054, 2055, 2057, 2058, 2059, 2069, 2072, 2073, 2077, 2082, 2087, 2088, 2089, 2093, 2097, 2098, 2101, 2108, 2113, 2116, 2117], "pseudo": [3, 90], "block_siz": 3, "enough_data": 3, "len": [3, 23, 30, 64, 66, 71, 220, 547, 696, 698, 699, 702, 1128, 1132, 1135, 1139, 1142, 1146, 1177, 1277, 1340, 1361, 1373, 1418, 1421, 1463, 1672, 1743, 1745, 1761, 1802, 1809, 1913, 1916, 1922, 1923, 1927, 1971, 1972, 1973, 2014, 2015, 2017, 2049, 2068, 2081, 2082, 2087, 2101, 2112], "small_iqr": 3, "break": [3, 7, 28, 35, 58, 64, 683, 897, 976, 978, 1188, 1273, 1319, 1527, 1856, 1875, 2013, 2018, 2023, 2025, 2048, 2069, 2082, 2091, 2095, 2098, 2100, 2102, 2105, 2107, 2109], "stop": [3, 5, 28, 35, 37, 47, 48, 50, 53, 869, 898, 941, 1047, 1188, 1271, 1346, 1446, 1758, 1811, 1843, 2014, 2017, 2071, 2077, 2081, 2101, 2105], "repetit": [3, 1849, 1946], "statist": [3, 24, 28, 35, 802, 803, 823, 824, 825, 827, 828, 1020, 1057, 1059, 1060, 1061, 1063, 1065, 1066, 1067, 1072, 1073, 1074, 1075, 1086, 1087, 1351, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1567, 2030, 2043, 2051, 2069, 2072, 2075, 2104, 2113], "minimum": [3, 14, 19, 24, 47, 48, 52, 699, 700, 774, 798, 823, 824, 825, 828, 829, 879, 947, 1089, 1156, 1234, 1235, 1236, 1376, 1446, 1485, 1577, 1801, 1802, 1809, 1891, 1910, 1911, 1912, 1913, 1914, 1915, 2015, 2041, 2043, 2054, 2068, 2075, 2085, 2108], "total_tim": 3, "choic": [3, 8, 9, 28, 1289, 1337, 1528, 1537, 1834, 2045, 2067, 2075, 2082, 2100, 2101, 2113], "block": [3, 7, 8, 20, 23, 28, 30, 32, 34, 47, 50, 55, 63, 64, 488, 585, 586, 587, 939, 955, 1011, 1065, 1294, 1346, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1503, 1504, 1505, 1506, 1507, 1508, 1530, 1579, 1627, 1703, 1717, 1910, 1911, 1912, 2014, 2016, 2017, 2030, 2033, 2037, 2043, 2046, 2048, 2052, 2054, 2067, 2072, 2077, 2082, 2094, 2111, 2115, 2117], "qualiti": [3, 7, 24], "balanc": [3, 976], "compet": [3, 2059], "amort": 3, "invoc": [3, 5, 64, 845, 976, 981, 1186, 1276, 1289, 2014, 2017, 2046, 2049, 2056, 2067, 2077, 2079, 2098, 2111], "less": [3, 6, 7, 14, 23, 24, 28, 35, 50, 52, 55, 366, 683, 912, 914, 923, 924, 998, 1061, 1079, 1109, 1153, 1157, 1166, 1298, 1310, 1346, 1362, 1363, 1431, 1487, 1541, 1550, 1559, 1565, 1761, 1847, 1954, 1956, 2012, 2015, 2017, 2036, 2046, 2049, 2052, 2059, 2060, 2068, 2072, 2101, 2104, 2111], "bias": [3, 24, 737, 763, 797, 1441, 1442, 1443, 1478, 1479, 1481, 1489, 1490, 1491, 1497, 1498, 1499, 1542, 1543, 1545, 1567, 1587, 1588, 1716, 2069], "trivial": [3, 37, 40, 738, 966, 1181, 1188, 1731, 1913, 2067, 2079, 2101, 2103], "low": [3, 7, 18, 24, 35, 55, 501, 915, 1198, 1217, 1446, 1817, 1834, 1838, 1839, 
1856, 1929, 2013, 2015, 2030, 2033, 2046, 2059, 2071, 2089, 2101, 2107, 2108, 2114], "digit": [3, 960, 1875, 2012, 2028, 2056, 2060], "microsecond": [3, 2046], "bia": [3, 9, 28, 33, 52, 66, 715, 716, 717, 718, 719, 720, 721, 722, 723, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 741, 742, 743, 744, 745, 746, 752, 754, 755, 756, 757, 759, 763, 764, 767, 768, 775, 776, 777, 784, 795, 1176, 1273, 1283, 1431, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1478, 1479, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1527, 1533, 1543, 1544, 1545, 1571, 1573, 1575, 1588, 1603, 1604, 1608, 1609, 1610, 1611, 1612, 1613, 1634, 1643, 1647, 1650, 1685, 1707, 1717, 1727, 1729, 1731, 1732, 1733, 1747, 1749, 1751, 1766, 1769, 1831, 2015, 2021, 2025, 2027, 2029, 2040, 2049, 2057, 2062, 2065, 2069, 2072, 2074, 2082, 2087, 2108], "period": [3, 9, 32, 47, 945, 954, 1020, 1053, 1066, 1067, 1072, 1086, 1087, 1129, 1231, 1232, 1293, 1813, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2015, 2030, 2059, 2117], "overal": [3, 9, 23, 33, 37, 47, 923, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1574, 2043, 2045, 2046, 2059, 2072, 2073, 2107], "main": [3, 7, 23, 24, 28, 29, 30, 35, 37, 38, 39, 40, 45, 47, 48, 50, 51, 52, 64, 262, 498, 976, 1096, 1097, 1098, 1099, 1100, 1336, 1635, 1908, 1953, 1954, 1955, 1956, 2012, 2014, 2030, 2033, 2035, 2036, 2042, 2043, 2046, 2048, 2049, 2050, 2052, 2053, 2057, 2059, 2063, 2064, 2065, 2072, 2077, 2078, 2087, 2088, 2094, 2095, 2099, 2101, 2104, 2112], "collect_callgrind": 3, "collect_baselin": 3, "retain_out_fil": 3, "callgrindstat": [3, 2013], "tupl": [3, 5, 12, 14, 19, 23, 28, 30, 33, 34, 37, 44, 45, 47, 52, 53, 55, 64, 235, 321, 447, 449, 451, 499, 522, 539, 562, 585, 586, 587, 696, 698, 699, 700, 702, 738, 769, 770, 771, 772, 775, 776, 777, 782, 788, 789, 790, 796, 820, 821, 858, 865, 866, 882, 883, 890, 891, 892, 893, 894, 896, 909, 910, 912, 913, 914, 915, 916, 917, 918, 923, 924, 927, 928, 929, 943, 959, 978, 985, 990, 997, 1021, 1022, 1025, 1036, 1054, 1060, 1088, 1089, 1107, 1110, 1112, 1126, 1128, 1129, 1131, 1132, 1134, 1135, 1136, 1138, 1139, 1141, 1142, 1144, 1146, 1149, 1159, 1164, 1166, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1176, 1177, 1178, 1186, 1188, 1197, 1213, 1217, 1235, 1237, 1273, 1280, 1285, 1289, 1290, 1295, 1304, 1309, 1310, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1326, 1330, 1332, 1333, 1335, 1337, 1340, 1343, 1361, 1363, 1365, 1371, 1373, 1375, 1376, 1379, 1381, 1418, 1421, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1463, 1473, 1474, 1475, 1494, 1495, 1496, 1503, 1504, 1505, 1506, 1507, 1508, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1528, 1533, 1537, 1549, 1550, 1551, 1552, 1553, 1554, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1593, 1594, 1596, 1597, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1628, 1629, 1644, 1658, 1659, 1660, 1670, 1672, 1704, 1705, 1706, 1711, 1717, 1727, 1729, 1737, 1748, 1761, 1767, 1771, 1772, 1776, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1817, 1818, 1827, 1836, 1838, 1840, 1850, 1854, 1855, 1900, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1919, 1922, 1923, 1927, 1928, 1929, 1943, 1944, 1945, 1946, 1947, 1952, 1959, 1960, 1961, 1962, 1963, 1972, 1973, 1977, 1978, 1980, 2010, 2014, 2015, 2018, 2021, 2033, 2036, 2043, 2046, 
2049, 2050, 2057, 2062, 2065, 2067, 2069, 2070, 2072, 2077, 2081, 2082, 2087, 2089, 2093, 2101, 2104, 2112, 2114], "modulo": [3, 35, 1157, 1201, 1847], "determin": [3, 5, 8, 11, 14, 17, 18, 22, 23, 24, 28, 33, 35, 37, 45, 47, 55, 60, 64, 87, 88, 89, 826, 829, 912, 914, 923, 924, 945, 954, 961, 966, 1055, 1111, 1130, 1140, 1165, 1166, 1190, 1191, 1231, 1232, 1235, 1236, 1287, 1307, 1319, 1330, 1333, 1354, 1368, 1470, 1474, 1475, 1497, 1533, 1536, 1543, 1586, 1625, 1628, 1629, 1644, 1678, 1704, 1736, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1767, 1777, 1809, 1837, 1839, 1841, 1868, 1910, 1911, 1912, 1913, 1914, 1915, 1924, 2000, 2011, 2017, 2018, 2035, 2046, 2048, 2049, 2052, 2057, 2065, 2067, 2070, 2072, 2077, 2079, 2082, 2084, 2085, 2087, 2089, 2100, 2103, 2104, 2112, 2113, 2120], "itself": [3, 5, 7, 8, 19, 28, 29, 52, 55, 61, 64, 490, 696, 702, 883, 998, 1171, 1172, 1273, 1285, 1289, 1527, 1561, 1691, 1717, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1902, 2014, 2033, 2049, 2057, 2059, 2061, 2064, 2067, 2070, 2077, 2082, 2086, 2099, 2101, 2103, 2113], "jitter": 3, "interpret": [3, 23, 28, 32, 35, 37, 47, 50, 52, 53, 782, 788, 884, 918, 1064, 1130, 1131, 1132, 1137, 1139, 1140, 1141, 1142, 1163, 1186, 1224, 1236, 1273, 1278, 1285, 1289, 1326, 1346, 1466, 1633, 1644, 1645, 1704, 1868, 1873, 2012, 2016, 2017, 2021, 2033, 2045, 2046, 2067, 2082, 2084, 2086, 2101, 2102], "ideal": [3, 46, 48, 1289, 1798, 2035, 2104], "analysi": [3, 24, 35, 52, 64, 1293, 1817, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2018, 2025, 2057, 2064, 2065, 2093, 2099, 2101, 2102, 2103, 2107], "valgrind": 3, "degrad": [3, 14, 55, 2045, 2048, 2049, 2082], "due": [3, 4, 5, 7, 24, 28, 30, 35, 46, 55, 60, 64, 66, 68, 86, 488, 884, 912, 913, 918, 994, 1286, 1309, 1310, 1337, 1363, 1466, 1480, 1571, 1580, 1644, 1685, 1765, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1929, 2014, 2046, 2054, 2055, 2061, 2065, 2067, 2072, 2073, 2077, 2079, 2104, 2109, 2113, 2115, 2117], "amelior": 3, "suffici": [3, 14, 24, 28, 35, 39, 47, 48, 53, 55, 488, 1784, 1785, 1797, 1929, 2012, 2021, 2082, 2085, 2104], "callgrind_control": 3, "callgrind_annot": 3, "boundari": [3, 33, 64, 782, 788, 960, 1227, 1448, 1449, 1450, 1451, 1452, 1453, 1549, 1550, 1551, 1552, 1553, 1554, 1580, 1583, 1584, 1585, 1644, 1704, 1803, 1809, 2015, 2048, 2070, 2077, 2104], "caller": [3, 37, 47, 64, 858, 1273, 1527, 1575, 1913, 2043, 2046, 2077, 2079], "structur": [3, 5, 9, 12, 23, 24, 28, 33, 36, 37, 40, 48, 52, 53, 55, 64, 66, 67, 71, 75, 896, 909, 910, 942, 976, 990, 1160, 1173, 1178, 1186, 1193, 1194, 1289, 1527, 1587, 1744, 1748, 1780, 1817, 1929, 1977, 2013, 2016, 2018, 2024, 2030, 2036, 2046, 2048, 2049, 2050, 2052, 2059, 2062, 2063, 2065, 2067, 2069, 2070, 2077, 2087, 2088, 2089, 2091, 2093, 2095, 2100, 2101, 2102, 2104, 2105, 2109, 2115], "restrict": [3, 8, 12, 23, 35, 53, 55, 60, 61, 978, 990, 1345, 1368, 1462, 2016, 2017, 2018, 2035, 2036, 2043, 2046, 2054, 2072, 2100], "builtin": [3, 28, 64, 66, 75, 1286, 1345, 1843, 1935, 2014, 2016, 2018, 2077, 2079, 2101, 2104, 2113], "surpris": [3, 8, 55, 2012, 2054, 2060, 2103], "serial": [3, 15, 23, 28, 30, 32, 47, 1273, 1283, 1284, 1345, 1527, 1707, 1859, 2012, 2013, 2015, 2028, 2035, 2043, 2046, 2052, 2056, 2057, 2059, 2065, 2070, 2072, 2077, 2078], "subsequ": [3, 7, 14, 15, 17, 28, 52, 55, 64, 976, 1012, 1273, 1285, 1289, 1454, 1455, 1456, 1457, 1458, 1459, 1527, 1556, 
2046, 2061, 2067, 2077, 2082, 2095, 2098, 2104], "deseri": [3, 30, 1345, 1707, 2012, 2028, 2062, 2077], "globalsbridg": 3, "care": [3, 7, 14, 28, 35, 55, 63, 64, 1130, 1132, 1187, 1188, 1527, 2033, 2045, 2046, 2048, 2049, 2051, 2057, 2059, 2062, 2067, 2077, 2082, 2098, 2103, 2104, 2105], "reli": [3, 9, 14, 23, 24, 28, 33, 37, 40, 55, 64, 152, 897, 923, 1109, 1463, 1798, 2026, 2043, 2045, 2048, 2049, 2052, 2061, 2062, 2082, 2086, 2103, 2104, 2105, 2107], "pickl": [3, 23, 24, 28, 1273, 1345, 1527, 1859, 2012, 2028, 2033, 2062, 2070, 2077, 2115], "transfer": [3, 23, 28, 55, 2027, 2033, 2046, 2055, 2057, 2070, 2077, 2085], "properli": [3, 7, 23, 24, 30, 32, 37, 47, 63, 978, 1130, 1131, 1132, 1140, 1141, 1142, 1375, 1528, 1529, 1537, 1538, 1717, 1966, 2012, 2024, 2049, 2053, 2054, 2057, 2059, 2062, 2065, 2077, 2078, 2085], "profil": [3, 4, 17, 19, 44, 64, 488, 931, 932, 939, 976, 1287, 1710, 1711, 1712, 1713, 1875, 2013, 2052, 2068, 2077, 2094, 2104], "empti": [3, 27, 28, 30, 37, 45, 53, 55, 60, 64, 235, 323, 515, 517, 525, 539, 547, 820, 943, 946, 947, 957, 963, 1109, 1111, 1271, 1274, 1299, 1319, 1320, 1325, 1332, 1355, 1356, 1357, 1358, 1365, 1440, 1462, 1470, 1534, 1606, 1625, 1672, 1765, 1777, 1778, 1913, 1928, 1943, 2011, 2014, 2015, 2016, 2017, 2019, 2027, 2029, 2034, 2035, 2036, 2041, 2044, 2046, 2049, 2061, 2067, 2068, 2070, 2082, 2083, 2088, 2091, 2100, 2101, 2103, 2108, 2111], "drive": [3, 9, 28, 2043], "facil": [3, 25, 1345, 2033], "analyz": [3, 4, 18, 52, 64, 2048, 2049, 2052, 2060, 2113], "manipul": [3, 19, 55, 63, 1277, 2013, 2042, 2051, 2057, 2075, 2112], "1000000": [3, 1781], "mirror": [3, 141, 1717], "semant": [3, 9, 11, 17, 28, 47, 48, 52, 53, 55, 62, 64, 152, 797, 878, 879, 880, 897, 918, 958, 1014, 1065, 1167, 1178, 1186, 1201, 1202, 1277, 1284, 1343, 1440, 1606, 1678, 1724, 1725, 1798, 1962, 1977, 2013, 2016, 2017, 2021, 2024, 2036, 2050, 2057, 2067, 2077, 2082, 2104], "number_per_run": 3, "raw_tim": 3, "task_spec": 3, "serializ": [3, 24, 2014], "consum": [3, 23, 30, 33, 36, 50, 53, 63, 437, 1109, 2033, 2046, 2052, 2059, 2064, 2065, 2067, 2087], "extrapol": 3, "sinc": [3, 7, 23, 24, 28, 30, 34, 35, 40, 50, 52, 55, 64, 317, 490, 536, 750, 788, 796, 811, 812, 966, 1057, 1059, 1061, 1109, 1149, 1150, 1151, 1166, 1270, 1273, 1278, 1284, 1287, 1363, 1439, 1469, 1523, 1524, 1525, 1527, 1580, 1588, 1598, 1633, 1635, 1704, 1707, 1710, 1748, 1767, 1772, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1924, 2014, 2015, 2016, 2017, 2021, 2023, 2027, 2030, 2036, 2042, 2043, 2046, 2049, 2050, 2051, 2052, 2054, 2056, 2057, 2062, 2063, 2064, 2067, 2068, 2070, 2071, 2072, 2076, 2077, 2078, 2079, 2082, 2083, 2085, 2086, 2088, 2089, 2098, 2101, 2104, 2109, 2112, 2113], "properti": [3, 23, 28, 29, 30, 32, 35, 39, 44, 47, 52, 53, 55, 64, 795, 884, 910, 927, 937, 1038, 1125, 1126, 1128, 1130, 1131, 1132, 1140, 1141, 1142, 1273, 1435, 1527, 1561, 1586, 1691, 1707, 1718, 1719, 1720, 1758, 1993, 2013, 2017, 2021, 2030, 2043, 2046, 2049, 2054, 2065, 2069, 2071, 2077, 2082, 2084, 2085, 2088, 2091, 2095, 2100, 2101, 2102, 2103, 2114, 2118], "significant_figur": 3, "figur": [3, 7, 8, 30, 64, 932, 2045, 2048, 2049, 2067, 2079, 2087, 2093, 2101, 2104, 2117], "intend": [3, 30, 47, 52, 64, 894, 904, 907, 909, 1016, 1188, 1293, 1598, 1710, 1711, 1712, 1713, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 2017, 2036, 2041, 2043, 2057, 2070], "interquartil": 3, 
"mitig": [3, 66, 2052, 2084], "tail": [3, 23, 45], "645": 3, "conjunct": [3, 23, 28, 32, 55, 827, 1598, 1633, 1717, 2075, 2077], "trim_sigfig": 3, "human": [3, 13, 33, 45, 52, 1053, 1066, 2041, 2067, 2102, 2114], "raw": [3, 64, 1163, 1332, 2046, 2070, 2071], "built_with_debug_symbol": 3, "baseline_inclusive_stat": 3, "baseline_exclusive_stat": 3, "stmt_inclusive_stat": 3, "stmt_exclusive_stat": 3, "stmt_callgrind_out": 3, "done": [3, 15, 23, 28, 30, 32, 33, 35, 37, 45, 53, 55, 63, 64, 488, 498, 683, 763, 923, 930, 1126, 1128, 1131, 1132, 1134, 1135, 1138, 1139, 1141, 1142, 1144, 1146, 1289, 1363, 1441, 1442, 1443, 1463, 1466, 1478, 1489, 1490, 1491, 1501, 1502, 1509, 1510, 1511, 1512, 1537, 1567, 1573, 1575, 1625, 1717, 1919, 2017, 2027, 2029, 2033, 2043, 2045, 2046, 2048, 2049, 2050, 2051, 2053, 2057, 2060, 2065, 2067, 2069, 2072, 2073, 2077, 2079, 2085, 2099, 2100, 2101, 2103, 2110, 2112], "functioncount": [3, 2013], "stat": [3, 58, 829, 839, 840, 1065, 1073, 1074, 1075, 1567, 1717, 2030, 2046, 2092, 2104, 2113], "as_standard": 3, "strip": [3, 1635, 1717, 2014, 2068, 2082], "prefix": [3, 28, 30, 37, 47, 55, 64, 751, 760, 1273, 1527, 1717, 2012, 2023, 2043, 2067, 2070, 2072, 2092, 2111], "stumbl": 3, "filepath": 3, "dif": 3, "compon": [3, 7, 9, 15, 18, 19, 28, 35, 64, 683, 898, 923, 978, 1129, 1130, 1132, 1140, 1141, 1142, 1313, 1571, 1572, 1573, 1574, 1575, 1707, 1817, 1924, 1975, 1976, 2018, 2023, 2043, 2048, 2049, 2053, 2056, 2057, 2065, 2082, 2099, 2105, 2113], "locat": [3, 9, 14, 28, 30, 33, 35, 44, 89, 156, 256, 488, 515, 683, 958, 960, 1023, 1088, 1089, 1295, 1345, 1371, 1376, 1379, 1413, 1463, 1473, 1562, 1579, 1598, 1633, 1700, 1703, 1717, 1737, 1828, 1863, 1884, 1906, 1909, 1965, 2012, 2014, 2028, 2046, 2053, 2062, 2067, 2070, 2071, 2077, 2079, 2082, 2087, 2092, 2095, 2098, 2103], "someth": [3, 7, 52, 53, 57, 64, 66, 68, 912, 914, 1109, 1188, 1198, 1273, 1289, 1527, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 2019, 2030, 2033, 2034, 2043, 2055, 2063, 2067, 2077, 2099, 2100, 2101, 2104, 2106, 2112, 2113, 2114], "resembl": [3, 15, 2095], "23234231": 3, "first_build_dir": 3, "foo": [3, 14, 28, 44, 45, 52, 63, 64, 66, 74, 75, 827, 858, 863, 976, 1166, 1167, 1176, 1271, 1276, 1281, 1284, 1285, 1288, 1289, 1767, 1966, 1968, 2012, 2014, 2016, 2017, 2021, 2025, 2049, 2057, 2067, 2070, 2089, 2098, 2101, 2104, 2113], "9823794": 3, "bar": [3, 7, 44, 52, 64, 66, 74, 75, 858, 863, 998, 1166, 1276, 1284, 1922, 1923, 1972, 1973, 2012, 2014, 2016, 2028, 2057, 2067, 2070, 2089], "53453": 3, "src": [3, 28, 64, 198, 230, 315, 323, 473, 514, 515, 516, 517, 518, 519, 521, 540, 1100, 1167, 1571, 1574, 1575, 1778, 1860, 1861, 1862, 1866, 1896, 1939, 1965, 2015, 2070, 2108], "function_that_actually_chang": 3, "second_build_dir": 3, "cancel": [3, 1270], "site": [3, 7, 2095], "denois": 3, "explan": [3, 9, 20, 25, 40, 866, 1273, 1527, 2013, 2048, 2049, 2057, 2104, 2110, 2113], "delta": [3, 35, 763, 998, 1478, 1487, 1497, 1559, 1642, 1782, 1922, 1923, 1950, 1972, 1973, 2015, 2041], "inclus": [3, 35, 52, 90, 515, 1192, 1198, 1234, 1236, 1344, 1360, 1366, 1589, 1590, 1838, 1839, 1921, 2076, 2089], "diff": [3, 7, 2014, 2015, 2068], "One": [3, 8, 14, 28, 30, 52, 53, 58, 60, 64, 585, 955, 978, 1167, 1178, 1264, 1580, 1671, 1724, 1725, 1731, 1735, 1798, 1803, 1811, 1838, 1839, 1960, 1977, 2014, 2016, 2017, 2035, 2036, 2044, 2045, 2046, 2049, 2056, 2071, 2079, 2087, 2099, 2100, 2102, 2103, 2111, 2117], "reason": [3, 8, 9, 23, 28, 30, 37, 52, 55, 60, 65, 83, 683, 845, 909, 
910, 1130, 1132, 1273, 1289, 1290, 1309, 1310, 1337, 1374, 1439, 1466, 1527, 1635, 1685, 1737, 1859, 2014, 2016, 2017, 2021, 2023, 2043, 2048, 2050, 2052, 2060, 2062, 2067, 2077, 2085, 2099, 2100, 2101, 2103, 2104, 2107, 2113, 2114], "unit": [3, 11, 14, 35, 37, 48, 55, 64, 763, 764, 1127, 1145, 1435, 1445, 1468, 1471, 1476, 1477, 1478, 1479, 1546, 1547, 1557, 1598, 1622, 1626, 1631, 1632, 1679, 1688, 1741, 1742, 1746, 1749, 1751, 1754, 1840, 1952, 2031, 2043, 2045, 2052, 2054, 2070, 2097, 2098], "next": [3, 23, 28, 35, 47, 53, 55, 64, 562, 763, 1186, 1277, 1427, 1479, 1498, 1545, 1717, 1760, 2033, 2042, 2043, 2045, 2046, 2049, 2050, 2052, 2057, 2059, 2065, 2071, 2077, 2078, 2082, 2085, 2087, 2095, 2098, 2101, 2102, 2104], "logic": [3, 5, 12, 14, 23, 38, 51, 55, 64, 948, 950, 951, 953, 983, 984, 1126, 1128, 1131, 1132, 1134, 1135, 1138, 1139, 1141, 1142, 1144, 1146, 1167, 1208, 1209, 1277, 1355, 1356, 1357, 1358, 1368, 1533, 1717, 1826, 1853, 1937, 2017, 2018, 2046, 2048, 2049, 2050, 2054, 2072, 2101], "question": [3, 10, 23, 64, 1196, 1769, 2013, 2043, 2052, 2094, 2100, 2103], "involv": [3, 5, 7, 9, 11, 23, 28, 52, 55, 58, 60, 64, 82, 1184, 1717, 1871, 2017, 2034, 2036, 2043, 2046, 2048, 2051, 2057, 2067, 2072, 2077, 2078, 2079, 2082, 2100, 2103, 2111], "look": [3, 4, 7, 8, 9, 12, 15, 28, 35, 46, 47, 52, 53, 57, 60, 64, 66, 76, 77, 488, 866, 912, 914, 990, 1186, 1197, 1273, 1368, 1431, 1527, 1624, 1750, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1910, 1911, 1912, 1914, 1915, 2014, 2016, 2021, 2034, 2042, 2043, 2046, 2050, 2056, 2057, 2059, 2063, 2067, 2070, 2072, 2077, 2078, 2093, 2098, 2099, 2100, 2101, 2104, 2105, 2106, 2107, 2110, 2111, 2112, 2113, 2115], "autom": [3, 8, 64, 2014, 2072, 2095, 2104, 2113], "easili": [3, 7, 8, 11, 24, 28, 30, 33, 52, 1167, 1534, 1633, 1672, 1798, 1856, 1966, 2029, 2049, 2050, 2054, 2057, 2062, 2069, 2076, 2077, 2079, 2087, 2104, 2112, 2113], "exclus": [3, 23, 28, 35, 37, 47, 52, 55, 64, 738, 1236, 1717, 1838, 1839, 1842, 2043, 2089], "basi": [3, 9, 10, 35, 683, 1346, 1803, 2046, 2056, 2072, 2077], "thought": [3, 44, 64, 1127, 1129, 1145, 1192, 2101], "path_and_function_nam": 3, "children": [3, 33, 40, 55, 64, 794, 1273, 1527, 2033, 2052, 2057, 2070, 2079], "identifi": [3, 7, 9, 28, 30, 37, 41, 44, 47, 48, 50, 64, 488, 820, 1236, 1345, 1567, 2018, 2021, 2033, 2044, 2056, 2057, 2070, 2077, 2078, 2079, 2087, 2100, 2111], "hot": [3, 35, 1635, 1671, 2054, 2098], "spot": [3, 1724, 1725], "_data": 3, "truncate_row": 3, "_linewidth": 3, "subtract": [3, 315, 566, 869, 1163, 1635, 1910, 1911, 1912, 1914, 1915, 1925, 2015, 2036, 2068, 2082], "index": [3, 15, 19, 23, 24, 28, 30, 33, 35, 45, 52, 64, 193, 210, 283, 314, 315, 316, 317, 318, 319, 321, 323, 324, 473, 474, 514, 515, 516, 517, 518, 519, 520, 521, 698, 699, 820, 858, 881, 896, 898, 909, 910, 941, 960, 1027, 1030, 1088, 1089, 1129, 1170, 1171, 1172, 1176, 1178, 1214, 1245, 1246, 1247, 1248, 1271, 1290, 1295, 1303, 1321, 1339, 1353, 1361, 1363, 1367, 1371, 1374, 1375, 1376, 1379, 1401, 1404, 1413, 1419, 1422, 1423, 1431, 1446, 1462, 1469, 1470, 1528, 1529, 1534, 1537, 1538, 1577, 1579, 1624, 1625, 1671, 1743, 1745, 1752, 1753, 1758, 1771, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1812, 1813, 1828, 1860, 1861, 1862, 1863, 1865, 1866, 1867, 1896, 1907, 1908, 1910, 1911, 1912, 1914, 1915, 1924, 1939, 1960, 1961, 1963, 1964, 1965, 1968, 1977, 1985, 1987, 2013, 2014, 2015, 2017, 2024, 2033, 2034, 2035, 2036, 2043, 2046, 2048, 2050, 2055, 2061, 2068, 2079, 
2081, 2082, 2085, 2086, 2087, 2088, 2089, 2103, 2104, 2108, 2113], "cpython": [3, 52, 64, 2094, 2102], "known": [3, 7, 9, 22, 27, 28, 32, 41, 47, 53, 56, 59, 61, 66, 76, 77, 1173, 1174, 1188, 1289, 1290, 1345, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1497, 1543, 1557, 1559, 1579, 1644, 1688, 1707, 1883, 1884, 1929, 1943, 2013, 2015, 2019, 2021, 2030, 2033, 2036, 2041, 2043, 2045, 2061, 2067, 2069, 2077, 2079, 2083, 2094, 2100, 2105, 2113], "quit": [3, 7, 64, 1724, 1725, 2017, 2049, 2051, 2070, 2077, 2111], "noisi": 3, "higher": [3, 7, 8, 24, 28, 55, 56, 61, 66, 71, 75, 152, 897, 899, 918, 1014, 1054, 1065, 1101, 1168, 1174, 1178, 1182, 1198, 1201, 1234, 1250, 1420, 1462, 1519, 1534, 1685, 1782, 1828, 1929, 1977, 1983, 2013, 2021, 2046, 2049, 2050, 2054, 2056, 2062, 2072, 2073, 2077, 2085, 2100], "filter": [3, 20, 546, 775, 776, 777, 1270, 1293, 1454, 1455, 1456, 1457, 1458, 1459, 1608, 1609, 1610, 1611, 1612, 1613, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1924, 2017, 2070, 2113], "transform": [3, 12, 23, 30, 33, 34, 52, 55, 60, 62, 81, 82, 83, 417, 784, 800, 841, 842, 861, 862, 967, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1166, 1167, 1168, 1172, 1177, 1195, 1270, 1283, 1444, 1481, 1489, 1490, 1491, 1499, 1514, 1556, 1572, 1573, 1574, 1575, 1598, 1604, 1633, 1650, 1670, 1707, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1891, 1924, 2013, 2025, 2050, 2052, 2053, 2065, 2072, 2082, 2087, 2093, 2094, 2095, 2102, 2106], "rather": [3, 8, 9, 14, 28, 37, 40, 50, 52, 55, 64, 66, 75, 782, 788, 936, 966, 1271, 1274, 1345, 1423, 1598, 1633, 1644, 1704, 1732, 1871, 1875, 2014, 2017, 2035, 2036, 2044, 2046, 2049, 2050, 2067, 2070, 2072, 2077, 2082, 2087, 2098, 2099, 2101, 2103, 2104, 2105, 2106], "unicod": [3, 2018], "dictionari": [3, 14, 23, 24, 30, 33, 35, 59, 64, 417, 683, 795, 796, 797, 800, 818, 819, 820, 841, 842, 843, 858, 862, 868, 943, 976, 1065, 1166, 1176, 1181, 1188, 1273, 1274, 1281, 1285, 1289, 1290, 1345, 1346, 1469, 1470, 1527, 1528, 1537, 1624, 1748, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1991, 2016, 2018, 2049, 2057, 2065, 2067, 2069, 2077, 2087, 2092, 2109, 2114, 2115], "lookup": [3, 30, 35, 47, 1469, 1624, 2014, 2018, 2045, 2078, 2107], "map": [3, 14, 28, 30, 35, 37, 44, 45, 47, 48, 52, 53, 55, 60, 61, 64, 66, 75, 82, 687, 738, 795, 796, 800, 802, 817, 818, 820, 842, 843, 858, 859, 860, 861, 862, 868, 889, 957, 1161, 1167, 1178, 1184, 1186, 1188, 1193, 1227, 1236, 1281, 1284, 1345, 1346, 1457, 1458, 1459, 1465, 1466, 1467, 1471, 1473, 1523, 1524, 1525, 1528, 1537, 1619, 1620, 1621, 1626, 1731, 1748, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1828, 1830, 1961, 1962, 1977, 2018, 2029, 2035, 2037, 2043, 2046, 2048, 2049, 2050, 2058, 2063, 2065, 2070, 2072, 2075, 2077, 2078, 2079, 2084, 2089, 2102, 2103, 2112, 2114], "agnost": [3, 16, 47, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1633, 1724, 1725, 2021, 2035], "reliabl": 3, "warrant": 3, "except": [3, 5, 7, 9, 14, 28, 29, 30, 35, 37, 39, 40, 44, 48, 52, 53, 55, 63, 64, 585, 589, 590, 619, 687, 696, 698, 699, 702, 763, 889, 922, 923, 924, 963, 970, 974, 1013, 1023, 1025, 1053, 1066, 1101, 1107, 1109, 1155, 1156, 1176, 1178, 1190, 1237, 1273, 1281, 1289, 1291, 1295, 1329, 1345, 1361, 1371, 1373, 1374, 1376, 1379, 1397, 1418, 1421, 1423, 1478, 1497, 1527, 1543, 1580, 1671, 1710, 1717, 1732, 1758, 1765, 1766, 
1770, 1772, 1825, 1849, 1865, 1868, 1901, 1913, 1920, 1922, 1923, 1927, 1950, 1972, 1973, 1977, 1978, 2013, 2014, 2016, 2017, 2018, 2019, 2021, 2024, 2033, 2036, 2046, 2049, 2052, 2057, 2060, 2063, 2065, 2070, 2077, 2079, 2082, 2084, 2085, 2089, 2099, 2101, 2103, 2111, 2112, 2115, 2117], "filter_fn": 3, "map_fn": 3, "coalesc": [3, 325, 330, 546, 616, 1022, 1909, 1913, 1960, 2015, 2048, 2068, 2082, 2104], "entri": [3, 19, 28, 29, 32, 35, 37, 38, 45, 47, 48, 317, 738, 862, 863, 936, 1051, 1052, 1099, 1150, 1151, 1248, 1273, 1294, 1469, 1470, 1527, 1533, 1624, 1625, 1743, 1744, 1752, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813, 1907, 2014, 2017, 2018, 2024, 2036, 2043, 2048, 2049, 2054, 2057, 2062, 2075, 2082, 2087, 2092, 2099, 2100, 2101, 2102], "color": [3, 1491, 2016, 2017, 2087, 2105], "rowwis": [3, 34], "columnwis": 3, "extend_result": 3, "highlight_warn": 3, "highlight": [3, 53, 733, 734, 2017], "trim_significant_figur": 3, "trim": [3, 945, 954, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1231, 1232, 1270], "h": [4, 10, 14, 33, 35, 489, 490, 744, 745, 746, 763, 968, 969, 1303, 1310, 1313, 1323, 1337, 1429, 1430, 1437, 1438, 1440, 1442, 1443, 1447, 1455, 1456, 1458, 1466, 1467, 1469, 1471, 1478, 1479, 1490, 1491, 1497, 1498, 1499, 1510, 1511, 1521, 1522, 1539, 1540, 1543, 1545, 1562, 1578, 1581, 1582, 1598, 1606, 1633, 1669, 1675, 1676, 1731, 1732, 1766, 1778, 1817, 1928, 1929, 2015, 2034, 2035, 2043, 2046, 2048, 2049, 2051, 2054, 2067, 2086, 2087, 2088, 2095, 2100, 2110], "finit": [4, 35, 923, 924, 1262, 1263, 1309, 1310, 1320, 1321, 1337, 1363, 1413, 1417, 1439, 1928, 2049, 2054, 2089], "natur": [4, 7, 8, 11, 30, 35, 55, 66, 68, 86, 923, 924, 1302, 1307, 1333, 1347, 1349, 1354, 1431, 1575, 1685, 2052, 2054, 2064, 2065, 2082, 2083], "against": [4, 5, 14, 28, 37, 47, 797, 869, 923, 924, 1137, 1138, 1139, 1143, 1144, 1146, 1214, 1264, 1273, 1289, 1290, 1527, 1533, 1779, 2012, 2017, 2070, 2093, 2101, 2110], "cprofil": 4, "mode": [4, 8, 23, 24, 28, 30, 35, 47, 50, 52, 60, 61, 64, 81, 223, 224, 490, 749, 750, 775, 776, 777, 782, 788, 789, 790, 818, 820, 826, 829, 863, 864, 893, 895, 899, 902, 903, 904, 909, 910, 912, 913, 914, 915, 919, 920, 921, 923, 976, 981, 1009, 1042, 1083, 1084, 1113, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1167, 1170, 1171, 1172, 1173, 1176, 1177, 1182, 1187, 1221, 1256, 1257, 1271, 1273, 1274, 1277, 1289, 1291, 1332, 1345, 1375, 1389, 1390, 1410, 1441, 1442, 1443, 1454, 1455, 1456, 1470, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1527, 1544, 1567, 1580, 1581, 1608, 1609, 1610, 1625, 1633, 1644, 1672, 1685, 1704, 1705, 1706, 1718, 1726, 1728, 1732, 1737, 1770, 1803, 1811, 1827, 1870, 1872, 1965, 2013, 2015, 2021, 2027, 2034, 2041, 2046, 2048, 2050, 2051, 2052, 2053, 2057, 2064, 2065, 2068, 2071, 2073, 2074, 2075, 2077, 2088, 2091, 2095, 2098, 2102, 2103, 2104, 2105, 2106, 2108, 2113, 2114], "correct": [4, 6, 7, 24, 28, 29, 30, 33, 35, 47, 52, 207, 208, 211, 223, 557, 582, 605, 606, 617, 625, 904, 905, 909, 924, 998, 1130, 1132, 1140, 1141, 1142, 1193, 1248, 1271, 1274, 1289, 1290, 1309, 1463, 1480, 1492, 1707, 1717, 1758, 1874, 1922, 1923, 1972, 1973, 2014, 2015, 2016, 2021, 2025, 2034, 2035, 2046, 2049, 2054, 2084, 2101, 2108, 2112], "launch": [4, 14, 23, 31, 33, 37, 39, 40, 45, 46, 47, 51, 63, 1012, 
1717, 2013, 2043, 2045, 2046, 2048, 2049, 2077, 2098, 2106], "spent": [4, 28, 937, 1809, 2045, 2057, 2104, 2111, 2113], "appear": [4, 24, 28, 35, 52, 53, 64, 858, 1054, 1109, 1150, 1151, 1178, 1345, 1379, 1381, 1718, 1849, 1877, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1977, 2014, 2017, 2034, 2035, 2049, 2050, 2057, 2067, 2070, 2101, 2104, 2111], "extrem": [4, 28, 1717, 2043, 2067, 2103], "expens": [4, 23, 35, 55, 1731, 2046, 2054, 2056, 2071, 2077, 2088, 2100, 2104, 2107, 2110, 2113], "bound": [4, 15, 24, 28, 52, 55, 483, 798, 960, 971, 1123, 1124, 1181, 1188, 1192, 1197, 1273, 1436, 1437, 1438, 1520, 1521, 1522, 1527, 1546, 1633, 1811, 1842, 1863, 2017, 2018, 2041, 2043, 2052, 2070, 2072, 2083, 2100, 2101, 2103, 2113], "greater": [4, 28, 47, 66, 67, 294, 619, 683, 952, 965, 966, 971, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1216, 1230, 1328, 1393, 1439, 1576, 1633, 1644, 1671, 1685, 1701, 1704, 1732, 1766, 1811, 1965, 2015, 2043, 2046, 2061, 2068, 2082, 2083], "spend": [4, 7, 19, 33, 1346, 2025, 2107], "sens": [4, 35, 47, 64, 1748, 1961, 1962, 2017, 2043, 2052], "respons": [4, 7, 9, 28, 30, 32, 33, 35, 37, 44, 50, 55, 63, 978, 1012, 1190, 1515, 1651, 1717, 1913, 2043, 2046, 2049, 2050, 2057, 2065, 2077, 2103], "Of": [4, 1771, 2012, 2048, 2049, 2094, 2101, 2104], "cours": [4, 19, 64, 2012, 2048, 2049, 2077, 2101, 2104], "realiti": [4, 2052], "complic": [4, 24, 34, 52, 64, 796, 1871, 2021, 2035, 2044, 2070, 2077, 2079, 2101, 2103], "account": [4, 33, 45, 64, 1440, 1717, 2041, 2045, 2052, 2082, 2098], "heavili": [4, 65, 1787, 2045, 2049, 2070], "similarli": [4, 7, 30, 33, 52, 63, 64, 763, 793, 797, 862, 967, 1129, 1273, 1329, 1527, 1575, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755, 1953, 1954, 1955, 1956, 2016, 2043, 2049, 2051, 2054, 2060, 2082, 2103, 2106, 2111], "platform": [4, 8, 9, 14, 28, 39, 40, 44, 1320, 1321, 1332, 1827, 1928, 2027, 2053, 2055, 2060, 2061, 2072], "startup": [4, 19], "slower": [4, 14, 28, 33, 880, 913, 1149, 1150, 1151, 1303, 1310, 1652, 1731, 1782, 2049, 2061, 2069, 2083, 2103], "rerun": [5, 28, 2046], "segment": [5, 1065, 1163, 1559, 2046, 2070, 2104, 2109, 2115], "persist": [5, 21, 30, 52, 55, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 763, 818, 819, 820, 1273, 1478, 1497, 1527, 1543, 2057, 2062, 2063, 2065, 2084, 2098, 2103, 2107], "rng": [5, 23, 1040, 1080, 1387, 1394, 1864, 1994, 2005, 2046, 2061, 2076], "advanc": [5, 15, 23, 24, 37, 1571, 1573, 1575, 1771, 1929, 1968, 2021, 2024, 2035, 2046, 2049, 2055, 2059, 2065, 2086, 2087, 2104], "juggl": 5, "moder": 5, "hit": [5, 8, 14, 1190, 1717, 2012, 2046, 2098, 2101, 2104, 2107, 2113], "preserve_rng_st": 5, "checkpoint_sequenti": [5, 2013], "omit": [5, 14, 28, 48, 152, 1143, 1144, 1146, 1188, 1480, 1541, 2063, 2067, 2077, 2089, 2101, 2105], "exclud": [5, 9, 24, 47, 64, 985, 1236, 1470, 1625, 1674, 1779, 1871, 1953, 1954, 1955, 1956, 2043, 2055, 2062, 2070, 2078, 2102, 2104], "_infer_device_typ": 5, "remain": [5, 8, 19, 35, 47, 64, 1381, 1469, 1470, 1624, 1625, 1707, 1717, 1724, 1725, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755, 1811, 1828, 1850, 2029, 2030, 2033, 2042, 2051, 2057, 2084, 2095, 2102], "consequ": [5, 60, 1337, 1497, 1928, 2017, 2043, 2046, 2059, 2061, 2064], "random": [5, 35, 37, 47, 48, 56, 64, 86, 90, 156, 763, 896, 909, 910, 946, 1040, 1041, 1046, 1055, 1056, 1076, 1077, 1080, 1081, 1171, 1178, 1226, 1249, 1346, 1366, 1387, 1388, 1392, 1394, 1435, 1446, 1471, 1478, 1497, 1546, 1626, 
1677, 1683, 1707, 1732, 1739, 1745, 1746, 1753, 1754, 1773, 1817, 1834, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1864, 1876, 1929, 1968, 1977, 1994, 1995, 1997, 2000, 2001, 2002, 2003, 2005, 2006, 2012, 2013, 2015, 2023, 2041, 2050, 2054, 2060, 2067, 2068, 2087, 2104, 2113], "gradient": [5, 11, 23, 24, 28, 32, 33, 35, 55, 56, 59, 152, 223, 224, 292, 337, 489, 490, 497, 515, 698, 699, 884, 893, 895, 897, 900, 901, 902, 903, 904, 905, 906, 907, 909, 910, 912, 913, 914, 915, 916, 917, 918, 919, 920, 923, 924, 928, 929, 947, 973, 1113, 1166, 1167, 1168, 1169, 1173, 1178, 1214, 1273, 1309, 1310, 1313, 1320, 1321, 1337, 1346, 1363, 1365, 1371, 1374, 1376, 1439, 1446, 1457, 1458, 1459, 1462, 1463, 1469, 1470, 1480, 1494, 1495, 1496, 1527, 1534, 1559, 1616, 1617, 1624, 1625, 1635, 1644, 1652, 1654, 1655, 1656, 1669, 1704, 1705, 1706, 1717, 1718, 1721, 1722, 1723, 1731, 1737, 1770, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1801, 1802, 1815, 1909, 1928, 1977, 2013, 2014, 2015, 2021, 2024, 2035, 2036, 2037, 2041, 2046, 2048, 2049, 2051, 2052, 2054, 2057, 2060, 2068, 2069, 2077, 2078, 2082, 2088, 2111], "among": [5, 19, 23, 24, 28, 32, 35, 47, 1021, 1022, 1025, 1236, 1463, 1773, 2017, 2059, 2101, 2103], "detect": [5, 14, 17, 18, 19, 23, 25, 28, 29, 37, 40, 55, 912, 913, 914, 915, 916, 917, 976, 1167, 1277, 1571, 1572, 1574, 1717, 1965, 2013, 2033, 2036, 2046, 2054, 2060, 2067, 2070, 2077, 2100, 2101, 2104, 2113, 2117], "priorit": [5, 33, 1170, 1171, 1173, 1784, 1785, 1797, 2082], "defaultdevicetyp": 5, "anticip": [5, 2115], "belong": [5, 28, 30, 32, 35, 44, 64, 960, 1014, 1186, 1799, 1983, 2012, 2046, 2069, 2112, 2114], "use_reentr": [5, 1717], "context_fn": 5, "noop_context_fn": 5, "determinism_check": 5, "techniqu": [5, 17, 19, 33, 64, 1464, 1739, 1950, 2037, 2057, 2064, 2065, 2069, 2072, 2097, 2113], "recomput": [5, 34, 904, 907, 909, 1580, 1644, 1769, 2069, 2100], "refer": [5, 7, 14, 23, 24, 28, 29, 30, 32, 35, 39, 42, 43, 47, 48, 55, 56, 63, 81, 82, 83, 86, 88, 256, 737, 738, 759, 767, 795, 796, 797, 822, 823, 824, 827, 828, 829, 863, 877, 882, 897, 958, 1047, 1052, 1112, 1163, 1188, 1221, 1223, 1254, 1261, 1273, 1276, 1285, 1292, 1305, 1310, 1326, 1330, 1343, 1346, 1413, 1446, 1480, 1527, 1577, 1598, 1606, 1615, 1633, 1644, 1645, 1703, 1712, 1713, 1717, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1817, 1834, 1841, 1870, 1871, 1891, 1929, 1965, 2012, 2013, 2015, 2033, 2034, 2036, 2037, 2042, 2043, 2046, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2057, 2059, 2061, 2067, 2069, 2073, 2074, 2077, 2078, 2082, 2084, 2085, 2086, 2087, 2091, 2094, 2095, 2099, 2101, 2102, 2106, 2109, 2113], "potenti": [5, 8, 30, 47, 50, 86, 193, 210, 488, 1189, 1283, 1319, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1685, 1717, 1965, 2016, 2024, 2033, 2043, 2046, 2049, 2052, 2070, 2082, 2085, 2086, 2100, 2104], "silent": [5, 19, 28, 978, 1055, 1056, 1076, 1077, 1248, 1289, 1527, 1722, 1723, 1913, 2000, 2001, 2002, 2003, 2046, 2060, 2067], "consider": [5, 7, 83, 888, 1270, 1470, 1717, 1724, 1725, 2017, 2045], "limit": [5, 8, 9, 12, 20, 23, 33, 53, 55, 56, 61, 83, 990, 1079, 1167, 1287, 1393, 1446, 1469, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1685, 1717, 1748, 1875, 2013, 2017, 2021, 2033, 2035, 2036, 2043, 2046, 2048, 2053, 2057, 2060, 2061, 2062, 2064, 2070, 2072, 2075, 2077, 2078, 2082, 2088, 2089, 2100, 2103, 2104, 2110, 2113], "reentrant": [5, 1717], "soon": [5, 47, 52, 55, 796, 2043, 2072, 
2077, 2079, 2104], "intermedi": [5, 12, 14, 33, 36, 52, 53, 60, 64, 83, 957, 990, 1160, 1167, 1174, 1363, 1470, 1571, 1573, 1575, 1625, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 2014, 2017, 2021, 2046, 2049, 2050, 2051, 2060, 2093, 2100, 2101, 2103], "set_checkpoint_early_stop": 5, "entireti": 5, "graph": [5, 8, 12, 28, 30, 33, 35, 55, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 82, 83, 84, 85, 90, 141, 152, 223, 224, 683, 750, 795, 796, 818, 819, 820, 826, 829, 897, 904, 907, 909, 918, 976, 978, 981, 983, 984, 1009, 1044, 1049, 1054, 1167, 1182, 1186, 1188, 1192, 1198, 1210, 1273, 1276, 1277, 1283, 1285, 1289, 1717, 1779, 1781, 1782, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 2013, 2017, 2021, 2023, 2027, 2037, 2042, 2048, 2049, 2052, 2054, 2057, 2058, 2064, 2065, 2067, 2070, 2071, 2073, 2074, 2075, 2077, 2078, 2079, 2082, 2087, 2093, 2094, 2095, 2099, 2100, 2102, 2103, 2105, 2106, 2107, 2109, 2110], "no_grad": [5, 490, 865, 919, 1113, 1168, 1172, 1177, 1273, 1469, 1527, 1533, 1575, 1718, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 2041, 2043, 2053, 2057, 2072, 2091, 2095], "unmet": 5, "particip": [5, 9, 23, 28, 29, 32, 47, 48, 1571, 1717, 2078, 2100], "wherea": [5, 11, 33, 35, 52, 288, 1329, 1389, 1390, 1418, 1798, 1928, 2017, 2043, 2069, 2089, 2103], "avoid": [5, 8, 9, 19, 20, 23, 28, 30, 33, 35, 45, 48, 55, 64, 66, 74, 75, 193, 210, 450, 784, 869, 897, 957, 976, 1065, 1208, 1209, 1273, 1319, 1345, 1461, 1492, 1493, 1518, 1527, 1536, 1541, 1579, 1615, 1645, 1670, 1677, 1717, 1732, 1798, 1849, 1877, 1924, 1935, 1943, 1961, 1996, 2021, 2030, 2035, 2043, 2045, 2046, 2051, 2054, 2057, 2069, 2077, 2078, 2086, 2087, 2088, 2100, 2101, 2103, 2104, 2105], "know": [5, 7, 8, 14, 17, 19, 28, 29, 33, 36, 52, 64, 488, 904, 905, 909, 913, 930, 978, 1160, 1167, 1188, 1198, 1202, 1284, 1717, 2014, 2017, 2024, 2025, 2029, 2042, 2043, 2046, 2049, 2052, 2054, 2067, 2070, 2077, 2078, 2079, 2082, 2098, 2100, 2101, 2102, 2103, 2104, 2116], "lstm": [5, 766, 1498, 1544, 2015, 2046, 2067, 2068, 2072, 2074, 2075, 2087], "hidden": [5, 763, 1176, 1478, 1479, 1497, 1498, 1543, 1545, 1718, 2046, 2087], "correctli": [5, 19, 23, 28, 33, 47, 55, 488, 1167, 1273, 1289, 1527, 1652, 2014, 2016, 2017, 2021, 2030, 2035, 2042, 2043, 2048, 2049, 2050, 2053, 2059, 2061, 2072, 2077, 2111], "compil": [5, 12, 14, 15, 52, 53, 55, 56, 64, 65, 683, 990, 1035, 1039, 1051, 1052, 1167, 1182, 1187, 1202, 1271, 1273, 1274, 1277, 1278, 1285, 1286, 1287, 1289, 1290, 1291, 1527, 2013, 2014, 2016, 2017, 2018, 2019, 2021, 2023, 2042, 2045, 2048, 2056, 2063, 2066, 2067, 2077, 2088, 2091, 2097, 2098, 2099, 2100, 2101, 2102, 2103, 2106, 2107, 2108, 2109, 2110], "turn": [5, 14, 23, 27, 33, 60, 64, 83, 683, 877, 976, 1261, 1289, 1572, 1574, 1910, 1911, 1912, 1913, 1914, 1915, 1965, 2024, 2043, 2046, 2060, 2061, 2067, 2071, 2072, 2078, 2082, 2100, 2103, 2106, 2111], "open": [5, 8, 9, 11, 14, 23, 35, 47, 52, 56, 692, 933, 960, 1159, 1281, 1345, 1368, 1378, 1650, 2014, 2024, 2033, 2036, 2055, 2062, 2063, 2064, 2067, 2070, 2072, 2077, 2082, 2083, 2084, 2101, 2104, 2111, 2115, 2116], "ran": [5, 18, 55, 2104, 2109, 2113], "sequenti": [5, 23, 32, 52, 53, 55, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 796, 1178, 1273, 1283, 1472, 1527, 1567, 1578, 1724, 1725, 1734, 1748, 1812, 1977, 2014, 2033, 2036, 2046, 2057, 2067, 2069, 2072, 2093, 2101, 2104, 2111, 2113], "divid": [5, 24, 28, 32, 33, 240, 585, 586, 587, 1025, 1104, 1107, 1127, 1145, 1154, 1183, 1236, 1237, 1284, 1439, 1440, 1446, 
1447, 1460, 1486, 1487, 1493, 1518, 1519, 1530, 1531, 1532, 1541, 1559, 1560, 1576, 1577, 1605, 1606, 1616, 1617, 1645, 1669, 1677, 1717, 1928, 1978, 2015, 2068], "func": [5, 28, 58, 60, 64, 66, 68, 866, 896, 899, 904, 906, 907, 908, 909, 910, 912, 913, 914, 915, 916, 917, 919, 923, 924, 1276, 1289, 1290, 1767, 1968, 1977, 2013, 2017, 2021, 2042, 2077, 2079, 2114], "compris": [5, 48, 52, 2052], "chunk": [5, 23, 28, 30, 33, 55, 963, 1025, 1172, 1463, 1717, 1916, 2014, 2015, 2034, 2052, 2068, 2069, 2077, 2082, 2086], "input_var": [5, 1463], "set_checkpoint_debug_en": [5, 2013], "defer": [5, 24, 55, 1188, 2046], "person": [6, 7, 9], "land": [6, 9, 10, 978, 2013, 2021, 2049, 2101, 2110], "six": [6, 82, 1456], "commit": [6, 7, 9, 14, 56, 2012, 2013, 2060, 2061, 2110], "repositori": [6, 9, 59, 64, 2012, 2059, 2072], "submit": [6, 9, 1011, 1012, 1014, 1386, 1982, 1983, 2029, 2046, 2061, 2102, 2104, 2110], "month": [6, 9], "qualifi": [6, 28, 30, 33, 34, 44, 64, 683, 1186, 1273, 1527, 2021, 2023, 2029, 2065, 2070], "pr": [6, 7, 1817, 1929, 2093, 2104], "interest": [6, 7, 9, 83, 2043, 2050, 2054, 2057, 2064, 2101, 2102, 2103, 2107, 2111], "merge_rul": 6, "vote": [6, 9], "decis": [6, 30, 37, 47, 50, 64, 683, 1188, 1289, 2024, 2042, 2100], "criteria": [6, 9, 1346, 1929], "approv": [6, 9], "Not": [6, 48, 82, 1195, 1421, 1575, 1924, 2014, 2016, 2017, 2018, 2046, 2049, 2068, 2072, 2077], "busi": [6, 9, 2107], "dai": [6, 7, 2101, 2103, 2110], "contributor": [6, 7, 8, 9], "seen": [6, 12, 18, 35, 64, 223, 931, 963, 976, 998, 1375, 1457, 1458, 1459, 1559, 1658, 1659, 1660, 1811, 2014, 2024, 2043, 2046, 2067, 2082], "thumb": [6, 28], "wiki": [7, 9, 24, 2064, 2118], "acceler": [7, 24, 942, 1222, 1441, 1442, 1443, 1567, 1781, 2055, 2094, 2098, 2104], "deep": [7, 9, 64, 1441, 1442, 1443, 1468, 1567, 1797, 2013, 2041, 2046, 2057, 2072, 2097, 2100, 2102, 2113], "neural": [7, 8, 15, 64, 1435, 1446, 1464, 1471, 1480, 1492, 1526, 1527, 1534, 1539, 1540, 1555, 1557, 1571, 1573, 1575, 1664, 1688, 1735, 1795, 1803, 1809, 2014, 2016, 2017, 2041, 2046, 2049, 2060, 2064, 2072, 2100], "tape": [7, 2098], "system": [7, 8, 14, 15, 17, 23, 48, 52, 60, 64, 65, 969, 986, 1181, 1227, 1281, 1305, 1316, 1317, 1318, 1319, 1320, 1321, 1323, 1334, 1335, 1336, 1340, 1345, 1364, 1393, 1571, 1573, 1575, 1717, 1735, 1737, 1872, 1952, 2018, 2023, 2029, 2030, 2043, 2045, 2046, 2053, 2056, 2057, 2059, 2063, 2065, 2070, 2077, 2095, 2100, 2101, 2111, 2120], "organ": [7, 2048, 2056, 2070, 2104], "govern": [7, 8, 2013], "technic": [7, 9, 47, 52, 55, 64, 1273, 1527, 2013, 2043, 2051, 2052, 2059, 2070, 2100], "found": [7, 14, 15, 16, 18, 19, 28, 30, 47, 52, 64, 66, 73, 75, 76, 77, 85, 87, 88, 89, 943, 960, 1008, 1088, 1089, 1197, 1273, 1277, 1295, 1371, 1374, 1376, 1379, 1419, 1435, 1445, 1471, 1527, 1555, 1794, 1863, 2012, 2014, 2017, 2021, 2036, 2042, 2049, 2052, 2054, 2057, 2059, 2067, 2070, 2071, 2072, 2077, 2087, 2099, 2103, 2107, 2110, 2112, 2114], "md": [7, 64, 795, 2070], "healthi": [7, 37, 47], "team": [7, 28, 56, 86, 87, 89, 2062], "commun": [7, 8, 9, 29, 30, 32, 33, 37, 47, 50, 55, 488, 683, 1717, 2043, 2048, 2064, 2077, 2078, 2101, 2104, 2117], "project": [7, 30, 33, 1375, 1497, 1533, 1737, 1817, 2012, 2021, 2052, 2055, 2064, 2095, 2112], "ve": [7, 58, 59, 60, 64, 1182, 1277, 1968, 2024, 2033, 2043, 2050, 2078, 2087, 2103, 2104], "come": [7, 8, 9, 23, 33, 35, 36, 44, 47, 52, 56, 60, 488, 976, 1109, 1160, 1167, 1273, 1345, 1465, 1466, 1467, 1471, 1492, 1778, 2027, 2048, 2050, 2056, 2070, 2077, 2079, 2082, 2102, 2109], "peopl": [7, 28, 
2043, 2072, 2106], "scratch": [7, 2043, 2113], "own": [7, 9, 28, 29, 32, 35, 39, 47, 50, 52, 55, 64, 683, 1043, 1143, 1163, 1236, 1273, 1345, 1454, 1455, 1456, 1457, 1458, 1459, 1527, 1734, 1737, 2013, 2017, 2021, 2037, 2042, 2046, 2052, 2070, 2072, 2073, 2077, 2079, 2082, 2101, 2103, 2106], "itch": 7, "acquaint": 7, "tip": [7, 2046, 2101, 2104], "tracker": [7, 1346, 2029], "confirm": [7, 2012, 2014, 2049, 2067, 2077, 2079, 2109], "tend": [7, 914, 1965], "bootcamp": 7, "1hr": 7, "although": [7, 8, 35, 60, 64, 65, 1457, 1458, 1459, 1527, 1535, 1717, 2013, 2017, 2025, 2042, 2049, 2060, 2072, 2111, 2112], "join": [7, 24, 28, 32, 47, 48, 63, 64, 1213, 1717, 2012, 2013, 2018, 2033, 2043, 2048, 2059, 2068, 2088, 2095], "u": [7, 8, 12, 30, 33, 56, 61, 64, 66, 763, 912, 942, 967, 1187, 1210, 1310, 1312, 1316, 1320, 1321, 1337, 1363, 1365, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1478, 1479, 1497, 1498, 1512, 1514, 1543, 1545, 1546, 1567, 1578, 1737, 1817, 1928, 1929, 2014, 2015, 2021, 2024, 2035, 2041, 2043, 2046, 2048, 2049, 2050, 2057, 2070, 2082, 2086, 2087, 2095, 2098, 2100, 2101, 2104, 2107, 2108, 2113, 2116], "dev": [7, 10, 39, 44, 2053, 2102, 2113], "happi": 7, "research": [7, 8, 9, 1717, 2012, 2043, 2054, 2062], "partner": [7, 2094], "speed": [7, 8, 14, 32, 55, 1020, 1109, 1166, 1264, 1277, 1283, 1337, 1431, 1533, 1724, 1725, 1871, 1928, 2025, 2043, 2045, 2046, 2048, 2049, 2051, 2055, 2060, 2072, 2077, 2101, 2102], "design": [7, 9, 23, 30, 35, 44, 47, 53, 56, 59, 60, 61, 923, 924, 1273, 1293, 1440, 1527, 1586, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1940, 2012, 2013, 2030, 2040, 2042, 2046, 2049, 2052, 2055, 2057, 2065, 2070, 2095, 2100, 2101, 2102, 2104, 2106, 2108, 2113], "comment": [7, 64, 1769, 1867, 2017, 2018, 2049, 2085, 2087, 2107], "crack": 7, "usual": [7, 14, 23, 24, 28, 30, 34, 47, 48, 52, 53, 55, 64, 87, 483, 834, 835, 836, 837, 845, 897, 912, 914, 918, 1178, 1465, 1466, 1467, 1471, 1486, 1489, 1490, 1491, 1492, 1707, 1717, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1977, 2014, 2017, 2036, 2042, 2043, 2045, 2046, 2049, 2051, 2056, 2069, 2073, 2075, 2077, 2087, 2097, 2101, 2103, 2107, 2114], "idea": [7, 56, 932, 1109, 1192, 1431, 1717, 2046, 2056, 2067, 2078, 2104], "rfc": [7, 28, 2043, 2072, 2078], "big": [7, 19, 1784, 1785, 1788, 1794, 1795, 1910, 1911, 1912, 1913, 1914, 1915, 2046, 2062, 2069, 2072, 2100, 2103, 2107], "post": [7, 8, 29, 30, 32, 55, 488, 490, 683, 860, 861, 865, 1273, 1527, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 2013, 2021, 2043, 2048, 2049, 2051, 2063, 2082, 2101, 2102, 2103, 2113], "standard": [7, 14, 18, 19, 24, 35, 40, 41, 45, 53, 60, 64, 354, 379, 591, 1129, 1188, 1435, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1492, 1499, 1567, 1573, 1575, 1588, 1773, 1817, 1840, 1885, 1890, 1922, 1923, 2016, 2018, 2036, 2041, 2045, 2046, 2059, 2060, 2064, 2067, 2070, 2083, 2101], "lot": [7, 14, 19, 23, 52, 1198, 2033, 2043, 2046, 2054, 2059, 2070, 2076, 2078, 2087, 2100, 2103, 2107, 2115], "boil": 7, "mostli": [7, 35, 1271, 1717, 2046, 2072, 2082, 2100, 2101, 2114], "evid": 7, "peer": [7, 28, 32, 33, 47, 55, 1018, 1717, 2046, 2077], "paper": [7, 9, 24, 34, 35, 53, 763, 1431, 1435, 1441, 1442, 1443, 1445, 1457, 1458, 1459, 1464, 1465, 1466, 1467, 1468, 1471, 1474, 1475, 1478, 1481, 1484, 1489, 1490, 1491, 1499, 1533, 1539, 1540, 1542, 1546, 1555, 1559, 1567, 1571, 1573, 1575, 1576, 1577, 1628, 1629, 1638, 1716, 1784, 1785, 1794, 1796, 1803, 1809, 2054], "framework": [7, 8, 9, 
35, 44, 56, 63, 65, 763, 1016, 1384, 1478, 1717, 1731, 1797, 1875, 2013, 2025, 2058, 2072, 2078, 2079], "bit": [7, 64, 90, 332, 460, 764, 766, 768, 822, 823, 824, 825, 828, 830, 862, 949, 952, 991, 992, 1163, 1253, 1851, 1852, 1864, 1871, 1892, 2046, 2052, 2057, 2060, 2063, 2072, 2075, 2076, 2082, 2085, 2088, 2101, 2107, 2118], "accept": [7, 9, 23, 28, 30, 34, 52, 55, 56, 57, 59, 61, 515, 805, 893, 894, 895, 896, 897, 904, 906, 909, 910, 918, 966, 1051, 1054, 1162, 1178, 1187, 1236, 1273, 1277, 1462, 1527, 1534, 1556, 1575, 1744, 1758, 1759, 1765, 1780, 1905, 1919, 1977, 2017, 2021, 2035, 2046, 2049, 2050, 2067, 2069, 2077, 2085, 2087, 2104, 2113], "overwhelm": [7, 2077, 2113], "newli": [7, 55, 64, 90, 1123, 1124, 1188, 1469, 1470, 1829, 1830], "publish": [7, 9, 39, 44, 47, 1346, 2013], "ground": [7, 9, 34, 1462, 1616, 2087, 2101], "becom": [7, 8, 9, 12, 23, 28, 34, 35, 64, 292, 763, 880, 1227, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1478, 1497, 1512, 1532, 1543, 1556, 1616, 1633, 1707, 1801, 1900, 1950, 2024, 2027, 2043, 2048, 2049, 2067, 2070, 2071, 2077, 2113, 2115], "refactor": [7, 64, 2063, 2072], "coordin": [7, 28, 30, 35, 37, 585, 588, 888, 1227, 1236, 1375, 1821, 1913, 1954, 1956, 1963, 2043, 2082, 2087, 2104, 2113], "pace": 7, "branch": [7, 12, 52, 64, 66, 69, 71, 74, 75, 990, 1935, 2012, 2016, 2017, 2046, 2100, 2101, 2110], "definit": [7, 8, 23, 28, 29, 35, 43, 52, 53, 64, 87, 967, 968, 969, 998, 1051, 1157, 1198, 1201, 1208, 1209, 1294, 1303, 1304, 1346, 1413, 1492, 1571, 1645, 1692, 1772, 1847, 1950, 2012, 2014, 2016, 2018, 2042, 2043, 2049, 2054, 2070, 2072, 2087, 2089], "fundament": [7, 60, 2016, 2057, 2077, 2082, 2103], "cut": [7, 33, 2104], "guidanc": [7, 9, 15, 57, 488, 1200, 2100], "stage": [7, 18, 19, 24, 30, 32, 44, 55, 63, 2013, 2024, 2036, 2079, 2113], "piec": [7, 11, 2023, 2036, 2078, 2101, 2104, 2115], "advic": [7, 2104], "readi": [7, 14, 33, 63, 865, 866, 945, 954, 1231, 1232, 1717, 2014, 2048, 2077, 2078, 2098], "draft": 7, "convert": [7, 11, 23, 28, 30, 34, 35, 36, 53, 55, 59, 62, 64, 82, 83, 84, 582, 586, 587, 588, 589, 590, 738, 791, 792, 793, 795, 796, 797, 818, 819, 820, 841, 842, 858, 861, 862, 863, 883, 884, 942, 962, 975, 1093, 1129, 1160, 1273, 1343, 1527, 1537, 1538, 1567, 1574, 1707, 1719, 1720, 1724, 1725, 1778, 1779, 1798, 1829, 1830, 1835, 1910, 1911, 1912, 1913, 1914, 1915, 1963, 2014, 2015, 2016, 2017, 2025, 2036, 2037, 2049, 2055, 2064, 2065, 2067, 2072, 2075, 2082, 2087, 2089, 2093, 2094, 2104, 2112, 2113], "press": [7, 64], "button": [7, 2110], "prepend": [7, 14, 23, 28, 32, 64, 231, 1101, 1273, 1368, 1527, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1946, 2015, 2044], "titl": [7, 2068, 2072], "wip": 7, "progress": [7, 32, 37, 48, 51, 81, 488, 1011, 1386, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 1971, 1982, 2012, 2028], "ci": [7, 2013, 2110], "folk": 7, "who": [7, 8, 9, 11, 47, 66, 83, 2070], "regularli": 7, "queue": [7, 37, 50, 2033, 2087], "everyth": [7, 23, 35, 53, 59, 66, 2014, 2033, 2070, 2100, 2104, 2107, 2113], "happen": [7, 9, 28, 30, 32, 34, 35, 37, 40, 47, 52, 55, 60, 64, 609, 794, 817, 909, 910, 1167, 1210, 1567, 1717, 1731, 1800, 1806, 1807, 1813, 1928, 2013, 2030, 2033, 2043, 2046, 2048, 2049, 2050, 2051, 2052, 2058, 2059, 2063, 2067, 2072, 2077, 2084, 2086, 2098, 2101, 2104, 2117], "subsystem": [7, 11, 56, 61, 2013, 2021, 2049], "patch": [7, 57, 1473, 1579, 1966, 2105], "feel": [7, 2036, 2067, 2082, 2104], "ll": [7, 19, 60, 64, 763, 823, 824, 865, 866, 
967, 968, 969, 1054, 1188, 1196, 1303, 1478, 1479, 1497, 1498, 2021, 2024, 2043, 2046, 2049, 2050, 2059, 2067, 2072, 2078, 2099, 2103, 2107, 2111], "round": [7, 23, 28, 66, 68, 75, 510, 668, 669, 802, 805, 823, 869, 994, 996, 1065, 1104, 1130, 1131, 1132, 1136, 1140, 1141, 1142, 1157, 1319, 1332, 1580, 1644, 1827, 1828, 1847, 1895, 2015, 2017, 2034, 2046, 2068, 2072, 2075, 2082, 2083, 2108, 2115], "trip": [7, 64, 1130, 1131, 1132, 1136, 1140, 1141, 1142], "noth": [7, 14, 37, 64, 683, 1005, 1045, 1737, 1784, 1785, 1797, 1812, 1996, 2014, 2016, 2061, 2079], "accompani": [7, 85, 2065], "solut": [7, 8, 19, 60, 969, 1181, 1318, 1319, 1323, 1331, 1334, 1336, 1340, 1439, 1717, 1952, 2014, 2015, 2041, 2042, 2051, 2059, 2072], "think": [7, 9, 11, 64, 66, 488, 1798, 2014, 2016, 2043, 2069, 2070, 2079, 2101, 2103, 2105], "confid": [7, 1724, 1725, 2052, 2087, 2105], "ahead": [7, 52, 2013, 2072, 2094, 2100, 2104], "search": [7, 11, 24, 822, 960, 1484, 1638, 1717, 1779, 1834, 1863, 2014, 2034, 2035, 2067, 2070, 2082, 2099, 2103, 2110, 2115], "repo": [7, 33, 66, 1803, 2012, 2049, 2063], "unabl": [7, 52, 60, 84, 2067, 2069], "reproduc": [7, 23, 60, 315, 323, 517, 519, 947, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1523, 1524, 1525, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1625, 1633, 1644, 1672, 1685, 1704, 1705, 1706, 1707, 1965, 2012, 2013, 2069, 2070, 2104, 2113], "problemat": [7, 23, 64, 930, 2014, 2060, 2073, 2101, 2104], "insight": [7, 18, 81, 2095, 2115], "individu": [7, 9, 14, 23, 24, 28, 30, 32, 55, 64, 85, 244, 683, 795, 841, 842, 967, 1075, 1127, 1145, 1182, 1273, 1446, 1527, 1567, 1717, 2017, 2034, 2042, 2043, 2046, 2049, 2052, 2056, 2060, 2061, 2065, 2067, 2069, 2075, 2077, 2089, 2098, 2106, 2110, 2115], "intent": [7, 30, 45, 47, 55, 60, 1868, 2024, 2070, 2072, 2109], "lock": [7, 23, 28, 32, 35, 2043, 2046, 2059, 2070, 2078, 2115], "strike": 7, "convers": [7, 30, 460, 582, 585, 796, 961, 1258, 1724, 1725, 1758, 2013, 2018, 2049, 2055, 2067, 2072, 2073, 2082, 2100, 2103, 2112], "medium": [7, 55, 1871], "prioriti": [7, 9, 10, 28, 683, 858, 999, 1014, 1983, 2016, 2062, 2117], "entranc": [7, 2046], "great": [7, 12, 52, 2043, 2054, 2098, 2100, 2101], "deal": [7, 8, 23, 37, 50, 52, 82, 1945, 2033, 2051, 2077, 2100, 2103, 2107, 2111], "welcom": [7, 2036, 2064, 2069, 2082], "aim": [7, 52, 83, 2050, 2082, 2094], "rare": [7, 2042, 2067, 2100, 2105, 2114], "typo": 7, "send": [7, 23, 28, 33, 37, 50, 1047, 1707, 1717, 2033, 2048, 2059, 2063, 2068, 2071, 2077, 2078, 2079, 2091, 2093, 2103, 2106], "forum": [7, 9, 2051, 2059], "share": [7, 10, 14, 23, 29, 30, 32, 33, 35, 36, 47, 48, 53, 55, 60, 223, 313, 342, 460, 485, 522, 526, 619, 863, 883, 884, 904, 905, 909, 910, 923, 924, 976, 1009, 1011, 1043, 1047, 1054, 1160, 1161, 1162, 1163, 1244, 1321, 1422, 1423, 1463, 1544, 1717, 1735, 1773, 1845, 1859, 1867, 1919, 1943, 1949, 1964, 2015, 2042, 2043, 2045, 2059, 2062, 2063, 2067, 2077, 2082, 2084, 2086, 2092, 2095, 2098], "resolv": [7, 8, 9, 30, 35, 64, 84, 85, 1188, 1273, 1457, 1458, 1459, 1473, 1524, 1527, 2016, 2017, 2018, 2063, 2070, 2084, 2100, 2114], "challeng": [7, 28, 30, 2078, 2104], "feedback": [7, 18, 19, 24, 55, 56, 1717, 2013, 2082], "direct": [7, 9, 11, 28, 33, 53, 763, 795, 1150, 1151, 1198, 1273, 1478, 1497, 1527, 1543, 1733, 1769, 1796, 1817, 1834, 1855, 2021, 2043, 2046, 2049, 2052, 2057, 2077, 2101, 2112], "yourself": [7, 58, 1009, 1966, 2049, 2057, 2059, 2104, 2114], "problem": [7, 23, 28, 47, 52, 60, 66, 1109, 1184, 1188, 1289, 1319, 1331, 1346, 1462, 1534, 1598, 1867, 2033, 2043, 2046, 
2051, 2054, 2059, 2063, 2072, 2073, 2079, 2085, 2094, 2100, 2101, 2104, 2113], "area": [7, 9, 53, 1644, 2057, 2072, 2083], "appreci": 7, "strive": 7, "respond": [7, 28], "quickli": [7, 8, 24, 47, 2052, 2106], "ey": [7, 35, 193, 210, 968, 969, 1178, 1294, 1303, 1314, 1325, 1328, 1331, 1332, 1339, 1340, 1731, 1827, 1906, 1977, 2015, 2019, 2049, 2068, 2101], "everyon": [7, 37, 47], "touch": [7, 45, 64], "versu": [7, 1187, 1531], "write": [7, 8, 9, 12, 16, 18, 19, 23, 28, 30, 33, 37, 40, 44, 45, 46, 47, 52, 55, 60, 61, 65, 82, 84, 85, 256, 488, 515, 700, 958, 978, 1162, 1167, 1178, 1190, 1250, 1284, 1304, 1315, 1316, 1317, 1321, 1322, 1335, 1703, 1859, 1952, 1965, 1977, 2013, 2016, 2021, 2030, 2035, 2046, 2049, 2050, 2051, 2054, 2065, 2069, 2070, 2072, 2082, 2087, 2094, 2100, 2101, 2104, 2106, 2107, 2113, 2117], "blog": [7, 8, 12, 990, 2021, 2048, 2049, 2072, 2082], "around": [7, 9, 11, 28, 35, 49, 59, 60, 63, 64, 152, 627, 897, 918, 1006, 1009, 1011, 1012, 1014, 1084, 1155, 1156, 1167, 1273, 1386, 1410, 1717, 1856, 1982, 1983, 2008, 2014, 2021, 2033, 2043, 2046, 2067, 2072, 2077, 2098, 2101, 2104], "internet": 7, "grow": [7, 8, 64, 2046, 2082], "market": [7, 9], "benefit": [7, 8, 28, 64, 851, 1724, 1725, 1811, 2033, 2046, 2052, 2072, 2082, 2113], "opinion": [7, 8, 2082], "isn": [7, 19, 23, 64, 460, 1270, 2043, 2046, 2049, 2077, 2089, 2103], "categor": [7, 40, 1635, 2013, 2018, 2073, 2077, 2087, 2112], "aspect": [7, 28, 33, 64, 1544, 2049, 2057], "seem": [7, 1188, 2067], "unusu": [7, 2103], "claim": [7, 1724, 1725, 1809, 2054], "wast": [7, 2046], "someon": [7, 9, 1271, 2035], "end": [7, 8, 9, 19, 23, 24, 28, 33, 35, 40, 44, 58, 64, 363, 364, 540, 763, 787, 800, 823, 824, 869, 932, 940, 945, 993, 1009, 1069, 1070, 1099, 1109, 1148, 1163, 1212, 1227, 1233, 1234, 1270, 1273, 1294, 1299, 1329, 1339, 1341, 1344, 1346, 1360, 1422, 1423, 1438, 1439, 1440, 1448, 1449, 1450, 1460, 1462, 1468, 1472, 1478, 1479, 1482, 1483, 1484, 1485, 1486, 1487, 1493, 1497, 1498, 1513, 1518, 1521, 1522, 1527, 1529, 1534, 1535, 1538, 1543, 1546, 1556, 1559, 1565, 1570, 1577, 1616, 1625, 1637, 1638, 1717, 1731, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1801, 1806, 1811, 1843, 1875, 1878, 1881, 1883, 1896, 1950, 1961, 1962, 1980, 2014, 2015, 2016, 2017, 2033, 2042, 2043, 2046, 2048, 2049, 2051, 2052, 2054, 2060, 2063, 2065, 2067, 2069, 2071, 2072, 2075, 2077, 2083, 2098, 2101, 2103, 2106, 2108, 2113, 2117], "too": [7, 9, 14, 24, 30, 47, 60, 64, 488, 1065, 1166, 1188, 1192, 1319, 1337, 1419, 1446, 1462, 1527, 1617, 1718, 1767, 2017, 2045, 2051, 2054, 2059, 2060, 2061, 2063, 2065, 2070, 2079, 2082, 2100, 2101, 2103, 2113, 2115], "advisori": 7, "fashion": [7, 23, 28, 33, 34, 50, 52, 517, 1351, 1748, 2014], "rough": [7, 9], "consensu": [7, 9], "corpor": [7, 2113], "wrote": [7, 8], "implicitli": [7, 28, 40, 64, 1092, 1187, 1217, 1227, 1289, 1290, 1345, 1436, 1437, 1438, 1520, 1521, 1522, 1868, 1950, 2014, 2016, 2017, 2043, 2049, 2055], "lifetim": [7, 488, 2021, 2077, 2103], "immedi": [7, 8, 9, 19, 28, 30, 47, 48, 55, 63, 488, 1198, 1273, 1276, 1527, 1724, 1725, 2017, 2046, 2052, 2057, 2062, 2069, 2077, 2079, 2100, 2103], "sai": [7, 52, 53, 64, 498, 932, 1170, 1171, 1172, 1187, 1194, 1273, 1527, 2014, 2042, 2043, 2051, 2070, 2078, 2079, 2082, 2098, 2100, 2101, 2107, 2112], "bugfix": 7, "Or": [7, 19, 37, 64, 884, 971, 2055, 2058, 2067, 2082], "motiv": [7, 8, 64, 738, 2052, 2057, 2078, 2113], "ye": [7, 2026, 2067, 2069, 2082, 2104], "knuth": 7, "bewar": 7, "mere": [7, 33, 53], "proven": [7, 1464, 1717], "ok": 
[7, 40, 45, 59, 1192, 1201, 1278, 2079, 2085, 2101], "sometim": [7, 64, 915, 1065, 1279, 1289, 1473, 1579, 1611, 1612, 1613, 1737, 2013, 2017, 2021, 2033, 2043, 2046, 2050, 2051, 2052, 2057, 2059, 2070, 2085, 2088, 2100, 2101, 2103, 2107, 2111, 2114], "obvious": [7, 2103], "broken": [7, 23, 1065, 2067, 2070], "contrari": [7, 33, 2045], "accident": 7, "put": [7, 9, 23, 28, 37, 59, 63, 64, 321, 1129, 1345, 1481, 1803, 2012, 2015, 2033, 2046, 2052, 2059, 2068, 2070, 2078, 2079, 2101, 2103, 2105], "difficulti": [7, 28, 2041], "nonlinearli": 7, "sign": [7, 35, 343, 531, 888, 993, 1097, 1145, 1157, 1163, 1307, 1332, 1333, 1354, 1519, 1796, 1847, 1878, 1892, 2015, 2034, 2068, 2075, 2082, 2085, 2088, 2089, 2101, 2108], "split": [7, 11, 23, 28, 64, 619, 775, 776, 777, 863, 865, 866, 963, 970, 1065, 1107, 1237, 1463, 1477, 1478, 1497, 1533, 1543, 1608, 1609, 1610, 1611, 1612, 1613, 1632, 1944, 1978, 2013, 2014, 2015, 2034, 2046, 2052, 2068, 2070, 2072, 2077, 2082, 2086, 2101, 2111, 2115], "shippabl": 7, "complet": [7, 14, 19, 23, 28, 30, 37, 39, 45, 47, 50, 60, 63, 488, 683, 865, 1007, 1011, 1012, 1014, 1085, 1166, 1167, 1276, 1285, 1287, 1292, 1304, 1332, 1386, 1389, 1390, 1395, 1411, 1598, 1717, 1734, 1767, 1827, 1871, 1982, 1983, 2009, 2013, 2016, 2017, 2018, 2023, 2033, 2042, 2043, 2046, 2050, 2055, 2061, 2070, 2077, 2078, 2097, 2105, 2110, 2111], "subtl": [7, 1489, 1490, 1491, 2049, 2103, 2104, 2113], "nuanc": [7, 2064], "extra": [7, 14, 23, 24, 28, 32, 35, 38, 52, 55, 64, 1069, 1109, 1167, 1178, 1187, 1273, 1281, 1284, 1319, 1345, 1527, 1534, 1542, 1716, 1731, 1924, 1977, 2016, 2035, 2043, 2045, 2048, 2049, 2051, 2056, 2065, 2070, 2071, 2082, 2088, 2089, 2091, 2103, 2107, 2117], "understand": [7, 8, 9, 28, 30, 37, 39, 52, 55, 66, 82, 83, 1167, 1196, 1331, 2013, 2041, 2043, 2046, 2055, 2062, 2065, 2071, 2087, 2094, 2099, 2100, 2101, 2102, 2103, 2104, 2106, 2113], "hack": 7, "answer": [7, 10, 64, 827, 1198, 1440, 1750, 2103, 2104], "regress": [7, 1439, 2061, 2098, 2109], "scrutini": 7, "undertak": 7, "rest": [7, 23, 24, 48, 55, 64, 796, 817, 983, 984, 1163, 1339, 1340, 1944, 2035, 2057, 2061, 2070, 2072, 2077, 2082, 2101, 2104, 2105, 2107, 2111], "stai": [7, 32, 141, 1463, 1724, 1725, 2046, 2059, 2072, 2077, 2082], "chanc": [7, 30, 35, 2049, 2103, 2104, 2107], "unrel": [7, 986, 1176, 2042, 2049, 2070], "aid": [7, 64, 2043, 2113], "troubleshoot": [7, 28, 2094, 2104], "mayb": [7, 1174, 1968, 2103], "rebas": 7, "latest": [7, 14, 28, 32, 35, 52, 901, 932, 1744, 2012, 2049, 2055, 2065, 2067], "statu": [7, 9, 37, 1363, 2013, 2018, 2033, 2072, 2095], "hud": 7, "risk": [7, 8, 30, 52, 55, 1734, 1737], "anyth": [7, 36, 40, 55, 63, 826, 829, 1198, 1285, 1798, 2014, 2029, 2036, 2052, 2057, 2070, 2100, 2101, 2116, 2117], "configur": [7, 13, 20, 23, 24, 28, 33, 34, 37, 39, 44, 47, 48, 50, 55, 791, 792, 795, 796, 800, 817, 818, 819, 820, 826, 829, 841, 842, 843, 847, 849, 852, 862, 863, 864, 865, 866, 976, 1065, 1717, 1758, 1872, 1965, 2013, 2023, 2030, 2046, 2048, 2049, 2061, 2063, 2070, 2075, 2077, 2087, 2089, 2100, 2104, 2110, 2111, 2113], "riski": 7, "had": [7, 64, 932, 1188, 1210, 1289, 1707, 1946, 2043, 2050, 2100, 2101, 2109], "beforehand": [7, 63, 2104], "hei": 7, "my": [7, 23, 1463, 2056, 2067, 2072], "member": [7, 9, 23, 28, 37, 47, 48, 64, 1273, 1480, 1527, 1630, 2014, 2016, 2017, 2030, 2051, 2071, 2077, 2089, 2091], "sphinx": 7, "folder": [7, 9, 14, 23, 30, 45, 64, 1779, 2012, 2053, 2065, 2087, 2101, 2106, 2113], "tree": [7, 40, 59, 976, 1527, 1571, 1779, 2032, 2067, 2070, 2071, 2079, 2094], 
"master": [7, 28, 51, 795, 1167, 1571, 2012, 2077], "doxygen": 7, "special": [7, 11, 33, 40, 50, 53, 60, 64, 66, 71, 74, 75, 744, 745, 746, 826, 829, 923, 976, 1102, 1116, 1117, 1118, 1120, 1121, 1130, 1132, 1181, 1187, 1241, 1242, 1243, 1287, 1345, 1359, 1416, 1463, 1575, 1718, 1719, 1720, 1771, 1798, 1822, 1879, 1890, 1894, 1981, 2013, 2018, 2023, 2035, 2046, 2049, 2052, 2054, 2056, 2070, 2072, 2084, 2086, 2087, 2095, 2100, 2102, 2103, 2109], "server": [7, 23, 28, 48, 1277, 2046, 2070, 2072, 2077, 2095, 2097], "cppdoc": [7, 15], "cpp": [7, 14, 28, 2048, 2095, 2100], "accomplish": [7, 30, 2057, 2104], "holist": 7, "concept": [7, 52, 53, 60, 64, 2021, 2049, 2050, 2057, 2085, 2105], "galleri": 7, "restructur": [7, 2070], "text": [7, 23, 35, 53, 155, 156, 175, 610, 619, 684, 686, 687, 688, 689, 690, 691, 692, 693, 694, 697, 701, 748, 749, 759, 761, 763, 767, 771, 772, 773, 775, 776, 777, 783, 787, 823, 824, 869, 885, 886, 887, 888, 889, 944, 945, 946, 949, 952, 954, 956, 965, 968, 969, 971, 992, 993, 995, 996, 998, 1104, 1123, 1124, 1153, 1154, 1158, 1159, 1216, 1230, 1231, 1232, 1233, 1240, 1262, 1270, 1294, 1297, 1298, 1299, 1302, 1303, 1310, 1313, 1319, 1323, 1328, 1331, 1337, 1344, 1353, 1360, 1361, 1362, 1363, 1412, 1413, 1424, 1425, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1465, 1466, 1467, 1468, 1469, 1471, 1472, 1473, 1474, 1475, 1476, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1512, 1513, 1514, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1530, 1531, 1532, 1533, 1534, 1535, 1539, 1540, 1541, 1542, 1543, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1557, 1558, 1559, 1560, 1561, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1600, 1601, 1602, 1604, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1616, 1619, 1620, 1621, 1626, 1629, 1631, 1632, 1633, 1637, 1638, 1648, 1653, 1658, 1659, 1660, 1664, 1669, 1672, 1677, 1678, 1680, 1685, 1686, 1687, 1688, 1691, 1692, 1693, 1695, 1696, 1697, 1716, 1723, 1731, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1803, 1817, 1820, 1821, 1824, 1827, 1840, 1843, 1846, 1858, 1878, 1880, 1881, 1893, 1895, 1906, 1907, 1917, 1924, 1925, 1928, 1929, 1941, 1942, 1980, 2036, 2041, 2054, 2067, 2070, 2075, 2083, 2087, 2089, 2101, 2106], "rst": 7, "trigger": [7, 9, 28, 47, 48, 63, 86, 87, 89, 223, 932, 1083, 1875, 2042, 2043, 2046, 2048, 2049, 2056, 2062, 2077, 2098, 2101, 2102, 2104, 2109, 2110, 2113, 2117], "rebuild": [7, 24], "entir": [7, 14, 23, 28, 30, 33, 34, 37, 48, 64, 515, 700, 923, 924, 976, 1167, 1227, 1465, 1466, 1467, 1471, 1489, 1490, 1491, 1499, 1619, 1620, 1621, 1626, 1717, 1733, 1743, 1745, 1769, 2017, 2036, 2043, 2046, 2049, 2050, 2051, 2056, 2057, 2067, 2070, 2072, 2077, 2079, 2082, 2099, 2100, 2102, 2103, 2104, 2111, 2113, 2115], "circleci": 7, "shard": [7, 23, 30, 32, 33, 34, 55, 1717, 2052, 2104], "worker": [7, 14, 23, 24, 28, 29, 32, 33, 37, 39, 40, 41, 47, 50, 51, 55, 1717, 2017, 2061, 2071, 2077, 2078, 2079], "40": [7, 1227, 1346, 1444, 1489, 1731, 1732, 1733, 1756, 1757, 1766, 1769, 1940, 2102], "minut": [7, 10, 28, 2087], "netlifi": 7, "noplot": 7, "render": [7, 28, 1192, 2087, 2115], "notebook": 7, "rebuilt": [7, 24, 32], "deploi": [7, 12, 37, 47, 2013, 2056, 2062, 2070, 2097, 2101], 
"action": [7, 28, 35, 37, 41, 64, 1009, 1043, 2046, 2065, 2070, 2071, 2079, 2115], "document": [8, 9, 10, 17, 23, 28, 53, 55, 56, 64, 683, 733, 734, 735, 736, 748, 749, 759, 764, 765, 766, 767, 768, 877, 878, 879, 880, 961, 1011, 1012, 1014, 1065, 1109, 1217, 1221, 1223, 1254, 1261, 1273, 1389, 1390, 1420, 1431, 1469, 1470, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1527, 1712, 1713, 1734, 1743, 1752, 1772, 1826, 1853, 1870, 1965, 2012, 2014, 2016, 2017, 2026, 2031, 2033, 2034, 2035, 2037, 2042, 2046, 2049, 2051, 2057, 2061, 2062, 2067, 2069, 2070, 2072, 2073, 2074, 2077, 2082, 2086, 2091, 2094, 2099, 2105, 2109, 2112, 2116], "develop": [8, 9, 10, 14, 24, 28, 33, 64, 2016, 2017, 2024, 2031, 2043, 2049, 2053, 2056, 2057, 2061, 2067, 2070, 2072, 2073, 2077, 2082, 2101, 2106, 2108, 2113, 2116], "meant": [8, 29, 30, 47, 50, 55, 1758, 2042, 2077], "rule": [8, 9, 14, 28, 35, 64, 66, 74, 75, 87, 88, 89, 152, 864, 897, 960, 961, 1092, 1323, 1334, 1441, 1442, 1443, 1489, 1490, 1491, 1567, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1863, 1950, 2014, 2016, 2034, 2036, 2043, 2044, 2049, 2052, 2054, 2085, 2089, 2103, 2104], "concern": [8, 23, 55, 2033, 2046, 2067, 2111], "disagr": 8, "contribut": [8, 9, 30, 936, 1235, 1236, 1462, 1469, 1470, 1534, 1616, 1624, 1625, 1669, 1717, 2013, 2036, 2049, 2050, 2067], "maintainership": [8, 9], "escal": [8, 9], "hacker": 8, "poster": 8, "amaz": 8, "ml": [8, 2071], "obsess": 8, "soumith": [8, 10], "goe": [8, 64, 1163, 1435, 1784, 1785, 1797, 2021, 2051, 2065, 2101, 2104, 2105], "depth": [8, 9, 20, 25, 33, 52, 83, 782, 788, 1070, 1071, 1287, 1438, 1456, 1459, 1496, 1522, 1580, 1644, 1704, 2024, 2048, 2057, 2071, 2102, 2115], "primari": [8, 9, 28, 64, 83, 1283, 2018, 2024, 2025, 2082, 2103], "goal": [8, 44, 59, 64, 1375, 2024, 2043, 2048, 2054, 2079, 2106], "secondari": 8, "abil": [8, 12, 52, 1859, 2024, 2056, 2065, 2070, 2101], "flexibl": [8, 24, 33, 52, 55, 59, 1193, 1330, 2024, 2046, 2049, 2057, 2072, 2101], "abstract": [8, 16, 23, 24, 28, 29, 30, 35, 37, 45, 47, 50, 925, 926, 927, 928, 929, 932, 1194, 1739, 2017, 2021, 2025, 2048, 2072, 2077, 2098], "critic": [8, 28, 47, 683, 1732, 1766, 2030, 2045, 2046, 2104, 2105], "futur": [8, 9, 12, 24, 28, 30, 37, 47, 48, 52, 60, 64, 292, 323, 488, 515, 519, 559, 683, 690, 796, 818, 819, 820, 829, 865, 904, 907, 909, 910, 966, 967, 976, 990, 991, 992, 1008, 1009, 1011, 1012, 1014, 1043, 1044, 1051, 1052, 1054, 1218, 1270, 1273, 1276, 1283, 1284, 1287, 1292, 1304, 1315, 1317, 1318, 1319, 1322, 1335, 1363, 1364, 1375, 1386, 1466, 1527, 1635, 1658, 1659, 1660, 1708, 1717, 1722, 1766, 1767, 1772, 1787, 1827, 1843, 1924, 1928, 1952, 1982, 1983, 2013, 2014, 2015, 2016, 2017, 2018, 2023, 2024, 2027, 2030, 2035, 2036, 2045, 2046, 2049, 2062, 2065, 2067, 2069, 2070, 2071, 2072, 2074, 2077, 2082, 2084, 2088, 2089, 2091, 2093, 2100, 2104, 2108, 2115], "concret": [8, 11, 30, 35, 51, 53, 59, 64, 488, 788, 803, 827, 845, 976, 1186, 1188, 1205, 1206, 1580, 1672, 1704, 2014, 2017, 2021, 2046, 2049, 2059, 2100, 2101, 2103], "manner": [8, 28, 33, 37, 90, 515, 912, 914, 2035, 2037, 2044, 2069], "jump": [8, 562, 2085], "regim": 8, "ei": 8, "tradeoff": [8, 24, 52, 488, 2072, 2078, 2104, 2109], "temptat": 8, "impos": [8, 50, 61, 1867, 2033, 2042, 2085], "strict": [8, 30, 53, 912, 913, 914, 915, 916, 917, 1166, 1173, 1192, 1273, 1289, 1290, 1527, 1767, 2070, 2087, 2089], "upfront": [8, 2100], "simplifi": [8, 24, 63, 1181, 1188, 1212, 1281, 1517, 
1732, 1798, 2024, 2043, 2049, 2054, 2057, 2069, 2072, 2078, 2100], "worth": [8, 9, 23, 24, 33, 51, 53, 1167, 2012, 2084, 2086, 2107], "friction": 8, "compel": 8, "narrow": [8, 1198, 1343, 1423, 1798, 1885, 2015, 2017, 2034, 2068, 2074, 2086, 2113], "subproblem": 8, "fragment": [8, 1033, 1065, 1990, 2021, 2046, 2113], "ecosystem": [8, 2056, 2058, 2101], "incomprehens": 8, "seamlessli": [8, 2036], "softwar": [8, 19, 1309, 1310, 1337, 1965, 2046, 2055, 2094, 2101], "experi": [8, 9, 11, 24, 52, 61, 1178, 1557, 1688, 1717, 1977, 2024, 2049, 2087, 2102], "rich": [8, 33, 2017], "denomin": [8, 690, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1542, 1567, 1716, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1831], "subset": [8, 23, 28, 32, 48, 53, 1285, 1798, 2014, 2016, 2017, 2049, 2064, 2067, 2075, 2108], "borrow": 8, "zen": 8, "implicit": [8, 52, 53, 88, 771, 772, 775, 776, 777, 1217, 1227, 1436, 1437, 1438, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1520, 1521, 1522, 1579, 1600, 1601, 1602, 1608, 1609, 1610, 1658, 1659, 1660, 1815, 2015, 2017, 2018, 2052, 2067, 2070, 2086, 2108], "concis": [8, 40, 2077], "interchang": [8, 35, 1795, 1949, 2016, 2055, 2065, 2094, 2105], "everydai": 8, "english": 8, "movement": [8, 2086, 2104], "worri": [8, 28, 2077], "placement": [8, 34, 37, 863, 1707, 2072, 2077, 2104], "favor": [8, 28, 788, 789, 790, 967, 1078, 1082, 1273, 1328, 1331, 1363, 1364, 1409, 1485, 1527, 1581, 1582, 1704, 1705, 1706, 1708, 1721, 1827, 1928, 1952, 2007], "practition": 8, "debugg": [8, 28, 1278, 2054, 2104, 2113], "plug": 8, "ir": [8, 52, 64, 81, 83, 763, 1276, 1277, 1478, 1479, 1779, 2014, 2017, 2065, 2067, 2094, 2100, 2101, 2103, 2104, 2113], "classic": [8, 2043], "sort": [8, 32, 64, 592, 611, 880, 881, 904, 906, 909, 1109, 1198, 1234, 1396, 1431, 1759, 1760, 1771, 1828, 1863, 1947, 1961, 2015, 2017, 2049, 2051, 2068, 2082, 2100, 2104, 2108], "distribut": [8, 11, 23, 24, 29, 37, 38, 39, 40, 41, 44, 45, 46, 47, 49, 50, 51, 55, 63, 175, 260, 288, 379, 456, 483, 610, 683, 698, 699, 822, 939, 946, 1083, 1413, 1431, 1435, 1440, 1462, 1463, 1464, 1465, 1466, 1467, 1471, 1476, 1480, 1492, 1541, 1546, 1567, 1616, 1618, 1619, 1620, 1621, 1626, 1630, 1631, 1635, 1645, 1677, 1717, 1773, 1820, 1834, 1836, 1837, 1838, 1839, 1840, 1841, 2013, 2017, 2035, 2036, 2041, 2042, 2046, 2071, 2073, 2079, 2083, 2087, 2091], "tldr": 8, "resourc": [8, 14, 23, 28, 37, 43, 47, 52, 64, 2017, 2033, 2059, 2082, 2104, 2110, 2116, 2117], "characterist": [8, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1929, 2050, 2057], "uniformli": [8, 35, 1838, 1839, 2089], "leak": [8, 897, 904, 907, 909, 2017, 2033, 2043], "smart": [8, 2049, 2070, 2077], "anywai": [8, 2043, 2052], "obviou": [8, 1181, 2051, 2079, 2103], "extens": [8, 14, 18, 28, 30, 35, 52, 65, 1345, 1346, 1859, 1878, 2013, 2024, 2036, 2042, 2049, 2062, 2065, 2070, 2082, 2103, 2104, 2113], "unavoid": 8, "latenc": [8, 30, 44, 2025, 2045, 2046, 2104, 2110], "caveat": [8, 1707, 1765, 2024, 2033, 2046, 2057, 2062, 2099, 2109, 2113], "valuabl": 8, "certainli": [8, 2024], "heterogen": [8, 2016], "cluster": [8, 28, 30, 33, 46, 47, 48, 55, 1431, 2087, 2117], "focus": [8, 2016, 2017, 2049, 2113], "beaten": 8, "space": [8, 9, 23, 35, 775, 776, 777, 1092, 1127, 1129, 1131, 1132, 1138, 1145, 1227, 1236, 1289, 1290, 1344, 1360, 1431, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1492, 1503, 1504, 1505, 1506, 1507, 1508, 1521, 1522, 1579, 1608, 1609, 1610, 1611, 1612, 1613, 1645, 1950, 2015, 2025, 
2037, 2043, 2054, 2057, 2062, 2084, 2115], "innov": 8, "growth": 8, "ultim": [8, 9, 14, 40, 50, 52, 2094], "evidenc": 8, "began": 8, "bind": [8, 14, 28, 53, 64, 1045, 1186, 1188, 1197, 1210, 1211, 2017, 2018, 2021, 2049, 2070, 2101], "monolith": 8, "deepli": 8, "integr": [8, 23, 41, 53, 59, 156, 946, 948, 949, 950, 951, 952, 953, 998, 1092, 1235, 1273, 1297, 1341, 1342, 1422, 1527, 1834, 1846, 1950, 2036, 2049, 2056, 2057, 2066, 2069, 2072, 2083, 2085, 2089, 2099], "numpi": [8, 23, 60, 450, 495, 696, 697, 702, 881, 883, 884, 904, 905, 909, 960, 1104, 1106, 1107, 1109, 1127, 1148, 1149, 1150, 1151, 1155, 1156, 1161, 1162, 1178, 1237, 1304, 1305, 1315, 1326, 1328, 1329, 1330, 1331, 1332, 1337, 1338, 1341, 1343, 1375, 1380, 1821, 1846, 1849, 1856, 1868, 1875, 1910, 1911, 1912, 1913, 1914, 1915, 1928, 1930, 1931, 1940, 1943, 1944, 1946, 1974, 1977, 1978, 2021, 2024, 2044, 2049, 2050, 2060, 2061, 2062, 2063, 2070, 2085, 2086, 2087, 2088, 2089, 2101, 2118], "scipi": [8, 964, 1320, 1321, 1674, 1821, 1883, 2070, 2080, 2083, 2087], "scikit": [8, 1644], "favorit": 8, "cython": 8, "numba": 8, "reinvent": 8, "wheel": [8, 2063], "year": [8, 2082], "rewrot": 8, "frontend": [8, 15, 33, 53, 59, 64, 978, 2104], "familiar": [8, 15, 53, 64, 1009, 1043, 1064, 2014, 2043, 2050, 2070, 2072, 2078, 2079, 2101, 2102, 2104, 2112], "perhap": [8, 1188, 2050, 2101], "importantli": 8, "huge": [8, 1929, 2030, 2101], "scientif": [8, 1875], "pareto": [8, 2013], "close": [8, 15, 28, 47, 64, 66, 69, 74, 1047, 1184, 1262, 1309, 1310, 1336, 1337, 1363, 1440, 1559, 1577, 1606, 1794, 1928, 1952, 2030, 2043, 2049, 2060, 2067, 2070, 2072, 2077, 2087, 2089], "curv": [8, 2087], "torchdynamo": [8, 52, 66, 77, 683, 976, 984, 2013, 2023, 2067, 2094, 2099, 2100, 2101, 2102, 2104, 2106], "frame": [8, 52, 64, 976, 1270, 1924, 1967, 2051, 2064, 2065, 2087, 2094, 2100, 2102, 2104, 2105, 2113, 2115], "torch_funct": [8, 2049], "torch_dispatch": 8, "torch": [8, 9, 11, 15, 18, 19, 21, 24, 25, 29, 32, 37, 38, 39, 40, 41, 44, 45, 47, 49, 50, 51, 55, 58, 66, 67, 68, 69, 70, 71, 72, 73, 83, 84, 86, 88, 90, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 898, 899, 904, 909, 910, 919, 920, 921, 931, 932, 939, 940, 941, 942, 943, 999, 1000, 1009, 1010, 1011, 1012, 1014, 1015, 1030, 1032, 1043, 1113, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 1271, 1272, 1273, 1288, 1386, 1398, 1399, 1400, 1404, 1428, 1429, 1430, 1431, 1432, 1433, 1434, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470, 1471, 1472, 1473, 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 
1485, 1486, 1487, 1488, 1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, 1586, 1588, 1707, 1716, 1717, 1718, 1719, 1720, 1734, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1758, 1770, 1778, 1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1834, 1903, 1982, 1983, 1984, 1987, 1989, 2013, 2014, 2015, 2016, 2025, 2026, 2034, 2035, 2042, 2043, 2044, 2045, 2048, 2051, 2052, 2054, 2056, 2057, 2058, 2059, 2061, 2063, 2065, 2066, 2072, 2073, 2074, 2077, 2078, 2079, 2086, 2098, 2099, 2100, 2101, 2102, 2103, 2106, 2109, 2112, 2113, 2115], "fx": [8, 12, 52, 53, 84, 683, 750, 818, 819, 820, 821, 844, 864, 978, 1167, 1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 2013, 2021, 2064, 2065, 2073, 2074, 2099, 2100, 2101, 2102, 2103, 2104, 2112, 2113], "tracer": [8, 33, 52, 1289, 2067, 2087, 2093, 2101], "functorch": [8, 53, 56, 61, 66, 69, 74, 75, 78, 2099], "anchor": [8, 64, 1576, 1577, 1701, 1702, 2015, 2112], "hackabl": 8, "todai": [8, 28, 53, 56, 61, 976, 2029, 2072, 2100], "evolv": [8, 2048, 2065, 2066], "ai": [8, 2067, 2071, 2083], "adopt": [9, 28, 723, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 748, 749, 759, 764, 765, 766, 767, 768, 1724, 1725, 2065], "hierarch": [9, 2087], "pull": [9, 10, 15, 64, 152, 897, 1202, 2067, 2070, 2103, 2116], "request": [9, 10, 11, 28, 30, 50, 64, 692, 865, 883, 884, 932, 1065, 1346, 1368, 1378, 1650, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 2012, 2029, 2036, 2042, 2043, 2045, 2046, 2067, 2070, 2077, 2078, 2079, 2082, 2083, 2100, 2115, 2116], "overseen": 9, "catch": [9, 40, 2014, 2043, 2050, 2054, 2101, 2105], "maker": 9, "strong": 9, "toward": [9, 66, 1104, 1235, 1236, 1346, 1427, 1717, 1806, 1847, 1855, 1856, 2052], "philosophi": [9, 59, 2013], "beyond": [9, 24, 1393, 1462, 1784, 1785, 1794, 1854, 2051, 2057, 2065, 2100], "encourag": [9, 37, 2036, 2072, 2082, 2089], "propos": [9, 1781, 1801, 1802, 2024, 2054, 2069, 2078], "review": [9, 10, 24, 2070, 2106], "willing": 9, "invest": 9, "anyon": 9, "ownership": [9, 64], "codebas": [9, 2049], "strictli": [9, 23, 152, 193, 210, 488, 897, 960, 1236, 1273, 1277, 1527, 2043, 2083], "compani": 9, "bui": 9, "addition": [9, 23, 24, 28, 30, 35, 37, 52, 55, 152, 515, 619, 897, 932, 1167, 1171, 1172, 1277, 1337, 1431, 1489, 1490, 1491, 1905, 2051, 2091, 2115], "membership": [9, 37, 46, 47, 2018], "That": [9, 12, 17, 19, 37, 45, 48, 57, 64, 990, 1270, 1344, 1360, 1965, 2021, 2027, 2049, 2050, 2051, 2052, 2062, 2070, 2077, 2101, 2104], "seat": 9, "reserv": [9, 44, 1065, 2018, 2046, 2057, 2115], "emploi": [9, 30, 1797, 2057, 2070, 2095], "directori": [9, 14, 28, 30, 33, 37, 45, 83, 1779, 2012, 2028, 2053, 2056, 2065, 2070, 2071, 2087, 2095, 2104, 2113], "procedur": [9, 35, 863, 1289, 1290, 1346, 1903, 2077, 2113], "disput": 9, "made": [9, 20, 28, 30, 32, 48, 52, 64, 83, 904, 907, 
909, 924, 1273, 1527, 1573, 1575, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 1844, 2016, 2024, 2043, 2063, 2070, 2087, 2089, 2098, 2100, 2101, 2113, 2114], "public": [9, 10, 65, 1167, 2049, 2114], "relev": [9, 29, 33, 37, 47, 63, 683, 1337, 1499, 2018, 2042, 2043, 2065, 2070, 2072, 2095, 2104, 2105, 2106, 2116], "resolut": [9, 1539, 1540, 1598, 1633, 1675, 1676, 1950, 2018, 2070, 2118], "conclus": 9, "publicli": [9, 2114], "vision": [9, 1283, 1462, 1616, 2012, 2013, 2053, 2106], "roadmap": [9, 10], "parti": [9, 47, 2012, 2013, 2014, 2021, 2046, 2050, 2053, 2057, 2070, 2101], "triag": [9, 10], "meet": [9, 10, 12, 30, 46, 990, 1303, 2046, 2104], "Their": [9, 1101, 2049, 2103], "articul": 9, "cohes": 9, "negoti": [9, 2077], "contenti": 9, "broad": [9, 2057, 2116], "stakehold": 9, "power": [9, 12, 46, 1072, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1152, 1297, 1327, 1341, 1494, 1495, 1496, 1654, 1655, 1656, 1732, 1766, 1781, 1810, 1824, 1848, 1971, 2018, 2046, 2082], "veto": 9, "admin": 9, "amongst": 9, "commonli": [9, 35, 55, 1375, 2017, 2019, 2042, 2043, 2052, 2069, 2072, 2085, 2094, 2097, 2100], "merit": 9, "demonstr": [9, 33, 48, 52, 64, 66, 74, 75, 1446, 2014, 2057, 2062, 2065, 2077, 2095, 2102, 2104, 2106, 2111], "expertis": 9, "align": [9, 24, 52, 83, 763, 782, 788, 823, 829, 976, 1109, 1227, 1329, 1438, 1446, 1462, 1478, 1492, 1497, 1521, 1522, 1543, 1580, 1588, 1589, 1590, 1616, 1617, 1644, 1645, 1685, 1704, 1731, 1772, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1801, 1950, 2013, 2034, 2043, 2054, 2062, 2067, 2075, 2083], "continu": [9, 23, 28, 30, 35, 51, 55, 64, 404, 610, 822, 913, 1227, 1309, 1310, 1337, 1445, 1446, 1717, 1811, 2018, 2024, 2043, 2052, 2062, 2065, 2071, 2091, 2099, 2101, 2102, 2104], "light": [9, 2087], "mainten": [9, 47, 48], "emeritu": [9, 10], "inact": [9, 1065, 2046, 2115], "contact": [9, 28], "item": [9, 23, 30, 66, 76, 77, 585, 586, 587, 591, 795, 796, 797, 817, 818, 819, 820, 943, 1462, 1528, 1537, 1737, 1875, 2012, 2014, 2015, 2016, 2017, 2025, 2034, 2046, 2067, 2068, 2070, 2077, 2087, 2088, 2098, 2100, 2101, 2103, 2108], "nomine": 9, "breadth": [9, 33], "testimoni": 9, "posit": [9, 23, 30, 33, 35, 52, 64, 90, 404, 473, 547, 738, 861, 865, 867, 945, 947, 954, 967, 968, 969, 1125, 1126, 1127, 1128, 1129, 1137, 1139, 1143, 1145, 1163, 1173, 1174, 1177, 1186, 1188, 1231, 1232, 1265, 1268, 1273, 1276, 1303, 1304, 1315, 1317, 1328, 1331, 1332, 1346, 1366, 1381, 1417, 1422, 1423, 1440, 1454, 1455, 1456, 1461, 1462, 1463, 1470, 1480, 1487, 1527, 1533, 1564, 1571, 1576, 1577, 1598, 1606, 1625, 1630, 1633, 1701, 1702, 1710, 1711, 1854, 1856, 1883, 1906, 1908, 1953, 1954, 1955, 1956, 1964, 2012, 2015, 2029, 2034, 2035, 2049, 2050, 2054, 2065, 2067, 2068, 2076, 2082, 2083, 2089, 2104, 2114, 2118], "neg": [9, 11, 14, 20, 23, 28, 35, 50, 64, 90, 443, 445, 460, 664, 665, 701, 758, 783, 947, 952, 993, 996, 1014, 1030, 1036, 1037, 1065, 1078, 1125, 1126, 1127, 1128, 1129, 1130, 1144, 1145, 1146, 1152, 1163, 1192, 1198, 1263, 1265, 1267, 1327, 1354, 1366, 1389, 1390, 1404, 1413, 1417, 1422, 1423, 1431, 1440, 1448, 1449, 1450, 1471, 1480, 1513, 1520, 1521, 1522, 1530, 1534, 1536, 1541, 1559, 1576, 1577, 1626, 1630, 1633, 1635, 1644, 1648, 1658, 1659, 1660, 1669, 1677, 1701, 1702, 1704, 1820, 1821, 1852, 1856, 1890, 1892, 1895, 1907, 1945, 1953, 1954, 1955, 1956, 1963, 1964, 1983, 1987, 1991, 1992, 2004, 2014, 2015, 2034, 2036, 2041, 2043, 2049, 2050, 
2067, 2068, 2076, 2082, 2083, 2100, 2104, 2108], "interact": [9, 15, 23, 28, 64, 65, 866, 919, 1012, 1045, 2018, 2021, 2049, 2070, 2072, 2087, 2098, 2101, 2104, 2107, 2115], "final": [9, 19, 28, 30, 32, 35, 37, 47, 53, 55, 59, 60, 689, 692, 693, 763, 944, 962, 975, 1109, 1148, 1167, 1201, 1227, 1345, 1363, 1368, 1447, 1478, 1480, 1497, 1543, 1556, 1707, 1906, 1950, 2014, 2016, 2017, 2018, 2034, 2049, 2052, 2054, 2057, 2060, 2062, 2064, 2065, 2067, 2069, 2070, 2078, 2079, 2098, 2100, 2102, 2113], "declin": 9, "conflict": [9, 24, 30, 48, 52, 2070], "lack": [9, 11, 33, 943, 1309, 1310, 1337], "unfit": 9, "conduct": [9, 1717, 1817, 1929, 2077, 2095], "filial": 9, "romant": 9, "strength": 9, "candid": [9, 936, 2070], "letter": [9, 1109], "befit": 9, "candidaci": 9, "behind": [9, 28, 2013, 2062, 2078, 2103], "75": [9, 1235, 1515, 1633, 1651, 1781, 1828, 2015, 2083], "unforeseen": 9, "circumst": [9, 47, 976, 1198, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1685, 2046], "perman": [9, 64, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755, 2043, 2079], "unavail": [9, 2016], "rank": [9, 23, 24, 28, 29, 30, 32, 33, 35, 37, 45, 47, 48, 51, 53, 55, 66, 75, 1319, 1320, 1321, 1328, 1363, 1519, 1567, 1717, 1737, 1817, 1929, 2015, 2048, 2052, 2059, 2067, 2077, 2078, 2100, 2113, 2117], "elect": 9, "invit": [9, 2012], "convinc": 9, "approach": [9, 24, 28, 64, 89, 964, 1178, 1440, 1871, 1903, 1977, 2014, 2033, 2036, 2046, 2049, 2054, 2064, 2065, 2072, 2077, 2104], "interview": 9, "talk": [9, 50, 2021, 2056], "gather": [9, 28, 30, 33, 47, 55, 515, 1940, 1965, 2015, 2049, 2051, 2052, 2056, 2068, 2070, 2108], "read": [9, 12, 18, 19, 23, 28, 30, 36, 37, 40, 47, 48, 53, 55, 59, 64, 450, 930, 990, 1067, 1092, 1160, 1162, 1163, 1190, 1281, 1345, 1811, 2013, 2021, 2034, 2035, 2043, 2044, 2046, 2048, 2050, 2056, 2060, 2064, 2070, 2072, 2077, 2098, 2101, 2102, 2104, 2106], "attend": [9, 738, 1533, 1571], "confer": [9, 1480], "pipelin": [9, 30, 64, 2013, 2077, 2104], "world": [9, 28, 32, 37, 45, 47, 48, 55, 1567, 1717, 2043, 2070, 2072, 2100, 2101, 2102], "cover": [9, 53, 56, 64, 66, 1083, 1109, 1520, 1658, 1659, 1660, 2017, 2019, 2035, 2043, 2049, 2052, 2054, 2056, 2057, 2077, 2102, 2107, 2111, 2116], "push": [9, 39, 1069, 1071, 1178, 1339, 1977, 2071, 2101], "codeown": 9, "notifi": [9, 29, 48, 2079], "expert": 9, "strongli": [9, 28, 37, 48, 1465, 1466, 1467, 1471, 1924, 2012], "failur": [9, 28, 35, 37, 39, 40, 44, 45, 46, 47, 51, 52, 683, 923, 924, 976, 978, 1167, 1188, 1289, 1290, 1393, 2017, 2023, 2033, 2052, 2065, 2077, 2079, 2089, 2101, 2104, 2113], "revert": [9, 35, 55, 1564, 1693, 2079], "substanti": [9, 24, 2046, 2098], "syntact": [9, 40, 64], "incompat": [9, 14, 60, 957, 1270, 1724, 1725, 2044, 2070], "establish": [9, 19, 28, 47, 1811, 2043], "seri": [9, 33, 64, 1446, 1489, 2053, 2061, 2067, 2098, 2109, 2114], "lf": 9, "llc": 9, "guidelin": [9, 1748, 2059, 2064, 2070, 2072, 2073], "trademark": 9, "www": [9, 1446, 1577, 2087], "lfproject": 9, "acknowledg": [9, 28, 2013, 2079, 2082], "copyright": [9, 2113], "holder": 9, "independ": [9, 23, 28, 47, 52, 55, 63, 155, 156, 782, 788, 912, 913, 914, 915, 916, 917, 1014, 1227, 1236, 1332, 1464, 1465, 1466, 1467, 1471, 1619, 1620, 1621, 1626, 1643, 1644, 1704, 1733, 1769, 1827, 1983, 2013, 2014, 2042, 2043, 2046, 2062, 2070, 2111], "authorship": 9, "claus": [9, 2051], "bsd": 9, "licens": 9, "opensourc": 9, "outbound": 9, "inbound": 9, "q": [9, 24, 35, 433, 481, 761, 802, 1217, 1294, 1310, 1313, 1332, 1374, 1420, 1492, 1533, 
1587, 1731, 1815, 1817, 1827, 1828, 1929, 2014, 2015, 2054, 2061, 2067, 2082, 2083, 2103], "partli": [9, 2017], "domain": [9, 35, 40, 687, 829, 889, 1123, 1124, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1137, 1139, 1140, 1141, 1142, 1227, 2036, 2067, 2072], "absolut": [9, 11, 14, 64, 94, 684, 697, 923, 924, 1157, 1262, 1302, 1307, 1328, 1331, 1333, 1354, 1487, 1493, 1559, 1642, 1646, 1689, 1742, 1743, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1772, 1780, 1821, 1847, 1878, 2015, 2025, 2057, 2067, 2068, 2083, 2088, 2089, 2095], "health": 9, "success": [9, 30, 35, 37, 44, 64, 288, 1304, 1315, 1317, 1363, 1744, 1910, 1911, 1912, 1914, 1915, 2046, 2057, 2067, 2082, 2113], "am": 9, "grant": 9, "purchas": 9, "board": 9, "driven": [9, 2067], "clearli": [9, 1188, 2024, 2070], "sponsorship": 9, "foundat": [9, 2065], "ptf": 9, "minor": [9, 1036, 1304, 1320, 2082, 2101], "committ": 9, "prior": [9, 19, 24, 28, 30, 37, 81, 83, 904, 908, 909, 910, 981, 1346, 1573, 1575, 1685, 1748, 2044, 2046, 2049, 2069, 2072, 2098, 2104], "walkthrough": [9, 2042], "facebook": 9, "infrastructur": [9, 39, 2070, 2100], "employe": 9, "expand": [9, 35, 257, 495, 882, 904, 908, 909, 910, 923, 924, 959, 1326, 1327, 1375, 1533, 1578, 1678, 1960, 2012, 2015, 2023, 2034, 2035, 2044, 2046, 2049, 2050, 2065, 2067, 2068, 2070, 2086, 2108], "deliv": [9, 2071], "offici": [9, 28, 89, 1431, 2053, 2072], "showcas": [9, 12, 34, 1523, 2046, 2059], "whenev": [9, 52, 884, 1731, 1732, 2030, 2033, 2049, 2075, 2078, 2079, 2103, 2113, 2114], "fix": [10, 18, 23, 28, 35, 37, 47, 52, 57, 60, 64, 84, 85, 264, 798, 804, 932, 1167, 1287, 1319, 1469, 1470, 1546, 1624, 1625, 1644, 1717, 2014, 2015, 2030, 2041, 2042, 2046, 2051, 2052, 2059, 2063, 2065, 2067, 2068, 2069, 2098, 2101, 2104, 2111, 2113], "plu": [10, 14, 30, 912, 1163, 1320, 2052, 2082], "quarterli": 10, "chintala": 10, "edward": 10, "yang": [10, 1346], "ezyang": [10, 2021, 2086], "greg": 10, "chanan": 10, "gchanan": 10, "dmytro": 10, "dzhulgakov": 10, "nikita": 10, "shulga": 10, "malfet": 10, "joel": [10, 1817, 1929], "schlosser": 10, "jbschlosser": 10, "alban": 10, "desmaison": 10, "alband": 10, "sam": 10, "gross": 10, "colesburi": 10, "adam": [10, 29, 30, 32, 35, 55, 932, 1785, 1786, 1788, 1794, 1798, 2069], "paszk": 10, "apaszk": 10, "ilqar": 10, "ramazanli": 10, "iramazanli": 10, "vincent": 10, "quennevil": 10, "belair": 10, "vincentqb": 10, "jeffrei": 10, "wan": 10, "soulitz": 10, "elia": 10, "ellison": 10, "eellison": 10, "michael": [10, 2113], "suo": 10, "yanan": 10, "cao": 10, "gmagogsfm": 10, "jame": 10, "reed": 10, "jamesr66a": 10, "jason": [10, 2102], "ansel": [10, 2102], "jansel": 10, "jiong": 10, "gong": 10, "jgong5": 10, "zach": 10, "devito": 10, "zdevito": 10, "fritz": 10, "obermey": 10, "fritzo": 10, "neeraj": 10, "pradhan": 10, "neerajprad": 10, "alican": 10, "bozkurt": 10, "alicanb": 10, "vishwak": 10, "srinivasan": 10, "vishwakftw": 10, "shen": 10, "li": [10, 12, 1420, 1828, 2046, 2101, 2104], "mrshenli": 10, "pritam": 10, "damania": 10, "pritamdamania87": 10, "yanli": 10, "zhao": 10, "zhaojuanmao": 10, "rohan": 10, "varma": 10, "wanchao": 10, "liang": 10, "wanchaol": 10, "junji": 10, "wang": [10, 35], "fduwjj": 10, "howard": 10, "huang": 10, "tristan": 10, "rice": 10, "d4l3k": 10, "alisson": 10, "azzolini": 10, "aazzolini": 10, "ke": 10, "wen": 10, "kwen2501": 10, "kiuk": 10, "chung": 10, "kiukchung": 10, "pieter": 10, "noordhui": 10, "pietern": 10, "mingzh": 10, "mingzhe09088": 10, "omkar": 10, "salpekar": 10, "osalpekar": 10, "simon": 10, "ssnl": 10, "vitali": 10, 
"fedyunin": 10, "vitalyfedyunin": 10, "mike": 10, "ruberri": 10, "mruberri": 10, "mario": 10, "lezcano": 10, "ivan": 10, "yashchuk": 10, "ivanyashchuk": 10, "pearu": 10, "peterson": 10, "vedeneev": 10, "nikitav": 10, "christian": 10, "puhrsch": 10, "cpuhrsch": 10, "andrew": [10, 1346], "amjam": 10, "driss": 10, "guessou": 10, "drisspg": 10, "mikayla": 10, "gawarecki": 10, "mikaylagawarecki": 10, "natalia": 10, "gimelshein": 10, "ngimel": 10, "georg": 10, "qi": 10, "peter": 10, "bell": 10, "peterbell10": 10, "mingfei": 10, "ma": 10, "mingfeima": 10, "xiaob": 10, "zhang": 10, "xiaobingsup": 10, "xiaoqiang": 10, "zheng": 10, "xq": 10, "ilia": 10, "cherniavskii": 10, "cher": 10, "bai": 10, "bddppq": 10, "yinghai": 10, "jianhui": 10, "piotr": 10, "bialecki": 10, "ptrblck": 10, "sarofeen": 10, "csarofeen": 10, "tulloch": 10, "ajtulloch": 10, "alex": 10, "jann": 10, "jjsjann123": 10, "peng": 10, "sun": 10, "sunway513": 10, "jithun": 10, "nair": 10, "jithunnair": 10, "jeff": 10, "daili": 10, "jeffdaili": 10, "eli": 10, "uriega": 10, "seemether": 10, "mikei": 10, "dagits": 10, "zain": 10, "rizvi": 10, "zainrizvi": 10, "nirav": 10, "mehta": 10, "mehtanirav": 10, "andrei": 10, "talman": 10, "atalman": 10, "zhuoji": 10, "zhou": 10, "zhouzhuoji": 10, "karl": 10, "ostmo": 10, "kostmo": 10, "adnan": 10, "aziz": 10, "adnanaziz": 10, "ck": 10, "luk": 10, "ckluk": 10, "taylor": [10, 1227], "robi": 10, "robieta": 10, "xu": [10, 55], "xuzhao9": 10, "geeta": 10, "chauhan": 10, "chauhang": 10, "victor": 10, "bittorf": 10, "bitfort": 10, "gisl": 10, "dankel": 10, "gdankel": 10, "Will": [10, 28, 33, 55, 64, 978, 1186, 1913, 2016, 2035, 2067, 2098, 2109], "feng": 10, "yf225": 10, "brian": 10, "hirsh": 10, "bdhirsh": [10, 1968], "sebastian": 10, "messmer": 10, "smessmer": 10, "aaron": 10, "bockov": 10, "abock": 10, "bowen": 10, "bao": [10, 2110], "bowenbao": 10, "thiago": 10, "crepaldi": 10, "thiagocrepaldi": 10, "gari": 10, "miguel": 10, "garymm": 10, "lara": 10, "haidar": 10, "hdr": 10, "fang": 10, "houseroad": 10, "negin": 10, "raoof": 10, "neginraoof": 10, "spandan": 10, "tiwari": 10, "spandantiwari": 10, "david": [10, 1431], "reiss": 10, "dreiss": 10, "raziel": 10, "guevara": 10, "linbin": 10, "yu": 10, "linbinyu": 10, "kobzarev": 10, "ivankobzarev": 10, "tao": 10, "xta0": 10, "vasilii": 10, "kuznetsov": 10, "vkuzo": 10, "jerri": 10, "jerryzh168": [10, 796], "supriya": 10, "rao": 10, "supriyar": 10, "zafar": 10, "takhirov": 10, "raghuraman": 10, "krishnamoorthi": 10, "raghuramank100": 10, "guoliang": 10, "hua": 10, "nbcsm": 10, "teng": 10, "gao": 10, "gaoteng": 10, "git": [10, 2053, 2104, 2113], "johnson": 10, "peterjc123": [10, 2063], "kulin": 10, "seth": 10, "kulinseth": 10, "ramin": 10, "azarmehr": 10, "razarmehr": 10, "alfredo": 10, "mendoza": 10, "avmgithub": 10, "sunita": 10, "nadamp": 10, "snadamp": 10, "svetlana": 10, "karslioglu": 10, "svekar": 10, "jack": 10, "jackcaog": 10, "daniel": [10, 35], "sohn": 10, "jysohn23": 10, "cain": 10, "zcain117": 10, "gregori": 10, "ail": 10, "ailzhang": 10, "libenzi": 10, "dlibenzi": 10, "suhan": 10, "asuhan": 10, "manoj": 10, "mycpuorg": 10, "vamshi": 10, "dantu": 10, "vdantu": 10, "dhanasekar": 10, "karuppasami": 10, "dhanainm": 10, "francisco": 10, "massa": 10, "fmassa": 10, "vasili": 10, "vrynioti": 10, "datumbox": 10, "nicola": 10, "hug": [10, 2013], "nicolashug": 10, "yosua": 10, "maranatha": 10, "yosuamichael": 10, "joao": 10, "gome": 10, "jdsgome": 10, "philip": 10, "meier": 10, "pmeier": 10, "fomin": 10, "vfdev": 10, "nayef": 10, "ahm": 10, "nayef211": 10, 
"parmeet": 10, "singh": 10, "bhatia": 10, "guanheng": 10, "zhangguanheng66": 10, "moto": 10, "hira": 10, "mthrok": 10, "hwang": 10, "hwangjeff": 10, "carolin": 10, "chen": 10, "carolineechen": 10, "xiaohui": 10, "zhaoheng": 10, "ni": 10, "nateanl": 10, "qb": 10, "ivchenko": 10, "divchenko": 10, "colin": 10, "colin2328": 10, "wenlei": 10, "xie": 10, "wenleix": 10, "express": [11, 12, 30, 53, 56, 60, 61, 64, 66, 74, 75, 76, 77, 321, 990, 1181, 1184, 1188, 1195, 1199, 1200, 1201, 1202, 1212, 1271, 1274, 1769, 1924, 2018, 2027, 2036, 2043, 2049, 2072, 2100, 2101, 2104, 2113, 2114], "bj": 11, "j": [11, 28, 35, 315, 317, 323, 515, 517, 519, 695, 902, 912, 914, 987, 1109, 1125, 1126, 1133, 1139, 1143, 1144, 1214, 1270, 1273, 1309, 1310, 1321, 1337, 1346, 1353, 1361, 1363, 1368, 1465, 1466, 1467, 1471, 1527, 1530, 1619, 1620, 1621, 1626, 1821, 1828, 1907, 1924, 1928, 1974, 2016, 2041, 2043, 2049, 2054, 2062, 2087], "imaginari": [11, 313, 923, 987, 994, 1130, 1132, 1140, 1141, 1142, 1244, 1263, 1265, 1266, 1269, 1840, 1924, 1975, 1976, 2018, 2043, 2054, 2089], "satisfi": [11, 21, 24, 35, 53, 64, 619, 697, 763, 797, 913, 923, 924, 960, 1125, 1126, 1128, 1130, 1132, 1140, 1141, 1142, 1157, 1187, 1287, 1313, 1339, 1340, 1431, 1446, 1473, 1478, 1497, 1543, 1748, 1765, 1847, 1863, 1903, 1924, 2017, 2021, 2043, 2046, 2052, 2054, 2069, 2071, 2082, 2085, 2102], "equat": [11, 969, 1109, 1181, 1192, 1217, 1231, 1317, 1318, 1319, 1320, 1321, 1323, 1334, 1336, 1439, 1888, 1952, 2015, 2043, 2054, 2069, 2083], "frequent": [11, 30, 1431, 2013, 2046, 2059, 2060, 2062, 2082, 2094, 2095, 2106], "occur": [11, 23, 28, 40, 41, 47, 55, 60, 84, 86, 198, 317, 899, 932, 976, 1068, 1109, 1188, 1270, 1276, 1287, 1317, 1446, 1567, 1586, 1617, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1798, 2016, 2030, 2035, 2042, 2043, 2046, 2051, 2060, 2065, 2067, 2071, 2077, 2079, 2086, 2089, 2100, 2101, 2104, 2106, 2109, 2111, 2113, 2114, 2115], "mathemat": [11, 64, 912, 913, 914, 915, 916, 917, 1157, 1227, 1439, 1441, 1442, 1443, 1489, 1490, 1491, 1492, 1567, 1652, 1692, 1717, 1772, 1847, 1905, 1906, 2017, 2021, 2043, 2060, 2083, 2091], "topic": [11, 52, 2056, 2057, 2102], "tradition": 11, "torchaudio": [11, 2013, 2053], "mimick": 11, "assembli": 11, "lapack": [11, 1217, 1304, 1315, 1316, 1317, 1319, 1322, 1335, 1827, 1928], "spectral": [11, 1293, 1309, 1337, 1732, 1756, 1766, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2060], "fft": [11, 2013, 2046], "4621": 11, "0303j": 11, "2438": [11, 1319, 1624], "5874j": 11, "7706": 11, "1421j": 11, "2110": 11, "1918j": 11, "complex128": [11, 177, 987, 1152, 1162, 1252, 1273, 1303, 1304, 1309, 1310, 1311, 1312, 1313, 1314, 1454, 1455, 1456, 1527, 1608, 1609, 1610, 1778, 1821, 1868, 2084, 2085, 2088, 2089, 2104], "complex64": [11, 180, 968, 969, 987, 1126, 1128, 1129, 1134, 1135, 1162, 1252, 1305, 1328, 1331, 1454, 1455, 1456, 1608, 1609, 1610, 1778, 1821, 1868, 2084, 2085, 2088, 2089, 2104], "apart": [11, 2017, 2043], "linspac": [11, 947, 971, 1130, 1140, 1375, 2015, 2019, 2068], "logspac": [11, 2015, 2019, 2068], "arang": [11, 23, 28, 36, 515, 609, 694, 695, 696, 700, 702, 890, 891, 892, 958, 970, 974, 1092, 1107, 1125, 1129, 1137, 1143, 1145, 1149, 1150, 1151, 1152, 1159, 1160, 1237, 1294, 1295, 1299, 1302, 1326, 1329, 1330, 1343, 1423, 1447, 1448, 1449, 1549, 1550, 1551, 1552, 1553, 1580, 1581, 1582, 1671, 1772, 1773, 1816, 1824, 1828, 1843, 1850, 1855, 1875, 1908, 1916, 1927, 1944, 1945, 1947, 1948, 1950, 1978, 2015, 2024, 2036, 2062, 2068, 
2083, 2087, 2088, 2098, 2101, 2106, 2108], "switch": [11, 19, 23, 30, 58, 63, 64, 81, 1316, 1317, 1318, 1466, 1633, 1672, 1722, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1859, 1965, 2033, 2043, 2046, 2057, 2059, 2069, 2105], "view_as_r": [11, 1924, 2015, 2068, 2086], "6125": 11, "1681": 11, "3773": 11, "3487": 11, "0861": 11, "7981": 11, "1681j": 11, "3487j": 11, "7981j": 11, "mul_": [11, 66, 79, 2015, 2034, 2036, 2082], "2250": [11, 1330, 1772], "7546": 11, "1722": 11, "x1": [11, 52, 964, 1321, 1461, 1519, 1576, 1577, 1615, 1673, 2015, 2108], "3j": [11, 28, 701, 991, 992, 1851, 1852, 1868], "4j": [11, 28, 1878], "0000": [11, 35, 515, 869, 912, 914, 947, 967, 971, 994, 1090, 1096, 1097, 1098, 1123, 1124, 1127, 1129, 1130, 1136, 1137, 1140, 1145, 1157, 1158, 1159, 1227, 1233, 1236, 1240, 1299, 1302, 1303, 1304, 1316, 1317, 1320, 1325, 1326, 1330, 1332, 1344, 1360, 1418, 1469, 1470, 1580, 1581, 1583, 1584, 1624, 1625, 1772, 1821, 1827, 1832, 1833, 1834, 1843, 1847, 1848, 1872, 1878, 1880, 1881, 1883, 1886, 1887, 1888, 1889, 1906, 1952, 1953, 1955, 1980, 2024, 2036, 2046, 2082, 2083, 2088], "6569": [11, 1240], "5708": [11, 1093], "7854": 11, "complex_tensor": 11, "pt": [11, 24, 30, 55, 1161, 1278, 1281, 1284, 1291, 1345, 1859, 2014, 2025, 2057, 2062, 2070, 2106], "conjug": [11, 332, 460, 695, 923, 968, 969, 991, 992, 1253, 1303, 1304, 1310, 1313, 1316, 1323, 1337, 1342, 1346, 1731, 1815, 1851, 1924, 1928, 1974, 2054, 2069, 2088], "wirting": [11, 923, 2054], "deriv": [11, 28, 33, 52, 55, 152, 741, 742, 743, 744, 745, 746, 784, 803, 827, 897, 918, 923, 924, 1173, 1177, 1184, 1227, 1270, 1331, 1363, 1575, 1707, 2013, 2016, 2049, 2050, 2054, 2077, 2082, 2083], "steepest": [11, 2043], "descent": [11, 35, 1781, 1797, 1801, 1802, 2043, 2057], "box": [11, 28, 48, 64, 978, 1178, 1186, 1977, 2035, 2043, 2048, 2065, 2099, 2100, 2105, 2106], "real_param": 11, "p": [11, 24, 30, 35, 55, 64, 156, 236, 288, 427, 428, 455, 490, 493, 494, 689, 692, 944, 946, 956, 964, 1103, 1294, 1305, 1311, 1312, 1320, 1321, 1363, 1365, 1368, 1378, 1416, 1435, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1492, 1494, 1495, 1496, 1532, 1536, 1538, 1576, 1577, 1599, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1654, 1655, 1656, 1666, 1670, 1673, 1674, 1685, 1701, 1722, 1735, 1737, 1743, 1752, 1772, 1797, 1848, 1905, 2015, 2016, 2042, 2043, 2046, 2051, 2054, 2059, 2069, 2071, 2082, 2083, 2107, 2108], "complex_optim": 11, "adamw": [11, 1788, 1794, 2069], "real_optim": 11, "slight": [11, 1867, 2082, 2085], "discrep": [11, 1717, 1834, 2054], "foreach": [11, 1721, 1722, 1723, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 2069], "v": [11, 30, 44, 52, 59, 64, 489, 490, 546, 902, 913, 915, 916, 917, 943, 960, 1166, 1176, 1178, 1188, 1270, 1287, 1309, 1313, 1337, 1341, 1346, 1533, 1576, 1577, 1587, 1624, 1670, 1733, 1764, 1769, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1817, 1850, 1909, 1913, 1928, 1929, 1977, 2014, 2015, 2016, 2017, 2030, 2043, 2054, 2063, 2070, 2082, 2087, 2097], "forloop": 11, "numerical_accuraci": 11, "impact": [11, 24, 28, 83, 1065, 1871, 2027, 2030, 2043, 2072, 2086, 2104, 2109, 2113], "pointwis": [11, 35, 976, 1171, 1172, 1351, 1352, 1492, 2034, 2044, 2103, 2106, 2107], "lbfg": [11, 2069], "yet": [11, 29, 33, 37, 52, 53, 55, 60, 63, 64, 73, 82, 84, 85, 488, 1011, 1083, 1199, 1276, 1278, 1291, 1379, 1717, 1739, 1743, 1745, 1784, 1785, 2013, 2016, 2017, 2023, 2029, 2035, 2036, 2046, 2067, 2068, 2072, 2077, 2079, 2082, 2083, 2085, 2098, 2101, 2103, 2104, 
2109, 2111, 2113], "fulli": [11, 14, 23, 28, 30, 32, 33, 34, 52, 59, 64, 683, 1072, 1186, 1273, 1523, 1524, 1525, 1527, 2017, 2023, 2029, 2049, 2070, 2072, 2100, 2101, 2108], "quantiz": [11, 27, 64, 221, 328, 340, 474, 475, 476, 477, 478, 480, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 862, 863, 864, 865, 866, 867, 868, 936, 1094, 1123, 1124, 1829, 1830, 1831, 1832, 1833, 1965, 2013, 2068, 2085, 2088, 2089, 2092, 2093], "pred": [12, 66, 69, 74, 75, 990, 1492, 1717, 2058, 2069, 2077, 2108], "union": [12, 14, 23, 30, 33, 34, 37, 41, 45, 48, 52, 53, 55, 64, 990, 1184, 1186, 1285, 1345, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1474, 1475, 1494, 1495, 1496, 1520, 1521, 1522, 1527, 1571, 1573, 1575, 1578, 1591, 1737, 1778, 1859, 1968, 2015, 2016, 2017, 2021, 2062, 2065, 2067, 2070, 2084, 2087, 2089, 2093], "true_fn": [12, 53, 66, 69, 74, 75, 990], "false_fn": [12, 53, 66, 69, 74, 75, 990], "operand": [12, 66, 74, 75, 952, 990, 1109, 2017, 2018, 2049, 2082, 2085], "Its": [12, 28, 53, 64, 998, 1023, 1273, 1321, 1339, 1340, 1365, 1527, 1772, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1974, 2017, 2048], "uniqu": [12, 28, 37, 44, 47, 48, 50, 515, 519, 1264, 1309, 1310, 1314, 1320, 1321, 1323, 1332, 1334, 1336, 1337, 1374, 1381, 1905, 1928, 1962, 2012, 2014, 2023, 2028, 2049, 2070, 2071, 2077, 2078, 2079, 2082, 2087, 2098, 2115], "predic": [12, 66, 71, 74, 75, 1188], "unlock": [12, 2024], "flexibilti": 12, "architectur": [12, 34, 64, 1035, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1462, 1571, 1616, 1872, 2046, 2060, 2072, 2082, 2097], "prototyp": [12, 18, 19, 28, 52, 683, 912, 914, 918, 923, 924, 990, 1283, 1575, 1587, 1588, 2013, 2024, 2030, 2035, 2036, 2057, 2062, 2075, 2077, 2082, 2085, 2091, 2092, 2093, 2095], "classif": [12, 990, 1440, 1446, 1462, 1530, 1532, 1534, 1560, 1617, 2013, 2041, 2073], "co": [12, 52, 53, 61, 66, 70, 71, 74, 75, 76, 77, 79, 203, 638, 639, 686, 954, 976, 990, 1051, 1168, 1171, 1172, 1177, 1231, 1232, 1325, 1460, 1461, 1464, 1801, 1802, 1809, 1821, 1882, 1886, 1887, 1888, 1889, 1891, 1910, 1911, 1912, 1914, 1915, 2015, 2021, 2034, 2068, 2069, 2082, 2087, 2089, 2099, 2106, 2108], "sin": [12, 14, 52, 53, 60, 61, 66, 70, 71, 74, 75, 76, 77, 534, 672, 673, 885, 976, 990, 1051, 1168, 1170, 1171, 1172, 1174, 1177, 1232, 1325, 1375, 1821, 1883, 1889, 2015, 2021, 2034, 2043, 2068, 2082, 2083, 2087, 2099, 2104, 2106, 2108, 2111], "dynamicshapecondpred": 12, "dyn_shape_mod": 12, "eagerli": [12, 33, 1040, 1046, 1994, 1997, 2046, 2100, 2104], "vari": [12, 52, 822, 1279, 1440, 1559, 1758, 1884, 2030, 2046, 2059, 2060, 2069, 2072, 2073, 2082, 2095, 2100, 2101, 2103, 2104, 2113, 2115], "inp2": 12, "assert": [12, 23, 24, 30, 33, 35, 57, 59, 60, 61, 
64, 66, 72, 75, 76, 77, 627, 926, 928, 929, 1168, 1170, 1171, 1172, 1173, 1176, 1177, 1178, 1187, 1188, 1271, 1276, 1277, 1283, 1685, 1778, 1977, 2016, 2018, 2021, 2049, 2050, 2057, 2062, 2070, 2072, 2089, 2100, 2104, 2112, 2113], "export": [12, 15, 18, 28, 33, 36, 51, 66, 68, 71, 73, 74, 75, 80, 83, 683, 844, 935, 978, 983, 990, 1011, 1182, 1192, 1276, 1278, 1279, 1285, 1291, 1759, 1760, 1779, 1780, 2013, 2014, 2016, 2017, 2021, 2049, 2053, 2068, 2071, 2077, 2094, 2100, 2105, 2111, 2112], "deploy": [12, 37, 47, 52, 2013, 2027, 2095, 2105], "dim_batch": 12, "dim": [12, 28, 34, 35, 52, 60, 61, 64, 66, 68, 74, 75, 114, 116, 117, 118, 120, 136, 137, 138, 186, 206, 209, 212, 213, 214, 215, 216, 217, 231, 235, 262, 266, 283, 314, 315, 316, 317, 318, 319, 323, 324, 356, 382, 394, 409, 411, 412, 413, 416, 431, 432, 433, 434, 438, 439, 455, 464, 472, 481, 493, 494, 496, 507, 508, 514, 515, 516, 517, 518, 519, 520, 521, 539, 540, 543, 544, 546, 549, 554, 555, 557, 562, 567, 575, 580, 581, 585, 586, 587, 589, 590, 592, 607, 608, 611, 612, 613, 614, 617, 619, 696, 698, 699, 700, 702, 878, 879, 880, 914, 915, 917, 963, 970, 988, 990, 997, 1008, 1023, 1025, 1088, 1089, 1090, 1091, 1092, 1101, 1107, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1148, 1149, 1168, 1178, 1182, 1214, 1227, 1237, 1245, 1246, 1247, 1248, 1250, 1295, 1305, 1306, 1326, 1330, 1340, 1342, 1343, 1353, 1361, 1371, 1373, 1374, 1376, 1379, 1381, 1396, 1418, 1419, 1420, 1421, 1422, 1423, 1431, 1461, 1462, 1463, 1466, 1470, 1472, 1477, 1492, 1498, 1517, 1533, 1534, 1535, 1561, 1563, 1575, 1577, 1578, 1615, 1616, 1632, 1635, 1644, 1652, 1658, 1659, 1660, 1669, 1670, 1672, 1674, 1678, 1685, 1691, 1692, 1700, 1717, 1732, 1733, 1739, 1743, 1745, 1752, 1753, 1766, 1769, 1772, 1818, 1825, 1828, 1848, 1849, 1854, 1855, 1860, 1861, 1862, 1865, 1866, 1896, 1899, 1900, 1904, 1907, 1909, 1916, 1919, 1921, 1922, 1923, 1924, 1927, 1940, 1944, 1945, 1946, 1947, 1950, 1951, 1959, 1960, 1961, 1962, 1964, 1972, 1973, 1977, 1978, 2013, 2014, 2015, 2021, 2035, 2036, 2050, 2051, 2062, 2065, 2067, 2068, 2082, 2083, 2085, 2092, 2095, 2100, 2108], "batch": [12, 24, 28, 33, 35, 48, 51, 52, 55, 56, 57, 60, 61, 64, 66, 74, 75, 585, 586, 587, 589, 590, 689, 705, 706, 707, 708, 709, 710, 738, 763, 782, 788, 918, 923, 924, 944, 956, 957, 964, 967, 968, 969, 976, 1008, 1097, 1099, 1109, 1171, 1172, 1178, 1270, 1303, 1304, 1305, 1306, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1341, 1342, 1343, 1346, 1354, 1363, 1364, 1368, 1439, 1440, 1441, 1442, 1443, 1446, 1454, 1455, 1460, 1462, 1463, 1465, 1466, 1467, 1469, 1470, 1471, 1473, 1478, 1479, 1480, 1481, 1486, 1487, 1489, 1490, 1491, 1492, 1493, 1497, 1498, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1518, 1519, 1530, 1531, 1532, 1533, 1534, 1536, 1539, 1540, 1541, 1542, 1543, 1545, 1559, 1560, 1567, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1579, 1598, 1603, 1605, 1606, 1616, 1617, 1619, 1620, 1621, 1624, 1625, 1626, 1627, 1630, 1643, 1644, 1645, 1669, 1677, 1685, 1703, 1704, 1707, 1716, 1717, 1731, 1758, 1759, 1761, 1762, 1764, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1802, 1803, 1809, 1815, 1817, 1827, 1831, 1910, 1911, 1912, 1914, 1915, 1919, 1924, 1928, 1929, 1949, 1952, 1953, 1955, 1974, 1977, 2013, 2034, 2035, 2036, 2042, 2046, 2050, 2051, 2057, 2059, 2072, 2077, 2082, 2087, 2088, 
2095, 2100, 2101, 2103, 2104, 2107, 2111, 2113], "min": [12, 23, 28, 40, 47, 52, 64, 66, 76, 77, 118, 187, 188, 189, 190, 302, 698, 699, 700, 759, 761, 773, 774, 783, 805, 822, 823, 824, 825, 828, 879, 971, 972, 1051, 1089, 1123, 1124, 1198, 1234, 1305, 1319, 1320, 1326, 1330, 1332, 1337, 1343, 1363, 1413, 1445, 1485, 1513, 1515, 1535, 1548, 1555, 1607, 1644, 1648, 1678, 1680, 1686, 1704, 1796, 1801, 1802, 1811, 1815, 1817, 1827, 1928, 1929, 1936, 1953, 1954, 1955, 1956, 2015, 2030, 2034, 2067, 2068, 2072, 2075, 2077, 2095, 2100, 2104, 2108, 2118], "ep": [12, 52, 715, 716, 717, 718, 719, 720, 726, 727, 739, 740, 752, 754, 755, 756, 757, 798, 822, 823, 824, 825, 828, 829, 923, 924, 1283, 1359, 1427, 1441, 1442, 1443, 1461, 1480, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1536, 1541, 1542, 1567, 1571, 1573, 1575, 1576, 1603, 1615, 1630, 1634, 1635, 1643, 1647, 1670, 1673, 1677, 1682, 1701, 1716, 1732, 1766, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1811, 1831, 2015, 2049, 2054, 2083, 2108, 2118], "dynamic_shap": [12, 52, 1181, 2065, 2095, 2110], "graphmodul": [12, 33, 52, 53, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 83, 863, 864, 865, 866, 2072, 2093, 2099, 2102, 2103, 2112, 2113], "arg0_1": [12, 52], "f32": [12, 52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79], "s0": [12, 52, 53, 66, 74, 75, 488, 1188, 2100, 2101, 2113], "sym_siz": [12, 53, 66, 74, 75, 2068, 2108], "sym": [12, 66, 74, 75, 76, 77, 1188, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891], "gt": [12, 66, 74, 75, 298, 1228, 2015, 2034, 2054, 2068, 2108], "true_graph_0": [12, 53, 66, 69, 74, 75], "false_graph_0": [12, 53, 66, 69, 74, 75], "symbol": [12, 14, 22, 52, 53, 65, 66, 75, 81, 82, 85, 86, 89, 627, 820, 844, 978, 1181, 1182, 1184, 1186, 1188, 1190, 1191, 1193, 1194, 1197, 1198, 1202, 1288, 1342, 1497, 1543, 1952, 1974, 2013, 2017, 2021, 2065, 2100, 2103, 2104, 2113], "sub": [12, 28, 34, 40, 55, 63, 66, 69, 74, 75, 564, 817, 865, 866, 1285, 1289, 1290, 1371, 1374, 1376, 1539, 1540, 1571, 1572, 1574, 1848, 1926, 1944, 1961, 2012, 2014, 2015, 2034, 2036, 2043, 2059, 2067, 2068, 2082, 2085, 2101, 2108, 2112], "exmapl": 12, "dependet": 12, "datadependentcondpred": 12, "sum_1": [12, 64, 2099, 2101, 2102], "b8": [12, 66, 69, 74], "flatten": [12, 24, 52, 53, 55, 64, 66, 72, 878, 879, 1098, 1236, 1330, 1343, 1579, 1730, 1772, 1780, 1828, 1844, 1849, 1854, 1940, 1960, 1961, 1962, 1963, 2015, 2034, 2035, 2037, 2041, 2065, 2067, 2068, 2074, 2086, 2100], "closur": [12, 32, 66, 74, 75, 978, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 2102, 2105], "flat": [12, 52, 1849, 1891, 1963, 2015, 2036, 2052, 2067, 2082], "_higher_order_op": 12, "condition": [12, 47, 920, 990], "constraint": [12, 28, 52, 55, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 797, 798, 913, 990, 1043, 1054, 1172, 1181, 1182, 1184, 1187, 1188, 1198, 1717, 1732, 1758, 1798, 2013, 2017, 2036, 2050, 2052, 2054, 2057, 2082, 2098, 2100, 2101], "true_branch": [12, 990], "false_branch": [12, 990], "consist": [12, 19, 23, 28, 30, 33, 35, 47, 48, 53, 64, 923, 967, 968, 969, 990, 1196, 1273, 1303, 1309, 1310, 1312, 1314, 1315, 1316, 1317, 1341, 1343, 1527, 1575, 1734, 1737, 1780, 1827, 1928, 2014, 2017, 2021, 2024, 2035, 2036, 2050, 2052, 2057, 2059, 2061, 2067, 2069, 2071, 2082, 2099, 2102, 2107, 2113], "possibli": [12, 23, 47, 52, 55, 990, 1200, 1201, 1273, 1289, 1316, 1321, 1527, 1717, 2014, 2021, 2061, 2070, 2103, 2115], "aka": [12, 28, 52, 53, 66, 74, 75, 936, 
990, 1487, 1642, 2043, 2065, 2085, 2100], "add_": [12, 28, 52, 58, 60, 490, 990, 1167, 2015, 2034, 2044, 2065, 2082], "tempor": [12, 990, 1441, 1443, 1446, 1567, 1580, 1617, 1644, 1704], "pytre": [12, 52, 64, 66, 990, 2050], "parallel_info": [13, 2013, 2045], "cppextens": [14, 2013], "setuptool": 14, "bare": 14, "pypa": 14, "userguid": 14, "ext_modul": 14, "buildextens": [14, 2013], "extra_compile_arg": [14, 2063], "extra_link_flag": 14, "wl": 14, "lm": [14, 33], "cmdclass": 14, "build_ext": 14, "cudaextens": [14, 2013], "cuda_extens": 14, "extension_kernel": 14, "cu": 14, "cxx": 14, "nvcc": [14, 1039, 2113], "o2": 14, "lcuda": 14, "arch": 14, "card": [14, 2063], "visibl": [14, 28, 30, 44, 53, 1033, 1079, 1528, 1529, 1537, 1538, 2100, 2101, 2105], "ptx": 14, "road": 14, "recompil": [14, 64, 683, 976, 1287, 2023, 2055, 2100, 2101, 2102, 2109], "cc": [14, 28, 2046], "newest": [14, 59, 2064], "torch_cuda_arch_list": 14, "6": [14, 18, 19, 23, 24, 28, 35, 45, 52, 64, 66, 67, 71, 72, 74, 75, 76, 77, 79, 315, 317, 319, 323, 403, 404, 473, 489, 501, 515, 519, 562, 609, 688, 689, 694, 700, 744, 745, 746, 748, 749, 761, 764, 766, 768, 912, 913, 915, 916, 917, 939, 947, 955, 960, 966, 970, 974, 998, 1020, 1067, 1072, 1086, 1087, 1088, 1092, 1093, 1104, 1107, 1108, 1125, 1133, 1138, 1139, 1143, 1144, 1146, 1148, 1149, 1152, 1159, 1227, 1237, 1239, 1240, 1262, 1284, 1295, 1299, 1310, 1326, 1330, 1331, 1332, 1339, 1340, 1344, 1346, 1375, 1421, 1422, 1423, 1436, 1447, 1448, 1449, 1450, 1453, 1458, 1461, 1469, 1470, 1479, 1480, 1481, 1483, 1484, 1523, 1524, 1528, 1536, 1545, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1571, 1572, 1574, 1576, 1579, 1585, 1600, 1630, 1637, 1638, 1671, 1673, 1680, 1707, 1760, 1761, 1763, 1765, 1771, 1772, 1773, 1782, 1796, 1816, 1817, 1820, 1827, 1828, 1834, 1835, 1838, 1844, 1854, 1855, 1859, 1863, 1875, 1896, 1900, 1905, 1908, 1910, 1911, 1916, 1927, 1929, 1930, 1931, 1939, 1944, 1945, 1946, 1947, 1948, 1950, 1953, 1955, 1959, 1963, 1965, 1978, 1979, 2014, 2017, 2018, 2024, 2036, 2041, 2043, 2049, 2050, 2052, 2057, 2062, 2067, 2077, 2078, 2082, 2083, 2085, 2088, 2089, 2101, 2102, 2104, 2108, 2110, 2113, 2114], "build_my_extens": 14, "7": [14, 23, 24, 28, 35, 52, 64, 66, 71, 235, 262, 315, 317, 319, 323, 403, 404, 473, 515, 562, 609, 619, 688, 700, 748, 749, 915, 917, 952, 955, 960, 966, 968, 970, 974, 1091, 1092, 1104, 1106, 1107, 1110, 1125, 1148, 1149, 1152, 1155, 1159, 1227, 1237, 1250, 1295, 1299, 1309, 1314, 1316, 1317, 1320, 1321, 1325, 1326, 1330, 1337, 1421, 1422, 1423, 1429, 1430, 1433, 1434, 1436, 1447, 1448, 1449, 1469, 1515, 1523, 1524, 1549, 1550, 1551, 1552, 1553, 1576, 1579, 1600, 1672, 1724, 1725, 1750, 1755, 1765, 1772, 1787, 1834, 1838, 1844, 1854, 1855, 1856, 1863, 1871, 1875, 1878, 1880, 1891, 1892, 1900, 1908, 1910, 1911, 1916, 1928, 1930, 1931, 1939, 1944, 1945, 1948, 1950, 1959, 1963, 1974, 1978, 2014, 2018, 2024, 2035, 2036, 2044, 2046, 2049, 2060, 2062, 2067, 2068, 2072, 2082, 2085, 2086, 2087, 2088, 2089, 2101, 2106, 2110], "older": [14, 2046, 2062, 2070], "modestli": [14, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798], "imag": [14, 23, 744, 745, 746, 865, 866, 987, 1429, 1433, 1434, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1473, 1474, 1475, 1490, 1499, 1534, 1539, 1540, 1562, 1579, 1581, 1582, 1598, 1609, 1610, 1612, 1613, 1627, 1628, 1633, 1644, 1703, 1704, 1852, 2013, 2015, 2036, 2054, 2068, 2072, 2086, 2087, 2100, 2104, 2105, 2108], "11": [14, 28, 323, 515, 683, 966, 970, 1054, 1107, 1237, 1294, 1330, 1344, 
1360, 1431, 1447, 1475, 1524, 1629, 1772, 1773, 1779, 1900, 1944, 1978, 2014, 2018, 2024, 2035, 2046, 2049, 2062, 2063, 2067, 2068, 2082, 2083, 2113], "pars": [14, 28, 48, 51, 933, 2017, 2070, 2077, 2101], "window": [14, 23, 28, 64, 353, 558, 945, 954, 1231, 1232, 1270, 1293, 1436, 1437, 1438, 1474, 1475, 1494, 1495, 1496, 1520, 1521, 1522, 1523, 1524, 1525, 1600, 1628, 1629, 1658, 1659, 1660, 1832, 1833, 1924, 2013, 2015, 2030, 2046, 2062, 2071, 2100], "workaround": [14, 23, 33, 52, 64, 1277, 2012, 2061, 2067, 2072, 2104], "pure": [14, 15, 52, 60, 1276, 2014, 2049], "sigmoidalphablendforwardcuda": 14, "69460": 14, "facebookresearch": 14, "pytorch3d": 14, "cb170ac024a949f1f9614ffe6af1c38d972f7d48": 14, "relocat": 14, "link": [14, 15, 35, 64, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1520, 1521, 1522, 1579, 1635, 2035, 2056, 2057, 2082, 2101, 2111], "rdc": 14, "dc": 14, "anymor": [14, 28, 55], "dlto": 14, "dlink": 14, "protent": 14, "perf": [14, 683, 2072, 2107], "lib": [14, 2021, 2063], "nvshmem": 14, "ninja": [14, 2053, 2063], "dlink_librari": 14, "dlink_lib": 14, "std": [14, 37, 45, 90, 379, 456, 1123, 1124, 1155, 1156, 1157, 1773, 1821, 1847, 1885, 1890, 1923, 1962, 2015, 2034, 2041, 2046, 2056, 2063, 2068, 2095, 2108], "17": [14, 28, 688, 1270, 1326, 1447, 1524, 1900, 2014, 2067, 2068, 2082, 2095, 2102], "mix": [14, 24, 33, 35, 52, 55, 1054, 1717, 2013, 2043, 2045, 2053, 2072, 2082, 2100, 2102, 2104], "use_ninja": 14, "greatli": [14, 64, 2046, 2100], "fallback": [14, 20, 28, 47, 52, 58, 89, 1204, 1287, 1337, 2042, 2046, 2062, 2098, 2104, 2113], "distutil": 14, "max_job": 14, "extra_cflag": 14, "extra_cuda_cflag": 14, "extra_ldflag": 14, "extra_include_path": 14, "build_directori": 14, "with_cuda": [14, 2063], "is_python_modul": 14, "is_standalon": 14, "keep_intermedi": 14, "torch_extens": 14, "temporari": [14, 64, 826, 829, 1718, 2043, 2051, 2095, 2106], "torch_extensions_dir": 14, "subfold": [14, 2113], "o3": 14, "cuh": 14, "Such": [14, 23, 24, 33, 52, 63, 1971, 2082, 2101], "lib64": 14, "cudart": [14, 2063], "fine": [14, 28, 30, 32, 36, 52, 488, 862, 865, 1160, 1289, 1685, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 2012, 2033, 2043, 2046, 2049, 2067, 2070, 2072, 2082, 2094, 2101, 2106, 2113], "cuda_hom": 14, "safest": 14, "pybind11": [14, 15, 2016], "linker": 14, "workspac": [14, 20, 976], "header": [14, 45, 2062, 2063, 2087, 2089], "automat": [14, 19, 24, 28, 30, 47, 48, 52, 64, 88, 591, 893, 895, 909, 910, 930, 976, 1045, 1054, 1236, 1283, 1289, 1375, 1574, 1685, 1718, 1846, 1875, 1966, 2012, 2013, 2016, 2017, 2027, 2028, 2034, 2035, 2043, 2044, 2046, 2049, 2053, 2057, 2059, 2067, 2070, 2072, 2073, 2077, 2085, 2087, 2088, 2100, 2103, 2104, 2106, 2111, 2113], "construct": [14, 15, 23, 28, 30, 32, 33, 35, 47, 52, 53, 55, 57, 59, 60, 64, 66, 71, 72, 76, 77, 152, 450, 795, 796, 827, 882, 883, 897, 918, 987, 1096, 1112, 1178, 1236, 1273, 1285, 1289, 1290, 1297, 1304, 1344, 1345, 1360, 1375, 1469, 1470, 1527, 1588, 1589, 1590, 1624, 1707, 1717, 1719, 1720, 1758, 1765, 1817, 1821, 1868, 1903, 1910, 1911, 1912, 1913, 1914, 1915, 1929, 1943, 1946, 1977, 2013, 2014, 2021, 2025, 2030, 2046, 2048, 2052, 2057, 2059, 2062, 2065, 2070, 2072, 2077, 2079, 2085, 2087, 2088, 2089, 2095, 2101, 2104], "plain": [14, 1440, 1528, 1537, 1731, 1912, 2049, 2082, 2103], "standalon": [14, 46, 47, 48, 820, 1285, 1289, 2014, 2107], "torch_lib_path": 14, "load_inlin": [14, 2013], "cpp_sourc": 14, "cuda_sourc": 14, "with_pytorch_error_handl": 14, "use_pch": 14, 
"behav": [14, 15, 28, 52, 63, 64, 315, 323, 495, 517, 519, 845, 1177, 1190, 1201, 1289, 1523, 1524, 1525, 1712, 1713, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 2016, 2017, 2024, 2043, 2049, 2050, 2057, 2061, 2070, 2077, 2082], "exactli": [14, 24, 28, 35, 50, 52, 53, 923, 924, 970, 1021, 1025, 1109, 1155, 1156, 1194, 1270, 1273, 1315, 1470, 1473, 1474, 1475, 1527, 1556, 1559, 1625, 1635, 1717, 1772, 1780, 1871, 2024, 2025, 2034, 2043, 2046, 2048, 2049, 2052, 2054, 2055, 2067, 2070, 2098, 2101], "filenam": [14, 19, 23, 30, 52, 64, 1161, 1272, 1281, 1284, 2012, 2015, 2028, 2043, 2070, 2084, 2087, 2106, 2115], "typic": [14, 23, 24, 28, 33, 35, 36, 37, 40, 47, 48, 50, 53, 55, 64, 85, 90, 488, 978, 998, 1009, 1152, 1160, 1196, 1202, 1273, 1294, 1297, 1345, 1460, 1486, 1527, 1707, 1717, 1784, 1785, 1797, 1871, 1883, 1910, 1911, 1912, 1914, 1915, 2012, 2013, 2014, 2017, 2023, 2025, 2043, 2045, 2046, 2053, 2060, 2061, 2062, 2067, 2069, 2072, 2077, 2086, 2088, 2097, 2099, 2100, 2103, 2111, 2113, 2115, 2118], "inlin": [14, 52, 63, 66, 68, 72, 1051, 1273, 1277, 1289, 2045, 2104], "concaten": [14, 23, 28, 914, 963, 974, 1023, 1108, 1188, 1239, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1470, 1497, 1617, 1625, 1722, 1737, 1921, 1979, 2015, 2018, 2067, 2068, 2082], "furthermor": [14, 45, 59, 1309, 1310, 1337, 1363, 1464, 2034, 2035, 2043, 2061, 2075, 2077], "cuda_runtim": 14, "se": 14, "macro": [14, 37, 2055], "pybind": 14, "_safe_foo": 14, "redirect": [14, 37, 45, 49, 2091], "obscur": 14, "sin_add": 14, "inline_extens": 14, "include_path": [14, 2013], "get_compiler_abi_compatibility_and_vers": [14, 2013], "abi": [14, 15, 2053], "alongsid": [14, 1273, 1527, 2049, 2053], "shell": 14, "torchvers": 14, "verify_ninja_avail": [14, 2013], "is_ninja_avail": [14, 2013], "embed": [15, 33, 34, 53, 64, 738, 749, 902, 1100, 1431, 1460, 1470, 1486, 1499, 1533, 1577, 1625, 1685, 1798, 1866, 2015, 2045, 2052, 2068, 2071, 2072, 2074, 2077, 2082, 2087, 2108], "simpl": [15, 18, 24, 33, 40, 47, 52, 59, 61, 64, 490, 998, 1065, 1168, 1176, 1177, 1178, 1273, 1277, 1285, 1441, 1442, 1443, 1469, 1500, 1501, 1502, 1527, 1567, 1624, 1883, 1977, 2012, 2014, 2018, 2036, 2045, 2046, 2048, 2049, 2051, 2054, 2056, 2067, 2070, 2077, 2082, 2101, 2102, 2103, 2106], "modif": [15, 55, 64, 83, 223, 783, 904, 905, 909, 1162, 1163, 1273, 1527, 1717, 2043, 2049, 2057, 2070, 2072, 2101, 2104], "submodul": [15, 30, 33, 34, 52, 53, 55, 64, 82, 794, 800, 817, 841, 842, 843, 862, 865, 866, 1176, 1273, 1277, 1279, 1284, 1285, 1463, 1527, 1528, 1529, 1556, 1714, 2013, 2014, 2016, 2017, 2027, 2053, 2057, 2062, 2070, 2072, 2077, 2093, 2099, 2112], "preprocess": [15, 52, 81, 498, 1273], "augment": [15, 2018, 2089], "walk": [15, 64, 2021, 2049, 2070, 2078, 2079, 2086, 2113], "interfac": [15, 19, 24, 30, 36, 37, 39, 44, 47, 65, 715, 716, 717, 718, 719, 720, 721, 722, 723, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 748, 749, 759, 764, 765, 766, 767, 768, 797, 1163, 1707, 1870, 1924, 1965, 2017, 2021, 2030, 2031, 2032, 2049, 2056, 2069, 2071, 2075, 2082, 2087, 2108], "opencv": [15, 1633, 1644], "struct": [15, 1177, 1178, 1977, 2027, 2056], "explain": [15, 28, 52, 1798, 1871, 2012, 2017, 2042, 2046, 2050, 2052, 2059, 2082, 2101, 2104, 2113], "reshap": [15, 28, 35, 500, 501, 515, 546, 619, 695, 974, 1092, 1107, 1108, 1148, 1237, 1294, 1326, 1330, 1339, 1340, 1343, 1423, 1448, 1449, 1473, 1549, 1550, 1551, 1552, 1553, 1579, 1732, 1766, 1772, 1908, 1909, 1916, 1944, 1945, 1950, 1978, 1979, 2015, 2024, 2035, 
2036, 2067, 2068, 2074, 2086, 2087, 2108], "classat_1_1_tensor": 15, "tensor_index": 15, "crucial": [15, 90, 2095, 2106, 2107], "cpp_autograd": 15, "workflow": [15, 2012, 2049, 2053, 2072, 2073, 2091, 2094, 2100, 2103, 2107, 2109, 2110], "undesir": [15, 33, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1685, 1924, 2049], "overview": [15, 28, 44, 1587, 1717, 2013, 2024, 2033, 2043, 2054, 2057, 2066, 2072, 2077, 2094, 2096], "cpp_frontend": 15, "library_root": 15, "libtorch": [15, 2095], "linux": [15, 28, 2012, 2053], "gcc": 15, "pre": [15, 28, 32, 52, 55, 64, 918, 929, 1186, 1210, 1273, 1527, 1711, 1713, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1750, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 1800, 1806, 2012, 2014, 2017, 2027, 2043, 2045, 2046, 2057, 2065, 2067, 2088, 2103, 2104, 2109, 2113], "cxx11": 15, "facilit": [16, 29, 35, 60, 999, 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1012, 1097, 1724, 1725, 1868, 2012, 2014, 2017, 2061], "certain": [17, 23, 24, 28, 33, 44, 45, 52, 60, 63, 64, 86, 689, 692, 738, 919, 944, 956, 1033, 1097, 1186, 1270, 1273, 1281, 1345, 1368, 1378, 1454, 1455, 1456, 1457, 1458, 1459, 1479, 1498, 1514, 1527, 1533, 1579, 1634, 1645, 1647, 1771, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1990, 2017, 2021, 2024, 2035, 2043, 2044, 2046, 2049, 2056, 2057, 2060, 2062, 2067, 2071, 2077, 2078, 2082, 2103, 2104, 2107, 2113], "tunabl": 17, "earli": [18, 19, 24, 28, 37, 63, 488, 1465, 1466, 1467, 1471, 2013, 2072, 2073, 2092, 2093, 2103], "introduc": [18, 35, 52, 60, 763, 1188, 1198, 1200, 1201, 1212, 1346, 1478, 1497, 1543, 1644, 1854, 2012, 2017, 2044, 2048, 2049, 2058, 2067, 2070, 2071, 2077, 2082, 2088, 2094, 2101, 2104, 2109, 2111, 2113, 2120], "race": [18, 28, 488, 2043], "enable_cuda_sanit": 18, "torch_cuda_sanit": 18, "concurr": [18, 28, 32, 33, 2045, 2046, 2052, 2059, 2077, 2078], "uniniti": [18, 27, 447, 501, 1110, 1111, 1707, 1719, 1720, 1765, 1965, 2025, 2036, 2068], "overwrit": [18, 19, 23, 28, 30, 64, 1528, 1537, 2017, 2043, 2098], "commandlin": 18, "example_error": 18, "csan": 18, "pointer": [18, 141, 930, 1026, 1544, 2021, 2046, 2048, 2056, 2077, 2079, 2103], "139719969079296": 18, "94646435460352": 18, "_sanit": 18, "364": 18, "_handle_kernel_launch": 18, "stack_trac": [18, 53, 64], "stacksummari": 18, "extract": [18, 64, 839, 968, 969, 1210, 1469, 1473, 1579, 1624, 1703, 2065, 2077, 2093, 2097, 2099, 2101, 2102, 2104, 2105], "10000": [18, 20, 24, 697, 1809, 1856, 2051, 2057, 2082, 2087, 2106], "420": 18, "_handle_memory_alloc": 18, "incorrectli": [18, 55, 1012, 1154, 2105], "id": [18, 20, 23, 28, 30, 32, 33, 37, 45, 46, 47, 48, 55, 64, 777, 827, 939, 1009, 1044, 1567, 1660, 1700, 1717, 1779, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 2017, 2056, 2067, 2068, 2070, 2076, 2077, 2078, 2084, 2110, 2113], "faulti": [18, 28], "schema": [18, 28, 30, 53, 87, 2014, 2015, 2017, 2021, 2065, 2067, 2108, 2112], "current_stream": [18, 1011, 1982, 2013, 2046], "wait_stream": [18, 28, 488, 1012, 1014, 1983, 2046], "default_stream": [18, 28, 2013, 2046], "begin": [18, 23, 24, 28, 29, 30, 33, 39, 47, 55, 64, 498, 683, 763, 787, 823, 824, 869, 945, 981, 993, 1009, 1057, 1059, 1069, 1129, 1227, 1233, 1294, 1329, 1341, 1438, 1439, 1440, 1448, 1449, 1450, 1460, 1462, 1468, 1478, 1479, 1482, 1483, 1484, 1485, 1486, 1487, 1493, 1497, 1498, 1513, 1518, 1521, 1522, 1534, 1535, 1543, 1546, 1559, 1565, 1570, 
1577, 1616, 1637, 1638, 1717, 1731, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1801, 1803, 1809, 1875, 1878, 1881, 1924, 1950, 1961, 1980, 2017, 2042, 2043, 2046, 2049, 2052, 2053, 2054, 2056, 2061, 2067, 2069, 2075, 2083, 2098, 2104, 2113], "suspect": [18, 1798, 2046, 2113], "blaslt": 19, "rocbla": [19, 2060], "databas": [19, 23], "prepar": [19, 30, 33, 34, 55, 64, 800, 842, 845, 861, 863, 865, 866, 867, 1176, 1283, 2018, 2067, 2092, 2093], "tunableop_result": 19, "csv": 19, "ordin": [19, 291, 1910, 1911, 1912, 1914, 1915, 2085], "insert": [19, 23, 28, 35, 55, 63, 64, 750, 795, 796, 865, 866, 943, 1528, 1529, 1537, 1717, 1863, 1866, 1875, 1896, 1921, 1964, 2014, 2027, 2046, 2065, 2068, 2072, 2111, 2112], "discov": [19, 28, 976, 2077], "termin": [19, 28, 37, 47, 50, 53, 64, 1787, 2033, 2077, 2106], "pt_version": 19, "rocm_vers": [19, 2055], "12969": 19, "1544e39": 19, "hipblaslt_vers": 19, "a9c5cc7": 19, "rocblas_vers": 19, "72e57364": 19, "dirti": [19, 2043, 2101], "gemmtunableop_float_nt": 19, "nt_25088_4096_64": 19, "1219": [19, 1333, 1846], "262": 19, "nt_4096_4096_64": 19, "1216": [19, 1412], "033": 19, "verison": 19, "reject": 19, "comma": [19, 20, 28, 1109, 2017, 2023], "averag": [19, 28, 32, 738, 769, 770, 771, 772, 824, 825, 936, 938, 998, 1072, 1086, 1273, 1428, 1429, 1430, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1460, 1462, 1470, 1480, 1486, 1492, 1493, 1494, 1495, 1496, 1500, 1501, 1502, 1518, 1519, 1527, 1530, 1531, 1532, 1533, 1534, 1541, 1559, 1560, 1567, 1576, 1592, 1593, 1594, 1600, 1601, 1602, 1605, 1606, 1616, 1630, 1645, 1654, 1655, 1656, 1669, 1677, 1717, 1781, 1782, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1950, 2013, 2048, 2071], "edit": [19, 64, 2070, 2086], "caution": [19, 45, 2105], "untun": 19, "gemmtunableop": 19, "transpos": [19, 462, 595, 596, 619, 695, 744, 745, 746, 968, 969, 1109, 1177, 1303, 1304, 1310, 1313, 1316, 1323, 1334, 1337, 1457, 1458, 1459, 1543, 1579, 1611, 1612, 1613, 1685, 1726, 1727, 1729, 1731, 1815, 1928, 1930, 1931, 1938, 1952, 2015, 2034, 2036, 2043, 2060, 2067, 2068, 2074, 2082, 2086, 2088, 2108], "k": [19, 23, 28, 30, 35, 48, 64, 288, 356, 508, 515, 517, 519, 592, 619, 763, 943, 969, 1096, 1109, 1166, 1181, 1188, 1214, 1236, 1295, 1303, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1318, 1319, 1320, 1323, 1325, 1332, 1334, 1336, 1337, 1346, 1364, 1368, 1436, 1438, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1474, 1475, 1478, 1479, 1497, 1498, 1512, 1514, 1515, 1520, 1522, 1533, 1534, 1543, 1545, 1587, 1616, 1628, 1629, 1651, 1669, 1731, 1815, 1817, 1827, 1855, 1905, 1906, 1907, 1908, 1910, 1911, 1912, 1914, 1915, 1924, 1929, 1947, 1952, 2015, 2016, 2041, 2046, 2049, 2063, 2082, 2083, 2085, 2104, 2107, 2108, 2113], "diagnost": [19, 40, 81, 82, 83, 84, 85, 88, 683, 2014], "besid": [19, 24, 28, 932, 2046, 2048, 2065, 2087, 2099], "pytorch_tunableop_verobs": 19, "30m": 19, "whichev": [19, 998, 1178, 1633, 1828, 1977, 2046], "successfulli": [19, 28, 37, 44, 52, 1280, 1872, 2033, 2057, 2062, 2077, 2113], "bgemm": 19, "rout": [19, 2101], "transa": 19, "transb": [19, 2067], "gettuningcontext": 19, "tuningcontext": 19, "preced": [19, 32, 37, 55, 62, 683, 1711, 1809, 1949, 2018, 2045, 2069, 2072, 2090], "val": [19, 53, 66, 69, 74, 1051, 1188, 1203, 1279, 1280, 2015, 2017, 2021, 2041, 2103], "is_en": [19, 1903, 1910, 1911, 1912, 1913, 1914, 1915], "tuning_en": 19, "tuning_is_en": 19, "set_max_tuning_dur": 19, "millisecond": [19, 41, 44, 1011, 1386, 1982, 2030], "honor": [19, 2017], "get_max_tuning_dur": 19, 
"set_max_tuning_iter": 19, "get_max_tuning_iter": 19, "set_filenam": 19, "insert_device_ordin": 19, "cenario": 19, "get_filenam": 19, "get_result": 19, "get_valid": 19, "write_file_on_exit": 19, "destruct": [19, 28, 2030, 2077, 2079], "write_fil": 19, "read_fil": 19, "pytorch_no_cuda_memory_cach": [20, 2046, 2055], "pytorch_cuda_alloc_conf": [20, 1034], "pytorch_nvml_based_cuda_check": [20, 2046], "nvml": [20, 2046], "fork": [20, 23, 45, 1292, 1717, 2017, 2045, 2046, 2051, 2056, 2059, 2063, 2076, 2077, 2079], "torch_cudnn_v8_api_lru_cache_limit": 20, "cudnn": [20, 21, 22, 763, 1283, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1478, 1497, 1543, 1544, 1586, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1685, 1724, 1725, 1871, 2013, 2015, 2046, 2060, 2061, 2107, 2116], "roughli": [20, 23, 1492, 2048, 2111, 2112], "2gib": 20, "200kib": 20, "executionplan": 20, "torch_cudnn_v8_api_dis": 20, "And": [20, 52, 59, 800, 827, 1088, 1089, 1166, 1195, 1273, 1295, 1371, 1376, 1446, 1527, 1966, 1968, 2043, 2049, 2050, 2052, 2063, 2067, 2072, 2093, 2099, 2101, 2105, 2106], "v7": 20, "torch_allow_tf32_cublas_overrid": 20, "set_float32_matmul_precis": [20, 1223], "torch_nccl_use_comm_nonblock": 20, "nccl": [20, 24, 29, 30, 48, 55, 1717, 2052, 2055, 2117], "torch_nccl_avoid_record_stream": 20, "stream": [20, 23, 28, 37, 45, 55, 63, 152, 488, 897, 918, 1000, 1002, 1007, 1009, 1011, 1012, 1015, 1016, 1017, 1025, 1028, 1029, 1043, 1049, 1082, 1085, 1386, 1389, 1390, 1395, 1400, 1402, 1403, 1409, 1411, 1849, 1982, 1984, 1986, 2007, 2009, 2013, 2015, 2017, 2052, 2065, 2077, 2104, 2115, 2117], "torch_cudnn_v8_api_debug": 20, "saniti": [20, 30, 2050], "cuda_visible_devic": [20, 28, 1078, 1717, 2046], "cuda_launch_block": [20, 22, 1497, 1543, 2046], "cublas_workspace_config": [20, 22, 1497, 1543, 1965, 2046, 2061], "4096": [20, 22, 52, 1497, 1543, 1965, 2046, 2067], "16": [20, 22, 28, 34, 52, 323, 619, 741, 742, 743, 744, 745, 746, 775, 1107, 1125, 1152, 1227, 1237, 1273, 1285, 1297, 1303, 1309, 1310, 1314, 1330, 1423, 1435, 1437, 1438, 1446, 1447, 1450, 1453, 1454, 1455, 1456, 1458, 1459, 1464, 1465, 1466, 1467, 1471, 1474, 1475, 1494, 1495, 1496, 1497, 1515, 1520, 1521, 1522, 1524, 1525, 1527, 1534, 1543, 1554, 1571, 1585, 1608, 1610, 1611, 1613, 1617, 1628, 1629, 1775, 1824, 1871, 1900, 1965, 1974, 1978, 2014, 2018, 2041, 2046, 2057, 2060, 2062, 2067, 2068, 2077, 2082, 2083, 2085, 2087, 2088, 2095, 2101, 2102], "kib": [20, 2046], "cudnn_conv_wscap_dbg": 20, "cublaslt_workspace_s": 20, "cudnn_errata_json_fil": 20, "errata": 20, "config": [20, 24, 55, 64, 795, 796, 797, 798, 863, 865, 976, 2048, 2063, 2075, 2095, 2104, 2106, 2107, 2109, 2111, 2113], "primarili": [20, 35, 53, 63, 818, 819, 820, 826, 1188, 1197, 1423, 2030, 2072, 2088], "hardcod": [20, 2103], "autotun": [20, 976, 2107], "nvidia_tf32_overrid": 20, "float16": [21, 24, 30, 55, 299, 689, 692, 763, 826, 829, 837, 854, 855, 862, 944, 956, 1162, 1255, 1273, 1368, 1378, 1454, 1455, 1456, 1457, 1458, 1459, 1478, 1479, 1497, 1498, 1514, 1527, 1543, 1587, 1644, 1685, 1724, 1725, 1783, 1784, 1785, 1797, 1856, 1868, 2042, 2053, 2057, 2072, 2074, 2082, 2084, 2085, 2088, 2089, 2118], "v100": [21, 763, 1478, 1497, 1543, 2046], "packedsequ": [21, 763, 1478, 1497, 1543, 1759, 1760, 1761, 1763], "rnn": [22, 737, 763, 764, 765, 766, 768, 862, 1478, 1479, 1497, 1498, 1544, 1545, 1718, 1735, 1758, 2019, 2051, 2057, 2074, 2087], "enforc": [22, 24, 63, 798, 894, 909, 1273, 1497, 1527, 1543, 2017, 2057, 2086], "colon": [22, 1497, 1543, 2077], "heart": 23, "dataload": [23, 498, 
1717, 1802, 1803, 1809, 2046, 2051, 2053, 2059, 2063, 2069, 2087], "batch_siz": [23, 33, 35, 59, 61, 896, 909, 910, 1168, 1176, 1178, 1533, 1543, 1758, 1760, 1761, 1763, 1977, 2015, 2050, 2051, 2053, 2061, 2067, 2087], "shuffl": [23, 1447, 2013, 2087], "batch_sampl": 23, "num_work": [23, 37, 2061, 2063], "drop_last": 23, "timeout": [23, 28, 37, 47, 2026, 2033, 2077, 2117], "worker_init_fn": [23, 2051, 2061], "prefetch_factor": 23, "persistent_work": 23, "__getitem__": [23, 1965], "__len__": [23, 64, 2015], "protocol": [23, 30, 36, 47, 50, 884, 1160, 1163, 1859, 2049, 2063, 2065, 2067, 2077, 2114], "sampl": [23, 30, 35, 52, 56, 61, 64, 90, 155, 156, 260, 288, 379, 456, 483, 610, 782, 946, 998, 1020, 1054, 1067, 1072, 1086, 1087, 1127, 1145, 1168, 1178, 1227, 1270, 1290, 1363, 1413, 1435, 1439, 1440, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1460, 1462, 1464, 1465, 1466, 1467, 1469, 1470, 1471, 1480, 1486, 1489, 1490, 1491, 1492, 1493, 1512, 1514, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1546, 1559, 1560, 1567, 1576, 1598, 1605, 1606, 1616, 1618, 1619, 1620, 1621, 1624, 1625, 1626, 1630, 1633, 1635, 1643, 1644, 1645, 1669, 1677, 1717, 1732, 1773, 1802, 1817, 1820, 1834, 1836, 1838, 1840, 1841, 1842, 1883, 1906, 1922, 1923, 1924, 1972, 1973, 1977, 2013, 2021, 2030, 2041, 2042, 2046, 2056, 2057, 2070, 2071, 2072, 2073, 2087, 2113], "idx": [23, 64, 904, 906, 909, 1273, 1279, 1431, 1469, 1527, 1732, 1961, 2015, 2035], "th": [23, 35, 156, 288, 315, 317, 323, 763, 912, 914, 946, 966, 1096, 1101, 1248, 1270, 1295, 1313, 1321, 1327, 1363, 1465, 1466, 1467, 1471, 1478, 1486, 1497, 1543, 1619, 1620, 1621, 1626, 1828, 1924, 1963, 2049, 2063, 2083, 2085, 2115], "iterabledataset": [23, 2056], "__iter__": [23, 2018], "suitabl": [23, 35, 488, 960, 1293, 1798, 1863, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2060, 2073, 2087, 2105], "improb": 23, "fetch": [23, 63, 64, 1186, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1778, 2077], "remot": [23, 28, 32, 37, 1717, 2077, 2078], "real": [23, 35, 64, 86, 689, 690, 691, 692, 693, 695, 701, 923, 944, 968, 969, 987, 994, 1054, 1125, 1126, 1128, 1130, 1131, 1132, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1188, 1204, 1263, 1265, 1266, 1269, 1270, 1294, 1303, 1304, 1305, 1309, 1310, 1311, 1312, 1313, 1316, 1317, 1318, 1323, 1326, 1328, 1330, 1331, 1332, 1333, 1337, 1338, 1342, 1343, 1344, 1354, 1360, 1493, 1536, 1539, 1540, 1577, 1731, 1787, 1840, 1924, 1928, 1974, 1975, 1976, 2012, 2013, 2015, 2017, 2025, 2043, 2046, 2065, 2068, 2070, 2072, 2077, 2079, 2086, 2088, 2089, 2099, 2100, 2101, 2103, 2106, 2108, 2114], "replica": [23, 24, 28, 32, 45, 1463, 1717, 2048], "duplic": [23, 45, 317, 321, 473, 546, 945, 954, 975, 1188, 1231, 1232, 1273, 1527, 1961, 1962, 2067, 2082], "yield": [23, 24, 52, 55, 64, 1097, 1099, 1273, 1320, 1321, 1527, 1980, 2017, 2018, 2024, 2072, 2083, 2104], "stochast": [23, 35, 1474, 1475, 1628, 1629, 1781, 1783, 1784, 1786, 1797, 1801, 1802, 2057, 2069], "decent": 23, "randomli": [23, 748, 749, 759, 767, 924, 1435, 1464, 1465, 1466, 1467, 1471, 1546, 1618, 1619, 1620, 1621, 1626, 1745, 2036, 2056, 2057], "permut": [23, 52, 66, 75, 1109, 1320, 1321, 1340, 1363, 1365, 1724, 1725, 1842, 2013, 2015, 2035, 2068, 2074, 2086, 2088, 2108], "mini": [23, 782, 788, 1441, 1442, 1443, 1469, 1470, 1481, 1486, 1489, 1490, 1491, 1499, 1519, 1530, 1532, 1542, 1567, 1576, 1624, 1625, 1644, 1704, 1716], "neither": [23, 28, 798, 903, 904, 907, 909, 1092, 1152, 1533, 1575, 1772, 1950, 2049, 2060, 2077], "nor": [23, 28, 37, 
55, 798, 904, 907, 909, 1188, 1309, 1310, 1337, 1533, 1575, 1717, 1772, 1826, 1928, 2049, 2067, 2098, 2103], "notion": [23, 923, 1441, 1442, 1443, 1489, 1490, 1491, 1567, 2101, 2104], "collat": 23, "minibatch": [23, 771, 772, 775, 776, 777, 1363, 1431, 1439, 1440, 1460, 1462, 1486, 1492, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1580, 1600, 1601, 1602, 1605, 1606, 1608, 1609, 1610, 1611, 1612, 1613, 1616, 1645, 1658, 1659, 1660, 1669, 1677], "loader": [23, 2069], "essenti": [23, 28, 37, 64, 1363, 2035, 2046, 2063, 2082, 2095, 2098], "dummi": [23, 32, 2021, 2043, 2049, 2065, 2113, 2114], "infinit": [23, 1265, 1346, 1439, 1446, 1617, 1821, 2049, 2060, 2077, 2103], "drop": [23, 59, 61, 64, 1054, 1278, 1332, 1337, 1435, 1767, 1871, 1924, 2014, 2035, 2059, 2115], "dataset_it": 23, "pad": [23, 24, 52, 715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 738, 741, 742, 743, 744, 745, 746, 771, 772, 775, 776, 777, 782, 785, 786, 788, 976, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1270, 1436, 1437, 1438, 1446, 1448, 1449, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1458, 1459, 1469, 1470, 1473, 1503, 1504, 1505, 1506, 1507, 1508, 1520, 1521, 1522, 1523, 1524, 1525, 1530, 1533, 1549, 1550, 1551, 1552, 1553, 1554, 1574, 1575, 1579, 1583, 1584, 1585, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1624, 1625, 1627, 1633, 1644, 1658, 1659, 1660, 1661, 1662, 1663, 1703, 1704, 1759, 1761, 1762, 1764, 1832, 1833, 1924, 2013, 2015, 2036, 2051, 2062, 2067, 2068, 2072, 2087, 2100, 2108], "length": [23, 24, 28, 30, 34, 35, 262, 315, 317, 323, 353, 435, 436, 585, 586, 587, 738, 763, 817, 897, 918, 940, 975, 1025, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1163, 1178, 1248, 1270, 1289, 1293, 1422, 1423, 1441, 1446, 1454, 1463, 1470, 1473, 1478, 1494, 1497, 1533, 1543, 1571, 1579, 1588, 1608, 1609, 1610, 1617, 1625, 1644, 1685, 1758, 1759, 1760, 1761, 1762, 1763, 1764, 1834, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1908, 1910, 1911, 1912, 1914, 1915, 1924, 1950, 1977, 2015, 2017, 2024, 2035, 2036, 2044, 2051, 2081, 2082, 2089, 2100, 2101, 2104, 2108, 2113], "cheaper": [23, 63], "bulk": [23, 2103], "arrai": [23, 28, 450, 763, 823, 824, 883, 884, 947, 965, 994, 998, 1109, 1153, 1162, 1163, 1171, 1172, 1345, 1375, 1473, 1478, 1479, 1497, 1498, 1627, 1849, 1856, 1875, 1910, 1911, 1912, 1914, 1915, 1940, 1943, 1950, 1958, 1971, 2014, 2018, 2046, 2057, 2062, 2082, 2084, 2088, 2089, 2104], "untouch": 23, "slightli": [23, 28, 35, 52, 55, 1772, 1817, 1929, 2012, 2046, 2049, 2052, 2054, 2060, 2070, 2104], "default_col": 23, "channel": [23, 58, 474, 475, 476, 782, 788, 811, 825, 828, 836, 849, 857, 1123, 1441, 1442, 1443, 1447, 1454, 1455, 1456, 1457, 1458, 1459, 1464, 1465, 1466, 1467, 1471, 1473, 1481, 1489, 1490, 1491, 1499, 1503, 1504, 1505, 1506, 1507, 1508, 1515, 1535, 1562, 1567, 1579, 1580, 1581, 1582, 1603, 1619, 1620, 1621, 1626, 1643, 1644, 1651, 1678, 1704, 1724, 1725, 1733, 1743, 1744, 1745, 1752, 1753, 1769, 1829, 2035, 2037, 2041, 2043, 2072, 2073, 2075, 2087], "class_index": 23, "namedtupl": [23, 30, 52, 53, 64, 898, 903, 1088, 1089, 1217, 1273, 1285, 1295, 1315, 1365, 1371, 1374, 1376, 1379, 1419, 1431, 1527, 1817, 1827, 1900, 1928, 1947, 1952, 2014, 2016, 2017, 2072], "situat": [23, 35, 50, 64, 87, 89, 488, 976, 994, 1737, 2025, 2033, 2049, 2050, 2059, 2060, 2070, 2072, 2079, 2100, 2103, 
2104, 2114], "gil": [23, 28, 32, 2043, 2046, 2077], "integ": [23, 27, 28, 35, 47, 52, 53, 90, 291, 447, 449, 451, 501, 547, 562, 568, 688, 689, 690, 691, 692, 693, 769, 770, 788, 790, 869, 896, 909, 910, 944, 945, 954, 965, 993, 1012, 1014, 1030, 1036, 1037, 1065, 1104, 1107, 1109, 1110, 1111, 1112, 1152, 1153, 1154, 1155, 1156, 1157, 1163, 1164, 1168, 1169, 1170, 1171, 1172, 1188, 1198, 1205, 1206, 1215, 1231, 1232, 1235, 1236, 1237, 1284, 1287, 1296, 1297, 1304, 1315, 1317, 1327, 1346, 1404, 1412, 1431, 1446, 1454, 1455, 1456, 1499, 1542, 1592, 1593, 1594, 1595, 1596, 1597, 1706, 1716, 1758, 1773, 1776, 1805, 1808, 1812, 1817, 1829, 1830, 1836, 1838, 1839, 1840, 1842, 1847, 1856, 1868, 1916, 1925, 1929, 1944, 1945, 1958, 1963, 1978, 1983, 1987, 1991, 1992, 2010, 2016, 2017, 2018, 2034, 2050, 2054, 2072, 2073, 2075, 2082, 2083, 2085, 2088, 2089, 2091, 2100, 2101, 2118], "descriptor": [23, 28, 1576, 1577, 2018, 2067], "parent": [23, 40, 45, 50, 55, 683, 791, 792, 820, 1527, 2029, 2033, 2063, 2070, 2079, 2087, 2101], "simplest": [23, 28, 30, 37, 64, 862, 1436, 1437, 1438, 1454, 1455, 1456, 1520, 1521, 1522, 1735, 2048, 2049, 2057, 2065, 2072, 2079, 2082, 2104], "refcount": [23, 2033, 2059], "panda": 23, "pyarrow": 23, "13246": 23, "enumer": [23, 33, 35, 52, 64, 1273, 1527, 1529, 1538, 1802, 2015, 2016, 2042, 2046, 2053, 2063, 2087, 2112], "get_worker_info": [23, 2077], "seed": [23, 90, 1046, 1055, 1056, 1077, 1249, 1366, 1388, 1817, 1834, 1929, 1968, 1997, 2000, 2001, 2003, 2013, 2015, 2051, 2059, 2061, 2068, 2076], "naiv": [23, 2100, 2104], "shut": [23, 2077], "garbag": [23, 2079], "subtleti": [23, 66, 1463, 2049, 2051], "multiprocess": [23, 24, 28, 29, 31, 37, 40, 48, 49, 50, 1463, 1717, 2013, 2035, 2048, 2078, 2084], "unix": [23, 38, 45, 2033], "child": [23, 28, 33, 37, 40, 55, 794, 1273, 1527, 1744, 2033, 2057, 2063, 2079], "address": [23, 28, 47, 51, 60, 218, 923, 924, 1017, 2024, 2033, 2046, 2049, 2069, 2077, 2078, 2084, 2098, 2101, 2115], "maco": [23, 28, 2033, 2058], "spawn": [23, 24, 29, 37, 39, 45, 50, 1276, 1717, 2013, 2042, 2048, 2059, 2063, 2078], "__name__": [23, 24, 28, 38, 39, 40, 48, 2048, 2049, 2059, 2063, 2070, 2078], "__main__": [23, 24, 28, 38, 39, 40, 48, 2044, 2048, 2059, 2062, 2063, 2078], "bytecod": [23, 52, 683, 2023, 2064, 2065, 2070, 2099, 2101, 2102, 2104, 2113], "base_se": 23, "worker_id": [23, 50, 2061], "therebi": [23, 35, 2069, 2072], "mandatorili": 23, "faq": [23, 1463, 1761, 2013], "initial_se": [23, 90, 2013, 2015, 2061, 2068, 2076], "host": [23, 28, 33, 37, 40, 46, 47, 48, 50, 198, 211, 582, 605, 625, 881, 1273, 1527, 1717, 1737, 1771, 2046, 2077, 2078, 2084, 2098, 2103, 2115], "recogn": [23, 83, 2017, 2077, 2082, 2103], "simplecustombatch": 23, "transposed_data": 23, "zip": [23, 30, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 2012, 2015, 2016, 2028, 2046, 2056, 2082], "tgt": [23, 1571, 1572, 1573], "collate_wrapp": 23, "float32": [23, 24, 28, 52, 55, 269, 582, 619, 822, 823, 824, 825, 828, 829, 884, 987, 1123, 1124, 1162, 1220, 1223, 1255, 1427, 1440, 1447, 1580, 1581, 1582, 1600, 1783, 1784, 1785, 1797, 1821, 1826, 1838, 1853, 1868, 1869, 1871, 1913, 1975, 2036, 2042, 2046, 2060, 2065, 2067, 2074, 2082, 2083, 2084, 2085, 2088, 2089, 2101, 2102, 2104, 2113, 2118], "tensordataset": 23, "batch_ndx": 23, "is_pin": [23, 1758, 2015, 2034, 2068, 2084], "multiprocessing_context": 23, "pin_memory_devic": 23, "reshuffl": 23, "draw": [23, 35, 156, 288, 946, 1072, 1413, 1834, 2087, 2111], "mutual": [23, 28, 37, 55, 738, 
2089], "subprocess": [23, 28, 31, 45, 47, 50, 2051, 2059], "incomplet": [23, 912, 2019, 2083], "divis": [23, 52, 585, 619, 690, 775, 776, 777, 970, 1104, 1154, 1157, 1284, 1317, 1454, 1455, 1456, 1457, 1458, 1459, 1461, 1481, 1493, 1518, 1536, 1608, 1609, 1610, 1611, 1612, 1613, 1615, 1670, 1847, 1916, 1944, 2017, 2042, 2046], "basecontext": 23, "randomsampl": 23, "prefetch": [23, 55], "unpickl": [23, 28, 1345, 2062, 2070], "practic": [23, 28, 33, 34, 35, 53, 55, 488, 2013, 2014, 2024, 2033, 2035, 2043, 2049, 2052, 2054, 2057, 2062, 2070, 2077, 2084, 2094, 2101, 2103, 2104], "proper": [23, 37, 63, 64, 1100, 1866, 2016, 2043, 2046, 2049, 2059, 2063, 2084], "guess": [23, 1188], "trust": [23, 28, 1345, 2012, 2070], "inaccur": [23, 24, 1644], "kwd": 23, "__getitems__": 23, "speedup": [23, 24, 1533, 1575, 2094, 2098, 2110], "myiterabledataset": 23, "worker_info": 23, "iter_start": 23, "iter_end": 23, "per_work": 23, "ceil": [23, 179, 636, 637, 771, 772, 1436, 1437, 1438, 1494, 1495, 1496, 1520, 1521, 1522, 1600, 1601, 1602, 1658, 1659, 1660, 1832, 1833, 1856, 2015, 2034, 2046, 2068, 2082, 2108], "mult": 23, "12": [23, 24, 28, 55, 323, 519, 619, 689, 744, 745, 746, 748, 749, 939, 966, 970, 1107, 1125, 1237, 1277, 1293, 1310, 1332, 1431, 1447, 1458, 1473, 1474, 1475, 1524, 1539, 1540, 1562, 1571, 1579, 1587, 1628, 1629, 1670, 1675, 1676, 1732, 1766, 1816, 1827, 1875, 1890, 1900, 1944, 1960, 1963, 1978, 2012, 2014, 2015, 2018, 2024, 2046, 2049, 2052, 2058, 2062, 2067, 2068, 2082, 2089, 2101, 2102, 2110, 2111], "overall_start": 23, "overall_end": 23, "stackdataset": 23, "assembl": [23, 2102], "imagedataset": 23, "textdataset": 23, "tuple_stack": 23, "dict_stack": 23, "concatdataset": 23, "chaindataset": 23, "chain": [23, 24, 35, 63, 64, 152, 897, 966, 1329, 1470, 1556, 1799, 1812, 2017, 2043, 2046, 2049, 2054, 2057, 2069, 2101, 2112], "fly": [23, 1051, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 2043], "whole": [23, 28, 30, 33, 1178, 1273, 1527, 1556, 1567, 1717, 1977, 2043, 2048, 2059, 2060, 2070, 2101, 2103, 2105, 2109, 2110], "_util": [23, 983, 2111], "collate_fn_map": 23, "registri": [23, 2013, 2062, 2065, 2067], "default_collate_fn_map": 23, "collate_tensor_fn": 23, "custom_col": 23, "collate_map": 23, "outer": [23, 28, 694, 912, 1109, 1168, 1172, 1177, 1193, 1218, 2015, 2017, 2068], "unchang": [23, 501, 515, 517, 738, 817, 1148, 1273, 1527, 1571, 1848, 1919, 2042, 2060, 2072], "byte": [23, 28, 30, 35, 47, 244, 437, 560, 884, 1016, 1057, 1059, 1061, 1063, 1161, 1163, 1272, 1345, 1382, 1384, 1778, 1787, 1859, 2016, 2017, 2018, 2034, 2052, 2062, 2070, 2082, 2084, 2115], "v_i": [23, 1313], "v_1": 23, "v_2": 23, "v1_i": 23, "v2_i": 23, "v1_1": 23, "v1_2": 23, "v2_1": 23, "v2_2": 23, "elem": [23, 2015], "customtyp": 23, "collate_customtype_fn": 23, "default_convert": 23, "np": [23, 964, 1104, 1109, 1149, 1150, 1151, 1674, 1821, 2021, 2050, 2061, 2067, 2087, 2088, 2089, 2104], "fraction": [23, 35, 1079, 1152, 1158, 1393, 1457, 1458, 1459, 1474, 1475, 1533, 1575, 1628, 1629, 1742, 1743, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1828, 2041, 2104], "workerinfo": [23, 2077], "random_split": 23, "floor": [23, 273, 650, 651, 771, 772, 1104, 1154, 1284, 1436, 1437, 1438, 1494, 1495, 1496, 1520, 1521, 1522, 1600, 1601, 1602, 1658, 1659, 1660, 1832, 1833, 1847, 1856, 1924, 2015, 2017, 2034, 2059, 2062, 2068, 2082, 2108], "frac": [23, 35, 281, 379, 652, 653, 690, 763, 775, 776, 777, 869, 945, 954, 994, 998, 1104, 1154, 1227, 1231, 1232, 1293, 1305, 1309, 1310, 1313, 1325, 1337, 1344, 1360, 1431, 1436, 
1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1447, 1454, 1455, 1456, 1457, 1458, 1459, 1462, 1464, 1473, 1478, 1479, 1480, 1481, 1489, 1490, 1491, 1492, 1494, 1495, 1496, 1497, 1498, 1499, 1512, 1514, 1515, 1516, 1517, 1520, 1521, 1522, 1530, 1531, 1532, 1534, 1542, 1543, 1545, 1546, 1558, 1560, 1561, 1563, 1564, 1566, 1567, 1568, 1579, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1653, 1670, 1672, 1674, 1685, 1687, 1691, 1693, 1695, 1696, 1716, 1782, 1783, 1786, 1788, 1794, 1801, 1802, 1831, 1840, 1843, 1846, 1858, 1878, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1907, 1922, 1923, 1924, 1928, 1950, 1972, 1973, 2015, 2034, 2041, 2043, 2054, 2068, 2082, 2083], "remaind": [23, 492, 1157, 1227, 2015, 2068, 2102, 2108], "robin": [23, 28], "generator1": 23, "manual_se": [23, 60, 90, 1876, 2013, 2015, 2034, 2057, 2059, 2061, 2067, 2068, 2076], "42": [23, 827, 954, 1412, 1882, 2046, 2057, 2102], "generator2": 23, "30": [23, 28, 35, 47, 447, 619, 723, 731, 732, 759, 767, 1227, 1296, 1444, 1446, 1453, 1514, 1579, 1585, 1608, 1617, 1707, 1805, 1807, 1813, 1830, 1940, 2025, 2049, 2052, 2067, 2069, 2077, 2081, 2101, 2102], "data_sourc": 23, "accedingsequencelengthsampl": 23, "argsort": [23, 1940, 2015, 2050, 2068], "tolist": [23, 354, 2017, 2068, 2084], "accedingsequencelengthbatchsampl": 23, "sequentialsampl": 23, "num_sampl": [23, 423, 1413, 2015], "drawn": [23, 175, 260, 288, 1413, 1773, 1834, 1838, 1839, 2041, 2089, 2091], "subsetrandomsampl": 23, "weightedrandomsampl": 23, "probabl": [23, 34, 55, 156, 260, 763, 946, 978, 1194, 1235, 1351, 1413, 1431, 1435, 1439, 1446, 1462, 1464, 1465, 1466, 1467, 1471, 1478, 1480, 1497, 1533, 1534, 1543, 1605, 1616, 1617, 1618, 1619, 1620, 1621, 1626, 1635, 1645, 1669, 1685, 1827, 2033, 2049, 2063, 2067, 2083, 2087, 2100, 2103, 2104, 2105, 2109], "row": [23, 24, 28, 34, 35, 210, 315, 317, 323, 587, 590, 696, 702, 881, 912, 914, 964, 994, 998, 1092, 1122, 1150, 1151, 1172, 1178, 1236, 1295, 1321, 1329, 1332, 1337, 1361, 1363, 1371, 1373, 1374, 1376, 1379, 1413, 1419, 1420, 1421, 1624, 1625, 1674, 1731, 1771, 1825, 1828, 1848, 1908, 1909, 1910, 1911, 1912, 1914, 1915, 1927, 1947, 1950, 1954, 1956, 1971, 1977, 1979, 2015, 2054, 2082, 2087], "05": [23, 32, 64, 115, 346, 697, 715, 716, 717, 718, 719, 720, 726, 727, 739, 740, 752, 754, 755, 756, 757, 923, 924, 1123, 1262, 1289, 1290, 1360, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1567, 1571, 1573, 1575, 1603, 1634, 1643, 1647, 1717, 1800, 1806, 1807, 1813, 1884, 1885, 1890, 2014, 2015, 2069, 2078, 2089], "batchsampl": 23, "distributedsampl": [23, 1717], "num_replica": 23, "world_siz": [23, 24, 28, 29, 32, 37, 47, 48, 51, 55, 1717, 2048, 2077, 2078], "evenli": [23, 34, 585, 586, 587, 698, 699, 1107, 1237, 1344, 1360, 1978], "set_epoch": 23, "is_distribut": [23, 2015, 2068], "start_epoch": 23, "n_epoch": 23, "vanilla": [24, 53, 2024], "allreduc": [24, 28, 1717, 2046, 2048, 2068], "register_comm_hook": [24, 32, 55, 1717], "mainli": [24, 35, 830, 1446, 1617, 2092], "bucket": [24, 32, 1201, 1431, 1717, 2015, 2048, 2068, 2104, 2113], "gradbucket": [24, 1717], "decompos": [24, 52, 64, 1159, 1167, 1318, 2043, 2049, 2067, 2108], "get_per_parameter_tensor": 24, "wise": [24, 28, 34, 35, 690, 691, 701, 761, 773, 774, 783, 787, 888, 992, 1109, 1114, 1155, 1156, 1215, 1216, 1230, 1239, 1296, 1298, 1355, 1356, 1357, 1358, 1362, 1372, 1375, 1377, 1424, 1445, 1459, 1466, 1468, 1482, 1483, 1484, 1485, 1487, 1513, 1516, 1526, 1535, 1546, 1547, 1548, 1555, 1557, 1558, 1559, 
1564, 1565, 1566, 1568, 1569, 1607, 1622, 1631, 1632, 1636, 1637, 1638, 1639, 1642, 1646, 1648, 1653, 1664, 1665, 1678, 1679, 1680, 1686, 1687, 1688, 1689, 1693, 1695, 1696, 1697, 1908, 1979, 2045, 2049, 2082, 2083, 2086, 2110], "_distributed_c10d": [24, 28], "1d": [24, 35, 55, 705, 708, 741, 744, 775, 785, 975, 994, 998, 1092, 1106, 1235, 1236, 1250, 1270, 1329, 1330, 1375, 1420, 1428, 1432, 1436, 1446, 1454, 1457, 1462, 1465, 1466, 1470, 1494, 1519, 1520, 1532, 1534, 1580, 1592, 1595, 1598, 1600, 1608, 1611, 1619, 1625, 1654, 1658, 1828, 1829, 1832, 1924, 1940, 1950, 1974, 2052], "is_last": 24, "set_buff": 24, "stateless": [24, 60, 2037, 2057], "ddp_comm_hook": [24, 32], "default_hook": 24, "allreduce_hook": 24, "process_group": [24, 29, 30, 32, 55, 1567, 1717], "aggreg": [24, 28, 30, 40, 55, 1470, 1625, 1717, 1748, 2030, 2104, 2113], "henc": [24, 30, 32, 35, 37, 46, 47, 51, 55, 63, 288, 946, 1163, 1523, 1524, 1525, 1580, 1724, 1725, 1913, 2036, 2043, 2046, 2048, 2052, 2077, 2079, 2098], "unaffect": [24, 501, 502, 1480], "ddp_model": [24, 28, 30, 1717, 2048], "fp16_compress_hook": 24, "compress": [24, 55, 210, 589, 590, 1131, 1132, 1141, 1142, 1717, 1910, 1911, 1912, 1914, 1915, 1949, 2013, 2110], "decompress": [24, 2012, 2028], "bf16_compress_hook": 24, "brain": [24, 2085, 2088], "wrapper": [24, 28, 35, 49, 55, 63, 64, 66, 627, 760, 793, 827, 1006, 1009, 1011, 1012, 1014, 1082, 1155, 1156, 1176, 1273, 1386, 1409, 1463, 1982, 1983, 2007, 2014, 2016, 2017, 2030, 2033, 2046, 2048, 2067, 2098], "fp16_compress_wrapp": 24, "powersgdst": 24, "matrix_approximation_rank": 24, "start_powersgd_it": 24, "powersgd_hook": 24, "bf16_compress_wrapp": 24, "wikipedia": [24, 1671, 2043, 2054, 2118], "bfloat16_float": 24, "point_format": 24, "vogel": 24, "et": [24, 35, 55, 1446, 1539, 1540, 1576, 1577, 1797, 1891, 1929, 2041], "al": [24, 35, 55, 1446, 1539, 1540, 1576, 1577, 1797, 1891, 1929, 2041], "neurip": [24, 35], "2019": [24, 35, 1065], "bandwidth": [24, 28, 33, 46, 48, 2046, 2072, 2077, 2106, 2107], "hyperparamet": [24, 55, 64, 2087], "1000": [24, 1124, 1129, 1155, 1156, 1431, 1469, 1577, 1829, 1856, 1875, 1943, 2043, 2062, 2067, 2087], "min_compression_r": 24, "use_error_feedback": 24, "warm_start": 24, "orthogonalization_epsilon": 24, "random_se": 24, "compression_stats_logging_frequ": 24, "batch_tensors_with_same_shap": 24, "tune": [24, 28, 32, 1065, 1310, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 2043, 2046, 2058, 2072, 2113], "stronger": [24, 52, 53, 66], "exponenti": [24, 260, 1119, 1325, 1351, 1352, 1353, 1361, 1445, 1468, 1622, 2013, 2017, 2068, 2069, 2083, 2091], "grid": [24, 1129, 1375, 1598, 1633, 2015, 2046, 2087, 2108], "satisfactori": 24, "nlp": [24, 1489, 1490, 1491, 1499, 2104], "appendix": [24, 2013], "hybrid": [24, 55, 220, 545, 585, 586, 587, 589, 590, 1238], "scheme": [24, 45, 47, 480, 805, 822, 823, 824, 825, 828, 830, 2057], "sensit": [24, 1487, 1559, 2067, 2070, 2078, 2103], "suboptim": [24, 2106], "trajectori": 24, "irrecover": 24, "warm": [24, 32, 1054, 1801, 1802, 2046, 2071, 2098, 2104, 2111], "num_row": 24, "num_col": 24, "1e": [24, 30, 35, 64, 115, 346, 697, 715, 716, 717, 718, 719, 720, 726, 727, 739, 740, 752, 754, 755, 756, 757, 758, 923, 924, 967, 968, 969, 1262, 1289, 1290, 1340, 1441, 1442, 1443, 1461, 1480, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1513, 1536, 1541, 1567, 1571, 1573, 1575, 1576, 1603, 1615, 1630, 1634, 1635, 1643, 1647, 1670, 1673, 1677, 1701, 1732, 1766, 1780, 1781, 1782, 1783, 
1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1811, 1819, 1872, 2014, 2015, 2049, 2057, 2067, 2069, 2083, 2089], "orthogon": [24, 1310, 1313, 1332, 1337, 1346, 1815, 1827, 2041, 2043, 2057, 2082], "div": [24, 238, 1105, 1154, 1157, 1431, 1539, 1540, 1847, 1957, 2015, 2021, 2034, 2068, 2082, 2085, 2108, 2112], "epsilon": [24, 64, 822, 823, 824, 825, 828, 869, 1328, 1331, 1441, 1442, 1443, 1461, 1481, 1489, 1490, 1491, 1499, 1536, 1542, 1567, 1615, 1670, 1716, 1727, 1729, 1732, 1766, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1831, 2015, 2083], "bucket_cap_mb": [24, 1717, 2048], "footprint": [24, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 2065, 2082, 2110], "bottleneck": [24, 2013, 2057, 2106], "memor": 24, "compens": 24, "apex": 24, "uncompress": [24, 2062, 2082], "pq": 24, "mq": [24, 2072, 2093], "tp": [24, 28, 33, 34, 55], "awai": [24, 64, 979, 1633, 2013, 2035, 2043, 2050, 2100], "comm": [24, 28, 47, 683, 2048], "handler": [24, 28, 38, 40, 41, 49, 1744, 2030, 2049, 2056, 2070, 2114], "batched_powersgd_hook": 24, "destroi": [24, 28, 47, 1188, 1463, 2043, 2071, 2077], "squar": [24, 35, 553, 742, 743, 744, 745, 782, 788, 797, 998, 1096, 1098, 1270, 1305, 1307, 1309, 1310, 1311, 1314, 1315, 1319, 1321, 1323, 1325, 1327, 1331, 1333, 1334, 1336, 1337, 1341, 1354, 1363, 1429, 1433, 1437, 1438, 1455, 1456, 1458, 1459, 1474, 1475, 1487, 1495, 1496, 1518, 1521, 1522, 1525, 1542, 1559, 1571, 1588, 1600, 1609, 1612, 1628, 1629, 1633, 1642, 1644, 1665, 1682, 1685, 1689, 1704, 1716, 1731, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1858, 1917, 1952, 1971, 2015, 2068, 2082], "truncat": [24, 1154, 1871, 1958, 2036, 2041, 2051, 2060], "impli": [24, 47, 55, 260, 1192, 2033, 2043, 2052, 2067, 2075, 2077, 2103], "debugging_hook": 24, "noop_hook": 24, "headroom": 24, "desynchron": [24, 28], "trainer": [24, 28, 30, 37, 40, 45, 47, 48, 50, 1717, 2077], "restart": [24, 37, 46, 48, 51, 1801, 1802, 2033, 2087, 2102], "__setstate__": 24, "__getstate__": 24, "reload": [24, 32, 55, 2012], "sy": [24, 39, 48, 51, 2012, 2046, 2062, 2070], "tempfil": [24, 2062], "mp": [24, 28, 29, 50, 1386, 1717, 2013, 2021, 2048, 2049, 2059, 2072, 2078, 2084, 2085, 2093], "simplemodel": 24, "24": [24, 28, 35, 949, 1332, 1515, 1598, 1765, 1827, 1871, 1945, 2014, 2041, 2045, 2083, 2101, 2102], "fc2": [24, 1707, 2065, 2095], "master_addr": [24, 28, 37, 47, 48, 51, 2048, 2077, 2078], "localhost": [24, 28, 47, 48, 2048, 2077, 2078], "master_port": [24, 28, 37, 47, 48, 51, 2048, 2077, 2078], "12355": 24, "init_process_group": [24, 28, 29, 32, 37, 48, 51, 1717, 2046, 2048, 2077], "cleanup": [24, 1188, 2084], "destroy_process_group": [24, 28], "run_demo": 24, "demo_fn": 24, "nproc": [24, 28, 45, 46, 48, 2033, 2048, 2078], "demo_seri": 24, "gettempdir": 24, "device_id": [24, 28, 29, 32, 48, 55, 1345, 1463, 1567, 1700, 1717, 2048], "powersgd_st": 24, "lr": [24, 29, 30, 32, 55, 490, 1707, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 2046, 2048, 2053, 2057, 2059, 2069, 2078, 2087], "001": [24, 923, 924, 1283, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1810, 2048, 2053, 2067], "state_dict": [24, 30, 32, 52, 53, 55, 62, 839, 840, 1273, 1527, 1707, 1751, 1769, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1790, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 
1809, 1810, 1812, 1813, 2012, 2028, 2029, 2048, 2053, 2057, 2059, 2062, 2065, 2067, 2072, 2092], "comm_hook": 24, "comm_hook_st": 24, "barrier": [24, 37, 47, 2068], "map_loc": [24, 1277, 1281, 1345, 1717, 2012, 2025, 2028, 2070], "new_ddp_model": 24, "load_state_dict": [24, 30, 32, 55, 62, 417, 1273, 1345, 1527, 1707, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813, 2012, 2013, 2057, 2062, 2072], "n_gpu": 24, "device_count": [24, 1968, 2013], "got": [24, 28, 895, 909, 910, 932, 2049, 2089], "thank": [24, 35, 2049], "author": [24, 47, 48, 55, 1178, 1794, 1834, 1977, 2042, 2095, 2102, 2104, 2106, 2109, 2110, 2111, 2113], "thij": 24, "par": 24, "torch_show_cpp_stacktrac": [25, 28], "torch_cpp_log_level": [25, 28], "c10": [25, 47, 2056, 2095, 2100], "glog": 25, "logger": [25, 28, 37, 2092, 2093], "info": [25, 28, 33, 41, 45, 47, 55, 683, 896, 909, 910, 1011, 1012, 1014, 1188, 1304, 1315, 1317, 1318, 1322, 1335, 1363, 1389, 1390, 1778, 2013, 2015, 2021, 2023, 2048, 2049, 2050, 2067, 2070, 2100, 2102, 2103, 2113, 2117], "fatal": [25, 52, 2033, 2059], "torch_log": [25, 81, 83, 683, 976, 2023, 2048, 2100, 2101, 2102, 2104, 2113], "_log": [25, 2013, 2017, 2113, 2116], "home": [26, 2046], "fill_uninitialized_memori": [27, 501, 1110, 1111, 1112, 1965, 2013, 2061], "fill": [27, 28, 33, 156, 175, 260, 261, 262, 288, 319, 323, 379, 402, 447, 448, 449, 451, 456, 483, 515, 610, 626, 947, 1097, 1110, 1111, 1112, 1164, 1165, 1304, 1315, 1317, 1470, 1571, 1625, 1672, 1776, 1777, 1836, 1837, 1838, 1839, 1840, 1841, 1928, 1965, 2010, 2011, 2015, 2025, 2035, 2036, 2041, 2046, 2049, 2062, 2068, 2082, 2089, 2103, 2108], "detriment": [27, 2061], "resize_": [27, 502, 1167, 1295, 1965, 2014, 2015, 2016, 2034, 2061, 2074, 2084, 2108], "empty_strid": [27, 2015, 2019, 2068, 2100, 2108], "empty_permut": [27, 2015, 2068, 2108], "empty_lik": [27, 30, 2015, 2019, 2021, 2025, 2034, 2036, 2068, 2082], "brief": [28, 33, 1717, 2033, 2077], "introduct": [28, 35, 1043, 1717, 2013, 2014, 2018, 2034, 2044, 2057, 2067, 2077, 2082, 2087], "mpi": [28, 1717], "gloo": [28, 48, 1717, 2048, 2055, 2077], "recv": [28, 33, 1717, 2068, 2078], "broadcast": [28, 30, 32, 34, 35, 55, 60, 99, 198, 400, 402, 403, 404, 515, 517, 519, 568, 688, 689, 690, 691, 692, 693, 694, 700, 738, 888, 944, 949, 952, 956, 957, 958, 959, 993, 1022, 1051, 1103, 1104, 1109, 1114, 1154, 1155, 1156, 1157, 1214, 1216, 1230, 1240, 1298, 1299, 1306, 1319, 1323, 1328, 1329, 1331, 1334, 1342, 1362, 1367, 1368, 1378, 1412, 1415, 1424, 1427, 1440, 1461, 1480, 1533, 1606, 1615, 1678, 1685, 1717, 1780, 1816, 1824, 1847, 1849, 1925, 1929, 1945, 1950, 1980, 2013, 2017, 2034, 2035, 2036, 2048, 2067, 2068, 2083, 2104, 2108], "all_reduc": [28, 50, 1717, 2068], "all_gath": 28, "scatter": [28, 32, 55, 515, 517, 519, 1463, 1965, 2015, 2051, 2052, 2068, 2077, 2108], "reduce_scatt": [28, 2068], "all_to_al": 28, "v1": [28, 55, 1342, 1816, 2012, 2048, 2077], "init_method": [28, 1717, 2077], "adher": [28, 978, 2017, 2082], "some_fil": 28, "machine_nam": 28, "share_folder_nam": 28, "tcpstore": [28, 47], "past": [28, 52, 64, 1020, 1067, 1072, 1086, 1087, 1717, 2051, 2106, 2107, 2110], "ask": [28, 59, 60, 2012, 2013, 2050, 2054, 2094, 2101, 2115], "infiniband": [28, 1717, 2077], "interconnect": [28, 33], "gpudirect": 28, "ethernet": 28, "ip": [28, 47], "ib": 28, "upcom": [28, 2042], "nccl_socket_ifnam": 28, "eth0": 28, "gloo_socket_ifnam": 28, "eth1": 28, "eth2": 28, "eth3": 28, "imper": 
28, "nccl_debug": 28, "nccl_debug_subsi": 28, "coll": 28, "hang": [28, 29, 32, 35, 1717, 2026, 2048, 2117], "topologi": [28, 30, 34, 37], "effort": [28, 52, 2077, 2101, 2107], "socket": [28, 38, 2033, 2077], "nccl_socket_nthread": 28, "nccl_nsocks_perthread": 28, "cloud": [28, 2082, 2087], "aw": [28, 39, 998], "gcp": [28, 2110], "primit": [28, 32, 33, 47, 1345, 2014, 2016, 2018, 2045, 2058, 2067, 2077, 2108], "kind": [28, 41, 52, 64, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 683, 1184, 1293, 1718, 1779, 1826, 1890, 2012, 2021, 2049, 2059, 2061, 2065, 2067, 2070, 2072, 2083, 2085], "connect": [28, 37, 47, 1454, 1455, 1456, 1457, 1458, 1459, 1503, 1504, 1505, 1506, 1507, 1508, 1556, 2033, 2077, 2111], "advantag": [28, 47, 48, 1440, 1487, 2048, 2051, 2064, 2065, 2077, 2082, 2098], "redund": [28, 55, 1125, 1126, 1128, 1130, 1144, 1146, 1924], "elimin": [28, 52, 64, 612, 1181, 1961, 1962, 2061, 2106, 2112], "thrash": 28, "recurr": [28, 763, 764, 1289, 1446, 1463, 1478, 1479, 1497, 1543, 1735, 1761, 1795, 2013, 2046], "device_mesh": [28, 34, 55, 1717], "init_device_mesh": [28, 34], "use_distribut": 28, "group_nam": [28, 44], "pg_option": 28, "url": [28, 47, 935, 2012, 2028, 2077], "encod": [28, 37, 44, 47, 64, 1321, 1345, 1389, 1390, 1439, 1440, 1571, 1572, 1573, 1574, 1575, 1717, 1871, 1910, 1911, 1912, 1914, 1915, 2014, 2017, 2018, 2049, 2062, 2070, 2078, 2082], "ucc": 28, "lowercas": 28, "deadlock": [28, 1717], "job": [28, 37, 40, 41, 43, 44, 46, 47, 48, 50, 1411, 1717, 1803, 1809, 2046, 2056, 2071, 2087, 2101, 2115, 2117], "exchang": [28, 47, 1012, 1097, 2046, 2064], "timedelta": [28, 47, 2030], "abort": [28, 2046, 2117], "crash": [28, 40, 47, 1310, 2033, 2043, 2077, 2079, 2087, 2100], "corrupt": [28, 47, 1463, 2046, 2059], "torch_nccl_blocking_wait": [28, 2117], "processgroupopt": 28, "processgroupnccl": [28, 2013, 2048, 2116], "is_high_priority_stream": 28, "ncclcomminit": 28, "lazi": [28, 991, 1286, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1707, 1996, 2013, 2100], "ncclcommsplit": 28, "unnecessari": [28, 32, 1724, 1725, 1765, 2017, 2043, 2046, 2049, 2062, 2070, 2086], "backend_nam": [28, 1968], "custom_backend": 28, "mesh_shap": 28, "mesh_dim_nam": 28, "dimension": [28, 35, 53, 515, 517, 884, 890, 891, 892, 955, 962, 974, 1097, 1099, 1100, 1109, 1125, 1126, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1143, 1144, 1146, 1148, 1150, 1151, 1163, 1227, 1236, 1237, 1238, 1306, 1311, 1312, 1313, 1314, 1339, 1344, 1360, 1368, 1375, 1431, 1448, 1449, 1450, 1451, 1452, 1453, 1462, 1463, 1469, 1470, 1499, 1517, 1534, 1542, 1549, 1550, 1551, 1552, 1553, 1554, 1561, 1563, 1567, 1583, 1584, 1585, 1616, 1669, 1672, 1705, 1706, 1716, 1731, 1771, 1834, 1910, 1911, 1912, 1913, 1914, 1915, 1924, 1940, 1943, 1944, 1950, 2017, 2036, 2041, 2044, 2054, 2060, 2082, 2084, 2085, 2088, 2091, 2100], "layout": [28, 34, 53, 64, 152, 193, 210, 235, 344, 345, 437, 447, 448, 449, 450, 451, 460, 546, 583, 584, 585, 586, 587, 589, 590, 619, 692, 869, 897, 902, 945, 954, 1024, 1110, 1111, 1112, 1122, 1127, 1145, 1161, 1164, 1165, 1186, 1231, 1232, 1293, 1344, 1360, 1368, 1378, 1423, 1650, 1776, 1777, 1798, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1905, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1949, 1954, 1956, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2019, 2036, 2046, 2068, 2070, 2082, 2087, 2088, 2089, 2108], "spmd": [28, 30], "nd": [28, 1329, 2052, 2060], 
"inconsist": [28, 55, 869, 1843, 2049], "scene": [28, 2062, 2087], "mesh": [28, 34, 2087], "mesh_1d": 28, "mesh_2d": 28, "dp": 28, "is_initi": [28, 2013], "is_mpi_avail": 28, "is_nccl_avail": 28, "is_gloo_avail": 28, "is_torchelastic_launch": 28, "elast": [28, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 49, 50, 51, 2013], "torchelast": [28, 31, 37, 39, 40, 41, 44, 46, 47, 48, 50, 51], "torchelastic_run_id": [28, 48], "proxi": [28, 30, 978, 2072, 2077, 2101], "rendezv": [28, 31, 37, 44, 46, 51, 2048, 2077], "null": [28, 39, 41, 44, 66, 70], "discoveri": [28, 47, 2046, 2070], "reachabl": 28, "multicast": 28, "20": [28, 32, 35, 323, 688, 723, 731, 732, 737, 741, 742, 743, 744, 745, 746, 749, 759, 763, 764, 765, 766, 767, 768, 775, 1091, 1227, 1271, 1274, 1285, 1296, 1330, 1412, 1435, 1437, 1438, 1440, 1441, 1442, 1443, 1444, 1446, 1453, 1454, 1455, 1456, 1458, 1459, 1464, 1465, 1466, 1467, 1471, 1474, 1475, 1478, 1479, 1481, 1488, 1489, 1490, 1491, 1494, 1495, 1496, 1497, 1498, 1499, 1514, 1520, 1521, 1522, 1524, 1525, 1527, 1543, 1545, 1556, 1564, 1567, 1570, 1571, 1572, 1573, 1585, 1608, 1610, 1611, 1613, 1617, 1628, 1629, 1635, 1693, 1731, 1732, 1733, 1757, 1766, 1769, 1787, 1802, 1830, 1940, 2014, 2015, 2025, 2036, 2048, 2049, 2060, 2065, 2068, 2069, 2077, 2081, 2082, 2101, 2102, 2103], "23456": 28, "clean": [28, 37, 52, 64, 986, 1047, 2012, 2033, 2070], "fcntl": 28, "nf": 28, "init": [28, 34, 37, 40, 55, 1273, 1527, 1535, 1555, 1765, 2013, 2019, 2043, 2049, 2053, 2056, 2057, 2100], "brand": [28, 30, 2049], "succe": [28, 47, 52, 60, 2046, 2049, 2063, 2113, 2115], "unexpect": [28, 30, 36, 64, 1008, 1160, 1163, 1166, 1273, 1527, 1767, 1780, 1919, 2014, 2043, 2049, 2060, 2101, 2103, 2104], "unsuccess": 28, "mnt": 28, "sharedfil": 28, "port": [28, 37, 46, 47, 48, 2025, 2055], "enum": [28, 45, 66, 799, 1586, 1587, 1588, 2049, 2067, 2071, 2072, 2077], "backend_str": 28, "uppercas": 28, "classmethod": [28, 30, 47, 66, 68, 735, 741, 742, 743, 748, 749, 759, 767, 795, 796, 797, 818, 819, 820, 827, 858, 932, 1011, 1469, 1470, 1567, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1778, 2017, 2018, 2049, 2072, 2077, 2084, 2114], "register_backend": [28, 2077, 2099], "extended_api": 28, "instanti": [28, 32, 46, 47, 48, 55, 64, 845, 1470, 1625, 1734, 1758, 1765, 2012, 2014, 2016, 2017, 2046, 2049, 2057, 2062, 2065, 2084], "3rd": [28, 35, 47, 619, 2044], "processgroup": [28, 30, 32, 33, 55, 1717], "four": [28, 1319, 1455, 2049, 2054, 2077, 2079], "c10d": [28, 37, 46, 48, 51, 683, 1717, 2048, 2068], "distributedbackendopt": 28, "get_backend": [28, 47], "get_rank": [28, 55, 1567], "consecut": [28, 55, 612, 1760, 1799, 1950, 1961, 1962, 2035, 2067], "get_world_s": 28, "pattern": [28, 30, 33, 795, 796, 797, 799, 858, 865, 1051, 1201, 1463, 1587, 1761, 1906, 2014, 2017, 2043, 2046, 2049, 2051, 2055, 2062, 2073, 2074, 2082, 2098, 2100, 2101, 2103, 2112, 2115], "launcher": [28, 33, 48], "pg": [28, 1717], "destructor": [28, 2033, 2079], "ncclcommabort": 28, "gc": [28, 488, 2079], "fault": [28, 31, 37, 46, 47, 1163], "toler": [28, 31, 37, 46, 47, 64, 697, 923, 924, 1262, 1289, 1290, 1328, 1331, 1346, 1780, 1787, 2014, 2089], "_after_": [28, 2048], "unsupport": [28, 37, 52, 60, 66, 80, 83, 84, 86, 89, 2014, 2018, 2035, 2062, 2065, 2077, 2082, 2104, 2109], "untest": [28, 2035], "filestor": [28, 47], "hashstor": 28, "client": [28, 32, 47, 1065, 1187, 1192, 2070, 2104], "host_nam": 28, "hostnam": [28, 37, 47, 2071], "listen": 28, "is_mast": 28, "300": [28, 37, 1351, 1440, 1762, 1764, 2069], "wait_for_work": 28, 
"multi_ten": 28, "underli": [28, 30, 32, 35, 37, 52, 55, 60, 64, 99, 313, 328, 379, 402, 404, 475, 476, 477, 478, 485, 501, 522, 526, 559, 560, 561, 615, 751, 760, 882, 1011, 1161, 1188, 1204, 1205, 1206, 1244, 1248, 1273, 1345, 1422, 1677, 1845, 1949, 1964, 1982, 2014, 2017, 2036, 2046, 2051, 2055, 2077, 2084, 2086, 2094, 2103], "tcpserver": 28, "master_listen_fd": 28, "use_libuv": 28, "libuv": 28, "datetim": [28, 2030, 2104], "server_stor": 28, "127": [28, 797, 2035, 2072, 2087], "1234": [28, 47, 1250, 1963], "client_stor": 28, "first_kei": 28, "first_valu": 28, "hashmap": 28, "file_nam": [28, 2012, 2028, 2070], "store1": 28, "store2": 28, "prefixstor": 28, "old": [28, 36, 52, 64, 488, 858, 1160, 1196, 1210, 1466, 1769, 1777, 1811, 1859, 2011, 2013, 2014, 2043, 2046, 2049, 2063, 2072, 2087, 2103], "throw": [28, 29, 63, 64, 192, 325, 330, 547, 616, 1258, 1273, 1314, 1363, 1527, 1717, 1719, 1720, 1761, 1772, 1823, 1965, 2036, 2043, 2061, 2077, 2088, 2101, 2104, 2113, 2117], "whose": [28, 35, 55, 64, 546, 863, 902, 966, 993, 1097, 1109, 1114, 1163, 1216, 1227, 1230, 1236, 1273, 1279, 1290, 1298, 1344, 1360, 1362, 1424, 1527, 1717, 1773, 1821, 1878, 1946, 2017, 2043, 2049, 2052, 2054, 2070, 2082, 2087, 2088, 2089, 2100, 2113], "quantiti": [28, 56, 61, 1196, 1198, 1201, 1309, 1310, 1337, 1492, 1742, 1743, 1745, 1746, 1748, 1751, 1752, 1753, 1754, 1811, 2021, 2029, 2050, 2054], "compare_set": 28, "arg2": [28, 53], "expected_valu": 28, "desired_valu": 28, "second_valu": 28, "overload": [28, 52, 64, 619, 2016, 2017, 2021, 2065], "bad_kei": 28, "num_kei": 28, "written": [28, 30, 37, 40, 45, 60, 935, 1067, 1161, 1463, 1797, 2014, 2016, 2024, 2027, 2043, 2048, 2049, 2050, 2057, 2062, 2065, 2067, 2070, 2071, 2084, 2087, 2094, 2100, 2101, 2102, 2106, 2107, 2112, 2113], "delete_kei": 28, "set_timeout": 28, "grain": [28, 52, 862, 865, 1685, 2043, 2067, 2082, 2094, 2106, 2113], "plai": [28, 2102, 2104], "new_group": [28, 55, 1567], "opaqu": [28, 36, 978, 1009, 1043, 1044, 1160, 2109], "use_local_synchron": 28, "group_desc": 28, "enqueu": [28, 63, 1012, 1014, 1015, 1400, 1984, 2046, 2078], "groupmemb": 28, "non_group_memb": 28, "significantli": [28, 52, 55, 913, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1871, 2082, 2099], "taken": [28, 33, 35, 52, 63, 64, 66, 71, 869, 1130, 1132, 1155, 1156, 1446, 1462, 1470, 1534, 1542, 1617, 1625, 1716, 1875, 1908, 2041, 2043, 2045, 2046, 2049, 2051, 2052, 2056, 2062, 2067, 2070, 2071, 2113, 2115], "get_group_rank": 28, "global_rank": [28, 37], "translat": [28, 1227, 2043, 2064, 2065, 2079], "get_global_rank": 28, "group_rank": [28, 37, 48], "get_process_group_rank": 28, "inter": [28, 37, 40, 55, 1224, 1873, 2045, 2048, 2077], "intra": [28, 32, 55, 2045, 2048], "_init_backend": 28, "ndarrai": [28, 36, 460, 883, 1160, 1162, 1910, 1911, 1912, 1913, 1914, 1915, 1943, 2067, 2087, 2089, 2104], "dst": [28, 2012, 2070], "destin": [28, 30, 41, 44, 45, 55, 211, 418, 419, 605, 625, 1023, 1024, 1214, 1273, 1380, 1381, 1527, 2015, 2065, 2077, 2078, 2084], "unspecifi": [28, 483, 823, 824, 825, 828, 923, 924, 1188, 1561, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1907, 1913, 2024, 2082, 2087, 2095], "sender": [28, 2079], "isend": 28, "irecv": 28, "is_complet": 28, "finish": [28, 30, 37, 44, 45, 47, 50, 63, 488, 1167, 2046, 2048, 2053, 2063, 2071, 2077, 2079, 2115], "send_object_list": 28, "object_list": 28, "picklabl": [28, 1273, 1527, 2070], "sent": [28, 30, 1047, 
2017, 2033, 2059, 2077, 2078, 2079], "current_devic": [28, 30, 55, 1020, 1028, 1029, 1036, 1037, 1053, 1057, 1059, 1060, 1061, 1063, 1065, 1066, 1067, 1072, 1073, 1074, 1075, 1085, 1086, 1087, 1402, 1403, 1968, 1986, 1991, 1992, 2009, 2013, 2085, 2102], "set_devic": [28, 30, 55, 1717, 1867, 2013, 2077, 2085], "insecur": [28, 1345], "malici": [28, 1345, 2070], "ineffici": [28, 55, 2046, 2072], "recv_object_list": 28, "batch_isend_irecv": 28, "p2p_op_list": 28, "p2pop": 28, "op_list": 28, "send_tensor": 28, "recv_tensor": 28, "send_op": 28, "recv_op": 28, "req": 28, "p2p": [28, 47], "async_op": 28, "onto": [28, 55, 64, 66, 68, 1071, 1281, 1284, 1345, 1737, 1966, 2012, 2021, 2025, 2033, 2046, 2050, 2051, 2057, 2071, 2112, 2115], "get_futur": [28, 1717], "regard": [28, 1457, 1458, 1459, 1469, 1470, 1624, 1625, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 2014, 2079, 2082, 2107], "101": [28, 932, 1431], "overwrot": 28, "broadcast_object_list": 28, "redoptyp": 28, "bitwis": [28, 948, 950, 951, 953, 2018, 2060, 2073, 2091], "reduceop": 28, "int64": [28, 86, 193, 210, 315, 323, 395, 869, 947, 960, 1110, 1123, 1124, 1162, 1616, 1758, 1838, 1842, 1843, 1863, 1910, 1911, 1912, 1914, 1915, 2021, 2046, 2082, 2084, 2085, 2088, 2118], "1j": [28, 701, 991, 992, 1269, 1851, 1852, 1974, 2043, 2054, 2062], "2j": [28, 701, 991, 992, 1851, 1852, 1878, 1974, 2043], "tensor_list": [28, 2036], "all_gather_into_tensor": [28, 2068], "output_tensor": 28, "input_tensor": [28, 2106], "accommod": [28, 1523, 1524, 1525], "ii": [28, 994, 1109, 1497, 1498], "tensor_in": 28, "tensor_out": 28, "tensor_out2": 28, "all_gather_object": 28, "obj": [28, 64, 884, 1032, 1259, 1260, 1279, 1280, 1285, 1859, 1969, 1989, 2062, 2063, 2070], "pickabl": 28, "popul": [28, 35, 45, 55, 63, 64, 337, 497, 505, 506, 1186, 1197, 1374, 1419, 2046, 2049, 2103, 2112], "unmodifi": [28, 33, 2102], "responsibl": 28, "gather_object": 28, "gather_list": 28, "object_gather_list": 28, "scatter_list": 28, "tensor_s": 28, "t_one": 28, "t_five": 28, "scatter_object_list": 28, "scatter_object_output_list": 28, "scatter_object_input_list": 28, "output_list": 28, "input_list": 28, "reduce_scatter_tensor": [28, 2068], "all_to_all_singl": [28, 2068], "output_split_s": 28, "input_split_s": 28, "13": [28, 879, 939, 970, 1091, 1107, 1110, 1154, 1227, 1237, 1277, 1363, 1447, 1474, 1475, 1524, 1562, 1628, 1629, 1900, 1944, 1978, 2014, 2018, 2068, 2082, 2107, 2113], "14": [28, 66, 74, 75, 323, 519, 969, 1051, 1052, 1092, 1107, 1237, 1326, 1332, 1417, 1447, 1524, 1827, 1834, 1871, 1900, 1944, 1945, 1950, 1978, 2014, 2018, 2021, 2052, 2068, 2082, 2086, 2087, 2101, 2102], "15": [28, 64, 619, 1091, 1107, 1215, 1227, 1237, 1296, 1310, 1447, 1524, 1525, 1762, 1764, 1819, 1891, 1900, 1948, 1978, 2014, 2015, 2018, 2067, 2082, 2102], "uneven": [28, 29, 32, 34, 1717], "18": [28, 323, 447, 688, 1110, 1227, 1270, 1346, 1524, 2014, 2021, 2065, 2067, 2068, 2082, 2095, 2101, 2102], "21": [28, 35, 688, 939, 966, 1092, 1332, 1827, 1950, 2014, 2082], "22": [28, 35, 323, 619, 1346, 1707, 1762, 1764, 2014, 2061, 2082, 2101, 2102], "23": [28, 515, 1091, 1346, 1871, 1886, 2014, 2082, 2113], "31": [28, 952, 1438, 1496, 1522, 1707, 1886, 2067], "33": [28, 741, 742, 743, 744, 745, 746, 775, 1091, 1273, 1454, 1455, 1456, 1458, 1459, 1525, 1527, 1608, 1610, 1611, 1613, 2014, 2067], "34": [28, 1963, 2102, 2113], "35": [28, 1332, 1442, 1443, 1490, 1491, 1567, 1827], "36": [28, 323, 1152, 1227, 2102], "input_split": 28, "output_split": 28, "5j": 28, "6j": 28, "7j": 28, "8j": 28, "9j": 28, 
"10j": 28, "11j": 28, "12j": 28, "13j": 28, "14j": 28, "15j": 28, "16j": 28, "output_tensor_list": 28, "input_tensor_list": 28, "monitored_barri": [28, 2068], "wait_all_rank": 28, "pend": [28, 981, 2046, 2077, 2087, 2098], "band": 28, "bor": 28, "bxor": 28, "premul_sum": 28, "suppos": [28, 64, 1210, 1270, 1812, 2024, 2052, 2082, 2103, 2104, 2105], "_make_nccl_premul_sum": 28, "__members__": 28, "reduce_op": 28, "mention": [28, 1182, 2012, 2016, 2017, 2035, 2043, 2046, 2057, 2067, 2082, 2086, 2094, 2107, 2113], "stand": [28, 33, 1173, 1177, 2016, 2070, 2079], "exemplifi": 28, "cpp_extens": [28, 2013], "cpp_c10d_extens": 28, "torchrun": [28, 31, 33, 39, 46, 51], "benefici": [28, 90, 1724, 1725, 2057], "nproc_per_nod": [28, 33, 39], "num_gpus_you_hav": 28, "your_training_script": [28, 46, 48], "arg3": [28, 53], "192": [28, 619, 2067], "168": 28, "nnode": [28, 46, 48], "local_process_rank": 28, "local_rank": [28, 37, 40, 45, 48, 51, 1567], "argpars": [28, 48, 2046], "parser": [28, 48, 2046], "argumentpars": [28, 48, 2046], "add_argu": [28, 48, 2046], "parse_arg": [28, 39, 48, 51, 2046, 2067], "onward": [28, 48, 1811, 2083], "dash": [28, 48, 2077, 2079], "previous": [28, 47, 48, 52, 56, 59, 60, 61, 64, 701, 795, 796, 932, 1178, 1201, 1210, 1281, 1922, 1923, 1972, 1973, 1977, 2014, 2044, 2046, 2057, 2070, 2076, 2077, 2083, 2095, 2101, 2109, 2115], "underscor": [28, 48, 2012, 2035, 2077, 2088], "unrecogn": [28, 48, 2017], "output_devic": [28, 32, 48, 1463, 1567, 1700, 1717], "adjust": [28, 30, 32, 34, 35, 55, 1440, 1811, 1883, 2013, 2045, 2073, 2095, 2115, 2116], "filesystem": [28, 30, 2012, 2070], "12042": 28, "wrong": [28, 64, 86, 904, 905, 909, 981, 1271, 1274, 2048, 2059, 2063, 2067, 2069, 2098, 2101, 2102], "imagenet": [28, 2041], "suit": [28, 2014, 2016, 2017, 2067, 2073, 2077, 2110], "pdb": [28, 1278, 1285, 1291, 2014, 2016], "streamlin": [28, 53, 83], "attach": [28, 33, 53, 58, 63, 64, 141, 715, 716, 717, 718, 719, 720, 721, 722, 723, 733, 734, 735, 736, 796, 841, 843, 868, 1707, 2012, 2014, 2016, 2034, 2043, 2049, 2072, 2078, 2092, 2100], "rerout": 28, "sync": [28, 32, 55, 63, 64, 488, 1717, 2046, 2048, 2053, 2098, 2103], "group_gloo": 28, "29501": 28, "monitoredbarri": 28, "transport": [28, 2077], "598": 28, "2401": 28, "db00": 28, "eef0": 28, "1100": 28, "3560": 28, "1c05": 28, "25d": 28, "8594": 28, "twolinlayernet": 28, "ddp": [28, 29, 30, 32, 33, 55, 683, 1567, 1717, 2013, 2046, 2048, 2077, 2104], "i0607": 28, "739390": 28, "515217": 28, "173": 28, "broadcast_buff": [28, 1717], "bucket_cap_byt": 28, "26214400": 28, "find_unused_paramet": [28, 1717, 2048], "gradient_as_bucket_view": [28, 1717], "is_multi_device_modul": 28, "num_parameter_tensor": 28, "total_parameter_size_byt": 28, "440": 28, "bucket_s": 28, "module_nam": [28, 64, 820, 858, 2070], "nccl_async_error_handl": [28, 2046], "nccl_blocking_wait": 28, "nccl_ib_timeout": 28, "nccl_nthread": 28, "58": [28, 2107], "085681": 28, "544067": 28, "344": 28, "unused_parameter_s": 28, "40838608": 28, "5983335": 28, "4326421": 28, "comp": [28, 35], "4207652": 28, "085693": 28, "544066": 28, "42850427": 28, "3885553": 28, "2357981": 28, "2234674": 28, "enhanc": [28, 55, 2100], "unus": [28, 32, 47, 64, 918, 1047, 1061, 1278, 1285, 1488, 1717, 1780, 2014, 2016, 2017, 2046, 2048, 2055, 2070], "went": [28, 64, 2101], "wasn": [28, 47, 1345, 2014], "va": 28, "lue": 28, "indirectli": 28, "outstand": [28, 2077], "stuck": [28, 37, 50, 2117], "uninform": 28, "root": [28, 30, 33, 40, 47, 48, 52, 55, 64, 796, 1175, 1184, 1311, 1312, 1542, 1682, 
1716, 1795, 1858, 1917, 2027, 2043, 2052, 2053, 2070, 2077, 2078, 2082, 2104], "nontrivi": [28, 1188, 1199, 2046, 2100], "reveal": [28, 2048], "default_pg": 28, "opt": [28, 32, 1285, 2014, 2053, 2054], "longtensor": [28, 33, 136, 137, 138, 317, 319, 321, 454, 473, 515, 517, 519, 878, 879, 1214, 1248, 1295, 1413, 1469, 1470, 1530, 1624, 1625, 1671, 1771, 1863, 1900, 1913, 1939, 1947, 1980, 2085, 2088], "set_debug_level": 28, "set_debug_level_from_env": 28, "get_debug_level": 28, "disterror": 28, "distbackenderror": 28, "thrown": [28, 63, 619, 882, 884, 1065, 1107, 1237, 1273, 1304, 1313, 1315, 1317, 1320, 1321, 1332, 1527, 1685, 1722, 1978, 2065, 2070, 2117], "distnetworkerror": 28, "ex": [28, 39, 1188, 1717, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 2030, 2112], "diststoreerror": 28, "outlin": [29, 2053, 2078, 2104], "joinabl": [29, 32, 1717, 2013], "joinhook": [29, 2013], "throw_on_early_termin": [29, 1717], "shadow": [29, 32, 1717, 2092, 2093], "notify_join_context": 29, "zeroredundancyoptim": [29, 32, 1717, 2013], "vacuou": 29, "inherit": [29, 1750, 2014, 2016, 2036, 2049, 2057, 2059], "join_hook": [29, 32, 1717], "join_devic": [29, 32], "join_process_group": [29, 32], "repeatedli": [29, 2046, 2082], "main_hook": 29, "post_hook": 29, "is_last_join": 29, "dcp": 30, "reshard": [30, 55], "storag": [30, 55, 223, 313, 341, 344, 345, 437, 460, 485, 501, 522, 526, 560, 561, 586, 587, 589, 590, 882, 902, 904, 905, 909, 1032, 1100, 1161, 1244, 1248, 1259, 1273, 1281, 1345, 1367, 1422, 1423, 1463, 1527, 1544, 1845, 1859, 1866, 1896, 1901, 1905, 1919, 1943, 1949, 1966, 1989, 2012, 2013, 2015, 2021, 2028, 2033, 2036, 2043, 2046, 2059, 2062, 2070, 2077, 2082, 2085, 2086, 2088, 2103], "entrypoint": [30, 34, 37, 40, 45, 48, 49, 52, 2021, 2033], "state_dict_sav": 30, "checkpoint_id": 30, "storage_writ": 30, "planner": 30, "style": [30, 34, 36, 64, 881, 1104, 1160, 1771, 1794, 2014, 2016, 2017, 2067, 2070, 2087, 2105], "shardedtensor": [30, 55], "dtensor": [30, 34, 55], "save_state_dict": [30, 2013], "fsdp": [30, 33, 55, 488, 683, 2013, 2104], "shardingstrategi": [30, 55, 2013], "hybrid_shard": [30, 55], "shard_group": 30, "pathlik": [30, 52, 64, 1345, 1859, 2070], "storagewrit": [30, 2013], "writer": [30, 2030, 2057, 2087], "saveplann": [30, 2013], "my_model": [30, 1717, 2014, 2098, 2104, 2113], "mymodul": [30, 52, 53, 55, 64, 1277, 1278, 1280, 1284, 1285, 1291, 1528, 1529, 1537, 1538, 2014, 2016, 2017, 2051, 2062, 2077], "fs_storage_writ": 30, "filesystemwrit": [30, 2013], "async_sav": [30, 2013], "de": [30, 2016, 2070, 2072], "checkpoint_futur": 30, "coordinator_rank": 30, "no_dist": 30, "state_dict_load": 30, "storage_read": 30, "fullfil": 30, "po": [30, 2067], "storageread": [30, 2013], "reader": [30, 2101], "loadplann": [30, 2013], "adagrad": [30, 1469, 2024, 2069, 2077], "model_state_dict": [30, 2053, 2065], "fs_storage_read": 30, "filesystemread": [30, 2013], "asyncstag": [30, 2013], "stage_data": 30, "opportun": [30, 1304, 2014, 2078, 2103, 2104], "reflect": [30, 59, 460, 522, 558, 1162, 1163, 1166, 1270, 1454, 1455, 1456, 1503, 1504, 1505, 1549, 1550, 1551, 1633, 1672, 1767, 1812, 1924, 2014, 2015, 2051, 2086, 2098], "ram": [30, 1345, 2046], "responds": 30, "should_synchronize_after_execut": 30, "assumpt": [30, 34, 44, 48, 52, 923, 979, 1446, 1480, 2043, 2048, 2054, 2077, 2078, 2082, 2100, 2101, 2104, 2109, 2113], "synchronize_stag": 30, "innocul": 30, "statefult": 30, "blockingasyncstag": [30, 2013], "cache_staged_state_dict": 30, "type_check": 30, "automodul": 30, "act": 
[30, 35, 37, 45, 60, 862, 1440, 1528, 1529, 1538, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1749, 1751, 1752, 1753, 1754, 1755, 1965, 2046, 2057, 2061, 2069, 2101], "told": [30, 2016], "role": [30, 37, 39, 47, 48], "read_metadata": 30, "set_up_storage_read": 30, "prepare_local_plan": 30, "prepare_global_plan": 30, "read_data": 30, "central": [30, 66, 1227, 2054, 2056], "loadplan": [30, 2013], "storage_data": 30, "load_byt": 30, "bytesio": [30, 52, 1281, 1284, 1345, 1859, 2067, 2072], "resolve_tensor": 30, "storagelay": 30, "schedul": [30, 37, 40, 46, 63, 683, 1795, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 2013, 2023, 2048, 2052, 2056, 2071, 2104], "checkpiont_id": 30, "is_coordin": 30, "validate_checkpoint_id": 30, "stroag": 30, "set_up_storage_writ": 30, "write_data": 30, "mark": [30, 33, 47, 63, 64, 66, 71, 75, 488, 829, 904, 905, 906, 909, 932, 979, 981, 1182, 1389, 1390, 1717, 1779, 2014, 2016, 2043, 2048, 2049, 2057, 2062, 2070, 2071, 2078, 2088, 2094, 2098, 2101, 2104, 2105], "recover": 30, "writeresult": 30, "saveplan": [30, 2013], "storage_meta": 30, "storagemeta": 30, "todo": [30, 84, 85, 829, 863, 865, 866, 1198, 2093, 2103, 2109], "resolve_data": 30, "writeitem": [30, 2013], "tensor_data": 30, "set_up_plann": 30, "create_local_plan": 30, "create_global_plan": 30, "commit_tensor": 30, "defaultloadplann": [30, 2013], "rewrit": [30, 52, 60, 66, 68, 74, 75, 84, 85, 2027, 2043, 2054, 2064, 2065, 2067, 2101, 2102], "requit": 30, "intrincaci": 30, "renameplann": 30, "state_dict_typ": [30, 55], "original_state_dict": 30, "foo_": [30, 1167], "flatten_sharded_tensor": 30, "_flatten_sharded_tensor": 30, "flatten_state_dict": 30, "read_item": 30, "dest_index": 30, "fqn": [30, 33, 34, 52, 2029, 2093], "metamodelmateri": 30, "defaultsaveplann": [30, 2013], "global_plan": 30, "finish_plan": 30, "central_plan": 30, "resolve_byt": 30, "alia": [30, 35, 41, 52, 93, 94, 113, 189, 190, 355, 408, 438, 439, 446, 525, 543, 685, 870, 871, 872, 873, 874, 875, 876, 898, 941, 972, 988, 989, 1095, 1102, 1105, 1116, 1117, 1118, 1120, 1121, 1147, 1218, 1228, 1229, 1241, 1242, 1243, 1251, 1271, 1273, 1300, 1301, 1308, 1324, 1328, 1331, 1359, 1369, 1370, 1380, 1414, 1416, 1426, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1527, 1644, 1720, 1758, 1774, 1814, 1819, 1822, 1857, 1879, 1894, 1897, 1899, 1926, 1930, 1931, 1951, 1957, 1981, 2017, 2049, 2068, 2083, 2084, 2088, 2103, 2108], "readitem": [30, 2013], "planner_data": 30, "loaditemtyp": 30, "metadataindex": 30, "dest_offset": 30, "storage_index": 30, "storage_offset": [30, 140, 522, 619, 882, 2015, 2021, 2068, 2108], "tandem": 30, "fp16planner": 30, "write_item": 30, "writeitemtyp": 30, "byte_io": 30, "itertool": [30, 35, 962, 975], "islic": 30, "dataclass": [30, 52, 53, 2112], "ddploadbalancingplann": 30, "all_plan": 30, "saveextradataplann": 30, "merged_data": 30, "new_plan": 30, "idempot": [30, 2077, 2079], "safeti": [30, 37, 52, 64, 488, 978, 979, 1198, 2014, 2017, 2035, 2061, 2105], "hi": [30, 1497, 1498, 2014, 2016, 2054, 2101], "peak": [30, 32, 55, 1057, 1059, 1065, 1073, 1074, 1075, 1717, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1803, 1809, 2052, 2110], "late": [30, 47, 488], "tensor_storage_s": 30, "single_file_per_rank": 30, "sync_fil": 30, "thread_count": 30, "per_thread_copy_ahead": 30, "10000000": 30, "simplif": [30, 2100], "atom": [30, 47, 64, 2018], "distributedtensor": [30, 34], "dedup_replicated_tensor": 30, 
"dedup_save_to_lowest_rank": 30, "lookup_object": 30, "transform_object": 30, "allow_partial_load": 30, "lookup_tensor": 30, "transform_tensor": 30, "legaci": [30, 36, 48, 1635, 2071, 2085, 2088], "layer1": 30, "unparallel": 30, "tackl": [30, 2104], "get_model_state_dict": [30, 2013], "get_optimizer_state_dict": [30, 2013], "uniform": [30, 483, 610, 946, 1462, 1546, 1616, 1836, 1837, 1884, 2013, 2041, 2068, 2091, 2108], "hese": 30, "get_state_dict": [30, 2013], "fully_shard": 30, "tensor_parallel": 30, "parallelize_modul": [30, 34, 2013], "hide": [30, 33, 61, 1178, 1977, 2050], "canon": [30, 37, 40, 1184, 1195, 2014, 2103], "named_paramet": [30, 52, 55, 57, 59, 1166, 1273, 1277, 1527, 2057, 2062, 2069], "named_buff": [30, 52, 55, 1273, 1527, 2057, 2062], "fullyshardeddataparallel": [30, 683, 2013, 2052], "fsdp_model": [30, 55], "deepcopi": [30, 59, 2072, 2093], "fsdp_optim": 30, "ddp_optim": 30, "ddp_state_dict": 30, "ddp_optim_state_dict": 30, "fsdp_state_dict": 30, "fsdp_optim_state_dict": 30, "ddp_optim_st": 30, "statedictopt": [30, 2013], "valuetyp": 30, "optimizerstatetyp": 30, "set_state_dict": [30, 2013], "optim_state_dict": [30, 55], "counterpart": [30, 861, 868, 1343, 1717, 2017, 2027, 2037, 2091, 2092, 2100, 2104], "missing_kei": [30, 1273, 1527], "miss": [30, 45, 692, 1166, 1190, 1273, 1368, 1378, 1460, 1489, 1490, 1491, 1527, 1650, 1767, 2025, 2049, 2063, 2067, 2082, 2100, 2105, 2116], "unexpected_kei": [30, 1273, 1527], "set_model_state_dict": [30, 2013], "set_optimizer_state_dict": [30, 2013], "full_state_dict": [30, 55], "cpu_offload": [30, 55], "ignore_frozen_param": 30, "keep_submodule_prefix": 30, "broadcast_from_rank0": 30, "flatten_optimizer_state_dict": 30, "offload": [30, 55], "oom": [30, 33, 55, 2051, 2115], "rank0": [30, 37, 47, 55], "frozen": [30, 32, 55, 1188, 1277, 1283, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 2018, 2063], "offlin": [30, 1284], "betwe": 30, "format_util": 30, "dcp_to_torch_sav": [30, 2013], "dcp_checkpoint_dir": 30, "torch_save_path": 30, "torch_save_to_dcp": [30, 2013], "onlin": [30, 64, 1783], "broadcastingtorchsaveread": [30, 2013], "dynamicmetaloadplann": [30, 2013], "sd": [30, 772, 777], "path_to_model": 30, "incurr": 30, "hopefulli": [30, 2052, 2103], "extnd": 30, "quickstart": 31, "agent": [31, 39, 40, 41, 44, 46, 48, 50, 2077], "expir": 31, "metric": [31, 1057, 1059, 1065, 1811, 2030, 2057, 2071, 2087, 2104, 2110], "plane": [31, 37, 47, 741, 742, 743, 744, 745, 746, 769, 770, 771, 772, 775, 776, 777, 785, 786, 1097, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1454, 1455, 1456, 1457, 1458, 1459, 1474, 1475, 1494, 1495, 1496, 1499, 1515, 1520, 1521, 1522, 1592, 1593, 1594, 1595, 1596, 1597, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1628, 1629, 1651, 1654, 1655, 1656, 1658, 1659, 1660, 1733, 1769, 1832, 1833, 1855], "kubernet": 31, "distributedoptim": [32, 1717, 2013, 2077, 2078], "rref": [32, 1717, 2013, 2017, 2078], "optimizer_class": 32, "params_rref": 32, "get_gradi": [32, 2068, 2077, 2078], "multithread": [32, 921, 2046], "dist_autograd": [32, 1717, 2077, 2078], "rpc": [32, 63, 1717, 2013, 2017, 2078, 2079], "context_id": [32, 1717, 2077, 2078], "rref1": [32, 2077, 2078], "worker1": [32, 63, 1717, 2077, 2078], "rref2": [32, 2077, 2078], "to_her": [32, 1717, 2068, 2077, 2078, 2079], "dist_optim": [32, 1717, 2078], "postlocalsgdoptim": [32, 2013], "afer": 32, "modelaverag": 32, "localsgd": 32, "model_averag": 32, "post_localsgd_hook": 32, "postlocalsgdst": 32, "subgroup": 
32, "start_localsgd_it": 32, "warmup_step": 32, "local_optim": 32, "periodicmodelaverag": 32, "checkpoint": [32, 33, 48, 51, 55, 935, 1345, 1717, 2012, 2013, 2028, 2051, 2052, 2053, 2062, 2065, 2079, 2098], "parameters_as_bucket_view": 32, "overlap_with_ddp": 32, "consumpt": [32, 59, 1770, 2082, 2087, 2094], "partit": [32, 33, 34, 1431, 1685, 1950, 2067, 2068, 2078, 2079, 2104, 2112], "registr": [32, 63, 1051, 1709, 1714, 1715, 1717, 1734, 1737, 2021, 2065, 2070, 2099], "offset": [32, 227, 228, 229, 230, 341, 522, 560, 749, 882, 1097, 1098, 1099, 1100, 1163, 1308, 1470, 1589, 1590, 1625, 1717, 1829, 1830, 1908, 1954, 1956, 2015, 2017, 2062, 2102, 2108], "intact": [32, 2077], "ddp_zero_hook": 32, "disjointli": 32, "trail": [32, 1760, 1762, 2036, 2041, 2044, 2049], "wari": 32, "static_graph": [32, 1717], "third": [32, 35, 1108, 1109, 1363, 1431, 1438, 1456, 1459, 1496, 1522, 1809, 1811, 2013, 2014, 2021, 2046, 2050, 2053, 2057, 2070, 2071, 2101], "add_param_group": [32, 1737, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "param_group": [32, 55, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1791, 1794, 1795, 1796, 1797, 1798, 1805, 1808], "trainabl": [32, 1054, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1789, 1794, 1795, 1796, 1797, 1798, 2049], "consolidate_state_dict": 32, "consolid": [32, 55], "pertain": 32, "alpha": [33, 35, 99, 100, 101, 102, 107, 108, 109, 110, 111, 112, 153, 154, 314, 315, 490, 556, 563, 564, 565, 566, 688, 689, 692, 693, 694, 747, 773, 778, 944, 1051, 1052, 1231, 1245, 1435, 1445, 1468, 1515, 1555, 1599, 1607, 1622, 1623, 1633, 1651, 1686, 1781, 1795, 1887, 1888, 1901, 1906, 1920, 1925, 1926, 2015, 2043, 2049, 2067, 2069, 2104, 2108], "migrat": [33, 48, 56, 1375, 1769, 2053, 2075], "pippi": 33, "micro": 33, "convent": [33, 47, 52, 55, 64, 337, 965, 1109, 1127, 1129, 1153, 1176, 1186, 1188, 1441, 1442, 1443, 1489, 1490, 1491, 1567, 1856, 1859, 1958, 2012, 2028, 2036, 2043, 2054, 2057, 2062], "promis": 33, "intrus": [33, 2049], "toolkit": [33, 64], "said": [33, 912, 913, 914, 915, 916, 917, 978, 2024, 2060, 2101], "gpipe": 33, "1f1b": 33, "interleav": [33, 1802], "bf": [33, 1576, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "infrastrutur": 33, "pp": [33, 1270, 1480, 1891], "compos": [33, 34, 35, 59, 64, 741, 742, 743, 744, 745, 746, 769, 770, 775, 776, 777, 785, 786, 967, 1168, 1170, 1171, 1172, 1178, 1273, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1454, 1455, 1456, 1457, 1458, 1459, 1474, 1475, 1494, 1495, 1496, 1515, 1520, 1521, 1522, 1527, 1576, 1581, 1582, 1592, 1593, 1594, 1595, 1596, 1597, 1600, 1608, 1609, 1610, 1611, 1612, 1613, 1628, 1629, 1651, 1654, 1655, 1656, 1658, 1659, 1660, 1832, 1833, 1977, 2013, 2014, 2017, 2034, 2049, 2050, 2053, 2057, 2065, 2067, 2070, 2087, 2108, 2109], "torchtitan": 33, "3d": [33, 704, 707, 710, 738, 743, 746, 770, 772, 777, 782, 1375, 1430, 1434, 1438, 1441, 1443, 1456, 1459, 1466, 1467, 1473, 1475, 1489, 1491, 1496, 1522, 1533, 1580, 1594, 1597, 1598, 1602, 1610, 1613, 1621, 1627, 1629, 1644, 1656, 1660, 1672, 1704, 2036, 2060, 2072, 2082, 2087], "llama": [33, 2052], "pipeliningshapeerror": 33, "paral": 33, "portion": [33, 55, 1158, 1463, 1559, 1674, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 2073, 2105, 2113], "traceabl": [33, 52, 64, 627, 820, 1193, 2014, 2021, 2072, 2100], "appeal": 33, "schedulegpip": 33, "n_microbatch": 33, "in_dim": [33, 60, 61, 896, 909, 910, 1168, 1178, 1977, 2050], "servic": [33, 39, 2048, 2100], "condens": 33, "partition": 
33, "model_arg": [33, 2065], "modelarg": 33, "tok_embed": 33, "moduledict": [33, 1731, 1732, 1733, 2016, 2057], "witout": 33, "layer_id": 33, "n_layer": 33, "transformerblock": [33, 34, 2052], "token": [33, 47, 1009, 1043, 1044, 1054, 2012, 2018, 2106, 2108], "freqs_ci": 33, "meta": [33, 49, 53, 55, 59, 64, 338, 1196, 2013, 2021, 2065, 2077, 2085, 2087, 2089, 2100, 2103, 2105, 2112], "num_stag": 33, "stage_index": 33, "elif": [33, 64, 1280, 1492, 2016, 2017, 2055, 2065, 2101, 2112], "input_arg": [33, 52, 1779, 2067], "example_input_microbatch": 33, "output_arg": 33, "emb": [33, 1100, 1866, 1896], "modulelist": [33, 1556, 2057], "lmhead": 33, "lin": 33, "in_featur": [33, 59, 723, 731, 732, 735, 736, 759, 767, 1176, 1273, 1431, 1512, 1514, 1527, 1707, 1731, 1732, 1733, 1765, 1766, 1769, 2025, 2057], "out_featur": [33, 59, 723, 731, 732, 735, 736, 759, 767, 1176, 1273, 1444, 1512, 1514, 1527, 1707, 1731, 1732, 1733, 1765, 1766, 1769, 2025, 2057], "proj": [33, 1497], "splitpoint": 33, "pipe": [33, 37, 50, 2077, 2117], "mod": [33, 52, 53, 64, 724, 725, 735, 741, 742, 743, 748, 749, 759, 767, 813, 814, 815, 816, 839, 840, 868, 1166, 1276, 1277, 1283, 1290, 1767, 1969, 2016, 2017, 2029, 2070, 2092, 2099, 2113], "num_chunk": 33, "example_arg": [33, 52], "split_spec": 33, "submod_0": 33, "interpretermodul": [33, 52], "submod_1": 33, "reconstruct": [33, 1011, 1363, 1439, 1440, 2054, 2062, 2070, 2101], "replai": [33, 1009, 1043, 2046, 2065, 2071, 2098, 2101, 2113], "stage_mod": 33, "get_stage_modul": 33, "stage_idx": 33, "build_stag": 33, "kept": [33, 55, 59, 64, 700, 782, 788, 1441, 1442, 1443, 1489, 1490, 1491, 1567, 1644, 1704, 2017, 2033, 2052, 2075, 2082], "huggingfac": [33, 2106, 2110], "gpt2": 33, "acycl": [33, 53, 795, 2043, 2101], "dag": [33, 53, 2046, 2101], "pipelininig": 33, "truli": [33, 64, 2021, 2064, 2067], "almost": [33, 1320, 1321, 2063, 2101, 2103, 2111], "pipelineschedulesingl": 33, "pipelineschedulemulti": 33, "schedule1f1b": 33, "scheduleinterleaved1f1b": 33, "scheduleloopedbf": 33, "mb_arg": 33, "mb_kwarg": 33, "split_polici": 33, "marker": [33, 1011, 1386, 1982], "polici": [33, 35, 37, 40, 55, 1182, 1803, 1809], "split_gm": 33, "has_loss_and_backward": 33, "loss_spec": 33, "pipe_split": 33, "mm_param": 33, "tensorchunkspec": 33, "split_dim": [33, 2068, 2108], "split_args_kwargs_into_chunk": 33, "args_chunk_spec": 33, "kwargs_chunk_spec": 33, "spec": [33, 37, 39, 50, 52, 53, 66, 796, 797, 822, 823, 824, 827, 828, 829, 2065, 2074, 2088], "kwargs_split": 33, "args_split": 33, "merge_chunk": 33, "chunk_spec": 33, "oppos": [33, 904, 907, 909, 910, 978, 2050], "_pipelinestagebas": 33, "pipelinescheul": 33, "stage_modul": 33, "pipe_info": 33, "pipeinfo": 33, "_pipelinestag": 33, "output_merge_spec": 33, "drain": 33, "steadi": 33, "arxiv": [33, 35, 1497, 1547, 1574, 1733, 1769, 1817, 1871, 1929, 2043, 2088], "pdf": [33, 260, 1446, 1891, 2043], "2104": [33, 1109], "04473": 33, "ab": [33, 35, 92, 93, 628, 629, 685, 964, 1109, 1305, 1326, 1327, 1329, 1330, 1343, 1346, 1497, 1547, 1574, 1577, 1579, 1674, 1733, 1769, 1772, 1811, 1817, 1821, 1871, 2013, 2015, 2017, 2034, 2035, 2036, 2046, 2057, 2068, 2082, 2088, 2098, 2099, 2101, 2102, 2104, 2108, 2113], "2211": 33, "05953": 33, "simliar": 33, "_step_microbatch": 33, "colwis": 34, "parallelize_plan": 34, "sub_modul": 34, "parallelstyl": 34, "devicemesh": [34, 55, 2013], "slice": [34, 52, 64, 66, 75, 77, 609, 698, 699, 1186, 1441, 1442, 1443, 1561, 1563, 1567, 1691, 1692, 1768, 1848, 1865, 1866, 1896, 1907, 1959, 2014, 2018, 2036, 2046, 
2067, 2068, 2082, 2083, 2086, 2088, 2104, 2108], "colwiseparallel": [34, 2013], "tp_mesh": 34, "w1": [34, 64], "w2": [34, 64], "rowwiseparallel": [34, 2013], "mlp": [34, 1707, 2065, 2072], "input_layout": 34, "output_layout": 34, "use_local_output": 34, "sharded_mod": 34, "mind": [34, 1319, 1337, 1469, 2043, 2050, 2054, 2072, 2095, 2104, 2111], "sequenceparallel": [34, 2013], "sequence_dim": 34, "layernorm": [34, 1481, 1489, 1490, 1491, 1571, 1573, 1575, 1647, 2036, 2074, 2113], "rmsnorm": [34, 1682], "redistribut": [34, 2063], "preparemoduleinput": [34, 2013], "desired_input_layout": 34, "input_kwarg_layout": 34, "desired_input_kwarg_layout": 34, "placehold": [34, 52, 64, 82, 802, 827, 837, 1186, 1188, 1488, 2046, 2099, 2102, 2112], "attn": [34, 1573, 1575], "preparemoduleoutput": [34, 2013], "desired_output_layout": 34, "loss_parallel": [34, 2013], "crossentropyloss": [34, 1534, 1616, 2053], "logit": [34, 35, 393, 1440, 1462, 1606, 1616, 1635, 2015, 2068, 2083], "truth": [34, 1462, 1616, 2024, 2087], "label_smooth": [34, 1462, 1616, 2015], "dist_input": 34, "distribute_tensor": 34, "randint": [34, 546, 947, 998, 1091, 1152, 1446, 1577, 1616, 1617, 1724, 1725, 1909, 2015, 2019, 2035, 2068, 2087, 2091], "parameteriz": 35, "tensorflow": [35, 1795, 2043, 2087], "backpropag": [35, 924, 1796, 1827, 2051, 2094], "surrog": 35, "likelihood": [35, 1431, 1480, 1534, 1541, 1630, 1669, 1677], "ratio": [35, 37, 619, 1474, 1475, 1628, 1629, 2015, 2046, 2082, 2110], "reinforc": [35, 1557, 1688], "reparameter": [35, 1166, 1733, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755, 1756, 1757, 1769], "trick": [35, 915, 1440, 1635, 2043, 2054, 2056, 2104], "autoencod": 35, "whilst": [35, 2046], "densiti": [35, 260, 303, 1235, 1236, 1891, 2015, 2083], "log_prob": [35, 1431, 1446, 1617, 2015], "theta": [35, 1598, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 2015], "partial": [35, 37, 47, 52, 55, 58, 64, 845, 1054, 1099, 1177, 1186, 1227, 1320, 1321, 1322, 1364, 1523, 1524, 1525, 1661, 1662, 1663, 1717, 1744, 1902, 1908, 2017, 2018, 2035, 2043, 2049, 2052, 2054, 2065, 2067, 2079, 2089, 2109], "pi": [35, 175, 379, 701, 954, 1231, 1232, 1325, 1476, 1541, 1631, 1677, 1801, 1802, 1821, 1882, 1883, 1886, 1887, 1888, 1889, 1924, 2016, 2017, 2070, 2083], "reward": 35, "ascent": 35, "prob": [35, 2015], "policy_network": 35, "next_stat": 35, "rsampl": 35, "parameter": [35, 379, 456, 1280, 2037, 2082], "has_rsampl": 35, "batch_shap": 35, "event_shap": 35, "validate_arg": 35, "arg_constraint": 35, "cdf": 35, "cumul": [35, 37, 1088, 1089, 1090, 1091, 1092, 1353, 1441, 1442, 1443, 1476, 1500, 1501, 1502, 1567, 1631], "mass": 35, "enumerate_support": 35, "discret": [35, 54, 483, 488, 1125, 1126, 1127, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1138, 1139, 1144, 1146, 1635, 1891, 2087, 2091], "cardin": [35, 1375], "univari": 35, "singleton": [35, 256, 1499, 1542, 1716, 1964, 2044], "cartesian": [35, 962, 1375, 1821], "_instanc": 35, "icdf": 35, "perplex": 35, "sample_shap": 35, "sample_n": 35, "set_default_validate_arg": 35, "mimic": [35, 1809], "stddev": 35, "varianc": [35, 998, 1441, 1442, 1443, 1471, 1480, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1567, 1626, 1630, 1727, 1729, 1794, 1795, 1831, 1840, 1841, 1972, 1973, 2041, 2057], "exp_famili": 35, "famili": 35, "p_": [35, 966, 1365, 1797], "langl": 35, "rangl": 35, "denot": [35, 52, 64, 175, 998, 1198, 1294, 1313, 1319, 1332, 1342, 1454, 1455, 1492, 1734, 1737, 1797, 1910, 1911, 1912, 1914, 1915, 1974, 2025, 2054, 2075, 2078, 2082], "carrier": 
35, "analyt": [35, 923, 924, 1624, 2082], "bregman": 35, "courtesi": 35, "frank": 35, "nielsen": 35, "richard": 35, "nock": 35, "70": [35, 939, 1332, 1827, 2036], "odd": [35, 1130, 1131, 1132, 1140, 1141, 1142, 1608, 1609, 1610], "interv": [35, 50, 260, 869, 942, 994, 1159, 1389, 1390, 1812, 1836, 1837, 1884, 2030, 2046, 2052, 2071], "lower_bound": 35, "upper_bound": 35, "has_enumerate_support": 35, "param_shap": 35, "concentration1": 35, "concentration0": 35, "concentr": 35, "1046": 35, "1st": [35, 2044, 2052], "2nd": [35, 619, 1535, 1562, 1672, 2044], "greaterthan": 35, "total_count": 35, "71": 35, "trial": [35, 288], "integergreaterthan": 35, "ldot": [35, 288, 1305, 1313, 1344, 1360, 1375, 1499, 1520, 1521, 1522, 1542, 1716, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "unnorm": [35, 1462, 1606, 1616, 1635], "likewis": [35, 1949], "25": [35, 498, 588, 589, 590, 939, 964, 1235, 1341, 1346, 1472, 1530, 1532, 1535, 1707, 1717, 1762, 1764, 1787, 1809, 1828, 1971, 2014, 2082, 2083], "independentconstraint": 35, "simplex": 35, "loc": [35, 1345], "lorentz": 35, "3214": 35, "width": [35, 782, 788, 862, 1234, 1235, 1236, 1437, 1438, 1455, 1456, 1458, 1459, 1495, 1496, 1521, 1522, 1534, 1562, 1580, 1644, 1704, 2035, 2082], "df": 35, "chi": 35, "continuous_bernoulli": 35, "lim": [35, 1270], "499": 35, "501": 35, "2538": [35, 1305], "pervas": 35, "loaiza": 35, "ganem": 35, "cunningham": 35, "jp": 35, "1907": 35, "06845": 35, "8954": 35, "greaterthaneq": 35, "df1": 35, "df2": 35, "fisher": 35, "snedecor": 35, "2453": 35, "degre": [35, 998, 1086, 1093, 1273, 1311, 1312, 1527, 1536, 1576, 1717, 1835, 1855, 1922, 1923, 1972, 1973, 2015, 2054, 2068, 2082], "freedom": [35, 998, 1922, 1923, 1972, 1973, 2054], "geometric_": [35, 2015, 2091], "0124": 35, "half_cauchi": 35, "half_norm": 35, "base_distribut": 35, "reinterpreted_batch_ndim": 35, "reinterpret": [35, 501, 2103], "diagon": [35, 60, 226, 262, 597, 598, 599, 600, 955, 994, 998, 1096, 1097, 1098, 1100, 1109, 1122, 1171, 1172, 1217, 1303, 1315, 1317, 1320, 1332, 1336, 1588, 1589, 1590, 1674, 1685, 1908, 1948, 1952, 1953, 1954, 1955, 1956, 2015, 2049, 2068, 2086, 2108], "multivari": [35, 1413, 2083], "multivariate_norm": 35, "mvn": 35, "scale_tril": 35, "diag": [35, 61, 1170, 1171, 1172, 1309, 1310, 1337, 1817, 1908, 1928, 1929, 2015, 2068], "diagn": 35, "inverse_gamma": 35, "2953": 35, "1729": [35, 2057], "lkj_choleski": 35, "lkj": 35, "matric": [35, 262, 689, 692, 944, 956, 957, 966, 967, 968, 969, 1097, 1217, 1294, 1303, 1304, 1305, 1307, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1323, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1336, 1337, 1338, 1346, 1354, 1363, 1365, 1378, 1477, 1536, 1598, 1632, 1731, 1737, 1815, 1817, 1827, 1875, 1901, 1905, 1906, 1928, 1929, 1952, 1953, 1955, 2015, 2034, 2054, 2060, 2082, 2088], "eta": [35, 1781, 1783, 1796], "proport": [35, 1494, 1495, 1496, 1533, 1575], "det": [35, 1311, 1312, 1333, 1354, 2015, 2068], "l": [35, 738, 763, 945, 954, 960, 967, 968, 969, 1109, 1231, 1232, 1273, 1293, 1303, 1304, 1309, 1310, 1311, 1312, 1316, 1320, 1321, 1363, 1365, 1436, 1439, 1440, 1441, 1454, 1462, 1465, 1466, 1473, 1478, 1486, 1487, 1489, 1492, 1493, 1497, 1509, 1518, 1520, 1527, 1529, 1533, 1534, 1543, 1559, 1576, 1577, 1579, 1685, 1743, 1752, 1760, 1762, 1787, 1863, 1924, 2015, 2017, 2043, 2087, 2101, 2102, 2113], "lkjcorr": 35, "onion": 35, "3x3": [35, 1092, 1950], "3516": 35, "9361": 35, "1899": [35, 1376], "4748": 35, "8593": 35, "vine": 35, "2009": 
[35, 1817, 1929], "lewandowski": 35, "dorota": 35, "kurowicka": 35, "harri": [35, 1891], "joe": 35, "journal": [35, 1834], "1016": 35, "jmva": 35, "04": [35, 1152, 1351, 1884, 1891], "008": 35, "corrcholeski": 35, "log_norm": [35, 2068], "lowrank_multivariate_norm": 35, "cov_factor": 35, "cov_diag": 35, "covari": [35, 957, 994, 998, 1441, 1442, 1443, 1567, 1817], "covariance_matrix": 35, "2102": 35, "5429": [35, 1952], "woodburi": 35, "lemma": 35, "formula": [35, 90, 771, 772, 893, 895, 909, 910, 945, 954, 1231, 1232, 1299, 1365, 1366, 1541, 1601, 1602, 1692, 1797, 1830, 1883, 2021, 2049, 2054, 2071, 2076, 2082, 2100], "capacit": 35, "precision_matrix": 35, "mixture_same_famili": 35, "mixture_distribut": 35, "component_distribut": 35, "rightmost": [35, 957, 1235, 1236, 2017], "gaussian": [35, 1476, 1480, 1557, 1630, 1631, 1688, 1890, 2083], "gmm": 35, "bivari": 35, "categori": [35, 40, 86, 2017, 2019, 2067, 2069, 2071, 2073, 2085, 2107, 2109], "innermost": [35, 64, 235, 1227, 1236, 1863], "1338": 35, "mathbf": [35, 1294, 1732, 1733, 1766, 1769, 1796], "sigma": [35, 175, 379, 763, 1440, 1477, 1478, 1479, 1497, 1498, 1557, 1558, 1632, 1688, 1732, 1766, 1885, 1922, 1923, 1972, 1973, 2015], "triangular": [35, 967, 968, 969, 1303, 1304, 1310, 1312, 1320, 1321, 1328, 1331, 1332, 1334, 1336, 1587, 1588, 1589, 1590, 1674, 1685, 1803, 1827, 1952, 1953, 1954, 1955, 1956], "decomposit": [35, 52, 53, 64, 83, 967, 968, 969, 1217, 1303, 1304, 1309, 1310, 1311, 1312, 1313, 1315, 1320, 1321, 1322, 1323, 1328, 1332, 1333, 1337, 1338, 1363, 1365, 1731, 1815, 1817, 1821, 1827, 1928, 1929, 2013, 2060, 2100, 2103, 2112], "positivedefinit": 35, "lowercholeski": 35, "negative_binomi": 35, "halfopeninterv": 35, "mu": [35, 379, 1795, 1797], "one_hot_categor": 35, "onehot": 35, "5623": 35, "nonneg": [35, 1577, 1817, 1929, 2083], "pmf": 35, "mathrm": [35, 946, 1311, 1312, 1313, 1314, 1325, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1536, 1541, 1542, 1567, 1716, 1731, 1784, 1785, 1786, 1796, 1831, 2043, 2083], "relaxed_bernoulli": 35, "temperatur": [35, 1635, 2013], "parametr": [35, 1166, 1734, 1741, 1766, 1767, 1769, 2037, 2049], "relax": [35, 1009, 1043, 1289, 1290, 1737, 2072, 2104], "reparametriz": 35, "99": [35, 1285, 1795, 2014], "2951": [35, 1373], "3442": 35, "8918": 35, "9021": 35, "maddison": 35, "2017": [35, 1571, 1573, 1575, 1909, 2063], "reparametr": [35, 1635, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1749, 1769], "jang": 35, "relaxed_categor": 35, "1294": [35, 967], "2324": [35, 1273, 1527], "3859": 35, "2523": 35, "student": 35, "transformed_distribut": 35, "composit": [35, 60, 1170, 1321, 1731, 2014, 2017, 2021, 2050, 2057, 2067], "basedistribut": 35, "dx": [35, 52, 1092, 1439, 1950, 2015, 2049, 2050, 2083], "dy": [35, 52], "logist": [35, 1557, 1560, 1688, 2083], "sigmoidtransform": 35, "affinetransform": 35, "invert": [35, 64, 1264, 1305, 1314, 1315, 1321, 1327, 1334, 1336, 1339, 1340, 1354, 1523, 1524, 1525, 1952, 2015, 2060, 2077], "3418": 35, "upper": [35, 52, 53, 183, 184, 185, 596, 798, 955, 960, 967, 968, 969, 971, 1123, 1124, 1192, 1234, 1235, 1303, 1304, 1310, 1312, 1320, 1321, 1332, 1336, 1546, 1588, 1590, 1674, 1683, 1684, 1685, 1803, 1809, 1827, 1842, 1863, 1952, 1955, 1956, 2015, 2041, 2068, 2083, 2089], "von_mis": 35, "circular": [35, 1448, 1449, 1450, 1454, 1455, 1456, 1503, 1504, 1505, 1672], "von": 35, "mise": 35, "unconstrain": [35, 1737], "angl": [35, 758, 888, 1093, 1354, 1513, 1821, 1835, 1878, 2013, 2015, 2068, 2082], "9777": 35, "radian": [35, 701, 888, 1093, 1835, 
2015, 2068], "simul": [35, 490, 802, 804, 2071, 2072, 2075, 2100], "1979": 35, "152": [35, 619], "157": 35, "_rejection_sampl": 35, "88443": 35, "4784": [35, 1378], "symmetr": [35, 805, 823, 945, 954, 967, 968, 969, 1130, 1131, 1132, 1137, 1139, 1143, 1144, 1146, 1231, 1232, 1293, 1303, 1304, 1309, 1310, 1312, 1316, 1317, 1318, 1325, 1328, 1331, 1337, 1346, 1731, 1737, 1809, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2072, 2075], "x_ij": 35, "wu": [35, 1346], "chu": 35, "2018": [35, 1346, 1825], "sawyer": 35, "2007": 35, "anderson": 35, "w": [35, 61, 152, 763, 893, 895, 897, 904, 907, 909, 910, 918, 945, 954, 978, 998, 1168, 1190, 1214, 1231, 1232, 1270, 1313, 1429, 1430, 1437, 1438, 1440, 1442, 1443, 1447, 1455, 1456, 1462, 1466, 1467, 1469, 1470, 1471, 1478, 1490, 1491, 1499, 1510, 1511, 1521, 1522, 1532, 1533, 1534, 1539, 1540, 1562, 1578, 1579, 1581, 1582, 1598, 1606, 1624, 1625, 1633, 1669, 1675, 1676, 1732, 1733, 1766, 1769, 2015, 2021, 2034, 2035, 2041, 2043, 2049, 2054, 2069, 2070, 2087, 2111], "2003": 35, "ed": [35, 60, 1465, 1466, 2043, 2070, 2094, 2111, 2114], "odel": 35, "feiveson": 35, "1966": 35, "samplecovari": 35, "jasa": 35, "61": 35, "313": 35, "199": 35, "203": [35, 619], "ku": 35, "bloomfield": 35, "2010": [35, 2041], "ox": 35, "max_try_correct": 35, "bartlett": [35, 945], "singular": [35, 1305, 1319, 1320, 1326, 1328, 1330, 1331, 1337, 1338, 1346, 1354, 1363, 1732, 1737, 1817, 1928, 1929, 2060], "inf": [35, 52, 55, 260, 687, 689, 692, 693, 694, 889, 944, 960, 1262, 1263, 1265, 1267, 1268, 1305, 1320, 1326, 1330, 1333, 1343, 1354, 1417, 1517, 1561, 1571, 1685, 1722, 1743, 1752, 1772, 1856, 1863, 2042, 2043, 2050, 2060, 2083, 2089, 2113], "accordingli": [35, 64, 1283, 1497, 1737, 2036, 2077, 2109], "kl_diverg": 35, "kullback": [35, 1492, 1645], "leibler": [35, 1492, 1645], "notimplementederror": [35, 2021, 2025, 2070], "register_kl": 35, "type_p": 35, "type_q": 35, "pairwis": [35, 1486, 1536, 1576], "kl_normal_norm": 35, "ambigu": [35, 64, 1258, 1457, 1458, 1459, 1473, 1523, 1524, 1525, 1580, 2017], "runtimewarn": 35, "basep": 35, "derivedq": 35, "kl_version1": 35, "derivedp": 35, "baseq": 35, "kl_version2": 35, "tie": 35, "abstransform": 35, "cache_s": 35, "event_dim": 35, "affin": [35, 55, 474, 475, 476, 477, 478, 752, 754, 755, 756, 805, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1500, 1501, 1502, 1509, 1510, 1511, 1514, 1542, 1567, 1598, 1716, 2046, 2057], "cattransform": 35, "tseq": 35, "functor": [35, 1051, 1052], "submatrix": 35, "x0": [35, 2106], "t0": [35, 52, 1227, 1781, 2016], "exptransform": 35, "identity_transform": 35, "composetransform": 35, "corrcholeskytransform": 35, "uncontrain": 35, "euclidean": [35, 964, 1670], "x_i": [35, 60, 971, 1088, 1089, 1090, 1091, 1342, 1349, 1517, 1536, 1561, 1563, 1576, 1653, 1691, 1824, 1907, 1922, 1923, 1950, 1972, 1973, 1974, 2083], "stickbreakingtransform": 35, "r_i": 35, "tanh": [35, 579, 763, 768, 798, 889, 1476, 1478, 1479, 1497, 1498, 1526, 1543, 1545, 1569, 1631, 1664, 1697, 2015, 2034, 2035, 2041, 2068, 2074, 2082, 2108], "unsign": [35, 2075, 2085, 2088, 2089, 2104], "z_i": 35, "s_i": 35, "y_i": [35, 971, 1088, 1089, 1090, 1091, 1342, 1349, 1576, 1950, 1974, 2054], "sqrt": [35, 64, 379, 551, 676, 677, 763, 994, 1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1240, 1293, 1375, 1441, 1442, 1443, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1476, 1478, 1479, 1481, 1489, 1490, 1491, 1494, 1495, 1496, 1497, 1498, 1499, 1512, 1514, 
1542, 1543, 1545, 1567, 1631, 1685, 1716, 1737, 1782, 1783, 1784, 1785, 1788, 1794, 1795, 1831, 1858, 1890, 1922, 1923, 2015, 2034, 2041, 2042, 2043, 2068, 2082, 2083, 2089, 2108], "cumulativedistributiontransform": 35, "copula": 35, "base_dist": 35, "independenttransform": 35, "base_transform": 35, "log_abs_det_jacobian": 35, "lowercholeskytransform": 35, "positivedefinitetransform": 35, "powertransform": 35, "expon": [35, 270, 271, 282, 470, 471, 1152, 1159, 1297, 1327, 1431, 1515, 1670, 1824, 1834, 1871, 2015, 2085, 2088, 2108], "reshapetransform": 35, "in_shap": 35, "out_shap": 35, "softplustransform": 35, "tanhtransform": 35, "softmaxtransform": 35, "biject": 35, "hmc": 35, "stacktransform": 35, "stick": [35, 2069], "aris": [35, 64, 2043, 2100], "memoiz": [35, 2103], "_call": 35, "_invers": 35, "codomain": [35, 2043], "iff": [35, 1533], "weaker": [35, 2046], "pseudoinvers": [35, 1314, 1331, 1339], "monoton": [35, 1526, 1664, 1863], "forward_shap": 35, "inverse_shap": 35, "corr_choleski": 35, "greater_than": 35, "greater_than_eq": 35, "integer_interv": 35, "less_than": 35, "lower_choleski": 35, "lower_triangular": 35, "nonnegative_integ": 35, "one_hot": [35, 2015, 2068], "positive_integ": 35, "positive_semidefinit": 35, "positive_definit": 35, "real_vector": 35, "unit_interv": 35, "is_discret": 35, "constrain": [35, 1192, 1199, 1564, 2017, 2057], "_cat": 35, "dependent_properti": 35, "_dependentproperti": 35, "_greaterthan": 35, "_greaterthaneq": 35, "_independentconstraint": 35, "_integerinterv": 35, "_interv": 35, "half_open_interv": 35, "_halfopeninterv": 35, "_lessthan": 35, "_multinomi": 35, "_stack": [35, 2068], "constraintregistri": 35, "biject_to": 35, "transform_to": 35, "overparameter": 35, "rotat": [35, 1855, 1928], "hamiltonian": 35, "mont": 35, "carlo": 35, "invari": [35, 52, 1758, 1903, 1910, 1911, 1912, 1913, 1914, 1915, 2079, 2082, 2098, 2112], "potential_energi": 35, "cheap": [35, 1188, 1431, 2104], "svi": 35, "fewer": [35, 52, 60, 696, 698, 699, 702, 970, 1294, 1295, 1361, 1371, 1373, 1374, 1376, 1379, 1418, 1421, 1480, 1615, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 1825, 1909, 1922, 1923, 1927, 1946, 1972, 1973, 2023, 2044, 2062, 2089, 2115], "my_constraint": 35, "my_transform": 35, "myconstraintclass": 35, "my_factori": 35, "mytransform": 35, "param1": [35, 2057], "param2": [35, 2057], "constraint_registri": 35, "my_registri": 35, "construct_transform": 35, "myconstraint": 35, "from_dlpack": [36, 884, 2013], "ext_tensor": [36, 1160], "extern": [36, 1012, 1160, 1289, 1968, 2030, 2045, 2046, 2060, 2065, 2067, 2094], "immut": [36, 1160, 2017], "__dlpack__": [36, 1160], "capsul": [36, 884, 1160], "pycapsul": [36, 1160], "to_dlpack": [36, 1160, 2013], "t2": [36, 690, 691, 884, 1106, 1160, 1227, 1717, 1970, 2077, 2078], "dltensor": [36, 1160], "t3": [36, 1160, 2078], "idiomat": 36, "inde": [36, 2014, 2024, 2070, 2079, 2109], "monitor": [37, 48, 50, 88, 1011, 1386, 1811, 1982, 2013, 2046, 2055, 2117], "unhealthi": 37, "tear": 37, "react": [37, 2109], "decentr": 37, "diagram": [37, 47, 1273, 1527, 2072, 2079, 2102, 2104, 2113], "elasticag": 37, "quad": [37, 1439, 1440, 1462, 1493, 1518, 1534, 1577, 1840], "group_result": 37, "is_fail": [37, 39], "exit_cod": 37, "return_valu": [37, 39, 45, 2101, 2102], "get_worker_group": 37, "workergroup": [37, 48], "mutabl": [37, 1289, 2014, 2021, 2070, 2087], "implementor": 37, "defens": 37, "retri": [37, 40, 50, 1065, 2077, 2079], "max_restart": [37, 39, 48], "runresult": 37, "workerspec": [37, 39, 48, 50], 
"local_world_s": [37, 39, 48], "rdzv_handler": [37, 39, 47], "monitor_interv": [37, 39], "local_addr": [37, 47], "blueprint": 37, "homogen": [37, 48], "rendezvoushandl": [37, 39, 47, 48], "rdzv": [37, 46, 48], "chose": [37, 2042, 2100], "tee": [37, 45], "consol": [37, 41, 44, 45, 48, 2087], "get_entrypoint_nam": 37, "__qualname__": 37, "workerst": 37, "unknown": [37, 53, 1719, 1720, 2079], "unrecover": 37, "interrupt": [37, 2033, 2111], "succeed": [37, 47, 1363, 2101, 2104], "uncaught": [37, 40], "unhandl": 37, "recov": [37, 902, 1130, 1333, 1365, 1717, 1758, 1924, 2042, 2046, 2051, 2054, 2073], "is_run": 37, "role_rank": [37, 48], "role_world_s": [37, 48], "pid": [37, 40, 48, 50, 1567, 2051, 2071], "local_elastic_ag": 37, "localelasticag": [37, 39, 50], "logs_spec": [37, 45], "start_method": [37, 39, 45, 50, 2033], "exit_barrier_timeout": 37, "log_line_prefix_templ": 37, "advis": [37, 546, 619, 869, 2059], "torchelastic_enable_file_tim": 37, "torchelastic_timer_fil": 37, "role_nam": 37, "trainer0": [37, 2077], "foobar": [37, 40, 44, 45], "templat": [37, 976, 1051, 1052, 2069], "substitut": [37, 47, 64, 83, 1286, 2043, 2085], "shared_queu": 37, "get_context": [37, 50, 2059], "nproc_per_process": 37, "other_param": [37, 50], "usr": [37, 45, 47, 2046], "bin": [37, 45, 47, 302, 303, 822, 947, 1234, 1235, 1236, 2015, 2017, 2068, 2087, 2110], "simpleelasticag": 37, "scaffold": [37, 2101], "_assign_worker_rank": 37, "group_world_s": 37, "role_info": 37, "front": [37, 256, 976, 1346, 1530, 2035, 2050], "base_global_rank": 37, "_exit_barri": 37, "guard": [37, 66, 71, 75, 683, 976, 1187, 1188, 1192, 1198, 1200, 1201, 1202, 1208, 1209, 1212, 1213, 2023, 2059, 2068, 2070, 2091, 2104, 2109, 2113, 2114], "_initialize_work": 37, "worker_group": 37, "fresh": [37, 64, 1100, 1191, 1194, 1866, 1896, 2012], "start_work": 37, "_stop_work": 37, "optimist": 37, "deleg": [37, 2101], "_monitor_work": 37, "_rendezv": 37, "_restart_work": 37, "_shutdown": 37, "death_sig": 37, "sigterm": 37, "is_restart": 37, "_start_work": [37, 45], "gracefulli": [37, 47, 48, 60, 1304, 2050, 2104], "meaning": [37, 40, 41, 1065, 2046, 2107], "meaningless": 37, "intention": [37, 1106, 1974, 2055, 2077], "torchelastic_health_check_port": 37, "tcp": [37, 47, 48, 2077], "health_check_serv": 37, "healthcheckserv": 37, "alive_callback": 37, "dead": [37, 64, 2112], "create_healthcheck_serv": 37, "control_plan": 38, "worker_main": 38, "_workerserv": 38, "torch_worker_server_socket": 38, "ship": [39, 1167, 2045, 2077], "programmat": [39, 64, 2057], "my_launch": 39, "argv": [39, 51], "trainer_entrypoint_fn": 39, "fn_arg": 39, "run_result": 39, "tricki": [39, 56, 61, 2043, 2057, 2079, 2098, 2101], "myrendezvoushandl": 39, "elastic_ag": 39, "metrichandl": [39, 44], "mymetrichandl": 39, "metric_data": [39, 44], "metricdata": 39, "sink": [39, 44, 2030], "eventhandl": 39, "cloudwatch": 39, "nulleventhandl": 39, "myeventhandl": 39, "invalid": [40, 45, 979, 1273, 1527, 2042, 2043, 2061, 2065, 2067, 2070, 2103], "infra": [40, 2112], "start_process": [40, 45, 2033], "torchelastic_error_fil": 40, "smallest": [40, 53, 965, 1295, 1305, 1326, 1330, 1346, 1826, 1947, 2089, 2113, 2118], "timestamp": [40, 41, 44, 48, 2030, 2071, 2087, 2113], "error_handl": 40, "sugar": [40, 2016], "get_error_handl": 40, "childfailederror": 40, "get_first_failur": 40, "dump_error_fil": 40, "error_fil": [40, 45], "exitcod": [40, 48], "nanni": 40, "accur": [40, 930, 1011, 1227, 1349, 1386, 1440, 1468, 1834, 1950, 2021, 2029, 2046, 2078, 2094, 2103, 2117], "torchelastic_ag": 40, 
"trainer_0": 40, "trainer_1": 40, "json": [40, 45, 50, 2056, 2071, 2107, 2111], "trainer_n": 40, "errorhandl": 40, "record_except": 40, "processfailur": 40, "test_ev": 41, "eventsourc": 41, "get_logging_handl": 41, "eventmetadatavalu": 41, "readm": [42, 43, 64, 795, 2036], "telemetri": 44, "timeseri": 44, "metric_group": 44, "metric_nam": 44, "sensibl": 44, "my_modul": [44, 53, 55, 64, 2014, 2070], "nullmetricshandl": 44, "consolemetricshandl": 44, "my_method": 44, "put_metr": 44, "calculate_lat": 44, "succinctli": 44, "baz": [44, 64, 858, 2070, 2089], "leaf_modul": 44, "classnam": [44, 2017], "threw": [44, 2115], "my_app": 44, "consolemetrichandl": 44, "toi": [44, 2113], "stdout": [44, 45, 49, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1813, 2067], "stdoutmetrichandl": 44, "1574213883": 44, "4182858": 44, "my_metr": 44, "1574213940": 44, "5237644": 44, "nullmetrichandl": 44, "class_nam": [44, 64], "def_nam": 44, "leaf": [44, 53, 152, 224, 337, 450, 490, 505, 506, 794, 841, 843, 883, 897, 1176, 1779, 1943, 2036, 2042, 2043, 2052, 2067, 2093, 2103], "metric_valu": 44, "metric_group_nam": 44, "popen": [45, 49], "log_dir": [45, 2030, 2087], "stderr": [45, 49, 2012, 2028], "err": 45, "echo": 45, "hello": [45, 60, 2016, 2070, 2101], "pcontext": 45, "multiprocesscontext": 45, "subprocesscontext": 45, "log_line_prefix": 45, "keyset": 45, "bitmask": 45, "bar0": 45, "bar1": 45, "file1": 45, "file2": 45, "short": [45, 64, 737, 766, 1109, 1201, 1270, 1446, 1497, 1498, 1617, 1778, 1875, 1924, 2016, 2021, 2034, 2043, 2044, 2057, 2077, 2084, 2085, 2088, 2101, 2102], "ing": 45, "cmd": [45, 48], "forkserv": [45, 1717, 2033, 2059], "local_ranks_filt": 45, "processcontext": [45, 2033], "superset": [45, 48, 2021], "tee_stdout": 45, "tee_stderr": 45, "runprocsresult": 45, "defaultlogsspec": 45, "logsspec": 45, "reifi": 45, "rdzv_run_id": 45, "attempt_": 45, "logsdest": 45, "num_nod": [46, 48], "trainers_per_nod": 46, "num_allowed_failur": 46, "job_id": [46, 47, 48], "endpoint": [46, 47, 48], "host_node_addr": [46, 48], "min_siz": [46, 48], "num_allowed_failures_or_membership_chang": 46, "node1": [46, 48], "29400": [46, 48], "sidecar": [46, 47], "agre": [47, 1332, 2079], "resum": [47, 1803, 1809, 1811, 2077, 2079, 2087, 2102, 2104], "retryabl": 47, "announc": 47, "lose": [47, 51, 55, 193, 210, 2035], "train_loop": [47, 866], "arriv": [47, 48, 2077, 2079], "dynamicrendezvoushandl": 47, "rendezvousbackend": 47, "c10drendezvousbackend": 47, "etcdrendezvousbackend": 47, "supersed": [47, 1190], "etcdrendezvoushandl": 47, "my_run_id": 47, "from_backend": 47, "run_id": [47, 48, 50], "min_nod": 47, "max_nod": 47, "rendezvousparamet": 47, "admit": [47, 48, 2046], "get_as_bool": 47, "get_as_int": 47, "rendezvoushandlerregistri": 47, "get_run_id": 47, "is_clos": 47, "set_clos": 47, "next_rendezv": 47, "rendezvousinfo": 47, "rendezvousclosederror": 47, "rendezvousconnectionerror": 47, "rendezvousstateerror": 47, "rendezvoustimeouterror": 47, "num_nodes_wait": 47, "shutdown": [47, 2013, 2026, 2077, 2078], "use_agent_stor": 47, "lifecyl": 47, "rendez": 47, "impl": [47, 52, 1051, 1279, 2021], "rendezvousstoreinfo": 47, "bootstrap_store_info": 47, "bootstrap": [47, 2063], "rendezvouserror": 47, "rendezvousgracefulexiterror": 47, "dynamic_rendezv": 47, "create_handl": 47, "join_timeout": 47, "600": 47, "last_call_timeout": 47, "close_timeout": 47, "rendezvoustimeout": 47, "get_stat": [47, 90, 2046], "fenc": 47, "set_stat": [47, 90, 2046], "last_cal": 47, "heartbeat": [47, 2117], "keep_al": 47, 
"c10d_rendezvous_backend": 47, "create_backend": 47, "store_typ": 47, "read_timeout": 47, "60": [47, 50, 939, 1480, 1813, 1940, 1945, 2030, 2077, 2102], "is_host": 47, "skip": [47, 53, 863, 978, 1109, 1163, 1196, 1303, 1304, 1436, 1437, 1438, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 1896, 1967, 2014, 2042, 2043, 2046, 2048, 2049, 2057, 2069, 2071, 2077, 2100, 2104, 2105, 2111], "cname": 47, "fqdn": [47, 48], "etcd_rendezvous_backend": 47, "ssl_cert": 47, "ssl": 47, "certif": 47, "ssl_cert_kei": 47, "privat": [47, 55, 1188, 2046, 2049, 2070, 2098, 2104], "ca_cert": 47, "rool": 47, "key_prefix": 47, "ttl": 47, "hour": 47, "etcd_rendezv": 47, "rdzv_impl": 47, "etcdrendezv": 47, "etcd_address": 47, "min_work": 47, "max_work": 47, "noqa": 47, "w605": 47, "2379": [47, 1371], "etcd_prefix": 47, "etcdstor": 47, "etcd_stor": 47, "etcd_client": 47, "etcd_store_prefix": 47, "piggyback": 47, "num": [47, 748, 749, 763, 1413, 1478, 1481, 1497, 1533, 1543, 1571, 2059], "lookuperror": 47, "override_timeout": 47, "etcdserv": 47, "cumbersom": [47, 2017], "highli": [47, 1431, 1717, 2012, 2058, 2065, 2067, 2082, 2089, 2116], "etcd_serv": 47, "data_dir": 47, "v3": [47, 48], "torchelastic_etcd_binary_path": 47, "get_client": 47, "etcd_binary_path": 47, "entry_point": [48, 2099], "train_script": 48, "aforment": 48, "suffic": [48, 64], "compliant": [48, 51], "num_train": 48, "wors": [48, 1811, 1965, 2104], "port_k": 48, "etcd": 48, "v2": [48, 1342, 1816, 2012, 2053], "revis": 48, "physic": [48, 235, 1127, 1145, 2018, 2045, 2059, 2082, 2104], "localworkergroup": 48, "rdzv_id": 48, "rdzv_backend": [48, 51], "rdzv_endpoint": [48, 51], "max_nnod": 48, "torchelastic_restart_count": 48, "far": [48, 1283, 1633, 2100], "torchelastic_max_restart": 48, "python_exec": 48, "gang": 48, "departur": 48, "surviv": 48, "kill": [48, 50, 2033, 2051], "frequenc": [48, 947, 998, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1137, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1270, 1431, 1469, 1470, 1624, 1625, 1803, 1924, 2112], "ness": [48, 2103], "load_checkpoint": [48, 51], "checkpoint_path": [48, 51], "dataset": [48, 51, 66, 801, 1431, 1440, 1492, 2013, 2051, 2053, 2056, 2059, 2063, 2069, 2072, 2073, 2087], "train_step": 48, "should_checkpoint": 48, "save_checkpoint": [48, 51], "subprocess_handl": 49, "get_subprocess_handl": 49, "local_rank_id": 49, "fd": [49, 2087], "acquir": [50, 64, 2057, 2079, 2082], "deadlin": 50, "message_queu": 50, "localtimerserv": 50, "max_interv": 50, "trainer_func": 50, "localtimercli": 50, "expiri": 50, "timer_cli": 50, "countdown": 50, "timefram": [50, 2077], "elig": [50, 2079], "reap": 50, "timerserv": 50, "mp_queue": 50, "daemon": [50, 2033], "filetimerserv": 50, "file_path": 50, "log_ev": [50, 2030], "filetimercli": 50, "fifo": 50, "watchdog": [50, 2117], "filetimerrequest": 50, "sigkil": 50, "named_pip": 50, "mkfifo": 50, "timercli": 50, "timerrequest": 50, "scope_id": 50, "expiration_tim": 50, "acquisit": 50, "whatev": [50, 64, 450, 1187, 1717, 1867, 1952, 2017, 2050, 2103, 2104], "request_queu": 50, "entiti": [50, 64], "clear_tim": 50, "get_expired_tim": 50, "register_tim": 50, "timer_request": 50, "debug_info_log": 50, "log_debug_info_for_expired_tim": 50, "expired_tim": 50, "use_env": 51, "expositori": 51, "worst": [51, 2049], "total_num_epoch": 51, "visit": [51, 55, 2067, 2107], "WILL": 52, "BE": 52, "aot": [52, 683, 2023, 2094, 2095, 2104, 2105, 2113], "exported_program": [52, 2065], "exportedprogram": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 
76, 77, 78, 79, 2065], "arg1_1": 52, "exportgraphsignatur": [52, 53, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "user_input": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "user_output": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "inputs_to_paramet": 52, "inputs_to_buff": 52, "buffers_to_mut": 52, "backward_signatur": 52, "assertion_dep_token": 52, "sound": [52, 64, 979, 1182, 1556, 2087, 2105, 2113], "alias": [52, 55, 902, 1167, 1178, 1644, 1977, 2016, 2017, 2021, 2049, 2103], "stacktrac": [52, 53, 66, 75, 76, 77], "leverag": [52, 976, 2064, 2065, 2077, 2094, 2097, 2101, 2106, 2116], "_dynamo": [52, 66, 77, 976, 983, 2048, 2094, 2098, 2099, 2100, 2101, 2102, 2109, 2111, 2113], "massiv": [52, 2101], "pt2": [52, 2100, 2103], "artifact": [52, 683, 1924, 2023, 2056, 2070, 2095, 2101, 2111, 2113], "untrac": [52, 64], "disjoint": [52, 2014], "usabl": [52, 63, 2017, 2066, 2067, 2101, 2102], "symbolic_trac": [52, 53, 64, 2072], "comprehens": [52, 83, 1891, 2017, 2018, 2035, 2046, 2055, 2072, 2095], "simpler": [52, 61, 1178, 1977, 2014, 2043, 2049, 2054, 2057, 2101], "straight": [52, 1439, 1635], "conv": [52, 64, 705, 706, 707, 708, 709, 710, 711, 712, 713, 795, 796, 799, 817, 858, 864, 1273, 1283, 1289, 1290, 1454, 1455, 1456, 1457, 1458, 1459, 1527, 1528, 1534, 1726, 1727, 1729, 2027, 2041, 2046, 2060, 2067, 2072, 2073, 2074, 2075, 2107, 2109], "in_channel": [52, 715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 741, 742, 743, 744, 745, 746, 1283, 1454, 1455, 1456, 1457, 1458, 1459, 1503, 1504, 1505, 1506, 1507, 1508], "out_channel": [52, 715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 741, 742, 743, 744, 745, 746, 1283, 1454, 1455, 1456, 1457, 1458, 1459, 1503, 1504, 1505, 1506, 1507, 1508], "kernel_s": [52, 715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 741, 742, 743, 744, 745, 746, 771, 772, 785, 786, 1273, 1283, 1436, 1437, 1438, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1474, 1475, 1494, 1495, 1496, 1503, 1504, 1505, 1506, 1507, 1508, 1520, 1521, 1522, 1523, 1524, 1525, 1527, 1579, 1600, 1601, 1602, 1611, 1612, 1613, 1627, 1628, 1629, 1654, 1655, 1656, 1658, 1659, 1660, 1661, 1662, 1663, 1703, 1832, 1833, 2015, 2087, 2108], "maxpool": [52, 799, 1474, 1475, 1628, 1629, 2067], "maxpool2d": [52, 786, 1524, 1528, 1659, 1662, 2067, 2074], "256": [52, 1446, 1824, 2046, 2067], "example_kwarg": 52, "arg2_1": 52, "arg3_1": 52, "max_pool2d_with_indic": [52, 2015, 2068, 2108], "getitem": [52, 66, 69, 74, 75, 78, 2101, 2112], "85": [52, 64, 1530, 1809, 2107, 2111], "l__self___conv": 52, "lift": [52, 53, 61, 1178, 1977, 2035, 2068, 2103], "get_attr": [52, 64, 82, 1186], "harden": 52, "oncal": 52, "proxytensor": 52, "contextlib": [52, 66, 70], "contextmanag": 52, "__enter__": [52, 2017], "__exit__": [52, 64, 2017, 2018, 2070], "exc_typ": [52, 2018], "exc_valu": [52, 2018], "exc": 52, "expot": 52, "branch1": 52, "64": [52, 58, 59, 61, 939, 1091, 1171, 1172, 1176, 1428, 1429, 1430, 1432, 1433, 1434, 1440, 1556, 1685, 1864, 1967, 2021, 2036, 2062, 2063, 2065, 2067, 2072, 2076, 2082, 2085, 2087, 2088, 2104, 2106], "branch2": 52, "128": [52, 619, 723, 731, 732, 759, 767, 797, 822, 823, 1444, 1461, 1488, 1514, 1536, 1576, 1577, 1615, 1685, 2035, 2036, 2053, 2065, 2072, 2074, 2075, 2077, 2082, 2085, 2088, 2111], "x2": [52, 66, 72, 964, 1321, 1519, 1576, 1577, 1615, 1673, 2015, 2108], "out1": [52, 1167, 2015, 2104], "out2": [52, 1167, 2015, 2104], "arg4_1": 52, "arg5_1": 52, "arg6_1": 52, "permute_1": 52, 
"addmm_1": 52, "relu_1": [52, 64], "l__self___buff": 52, "rangeconstraint": [52, 53], "min_val": [52, 781, 1485, 1639, 1640, 2015, 2108], "max_val": [52, 781, 1485, 1639, 1640, 2015, 2108], "9223372036854775806": [52, 66, 74, 75], "range_constraint": [52, 53], "dimx": 52, "dimi": 52, "slice_1": [52, 66, 75, 77], "9223372036854775807": [52, 66, 75, 898, 941, 1271, 1758, 2030, 2081, 2115], "input_spec": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "inputspec": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "inputkind": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "tensorargu": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "output_spec": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "outputspec": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "outputkind": [52, 66, 67, 68, 69, 70, 71, 72, 74, 75, 76, 77, 78, 79, 2065], "valuerang": [52, 1187, 1188], "is_bool": 52, "saved_exported_program": 52, "fold": [52, 1579, 2015, 2027, 2067, 2075, 2082], "argumen": 52, "example_input": [52, 820, 821, 865, 866, 1285, 1289, 2072, 2093, 2095, 2099, 2102, 2103], "dynamic_dim": [52, 1181, 1182, 1188], "rewritten": [52, 2054, 2101], "primtivi": 52, "symint": [52, 1188, 1194, 1196, 1197, 1198, 1199, 1200, 1201, 1206, 1208, 1209, 1210, 1932, 1933, 1935, 1936, 1937, 2021, 2091, 2101, 2103, 2108], "symfloat": [52, 1188, 1932, 1933, 2091, 2100, 2101], "symbool": [52, 1188, 1205, 1212, 1937, 2091, 2100, 2101], "const": [52, 1480, 2056], "add_1": [52, 64, 66, 69, 71, 74, 75, 1167], "add_2": [52, 64, 66, 69, 71, 74, 75], "shot": 52, "nearli": [52, 152, 897, 918, 1817, 2059, 2077, 2113], "imposs": [52, 1178, 1198, 1977, 2042], "exportdb": [52, 2091], "combinatori": 52, "explod": [52, 1559], "cond": [52, 53, 60, 66, 69, 75, 2060], "faketensor": [52, 1189, 2021, 2065, 2103], "register_fak": [52, 2021], "dynamo": [52, 64, 81, 683, 978, 1188, 2023, 2048, 2067, 2094, 2099, 2100, 2103, 2104, 2105, 2107, 2109, 2110, 2111, 2113], "preserve_module_call_signatur": 52, "dim0_x": [52, 66, 68, 74, 75], "verbatim": 52, "bake": [52, 64, 66, 75, 1277, 1283, 1784, 1785, 1797, 2069], "diverg": [52, 1202, 1289, 1290, 1492, 1645, 2013, 2017], "register_dataclass": [52, 53], "ordereddict": [52, 1273, 1527, 1528, 1537, 1556, 1707, 1748, 2015, 2057, 2062, 2089], "debug_nam": 52, "_constraint": 52, "unbound": [52, 1187, 1192, 1198], "t1": [52, 690, 691, 884, 1106, 1227, 1717, 1970, 2016, 2077, 2078], "extra_fil": [52, 1281, 1284], "opset_vers": [52, 2065, 2067, 2068], "opset": [52, 2065, 2067, 2068, 2099, 2108], "txt": [52, 1281, 1284, 2053, 2070, 2095, 2113], "decod": [52, 1345, 1571, 1572, 1573, 1717, 2070], "utf": [52, 1345, 2067, 2070], "expected_opset_vers": 52, "rb": [52, 1281, 1345], "seek": [52, 1281, 1345, 2070, 2072], "cl": [52, 66, 68, 141, 2049, 2072, 2077, 2114], "serialized_type_nam": 52, "treespec": 52, "inputdataclass": 52, "outputdataclass": 52, "shapescollect": 52, "builder": [52, 2101], "tensor_x": [52, 2065], "tensor_i": 52, "tensor_z": 52, "refine_dynamic_shapes_from_suggested_fix": 52, "msg": [52, 1068, 1069, 1071, 1188, 2071, 2089], "refin": [52, 1280, 2035], "constraintviol": 52, "straightforward": [52, 64, 2024, 2036, 2057, 2069, 2082, 2099], "ti": [52, 1166, 1767, 1856, 2021, 2052, 2103], "_dx": 52, "1024": [52, 2045, 2046, 2095, 2104], "_derivedconstraint": 52, "graph_signatur": [52, 53], "module_call_graph": 52, "tensor_const": 52, "run_decomposit": [52, 2065], "decomp_t": 52, "joint": [52, 683, 2023], 
"exportbackwardsignatur": 52, "gradients_to_paramet": 52, "gradients_to_user_input": 52, "loss_output": 52, "gurante": 52, "getattr": [52, 64, 2015, 2049, 2062, 2077], "parameters_buffers_constant_tensor": 52, "flattened_user_input": 52, "mutated_input": 52, "flattened_user_output": 52, "custommodul": [52, 841, 2065, 2072], "my_paramet": [52, 2016, 2065], "register_buff": [52, 1273, 1527, 1709, 2014, 2016, 2049, 2057, 2065], "my_buffer1": [52, 2065], "my_buffer2": [52, 2065], "add_tensor": [52, 53], "call_funct": [52, 64, 66, 80, 82, 84, 85, 1186, 2099, 2101, 2102, 2112, 2113], "mul_tensor": 52, "mul_tensor_1": 52, "add_tensor_1": 52, "add_tensor_2": 52, "buffer_mut": [52, 2065], "modulecallsignatur": 52, "symintargu": [52, 66, 75], "constantargu": [52, 66, 68], "customobjargu": 52, "tokenargu": 52, "in_spec": 52, "_pytre": [52, 66], "out_spec": 52, "modulecallentri": 52, "nonetyp": [52, 2015, 2017], "replace_all_us": 52, "get_replace_hook": 52, "class_fqn": 52, "unflatten": [52, 55, 2015, 2034, 2035, 2037, 2068, 2086], "flatargsadapt": 52, "target_spec": 52, "input_args_with_path": 52, "codegen": [52, 53, 64, 2049, 2104, 2107, 2113], "flat_args_adapt": 52, "hierachi": 52, "swap": [52, 62, 64, 417, 619, 791, 792, 793, 796, 800, 868, 1279, 1363, 1576, 1577, 1701, 1702, 1949, 1970, 2015, 2018, 2046, 2072, 2082, 2084, 2092], "submod": [52, 64], "new_mod": 52, "unflattenedmodul": 52, "bear": 53, "mlir": 53, "soundli": 53, "audienc": 53, "realiz": [53, 1871], "implic": [53, 1271, 1289, 2049, 2077, 2082], "bundl": [53, 2056], "notabl": [53, 976, 2017, 2088, 2095], "graph_modul": [53, 863, 2102, 2112], "sympi": [53, 1188, 1195, 2100], "i0": [53, 308, 1293, 1890, 2015, 2068, 2083], "collorari": 53, "textual": 53, "machineri": [53, 64, 2049, 2101, 2104, 2111], "op_nam": [53, 2021, 2065], "arg4": 53, "arg5": 53, "compact": [53, 1125, 1126, 1128, 1316, 1318, 1321, 2049, 2067, 2072], "args1": 53, "add1": 53, "predefin": [53, 2067], "readign": 53, "referenc": [53, 64, 1012, 1186, 1273, 1527, 1952, 2016, 2043, 2067, 2077, 2102], "19": [53, 688, 749, 969, 1524, 2014, 2067, 2082, 2102, 2107], "dummy_help": 53, "helper_util": 53, "89": [53, 619, 1835, 2107], "nn_module_stack": 53, "came": [53, 2101], "self_linear": 53, "self_sequenti": 53, "source_fn_stack": 53, "source_fn": 53, "encapsul": [53, 63, 64, 1189, 2065, 2077, 2098], "control_flow": [53, 66, 69, 74, 75, 78], "x_1": [53, 66, 1088, 1089, 1090, 1091, 1341, 1444, 1460, 1461, 1604, 1615, 1950], "num_us": [53, 64], "y_1": [53, 1950], "higher_ord": [53, 66, 69, 74, 75, 78], "liter": [53, 1778, 2015, 2018, 2035, 2070, 2112, 2115], "dim_ord": 53, "tensormeta": 53, "promot": [53, 55, 83, 688, 949, 952, 961, 1051, 1104, 1152, 1154, 1155, 1156, 1157, 1412, 1465, 1466, 1467, 1471, 1615, 1826, 1846, 1847, 1853, 1868, 1925, 1935, 2017, 2065, 2085, 2089, 2103, 2104, 2108], "max_pool2d_with_index": 53, "add_on": 53, "ph_0": 53, "jax": [53, 56, 59, 60, 61, 2043, 2050, 2101, 2104], "int64_t": [53, 1778], "scalartyp": [53, 2108], "memoryformat": [53, 2108], "memory_format": [53, 64, 157, 172, 174, 177, 180, 181, 182, 191, 197, 208, 211, 235, 242, 269, 299, 327, 333, 395, 501, 502, 527, 582, 625, 973, 1110, 1111, 1165, 1186, 1273, 1527, 1717, 1724, 1725, 1777, 1837, 1839, 1841, 2011, 2013, 2015, 2049, 2089, 2108], "_register_pytree_nod": 53, "sharding_strategi": 55, "auto_wrap_polici": 55, "backward_prefetch": 55, "backwardprefetch": [55, 2013], "backward_pr": [55, 1273, 1527], "mixed_precis": [55, 1717], "ignored_modul": 55, "param_init_fn": 55, "sync_module_st": 
55, "forward_prefetch": [55, 2052], "limit_all_gath": [55, 2052], "use_orig_param": 55, "ignored_st": 55, "inspir": [55, 1787, 2049, 2106], "deepspe": 55, "shorten": 55, "sharded_modul": 55, "0001": [55, 64, 1090, 1318, 1515, 1651, 1781, 1811, 2015, 2069], "dev_id": 55, "shard_grad_op": 55, "unshard": [55, 2052], "full_shard": 55, "summon_full_param": 55, "with_grad": 55, "gap": [55, 869, 1843, 2013, 2072, 2111], "delai": [55, 488, 1717, 2079, 2104], "reacquir": 55, "nccl_cross_nic": 55, "no_sync": [55, 1717], "cpuoffload": [55, 2013], "modulewrappolici": 55, "custompolici": 55, "proceed": [55, 1011, 1386, 1480, 1982, 2078, 2079], "nonwrapped_numel": 55, "travers": [55, 839, 1197, 1717, 2048, 2049, 2078, 2092, 2093], "subtre": 55, "size_based_auto_wrap_polici": 55, "exce": [55, 1351, 2046, 2059], "100m": 55, "numel": [55, 437, 446, 974, 998, 1140, 1258, 2015, 2034, 2043, 2068, 2081, 2082], "custom_auto_wrap_polici": 55, "min_num_param": 55, "1e8": 55, "my_auto_wrap_polici": 55, "functool": [55, 58, 66, 845, 1902, 2049, 2089], "1e5": 55, "mixedprecis": [55, 2013], "granular": [55, 82, 1287, 2042, 2111], "is_meta": [55, 2068], "reset_paramet": [55, 1542, 1716], "torchdistx": 55, "deferred_init": 55, "materialize_modul": 55, "my_init_fn": 55, "fullstatedictconfig": [55, 2013], "flatparamet": 55, "unifi": [55, 2013, 2035, 2101], "alten": 55, "distributed_device_mesh": 55, "check_is_root": 55, "clip_grad_norm_": [55, 1721, 2042], "max_norm": [55, 748, 749, 1469, 1470, 1624, 1625, 1721, 1722, 2015, 2042], "norm_typ": [55, 748, 749, 1469, 1470, 1494, 1495, 1496, 1624, 1625, 1654, 1655, 1656, 1721, 1722, 2015], "clip": [55, 994, 1721, 1722, 1723, 2015, 2037, 2050, 2067, 2068], "infin": [55, 996, 1263, 1265, 1267, 1268, 1417, 1439, 1520, 1521, 1522, 1658, 1659, 1660, 1722, 1786, 1895, 1907, 2082, 2083], "no_shard": 55, "largest": [55, 592, 947, 1153, 1305, 1326, 1328, 1330, 1331, 1346, 1671, 1732, 1947, 2015, 2016, 2017, 2108, 2112, 2118], "fp32": [55, 784, 797, 820, 1094, 1717, 2046, 2052, 2060, 2072, 2073, 2075, 2106], "flatten_sharded_optim_state_dict": 55, "sharded_optim_state_dict": 55, "shard_full_optim_state_dict": 55, "fsdp_modul": 55, "root_onli": 55, "full_optim_state_dict": 55, "optim_input": 55, "rank0_onli": 55, "get_state_dict_typ": 55, "statedictset": [55, 2013], "assertionerror": [55, 64, 66, 68, 73, 75, 86, 737, 1779, 2089, 2113], "differen": 55, "intercept": [55, 64, 2049, 2105], "occurr": [55, 404, 898, 941, 1271, 1374, 1758, 1961, 1962, 2081, 2100], "statedicttyp": 55, "fulloptimstatedictconfig": [55, 2013], "set_state_dict_typ": 55, "save_a_checkpoint": 55, "load_a_checkpoint": 55, "optim_state_dict_to_load": 55, "is_named_optim": 55, "load_directli": 55, "original_osd": 55, "namedoptim": 55, "keyedoptim": 55, "torchrec": [55, 2013], "gossipgrad": [55, 1717], "latter": [55, 690, 794, 1273, 1497, 1527, 1534, 2050, 2057, 2059, 2061, 2101], "rekey_optim_state_dict": 55, "optim_state_key_typ": 55, "loadabl": [55, 2012], "wrapped_model": 55, "wrapped_optim": 55, "full_osd": 55, "nonwrapped_model": 55, "nonwrapped_optim": 55, "rekeyed_osd": 55, "optimstatekeytyp": 55, "param_id": 55, "osd": 55, "param_nam": 55, "sharded_osd": 55, "scatter_full_optim_state_dict": 55, "new_model": 55, "new_optim": 55, "resid": [55, 63, 198, 291, 339, 1345, 1717, 2046, 2077], "remap": [55, 90, 417, 1277, 1281, 1345, 1366, 2012, 2028, 2076], "state_dict_config": 55, "optim_state_dict_config": 55, "descend": [55, 64, 138, 544, 880, 1186, 1273, 1337, 1338, 1527, 1900, 1928, 2015, 2108], "transpar": [55, 
2048, 2077, 2079], "sharded_state_dict": 55, "shardedstatedictconfig": [55, 2013], "offload_to_cpu": 55, "optimstatedictconfig": [55, 2013], "param_state_dict": 55, "statedictconfig": [55, 2013], "writeback": 55, "summon": 55, "discard": [55, 1188, 1270, 1331, 1735, 2012, 2016, 2030], "redundantli": [55, 2042], "materi": [55, 904, 908, 909, 910, 991, 1588, 1589, 1590, 1719, 1720, 1798, 1851, 1852, 2049, 2104], "throughput": [55, 2014, 2045, 2046, 2052], "backward_post": 55, "altogeth": [55, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 2101], "contend": 55, "volum": [55, 1236], "_hybrid_shard_zero2": 55, "freed": [55, 152, 897, 918, 1017, 1065, 2033, 2046, 2052, 2055, 2077, 2084, 2098, 2115], "param_dtyp": 55, "reduce_dtyp": 55, "buffer_dtyp": 55, "keep_low_precision_grad": 55, "cast_forward_input": 55, "cast_root_forward_input": 55, "_module_classes_to_ignor": 55, "batchnorm": [55, 703, 704, 1175, 1273, 1283, 1527, 1567, 1717, 1726, 1727, 1728, 1729, 2027, 2037, 2043, 2057, 2069, 2072, 2074, 2075], "_batchnorm": [55, 1726, 1728], "permit": [55, 1187, 1198, 2046, 2082], "thereaft": 55, "local_state_dict": 55, "upcast": 55, "recast": 55, "offload_param": 55, "cfg": 55, "finetun": [55, 1273, 1527], "model_fn": 55, "my_checkpoint": 55, "_use_dtensor": 55, "localstatedictconfig": [55, 2013], "shardedoptimstatedictconfig": [55, 2013], "localoptimstatedictconfig": [55, 2013], "love": 56, "hear": 56, "vmap": [56, 58, 59, 64, 909, 910, 912, 914, 918, 923, 924, 1168, 1171, 1172, 1174, 1176], "arbitrarili": [56, 61, 515, 1276, 1598, 2017, 2043, 2049, 2050], "stock": [56, 61], "ensembl": [56, 59, 61, 1176], "maml": [56, 61], "vjp": [56, 59, 60, 893, 894, 904, 909, 910, 1172, 1178, 1977, 2049, 2050], "whirlwind": 56, "tour": 56, "ux": [56, 61, 1182, 2072], "jacrev": [57, 59, 60, 914, 1170, 1171, 2050], "functional_cal": [57, 59, 1176, 2104], "running_mean": [58, 1175, 1273, 1441, 1442, 1443, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1527, 1567, 1603, 1643, 2015, 2057, 2062, 2108], "running_var": [58, 1175, 1273, 1441, 1442, 1443, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1527, 1567, 1603, 1643, 2015, 2062, 2108], "groupnorm": [58, 1634], "anywher": [58, 2042], "batchnorm2d": [58, 716, 719, 721, 726, 817, 1283, 1463, 1501, 1603, 2027, 2043, 2057, 2072, 2074], "track_running_stat": [58, 754, 755, 756, 1175, 1441, 1442, 1443, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1567, 2062], "resnet": [58, 2012, 2014, 2016, 2057, 2070, 2087, 2092], "regnet": 58, "norm_lay": 58, "resnet18": [58, 64, 2012, 2014, 2016, 2028, 2070, 2111], "num_group": [58, 752, 1481, 1634, 2015], "fragil": 58, "replace_all_batch_norm_modules_": 58, "upstream": [59, 2063], "coupl": [59, 83, 2014, 2056, 2064, 2078, 2098, 2112], "jvp": [59, 60, 894, 899, 902, 903, 904, 908, 909, 910, 1174, 2049], "jacfwd": [59, 60, 914, 1170, 2050], "carefulli": [59, 63, 488, 2021, 2050, 2070, 2103, 2105], "make_functional_with_buff": 59, "hurri": 59, "gist": [59, 796, 2054, 2107], "emul": [59, 802, 2018, 2049, 2071], "fmodel": 59, "compute_loss": [59, 61, 1166, 1168], "predict": [59, 488, 1431, 1440, 1480, 1616, 2069, 2087, 2095], "argnum": [59, 1168, 1169, 1170, 1171, 1172], "carri": [59, 338, 800, 841, 842, 861, 862, 2021, 2044, 2072], "stack_module_st": 59, "num_model": [59, 1176], "base_model": 59, "clearer": [59, 1330, 1798, 2049, 2065], "call_single_model": 59, "aotautograd": [59, 683, 978, 2023, 2048, 2103, 2104, 2105, 2113], "stori": [59, 2054, 2101], "grad_x": [60, 2021, 
2050, 2104], "has_aux": [60, 1168, 1169, 1171, 1172, 1173, 1177], "mental": [60, 1167], "absenc": [60, 2095], "unbind": [60, 1178, 1977, 2013, 2015, 2034, 2068, 2086], "presenc": [60, 64, 1283, 1418, 2059, 2070, 2082, 2109], "pop": [60, 943, 1069, 1070, 1528, 1537, 2068, 2071, 2101], "lst": 60, "batchedtensor": 60, "batched_tensor_input": 60, "new_": [60, 2046, 2088], "new_zero": [60, 2015, 2068], "new_empti": [60, 2015, 2021, 2068], "diag_emb": [60, 1099, 1309, 1310, 1337, 1928, 2015, 2068], "vec": [60, 109, 110, 426, 693, 1415, 1768, 2015, 2082, 2108], "copy_": [60, 62, 417, 1167, 1580, 2014, 2015, 2034, 2046, 2084], "arithmet": [60, 949, 952, 966, 1329, 1853, 2018, 2030, 2034, 2035, 2060, 2072, 2085], "extra_arg": 60, "theoret": 60, "custom_dot": 60, "lax": 60, "while_loop": 60, "is_nonzero": [60, 2015, 2068, 2082], "rag": 60, "unclear": [60, 619], "add_nois": 60, "prng": 60, "cos_x": [61, 1168], "neg_sin_x": [61, 1168], "feature_s": [61, 1168, 1178, 1977], "feature_vec": [61, 1168, 1178, 1977], "mseloss": [61, 1168, 1487, 1559, 1665, 2046, 2048], "grad_weight_per_exampl": [61, 1168], "cotang": [61, 1177], "vjp_fn": [61, 1177], "out_tang": 61, "hessian0": 61, "hessian1": 61, "hess": [61, 1170], "set_overwrite_module_params_on_convers": [62, 2013], "to_empti": [62, 1273, 1527, 2025], "get_overwrite_module_params_on_convers": [62, 2013], "set_swap_module_params_on_convers": [62, 2013], "swap_tensor": [62, 417, 2013], "module_load": 62, "get_swap_module_params_on_convers": [62, 417, 1273, 1527, 2013], "rpc_async": [63, 2017, 2068, 2077, 2079], "add_done_callback": 63, "fut": [63, 1276, 1717, 2045, 2077], "set_result": [63, 1717, 2077], "haven": [63, 2057, 2111], "set_except": 63, "baseexcept": 63, "valueerror": [63, 66, 86, 898, 941, 1271, 1737, 1738, 1758, 1761, 2049, 2065, 2081, 2089, 2112], "twice": [63, 619, 913, 1174, 2042, 2043, 2051, 2054, 2103], "slow_set_futur": 63, "sleep": 63, "cb1": 63, "cb2": 63, "dedic": [63, 2046], "pool": [63, 769, 770, 771, 772, 785, 786, 1009, 1043, 1044, 1054, 1065, 1382, 1384, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1474, 1475, 1494, 1495, 1496, 1520, 1521, 1522, 1523, 1524, 1525, 1528, 1592, 1593, 1594, 1595, 1596, 1597, 1600, 1601, 1602, 1628, 1629, 1654, 1655, 1656, 1658, 1659, 1660, 1832, 1833, 2013, 2045, 2046, 2049, 2057, 2059, 2074, 2077, 2098], "didn": [63, 2049, 2052, 2063, 2069], "cb_fut": 63, "chain_cb_fut": 63, "cb": [63, 2077], "held": [63, 1009, 1033, 1061, 1385, 1990, 2042, 2106], "collect_al": [63, 2013], "fut0": 63, "fut1": [63, 2077], "fut_list": 63, "wait_al": [63, 2013], "clamp": [64, 188, 189, 802, 805, 972, 1280, 1439, 1480, 1633, 1644, 1704, 2015, 2027, 2034, 2067, 2068, 2072, 2074, 2075, 2083, 2089, 2108], "call_modul": [64, 82, 1186, 2112], "call_method": [64, 82, 1186, 2099, 2102], "feed": [64, 2014, 2051, 2057, 2092, 2103], "fake": [64, 86, 715, 716, 717, 718, 719, 720, 721, 722, 723, 733, 734, 735, 759, 796, 800, 802, 803, 813, 815, 841, 865, 866, 1123, 1124, 1186, 1188, 1197, 1210, 2021, 2065, 2070, 2072, 2075, 2094], "theses": 64, "callsit": 64, "constitut": [64, 2101], "isol": [64, 1707, 2033, 2069, 2113], "tracer_class": 64, "gm": [64, 2099, 2102, 2103, 2112, 2113], "treatment": 64, "topk": [64, 2015, 2034, 2068, 2108], "print_tabular": [64, 2099, 2102], "opcod": [64, 2070, 2099, 2102], "linear_weight": 64, "linear_1": 64, "topk_1": 64, "pose": [64, 2078, 2101], "explor": [64, 81, 2012, 2043, 2057, 2070, 2115], "lint": 64, "inserting_aft": [64, 2112], "new_nod": 64, "replace_all_uses_with": [64, 2112], 
"tediou": 64, "unwieldi": 64, "fusion": [64, 683, 795, 817, 864, 865, 976, 1275, 1282, 1287, 1288, 1289, 1290, 1724, 1725, 2027, 2057, 2072, 2101, 2104, 2106, 2112, 2113], "imagin": [64, 2077, 2101, 2105], "requisit": 64, "relu_decomposit": 64, "decomposition_rul": 64, "constitu": [64, 2036, 2049, 2052], "new_graph": 64, "graphappendingtrac": 64, "proxy_arg": 64, "output_proxi": 64, "node_copi": 64, "ari": [64, 2067], "unari": [64, 1171, 1172, 1571, 1573, 1575, 2018, 2034], "organiz": 64, "shapeprop": 64, "named_modul": [64, 1273, 1527, 2057], "args_it": 64, "load_arg": 64, "map_arg": 64, "fetch_attr": [64, 1186], "target_atom": 64, "attr_itr": 64, "hasattr": [64, 1966, 2015, 2049, 2072, 2102], "nonexist": [64, 2016, 2017], "self_obj": 64, "encompass": 64, "prove": [64, 2033], "disprov": 64, "led": [64, 2115], "auxiliari": [64, 1168, 1169, 1171, 1172, 1173, 1177, 2012, 2056], "nondeterminist": [64, 315, 317, 323, 501, 517, 519, 947, 1110, 1111, 1112, 1295, 1446, 1454, 1455, 1456, 1457, 1458, 1459, 1523, 1524, 1525, 1608, 1609, 1610, 1611, 1612, 1613, 1617, 1625, 1633, 1644, 1672, 1685, 1704, 1705, 1706, 1870, 1965, 2099], "unord": [64, 1528, 1537], "nondetermin": [64, 924, 2061], "dedupl": [64, 2067], "torchvis": [64, 2012, 2013, 2014, 2016, 2053, 2067, 2070, 2072, 2087, 2092, 2111], "transformed_resnet18": 64, "input_imag": 64, "224": [64, 865, 866, 2014, 2016, 2053, 2067, 2111], "margin": [64, 1460, 1486, 1519, 1530, 1532, 1576, 1577, 1614, 1641, 1657, 1666, 1701, 1702, 2015, 2087], "commut": 64, "toolbox": 64, "tradit": [64, 1575, 2064, 2065], "luckili": 64, "my_pass": 64, "my_module_transform": 64, "input_valu": 64, "prompt": [64, 2012, 2063], "set_trac": [64, 1278, 1285, 1291, 2014], "examin": [64, 2057, 2067, 2071, 2115], "undergon": 64, "subclassm": 64, "pre_trac": 64, "post_trac": 64, "sake": 64, "tabular": [64, 683, 2023], "transform_graph": 64, "session": [64, 2065], "luck": [64, 2101], "input_nod": 64, "stepwis": 64, "breakpoint": [64, 2017, 2101, 2112], "excel": [64, 2105], "realpython": 64, "pycharm": 64, "vscode": 64, "graphic": [64, 2031, 2063, 2111], "parlanc": 64, "func_to_trac": 64, "dyn": 64, "155": 64, "__bool__": [64, 2015, 2017], "to_bool": 64, "traceerror": [64, 2072], "hyper": [64, 1559, 2016, 2087], "do_activ": 64, "512": [64, 1571, 1572, 1573, 1574, 1575, 2046, 2097], "without_activ": 64, "with_activ": 64, "traced_without_activ": 64, "traced_with_activ": 64, "concrete_arg": 64, "__torch_function__": [64, 2114], "161": 64, "len_1": 64, "sqrt_1": 64, "truediv": [64, 2099, 2101, 2102], "mycustomtrac": 64, "traced_graph": 64, "runnabl": [64, 2042, 2067, 2087, 2113], "myspecialsubmodul": 64, "neg_1": 64, "is_leaf_modul": [64, 2093], "sparse_coo_tensor": [64, 546, 583, 1909, 2015, 2019, 2068, 2082], "ones_lik": [64, 1171, 1172, 1177, 2015, 2021, 2036, 2046, 2049, 2050, 2057, 2068], "zeros_lik": [64, 904, 906, 908, 909, 910, 2015, 2019, 2036, 2046, 2068, 2082], "viabl": [64, 2043, 2046], "torch_randn": 64, "gotcha": 64, "dropoutrepro": 64, "assert_clos": [64, 1126, 1128, 1129, 1134, 1135, 1140, 1141, 1142, 1144, 1146, 2013, 2014, 2089], "greatest": [64, 1215, 1417, 2067, 2089], "6207983493804932": 64, "dropoutrepro2": 64, "overspeci": [64, 976, 2100], "ph": 64, "shouldn": [64, 1009, 2070, 2082, 2086], "fn_or_nam": 64, "callfunct": 64, "my_custom_funct": [64, 978], "fn_to_be_trac": 64, "reassign": [64, 2046], "regener": 64, "unset": [64, 939, 2060], "add_submodul": 64, "subpath": 64, "get_submodul": [64, 1273, 1527], "delete_all_unused_submodul": 64, "delete_submodul": 
64, "print_read": [64, 2021, 2102], "print_output": 64, "include_strid": 64, "include_devic": 64, "date": [64, 2070], "pythoncod": 64, "fxmodul": 64, "owning_modul": 64, "tracer_cl": 64, "tracer_extra": 64, "the_funct": 64, "type_expr": 64, "create_nod": 64, "method_nam": 64, "0th": [64, 1178, 1977], "inserting_befor": 64, "influenc": [64, 2065, 2082, 2116], "eliminate_dead_cod": 64, "topolog": [64, 2067], "attr_1": 64, "is_impur": 64, "bad": [64, 66, 2033, 2067, 2070, 2072, 2103, 2111], "erase_nod": 64, "to_eras": 64, "eras": [64, 66, 71, 72, 2100], "find_nod": 64, "iterat": 64, "qualified_nam": 64, "graph_copi": 64, "val_map": 64, "return_output_nod": 64, "opoverload": [64, 1186, 2021], "companion": 64, "arg_transform": 64, "value_remap": 64, "_node_list": 64, "doubli": 64, "on_generate_cod": 64, "make_transform": 64, "transformcodefunc": 64, "insert_pdb": 64, "bodi": [64, 66, 75, 78, 1285, 2014, 2016, 2017, 2018, 2065, 2102, 2113], "current_tran": 64, "stuff": [64, 2103], "default_valu": 64, "_not_": 64, "tabul": 64, "process_input": [64, 1186], "process_output": [64, 1186], "python_cod": 64, "root_modul": [64, 796, 2074], "set_codegen": 64, "return_typ": [64, 700, 1088, 1089, 1236, 1295, 1333, 1371, 1374, 1376, 1379, 1419, 1900, 1947, 1952], "printout": [64, 1053, 1066, 2102], "all_input_nod": 64, "format_nod": 64, "placeholder_nam": 64, "maybe_return_typenam": 64, "autogener": [64, 2050], "insert_arg": 64, "impur": 64, "normalized_argu": 64, "arg_typ": 64, "kwarg_typ": 64, "normalize_to_only_use_kwarg": 64, "vararg": 64, "argskwargspair": 64, "bx": 64, "ax": [64, 969, 1305, 1319, 1323, 1334, 1335, 1336, 1364, 1375, 1535, 1546, 1952, 2067, 2104], "prev": [64, 1796], "replace_with": 64, "delete_user_cb": 64, "propagate_meta": 64, "replace_input_with": 64, "old_input": 64, "new_input": 64, "create_proxi": [64, 2101], "record_stack_trac": 64, "outputgraph": [64, 2101], "update_arg": 64, "update_kwarg": 64, "autowrap_modul": 64, "autowrap_funct": 64, "create_arg": 64, "create_args_for_root": 64, "root_fn": 64, "is_modul": 64, "introspect": [64, 978, 2104], "disallow": [64, 2070, 2077, 2085, 2104, 2105], "proxy_factory_fn": 64, "get_fresh_qualnam": 64, "clash": 64, "attr": [64, 66, 68, 1193, 1375, 1856], "attr_val": 64, "parameter_proxy_cach": 64, "module_qualified_nam": [64, 2093], "path_of_modul": 64, "some_hyperparamet": 64, "indexed_item": 64, "proxied_valu": 64, "garbage_collect_valu": [64, 1186], "run_nod": [64, 1186], "vice": [64, 460, 605, 1162, 1163, 1519, 2043, 2075, 2084, 2100], "versa": [64, 460, 605, 1162, 1163, 1519, 2043, 2075, 2084, 2100], "negsigmswapinterpret": 64, "call_self": 64, "args_tail": 64, "boxed_run": [64, 1186], "args_list": [64, 1186], "promptli": [64, 1186, 1197, 1346], "fetch_args_kwargs_from_env": [64, 1186], "map_nodes_to_valu": [64, 1186], "initial_env": [64, 1186], "enable_io_process": [64, 1186], "negsigmswapxform": 64, "nodes_map": [64, 2112], "subgraph_rewrit": [64, 2112], "m1": [64, 1975, 1976, 2029], "m2": [64, 1765, 1975, 1976, 2029, 2072], "traced_modul": [64, 2062, 2112], "despit": [64, 1289, 1290, 2046, 2054], "stack_1": 64, "stack_2": 64, "sum_2": 64, "max_1": 64, "max_2": 64, "exhaust": [66, 1717, 2116], "wih": 66, "escap": [66, 76, 978], "hatch": [66, 76, 978], "mypi": [66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 1260, 2016, 2017], "untyp": [66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 2084], "assumeconstantresult": [66, 77], "burn": [66, 77, 2098], "tracabl": [66, 77], "get_item": [66, 77], "i64": [66, 71, 72, 
76, 77], "myautogradfunct": 66, "autogradfunct": 66, "allow_in_graph": [66, 2104], "p_linear_weight": 66, "p_linear_bia": 66, "mul_1": [66, 72, 2099, 2101], "mysubmodul": [66, 74, 75], "condbranchclassmethod": [66, 74, 75], "subm": [66, 74, 75], "condbranchnestedfunct": [66, 74, 75], "inner_true_fn": [66, 74, 75], "inner_false_fn": [66, 74, 75], "condbranchnonlocalvari": [66, 74, 75], "my_tensor_var": [66, 74, 75], "my_primitive_var": [66, 74, 75], "nonloc": [66, 69, 74, 75, 2018], "c_lifted_tensor_0": [66, 74, 75], "lift_fresh_copi": [66, 74, 75, 2068], "sub_1": [66, 74, 75], "constant_tensor": [66, 74, 75], "lifted_tensor_0": [66, 74, 75], "condclosedovervari": [66, 69, 74], "condoperand": [66, 74, 75], "sym_size_int": [66, 74, 75], "vr": [66, 74, 75, 76, 77, 1192], "condpred": [66, 74, 75], "constrainassizeexampl": [66, 76, 77], "hint": [66, 76, 77, 683, 1009, 1043, 1054, 1188, 1198, 1204, 1274, 1533, 1571, 1572, 1573, 1574, 1575, 2014, 2016, 2017, 2021, 2046, 2049, 2065, 2077, 2100], "_check": [66, 76, 77], "_check_is_s": [66, 76, 77], "u0": [66, 76, 77], "sym_constrain_range_for_size_default": [66, 76, 77], "sym_constrain_range_for_s": [66, 76, 77, 2015, 2068], "sym_constrain_rang": [66, 76, 77, 2015, 2068], "le": [66, 76, 77, 362, 1195, 1301, 1483, 1484, 1637, 1638, 2015, 2034, 2068, 2089, 2108], "_assert_scalar": [66, 76, 77, 2068], "le_1": [66, 76, 77], "_assert_scalar_1": [66, 76, 77], "le_2": [66, 76, 77], "u1": [66, 76, 77], "u2": [66, 76, 77, 2113], "constrainasvalueexampl": [66, 76, 77], "test_decor": 66, "dynamicshapeassert": [66, 67], "dynamicshapeconstructor": [66, 75], "dynamicshapeifguard": [66, 71, 75], "dynamicshapemap": [66, 75, 78], "body_graph_0": [66, 75, 78], "map_impl": [66, 75, 78], "dynamicshapesl": [66, 75], "slice_2": [66, 75], "dynamicshapeview": [66, 75], "new_x_shap": [66, 75], "support_level": [66, 72], "supportlevel": [66, 72], "fnwithkwarg": [66, 72], "pos0": [66, 72], "tuple0": [66, 72], "myarg": [66, 72], "mykw0": [66, 72], "mykwarg": [66, 72], "input0": [66, 72], "input1": [66, 72, 763, 1444, 1460, 1461, 1519, 1536, 1604, 1614, 1615, 1657, 2015, 2042, 2067], "tuple0_0": [66, 72], "tuple0_1": [66, 72], "myargs_0": [66, 72], "myargs_1": [66, 72], "mul_2": [66, 72], "mul_3": [66, 72], "mul_4": [66, 72], "mul_5": [66, 72], "mul_6": [66, 72], "listcontain": [66, 67, 72, 75], "monkei": [66, 67, 72, 75, 1966], "cow": [66, 67, 72, 75], "pig": [66, 67, 72, 75], "listunpack": [66, 71, 72], "args_0": [66, 71, 72], "args_1": [66, 71, 72], "args_2": [66, 71, 72], "nestedfunct": [66, 69], "nullcontextmanag": [66, 70], "nullcontext": [66, 70], "pytreeflatten": 66, "tree_flatten": 66, "x_2": [66, 1088, 1089, 1090, 1091, 1341, 1444, 1460, 1461, 1604, 1615], "dim1_x": [66, 75], "scalaroutput": [66, 75], "anim": 66, "moo": 66, "specializedattribut": 66, "staticforloop": [66, 71], "unrol": [66, 71, 2016, 2017, 2067, 2104], "ret": [66, 71, 2049, 2077], "add_3": [66, 71], "add_4": [66, 71], "add_5": [66, 71], "add_6": [66, 71], "add_7": [66, 71], "add_8": [66, 71], "add_9": [66, 71], "staticif": [66, 71], "tensorsetattr": [66, 68], "setattr": [66, 68, 2017], "typereflectionmethod": [66, 68], "overli": [66, 68, 2049, 2101], "typereflectionmethodrewrit": [66, 68], "userinputmut": [66, 79], "not_supported_yet": [66, 68, 73, 75, 80], "dynamicshaperound": [66, 68, 75], "roundtoint": [66, 68, 75], "inttruediv": [66, 68, 75], "modelattrmut": [66, 73], "attr_list": [66, 73], "recreate_list": [66, 73], "optionalinput": [66, 73], "unexpectedli": [66, 73, 488, 2084, 2088], 
"torchsymmin": [66, 80], "sym_min": [66, 80], "0x7fe9d8bd4b80": [66, 80], "undergo": 81, "gain": [81, 1798, 2041, 2094, 2095, 2106, 2107], "fxe0010": [81, 2065], "diagnosticopt": [81, 83, 2065], "verbosity_level": [81, 83, 2065], "onnx_diagnost": [81, 83, 683], "fxe0008": [81, 2065], "onnxscript": [82, 2065, 2066, 2067], "torchlib": [82, 86], "fxonnxinterpret": 82, "onnx": [83, 84, 86, 87, 88, 683, 1759, 1760, 1778, 1779, 1780, 2013, 2035, 2072, 2094], "endors": 83, "noteworthi": 83, "_intern": [83, 88, 2084], "disregard": [86, 87, 89, 2101], "indexerror": [86, 1743, 1745], "typeerror": [86, 1748, 2049, 2089, 2114], "opschema": [86, 2065], "onnxfunct": [87, 89, 2065], "perfect": 87, "nearest": [87, 782, 788, 790, 1420, 1580, 1582, 1633, 1644, 1704, 1706, 1828, 1856, 2046], "highest": [87, 683, 1431, 1838, 1839, 1871, 2073, 2089], "stringent": 87, "op_level_debug": [87, 89, 2065], "elementwise_dtyp": 88, "_ref": 88, "type_promot": 88, "absent": [89, 2048], "signifi": 89, "g_cpu": 90, "g_cuda": 90, "clone_st": 90, "cloned_st": 90, "bytetensor": [90, 1040, 1041, 1080, 1081, 1226, 1387, 1394, 1876, 1968, 1994, 1995, 2005, 2006, 2076, 2085, 2088], "graphsafe_get_st": [90, 2046], "current_st": 90, "graphsafe_set_st": [90, 2046], "g_cuda_oth": 90, "2147483647": 90, "0x8000_0000_0000_0000": [90, 1366, 2076], "0xffff_ffff_ffff_ffff": [90, 1366, 2076], "random_devic": 90, "1516516984916": 90, "new_stat": [90, 1080, 1081, 1394, 1876, 1968, 2005, 2006, 2076], "void": [90, 1052, 2046, 2056, 2104], "g_cpu_oth": 90, "abs_": [94, 2015, 2034, 2088], "acosh": [98, 124, 871, 2015, 2034, 2068, 2108], "batch1": [101, 102, 153, 154, 689, 944, 2015], "batch2": [101, 102, 153, 154, 171, 689, 944, 2015], "tensor1": [103, 104, 105, 106, 690, 691, 1368, 1853, 2015], "tensor2": [103, 104, 105, 106, 316, 406, 690, 691, 1368, 1853, 2015], "mat1": [107, 108, 556, 692, 1238, 1294, 1378, 1901, 1905, 1906, 1920, 2015, 2108], "mat2": [107, 108, 415, 556, 692, 956, 1238, 1294, 1378, 1901, 1905, 1906, 1920, 2015, 2108], "mat": [109, 110, 542, 693, 1415, 1834, 1898, 1901, 1920, 2015, 2087], "vec1": [111, 112, 694, 2015], "vec2": [111, 112, 290, 463, 694, 1218, 1816, 2015], "keepdim": [114, 116, 117, 118, 120, 136, 137, 356, 394, 409, 411, 412, 413, 416, 431, 432, 433, 434, 455, 472, 481, 557, 567, 617, 696, 698, 699, 700, 702, 878, 879, 1295, 1326, 1330, 1343, 1361, 1371, 1373, 1374, 1376, 1379, 1418, 1419, 1420, 1421, 1536, 1673, 1772, 1825, 1828, 1922, 1923, 1927, 1972, 1973, 2015, 2034, 2083, 2108], "rtol": [115, 346, 697, 923, 924, 1262, 1328, 1331, 1780, 2014, 2015, 2089], "atol": [115, 346, 697, 923, 924, 1262, 1328, 1331, 1340, 1780, 2014, 2015, 2049, 2089], "08": [115, 346, 697, 954, 1262, 1320, 1461, 1541, 1677, 1784, 1785, 1786, 1788, 1794, 1795, 1798, 1811, 1882, 2015], "equal_nan": [115, 346, 697, 1262, 2015, 2089], "arcco": [123, 2015, 2068, 2089], "acosh_": [125, 2015, 2034], "arccosh": [125, 2015, 2068], "arcsin": [127, 885, 2015, 2068, 2082], "arcsinh": [129, 2015, 2068], "atan2_": [132, 2015, 2034], "arctan2": [132, 2015, 2068], "arctan": [133, 2015, 2068], "arctanh": [135, 2015, 2068], "asinh": [145, 873, 2015, 2034, 2068, 2082, 2108], "atan": [149, 634, 635, 874, 2015, 2034, 2068, 2082, 2108], "atanh": [151, 876, 2015, 2034, 2068, 2082, 2108], "wrt": [152, 923, 924], "60521": [152, 897], "issuecom": [152, 897], "867061780": [152, 897], "texttt": [155, 156, 697, 1262, 1431, 2089], "bernoulli": [156, 763, 1435, 1464, 1465, 1466, 1467, 1471, 1478, 1497, 1618, 1619, 1620, 1621, 1626, 2013, 2015, 2034, 2068, 
2091], "preserve_format": [157, 172, 174, 177, 180, 181, 182, 191, 208, 211, 242, 269, 299, 327, 395, 527, 582, 625, 973, 1111, 1165, 1777, 1837, 1839, 1841, 2011, 2049, 2085], "minlength": [158, 947, 2015], "bitwise_and": [160, 2015, 2068, 2108], "bitwise_left_shift": [162, 2015, 2068], "bitwise_not": [164, 2015, 2034, 2068, 2108], "bitwise_or": [166, 2015, 2068, 2108], "bitwise_right_shift": [168, 2015, 2068], "bitwise_xor": [170, 2015, 2068, 2108], "uint8": [174, 244, 619, 696, 702, 1162, 1826, 1829, 1830, 1853, 2072, 2084, 2085, 2087, 2088, 2118], "cauchi": [175, 2013, 2043, 2054, 2068, 2091], "dfrac": [175, 379, 610, 1461, 1615, 1732, 1733, 1766, 1769], "complex32": [181, 1454, 1455, 1456, 1608, 1609, 1610, 1778, 1868, 2088, 2089], "int8": [182, 450, 732, 948, 949, 950, 951, 952, 953, 1162, 1355, 1356, 1357, 1358, 2072, 2075, 2082, 2084, 2085, 2088, 2118], "input2": [185, 461, 462, 519, 763, 1444, 1460, 1461, 1519, 1536, 1604, 1614, 1615, 1657, 2015, 2042, 2067], "clamp_": [190, 2015, 2034], "uncoalesc": [192, 330, 1913], "coo": [192, 325, 330, 344, 616, 1238, 1901, 1905, 1910, 1911, 1912, 1913, 1914, 1915, 2013, 2024, 2085, 2089], "inttensor": [193, 210, 1248, 1363, 1364, 1469, 2085, 2088], "csr": [193, 210, 345, 585, 590, 1470, 1901, 1905, 1906, 1912, 1915, 2024, 2089], "sparse_csr": [193, 210, 585, 590, 1905, 1906, 1908, 1912, 1915, 2082], "nnz": [193, 546, 585, 586, 587, 589, 590, 1198, 1423, 1903, 1905, 1906, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 2021, 2082], "int32": [193, 210, 315, 323, 327, 449, 619, 960, 1111, 1123, 1124, 1159, 1162, 1163, 1304, 1315, 1316, 1317, 1335, 1363, 1446, 1826, 1863, 2072, 2082, 2084, 2085, 2088, 2106, 2118], "mkl": [193, 210, 2013, 2045, 2053, 2063, 2068, 2082, 2090, 2116], "routin": [193, 210, 880, 1346, 1900, 1928, 2082], "downcast": [193, 210], "to_sparse_csr": [193, 210, 586, 587, 1902, 1905, 1906, 2015, 2068, 2082], "conj_phys": [196, 991, 2015, 2068, 2082, 2108], "contiguous_format": [197, 333, 501, 502, 1110, 1717, 1724, 1725, 2085], "non_block": [198, 211, 582, 605, 625, 1273, 1527, 1758, 2015, 2046, 2084, 2108], "copysign": [200, 2015, 2068], "fweight": [207, 998, 2015], "aweight": [207, 998, 2015], "sparse_dim": [220, 547, 548, 1901, 1905, 1909, 1913, 2015, 2068, 2082], "dim1": [227, 229, 230, 571, 594, 595, 1097, 1099, 1100, 1308, 1931, 1949, 2015, 2108], "dim2": [227, 229, 230, 1097, 1099, 1100, 1308, 2015, 2108], "digamma": [233, 2015, 2034, 2068, 2083, 2108], "laid": 235, "outermost": [235, 1227], "channels_last": [235, 1273, 1527, 1717, 1724, 1725, 2085], "rounding_mod": [237, 238, 239, 240, 1104, 1105, 1154, 1157, 1847, 1957, 2015, 2108], "split_size_or_sect": [243, 304, 621, 1916], "eq": [246, 2015, 2034, 2068, 2108], "erf": [249, 642, 643, 2015, 2034, 2068, 2082, 2083, 2108], "erfc": [251, 644, 645, 2015, 2034, 2068, 2083, 2108], "lambd": [260, 300, 1482, 1565, 1636, 1694, 1781, 2015], "theori": [260, 2043, 2052], "fill_valu": [262, 448, 1164, 1165, 1446, 2015, 2046, 2108], "tall": [262, 1332, 1337, 2049], "start_dim": [265, 1148, 1472, 2015], "end_dim": [265, 1148, 1472, 2015], "float_pow": [271, 2015, 2068], "floor_divid": [275, 1104, 2015, 2068, 2082], "divisor": [278, 279, 491, 492, 771, 772, 1104, 1154, 1157, 1183, 1215, 1437, 1438, 1473, 1579, 1601, 1602, 1847, 1957], "fmod": [279, 1847, 2015, 2068, 2108], "mantissa": [282, 483, 1159, 1297, 1871, 2015, 2046, 2060, 2108], "gcd": [285, 2015, 2068, 2108], "ge": [287, 1229, 1367, 1478, 1483, 1484, 1497, 1535, 1637, 1638, 2015, 2034, 2068, 2108], "geometr": [288, 782, 
788, 1633, 1644, 1704, 1971, 2013, 2068, 2091, 2110], "greater_equ": [296, 2015, 2068], "hypot": [306, 2015, 2068, 2108], "igamma": [310, 2015, 2068, 2108], "igammac": [312, 2015, 2068, 2108], "3100": [313, 485, 1244, 1845], "3553j": [313, 485, 1244, 1845], "5445": [313, 485, 1244, 1845], "7896j": [313, 485, 1244, 1845], "6492": [313, 485, 1244, 1845], "0633j": [313, 485, 1244, 1845], "0638": [313, 485, 1244, 1845], "8119j": [313, 485, 1244, 1845], "3553": [313, 1244], "7896": [313, 1244], "0633": [313, 1244, 1320, 2036], "8119": [313, 1244], "index_add_": [314, 904, 906, 909, 1245, 1246, 2015, 2061], "index_copy_": [316, 2015], "index_fill_": [318, 2015, 2034], "index_put_": [320, 2015], "include_self": [323, 518, 519, 1247, 1862, 2015, 2108], "identit": 323, "floattensor": [323, 489, 689, 690, 691, 692, 693, 738, 923, 924, 944, 1469, 1470, 1530, 1571, 1869, 2085, 2088], "amax": [323, 519, 699, 700, 1905, 2015, 2068, 2108], "amin": [323, 519, 698, 700, 1905, 2015, 2068, 2108], "fill_": [323, 1273, 1299, 1527, 1848, 2015, 2034, 2057, 2084], "72": [323, 619, 1227, 2072], "uint8_t": [328, 1778], "retain_grad": [337, 2015, 2043, 2068], "requires_grad_": [337, 450, 1273, 1446, 1527, 1617, 1902, 1905, 1943, 2015, 2034, 2043, 2088, 2113], "n_fft": [353, 558, 1270, 1924, 2015], "hop_length": [353, 558, 1270, 1924, 2015], "win_length": [353, 558, 1270, 1924, 2015], "center": [353, 558, 782, 788, 1129, 1270, 1598, 1633, 1644, 1704, 1795, 1817, 1884, 1924, 2015, 2043, 2053, 2068, 2087], "onesid": [353, 558, 1270, 1924, 2015, 2108], "return_complex": [353, 558, 1270, 1924, 2015], "element_s": [355, 437, 2015, 2034, 2068, 2084], "lcm": [358, 2015, 2068], "ldexp": [360, 1159, 2015, 2068], "lerp": [364, 2015, 2068], "lt": [365, 397, 1195, 1300, 2014, 2015, 2034, 2068, 2099, 2102, 2108], "less_equ": [368, 2015, 2068], "lgamma": [370, 654, 655, 2015, 2068, 2108], "ln": [379, 1302, 2083], "logical_and": [385, 2015, 2068, 2108], "logical_not": [387, 1685, 2015, 2034, 2036, 2068, 2108], "logical_or": [389, 2015, 2068, 2108], "logical_xor": [391, 2015, 2068, 2108], "pivot": [398, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1363, 1364, 1365, 1827, 2015], "get_info": [398, 1363], "lu_data": [399, 1364, 1365, 2015], "lu_pivot": [399, 1364, 1365, 2015], "masked_fill_": [401, 1685, 2015, 2034, 2035], "booltensor": [402, 404, 738, 1367, 1571, 1980, 2085, 2088], "masked_scatter_": [403, 2015], "mvlgamma": [428, 2015, 2068], "posinf": [429, 430, 1417, 2015], "neginf": [429, 430, 1417, 2015], "nan_to_num": [430, 2015, 2068], "interpol": [433, 481, 788, 789, 790, 822, 1299, 1420, 1580, 1581, 1582, 1598, 1633, 1704, 1705, 1706, 1828, 1965, 2015, 2074], "ne": [441, 1732, 1766, 1774, 2015, 2034, 2068, 2108, 2113], "8182e": 447, "5765e": 447, "41": [447, 1112, 1332, 1707, 1765, 1827], "0545e": 447, "0949e": 447, "4842e": [447, 1112], "0000e": [447, 1112, 1152, 1351, 1360, 1417, 1765, 1882, 1884, 1885, 1890, 1891], "00": [447, 1112, 1152, 1351, 1360, 1417, 1765, 1779, 1882, 1884, 1885, 1890, 1891, 2014, 2067], "141592": [448, 1164], "1416": [448, 1093, 1164, 1943], "from_numpi": [450, 883, 884, 1867, 1943, 2021, 2104], "array_lik": [450, 883, 1910, 1911, 1912, 1913, 1914, 1915, 1943, 2036, 2088], "nextaft": [453, 2015, 2068, 2108], "fro": [455, 1305, 1326, 1330, 1343, 1743, 1752, 1772, 2015], "not_equ": [458, 2015, 2068], "resolve_conj": [460, 991, 2015, 2068], "resolve_neg": [460, 2015, 2068], "shorthand": [460, 1293], "input3": [462, 2015], "polygamma": [468, 2015, 2068, 2083], "q_per_channel_axi": [475, 476, 2015, 
2068], "zero_point": [476, 478, 741, 742, 743, 744, 745, 746, 747, 752, 753, 754, 755, 756, 757, 758, 759, 761, 762, 773, 775, 776, 777, 778, 780, 783, 784, 802, 805, 822, 823, 824, 825, 828, 856, 1123, 1124, 1829, 1830, 1831, 1832, 1833, 2015, 2072, 2073], "qtensor": [480, 2015], "queu": [488, 1015, 1400, 1984, 2046], "life": [488, 1012], "cycl": [488, 897, 1012, 1188, 1803, 1809, 2043, 2071], "poll": 488, "realloc": [488, 976, 1248, 2046], "counterintuit": [488, 2043], "s1": [488, 1188, 2101], "some_comm_op": 488, "wouldn": [488, 1201, 2079, 2103], "chrome": [488, 935, 2071, 2107, 2111], "export_chrome_trac": [488, 2071, 2111], "cudacachingalloc": [488, 2046, 2052], "enable_grad": [490, 2019, 2091], "0100": [490, 879, 1327, 1829], "0200": 490, "0300": [490, 2062], "maxnorm": [493, 494, 1848, 2015], "tile": [495, 2015, 2046, 2068, 2082, 2104], "repeat_interleav": [495, 1965, 2015, 2068, 2074], "output_s": [496, 744, 745, 746, 769, 770, 1428, 1429, 1430, 1432, 1433, 1434, 1458, 1473, 1474, 1475, 1523, 1524, 1525, 1579, 1592, 1593, 1594, 1595, 1596, 1597, 1627, 1628, 1629, 1661, 1662, 1663, 1849, 2015, 2036, 2108], "is_leaf": [497, 2015, 2034, 2036, 2068], "saved_weight": 498, "loaded_weight": 498, "5503": 498, "4926": [498, 2057], "1158": 498, "8303": 498, "1007": 498, "9853": 498, "2316": 498, "6606": 498, "resiz": [501, 502, 526, 547, 548, 1162, 1163, 1167, 1580, 1633, 1644, 2015, 2034, 2053, 2068, 2084, 2108], "set_": [501, 2015], "shift": [507, 949, 952, 1129, 1136, 1435, 1441, 1442, 1443, 1471, 1567, 1626, 1854, 1883, 2015, 2018], "decim": [509, 510, 1856, 2015, 2060, 2118], "scatter_": [514, 517, 1860, 2015], "scatter_add_": [515, 516, 1861, 1965, 2015], "scatter_reduce_": [515, 518, 1862, 2015], "axi": [515, 517, 519, 825, 828, 989, 1108, 1123, 1149, 1214, 1239, 1829, 1849, 1854, 1855, 1979, 2015, 2021, 2050, 2067, 2072, 2104, 2108, 2112], "4600": 515, "2300": 515, "scatter_reduc": [519, 1965, 2015, 2068, 2108], "sgn": [524, 1158, 1880, 2015, 2034, 2036, 2068, 2082], "untypedstorag": [526, 559, 615, 2013, 2062, 2084], "int16": [527, 1162, 1356, 2082, 2084, 2085, 2088, 2118], "dense_dim": [545, 547, 548, 585, 586, 587, 589, 590, 1913, 2015, 2068, 2082], "nse": [546, 2082], "6550": 546, "2397": 546, "1611": 546, "0779": [546, 1310, 1824, 1980], "2326": 546, "0558": 546, "4711": 546, "9678": 546, "5138": 546, "0411": 546, "9417": 546, "5158": 546, "0793": 546, "0036": [546, 1371], "2569": 546, "1055": 546, "sparse_coo": [546, 585, 1423, 1905, 1908, 1909, 1913, 2082, 2085], "split_siz": [549, 1916, 2015, 2108], "squeez": [555, 696, 698, 699, 700, 702, 1295, 1361, 1371, 1373, 1374, 1376, 1379, 1418, 1421, 1615, 1825, 1909, 1922, 1923, 1927, 1972, 1973, 2015, 2034, 2067, 2068, 2074, 2086, 2108], "pad_mod": [558, 1924, 2015], "typedstorag": [559, 2013, 2062, 2084], "untyped_storag": 559, "compute_uv": [569, 1337, 1338, 1928, 2015], "axis0": [570, 1930, 2015], "axis1": [570, 1930, 2015], "dim0": [571, 594, 595, 1931, 1949, 2015], "indices_or_sect": [580, 1107, 1237, 1944, 1978], "5044": 582, "0005": [582, 1807, 1813], "3310": 582, "0584": [582, 1928], "cuda0": [582, 2046, 2055, 2088], "masked_grad": [583, 2015], "sparse_mask": [583, 2015, 2068], "mkldnn": [584, 1283, 2013, 2068], "sparsedim": 585, "blocksiz": [585, 586, 587, 1910, 1911, 1912, 2015, 2082], "sparse_csc": [585, 589, 1908, 1912, 1914, 2082], "sparse_bsr": [585, 587, 1911, 1912, 2082], "sparse_bsc": [585, 586, 1910, 1912, 2082], "bsr": [585, 587, 1911, 1912, 2089], "bsc": [585, 586, 1910, 1912, 2089], "csc": [585, 589, 1912, 
1914, 2089], "minu": [585, 586, 587, 589, 590, 2083], "crow_indic": [585, 587, 590, 1903, 1905, 1906, 1908, 1911, 1912, 1915, 2015, 2068, 2082, 2089], "col_indic": [585, 587, 590, 1905, 1906, 1908, 1911, 1912, 1915, 2015, 2068, 2082, 2089], "sparsecsr": [585, 1949, 2082], "row_indic": [586, 589, 1910, 1914, 2015, 2068, 2082, 2089], "ccol_indic": [586, 589, 1910, 1914, 2015, 2068, 2082, 2089], "_nnz": [588, 589, 590, 2068], "012766935862600803": 591, "5415473580360413": 591, "08909505605697632": 591, "7729271650314331": 591, "unitriangular": [596, 1336, 1952, 2015], "tril": [598, 1588, 1589, 1590, 1685, 2015, 2068, 2104], "triu": [600, 1737, 1952, 2015, 2067, 2068, 2104], "trunc": [604, 680, 681, 690, 1104, 1147, 1154, 1157, 1856, 2015, 2034, 2068, 2082, 2108], "sizedim": 609, "return_invers": [611, 612, 1961, 1962, 2015], "return_count": [611, 612, 1961, 1962, 2015], "unsqueez": [614, 1129, 1294, 1334, 1469, 1547, 1737, 1946, 2015, 2049, 2068, 2074, 2082, 2086, 2087, 2108], "subspac": [619, 1337, 1533, 1817, 1928, 1929], "span": [619, 1070, 1071, 1462, 1928, 2071, 2077], "foral": 619, "proportion": [619, 788, 1580, 1704], "9482": [619, 1250], "0310": 619, "4999": 619, "5316": 619, "1520": 619, "7472": 619, "5617": 619, "8649": 619, "4724": [619, 2057], "0334": 619, "2976": 619, "8499": 619, "2109": 619, "9913": 619, "9607": 619, "6123": 619, "1064483442": 619, "1124191867": 619, "1069546515": 619, "1089989247": 619, "1105482831": 619, "1061112040": 619, "1057999968": 619, "1084397505": 619, "1071760287": 619, "1123489973": 619, "1097310419": 619, "1084649136": 619, "1101533110": 619, "1073668768": 619, "1082790149": 619, "1088634448": 619, "1000000000": 619, "0047": 619, "0310j": 619, "5316j": 619, "7472j": 619, "8649j": 619, "0334j": 619, "8499j": 619, "9913j": 619, "6123j": 619, "202": 619, "154": [619, 2063], "59": [619, 1954, 1956], "182": 619, "243": [619, 1270, 1928], "253": 619, "188": 619, "185": [619, 2113], "252": [619, 2063], "191": 619, "63": [619, 2063, 2072], "240": 619, "227": 619, "165": 619, "190": 619, "146": 619, "106": 619, "205": 619, "112": [619, 2113], "206": 619, "189": 619, "95": [619, 1805, 1808, 1809], "147": 619, "43": 619, "246": [619, 2113], "87": 619, "235": 619, "226": 619, "254": [619, 2063], "111": [619, 1779, 2067], "117": 619, "177": 619, "28": [619, 749, 1092, 1950, 2065, 2067, 2101, 2102, 2107], "xlogi": [624, 2015, 2068, 2083], "inductor": [683, 976, 978, 2023, 2053, 2068, 2072, 2094, 2095, 2098, 2099, 2100, 2101, 2102, 2104, 2106, 2107, 2110, 2111, 2113, 2115], "dist_c10d": 683, "dist_ddp": [683, 2048], "dist_fsdp": 683, "aot_graph": [683, 2023], "aot_joint_graph": [683, 2023], "ddp_graph": [683, 2023, 2048], "graph_cod": [683, 2023, 2101], "graph_break": [683, 2023, 2098, 2101, 2104, 2105, 2111], "graph_siz": [683, 2101], "recompiles_verbos": 683, "trace_sourc": 683, "trace_cal": 683, "trace_bytecod": 683, "output_cod": [683, 2023, 2104, 2106, 2113], "kernel_cod": 683, "perf_hint": [683, 976], "post_grad_graph": 683, "cudagraph": [683, 976, 1043, 2046, 2094, 2104, 2106, 2107, 2110], "sym_nod": 683, "compiled_autograd_verbos": 683, "toggl": [683, 2046], "suppress": [683, 2017, 2076], "silenc": 683, "lowest": [683, 966, 1417, 1742, 1743, 1751, 1752, 1838, 1839, 2049, 2089], "notset": 683, "torchinductor": [683, 2023, 2094, 2095, 2100, 2102, 2104, 2106], "ddpoptim": [683, 2023], "symnod": [683, 2100], "opter": 683, "unregist": [683, 2023, 2030, 2067], "cosin": [686, 687, 995, 996, 1460, 1461, 1615, 1801, 1802, 1809, 1886, 2069, 2104], "3348": 686, 
"5889": 686, "2005": [686, 1973, 2113], "1584": 686, "2294": [686, 1373], "2004": 686, "3690": 686, "7298": [686, 1846], "hyperbol": [687, 886, 889, 996, 1568, 1895, 1942], "uniform_": [687, 889, 946, 2013, 2015, 2034, 2041, 2049, 2083, 2091], "3192": 687, "9915": 687, "9674": 687, "7151": 687, "7791": 687, "3120": [687, 993], "2979": 687, "1341": 687, "_i": [688, 689, 690, 691, 692, 944, 946, 949, 952, 956, 971, 1104, 1154, 1297, 1299, 1412, 1576, 1820, 1824, 1840, 1843, 1878, 1925, 1980, 2083], "0202": 688, "0985": 688, "3506": [688, 1363], "6056": 688, "3944": 688, "9732": 688, "3497": 688, "6245": [688, 1305], "4022": [688, 1928], "3743": 688, "7724": 688, "5811": 688, "8017": 688, "7695": 688, "3930": 688, "3672": [688, 1008, 1306], "1450": [688, 1909], "6971": 688, "0736": [688, 2057], "0994": 688, "3216": 688, "7845": 688, "1610": 688, "1868": 688, "4090": 688, "9902": [688, 1008, 1306], "3667": [688, 993], "3925": 688, "6147": 688, "sum_": [689, 1270, 1325, 1342, 1436, 1437, 1438, 1454, 1455, 1456, 1462, 1494, 1495, 1496, 1515, 1530, 1534, 1536, 1922, 1923, 1924, 1945, 1950, 1972, 1973, 1974, 2083], "mathbin": [689, 692, 693, 944, 956, 1906], "doubletensor": [689, 690, 691, 692, 693, 944, 1869, 2085, 2088], "tensorfloat32": [689, 692, 944, 956, 1368, 1378, 1454, 1455, 1456, 1457, 1458, 1459, 1514, 1608, 1609, 1610, 1611, 1612, 1613, 1650, 1871, 2046, 2060], "6311": 689, "0503": 689, "9768": [689, 2057], "0362": 689, "1653": 689, "8185": 689, "4255": [689, 1412], "6760": 689, "9453": 689, "5743": 689, "8202": 689, "3691": 689, "0943": 689, "1109": [689, 1480, 1891], "4730": [689, 1945], "histor": [690, 1065, 1466, 2045, 2057, 2062], "2312": [690, 1909], "6496": 690, "1312": 690, "0428": 690, "4292": 690, "1030": 690, "5369": 690, "9829": 690, "0430": 690, "8635": 691, "6391": 691, "6174": 691, "7617": 691, "5879": 691, "7388": 691, "8353": 691, "6249": 691, "6511": 691, "8716": 692, "4671": 692, "3746": 692, "7573": 692, "9555": 692, "8681": 692, "3768": 693, "5565": 693, "otim": [694, 1294, 1477, 1632], "conj": [695, 992, 1125, 1126, 1128, 1130, 1137, 1139, 1143, 1144, 1146, 1303, 1304, 1310, 1312, 1331, 1851, 1852, 2015, 2049, 2054, 2068, 2088, 2108], "mh": [695, 967, 968, 969, 1310, 1928, 2015, 2068, 2086, 2088], "lvert": [697, 1262, 1576, 1670, 2089], "rvert": [697, 1262, 2089], "leq": [697, 945, 946, 993, 1127, 1129, 1262, 1294, 1298, 1313, 1319, 1446, 1468, 1530, 1532, 1534, 1617, 1669, 1672, 1881, 1924, 2041, 2083], "elementwis": [697, 993, 1024, 1051, 1052, 1152, 1154, 1427, 1489, 1490, 1491, 1694, 1971, 2017, 2036, 2054, 2083], "07": [697, 822, 823, 824, 825, 828, 967, 1112, 1311, 1314, 1319, 1331, 1332, 1338, 1361, 1364, 1731, 1780, 1787, 1885, 1890, 1928, 2067], "09": [697, 1787, 1799, 2021, 2089], "8177": 698, "4878": 698, "2491": 698, "9130": 698, "7158": 698, "1775": 698, "0992": 698, "4817": 698, "0053": 698, "0164": 698, "3738": 698, "0507": [698, 2067], "9700": 698, "1106": 698, "0318": 698, "0816": [698, 1363], "6451": 699, "4866": [699, 2095], "2987": 699, "3312": 699, "5744": 699, "2980": [699, 2036], "8397": 699, "2713": 699, "9128": 699, "9214": 699, "7268": 699, "2995": 699, "9023": [699, 1304], "4853": 699, "9075": 699, "6165": 699, "180": [701, 1093, 1835], "14159": [701, 1943], "135": 701, "45": [701, 1442, 1443, 1490, 1491, 1567, 1765, 2067], "ao": [703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 
745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 2072, 2073, 2074], "batch_norm": [703, 704, 1283, 2015, 2068], "fuse": [703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 726, 727, 728, 729, 730, 731, 732, 795, 796, 805, 807, 808, 809, 817, 851, 863, 864, 976, 1287, 1288, 1685, 1724, 1725, 1726, 1727, 1728, 1729, 1783, 1784, 1785, 1797, 1871, 2027, 2034, 2037, 2067, 2069, 2072, 2073, 2074, 2075, 2104, 2106, 2112], "bn": [705, 706, 707, 708, 709, 710, 817, 864, 1109, 1283, 1726, 1728, 2062, 2069, 2072, 2073], "qat": [715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 733, 734, 735, 736, 795, 796, 850, 2072, 2073, 2074], "dilat": [715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 741, 742, 743, 744, 745, 746, 775, 776, 777, 785, 786, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1503, 1504, 1505, 1506, 1507, 1508, 1520, 1521, 1522, 1579, 1608, 1609, 1610, 1611, 1612, 1613, 1627, 1658, 1659, 1660, 1703, 1832, 1833, 2015, 2067, 2108], "padding_mod": [715, 716, 717, 718, 719, 720, 721, 722, 728, 729, 730, 733, 734, 741, 742, 743, 744, 745, 746, 775, 776, 777, 1454, 1455, 1456, 1457, 1458, 1459, 1503, 1504, 1505, 1506, 1507, 1508, 1633, 2015, 2108], "momentum": [715, 716, 717, 718, 719, 720, 726, 727, 739, 740, 754, 755, 756, 1441, 1442, 1443, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1567, 1603, 1643, 1788, 1795, 1797, 1803, 1809, 1811, 2015, 2053, 2057, 2059, 2069, 2108], "freeze_bn": [715, 716, 717, 718, 719, 720], "qconfig": [715, 716, 717, 718, 719, 720, 721, 722, 723, 733, 734, 735, 736, 791, 792, 794, 797, 798, 799, 800, 841, 842, 843, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 862, 863, 865, 866, 2093], "batchnorm1d": [715, 718, 1500, 1567, 1603, 2062, 2074], "fakequant": [715, 716, 717, 718, 719, 720, 721, 722, 723, 733, 734, 735, 736, 805, 866], "weight_fake_qu": [715, 716, 717, 718, 719, 720, 721, 722, 733, 734], "quant": [715, 716, 717, 718, 719, 720, 721, 722, 723, 733, 734, 735, 759, 793, 795, 796, 800, 841, 865, 866, 1123, 1124, 2072, 2073], "batchnorm3d": [717, 720, 722, 727, 1502, 1567, 1603, 2074], "num_featur": [726, 727, 739, 740, 754, 755, 756, 1273, 1441, 1442, 1443, 1489, 1490, 1491, 1500, 1501, 1502, 1509, 1510, 1511, 1527, 1567, 1635, 2057], "qint8": [731, 732, 759, 764, 767, 768, 775, 776, 777, 784, 795, 797, 802, 823, 845, 862, 863, 865, 866, 1778, 1829, 1830, 2072, 2074, 2075, 2084, 2088, 2089], "from_float": [735, 741, 742, 743, 748, 749, 759, 767, 800, 820, 826, 829, 841, 2072], "use_precomputed_fake_qu": [735, 741, 742, 743, 748, 749, 759, 767, 800, 868], "qparams_dict": [735, 741, 742, 743, 767], "hidden_s": [737, 763, 764, 768, 1478, 1479, 1497, 1498, 1543, 1544, 1545, 2015], "num_lay": [737, 763, 1478, 1497, 1543, 1544, 1572, 1574, 2015, 2057], "batch_first": [737, 738, 763, 1478, 1497, 1533, 1543, 1544, 1571, 1573, 1575, 1759, 1761, 1762, 1764, 2015, 2051], "bidirect": [737, 763, 1478, 
1497, 1543, 1544, 2015], "_lstmlayer": 737, "nnqa": 737, "h0": [737, 763, 765, 1478, 1497, 1543], "c0": [737, 765, 1497, 2113], "hn": [737, 763, 765, 1478, 1479, 1497, 1543], "cn": [737, 765, 1273, 1497, 1840], "weight_ih": [737, 1479, 1498, 1543, 1545], "weight_hh": [737, 1479, 1498, 1543, 1545], "embed_dim": [738, 1533], "num_head": [738, 1533, 1587, 2015], "add_bias_kv": [738, 1533], "add_zero_attn": [738, 1533, 2015], "kdim": [738, 1533], "vdim": [738, 1533], "dequant": [738, 791, 793, 795, 796, 802, 804, 865, 2015, 2068, 2073, 2075, 2089, 2092], "mha": [738, 1533, 2013], "conver": 738, "key_padding_mask": [738, 1533, 2015], "need_weight": [738, 1533, 2015], "attn_mask": [738, 1533, 1571, 1685, 2015], "average_attn_weight": [738, 1533, 2015], "is_caus": [738, 1533, 1574, 1575, 1590, 1685, 2015], "attn_output_weight": [738, 1533], "unmask": [738, 1571], "attn_weight": [738, 1533, 1685], "head": [738, 1431, 1533, 1571, 1573, 1575], "attn_output": [738, 1533], "quint8": [741, 742, 743, 744, 745, 746, 748, 749, 759, 775, 776, 777, 784, 795, 797, 802, 822, 823, 824, 825, 828, 830, 863, 865, 1778, 1829, 1830, 1831, 1832, 1833, 2072, 2074, 2075, 2084, 2088, 2089], "learnabl": [741, 742, 743, 744, 745, 746, 748, 749, 759, 763, 767, 1441, 1442, 1443, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1469, 1470, 1478, 1479, 1481, 1489, 1490, 1491, 1497, 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1514, 1535, 1542, 1543, 1545, 1567, 1625, 1678, 1716, 2057, 2069], "q_input": [741, 742, 743, 744, 745, 746, 775, 776, 777], "quantize_per_tensor": [741, 742, 743, 744, 745, 746, 759, 760, 761, 775, 776, 777, 1831, 1832, 1833, 2015, 2068, 2072], "unequ": [742, 743, 744, 745, 746, 1455, 1456, 1458, 1459], "50": [742, 744, 745, 746, 775, 827, 1329, 1437, 1438, 1446, 1454, 1455, 1456, 1458, 1459, 1474, 1475, 1494, 1495, 1496, 1520, 1521, 1522, 1578, 1610, 1611, 1613, 1617, 1628, 1629, 1796, 1940, 2036, 2046, 2087, 2104], "56": [743, 1963, 2082, 2107], "output_pad": [744, 745, 746, 1457, 1458, 1459, 1506, 1507, 1508, 1611, 1612, 1613, 2015, 2108], "qnnpack": [744, 745, 759, 767, 859, 860, 863, 865, 866, 2072, 2074], "convtranspose2d": [744, 1507, 1612, 1724, 1965, 2074], "nnq": [744, 745, 746, 791, 792, 793, 868, 2072], "downsampl": [744, 745, 746, 1458, 1580, 1598, 1633, 1644], "upsampl": [744, 745, 746, 782, 789, 790, 822, 1458, 1581, 1582, 1598, 1633, 1644, 1705, 1706], "fbgemm": [745, 746, 759, 767, 836, 859, 860, 863, 865, 866, 2021, 2072, 2073, 2104, 2105], "cubic": [746, 1475, 1629, 1633], "num_embed": [748, 749, 1469, 1470, 1625], "embedding_dim": [748, 749, 1469, 1470, 1499, 1624, 1625], "padding_idx": [748, 1469, 1470, 1624, 1625, 2015, 2108], "scale_grad_by_freq": [748, 749, 1469, 1470, 1624, 1625, 2015, 2108], "_weight": [748, 749, 1469, 1470, 1788, 1794, 2112], "overwritten": [748, 749, 759, 767, 800, 845, 865, 2043, 2049, 2089, 2098], "_embed": [748, 749], "_dim": [748, 749, 1469], "include_last_offset": [749, 1470, 1625, 2015, 2108], "embedding_bag": [749, 2015, 2068], "floatfunct": [750, 2072], "activation_post_process": [750, 802, 2072], "add_relu": [750, 751, 760, 2027, 2068, 2092], "add_scalar": [750, 751, 760, 2068, 2087, 2092], "mul_scalar": [750, 751, 760, 2068, 2092], "collector": 751, "f_add": 751, "num_channel": [752, 1481, 2035], "normalized_shap": [757, 1499, 1542, 1647, 1682, 1716, 2015, 2036, 2108], "elementwise_affin": [757, 1499, 1542, 1716], "negative_slop": [758, 783, 1513, 1648, 1649, 2015, 2041, 2108], "slope": [758, 783, 1513, 1559, 
2041], "bias_": [759, 767], "_featur": [759, 767, 784, 1431, 1444, 1512, 1514, 1604, 1650], "precomput": [759, 800, 2100], "from_refer": [759, 767], "ref_qlinear": [759, 767], "output_scal": [759, 762, 1831, 2015], "output_zero_point": [759, 762, 1831, 2015], "q_add": 760, "qint32": [760, 761, 1778, 1829, 1830, 2072, 2075, 2084, 2088, 2089], "x_0": [761, 1950], "gate": [763, 764, 1477, 1478, 1479, 1497, 1557, 1632, 1688], "r_t": [763, 1478, 1794], "w_": [763, 1429, 1430, 1433, 1434, 1437, 1438, 1440, 1448, 1449, 1450, 1451, 1452, 1453, 1455, 1456, 1458, 1459, 1462, 1474, 1475, 1478, 1479, 1495, 1496, 1497, 1498, 1521, 1522, 1524, 1525, 1534, 1539, 1540, 1543, 1545, 1549, 1550, 1551, 1552, 1553, 1554, 1580, 1581, 1582, 1583, 1584, 1585, 1629, 1633], "x_t": [763, 899, 903, 904, 909, 910, 1441, 1442, 1443, 1478, 1489, 1490, 1491, 1497, 1543, 1567, 1782], "b_": [763, 1478, 1479, 1497, 1498, 1515, 1543, 1545, 1945, 2082], "hr": [763, 1478, 1479, 1497, 2054], "h_": [763, 1429, 1430, 1433, 1434, 1437, 1438, 1444, 1449, 1450, 1452, 1453, 1455, 1456, 1458, 1459, 1474, 1475, 1478, 1479, 1495, 1496, 1497, 1514, 1521, 1522, 1523, 1524, 1525, 1539, 1540, 1543, 1545, 1550, 1551, 1553, 1554, 1580, 1581, 1582, 1584, 1585, 1604, 1629, 1633], "z_t": [763, 1478], "iz": [763, 1478, 1479], "hz": [763, 1020, 1478, 1479, 2087], "n_t": [763, 1478], "odot": [763, 1478, 1479, 1497, 1498], "h_t": [763, 1478, 1497, 1543], "hadamard": [763, 1478, 1479, 1497, 1498], "multilay": [763, 1478, 1497, 2065], "_t": [763, 1478, 1497, 1795, 1797, 2049, 2069], "b_ih": [763, 1478, 1479, 1497, 1498, 1543, 1545, 2015], "b_hh": [763, 1478, 1479, 1497, 1498, 1543, 1545, 2015], "h_0": [763, 1478, 1497, 1498, 1543], "seq_len": [763, 1478, 1497, 1533, 1543], "pack_padded_sequ": [763, 1478, 1497, 1543, 1758, 1760, 1761, 2051], "num_direct": [763, 1478, 1497, 1543], "h_n": [763, 1478, 1497, 1543], "_size": [763, 1428, 1429, 1430, 1432, 1433, 1434, 1436, 1437, 1438, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1474, 1475, 1478, 1479, 1494, 1495, 1496, 1497, 1498, 1520, 1521, 1522, 1523, 1524, 1525, 1543, 1545, 1579, 1629], "_layer": [763, 1478, 1497, 1543], "_direct": 763, "output1": [763, 1431, 2042, 2067], "output2": [763, 1431], "weight_ih_l": [763, 1478, 1497, 1543], "w_ir": [763, 1478], "w_iz": [763, 1478], "w_in": [763, 1478], "weight_hh_l": [763, 1478, 1497, 1543], "w_hr": [763, 1478], "w_hz": [763, 1478], "w_hn": [763, 1478], "bias_ih_l": [763, 1478, 1497, 1543], "b_ir": [763, 1478], "b_iz": [763, 1478], "b_in": [763, 1478], "bias_hh_l": [763, 1478, 1497, 1543], "b_hr": [763, 1478], "b_hz": [763, 1478], "b_hn": [763, 1478], "mathcal": [763, 1444, 1454, 1455, 1456, 1457, 1458, 1459, 1469, 1470, 1478, 1479, 1497, 1498, 1512, 1514, 1543, 1545, 1546, 1567, 1840, 2041, 2054], "subtli": [763, 1478, 1797], "gru": [764, 1479, 1544, 2015, 2046, 2068, 2072, 2074], "cell": [764, 766, 768, 1478, 1479, 1497, 1498, 1543, 1545], "hx": [764, 766, 768, 1479, 1498, 1545, 2015], "cx": [766, 1498, 2015], "nonlinear": [768, 1439, 1460, 1486, 1543, 1545, 1555, 2013, 2041, 2082], "elman": [768, 1543, 1545], "adaptiveavgpool2d": [769, 1593, 1965, 2074], "adaptiveavgpool3d": [770, 1594, 1965, 2074], "ceil_mod": [771, 772, 785, 786, 1436, 1437, 1438, 1494, 1495, 1496, 1520, 1521, 1522, 1600, 1601, 1602, 1654, 1655, 1656, 1658, 1659, 1660, 1832, 1833, 2015, 2108], "count_include_pad": [771, 772, 1436, 1437, 1438, 1600, 1601, 1602, 2015, 2108], "divisor_overrid": [771, 772, 1437, 1438, 1601, 1602, 2015, 2108], "kh": [771, 772, 776, 777, 1437, 1438, 1474, 1475, 
1521, 1522, 1601, 1602, 1609, 1610, 1612, 1613, 1628, 1629, 1659, 1660], "kw": [771, 772, 776, 777, 1437, 1438, 1474, 1475, 1521, 1522, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1628, 1629, 1658, 1659, 1660], "sh": [771, 772, 776, 777, 1601, 1602, 1609, 1610, 1612, 1613, 1659, 1660, 2053, 2113], "sw": [771, 772, 775, 776, 777, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1658, 1659, 1660], "avgpool2d": [771, 1601, 2074], "_channel": [771, 772, 775, 776, 777, 1454, 1455, 1456, 1457, 1458, 1459, 1481, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1658, 1659, 1660, 2077], "ih": [771, 772, 776, 777, 1543, 1545, 1601, 1602, 1609, 1610, 1612, 1613, 1659, 1660], "iw": [771, 772, 775, 776, 777, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613, 1658, 1659, 1660], "padh": [771, 772, 776, 777, 1601, 1602, 1609, 1610, 1612, 1613], "padw": [771, 772, 775, 776, 777, 1600, 1601, 1602, 1608, 1609, 1610, 1611, 1612, 1613], "kd": [772, 777, 1438, 1522], "padd": [772, 777], "formul": [773, 1445, 1468, 1482, 1517, 1564, 1565, 1652, 1670, 1685, 2054, 2082], "min_": [774, 1309, 1310, 1319, 1337, 1928], "max_": [774, 1520, 1521, 1522, 1732, 1766], "convolv": [775, 776, 777, 1454, 1455, 1456, 1457, 1458, 1459, 1503, 1504, 1505, 1506, 1507, 1508, 1608, 1609, 1610, 1611, 1612, 1613], "dw": [775, 776, 777, 1608, 1609, 1610, 1611, 1612, 1613], "qf": [775, 776, 777], "dtype_input": [775, 776, 777], "dtype_filt": [775, 776, 777], "q_filter": [775, 776, 777], "dh": [776, 777, 1609, 1610, 1612, 1613], "dd": 777, "scale_factor": [782, 788, 789, 790, 1580, 1581, 1582, 1644, 1685, 1704, 1705, 1706, 2015, 2108], "align_corn": [782, 788, 789, 1580, 1581, 1598, 1633, 1644, 1704, 1705, 2015, 2108], "height": [782, 788, 1437, 1438, 1455, 1456, 1458, 1459, 1495, 1496, 1521, 1522, 1534, 1562, 1580, 1644, 1704, 2035], "spatial": [782, 788, 789, 790, 964, 1129, 1442, 1473, 1499, 1539, 1540, 1562, 1579, 1580, 1581, 1582, 1598, 1606, 1633, 1644, 1674, 1675, 1676, 1704, 1705, 1706], "pixel": [782, 788, 790, 1455, 1462, 1465, 1466, 1467, 1471, 1534, 1539, 1540, 1580, 1598, 1633, 1644, 1704, 1706], "corner": [782, 788, 955, 1580, 1588, 1589, 1590, 1598, 1633, 1644, 1704, 2065], "leakyrelu": [783, 1528, 1648, 2057, 2074], "_slope": [783, 1513, 1648, 2041], "xa": [784, 1323, 1334, 1335, 1336, 1514, 1650], "return_indic": [785, 786, 1432, 1433, 1434, 1474, 1475, 1520, 1521, 1522, 1523, 1524, 1525, 1595, 1596, 1597, 1628, 1629, 1658, 1659, 1660, 2015], "maxpool1d": [785, 1523, 1658, 1661, 2074], "linearli": [788, 1174, 1332, 1580, 1704, 1806, 1827, 2051, 2069, 2075], "neighbour": [790, 1515, 1644, 1706], "stub": [791, 792, 2070], "calibr": [791, 792, 800, 841, 842, 861, 863, 865, 2042, 2072, 2073, 2075, 2093], "quantstub": [793, 2072], "dequantstub": [793, 2072], "quantwrapp": 794, "backend_config": [795, 796, 797, 798, 799, 820, 821, 863, 864, 865, 866, 2093], "backendpatternconfig": [795, 865], "blob": [795, 1167, 2054, 2056, 2087, 2112], "dtypeconfig": [795, 796, 798, 865], "observationtyp": [795, 796, 865, 2074], "weighted_int8_dtype_config": [795, 865], "input_dtyp": [795, 797, 865, 2074], "output_dtyp": [795, 797, 865, 2074, 2108], "weight_dtyp": [795, 797, 865, 2074], "bias_dtyp": [795, 797, 2074], "fuse_conv2d_relu": 795, "is_qat": [795, 796], "convrelu2d": [795, 2074], "linear_config": 795, "set_observation_typ": [795, 796, 865], "output_use_different_observer_as_input": [795, 796, 799, 865, 2074], "add_dtype_config": [795, 796, 865], "set_root_modul": [795, 796], "set_qat_modul": [795, 796], 
"set_reference_quantized_modul": [795, 796], "conv_relu_config": 795, "set_fused_modul": [795, 796], "set_fuser_method": [795, 796], "fused_conv_relu_config": 795, "my_backend": [795, 2099], "set_backend_pattern_config": [795, 865], "from_dict": [795, 796, 797, 818, 819, 820, 858], "backend_config_dict": [795, 2072], "set_nam": 795, "to_dict": [795, 796, 797, 818, 819, 820, 858], "backendconfig": [796, 821, 863, 865, 2075], "dtype_config": [796, 2074], "backend_pattern_config_dict": 796, "observation_typ": [796, 2074], "qat_modul": [796, 2074], "reference_quantized_modul": 796, "fused_modul": [796, 2074], "fuser_method": [796, 817, 2074], "pattern_complex_format": 796, "set_dtype_config": 796, "fuser": [796, 1287, 2014], "fuse_linear_relu": 796, "linearrelu": [796, 2074], "8bea7180a8ba3c279f2c9b050f2a69a6": 796, "understood": [796, 963], "output_share_observer_with_input": [796, 799, 2074], "renam": [796, 1966, 1968, 2015, 2021, 2034, 2035, 2068, 2107], "quantdequantstub": 796, "set_pattern": 796, "is_dynam": [797, 802, 822, 823, 824, 825, 827, 828, 829, 2074], "quant1": 797, "dequant1": 797, "fp32_linear": 797, "quant2": 797, "dequant2": 797, "bracket": [797, 2078], "dtype_config1": 797, "dtype_config2": 797, "dtypewithconstraint": [797, 2074], "quant_min_lower_bound": [797, 798, 2074], "quant_max_upper_bound": [797, 798, 2074], "255": [797, 805, 1123, 1124, 1163, 1633, 1644, 1704, 2072, 2074, 2087], "input_dtype_with_constraint": 797, "scale_min_lower_bound": [797, 798, 2074], "scale_max_upper_bound": [797, 798, 2074], "dtype_config_dict": 797, "bias_typ": [797, 865], "scale_exact_match": [798, 2074], "zero_point_exact_match": [798, 2074], "quant_min": [798, 802, 805, 822, 823, 824, 825, 828, 829, 1123, 1124, 2015, 2072], "quant_max": [798, 802, 805, 822, 823, 824, 825, 828, 829, 1123, 1124, 2015, 2072], "fixedqparamsobserv": 798, "fixedqparamsfakequant": 798, "input_output_not_observ": [799, 2074], "remove_qconfig": 800, "is_refer": 800, "convert_custom_config_dict": [800, 818, 2072], "from_observ": [800, 818, 2072], "observed_to_quantized_custom_module_class": [800, 818, 2072], "observedcustommodul": [800, 818, 820, 841, 2072], "quantizedcustommodul": [800, 818], "calib_data": 801, "fake_quant": [802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 866, 1123, 1124, 2093], "movingaverageminmaxobserv": [802, 805, 825], "observer_kwarg": [802, 805], "x_out": [802, 805], "quanti": 802, "choose_qparam": 802, "dq": 802, "fake_quant_en": 802, "observer_en": 802, "calculate_qparam": [803, 822, 823, 827], "extra_repr": [804, 1273, 1527, 1542, 1716, 2049], "qscheme": [805, 822, 823, 824, 825, 828, 829, 830, 2015, 2068, 2072, 2075, 2089], "fake_qu": [806, 810, 811, 812, 2072], "default_fake_qu": 807, "default_per_channel_weight_fake_qu": 808, "default_weight_fake_qu": 809, "histogram": [810, 822, 834, 1234, 1236, 2015, 2068, 2087], "memoryless": [811, 812], "averaging_const": [811, 812, 824, 825, 2015], "modules_to_fus": 817, "fuser_func": 817, "fuse_known_modul": 817, "fuse_custom_config_dict": [817, 819], "convmodul": 817, "bnmodul": 817, "convbnmodul": 817, "additional_fuser_method_map": 817, "fuse_conv_bn": [817, 2074], "conv1": [817, 1285, 1527, 1556, 2014, 2065, 2072, 2087], "bn1": 817, "relu1": [817, 1556, 1707], "fused_m": 817, "custom_config": [818, 819, 820, 821], "convert_fx": [818, 2072, 2093], "convert_custom_config": [818, 863, 2072], "set_observed_to_quantized_map": 818, "set_preserved_attribut": [818, 819, 820], "attr1": [818, 819, 820, 2067], "attr2": [818, 
819, 820, 2067], "floatcustommodul": [818, 820], "weight_onli": [818, 863, 2026, 2072], "preserved_attribut": [818, 819, 820], "observed_class": [818, 820], "quantized_class": 818, "quant_typ": [818, 820], "quanttyp": [818, 820], "fuse_fx": [819, 2072], "fuse_custom_config": [819, 864], "convertcustomconfig": [819, 863], "prepare_fx": [820, 843, 863, 866, 2072, 2093], "prepare_qat_fx": [820, 863, 2072], "prepare_custom_config": [820, 821, 865, 866, 2072], "set_standalone_module_nam": 820, "module1": [820, 858, 2046], "qconfig_map": [820, 821, 858, 859, 860, 863, 865, 866, 2072], "child_prepare_custom_config": 820, "set_standalone_module_class": 820, "mystandalonemodul": 820, "set_float_to_observed_map": 820, "set_non_traceable_module_nam": 820, "module2": [820, 858, 2046], "module3": [820, 2046], "set_non_traceable_module_class": 820, "nontraceablemodule1": 820, "nontraceablemodule2": 820, "set_input_quantized_index": 820, "set_output_quantized_index": 820, "prepare_custom_config_dict": [820, 841, 843, 2072], "standalone_module_nam": 820, "standalone_module_class": 820, "module_class": 820, "float_to_observed_custom_module_class": [820, 841, 2072], "non_traceable_module_nam": 820, "non_traceable_module_class": 820, "input_quantized_idx": 820, "output_quantized_idx": 820, "float_class": 820, "qconfigmap": [821, 859, 860, 863, 865, 2072, 2075], "preparecustomconfig": [821, 865], "2048": [822, 1571, 1573, 1575, 2046], "upsample_r": 822, "per_tensor_affin": [822, 823, 824, 1830, 1831, 1832, 1833, 2072, 2075], "reduce_rang": [822, 823, 824, 825, 828, 830, 2015, 2072, 2073], "factory_kwarg": [822, 823, 828], "1920928955078125e": [822, 823, 824, 825, 828], "finfo": [822, 823, 824, 825, 828, 1328, 1331, 1427, 1542, 1716, 2013, 2089], "minmaxobserv": [822, 824, 828, 845, 865, 866, 2075], "x_": [823, 824, 998, 1119, 1347, 1348, 1350, 1353, 1361, 1440, 1462, 1517, 1534, 1561, 1563, 1691, 1905, 1907, 1950, 2043, 2075, 2083], "q_": [823, 2075], "x_orig": 823, "reset_min_max_v": [823, 828], "ch_axi": [825, 828, 2015], "per_channel_affin": [825, 828, 1829, 2072, 2075], "custom_op_nam": [826, 829], "with_arg": [827, 845, 865, 866], "_callable_arg": 827, "_with_arg": 827, "foo_build": 827, "foo_instance1": 827, "foo_instance2": 827, "with_callable_arg": 827, "_with_callable_arg": 827, "cur_tim": 827, "get_time_func": 827, "dan": 827, "creation_tim": 827, "compute_dtyp": 829, "ptq": [834, 2072, 2073, 2075], "obs_dict": 840, "get_observer_state_dict": 840, "allow_list": [841, 2092], "observer_non_leaf_module_list": 841, "preemptiv": [841, 842, 1717], "propagate_qconfig_": 843, "qconfig_dict": [843, 858], "pt2e": 844, "export_util": 844, "my_qconfig": 845, "default_observ": 845, "default_qat_config": 851, "set_glob": [858, 863, 865, 2072], "set_object_typ": [858, 863, 865], "set_module_name_regex": 858, "regex": 858, "set_module_nam": [858, 863, 865], "set_module_name_object_type_ord": 858, "global_qconfig": 858, "qconfig1": 858, "qconfig2": 858, "qconfig3": 858, "object_typ": 858, "module_name_regex": 858, "module_name_object_type_ord": 858, "conv0": 858, "x86": [859, 860, 1872, 2072, 2074], "run_arg": [861, 867], "qconfig_spec": 862, "quantize_fx": [863, 864, 865, 866, 2072, 2093], "_remove_qconfig": 863, "qconfig_from_prepar": 863, "prepared_model": [863, 865, 866], "xnnpack": [863, 2027, 2072], "get_default_backend_config": [863, 865, 866], "quantized_model": 863, "fusion_pattern": 864, "fusecustomconfig": 864, "_equalization_config": 865, "get_default_qconfig_map": [865, 2072], "float_model": [865, 866, 
2072, 2092], "data_load": [865, 866, 1803, 1809, 2059, 2072], "get_default_qconfig": [865, 866, 2072], "linear_pattern_config": 865, "suer": 865, "sample_inference_data": [865, 2072], "get_default_qat_qconfig_map": [866, 2072], "load_weight": 866, "train_data": 866, "get_default_qat_qconfig": [866, 2072], "custom_module_class_map": 868, "lceil": [869, 965], "rceil": [869, 965], "adjac": [869, 955, 1465, 1466, 1467, 1471, 1843, 2082, 2111], "set_default_dtyp": [869, 945, 954, 1110, 1112, 1122, 1127, 1145, 1161, 1164, 1220, 1231, 1232, 1293, 1776, 1836, 1840, 1843, 1869, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2010], "get_default_dtyp": [869, 1344, 1360, 1778, 1843, 2084, 2085, 2118], "set_default_devic": [869, 945, 954, 1110, 1112, 1122, 1127, 1145, 1161, 1164, 1231, 1232, 1293, 1344, 1360, 1776, 1836, 1838, 1840, 1842, 1843, 1869, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1910, 1911, 1912, 1913, 1914, 1915, 1954, 1956, 2010, 2085], "5000": [869, 884, 890, 891, 892, 947, 971, 1127, 1129, 1130, 1137, 1140, 1145, 1155, 1157, 1158, 1159, 1227, 1233, 1236, 1299, 1325, 1344, 1418, 1420, 1451, 1452, 1470, 1580, 1828, 1832, 1833, 1834, 1843, 1847, 2083, 2088], "maxim": [878, 1371, 1460, 1523, 1524, 1525, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 2082, 2104, 2113], "3398": 878, "2663": [878, 2046], "2686": 878, "2450": 878, "7401": 878, "8805": 878, "3402": 878, "1936": 878, "4907": [878, 1361], "3948": [878, 993], "0691": 878, "3132": 878, "6092": 878, "5419": 878, "2993": [878, 1927], "3195": 878, "1139": 879, "2254": 879, "1381": [879, 1927], "3687": 879, "1975": [879, 2036], "0102": 879, "4732": 879, "9240": 879, "1207": [879, 1412], "7506": 879, "0213": 879, "7809": 879, "2960": 879, "9384": 879, "1438": 879, "ascend": [880, 1129, 1310, 1312, 1341, 1396, 1863, 1900, 1961], "0785": 880, "5267": 880, "8521": 880, "4065": 880, "1598": 880, "0788": 880, "0745": 880, "2700": 880, "2208": 880, "0722": 880, "7064": 880, "2564": 880, "0669": 880, "2318": 880, "8229": 880, "9280": 880, "lexicograph": [881, 1771, 2017, 2082], "9039": 882, "6291": 882, "0795": [882, 1828, 2057], "1586": 882, "1939": [882, 2049], "4900": 882, "1909": 882, "7503": 882, "9355": 882, "histori": [883, 884, 1176, 1787, 1943, 2013, 2024, 2036, 2049, 2051, 2078], "dlpack": [884, 1160, 2013], "frombuff": [884, 1867], "data_ptr": [884, 930, 2021, 2034, 2068, 2084, 2086], "addbackward0": [884, 2057, 2062], "__array_interface__": [884, 2104], "5962": 885, "4985": 885, "4396": 885, "4525": [885, 2036], "6387": 885, "4552": 885, "sine": [886, 1883, 1893, 1895], "1606": 886, "4267": 886, "0899": 886, "0250": 886, "1599": 886, "1534": 886, "9435": 886, "8990": [886, 1104], "arctang": [887, 888], "2341": 887, "2539": 887, "6256": 887, "6448": 887, "2299": 887, "2487": 887, "5591": [887, 916], "5727": 887, "quadrant": 888, "9041": [888, 964], "0196": [888, 964], "3108": [888, 964], "4423": [888, 964], "9833": 888, "0811": 888, "9743": 888, "4151": 888, "tangent": [889, 898, 902, 903, 1173, 1174, 1568, 1941, 1942], "9385": 889, "2968": 889, "8591": 889, "1871": 889, "7253": 889, "3060": 889, "2899": 889, "1893": 889, "needs_input_grad": [893, 909, 910, 2049], "setup_context": [894, 904, 905, 906, 907, 908, 909, 910, 2021, 2050], "save_for_forward": [894, 904, 909, 910, 2050], "grad_input": [895, 904, 906, 908, 909, 910, 928, 1273, 1527, 2049, 2057], "underneath": [896, 909, 910], "generate_vmap_rul": [896, 909, 910, 2050], "out_dim": [896, 909, 910, 1178, 
1977, 2015, 2035, 2050], "grad_tensor": [897, 918, 2015, 2046], "grad_vari": 897, "forward_ad": [898, 899], "primal": [898, 903, 1169, 1173, 1174, 1177], "unpack_du": [898, 899, 902], "dual": [898, 899, 900, 902, 903, 1770, 2050], "make_du": [899, 903, 904, 909, 910], "your_fn": 899, "grad_aft": 899, "dual_level": [902, 903, 904, 909, 910], "apply_jvp": 904, "mark_dirti": [904, 909, 910, 930, 2049], "x_npy": [904, 905, 909], "once_differenti": [904, 905, 906, 907, 908, 909, 910, 2049], "mark_non_differenti": [904, 909, 910, 2049, 2050], "g1": [904, 906, 908, 909, 910, 2046, 2079], "g2": [904, 906, 908, 909, 910, 2046, 2079], "weren": [904, 907, 909], "grad_out": [904, 907, 909, 910, 2015, 2054, 2108], "gx": [904, 907, 909], "gy": [904, 907, 909], "gz": [904, 907, 909, 2071], "y_t": [904, 909, 910], "fwad": [904, 909, 910], "a_dual": [904, 909, 910], "set_materialize_grad": [904, 909, 910, 2049], "simplefunc": [904, 908, 909, 910], "induc": [904, 908, 909, 910, 1633, 1672, 2041, 2100], "backward_extend": 910, "forward_extend": 910, "outer_jacobian_strategi": 912, "disconnect": [912, 913, 914, 915, 916, 917], "cliff": [912, 914, 918], "_debug_only_display_vmap_fallback_warn": [912, 918], "pow_reduc": [912, 913, 916], "2265": 912, "8221": 912, "9456": [912, 946], "2550": 912, "viewbackward": [912, 914, 2052], "pow_adder_reduc": [912, 913, 916], "func_output": [913, 915, 916, 917], "1448": 913, "0239": 913, "6456": 913, "4988": 913, "4310": 913, "sumbackward0": [913, 916, 2049], "3030": 913, "vhp": 913, "batched_grad": 914, "exp_reduc": [914, 915, 917], "4917": 914, "4352": 914, "4369": 914, "3799": 914, "exp_add": 914, "8052": 914, "3963": 914, "3090": 915, "6742": 915, "9114": 915, "2106": 915, "sumbackward1": [915, 917], "squeezebackward1": 915, "adder": [915, 917], "2399": 915, "5005": 915, "0689": 916, "2431": 916, "0989": 916, "4456": 916, "8053": [916, 1858], "7817": 917, "2458": 917, "7830": 917, "7782": 917, "4458": 917, "3962": 917, "3042": [917, 1331], "6354": 917, "1288": [917, 1886, 1921], "0652": 917, "5483": 917, "5035": 917, "2046": [917, 993, 2049], "1292": 917, "1432": 917, "3059": 917, "3225": 917, "6652": 917, "7753": 917, "0152": 917, "4225": 917, "3340": 917, "only_input": 918, "allow_unus": [918, 2015], "is_grads_batch": 918, "materialize_grad": 918, "require_grad": [918, 2017, 2043], "inferencemod": [919, 2043, 2095], "bump": 919, "_version": [919, 2068], "doubler": [919, 1113, 1770], "is_train": [920, 2091], "gradgradcheck": [922, 923, 2049], "06": [923, 924, 1314, 1319, 1320, 1331, 1332, 1337, 1480, 1536, 1576, 1579, 1630, 1701, 1782, 1796, 1928, 2015, 2089], "raise_except": [923, 924, 2021], "nondet_tol": [923, 924], "check_undefined_grad": [923, 924], "check_grad_dtyp": [923, 924], "check_batched_grad": [923, 924], "check_batched_forward_grad": 923, "check_forward_ad": 923, "check_backward_ad": 923, "fast_mod": [923, 924, 2054], "differenc": [923, 2049], "perturb": [923, 924, 2054], "gen_non_contig_grad_output": 924, "check_fwd_over_rev": 924, "check_rev_over_rev": 924, "noncontigu": [924, 1051, 2089], "inaccuraci": 924, "clonebackward0": 926, "gi": [928, 929, 2057], "removablehandl": [928, 929, 1273, 1527, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 2070], "88446": 932, "profilerstep": 932, "optimizer1step": 932, "optimizer2step": 932, "optimizer1": [932, 2042], "current_step": 932, "erase_step_count": 932, "increment_step": 932, "_kineto_step": 932, "init_step_count": 932, 
"eventlist": [935, 936], "group_by_stack_n": [936, 2071], "roof": 936, "functioneventavg": [936, 938], "node_id": 939, "77": 939, "47": 939, "470u": 939, "73": 939, "465u": 939, "03": [939, 967, 1351, 1884, 1885, 1890], "121": 939, "891u": 939, "324u": 939, "421u": 939, "503u": 939, "234": [939, 2021], "344u": 939, "000u": 939, "profiler_util": [940, 941, 942, 943], "elapsed_u": 940, "mem_record": 942, "in_interv": 942, "start_u": 942, "end_u": 942, "shallow": [943, 1273, 1463, 1527, 1576, 1577, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "default_factori": [943, 2112], "__missing__": 943, "fromkei": [943, 1537], "keyerror": 943, "popitem": [943, 1537, 2068], "lifo": 943, "setdefault": [943, 1537, 2068], "window_length": [945, 954, 1231, 1232, 1293, 2015], "2n": [945, 1881, 2082], "_length": [945, 954, 1231, 1232, 1270, 1446, 1924], "sim": [946, 1541, 1677, 1820, 1840], "pseudorandom": [946, 1413, 1773, 1817, 1820, 1836, 1838, 1840, 1842, 1929], "1737": 946, "0950": [946, 1906], "3609": 946, "7148": 946, "0289": [946, 1953], "2676": 946, "8937": 946, "7202": 946, "2500": [947, 1127, 1129, 1130, 1140, 1145, 1344, 1580, 1834], "7500": [947, 1130, 1140, 1159, 1227, 1329, 1344, 1580, 1834, 1889], "AND": [948, 1192, 1355, 2017, 2043], "OR": [951, 1209, 1357, 2017, 2036], "xor": [953, 1358, 2017], "blackman": [954, 1891], "arrang": 955, "broadcast_tensor": [957, 2015, 2068], "out_int32": [960, 1863, 2015], "opposit": [960, 1130, 1132, 1571, 1928, 1949], "formal": [960, 1863, 2024, 2035, 2052, 2081], "eg": [960, 1856, 1863, 2112], "from_": [961, 2015], "tensor_a": [962, 975], "tensor_b": 962, "6580": 963, "0969": 963, "4614": 963, "1034": [963, 1109], "5790": 963, "1497": 963, "compute_mod": [964, 2108], "use_mm_for_euclid_dist_if_necessari": 964, "distanc": [964, 1270, 1309, 1310, 1337, 1486, 1536, 1576, 1577, 1674, 1702, 1803, 1924, 1928, 2013], "infti": [964, 1270, 1325, 1439, 1494, 1495, 1496, 1559, 1674, 1794, 2083], "use_mm_for_euclid_dist": 964, "donot_use_mm_for_euclid_dist": 964, "minkowski": [964, 1674], "ham": [964, 1231, 1674, 1887], "closest": [964, 1674], "xn": [964, 1674], "4821": [964, 967], "059": 964, "0590": 964, "1763": [964, 1846], "4713": [964, 1846], "6986": [964, 1846], "3702": [964, 1846], "1193": [964, 1363], "0959": 964, "7138": 964, "8322": 964, "2830": [964, 1955], "3791": 964, "6341": 965, "4208": 965, "0900": 965, "5826": 965, "clr": [966, 1803], "3375": 966, "9790": 966, "1119": 966, "6577": 966, "5609": [966, 1624], "5095": 966, "2614": 966, "4038": 966, "3378": [966, 1955], "4982": 966, "2457": [966, 1376], "2561": 966, "4684": 966, "7163": 966, "9647": 966, "8917": [966, 1354], "3213": [966, 1350], "2284": [966, 1089], "8615": 966, "2816": 966, "tu": 967, "mt": [967, 1303, 1310, 1312, 1316, 1317, 1318, 1323, 1332, 1827, 1928, 2015, 2024, 2068, 2086, 2088], "4112": 967, "7486": 967, "4551": 967, "3544": 967, "6724": 967, "5528": 967, "0592": [967, 2057], "9371": 967, "5487": 967, "7023": 967, "3842e": [967, 1319], "hermitian": [968, 969, 994, 1125, 1126, 1128, 1130, 1131, 1132, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1303, 1304, 1309, 1310, 1312, 1316, 1317, 1318, 1328, 1331, 1337, 2015], "9314": 968, "2251": [968, 1008, 1306, 1348], "0889": 968, "4439": 968, "2122": 968, "1412": 968, "6358e": 968, "lh": [969, 1319], "1625": 969, "6097": 969, "8398": 969, "2387": [969, 994], "3771": [969, 1320], "4173": 969, "1626": [969, 1008, 1306], "6881e": 969, "tensor_split": [970, 1107, 1237, 1978, 2015, 2068, 2086], 
"min_valu": [971, 1485], "max_valu": [971, 1485, 2015], "_valu": [971, 1431, 1723, 1913, 2049, 2068, 2082], "7120": 971, "1734": [971, 1248], "0478": [971, 1980], "0922": 971, "3333": [971, 1227, 1236, 1580, 1581, 1943], "horizont": [974, 1237, 1239, 2069, 2104], "hstack": [974, 2015, 2068, 2082], "with_replac": [975, 2015], "combinations_with_replac": 975, "fullgraph": [976, 2104, 2113], "cache_size_limit": [976, 2104, 2113], "list_backend": [976, 2094, 2099, 2106], "compiler_custom_backend": 976, "triton": [976, 2050, 2053, 2094, 2098, 2102, 2104, 2106, 2111, 2113], "_inductor": [976, 2104, 2113], "list_mode_opt": 976, "epilogue_fus": 976, "max_autotun": 976, "fallback_random": [976, 2104, 2113], "shape_pad": 976, "graph_diagram": 976, "pictur": 976, "list_opt": 976, "_glibcxx_use_cxx11_abi": [977, 2053], "black": [978, 2105], "throughout": [978, 1758, 2054, 2057, 2072, 2101], "footgun": [978, 2098, 2103], "bypass": [978, 1109, 2021, 2036, 2046, 2049, 2105, 2111], "bullet": [978, 2017], "rand_foo": 981, "compiler_cudagraph_tre": 981, "external_util": 983, "stricter": [984, 1413, 2105], "is_compil": [984, 2105], "exclude_tag": 985, "flip": [991, 1150, 1151, 1341, 2015, 2068, 2069, 2104, 2108], "writeabl": [991, 992], "is_conj": [991, 1851, 2015, 2068], "geq": [993, 1216, 1305, 1313, 1436, 1437, 1438, 1462, 1513, 1534, 1546, 1616, 1669, 1731, 2041, 2083], "signbit": [993, 2015, 2068, 2082, 2108], "2557": 993, "0026": 993, "5387": 993, "4740": 993, "9244": 993, "7079": 993, "2778": 993, "0249": [993, 1349], "5719": 993, "0059": 993, "2600": 993, "4475": 993, "9567": [993, 1309, 1927], "5757": 993, "1751": 993, "0742": 993, "2998": 993, "1054": 993, "2373": 993, "3190": [993, 2046], "1128": [993, 1331, 1497], "pearson": 994, "coeffici": [994, 1231, 1782, 1784, 1785, 1786, 1788, 1794, 1798, 1886, 1887, 1888, 1952], "r_": [994, 1945], "ij": [994, 1109, 1353, 1361, 1375, 1530, 1905], "c_": [994, 1454, 1455, 1456, 1457, 1458, 1459, 1497, 1539, 1540], "jj": 994, "cov": [994, 2015, 2068], "2678": [994, 1469], "0908": 994, "3766": 994, "2780": 994, "5812": 994, "1535": [994, 1469], "2350": 994, "3582": 994, "4309": 995, "2706": 995, "8562": 995, "9796": [995, 1319], "1395": 995, "2957": 995, "6553": 995, "5574": 995, "1632": 996, "1835": 996, "6979": 996, "7325": [996, 1099], "0133": 996, "7860": 996, "2536": 996, "2805": 996, "sleef": [996, 1895], "y_": [998, 1119, 1347, 1348, 1350, 1440, 1462, 1492, 1905, 1950, 2043, 2083], "_w": [998, 1474], "w_i": [998, 1313], "mu_x": 998, "mu_i": [998, 1788], "w_ia_i": 998, "w_ix_": 998, "mathbb": [998, 1227, 1303, 1305, 1309, 1310, 1311, 1312, 1313, 1314, 1319, 1320, 1323, 1325, 1332, 1334, 1336, 1337, 1462, 1534, 1731], "bessel": [998, 1293, 1890, 1922, 1923, 1972, 1973, 2083], "unbias": [998, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1567, 1922, 1923, 1972, 1973, 2015, 2108], "corrcoef": [998, 2015, 2068], "6667": [998, 1236, 1580, 1581, 1848, 1881, 2062], "fw": 998, "4282": 998, "0255": [998, 1096], "4144": [998, 2046], "4169": 998, "streamcontext": [1006, 1084, 1410, 2008, 2013], "abstractcontextmanag": 1006, "3956": [1008, 1306], "1455": [1008, 1306, 2063], "6895": [1008, 1306], "5849": [1008, 1306], "3599": [1008, 1306], "7180": [1008, 1306], "0521": [1008, 1306], "1339": [1008, 1306], "0225": [1008, 1306, 1319], "0257": [1008, 1306], "4725": [1008, 1306], "1479": [1008, 1306], "7005": [1008, 1306], "9757": [1008, 1306], "3904": [1008, 1306], "3726": [1008, 1306], "1836": [1008, 1306], "9688": [1008, 1306], "7153": [1008, 1306, 2083], "2159": 
[1008, 1306], "0844": [1008, 1306], "5281": [1008, 1306], "6120": [1008, 1306], "4490": [1008, 1306], "5687": [1008, 1306], "9792": [1008, 1090, 1306], "8304": [1008, 1306], "3037": [1008, 1306, 2057], "5650": [1008, 1306], "2329": [1008, 1306], "9883": [1008, 1306], "0551": [1008, 1306], "capture_begin": [1009, 2046], "capture_error_mod": [1009, 1043], "make_graphed_cal": [1009, 2046], "graph_pool_handl": [1009, 1043, 1054], "other_graph_inst": [1009, 1043, 1054], "cudastreamcapturemod": [1009, 1043], "thread_loc": [1009, 1043], "cudamalloc": [1009, 1043, 1065, 2046, 2115], "unsaf": [1009, 1043, 1345, 1734, 1737, 2015, 2046, 2070], "capture_end": [1009, 2046], "debug_dump": 1009, "debug_path": 1009, "enable_debug_mod": 1009, "path_to_so_fil": 1010, "alloc_fn_nam": 1010, "free_fn_nam": 1010, "enable_tim": [1011, 1386, 1982, 2046], "interprocess": 1011, "elapsed_tim": [1011, 1386, 1982, 2046], "end_ev": [1011, 1386, 1982, 2046], "elaps": [1011, 1386, 1982, 2030], "from_ipc_handl": 1011, "ipc": [1011, 1047], "ipc_handl": 1011, "cudaeventsynchron": 1011, "cudastreamwaitev": [1011, 1012, 1014], "stream_ptr": 1012, "cudastream_t": [1012, 2046], "record_ev": [1012, 1014, 1983], "cudastreamsynchron": [1012, 1014], "wait_ev": [1012, 1014, 1983], "interoper": 1016, "caching_allocator_delet": 1016, "mem_ptr": 1017, "caching_allocator_alloc": 1017, "peer_devic": 1018, "_cudaalloc": 1019, "clock": 1020, "sm": 1020, "hertz": 1020, "smi": [1020, 1033, 1061, 1067, 1072, 1086, 1087, 2046, 2051, 2055], "buffer_s": 1022, "10485760": 1022, "chunk_siz": [1025, 1172, 1178, 1977], "cublashandle_t": 1026, "unoccupi": [1033, 1385, 1990], "cudamallocasync": [1034, 1065, 2046], "_cudadeviceproperti": 1038, "gencod": 1039, "cuda_graph": 1043, "ordinari": [1045, 1717, 1968, 2042], "code_str": [1051, 1052], "temp": 1051, "typenam": [1051, 1052], "my_kernel": [1051, 1052], "jitted_fn": [1051, 1052], "create_jit_fn": [1051, 1052], "util_fn": 1051, "gelu": [1051, 1557, 1571, 1573, 1575, 1688, 2015, 2036, 2068, 2108], "my_gelu": 1051, "my_lib": [1051, 2021, 2063], "num_output": 1052, "sample_arg": 1054, "num_warmup_it": 1054, "allow_unused_input": 1054, "datadistributedparallel": 1054, "amp": [1054, 1968, 2013, 2042, 2107, 2110, 2113], "autocast": [1054, 1533, 2013, 2046, 2049, 2053, 2113], "insuffici": [1055, 2000, 2100, 2113], "manual_seed_al": [1055, 1968, 2000], "occupi": [1057, 1061, 1073, 1382, 1515, 1651, 2046, 2055, 2118], "reset_peak_memory_stat": [1057, 1059, 1073, 1074], "max_memory_reserv": [1058, 2046, 2055], "cudamemgetinfo": 1060, "memory_reserv": [1062, 2046, 2055], "snapshot": [1064, 2013, 2017, 2046, 2055], "large_pool": 1065, "small_pool": 1065, "allocated_byt": 1065, "reserved_byt": 1065, "active_byt": 1065, "inactive_split": 1065, "inactive_split_byt": 1065, "octob": 1065, "1mb": [1065, 2115], "num_alloc_retri": 1065, "num_oom": 1065, "num_sync_all_stream": 1065, "synchronize_and_free_ev": 1065, "num_device_alloc": 1065, "cumemmap": 1065, "num_device_fre": 1065, "cumemunmap": 1065, "cudafre": [1065, 2046, 2115], "assist": [1065, 2048, 2088], "max_split_s": 1065, "oversize_alloc": 1065, "oversize_seg": 1065, "requested_byt": 1065, "abbrevi": 1066, "percent": [1067, 1087, 2107], "instantan": [1068, 2071], "ascii": [1068, 1071, 1345, 2017, 2071], "sensor": [1072, 1086], "mw": 1072, "milliwatt": 1072, "fermi": 1072, "max_memory_alloc": [1073, 2046, 2055], "max_memory_cach": 1074, "memory_stat": [1075, 2046, 2055], "seed_al": [1076, 2002], "environment": [1078, 2107], "total_memori": [1079, 1393], 
"debug_mod": [1083, 1870], "centigrad": 1086, "x_3": [1088, 1089, 1090, 1091, 1341], "3449": 1088, "5447": 1088, "0685": 1088, "5104": [1088, 2046], "1706": 1088, "2259": 1088, "4696": 1088, "3284": 1088, "9946": 1088, "8209": 1088, "6628": 1089, "0975": 1089, "2680": [1089, 2045], "3298": [1089, 1096], "4220": 1089, "3885": 1089, "1762": 1089, "9165": 1089, "6684": [1089, 1250], "6001": 1090, "2069": 1090, "1919": 1090, "6727": [1090, 1103], "0062": 1090, "4126": 1090, "2129": 1090, "4206": 1090, "1968": [1090, 2083], "1241": 1090, "0238": 1090, "0233": [1090, 1835], "0157": 1090, "0158": [1090, 1928], "0065": 1090, "0014": [1090, 2083], "0006": 1090, "46": [1091, 1231, 1886], "49": [1091, 1152, 1329, 2045], "74": 1091, "83": 1091, "trapezoid": [1092, 1951, 2015, 2068], "360": 1093, "2832": 1093, "diagflat": [1096, 2015, 2068], "5950": 1096, "0872": 1096, "4264": 1096, "1064": [1096, 2057], "8795": 1096, "2429": 1096, "1374": 1096, "1029": 1096, "6482": 1096, "6300": 1096, "5410": 1097, "2934": 1097, "1788": [1097, 2083], "5684": 1097, "0845": [1097, 1895, 2057], "3986": 1097, "2956": [1098, 1306], "9068": 1098, "1695": 1098, "2094": [1098, 2046], "3018": 1098, "1516": 1098, "9342": 1098, "0854": 1099, "1431": 1099, "1752": 1099, "8536": 1099, "0905": 1099, "0360": [1099, 1412], "6927": 1099, "3735": 1099, "4945": 1099, "2631": [1099, 1396, 2046], "3755": 1099, "5977": [1099, 2049], "8172": 1099, "1065": [1099, 2057], "0401": 1099, "2235": [1099, 1927], "7938": 1099, "3081": 1099, "6166": 1099, "2335": 1099, "0500": [1099, 2062], "7336": 1099, "3836": 1099, "1015": 1099, "5393": 1103, "8675": 1103, "5916": 1103, "6321": 1103, "0967": 1103, "0511": 1103, "6295": 1103, "8360": 1103, "6973": 1103, "6537": 1103, "dividend": [1104, 1154, 1157, 1847, 1957], "true_divid": [1104, 2015, 2068], "3810": [1104, 1235], "2774": 1104, "2972": 1104, "3719": 1104, "4637": [1104, 2049], "7620": 1104, "5548": 1104, "5944": 1104, "7438": 1104, "9274": 1104, "3711": 1104, "9353": 1104, "4605": 1104, "2917": 1104, "1815": [1104, 1354], "0111": [1104, 1884], "9805": 1104, "5923": 1104, "1062": 1104, "4581": [1104, 1313], "7759": 1104, "2344": 1104, "1830": 1104, "0313": 1104, "1908": 1104, "4757": 1104, "8032": 1104, "2930": 1104, "8113": 1104, "2308": 1104, "4620": [1104, 1980], "6051": 1104, "5676": 1104, "2639": 1104, "2260": 1104, "4509": [1104, 1327], "2086": 1104, "1322": 1104, "9764": 1104, "9564": 1104, "3484": 1104, "2278": 1104, "1068": [1104, 1248], "4678": 1104, "3938": [1104, 1938], "depthwis": [1107, 1108, 1454, 1455, 1456], "atleast_3d": [1108, 2015, 2068], "notat": [1109, 1492, 1875, 2018, 2057, 2088], "einstein": 1109, "summat": [1109, 1270, 1353, 1361, 2082], "subscript": [1109, 2018, 2059], "jk": [1109, 2107], "ik": [1109, 1346, 1905], "za": 1109, "alphabet": [1109, 1617, 2077], "arrow": [1109, 2079], "ki": 1109, "ellipsi": [1109, 2017, 2018, 2035], "fourth": 1109, "whitespac": [1109, 2018], "opt_einsum": [1109, 2013], "_the_": 1109, "disclaim": 1109, "sublist": [1109, 2113], "52": 1109, "op1": [1109, 2017], "sublist1": 1109, "op2": [1109, 2017], "sublist2": 1109, "subslist_out": 1109, "7952": 1109, "2433": 1109, "4545": 1109, "1156": 1109, "2897": [1109, 2057], "3918": 1109, "4963": 1109, "3744": 1109, "9381": 1109, "2685": 1109, "6070": 1109, "7208": 1109, "8058": 1109, "4419": 1109, "0936": 1109, "1713": 1109, "4291": 1109, "5802": 1109, "7350": [1109, 2083], "5704": 1109, "4290": 1109, "9323": 1109, "4480": 1109, "bij": 1109, "bjk": 1109, "bik": 1109, "0564": 1109, "5904": 1109, "2023": 
1109, "1271": 1109, "6706": [1109, 1828], "8097": 1109, "8025": 1109, "1183": 1109, "2239": [1109, 1333], "3107": 1109, "5756": 1109, "2354": 1109, "4558": 1109, "3460": 1109, "5087": 1109, "8530": [1109, 1451, 1583], "8153": 1109, "8787": 1109, "3839": [1109, 1976], "2112": [1109, 1953], "3728": 1109, "1131": [1109, 1825], "0921": 1109, "8305": 1109, "ji": 1109, "anm": 1109, "bm": 1109, "ba": 1109, "3430": [1109, 1354], "2405": 1109, "4494": 1109, "3311": 1109, "5201": 1109, "0356": 1109, "4064e": 1110, "8000e": 1110, "3493e": 1110, "5751e": 1110, "1428e": 1110, "5955e": 1110, "9683e": 1112, "1239e": 1112, "0705e": 1112, "orig_func": [1113, 1770], "set_grad_en": [1113, 2015, 2068, 2091], "tripler": [1113, 1770], "elsewher": [1114, 1122, 1216, 1230, 1263, 1265, 1266, 1269, 1298, 1362, 1424, 1906, 2012], "_max": [1123, 1124, 2072], "_min": [1123, 1124, 2072], "nearbi": [1123, 1124], "_int": [1123, 1124], "_point": [1123, 1124], "2525": 1123, "0466": 1123, "3491": [1123, 1327], "2168": [1123, 1945], "5906": [1123, 2083], "6258": 1123, "6444": 1123, "0542": 1123, "0475": [1123, 2083], "0486": 1123, "3405": 1123, "6134": [1123, 1376], "6323": 1123, "0552": 1124, "9730": 1124, "3973": 1124, "0780": 1124, "4000": [1124, 1127, 1136, 1145, 1580, 1831, 1881], "fourier": [1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1270, 1891, 1924, 2013], "rfft": [1125, 1130, 1140, 1144, 1145, 1146], "chalf": [1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1140, 1141, 1142, 2015, 2068, 2088], "sm53": [1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146], "ortho": [1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1346], "orthonorm": [1125, 1126, 1128, 1130, 1131, 1132, 1133, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1146, 1337, 1731, 1928], "ifft": [1125, 1129, 1130, 1134, 1135, 1137, 1138, 1139, 1140, 1141, 1142], "fftn": [1126, 1129, 1135, 1138, 1146], "rfft2": [1126, 1141], "ifft2": [1126, 1138], "two_fft": [1126, 1128, 1138, 1144, 1146], "check_strid": [1126, 1128, 1129, 1134, 1135, 1140, 1141, 1142, 1144, 1146, 2089], "nyquist": [1127, 1129, 1137, 1139, 1143, 1144, 1145, 1146], "i_1": [1128, 1146, 1294], "i_n": [1128, 1146, 1178, 1294, 1945, 1977], "rfftn": [1128, 1132, 1138, 1142, 1144], "ifftn": [1128, 1134, 1139], "reorder": [1129, 1329, 2052], "rearrang": [1129, 1136, 1447, 1539, 1540, 1675, 1676, 2035], "fftfreq": [1129, 1136, 1145], "9000": [1129, 1943], "8000": [1129, 1418, 1580, 1828, 1881], "uncent": 1129, "ifftshift": 1129, "x_center": 1129, "x_uncent": 1129, "fft_uncent": 1129, "fft_center": 1129, "x_centered_2": 1129, "ihfft": [1130, 1138, 1139], "irfft": [1130, 1142, 1143], "symmetri": [1130, 1132, 1924], "transformed_dim_s": [1130, 1140], "0000j": [1130, 1137, 1140, 1303, 1304, 1309, 1310, 1312, 1821, 1878], "1250": [1130, 1396], "1720j": 1130, "0406j": 1130, "2809": 1130, "6250": [1130, 1140, 1159, 1580], "9691": 1130, "hfftn": [1131, 1139], "last_dim_s": [1131, 1132, 1141, 1142, 2108], "ihfft2": 1131, "roundtrip": [1131, 1132, 1140, 1141, 1142], "ihfftn": [1132, 1138], "irfftn": [1132, 1141, 1146], "fft2": [1134, 1144], "two_ifft": [1134, 1135, 1139], "fftshift": 1136, "hfft": 1137, "6882j": 1137, "1625j": 1137, "hfft2": 1138, "8602j": 1140, "2031j": 1140, "1562": 1140, "3511": 1140, "7812": 1140, "2114": 1140, "irfft2": 1144, "wider": [1152, 2014, 2017, 2069, 2082], "2500e": 1152, 
"1000e": 1152, "7656e": 1152, "lfloor": [1153, 1158, 1431, 1436, 1437, 1438, 1454, 1455, 1456, 1473, 1494, 1495, 1496, 1520, 1521, 1522, 1579, 1580, 1581, 1582, 1602, 1672, 1843, 1924], "rfloor": [1153, 1158, 1431, 1436, 1437, 1438, 1454, 1455, 1456, 1473, 1494, 1495, 1496, 1520, 1521, 1522, 1579, 1580, 1581, 1582, 1602, 1672, 1843, 1924], "8166": 1153, "5308": 1153, "2530": 1153, "2091": 1153, "7000": [1155, 1470, 1880], "3000": [1156, 1469, 1880, 2046], "entrywis": [1157, 1847], "modulu": [1157, 1333, 1847], "operatornam": [1158, 1309, 1310, 1311, 1312, 1329, 1337, 1439, 1440, 1446, 1486, 1487, 1493, 1518, 1559, 1577, 1817, 1840, 1880, 1929], "8750": [1159, 1580], "sizeof": [1161, 1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 2084], "map_shar": [1161, 2062, 2084], "map_priv": [1161, 2062, 2084], "mmap": [1161, 1345, 2033, 2062, 2084], "tofil": 1161, "t_map": 1161, "char": [1163, 1778, 2034, 2084, 2104], "parameter_and_buffer_dict": 1166, "tie_weight": [1166, 1767], "submodule_nam": [1166, 1767], "parameter_nam": [1166, 1767], "ty": [1166, 1767, 2117], "foo_ti": [1166, 1767], "new_a": [1166, 1767], "grad_weight": [1166, 2049], "detached_param": 1166, "parameters_and_buffer_dict": 1166, "intermediate_upd": 1167, "mutations_and_view": 1167, "proxy_tensor": [1167, 2021, 2103, 2113], "make_fx": [1167, 2021, 2100, 2113], "inpt": 1167, "f_trace": 1167, "f_no_mutations_trac": 1167, "f_no_mutations_and_views_trac": 1167, "a_1": [1167, 1294], "view_1": 1167, "view_copi": [1167, 2015, 2068, 2091], "view_copy_1": 1167, "as_strid": [1167, 2015, 2068, 2086, 2108], "native_funct": [1167, 2049, 2108], "yaml": [1167, 2049, 2108], "aux": [1168, 1169, 1171, 1172, 1173, 1177], "my_loss_func": 1168, "y_pred": [1168, 2046], "loss_per_sampl": 1168, "y_true": 1168, "autodiff": [1171, 1172, 1173, 1332], "jacobian_f": [1171, 1172], "f_x": [1171, 1172], "jacboian": [1171, 1172], "expectedx": [1171, 1172], "expectedi": [1171, 1172], "_preallocate_and_copi": 1172, "jvp_out": 1173, "wish": [1173, 1919, 2021, 2042, 2046, 2049, 2050], "jvp_fn": 1174, "optimiz": [1176, 2014], "l1": [1176, 1486, 1487, 1559, 1642, 1689, 1742, 1751, 2057, 2062], "l2": [1176, 1487, 1518, 1559, 1781, 1782, 1783, 1784, 1786, 1788, 1794, 1795, 1797, 2042], "vjpfunc": 1177, "unsuccessfulli": [1178, 1977], "rummag": [1178, 1977], "batched_dot": [1178, 1977], "jacobian_row": [1178, 1977], "get_vjp": [1178, 1977], "n1": [1178, 1849, 1977], "n0": [1178, 1977], "batched_pow": [1178, 1977], "autobatch": [1178, 1977], "symbolic_shap": [1179, 1180, 1181, 1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193, 1194, 2013, 2100], "symbol_to_sourc": 1181, "var_to_v": 1181, "marked_dynam": 1181, "source_name_to_debug_nam": 1181, "_allow_complex_guards_as_runtime_assert": [1181, 1189], "solver": [1181, 2013, 2060], "expr": [1181, 1188, 1195, 1202], "tautologi": 1181, "add_equ": 1181, "forced_speci": 1181, "prettify_result": 1181, "original_signatur": 1181, "constraint_violation_error": 1181, "violat": [1181, 1188, 2046, 2050], "erro": 1181, "remove_redundant_dynamic_result": 1181, "rewrite_with_congru": 1181, "congruenc": 1181, "ration": 1181, "inequ": [1181, 1195], "_disable_forced_speci": [1181, 1188], "duck": [1182, 1188, 2049, 2070], "nb": [1182, 1192, 1195, 1197, 1199, 1287], "simplic": [1182, 1737, 2077, 2078], "varieti": [1182, 2046, 2078], "assume_static_by_default": [1182, 1189], "mark_dynamic_dim": 1182, "warn_onli": [1184, 1187, 1192, 1965], "source_pair": 1184, "derived_equ": 1184, "phantom_symbol": 1184, 
"forest": 1184, "transit": [1184, 1273, 1375, 1527, 1924, 2013, 2014, 2094], "phantom": 1184, "inner_nam": 1185, "unback": [1186, 1188, 1197, 1198, 1200, 1201, 1208, 1209, 1210, 2103], "unspeci": 1187, "unspec": 1187, "brittl": 1187, "strictminmaxconstraint": 1187, "should_record_ev": 1188, "tracked_fak": 1188, "add_var_to_v": 1188, "bind_symbol": 1188, "littl": [1188, 2049, 2062, 2079], "evaluate_guard": 1188, "cleanest": 1188, "shenanigan": 1188, "bound_sympi": 1188, "size_oblivi": 1188, "check_equ": 1188, "create_symbol": 1188, "dimdynam": [1188, 1191], "constraint_dim": 1188, "do_not_specialize_zero_on": 1188, "symbolic_context": [1188, 1190, 1191], "create_symbolic_sizes_strides_storage_offset": [1188, 1190, 1191, 1194], "create_symboolnod": 1188, "create_symfloatnod": 1188, "create_symintnod": 1188, "create_unbacked_symbool": 1188, "create_unbacked_symfloat": 1188, "create_unbacked_symint": 1188, "create_unspecified_symbol": 1188, "specialz": 1188, "create_unspecified_symint_and_symbol": 1188, "defer_runtime_assert": 1188, "orig_expr": 1188, "fx_node": 1188, "evaluate_expr": [1188, 2101], "expect_r": 1188, "forcing_spec": 1188, "evaluate_guards_express": 1188, "produce_guards_express": 1188, "evaluate_guards_for_arg": 1188, "ignore_stat": 1188, "format_guard": 1188, "freeze_runtime_assert": 1188, "discharg": [1188, 1198], "get_axiom": 1188, "get_impl": 1188, "compute_hint": 1188, "booleanatom": 1188, "get_nontrivial_guard": 1188, "get_pruned_guard": 1188, "prune": [1188, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 2037, 2082], "ignore_fresh_unbacked_symbol": 1188, "is_unbacked_symint": 1188, "produce_guard": 1188, "source_ref": 1188, "input_context": 1188, "equalities_input": 1188, "_simplifi": 1188, "localsourc": [1188, 2101], "boilerpl": [1188, 2057, 2070], "nice": [1188, 1454, 1455, 1456, 1457, 1458, 1459, 1473, 1520, 1521, 1522, 1579, 2035, 2043, 2046, 2070, 2101], "set_unbacked_var_to_v": 1188, "propagate_real_tensor": [1188, 1197], "resort": [1188, 2046, 2054, 2070], "size_hint": [1188, 2106], "allow_non": 1188, "suppress_guard": 1188, "allow_scalar_output": 1189, "allow_dynamic_output_shape_op": 1189, "specialize_zero_on": 1189, "duck_shap": 1189, "prefer_deferred_runtime_asserts_over_guard": 1189, "dynamic_s": [1190, 1191, 1193], "constraint_s": [1190, 1191, 1193], "view_base_context": [1190, 1191, 1193], "tensor_sourc": [1190, 1193], "shape_env_to_source_to_symbol_cach": [1190, 1193], "statelesssymboliccontext": 1190, "owner": [1190, 2012, 2049, 2068, 2077, 2078], "lifecycl": [1190, 2103], "shape_env": [1190, 1197, 1210, 1211], "dimconstraint": 1191, "relaxedunspecconstraint": 1192, "unsoundli": [1192, 1198], "inner_context": 1193, "canonic": 1195, "rh": [1195, 1319, 1364], "Ors": 1195, "cnf": 1195, "subexpr": 1195, "25924": 1195, "retrac": [1196, 1210, 1281, 2101], "example_valu": 1197, "old_example_valu": 1197, "peek": [1197, 2021], "freshli": 1197, "unbacked_var_to_v": 1197, "lie": [1198, 1561, 1563, 1691, 1907, 2083, 2087, 2103], "intersect": 1198, "fairli": [1198, 2059, 2100, 2104, 2106], "perfectli": [1198, 1871, 2046], "definitely_tru": 1200, "parallel_or": 1201, "parallel_and": 1201, "circuit": 1201, "oblivi": 1202, "118579": 1202, "free_symbol": 1203, "maxsiz": 1207, "sym_and": 1213, "sparse_grad": [1214, 2015, 2108], "tau": [1217, 1313, 1635, 1783, 1797, 1814, 1815, 1884, 2015], "elementari": [1217, 2043, 2054], "reflector": [1217, 1731, 1815], "household": [1217, 1313, 1731, 1815], "householder_product": [1217, 1731, 1814], "gel": [1217, 1319], 
"set_deterministic_debug_mod": [1221, 1965], "mtia": [1222, 1398, 1399, 1400, 1404, 2013, 2071], "fork_rng": [1226, 2013, 2076], "edge_ord": [1227, 2015], "rightarrow": 1227, "closer": [1227, 1577, 1828, 2049, 2057, 2072, 2106, 2107], "interior": 1227, "theorem": 1227, "h_l": 1227, "h_r": 1227, "neighbor": [1227, 1270, 1580, 1582, 1924], "xi_1": 1227, "xi_2": 1227, "approx": [1227, 1817, 1929, 2054], "80": [1227, 1329, 1807, 1875, 2046, 2069], "halv": 1227, "coord": 1227, "54": [1231, 1488, 1887], "hann_window": [1231, 1924, 2015, 2019, 2068], "hann": [1232, 1887], "hist": [1235, 1236, 2015], "bin_edg": [1235, 1236, 2015], "9524": 1235, "leftmost": [1236, 2036], "leg": 1240, "triangl": [1240, 2087], "hypotenus": 1240, "4031": 1240, "gammainc": [1242, 2083], "gammaincc": [1243, 2083], "index_reduce_": [1247, 2015], "1427": 1248, "0231": 1248, "5414": 1248, "0009": 1248, "4664": [1248, 1945], "2647": 1248, "1228": 1248, "6571": 1248, "7230": 1248, "6004": 1248, "multidimension": [1250, 1343, 1489], "8173": 1250, "0874": 1250, "1784": 1250, "3279": 1250, "7894": 1250, "4682": 1250, "7159": 1250, "1506": 1250, "4034": 1250, "3657": [1250, 2062], "0387": 1250, "9892": 1250, "1774": 1250, "3261": 1250, "3917": 1250, "4537": [1250, 1707], "7493": 1250, "1724": 1250, "2291": 1250, "5749": 1250, "2267": 1250, "7920": 1250, "3607": 1250, "3701": 1250, "3666": 1250, "5850": [1250, 1304], "7242": 1250, "9837": 1250, "1560": 1250, "2907": 1250, "6785": 1250, "5671": [1250, 1305], "5452": 1250, "6912": 1250, "5509": 1250, "1782": 1250, "9843": 1250, "7366": 1250, "5672": [1250, 1773], "5115": 1250, "4864": 1250, "2476": 1250, "4337": 1250, "6347": 1250, "1748": 1250, "3567": [1250, 1304], "6558": 1250, "2469": [1250, 2057], "5787": [1250, 1354], "typecheck": [1260, 2068], "warn_alwai": 1261, "set_warn_alwai": 1261, "nonfinit": 1262, "test_el": [1264, 2015], "assume_uniqu": [1264, 2015], "0j": [1269, 1974], "nola": 1270, "envelop": 1270, "hop": [1270, 1924], "shorter": [1270, 2067, 2077], "griffin": 1270, "ieee": [1270, 1480, 1891, 2060], "tran": 1270, "assp": 1270, "vol": [1270, 1480, 1891], "236": 1270, "apr": 1270, "1984": 1270, "slide": [1270, 1436, 1437, 1438, 1473, 1520, 1521, 1522, 1579, 1627, 1658, 1659, 1660, 1703, 1832, 1833, 1924], "fft_size": 1270, "scriptmodul": [1271, 1272, 1277, 1281, 1283, 1284, 1285, 1289, 1290, 1780, 2014, 2016, 2027, 2067], "attributemodul": 1271, "names_ag": 1271, "get_debug_st": 1272, "graphexecutorst": 1272, "_extra_fil": [1272, 1273, 1281, 1284, 2056], "save_to_buff": 1272, "add_modul": [1273, 1527], "init_weight": [1273, 1527, 2057], "buf": [1273, 1527], "20l": [1273, 1527], "1l": [1273, 1527], "5l": [1273, 1527], "pretti": [1273, 1779, 1875, 2014, 2099, 2103], "syntax": [1273, 2014, 2017, 2070, 2105], "code_with_const": 1273, "constmap": 1273, "get_buff": [1273, 1527], "attributeerror": [1273, 1527, 2049, 2072], "get_extra_st": [1273, 1527], "set_extra_st": [1273, 1527], "get_paramet": [1273, 1527], "net_b": [1273, 1527], "net_c": [1273, 1527], "inlined_graph": 1273, "ipu": [1273, 1527, 2021, 2062], "remove_dupl": [1273, 1527], "named_children": [1273, 1527, 2057], "conv4": [1273, 1527], "conv5": [1273, 1527], "memo": [1273, 1527, 2103], "register_backward_hook": [1273, 1527], "register_full_backward_hook": [1273, 1527, 1712, 2057], "register_forward_hook": [1273, 1527, 1710, 2057], "with_kwarg": [1273, 1527], "always_cal": [1273, 1527, 1710], "fire": [1273, 1527, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798, 2048, 2056, 2109], 
"register_module_forward_hook": [1273, 1527, 2057], "register_forward_pre_hook": [1273, 1463, 1527, 1711, 2057], "forward_pr": [1273, 1527], "register_module_forward_pre_hook": [1273, 1527, 2057], "register_module_full_backward_hook": [1273, 1527, 1708, 2043, 2057], "register_full_backward_pre_hook": [1273, 1527, 1713, 2057], "register_module_full_backward_pre_hook": [1273, 1527, 2057], "register_load_state_dict_post_hook": [1273, 1527, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "incompatible_kei": [1273, 1527], "register_modul": [1273, 1527, 1714], "register_paramet": [1273, 1527, 1715, 2049, 2057], "register_state_dict_pre_hook": [1273, 1527, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "keep_var": [1273, 1527], "gan": [1273, 1527, 1732, 1766], "share_memori": [1273, 1527, 2059], "share_memory_": [1273, 1527, 2033, 2084], "4d": [1273, 1442, 1473, 1490, 1527, 1580, 1627, 1644, 1672, 1704, 1724, 1725, 1831], "1913": [1273, 1527], "3420": [1273, 1527], "5113": [1273, 1527, 1945], "2325": [1273, 1306, 1527], "gpu1": [1273, 1527], "1914": [1273, 1527], "5112": [1273, 1527, 2046], "3741": [1273, 1527], "2382": [1273, 1412, 1527], "5593": [1273, 1527], "4443": [1273, 1527], "6122": [1273, 1527], "1150": [1273, 1527], "dst_type": [1273, 1527], "set_to_non": [1273, 1527, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1793, 1794, 1795, 1796, 1797, 1798, 2046], "the_typ": 1274, "the_valu": 1274, "script_bar": 1276, "addmod": 1276, "preserved_attr": 1277, "optimize_numer": 1277, "optimize_for_infer": [1277, 2099], "run_frozen_optim": 1277, "scripted_modul": [1277, 1285, 2062], "frozen_modul": 1277, "modified_tensor": 1277, "mymodule2": 1277, "dump_alias_db": 1277, "training_method": 1278, "testcod": [1279, 1280, 1281, 1284, 2016], "interfacetyp": 1279, "impl1": 1279, "impl2": 1279, "user_fn": 1279, "user_fn_jit": 1279, "target_typ": 1280, "key1": 1280, "val1": 1280, "key2": 1280, "val2": 1280, "_restore_shap": 1281, "scriptfunct": [1281, 1285, 1286, 1289, 2067], "readlin": [1281, 1345, 2070], "other_method": 1283, "lesser": [1283, 2043, 2045], "extent": [1283, 2045, 2082], "frozen_mod": 1283, "_frames_up": 1285, "_rcb": 1285, "scriptdict": 1285, "scriptlist": 1285, "test_sum": 1285, "scripted_fn": [1285, 2014], "conv2": [1285, 1527, 1556, 2014, 2065, 2072], "some_entry_point": 1285, "python_only_fn": 1285, "testnnmodul": 1285, "pdt_model": 1285, "scripted_model": [1285, 2070], "un": [1287, 1446, 2072], "unfus": 1287, "check_trac": [1289, 1290], "check_input": [1289, 1290, 2014], "check_toler": [1289, 1290], "_force_outplac": [1289, 1290], "_module_class": [1289, 1290], "_compilation_unit": [1289, 1290], "compilationunit": [1289, 1290], "example_kwarg_input": 1289, "_store_input": [1289, 1290], "trace_modul": [1289, 2014, 2017], "untrack": 1289, "checker": [1289, 1290, 2017, 2067], "traced_foo": [1289, 2014], "example_weight": [1289, 1290], "example_forward_input": [1289, 1290], "example_inputs_is_kwarg": 1290, "method2": 1290, "example_method2_input": 1290, "weighted_kernel_sum": 1290, "use_memory_effici": 1291, "memory_effici": 1291, "scriptabl": 1291, "kaiser": [1293, 1571, 1573, 1575], "i_0": [1293, 1294, 1890, 1945, 2083], "zeroth": [1293, 1890, 2083], "out_i": 1293, "kroneck": 1294, "a_0": 1294, "a_n": 1294, "b_0": 1294, "b_1": 1294, "b_n": 1294, "k_0": [1294, 1945], "k_1": 1294, "k_n": 1294, "j_0": 1294, "j_1": 1294, "j_n": 1294, "k_t": 1294, "i_t": [1294, 1497, 1796], "b_t": 1294, "j_t": 1294, "bmatrix": 1294, "a_": [1294, 
1365, 1515, 1945], "cdot": [1294, 1319, 1331, 1439, 1440, 1461, 1462, 1484, 1492, 1530, 1531, 1532, 1533, 1534, 1571, 1615, 1638, 1821, 1924, 2083, 2089], "vdot": [1294, 1341, 1342, 2015, 2068], "ddot": [1294, 1341], "kth": 1295, "full_lik": [1299, 2015, 2019, 2021, 2068], "logarithm": [1302, 1307, 1333, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1360, 1446, 1617, 1652, 1904, 2083], "gamma": [1302, 1441, 1442, 1443, 1481, 1489, 1490, 1491, 1499, 1542, 1567, 1716, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1797, 1799, 1803, 1804, 1807, 1812, 1813, 1831, 2013, 2015, 2067, 2068, 2069, 2083], "5724": [1302, 2083], "1208": [1302, 2083], "mathrlap": [1303, 1309, 1310, 1311, 1312, 1320, 1323, 1332, 1334, 1336, 1337, 1731], "qquad": [1303, 1305, 1309, 1310, 1311, 1312, 1313, 1320, 1323, 1332, 1334, 1336, 1337, 1731], "eigenvalu": [1303, 1309, 1310, 1311, 1312, 1325, 1328, 1331, 1336, 1337, 1346, 1817, 2015], "resp": [1303, 1310, 1312, 1336, 1337, 1354], "5266": 1303, "9586": 1303, "0626j": 1303, "4160": 1303, "5895": 1303, "2322": 1303, "2976j": 1303, "4928": [1303, 1945], "4692e": 1303, "8747e": 1303, "check_error": [1304, 1315, 1317, 1322, 1335, 2015], "performantli": 1304, "3792": 1304, "9831j": 1304, "8757": 1304, "5425": 1304, "6374j": 1304, "kappa": 1305, "_p": [1305, 1536], "frobeniu": [1305, 1319, 1326, 1330, 1772], "nuc": [1305, 1326, 1330, 1343, 1743, 1752, 1772], "nuclear": [1305, 1326, 1330, 1772], "sigma_1": [1305, 1319, 1328, 1331], "sigma_n": 1305, "kappa_2": 1305, "kappa_": 1305, "4142": [1305, 1330, 1772, 1821], "1623": [1305, 1326], "2426": [1305, 1330, 1772], "7071": [1305, 1883], "5917": 1305, "9941": 1306, "5132": 1306, "5681": 1306, "4653": 1306, "4507": 1306, "4119": 1306, "6163": 1306, "1073": 1306, "3957": 1306, "9666": [1306, 1624], "0840": 1306, "3357": 1306, "2139": 1306, "slogdet": [1307, 1354, 2015, 2068], "0934": 1307, "1990": [1307, 1354], "4099": [1307, 1354], "7386": [1307, 1354], "diagonaliz": [1309, 1311], "eigenvector": [1309, 1310, 1346, 2015], "neq": [1309, 1310, 1313, 1337, 1424, 1530, 1532, 1782, 1783, 1784, 1786, 1788, 1794, 1795, 1797, 1801, 1928], "phi": [1309, 1310, 1337, 1476, 1631, 1928], "shall": [1309, 1310, 1337, 1734, 1738], "lambda_i": [1309, 1310, 1325], "lambda_j": [1309, 1310], "eigval": [1309, 2015], "9828": [1309, 1953, 2057], "3889j": 1309, "4617": 1309, "3010j": 1309, "1662": 1309, "7435j": 1309, "6139": 1309, "0562j": 1309, "1226": [1309, 1311], "5738j": [1309, 1311], "7537": [1309, 1311], "1286j": [1309, 1311], "9218": 1309, "1882": 1309, "2220j": 1309, "0270": 1309, "3867j": 1309, "7119e": 1309, "2841e": 1309, "uplo": [1310, 1312, 2015], "unitari": [1310, 1313, 1332, 1337, 1731, 1815], "ill": [1310, 1598, 2060], "eigvalsh": [1310, 1328], "9228": [1310, 1312], "2029": [1310, 1312], "0862j": [1310, 1312], "3464": [1310, 1312], "3277": [1310, 1312], "9415": [1310, 1312], "0846": 1310, "9964": 1310, "9170": 1310, "3898j": 1310, "0331j": 1310, "1062e": 1310, "5423e": 1310, "polynomi": [1311, 1312, 1810, 2104], "_n": [1311, 1312, 1314, 1731, 2049], "4576e": [1311, 1338], "5797": 1312, "4629": 1312, "1605": 1312, "3780": 1312, "1113": [1312, 2057], "7381": 1312, "h_1h_2": 1313, "h_k": 1313, "h_i": [1313, 1562], "_m": [1313, 1731], "tau_i": 1313, "8034": 1313, "4184j": 1313, "2588": 1313, "0174j": 1313, "6853": 1313, "7953j": 1313, "0790": 1313, "5620j": 1313, "6989j": 1313, "5360": 1313, "1193j": 1313, "3877": 1313, "6691j": 1313, "3512": 1313, "3024j": 1313, "4766": 1313, "5783j": 1313, "0361": [1313, 2057], "6587j": 1313, "6396": 
[1313, 2057], "1612j": 1313, "3693": 1313, "4481j": 1313, "aa": 1314, "pinv": [1314, 1319, 1819], "moor": [1314, 1331], "penros": [1314, 1331], "ainv": [1314, 1315, 1335, 1339], "1921e": 1314, "9073e": [1314, 1579], "5107e": 1314, "ldl": [1316, 1318], "indefinit": [1316, 2026], "ld": [1316, 1317, 1318, 2015], "sytrf": [1316, 1317], "ldl_solv": 1316, "ldl_factor_ex": [1316, 1318], "2079": [1316, 1317, 2083], "2414": [1316, 1317], "9428": [1316, 1317], "4554": [1316, 1317], "3264": [1316, 1317], "3823": [1316, 1317], "5884": [1316, 1317], "9595": [1316, 1317, 1883], "2695": [1316, 1317], "8513": [1316, 1317], "1633": [1316, 1317], "ldl_factor": 1317, "rcond": [1319, 1331, 1819, 2015], "_f": 1319, "gelsi": 1319, "gelsd": 1319, "gelss": 1319, "tridiagon": 1319, "sigma_i": [1319, 1337, 1928], "residu": [1319, 1346, 2015, 2057], "singular_valu": [1319, 2015], "0838": [1319, 2057], "2275": [1319, 1396], "3844": 1319, "5499": 1319, "1175": 1319, "9102": 1319, "0870": 1319, "6772": 1319, "7758": 1319, "5109": 1319, "4382": 1319, "3769": 1319, "1818": 1319, "3450": 1319, "0806": [1319, 2057], "3967": 1319, "3994": 1319, "1521": 1319, "1473": 1319, "9194": 1319, "0458": 1319, "6705": [1319, 1378], "1802": 1319, "4086": 1319, "5152e": 1319, "zero_": [1319, 1624, 2015, 2034, 2035, 2057, 2082], "5007": 1320, "9755": 1320, "0489": 1320, "9644": [1320, 1373], "9605e": 1320, "0376e": 1320, "lu_factor_ex": [1321, 1363], "lu_unpack": [1321, 1363, 2015, 2068], "b1": 1321, "b2": [1321, 2046, 2055], "getrf": [1322, 1335], "adjoint": [1323, 2015, 2068, 2086, 2088], "_exp": 1325, "7183": [1325, 2057], "3891": 1325, "8660": 1325, "ord": [1326, 1330, 1343, 1772, 2015, 2017, 2068], "la": [1326, 1330, 1343, 2057], "2829": 1326, "2627": 1326, "0756": 1327, "4980": 1327, "6617": 1327, "4994": 1327, "9980": 1327, "2731": 1327, "8001": 1327, "2640": 1327, "4571": 1327, "5511": 1327, "0163": [1327, 1373], "5292": 1327, "4899": 1327, "0822": 1327, "2773": [1327, 2036], "varepsilon": [1328, 1331, 1576], "tol": [1328, 1346, 2015], "fewest": 1329, "bc": [1329, 2025, 2069], "75000": 1329, "26": [1329, 1802, 2101, 2102, 2113], "148": 1329, "vector_norm": [1330, 1772], "matrix_norm": [1330, 1343, 1732, 1772], "7460": [1330, 1772], "3485": 1330, "8570e": 1330, "8480": 1330, "2361": [1330, 1772, 1773], "7417": [1330, 1772], "computation": [1331, 2054], "5495": [1331, 1396], "0979": 1331, "4092": 1331, "4132": [1331, 1889], "1143": 1331, "3662": 1331, "6374": 1331, "9294": 1331, "3269": [1331, 2057], "5745": [1331, 1922, 1923, 1972, 1973], "0382": [1331, 1412], "5922": 1331, "6759": 1331, "0600": 1331, "1933": 1331, "2090": 1331, "0903": 1331, "0817": 1331, "4752": [1331, 1927], "7124": 1331, "1631": 1331, "2272": 1331, "1356": 1331, "3933": 1331, "5023": 1331, "0308": 1331, "1725": 1331, "5216": 1331, "apinv": 1331, "5633e": 1331, "0830e": 1331, "wide": [1332, 1337, 1731, 1885, 2045, 2049, 2057, 2069, 2104], "51": [1332, 1525, 1827], "167": [1332, 1827], "68": [1332, 1827, 2014, 2016], "8571": [1332, 1827], "3943": [1332, 1827], "3314": [1332, 1827], "4286": [1332, 1827], "9029": [1332, 1827], "0343": [1332, 1827], "2857": [1332, 1827], "1714": [1332, 1827, 2057], "9429": [1332, 1827], "175": [1332, 1827], "q2": 1332, "r2": [1332, 1567], "6099e": 1332, "2158e": 1332, "logabsdet": [1333, 2015], "0032": 1333, "6690": 1333, "1161": 1333, "4053": 1333, "6218": [1333, 1942], "9273": 1333, "0082": 1333, "7576": 1333, "logdet": [1333, 2015, 2068], "linalg_slogdet": [1333, 2015, 2068], "2776": 1333, "solve_ex": 1334, "solve_triangular": 
[1334, 1952], "expand_a": [1334, 2015, 2049, 2068, 2086], "rectangular": [1336, 1337, 1365, 1924, 2041, 2065], "triu_": [1336, 2015], "tril_": [1336, 2015], "full_matric": [1337, 1338, 1737, 1928, 2015, 2108], "vh": [1337, 1737, 1928, 2015, 2108], "gesvdj": [1337, 1338, 1928], "jacobi": 1337, "gesvda": [1337, 1338], "gesvd": [1337, 1338, 1928], "u_k": 1337, "v_k": 1337, "sigma_j": [1337, 1928], "eigendecomposit": 1337, "0486e": 1337, "0957e": 1337, "5139": 1338, "1087": 1338, "1066": 1338, "ind": [1339, 1340, 2015, 2050], "tensorsolv": 1339, "ndim": [1339, 1340, 2034, 2036, 2082, 2088, 2102], "atensorinv": 1339, "movedim": [1340, 1380, 2015, 2050, 2068, 2086], "tensorinv": 1340, "vandermond": [1341, 1971], "pmatrix": 1341, "x_n": [1341, 1439, 1440, 1486, 1487, 1493, 1518, 1559, 1950, 2043], "125": [1341, 1546, 1971, 2015], "overlin": [1342, 1974], "3223": 1342, "2815": 1342, "1944": [1342, 2057], "4345": 1343, "pickle_modul": [1345, 1859], "weights_onli": [1345, 2012, 2028, 2062], "pickle_load_arg": 1345, "register_packag": [1345, 2062], "binaryio": [1345, 1859, 2070], "add_safe_glob": [1345, 2062], "mmape": 1345, "untrust": [1345, 2012, 2028, 2070], "tamper": [1345, 2070], "surg": 1345, "unicodedecodeerror": 1345, "codec": 1345, "0x": 1345, "latin1": 1345, "byte_arrai": 1345, "niter": [1346, 1817, 1929], "ortho_iparam": 1346, "ortho_fparam": 1346, "ortho_bparam": 1346, "knyazev": 1346, "knyazev2001": 1346, "stathopoulosetal2002": 1346, "converg": [1346, 1559, 1731, 1784, 1785, 1809, 2042, 2057, 2060], "precondition": 1346, "eigenpair": 1346, "criterion": [1346, 1439, 1440, 1460, 1462, 1487, 1493, 1518, 1519, 1530, 1531, 1532, 1559, 1560, 1576, 1577, 1802, 1834, 2051, 2053], "fep": 1346, "eigenproblem": 1346, "iparam": 1346, "fparam": 1346, "bparam": 1346, "ivar": 1346, "fvar": 1346, "bvar": 1346, "tvar": 1346, "istep": 1346, "converged_count": 1346, "rerr": 1346, "force_stop": 1346, "2001": 1346, "precondit": 1346, "eigensolv": 1346, "siam": 1346, "sci": 1346, "517": 1346, "541": 1346, "epub": 1346, "doi": [1346, 1480, 1891], "1137": 1346, "s1064827500366124": 1346, "andrea": 1346, "stathopoulo": 1346, "kesheng": 1346, "2002": [1346, 1891], "2165": 1346, "2182": 1346, "s1064827500370883": 1346, "duerschetal2018": 1346, "jed": 1346, "duersch": 1346, "meiyu": 1346, "shao": 1346, "chao": 1346, "ming": 1346, "gu": 1346, "c655": 1346, "c676": 1346, "17m1129830": 1346, "log_": [1347, 1348, 1349, 1350, 2015, 2034], "7767": 1347, "3234": 1347, "2156": 1347, "2411": 1347, "5739": 1347, "5637": 1347, "4640": 1347, "1952": 1347, "4226": 1347, "5204": [1347, 1836], "5224": 1348, "9354": 1348, "7257": 1348, "1301": 1348, "2820": 1348, "0290": 1348, "1392": 1348, "8857": 1348, "6476": 1348, "0090": [1349, 1425, 1823, 2083], "9923": 1349, "5372": 1349, "2492": 1349, "8653": 1349, "7055": 1349, "7705": 1349, "2225": 1349, "8419": 1350, "8003": [1350, 2061], "9971": 1350, "5287": 1350, "0490": 1350, "2483": 1350, "0042": 1350, "9196": 1350, "3504": [1350, 1945], "logsumexp": [1351, 2015, 2034, 2068, 2083], "3069": 1351, "6867": 1351, "8731": 1351, "30000": 1351, "1269e": 1351, "log_2": 1352, "logaddexp": [1352, 2015, 2068], "limits_": 1353, "42296738": 1353, "04462666": 1353, "86278635": 1353, "94622083": 1353, "05277811": 1353, "39202815": 1353, "83525007": 1353, "84492621": 1353, "06084887": 1353, "06844475": 1353, "2611": [1354, 1942], "9254": 1354, "6213": [1354, 2057], "6843": 1354, "3242": 1354, "9665": 1354, "4539": 1354, "0887": [1354, 2083], "1336": 1354, "4025": 1354, "7089": [1354, 1469], 
"9032": 1354, "3031": 1354, "2589": 1360, "1135": 1360, "5481": [1360, 1373, 2057], "9566": 1360, "sum_j": [1361, 1517, 1561, 1563, 1691, 1907, 2083], "0593": [1361, 2057], "5696": 1361, "6859e": 1361, "compute_pivot": 1363, "transposit": [1363, 1949, 2082], "perm": 1363, "a_lu": 1363, "5558": 1363, "1684": 1363, "1551": 1363, "1940": 1363, "6189": 1363, "5497": 1363, "4526": 1363, "2526": 1363, "3285": 1363, "7988": 1363, "7175": 1363, "9701": 1363, "2634": 1363, "9255": 1363, "3459": 1363, "00000e": 1364, "8312": 1364, "unpack_data": [1365, 2015], "unpack_pivot": [1365, 2015], "l_": [1365, 1428, 1432, 1436, 1440, 1454, 1455, 1456, 1457, 1494, 1520], "u_": [1365, 1782, 1786], "3552": [1367, 1625], "3825": 1367, "8297": 1367, "3477": 1367, "2035": [1367, 1922, 1923, 1972, 1973], "2252": [1367, 2083], "5002": 1367, "6248": [1367, 1376], "1307": 1367, "0608": [1367, 1900], "1244": 1367, "0139": 1367, "6763": 1371, "7445": 1371, "2369": 1371, "argmax": [1371, 1431, 1520, 1658, 1659, 1660, 1940, 2015, 2068, 2108], "max_indic": 1371, "2360": 1371, "2942": 1371, "1222": [1371, 2057], "8475": 1371, "1949": 1371, "1127": 1371, "6702": 1371, "5717": 1371, "9207": 1371, "1297": 1371, "8768": 1371, "6172": 1371, "6060": 1371, "2432": 1371, "3288": 1373, "3367": [1373, 1921], "nanmean": [1373, 2015, 2068], "3841": 1373, "6320": 1373, "4254": 1373, "7384": 1373, "0131": 1373, "6549": [1373, 1883], "4279": 1373, "3350": 1373, "7694": 1373, "5600": [1373, 1580], "0842": 1373, "9580": 1373, "3623": 1373, "2343": [1373, 2036], "5085": 1373, "4599": 1373, "1807": 1373, "5219": 1374, "5212": 1374, "2202": 1374, "2505": 1374, "3982": 1374, "9948": 1374, "3518": 1374, "3131": 1374, "3180": [1374, 2082], "6993": 1374, "0436": 1374, "0438": 1374, "2270": 1374, "2751": 1374, "7303": 1374, "2192": 1374, "3321": 1374, "2488": 1374, "0778": 1374, "9510": 1374, "7048": 1374, "4742": [1374, 1958, 2083], "7125": [1374, 1909], "plot": [1375, 2045, 2071, 2087, 2115], "t_0": [1375, 1802], "t_": [1375, 1475, 1629, 1801, 1802, 2015, 2082, 2103], "s_0": 1375, "s_": [1375, 1429, 1430, 1472, 1578], "g_0": 1375, "g_": [1375, 1786, 1796, 1797], "g_i": 1375, "t_i": 1375, "0d": [1375, 1519], "xy": 1375, "50276": 1375, "cartesian_prod": [1375, 2015, 2068], "grid_x": 1375, "grid_i": 1375, "dstack": [1375, 2015, 2068, 2082], "matplotlib": [1375, 2087], "pyplot": 1375, "plt": 1375, "plot_surfac": 1375, "6750": 1376, "0857": [1376, 1955], "7197": [1376, 1980], "argmin": [1376, 2015, 2068, 2108], "min_indic": [1376, 2015], "1334": 1376, "2803": 1376, "4644": [1376, 1922, 1923, 1972, 1973], "2635": [1376, 2057], "3651": 1376, "0384": 1376, "0128": 1376, "7015": 1376, "1153": 1376, "9849": 1376, "1458": [1376, 2083], "5788": 1376, "deduc": [1378, 2082], "4851": 1378, "5037": 1378, "3633": 1378, "0760": 1378, "3362": [1380, 1381], "8437": [1380, 1381], "9627": [1380, 1381], "1727": [1380, 1381], "5173": [1380, 1381], "1398": [1380, 1381], "mpsalloc": [1382, 1384], "metal": [1384, 1393, 2027, 2031, 2058], "mpsgraph": 1384, "wait_until_complet": [1389, 1390], "signpost": [1389, 1390, 1391], "xcode": 1390, "recommendedmaxworkingsets": 1393, "unlimit": [1393, 2046], "1321": 1396, "4370": [1396, 2082], "1289": 1396, "0527": 1396, "3077": [1396, 1918], "0881": 1396, "1259": 1396, "0284": 1396, "2015": [1412, 2041, 2057], "6087": 1412, "1494": 1412, "5491": 1412, "260": 1412, "8663": 1412, "3137": 1412, "0700": 1412, "8378": 1412, "5146": 1412, "5244": 1412, "5767": 1412, "1363": 1412, "5877": 1412, "5083": 1412, "1614": 1412, "1645": 1412, 
"7021": 1412, "0085": 1412, "0367": 1412, "1567": 1412, "4312": 1412, "1019": 1412, "4394": 1412, "8753": 1412, "_sampl": 1413, "n_sampl": 1413, "prob_dist": 1413, "0404": 1415, "6361": 1415, "multigammaln": [1416, 2083], "4028e": 1417, "38": [1417, 2102], "1400e": 1417, "isnan": [1418, 2015, 2068, 2082, 2108], "midpoint": [1420, 1828], "weakli": [1422, 1423, 2083], "to_spars": [1423, 1905, 2015, 2061, 2068, 2082], "2262": [1425, 1823], "0682": [1425, 1823], "2866": [1425, 1823], "3940": [1425, 1823], "5x7": [1429, 1433], "7x7": [1429, 1433], "10x7": [1429, 1433], "cube": [1430, 1434, 1834], "d_": [1430, 1434, 1438, 1450, 1453, 1456, 1459, 1496, 1522, 1525, 1551, 1554, 1580, 1585, 1633, 1953, 1954, 1955, 1956], "5x7x9": [1430, 1434], "7x7x7": [1430, 1434], "7x9x8": [1430, 1434], "n_class": 1431, "cutoff": [1431, 2041], "div_valu": 1431, "head_bia": 1431, "edouard": 1431, "grave": [1431, 1446], "armand": 1431, "joulin": 1431, "moustapha": 1431, "cissu00e9": 1431, "grangier": 1431, "hervu00e9": 1431, "ju00e9g": 1431, "imbalanc": 1431, "zipf": 1431, "law": 1431, "102": [1431, 2107], "1001": 1431, "1002": 1431, "_class": 1431, "maxunpool1d": [1432, 1520, 1661, 1965], "maxunpool2d": [1433, 1474, 1521, 1662, 1965], "maxunpool3d": [1434, 1475, 1522, 1663, 1965], "selu": [1435, 1471, 1626, 2015, 2041, 2067, 2068], "n_i": [1436, 1437, 1438, 1454, 1455, 1456, 1520, 1521, 1522, 1576, 1577], "c_j": [1436, 1437, 1438, 1520, 1521, 1522], "size_averag": [1439, 1440, 1460, 1462, 1486, 1492, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1605, 1606, 1614, 1616, 1641, 1645, 1646, 1657, 1665, 1666, 1667, 1668, 1669, 1677, 1689, 1690, 1701, 2015], "unreduc": [1439, 1440, 1462, 1487, 1493, 1518, 1534, 1559, 1577], "ell": [1439, 1440, 1462, 1486, 1487, 1493, 1518, 1534, 1559, 1577], "l_1": [1439, 1440, 1462, 1486, 1487, 1493, 1518, 1534, 1559, 1577], "l_n": [1439, 1440, 1462, 1486, 1487, 1493, 1518, 1534, 1559, 1577], "w_n": [1439, 1440, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891], "y_n": [1439, 1440, 1462, 1486, 1487, 1493, 1518, 1534, 1559, 1950, 2043], "lim_": [1439, 2043], "secondli": 1439, "rescal": [1439, 1440, 1462, 1531, 1532, 1534, 1561, 1563, 1605, 1606, 1616, 1669, 1737, 1766], "nbatch": [1439, 1440], "meantim": [1439, 1440, 1460, 1462, 1486, 1493, 1518, 1519, 1530, 1531, 1532, 1534, 1541, 1559, 1560, 1576, 1605, 1606, 1616, 1645, 1669, 1677, 2113], "pos_weight": [1440, 1606, 2015], "recal": [1440, 2049, 2087], "ell_c": 1440, "l_c": 1440, "p_c": 1440, "imbal": 1440, "pai": [1440, 1606, 2063, 2086], "spacial": 1440, "random_": [1440, 1462, 1534, 1606, 2015, 2034, 2091], "hat": [1441, 1442, 1443, 1489, 1490, 1491, 1567], "terminologi": [1441, 1442, 1443, 1567], "5d": [1443, 1491, 1580, 1633, 1644, 1672, 1704], "volumetr": [1443, 1567, 1580, 1633, 1644, 1704, 1705, 1706], "spatio": [1443, 1567], "in1_featur": 1444, "in2_featur": 1444, "in1": [1444, 1604], "in2": [1444, 1604], "blank": [1446, 1617, 2015, 2018], "zero_infin": [1446, 1617, 2015], "connectionist": [1446, 1617], "unseg": 1446, "longest": [1446, 1759, 1761, 1762, 2051], "input_length": [1446, 1617, 2015, 2051], "target_length": [1446, 1617, 2015], "s_n": 1446, "target_n": 1446, "unbatch": [1446, 1462, 1473, 1478, 1489, 1497, 1533, 1543, 1571, 1627], "s_min": 1446, "toronto": 1446, "edu": [1446, 1834], "icml_2006": 1446, "background": [1446, 1457, 1633, 1672, 2059, 2077], "channel_shuffl": [1447, 2015, 2068], "_left": [1448, 1449, 1450, 1451, 1452, 1453, 1549, 1550, 1551, 1552, 1553, 1554, 1583, 1584, 
1585, 1672], "_right": [1448, 1449, 1450, 1451, 1452, 1453, 1549, 1550, 1551, 1552, 1553, 1554, 1583, 1584, 1585, 1672], "_top": [1449, 1450, 1452, 1453, 1550, 1551, 1553, 1554, 1584, 1585, 1672], "_bottom": [1449, 1450, 1452, 1453, 1550, 1551, 1553, 1554, 1584, 1585, 1672], "_front": [1450, 1453, 1551, 1554, 1585, 1672], "_back": [1450, 1453, 1551, 1554, 1585, 1672], "320": [1450, 1554], "480": [1450, 1554], "0491": [1451, 1583], "7152": [1451, 1583], "0749": [1451, 1583], "3287": [1451, 1583], "8966": [1451, 1583], "1466": [1451, 1583], "2771": [1451, 1583], "6616": [1451, 1583], "4523": [1451, 1583], "1255": [1451, 1583], "6372": [1451, 1583, 1928], "1182": [1451, 1583], "8652": [1451, 1583], "6585": 1452, "4320": [1452, 1938], "8701": 1452, "4649": 1452, "_j": [1454, 1455], "star": [1454, 1455, 1456, 2017], "uue0": 1454, "trou": [1454, 1455, 1456, 1457, 1458, 1459, 1473, 1579], "harder": [1454, 1455, 1456, 1457, 1458, 1459, 1473, 1521, 1522, 1579], "u00e0": [1455, 1456, 1457, 1458, 1459, 1473, 1579], "prod_": [1455, 1456, 1458, 1459, 1472, 1578, 1788], "out_j": 1456, "deconvolut": [1457, 1458, 1459, 1611, 1612, 1613], "_pad": [1457, 1458, 1459], "semi": [1460, 1486, 2013, 2041], "supervis": [1460, 1486], "vert": [1461, 1536, 1615], "_2": [1461, 1615, 1732, 1766], "ast_1": [1461, 1477], "ast_2": [1461, 1477], "ignore_index": [1462, 1534, 1616, 1669, 2015], "unbalanc": [1462, 1534], "d_1": [1462, 1534, 1616, 1669], "d_2": [1462, 1534, 1616, 1669], "d_k": [1462, 1534, 1616, 1669], "_index": [1462, 1534], "logsoftmax": [1462, 1534, 1561, 1652], "nllloss": [1462, 1561, 1669, 1691, 1965], "blend": 1462, "smooth": [1462, 1487, 1559, 1564, 1616, 1642, 1689, 1795], "w_c": 1462, "rethink": [1462, 1616], "incept": [1462, 1616], "spectral_norm": [1463, 1756], "neuron": 1464, "detector": 1464, "dropout1d": [1466, 2015], "_freez": 1469, "sparseadam": [1469, 2069], "0251": 1469, "6902": [1469, 1825], "7172": 1469, "6431": 1469, "0748": 1469, "6969": 1469, "4970": 1469, "3448": 1469, "9685": 1469, "3677": 1469, "7265": 1469, "1685": 1469, "4362": 1469, "4004": [1469, 1909], "9400": 1469, "9124": 1469, "3616": 1469, "1151": 1469, "0309": 1469, "9315": 1469, "1655": [1469, 2046], "9897": [1469, 2049], "0635": 1469, "7895": 1469, "0364": 1469, "6778": 1469, "5803": 1469, "from_pretrain": [1469, 1470, 2106], "bag": [1470, 1625], "per_sample_weight": [1470, 1625, 2015, 2108], "embedding_sum": 1470, "8861": 1470, "4350": 1470, "0523": 1470, "1306": 1470, "5798": 1470, "0044": 1470, "7082": [1470, 1625], "2145": [1470, 1625], "6251": [1470, 1625], "6500": 1470, "satur": [1471, 1626], "alphadropout": [1471, 1599], "160": [1472, 2069], "unfold": [1473, 2015, 2068, 2086], "prod_d": [1473, 1579], "neighborhood": [1473, 1579], "col2im": [1473, 2015, 2068, 2108], "fold_param": [1473, 1579], "input_on": [1473, 1579], "output_ratio": [1474, 1475, 1628, 1629, 2015], "_random_sampl": [1474, 1475, 1628, 1629, 2015], "ben": [1474, 1475, 1628, 1629], "graham": [1474, 1475, 1628, 1629], "oh": [1474, 1475, 1628, 1629], "ow": [1474, 1475, 1628, 1629], "_ratio": [1474, 1475, 1629], "_h": 1474, "13x12": [1474, 1628], "kt": [1475, 1602, 1610, 1613, 1629, 1660], "ot": [1475, 1629], "13x12x11": [1475, 1629], "044715": [1476, 1631], "pack_sequ": [1478, 1497, 1543, 1761, 1763], "bias_ih": [1479, 1498, 1543, 1545], "bias_hh": [1479, 1498, 1543, 1545], "homoscedast": [1480, 1630], "heteroscedast": [1480, 1630], "nix": 1480, "weigend": 1480, "1994": 1480, "icnn": 1480, "94": 1480, "orlando": 1480, "fl": 1480, "usa": [1480, 2087], 
"374138": 1480, "instancenorm": [1481, 2074], "shrinkag": [1482, 1565, 1636, 1694], "mobilenetv3": [1484, 1638], "_val": 1485, "dissimilar": 1486, "l1loss": [1487, 1559, 1646], "outlier": [1487, 1559, 2072, 2073], "huber": [1487, 1559, 1642], "smoothl1loss": [1487, 1642, 1689], "insensit": 1488, "unused_argument1": 1488, "unused_argument2": 1488, "ingredi": [1489, 1490, 1491], "styliz": [1489, 1490, 1491], "rgb": [1490, 1491, 2087], "log_target": [1492, 1645, 2015], "kl": [1492, 1645, 2013], "summaris": 1492, "loss_pointwis": 1492, "batchmean": [1492, 1645], "kl_loss": 1492, "mae": 1493, "proj_siz": [1497, 1544], "f_t": [1497, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "hf": [1497, 1498], "g_t": [1497, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "ig": [1497, 1498], "hg": [1497, 1498], "o_t": 1497, "ho": [1497, 1498], "c_t": 1497, "forget": [1497, 2016, 2017], "1402": 1497, "c_0": [1497, 1498], "c_n": 1497, "w_ii": 1497, "w_if": 1497, "w_ig": 1497, "w_io": 1497, "w_hi": 1497, "w_hf": 1497, "w_hg": 1497, "w_ho": 1497, "b_ii": 1497, "b_if": 1497, "b_ig": 1497, "b_io": 1497, "b_hi": 1497, "b_hf": 1497, "b_hg": 1497, "b_ho": 1497, "weight_hr_l": 1497, "_revers": 1497, "h_1": 1498, "c_1": 1498, "time_step": 1498, "_shape": [1499, 1542, 1716], "sentence_length": 1499, "lazymodulemixin": [1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512], "cls_to_becom": [1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510, 1511, 1512, 1720], "convtranspose1d": [1506, 1611, 1965, 2074], "convtranspose3d": [1508, 1613, 1725, 1965, 2074], "instancenorm1d": [1509, 1643, 2074], "instancenorm2d": [1510, 1643, 2074], "instancenorm3d": [1511, 1643, 2074], "uninitializedparamet": [1512, 1707, 2013], "lrn": 1515, "signal_2d": 1515, "signal_4d": 1515, "output_2d": 1515, "output_4d": 1515, "x_j": [1517, 1561, 1563, 1691, 1907, 2054, 2083], "80827": [1523, 1524, 1525], "unpool": [1523, 1524, 1525], "maxpool3d": [1525, 1660, 1663, 1965, 2074], "unpooled_output": 1525, "t_destin": 1527, "lrelu": [1528, 2057], "hing": [1530, 1532], "sum_i": [1531, 1532, 1560], "nelement": [1531, 1560], "jointli": 1533, "multihead": [1533, 1573], "concat": [1533, 2015, 2052, 2068], "head_1": 1533, "head_h": 1533, "head_i": 1533, "qw_i": 1533, "kw_i": 1533, "vw_i": 1533, "inference_mod": [1533, 1575], "nestedtensor": [1533, 1575, 2036], "multihead_attn": 1533, "e_q": 1533, "e_k": 1533, "e_v": 1533, "_head": [1533, 1571], "merge_mask": 1533, "mask_typ": 1533, "merged_mask": 1533, "nll": 1534, "num_paramet": 1535, "nchannel": 1535, "decai": [1535, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 1804, 1806, 1807, 1810, 1811, 1813, 1884, 2069], "legitim": [1535, 1633, 2050], "vert_p": 1536, "upscale_factor": [1539, 1675, 2015], "upscal": 1539, "video": [1539, 1540, 2035, 2087, 2102], "shi": [1539, 1540], "2016": [1539, 1540, 1577], "_factor": [1539, 1540, 1580, 1581, 1582], "pixel_shuffl": [1539, 2015, 2068, 2074], "downscale_factor": [1540, 1676, 2015], "pixelshuffl": [1540, 1675, 1676, 2074], "downscal": 1540, "pixel_unshuffl": [1540, 2015, 2068, 2074], "log_input": [1541, 1677, 2015], "poisson": [1541, 1677, 1884, 2013, 2015, 2068], "stirl": [1541, 1677], "rm": [1542, 1576, 1577, 1716], "rms_norm": [1542, 1716, 2015, 2068], "hh": [1543, 1545], "h_t_minus_1": 1543, "flatten_paramet": 1544, "3333333333333333": [1546, 1800, 1806, 2015], "leaki": [1546, 1683, 2041], "rectifi": [1546, 1547, 1679, 2041], "empir": 1546, "crelu": 1547, "1603": 
1547, "05201": 1547, "6732632423543772848170429916717": [1555, 1686], "0507009873554804934193349852946": [1555, 1686], "kaiming_norm": 1555, "kaiming_normal_": [1555, 2013, 2019, 2041], "initialis": 1555, "calculate_gain": [1555, 2013, 2019, 2041], "cascad": 1556, "relu2": [1556, 1707], "swish": [1557, 1688], "coin": [1557, 1688], "cnn": [1559, 2072], "ross": 1559, "girshick": 1559, "quadrat": [1559, 2051], "huberloss": [1559, 1642], "w_j": 1562, "soft": [1565, 1635, 1694], "softshrinkag": 1565, "convert_sync_batchnorm": 1567, "r1": 1567, "sync_bn_network": 1567, "ddp_sync_bn_network": 1567, "sync_bn_modul": 1567, "d_model": [1571, 1572, 1573, 1574, 1575], "nhead": [1571, 1572, 1573, 1574, 1575], "num_encoder_lay": 1571, "num_decoder_lay": 1571, "dim_feedforward": [1571, 1573, 1575], "custom_encod": 1571, "custom_decod": 1571, "layer_norm_ep": [1571, 1573, 1575], "norm_first": [1571, 1573, 1575], "ashish": [1571, 1573, 1575], "vaswani": [1571, 1573, 1575], "noam": [1571, 1573, 1575], "shazeer": [1571, 1573, 1575], "niki": [1571, 1573, 1575], "parmar": [1571, 1573, 1575], "jakob": [1571, 1573, 1575], "uszkoreit": [1571, 1573, 1575], "llion": [1571, 1573, 1575], "jone": [1571, 1573, 1575], "aidan": [1571, 1573, 1575], "gomez": [1571, 1573, 1575], "lukasz": [1571, 1573, 1575], "illia": [1571, 1573, 1575], "polosukhin": [1571, 1573, 1575], "6000": [1571, 1573, 1575, 1580, 1831, 1878, 1881], "6010": [1571, 1573, 1575], "feedforward": [1571, 1573, 1575, 2041], "transformer_model": 1571, "word_language_model": 1571, "src_mask": [1571, 1575], "tgt_mask": [1571, 1572, 1573], "memory_mask": [1571, 1572, 1573], "src_key_padding_mask": [1571, 1574, 1575], "tgt_key_padding_mask": [1571, 1572, 1573], "memory_key_padding_mask": [1571, 1572, 1573], "src_is_caus": 1571, "tgt_is_caus": [1571, 1572, 1573], "memory_is_caus": [1571, 1572, 1573], "_mask": [1571, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1755], "_key_padding_mask": 1571, "generate_square_subsequent_mask": 1571, "sz": 1571, "decoder_lay": [1572, 1573], "transformerdecoderlay": 1572, "transformer_decod": 1572, "encoder_lay": [1574, 1575], "enable_nested_tensor": 1574, "mask_check": 1574, "bert": [1574, 2106], "1810": 1574, "04805": 1574, "transformerencoderlay": 1574, "transformer_encod": 1574, "triplet": [1576, 1577, 1701, 1702], "x3": 1576, "balnta": [1576, 1577], "riba": [1576, 1577], "a_i": [1576, 1577, 1886], "p_i": [1576, 1577], "rvert_p": [1576, 1670], "tripletmarginwithdistanceloss": [1576, 1702], "triplet_loss": [1576, 1577], "distance_funct": [1577, 1702], "l_i": 1577, "tripletmarginloss": [1577, 1701], "l_p": [1577, 1670], "pairwisedist": [1577, 1673], "penal": [1577, 2057, 2069], "distant": 1577, "anchor_id": 1577, "positive_id": 1577, "negative_id": 1577, "l_infin": 1577, "bmva": 1577, "bmvc": 1577, "paper119": 1577, "unflattened_s": 1578, "namedtensor": 1578, "namedshap": 1578, "u_1": 1578, "u_n": 1578, "u_i": 1578, "im2col": [1579, 2015, 2068], "2x3": 1579, "3x4": 1579, "inp_unf": 1579, "out_unf": 1579, "recompute_scale_factor": [1580, 1644], "bicub": [1580, 1633, 1644, 1704, 1965], "trilinear": [1580, 1633, 1644, 1704, 1965], "input_3x3": 1580, "4375": 1580, "8125": 1580, "9375": 1580, "2400": [1580, 1945], "1200": [1580, 1856, 2046], "8800": 1580, "4400": [1580, 1945], "7200": 1580, "0400": 1580, "2800": [1580, 1878], "3600": 1580, "5200": 1580, "6400": 1580, "1678": 1584, "4418": 1584, "9466": [1584, 2083], "9604": 1584, "4219": 1584, "5241": 1584, "9162": 1584, "5436": [1584, 
1945], "6446": 1584, "sdpa_kernel": [1586, 1685], "flash_attent": [1586, 1591], "cudnn_attent": 1586, "seq_len_q": 1587, "seq_len_kv": 1587, "causalvari": 1587, "constru": 1587, "causal_upper_left": 1587, "causal_lower_right": 1587, "bsz": 1587, "seqlen_q": 1587, "seqlen_kv": 1587, "head_dim": 1587, "attn_bia": [1587, 1685], "upper_left": [1588, 1590], "lower_right": [1588, 1589], "diagonal_offset": [1588, 1589], "causalbia": [1589, 1590, 1685], "sdpbackend": 1591, "adaptiveavgpool1d": [1592, 2074], "tripl": [1594, 1597], "adaptivemaxpool1d": 1595, "adaptivemaxpool2d": [1596, 1965], "adaptivemaxpool3d": 1597, "avgpool1d": [1600, 2074], "st": [1602, 1610, 1613, 1660, 2113], "avgpool3d": [1602, 1965, 2074], "iT": [1602, 1610, 1613], "padt": [1602, 1610, 1613], "score": [1606, 1685, 1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752, 2013], "dt": [1610, 1613, 2083, 2113], "out_padw": [1611, 1612, 1613], "out_padh": [1612, 1613], "out_padt": 1613, "cosineembeddingloss": 1614, "ctcloss": [1617, 1965], "charact": [1617, 1875, 2014, 2017, 2035, 2077], "elu": [1623, 2015, 2067, 2068, 2074], "embedding_matrix": [1624, 1625], "8490": 1624, "9625": 1624, "6753": 1624, "7761": 1624, "6108": 1624, "6246": 1624, "9751": 1624, "3618": 1624, "4161": [1624, 2082], "2419": 1624, "7383": 1624, "0237": 1624, "7794": 1624, "0528": 1624, "3385": 1624, "8612": 1624, "1867": 1624, "5384": 1624, "8720": 1624, "6262": 1624, "7471": 1624, "embeddingbag": [1625, 1965, 2072, 2074, 2088], "3397": 1625, "5545": 1625, "5893": 1625, "4386": 1625, "5882": 1625, "featurealphadropout": 1626, "gaussiannllloss": 1630, "border": 1633, "affine_grid": [1633, 2015], "extrema": 1633, "pil": [1633, 1644], "overshoot": [1633, 1644, 1704], "gumbel": [1635, 2013], "y_hard": 1635, "y_soft": 1635, "hardtanh": [1640, 2015, 2027, 2068, 2074, 2108], "hingeembeddingloss": 1641, "use_input_stat": [1643, 2015], "antialia": 1644, "anti": 1644, "pillow": [1644, 2087], "buggi": 1644, "inter_nearest": 1644, "104157": 1644, "kldivloss": 1645, "batchsiz": [1645, 1910, 1911, 1912, 1914, 1915, 2082], "leaky_relu": [1649, 2015, 2041, 2068, 2074, 2108], "localresponsenorm": 1651, "_stacklevel": [1652, 1691, 1692, 2015, 2021], "lppool1d": 1654, "lppool2d": 1655, "lppool3d": 1656, "marginrankingloss": 1657, "max_unpool1d": [1658, 2015], "multimarginloss": 1666, "multilabelmarginloss": 1667, "multilabelsoftmarginloss": 1668, "n_0": 1670, "n_": 1670, "n_k": 1670, "everywher": [1671, 1924, 2069], "circularpad2d": 1672, "constantpad2d": 1672, "reflectionpad2d": [1672, 1965], "replicationpad2d": [1672, 1965], "t4d": 1672, "p1d": 1672, "p2d": 1672, "p3d": 1672, "pixelunshuffl": [1676, 2074], "poissonnllloss": 1677, "rrelu": [1684, 2015, 2068], "dropout_p": [1685, 2015], "temp_mask": 1685, "mymodel": [1685, 2017, 2042, 2059, 2065], "ev": 1685, "legend": 1685, "softmarginloss": 1690, "module_kwarg": 1700, "upsample_trilinear": 1705, "fo": 1705, "spatia": 1706, "mixin": [1707, 2013], "dry": 1707, "lazymlp": 1707, "lazylinear": 1707, "lazy_mlp": 1707, "8832e": 1707, "5636e": 1707, "1598e": 1707, "5637e": 1707, "8788e": 1707, "0042e": 1707, "0019": 1707, "lazymodul": 1707, "full_mlp": 1707, "3837": [1707, 1825], "0907": 1707, "6708": 1707, "5223": 1707, "9028": 1707, "2851": 1707, "6813": 1707, "5766": 1707, "8678": 1707, "1320": 1707, "2938": 1707, "0679": [1707, 1955], "2793": [1707, 1773], "1088": 1707, "1795": 1707, "2301": 1707, "2807": 1707, "2479": 1707, "1091": 1707, "has_uninitialized_param": 1707, "initialize_paramet": 1707, "check_reduct": 
1717, "delay_all_reduce_named_param": 1717, "param_to_hook_all_reduc": 1717, "optimizer_param": 1717, "loss_func": [1717, 2077], "consume_prefix_in_state_dict_if_pres": 1717, "nccl2": 1717, "dictat": [1717, 2017], "mebibyt": 1717, "mib": 1717, "detach_": [1717, 2015, 2034, 2074, 2082], "ddp_logging_data": 1717, "can_set_static_graph": 1717, "model_ddp": 1717, "_get_ddp_logging_data": 1717, "divide_by_initial_world_s": 1717, "caught": [1717, 2033], "syncbatchnorm": 1717, "deplet": 1717, "pariti": 1717, "another_input": 1717, "predivid": 1717, "noop": 1717, "encode_and_decod": 1717, "encoded_tensor": 1717, "decoded_tensor": 1717, "error_if_nonfinit": [1721, 1722], "clip_valu": 1723, "nhwc": [1724, 1725, 2085, 2087], "outweigh": [1724, 1725, 1798, 2113], "_convnd": 1726, "conv_w": 1727, "conv_b": 1727, "bn_rm": [1727, 1729], "bn_rv": [1727, 1729], "bn_ep": [1727, 1729], "bn_w": [1727, 1729], "bn_b": [1727, 1729], "linear_w": 1729, "linear_b": 1729, "orthogonal_map": 1731, "use_trivi": 1731, "qq": 1731, "matrix_exp": [1731, 2015, 2068], "caylei": 1731, "thin": [1731, 1827], "manifold": 1731, "register_parametr": [1731, 1732, 1734, 1735, 1766, 2037], "orth_linear": 1731, "parametrizedlinear": [1731, 1732, 1733], "parametrizationlist": [1731, 1732, 1733, 1737], "_orthogon": 1731, "9332e": 1731, "n_power_iter": [1732, 1766], "sn": [1732, 1766], "discrimin": [1732, 1766], "adversari": [1732, 1766], "lipschitz": 1732, "reimplement": [1732, 1766, 2101], "_spectralnorm": 1732, "convtranspos": [1732, 1766], "snm": 1732, "0081": 1732, "amaxbackward0": 1732, "decoupl": [1733, 1769, 1785, 1788, 1794], "1602": [1733, 1769], "07868": [1733, 1769], "_weightnorm": 1733, "original0": [1733, 1734, 1737, 1769], "original1": [1733, 1734, 1737, 1769], "tensor_nam": [1734, 1736, 1737, 1738], "right_invers": [1734, 1737], "out_rnn": 1735, "rnn_cell": 1735, "inbuilt": 1737, "unparametr": 1737, "rankon": 1737, "surject": 1737, "s0_sqrt": 1737, "linear_rank_on": 1737, "matrix_rank": 1737, "leave_parametr": 1738, "unparametris": 1738, "skeleton": 1739, "compute_mask": [1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746], "importance_scor": [1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1748, 1751, 1752], "apply_mask": [1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746], "pruned_tensor": [1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746], "default_mask": [1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746], "_orig": [1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1749, 1751, 1752, 1753, 1754, 1755], "undon": [1739, 1740, 1741, 1742, 1743, 1744, 1745, 1746, 1755], "unprun": [1742, 1743, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754], "basepruningmethod": [1744, 1750], "add_pruning_method": 1744, "pruning_typ": [1744, 1748], "unstructur": [1744, 1748], "ravel": [1744, 2015, 2068], "nonmask": 1744, "bias_mask": [1747, 1749], "pruning_method": 1748, "parameters_to_prun": 1748, "l1unstructur": 1748, "parameters_to_vector": 1748, "forward_pre_hook": [1750, 2057], "random_unstructur": [1750, 1755], "odict_kei": 1751, "weight_orig": 1751, "weight_mask": [1751, 1754], "columns_prun": 1753, "t_modul": [1756, 1757, 1766, 1769], "weight_norm": 1757, "sorted_indic": [1758, 1760, 1761, 1763], "unsorted_indic": [1758, 1760, 1761, 1763], "abc": [1758, 2017], "axbc": 1758, "conform": [1758, 2057], "is_cuda": [1758, 2034, 2068, 2084], "enforce_sort": [1759, 1760, 1761], "unsort": [1759, 1760, 1863], "shortest": 1759, "uncondition": [1759, 2012, 2049, 2114], "pad_sequ": [1760, 1764, 2015, 2068], "padding_valu": [1761, 1762, 
2015], "total_length": [1761, 2051], "seq_unpack": 1761, "lens_unpack": 1761, "packed_sequ": 1763, "unpacked_sequ": 1763, "padded_sequ": 1764, "unpad": 1764, "unstack": 1764, "as_tensor": [1764, 1943, 2015, 2036, 2049, 2068, 2087, 2088, 2089], "unpadded_sequ": 1764, "module_cl": [1765, 2077], "5846e": 1765, "29": [1765, 1891, 2067], "8307e": 1765, "5250e": 1765, "1210e": 1765, "4677e": 1765, "5915e": 1765, "4013e": 1765, "weight_u": 1766, "parameters_and_buff": 1767, "reparamater": 1767, "weight_g": [1769, 2015], "weight_v": 1769, "modern": [1769, 2060, 2097, 2104], "bother": 1769, "102999": 1769, "remove_parametr": 1769, "as_tupl": [1771, 1980], "complexfloat": [1772, 1778], "0425": 1773, "7969": 1773, "2925": 1773, "7229": 1773, "2134": 1773, "0505": 1773, "1408": 1773, "0563": 1773, "0566": 1773, "0732": [1773, 1945], "0687": 1773, "1177": 1773, "2303": [1773, 1921], "1552": 1773, "6148": 1773, "6535": 1773, "8318": 1773, "3987": 1773, "9544": [1773, 1835], "6048": 1773, "7909": 1773, "120": [1775, 2087], "from_valu": 1778, "onnx_typ": 1778, "tensorprotodatatyp": 1778, "torch_c_value_with_type_float": 1778, "from_dtyp": 1778, "jit_type_bas": 1778, "safer": [1778, 2024], "onnxexportererror": [1778, 2065, 2067], "from_onnx_typ": 1778, "_onnx": 1778, "symbolicvalueerror": 1778, "onnx_compat": 1778, "scalar_nam": 1778, "complexhalf": 1778, "complexdoubl": 1778, "float8e5m2": 1778, "float8e4m3fn": 1778, "float8e5m2fnuz": 1778, "float8e4m3fnuz": 1778, "torch_nam": 1778, "int8_t": 1778, "int16_t": 1778, "float8_e5m2": [1778, 2084, 2088], "float8_e4m3fn": [1778, 2084, 2088], "float8_e5m2fnuz": [1778, 2084], "float8_e4m3fnuz": [1778, 2084], "verif": [1779, 1780, 1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 2067], "params_dict": 1779, "export_opt": [1779, 2065], "_excluded_node_kind": 1779, "frozenset": [1779, 2017], "scalarimplicit": [1779, 2068], "prim": [1779, 2014, 2067, 2068], "listconstruct": [1779, 2014], "all_mismatch_leaf_graph_info": 1779, "essential_node_count": 1779, "subgraph": [1779, 2043, 2048, 2067, 2070, 2093, 2102, 2103, 2113], "essential_node_kind": 1779, "export_repro": 1779, "repro_dir": 1779, "repro": [1779, 2104, 2113], "test_": 1779, "test_data_set_0": 1779, "input_0": [1779, 2067], "pb": [1779, 2067], "input_1": [1779, 2067], "output_0": 1779, "output_1": 1779, "find_mismatch": [1779, 2067], "exhibit": [1779, 2067, 2100, 2113], "verificationopt": [1779, 2067], "find_partit": 1779, "has_mismatch": 1779, "pretty_print_mismatch": 1779, "pretty_print_tre": 1779, "graph_info": [1779, 2067], "__2": [1779, 2067], "__1": [1779, 2067], "__3": [1779, 2067], "110": [1779, 2067], "verify_export": 1779, "onnx_graph": 1779, "onnx_out": 1779, "pt_out": 1779, "ignore_non": 1780, "check_shap": 1780, "check_dtyp": [1780, 2089], "onnxbackend": 1780, "onnx_runtime_cpu": 1780, "remained_onnx_input_idx": 1780, "acceptable_error_percentag": 1780, "percentag": [1780, 1809, 1884, 2072], "weight_decai": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798, 2057, 2069], "1e6": 1781, "tensorlist": [1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "prohibit": [1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797, 2046, 2062], "impair": [1781, 1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "ungraph": [1781, 1782, 1784, 1785, 1786, 1788, 1794, 1795, 1796], "removeablehandl": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "register_load_state_dict_pre_hook": 
[1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "register_state_dict_post_hook": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "register_step_post_hook": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "register_step_pre_hook": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "new_arg": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "new_kwarg": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1794, 1795, 1796, 1797, 1798], "momentum_buff": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1791, 1794, 1795, 1796, 1797, 1798], "reevalu": [1781, 1782, 1783, 1784, 1785, 1786, 1787, 1788, 1792, 1794, 1795, 1796, 1797, 1798, 2069], "rho": 1782, "110mm": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "4pt": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "textbf": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "theta_0": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "v_0": [1782, 1784, 1785, 1788, 1794, 1795], "leftarrow": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "u_0": [1782, 1786], "hspace": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "5mm": [1782, 1783, 1784, 1785, 1786, 1788, 1795, 1796, 1797], "nabla_": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "theta_": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "10mm": [1782, 1783, 1784, 1785, 1786, 1788, 1795, 1796, 1797], "v_t": [1782, 1784, 1785, 1788, 1794, 1795], "v_": [1782, 1784, 1785, 1788, 1794, 1795, 1797], "2_t": [1782, 1783, 1784, 1785, 1788, 1794, 1795], "21mm": 1782, "u_t": [1782, 1786], "theta_t": [1782, 1783, 1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "oscil": 1782, "lr_decai": 1783, "initial_accumulator_valu": 1783, "12mm": [1783, 1794], "_sum_0": 1783, "tild": [1783, 1795], "_sum_t": 1783, "_sum_": 1783, "subgradi": 1783, "999": [1784, 1785, 1786, 1788, 1794, 1798, 2062, 2069], "amsgrad": [1784, 1785], "beta_1": [1784, 1785, 1786, 1788, 1794], "beta_2": [1784, 1785, 1786, 1788, 1794], "13mm": [1784, 1785, 1786, 1788, 1794, 1795, 1796, 1797], "textit": [1784, 1785, 1788, 1794, 1797], "m_0": [1784, 1785, 1786, 1788, 1794], "widehat": [1784, 1785, 1788, 1794], "m_t": [1784, 1785, 1786, 1788, 1794], "m_": [1784, 1785, 1786, 1788, 1794, 2082], "002": [1786, 1788], "t_1": 1786, "2e": [1786, 1788], "max_it": 1787, "max_ev": 1787, "tolerance_grad": 1787, "tolerance_chang": 1787, "history_s": 1787, "line_search_fn": 1787, "bfg": 1787, "minfunc": 1787, "intens": [1787, 2060], "param_byt": 1787, "strong_wolf": 1787, "momentum_decai": 1788, "004": 1788, "decoupled_weight_decai": [1788, 1794], "gamma_t": 1788, "psi": [1788, 2083], "_decai": [1788, 1794], "15mm": [1788, 1796, 1797], "mu_t": 1788, "96": 1788, "mu_": 1788, "11mm": 1788, "incorpor": [1788, 1883, 2072], "nesterov": [1788, 1797], "4e": 1788, "nadamw": 1788, "weightdecai": 1794, "18mm": 1794, "rho_": 1794, "6mm": 1794, "rho_t": 1794, "t_2": 1794, "l_t": 1794, "radamw": 1794, "_0": [1795, 2050, 2070, 2115], "av": 1795, "8mm": 1795, "3mm": 1795, "lectur": 1795, "hinton": 1795, "step_siz": [1796, 1813], "resili": [1796, 2060], "eta_": [1796, 1801, 1802], "etaplu": 1796, "etaminu": 1796, "gamma_": [1796, 2083], "0_": 1796, "eta_0": 1796, "i_": [1796, 1945], "eta_t": [1796, 1801, 1802], "dampen": 1797, "sutskev": 1797, "veloc": 1797, "conflat": 1798, "is_spars": [1798, 2034, 2068, 2082, 2084], 
"maskedtensor": [1798, 2082], "rig": 1798, "insist": 1798, "lr_schedul": [1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 2069], "chainabl": [1799, 1809], "081": 1799, "729": [1799, 1812], "6561": [1799, 1975], "59049": 1799, "scheduler1": [1799, 1812, 2069], "constantlr": [1799, 1812], "total_it": [1799, 1800, 1806, 1810, 1812], "scheduler2": [1799, 1812, 2069], "exponentiallr": [1799, 1812, 2069], "get_last_lr": [1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813], "print_lr": [1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813], "is_verbos": [1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813], "__dict__": [1799, 1800, 1801, 1802, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813], "last_epoch": [1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1812, 1813], "mileston": [1800, 1806, 1807, 1812, 2069], "simultan": [1800, 1801, 1806, 1807, 1813, 2043, 2077], "025": [1800, 1806], "t_max": [1801, 2069], "eta_min": [1801, 1802], "anneal": [1801, 1802, 1809, 2069], "cur": [1801, 1802], "sgdr": [1801, 1802], "2k": 1801, "sole": 1801, "t_mult": 1802, "base_lr": [1803, 1809], "max_lr": [1803, 1809], "step_size_up": 1803, "step_size_down": 1803, "scale_fn": 1803, "scale_mod": 1803, "cycle_momentum": [1803, 1809], "base_momentum": [1803, 1809], "max_momentum": [1803, 1809], "cyclic": 1803, "forth": [1803, 1971, 2012, 2071, 2098], "amplitud": [1803, 1809], "triangular2": 1803, "exp_rang": 1803, "bckenstler": 1803, "train_batch": [1803, 1809], "get_lr": 1803, "lr_lambda": [1805, 1808], "lambda1": 1805, "lambda2": 1805, "start_factor": 1806, "end_factor": 1806, "03125": 1806, "0375": 1806, "04375": 1806, "005": [1807, 1813], "lmbda": 1808, "total_step": 1809, "steps_per_epoch": 1809, "pct_start": 1809, "anneal_strategi": [1809, 2069], "div_factor": 1809, "final_div_factor": 1809, "three_phas": 1809, "1cycl": 1809, "fastai": 1809, "unpublish": 1809, "initial_lr": 1809, "min_lr": [1809, 1811], "1e4": 1809, "annihil": 1809, "00075": 1810, "00050": 1810, "00025": 1810, "patienc": 1811, "threshold_mod": 1811, "cooldown": 1811, "stagnat": 1811, "new_lr": 1811, "baselin": 1811, "intoler": 1811, "optimum": 1811, "dynamic_threshold": 1811, "val_loss": 1811, "81": 1812, "mn": 1815, "pca": [1817, 2082], "overestim": [1817, 1929], "nathan": [1817, 1929], "halko": [1817, 1929], "gunnar": [1817, 1929], "martinsson": [1817, 1929], "tropp": [1817, 1929], "probabilist": [1817, 1929], "0909": [1817, 1929], "4061": [1817, 1929], "na": [1817, 1929], "cmath": [1821, 2015], "4142j": 1821, "4331": 1824, "2475": [1824, 1927], "6834": 1824, "2791": 1824, "1875": 1824, "5561": 1824, "4670": 1824, "8020": 1825, "5428": 1825, "5854": 1825, "5261": [1825, 1928], "1857": 1825, "2498": 1825, "1646": [1825, 2046], "0705": [1825, 2049], "0629": 1825, "2962": 1825, "0821": [1825, 1884], "1831": 1825, "type1": [1826, 2015], "type2": [1826, 2015], "2117": 1828, "9765": 1828, "1707": 1828, "4884": 1828, "5661": 1828, "5795": 1828, "5280": 1828, "9206": 1828, "quantization_schem": [1829, 1830, 1831, 1832, 1833], "int_repr": [1829, 1830, 2015, 2068], "nchw": [1831, 2087], "qx": [1831, 1832, 1833], "00001": 1831, "max_pool1d": [1832, 2015, 2068, 2074], "max_pool2d": [1833, 2015, 2065, 2068, 2074], "quasirandom": 1834, "scrambl": 1834, "sobol": 1834, "quasi": 1834, "21201": 1834, "web": 1834, "unsw": 1834, "au": [1834, 2070], "fkuo": 1834, "art": 1834, "owen": 1834, 
"niederreit": 1834, "xing": 1834, "466": 1834, "489": 1834, "decemb": 1834, "1998": 1834, "zh": 1834, "vychisl": 1834, "phy": 1834, "784": 1834, "802": 1834, "1967": 1834, "soboleng": 1834, "draw_base2": 1834, "base2": 1834, "fast_forward": 1834, "142": 1835, "283": 1835, "570": 1835, "359": 1835, "9894": 1835, "2503": 1836, "3525": 1836, "5673": 1836, "8237": 1836, "5781": 1836, "6879": 1836, "3816": 1836, "7249": 1836, "0998": 1836, "im": [1840, 2043], "1436": 1840, "9966": 1840, "3426": 1840, "6366": 1840, "5954": 1840, "8929": 1840, "0923": 1840, "1719": 1840, "4709": 1840, "1996": 1840, "4595": 1846, "4314": 1846, "n2": 1849, "n3": 1849, "negat": [1852, 1937, 2017], "is_neg": [1852, 2015, 2068], "equidist": 1856, "inexact": 1856, "1234567": 1856, "1230": 1856, "vstack": [1857, 2015, 2068, 2082], "0370": 1858, "2970": 1858, "5420": 1858, "9105": 1858, "8351": 1858, "pickle_protocol": [1859, 2070], "default_protocol": 1859, "_use_new_zipfile_seri": [1859, 2062], "zipfil": [1859, 2070], "sorted_sequ": [1863, 2015], "sorter": [1863, 2015], "sorted_sequence_1d": 1863, "select_copi": [1865, 2015, 2068], "92701": [1867, 2085], "get_default_devic": 1867, "henry2019": 1871, "briefli": [1871, 2052], "nine": 1871, "fma": 1871, "10x": [1871, 1929], "1904": 1871, "06376": 1871, "denorm": [1872, 2060], "sse3": 1872, "aarch64": 1872, "323": 1872, "88131e": 1872, "324": 1872, "interop": 1873, "intraop": 1874, "edgeitem": 1875, "linewidth": 1875, "sci_mod": 1875, "shamelessli": 1875, "repr": [1875, 2017], "sane": 1875, "_tensor_str": 1875, "_formatt": 1875, "12345": 1875, "1235": 1875, "excess": [1877, 2104], "24j": 1878, "8000j": 1878, "9600j": 1878, "4472": [1878, 2057], "8944j": 1878, "expit": [1879, 2083], "2222": [1881, 1943], "4444": 1881, "8889": 1881, "4901e": 1882, "4000e": 1882, "0077e": 1882, "4923e": 1882, "waveform": [1883, 1884, 1885], "1564": 1883, "4540": 1883, "8910": 1883, "9877": 1883, "1423": [1883, 1906], "4154": 1883, "8413": [1883, 2083], "0302": 1884, "2231": 1884, "6065": 1884, "5400e": 1884, "3546e": 1884, "4788e": 1884, "8316e": 1884, "02": [1884, 1885, 1890], "3534e": 1884, "0065e": [1885, 1890], "1875e": [1885, 1890], "3937e": [1885, 1890], "2465e": [1885, 1890], "8250e": [1885, 1890], "9858e": [1885, 1890], "1365e": [1885, 1890], "8659e": [1885, 1890], "4658e": [1885, 1890], "3941e": [1885, 1890], "5400": 1886, "3376": 1886, "4200": 1886, "9136": 1886, "wit": [1886, 2014], "0955": [1886, 1887, 1889], "3455": [1886, 1887, 1889], "6545": [1886, 1887, 1889], "9045": [1886, 1887, 1889], "0800": [1887, 1888], "1876": [1887, 1888], "4601": [1887, 1888], "7700": [1887, 1888], "9723": [1887, 1888], "1679": 1888, "3979": 1888, "6821": 1888, "9121": 1888, "1170": 1889, "9698": 1889, "36358": 1891, "z_n": [1891, 2043], "48917": 1891, "2z_n": 1891, "13659": 1891, "3z_n": 1891, "01064": 1891, "4z_n": 1891, "u03c0": 1891, "sidelob": 1891, "transact": 1891, "acoust": 1891, "speech": 1891, "84": 1891, "91": 1891, "feb": 1891, "1981": 1891, "tassp": 1891, "1163506": 1891, "heinzel": 1891, "spectrum": [1891, 2060, 2116], "dft": 1891, "februari": 1891, "holomet": 1891, "fnal": 1891, "gov": 1891, "gh_fft": 1891, "nutal": 1891, "general_ham": 1891, "6280e": 1891, "2698e": 1891, "1052e": 1891, "9826e": 1891, "5461": [1893, 2057], "1347": 1893, "7266": 1893, "2746": 1893, "5194": 1893, "1343": 1893, "4032": 1893, "2711": 1893, "5380": 1895, "8632": 1895, "1265": 1895, "9399": 1895, "5644": 1895, "9744": 1895, "1268": 1895, "2162": 1900, "6719": 1900, "3332": 1900, "5793": [1900, 2057], "0061": 
1900, "6058": 1900, "9497": 1900, "5071": 1900, "3343": 1900, "9553": 1900, "0960": 1900, "derivati": [1901, 1905], "to_sparse_coo": 1902, "sparsr": 1903, "run_my_model": 1903, "prev_checks_en": 1903, "check_invari": [1903, 1910, 1911, 1912, 1913, 1914, 1915, 2082], "sparse_csr_tensor": [1903, 2015, 2068, 2082], "z_": [1905, 2043, 2083], "bigoplus_": 1905, "kj": 1905, "bigoplu": 1905, "sparseaddmmbackward0": 1905, "y1": [1905, 2065, 2098], "sparsemmreduceimplbackward0": 1905, "y2": [1905, 2065, 2098], "sparsiti": [1906, 2013, 2024, 2041], "spy": 1906, "2847": 1906, "7805": 1906, "1900": [1906, 2061], "to_dens": [1906, 1908, 2015, 2068, 2082], "3903": 1906, "x_k": 1907, "6438": 1909, "6467": 1909, "3411": 1909, "0918": 1909, "5348": 1909, "0634": 1909, "0494": 1909, "0646": 1909, "1844": 1909, "1276": 1909, "1874": 1909, "6334": 1909, "9682": 1909, "5340": 1909, "7483": 1909, "4512": 1909, "4073": 1909, "8901": 1909, "3183": 1909, "7539": 1909, "6596": 1909, "ncolblock": [1910, 2082], "array_list": [1910, 1911, 1912, 1914, 1915], "nrow": [1910, 1911, 1912, 1914, 1915, 2082], "ncol": [1910, 1911, 1912, 1914, 1915, 2082], "denses": [1910, 1911, 1912, 1914, 1915, 2082], "check_sparse_tensor_invari": [1910, 1911, 1912, 1913, 1914, 1915, 2082], "nrowblock": [1911, 2082], "compressed_indic": [1912, 2015, 2082], "plain_indic": [1912, 2015, 2082], "compressed_dim_s": [1912, 2082], "is_coalesc": [1913, 2015, 2068, 2082], "rdinat": 1913, "prerequisit": [1913, 2048], "coalescion": 1913, "sparsetensor": 1913, "_indic": [1913, 2068, 2082, 2104], "0755": [1917, 1918], "0226": [1917, 1918], "0831": [1917, 1918], "4806": [1917, 1918], "0112": 1917, "2883": 1917, "6933": 1917, "0457": 1918, "0069": 1918, "2310": 1918, "2345": [1921, 1953], "1229": 1921, "1863": 1921, "2959": [1922, 1923, 1972, 1973], "8101": [1922, 1923, 1972, 1973], "5027": [1922, 1923, 1972, 1973], "3270": [1922, 1923, 1972, 1973], "5905": [1922, 1923, 1972, 1973], "6538": [1922, 1923, 1972, 1973, 2057], "3330": [1922, 1923, 1972, 1973], "5596": [1922, 1923, 1972, 1973], "6548": [1922, 1923, 1972, 1973], "1264": [1922, 1923, 1972, 1973], "5080": [1922, 1923, 1928, 1972, 1973, 2057], "6420": [1922, 1923, 1972, 1973], "1992": [1922, 1923, 1972, 1973], "0311": [1922, 2083], "7477": 1922, "2204": 1922, "9087": 1922, "2620": 1923, "0028": [1923, 1949], "0957": 1923, "6038": 1923, "0645": [1923, 1973], "4485": [1923, 1973], "8707": [1923, 1973], "0665": [1923, 1973], "taper": 1924, "librosa": 1924, "omega": 1924, "win": [1924, 2043], "_fft": [1924, 2015], "1133": 1927, "2958": 1927, "5475": 1927, "0569": 1927, "0737": 1927, "3429": 1927, "9138": 1927, "9337": 1927, "6864": [1927, 1953], "1132": 1927, "7892": 1927, "1003": 1927, "5688": 1927, "3637": 1927, "9906": 1927, "5197": 1927, "4598": 1927, "3708": 1927, "6217": 1927, "435": 1927, "1335": 1927, "3135": 1927, "gesdd": 1928, "conquer": 1928, "gesvdjbatch": 1928, "fortran": 1928, "2364": 1928, "7752": 1928, "7201": 1928, "7394": 1928, "0504": 1928, "3371": 1928, "5296": 1928, "3550": 1928, "5569": 1928, "2445": 1928, "1414": 1928, "4027": 1928, "0287": 1928, "5434": 1928, "1946": 1928, "8833": 1928, "3679": 1928, "4296": 1928, "2890": 1928, "6604": 1928, "2717": 1928, "2618": 1928, "4234": 1928, "2481": 1928, "4733": 1928, "3289": [1928, 2062], "0315": 1928, "7806": 1928, "0199": 1928, "8766": 1928, "4809": 1928, "4054": 1928, "7600": 1928, "8611": 1928, "2594": 1928, "4373": 1928, "6531e": 1928, "a_big": 1928, "6503e": 1928, "adequ": 1929, "choosen": 1929, "swapax": [1931, 2015, 2068, 
2086], "faithfulli": [1935, 2025, 2100], "1995": 1938, "4608": 1938, "7702": 1938, "4875": 1938, "9158": 1938, "5872": 1938, "6929": 1938, "6932": 1938, "take_along_axi": [1940, 2050], "max_idx": 1940, "sorted_idx": 1940, "2027": 1941, "7687": 1941, "4412": 1941, "3856": 1941, "5930": 1941, "9859": 1941, "4722": 1941, "3366": 1941, "8986": 1942, "7279": 1942, "1745": [1942, 2049], "7156": 1942, "8257": 1942, "2553": 1942, "11111": 1943, "222222": 1943, "3333333": 1943, "1111": 1943, "array_split": 1944, "i_d": 1945, "k_": 1945, "4532": 1945, "4874": 1945, "5018": 1945, "4796": [1945, 2061], "5162": 1945, "5306": 1945, "2922": 1945, "7556": 1945, "2741": 1945, "3161": 1945, "0704": 1945, "0187": 1945, "4079": 1945, "3126": 1945, "8744": 1945, "8223": 1945, "9445": 1945, "4117": 1945, "7780": 1945, "7193": 1945, "4867": 1945, "3204": 1945, "5513": 1945, "4737": [1945, 1976], "2850": 1945, "2573": 1945, "5997": 1945, "sparsebsr": 1949, "sparsecsc": 1949, "sparsebsc": 1949, "9893": 1949, "5809": 1949, "1669": 1949, "7299": 1949, "4942": [1949, 2057], "y_0": 1950, "x_diff": 1950, "y_diff": 1950, "riemann": [1950, 2043, 2054, 2083], "badli": 1952, "cloned_coeffici": 1952, "1527": 1952, "0753": 1952, "7986": 1952, "0210": 1952, "3513": 1952, "5492": 1952, "7403": 1952, "0243": 1952, "7841": 1952, "9046": 1952, "5405": 1952, "9320": 1952, "9270": 1952, "2826": 1952, "lbrace": [1953, 1954, 1955, 1956], "rbrace": [1953, 1954, 1955, 1956], "0813": 1953, "8619": 1953, "7105": 1953, "0935": 1953, "1380": 1953, "3409": [1953, 2061], "2219": 1953, "5653": 1953, "2521": 1953, "2544": 1953, "3461": 1953, "4785": 1953, "4477": 1953, "6049": 1953, "6368": 1953, "8775": 1953, "7145": 1953, "1502": 1953, "2716": 1953, "1243": 1953, "5413": 1953, "3615": 1953, "0614": 1953, "7344": 1953, "3164": 1953, "7648": 1953, "4024": 1953, "0978": 1953, "col": [1954, 1956, 2015], "2309": 1955, "5207": 1955, "0049": 1955, "2072": 1955, "0680": 1955, "6602": 1955, "3480": 1955, "5211": 1955, "4573": 1955, "5876": 1955, "0794": [1955, 2083], "8373": 1955, "6654": 1955, "2604": 1955, "5235": 1955, "2447": 1955, "9556": 1955, "2919": 1955, "1768": 1955, "4333": 1955, "3146": [1955, 2057], "6576": 1955, "0432": 1955, "9348": [1955, 2083], "4410": 1955, "9888": 1955, "3337": 1955, "6556": 1955, "4798": 1955, "5466": 1958, "8008": 1958, "9079": 1958, "unique_consecut": [1961, 2015, 2068], "inverse_indic": [1961, 1962], "a_unique_dim0": 1961, "5678": [1963, 2077], "78": 1963, "alon": [1965, 2016, 2070], "put_": [1965, 2015], "index_add": [1965, 2015, 2068], "index_select": [1965, 2015, 2068, 2082, 2108], "fractionalmaxpool2d": 1965, "fractionalmaxpool3d": 1965, "reflectionpad1d": 1965, "reflectionpad3d": 1965, "replicationpad1d": 1965, "replicationpad3d": 1965, "histc": [1965, 2015, 2068], "bincount": [1965, 2015, 2068], "kthvalu": [1965, 2015, 2034, 2068], "avg_pool3d_backward_cuda": 1965, "for_tensor": 1966, "for_modul": 1966, "for_packed_sequ": 1966, "for_storag": 1966, "unsupported_dtyp": 1966, "privateuse1": [1966, 1968, 2071, 2076], "rename_privateuse1_backend": [1966, 2013], "is_foo": 1966, "frames_to_skip": 1967, "maximum_number_of_fram": 1967, "_register_device_modul": 1968, "backendmodul": 1968, "get_amp_supported_dtyp": 1968, "_is_in_bad_fork": 1968, "bad_fork": 1968, "get_rng_stat": [1968, 2013, 2076], "set_rng_stat": [1968, 2013, 2076], "extend_dispatch": 1968, "pytorch_open_registration_exampl": 1968, "nicer": 1969, "slot": [1970, 2097, 2114], "alexandr": 1971, "theophil": 1971, "0631": 1972, "5590": 1972, "4893": 
1972, "8258": 1972, "5926": 1973, "0056": 1973, "3646": 1973, "vecdot": 1974, "mi": [1975, 1976], "6116": 1975, "5772": [1975, 2083], "4606": 1975, "9120": 1975, "0786": 1975, "7497": 1975, "6623": 1975, "5772j": 1975, "9120j": 1975, "7497j": 1975, "6623j": 1975, "3839j": 1976, "2098": 1976, "6699j": 1976, "3470": 1976, "9451j": 1976, "5174": 1976, "3136j": 1976, "6699": 1976, "9451": 1976, "3136": 1976, "vertic": [1978, 1979, 2069, 2087, 2104], "atleast_2d": [1979, 2015, 2068], "3139": 1980, "3898": 1980, "1657": 1980, "0383": 1980, "8785": 1980, "1089": 1980, "_xpudeviceproperti": 1993, "hubconf": 2012, "entrypoint_nam": 2012, "_resnet18": 2012, "smoother": [2012, 2024], "load_state_dict_from_url": [2012, 2028], "2gb": 2012, "relative_path_to_checkpoint": 2012, "pth": [2012, 2028, 2053, 2062], "dirnam": [2012, 2053], "__file__": [2012, 2063, 2070, 2106], "5c106cde": [2012, 2028], "force_reload": 2012, "skip_valid": 2012, "trust_repo": 2012, "repo_own": 2012, "repo_nam": 2012, "ref": [2012, 2082, 2100, 2113], "torchhub": 2012, "github_token": 2012, "mute": 2012, "repo_or_dir": 2012, "resnet50": [2012, 2053, 2087, 2106], "resnet50_weight": [2012, 2053], "imagenet1k_v1": 2012, "download_url_to_fil": 2012, "hash_prefix": 2012, "temporary_fil": 2012, "sha256": [2012, 2028], "s3": [2012, 2028, 2063], "amazonaw": [2012, 2028, 2063], "model_dir": [2012, 2028], "check_hash": [2012, 2028], "hub_dir": [2012, 2028], "get_dir": [2012, 2028], "ext": [2012, 2028], "eight": [2012, 2028], "hash": [2012, 2015, 2017, 2028, 2068], "succinct": 2012, "set_dir": 2012, "path_to_hub_dir": 2012, "torch_hom": 2012, "xdg_cache_hom": [2012, 2046], "reiniti": [2012, 2025], "path_importer_cach": 2012, "subpackag": [2012, 2070], "offend": [2012, 2105], "classifi": [2013, 2017, 2067, 2069, 2072, 2087], "pypi": 2013, "conda": [2013, 2053, 2063], "hip": 2013, "javadoc": 2013, "uninitializedbuff": 2013, "anomali": 2013, "can_device_access_p": 2013, "current_blas_handl": 2013, "device_of": 2013, "get_arch_list": 2013, "get_device_cap": 2013, "get_device_nam": 2013, "get_device_properti": 2013, "get_gencode_flag": 2013, "get_sync_debug_mod": 2013, "ipc_collect": 2013, "memory_usag": 2013, "set_stream": 2013, "set_sync_debug_mod": 2013, "power_draw": 2013, "clock_rat": 2013, "outofmemoryerror": 2013, "jiter": 2013, "tunableop": 2013, "sanit": 2013, "_record_memory_histori": [2013, 2115], "_snapshot": [2013, 2115], "_dump_snapshot": [2013, 2115], "empty_cach": [2013, 2046, 2055, 2115], "set_per_process_memory_fract": 2013, "current_allocated_memori": 2013, "driver_allocated_memori": 2013, "deferredmtiacallerror": 2013, "idiom": [2013, 2063], "nnpack": 2013, "openmp": [2013, 2045, 2063, 2090, 2102, 2104], "xeon": [2013, 2045, 2110], "pipelinestag": 2013, "pipelineschedul": 2013, "face": [2013, 2049, 2070, 2087, 2103], "dive": [2013, 2048, 2054, 2070, 2102, 2107], "pathwis": 2013, "exponentialfamili": 2013, "binomi": [2013, 2015, 2068], "chi2": 2013, "continuousbernoulli": 2013, "dirichlet": 2013, "fishersnedecor": 2013, "halfcauchi": 2013, "halfnorm": 2013, "inversegamma": 2013, "kumaraswami": 2013, "lkjcholeski": 2013, "laplac": 2013, "lognorm": 2013, "lowrankmultivariatenorm": 2013, "mixturesamefamili": 2013, "multinomi": [2013, 2015, 2068], "multivariatenorm": 2013, "negativebinomi": 2013, "onehotcategor": 2013, "relaxedbernoulli": 2013, "logitrelaxedbernoulli": 2013, "relaxedonehotcategor": 2013, "studentt": 2013, "transformeddistribut": 2013, "vonmis": 2013, "weibul": 2013, "wishart": 2013, "hub": [2013, 2028, 2106], 
"misc": 2013, "normal_": [2013, 2015, 2034, 2041, 2046, 2091], "constant_": [2013, 2041], "ones_": [2013, 2041], "zeros_": [2013, 2041], "eye_": [2013, 2019, 2041], "dirac_": [2013, 2019, 2041], "xavier_uniform_": [2013, 2041], "xavier_normal_": [2013, 2041, 2057], "kaiming_uniform_": [2013, 2041], "trunc_normal_": [2013, 2041], "orthogonal_": [2013, 2019, 2041, 2057], "sparse_": [2013, 2041], "swa": 2013, "ema": 2013, "powersgd": 2013, "remotemodul": 2013, "doublestorag": [2013, 2084], "floatstorag": [2013, 2084], "halfstorag": [2013, 2084], "longstorag": [2013, 2084], "intstorag": [2013, 2084], "shortstorag": [2013, 2084], "charstorag": [2013, 2084], "bytestorag": [2013, 2084], "boolstorag": [2013, 2084], "bfloat16storag": [2013, 2084], "complexdoublestorag": [2013, 2084], "complexfloatstorag": [2013, 2084], "quint8storag": [2013, 2084], "qint8storag": [2013, 2084], "qint32storag": [2013, 2084], "quint4x2storag": [2013, 2084], "quint2x4storag": [2013, 2084], "make_tensor": [2013, 2089], "assert_allclos": [2013, 2089], "generate_methods_for_privateuse1_backend": 2013, "get_cpp_backtrac": 2013, "set_modul": 2013, "sampler": 2013, "mobile_optim": 2013, "optimize_for_mobil": [2013, 2027], "model_zoo": 2013, "load_url": [2013, 2028], "tensorboard": [2013, 2030, 2071], "summarywrit": [2013, 2030, 2087], "module_track": 2013, "moduletrack": [2013, 2029], "iinfo": 2013, "__config__": [2013, 2045], "__future__": 2013, "set_log": [2013, 2023, 2113], "miscellan": [2013, 2116], "torchdata": 2013, "torchserv": 2013, "torchtext": 2013, "xla": [2013, 2021, 2062, 2085, 2100], "disadvantag": 2014, "gentl": 2014, "beam": 2014, "traced_bar": 2014, "myscriptmodul": 2014, "103": [2014, 2016], "939": [2014, 2016], "116": [2014, 2016], "779": [2014, 2016], "123": [2014, 2016], "my_script_modul": [2014, 2016], "ins": 2014, "pytorch_jit": 2014, "traced_fn": 2014, "disable_jit_exampl": 2014, "printer": 2014, "rv": 2014, "rv0": 2014, "rv1": 2014, "ssa": 2014, "block0": 2014, "block1": 2014, "loop_in_traced_fn": 2014, "input_tupl": 2014, "fill_row_zero": 2014, "tracerwarn": 2014, "nr": 2014, "09115803241729736": 2014, "6782537698745728": 2014, "cpu_model": 2014, "gpu_model": 2014, "sample_input_cpu": 2014, "sample_input_gpu": 2014, "traced_cpu": 2014, "traced_gpu": 2014, "use_gpu": 2014, "__constants__": [2014, 2016], "my_module_inst": 2014, "redeclar": 2014, "nn_module_inst": 2014, "my_scripted_model": 2014, "pep": [2014, 2016, 2067, 2102], "526": [2014, 2016, 2067], "script_method": 2014, "implicitly_compiled_method": 2014, "another_forward": 2014, "unused_method": 2014, "some_fn": 2014, "ever": [2014, 2084, 2115], "some_fn2": 2014, "some_fn3": 2014, "some_fn4": 2014, "my_dict": [2014, 2016], "my_int": [2014, 2016], "my_const": 2014, "make_dict": 2014, "nnc": 2014, "nvfuser": 2014, "__and__": 2015, "__iand__": 2015, "__ilshift__": 2015, "__ior__": 2015, "__irshift__": 2015, "__ixor__": 2015, "__lshift__": 2015, "__or__": 2015, "__rshift__": 2015, "__xor__": 2015, "absolute_": 2015, "acos_": [2015, 2034], "addbmm_": 2015, "addcdiv_": 2015, "addcmul_": 2015, "addmv_": [2015, 2034], "addr_": 2015, "align_a": [2015, 2034, 2035, 2068], "align_to": [2015, 2034, 2035, 2068], "ellipsis_idx": 2015, "aminmax": [2015, 2068], "arccos_": 2015, "arccosh_": 2015, "arcsin_": [2015, 2082], "arcsinh_": 2015, "arctan2_": 2015, "arctan_": 2015, "arctanh_": 2015, "argwher": [2015, 2068], "as_strided_": 2015, "as_strided_scatt": [2015, 2068, 2108], "asin_": [2015, 2034, 2082], "asinh_": [2015, 2034], "atan_": [2015, 2034], "atanh_": 
[2015, 2034], "baddbmm_": 2015, "bernoulli_": [2015, 2034, 2091], "bitwise_and_": 2015, "bitwise_left_shift_": 2015, "bitwise_not_": [2015, 2034], "bitwise_or_": 2015, "bitwise_right_shift_": 2015, "bitwise_xor_": 2015, "broadcast_to": [2015, 2068], "cauchy_": [2015, 2034, 2091], "ceil_": [2015, 2034], "clamp_max": [2015, 2068], "clamp_max_": 2015, "clamp_min": [2015, 2068], "clamp_min_": 2015, "clip_": 2015, "conj_physical_": 2015, "copysign_": 2015, "cos_": [2015, 2034, 2043], "cosh_": [2015, 2034], "count_nonzero": [2015, 2068], "cummax": [2015, 2068], "cummin": [2015, 2068], "cumprod_": 2015, "cumsum_": 2015, "deg2rad": [2015, 2034, 2068, 2082], "deg2rad_": [2015, 2034, 2082], "outdim": 2015, "diagonal_scatt": [2015, 2068], "digamma_": [2015, 2034], "div_": [2015, 2034, 2082], "divide_": 2015, "dsplit": [2015, 2068], "eq_": 2015, "erf_": [2015, 2034], "erfc_": [2015, 2034], "erfinv_": [2015, 2034], "exp2": [2015, 2068, 2083, 2087, 2108], "exp2_": 2015, "exp_": [2015, 2034], "expm1_": [2015, 2034], "exponential_": [2015, 2034, 2091], "fill_diagonal_": 2015, "fix_": 2015, "fliplr": [2015, 2068], "flipud": [2015, 2068], "float_power_": 2015, "floor_": [2015, 2034], "floor_divide_": [2015, 2082], "fmax": [2015, 2068, 2108], "fmin": [2015, 2068, 2108], "fmod_": 2015, "frac_": [2015, 2034], "frexp": [2015, 2068, 2108], "gcd_": 2015, "ge_": 2015, "ger": [2015, 2068], "get_devic": [2015, 2034, 2068, 2082, 2084, 2085], "greater_": 2015, "greater_equal_": 2015, "gt_": 2015, "hardshrink": [2015, 2068], "heavisid": [2015, 2068], "heaviside_": 2015, "hsplit": [2015, 2068, 2086], "hypot_": 2015, "i0_": 2015, "igamma_": 2015, "igammac_": 2015, "index_fil": [2015, 2034, 2068], "index_reduc": [2015, 2068], "is_complex": [2015, 2065, 2068, 2082, 2085], "is_contigu": [2015, 2034, 2068, 2086], "is_floating_point": [2015, 2034, 2068, 2082, 2085], "is_infer": [2015, 2068], "is_same_s": [2015, 2068, 2082], "is_set_to": [2015, 2068], "is_sign": [2015, 2034, 2068, 2082], "isclos": [2015, 2068], "isfinit": [2015, 2060, 2068, 2108], "isinf": [2015, 2068, 2082, 2108], "isneginf": [2015, 2068, 2082], "isposinf": [2015, 2068, 2082], "isreal": [2015, 2068], "istft": [2015, 2068], "kron": [2015, 2068], "lcm_": 2015, "ldexp_": 2015, "le_": 2015, "lerp_": 2015, "less_": 2015, "less_equal_": 2015, "lgamma_": 2015, "log10_": [2015, 2034], "log1p_": [2015, 2034, 2082], "log2_": [2015, 2034], "log_normal_": [2015, 2034, 2091], "logaddexp2": [2015, 2068], "logcumsumexp": [2015, 2068], "logical_and_": 2015, "logical_not_": [2015, 2034], "logical_or_": 2015, "logical_xor_": 2015, "logit_": 2015, "lt_": 2015, "masked_fil": [2015, 2034, 2036, 2068, 2082], "masked_scatt": [2015, 2068], "masked_select": [2015, 2034, 2068], "matrix_pow": [2015, 2068], "moveaxi": [2015, 2068], "msort": [2015, 2068], "multiply_": 2015, "mvlgamma_": 2015, "nan_to_num_": 2015, "nanmedian": [2015, 2034, 2068], "nansum": [2015, 2068], "narrow_copi": [2015, 2068, 2082], "ne_": 2015, "neg_": [2015, 2034, 2082], "negative_": [2015, 2082], "new_empty_strid": [2015, 2068], "new_ful": [2015, 2046, 2068], "new_on": [2015, 2068], "nextafter_": 2015, "nonzero_stat": [2015, 2068], "not_equal_": 2015, "polygamma_": 2015, "pow_": [2015, 2034], "q_per_channel_scal": [2015, 2068], "q_per_channel_zero_point": [2015, 2068], "q_scale": [2015, 2068], "q_zero_point": [2015, 2068], "rad2deg": [2015, 2034, 2068, 2082], "rad2deg_": [2015, 2034, 2082], "reciprocal_": [2015, 2034], "record_stream": [2015, 2046, 2068], "refine_nam": [2015, 2034, 2035, 2068], "relu_": [2015, 
2074], "remainder_": 2015, "rename_": [2015, 2034, 2035], "renorm_": 2015, "reshape_a": [2015, 2036, 2068, 2086], "resize_a": [2015, 2068], "the_templ": 2015, "resize_as_": [2015, 2034, 2082], "resize_as_sparse_": 2015, "retains_grad": [2015, 2068], "roll": [2015, 2068], "rot90": [2015, 2068], "round_": [2015, 2034], "rsqrt_": [2015, 2034], "select_scatt": [2015, 2068, 2108], "sgn_": [2015, 2034], "sigmoid_": [2015, 2034, 2074], "sign_": [2015, 2034], "sinc_": 2015, "sinh_": [2015, 2034], "slice_invers": [2015, 2068], "slice_scatt": [2015, 2068, 2108], "smm": [2015, 2068, 2082], "sparse_resize_": 2015, "sparse_resize_and_clear_": 2015, "split_with_s": [2015, 2068, 2086, 2108], "sqrt_": [2015, 2034], "square_": 2015, "squeeze_": [2015, 2074], "sspaddmm": [2015, 2068, 2082], "sub_": [2015, 2034, 2082], "subtract_": 2015, "sum_to_s": [2015, 2068], "swapaxes_": 2015, "swapdim": [2015, 2068, 2086], "swapdims_": 2015, "take_along_dim": [2015, 2068], "tan_": [2015, 2034], "tanh_": [2015, 2034, 2074], "tensor_indices_or_sect": 2015, "to_mkldnn": [2015, 2068], "to_padded_tensor": [2015, 2036, 2068], "to_sparse_bsc": [2015, 2068], "to_sparse_bsr": [2015, 2068, 2082], "to_sparse_csc": [2015, 2068, 2082], "transpose_": [2015, 2082], "true_divide_": 2015, "trunc_": [2015, 2034], "type_a": [2015, 2034, 2068], "out0": [2015, 2062], "unsafe_chunk": [2015, 2068], "unsafe_split": [2015, 2068], "unsafe_split_with_s": [2015, 2068], "unsqueeze_": [2015, 2074], "view_a": [2015, 2068, 2086], "vsplit": [2015, 2068, 2086], "xlogy_": 2015, "adaptive_avg_pool2d": [2015, 2049, 2068, 2074], "adaptive_max_pool1d_with_indic": [2015, 2049], "adaptive_max_pool2d_with_indic": 2015, "adaptive_max_pool3d_with_indic": 2015, "alpha_dropout": [2015, 2068], "assert_int_or_pair": 2015, "arg_nam": 2015, "binary_cross_entropi": [2015, 2068], "binary_cross_entropy_with_logit": [2015, 2068], "celu": [2015, 2068], "dropout2d": 2015, "dropout3d": 2015, "feature_alpha_dropout": [2015, 2068], "fractional_max_pool2d_with_indic": 2015, "fractional_max_pool3d_with_indic": 2015, "gaussian_nll_loss": 2015, "glu": [2015, 2068], "gumbel_softmax": 2015, "hardsigmoid": [2015, 2068, 2074], "hardswish": [2015, 2068, 2074], "huber_loss": [2015, 2068], "instance_norm": [2015, 2068, 2074], "local_response_norm": 2015, "lp_pool1d": 2015, "lp_pool2d": 2015, "lp_pool3d": 2015, "max_pool1d_with_indic": [2015, 2068], "max_pool3d_with_indic": [2015, 2068, 2108], "mish": [2015, 2068], "multi_head_attention_forward": 2015, "embed_dim_to_check": 2015, "in_proj_weight": 2015, "in_proj_bia": 2015, "bias_k": 2015, "bias_v": 2015, "out_proj_weight": 2015, "out_proj_bia": 2015, "use_separate_proj_weight": 2015, "q_proj_weight": 2015, "k_proj_weight": 2015, "v_proj_weight": 2015, "static_k": 2015, "static_v": 2015, "multilabel_soft_margin_loss": 2015, "relu6": [2015, 2068, 2074], "silu": [2015, 2036, 2068], "softsign": 2015, "tanhshrink": 2015, "adaptive_avg_pool1d": [2015, 2049, 2068, 2074, 2108], "adaptive_max_pool1d": [2015, 2049, 2068], "affine_grid_gener": [2015, 2068], "alias_copi": [2015, 2068], "align_tensor": [2015, 2068], "alpha_dropout_": 2015, "as_strided_copi": [2015, 2068], "atleast_1d": [2015, 2068], "avg_pool1d": [2015, 2068, 2074, 2108], "bartlett_window": [2015, 2019, 2068], "cudnn_en": 2015, "batch_norm_backward_elemt": 2015, "invstd": 2015, "sum_di": 2015, "sum_dy_xmu": 2015, "batch_norm_backward_reduc": 2015, "input_g": 2015, "bias_g": 2015, "out3": 2015, "batch_norm_elemt": [2015, 2068], "batch_norm_gather_stat": [2015, 2068], 
"batch_norm_gather_stats_with_count": [2015, 2068], "batch_norm_stat": [2015, 2068], "batch_norm_update_stat": [2015, 2068], "blackman_window": [2015, 2019, 2068], "block_diag": [2015, 2068], "can_cast": [2015, 2068], "ccol_indices_copi": [2015, 2068], "celu_": 2015, "choose_qparams_optim": [2015, 2068], "n_bin": 2015, "bit_width": 2015, "col_indices_copi": [2015, 2068], "column_stack": [2015, 2068], "constant_pad_nd": [2015, 2068, 2108], "conv_tbc": [2015, 2068], "crow_indices_copi": [2015, 2068], "cudnn_affine_grid_gener": [2015, 2068], "cudnn_batch_norm": [2015, 2068], "exponential_average_factor": 2015, "cudnn_convolut": [2015, 2068], "cudnn_convolution_add_relu": [2015, 2068], "cudnn_convolution_relu": [2015, 2068], "cudnn_convolution_transpos": [2015, 2068], "cudnn_grid_sampl": [2015, 2068], "cudnn_is_accept": [2015, 2068], "cumulative_trapezoid": [2015, 2068], "detach_copi": [2015, 2068, 2112], "diagonal_copi": [2015, 2068], "dropout_": [2015, 2027], "embedding_renorm_": 2015, "physical_layout": [2015, 2108], "empty_quant": [2015, 2068], "anyenumtyp": 2015, "expand_copi": [2015, 2068], "fake_quantize_per_channel_affin": [2015, 2068], "fbgemm_linear_fp16_weight": [2015, 2068], "packed_weight": 2015, "fbgemm_linear_fp16_weight_fp32_activ": [2015, 2068], "fbgemm_linear_int8_weight": [2015, 2068], "col_offset": 2015, "weight_scal": 2015, "weight_zero_point": 2015, "fbgemm_linear_int8_weight_fp32_activ": [2015, 2068], "fbgemm_linear_quantize_weight": [2015, 2068], "fbgemm_pack_gemm_matrix_fp16": [2015, 2068], "fbgemm_pack_quantized_matrix": [2015, 2068], "feature_alpha_dropout_": 2015, "feature_dropout": [2015, 2068], "feature_dropout_": 2015, "frobenius_norm": [2015, 2068], "from_fil": [2015, 2068, 2084], "fused_moving_avg_obs_fake_qu": [2015, 2068], "observer_on": 2015, "fake_quant_on": 2015, "running_min": 2015, "running_max": 2015, "per_row_fake_qu": 2015, "symmetric_qu": 2015, "interpolation_mod": [2015, 2108], "has_bias": 2015, "gru_cel": [2015, 2068], "w_ih": 2015, "w_hh": 2015, "hamming_window": [2015, 2019, 2068], "histogramdd": [2015, 2068], "hspmm": [2015, 2068, 2082], "indices_copi": [2015, 2068], "is_autocast_cpu_en": [2015, 2068], "is_autocast_en": [2015, 2068], "is_grad_en": [2015, 2068], "is_vulkan_avail": [2015, 2068], "isin": [2015, 2068], "kaiser_window": [2015, 2068], "lstm_cell": [2015, 2068], "meshgrid": [2015, 2068], "miopen_batch_norm": [2015, 2068], "miopen_convolut": [2015, 2068], "miopen_convolution_add_relu": [2015, 2068], "miopen_convolution_relu": [2015, 2068], "miopen_convolution_transpos": [2015, 2068], "miopen_depthwise_convolut": [2015, 2068], "miopen_rnn": [2015, 2068], "weight_stride0": 2015, "dropout_st": 2015, "out4": 2015, "mkldnn_adaptive_avg_pool2d": [2015, 2068], "mkldnn_convolut": [2015, 2068], "mkldnn_linear_backward_weight": 2015, "bias_defin": 2015, "mkldnn_max_pool2d": [2015, 2068], "mkldnn_max_pool3d": [2015, 2068], "mkldnn_rnn_layer": [2015, 2068], "weight0": 2015, "weight1": 2015, "weight2": 2015, "weight3": 2015, "hx_": 2015, "cx_": 2015, "native_batch_norm": [2015, 2068], "save_mean": 2015, "save_invstd": 2015, "native_channel_shuffl": [2015, 2068], "native_dropout": [2015, 2068, 2108], "native_group_norm": [2015, 2068, 2108], "hxw": [2015, 2108], "native_layer_norm": [2015, 2068, 2108], "native_norm": [2015, 2068, 2082], "norm_except_dim": [2015, 2068], "nuclear_norm": [2015, 2068], "pairwise_dist": [2015, 2068], "permute_copi": [2015, 2068], "promote_typ": [2015, 2068, 2089], "quantize_per_channel": [2015, 2068, 2072], 
"quantize_per_tensor_dynam": [2015, 2068, 2072], "quantized_batch_norm": [2015, 2068], "quantized_gru_cel": [2015, 2068], "packed_ih": 2015, "packed_hh": 2015, "col_offsets_ih": 2015, "col_offsets_hh": 2015, "scale_ih": 2015, "scale_hh": 2015, "zero_point_ih": 2015, "zero_point_hh": 2015, "quantized_lstm_cel": [2015, 2068], "quantized_max_pool1d": [2015, 2068], "quantized_max_pool2d": [2015, 2068], "quantized_max_pool3d": [2015, 2068], "quantized_rnn_relu_cel": [2015, 2068], "quantized_rnn_tanh_cel": [2015, 2068], "rand_lik": [2015, 2019, 2046, 2068, 2091, 2098], "randint_lik": [2015, 2019, 2068, 2091], "randn_lik": [2015, 2019, 2036, 2068, 2091], "randperm": [2015, 2019, 2068, 2091, 2108], "result_typ": [2015, 2068], "scalar1": 2015, "scalar2": 2015, "rnn_relu": [2015, 2068], "rnn_relu_cel": [2015, 2068], "rnn_tanh": [2015, 2068], "rnn_tanh_cel": [2015, 2068], "row_indices_copi": [2015, 2068], "row_stack": [2015, 2068], "rrelu_": 2015, "rsub": [2015, 2068], "scalar_tensor": [2015, 2068, 2108], "searchsort": [2015, 2068], "segment_reduc": [2015, 2068], "selu_": 2015, "slice_copi": [2015, 2068], "sparse_bsc_tensor": [2015, 2068, 2082], "sparse_bsr_tensor": [2015, 2068, 2082], "sparse_compressed_tensor": [2015, 2068, 2082], "sparse_csc_tensor": [2015, 2068, 2082], "split_copi": [2015, 2068], "split_with_sizes_copi": [2015, 2068], "squeeze_copi": [2015, 2068], "std_mean": [2015, 2034, 2068], "t_copi": [2015, 2068], "threshold_": 2015, "transpose_copi": [2015, 2068], "trapz": [2015, 2068], "tril_indic": [2015, 2019, 2068], "triu_indic": [2015, 2019, 2068], "unbind_copi": [2015, 2068], "unfold_copi": [2015, 2068], "unsqueeze_copi": [2015, 2068], "values_copi": [2015, 2068], "vander": [2015, 2019, 2068], "var_mean": [2015, 2034, 2068], "view_as_complex_copi": [2015, 2068], "view_as_real_copi": [2015, 2068], "_nn": 2015, "adaptive_max_pool2d": [2015, 2068], "avg_pool2d": [2015, 2068, 2074, 2108], "conv_depthwise3d": [2015, 2068], "cross_entropy_loss": [2015, 2068], "input_scal": 2015, "elu_": 2015, "flatten_dense_tensor": [2015, 2068], "random_sampl": 2015, "gelu_": 2015, "hardsigmoid_": [2015, 2074], "hardswish_": 2015, "hardtanh_": [2015, 2074], "leaky_relu_": 2015, "log_sigmoid": [2015, 2068], "mish_": 2015, "mkldnn_linear": [2015, 2068], "mkldnn_reorder_conv2d_weight": [2015, 2068], "mkldnn_reorder_conv3d_weight": [2015, 2068], "nll_loss2d": [2015, 2068], "nll_loss_nd": [2015, 2068], "reflection_pad3d": [2015, 2068, 2108], "relu6_": 2015, "rrelu_with_nois": [2015, 2068], "rrelu_with_noise_": 2015, "silu_": 2015, "slow_conv3d": [2015, 2068], "slow_conv_dilated2d": [2015, 2068], "slow_conv_dilated3d": [2015, 2068], "slow_conv_transpose2d": [2015, 2068], "slow_conv_transpose3d": [2015, 2068], "softshrink": [2015, 2068], "thnn_conv2d": [2015, 2068], "unflatten_dense_tensor": [2015, 2068], "upsample_bicubic2d": [2015, 2068], "scales_h": 2015, "scales_w": 2015, "upsample_bilinear2d": [2015, 2068, 2108], "upsample_linear1d": [2015, 2068], "upsample_nearest1d": [2015, 2068], "upsample_nearest2d": [2015, 2068, 2108], "upsample_nearest3d": [2015, 2068], "scales_d": 2015, "upsample_trilinear3d": [2015, 2068], "fft_fftfreq": [2015, 2068], "fft_fftshift": [2015, 2068], "fft_hfft2": [2015, 2068], "fft_hfftn": [2015, 2068], "fft_ifftshift": [2015, 2068], "fft_ihfft2": [2015, 2068], "fft_ihfftn": [2015, 2068], "fft_rfftfreq": [2015, 2068], "_linalg": 2015, "linalg_cross": [2015, 2068], "linalg_det": [2015, 2068], "linalg_diagon": [2015, 2068], "eigvec": 2015, "linalg_ldl_factor": [2015, 2068], 
"linalg_ldl_factor_ex": [2015, 2068], "linalg_ldl_solv": [2015, 2068], "linalg_lu": [2015, 2068], "linalg_lu_factor": [2015, 2068], "linalg_lu_factor_ex": [2015, 2068], "linalg_lu_solv": [2015, 2068], "linalg_matmul": [2015, 2068], "linalg_matrix_exp": [2015, 2068], "linalg_matrix_pow": [2015, 2068], "linalg_multi_dot": [2015, 2068], "linalg_norm": [2015, 2068], "linalg_pinv": [2015, 2068], "linalg_solve_ex": [2015, 2068], "linalg_solve_triangular": [2015, 2068], "linalg_vand": [2015, 2068], "linalg_vecdot": [2015, 2068], "linalg_vector_norm": [2015, 2068], "_nest": 2015, "nested_to_padded_tensor": [2015, 2068], "_spars": 2015, "sparse_sampled_addmm": [2015, 2068], "_special": 2015, "special_airy_ai": [2015, 2068], "special_bessel_j0": [2015, 2068], "special_bessel_j1": [2015, 2068], "special_bessel_y0": [2015, 2068], "special_bessel_y1": [2015, 2068], "special_chebyshev_polynomial_t": [2015, 2068], "special_chebyshev_polynomial_u": [2015, 2068], "special_chebyshev_polynomial_v": [2015, 2068], "special_chebyshev_polynomial_w": [2015, 2068], "special_digamma": [2015, 2068], "special_entr": [2015, 2068], "special_erf": [2015, 2068], "special_erfc": [2015, 2068], "special_erfcx": [2015, 2068], "special_erfinv": [2015, 2068], "special_exp2": [2015, 2068], "special_expit": [2015, 2068], "special_expm1": [2015, 2068], "special_gammainc": [2015, 2068], "special_gammaincc": [2015, 2068], "special_gammaln": [2015, 2068], "special_hermite_polynomial_h": [2015, 2068], "special_i0": [2015, 2068], "special_i1": [2015, 2068], "special_laguerre_polynomial_l": [2015, 2068], "special_legendre_polynomial_p": [2015, 2068], "special_log1p": [2015, 2068], "special_log_ndtr": [2015, 2068], "special_log_softmax": [2015, 2068], "special_logit": [2015, 2068], "special_logsumexp": [2015, 2068], "special_modified_bessel_i0": [2015, 2068], "special_modified_bessel_i1": [2015, 2068], "special_modified_bessel_k0": [2015, 2068], "special_modified_bessel_k1": [2015, 2068], "special_multigammaln": [2015, 2068], "special_ndtr": [2015, 2068], "special_ndtri": [2015, 2068], "special_polygamma": [2015, 2068], "special_psi": [2015, 2068], "special_round": [2015, 2068], "special_scaled_modified_bessel_k0": [2015, 2068], "special_scaled_modified_bessel_k1": [2015, 2068], "special_shifted_chebyshev_polynomial_t": [2015, 2068], "special_shifted_chebyshev_polynomial_u": [2015, 2068], "special_shifted_chebyshev_polynomial_v": [2015, 2068], "special_shifted_chebyshev_polynomial_w": [2015, 2068], "special_sinc": [2015, 2068], "special_softmax": [2015, 2068], "special_spherical_bessel_j0": [2015, 2068], "special_xlog1pi": [2015, 2068], "special_xlogi": [2015, 2068], "special_zeta": [2015, 2068], "tval": 2015, "is_accept": 2015, "rect": 2015, "magic": [2015, 2018, 2091, 2103], "__complex__": 2015, "__float__": 2015, "__int__": 2015, "hex": [2015, 2017, 2068], "__hex__": 2015, "oct": [2015, 2068], "__oct__": 2015, "divmod": [2015, 2017, 2068], "chr": [2015, 2017, 2068], "int_float": 2015, "float_int": 2015, "fab": [2015, 2068], "int_int": 2015, "float_float": 2015, "complex_complex": 2015, "int_complex": 2015, "complex_int": 2015, "float_complex": 2015, "complex_float": [2015, 2084], "scalar_scalar": 2015, "int_to_int": 2015, "modf": [2015, 2068], "mathremaind": [2015, 2068], "programm": [2016, 2017, 2072], "tn": 2016, "subtyp": 2016, "an_error": 2016, "noreturn": [2016, 2017], "classvar": [2016, 2017], "anystr": [2016, 2017], "nomin": 2016, "newtyp": [2016, 2017], "tup": [2016, 2017], "emptydatastructur": 2016, "my_list": 2016, 
"aug_add_x": 2016, "inc": [2016, 2017], "assign_x": [2016, 2017], "polymorph": 2016, "sum_pair": 2016, "red": [2016, 2017], "green": [2016, 2017, 2105], "enum_fn": [2016, 2017], "my_variable_nam": 2016, "top_level_method": 2016, "other_help": 2016, "ten": [2016, 2087], "my_submodul": 2016, "tuple_or_list": 2016, "a_tupl": 2016, "is_script": [2016, 2017, 2068], "unsupported_linear_op": 2016, "is_trac": [2016, 2017], "univers": 2016, "typing_extens": 2016, "a_dict": 2016, "some_dict": 2016, "delimit": [2017, 2018], "tstype": 2017, "tsmoduletyp": 2017, "tsalltyp": 2017, "tsmetatyp": 2017, "tsprimitivetyp": 2017, "tsstructuraltyp": 2017, "tsnominaltyp": 2017, "myclass": [2017, 2070], "printabl": [2017, 2070], "sortabl": 2017, "nevertheless": [2017, 2079], "inc_first_el": 2017, "cpufloattyp": 2017, "tstupl": 2017, "tsnamedtupl": 2017, "tslist": 2017, "tsdict": 2017, "tsoption": 2017, "tsunion": 2017, "tsfutur": 2017, "tsrref": 2017, "tsawait": 2017, "await": [2017, 2018, 2077], "keytyp": 2017, "tensortyp": [2017, 2112], "_await": 2017, "mytupl": 2017, "scripted_inc": 2017, "_annotatednamedtupl": 2017, "_namedtupleannot": 2017, "_unannotatednamedtupl": 2017, "mistak": [2017, 2051, 2103], "nameerror": 2017, "remedi": 2017, "tsbuiltinclass": 2017, "tscustomclass": 2017, "tsenum": 2017, "tstensor": 2017, "subtensor": [2017, 2049, 2114], "subwithtorchfunct": 2017, "script_g": 2017, "tsclassdef": 2017, "methoddefinit": 2017, "__torch__": [2017, 2067], "class2": 2017, "tsenumdef": 2017, "tsenumtyp": 2017, "memberidentifi": 2017, "intenum": 2017, "intflag": 2017, "basecolor": 2017, "compli": [2017, 2070], "classbodydefinit": 2017, "moduleobj": 2017, "testmodul": 2017, "dosometh": 2017, "strateg": 2017, "congruent": 2017, "python3": 2017, "unannot": 2017, "python3annot": 2017, "paramannot": 2017, "returnannot": 2017, "funcormethodbodi": 2017, "mypyannot": 2017, "localvarannot": 2017, "setval": 2017, "moduletyp": [2017, 2070], "classidentifi": 2017, "instanceattridentifi": 2017, "offset_": 2017, "tsstructualtyp": 2017, "grammar": 2017, "chapter": [2017, 2054], "floattyp": 2017, "inttyp": 2017, "stringtyp": 2017, "devicetyp": 2017, "tupletyp": 2017, "listtyp": 2017, "enclosur": 2017, "parenth_form": 2017, "list_displai": 2017, "dict_displai": 2017, "legal": 2017, "stringliter": 2017, "floatnumb": 2017, "expression_list": 2017, "list_comprehens": 2017, "comp_for": 2017, "target_list": 2017, "or_expr": 2017, "key_datum_list": 2017, "dict_comprehens": 2017, "key_datum": 2017, "ongo": [2017, 2067, 2075, 2077], "enclos": 2017, "datum": [2017, 2093], "attributeref": 2017, "slice_list": 2017, "slice_item": 2017, "proper_slic": 2017, "argument_list": 2017, "desugar": [2017, 2103], "u_expr": 2017, "tightli": [2017, 2057], "m_expr": 2017, "a_expr": 2017, "shift_expr": 2017, "and_expr": 2017, "xor_expr": 2017, "comp_oper": 2017, "__contains__": 2017, "or_test": 2017, "and_test": 2017, "not_test": 2017, "conditional_express": 2017, "starred_item": 2017, "expression_stmt": 2017, "starred_express": 2017, "assignment_express": 2017, "assignment_stmt": 2017, "augmented_assignment_stmt": 2017, "augtarget": 2017, "augop": 2017, "annotated_assignment_stmt": 2017, "raise_stmt": 2017, "assert_stmt": 2017, "return_stmt": 2017, "del_stmt": 2017, "pass_stmt": 2017, "print_stmt": 2017, "break_stmt": 2017, "continue_stmt": 2017, "if_stmt": 2017, "while_stmt": 2017, "for_stmt": 2017, "with_stmt": 2017, "with_item": 2017, "tuple_stmt": 2017, "getattr_stmt": 2017, "hasattr_stmt": 2017, "zip_stmt": 2017, "iterable1": 2017, 
"iterable2": 2017, "enumerate_stmt": 2017, "five": [2017, 2062], "add_stat_valu": 2017, "sugaredvalu": 2017, "__abs__": 2017, "bytearrai": 2017, "delattr": 2017, "exec": 2017, "__index__": 2017, "isint": 2017, "issubclass": [2017, 2049], "ndigit": 2017, "__import__": [2017, 2070], "notimpl": [2017, 2018, 2049, 2103], "rpc_sync": [2017, 2068, 2077, 2078, 2079], "synonym": 2017, "_fork": [2017, 2045], "_wait": [2017, 2045], "lexic": 2018, "indent": 2018, "coroutin": [2018, 2101], "__del__": [2018, 2043], "__bytes__": 2018, "__slots__": 2018, "metaclass": 2018, "mro": 2018, "__r": 2018, "__": [2018, 2112], "bytesliter": 2018, "imagnumb": 2018, "parenthes": 2018, "ifs": 2018, "customiz": [2018, 2092, 2102, 2116], "compound": 2018, "adaptivelogsoftmaxwithloss": 2019, "opcheck": 2021, "torch_librari": [2021, 2049, 2095], "test_util": 2021, "test_schema": 2021, "test_autograd_registr": 2021, "test_faketensor": 2021, "test_aot_dispatch_dynam": 2021, "opoverloadpacket": 2021, "customopdef": 2021, "custom_op": [2021, 2067, 2103], "mylib": 2021, "numpy_mul": 2021, "mutates_arg": 2021, "numpy_add": 2021, "x_np": 2021, "z_np": 2021, "numpy_sin": 2021, "register_autograd": 2021, "sample_input": [2021, 2057], "718": 2021, "my_linear": [2021, 2029], "collis": 2021, "prone": [2021, 2033, 2059, 2103], "y_np": 2021, "numpy_sin_cpu": 2021, "numpy_sin_inplac": 2021, "register_kernel": 2021, "x_cpu": [2021, 2046], "x_cuda": 2021, "backward_fn": 2021, "functionctx": 2021, "keyword_only_input": 2021, "setup_context_fn": 2021, "custom_ops_landing_pag": 2021, "custom_linear": 2021, "_subclass": [2021, 2025, 2065, 2100, 2103], "fake_tensor": [2021, 2065, 2100, 2103], "faketensormod": [2021, 2065, 2100, 2103], "custom_nonzero": 2021, "get_ctx": 2021, "new_dynamic_s": 2021, "tracing_mod": 2021, "impl_abstract": [2021, 2103], "qualnam": 2021, "abstractimplctx": 2021, "2020": 2021, "googl": 2021, "colab": [2021, 2070, 2103], "dispatch_kei": [2021, 2102], "keynam": 2021, "alias_analysi": 2021, "conserv": [2021, 2024, 2046, 2103], "with_keyset": 2021, "fallthrough_kernel": 2021, "fallthrough": 2021, "div_cpu": 2021, "impl_": 2021, "operator_nam": 2021, "mysin": 2021, "off_by_default": 2023, "_registr": 2023, "spammi": [2023, 2102, 2113], "compiled_autograd": 2023, "born": 2024, "citizen": 2024, "afterthought": 2024, "intuit": [2024, 2106], "alik": 2024, "grai": 2024, "systemat": 2024, "onboard": 2024, "maskedarrai": 2024, "masked_tensor": 2024, "principl": [2025, 2046, 2104], "meta_util": 2025, "undocu": 2025, "fidel": 2025, "torch_force_weights_only_load": 2026, "torch_autograd_shutdown_wait_limit": 2026, "mobil": [2027, 2072, 2104], "blocklist": [2027, 2070], "mobileoptimizertyp": 2027, "conv_bn_fus": 2027, "correspondingli": 2027, "prepack": [2027, 2068], "insert_fold_prepack_op": 2027, "arm": [2027, 2072], "remove_dropout": 2027, "hoist": 2027, "hoist_conv_packed_param": 2027, "fuse_add_relu": 2027, "vulkan": 2027, "vulkan_automatic_gpu_transf": 2027, "freeze_modul": 2027, "script_modul": 2027, "optimization_blocklist": 2027, "preserved_method": 2027, "_mobileoptimizertyp": 2027, "recursivescriptmodul": [2027, 2062], "download": [2028, 2053, 2063, 2087, 2106], "friendli": 2029, "flopcountermod": 2029, "is_bw": 2029, "infrequ": 2030, "window_s": 2030, "max_sampl": 2030, "cap": 2030, "hasn": [2030, 2070], "_monitor": 2030, "data_value_t": 2030, "eventhandlerhandl": 2030, "register_event_handl": 2030, "unregister_event_handl": 2030, "tensorboardeventhandl": 2030, "shader": [2031, 2058], "processor": [2031, 2104], 
"metalperformanceshad": 2031, "shared_memori": 2033, "abruptli": 2033, "get_all_sharing_strategi": 2033, "get_sharing_strategi": 2033, "set_sharing_strategi": 2033, "new_strategi": 2033, "di": [2033, 2098, 2101, 2102], "abnorm": [2033, 2059], "forev": [2033, 2048], "asap": 2033, "queue_2": 2033, "x_clone": 2033, "segfault": [2033, 2060, 2103], "shm_open": [2033, 2084], "seriou": [2033, 2046, 2103], "torch_shm_manag": 2033, "unnot": 2033, "spawncontext": 2033, "has_nam": 2034, "is_shar": [2034, 2084], "is_sparse_csr": [2034, 2068, 2084], "is_tensor": [2034, 2082], "items": [2034, 2068], "unifies_names_from_input_tensor": 2034, "nbyte": [2034, 2068, 2084], "ndimens": 2034, "register_post_accumulate_grad_hook": [2034, 2043], "position": [2034, 2035], "unnam": [2034, 2035], "misalign": 2034, "inher": 2034, "collaps": [2034, 2068, 2108], "disappear": 2034, "img": [2035, 2087], "renamed_img": 2035, "coexist": 2035, "wildcard": [2035, 2070, 2112], "somewher": [2035, 2056], "scale_channel": 2035, "more_img": 2035, "named_tensor": 2035, "named_img": 2035, "flat_img": 2035, "named_flat_img": 2035, "unflattened_named_img": 2035, "grad_loss": 2035, "8107": 2035, "6357": 2035, "0783": 2035, "rename_map": 2035, "greedili": 2035, "unment": 2035, "49152": 2035, "datastructur": 2036, "seamless": 2036, "nested_tensor": 2036, "nt": 2036, "vein": 2036, "as_nested_tensor": 2036, "irregular": 2036, "indistinguish": 2036, "2286": 2036, "4842": 2036, "7827": 2036, "6745": [2036, 2083], "0658": 2036, "1247": 2036, "4078": 2036, "8083": 2036, "2871": 2036, "5559": 2036, "9885": 2036, "4074": 2036, "4855": 2036, "0733": 2036, "8285": 2036, "6858": 2036, "7030": 2036, "3481": 2036, "0236": 2036, "jag": [2036, 2100], "fake_grad": 2036, "nt2": 2036, "6862": 2036, "1282": 2036, "1031": 2036, "0464": 2036, "3276": 2036, "9967": 2036, "0054": 2036, "8972": 2036, "9174": 2036, "4995": 2036, "8546": 2036, "7194": 2036, "2918": 2036, "1846": 2036, "8793": 2036, "5183": 2036, "6447": 2036, "8009": 2036, "8468": 2036, "9832": 2036, "5272": 2036, "pt_infer": 2036, "pt_larg": 2036, "pt_small": 2036, "bitwidth": [2037, 2057, 2072, 2082], "asymmetr": [2037, 2072, 2075], "alter": [2038, 2042, 2049, 2109], "attention_bias": 2040, "sacrific": [2041, 2046], "dirac": 2041, "xavier": 2041, "glorot": 2041, "bengio": 2041, "fan": 2041, "_in": 2041, "_out": [2041, 2054, 2108], "fan_in": 2041, "kaim": 2041, "delv": 2041, "surpass": 2041, "he": 2041, "_mode": 2041, "fan_out": 2041, "redrawn": 2041, "sax": 2041, "2013": 2041, "marten": 2041, "scaler": [2042, 2046, 2053], "clip_grad_value_": 2042, "unscale_": 2042, "optimizer2": 2042, "batch_per_it": 2042, "iters_to_accumul": 2042, "num_proc": 2042, "grad_param": 2042, "grad_norm": 2042, "scaled_grad_param": 2042, "inv_scal": 2042, "get_scal": 2042, "proce": [2042, 2046, 2077, 2113], "optimizer0": 2042, "output0": 2042, "model0": 2042, "model1": 2042, "loss0": 2042, "loss1": 2042, "hundr": [2042, 2056, 2113], "imped": 2042, "poor": [2042, 2043], "dp_model": 2042, "imported_funct": 2042, "mymm": 2042, "myfloat32func": 2042, "fwd_output": 2042, "cleaner": 2043, "mapsto": 2043, "educ": 2043, "_save": 2043, "_saved_self": 2043, "convex": 2043, "concav": 2043, "togglabl": 2043, "drawback": 2043, "0011": 2043, "creator": [2043, 2077, 2079], "hogwild": 2043, "train_fn": 2043, "graphtask": 2043, "copyslic": 2043, "mutex": 2043, "gotten": 2043, "curiou": 2043, "\u2102": 2043, "yj": 2043, "holomorph": 2043, "fulfil": [2043, 2050, 2070, 2100], "mathematician": 2043, "studi": [2043, 2071], "beauti": 2043, 
"somewhat": [2043, 2046, 2082], "0906": 2043, "4835": 2043, "audio": [2043, 2053, 2087, 2101], "\u211d": 2043, "_output": 2043, "vj": 2043, "handi": [2043, 2046], "selfdeletingtempfil": 2043, "tmp_dir": 2043, "uuid": 2043, "uuid4": 2043, "temp_fil": 2043, "forbidden": 2043, "savedtensor": 2043, "_raw_saved_": 2043, "_raw_saved_self": 2043, "save_on_disk_threshold": 2043, "tensor_or_sctf": 2043, "_saved_oth": 2043, "4th": 2044, "backcompat": 2044, "broadcast_warn": 2044, "userwarn": 2044, "compute_z": 2045, "w_z": 2045, "w_y": 2045, "tbb": 2045, "aten_thread": 2045, "omp": 2045, "mkl_thread": 2045, "mkldnn_cpu_runtim": 2045, "use_mkldnn": 2045, "use_tbb": 2045, "use_openmp": 2045, "ON": [2045, 2055, 2056], "set_num_interop_thread": 2045, "get_num_interop_thread": 2045, "set_num_thread": [2045, 2059], "get_num_thread": 2045, "omp_num_thread": [2045, 2090], "mkl_num_thread": [2045, 2090], "e5": 2045, "oversubscript": 2045, "memory manag": 2046, "optimize pytorch": 2046, "irrespect": 2046, "spread": 2046, "cuda2": [2046, 2055], "broadli": [2046, 2072], "set_float_32_matmul_precis": 2046, "a100": [2046, 2110, 2113], "a_ful": 2046, "10240": 2046, "b_full": 2046, "ab_ful": 2046, "7277": 2046, "ab_tf32": 2046, "016": 2046, "ga100": 2046, "1747": 2046, "relative_error": 2046, "0022": 2046, "ab_fp32": 2046, "0031": 2046, "000039": 2046, "7x": 2046, "globalcontext": 2046, "setallowtf32cubla": 2046, "setallowtf32cudnn": 2046, "bench_gemm_transform": 2046, "allow_fp16_reduc": 2046, "4048": 2046, "1634": 2046, "1639": 2046, "4056": 2046, "1670": 2046, "1661": 2046, "4080": 2046, "1664": 2046, "1658": 2046, "1651": 2046, "4104": 2046, "1677": 2046, "1674": 2046, "4128": 2046, "1796": [2046, 2057], "2519": 2046, "5096": 2046, "2144": 2046, "2149": 2046, "2766": 2046, "5120": 2046, "2142": 2046, "9728": 2046, "3875": 2046, "5779": 2046, "16384": [2046, 2106], "6182": 2046, "9656": 2046, "setallowfp16reductioncubla": 2046, "instabl": 2046, "setallowbf16reductioncubla": 2046, "invis": [2046, 2101, 2104, 2113], "start_ev": 2046, "elapsed_time_m": 2046, "pointless": 2046, "exploit": 2046, "paragraph": [2046, 2054], "initial_grad": 2046, "memory_alloc": [2046, 2055], "memory_snapshot": [2046, 2055], "memcheck": 2046, "option2": 2046, "value2": 2046, "max_split_size_mb": 2046, "mb": 2046, "borderlin": 2046, "memory_summari": 2046, "roundup_power2_divis": 2046, "1280": 2046, "1536": 2046, "1792": 2046, "256mb": 2046, "512mb": 2046, "1gb": [2046, 2111], "knob": [2046, 2100], "garbage_collection_threshold": 2046, "reclaim": [2046, 2098], "release_cached_block": 2046, "unfavor": 2046, "expandable_seg": 2046, "2mb": 2046, "sliver": 2046, "pinned_use_cuda_host_regist": 2046, "cudahostregist": 2046, "cudahostalloc": 2046, "malloc": [2046, 2115], "pinned_num_register_thread": 2046, "cuda_runtime_api": 2046, "iostream": [2046, 2095], "fpic": 2046, "my_malloc": 2046, "ssize_t": 2046, "ptr": 2046, "cout": [2046, 2095], "endl": [2046, 2056, 2095], "my_fre": 2046, "cudapluggablealloc": 2046, "new_alloc": 2046, "change_current_alloc": 2046, "_cuda_clearcublasworkspac": 2046, "lru": 2046, "geometri": 2046, "1023": 2046, "zeta": [2046, 2068, 2083, 2108], "use_pytorch_kernel_cach": 2046, "pytorch_kernel_cache_path": 2046, "store_tru": 2046, "disable_cuda": 2046, "assess": 2046, "cudagetdevicecount": 2046, "cuinit": 2046, "nvmldevicegetcount_v2": 2046, "poison": 2046, "aforement": [2046, 2059], "train_load": [2046, 2053, 2059], "x_gpu": 2046, "x_cpu_long": 2046, "y_cpu": 2046, "y_gpu": 2046, "y_cpu_long": 2046, "new_tensor": 
2046, "overus": 2046, "cudagraphlaunch": 2046, "elid": 2046, "versatil": 2046, "static_input": 2046, "static_output": 2046, "realist": 2046, "sophist": [2046, 2069], "register_generator_st": 2046, "virtual": [2046, 2084], "d_in": 2046, "d_out": 2046, "640": 2046, "static_target": 2046, "static_y_pr": 2046, "static_loss": 2046, "real_input": [2046, 2103], "real_target": 2046, "refil": 2046, "rejoin": 2046, "cuda_work": 2046, "nsight": 2046, "reorgan": 2046, "graphabl": 2046, "illeg": [2046, 2100], "needlessli": [2046, 2100], "econom": 2046, "static_out_1": 2046, "g1_workload": 2046, "static_in_1": 2046, "static_out_2": 2046, "g2_workload": 2046, "static_in_2": 2046, "real_data_1": 2046, "real_data_2": 2046, "occasion": [2046, 2082, 2103], "click": [2047, 2091, 2110, 2111], "29500": [2048, 2077, 2078], "grad0": 2048, "grad1": 2048, "bucket1": 2048, "bucket0": 2048, "hurt": 2048, "kick": [2048, 2077, 2078, 2110], "earliest": 2048, "unreadi": 2048, "perspect": [2048, 2052, 2060, 2078], "hpp": 2048, "processgroupgloo": 2048, "processgroupmpi": 2048, "_sync_param": 2048, "autograd_hook": 2048, "prepare_for_backward": 2048, "optimize_ddp": 2048, "linearfunct": 2049, "grad_bia": 2049, "mulconst": 2049, "mycub": [2049, 2050], "grad_dx": [2049, 2050], "my_cub": [2049, 2050], "input_featur": 2049, "output_featur": 2049, "__array_function__": [2049, 2104], "nep": [2049, 2104], "0018": 2049, "scalartensor": 2049, "handled_funct": 2049, "mandat": 2049, "update_wrapp": 2049, "ensure_tensor": 2049, "metadatatensor": 2049, "__add__": 2049, "subtensor2": 2049, "othersubtensor": 2049, "loggingtensor": 2049, "permiss": 2049, "_metadata": 2049, "ndata": 2049, "ministri": 2049, "silli": 2049, "superclass": 2049, "troublesom": 2049, "_get_overridable_funct": 2049, "get_overridable_funct": [2049, 2114], "func_dict": 2049, "nn_func": 2049, "labori": 2049, "_get_testing_overrid": 2049, "get_testing_overrid": [2049, 2114], "override_dict": 2049, "dummy_add": 2049, "get_ignored_funct": [2049, 2114], "__torch_dispatch__": [2049, 2082, 2103], "outdat": 2049, "redispatch": [2049, 2103], "dispatchkei": 2049, "zerotensor": 2049, "compositeimplicitautograd": 2049, "func_nam": [2049, 2077], "overload_nam": 2049, "exot": 2049, "zoo": 2049, "torchfunctionmod": 2049, "_python_dispatch": 2049, "torchdispatchmod": 2049, "resolve_nam": [2049, 2114], "functionlog": 2049, "dispatchlog": 2049, "7164": 2049, "9336": 2049, "4287": 2049, "7989": 2049, "2169": 2049, "7474": 2049, "5624": 2049, "5970": 2049, "4328": 2049, "9794": 2049, "3490": 2049, "8671": 2049, "8573": 2049, "4338": 2049, "4948": 2049, "1249": 2049, "3307": 2049, "2151": 2049, "6018": 2049, "9060": 2049, "2974": 2049, "7708": 2049, "6668": 2049, "0352": 2049, "7948": 2049, "6023": 2049, "4303": 2049, "2036": 2049, "6831": 2049, "8120": 2049, "5949": 2049, "5416": 2049, "3335": 2049, "5897": 2049, "custom_vjp": 2050, "custom_jvp": 2050, "to_numpi": 2050, "numpysort": 2050, "ind_inv": 2050, "_1": [2050, 2070], "numpytak": 2050, "numpy_sort": 2050, "ggx": 2050, "vmappabl": 2050, "x_bdim": 2050, "ind_bdim": 2050, "ind_inv_bdim": 2050, "expanded_x": 2050, "expanded_ind": 2050, "expanded_ind_inv": 2050, "new_dim": 2050, "logical_dim": 2050, "maybe_expand_bdim_at_front": 2050, "pseudocod": 2050, "rapidli": [2051, 2059, 2065, 2066], "fortun": [2051, 2113], "abridg": 2051, "total_loss": 2051, "extrud": 2051, "phenomenon": 2051, "plenti": [2051, 2101], "bptt": 2051, "repackag": 2051, "nm": 2051, "blow": 2051, "rememb": [2051, 2059, 2069], "elf": 2051, "grep": [2051, 2097, 2101], 
"run_model": 2051, "recoveri": 2051, "data_parallel": 2051, "pad_packed_sequ": 2051, "padded_input": 2051, "packed_input": 2051, "packed_output": 2051, "my_lstm": 2051, "dp_m": 2051, "padding_input": 2051, "flava": 2052, "sooner": 2052, "llm": 2052, "6b": 2052, "2b": 2052, "8gb": 2052, "1x": 2052, "24gb": 2052, "total_transformer_block_params_in_b": 2052, "dtype_byt": 2052, "num_gpu": 2052, "gb": 2052, "2x": [2052, 2087, 2107], "record_funct": [2052, 2111], "recordstream": 2052, "flat_param": 2052, "splitwithsizesbackward": 2052, "4gb": 2052, "6gb": 2052, "_another_": 2052, "_could_": 2052, "setvar": 2053, "pti": 2053, "oneapi_root": 2053, "oneapi": 2053, "acitv": 2053, "cd": [2053, 2063, 2070, 2095, 2113], "checkout": [2053, 2067], "cmake": [2053, 2055, 2063, 2095], "pip": [2053, 2063, 2065, 2066, 2067, 2087, 2102], "use_xpu": 2053, "cmake_prefix_path": [2053, 2095], "conda_prefix": 2053, "torchvison": 2053, "cifar10": 2053, "totensor": [2053, 2087], "train_dataset": [2053, 2061], "batch_idx": 2053, "optimizer_state_dict": 2053, "use_amp": 2053, "ur": 2054, "ui": [2054, 2087, 2107, 2110], "j_f": 2054, "calculu": 2054, "cw": 2054, "bigger": 2054, "articl": 2054, "58eb23378f2a376565a66ac32c93a316c45b6131": 2054, "l99": 2054, "l105": 2054, "ds_dx": 2054, "compute_gradi": 2054, "ds_dy": 2054, "conj_w_d": 2054, "w_d": 2054, "d_idx": 2054, "albeit": 2054, "wonder": 2054, "amd": [2055, 2094], "dialect": 2055, "portabl": 2055, "rocmdoc": 2055, "programming_guid": 2055, "hip_api_guid": 2055, "cuda_vers": 2055, "cudaruntimegetvers": 2055, "cudadrivergetvers": 2055, "hip_vers": 2055, "hipruntimegetvers": 2055, "hipdrivergetvers": 2055, "11000": 2055, "use_rocm": 2055, "40300": 2055, "drocm_force_enable_gpu_assert": 2055, "addglobalcallback": 2056, "recordfunct": 2056, "ivalu": 2056, "threadlocaldebuginfo": 2056, "debuginfoguard": 2056, "recordfunctioncallback": 2056, "onfunctionent": 2056, "onfunctionexit": 2056, "needsinput": 2056, "samplingprob": 2056, "enablerecordfunct": 2056, "cerr": 2056, "broader": [2056, 2091], "inject": 2056, "setapiusagehandl": 2056, "setapiusagelogg": 2056, "event_nam": 2056, "c10_log_api_usage_onc": 2056, "my_api": 2056, "_log_api_usage_onc": 2056, "archiv": [2056, 2062], "akin": [2056, 2095], "jpeg": 2056, "camera": [2056, 2087], "setexportmoduleextrafileshook": 2056, "extrafilesmap": 2056, "producer_info": 2056, "getenv": 2056, "getsourc": 2056, "precompil": 2056, "pyc": 2056, "loos": 2056, "elabor": 2057, "tpu": 2057, "mylinear": 2057, "0413": 2057, "2057": 2057, "0597": 2057, "8247": 2057, "1045": 2057, "4299": 2057, "5457": 2057, "4793": 2057, "3634": 2057, "8525": 2057, "6749": 2057, "l0": [2057, 2062, 2101], "deeper": [2057, 2070], "bignet": 2057, "big_net": 2057, "dynamicnet": 2057, "dynamic_net": 2057, "2051": 2057, "7601": 2057, "1963": 2057, "4354": 2057, "6598": 2057, "4446": 2057, "4628": 2057, "8774": 2057, "6848": 2057, "5458": 2057, "4647": 2057, "5310": 2057, "0609": 2057, "0940": 2057, "1266": 2057, "0623": 2057, "3508": 2057, "0550": 2057, "5317": 2057, "5562": 2057, "4028": 2057, "6942": 2057, "0140": 2057, "0329": 2057, "1160": 2057, "0434": 2057, "3889": 2057, "1613": 2057, "6340": 2057, "3887": 2057, "9979": 2057, "0767": 2057, "3526": 2057, "8756": 2057, "5847": 2057, "6016": 2057, "1608": 2057, "0829": 2057, "6338": 2057, "9239": 2057, "6943": 2057, "5034": 2057, "0268": 2057, "4489": 2057, "9403": 2057, "1571": [2057, 2062], "2509": 2057, "5052": 2057, "3088": 2057, "4951": 2057, "3381": 2057, "5166": 2057, "beginn": 2057, "examples_nn": 2057, 
"polynomial_modul": 2057, "teach": 2057, "0013": [2057, 2083], "0030": 2057, "0008": 2057, "modalmodul": 2057, "6614": 2057, "2669": 2057, "0617": 2057, "4519": 2057, "two_layer_net_optim": 2057, "blitz": 2057, "neural_networks_tutori": 2057, "autograd_tutori": 2057, "new_net": 2057, "runningmean": 2057, "1041": 2057, "0647": 2057, "1515": 2057, "m_load": 2057, "unserialized_th": 2057, "statefulmodul": 2057, "param3": 2057, "param_list": 2057, "parameterlist": 2057, "param_dict": 2057, "parameterdict": 2057, "buffer1": 2057, "buffer2": 2057, "buffer3": 2057, "0322": 2057, "9066": 2057, "1409": 2057, "4852": 2057, "6949": 2057, "2911": 2057, "1044": 2057, "4202": 2057, "1953": 2057, "5299": 2057, "8747": 2057, "6289": 2057, "4898": 2057, "6434": 2057, "5187": 2057, "0346": 2057, "4077": 2057, "4324": 2057, "7022": 2057, "3915": 2057, "6176": 2057, "6062": 2057, "5992": 2057, "4452": 2057, "2843": 2057, "3710": 2057, "3947": 2057, "saving_loading_model": 2057, "what_is_state_dict": 2057, "skip_init": 2057, "skip_param_init": 2057, "forward_hook": [2057, 2109], "backward_hook": [2057, 2109], "new_grad_input": 2057, "5059": 2057, "8158": 2057, "2390": 2057, "0043": 2057, "addmmbackward": 2057, "forward_pre_hook_handl": 2057, "5752": 2057, "7421": 2057, "forward_hook_handl": 2057, "0980": 2057, "4666": 2057, "0256": 2057, "4497": 2057, "5046": 2057, "combat": 2057, "mps_devic": 2058, "yourfavoritenet": 2058, "a3c": 2059, "set_start_method": 2059, "simplequeu": 2059, "cope": 2059, "eleg": 2059, "num_process": 2059, "inappropri": 2059, "vcpu": 2059, "htop": 2059, "exceed": 2059, "competit": 2059, "oversubscrib": 2059, "mnist_hogwild": 2059, "dataloader_kwarg": 2059, "train_epoch": 2059, "30x": 2059, "boost": [2059, 2097], "754": 2060, "1e20": 2060, "4142e": 2060, "struggl": 2060, "benign": 2060, "v_dot2": 2060, "mfma": 2060, "fp64": 2060, "miopen": 2060, "rocblas_internal_fp16_alt_impl": 2060, "miopen_debug_convolution_attrib_fp16_alt_impl": 2060, "_convbackend": 2060, "slownd": 2060, "slownd_transpos": 2060, "slownd_dil": 2060, "slownd_dilated_transpos": 2060, "convbackend": 2060, "miopendepthwis": 2060, "miopentranspos": 2060, "svd_lowrank": [2061, 2082], "22modul": 2061, "20determin": 2061, "index_add_cuda_": 2061, "1509": 2061, "8027": 2061, "0333": 2061, "1444": 2061, "rese": 2061, "seed_work": 2061, "worker_se": 2061, "tensor_dict": 2062, "loaded_numb": 2062, "loaded_even": 2062, "loaded_smal": 2062, "num_batches_track": 2062, "bn_state_dict": 2062, "new_bn": 2062, "out0_relu": 2062, "1400": 2062, "4563": 2062, "0271": 2062, "4406": 2062, "2827": 2062, "4588": 2062, "2031": 2062, "1316": 2062, "6533": 2062, "3413": 2062, "1112": 2062, "m_state_dict": 2062, "new_m": 2062, "zip64": 2062, "pkl": [2062, 2070], "byteord": 2062, "original_nam": 2062, "controlflowmodul": 2062, "controlflowmodule_trac": 2062, "3793": 2062, "controlflowmodule_script": 2062, "tagger": 2062, "ipu_tag": 2062, "ipu_deseri": 2062, "startswith": [2062, 2068], "get_default_load_endian": 2062, "loadendian": 2062, "default_load_endian": 2062, "set_default_load_endian": 2062, "endian": 2062, "get_default_mmap_opt": 2062, "default_mmap_opt": 2062, "set_default_mmap_opt": 2062, "safe_glob": 2062, "mytensor": 2062, "namedtemporaryfil": 2062, "5024": 2062, "8152": 2062, "5455": 2062, "8234": 2062, "clear_safe_glob": 2062, "get_safe_glob": 2062, "rem": 2063, "7z": 2063, "curl": 2063, "ossci": 2063, "mkl_2020": 2063, "aoa": 2063, "omkl": 2063, "cuda_prefix": 2063, "cuda102": 2063, "magma_2": 2063, "4_": 2063, "omagma": 2063, 
"cmake_include_path": 2063, "magma_hom": 2063, "studio": [2063, 2065], "cmake_gener": 2063, "ffi": 2063, "create_extens": 2063, "_ext": 2063, "define_macro": 2063, "relative_to": 2063, "c99": 2063, "x86_x64": 2063, "packagesnotfounderror": 2063, "anaconda": 2063, "noarch": 2063, "continuum": 2063, "pkg": 2063, "pro": [2063, 2087], "msys2": 2063, "importerror": [2063, 2070], "dll": 2063, "vc2017": 2063, "vc": 2063, "vs2017_runtim": 2063, "mkl_fft": 2063, "intel_openmp": 2063, "vs2017": 2063, "openbla": 2063, "forg": 2063, "emerg": [2063, 2100], "forgotten": 2063, "freeze_support": 2063, "forkingpickl": 2063, "brokenpipeerror": 2063, "errno": 2063, "couldn": [2063, 2067], "torch_14808_1591070686": 2063, "thalloc": 2063, "tdr": 2063, "thcudacheck": 2063, "csrc": [2063, 2095, 2100, 2101], "storageshar": 2063, "microsoft": [2064, 2072], "flavor": 2064, "polish": [2064, 2065], "orchestr": [2065, 2109], "exportopt": 2065, "onnxregistri": 2065, "extractor": 2065, "fxgraphextractor": 2065, "onnxfakecontext": 2065, "onnxprogram": [2065, 2067], "onnxprogramseri": 2065, "upgrad": [2065, 2069, 2089], "perceptron": 2065, "mlpmodel": 2065, "fc0": 2065, "fc3": 2065, "97": [2065, 2083], "onnx_program": 2065, "dynamo_export": [2065, 2067], "model_proto": 2065, "modelproto": 2065, "complianc": 2065, "protobuf": [2065, 2067], "netron": 2065, "icon": 2065, "viewer": [2065, 2115], "parseabl": 2065, "fxe0007": 2065, "fxe0011": 2065, "fxe0012": 2065, "fxe0013": 2065, "fxe0014": 2065, "fxe0015": 2065, "fxe0016": 2065, "model_kwarg": 2065, "my_simple_model": 2065, "my_dynamic_model": 2065, "tensor_typ": 2065, "elem_typ": 2065, "dim_param": [2065, 2067], "arg0_dim_0": 2065, "arg0_dim_1": 2065, "arg0_dim_2": 2065, "fake_context": 2065, "onnx_registri": 2065, "diagnostic_opt": 2065, "enable_fake_mod": 2065, "xdoctest": [2065, 2066], "torch_doctest_onnx": [2065, 2066], "my_nn_modul": 2065, "my_model_without_initi": 2065, "WITH": [2065, 2067], "my_model_with_initi": 2065, "model_st": 2065, "input_adapt": 2065, "output_adapt": 2065, "diagnostic_context": 2065, "export_except": 2065, "model_signatur": 2065, "model_torch": 2065, "io_adapt": 2065, "inputadapt": 2065, "outputadapt": 2065, "diagnosticcontext": 2065, "adapt_torch_inputs_to_onnx": 2065, "model_with_state_dict": 2065, "func_nested_input": 2065, "x_dict": 2065, "y_tupl": 2065, "y3": 2065, "adapt_torch_outputs_to_onnx": 2065, "model_output": 2065, "func_returning_tupl": 2065, "pt_output": 2065, "inputadaptstep": 2065, "outputadaptstep": 2065, "pprint": 2065, "9216": [2065, 2067], "p_conv1_weight": 2065, "p_conv2_weight": 2065, "p_fc1_weight": 2065, "p_fc2_weight": 2065, "b_my_buffer2": 2065, "b_my_buffer1": 2065, "_log_softmax": [2065, 2068, 2108], "include_initi": 2065, "bufferediobas": 2065, "safetensor": 2065, "safe_open": 2065, "save_diagnost": 2065, "protobufonnxprogramseri": 2065, "serializetostr": 2065, "exported_model": 2065, "onnxruntimeopt": 2065, "session_opt": 2065, "execution_provid": 2065, "execution_provider_opt": 2065, "onnxruntim": [2065, 2066, 2067, 2072], "sessionopt": 2065, "invalidexportoptionserror": 2065, "get_op_funct": 2065, "is_registered_op": 2065, "register_op": 2065, "tracedonnxfunct": 2065, "sctip": 2065, "warnings_as_error": 2065, "is_onnxrt_backend_support": 2066, "onnxrt": [2066, 2094], "dummy_input": 2067, "input_nam": 2067, "actual_input_1": 2067, "learned_": 2067, "output_nam": 2067, "learned_0": 2067, "learned_1": 2067, "learned_2": 2067, "learned_3": 2067, "learned_14": 2067, "learned_15": 2067, "kernel_shap": 2067, 
"check_model": 2067, "printable_graph": 2067, "ort": 2067, "ort_sess": 2067, "inferencesess": 2067, "astyp": 2067, "seq_length": 2067, "real_seq_length": 2067, "experienc": 2067, "new_data": 2067, "hope": [2067, 2101, 2107], "symbolic_opset": 2067, "symbolic_opset9": 2067, "_variablefunct": 2067, "pyi": 2067, "___torch_mangle_0": 2067, "alpha_f": 2067, "myrelu": 2067, "value_t": 2067, "pythonop": [2067, 2068], "mylogexp": 2067, "operator_export_typ": 2067, "onnx_fallthrough": 2067, "onnx_aten_fallback": 2067, "onnx_opset": 2067, "opset15": 2067, "custom_opset": 2067, "67326": 2067, "alphax": 2067, "castlik": 2067, "gammax": 2067, "settyp": 2067, "custom_selu": 2067, "jit_util": 2067, "graphcontext": 2067, "onnxscript_op": 2067, "register_custom_op_symbol": 2067, "symbolic_nam": 2067, "symbolic_fn": 2067, "Be": [2067, 2110], "symbolic_help": 2067, "symbolic_foo_forward": 2067, "custom_domain": 2067, "attr1_f": 2067, "attr2_i": 2067, "foo_forward": 2067, "foomodel": 2067, "example_input1": 2067, "caffe2": [2067, 2087], "torch_script_graph": 2067, "unconvertible_op": 2067, "dynamic_ax": 2067, "export_param": 2067, "trainingmod": 2067, "operatorexporttyp": 2067, "do_constant_fold": 2067, "keep_initializers_as_input": 2067, "export_modules_as_funct": 2067, "autograd_inlin": 2067, "OF": 2067, "input_i": 2067, "input_z": 2067, "fileno": 2067, "untrain": 2067, "doc_str": 2067, "onnx_aten": 2067, "summodul": 2067, "dim_valu": 2067, "my_custom_axis_nam": 2067, "sum_dynamic_axes_1": 2067, "deduplicate_initi": 2067, "74765": 2067, "checkererror": 2067, "unsupportedoperatorerror": 2067, "export_to_pretty_str": 2067, "export_typ": 2067, "google_print": 2067, "add_node_nam": 2067, "nodeproto": 2067, "debugstr": 2067, "contrib": 2067, "test_aten_embedding_2": 2067, "test_oper": 2067, "unregister_custom_op_symbol": 2067, "select_model_mode_for_export": 2067, "is_in_onnx_export": 2067, "middl": [2067, 2101], "enable_log": 2067, "disable_log": 2067, "graphinfo": 2067, "incorrect_relu_symbolic_funct": 2067, "2328854203224182": 2067, "699536174352349": 2067, "u2713": 2067, "constantchunk": 2068, "__and_": 2068, "__contains_": 2068, "__derive_index": 2068, "__getitem_": 2068, "__interpol": 2068, "__is_": 2068, "__isnot_": 2068, "__lshift_": 2068, "__not_": 2068, "__or_": 2068, "__range_length": 2068, "__rshift_": 2068, "__xor_": 2068, "_cast_byt": 2068, "_cast_char": 2068, "_cast_doubl": 2068, "_cast_float": 2068, "_cast_half": 2068, "_cast_int": 2068, "_cast_long": 2068, "_cast_short": 2068, "_conj": 2068, "_convolution_mod": 2068, "_dim_arang": 2068, "_pack_padded_sequ": 2068, "_pad_packed_sequ": 2068, "_reshape_from_tensor": 2068, "_sample_dirichlet": 2068, "_set_item": 2068, "_shape_as_tensor": 2068, "_standard_gamma": 2068, "_uniqu": 2068, "_unique2": 2068, "_weight_norm": 2068, "conv1d_relu": 2068, "conv2d_relu": 2068, "conv3d_relu": 2068, "embedding_renorm": 2068, "floordiv": [2068, 2074], "linear_relu": [2068, 2074], "nonzero_numpi": 2068, "numpy_t": 2068, "unchecked_cast": 2068, "unique_dim": 2068, "_c10d_function": 2068, "all_gather_into_tensor_coalesc": 2068, "all_gather_into_tensor_out": 2068, "all_reduce_coalesc": 2068, "reduce_scatter_tensor_coalesc": 2068, "wait_tensor": 2068, "_c10d_functional_autograd": 2068, "_dtensor": 2068, "shard_dim_alltoal": 2068, "_quantiz": 2068, "conv2d_prepack": 2068, "conv3d_prepack": 2068, "conv_transpose1d_prepack": 2068, "conv_transpose2d_prepack": 2068, "conv_transpose3d_prepack": 2068, "linear_dynam": 2068, "linear_prepack": 2068, "linear_prepack_fp16": 2068, 
"linear_prepack_fp16_legaci": 2068, "linear_prepack_legaci": 2068, "wrapped_fbgemm_linear_fp16_weight": 2068, "wrapped_fbgemm_pack_gemm_matrix_fp16": 2068, "_test": 2068, "get_first": 2068, "compleximplicit": 2068, "floatimplicit": 2068, "intimplicit": 2068, "__iand_": 2068, "__ilshift_": 2068, "__ior_": 2068, "__irshift_": 2068, "__ixor_": 2068, "__round_to_zero_floordiv": 2068, "__upsampl": 2068, "__upsample_bilinear": 2068, "__upsample_nearest": 2068, "_adaptive_avg_pool2d": [2068, 2108], "_adaptive_avg_pool3d": [2068, 2108], "_add_batch_dim": 2068, "_add_relu": 2068, "_addmm_activ": 2068, "_aminmax": 2068, "_amp_foreach_non_finite_check_and_unscal": 2068, "_amp_update_scal": 2068, "_assert_async": 2068, "_assert_tensor_metadata": 2068, "_autocast_to_full_precis": 2068, "_autocast_to_reduced_precis": 2068, "_batch_norm_impl_index": 2068, "_batch_norm_no_upd": 2068, "_batch_norm_with_upd": 2068, "_batch_norm_with_update_funct": 2068, "_cdist_forward": [2068, 2108], "_cholesky_solve_help": 2068, "_choose_qparams_per_tensor": 2068, "_chunk_cat": 2068, "_coalesc": 2068, "_compute_linear_combin": 2068, "_conj_copi": 2068, "_conj_phys": 2068, "_conv_depthwise2d": 2068, "_convert_indices_from_coo_to_csr": 2068, "_convert_indices_from_csr_to_coo": 2068, "_convert_weight_to_int4pack": 2068, "_copy_from": 2068, "_copy_from_and_res": 2068, "_cslt_compress": 2068, "_cslt_sparse_mm": 2068, "_cslt_sparse_mm_search": 2068, "_ctc_loss": 2068, "_cudnn_ctc_loss": 2068, "_cudnn_init_dropout_st": 2068, "_cudnn_rnn": 2068, "_cudnn_rnn_flatten_weight": 2068, "_cufft_clear_plan_cach": 2068, "_cufft_get_plan_cache_max_s": 2068, "_cufft_get_plan_cache_s": 2068, "_cufft_set_plan_cache_max_s": 2068, "_cummax_help": 2068, "_cummin_help": 2068, "_debug_has_internal_overlap": 2068, "_dimi": 2068, "_dimv": 2068, "_dirichlet_grad": 2068, "_efficient_attention_forward": 2068, "_efficientzerotensor": 2068, "_embedding_bag": [2068, 2108], "_embedding_bag_forward_onli": 2068, "_empty_affine_quant": 2068, "_empty_per_channel_affine_quant": 2068, "_euclidean_dist": 2068, "_fake_quantize_learnable_per_channel_affin": 2068, "_fake_quantize_learnable_per_tensor_affin": 2068, "_fake_quantize_per_tensor_affine_cachemask_tensor_qparam": 2068, "_fft_c2c": 2068, "_fft_c2r": 2068, "_fft_r2c": 2068, "_fill_mem_eff_dropout_mask": 2068, "_flash_attention_forward": 2068, "_foobar": [2068, 2113], "_foreach_ab": 2068, "_foreach_aco": 2068, "_foreach_add": 2068, "_foreach_addcdiv": 2068, "_foreach_addcmul": 2068, "_foreach_asin": 2068, "_foreach_atan": 2068, "_foreach_ceil": 2068, "_foreach_clamp_max": 2068, "_foreach_clamp_min": 2068, "_foreach_copi": 2068, "_foreach_co": 2068, "_foreach_cosh": 2068, "_foreach_div": 2068, "_foreach_erf": 2068, "_foreach_erfc": 2068, "_foreach_exp": 2068, "_foreach_expm1": 2068, "_foreach_floor": 2068, "_foreach_frac": 2068, "_foreach_lerp": 2068, "_foreach_lgamma": 2068, "_foreach_log": 2068, "_foreach_log10": 2068, "_foreach_log1p": 2068, "_foreach_log2": 2068, "_foreach_max": 2068, "_foreach_maximum": 2068, "_foreach_minimum": 2068, "_foreach_mul": 2068, "_foreach_neg": 2068, "_foreach_norm": 2068, "_foreach_pow": 2068, "_foreach_reciproc": 2068, "_foreach_round": 2068, "_foreach_sigmoid": 2068, "_foreach_sign": 2068, "_foreach_sin": 2068, "_foreach_sinh": 2068, "_foreach_sqrt": 2068, "_foreach_sub": 2068, "_foreach_tan": 2068, "_foreach_tanh": 2068, "_foreach_trunc": 2068, "_foreach_zero": 2068, "_functional_assert_async": 2068, "_functional_assert_scalar": 2068, "_functional_sym_constrain_rang": 2068, 
"_functional_sym_constrain_range_for_s": 2068, "_fused_adagrad": 2068, "_fused_adam": 2068, "_fused_adamw": 2068, "_fused_dropout": 2068, "_fused_moving_avg_obs_fq_help": 2068, "_fused_moving_avg_obs_fq_helper_funct": 2068, "_fused_sdp_choic": 2068, "_fused_sgd": 2068, "_fw_primal": 2068, "_fw_primal_copi": 2068, "_get_cpu_cap": 2068, "_get_tracing_st": 2068, "_grad_sum_to_s": 2068, "_has_compatible_shallow_copy_typ": 2068, "_has_same_storage_numel": 2068, "_histogramdd_bin_edg": 2068, "_histogramdd_from_bin_ct": 2068, "_histogramdd_from_bin_tensor": 2068, "_index_put_impl": 2068, "_indices_copi": 2068, "_infer_s": 2068, "_int_mm": 2068, "_is_all_tru": 2068, "_is_any_tru": 2068, "_is_zerotensor": 2068, "_jagged_to_padded_dense_forward": 2068, "_lazy_clon": 2068, "_linalg_check_error": 2068, "_linalg_det": 2068, "_linalg_eigh": 2068, "_linalg_eigv": 2068, "_linalg_slogdet": 2068, "_linalg_solve_ex": 2068, "_linalg_svd": 2068, "_list_to_tensor": 2068, "_local_scalar_dens": [2068, 2108], "_logcumsumexp": 2068, "_lstm_mp": 2068, "_make_dep_token": 2068, "_make_du": 2068, "_make_dual_copi": 2068, "_make_per_channel_quantized_tensor": 2068, "_make_per_tensor_quantized_tensor": 2068, "_masked_scal": 2068, "_masked_softmax": 2068, "_mixed_dtypes_linear": 2068, "_mkldnn_reshap": 2068, "_mkldnn_transpos": 2068, "_mps_convolut": 2068, "_mps_convolution_transpos": 2068, "_native_batch_norm_legit": [2068, 2108], "_native_batch_norm_legit_funct": 2068, "_native_batch_norm_legit_no_train": [2068, 2108], "_native_multi_head_attent": 2068, "_ncf_unsqueez": 2068, "_ncf_view": 2068, "_neg_view": 2068, "_neg_view_copi": 2068, "_nested_compute_contiguous_strides_offset": 2068, "_nested_from_pad": 2068, "_nested_from_padded_and_nested_exampl": 2068, "_nested_get_jagged_dummi": 2068, "_nested_get_length": 2068, "_nested_get_offset": 2068, "_nested_get_ragged_idx": 2068, "_nested_get_valu": 2068, "_nested_get_values_copi": 2068, "_nested_tensor_from_mask": 2068, "_nested_tensor_from_mask_left_align": 2068, "_nested_tensor_from_tensor_list": 2068, "_nested_tensor_s": 2068, "_nested_tensor_softmax_with_shap": 2068, "_nested_tensor_storage_offset": 2068, "_nested_tensor_strid": 2068, "_nested_view_from_buff": 2068, "_nested_view_from_buffer_copi": 2068, "_nested_view_from_jag": 2068, "_nested_view_from_jagged_copi": 2068, "_new_zeros_with_same_feature_meta": 2068, "_nnpack_avail": 2068, "_nnpack_spatial_convolut": 2068, "_no_grad_embedding_renorm": 2068, "_no_grad_fil": 2068, "_no_grad_norm": 2068, "_no_grad_uniform": 2068, "_no_grad_zero": 2068, "_pack_sequ": 2068, "_pad_circular": 2068, "_pad_enum": 2068, "_padded_dense_to_jagged_forward": 2068, "_pdist_forward": [2068, 2108], "_pin_memori": 2068, "_prelu_kernel": 2068, "_print": 2068, "_propagate_xla_data": 2068, "_remove_batch_dim": 2068, "_reshape_alia": 2068, "_reshape_alias_copi": 2068, "_reshape_copi": 2068, "_resize_output": 2068, "_rowwise_prun": 2068, "_saturate_weight_to_fp16": 2068, "_scaled_dot_product_attention_math": 2068, "_scaled_dot_product_cudnn_attent": 2068, "_scaled_dot_product_efficient_attent": 2068, "_scaled_dot_product_flash_attent": 2068, "_scaled_dot_product_flash_attention_for_cpu": 2068, "_scaled_mm": 2068, "_size_if_not_equ": 2068, "_slow_conv2d_forward": 2068, "_sobol_engine_draw": 2068, "_sobol_engine_ff": 2068, "_sobol_engine_initialize_st": 2068, "_sobol_engine_scrambl": 2068, "_softmax": [2068, 2083, 2108], "_sparse_addmm": 2068, "_sparse_broadcast_to": 2068, "_sparse_broadcast_to_copi": 2068, "_sparse_bsc_tensor_unsaf": 2068, 
"_sparse_bsr_tensor_unsaf": 2068, "_sparse_compressed_tensor_unsaf": 2068, "_sparse_compressed_tensor_with_dim": 2068, "_sparse_coo_tensor_unsaf": 2068, "_sparse_coo_tensor_with_dim": 2068, "_sparse_coo_tensor_with_dims_and_tensor": 2068, "_sparse_csc_tensor_unsaf": 2068, "_sparse_csr_prod": 2068, "_sparse_csr_sum": 2068, "_sparse_csr_tensor_unsaf": 2068, "_sparse_log_softmax": 2068, "_sparse_mask_project": 2068, "_sparse_mm": 2068, "_sparse_mm_reduce_impl": 2068, "_sparse_semi_structured_addmm": 2068, "_sparse_semi_structured_appli": 2068, "_sparse_semi_structured_apply_dens": 2068, "_sparse_semi_structured_linear": 2068, "_sparse_semi_structured_mm": 2068, "_sparse_semi_structured_til": 2068, "_sparse_softmax": 2068, "_sparse_sparse_matmul": 2068, "_sparse_sum": 2068, "_spdiag": 2068, "_standard_gamma_grad": 2068, "_tensor_to_list": 2068, "_test_ambiguous_default": 2068, "_test_autograd_multiple_dispatch": 2068, "_test_autograd_multiple_dispatch_view": 2068, "_test_autograd_multiple_dispatch_view_copi": 2068, "_test_check_tensor": 2068, "_test_functorch_fallback": 2068, "_test_optional_filled_intlist": 2068, "_test_optional_floatlist": 2068, "_test_optional_intlist": 2068, "_test_parallel_materi": 2068, "_test_serialization_subcmul": 2068, "_test_string_default": 2068, "_test_warn_in_autograd": 2068, "_thnn_fused_gru_cel": 2068, "_thnn_fused_lstm_cel": 2068, "_to_copi": [2068, 2108], "_to_cpu": 2068, "_to_dens": 2068, "_to_spars": 2068, "_to_sparse_bsc": 2068, "_to_sparse_bsr": 2068, "_to_sparse_csc": 2068, "_to_sparse_csr": 2068, "_to_sparse_semi_structur": 2068, "_transform_bias_rescale_qkv": 2068, "_transformer_encoder_layer_fwd": 2068, "_trilinear": 2068, "_triton_multi_head_attent": 2068, "_triton_scaled_dot_attent": 2068, "_unpack_du": 2068, "_unsafe_index": 2068, "_unsafe_index_put": 2068, "_unsafe_view": 2068, "_unwrap_opt": 2068, "_upsample_bicubic2d_aa": 2068, "_upsample_bilinear2d_aa": 2068, "_upsample_nearest_exact1d": 2068, "_upsample_nearest_exact2d": 2068, "_upsample_nearest_exact3d": 2068, "_use_cudnn_ctc_loss": 2068, "_use_cudnn_rnn_flatten_weight": 2068, "_validate_compressed_sparse_indic": 2068, "_validate_sparse_bsc_tensor_arg": 2068, "_validate_sparse_bsr_tensor_arg": 2068, "_validate_sparse_compressed_tensor_arg": 2068, "_validate_sparse_coo_tensor_arg": 2068, "_validate_sparse_csc_tensor_arg": 2068, "_validate_sparse_csr_tensor_arg": 2068, "_values_copi": 2068, "_weight_int4pack_mm": 2068, "_weight_int8pack_mm": 2068, "_weight_norm_interfac": 2068, "capit": 2068, "confirmed_by_own": [2068, 2077], "convolution_overrid": 2068, "copy_sparse_to_spars": 2068, "endswith": 2068, "expandtab": 2068, "fake_quantize_per_channel_affine_cachemask": 2068, "fake_quantize_per_tensor_affine_cachemask": 2068, "fill_diagon": 2068, "glu_jvp": 2068, "has_torch_funct": [2068, 2114], "is_non_overlapping_and_dens": 2068, "is_own": [2068, 2077], "is_strides_like_format": 2068, "isalnum": 2068, "isalpha": 2068, "isdecim": 2068, "isdigit": 2068, "isidentifi": 2068, "islow": 2068, "isnumer": 2068, "isprint": 2068, "isspac": 2068, "istitl": 2068, "isupp": 2068, "lift_fresh": 2068, "ljust": 2068, "local_valu": [2068, 2077], "log_sigmoid_forward": 2068, "lstrip": 2068, "matrix_h": 2068, "nll_loss2d_forward": 2068, "nll_loss_forward": 2068, "normal_funct": 2068, "owner_nam": [2068, 2077], "percentformat": 2068, "quantized_gru": 2068, "quantized_lstm": 2068, "resize_as_spars": 2068, "rfind": 2068, "rindex": 2068, "rjust": 2068, "rpartit": 2068, "rsplit": 2068, "rstrip": 2068, "set_data": 2068, 
"slow_conv3d_forward": 2068, "sparse_res": 2068, "sparse_resize_and_clear": 2068, "splitlin": 2068, "swapcas": 2068, "sym_numel": [2068, 2108], "sym_storage_offset": [2068, 2108], "sym_strid": [2068, 2108], "unique_dim_consecut": 2068, "zfill": 2068, "_allgather_bas": 2068, "_reduce_scatter_bas": 2068, "allgath": 2068, "allgather_coalesc": 2068, "allgather_into_tensor_coalesc": 2068, "allreduce_coalesc": 2068, "alltoal": 2068, "alltoall_bas": 2068, "recv_any_sourc": 2068, "debugprim": 2068, "load_tensor": 2068, "_alloc_from_pool": 2068, "_mm_plus_mm": 2068, "_reinterpret_tensor": 2068, "accumulate_grad": 2068, "resize_storage_byt": 2068, "_mkl_linear": 2068, "_mkl_reorder_linear_weight": 2068, "_convolution_pointwis": 2068, "_convolution_transpose_pointwis": 2068, "_get_mkldnn_serialized_md": 2068, "_is_mkldnn_acl_support": 2068, "_is_mkldnn_bf16_support": 2068, "_is_mkldnn_fp16_support": 2068, "_linear_pointwis": 2068, "_nbyte": 2068, "_reorder_convolution_transpose_weight": 2068, "_reorder_convolution_weight": 2068, "_reorder_linear_weight": 2068, "_reorder_mkldnn_rnn_layer_weight": 2068, "mkldnn_prepack": 2068, "conv2d_run": 2068, "qconv1d_pointwis": 2068, "qconv2d_pointwis": 2068, "qconv3d_pointwis": 2068, "qconv_prepack": 2068, "qlinear_pointwis": 2068, "qlinear_prepack": 2068, "conv2d_clamp_prepack": 2068, "conv2d_clamp_run": 2068, "conv2d_transpose_clamp_prepack": 2068, "conv2d_transpose_clamp_run": 2068, "linear_clamp_prepack": 2068, "linear_clamp_run": 2068, "unpack_prepacked_sizes_conv2d": 2068, "unpack_prepacked_sizes_linear": 2068, "addstatvalu": 2068, "autogradadd": 2068, "autogradallnonzero": 2068, "autogradallzero": 2068, "autogradanynonzero": 2068, "autogradzero": 2068, "bailout": [2068, 2100], "bailouttempl": 2068, "broadcastmkldnntensor": 2068, "broadcasts": 2068, "chunksiz": 2068, "constantmkldnntensor": 2068, "differentiablegraph": 2068, "enumnam": 2068, "enumvalu": 2068, "fallbackgraph": 2068, "fusedconcat": 2068, "fusiongroup": 2068, "ifthenels": 2068, "ignoredpythonop": 2068, "mkldnnclamp": 2068, "mkldnnhardsigmoid": 2068, "mkldnnhardswish": 2068, "mkldnnhardtanh": 2068, "mkldnnlayernorm": 2068, "mkldnnscalarmul": 2068, "mmbatchsid": 2068, "mmtreereduc": 2068, "modulecontainerindex": 2068, "numtotensor": 2068, "raiseexcept": 2068, "reductions": 2068, "requiresgradcheck": 2068, "staticruntimecopyout": 2068, "staticsubgraph": 2068, "stringindex": 2068, "tensorexprdynamicgroup": 2068, "tensorexprdynamicguard": 2068, "tensorexprgroup": 2068, "timepoint": 2068, "tupleindex": 2068, "tupleunpack": 2068, "varconcat": 2068, "varstack": 2068, "awaitable_nowait": 2068, "awaitable_wait": 2068, "is_cpu": 2068, "is_ipu": 2068, "is_maia": 2068, "is_mkldnn": 2068, "is_mp": 2068, "is_mtia": 2068, "is_nest": 2068, "is_quant": 2068, "is_vulkan": 2068, "is_xla": 2068, "is_xpu": 2068, "onednnfusiongroup": 2068, "onednnfusionguard": 2068, "profile_ivalu": 2068, "rangelist": 2068, "rpc_remot": 2068, "unchecked_unwrap_opt": 2068, "_make_token": [2068, 2108], "_sink_token": [2068, 2108], "bessel_i0": [2068, 2108], "bessel_i1": [2068, 2108], "bessel_j0": [2068, 2083, 2108], "bessel_j1": [2068, 2083, 2108], "broadcast_in_dim": [2068, 2108], "cbrt": [2068, 2108], "collapse_view": [2068, 2108], "convert_element_typ": [2068, 2108], "copy_strid": [2068, 2108], "copy_to": [2068, 2108], "device_put": [2068, 2108], "erf_inv": [2068, 2108], "erfcx": [2068, 2083, 2108], "fft_c2c": [2068, 2108], "fft_c2r": [2068, 2108], "fft_r2c": [2068, 2108], "iota": [2068, 2108], "maximum_valu": [2068, 2108], 
"minimum_valu": [2068, 2108], "ndtri": [2068, 2083, 2108], "rev": [2068, 2108], "shift_left": [2068, 2108], "shift_right_arithmet": [2068, 2108], "slice_in_dim": [2068, 2108], "spherical_bessel_j0": [2068, 2083, 2108], "view_of": [2068, 2108], "view_of_dtyp": [2068, 2108], "xor_sum": [2068, 2108], "_call_end_callbacks_on_jit_fut": 2068, "_record_function_ent": 2068, "_record_function_enter_new": 2068, "_record_function_exit": 2068, "_bfloat16quantizedtofloat": 2068, "_floattobfloat16quant": 2068, "add_out": 2068, "add_relu_out": 2068, "add_scalar_out": 2068, "add_scalar_relu": 2068, "add_scalar_relu_out": 2068, "batch_norm1d": 2068, "batch_norm1d_relu": 2068, "batch_norm2d": 2068, "batch_norm2d_relu": 2068, "batch_norm3d": 2068, "batch_norm3d_relu": 2068, "batch_norm_relu": 2068, "cat_out": 2068, "cat_relu": 2068, "cat_relu_out": 2068, "conv1d_dynam": 2068, "conv1d_prepack": 2068, "conv1d_unpack": 2068, "conv2d_add": 2068, "conv2d_add_relu": 2068, "conv2d_dil": 2068, "conv2d_dynam": 2068, "conv2d_group": 2068, "conv2d_output_pad": 2068, "conv2d_pad": 2068, "conv2d_strid": 2068, "conv2d_transpos": 2068, "conv2d_unpack": 2068, "conv2d_unpack_s": 2068, "conv3d_dil": 2068, "conv3d_dynam": 2068, "conv3d_group": 2068, "conv3d_output_pad": 2068, "conv3d_pad": 2068, "conv3d_strid": 2068, "conv3d_transpos": 2068, "conv3d_unpack": 2068, "conv_prepack": 2068, "conv_transpose1d_dynam": 2068, "conv_transpose1d_unpack": 2068, "conv_transpose2d_dil": 2068, "conv_transpose2d_dynam": 2068, "conv_transpose2d_group": 2068, "conv_transpose2d_output_pad": 2068, "conv_transpose2d_pad": 2068, "conv_transpose2d_strid": 2068, "conv_transpose2d_transpos": 2068, "conv_transpose2d_unpack": 2068, "conv_transpose3d_dil": 2068, "conv_transpose3d_dynam": 2068, "conv_transpose3d_group": 2068, "conv_transpose3d_output_pad": 2068, "conv_transpose3d_pad": 2068, "conv_transpose3d_strid": 2068, "conv_transpose3d_transpos": 2068, "conv_transpose3d_unpack": 2068, "conv_unpack": 2068, "embedding_4bit": 2068, "embedding_bag_2bit_prepack": 2068, "embedding_bag_2bit_rowwise_offset": 2068, "embedding_bag_2bit_unpack": 2068, "embedding_bag_4bit": 2068, "embedding_bag_4bit_prepack": 2068, "embedding_bag_4bit_rowwise_offset": 2068, "embedding_bag_4bit_unpack": 2068, "embedding_bag_byt": 2068, "embedding_bag_byte_prepack": 2068, "embedding_bag_byte_rowwise_offset": 2068, "embedding_bag_byte_unpack": 2068, "embedding_bag_prepack": 2068, "embedding_bag_unpack": 2068, "embedding_byt": 2068, "linear_dynamic_fp16": 2068, "linear_dynamic_fp16_unpacked_weight": 2068, "linear_leaky_relu": 2068, "linear_relu_dynam": 2068, "linear_relu_dynamic_fp16": 2068, "linear_tanh": 2068, "linear_unpack": 2068, "linear_unpack_fp16": 2068, "linear_with_input_q_dq_qweight_dq_output_fp32": 2068, "linear_with_input_q_dq_qweight_dq_relu_output_fp32": 2068, "make_quantized_cell_param": 2068, "make_quantized_cell_params_dynam": 2068, "make_quantized_cell_params_fp16": 2068, "mul_out": 2068, "mul_relu": 2068, "mul_relu_out": 2068, "mul_scalar_out": 2068, "mul_scalar_relu": 2068, "mul_scalar_relu_out": 2068, "quantized_gru_cell_dynam": 2068, "quantized_lstm_cell_dynam": 2068, "quantized_rnn_relu_cell_dynam": 2068, "quantized_rnn_tanh_cell_dynam": 2068, "rngprim": 2068, "philox_rand": 2068, "qlinear": 2068, "qlinear_dynam": 2068, "qlinear_relu": 2068, "qlinear_relu_dynam": 2068, "qlinear_unpack": 2068, "static_runtim": 2068, "vartupleunpack": 2068, "clamp_nan_to_num": 2068, "create_owned_ref": 2068, "dequantize_copi": 2068, "dict_unpack": 2068, "expand_dims_copi": 
2068, "flatten_copi": 2068, "fused_equally_split": 2068, "reshape_copi": 2068, "select_tensor": 2068, "signed_log1p": 2068, "to_copi": 2068, "to_maybe_copy_out": 2068, "var1": 2069, "var2": 2069, "bias_param": 2069, "adadelta": 2069, "adamax": 2069, "asgd": 2069, "nadam": 2069, "radam": 2069, "rmsprop": 2069, "rprop": 2069, "reducelronplateau": 2069, "multisteplr": 2069, "swa_util": 2069, "averagedmodel": 2069, "swalr": 2069, "update_bn": 2069, "optima": 2069, "polyak": 2069, "averaged_model": 2069, "multi_avg_fn": 2069, "get_ema_multi_avg_fn": 2069, "textrm": 2069, "update_paramet": 2069, "avg_fn": 2069, "_foreach": 2069, "ema_model": 2069, "ema_avg": 2069, "averaged_model_paramet": 2069, "model_paramet": 2069, "num_averag": 2069, "swa_schedul": 2069, "anneal_epoch": 2069, "swa_lr": 2069, "swa_model": 2069, "cosineannealinglr": 2069, "swa_start": 2069, "test_input": 2069, "secur": 2070, "unpackag": 2070, "exercis": 2070, "unzip": 2070, "my_packag": 2070, "freeli": 2070, "94304870911616": 2070, "94304900784016": 2070, "extern_modul": 2070, "model_1": 2070, "myzip": 2070, "file_byt": 2070, "writestr": 2070, "new_file_byt": 2070, "vim": 2070, "vimrc": 2070, "bufreadcmd": 2070, "brows": 2070, "amatch": 2070, "vi": 2070, "packageimport": 2070, "queryabl": 2070, "glob": 2070, "packageexport": 2070, "pe": 2070, "save_pickl": 2070, "has_fil": 2070, "importer_file_structur": 2070, "package_a": 2070, "get_rdep": 2070, "all_path": 2070, "dependency_graph_str": 2070, "save_text": 2070, "save_binari": 2070, "my_resourc": 2070, "config_stuff": 2070, "raw_data": 2070, "my_byt": 2070, "complementari": [2070, 2083], "load_pickl": 2070, "load_text": 2070, "load_binari": 2070, "my_tensor": 2070, "__reduce_package__": 2070, "my_str": 2070, "time_import": 2070, "time_export": 2070, "pickler": 2070, "persistent_id": 2070, "persistent_load": 2070, "generated_module_nam": 2070, "get_unique_id": 2070, "clock_gettim": 2070, "unpackage_foo": 2070, "depickl": 2070, "foo_1": 2070, "foo_2": 2070, "foo_packag": 2070, "foo_collect": 2070, "foo1": 2070, "foo2": 2070, "imported_foo": 2070, "9857706": 2070, "650140837": 2070, "652698385": 2070, "__torch_package__": 2070, "is_in_packag": 2070, "userexcept": 2070, "unpackageableexcept": 2070, "loaded_modul": 2070, "import_modul": 2070, "save_source_str": 2070, "save_modul": 2070, "textwrap": 2070, "dedent": 2070, "my_funct": 2070, "is_packag": 2070, "importlib": 2070, "my_pickl": 2070, "get_my_resourc": 2070, "read_text": 2070, "torch_package_import": 2070, "get_my_pickl": 2070, "is_from_packag": 2070, "stdlib": 2070, "my_test": 2070, "f2": 2070, "sys_import": 2070, "script_model": 2070, "mixed_model": 2070, "python_model_with_scripted_submodul": 2070, "loaded_script": 2070, "loaded_mix": 2070, "convention": 2070, "94286146172688": 2070, "94286146172784": 2070, "consult": [2070, 2100], "essai": 2070, "another_packag": 2070, "pickletool": 2070, "ast": 2070, "deni": 2070, "my_export": 2070, "my_interned_modul": 2070, "package_export": 2070, "my_externed_modul": 2070, "my_mocked_modul": 2070, "unwant": [2070, 2087], "hodg": 2070, "podg": 2070, "bazel": 2070, "buck": 2070, "my_class_inst": 2070, "imported_myclass": 2070, "okai": 2070, "torch_package_0": 2070, "handle_me_this_wai": 2070, "inadvert": 2070, "pun": 2070, "packagingerror": 2070, "dependency_graph": 2070, "emptymatcherror": 2070, "allow_empti": 2070, "_sysimport": 2070, "hermet": 2070, "scan": 2070, "orderedimport": 2070, "add_depend": 2070, "graphviz": 2070, "lang": 2070, "denied_modul": 2070, "my_subpackag": 2070, 
"digraph": 2070, "externed_modul": 2070, "interned_modul": 2070, "mocked_modul": 2070, "register_extern_hook": 2070, "register_intern_hook": 2070, "register_mock_hook": 2070, "myobject": 2070, "save_source_fil": 2070, "file_or_directori": 2070, "my_subsubpackag": 2070, "file_or_buff": 2070, "module_allow": 2070, "pytorchfileread": 2070, "python_vers": 2070, "is_dir": 2070, "_kinetoprofil": 2071, "execution_trace_observ": 2071, "profileract": 2071, "export_memory_timelin": 2071, "executiontraceobserv": 2071, "add_metadata": 2071, "add_metadata_json": 2071, "unaggreg": 2071, "suffix": [2071, 2087, 2088], "png": 2071, "gzip": 2071, "numbyt": 2071, "increment_vers": 2071, "_memory_profil": 2071, "export_stack": 2071, "self_cuda_time_tot": 2071, "preset_metadata_json": 2071, "preset": 2071, "on_trace_readi": 2071, "record_and_sav": 2071, "tensorboard_trace_handl": 2071, "dir_nam": 2071, "logdir": [2071, 2087], "plugin": [2071, 2087, 2099], "code_to_profil": 2071, "row_limit": 2071, "trace_handl": 2071, "test_trace_": 2071, "step_num": 2071, "code_iteration_to_profil": 2071, "register_callback": 2071, "execution_trac": 2071, "test_execution_trace_with_kineto": 2071, "test_profil": 2071, "_itraceobserv": 2071, "skip_first": 2071, "worker_nam": [2071, 2077], "use_gzip": 2071, "range_push": 2071, "range_pop": 2071, "4x": 2072, "88": [2072, 2107], "14k": 2072, "domin": 2072, "previous_layer_fp32": 2072, "linear_fp32": 2072, "activation_fp32": 2072, "next_layer_fp32": 2072, "linear_weight_fp32": 2072, "linear_int8_w_fp32_inp": 2072, "linear_weight_int8": 2072, "ptdq": 2072, "fc": 2072, "model_fp32": 2072, "model_int8": 2072, "quantize_dynam": 2072, "input_fp32": 2072, "previous_layer_int8": 2072, "linear_with_activation_int8": 2072, "next_layer_int8": 2072, "ptsq": 2072, "minmax": 2072, "l2norm": 2072, "model_fp32_fus": 2072, "fuse_modul": [2072, 2073], "model_fp32_prepar": 2072, "fq": 2072, "prepare_qat": 2072, "training_loop": 2072, "requant": 2072, "linear1": 2072, "custom_qconfig": 2072, "fxptq": 2072, "model_fp": 2072, "usermodel": 2072, "model_to_quant": 2072, "default_dynamic_qconfig": 2072, "model_prepar": 2072, "model_quant": 2072, "model_fus": 2072, "quantize_pt2": 2072, "prepare_pt2": 2072, "_export": [2072, 2095], "capture_pre_autograd_graph": 2072, "xnnpackquant": 2072, "get_symmetric_quantization_config": 2072, "prepare_qat_pt2": 2072, "convert_pt2": 2072, "per_tensor_symmetr": [2072, 2075], "per_channel_symmetr": [2072, 2075], "per_channel_scal": 2072, "per_channel_zero_point": 2072, "quantized_tensor": 2072, "qengin": 2072, "in4": 2072, "tensorrt": [2072, 2094, 2099, 2104], "fx2trt": 2072, "float_modul": [2072, 2092], "staticquantcustommodul": 2072, "observed_modul": 2072, "default_qconfig": [2072, 2093], "vnni": 2072, "test_quantized_op": 2072, "testquantizedop": 2072, "test_custom_module_lstm": 2072, "test_quantize_fx": 2072, "testquantizefx": 2072, "test_static_lstm": 2072, "some_oper": 2072, "e2": 2072, "thnn_conv2d_forward": 2072, "quantizedcpu": 2072, "some_qconfig": 2072, "linearpackedparam": 2072, "_modul": 2072, "prepare_orig": 2072, "quantized_orig": 2072, "scripted_quant": 2072, "fp32_op": 2073, "int8_op": 2073, "cooperlak": 2073, "audit": 2073, "op_fp32": 2073, "op_int8": 2073, "_numeric_suit": 2073, "_numeric_suite_fx": 2073, "0x7f8f93444ce0": 2074, "0x7f8f6cbd6790": 2074, "0x7f8f6cbd6820": 2074, "num_tensor_args_to_observation_typ": 2074, "convbn1d": 2074, "0x7f8f6a62d670": 2074, "reference_quantized_module_for_root": 2074, "fuse_convtranspose_bn": 2074, 
"0x7f8f6a62d820": 2074, "linearbn1d": 2074, "fuse_linear_bn": 2074, "0x7f8f6a62d790": 2074, "convbn2d": 2074, "convbn3d": 2074, "bnrelu2d": 2074, "bnrelu3d": 2074, "input_type_to_index": 2074, "conv_fus": 2074, "convbnrelu1d": 2074, "convbnrelu2d": 2074, "convbnrelu3d": 2074, "convrelu1d": 2074, "convrelu3d": 2074, "0x7f8f6cbd68b0": 2074, "0x7f8f6cbd6ee0": 2074, "quint4x2": [2074, 2084, 2088, 2089], "embedding_op": 2074, "0x7f8f6cbd9e50": 2074, "00390625": 2074, "0x7f8f6cbd9700": 2074, "0x7f8f6cbd98b0": 2074, "0x7f8f6cbd6dc0": 2074, "0x7f8f6cbd9ca0": 2074, "0x7f8f6cb5a4c0": 2074, "0x7f8f6cbd9d30": 2074, "0x7f8f6cbd90d0": 2074, "linear_fus": 2074, "_sequential_wrapper2": 2074, "0x7f8f9a690430": 2074, "0x7f8f6cbd6ca0": 2074, "0x7f8f647f1dc0": 2074, "fuse_conv_bn_relu": 2074, "0x7f8f6a62d700": 2074, "0x7f8f647f1e50": 2074, "0x7f8f647f1ee0": 2074, "0x7f8f647f1f70": 2074, "0x7f8f6477e040": 2074, "0x7f8f6477e0d0": 2074, "0x7f8f6477e160": 2074, "0x7f8f6477e1f0": 2074, "0x7f8f6477e280": 2074, "0x7f8f6477e310": 2074, "0x7f8f6477e3a0": 2074, "0x7f8f6cbd6e50": 2074, "0078125": 2074, "customconfig": 2075, "custom_module_config": 2075, "_caller": 2076, "_devices_kw": 2076, "slowli": 2076, "unind": 2076, "shortcom": 2077, "stitch": 2077, "init_rpc": [2077, 2078], "rpc_backend_opt": 2077, "trainer3": 2077, "parameterserver2": 2077, "backendtyp": 2077, "rpcbackendopt": 2077, "rpcagent": 2077, "transmit": 2077, "calle": [2077, 2079], "_set_rpc_timeout": 2077, "worker0": 2077, "my_script_add": 2077, "wire": 2077, "fut2": 2077, "meth": 2077, "grace": 2077, "userrref": [2077, 2079], "async_execut": 2077, "paus": 2077, "outmost": 2077, "async_add_chain": 2077, "worker2": 2077, "script_add": 2077, "async_add": 2077, "asyncexecutionclass": 2077, "static_async_add": 2077, "class_async_add": 2077, "ret_fut": 2077, "bound_async_add": 2077, "rpc_timeout": 2077, "incid": [2077, 2079], "nvlink": 2077, "multiplex": 2077, "tensorpiperpcbackendopt": 2077, "num_worker_thread": 2077, "device_map": 2077, "_transport": 2077, "tensorpipeag": 2077, "set_device_map": 2077, "intermitt": 2077, "backoff": 2077, "pyrref": 2077, "type_hint": 2077, "_distributed_rpc": 2077, "dist_autograd_ctx_id": 2077, "ctx_id": 2077, "ownerrref": [2077, 2079], "remote_modul": 2077, "forward_async": 2077, "remote_devic": 2077, "workernam": 2077, "ps0": 2077, "remote_linear_modul": 2077, "get_module_rref": 2077, "remote_paramet": 2077, "my_add": [2078, 2114], "t4": 2078, "t5": 2078, "autograd_message_id": 2078, "autograd_context_id": 2078, "send1": 2078, "kickoff": 2078, "recv2": 2078, "heard": 2078, "send2": 2078, "recv1": 2078, "dist_autograd_simpl": 2078, "random_tensor": 2078, "_run_process": 2078, "dst_rank": 2078, "dst_name": 2078, "run_process": 2078, "rrefid": 2079, "transient": 2079, "udf": 2079, "deliveri": 2079, "knowledg": 2079, "danger": 2079, "ancestor": 2079, "trickier": 2079, "forkid": 2079, "ack": 2079, "solid": 2079, "followup": 2079, "lil": 2082, "stark": 2082, "9093": 2082, "1411": 2082, "7568": 2082, "9589": 2082, "2794": 2082, "catastroph": 2082, "9900": 2082, "metadata_mask": 2082, "rce": 2082, "rc": 2082, "62": 2082, "to_sparse_semi_structur": 2082, "1x4": 2082, "16x16": 2082, "a_spars": 2082, "sparsesemistructuredtensor": 2082, "000": 2082, "400": 2082, "s2": 2082, "plain_dim_s": 2082, "lp64": 2082, "280": 2082, "310": 2082, "sp": 2082, "9078": 2082, "conception": 2082, "sparsesemistructur": 2082, "lobpcg": 2082, "geneig": 2082, "pca_lowrank": 2082, "kindli": 2082, "airy_ai": 2083, "airi": 2083, "9635": 2083, "entr": 2083, 
"3466": 2083, "int_": 2083, "8427": 2083, "0561": 2083, "4769": 2083, "9213": 2083, "8858": 2083, "7683": 2083, "7481": 2083, "2920": 2083, "int_0": 2083, "gammaln": 2083, "a1": 2083, "a2": 2083, "3528": 2083, "5665": 2083, "6472": 2083, "4335": 2083, "2650": 2083, "2661": 2083, "2796": 2083, "8808": 2083, "3019": 2083, "4658": 2083, "3085": 2083, "2430": 2083, "2070": 2083, "i1": 2083, "5652": 2083, "9534": 2083, "7595": 2083, "2153": 2083, "log_ndtr": 2083, "_ndtr": 2083, "6077": 2083, "7832": 2083, "841": 2083, "6931": 2083, "1728": 2083, "023": 2083, "9331": 2083, "6486": 2083, "1523": 2083, "6516": 2083, "6352": 2083, "6131": 2083, "7169": 2083, "6261": 2083, "displaystyl": 2083, "undefiend": 2083, "6835": 2083, "8474": 2083, "1929": 2083, "7162": 2083, "4180": 2083, "3928": 2083, "4007": 2083, "7586": 2083, "3901": 2083, "5049": 2083, "ndtr": 2083, "0228": 2083, "1587": 2083, "9772": 2083, "9987": 2083, "2p": 2083, "64493": 2083, "4041": 2083, "8288": 2083, "4939": 2083, "4091": 2083, "8863": 2083, "771": 2083, "scaled_modified_bessel_k0": 2083, "scaled_modified_bessel_k1": 2083, "2948": 2083, "0267": 2083, "1566": 2083, "9186": 2083, "8631": 2083, "0259": 2083, "1300": 2083, "spheric": 2083, "xlog1pi": 2083, "3863": 2083, "1972": 2083, "6094": 2083, "2189": 2083, "8283": 2083, "7726": 2083, "0986": 2083, "1589": 2083, "hurwitz": 2083, "6449": 2083, "0823": 2083, "wrap_storag": 2084, "complex_doubl": 2084, "from_buff": 2084, "is_hpu": 2084, "pickle_storage_typ": 2084, "byteswap": 2084, "posix": 2084, "shm_unlink": 2084, "unlink": 2084, "quint2x4": [2084, 2089], "twelv": 2085, "halftensor": [2085, 2088], "bfloat16tensor": [2085, 2088], "chartensor": [2085, 2088], "shorttensor": [2085, 2088], "binary16": [2085, 2088], "significand": [2085, 2088], "float_tensor": 2085, "double_tensor": 2085, "complex_float_tensor": 2085, "complex_double_tensor": 2085, "int_tensor": 2085, "long_tensor": 2085, "uint_tensor": 2085, "bool_tensor": 2085, "long_zerodim": 2085, "int_zerodim": 2085, "cuda1": 2085, "channels_last_3d": 2085, "ndhwc": 2085, "blogpost": [2086, 2101], "trainset": 2087, "mnist": 2087, "mnist_train": 2087, "trainload": 2087, "grayscal": 2087, "make_grid": 2087, "add_imag": 2087, "add_graph": 2087, "clutter": 2087, "n_iter": 2087, "purge_step": 2087, "max_queu": 2087, "flush_sec": 2087, "filename_suffix": 2087, "current_datetime_hostnam": 2087, "exp1": 2087, "global_step": 2087, "purg": 2087, "event_file_writ": 2087, "eventfilewrit": 2087, "may04_22": 2087, "54_": 2087, "macbook": 2087, "my_experi": 2087, "lr_0": 2087, "1_batch_16": 2087, "locallr_0": 2087, "scalar_valu": 2087, "walltim": 2087, "new_styl": 2087, "double_precis": 2087, "blobnam": 2087, "simple_valu": 2087, "main_tag": 2087, "tag_scalar_dict": 2087, "run_14h": 2087, "xsinx": 2087, "xcosx": 2087, "tanx": 2087, "add_histogram": 2087, "max_bin": 2087, "img_tensor": 2087, "dataformat": 2087, "chw": 2087, "hwc": 2087, "hw": 2087, "wh": 2087, "3xhxw": 2087, "img_hwc": 2087, "my_imag": 2087, "my_image_hwc": 2087, "img_batch": 2087, "my_image_batch": 2087, "add_figur": 2087, "add_video": 2087, "vid_tensor": 2087, "fp": 2087, "moviepi": 2087, "add_audio": 2087, "snd_tensor": 2087, "sample_r": 2087, "44100": 2087, "add_text": 2087, "text_str": 2087, "input_to_model": 2087, "use_strict_trac": 2087, "fed": 2087, "add_embed": 2087, "label_img": 2087, "metadata_head": 2087, "projector": 2087, "kwlist": 2087, "add_pr_curv": 2087, "num_threshold": 2087, "pr_curv": 2087, "add_custom_scalar": 2087, "chart": 2087, "categorynam": 2087, 
"chartnam": 2087, "listofproperti": 2087, "multilin": 2087, "taiwan": 2087, "twse": 2087, "0050": 2087, "2330": 2087, "dow": 2087, "aaa": 2087, "bbb": 2087, "ccc": 2087, "nasdaq": 2087, "add_mesh": 2087, "config_dict": 2087, "threej": 2087, "vertex": 2087, "number_of_vertic": 2087, "vertices_tensor": 2087, "colors_tensor": 2087, "faces_tensor": 2087, "my_mesh": 2087, "add_hparam": 2087, "hparam_dict": 2087, "metric_dict": 2087, "hparam_domain_discret": 2087, "run_nam": 2087, "hparam": 2087, "bsize": 2087, "uint16": [2088, 2104], "uint32": [2088, 2104], "uint64": [2088, 2104], "e4m3": 2088, "e5m2": 2088, "asid": 2088, "58734": 2088, "2209": 2088, "05433": 2088, "tini": [2088, 2089, 2107, 2118], "_like": 2088, "coercion": 2088, "allow_subclass": 2089, "check_devic": 2089, "check_layout": 2089, "6e": 2089, "3e": 2089, "assert_equ": 2089, "000000000000001e": 2089, "1e0": 2089, "argh": 2089, "nfooter": 2089, "66": 2089, "footer": 2089, "exclude_zero": 2089, "1205": 2089, "2282": 2089, "6380": 2089, "default_gener": 2091, "is_integ": 2091, "data_dependent_output": 2091, "dynamic_output_shap": 2091, "inplace_view": 2091, "needs_fixed_stride_ord": 2091, "nondeterministic_bitwis": 2091, "nondeterministic_seed": 2091, "pt2_compliant_tag": 2091, "compare_weight": 2092, "float_dict": 2092, "quantized_dict": 2092, "wt_compare_dict": 2092, "qmodel": 2092, "compute_error": 2092, "weight_dict": 2092, "get_logger_dict": 2092, "shadowlogg": 2092, "outputlogg": [2092, 2093], "target_dict": 2092, "q_modul": 2092, "logger_cl": [2092, 2093], "prepare_model_with_stub": 2092, "module_swap_list": 2092, "q_model": 2092, "ob_dict": 2092, "compare_model_stub": 2092, "quantizablebasicblock": 2092, "get_matching_activ": 2092, "act_dict": 2092, "prepare_model_output": 2092, "compare_model_output": 2092, "act_compare_dict": 2092, "weight_comparison": 2093, "extract_weight": 2093, "sqnr": 2093, "extend_logger_results_with_comparison": 2093, "compute_sqnr": 2093, "mp_n": 2093, "mq_n": 2093, "add_logg": 2093, "act_comparison": 2093, "extract_logger_info": 2093, "mp_shadows_mq": 2093, "add_shadow_logg": 2093, "shadow_act_comparison": 2093, "extract_shadow_logger_info": 2093, "ref_node_nam": 2093, "prev_node_nam": 2093, "model_nam": 2093, "ref_nam": 2093, "prev_node_target_typ": 2093, "ref_node_target_typ": 2093, "results_typ": 2093, "index_within_arg": 2093, "index_of_arg": 2093, "qconfig_str": 2093, "outputcomparisonlogg": 2093, "x_ref": 2093, "nstracer": 2093, "skipped_module_nam": 2093, "skipped_module_class": 2093, "model_name_a": 2093, "model_a": 2093, "model_name_b": 2093, "model_b": 2093, "base_name_to_sets_of_related_op": 2093, "unmatchable_types_map": 2093, "op_to_type_to_weight_extraction_fn": 2093, "unmatch": 2093, "nsresultstyp": 2093, "name_a": 2093, "name_b": 2093, "should_log_input": 2093, "model_a_with_logg": 2093, "model_b_with_logg": 2093, "model_name_to_use_for_layer_nam": 2093, "node_type_to_io_type_map": 2093, "model_a_shadows_b": 2093, "model_name_1": 2093, "model_name_2": 2093, "comparison_fn": 2093, "comparison_nam": 2093, "prepare_n_shadows_model": 2093, "qconfig_multi_map": 2093, "custom_prepare_fn": 2093, "custom_prepare_kwarg": 2093, "custom_trac": 2093, "args_kwargs_m": 2093, "op_m": 2093, "output_m": 2093, "op_m_n": 2093, "log_m_n": 2093, "log_m_0": 2093, "qconfig_n": 2093, "args_m": 2093, "op_m_prepared_with_qconfig_n": 2093, "out_m_n": 2093, "kwargs_m": 2093, "docblock": 2093, "loggers_set_en": 2093, "loggers_set_save_activ": 2093, "save_activ": 2093, "convert_n_shadows_model": 2093, 
"custom_convert_fn": 2093, "custom_convert_kwarg": 2093, "extract_results_n_shadows_model": 2093, "print_comparisons_n_shadows_model": 2093, "compute_normalized_l2_error": 2093, "compute_cosine_similar": 2093, "surfac": 2094, "openai": 2094, "ipex": 2094, "torch_tensorrt": 2094, "tvm": 2094, "apach": 2094, "openvino": 2094, "aotinductor": 2094, "dashboard": [2094, 2102, 2107], "nnmodul": 2094, "craft": 2095, "aot_compil": 2095, "torchinductor_freez": 2095, "batch_dim": 2095, "so_path": 2095, "aot_inductor": 2095, "output_path": 2095, "getcwd": 2095, "model_container_runner_cuda": 2095, "model_container_runner_cpu": 2095, "aotimodelcontainerrunnercuda": 2095, "aotimodelcontainerrunnercpu": 2095, "kcuda": 2095, "kcpu": 2095, "aoti_runn": 2095, "runner": 2095, "inputs2": 2095, "cmakelist": 2095, "aoti_exampl": 2095, "cmake_minimum_requir": 2095, "fatal_error": 2095, "find_packag": 2095, "add_execut": 2095, "add_custom_command": 2095, "cmake_current_source_dir": 2095, "target_link_librari": 2095, "set_properti": 2095, "cxx_standard": 2095, "mkdir": 2095, "5184": 2095, "4462": 2095, "4611": 2095, "4744": 2095, "4811": 2095, "4938": 2095, "4193": 2095, "cudafloattyp": 2095, "4883": 2095, "4703": 2095, "simd": 2097, "isa": 2097, "amx": 2097, "collect_env": 2097, "avx512f": 2097, "avx512bw": 2097, "avx512_vnni": 2097, "amx_til": 2097, "amx_bf16": 2097, "amx_int8": 2097, "debut": 2098, "cachingalloc": 2098, "cudagraph_tre": 2098, "unintend": 2098, "prematur": 2098, "mark_step_begin": 2098, "my_custom_backend": 2099, "f_opt": 2099, "my_compil": [2099, 2102], "torch_dynamo_backend": 2099, "your_modul": 2099, "minifi": [2099, 2104], "aot_autograd": 2099, "fw_compil": 2099, "bw_compil": 2099, "make_boxed_func": 2099, "model_opt": 2099, "0x7f1a894649a8": 2099, "mockmodul": 2099, "optimized_mod": 2099, "toy_exampl": [2099, 2102, 2104, 2113], "abs_1": [2099, 2102], "0x7f8d259298a0": 2099, "superior": 2099, "optimize_for_inference_compil": 2099, "code_to_acceler": 2099, "lookup_backend": 2099, "trt_compil": 2099, "inductor_compil": 2099, "recognit": 2100, "induct": 2100, "mark_dynam": [2100, 2101], "shapeenv": [2100, 2103], "reusabl": 2100, "plumb": 2100, "symnodeimpl": 2100, "python_symnod": 2100, "_meta_registr": 2100, "decomp": [2100, 2103], "primtorch": [2100, 2103], "apparatu": 2100, "constrain_rang": 2100, "wherebi": 2100, "blame": 2101, "insan": 2101, "backtrac": [2101, 2103, 2113], "blindli": 2101, "arduou": 2101, "mse": 2101, "l_x_": 2101, "l_y_": 2101, "l_n_": 2101, "sequel": 2101, "_convert_frame_assert": 2101, "variabletrack": 2101, "listvari": 2101, "constantvari": [2101, 2104, 2113], "tensorvari": [2101, 2104, 2113], "variablebuild": 2101, "_wrap": 2101, "userdefinedobjectvari": 2101, "sourcebuild": 2101, "load_glob": [2101, 2102], "torchingraphfunctionvari": 2101, "instructortranslatorbas": 2101, "symbolic_convert": [2101, 2113], "instructiontranslatorbas": 2101, "build_list": 2101, "inst": 2101, "popn": 2101, "argval": 2101, "mutable_loc": 2101, "mutableloc": 2101, "instructiontransl": 2101, "wrap_fx_proxi": 2101, "overkil": 2101, "___check_type_id": 2101, "94334122025024": 2101, "9433": 2101, "getitemsourc": 2101, "94439025877664": 2101, "94439025840192": 2101, "saw": 2101, "l_a_": [2101, 2102], "l_b_": [2101, 2102], "__compiled_fn_1": 2101, "check_tensor": [2101, 2102], "maybe_mark_dynam": 2101, "mark_stat": 2101, "symnodevari": 2101, "812": 2101, "django": 2101, "rust": 2101, "choke": 2101, "doctr_det_predictor": 2101, "cv2": 2101, "postprocess": 2101, "confess": 2101, "revisit": 2101, 
"__compiled_fn_0": [2101, 2102], "load_fast": [2101, 2102], "store_fast": [2101, 2102], "graph_out_0": 2101, "load_const": [2101, 2102], "binary_subscr": 2101, "__resume_at_14_1": 2101, "rot_two": 2101, "resume_in_fn": 2101, "__compiled_fn_2": 2101, "unpack_sequ": [2101, 2102], "l6": 2101, "l8": 2101, "l20": 2101, "l22": 2101, "hamper": 2101, "ride": 2101, "demystifi": 2101, "literatur": 2101, "eval_fram": [2101, 2102], "lingo": 2101, "interestingli": 2101, "523": 2102, "watch": 2102, "kaichao": 2102, "_dynamo_dynamic_indic": 2102, "utils_devic": 2102, "___skip_backend_check": 2102, "___current_backend": 2102, "___lookup_backend": 2102, "140355900538256": 2102, "dispatchkeyset": 2102, "backendselect": 2102, "adinplaceorview": 2102, "autogradcpu": 2102, "recaptur": 2102, "decompil": 2102, "depyf": 2102, "eval_with_kei": 2102, "0x7f9ca082f8a0": 2102, "load_method": 2102, "binary_add": 2102, "binary_true_divid": 2102, "compare_op": 2102, "pop_jump_if_fals": 2102, "binary_multipli": 2102, "__resume_at_30_1": 2102, "__resume_at_38_2": 2102, "__temp_1": 2102, "youkaichao": 2102, "__resume_at_": 2102, "jump_absolut": 2102, "resume_at": 2102, "_debug_get_cache_entry_list": 2102, "__code__": 2102, "codetyp": 2102, "innermost_fn": 2102, "cache_entri": 2102, "check_fn": 2102, "code_part": 2102, "___guarded_cod": 2102, "___check_global_st": 2102, "140215810860528": 2102, "___check_tensor": 2102, "tensor_check_nam": 2102, "co_freevar": 2102, "__closure__": 2102, "___is_grad_en": 2102, "___are_deterministic_algorithms_en": 2102, "___is_torch_function_en": 2102, "value_a": 2102, "value_b": 2102, "__self__": 2102, "compiled_exampl": 2102, "get_cache_entri": 2102, "recompile_and_add_another_cache_entri": 2102, "trash": 2103, "subclass_zoo": 2103, "bunch": 2103, "from_real_tensor": 2103, "fakeifi": 2103, "dispatch_devic": 2103, "ly": 2103, "derefer": 2103, "in_kernel_invocation_manag": 2103, "unwrap": 2103, "test_fake_tensor": 2103, "fake_mod": 2103, "fake_x": 2103, "fake_i": 2103, "fake_z": 2103, "_guard": 2103, "detect_fake_mod": 2103, "fake_arg": 2103, "maybe_disable_fake_tensor_mod": 2103, "nich": 2103, "faketensorprop": 2103, "fake_tensor_prop": 2103, "propagate_dont_convert_input": 2103, "fake_input": 2103, "real_tensor": 2103, "annoi": 2103, "somehow": 2103, "fakecopymod": 2103, "gave": 2103, "fakeif": 2103, "tension": 2103, "analys": 2103, "metaconvert": 2103, "die": 2103, "saroufim": 2104, "evalfram": 2104, "usercod": 2104, "rob": 2104, "diminish": 2104, "vast": 2104, "250k": 2104, "aitempl": 2104, "aot_eag": [2104, 2113], "compile_tim": [2104, 2111, 2113], "torch_compile_debug": [2104, 2106], "troubl": [2104, 2105, 2113], "compileprofil": [2104, 2113], "profiler_model": [2104, 2113], "traffic": 2104, "frozen_toy_exampl": 2104, "multiprocessor": 2104, "some_fun": [2104, 2113], "insurmount": [2104, 2113], "woo": [2104, 2113], "framesummari": [2104, 2113], "generic_jump": [2104, 2113], "torch_dynamo_resume_in_toy_example_at_5": [2104, 2113], "torchdynamo_dynamic_shap": 2104, "cv": 2104, "app": 2104, "unnecessarili": 2104, "cold": [2104, 2110], "visibli": 2104, "torchdynamo_repro_level": [2104, 2113], "bisect": [2104, 2113], "torchdynamo_repro_aft": [2104, 2113], "dramat": [2104, 2113], "allevi": 2104, "wrapper_fn": 2104, "my_fn": 2104, "pitfal": 2104, "_indices_from": 2104, "recarrai": 2104, "float128": 2104, "complex256": 2104, "esoter": 2104, "ufunc": 2104, "poly1d": 2104, "__array_wrap__": 2104, "ctype": 2104, "numpy_fn": 2104, "tweak": 2104, "wrap_numpi": 2104, "charg": 2104, "oop": 2104, 
"costli": 2104, "daunt": 2104, "diagnos": 2104, "pinpoint": 2104, "discern": 2104, "trace_numpi": 2104, "_numpi": 2104, "uncommon": 2104, "finer": 2104, "a_fn": [2104, 2105], "aa_fn": [2104, 2105], "ab_fn": [2104, 2105], "handel": 2105, "unblock": 2105, "nnthi": 2105, "nnnote": 2105, "screen": [2105, 2111], "is_dynamo_compil": 2105, "b_fn": 2105, "white": 2105, "new_fn": 2106, "famou": 2106, "crunch": 2106, "torchinductor_": 2106, "your_usernam": 2106, "triton_meta": 2106, "i32": 2106, "mutated_arg_nam": 2106, "instance_descriptor": 2106, "divisible_by_16": 2106, "equal_to_1": 2106, "triton_": [2106, 2111], "in_ptr0": 2106, "out_ptr0": 2106, "xnumel": 2106, "xblock": 2106, "tl": 2106, "constexpr": 2106, "xoffset": 2106, "program_id": 2106, "xindex": 2106, "xmask": 2106, "tmp0": 2106, "tmp1": 2106, "tmp2": 2106, "v0": 2106, "opt_model": 2106, "timm": [2106, 2110], "berttoken": 2106, "bertmodel": 2106, "uncas": 2106, "me": 2106, "encoded_input": 2106, "return_tensor": 2106, "trigonometri": 2106, "skim": 2106, "create_model": 2106, "resnext101_32x8d": 2106, "torchinductor_unique_kernel_nam": 2107, "triton_poi_fused_cat_155": 2107, "poi": 2107, "torchinductor_benchmark_kernel": 2107, "har": 2107, "torchinductor_max_autotun": 2107, "mixnet_l": 2107, "timm_model": 2107, "torchinductor_shunt": 2107, "qz": 2107, "cqz7hvhood7y3psp7fy6msjxsxyli7qiwiybizdwtjw6ffyq5wwd": 2107, "shunting314": 2107, "c2a4d8a28b00fcb5586d0e9d9bf77f9f": 2107, "48efc83b12ec3ead950052e4a0220b10": 2107, "compiled_module_profil": 2107, "browser": [2107, 2111], "zoom": [2107, 2111, 2115], "distort": [2107, 2111], "densenet121": 2107, "69": 2107, "cutlass": 2107, "57": 2107, "ff": 2107, "justifi": 2107, "triton_red_fus": 2107, "__native_batch_norm_legit_functional_16": 2107, "cjk2vm3446xrk7rth7hr6pun7xxo3dnzubwcn6ydrpifal4eykrz": 2107, "_adaptive_avg_pool2d_backward": 2108, "half_to_float": 2108, "no_stat": 2108, "start_step": 2108, "avg_pool2d_backward": 2108, "convolution_backward": 2108, "bias_siz": 2108, "output_mask": 2108, "scalar_mod": 2108, "tensor_mod": 2108, "embedding_dense_backward": 2108, "num_weight": 2108, "max_pool2d_with_indices_backward": 2108, "native_group_norm_backward": 2108, "rstd": 2108, "native_layer_norm_backward": 2108, "tensor_scalar": 2108, "tensor_tensor": 2108, "dim_int": 2108, "dim_intlist": 2108, "broadcast_dimens": 2108, "start_indic": 2108, "limit_indic": 2108, "start_index": 2108, "limit_index": 2108, "outer_length": 2108, "constabl": 2109, "_forward_pre_hook": 2109, "_backward_pre_hook": 2109, "_backward_hook": 2109, "_state_dict_hook": 2109, "load_": 2109, "avoiabl": 2109, "skip_nnmodule_hook_guard": 2109, "pre_backward": 2109, "warn_onc": 2109, "hui": 2110, "nightli": 2110, "night": 2110, "40gb": [2110, 2113], "2ghz": 2110, "torchbench": 2110, "trend": 2110, "droplist": 2110, "with_cudagraph": 2110, "toosl": 2111, "ncu": 2111, "model_c": 2111, "fwd_bwd": 2111, "scroll": 2111, "shortcut": 2111, "compiledfunctionbackward": 2111, "ac2g": 2111, "dropdown": 2111, "525": 2111, "_init_for_cuda_graph": 2111, "warmup_compil": 2111, "fn_c": 2111, "trace_compil": 2111, "meanwhil": 2111, "clue": 2111, "synthet": 2111, "modelwithbreak": 2111, "create_sequenti": 2111, "mod1": 2111, "mod2": 2111, "mod3": 2111, "mod4": 2111, "trace_break": 2111, "culaunchkernel": 2111, "cudalaunchkernel": 2111, "unique_kernel_nam": 2111, "sit": 2112, "replace_add_with_mul": 2112, "insert_relu_after_add": 2112, "new_relu_nod": 2112, "replaceaddwithmul": 2112, "transformed_graph_modul": 2112, "replaceaddwithmulsub": 2112, 
"mul_r": 2112, "removedetachpass": 2112, "args_map": 2112, "_schema": 2112, "kwarg_onli": 2112, "scalartotensorpass": 2112, "try_coerc": 2112, "replace_pattern": 2112, "replaced_pattern": 2112, "replace_pattern_with_filt": 2112, "replacedpattern": 2112, "passmanag": 2112, "pass_manag": 2112, "pm": 2112, "replace_add_with_div": 2112, "replace_div_with_mul": 2112, "run_checks_after_each_pass": 2112, "suppress_check_failur": 2112, "graph_module_out": 2112, "set_check": 2112, "check_div_target": 2112, "add_check": 2112, "subgraphmatch": 2112, "matcher_util": 2112, "match_output": 2112, "match_placehold": 2112, "remove_overlapping_match": 2112, "ignore_liter": 2112, "largemodel": 2112, "_bia": 2112, "large_model_graph": 2112, "patternmodel": 2112, "_weight_1": 2112, "_bias_1": 2112, "pattern_graph": 2112, "subgraph_match": 2112, "match_result": 2112, "internalmatch": 2112, "placeholder_nod": 2112, "returning_nod": 2112, "capabilitybasedpartition": 2112, "l34": 2112, "operator_support": 2112, "operatorsupportbas": 2112, "allows_single_node_partit": 2112, "non_compute_op": 2112, "_oper": 2112, "allowed_single_node_partition_op": 2112, "ll28c1": 2112, "l28c1": 2112, "is_node_support": 2112, "l150": 2112, "any_chain": 2112, "l164": 2112, "addmuloperatorsupport": 2112, "capability_partition": 2112, "op_support": 2112, "partition_list": 2112, "propose_partit": 2112, "fused_graph_modul": 2112, "fuse_partit": 2112, "lazo": 2113, "torchdynamo_verbos": 2113, "replay_record_en": 2113, "torchdynamo_debug_funct": 2113, "test_assertion_error": 2113, "compiled_test_assertion_error": 2113, "convert_fram": 2113, "mlazo": 2113, "837": 2113, "build_map": 2113, "log_level": 2113, "thousand": 2113, "test_backend_error": 2113, "compiled_test_backend_error": 2113, "decomp_fn": 2113, "810": 2113, "repro_aft": 2113, "minifier_launch": 2113, "base_dir": 2113, "rand_strid": 2113, "0a0": 2113, "gitfddfc44": 2113, "fddfc4488afb207971c54ad4bf58130fdc8a4dc5": 2113, "2022": 2113, "thu_feb_10_18": 2113, "41_pst_2022": 2113, "v11": 2113, "cuda_11": 2113, "r11": 2113, "30978841_0": 2113, "sxm4": 2113, "compile_fx": 2113, "compile_fx_inn": 2113, "toy_compil": 2113, "debug_util": 2113, "run_fwd_maybe_bwd": 2113, "opt_mod": 2113, "rg": 2113, "test_model": 2113, "debug_dir_root": 2113, "torch_compile_debug_dir": 2113, "run_2023_03_01_08_20_52_143510": 2113, "pid_180167": 2113, "model__0_forward_1": 2113, "aot_model___0_debug": 2113, "fx_graph_read": 2113, "fx_graph_runn": 2113, "fx_graph_transform": 2113, "ir_post_fus": 2113, "ir_pre_fus": 2113, "fx_graph": 2113, "buf1": 2113, "schedulernod": 2113, "computedbuff": 2113, "memorydep": 2113, "unmet_depend": 2113, "buf0": 2113, "met_depend": 2113, "primals_2": 2113, "buf1_loop_bodi": 2113, "var_rang": 2113, "z0": 2113, "index0": 2113, "index1": 2113, "get_index": 2113, "get_index_1": 2113, "load_1": 2113, "get_index_2": 2113, "compiled_fun": 2113, "hinder": 2113, "explanation_verbos": 2113, "out_guard": 2113, "ops_per_graph": 2113, "compiled_toi": 2113, "torchdynamo_extended_debug_guard_ad": 2113, "torchdynamo_extended_debug_create_symbol": 2113, "torchdynamo_extended_debug_cpp": 2113, "torchinductor_force_disable_cach": 2113, "force_disable_cach": 2113, "as_subclass": 2114, "handle_torch_funct": 2114, "public_api": 2114, "relevant_arg": 2114, "has_torch_function_unari": 2114, "is_tensor_lik": 2114, "notatensor": 2114, "tensorlik": 2114, "is_tensor_method_or_properti": 2114, "__get__": 2114, "__module__": 2114, "wrap_torch_funct": 2114, "drag": 2115, "interactiv": 2115, "memory_viz": 
2115, "run_your_cod": 2115, "my_snapshot": 2115, "javascript": 2115, "upload": 2115, "pan": 2115, "mous": 2115, "slider": 2115, "b7f064c000000_0": 2115, "7f064c000000": 2115, "max_entri": 2115, "_memory_viz": 2115, "2u": 2115, "50n": 2115, "currenli": 2115, "typeddict": 2115, "device_trac": 2115, "traceentri": 2115, "total_s": 2115, "segment_typ": 2115, "allocated_s": 2115, "active_s": 2115, "active_awaiting_fre": 2115, "requested_s": 2115, "active_alloc": 2115, "took": 2115, "free_request": 2115, "free_complet": 2115, "segment_alloc": 2115, "segment_fre": 2115, "coorel": 2115, "device_fre": 2115, "dump_snapshot": 2115, "interplai": 2116, "torch_nccl_high_prior": 2117, "torch_nccl_dump_on_timeout": 2117, "torch_nccl_trace_buffer_s": 2117, "torch_nccl_desync_debug": 2117, "desync": 2117, "culprit": 2117, "torch_nccl_enable_tim": 2117, "torch_nccl_enable_monitor": 2117, "torch_nccl_heartbeat_timeout_sec": 2117, "prolong": 2117, "flight": 2117, "ring": 2117, "tracebuff": 2117, "torch_nccl_wait_timeout_dump_milsec": 2117, "torch_nccl_debug_info_temp_fil": 2117, "torch_nccl_debug_info_pipe_fil": 2117, "torch_nccl_nan_check": 2117, "smallest_norm": 2118, "subnorm": 2118, "denormal_numb": 2118, "tailor": 2120}, "objects": {"": [[2091, 0, 0, "-", "torch"], [2014, 7, 1, "-", "PYTORCH_JIT"]], "torch": [[2084, 1, 1, "", "BFloat16Storage"], [2084, 1, 1, "", "BoolStorage"], [2084, 1, 1, "", "ByteStorage"], [2084, 1, 1, "", "CharStorage"], [2084, 1, 1, "", "ComplexDoubleStorage"], [2084, 1, 1, "", "ComplexFloatStorage"], [2084, 1, 1, "", "DoubleStorage"], [2084, 1, 1, "", "FloatStorage"], [90, 1, 1, "", "Generator"], [2084, 1, 1, "", "HalfStorage"], [2084, 1, 1, "", "IntStorage"], [2084, 1, 1, "", "LongStorage"], [2084, 1, 1, "", "QInt32Storage"], [2084, 1, 1, "", "QInt8Storage"], [2084, 1, 1, "", "QUInt2x4Storage"], [2084, 1, 1, "", "QUInt4x2Storage"], [2084, 1, 1, "", "QUInt8Storage"], [2084, 1, 1, "", "ShortStorage"], [2081, 1, 1, "", "Size"], [2091, 1, 1, "", "SymBool"], [2091, 1, 1, "", "SymFloat"], [2091, 1, 1, "", "SymInt"], [2091, 1, 1, "", "Tag"], [2088, 1, 1, "", "Tensor"], [2084, 1, 1, "", "TypedStorage"], [2084, 1, 1, "", "UntypedStorage"], [13, 0, 0, "-", "__config__"], [62, 0, 0, "-", "__future__"], [627, 5, 1, "", "_assert"], [628, 5, 1, "", "_foreach_abs"], [629, 5, 1, "", "_foreach_abs_"], [630, 5, 1, "", "_foreach_acos"], [631, 5, 1, "", "_foreach_acos_"], [632, 5, 1, "", "_foreach_asin"], [633, 5, 1, "", "_foreach_asin_"], [634, 5, 1, "", "_foreach_atan"], [635, 5, 1, "", "_foreach_atan_"], [636, 5, 1, "", "_foreach_ceil"], [637, 5, 1, "", "_foreach_ceil_"], [638, 5, 1, "", "_foreach_cos"], [639, 5, 1, "", "_foreach_cos_"], [640, 5, 1, "", "_foreach_cosh"], [641, 5, 1, "", "_foreach_cosh_"], [642, 5, 1, "", "_foreach_erf"], [643, 5, 1, "", "_foreach_erf_"], [644, 5, 1, "", "_foreach_erfc"], [645, 5, 1, "", "_foreach_erfc_"], [646, 5, 1, "", "_foreach_exp"], [647, 5, 1, "", "_foreach_exp_"], [648, 5, 1, "", "_foreach_expm1"], [649, 5, 1, "", "_foreach_expm1_"], [650, 5, 1, "", "_foreach_floor"], [651, 5, 1, "", "_foreach_floor_"], [652, 5, 1, "", "_foreach_frac"], [653, 5, 1, "", "_foreach_frac_"], [654, 5, 1, "", "_foreach_lgamma"], [655, 5, 1, "", "_foreach_lgamma_"], [656, 5, 1, "", "_foreach_log"], [657, 5, 1, "", "_foreach_log10"], [658, 5, 1, "", "_foreach_log10_"], [659, 5, 1, "", "_foreach_log1p"], [660, 5, 1, "", "_foreach_log1p_"], [661, 5, 1, "", "_foreach_log2"], [662, 5, 1, "", "_foreach_log2_"], [663, 5, 1, "", "_foreach_log_"], [664, 5, 1, "", "_foreach_neg"], [665, 5, 
1, "", "_foreach_neg_"], [666, 5, 1, "", "_foreach_reciprocal"], [667, 5, 1, "", "_foreach_reciprocal_"], [668, 5, 1, "", "_foreach_round"], [669, 5, 1, "", "_foreach_round_"], [670, 5, 1, "", "_foreach_sigmoid"], [671, 5, 1, "", "_foreach_sigmoid_"], [672, 5, 1, "", "_foreach_sin"], [673, 5, 1, "", "_foreach_sin_"], [674, 5, 1, "", "_foreach_sinh"], [675, 5, 1, "", "_foreach_sinh_"], [676, 5, 1, "", "_foreach_sqrt"], [677, 5, 1, "", "_foreach_sqrt_"], [678, 5, 1, "", "_foreach_tan"], [679, 5, 1, "", "_foreach_tan_"], [680, 5, 1, "", "_foreach_trunc"], [681, 5, 1, "", "_foreach_trunc_"], [682, 5, 1, "", "_foreach_zero_"], [2023, 0, 0, "-", "_logging"], [684, 5, 1, "", "abs"], [685, 5, 1, "", "absolute"], [686, 5, 1, "", "acos"], [687, 5, 1, "", "acosh"], [688, 5, 1, "", "add"], [689, 5, 1, "", "addbmm"], [690, 5, 1, "", "addcdiv"], [691, 5, 1, "", "addcmul"], [692, 5, 1, "", "addmm"], [693, 5, 1, "", "addmv"], [694, 5, 1, "", "addr"], [695, 5, 1, "", "adjoint"], [696, 5, 1, "", "all"], [697, 5, 1, "", "allclose"], [698, 5, 1, "", "amax"], [699, 5, 1, "", "amin"], [700, 5, 1, "", "aminmax"], [0, 0, 0, "-", "amp"], [701, 5, 1, "", "angle"], [702, 5, 1, "", "any"], [2072, 0, 0, "-", "ao"], [869, 5, 1, "", "arange"], [870, 5, 1, "", "arccos"], [871, 5, 1, "", "arccosh"], [872, 5, 1, "", "arcsin"], [873, 5, 1, "", "arcsinh"], [874, 5, 1, "", "arctan"], [875, 5, 1, "", "arctan2"], [876, 5, 1, "", "arctanh"], [877, 5, 1, "", "are_deterministic_algorithms_enabled"], [878, 5, 1, "", "argmax"], [879, 5, 1, "", "argmin"], [880, 5, 1, "", "argsort"], [881, 5, 1, "", "argwhere"], [882, 5, 1, "", "as_strided"], [883, 5, 1, "", "as_tensor"], [884, 5, 1, "", "asarray"], [885, 5, 1, "", "asin"], [886, 5, 1, "", "asinh"], [887, 5, 1, "", "atan"], [888, 5, 1, "", "atan2"], [889, 5, 1, "", "atanh"], [890, 5, 1, "", "atleast_1d"], [891, 5, 1, "", "atleast_2d"], [892, 5, 1, "", "atleast_3d"], [0, 1, 1, "", "autocast"], [1, 0, 0, "-", "autograd"], [2, 0, 0, "-", "backends"], [944, 5, 1, "", "baddbmm"], [945, 5, 1, "", "bartlett_window"], [946, 5, 1, "", "bernoulli"], [947, 5, 1, "", "bincount"], [948, 5, 1, "", "bitwise_and"], [949, 5, 1, "", "bitwise_left_shift"], [950, 5, 1, "", "bitwise_not"], [951, 5, 1, "", "bitwise_or"], [952, 5, 1, "", "bitwise_right_shift"], [953, 5, 1, "", "bitwise_xor"], [954, 5, 1, "", "blackman_window"], [955, 5, 1, "", "block_diag"], [956, 5, 1, "", "bmm"], [957, 5, 1, "", "broadcast_shapes"], [958, 5, 1, "", "broadcast_tensors"], [959, 5, 1, "", "broadcast_to"], [960, 5, 1, "", "bucketize"], [961, 5, 1, "", "can_cast"], [962, 5, 1, "", "cartesian_prod"], [963, 5, 1, "", "cat"], [964, 5, 1, "", "cdist"], [965, 5, 1, "", "ceil"], [966, 5, 1, "", "chain_matmul"], [967, 5, 1, "", "cholesky"], [968, 5, 1, "", "cholesky_inverse"], [969, 5, 1, "", "cholesky_solve"], [970, 5, 1, "", "chunk"], [971, 5, 1, "", "clamp"], [972, 5, 1, "", "clip"], [973, 5, 1, "", "clone"], [974, 5, 1, "", "column_stack"], [975, 5, 1, "", "combinations"], [976, 5, 1, "", "compile"], [977, 5, 1, "", "compiled_with_cxx11_abi"], [2096, 0, 0, "-", "compiler"], [987, 5, 1, "", "complex"], [988, 5, 1, "", "concat"], [989, 5, 1, "", "concatenate"], [990, 5, 1, "", "cond"], [991, 5, 1, "", "conj"], [992, 5, 1, "", "conj_physical"], [2091, 0, 0, "-", "contrib"], [993, 5, 1, "", "copysign"], [994, 5, 1, "", "corrcoef"], [995, 5, 1, "", "cos"], [996, 5, 1, "", "cosh"], [997, 5, 1, "", "count_nonzero"], [998, 5, 1, "", "cov"], [16, 0, 0, "-", "cpu"], [1008, 5, 1, "", "cross"], [17, 0, 0, "-", "cuda"], [1088, 5, 1, "", 
"cummax"], [1089, 5, 1, "", "cummin"], [1090, 5, 1, "", "cumprod"], [1091, 5, 1, "", "cumsum"], [1092, 5, 1, "", "cumulative_trapezoid"], [1093, 5, 1, "", "deg2rad"], [1094, 5, 1, "", "dequantize"], [1095, 5, 1, "", "det"], [2085, 1, 1, "", "device"], [1096, 5, 1, "", "diag"], [1097, 5, 1, "", "diag_embed"], [1098, 5, 1, "", "diagflat"], [1099, 5, 1, "", "diagonal"], [1100, 5, 1, "", "diagonal_scatter"], [1101, 5, 1, "", "diff"], [1102, 5, 1, "", "digamma"], [1103, 5, 1, "", "dist"], [28, 0, 0, "-", "distributed"], [35, 0, 0, "-", "distributions"], [1104, 5, 1, "", "div"], [1105, 5, 1, "", "divide"], [1106, 5, 1, "", "dot"], [1107, 5, 1, "", "dsplit"], [1108, 5, 1, "", "dstack"], [2085, 1, 1, "", "dtype"], [1109, 5, 1, "", "einsum"], [1110, 5, 1, "", "empty"], [1111, 5, 1, "", "empty_like"], [1112, 5, 1, "", "empty_strided"], [1113, 1, 1, "", "enable_grad"], [1114, 5, 1, "", "eq"], [1115, 5, 1, "", "equal"], [1116, 5, 1, "", "erf"], [1117, 5, 1, "", "erfc"], [1118, 5, 1, "", "erfinv"], [1119, 5, 1, "", "exp"], [1120, 5, 1, "", "exp2"], [1121, 5, 1, "", "expm1"], [52, 0, 0, "-", "export"], [1122, 5, 1, "", "eye"], [1123, 5, 1, "", "fake_quantize_per_channel_affine"], [1124, 5, 1, "", "fake_quantize_per_tensor_affine"], [54, 0, 0, "-", "fft"], [1147, 5, 1, "", "fix"], [1148, 5, 1, "", "flatten"], [1149, 5, 1, "", "flip"], [1150, 5, 1, "", "fliplr"], [1151, 5, 1, "", "flipud"], [1152, 5, 1, "", "float_power"], [1153, 5, 1, "", "floor"], [1154, 5, 1, "", "floor_divide"], [1155, 5, 1, "", "fmax"], [1156, 5, 1, "", "fmin"], [1157, 5, 1, "", "fmod"], [1158, 5, 1, "", "frac"], [1159, 5, 1, "", "frexp"], [1160, 5, 1, "", "from_dlpack"], [1161, 5, 1, "", "from_file"], [1162, 5, 1, "", "from_numpy"], [1163, 5, 1, "", "frombuffer"], [1164, 5, 1, "", "full"], [1165, 5, 1, "", "full_like"], [57, 0, 0, "-", "func"], [2091, 0, 0, "-", "functional"], [63, 0, 0, "-", "futures"], [64, 0, 0, "-", "fx"], [1214, 5, 1, "", "gather"], [1215, 5, 1, "", "gcd"], [1216, 5, 1, "", "ge"], [1217, 5, 1, "", "geqrf"], [1218, 5, 1, "", "ger"], [1219, 5, 1, "", "get_default_device"], [1220, 5, 1, "", "get_default_dtype"], [1221, 5, 1, "", "get_deterministic_debug_mode"], [1222, 5, 1, "", "get_device_module"], [1223, 5, 1, "", "get_float32_matmul_precision"], [1224, 5, 1, "", "get_num_interop_threads"], [1225, 5, 1, "", "get_num_threads"], [1226, 5, 1, "", "get_rng_state"], [1227, 5, 1, "", "gradient"], [1228, 5, 1, "", "greater"], [1229, 5, 1, "", "greater_equal"], [1230, 5, 1, "", "gt"], [1231, 5, 1, "", "hamming_window"], [1232, 5, 1, "", "hann_window"], [1233, 5, 1, "", "heaviside"], [1234, 5, 1, "", "histc"], [1235, 5, 1, "", "histogram"], [1236, 5, 1, "", "histogramdd"], [1237, 5, 1, "", "hsplit"], [1238, 5, 1, "", "hspmm"], [1239, 5, 1, "", "hstack"], [2012, 0, 0, "-", "hub"], [1240, 5, 1, "", "hypot"], [1241, 5, 1, "", "i0"], [1242, 5, 1, "", "igamma"], [1243, 5, 1, "", "igammac"], [1244, 5, 1, "", "imag"], [1245, 5, 1, "", "index_add"], [1246, 5, 1, "", "index_copy"], [1247, 5, 1, "", "index_reduce"], [1248, 5, 1, "", "index_select"], [1249, 5, 1, "", "initial_seed"], [1250, 5, 1, "", "inner"], [1251, 5, 1, "", "inverse"], [1252, 5, 1, "", "is_complex"], [1253, 5, 1, "", "is_conj"], [1254, 5, 1, "", "is_deterministic_algorithms_warn_only_enabled"], [1255, 5, 1, "", "is_floating_point"], [1256, 5, 1, "", "is_grad_enabled"], [1257, 5, 1, "", "is_inference_mode_enabled"], [1258, 5, 1, "", "is_nonzero"], [1259, 5, 1, "", "is_storage"], [1260, 5, 1, "", "is_tensor"], [1261, 5, 1, "", "is_warn_always_enabled"], [1262, 5, 
1, "", "isclose"], [1263, 5, 1, "", "isfinite"], [1264, 5, 1, "", "isin"], [1265, 5, 1, "", "isinf"], [1266, 5, 1, "", "isnan"], [1267, 5, 1, "", "isneginf"], [1268, 5, 1, "", "isposinf"], [1269, 5, 1, "", "isreal"], [1270, 5, 1, "", "istft"], [2014, 0, 0, "-", "jit"], [1293, 5, 1, "", "kaiser_window"], [1294, 5, 1, "", "kron"], [1295, 5, 1, "", "kthvalue"], [2085, 1, 1, "", "layout"], [1296, 5, 1, "", "lcm"], [1297, 5, 1, "", "ldexp"], [1298, 5, 1, "", "le"], [1299, 5, 1, "", "lerp"], [1300, 5, 1, "", "less"], [1301, 5, 1, "", "less_equal"], [1302, 5, 1, "", "lgamma"], [2021, 0, 0, "-", "library"], [2022, 0, 0, "-", "linalg"], [1344, 5, 1, "", "linspace"], [1345, 5, 1, "", "load"], [1346, 5, 1, "", "lobpcg"], [1347, 5, 1, "", "log"], [1348, 5, 1, "", "log10"], [1349, 5, 1, "", "log1p"], [1350, 5, 1, "", "log2"], [1351, 5, 1, "", "logaddexp"], [1352, 5, 1, "", "logaddexp2"], [1353, 5, 1, "", "logcumsumexp"], [1354, 5, 1, "", "logdet"], [1355, 5, 1, "", "logical_and"], [1356, 5, 1, "", "logical_not"], [1357, 5, 1, "", "logical_or"], [1358, 5, 1, "", "logical_xor"], [1359, 5, 1, "", "logit"], [1360, 5, 1, "", "logspace"], [1361, 5, 1, "", "logsumexp"], [1362, 5, 1, "", "lt"], [1363, 5, 1, "", "lu"], [1364, 5, 1, "", "lu_solve"], [1365, 5, 1, "", "lu_unpack"], [1366, 5, 1, "", "manual_seed"], [2024, 0, 0, "-", "masked"], [1367, 5, 1, "", "masked_select"], [1368, 5, 1, "", "matmul"], [1369, 5, 1, "", "matrix_exp"], [1370, 5, 1, "", "matrix_power"], [1371, 5, 1, "", "max"], [1372, 5, 1, "", "maximum"], [1373, 5, 1, "", "mean"], [1374, 5, 1, "", "median"], [2085, 1, 1, "", "memory_format"], [1375, 5, 1, "", "meshgrid"], [1376, 5, 1, "", "min"], [1377, 5, 1, "", "minimum"], [1378, 5, 1, "", "mm"], [1379, 5, 1, "", "mode"], [2030, 0, 0, "-", "monitor"], [1380, 5, 1, "", "moveaxis"], [1381, 5, 1, "", "movedim"], [2031, 0, 0, "-", "mps"], [1396, 5, 1, "", "msort"], [2032, 0, 0, "-", "mtia"], [1412, 5, 1, "", "mul"], [1413, 5, 1, "", "multinomial"], [1414, 5, 1, "", "multiply"], [2033, 0, 0, "-", "multiprocessing"], [1415, 5, 1, "", "mv"], [1416, 5, 1, "", "mvlgamma"], [1417, 5, 1, "", "nan_to_num"], [1418, 5, 1, "", "nanmean"], [1419, 5, 1, "", "nanmedian"], [1420, 5, 1, "", "nanquantile"], [1421, 5, 1, "", "nansum"], [1422, 5, 1, "", "narrow"], [1423, 5, 1, "", "narrow_copy"], [1424, 5, 1, "", "ne"], [1425, 5, 1, "", "neg"], [1426, 5, 1, "", "negative"], [2036, 0, 0, "-", "nested"], [1427, 5, 1, "", "nextafter"], [2037, 0, 0, "-", "nn"], [1770, 1, 1, "", "no_grad"], [1771, 5, 1, "", "nonzero"], [1772, 5, 1, "", "norm"], [1773, 5, 1, "", "normal"], [1774, 5, 1, "", "not_equal"], [1775, 5, 1, "", "numel"], [1776, 5, 1, "", "ones"], [1777, 5, 1, "", "ones_like"], [2067, 0, 0, "-", "onnx"], [2069, 0, 0, "-", "optim"], [1814, 5, 1, "", "orgqr"], [1815, 5, 1, "", "ormqr"], [1816, 5, 1, "", "outer"], [2114, 0, 0, "-", "overrides"], [2070, 0, 0, "-", "package"], [1817, 5, 1, "", "pca_lowrank"], [1818, 5, 1, "", "permute"], [1819, 5, 1, "", "pinverse"], [1820, 5, 1, "", "poisson"], [1821, 5, 1, "", "polar"], [1822, 5, 1, "", "polygamma"], [1823, 5, 1, "", "positive"], [1824, 5, 1, "", "pow"], [1825, 5, 1, "", "prod"], [2071, 0, 0, "-", "profiler"], [1826, 5, 1, "", "promote_types"], [1827, 5, 1, "", "qr"], [1828, 5, 1, "", "quantile"], [2075, 0, 0, "-", "quantization"], [1829, 5, 1, "", "quantize_per_channel"], [1830, 5, 1, "", "quantize_per_tensor"], [1831, 5, 1, "", "quantized_batch_norm"], [1832, 5, 1, "", "quantized_max_pool1d"], [1833, 5, 1, "", "quantized_max_pool2d"], [2091, 0, 0, "-", 
"quasirandom"], [1835, 5, 1, "", "rad2deg"], [1836, 5, 1, "", "rand"], [1837, 5, 1, "", "rand_like"], [1838, 5, 1, "", "randint"], [1839, 5, 1, "", "randint_like"], [1840, 5, 1, "", "randn"], [1841, 5, 1, "", "randn_like"], [2076, 0, 0, "-", "random"], [1842, 5, 1, "", "randperm"], [1843, 5, 1, "", "range"], [1844, 5, 1, "", "ravel"], [1845, 5, 1, "", "real"], [1846, 5, 1, "", "reciprocal"], [1847, 5, 1, "", "remainder"], [1848, 5, 1, "", "renorm"], [1849, 5, 1, "", "repeat_interleave"], [1850, 5, 1, "", "reshape"], [1851, 5, 1, "", "resolve_conj"], [1852, 5, 1, "", "resolve_neg"], [1853, 5, 1, "", "result_type"], [2091, 0, 0, "-", "return_types"], [1854, 5, 1, "", "roll"], [1855, 5, 1, "", "rot90"], [1856, 5, 1, "", "round"], [1857, 5, 1, "", "row_stack"], [1858, 5, 1, "", "rsqrt"], [1859, 5, 1, "", "save"], [1860, 5, 1, "", "scatter"], [1861, 5, 1, "", "scatter_add"], [1862, 5, 1, "", "scatter_reduce"], [1863, 5, 1, "", "searchsorted"], [1864, 5, 1, "", "seed"], [1865, 5, 1, "", "select"], [1866, 5, 1, "", "select_scatter"], [2091, 0, 0, "-", "serialization"], [1867, 5, 1, "", "set_default_device"], [1868, 5, 1, "", "set_default_dtype"], [1869, 5, 1, "", "set_default_tensor_type"], [1870, 5, 1, "", "set_deterministic_debug_mode"], [1871, 5, 1, "", "set_float32_matmul_precision"], [1872, 5, 1, "", "set_flush_denormal"], [1873, 5, 1, "", "set_num_interop_threads"], [1874, 5, 1, "", "set_num_threads"], [1875, 5, 1, "", "set_printoptions"], [1876, 5, 1, "", "set_rng_state"], [1877, 5, 1, "", "set_warn_always"], [1878, 5, 1, "", "sgn"], [1879, 5, 1, "", "sigmoid"], [1880, 5, 1, "", "sign"], [2080, 0, 0, "-", "signal"], [1892, 5, 1, "", "signbit"], [1893, 5, 1, "", "sin"], [1894, 5, 1, "", "sinc"], [1895, 5, 1, "", "sinh"], [1896, 5, 1, "", "slice_scatter"], [1897, 5, 1, "", "slogdet"], [1898, 5, 1, "", "smm"], [1899, 5, 1, "", "softmax"], [1900, 5, 1, "", "sort"], [2082, 0, 0, "-", "sparse"], [1910, 5, 1, "", "sparse_bsc_tensor"], [1911, 5, 1, "", "sparse_bsr_tensor"], [1912, 5, 1, "", "sparse_compressed_tensor"], [1913, 5, 1, "", "sparse_coo_tensor"], [1914, 5, 1, "", "sparse_csc_tensor"], [1915, 5, 1, "", "sparse_csr_tensor"], [2083, 0, 0, "-", "special"], [1916, 5, 1, "", "split"], [1917, 5, 1, "", "sqrt"], [1918, 5, 1, "", "square"], [1919, 5, 1, "", "squeeze"], [1920, 5, 1, "", "sspaddmm"], [1921, 5, 1, "", "stack"], [1922, 5, 1, "", "std"], [1923, 5, 1, "", "std_mean"], [1924, 5, 1, "", "stft"], [2091, 0, 0, "-", "storage"], [1925, 5, 1, "", "sub"], [1926, 5, 1, "", "subtract"], [1927, 5, 1, "", "sum"], [1928, 5, 1, "", "svd"], [1929, 5, 1, "", "svd_lowrank"], [1930, 5, 1, "", "swapaxes"], [1931, 5, 1, "", "swapdims"], [1932, 5, 1, "", "sym_float"], [1933, 5, 1, "", "sym_int"], [1934, 5, 1, "", "sym_ite"], [1935, 5, 1, "", "sym_max"], [1936, 5, 1, "", "sym_min"], [1937, 5, 1, "", "sym_not"], [1938, 5, 1, "", "t"], [1939, 5, 1, "", "take"], [1940, 5, 1, "", "take_along_dim"], [1941, 5, 1, "", "tan"], [1942, 5, 1, "", "tanh"], [1943, 5, 1, "", "tensor"], [1944, 5, 1, "", "tensor_split"], [1945, 5, 1, "", "tensordot"], [2089, 0, 0, "-", "testing"], [1946, 5, 1, "", "tile"], [1947, 5, 1, "", "topk"], [2091, 0, 0, "-", "torch_version"], [1948, 5, 1, "", "trace"], [1949, 5, 1, "", "transpose"], [1950, 5, 1, "", "trapezoid"], [1951, 5, 1, "", "trapz"], [1952, 5, 1, "", "triangular_solve"], [1953, 5, 1, "", "tril"], [1954, 5, 1, "", "tril_indices"], [1955, 5, 1, "", "triu"], [1956, 5, 1, "", "triu_indices"], [1957, 5, 1, "", "true_divide"], [1958, 5, 1, "", "trunc"], [2091, 0, 0, "-", "types"], 
[1959, 5, 1, "", "unbind"], [1960, 5, 1, "", "unflatten"], [1961, 5, 1, "", "unique"], [1962, 5, 1, "", "unique_consecutive"], [1963, 5, 1, "", "unravel_index"], [1964, 5, 1, "", "unsqueeze"], [1965, 5, 1, "", "use_deterministic_algorithms"], [2119, 0, 0, "-", "utils"], [1971, 5, 1, "", "vander"], [1972, 5, 1, "", "var"], [1973, 5, 1, "", "var_mean"], [1974, 5, 1, "", "vdot"], [2091, 0, 0, "-", "version"], [1975, 5, 1, "", "view_as_complex"], [1976, 5, 1, "", "view_as_real"], [1977, 5, 1, "", "vmap"], [1978, 5, 1, "", "vsplit"], [1979, 5, 1, "", "vstack"], [1980, 5, 1, "", "where"], [1981, 5, 1, "", "xlogy"], [2120, 0, 0, "-", "xpu"], [2010, 5, 1, "", "zeros"], [2011, 5, 1, "", "zeros_like"]], "torch.BFloat16Storage": [[2084, 2, 1, "", "dtype"]], "torch.BoolStorage": [[2084, 2, 1, "", "dtype"]], "torch.ByteStorage": [[2084, 2, 1, "", "dtype"]], "torch.CharStorage": [[2084, 2, 1, "", "dtype"]], "torch.ComplexDoubleStorage": [[2084, 2, 1, "", "dtype"]], "torch.ComplexFloatStorage": [[2084, 2, 1, "", "dtype"]], "torch.DoubleStorage": [[2084, 2, 1, "", "dtype"]], "torch.FloatStorage": [[2084, 2, 1, "", "dtype"]], "torch.Generator": [[90, 3, 1, "", "clone_state"], [90, 2, 1, "", "device"], [90, 3, 1, "", "get_state"], [90, 3, 1, "", "graphsafe_get_state"], [90, 3, 1, "", "graphsafe_set_state"], [90, 3, 1, "", "initial_seed"], [90, 3, 1, "", "manual_seed"], [90, 3, 1, "", "seed"], [90, 3, 1, "", "set_state"]], "torch.HalfStorage": [[2084, 2, 1, "", "dtype"]], "torch.IntStorage": [[2084, 2, 1, "", "dtype"]], "torch.LongStorage": [[2084, 2, 1, "", "dtype"]], "torch.QInt32Storage": [[2084, 2, 1, "", "dtype"]], "torch.QInt8Storage": [[2084, 2, 1, "", "dtype"]], "torch.QUInt2x4Storage": [[2084, 2, 1, "", "dtype"]], "torch.QUInt4x2Storage": [[2084, 2, 1, "", "dtype"]], "torch.QUInt8Storage": [[2084, 2, 1, "", "dtype"]], "torch.ShortStorage": [[2084, 2, 1, "", "dtype"]], "torch.Size": [[2081, 3, 1, "", "count"], [2081, 3, 1, "", "index"], [2081, 3, 1, "", "numel"]], "torch.SymFloat": [[2091, 3, 1, "", "is_integer"]], "torch.Tag": [[2091, 4, 1, "", "name"]], "torch.Tensor": [[2088, 2, 1, "", "H"], [2088, 2, 1, "", "T"], [2088, 3, 1, "", "__init__"], [91, 3, 1, "", "abs"], [92, 3, 1, "", "abs_"], [93, 3, 1, "", "absolute"], [94, 3, 1, "", "absolute_"], [95, 3, 1, "", "acos"], [96, 3, 1, "", "acos_"], [97, 3, 1, "", "acosh"], [98, 3, 1, "", "acosh_"], [99, 3, 1, "", "add"], [100, 3, 1, "", "add_"], [101, 3, 1, "", "addbmm"], [102, 3, 1, "", "addbmm_"], [103, 3, 1, "", "addcdiv"], [104, 3, 1, "", "addcdiv_"], [105, 3, 1, "", "addcmul"], [106, 3, 1, "", "addcmul_"], [107, 3, 1, "", "addmm"], [108, 3, 1, "", "addmm_"], [109, 3, 1, "", "addmv"], [110, 3, 1, "", "addmv_"], [111, 3, 1, "", "addr"], [112, 3, 1, "", "addr_"], [113, 3, 1, "", "adjoint"], [2035, 3, 1, "", "align_as"], [2035, 3, 1, "", "align_to"], [114, 3, 1, "", "all"], [115, 3, 1, "", "allclose"], [116, 3, 1, "", "amax"], [117, 3, 1, "", "amin"], [118, 3, 1, "", "aminmax"], [119, 3, 1, "", "angle"], [120, 3, 1, "", "any"], [121, 3, 1, "", "apply_"], [122, 3, 1, "", "arccos"], [123, 3, 1, "", "arccos_"], [124, 3, 1, "", "arccosh"], [125, 3, 1, "", "arccosh_"], [126, 3, 1, "", "arcsin"], [127, 3, 1, "", "arcsin_"], [128, 3, 1, "", "arcsinh"], [129, 3, 1, "", "arcsinh_"], [130, 3, 1, "", "arctan"], [131, 3, 1, "", "arctan2"], [132, 3, 1, "", "arctan2_"], [133, 3, 1, "", "arctan_"], [134, 3, 1, "", "arctanh"], [135, 3, 1, "", "arctanh_"], [136, 3, 1, "", "argmax"], [137, 3, 1, "", "argmin"], [138, 3, 1, "", "argsort"], [139, 3, 1, "", "argwhere"], 
[140, 3, 1, "", "as_strided"], [141, 3, 1, "", "as_subclass"], [142, 3, 1, "", "asin"], [143, 3, 1, "", "asin_"], [144, 3, 1, "", "asinh"], [145, 3, 1, "", "asinh_"], [146, 3, 1, "", "atan"], [147, 3, 1, "", "atan2"], [148, 3, 1, "", "atan2_"], [149, 3, 1, "", "atan_"], [150, 3, 1, "", "atanh"], [151, 3, 1, "", "atanh_"], [152, 3, 1, "", "backward"], [153, 3, 1, "", "baddbmm"], [154, 3, 1, "", "baddbmm_"], [155, 3, 1, "", "bernoulli"], [156, 3, 1, "", "bernoulli_"], [157, 3, 1, "", "bfloat16"], [158, 3, 1, "", "bincount"], [159, 3, 1, "", "bitwise_and"], [160, 3, 1, "", "bitwise_and_"], [161, 3, 1, "", "bitwise_left_shift"], [162, 3, 1, "", "bitwise_left_shift_"], [163, 3, 1, "", "bitwise_not"], [164, 3, 1, "", "bitwise_not_"], [165, 3, 1, "", "bitwise_or"], [166, 3, 1, "", "bitwise_or_"], [167, 3, 1, "", "bitwise_right_shift"], [168, 3, 1, "", "bitwise_right_shift_"], [169, 3, 1, "", "bitwise_xor"], [170, 3, 1, "", "bitwise_xor_"], [171, 3, 1, "", "bmm"], [172, 3, 1, "", "bool"], [173, 3, 1, "", "broadcast_to"], [174, 3, 1, "", "byte"], [175, 3, 1, "", "cauchy_"], [176, 3, 1, "", "ccol_indices"], [177, 3, 1, "", "cdouble"], [178, 3, 1, "", "ceil"], [179, 3, 1, "", "ceil_"], [180, 3, 1, "", "cfloat"], [181, 3, 1, "", "chalf"], [182, 3, 1, "", "char"], [183, 3, 1, "", "cholesky"], [184, 3, 1, "", "cholesky_inverse"], [185, 3, 1, "", "cholesky_solve"], [186, 3, 1, "", "chunk"], [187, 3, 1, "", "clamp"], [188, 3, 1, "", "clamp_"], [189, 3, 1, "", "clip"], [190, 3, 1, "", "clip_"], [191, 3, 1, "", "clone"], [192, 3, 1, "", "coalesce"], [193, 3, 1, "", "col_indices"], [194, 3, 1, "", "conj"], [195, 3, 1, "", "conj_physical"], [196, 3, 1, "", "conj_physical_"], [197, 3, 1, "", "contiguous"], [198, 3, 1, "", "copy_"], [199, 3, 1, "", "copysign"], [200, 3, 1, "", "copysign_"], [201, 3, 1, "", "corrcoef"], [202, 3, 1, "", "cos"], [203, 3, 1, "", "cos_"], [204, 3, 1, "", "cosh"], [205, 3, 1, "", "cosh_"], [206, 3, 1, "", "count_nonzero"], [207, 3, 1, "", "cov"], [208, 3, 1, "", "cpu"], [209, 3, 1, "", "cross"], [210, 3, 1, "", "crow_indices"], [211, 3, 1, "", "cuda"], [212, 3, 1, "", "cummax"], [213, 3, 1, "", "cummin"], [214, 3, 1, "", "cumprod"], [215, 3, 1, "", "cumprod_"], [216, 3, 1, "", "cumsum"], [217, 3, 1, "", "cumsum_"], [218, 3, 1, "", "data_ptr"], [219, 3, 1, "", "deg2rad"], [220, 3, 1, "", "dense_dim"], [221, 3, 1, "", "dequantize"], [222, 3, 1, "", "det"], [223, 3, 1, "", "detach"], [224, 3, 1, "", "detach_"], [225, 2, 1, "", "device"], [226, 3, 1, "", "diag"], [227, 3, 1, "", "diag_embed"], [228, 3, 1, "", "diagflat"], [229, 3, 1, "", "diagonal"], [230, 3, 1, "", "diagonal_scatter"], [231, 3, 1, "", "diff"], [232, 3, 1, "", "digamma"], [233, 3, 1, "", "digamma_"], [234, 3, 1, "", "dim"], [235, 3, 1, "", "dim_order"], [236, 3, 1, "", "dist"], [237, 3, 1, "", "div"], [238, 3, 1, "", "div_"], [239, 3, 1, "", "divide"], [240, 3, 1, "", "divide_"], [241, 3, 1, "", "dot"], [242, 3, 1, "", "double"], [243, 3, 1, "", "dsplit"], [244, 3, 1, "", "element_size"], [245, 3, 1, "", "eq"], [246, 3, 1, "", "eq_"], [247, 3, 1, "", "equal"], [248, 3, 1, "", "erf"], [249, 3, 1, "", "erf_"], [250, 3, 1, "", "erfc"], [251, 3, 1, "", "erfc_"], [252, 3, 1, "", "erfinv"], [253, 3, 1, "", "erfinv_"], [254, 3, 1, "", "exp"], [255, 3, 1, "", "exp_"], [256, 3, 1, "", "expand"], [257, 3, 1, "", "expand_as"], [258, 3, 1, "", "expm1"], [259, 3, 1, "", "expm1_"], [260, 3, 1, "", "exponential_"], [261, 3, 1, "", "fill_"], [262, 3, 1, "", "fill_diagonal_"], [263, 3, 1, "", "fix"], [264, 3, 1, "", "fix_"], [265, 3, 1, 
"", "flatten"], [266, 3, 1, "", "flip"], [267, 3, 1, "", "fliplr"], [268, 3, 1, "", "flipud"], [269, 3, 1, "", "float"], [270, 3, 1, "", "float_power"], [271, 3, 1, "", "float_power_"], [272, 3, 1, "", "floor"], [273, 3, 1, "", "floor_"], [274, 3, 1, "", "floor_divide"], [275, 3, 1, "", "floor_divide_"], [276, 3, 1, "", "fmax"], [277, 3, 1, "", "fmin"], [278, 3, 1, "", "fmod"], [279, 3, 1, "", "fmod_"], [280, 3, 1, "", "frac"], [281, 3, 1, "", "frac_"], [282, 3, 1, "", "frexp"], [283, 3, 1, "", "gather"], [284, 3, 1, "", "gcd"], [285, 3, 1, "", "gcd_"], [286, 3, 1, "", "ge"], [287, 3, 1, "", "ge_"], [288, 3, 1, "", "geometric_"], [289, 3, 1, "", "geqrf"], [290, 3, 1, "", "ger"], [291, 3, 1, "", "get_device"], [292, 2, 1, "", "grad"], [293, 3, 1, "", "greater"], [294, 3, 1, "", "greater_"], [295, 3, 1, "", "greater_equal"], [296, 3, 1, "", "greater_equal_"], [297, 3, 1, "", "gt"], [298, 3, 1, "", "gt_"], [299, 3, 1, "", "half"], [300, 3, 1, "", "hardshrink"], [301, 3, 1, "", "heaviside"], [302, 3, 1, "", "histc"], [303, 3, 1, "", "histogram"], [304, 3, 1, "", "hsplit"], [305, 3, 1, "", "hypot"], [306, 3, 1, "", "hypot_"], [307, 3, 1, "", "i0"], [308, 3, 1, "", "i0_"], [309, 3, 1, "", "igamma"], [310, 3, 1, "", "igamma_"], [311, 3, 1, "", "igammac"], [312, 3, 1, "", "igammac_"], [313, 2, 1, "", "imag"], [314, 3, 1, "", "index_add"], [315, 3, 1, "", "index_add_"], [316, 3, 1, "", "index_copy"], [317, 3, 1, "", "index_copy_"], [318, 3, 1, "", "index_fill"], [319, 3, 1, "", "index_fill_"], [320, 3, 1, "", "index_put"], [321, 3, 1, "", "index_put_"], [322, 3, 1, "", "index_reduce"], [323, 3, 1, "", "index_reduce_"], [324, 3, 1, "", "index_select"], [325, 3, 1, "", "indices"], [326, 3, 1, "", "inner"], [327, 3, 1, "", "int"], [328, 3, 1, "", "int_repr"], [329, 3, 1, "", "inverse"], [330, 3, 1, "", "is_coalesced"], [331, 3, 1, "", "is_complex"], [332, 3, 1, "", "is_conj"], [333, 3, 1, "", "is_contiguous"], [334, 2, 1, "", "is_cuda"], [335, 3, 1, "", "is_floating_point"], [336, 3, 1, "", "is_inference"], [337, 2, 1, "", "is_leaf"], [338, 2, 1, "", "is_meta"], [339, 3, 1, "", "is_pinned"], [340, 2, 1, "", "is_quantized"], [341, 3, 1, "", "is_set_to"], [342, 3, 1, "", "is_shared"], [343, 3, 1, "", "is_signed"], [344, 2, 1, "", "is_sparse"], [345, 2, 1, "", "is_sparse_csr"], [346, 3, 1, "", "isclose"], [347, 3, 1, "", "isfinite"], [348, 3, 1, "", "isinf"], [349, 3, 1, "", "isnan"], [350, 3, 1, "", "isneginf"], [351, 3, 1, "", "isposinf"], [352, 3, 1, "", "isreal"], [353, 3, 1, "", "istft"], [354, 3, 1, "", "item"], [355, 2, 1, "", "itemsize"], [356, 3, 1, "", "kthvalue"], [357, 3, 1, "", "lcm"], [358, 3, 1, "", "lcm_"], [359, 3, 1, "", "ldexp"], [360, 3, 1, "", "ldexp_"], [361, 3, 1, "", "le"], [362, 3, 1, "", "le_"], [363, 3, 1, "", "lerp"], [364, 3, 1, "", "lerp_"], [365, 3, 1, "", "less"], [366, 3, 1, "", "less_"], [367, 3, 1, "", "less_equal"], [368, 3, 1, "", "less_equal_"], [369, 3, 1, "", "lgamma"], [370, 3, 1, "", "lgamma_"], [371, 3, 1, "", "log"], [372, 3, 1, "", "log10"], [373, 3, 1, "", "log10_"], [374, 3, 1, "", "log1p"], [375, 3, 1, "", "log1p_"], [376, 3, 1, "", "log2"], [377, 3, 1, "", "log2_"], [378, 3, 1, "", "log_"], [379, 3, 1, "", "log_normal_"], [380, 3, 1, "", "logaddexp"], [381, 3, 1, "", "logaddexp2"], [382, 3, 1, "", "logcumsumexp"], [383, 3, 1, "", "logdet"], [384, 3, 1, "", "logical_and"], [385, 3, 1, "", "logical_and_"], [386, 3, 1, "", "logical_not"], [387, 3, 1, "", "logical_not_"], [388, 3, 1, "", "logical_or"], [389, 3, 1, "", "logical_or_"], [390, 3, 1, "", 
"logical_xor"], [391, 3, 1, "", "logical_xor_"], [392, 3, 1, "", "logit"], [393, 3, 1, "", "logit_"], [394, 3, 1, "", "logsumexp"], [395, 3, 1, "", "long"], [396, 3, 1, "", "lt"], [397, 3, 1, "", "lt_"], [398, 3, 1, "", "lu"], [399, 3, 1, "", "lu_solve"], [2088, 2, 1, "", "mH"], [2088, 2, 1, "", "mT"], [400, 3, 1, "", "map_"], [401, 3, 1, "", "masked_fill"], [402, 3, 1, "", "masked_fill_"], [403, 3, 1, "", "masked_scatter"], [404, 3, 1, "", "masked_scatter_"], [405, 3, 1, "", "masked_select"], [406, 3, 1, "", "matmul"], [407, 3, 1, "", "matrix_exp"], [408, 3, 1, "", "matrix_power"], [409, 3, 1, "", "max"], [410, 3, 1, "", "maximum"], [411, 3, 1, "", "mean"], [412, 3, 1, "", "median"], [413, 3, 1, "", "min"], [414, 3, 1, "", "minimum"], [415, 3, 1, "", "mm"], [416, 3, 1, "", "mode"], [417, 3, 1, "", "module_load"], [418, 3, 1, "", "moveaxis"], [419, 3, 1, "", "movedim"], [420, 3, 1, "", "msort"], [421, 3, 1, "", "mul"], [422, 3, 1, "", "mul_"], [423, 3, 1, "", "multinomial"], [424, 3, 1, "", "multiply"], [425, 3, 1, "", "multiply_"], [426, 3, 1, "", "mv"], [427, 3, 1, "", "mvlgamma"], [428, 3, 1, "", "mvlgamma_"], [2035, 2, 1, "", "names"], [429, 3, 1, "", "nan_to_num"], [430, 3, 1, "", "nan_to_num_"], [431, 3, 1, "", "nanmean"], [432, 3, 1, "", "nanmedian"], [433, 3, 1, "", "nanquantile"], [434, 3, 1, "", "nansum"], [435, 3, 1, "", "narrow"], [436, 3, 1, "", "narrow_copy"], [437, 2, 1, "", "nbytes"], [438, 2, 1, "", "ndim"], [439, 3, 1, "", "ndimension"], [440, 3, 1, "", "ne"], [441, 3, 1, "", "ne_"], [442, 3, 1, "", "neg"], [443, 3, 1, "", "neg_"], [444, 3, 1, "", "negative"], [445, 3, 1, "", "negative_"], [446, 3, 1, "", "nelement"], [447, 3, 1, "", "new_empty"], [448, 3, 1, "", "new_full"], [449, 3, 1, "", "new_ones"], [450, 3, 1, "", "new_tensor"], [451, 3, 1, "", "new_zeros"], [452, 3, 1, "", "nextafter"], [453, 3, 1, "", "nextafter_"], [454, 3, 1, "", "nonzero"], [455, 3, 1, "", "norm"], [456, 3, 1, "", "normal_"], [457, 3, 1, "", "not_equal"], [458, 3, 1, "", "not_equal_"], [459, 3, 1, "", "numel"], [460, 3, 1, "", "numpy"], [461, 3, 1, "", "orgqr"], [462, 3, 1, "", "ormqr"], [463, 3, 1, "", "outer"], [464, 3, 1, "", "permute"], [465, 3, 1, "", "pin_memory"], [466, 3, 1, "", "pinverse"], [467, 3, 1, "", "polygamma"], [468, 3, 1, "", "polygamma_"], [469, 3, 1, "", "positive"], [470, 3, 1, "", "pow"], [471, 3, 1, "", "pow_"], [472, 3, 1, "", "prod"], [473, 3, 1, "", "put_"], [474, 3, 1, "", "q_per_channel_axis"], [475, 3, 1, "", "q_per_channel_scales"], [476, 3, 1, "", "q_per_channel_zero_points"], [477, 3, 1, "", "q_scale"], [478, 3, 1, "", "q_zero_point"], [479, 3, 1, "", "qr"], [480, 3, 1, "", "qscheme"], [481, 3, 1, "", "quantile"], [482, 3, 1, "", "rad2deg"], [483, 3, 1, "", "random_"], [484, 3, 1, "", "ravel"], [485, 2, 1, "", "real"], [486, 3, 1, "", "reciprocal"], [487, 3, 1, "", "reciprocal_"], [488, 3, 1, "", "record_stream"], [2035, 3, 1, "", "refine_names"], [489, 3, 1, "", "register_hook"], [490, 3, 1, "", "register_post_accumulate_grad_hook"], [491, 3, 1, "", "remainder"], [492, 3, 1, "", "remainder_"], [2035, 3, 1, "", "rename"], [2035, 3, 1, "", "rename_"], [493, 3, 1, "", "renorm"], [494, 3, 1, "", "renorm_"], [495, 3, 1, "", "repeat"], [496, 3, 1, "", "repeat_interleave"], [497, 2, 1, "", "requires_grad"], [498, 3, 1, "", "requires_grad_"], [499, 3, 1, "", "reshape"], [500, 3, 1, "", "reshape_as"], [501, 3, 1, "", "resize_"], [502, 3, 1, "", "resize_as_"], [503, 3, 1, "", "resolve_conj"], [504, 3, 1, "", "resolve_neg"], [505, 3, 1, "", "retain_grad"], [506, 2, 1, "", 
"retains_grad"], [507, 3, 1, "", "roll"], [508, 3, 1, "", "rot90"], [509, 3, 1, "", "round"], [510, 3, 1, "", "round_"], [511, 3, 1, "", "row_indices"], [512, 3, 1, "", "rsqrt"], [513, 3, 1, "", "rsqrt_"], [514, 3, 1, "", "scatter"], [515, 3, 1, "", "scatter_"], [516, 3, 1, "", "scatter_add"], [517, 3, 1, "", "scatter_add_"], [518, 3, 1, "", "scatter_reduce"], [519, 3, 1, "", "scatter_reduce_"], [520, 3, 1, "", "select"], [521, 3, 1, "", "select_scatter"], [522, 3, 1, "", "set_"], [523, 3, 1, "", "sgn"], [524, 3, 1, "", "sgn_"], [525, 2, 1, "", "shape"], [526, 3, 1, "", "share_memory_"], [527, 3, 1, "", "short"], [528, 3, 1, "", "sigmoid"], [529, 3, 1, "", "sigmoid_"], [530, 3, 1, "", "sign"], [531, 3, 1, "", "sign_"], [532, 3, 1, "", "signbit"], [533, 3, 1, "", "sin"], [534, 3, 1, "", "sin_"], [535, 3, 1, "", "sinc"], [536, 3, 1, "", "sinc_"], [537, 3, 1, "", "sinh"], [538, 3, 1, "", "sinh_"], [539, 3, 1, "", "size"], [540, 3, 1, "", "slice_scatter"], [541, 3, 1, "", "slogdet"], [542, 3, 1, "", "smm"], [543, 3, 1, "", "softmax"], [544, 3, 1, "", "sort"], [545, 3, 1, "", "sparse_dim"], [546, 3, 1, "", "sparse_mask"], [547, 3, 1, "", "sparse_resize_"], [548, 3, 1, "", "sparse_resize_and_clear_"], [549, 3, 1, "", "split"], [550, 3, 1, "", "sqrt"], [551, 3, 1, "", "sqrt_"], [552, 3, 1, "", "square"], [553, 3, 1, "", "square_"], [554, 3, 1, "", "squeeze"], [555, 3, 1, "", "squeeze_"], [556, 3, 1, "", "sspaddmm"], [557, 3, 1, "", "std"], [558, 3, 1, "", "stft"], [559, 3, 1, "", "storage"], [560, 3, 1, "", "storage_offset"], [561, 3, 1, "", "storage_type"], [562, 3, 1, "", "stride"], [563, 3, 1, "", "sub"], [564, 3, 1, "", "sub_"], [565, 3, 1, "", "subtract"], [566, 3, 1, "", "subtract_"], [567, 3, 1, "", "sum"], [568, 3, 1, "", "sum_to_size"], [569, 3, 1, "", "svd"], [570, 3, 1, "", "swapaxes"], [571, 3, 1, "", "swapdims"], [572, 3, 1, "", "t"], [573, 3, 1, "", "t_"], [574, 3, 1, "", "take"], [575, 3, 1, "", "take_along_dim"], [576, 3, 1, "", "tan"], [577, 3, 1, "", "tan_"], [578, 3, 1, "", "tanh"], [579, 3, 1, "", "tanh_"], [580, 3, 1, "", "tensor_split"], [581, 3, 1, "", "tile"], [582, 3, 1, "", "to"], [583, 3, 1, "", "to_dense"], [584, 3, 1, "", "to_mkldnn"], [585, 3, 1, "", "to_sparse"], [586, 3, 1, "", "to_sparse_bsc"], [587, 3, 1, "", "to_sparse_bsr"], [588, 3, 1, "", "to_sparse_coo"], [589, 3, 1, "", "to_sparse_csc"], [590, 3, 1, "", "to_sparse_csr"], [591, 3, 1, "", "tolist"], [592, 3, 1, "", "topk"], [593, 3, 1, "", "trace"], [594, 3, 1, "", "transpose"], [595, 3, 1, "", "transpose_"], [596, 3, 1, "", "triangular_solve"], [597, 3, 1, "", "tril"], [598, 3, 1, "", "tril_"], [599, 3, 1, "", "triu"], [600, 3, 1, "", "triu_"], [601, 3, 1, "", "true_divide"], [602, 3, 1, "", "true_divide_"], [603, 3, 1, "", "trunc"], [604, 3, 1, "", "trunc_"], [605, 3, 1, "", "type"], [606, 3, 1, "", "type_as"], [607, 3, 1, "", "unbind"], [608, 3, 1, "", "unflatten"], [609, 3, 1, "", "unfold"], [610, 3, 1, "", "uniform_"], [611, 3, 1, "", "unique"], [612, 3, 1, "", "unique_consecutive"], [613, 3, 1, "", "unsqueeze"], [614, 3, 1, "", "unsqueeze_"], [615, 3, 1, "", "untyped_storage"], [616, 3, 1, "", "values"], [617, 3, 1, "", "var"], [618, 3, 1, "", "vdot"], [619, 3, 1, "", "view"], [620, 3, 1, "", "view_as"], [621, 3, 1, "", "vsplit"], [622, 3, 1, "", "where"], [623, 3, 1, "", "xlogy"], [624, 3, 1, "", "xlogy_"], [625, 3, 1, "", "xpu"], [626, 3, 1, "", "zero_"]], "torch.TypedStorage": [[2084, 3, 1, "", "bfloat16"], [2084, 3, 1, "", "bool"], [2084, 3, 1, "", "byte"], [2084, 3, 1, "", "char"], [2084, 3, 1, "", 
"clone"], [2084, 3, 1, "", "complex_double"], [2084, 3, 1, "", "complex_float"], [2084, 3, 1, "", "copy_"], [2084, 3, 1, "", "cpu"], [2084, 3, 1, "", "cuda"], [2084, 3, 1, "", "data_ptr"], [2084, 4, 1, "", "device"], [2084, 3, 1, "", "double"], [2084, 2, 1, "", "dtype"], [2084, 3, 1, "", "element_size"], [2084, 4, 1, "", "filename"], [2084, 3, 1, "", "fill_"], [2084, 3, 1, "", "float"], [2084, 3, 1, "", "float8_e4m3fn"], [2084, 3, 1, "", "float8_e4m3fnuz"], [2084, 3, 1, "", "float8_e5m2"], [2084, 3, 1, "", "float8_e5m2fnuz"], [2084, 3, 1, "", "from_buffer"], [2084, 3, 1, "", "from_file"], [2084, 3, 1, "", "get_device"], [2084, 3, 1, "", "half"], [2084, 3, 1, "", "hpu"], [2084, 3, 1, "", "int"], [2084, 4, 1, "", "is_cuda"], [2084, 4, 1, "", "is_hpu"], [2084, 3, 1, "", "is_pinned"], [2084, 3, 1, "", "is_shared"], [2084, 2, 1, "", "is_sparse"], [2084, 3, 1, "", "long"], [2084, 3, 1, "", "nbytes"], [2084, 3, 1, "", "pickle_storage_type"], [2084, 3, 1, "", "pin_memory"], [2084, 3, 1, "", "resizable"], [2084, 3, 1, "", "resize_"], [2084, 3, 1, "", "share_memory_"], [2084, 3, 1, "", "short"], [2084, 3, 1, "", "size"], [2084, 3, 1, "", "to"], [2084, 3, 1, "", "tolist"], [2084, 3, 1, "", "type"], [2084, 3, 1, "", "untyped"]], "torch.UntypedStorage": [[2084, 3, 1, "", "bfloat16"], [2084, 3, 1, "", "bool"], [2084, 3, 1, "", "byte"], [2084, 3, 1, "", "byteswap"], [2084, 3, 1, "", "char"], [2084, 3, 1, "", "clone"], [2084, 3, 1, "", "complex_double"], [2084, 3, 1, "", "complex_float"], [2084, 3, 1, "", "copy_"], [2084, 3, 1, "", "cpu"], [2084, 3, 1, "", "cuda"], [2084, 3, 1, "", "data_ptr"], [2084, 2, 1, "", "device"], [2084, 3, 1, "", "double"], [2084, 3, 1, "", "element_size"], [2084, 4, 1, "", "filename"], [2084, 3, 1, "", "fill_"], [2084, 3, 1, "", "float"], [2084, 3, 1, "", "float8_e4m3fn"], [2084, 3, 1, "", "float8_e4m3fnuz"], [2084, 3, 1, "", "float8_e5m2"], [2084, 3, 1, "", "float8_e5m2fnuz"], [2084, 3, 1, "", "from_buffer"], [2084, 3, 1, "", "from_file"], [2084, 3, 1, "", "get_device"], [2084, 3, 1, "", "half"], [2084, 3, 1, "", "hpu"], [2084, 3, 1, "", "int"], [2084, 4, 1, "", "is_cuda"], [2084, 4, 1, "", "is_hpu"], [2084, 3, 1, "", "is_pinned"], [2084, 3, 1, "", "is_shared"], [2084, 2, 1, "", "is_sparse"], [2084, 2, 1, "", "is_sparse_csr"], [2084, 3, 1, "", "long"], [2084, 3, 1, "", "mps"], [2084, 3, 1, "", "nbytes"], [2084, 3, 1, "", "new"], [2084, 3, 1, "", "pin_memory"], [2084, 3, 1, "", "resizable"], [2084, 3, 1, "", "resize_"], [2084, 3, 1, "", "share_memory_"], [2084, 3, 1, "", "short"], [2084, 3, 1, "", "size"], [2084, 3, 1, "", "to"], [2084, 3, 1, "", "tolist"], [2084, 3, 1, "", "type"], [2084, 3, 1, "", "untyped"]], "torch.__config__": [[13, 5, 1, "", "parallel_info"], [13, 5, 1, "", "show"]], "torch.__future__": [[62, 5, 1, "", "get_overwrite_module_params_on_conversion"], [62, 5, 1, "", "get_swap_module_params_on_conversion"], [62, 5, 1, "", "set_overwrite_module_params_on_conversion"], [62, 5, 1, "", "set_swap_module_params_on_conversion"]], "torch._higher_order_ops.cond": [[12, 5, 1, "", "cond"]], "torch._logging": [[683, 5, 1, "", "set_logs"]], "torch.amp": [[0, 0, 0, "-", "autocast_mode"], [0, 5, 1, "", "custom_bwd"], [0, 5, 1, "", "custom_fwd"], [0, 0, 0, "-", "grad_scaler"]], "torch.amp.autocast_mode": [[0, 5, 1, "", "is_autocast_available"]], "torch.ao": [[2072, 0, 0, "-", "nn"], [2072, 0, 0, "-", "ns"], [2072, 0, 0, "-", "pruning"], [2072, 0, 0, "-", "quantization"]], "torch.ao.nn": [[2075, 0, 0, "-", "intrinsic"], [2075, 0, 0, "-", "qat"], [2072, 0, 0, "-", 
"quantizable"], [2072, 0, 0, "-", "quantized"], [2072, 0, 0, "-", "sparse"]], "torch.ao.nn.intrinsic": [[703, 1, 1, "", "BNReLU2d"], [704, 1, 1, "", "BNReLU3d"], [705, 1, 1, "", "ConvBn1d"], [706, 1, 1, "", "ConvBn2d"], [707, 1, 1, "", "ConvBn3d"], [708, 1, 1, "", "ConvBnReLU1d"], [709, 1, 1, "", "ConvBnReLU2d"], [710, 1, 1, "", "ConvBnReLU3d"], [711, 1, 1, "", "ConvReLU1d"], [712, 1, 1, "", "ConvReLU2d"], [713, 1, 1, "", "ConvReLU3d"], [714, 1, 1, "", "LinearReLU"], [2075, 0, 0, "-", "modules"], [2075, 0, 0, "-", "qat"], [2075, 0, 0, "-", "quantized"]], "torch.ao.nn.intrinsic.modules": [[2072, 0, 0, "-", "fused"]], "torch.ao.nn.intrinsic.qat": [[715, 1, 1, "", "ConvBn1d"], [716, 1, 1, "", "ConvBn2d"], [717, 1, 1, "", "ConvBn3d"], [718, 1, 1, "", "ConvBnReLU1d"], [719, 1, 1, "", "ConvBnReLU2d"], [720, 1, 1, "", "ConvBnReLU3d"], [721, 1, 1, "", "ConvReLU2d"], [722, 1, 1, "", "ConvReLU3d"], [723, 1, 1, "", "LinearReLU"], [724, 1, 1, "", "freeze_bn_stats"], [2075, 0, 0, "-", "modules"], [725, 1, 1, "", "update_bn_stats"]], "torch.ao.nn.intrinsic.qat.modules": [[2072, 0, 0, "-", "conv_fused"], [2072, 0, 0, "-", "linear_fused"], [2072, 0, 0, "-", "linear_relu"]], "torch.ao.nn.intrinsic.quantized": [[726, 1, 1, "", "BNReLU2d"], [727, 1, 1, "", "BNReLU3d"], [728, 1, 1, "", "ConvReLU1d"], [729, 1, 1, "", "ConvReLU2d"], [730, 1, 1, "", "ConvReLU3d"], [731, 1, 1, "", "LinearReLU"], [2075, 0, 0, "-", "dynamic"], [2075, 0, 0, "-", "modules"]], "torch.ao.nn.intrinsic.quantized.dynamic": [[732, 1, 1, "", "LinearReLU"], [2075, 0, 0, "-", "modules"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules": [[2072, 0, 0, "-", "linear_relu"]], "torch.ao.nn.intrinsic.quantized.modules": [[2072, 0, 0, "-", "bn_relu"], [2072, 0, 0, "-", "conv_add"], [2072, 0, 0, "-", "conv_relu"], [2072, 0, 0, "-", "linear_relu"]], "torch.ao.nn.qat": [[733, 1, 1, "", "Conv2d"], [734, 1, 1, "", "Conv3d"], [735, 1, 1, "", "Linear"], [2075, 0, 0, "-", "dynamic"], [2075, 0, 0, "-", "modules"]], "torch.ao.nn.qat.Linear": [[735, 3, 1, "", "from_float"]], "torch.ao.nn.qat.dynamic": [[736, 1, 1, "", "Linear"], [2075, 0, 0, "-", "modules"]], "torch.ao.nn.qat.dynamic.modules": [[2072, 0, 0, "-", "linear"]], "torch.ao.nn.qat.modules": [[2072, 0, 0, "-", "conv"], [2072, 0, 0, "-", "embedding_ops"], [2072, 0, 0, "-", "linear"]], "torch.ao.nn.quantizable": [[737, 1, 1, "", "LSTM"], [738, 1, 1, "", "MultiheadAttention"], [2072, 0, 0, "-", "modules"]], "torch.ao.nn.quantizable.MultiheadAttention": [[738, 3, 1, "", "dequantize"], [738, 3, 1, "", "forward"]], "torch.ao.nn.quantizable.modules": [[2072, 0, 0, "-", "activation"], [2072, 0, 0, "-", "rnn"]], "torch.ao.nn.quantized": [[739, 1, 1, "", "BatchNorm2d"], [740, 1, 1, "", "BatchNorm3d"], [741, 1, 1, "", "Conv1d"], [742, 1, 1, "", "Conv2d"], [743, 1, 1, "", "Conv3d"], [744, 1, 1, "", "ConvTranspose1d"], [745, 1, 1, "", "ConvTranspose2d"], [746, 1, 1, "", "ConvTranspose3d"], [747, 1, 1, "", "ELU"], [748, 1, 1, "", "Embedding"], [749, 1, 1, "", "EmbeddingBag"], [750, 1, 1, "", "FXFloatFunctional"], [751, 1, 1, "", "FloatFunctional"], [752, 1, 1, "", "GroupNorm"], [753, 1, 1, "", "Hardswish"], [754, 1, 1, "", "InstanceNorm1d"], [755, 1, 1, "", "InstanceNorm2d"], [756, 1, 1, "", "InstanceNorm3d"], [757, 1, 1, "", "LayerNorm"], [758, 1, 1, "", "LeakyReLU"], [759, 1, 1, "", "Linear"], [760, 1, 1, "", "QFunctional"], [761, 1, 1, "", "ReLU6"], [762, 1, 1, "", "Sigmoid"], [2075, 0, 0, "-", "dynamic"], [2075, 0, 0, "-", "functional"], [2075, 0, 0, "-", "modules"], [2072, 0, 0, "-", "reference"]], 
"torch.ao.nn.quantized.Conv1d": [[741, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.Conv2d": [[742, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.Conv3d": [[743, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.Embedding": [[748, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.EmbeddingBag": [[749, 3, 1, "", "from_float"]], "torch.ao.nn.quantized.Linear": [[759, 3, 1, "", "from_float"], [759, 3, 1, "", "from_reference"]], "torch.ao.nn.quantized.dynamic": [[763, 1, 1, "", "GRU"], [764, 1, 1, "", "GRUCell"], [765, 1, 1, "", "LSTM"], [766, 1, 1, "", "LSTMCell"], [767, 1, 1, "", "Linear"], [768, 1, 1, "", "RNNCell"], [2075, 0, 0, "-", "modules"]], "torch.ao.nn.quantized.dynamic.Linear": [[767, 3, 1, "", "from_float"], [767, 3, 1, "", "from_reference"]], "torch.ao.nn.quantized.dynamic.modules": [[2072, 0, 0, "-", "conv"], [2072, 0, 0, "-", "linear"], [2072, 0, 0, "-", "rnn"]], "torch.ao.nn.quantized.functional": [[769, 1, 1, "", "adaptive_avg_pool2d"], [770, 1, 1, "", "adaptive_avg_pool3d"], [771, 1, 1, "", "avg_pool2d"], [772, 1, 1, "", "avg_pool3d"], [773, 1, 1, "", "celu"], [774, 1, 1, "", "clamp"], [775, 1, 1, "", "conv1d"], [776, 1, 1, "", "conv2d"], [777, 1, 1, "", "conv3d"], [778, 1, 1, "", "elu"], [779, 1, 1, "", "hardsigmoid"], [780, 1, 1, "", "hardswish"], [781, 1, 1, "", "hardtanh"], [782, 1, 1, "", "interpolate"], [783, 1, 1, "", "leaky_relu"], [784, 1, 1, "", "linear"], [785, 1, 1, "", "max_pool1d"], [786, 1, 1, "", "max_pool2d"], [787, 1, 1, "", "threshold"], [788, 1, 1, "", "upsample"], [789, 1, 1, "", "upsample_bilinear"], [790, 1, 1, "", "upsample_nearest"]], "torch.ao.nn.quantized.modules": [[2072, 0, 0, "-", "activation"], [2072, 0, 0, "-", "batchnorm"], [2072, 0, 0, "-", "conv"], [2072, 0, 0, "-", "dropout"], [2072, 0, 0, "-", "embedding_ops"], [2072, 0, 0, "-", "functional_modules"], [2072, 0, 0, "-", "linear"], [2072, 0, 0, "-", "normalization"], [2072, 0, 0, "-", "rnn"], [2072, 0, 0, "-", "utils"]], "torch.ao.nn.quantized.reference": [[2072, 0, 0, "-", "modules"]], "torch.ao.nn.quantized.reference.modules": [[2072, 0, 0, "-", "conv"], [2072, 0, 0, "-", "linear"], [2072, 0, 0, "-", "rnn"], [2072, 0, 0, "-", "sparse"], [2072, 0, 0, "-", "utils"]], "torch.ao.nn.sparse": [[2072, 0, 0, "-", "quantized"]], "torch.ao.nn.sparse.quantized": [[2072, 0, 0, "-", "dynamic"], [2072, 0, 0, "-", "linear"], [2072, 0, 0, "-", "utils"]], "torch.ao.nn.sparse.quantized.dynamic": [[2072, 0, 0, "-", "linear"]], "torch.ao.ns": [[2092, 0, 0, "-", "_numeric_suite"], [2093, 0, 0, "-", "_numeric_suite_fx"], [2072, 0, 0, "-", "fx"]], "torch.ao.ns._numeric_suite": [[2092, 1, 1, "", "Logger"], [2092, 1, 1, "", "OutputLogger"], [2092, 1, 1, "", "Shadow"], [2092, 1, 1, "", "ShadowLogger"], [2092, 5, 1, "", "compare_model_outputs"], [2092, 5, 1, "", "compare_model_stub"], [2092, 5, 1, "", "compare_weights"], [2092, 5, 1, "", "get_logger_dict"], [2092, 5, 1, "", "get_matching_activations"], [2092, 5, 1, "", "prepare_model_outputs"], [2092, 5, 1, "", "prepare_model_with_stubs"]], "torch.ao.ns._numeric_suite.Logger": [[2092, 3, 1, "", "forward"]], "torch.ao.ns._numeric_suite.OutputLogger": [[2092, 3, 1, "", "forward"]], "torch.ao.ns._numeric_suite.Shadow": [[2092, 3, 1, "", "add"], [2092, 3, 1, "", "add_relu"], [2092, 3, 1, "", "add_scalar"], [2092, 3, 1, "", "cat"], [2092, 3, 1, "", "forward"], [2092, 3, 1, "", "mul"], [2092, 3, 1, "", "mul_scalar"]], "torch.ao.ns._numeric_suite.ShadowLogger": [[2092, 3, 1, "", "forward"]], "torch.ao.ns._numeric_suite_fx": [[2093, 1, 1, "", "NSTracer"], [2093, 1, 1, 
"", "OutputComparisonLogger"], [2093, 1, 1, "", "OutputLogger"], [2093, 5, 1, "", "add_loggers"], [2093, 5, 1, "", "add_shadow_loggers"], [2093, 5, 1, "", "convert_n_shadows_model"], [2093, 5, 1, "", "extend_logger_results_with_comparison"], [2093, 5, 1, "", "extract_logger_info"], [2093, 5, 1, "", "extract_results_n_shadows_model"], [2093, 5, 1, "", "extract_shadow_logger_info"], [2093, 5, 1, "", "extract_weights"], [2093, 5, 1, "", "loggers_set_enabled"], [2093, 5, 1, "", "loggers_set_save_activations"], [2093, 5, 1, "", "prepare_n_shadows_model"], [2093, 5, 1, "", "print_comparisons_n_shadows_model"]], "torch.ao.ns._numeric_suite_fx.NSTracer": [[2093, 3, 1, "", "is_leaf_module"]], "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger": [[2093, 3, 1, "", "forward"]], "torch.ao.ns._numeric_suite_fx.OutputLogger": [[2093, 3, 1, "", "forward"]], "torch.ao.ns.fx": [[2072, 0, 0, "-", "graph_matcher"], [2072, 0, 0, "-", "graph_passes"], [2072, 0, 0, "-", "mappings"], [2072, 0, 0, "-", "n_shadows_utils"], [2072, 0, 0, "-", "ns_types"], [2072, 0, 0, "-", "pattern_utils"], [2072, 0, 0, "-", "qconfig_multi_mapping"], [2072, 0, 0, "-", "utils"], [2072, 0, 0, "-", "weight_utils"]], "torch.ao.ns.fx.utils": [[2093, 5, 1, "", "compute_cosine_similarity"], [2093, 5, 1, "", "compute_normalized_l2_error"], [2093, 5, 1, "", "compute_sqnr"]], "torch.ao.pruning": [[2072, 0, 0, "-", "scheduler"], [2072, 0, 0, "-", "sparsifier"]], "torch.ao.pruning.scheduler": [[2072, 0, 0, "-", "base_scheduler"], [2072, 0, 0, "-", "cubic_scheduler"], [2072, 0, 0, "-", "lambda_scheduler"]], "torch.ao.pruning.sparsifier": [[2072, 0, 0, "-", "base_sparsifier"], [2072, 0, 0, "-", "nearly_diagonal_sparsifier"], [2072, 0, 0, "-", "utils"], [2072, 0, 0, "-", "weight_norm_sparsifier"]], "torch.ao.quantization": [[791, 1, 1, "", "DeQuantStub"], [792, 1, 1, "", "QuantStub"], [793, 1, 1, "", "QuantWrapper"], [794, 1, 1, "", "add_quant_dequant"], [2072, 0, 0, "-", "backend_config"], [800, 1, 1, "", "convert"], [801, 1, 1, "", "default_eval_fn"], [2072, 0, 0, "-", "fake_quantize"], [2072, 0, 0, "-", "fuse_modules"], [2072, 0, 0, "-", "fuser_method_mappings"], [2072, 0, 0, "-", "fx"], [2072, 0, 0, "-", "observer"], [841, 1, 1, "", "prepare"], [842, 1, 1, "", "prepare_qat"], [843, 1, 1, "", "propagate_qconfig_"], [2075, 0, 0, "-", "pt2e"], [2072, 0, 0, "-", "qconfig"], [2072, 0, 0, "-", "qconfig_mapping"], [2072, 0, 0, "-", "quant_type"], [2072, 0, 0, "-", "quantization_mappings"], [861, 1, 1, "", "quantize"], [862, 1, 1, "", "quantize_dynamic"], [2072, 0, 0, "-", "quantize_fx"], [2072, 0, 0, "-", "quantize_jit"], [2072, 0, 0, "-", "quantize_pt2e"], [867, 1, 1, "", "quantize_qat"], [2075, 0, 0, "-", "quantizer"], [2072, 0, 0, "-", "stubs"], [868, 1, 1, "", "swap_module"], [2072, 0, 0, "-", "utils"]], "torch.ao.quantization.backend_config": [[795, 1, 1, "", "BackendConfig"], [796, 1, 1, "", "BackendPatternConfig"], [797, 1, 1, "", "DTypeConfig"], [798, 1, 1, "", "DTypeWithConstraints"], [799, 1, 1, "", "ObservationType"], [2072, 0, 0, "-", "backend_config"], [2072, 0, 0, "-", "executorch"], [2072, 0, 0, "-", "fbgemm"], [2072, 0, 0, "-", "native"], [2072, 0, 0, "-", "observation_type"], [2072, 0, 0, "-", "onednn"], [2072, 0, 0, "-", "qnnpack"], [2072, 0, 0, "-", "tensorrt"], [2072, 0, 0, "-", "utils"], [2072, 0, 0, "-", "x86"]], "torch.ao.quantization.backend_config.BackendConfig": [[795, 4, 1, "", "configs"], [795, 3, 1, "", "from_dict"], [795, 3, 1, "", "set_backend_pattern_config"], [795, 3, 1, "", "set_backend_pattern_configs"], [795, 3, 
1, "", "set_name"], [795, 3, 1, "", "to_dict"]], "torch.ao.quantization.backend_config.BackendPatternConfig": [[796, 3, 1, "", "add_dtype_config"], [796, 3, 1, "", "from_dict"], [796, 3, 1, "", "set_dtype_configs"], [796, 3, 1, "", "set_fused_module"], [796, 3, 1, "", "set_fuser_method"], [796, 3, 1, "", "set_observation_type"], [796, 3, 1, "", "set_pattern"], [796, 3, 1, "", "set_qat_module"], [796, 3, 1, "", "set_reference_quantized_module"], [796, 3, 1, "", "set_root_module"], [796, 3, 1, "", "to_dict"]], "torch.ao.quantization.backend_config.DTypeConfig": [[797, 3, 1, "", "from_dict"], [797, 3, 1, "", "to_dict"]], "torch.ao.quantization.backend_config.ObservationType": [[799, 2, 1, "", "INPUT_OUTPUT_NOT_OBSERVED"], [799, 2, 1, "", "OUTPUT_SHARE_OBSERVER_WITH_INPUT"], [799, 2, 1, "", "OUTPUT_USE_DIFFERENT_OBSERVER_AS_INPUT"]], "torch.ao.quantization.fake_quantize": [[802, 1, 1, "", "FakeQuantize"], [803, 1, 1, "", "FakeQuantizeBase"], [804, 1, 1, "", "FixedQParamsFakeQuantize"], [805, 1, 1, "", "FusedMovingAvgObsFakeQuantize"], [806, 2, 1, "", "default_fake_quant"], [807, 2, 1, "", "default_fused_act_fake_quant"], [808, 2, 1, "", "default_fused_per_channel_wt_fake_quant"], [809, 2, 1, "", "default_fused_wt_fake_quant"], [810, 2, 1, "", "default_histogram_fake_quant"], [811, 2, 1, "", "default_per_channel_weight_fake_quant"], [812, 2, 1, "", "default_weight_fake_quant"], [813, 1, 1, "", "disable_fake_quant"], [814, 1, 1, "", "disable_observer"], [815, 1, 1, "", "enable_fake_quant"], [816, 1, 1, "", "enable_observer"]], "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize": [[804, 3, 1, "", "extra_repr"]], "torch.ao.quantization.fuse_modules": [[817, 1, 1, "", "fuse_modules"]], "torch.ao.quantization.fx": [[2072, 0, 0, "-", "convert"], [2072, 0, 0, "-", "custom_config"], [2072, 0, 0, "-", "fuse"], [2072, 0, 0, "-", "fuse_handler"], [2072, 0, 0, "-", "graph_module"], [2072, 0, 0, "-", "lower_to_fbgemm"], [2072, 0, 0, "-", "lower_to_qnnpack"], [2072, 0, 0, "-", "lstm_utils"], [2072, 0, 0, "-", "match_utils"], [2072, 0, 0, "-", "pattern_utils"], [2072, 0, 0, "-", "prepare"], [2072, 0, 0, "-", "qconfig_mapping_utils"], [2072, 0, 0, "-", "quantize_handler"], [2072, 0, 0, "-", "tracer"], [2072, 0, 0, "-", "utils"]], "torch.ao.quantization.fx.custom_config": [[818, 1, 1, "", "ConvertCustomConfig"], [819, 1, 1, "", "FuseCustomConfig"], [820, 1, 1, "", "PrepareCustomConfig"], [821, 1, 1, "", "StandaloneModuleConfigEntry"]], "torch.ao.quantization.fx.custom_config.ConvertCustomConfig": [[818, 3, 1, "", "from_dict"], [818, 3, 1, "", "set_observed_to_quantized_mapping"], [818, 3, 1, "", "set_preserved_attributes"], [818, 3, 1, "", "to_dict"]], "torch.ao.quantization.fx.custom_config.FuseCustomConfig": [[819, 3, 1, "", "from_dict"], [819, 3, 1, "", "set_preserved_attributes"], [819, 3, 1, "", "to_dict"]], "torch.ao.quantization.fx.custom_config.PrepareCustomConfig": [[820, 3, 1, "", "from_dict"], [820, 3, 1, "", "set_float_to_observed_mapping"], [820, 3, 1, "", "set_input_quantized_indexes"], [820, 3, 1, "", "set_non_traceable_module_classes"], [820, 3, 1, "", "set_non_traceable_module_names"], [820, 3, 1, "", "set_output_quantized_indexes"], [820, 3, 1, "", "set_preserved_attributes"], [820, 3, 1, "", "set_standalone_module_class"], [820, 3, 1, "", "set_standalone_module_name"], [820, 3, 1, "", "to_dict"]], "torch.ao.quantization.observer": [[822, 1, 1, "", "HistogramObserver"], [823, 1, 1, "", "MinMaxObserver"], [824, 1, 1, "", "MovingAverageMinMaxObserver"], [825, 1, 1, "", 
"MovingAveragePerChannelMinMaxObserver"], [826, 1, 1, "", "NoopObserver"], [827, 1, 1, "", "ObserverBase"], [828, 1, 1, "", "PerChannelMinMaxObserver"], [829, 1, 1, "", "PlaceholderObserver"], [830, 1, 1, "", "RecordingObserver"], [831, 2, 1, "", "default_debug_observer"], [832, 2, 1, "", "default_dynamic_quant_observer"], [833, 2, 1, "", "default_float_qparams_observer"], [834, 2, 1, "", "default_histogram_observer"], [835, 2, 1, "", "default_observer"], [836, 2, 1, "", "default_per_channel_weight_observer"], [837, 2, 1, "", "default_placeholder_observer"], [838, 2, 1, "", "default_weight_observer"], [839, 1, 1, "", "get_observer_state_dict"], [840, 1, 1, "", "load_observer_state_dict"]], "torch.ao.quantization.observer.MinMaxObserver": [[823, 3, 1, "", "calculate_qparams"], [823, 3, 1, "", "forward"], [823, 3, 1, "", "reset_min_max_vals"]], "torch.ao.quantization.observer.ObserverBase": [[827, 3, 1, "", "with_args"], [827, 3, 1, "", "with_callable_args"]], "torch.ao.quantization.observer.PerChannelMinMaxObserver": [[828, 3, 1, "", "reset_min_max_vals"]], "torch.ao.quantization.pt2e": [[2072, 0, 0, "-", "duplicate_dq_pass"], [2072, 0, 0, "-", "export_utils"], [2075, 0, 0, "-", "generate_numeric_debug_handle"], [2072, 0, 0, "-", "graph_utils"], [2072, 0, 0, "-", "port_metadata_pass"], [2072, 0, 0, "-", "prepare"], [2072, 0, 0, "-", "qat_utils"], [2075, 0, 0, "-", "representation"], [2072, 0, 0, "-", "utils"]], "torch.ao.quantization.pt2e.export_utils": [[844, 1, 1, "", "model_is_exported"]], "torch.ao.quantization.pt2e.representation": [[2072, 0, 0, "-", "rewrite"]], "torch.ao.quantization.qconfig": [[845, 1, 1, "", "QConfig"], [846, 2, 1, "", "default_activation_only_qconfig"], [847, 2, 1, "", "default_debug_qconfig"], [848, 2, 1, "", "default_dynamic_qconfig"], [849, 2, 1, "", "default_per_channel_qconfig"], [850, 2, 1, "", "default_qat_qconfig"], [851, 2, 1, "", "default_qat_qconfig_v2"], [852, 2, 1, "", "default_qconfig"], [853, 2, 1, "", "default_weight_only_qconfig"], [854, 2, 1, "", "float16_dynamic_qconfig"], [855, 2, 1, "", "float16_static_qconfig"], [856, 2, 1, "", "float_qparams_weight_only_qconfig"], [857, 2, 1, "", "per_channel_dynamic_qconfig"]], "torch.ao.quantization.qconfig_mapping": [[858, 1, 1, "", "QConfigMapping"], [859, 1, 1, "", "get_default_qat_qconfig_mapping"], [860, 1, 1, "", "get_default_qconfig_mapping"]], "torch.ao.quantization.qconfig_mapping.QConfigMapping": [[858, 3, 1, "", "from_dict"], [858, 3, 1, "", "set_global"], [858, 3, 1, "", "set_module_name"], [858, 3, 1, "", "set_module_name_object_type_order"], [858, 3, 1, "", "set_module_name_regex"], [858, 3, 1, "", "set_object_type"], [858, 3, 1, "", "to_dict"]], "torch.ao.quantization.quantize_fx": [[863, 1, 1, "", "convert_fx"], [864, 1, 1, "", "fuse_fx"], [865, 1, 1, "", "prepare_fx"], [866, 1, 1, "", "prepare_qat_fx"]], "torch.ao.quantization.quantizer": [[2072, 0, 0, "-", "composable_quantizer"], [2072, 0, 0, "-", "embedding_quantizer"], [2072, 0, 0, "-", "quantizer"], [2072, 0, 0, "-", "utils"], [2072, 0, 0, "-", "x86_inductor_quantizer"], [2072, 0, 0, "-", "xnnpack_quantizer"], [2072, 0, 0, "-", "xnnpack_quantizer_utils"]], "torch.autograd": [[1, 1, 1, "", "Function"], [1, 0, 0, "-", "anomaly_mode"], [897, 5, 1, "", "backward"], [1, 1, 1, "", "detect_anomaly"], [1, 0, 0, "-", "forward_ad"], [1, 0, 0, "-", "function"], [1, 0, 0, "-", "functional"], [918, 5, 1, "", "grad"], [1, 0, 0, "-", "grad_mode"], [1, 0, 0, "-", "gradcheck"], [1, 0, 0, "-", "graph"], [1, 0, 0, "-", "profiler"], [1, 0, 0, "-", 
"profiler_legacy"], [1, 0, 0, "-", "profiler_util"], [1, 1, 1, "", "set_detect_anomaly"], [1, 0, 0, "-", "variable"]], "torch.autograd.Function": [[893, 3, 1, "", "backward"], [894, 3, 1, "", "forward"], [895, 3, 1, "", "jvp"], [896, 3, 1, "", "vmap"]], "torch.autograd.forward_ad": [[898, 1, 1, "", "UnpackedDualTensor"], [899, 1, 1, "", "dual_level"], [900, 5, 1, "", "enter_dual_level"], [901, 5, 1, "", "exit_dual_level"], [902, 5, 1, "", "make_dual"], [903, 5, 1, "", "unpack_dual"]], "torch.autograd.forward_ad.UnpackedDualTensor": [[898, 3, 1, "", "count"], [898, 3, 1, "", "index"], [898, 2, 1, "", "primal"], [898, 2, 1, "", "tangent"]], "torch.autograd.function": [[904, 1, 1, "", "BackwardCFunction"], [909, 1, 1, "", "InplaceFunction"], [910, 1, 1, "", "NestedIOFunction"], [911, 5, 1, "", "once_differentiable"]], "torch.autograd.function.BackwardCFunction": [[904, 3, 1, "", "apply"], [904, 3, 1, "", "apply_jvp"], [904, 3, 1, "", "mark_dirty"], [904, 3, 1, "", "mark_non_differentiable"], [904, 3, 1, "", "save_for_backward"], [904, 3, 1, "", "save_for_forward"], [904, 3, 1, "", "set_materialize_grads"]], "torch.autograd.function.FunctionCtx": [[905, 3, 1, "", "mark_dirty"], [906, 3, 1, "", "mark_non_differentiable"], [907, 3, 1, "", "save_for_backward"], [908, 3, 1, "", "set_materialize_grads"]], "torch.autograd.function.InplaceFunction": [[909, 3, 1, "", "backward"], [909, 3, 1, "", "forward"], [909, 3, 1, "", "jvp"], [909, 3, 1, "", "mark_dirty"], [909, 3, 1, "", "mark_non_differentiable"], [909, 3, 1, "", "save_for_backward"], [909, 3, 1, "", "save_for_forward"], [909, 3, 1, "", "set_materialize_grads"], [909, 3, 1, "", "setup_context"], [909, 3, 1, "", "vjp"], [909, 3, 1, "", "vmap"]], "torch.autograd.function.NestedIOFunction": [[910, 3, 1, "", "backward"], [910, 3, 1, "", "backward_extended"], [910, 3, 1, "", "forward"], [910, 3, 1, "", "forward_extended"], [910, 3, 1, "", "jvp"], [910, 3, 1, "", "mark_dirty"], [910, 3, 1, "", "mark_non_differentiable"], [910, 3, 1, "", "save_for_backward"], [910, 3, 1, "", "save_for_forward"], [910, 4, 1, "", "saved_tensors"], [910, 3, 1, "", "set_materialize_grads"], [910, 3, 1, "", "setup_context"], [910, 3, 1, "", "vjp"], [910, 3, 1, "", "vmap"]], "torch.autograd.functional": [[912, 5, 1, "", "hessian"], [913, 5, 1, "", "hvp"], [914, 5, 1, "", "jacobian"], [915, 5, 1, "", "jvp"], [916, 5, 1, "", "vhp"], [917, 5, 1, "", "vjp"]], "torch.autograd.grad_mode": [[919, 1, 1, "", "inference_mode"], [920, 1, 1, "", "set_grad_enabled"], [921, 1, 1, "", "set_multithreading_enabled"]], "torch.autograd.grad_mode.inference_mode": [[919, 3, 1, "", "clone"]], "torch.autograd.grad_mode.set_grad_enabled": [[920, 3, 1, "", "clone"]], "torch.autograd.grad_mode.set_multithreading_enabled": [[921, 3, 1, "", "clone"]], "torch.autograd.gradcheck": [[922, 6, 1, "", "GradcheckError"], [923, 5, 1, "", "gradcheck"], [924, 5, 1, "", "gradgradcheck"]], "torch.autograd.graph": [[1, 1, 1, "", "GradientEdge"], [1, 1, 1, "", "allow_mutation_on_saved_tensors"], [1, 1, 1, "", "disable_saved_tensors_hooks"], [1, 5, 1, "", "get_gradient_edge"], [930, 5, 1, "", "increment_version"], [1, 1, 1, "", "register_multi_grad_hook"], [1, 1, 1, "", "save_on_cpu"], [1, 1, 1, "", "saved_tensors_hooks"]], "torch.autograd.graph.Node": [[925, 3, 1, "", "metadata"], [926, 3, 1, "", "name"], [927, 4, 1, "", "next_functions"], [928, 3, 1, "", "register_hook"], [929, 3, 1, "", "register_prehook"]], "torch.autograd.profiler": [[931, 1, 1, "", "EnforceUnique"], [932, 1, 1, "", "KinetoStepTracker"], [1, 1, 
1, "", "emit_itt"], [1, 1, 1, "", "emit_nvtx"], [933, 5, 1, "", "load_nvprof"], [934, 5, 1, "", "parse_nvprof_trace"], [1, 1, 1, "", "profile"], [939, 1, 1, "", "record_function"]], "torch.autograd.profiler.EnforceUnique": [[931, 3, 1, "", "see"]], "torch.autograd.profiler.KinetoStepTracker": [[932, 3, 1, "", "current_step"], [932, 3, 1, "", "erase_step_count"], [932, 3, 1, "", "increment_step"], [932, 3, 1, "", "init_step_count"]], "torch.autograd.profiler.profile": [[935, 3, 1, "", "export_chrome_trace"], [936, 3, 1, "", "key_averages"], [937, 4, 1, "", "self_cpu_time_total"], [938, 3, 1, "", "total_average"]], "torch.autograd.profiler_util": [[940, 1, 1, "", "Interval"], [941, 1, 1, "", "Kernel"], [942, 1, 1, "", "MemRecordsAcc"], [943, 1, 1, "", "StringTable"]], "torch.autograd.profiler_util.Interval": [[940, 3, 1, "", "elapsed_us"]], "torch.autograd.profiler_util.Kernel": [[941, 3, 1, "", "count"], [941, 2, 1, "", "device"], [941, 2, 1, "", "duration"], [941, 3, 1, "", "index"], [941, 2, 1, "", "name"]], "torch.autograd.profiler_util.MemRecordsAcc": [[942, 3, 1, "", "in_interval"]], "torch.autograd.profiler_util.StringTable": [[943, 3, 1, "", "clear"], [943, 3, 1, "", "copy"], [943, 2, 1, "", "default_factory"], [943, 3, 1, "", "fromkeys"], [943, 3, 1, "", "get"], [943, 3, 1, "", "items"], [943, 3, 1, "", "keys"], [943, 3, 1, "", "pop"], [943, 3, 1, "", "popitem"], [943, 3, 1, "", "setdefault"], [943, 3, 1, "", "update"], [943, 3, 1, "", "values"]], "torch.backends": [[2, 0, 0, "-", "cpu"], [2, 0, 0, "-", "cuda"], [2, 0, 0, "-", "cudnn"], [2, 0, 0, "-", "mha"], [2, 0, 0, "-", "mkl"], [2, 0, 0, "-", "mkldnn"], [2, 0, 0, "-", "mps"], [2, 0, 0, "-", "nnpack"], [2, 0, 0, "-", "openmp"], [2, 0, 0, "-", "opt_einsum"], [2, 0, 0, "-", "quantized"], [2, 0, 0, "-", "xeon"], [2, 0, 0, "-", "xnnpack"]], "torch.backends.cpu": [[2, 5, 1, "", "get_cpu_capability"]], "torch.backends.cuda": [[2, 1, 1, "", "SDPAParams"], [2, 5, 1, "", "can_use_efficient_attention"], [2, 5, 1, "", "can_use_flash_attention"], [2, 5, 1, "", "cudnn_sdp_enabled"], [2, 2, 1, "", "cufft_plan_cache"], [2, 5, 1, "", "enable_cudnn_sdp"], [2, 5, 1, "", "enable_flash_sdp"], [2, 5, 1, "", "enable_math_sdp"], [2, 5, 1, "", "enable_mem_efficient_sdp"], [2, 5, 1, "", "flash_sdp_enabled"], [2, 5, 1, "", "is_built"], [2, 5, 1, "", "math_sdp_enabled"], [2, 5, 1, "", "mem_efficient_sdp_enabled"], [2, 5, 1, "", "preferred_blas_library"], [2, 5, 1, "", "preferred_linalg_library"], [2, 5, 1, "", "sdp_kernel"]], "torch.backends.cuda.cufft_plan_cache": [[2, 3, 1, "", "clear"], [2, 2, 1, "", "max_size"], [2, 2, 1, "", "size"]], "torch.backends.cuda.matmul": [[2, 2, 1, "", "allow_bf16_reduced_precision_reduction"], [2, 2, 1, "", "allow_fp16_reduced_precision_reduction"], [2, 2, 1, "", "allow_tf32"]], "torch.backends.cudnn": [[2, 2, 1, "", "allow_tf32"], [2, 2, 1, "", "benchmark"], [2, 2, 1, "", "benchmark_limit"], [2, 2, 1, "", "deterministic"], [2, 2, 1, "", "enabled"], [2, 5, 1, "", "is_available"], [2, 0, 0, "-", "rnn"], [2, 5, 1, "", "version"]], "torch.backends.mha": [[2, 5, 1, "", "get_fastpath_enabled"], [2, 5, 1, "", "set_fastpath_enabled"]], "torch.backends.mkl": [[2, 5, 1, "", "is_available"], [2, 1, 1, "", "verbose"]], "torch.backends.mkldnn": [[2, 5, 1, "", "is_available"], [2, 1, 1, "", "verbose"]], "torch.backends.mps": [[2, 5, 1, "", "is_available"], [2, 5, 1, "", "is_built"]], "torch.backends.nnpack": [[2, 5, 1, "", "flags"], [2, 5, 1, "", "is_available"], [2, 5, 1, "", "set_flags"]], "torch.backends.openmp": [[2, 5, 1, "", 
"is_available"]], "torch.backends.opt_einsum": [[2, 2, 1, "", "enabled"], [2, 5, 1, "", "get_opt_einsum"], [2, 5, 1, "", "is_available"], [2, 2, 1, "", "strategy"]], "torch.backends.xeon": [[2, 0, 0, "-", "run_cpu"]], "torch.compiler": [[978, 5, 1, "", "allow_in_graph"], [979, 5, 1, "", "assume_constant_result"], [980, 5, 1, "", "compile"], [981, 5, 1, "", "cudagraph_mark_step_begin"], [982, 5, 1, "", "disable"], [983, 5, 1, "", "is_compiling"], [984, 5, 1, "", "is_dynamo_compiling"], [985, 5, 1, "", "list_backends"], [986, 5, 1, "", "reset"]], "torch.cpu": [[999, 1, 1, "", "Stream"], [1000, 1, 1, "", "StreamContext"], [0, 0, 0, "-", "amp"], [1001, 5, 1, "", "current_device"], [1002, 5, 1, "", "current_stream"], [1003, 5, 1, "", "device_count"], [1004, 5, 1, "", "is_available"], [1005, 5, 1, "", "set_device"], [1006, 5, 1, "", "stream"], [1007, 5, 1, "", "synchronize"]], "torch.cpu.amp": [[0, 1, 1, "", "autocast"], [0, 0, 0, "-", "autocast_mode"], [0, 0, 0, "-", "grad_scaler"]], "torch.cuda": [[1009, 1, 1, "", "CUDAGraph"], [1010, 1, 1, "", "CUDAPluggableAllocator"], [1011, 1, 1, "", "Event"], [1012, 1, 1, "", "ExternalStream"], [1013, 6, 1, "", "OutOfMemoryError"], [1014, 1, 1, "", "Stream"], [1015, 1, 1, "", "StreamContext"], [18, 0, 0, "-", "_sanitizer"], [0, 0, 0, "-", "amp"], [1016, 5, 1, "", "caching_allocator_alloc"], [1017, 5, 1, "", "caching_allocator_delete"], [1018, 5, 1, "", "can_device_access_peer"], [1019, 5, 1, "", "change_current_allocator"], [1020, 5, 1, "", "clock_rate"], [17, 0, 0, "-", "comm"], [1026, 5, 1, "", "current_blas_handle"], [1027, 5, 1, "", "current_device"], [1028, 5, 1, "", "current_stream"], [1029, 5, 1, "", "default_stream"], [1030, 1, 1, "", "device"], [1031, 5, 1, "", "device_count"], [1032, 1, 1, "", "device_of"], [1033, 5, 1, "", "empty_cache"], [17, 0, 0, "-", "error"], [1034, 5, 1, "", "get_allocator_backend"], [1035, 5, 1, "", "get_arch_list"], [1036, 5, 1, "", "get_device_capability"], [1037, 5, 1, "", "get_device_name"], [1038, 5, 1, "", "get_device_properties"], [1039, 5, 1, "", "get_gencode_flags"], [1040, 5, 1, "", "get_rng_state"], [1041, 5, 1, "", "get_rng_state_all"], [1042, 5, 1, "", "get_sync_debug_mode"], [1043, 1, 1, "", "graph"], [1044, 5, 1, "", "graph_pool_handle"], [17, 0, 0, "-", "graphs"], [1045, 5, 1, "", "init"], [1046, 5, 1, "", "initial_seed"], [1047, 5, 1, "", "ipc_collect"], [1048, 5, 1, "", "is_available"], [1049, 5, 1, "", "is_current_stream_capturing"], [1050, 5, 1, "", "is_initialized"], [17, 0, 0, "-", "jiterator"], [1053, 5, 1, "", "list_gpu_processes"], [1054, 5, 1, "", "make_graphed_callables"], [1055, 5, 1, "", "manual_seed"], [1056, 5, 1, "", "manual_seed_all"], [1057, 5, 1, "", "max_memory_allocated"], [1058, 5, 1, "", "max_memory_cached"], [1059, 5, 1, "", "max_memory_reserved"], [1060, 5, 1, "", "mem_get_info"], [17, 0, 0, "-", "memory"], [1061, 5, 1, "", "memory_allocated"], [1062, 5, 1, "", "memory_cached"], [1063, 5, 1, "", "memory_reserved"], [1064, 5, 1, "", "memory_snapshot"], [1065, 5, 1, "", "memory_stats"], [1066, 5, 1, "", "memory_summary"], [1067, 5, 1, "", "memory_usage"], [17, 0, 0, "-", "nccl"], [17, 0, 0, "-", "nvtx"], [1072, 5, 1, "", "power_draw"], [17, 0, 0, "-", "profiler"], [17, 0, 0, "-", "random"], [1073, 5, 1, "", "reset_max_memory_allocated"], [1074, 5, 1, "", "reset_max_memory_cached"], [1075, 5, 1, "", "reset_peak_memory_stats"], [1076, 5, 1, "", "seed"], [1077, 5, 1, "", "seed_all"], [1078, 5, 1, "", "set_device"], [1079, 5, 1, "", "set_per_process_memory_fraction"], [1080, 5, 1, "", 
"set_rng_state"], [1081, 5, 1, "", "set_rng_state_all"], [1082, 5, 1, "", "set_stream"], [1083, 5, 1, "", "set_sync_debug_mode"], [17, 0, 0, "-", "sparse"], [1084, 5, 1, "", "stream"], [17, 0, 0, "-", "streams"], [1085, 5, 1, "", "synchronize"], [1086, 5, 1, "", "temperature"], [19, 0, 0, "-", "tunable"], [1087, 5, 1, "", "utilization"]], "torch.cuda.CUDAGraph": [[1009, 3, 1, "", "capture_begin"], [1009, 3, 1, "", "capture_end"], [1009, 3, 1, "", "debug_dump"], [1009, 3, 1, "", "enable_debug_mode"], [1009, 3, 1, "", "pool"], [1009, 3, 1, "", "replay"], [1009, 3, 1, "", "reset"]], "torch.cuda.Event": [[1011, 3, 1, "", "elapsed_time"], [1011, 3, 1, "", "from_ipc_handle"], [1011, 3, 1, "", "ipc_handle"], [1011, 3, 1, "", "query"], [1011, 3, 1, "", "record"], [1011, 3, 1, "", "synchronize"], [1011, 3, 1, "", "wait"]], "torch.cuda.ExternalStream": [[1012, 3, 1, "", "query"], [1012, 3, 1, "", "record_event"], [1012, 3, 1, "", "synchronize"], [1012, 3, 1, "", "wait_event"], [1012, 3, 1, "", "wait_stream"]], "torch.cuda.Stream": [[1014, 3, 1, "", "query"], [1014, 3, 1, "", "record_event"], [1014, 3, 1, "", "synchronize"], [1014, 3, 1, "", "wait_event"], [1014, 3, 1, "", "wait_stream"]], "torch.cuda._sanitizer": [[18, 5, 1, "", "enable_cuda_sanitizer"]], "torch.cuda.amp": [[0, 1, 1, "", "GradScaler"], [0, 1, 1, "", "autocast"], [0, 0, 0, "-", "autocast_mode"], [0, 0, 0, "-", "common"], [0, 5, 1, "", "custom_bwd"], [0, 5, 1, "", "custom_fwd"], [0, 0, 0, "-", "grad_scaler"]], "torch.cuda.comm": [[1021, 5, 1, "", "broadcast"], [1022, 5, 1, "", "broadcast_coalesced"], [1023, 5, 1, "", "gather"], [1024, 5, 1, "", "reduce_add"], [1025, 5, 1, "", "scatter"]], "torch.cuda.jiterator": [[1051, 5, 1, "", "_create_jit_fn"], [1052, 5, 1, "", "_create_multi_output_jit_fn"]], "torch.cuda.memory": [[2115, 5, 1, "", "_dump_snapshot"], [2115, 5, 1, "", "_record_memory_history"], [2115, 5, 1, "", "_snapshot"]], "torch.cuda.nvtx": [[1068, 5, 1, "", "mark"], [1069, 5, 1, "", "range"], [1070, 5, 1, "", "range_pop"], [1071, 5, 1, "", "range_push"]], "torch.cuda.tunable": [[19, 5, 1, "", "enable"], [19, 5, 1, "", "get_filename"], [19, 5, 1, "", "get_max_tuning_duration"], [19, 5, 1, "", "get_max_tuning_iterations"], [19, 5, 1, "", "get_results"], [19, 5, 1, "", "get_validators"], [19, 5, 1, "", "is_enabled"], [19, 5, 1, "", "read_file"], [19, 5, 1, "", "set_filename"], [19, 5, 1, "", "set_max_tuning_duration"], [19, 5, 1, "", "set_max_tuning_iterations"], [19, 5, 1, "", "tuning_enable"], [19, 5, 1, "", "tuning_is_enabled"], [19, 5, 1, "", "write_file"], [19, 5, 1, "", "write_file_on_exit"]], "torch.distributed": [[28, 1, 1, "", "Backend"], [28, 1, 1, "", "DistBackendError"], [28, 1, 1, "", "DistError"], [28, 1, 1, "", "DistNetworkError"], [28, 1, 1, "", "DistStoreError"], [28, 1, 1, "", "FileStore"], [24, 1, 1, "", "GradBucket"], [28, 1, 1, "", "HashStore"], [28, 1, 1, "", "P2POp"], [28, 1, 1, "", "PrefixStore"], [28, 1, 1, "", "ReduceOp"], [28, 1, 1, "", "Store"], [28, 1, 1, "", "TCPStore"], [28, 1, 1, "", "Work"], [28, 0, 0, "-", "algorithms"], [28, 5, 1, "", "all_gather"], [28, 5, 1, "", "all_gather_into_tensor"], [28, 5, 1, "", "all_gather_object"], [28, 5, 1, "", "all_reduce"], [28, 5, 1, "", "all_to_all"], [28, 5, 1, "", "all_to_all_single"], [28, 0, 0, "-", "argparse_util"], [2077, 0, 0, "-", "autograd"], [28, 5, 1, "", "barrier"], [28, 5, 1, "", "batch_isend_irecv"], [28, 5, 1, "", "breakpoint"], [28, 5, 1, "", "broadcast"], [28, 5, 1, "", "broadcast_object_list"], [28, 0, 0, "-", "c10d_logger"], [30, 0, 0, "-", 
"checkpoint"], [28, 0, 0, "-", "collective_utils"], [28, 0, 0, "-", "constants"], [28, 0, 0, "-", "device_mesh"], [28, 0, 0, "-", "distributed_c10d"], [28, 0, 0, "-", "elastic"], [55, 0, 0, "-", "fsdp"], [28, 5, 1, "", "gather"], [28, 5, 1, "", "gather_object"], [28, 5, 1, "", "get_backend"], [28, 5, 1, "", "get_global_rank"], [28, 5, 1, "", "get_group_rank"], [28, 5, 1, "", "get_process_group_ranks"], [28, 5, 1, "", "get_rank"], [28, 5, 1, "", "get_world_size"], [28, 5, 1, "", "init_process_group"], [28, 5, 1, "", "irecv"], [28, 5, 1, "", "is_available"], [28, 5, 1, "", "is_gloo_available"], [28, 5, 1, "", "is_initialized"], [28, 5, 1, "", "is_mpi_available"], [28, 5, 1, "", "is_nccl_available"], [28, 5, 1, "", "is_torchelastic_launched"], [28, 5, 1, "", "isend"], [28, 0, 0, "-", "launch"], [28, 0, 0, "-", "launcher"], [28, 0, 0, "-", "logging_handlers"], [28, 5, 1, "", "monitored_barrier"], [28, 5, 1, "", "new_group"], [28, 0, 0, "-", "nn"], [32, 0, 0, "-", "optim"], [33, 0, 0, "-", "pipelining"], [28, 5, 1, "", "recv"], [28, 5, 1, "", "recv_object_list"], [28, 5, 1, "", "reduce"], [28, 1, 1, "", "reduce_op"], [28, 5, 1, "", "reduce_scatter"], [28, 5, 1, "", "reduce_scatter_tensor"], [28, 0, 0, "-", "remote_device"], [28, 0, 0, "-", "rendezvous"], [2077, 0, 0, "-", "rpc"], [48, 0, 0, "-", "run"], [28, 5, 1, "", "scatter"], [28, 5, 1, "", "scatter_object_list"], [28, 5, 1, "", "send"], [28, 5, 1, "", "send_object_list"], [28, 0, 0, "-", "tensor"], [28, 0, 0, "-", "utils"]], "torch.distributed.Backend": [[28, 3, 1, "", "register_backend"]], "torch.distributed.GradBucket": [[24, 5, 1, "", "buffer"], [24, 5, 1, "", "gradients"], [24, 5, 1, "", "index"], [24, 5, 1, "", "is_last"], [24, 5, 1, "", "parameters"], [24, 5, 1, "", "set_buffer"]], "torch.distributed.Store": [[28, 5, 1, "", "add"], [28, 5, 1, "", "compare_set"], [28, 5, 1, "", "delete_key"], [28, 5, 1, "", "get"], [28, 5, 1, "", "num_keys"], [28, 5, 1, "", "set"], [28, 5, 1, "", "set_timeout"], [28, 5, 1, "", "wait"]], "torch.distributed.algorithms": [[29, 1, 1, "", "Join"], [29, 1, 1, "", "JoinHook"], [29, 1, 1, "", "Joinable"], [28, 0, 0, "-", "ddp_comm_hooks"], [28, 0, 0, "-", "join"], [28, 0, 0, "-", "model_averaging"]], "torch.distributed.algorithms.Join": [[29, 3, 1, "", "notify_join_context"]], "torch.distributed.algorithms.JoinHook": [[29, 3, 1, "", "main_hook"], [29, 3, 1, "", "post_hook"]], "torch.distributed.algorithms.Joinable": [[29, 4, 1, "", "join_device"], [29, 3, 1, "", "join_hook"], [29, 4, 1, "", "join_process_group"]], "torch.distributed.algorithms.ddp_comm_hooks": [[28, 0, 0, "-", "ddp_zero_hook"], [28, 0, 0, "-", "debugging_hooks"], [28, 0, 0, "-", "default_hooks"], [28, 0, 0, "-", "mixed_precision_hooks"], [28, 0, 0, "-", "optimizer_overlap_hooks"], [28, 0, 0, "-", "post_localSGD_hook"], [28, 0, 0, "-", "powerSGD_hook"], [28, 0, 0, "-", "quantization_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks": [[24, 5, 1, "", "noop_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.default_hooks": [[24, 5, 1, "", "allreduce_hook"], [24, 5, 1, "", "bf16_compress_hook"], [24, 5, 1, "", "bf16_compress_wrapper"], [24, 5, 1, "", "fp16_compress_hook"], [24, 5, 1, "", "fp16_compress_wrapper"]], "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook": [[24, 1, 1, "", "PowerSGDState"], [24, 5, 1, "", "batched_powerSGD_hook"], [24, 5, 1, "", "powerSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState": [[24, 3, 1, "", "__getstate__"], [24, 3, 1, "", "__setstate__"]], 
"torch.distributed.algorithms.model_averaging": [[28, 0, 0, "-", "averagers"], [28, 0, 0, "-", "hierarchical_model_averager"], [28, 0, 0, "-", "utils"]], "torch.distributed.autograd": [[2077, 5, 1, "", "backward"], [2077, 1, 1, "", "context"], [2077, 5, 1, "", "get_gradients"]], "torch.distributed.checkpoint": [[30, 1, 1, "", "DefaultLoadPlanner"], [30, 1, 1, "", "DefaultSavePlanner"], [30, 1, 1, "", "FileSystemReader"], [30, 1, 1, "", "FileSystemWriter"], [30, 1, 1, "", "LoadPlan"], [30, 1, 1, "", "LoadPlanner"], [30, 1, 1, "", "ReadItem"], [30, 1, 1, "", "SavePlan"], [30, 1, 1, "", "SavePlanner"], [30, 1, 1, "", "StorageReader"], [30, 1, 1, "", "StorageWriter"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "default_planner"], [28, 0, 0, "-", "filesystem"], [30, 0, 0, "-", "format_utils"], [30, 0, 0, "-", "logger"], [30, 0, 0, "-", "logging_handlers"], [28, 0, 0, "-", "metadata"], [28, 0, 0, "-", "optimizer"], [28, 0, 0, "-", "planner"], [28, 0, 0, "-", "planner_helpers"], [28, 0, 0, "-", "resharding"], [30, 0, 0, "-", "staging"], [28, 0, 0, "-", "state_dict"], [28, 0, 0, "-", "state_dict_loader"], [28, 0, 0, "-", "state_dict_saver"], [28, 0, 0, "-", "stateful"], [28, 0, 0, "-", "storage"], [28, 0, 0, "-", "utils"]], "torch.distributed.checkpoint.DefaultLoadPlanner": [[30, 3, 1, "", "lookup_tensor"], [30, 3, 1, "", "transform_tensor"]], "torch.distributed.checkpoint.DefaultSavePlanner": [[30, 3, 1, "", "lookup_object"], [30, 3, 1, "", "transform_object"]], "torch.distributed.checkpoint.FileSystemReader": [[30, 4, 1, "", "checkpoint_id"]], "torch.distributed.checkpoint.FileSystemWriter": [[30, 3, 1, "", "stage"]], "torch.distributed.checkpoint.LoadPlanner": [[30, 3, 1, "", "commit_tensor"], [30, 3, 1, "", "create_global_plan"], [30, 3, 1, "", "create_local_plan"], [30, 3, 1, "", "finish_plan"], [30, 3, 1, "", "load_bytes"], [30, 3, 1, "", "resolve_bytes"], [30, 3, 1, "", "resolve_tensor"], [30, 3, 1, "", "set_up_planner"]], "torch.distributed.checkpoint.SavePlanner": [[30, 3, 1, "", "create_global_plan"], [30, 3, 1, "", "create_local_plan"], [30, 3, 1, "", "finish_plan"], [30, 3, 1, "", "resolve_data"], [30, 3, 1, "", "set_up_planner"]], "torch.distributed.checkpoint.StorageReader": [[30, 3, 1, "", "prepare_global_plan"], [30, 3, 1, "", "prepare_local_plan"], [30, 3, 1, "", "read_data"], [30, 3, 1, "", "read_metadata"], [30, 3, 1, "", "reset"], [30, 3, 1, "", "set_up_storage_reader"], [30, 3, 1, "", "validate_checkpoint_id"]], "torch.distributed.checkpoint.StorageWriter": [[30, 3, 1, "", "finish"], [30, 3, 1, "", "prepare_global_plan"], [30, 3, 1, "", "prepare_local_plan"], [30, 3, 1, "", "reset"], [30, 3, 1, "", "set_up_storage_writer"], [30, 3, 1, "", "storage_meta"], [30, 3, 1, "", "validate_checkpoint_id"], [30, 3, 1, "", "write_data"]], "torch.distributed.checkpoint.format_utils": [[30, 1, 1, "", "BroadcastingTorchSaveReader"], [30, 1, 1, "", "DynamicMetaLoadPlanner"], [30, 5, 1, "", "dcp_to_torch_save"], [30, 5, 1, "", "torch_save_to_dcp"]], "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader": [[30, 3, 1, "", "prepare_global_plan"], [30, 3, 1, "", "prepare_local_plan"], [30, 3, 1, "", "read_data"], [30, 3, 1, "", "read_metadata"], [30, 3, 1, "", "reset"], [30, 3, 1, "", "set_up_storage_reader"], [30, 3, 1, "", "validate_checkpoint_id"]], "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner": [[30, 3, 1, "", "set_up_planner"]], "torch.distributed.checkpoint.planner": [[30, 1, 1, "", "WriteItem"]], "torch.distributed.checkpoint.planner.WriteItem": [[30, 3, 
1, "", "tensor_storage_size"]], "torch.distributed.checkpoint.staging": [[30, 1, 1, "", "AsyncStager"], [30, 1, 1, "", "BlockingAsyncStager"]], "torch.distributed.checkpoint.staging.AsyncStager": [[30, 4, 1, "", "should_synchronize_after_execute"], [30, 3, 1, "", "stage"], [30, 3, 1, "", "synchronize_staging"]], "torch.distributed.checkpoint.staging.BlockingAsyncStager": [[30, 3, 1, "", "stage"], [30, 3, 1, "", "synchronize_staging"]], "torch.distributed.checkpoint.state_dict": [[30, 1, 1, "", "StateDictOptions"], [30, 5, 1, "", "get_model_state_dict"], [30, 5, 1, "", "get_optimizer_state_dict"], [30, 5, 1, "", "get_state_dict"], [30, 5, 1, "", "set_model_state_dict"], [30, 5, 1, "", "set_optimizer_state_dict"], [30, 5, 1, "", "set_state_dict"]], "torch.distributed.checkpoint.state_dict_loader": [[30, 5, 1, "", "load"], [30, 5, 1, "", "load_state_dict"]], "torch.distributed.checkpoint.state_dict_saver": [[30, 5, 1, "", "async_save"], [30, 5, 1, "", "save"], [30, 5, 1, "", "save_state_dict"]], "torch.distributed.checkpoint.stateful": [[30, 1, 1, "", "Stateful"]], "torch.distributed.checkpoint.stateful.Stateful": [[30, 3, 1, "", "load_state_dict"], [30, 3, 1, "", "state_dict"]], "torch.distributed.device_mesh": [[28, 1, 1, "", "DeviceMesh"], [28, 5, 1, "", "init_device_mesh"]], "torch.distributed.elastic": [[37, 0, 0, "-", "agent"], [38, 0, 0, "-", "control_plane"], [41, 0, 0, "-", "events"], [44, 0, 0, "-", "metrics"], [45, 0, 0, "-", "multiprocessing"], [47, 0, 0, "-", "rendezvous"], [50, 0, 0, "-", "timer"], [28, 0, 0, "-", "utils"]], "torch.distributed.elastic.agent": [[37, 0, 0, "-", "server"]], "torch.distributed.elastic.agent.server": [[37, 1, 1, "", "ElasticAgent"], [37, 1, 1, "", "SimpleElasticAgent"], [37, 1, 1, "", "Worker"], [37, 1, 1, "", "WorkerGroup"], [37, 1, 1, "", "WorkerSpec"], [37, 1, 1, "", "WorkerState"], [28, 0, 0, "-", "api"], [37, 0, 0, "-", "health_check_server"], [28, 0, 0, "-", "local_elastic_agent"]], "torch.distributed.elastic.agent.server.ElasticAgent": [[37, 3, 1, "", "get_worker_group"], [37, 3, 1, "", "run"]], "torch.distributed.elastic.agent.server.SimpleElasticAgent": [[37, 3, 1, "", "_assign_worker_ranks"], [37, 3, 1, "", "_exit_barrier"], [37, 3, 1, "", "_initialize_workers"], [37, 3, 1, "", "_monitor_workers"], [37, 3, 1, "", "_rendezvous"], [37, 3, 1, "", "_restart_workers"], [37, 3, 1, "", "_shutdown"], [37, 3, 1, "", "_start_workers"], [37, 3, 1, "", "_stop_workers"]], "torch.distributed.elastic.agent.server.WorkerSpec": [[37, 3, 1, "", "get_entrypoint_name"]], "torch.distributed.elastic.agent.server.WorkerState": [[37, 3, 1, "", "is_running"]], "torch.distributed.elastic.agent.server.api": [[37, 1, 1, "", "RunResult"]], "torch.distributed.elastic.agent.server.health_check_server": [[37, 1, 1, "", "HealthCheckServer"], [37, 5, 1, "", "create_healthcheck_server"]], "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer": [[37, 3, 1, "", "start"], [37, 3, 1, "", "stop"]], "torch.distributed.elastic.agent.server.local_elastic_agent": [[37, 1, 1, "", "LocalElasticAgent"]], "torch.distributed.elastic.control_plane": [[38, 5, 1, "", "worker_main"]], "torch.distributed.elastic.events": [[28, 0, 0, "-", "api"], [41, 5, 1, "", "get_logging_handler"], [28, 0, 0, "-", "handlers"], [41, 5, 1, "", "record"]], "torch.distributed.elastic.events.api": [[41, 1, 1, "", "Event"], [41, 2, 1, "", "EventMetadataValue"], [41, 1, 1, "", "EventSource"]], "torch.distributed.elastic.metrics": [[28, 0, 0, "-", "api"], [44, 5, 1, "", "configure"], [44, 5, 
1, "", "prof"], [44, 5, 1, "", "put_metric"]], "torch.distributed.elastic.metrics.api": [[44, 1, 1, "", "ConsoleMetricHandler"], [44, 1, 1, "", "MetricHandler"], [44, 1, 1, "", "NullMetricHandler"]], "torch.distributed.elastic.multiprocessing": [[28, 0, 0, "-", "api"], [40, 0, 0, "-", "errors"], [28, 0, 0, "-", "redirects"], [45, 5, 1, "", "start_processes"], [49, 0, 0, "-", "subprocess_handler"], [28, 0, 0, "-", "tail_log"]], "torch.distributed.elastic.multiprocessing.api": [[45, 1, 1, "", "DefaultLogsSpecs"], [45, 1, 1, "", "LogsDest"], [45, 1, 1, "", "LogsSpecs"], [45, 1, 1, "", "MultiprocessContext"], [45, 1, 1, "", "PContext"], [45, 1, 1, "", "RunProcsResult"], [45, 1, 1, "", "SubprocessContext"]], "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs": [[45, 3, 1, "", "reify"]], "torch.distributed.elastic.multiprocessing.api.LogsSpecs": [[45, 3, 1, "", "reify"]], "torch.distributed.elastic.multiprocessing.errors": [[40, 1, 1, "", "ChildFailedError"], [40, 1, 1, "", "ErrorHandler"], [40, 1, 1, "", "ProcessFailure"], [28, 0, 0, "-", "error_handler"], [28, 0, 0, "-", "handlers"], [40, 5, 1, "", "record"]], "torch.distributed.elastic.multiprocessing.subprocess_handler": [[49, 0, 0, "-", "handlers"], [49, 0, 0, "-", "subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers": [[49, 5, 1, "", "get_subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler": [[49, 1, 1, "", "SubprocessHandler"]], "torch.distributed.elastic.rendezvous": [[47, 1, 1, "", "RendezvousHandler"], [47, 1, 1, "", "RendezvousHandlerRegistry"], [47, 1, 1, "", "RendezvousInfo"], [47, 1, 1, "", "RendezvousParameters"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "c10d_rendezvous_backend"], [28, 0, 0, "-", "dynamic_rendezvous"], [28, 0, 0, "-", "etcd_rendezvous"], [28, 0, 0, "-", "etcd_rendezvous_backend"], [28, 0, 0, "-", "etcd_server"], [28, 0, 0, "-", "etcd_store"], [47, 0, 0, "-", "registry"], [28, 0, 0, "-", "static_tcp_rendezvous"], [28, 0, 0, "-", "utils"]], "torch.distributed.elastic.rendezvous.RendezvousHandler": [[47, 3, 1, "", "get_backend"], [47, 3, 1, "", "get_run_id"], [47, 3, 1, "", "is_closed"], [47, 3, 1, "", "next_rendezvous"], [47, 3, 1, "", "num_nodes_waiting"], [47, 3, 1, "", "set_closed"], [47, 3, 1, "", "shutdown"], [47, 4, 1, "", "use_agent_store"]], "torch.distributed.elastic.rendezvous.RendezvousParameters": [[47, 3, 1, "", "get"], [47, 3, 1, "", "get_as_bool"], [47, 3, 1, "", "get_as_int"]], "torch.distributed.elastic.rendezvous.api": [[47, 1, 1, "", "RendezvousClosedError"], [47, 1, 1, "", "RendezvousConnectionError"], [47, 1, 1, "", "RendezvousError"], [47, 1, 1, "", "RendezvousGracefulExitError"], [47, 1, 1, "", "RendezvousStateError"], [47, 1, 1, "", "RendezvousStoreInfo"], [47, 1, 1, "", "RendezvousTimeoutError"]], "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo": [[47, 3, 1, "", "build"]], "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend": [[47, 1, 1, "", "C10dRendezvousBackend"], [47, 5, 1, "", "create_backend"]], "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend": [[47, 3, 1, "", "get_state"], [47, 4, 1, "", "name"], [47, 3, 1, "", "set_state"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous": [[47, 1, 1, "", "DynamicRendezvousHandler"], [47, 1, 1, "", "RendezvousBackend"], [47, 1, 1, "", "RendezvousTimeout"], [47, 5, 1, "", "create_handler"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler": 
[[47, 3, 1, "", "from_backend"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend": [[47, 3, 1, "", "get_state"], [47, 4, 1, "", "name"], [47, 3, 1, "", "set_state"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout": [[47, 4, 1, "", "close"], [47, 4, 1, "", "heartbeat"], [47, 4, 1, "", "join"], [47, 4, 1, "", "last_call"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous": [[47, 1, 1, "", "EtcdRendezvousHandler"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend": [[47, 1, 1, "", "EtcdRendezvousBackend"], [47, 5, 1, "", "create_backend"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend": [[47, 3, 1, "", "get_state"], [47, 4, 1, "", "name"], [47, 3, 1, "", "set_state"]], "torch.distributed.elastic.rendezvous.etcd_server": [[47, 1, 1, "", "EtcdServer"]], "torch.distributed.elastic.rendezvous.etcd_store": [[47, 1, 1, "", "EtcdStore"]], "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore": [[47, 3, 1, "", "add"], [47, 3, 1, "", "check"], [47, 3, 1, "", "get"], [47, 3, 1, "", "set"], [47, 3, 1, "", "wait"]], "torch.distributed.elastic.timer": [[50, 1, 1, "", "FileTimerClient"], [50, 1, 1, "", "FileTimerServer"], [50, 1, 1, "", "LocalTimerClient"], [50, 1, 1, "", "LocalTimerServer"], [50, 1, 1, "", "TimerClient"], [50, 1, 1, "", "TimerRequest"], [50, 1, 1, "", "TimerServer"], [28, 0, 0, "-", "api"], [50, 5, 1, "", "configure"], [50, 0, 0, "-", "debug_info_logging"], [50, 5, 1, "", "expires"], [28, 0, 0, "-", "file_based_local_timer"], [28, 0, 0, "-", "local_timer"]], "torch.distributed.elastic.timer.TimerClient": [[50, 3, 1, "", "acquire"], [50, 3, 1, "", "release"]], "torch.distributed.elastic.timer.TimerServer": [[50, 3, 1, "", "clear_timers"], [50, 3, 1, "", "get_expired_timers"], [50, 3, 1, "", "register_timers"]], "torch.distributed.elastic.timer.debug_info_logging": [[50, 5, 1, "", "log_debug_info_for_expired_timers"]], "torch.distributed.elastic.utils": [[28, 0, 0, "-", "api"], [28, 0, 0, "-", "data"], [28, 0, 0, "-", "distributed"], [28, 0, 0, "-", "log_level"], [28, 0, 0, "-", "logging"], [28, 0, 0, "-", "store"]], "torch.distributed.elastic.utils.data": [[28, 0, 0, "-", "cycling_iterator"], [28, 0, 0, "-", "elastic_distributed_sampler"]], "torch.distributed.fsdp": [[55, 1, 1, "", "BackwardPrefetch"], [55, 1, 1, "", "CPUOffload"], [55, 1, 1, "", "FullOptimStateDictConfig"], [55, 1, 1, "", "FullStateDictConfig"], [55, 1, 1, "", "FullyShardedDataParallel"], [55, 1, 1, "", "LocalOptimStateDictConfig"], [55, 1, 1, "", "LocalStateDictConfig"], [55, 1, 1, "", "MixedPrecision"], [55, 1, 1, "", "OptimStateDictConfig"], [55, 1, 1, "", "ShardedOptimStateDictConfig"], [55, 1, 1, "", "ShardedStateDictConfig"], [55, 1, 1, "", "ShardingStrategy"], [55, 1, 1, "", "StateDictConfig"], [55, 1, 1, "", "StateDictSettings"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "fully_sharded_data_parallel"], [28, 0, 0, "-", "sharded_grad_scaler"], [28, 0, 0, "-", "wrap"]], "torch.distributed.fsdp.FullyShardedDataParallel": [[55, 3, 1, "", "apply"], [55, 3, 1, "", "check_is_root"], [55, 3, 1, "", "clip_grad_norm_"], [55, 3, 1, "", "flatten_sharded_optim_state_dict"], [55, 3, 1, "", "forward"], [55, 3, 1, "", "fsdp_modules"], [55, 3, 1, "", "full_optim_state_dict"], [55, 3, 1, "", "get_state_dict_type"], [55, 4, 1, "", "module"], [55, 3, 1, "", "named_buffers"], [55, 3, 1, "", "named_parameters"], [55, 3, 1, "", "no_sync"], [55, 3, 1, "", "optim_state_dict"], [55, 3, 1, "", "optim_state_dict_to_load"], 
[55, 3, 1, "", "register_comm_hook"], [55, 3, 1, "", "rekey_optim_state_dict"], [55, 3, 1, "", "scatter_full_optim_state_dict"], [55, 3, 1, "", "set_state_dict_type"], [55, 3, 1, "", "shard_full_optim_state_dict"], [55, 3, 1, "", "sharded_optim_state_dict"], [55, 3, 1, "", "state_dict_type"], [55, 3, 1, "", "summon_full_params"]], "torch.distributed.launcher": [[28, 0, 0, "-", "api"]], "torch.distributed.nn": [[28, 0, 0, "-", "api"], [28, 0, 0, "-", "functional"], [28, 0, 0, "-", "jit"]], "torch.distributed.nn.api": [[28, 0, 0, "-", "remote_module"]], "torch.distributed.nn.api.remote_module": [[2077, 1, 1, "", "RemoteModule"]], "torch.distributed.nn.api.remote_module.RemoteModule": [[2077, 3, 1, "", "get_module_rref"], [2077, 3, 1, "", "remote_parameters"]], "torch.distributed.nn.jit": [[28, 0, 0, "-", "instantiator"], [28, 0, 0, "-", "templates"]], "torch.distributed.nn.jit.templates": [[28, 0, 0, "-", "remote_module_template"]], "torch.distributed.optim": [[32, 1, 1, "", "DistributedOptimizer"], [32, 1, 1, "", "PostLocalSGDOptimizer"], [32, 1, 1, "", "ZeroRedundancyOptimizer"], [28, 0, 0, "-", "apply_optimizer_in_backward"], [28, 0, 0, "-", "functional_adadelta"], [28, 0, 0, "-", "functional_adagrad"], [28, 0, 0, "-", "functional_adam"], [28, 0, 0, "-", "functional_adamax"], [28, 0, 0, "-", "functional_adamw"], [28, 0, 0, "-", "functional_rmsprop"], [28, 0, 0, "-", "functional_rprop"], [28, 0, 0, "-", "functional_sgd"], [28, 0, 0, "-", "named_optimizer"], [28, 0, 0, "-", "optimizer"], [28, 0, 0, "-", "post_localSGD_optimizer"], [28, 0, 0, "-", "utils"], [28, 0, 0, "-", "zero_redundancy_optimizer"]], "torch.distributed.optim.DistributedOptimizer": [[32, 3, 1, "", "step"]], "torch.distributed.optim.PostLocalSGDOptimizer": [[32, 3, 1, "", "load_state_dict"], [32, 3, 1, "", "state_dict"], [32, 3, 1, "", "step"]], "torch.distributed.optim.ZeroRedundancyOptimizer": [[32, 3, 1, "", "add_param_group"], [32, 3, 1, "", "consolidate_state_dict"], [32, 4, 1, "", "join_device"], [32, 3, 1, "", "join_hook"], [32, 4, 1, "", "join_process_group"], [32, 3, 1, "", "load_state_dict"], [32, 3, 1, "", "state_dict"], [32, 3, 1, "", "step"]], "torch.distributed.pipelining": [[33, 1, 1, "", "Pipe"], [33, 1, 1, "", "SplitPoint"], [33, 0, 0, "-", "microbatch"], [33, 5, 1, "", "pipe_split"], [33, 5, 1, "", "pipeline"], [33, 0, 0, "-", "schedules"], [33, 0, 0, "-", "stage"]], "torch.distributed.pipelining.microbatch": [[33, 1, 1, "", "TensorChunkSpec"], [33, 5, 1, "", "merge_chunks"], [33, 5, 1, "", "split_args_kwargs_into_chunks"]], "torch.distributed.pipelining.schedules": [[33, 1, 1, "", "PipelineScheduleMulti"], [33, 1, 1, "", "PipelineScheduleSingle"], [33, 1, 1, "", "Schedule1F1B"], [33, 1, 1, "", "ScheduleGPipe"], [33, 1, 1, "", "ScheduleInterleaved1F1B"], [33, 1, 1, "", "ScheduleLoopedBFS"]], "torch.distributed.pipelining.schedules.PipelineScheduleMulti": [[33, 3, 1, "", "step"]], "torch.distributed.pipelining.schedules.PipelineScheduleSingle": [[33, 3, 1, "", "step"]], "torch.distributed.pipelining.stage": [[33, 1, 1, "", "PipelineStage"], [33, 5, 1, "", "build_stage"]], "torch.distributed.rpc": [[2077, 1, 1, "", "BackendType"], [2077, 1, 1, "", "PyRRef"], [2077, 1, 1, "", "RpcBackendOptions"], [2077, 1, 1, "", "TensorPipeRpcBackendOptions"], [2077, 1, 1, "", "WorkerInfo"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "backend_registry"], [28, 0, 0, "-", "constants"], [28, 0, 0, "-", "functions"], [2077, 5, 1, "", "get_worker_info"], [2077, 5, 1, "", "init_rpc"], [28, 0, 0, "-", "internal"], [28, 0, 0, "-", 
"options"], [2077, 5, 1, "", "remote"], [2077, 5, 1, "", "rpc_async"], [2077, 5, 1, "", "rpc_sync"], [28, 0, 0, "-", "rref_proxy"], [28, 0, 0, "-", "server_process_global_profiler"], [2077, 5, 1, "", "shutdown"]], "torch.distributed.rpc.PyRRef": [[2077, 3, 1, "", "backward"], [2077, 3, 1, "", "confirmed_by_owner"], [2077, 3, 1, "", "is_owner"], [2077, 3, 1, "", "local_value"], [2077, 3, 1, "", "owner"], [2077, 3, 1, "", "owner_name"], [2077, 3, 1, "", "remote"], [2077, 3, 1, "", "rpc_async"], [2077, 3, 1, "", "rpc_sync"], [2077, 3, 1, "", "to_here"]], "torch.distributed.rpc.RpcBackendOptions": [[2077, 4, 1, "", "init_method"], [2077, 4, 1, "", "rpc_timeout"]], "torch.distributed.rpc.TensorPipeRpcBackendOptions": [[2077, 4, 1, "", "device_maps"], [2077, 4, 1, "", "devices"], [2077, 4, 1, "", "init_method"], [2077, 4, 1, "", "num_worker_threads"], [2077, 4, 1, "", "rpc_timeout"], [2077, 3, 1, "", "set_device_map"], [2077, 3, 1, "", "set_devices"]], "torch.distributed.rpc.WorkerInfo": [[2077, 4, 1, "", "id"], [2077, 4, 1, "", "name"]], "torch.distributed.rpc.functions": [[2077, 5, 1, "", "async_execution"]], "torch.distributed.tensor": [[34, 0, 0, "-", "parallel"]], "torch.distributed.tensor.parallel": [[34, 1, 1, "", "ColwiseParallel"], [34, 1, 1, "", "PrepareModuleInput"], [34, 1, 1, "", "PrepareModuleOutput"], [34, 1, 1, "", "RowwiseParallel"], [34, 1, 1, "", "SequenceParallel"], [28, 0, 0, "-", "api"], [28, 0, 0, "-", "ddp"], [28, 0, 0, "-", "fsdp"], [28, 0, 0, "-", "input_reshard"], [28, 0, 0, "-", "loss"], [34, 5, 1, "", "loss_parallel"], [34, 5, 1, "", "parallelize_module"], [28, 0, 0, "-", "style"]], "torch.distributions": [[35, 0, 0, "-", "bernoulli"], [35, 0, 0, "-", "beta"], [35, 0, 0, "-", "binomial"], [35, 0, 0, "-", "categorical"], [35, 0, 0, "-", "cauchy"], [35, 0, 0, "-", "chi2"], [35, 0, 0, "-", "constraint_registry"], [35, 0, 0, "-", "constraints"], [35, 0, 0, "-", "continuous_bernoulli"], [35, 0, 0, "-", "dirichlet"], [35, 0, 0, "-", "distribution"], [35, 0, 0, "-", "exp_family"], [35, 0, 0, "-", "exponential"], [35, 0, 0, "-", "fishersnedecor"], [35, 0, 0, "-", "gamma"], [35, 0, 0, "-", "geometric"], [35, 0, 0, "-", "gumbel"], [35, 0, 0, "-", "half_cauchy"], [35, 0, 0, "-", "half_normal"], [35, 0, 0, "-", "independent"], [35, 0, 0, "-", "inverse_gamma"], [35, 0, 0, "-", "kl"], [35, 0, 0, "-", "kumaraswamy"], [35, 0, 0, "-", "laplace"], [35, 0, 0, "-", "lkj_cholesky"], [35, 0, 0, "-", "log_normal"], [35, 0, 0, "-", "logistic_normal"], [35, 0, 0, "-", "lowrank_multivariate_normal"], [35, 0, 0, "-", "mixture_same_family"], [35, 0, 0, "-", "multinomial"], [35, 0, 0, "-", "multivariate_normal"], [35, 0, 0, "-", "negative_binomial"], [35, 0, 0, "-", "normal"], [35, 0, 0, "-", "one_hot_categorical"], [35, 0, 0, "-", "pareto"], [35, 0, 0, "-", "poisson"], [35, 0, 0, "-", "relaxed_bernoulli"], [35, 0, 0, "-", "relaxed_categorical"], [35, 0, 0, "-", "studentT"], [35, 0, 0, "-", "transformed_distribution"], [35, 0, 0, "-", "transforms"], [35, 0, 0, "-", "uniform"], [35, 0, 0, "-", "utils"], [35, 0, 0, "-", "von_mises"], [35, 0, 0, "-", "weibull"], [35, 0, 0, "-", "wishart"]], "torch.distributions.bernoulli": [[35, 1, 1, "", "Bernoulli"]], "torch.distributions.bernoulli.Bernoulli": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_enumerate_support"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 
1, "", "probs"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.beta": [[35, 1, 1, "", "Beta"]], "torch.distributions.beta.Beta": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "concentration0"], [35, 4, 1, "", "concentration1"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.binomial": [[35, 1, 1, "", "Binomial"]], "torch.distributions.binomial.Binomial": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_enumerate_support"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.categorical": [[35, 1, 1, "", "Categorical"]], "torch.distributions.categorical.Categorical": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_enumerate_support"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.cauchy": [[35, 1, 1, "", "Cauchy"]], "torch.distributions.cauchy.Cauchy": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.chi2": [[35, 1, 1, "", "Chi2"]], "torch.distributions.chi2.Chi2": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "df"], [35, 3, 1, "", "expand"]], "torch.distributions.constraint_registry": [[35, 1, 1, "", "ConstraintRegistry"]], "torch.distributions.constraint_registry.ConstraintRegistry": [[35, 3, 1, "", "register"]], "torch.distributions.constraints": [[35, 1, 1, "", "Constraint"], [35, 2, 1, "", "cat"], [35, 2, 1, "", "dependent_property"], [35, 2, 1, "", "greater_than"], [35, 2, 1, "", "greater_than_eq"], [35, 2, 1, "", "half_open_interval"], [35, 2, 1, "", "independent"], [35, 2, 1, "", "integer_interval"], [35, 2, 1, "", "interval"], [35, 2, 1, "", "less_than"], [35, 2, 1, "", "multinomial"], [35, 2, 1, "", "stack"]], "torch.distributions.constraints.Constraint": [[35, 3, 1, "", "check"]], "torch.distributions.continuous_bernoulli": [[35, 1, 1, "", "ContinuousBernoulli"]], "torch.distributions.continuous_bernoulli.ContinuousBernoulli": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.dirichlet": [[35, 1, 1, "", "Dirichlet"]], "torch.distributions.dirichlet.Dirichlet": [[35, 2, 1, "", "arg_constraints"], [35, 3, 
1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.distribution": [[35, 1, 1, "", "Distribution"]], "torch.distributions.distribution.Distribution": [[35, 4, 1, "", "arg_constraints"], [35, 4, 1, "", "batch_shape"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 4, 1, "", "event_shape"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "perplexity"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 3, 1, "", "sample_n"], [35, 3, 1, "", "set_default_validate_args"], [35, 4, 1, "", "stddev"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.exp_family": [[35, 1, 1, "", "ExponentialFamily"]], "torch.distributions.exp_family.ExponentialFamily": [[35, 3, 1, "", "entropy"]], "torch.distributions.exponential": [[35, 1, 1, "", "Exponential"]], "torch.distributions.exponential.Exponential": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.fishersnedecor": [[35, 1, 1, "", "FisherSnedecor"]], "torch.distributions.fishersnedecor.FisherSnedecor": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.gamma": [[35, 1, 1, "", "Gamma"]], "torch.distributions.gamma.Gamma": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.geometric": [[35, 1, 1, "", "Geometric"]], "torch.distributions.geometric.Geometric": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.gumbel": [[35, 1, 1, "", "Gumbel"]], "torch.distributions.gumbel.Gumbel": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.half_cauchy": [[35, 1, 1, "", "HalfCauchy"]], "torch.distributions.half_cauchy.HalfCauchy": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "scale"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.half_normal": [[35, 1, 1, "", "HalfNormal"]], 
"torch.distributions.half_normal.HalfNormal": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "scale"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.independent": [[35, 1, 1, "", "Independent"]], "torch.distributions.independent.Independent": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 4, 1, "", "has_enumerate_support"], [35, 4, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.inverse_gamma": [[35, 1, 1, "", "InverseGamma"]], "torch.distributions.inverse_gamma.InverseGamma": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "concentration"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "rate"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.kl": [[35, 5, 1, "", "kl_divergence"], [35, 5, 1, "", "register_kl"]], "torch.distributions.kumaraswamy": [[35, 1, 1, "", "Kumaraswamy"]], "torch.distributions.kumaraswamy.Kumaraswamy": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.laplace": [[35, 1, 1, "", "Laplace"]], "torch.distributions.laplace.Laplace": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.lkj_cholesky": [[35, 1, 1, "", "LKJCholesky"]], "torch.distributions.lkj_cholesky.LKJCholesky": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"]], "torch.distributions.log_normal": [[35, 1, 1, "", "LogNormal"]], "torch.distributions.log_normal.LogNormal": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "loc"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "scale"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.lowrank_multivariate_normal": [[35, 1, 1, "", "LowRankMultivariateNormal"]], "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "covariance_matrix"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "precision_matrix"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "scale_tril"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.mixture_same_family": [[35, 1, 1, "", "MixtureSameFamily"]], "torch.distributions.mixture_same_family.MixtureSameFamily": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], 
[35, 4, 1, "", "component_distribution"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mixture_distribution"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.multinomial": [[35, 1, 1, "", "Multinomial"]], "torch.distributions.multinomial.Multinomial": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"], [35, 2, 1, "", "total_count"], [35, 4, 1, "", "variance"]], "torch.distributions.multivariate_normal": [[35, 1, 1, "", "MultivariateNormal"]], "torch.distributions.multivariate_normal.MultivariateNormal": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "covariance_matrix"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "precision_matrix"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "scale_tril"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.negative_binomial": [[35, 1, 1, "", "NegativeBinomial"]], "torch.distributions.negative_binomial.NegativeBinomial": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.normal": [[35, 1, 1, "", "Normal"]], "torch.distributions.normal.Normal": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "stddev"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.one_hot_categorical": [[35, 1, 1, "", "OneHotCategorical"]], "torch.distributions.one_hot_categorical.OneHotCategorical": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "enumerate_support"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_enumerate_support"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.pareto": [[35, 1, 1, "", "Pareto"]], "torch.distributions.pareto.Pareto": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.poisson": [[35, 1, 1, "", "Poisson"]], "torch.distributions.poisson.Poisson": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.relaxed_bernoulli": [[35, 1, 1, "", "LogitRelaxedBernoulli"], [35, 1, 1, "", "RelaxedBernoulli"]], "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", 
"expand"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "param_shape"], [35, 4, 1, "", "probs"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"]], "torch.distributions.relaxed_bernoulli.RelaxedBernoulli": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "probs"], [35, 2, 1, "", "support"], [35, 4, 1, "", "temperature"]], "torch.distributions.relaxed_categorical": [[35, 1, 1, "", "RelaxedOneHotCategorical"]], "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 4, 1, "", "logits"], [35, 4, 1, "", "probs"], [35, 2, 1, "", "support"], [35, 4, 1, "", "temperature"]], "torch.distributions.studentT": [[35, 1, 1, "", "StudentT"]], "torch.distributions.studentT.StudentT": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.transformed_distribution": [[35, 1, 1, "", "TransformedDistribution"]], "torch.distributions.transformed_distribution.TransformedDistribution": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "expand"], [35, 4, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 3, 1, "", "rsample"], [35, 3, 1, "", "sample"], [35, 4, 1, "", "support"]], "torch.distributions.transforms": [[35, 1, 1, "", "AbsTransform"], [35, 1, 1, "", "AffineTransform"], [35, 1, 1, "", "CatTransform"], [35, 1, 1, "", "ComposeTransform"], [35, 1, 1, "", "CorrCholeskyTransform"], [35, 1, 1, "", "CumulativeDistributionTransform"], [35, 1, 1, "", "ExpTransform"], [35, 1, 1, "", "IndependentTransform"], [35, 1, 1, "", "LowerCholeskyTransform"], [35, 1, 1, "", "PositiveDefiniteTransform"], [35, 1, 1, "", "PowerTransform"], [35, 1, 1, "", "ReshapeTransform"], [35, 1, 1, "", "SigmoidTransform"], [35, 1, 1, "", "SoftmaxTransform"], [35, 1, 1, "", "SoftplusTransform"], [35, 1, 1, "", "StackTransform"], [35, 1, 1, "", "StickBreakingTransform"], [35, 1, 1, "", "TanhTransform"], [35, 1, 1, "", "Transform"]], "torch.distributions.transforms.Transform": [[35, 3, 1, "", "forward_shape"], [35, 4, 1, "", "inv"], [35, 3, 1, "", "inverse_shape"], [35, 3, 1, "", "log_abs_det_jacobian"], [35, 4, 1, "", "sign"]], "torch.distributions.uniform": [[35, 1, 1, "", "Uniform"]], "torch.distributions.uniform.Uniform": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "cdf"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "icdf"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "stddev"], [35, 4, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.von_mises": [[35, 1, 1, "", "VonMises"]], "torch.distributions.von_mises.VonMises": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 3, 1, "", "sample"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.weibull": [[35, 1, 1, "", "Weibull"]], "torch.distributions.weibull.Weibull": [[35, 2, 1, "", "arg_constraints"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 4, 1, "", "mean"], [35, 
4, 1, "", "mode"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.distributions.wishart": [[35, 1, 1, "", "Wishart"]], "torch.distributions.wishart.Wishart": [[35, 2, 1, "", "arg_constraints"], [35, 4, 1, "", "covariance_matrix"], [35, 3, 1, "", "entropy"], [35, 3, 1, "", "expand"], [35, 2, 1, "", "has_rsample"], [35, 3, 1, "", "log_prob"], [35, 4, 1, "", "mean"], [35, 4, 1, "", "mode"], [35, 4, 1, "", "precision_matrix"], [35, 3, 1, "", "rsample"], [35, 4, 1, "", "scale_tril"], [35, 2, 1, "", "support"], [35, 4, 1, "", "variance"]], "torch.export": [[52, 2, 1, "", "Constraint"], [52, 1, 1, "", "ExportBackwardSignature"], [52, 1, 1, "", "ExportGraphSignature"], [52, 1, 1, "", "ExportedProgram"], [52, 1, 1, "", "ModuleCallEntry"], [52, 1, 1, "", "ModuleCallSignature"], [52, 0, 0, "-", "custom_obj"], [52, 5, 1, "", "dims"], [52, 0, 0, "-", "dynamic_shapes"], [52, 5, 1, "", "export"], [52, 0, 0, "-", "exported_program"], [52, 0, 0, "-", "graph_signature"], [52, 5, 1, "", "load"], [52, 5, 1, "", "register_dataclass"], [52, 5, 1, "", "save"], [52, 0, 0, "-", "unflatten"]], "torch.export.ExportedProgram": [[52, 3, 1, "", "buffers"], [52, 3, 1, "", "module"], [52, 3, 1, "", "named_buffers"], [52, 3, 1, "", "named_parameters"], [52, 3, 1, "", "parameters"], [52, 3, 1, "", "run_decompositions"]], "torch.export.dynamic_shapes": [[52, 5, 1, "", "Dim"], [52, 1, 1, "", "ShapesCollection"], [52, 5, 1, "", "dynamic_dim"], [52, 5, 1, "", "refine_dynamic_shapes_from_suggested_fixes"]], "torch.export.dynamic_shapes.ShapesCollection": [[52, 3, 1, "", "dynamic_shapes"]], "torch.export.graph_signature": [[52, 1, 1, "", "CustomObjArgument"], [52, 1, 1, "", "ExportGraphSignature"], [52, 1, 1, "", "InputKind"], [52, 1, 1, "", "InputSpec"], [52, 1, 1, "", "OutputKind"], [52, 1, 1, "", "OutputSpec"]], "torch.export.graph_signature.ExportGraphSignature": [[52, 3, 1, "", "get_replace_hook"], [52, 3, 1, "", "replace_all_uses"]], "torch.export.unflatten": [[52, 1, 1, "", "FlatArgsAdapter"], [52, 1, 1, "", "InterpreterModule"], [52, 5, 1, "", "unflatten"]], "torch.export.unflatten.FlatArgsAdapter": [[52, 3, 1, "", "adapt"]], "torch.fft": [[1125, 5, 1, "", "fft"], [1126, 5, 1, "", "fft2"], [1127, 5, 1, "", "fftfreq"], [1128, 5, 1, "", "fftn"], [1129, 5, 1, "", "fftshift"], [1130, 5, 1, "", "hfft"], [1131, 5, 1, "", "hfft2"], [1132, 5, 1, "", "hfftn"], [1133, 5, 1, "", "ifft"], [1134, 5, 1, "", "ifft2"], [1135, 5, 1, "", "ifftn"], [1136, 5, 1, "", "ifftshift"], [1137, 5, 1, "", "ihfft"], [1138, 5, 1, "", "ihfft2"], [1139, 5, 1, "", "ihfftn"], [1140, 5, 1, "", "irfft"], [1141, 5, 1, "", "irfft2"], [1142, 5, 1, "", "irfftn"], [1143, 5, 1, "", "rfft"], [1144, 5, 1, "", "rfft2"], [1145, 5, 1, "", "rfftfreq"], [1146, 5, 1, "", "rfftn"]], "torch.func": [[1166, 5, 1, "", "functional_call"], [1167, 5, 1, "", "functionalize"], [1168, 5, 1, "", "grad"], [1169, 5, 1, "", "grad_and_value"], [1170, 5, 1, "", "hessian"], [1171, 5, 1, "", "jacfwd"], [1172, 5, 1, "", "jacrev"], [1173, 5, 1, "", "jvp"], [1174, 5, 1, "", "linearize"], [1175, 5, 1, "", "replace_all_batch_norm_modules_"], [1176, 5, 1, "", "stack_module_state"], [1177, 5, 1, "", "vjp"], [1178, 5, 1, "", "vmap"]], "torch.futures": [[63, 1, 1, "", "Future"], [63, 5, 1, "", "collect_all"], [63, 5, 1, "", "wait_all"]], "torch.futures.Future": [[63, 3, 1, "", "add_done_callback"], [63, 3, 1, "", "done"], [63, 3, 1, "", "set_exception"], [63, 3, 1, "", "set_result"], [63, 3, 1, "", "then"], [63, 3, 1, "", "value"], [63, 3, 1, "", "wait"]], "torch.fx": [[64, 1, 1, 
"", "Graph"], [64, 1, 1, "", "GraphModule"], [64, 1, 1, "", "Interpreter"], [64, 1, 1, "", "Node"], [64, 1, 1, "", "Proxy"], [64, 1, 1, "", "Tracer"], [64, 1, 1, "", "Transformer"], [64, 0, 0, "-", "annotate"], [64, 0, 0, "-", "config"], [64, 0, 0, "-", "experimental"], [64, 0, 0, "-", "graph"], [64, 0, 0, "-", "graph_module"], [64, 0, 0, "-", "immutable_collections"], [64, 0, 0, "-", "interpreter"], [64, 0, 0, "-", "node"], [64, 0, 0, "-", "operator_schemas"], [64, 0, 0, "-", "passes"], [64, 0, 0, "-", "proxy"], [64, 5, 1, "", "replace_pattern"], [64, 0, 0, "-", "subgraph_rewriter"], [64, 5, 1, "", "symbolic_trace"], [64, 0, 0, "-", "tensor_type"], [64, 0, 0, "-", "traceback"], [64, 5, 1, "", "wrap"]], "torch.fx.Graph": [[64, 3, 1, "", "__init__"], [64, 3, 1, "", "call_function"], [64, 3, 1, "", "call_method"], [64, 3, 1, "", "call_module"], [64, 3, 1, "", "create_node"], [64, 3, 1, "", "eliminate_dead_code"], [64, 3, 1, "", "erase_node"], [64, 3, 1, "", "find_nodes"], [64, 3, 1, "", "get_attr"], [64, 3, 1, "", "graph_copy"], [64, 3, 1, "", "inserting_after"], [64, 3, 1, "", "inserting_before"], [64, 3, 1, "", "lint"], [64, 3, 1, "", "node_copy"], [64, 4, 1, "", "nodes"], [64, 3, 1, "", "on_generate_code"], [64, 3, 1, "", "output"], [64, 3, 1, "", "placeholder"], [64, 3, 1, "", "print_tabular"], [64, 3, 1, "", "process_inputs"], [64, 3, 1, "", "process_outputs"], [64, 3, 1, "", "python_code"], [64, 3, 1, "", "set_codegen"]], "torch.fx.GraphModule": [[64, 3, 1, "", "__init__"], [64, 3, 1, "", "add_submodule"], [64, 4, 1, "", "code"], [64, 3, 1, "", "delete_all_unused_submodules"], [64, 3, 1, "", "delete_submodule"], [64, 4, 1, "", "graph"], [64, 3, 1, "", "print_readable"], [64, 3, 1, "", "recompile"], [64, 3, 1, "", "to_folder"]], "torch.fx.Interpreter": [[64, 3, 1, "", "boxed_run"], [64, 3, 1, "", "call_function"], [64, 3, 1, "", "call_method"], [64, 3, 1, "", "call_module"], [64, 3, 1, "", "fetch_args_kwargs_from_env"], [64, 3, 1, "", "fetch_attr"], [64, 3, 1, "", "get_attr"], [64, 3, 1, "", "map_nodes_to_values"], [64, 3, 1, "", "output"], [64, 3, 1, "", "placeholder"], [64, 3, 1, "", "run"], [64, 3, 1, "", "run_node"]], "torch.fx.Node": [[64, 4, 1, "", "all_input_nodes"], [64, 3, 1, "", "append"], [64, 4, 1, "", "args"], [64, 3, 1, "", "format_node"], [64, 3, 1, "", "insert_arg"], [64, 3, 1, "", "is_impure"], [64, 4, 1, "", "kwargs"], [64, 4, 1, "", "next"], [64, 3, 1, "", "normalized_arguments"], [64, 3, 1, "", "prepend"], [64, 4, 1, "", "prev"], [64, 3, 1, "", "replace_all_uses_with"], [64, 3, 1, "", "replace_input_with"], [64, 4, 1, "", "stack_trace"], [64, 3, 1, "", "update_arg"], [64, 3, 1, "", "update_kwarg"]], "torch.fx.Tracer": [[64, 3, 1, "", "call_module"], [64, 3, 1, "", "create_arg"], [64, 3, 1, "", "create_args_for_root"], [64, 3, 1, "", "create_node"], [64, 3, 1, "", "create_proxy"], [64, 3, 1, "", "get_fresh_qualname"], [64, 3, 1, "", "getattr"], [64, 3, 1, "", "is_leaf_module"], [64, 3, 1, "", "iter"], [64, 3, 1, "", "keys"], [64, 3, 1, "", "path_of_module"], [64, 3, 1, "", "proxy"], [64, 3, 1, "", "to_bool"], [64, 3, 1, "", "trace"]], "torch.fx.Transformer": [[64, 3, 1, "", "call_function"], [64, 3, 1, "", "call_module"], [64, 3, 1, "", "get_attr"], [64, 3, 1, "", "placeholder"], [64, 3, 1, "", "transform"]], "torch.fx.experimental": [[64, 0, 0, "-", "accelerator_partitioner"], [64, 0, 0, "-", "const_fold"], [64, 0, 0, "-", "debug"], [64, 0, 0, "-", "graph_gradual_typechecker"], [64, 0, 0, "-", "merge_matmul"], [64, 0, 0, "-", "meta_tracer"], [64, 0, 0, "-", 
"migrate_gradual_types"], [64, 0, 0, "-", "normalize"], [64, 0, 0, "-", "optimization"], [64, 0, 0, "-", "partitioner_utils"], [64, 0, 0, "-", "proxy_tensor"], [64, 0, 0, "-", "recording"], [64, 0, 0, "-", "refinement_types"], [64, 0, 0, "-", "rewriter"], [64, 0, 0, "-", "schema_type_annotation"], [64, 0, 0, "-", "sym_node"], [65, 0, 0, "-", "symbolic_shapes"], [64, 0, 0, "-", "unification"], [64, 0, 0, "-", "unify_refinements"], [64, 0, 0, "-", "validator"]], "torch.fx.experimental.migrate_gradual_types": [[64, 0, 0, "-", "constraint"], [64, 0, 0, "-", "constraint_generator"], [64, 0, 0, "-", "constraint_transformation"], [64, 0, 0, "-", "operation"], [64, 0, 0, "-", "transform_to_z3"], [64, 0, 0, "-", "util"], [64, 0, 0, "-", "z3_types"]], "torch.fx.experimental.symbolic_shapes": [[1179, 1, 1, "", "CallMethodKey"], [1180, 1, 1, "", "ConvertIntKey"], [1181, 1, 1, "", "DimConstraints"], [1182, 1, 1, "", "DimDynamic"], [1183, 1, 1, "", "DivideByKey"], [1184, 1, 1, "", "EqualityConstraint"], [1185, 1, 1, "", "InnerTensorKey"], [1186, 1, 1, "", "PropagateUnbackedSymInts"], [1187, 1, 1, "", "RelaxedUnspecConstraint"], [1188, 1, 1, "", "ShapeEnv"], [1189, 1, 1, "", "ShapeEnvSettings"], [1190, 1, 1, "", "StatefulSymbolicContext"], [1191, 1, 1, "", "StatelessSymbolicContext"], [1192, 1, 1, "", "StrictMinMaxConstraint"], [1193, 1, 1, "", "SubclassSymbolicContext"], [1194, 1, 1, "", "SymbolicContext"], [1195, 5, 1, "", "canonicalize_bool_expr"], [1196, 5, 1, "", "check_consistent"], [1197, 5, 1, "", "compute_unbacked_bindings"], [1198, 5, 1, "", "constrain_range"], [1199, 5, 1, "", "constrain_unify"], [1200, 5, 1, "", "definitely_false"], [1201, 5, 1, "", "definitely_true"], [1202, 5, 1, "", "guard_size_oblivious"], [1203, 5, 1, "", "has_free_symbols"], [1204, 5, 1, "", "hint_int"], [1205, 5, 1, "", "is_concrete_bool"], [1206, 5, 1, "", "is_concrete_int"], [1207, 5, 1, "", "lru_cache"], [1208, 5, 1, "", "parallel_and"], [1209, 5, 1, "", "parallel_or"], [1210, 5, 1, "", "rebind_unbacked"], [1211, 5, 1, "", "resolve_unbacked_bindings"], [1212, 5, 1, "", "statically_known_true"], [1213, 5, 1, "", "sym_eq"]], "torch.fx.experimental.symbolic_shapes.CallMethodKey": [[1179, 3, 1, "", "get"]], "torch.fx.experimental.symbolic_shapes.ConvertIntKey": [[1180, 3, 1, "", "get"]], "torch.fx.experimental.symbolic_shapes.DimConstraints": [[1181, 3, 1, "", "add"], [1181, 3, 1, "", "add_equality"], [1181, 3, 1, "", "forced_specializations"], [1181, 3, 1, "", "prettify_results"], [1181, 3, 1, "", "remove_redundant_dynamic_results"], [1181, 3, 1, "", "rewrite_with_congruences"], [1181, 3, 1, "", "solve"]], "torch.fx.experimental.symbolic_shapes.DivideByKey": [[1183, 3, 1, "", "get"]], "torch.fx.experimental.symbolic_shapes.InnerTensorKey": [[1185, 3, 1, "", "get"]], "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts": [[1186, 3, 1, "", "boxed_run"], [1186, 3, 1, "", "call_function"], [1186, 3, 1, "", "call_method"], [1186, 3, 1, "", "call_module"], [1186, 3, 1, "", "fetch_args_kwargs_from_env"], [1186, 3, 1, "", "fetch_attr"], [1186, 3, 1, "", "get_attr"], [1186, 3, 1, "", "map_nodes_to_values"], [1186, 3, 1, "", "output"], [1186, 3, 1, "", "placeholder"], [1186, 3, 1, "", "run"], [1186, 3, 1, "", "run_node"]], "torch.fx.experimental.symbolic_shapes.ShapeEnv": [[1188, 3, 1, "", "add_var_to_val"], [1188, 3, 1, "", "bind_symbols"], [1188, 3, 1, "", "bound_sympy"], [1188, 3, 1, "", "check_equal"], [1188, 3, 1, "", "cleanup"], [1188, 3, 1, "", "create_symbol"], [1188, 3, 1, "", 
"create_symbolic_sizes_strides_storage_offset"], [1188, 3, 1, "", "create_symboolnode"], [1188, 3, 1, "", "create_symfloatnode"], [1188, 3, 1, "", "create_symintnode"], [1188, 3, 1, "", "create_unbacked_symbool"], [1188, 3, 1, "", "create_unbacked_symfloat"], [1188, 3, 1, "", "create_unbacked_symint"], [1188, 3, 1, "", "create_unspecified_symbol"], [1188, 3, 1, "", "create_unspecified_symint_and_symbol"], [1188, 3, 1, "", "defer_runtime_assert"], [1188, 3, 1, "", "evaluate_expr"], [1188, 3, 1, "", "evaluate_guards_expression"], [1188, 3, 1, "", "evaluate_guards_for_args"], [1188, 3, 1, "", "format_guards"], [1188, 3, 1, "", "freeze"], [1188, 3, 1, "", "freeze_runtime_asserts"], [1188, 3, 1, "", "get_axioms"], [1188, 3, 1, "", "get_implications"], [1188, 3, 1, "", "get_nontrivial_guards"], [1188, 3, 1, "", "get_pruned_guards"], [1188, 3, 1, "", "ignore_fresh_unbacked_symbols"], [1188, 3, 1, "", "is_unbacked_symint"], [1188, 3, 1, "", "produce_guards"], [1188, 3, 1, "", "produce_guards_expression"], [1188, 3, 1, "", "replace"], [1188, 3, 1, "", "set_unbacked_var_to_val"], [1188, 3, 1, "", "simplify"], [1188, 3, 1, "", "size_hint"], [1188, 3, 1, "", "suppress_guards"]], "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint": [[1192, 3, 1, "", "render"]], "torch.fx.experimental.unification": [[64, 0, 0, "-", "core"], [64, 0, 0, "-", "dispatch"], [64, 0, 0, "-", "match"], [64, 0, 0, "-", "more"], [64, 0, 0, "-", "multipledispatch"], [64, 0, 0, "-", "unification_tools"], [64, 0, 0, "-", "utils"], [64, 0, 0, "-", "variable"]], "torch.fx.experimental.unification.multipledispatch": [[64, 0, 0, "-", "conflict"], [64, 0, 0, "-", "core"], [64, 0, 0, "-", "dispatcher"], [64, 0, 0, "-", "utils"], [64, 0, 0, "-", "variadic"]], "torch.fx.passes": [[64, 0, 0, "-", "annotate_getitem_nodes"], [64, 0, 0, "-", "backends"], [64, 0, 0, "-", "dialect"], [64, 0, 0, "-", "fake_tensor_prop"], [64, 0, 0, "-", "graph_drawer"], [64, 0, 0, "-", "graph_manipulation"], [64, 0, 0, "-", "graph_transform_observer"], [64, 0, 0, "-", "infra"], [64, 0, 0, "-", "net_min_base"], [64, 0, 0, "-", "operator_support"], [64, 0, 0, "-", "param_fetch"], [64, 0, 0, "-", "pass_manager"], [64, 0, 0, "-", "reinplace"], [64, 0, 0, "-", "runtime_assert"], [64, 0, 0, "-", "shape_prop"], [64, 0, 0, "-", "split_module"], [64, 0, 0, "-", "split_utils"], [64, 0, 0, "-", "splitter_base"], [64, 0, 0, "-", "tests"], [64, 0, 0, "-", "tools_common"], [64, 0, 0, "-", "utils"]], "torch.fx.passes.backends": [[64, 0, 0, "-", "cudagraphs"]], "torch.fx.passes.dialect": [[64, 0, 0, "-", "common"]], "torch.fx.passes.dialect.common": [[64, 0, 0, "-", "cse_pass"]], "torch.fx.passes.infra": [[64, 0, 0, "-", "partitioner"], [64, 0, 0, "-", "pass_base"], [64, 0, 0, "-", "pass_manager"]], "torch.fx.passes.tests": [[64, 0, 0, "-", "test_pass_manager"]], "torch.fx.passes.utils": [[64, 0, 0, "-", "common"], [64, 0, 0, "-", "fuser_utils"], [64, 0, 0, "-", "matcher_utils"], [64, 0, 0, "-", "matcher_with_name_node_map_utils"], [64, 0, 0, "-", "source_matcher_utils"]], "torch.hub": [[2012, 5, 1, "", "download_url_to_file"], [2012, 5, 1, "", "get_dir"], [2012, 5, 1, "", "help"], [2012, 5, 1, "", "list"], [2012, 5, 1, "", "load"], [2012, 5, 1, "", "load_state_dict_from_url"], [2012, 5, 1, "", "set_dir"]], "torch.jit": [[1271, 1, 1, "", "Attribute"], [1272, 1, 1, "", "ScriptFunction"], [1273, 1, 1, "", "ScriptModule"], [1274, 5, 1, "", "annotate"], [2014, 0, 0, "-", "annotations"], [1275, 5, 1, "", "enable_onednn_fusion"], [2014, 5, 1, "", "export"], [1276, 5, 1, "", 
"fork"], [1277, 5, 1, "", "freeze"], [2014, 0, 0, "-", "frontend"], [2014, 0, 0, "-", "generate_bytecode"], [1278, 5, 1, "", "ignore"], [1279, 5, 1, "", "interface"], [2016, 5, 1, "", "is_scripting"], [2016, 5, 1, "", "is_tracing"], [1280, 5, 1, "", "isinstance"], [1281, 5, 1, "", "load"], [2014, 0, 0, "-", "mobile"], [1282, 5, 1, "", "onednn_fusion_enabled"], [1283, 5, 1, "", "optimize_for_inference"], [2014, 0, 0, "-", "quantized"], [1284, 5, 1, "", "save"], [1285, 5, 1, "", "script"], [1286, 5, 1, "", "script_if_tracing"], [1287, 5, 1, "", "set_fusion_strategy"], [1288, 1, 1, "", "strict_fusion"], [2015, 0, 0, "-", "supported_ops"], [1289, 5, 1, "", "trace"], [1290, 5, 1, "", "trace_module"], [2019, 0, 0, "-", "unsupported_tensor_ops"], [1291, 5, 1, "", "unused"], [1292, 5, 1, "", "wait"]], "torch.jit.Attribute": [[1271, 3, 1, "", "count"], [1271, 3, 1, "", "index"], [1271, 2, 1, "", "type"], [1271, 2, 1, "", "value"]], "torch.jit.ScriptFunction": [[1272, 3, 1, "", "get_debug_state"], [1272, 3, 1, "", "save"], [1272, 3, 1, "", "save_to_buffer"]], "torch.jit.ScriptModule": [[1273, 3, 1, "", "add_module"], [1273, 3, 1, "", "apply"], [1273, 3, 1, "", "bfloat16"], [1273, 3, 1, "", "buffers"], [1273, 3, 1, "", "children"], [1273, 4, 1, "", "code"], [1273, 4, 1, "", "code_with_constants"], [1273, 3, 1, "", "compile"], [1273, 3, 1, "", "cpu"], [1273, 3, 1, "", "cuda"], [1273, 3, 1, "", "double"], [1273, 3, 1, "", "eval"], [1273, 3, 1, "", "extra_repr"], [1273, 3, 1, "", "float"], [1273, 3, 1, "", "get_buffer"], [1273, 3, 1, "", "get_extra_state"], [1273, 3, 1, "", "get_parameter"], [1273, 3, 1, "", "get_submodule"], [1273, 4, 1, "", "graph"], [1273, 3, 1, "", "half"], [1273, 4, 1, "", "inlined_graph"], [1273, 3, 1, "", "ipu"], [1273, 3, 1, "", "load_state_dict"], [1273, 3, 1, "", "modules"], [1273, 3, 1, "", "named_buffers"], [1273, 3, 1, "", "named_children"], [1273, 3, 1, "", "named_modules"], [1273, 3, 1, "", "named_parameters"], [1273, 3, 1, "", "parameters"], [1273, 3, 1, "", "register_backward_hook"], [1273, 3, 1, "", "register_buffer"], [1273, 3, 1, "", "register_forward_hook"], [1273, 3, 1, "", "register_forward_pre_hook"], [1273, 3, 1, "", "register_full_backward_hook"], [1273, 3, 1, "", "register_full_backward_pre_hook"], [1273, 3, 1, "", "register_load_state_dict_post_hook"], [1273, 3, 1, "", "register_module"], [1273, 3, 1, "", "register_parameter"], [1273, 3, 1, "", "register_state_dict_pre_hook"], [1273, 3, 1, "", "requires_grad_"], [1273, 3, 1, "", "save"], [1273, 3, 1, "", "set_extra_state"], [1273, 3, 1, "", "share_memory"], [1273, 3, 1, "", "state_dict"], [1273, 3, 1, "", "to"], [1273, 3, 1, "", "to_empty"], [1273, 3, 1, "", "train"], [1273, 3, 1, "", "type"], [1273, 3, 1, "", "xpu"], [1273, 3, 1, "", "zero_grad"]], "torch.library": [[2021, 1, 1, "", "Library"], [2021, 5, 1, "", "custom_op"], [2021, 5, 1, "", "define"], [2021, 5, 1, "", "fallthrough_kernel"], [2021, 5, 1, "", "get_ctx"], [2021, 5, 1, "", "impl"], [2021, 5, 1, "", "impl_abstract"], [2021, 5, 1, "", "opcheck"], [2021, 5, 1, "", "register_autograd"], [2021, 5, 1, "", "register_fake"], [2021, 5, 1, "", "register_kernel"]], "torch.library.Library": [[2021, 3, 1, "", "define"], [2021, 3, 1, "", "impl"]], "torch.linalg": [[1303, 5, 1, "", "cholesky"], [1304, 5, 1, "", "cholesky_ex"], [1305, 5, 1, "", "cond"], [1306, 5, 1, "", "cross"], [1307, 5, 1, "", "det"], [1308, 5, 1, "", "diagonal"], [1309, 5, 1, "", "eig"], [1310, 5, 1, "", "eigh"], [1311, 5, 1, "", "eigvals"], [1312, 5, 1, "", "eigvalsh"], [1313, 5, 1, "", 
"householder_product"], [1314, 5, 1, "", "inv"], [1315, 5, 1, "", "inv_ex"], [1316, 5, 1, "", "ldl_factor"], [1317, 5, 1, "", "ldl_factor_ex"], [1318, 5, 1, "", "ldl_solve"], [1319, 5, 1, "", "lstsq"], [1320, 5, 1, "", "lu"], [1321, 5, 1, "", "lu_factor"], [1322, 5, 1, "", "lu_factor_ex"], [1323, 5, 1, "", "lu_solve"], [1324, 5, 1, "", "matmul"], [1325, 5, 1, "", "matrix_exp"], [1326, 5, 1, "", "matrix_norm"], [1327, 5, 1, "", "matrix_power"], [1328, 5, 1, "", "matrix_rank"], [1329, 5, 1, "", "multi_dot"], [1330, 5, 1, "", "norm"], [1331, 5, 1, "", "pinv"], [1332, 5, 1, "", "qr"], [1333, 5, 1, "", "slogdet"], [1334, 5, 1, "", "solve"], [1335, 5, 1, "", "solve_ex"], [1336, 5, 1, "", "solve_triangular"], [1337, 5, 1, "", "svd"], [1338, 5, 1, "", "svdvals"], [1339, 5, 1, "", "tensorinv"], [1340, 5, 1, "", "tensorsolve"], [1341, 5, 1, "", "vander"], [1342, 5, 1, "", "vecdot"], [1343, 5, 1, "", "vector_norm"]], "torch.masked": [[2024, 0, 0, "-", "maskedtensor"]], "torch.masked.maskedtensor": [[2024, 0, 0, "-", "binary"], [2024, 0, 0, "-", "core"], [2024, 0, 0, "-", "creation"], [2024, 0, 0, "-", "passthrough"], [2024, 0, 0, "-", "reductions"], [2024, 0, 0, "-", "unary"]], "torch.monitor": [[2030, 1, 1, "", "Aggregation"], [2030, 1, 1, "", "Event"], [2030, 1, 1, "", "EventHandlerHandle"], [2030, 1, 1, "", "Stat"], [2030, 1, 1, "", "TensorboardEventHandler"], [2030, 1, 1, "", "data_value_t"], [2030, 5, 1, "", "log_event"], [2030, 5, 1, "", "register_event_handler"], [2030, 5, 1, "", "unregister_event_handler"]], "torch.monitor.Aggregation": [[2030, 4, 1, "", "name"]], "torch.monitor.Event": [[2030, 3, 1, "", "__init__"], [2030, 4, 1, "", "data"], [2030, 4, 1, "", "name"], [2030, 4, 1, "", "timestamp"]], "torch.monitor.Stat": [[2030, 3, 1, "", "__init__"], [2030, 3, 1, "", "add"], [2030, 4, 1, "", "count"], [2030, 3, 1, "", "get"], [2030, 4, 1, "", "name"]], "torch.monitor.TensorboardEventHandler": [[2030, 3, 1, "", "__init__"]], "torch.mps": [[1382, 5, 1, "", "current_allocated_memory"], [1383, 5, 1, "", "device_count"], [1384, 5, 1, "", "driver_allocated_memory"], [1385, 5, 1, "", "empty_cache"], [2031, 0, 0, "-", "event"], [1387, 5, 1, "", "get_rng_state"], [1388, 5, 1, "", "manual_seed"], [2031, 0, 0, "-", "profiler"], [1392, 5, 1, "", "seed"], [1393, 5, 1, "", "set_per_process_memory_fraction"], [1394, 5, 1, "", "set_rng_state"], [1395, 5, 1, "", "synchronize"]], "torch.mps.event": [[1386, 1, 1, "", "Event"]], "torch.mps.event.Event": [[1386, 3, 1, "", "elapsed_time"], [1386, 3, 1, "", "query"], [1386, 3, 1, "", "record"], [1386, 3, 1, "", "synchronize"], [1386, 3, 1, "", "wait"]], "torch.mps.profiler": [[1389, 5, 1, "", "profile"], [1390, 5, 1, "", "start"], [1391, 5, 1, "", "stop"]], "torch.mtia": [[1397, 6, 1, "", "DeferredMtiaCallError"], [1398, 1, 1, "", "Event"], [1399, 1, 1, "", "Stream"], [1400, 1, 1, "", "StreamContext"], [1401, 5, 1, "", "current_device"], [1402, 5, 1, "", "current_stream"], [1403, 5, 1, "", "default_stream"], [1404, 1, 1, "", "device"], [1405, 5, 1, "", "device_count"], [1406, 5, 1, "", "init"], [1407, 5, 1, "", "is_available"], [1408, 5, 1, "", "is_initialized"], [1409, 5, 1, "", "set_stream"], [1410, 5, 1, "", "stream"], [1411, 5, 1, "", "synchronize"]], "torch.multiprocessing": [[2033, 1, 1, "", "SpawnContext"], [2033, 5, 1, "", "get_all_sharing_strategies"], [2033, 5, 1, "", "get_sharing_strategy"], [2033, 0, 0, "-", "pool"], [2033, 0, 0, "-", "queue"], [2033, 0, 0, "-", "reductions"], [2033, 5, 1, "", "set_sharing_strategy"], [2033, 0, 0, "-", "spawn"]], 
"torch.multiprocessing.SpawnContext": [[2033, 3, 1, "", "join"]], "torch.multiprocessing.spawn": [[2033, 5, 1, "", "spawn"]], "torch.nested": [[2036, 5, 1, "", "as_nested_tensor"], [2036, 5, 1, "", "nested_tensor"], [2036, 5, 1, "", "to_padded_tensor"]], "torch.nn": [[1428, 1, 1, "", "AdaptiveAvgPool1d"], [1429, 1, 1, "", "AdaptiveAvgPool2d"], [1430, 1, 1, "", "AdaptiveAvgPool3d"], [1431, 1, 1, "", "AdaptiveLogSoftmaxWithLoss"], [1432, 1, 1, "", "AdaptiveMaxPool1d"], [1433, 1, 1, "", "AdaptiveMaxPool2d"], [1434, 1, 1, "", "AdaptiveMaxPool3d"], [1435, 1, 1, "", "AlphaDropout"], [1436, 1, 1, "", "AvgPool1d"], [1437, 1, 1, "", "AvgPool2d"], [1438, 1, 1, "", "AvgPool3d"], [1439, 1, 1, "", "BCELoss"], [1440, 1, 1, "", "BCEWithLogitsLoss"], [1441, 1, 1, "", "BatchNorm1d"], [1442, 1, 1, "", "BatchNorm2d"], [1443, 1, 1, "", "BatchNorm3d"], [1444, 1, 1, "", "Bilinear"], [1445, 1, 1, "", "CELU"], [1446, 1, 1, "", "CTCLoss"], [1447, 1, 1, "", "ChannelShuffle"], [1448, 1, 1, "", "CircularPad1d"], [1449, 1, 1, "", "CircularPad2d"], [1450, 1, 1, "", "CircularPad3d"], [1451, 1, 1, "", "ConstantPad1d"], [1452, 1, 1, "", "ConstantPad2d"], [1453, 1, 1, "", "ConstantPad3d"], [1454, 1, 1, "", "Conv1d"], [1455, 1, 1, "", "Conv2d"], [1456, 1, 1, "", "Conv3d"], [1457, 1, 1, "", "ConvTranspose1d"], [1458, 1, 1, "", "ConvTranspose2d"], [1459, 1, 1, "", "ConvTranspose3d"], [1460, 1, 1, "", "CosineEmbeddingLoss"], [1461, 1, 1, "", "CosineSimilarity"], [1462, 1, 1, "", "CrossEntropyLoss"], [1463, 1, 1, "", "DataParallel"], [1464, 1, 1, "", "Dropout"], [1465, 1, 1, "", "Dropout1d"], [1466, 1, 1, "", "Dropout2d"], [1467, 1, 1, "", "Dropout3d"], [1468, 1, 1, "", "ELU"], [1469, 1, 1, "", "Embedding"], [1470, 1, 1, "", "EmbeddingBag"], [1471, 1, 1, "", "FeatureAlphaDropout"], [1472, 1, 1, "", "Flatten"], [1473, 1, 1, "", "Fold"], [1474, 1, 1, "", "FractionalMaxPool2d"], [1475, 1, 1, "", "FractionalMaxPool3d"], [1476, 1, 1, "", "GELU"], [1477, 1, 1, "", "GLU"], [1478, 1, 1, "", "GRU"], [1479, 1, 1, "", "GRUCell"], [1480, 1, 1, "", "GaussianNLLLoss"], [1481, 1, 1, "", "GroupNorm"], [1482, 1, 1, "", "Hardshrink"], [1483, 1, 1, "", "Hardsigmoid"], [1484, 1, 1, "", "Hardswish"], [1485, 1, 1, "", "Hardtanh"], [1486, 1, 1, "", "HingeEmbeddingLoss"], [1487, 1, 1, "", "HuberLoss"], [1488, 1, 1, "", "Identity"], [1489, 1, 1, "", "InstanceNorm1d"], [1490, 1, 1, "", "InstanceNorm2d"], [1491, 1, 1, "", "InstanceNorm3d"], [1492, 1, 1, "", "KLDivLoss"], [1493, 1, 1, "", "L1Loss"], [1494, 1, 1, "", "LPPool1d"], [1495, 1, 1, "", "LPPool2d"], [1496, 1, 1, "", "LPPool3d"], [1497, 1, 1, "", "LSTM"], [1498, 1, 1, "", "LSTMCell"], [1499, 1, 1, "", "LayerNorm"], [1500, 1, 1, "", "LazyBatchNorm1d"], [1501, 1, 1, "", "LazyBatchNorm2d"], [1502, 1, 1, "", "LazyBatchNorm3d"], [1503, 1, 1, "", "LazyConv1d"], [1504, 1, 1, "", "LazyConv2d"], [1505, 1, 1, "", "LazyConv3d"], [1506, 1, 1, "", "LazyConvTranspose1d"], [1507, 1, 1, "", "LazyConvTranspose2d"], [1508, 1, 1, "", "LazyConvTranspose3d"], [1509, 1, 1, "", "LazyInstanceNorm1d"], [1510, 1, 1, "", "LazyInstanceNorm2d"], [1511, 1, 1, "", "LazyInstanceNorm3d"], [1512, 1, 1, "", "LazyLinear"], [1513, 1, 1, "", "LeakyReLU"], [1514, 1, 1, "", "Linear"], [1515, 1, 1, "", "LocalResponseNorm"], [1516, 1, 1, "", "LogSigmoid"], [1517, 1, 1, "", "LogSoftmax"], [1518, 1, 1, "", "MSELoss"], [1519, 1, 1, "", "MarginRankingLoss"], [1520, 1, 1, "", "MaxPool1d"], [1521, 1, 1, "", "MaxPool2d"], [1522, 1, 1, "", "MaxPool3d"], [1523, 1, 1, "", "MaxUnpool1d"], [1524, 1, 1, "", "MaxUnpool2d"], [1525, 1, 1, "", 
"MaxUnpool3d"], [1526, 1, 1, "", "Mish"], [1527, 1, 1, "", "Module"], [1528, 1, 1, "", "ModuleDict"], [1529, 1, 1, "", "ModuleList"], [1530, 1, 1, "", "MultiLabelMarginLoss"], [1531, 1, 1, "", "MultiLabelSoftMarginLoss"], [1532, 1, 1, "", "MultiMarginLoss"], [1533, 1, 1, "", "MultiheadAttention"], [1534, 1, 1, "", "NLLLoss"], [1535, 1, 1, "", "PReLU"], [1536, 1, 1, "", "PairwiseDistance"], [1537, 1, 1, "", "ParameterDict"], [1538, 1, 1, "", "ParameterList"], [1539, 1, 1, "", "PixelShuffle"], [1540, 1, 1, "", "PixelUnshuffle"], [1541, 1, 1, "", "PoissonNLLLoss"], [1542, 1, 1, "", "RMSNorm"], [1543, 1, 1, "", "RNN"], [1544, 1, 1, "", "RNNBase"], [1545, 1, 1, "", "RNNCell"], [1546, 1, 1, "", "RReLU"], [1547, 1, 1, "", "ReLU"], [1548, 1, 1, "", "ReLU6"], [1549, 1, 1, "", "ReflectionPad1d"], [1550, 1, 1, "", "ReflectionPad2d"], [1551, 1, 1, "", "ReflectionPad3d"], [1552, 1, 1, "", "ReplicationPad1d"], [1553, 1, 1, "", "ReplicationPad2d"], [1554, 1, 1, "", "ReplicationPad3d"], [1555, 1, 1, "", "SELU"], [1556, 1, 1, "", "Sequential"], [1557, 1, 1, "", "SiLU"], [1558, 1, 1, "", "Sigmoid"], [1559, 1, 1, "", "SmoothL1Loss"], [1560, 1, 1, "", "SoftMarginLoss"], [1561, 1, 1, "", "Softmax"], [1562, 1, 1, "", "Softmax2d"], [1563, 1, 1, "", "Softmin"], [1564, 1, 1, "", "Softplus"], [1565, 1, 1, "", "Softshrink"], [1566, 1, 1, "", "Softsign"], [1567, 1, 1, "", "SyncBatchNorm"], [1568, 1, 1, "", "Tanh"], [1569, 1, 1, "", "Tanhshrink"], [1570, 1, 1, "", "Threshold"], [1571, 1, 1, "", "Transformer"], [1572, 1, 1, "", "TransformerDecoder"], [1573, 1, 1, "", "TransformerDecoderLayer"], [1574, 1, 1, "", "TransformerEncoder"], [1575, 1, 1, "", "TransformerEncoderLayer"], [1576, 1, 1, "", "TripletMarginLoss"], [1577, 1, 1, "", "TripletMarginWithDistanceLoss"], [1578, 1, 1, "", "Unflatten"], [1579, 1, 1, "", "Unfold"], [1580, 1, 1, "", "Upsample"], [1581, 1, 1, "", "UpsamplingBilinear2d"], [1582, 1, 1, "", "UpsamplingNearest2d"], [1583, 1, 1, "", "ZeroPad1d"], [1584, 1, 1, "", "ZeroPad2d"], [1585, 1, 1, "", "ZeroPad3d"], [2038, 0, 0, "-", "attention"], [2037, 0, 0, "-", "backends"], [2037, 0, 0, "-", "common_types"], [2037, 0, 0, "-", "cpp"], [2037, 0, 0, "-", "functional"], [2037, 0, 0, "-", "grad"], [2037, 0, 0, "-", "init"], [2075, 0, 0, "-", "intrinsic"], [2037, 0, 0, "-", "modules"], [2037, 0, 0, "-", "parallel"], [2037, 0, 0, "-", "parameter"], [2075, 0, 0, "-", "qat"], [2075, 0, 0, "-", "quantizable"], [2075, 0, 0, "-", "quantized"], [2037, 0, 0, "-", "utils"]], "torch.nn.AdaptiveLogSoftmaxWithLoss": [[1431, 3, 1, "", "log_prob"], [1431, 3, 1, "", "predict"]], "torch.nn.Embedding": [[1469, 3, 1, "", "from_pretrained"]], "torch.nn.EmbeddingBag": [[1470, 3, 1, "", "forward"], [1470, 3, 1, "", "from_pretrained"]], "torch.nn.LazyBatchNorm1d": [[1500, 2, 1, "", "cls_to_become"]], "torch.nn.LazyBatchNorm2d": [[1501, 2, 1, "", "cls_to_become"]], "torch.nn.LazyBatchNorm3d": [[1502, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConv1d": [[1503, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConv2d": [[1504, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConv3d": [[1505, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConvTranspose1d": [[1506, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConvTranspose2d": [[1507, 2, 1, "", "cls_to_become"]], "torch.nn.LazyConvTranspose3d": [[1508, 2, 1, "", "cls_to_become"]], "torch.nn.LazyInstanceNorm1d": [[1509, 2, 1, "", "cls_to_become"]], "torch.nn.LazyInstanceNorm2d": [[1510, 2, 1, "", "cls_to_become"]], "torch.nn.LazyInstanceNorm3d": [[1511, 2, 1, "", "cls_to_become"]], "torch.nn.LazyLinear": 
[[1512, 2, 1, "", "cls_to_become"]], "torch.nn.Module": [[1527, 3, 1, "", "add_module"], [1527, 3, 1, "", "apply"], [1527, 3, 1, "", "bfloat16"], [1527, 3, 1, "", "buffers"], [1527, 3, 1, "", "children"], [1527, 3, 1, "", "compile"], [1527, 3, 1, "", "cpu"], [1527, 3, 1, "", "cuda"], [1527, 3, 1, "", "double"], [1527, 3, 1, "", "eval"], [1527, 3, 1, "", "extra_repr"], [1527, 3, 1, "", "float"], [1527, 3, 1, "", "forward"], [1527, 3, 1, "", "get_buffer"], [1527, 3, 1, "", "get_extra_state"], [1527, 3, 1, "", "get_parameter"], [1527, 3, 1, "", "get_submodule"], [1527, 3, 1, "", "half"], [1527, 3, 1, "", "ipu"], [1527, 3, 1, "", "load_state_dict"], [1527, 3, 1, "", "modules"], [1527, 3, 1, "", "named_buffers"], [1527, 3, 1, "", "named_children"], [1527, 3, 1, "", "named_modules"], [1527, 3, 1, "", "named_parameters"], [1527, 3, 1, "", "parameters"], [1527, 3, 1, "", "register_backward_hook"], [1527, 3, 1, "", "register_buffer"], [1527, 3, 1, "", "register_forward_hook"], [1527, 3, 1, "", "register_forward_pre_hook"], [1527, 3, 1, "", "register_full_backward_hook"], [1527, 3, 1, "", "register_full_backward_pre_hook"], [1527, 3, 1, "", "register_load_state_dict_post_hook"], [1527, 3, 1, "", "register_module"], [1527, 3, 1, "", "register_parameter"], [1527, 3, 1, "", "register_state_dict_pre_hook"], [1527, 3, 1, "", "requires_grad_"], [1527, 3, 1, "", "set_extra_state"], [1527, 3, 1, "", "share_memory"], [1527, 3, 1, "", "state_dict"], [1527, 3, 1, "", "to"], [1527, 3, 1, "", "to_empty"], [1527, 3, 1, "", "train"], [1527, 3, 1, "", "type"], [1527, 3, 1, "", "xpu"], [1527, 3, 1, "", "zero_grad"]], "torch.nn.ModuleDict": [[1528, 3, 1, "", "clear"], [1528, 3, 1, "", "items"], [1528, 3, 1, "", "keys"], [1528, 3, 1, "", "pop"], [1528, 3, 1, "", "update"], [1528, 3, 1, "", "values"]], "torch.nn.ModuleList": [[1529, 3, 1, "", "append"], [1529, 3, 1, "", "extend"], [1529, 3, 1, "", "insert"]], "torch.nn.MultiheadAttention": [[1533, 3, 1, "", "forward"], [1533, 3, 1, "", "merge_masks"]], "torch.nn.ParameterDict": [[1537, 3, 1, "", "clear"], [1537, 3, 1, "", "copy"], [1537, 3, 1, "", "fromkeys"], [1537, 3, 1, "", "get"], [1537, 3, 1, "", "items"], [1537, 3, 1, "", "keys"], [1537, 3, 1, "", "pop"], [1537, 3, 1, "", "popitem"], [1537, 3, 1, "", "setdefault"], [1537, 3, 1, "", "update"], [1537, 3, 1, "", "values"]], "torch.nn.ParameterList": [[1538, 3, 1, "", "append"], [1538, 3, 1, "", "extend"]], "torch.nn.RMSNorm": [[1542, 3, 1, "", "extra_repr"], [1542, 3, 1, "", "forward"], [1542, 3, 1, "", "reset_parameters"]], "torch.nn.RNNBase": [[1544, 3, 1, "", "flatten_parameters"]], "torch.nn.Sequential": [[1556, 3, 1, "", "append"]], "torch.nn.SyncBatchNorm": [[1567, 3, 1, "", "convert_sync_batchnorm"]], "torch.nn.Transformer": [[1571, 3, 1, "", "forward"], [1571, 3, 1, "", "generate_square_subsequent_mask"]], "torch.nn.TransformerDecoder": [[1572, 3, 1, "", "forward"]], "torch.nn.TransformerDecoderLayer": [[1573, 3, 1, "", "forward"]], "torch.nn.TransformerEncoder": [[1574, 3, 1, "", "forward"]], "torch.nn.TransformerEncoderLayer": [[1575, 3, 1, "", "forward"]], "torch.nn.attention": [[1586, 1, 1, "", "SDPBackend"], [2039, 0, 0, "-", "bias"], [1591, 5, 1, "", "sdpa_kernel"]], "torch.nn.attention.SDPBackend": [[1586, 4, 1, "", "name"]], "torch.nn.attention.bias": [[1587, 1, 1, "", "CausalBias"], [1588, 1, 1, "", "CausalVariant"], [1589, 5, 1, "", "causal_lower_right"], [1590, 5, 1, "", "causal_upper_left"]], "torch.nn.backends": [[2037, 0, 0, "-", "thnn"]], "torch.nn.functional": [[1592, 5, 1, "", 
"adaptive_avg_pool1d"], [1593, 5, 1, "", "adaptive_avg_pool2d"], [1594, 5, 1, "", "adaptive_avg_pool3d"], [1595, 5, 1, "", "adaptive_max_pool1d"], [1596, 5, 1, "", "adaptive_max_pool2d"], [1597, 5, 1, "", "adaptive_max_pool3d"], [1598, 5, 1, "", "affine_grid"], [1599, 5, 1, "", "alpha_dropout"], [1600, 5, 1, "", "avg_pool1d"], [1601, 5, 1, "", "avg_pool2d"], [1602, 5, 1, "", "avg_pool3d"], [1603, 5, 1, "", "batch_norm"], [1604, 5, 1, "", "bilinear"], [1605, 5, 1, "", "binary_cross_entropy"], [1606, 5, 1, "", "binary_cross_entropy_with_logits"], [1607, 5, 1, "", "celu"], [1608, 5, 1, "", "conv1d"], [1609, 5, 1, "", "conv2d"], [1610, 5, 1, "", "conv3d"], [1611, 5, 1, "", "conv_transpose1d"], [1612, 5, 1, "", "conv_transpose2d"], [1613, 5, 1, "", "conv_transpose3d"], [1614, 5, 1, "", "cosine_embedding_loss"], [1615, 5, 1, "", "cosine_similarity"], [1616, 5, 1, "", "cross_entropy"], [1617, 5, 1, "", "ctc_loss"], [1618, 5, 1, "", "dropout"], [1619, 5, 1, "", "dropout1d"], [1620, 5, 1, "", "dropout2d"], [1621, 5, 1, "", "dropout3d"], [1622, 5, 1, "", "elu"], [1623, 5, 1, "", "elu_"], [1624, 5, 1, "", "embedding"], [1625, 5, 1, "", "embedding_bag"], [1626, 5, 1, "", "feature_alpha_dropout"], [1627, 5, 1, "", "fold"], [1628, 5, 1, "", "fractional_max_pool2d"], [1629, 5, 1, "", "fractional_max_pool3d"], [1630, 5, 1, "", "gaussian_nll_loss"], [1631, 5, 1, "", "gelu"], [1632, 5, 1, "", "glu"], [1633, 5, 1, "", "grid_sample"], [1634, 5, 1, "", "group_norm"], [1635, 5, 1, "", "gumbel_softmax"], [1636, 5, 1, "", "hardshrink"], [1637, 5, 1, "", "hardsigmoid"], [1638, 5, 1, "", "hardswish"], [1639, 5, 1, "", "hardtanh"], [1640, 5, 1, "", "hardtanh_"], [1641, 5, 1, "", "hinge_embedding_loss"], [1642, 5, 1, "", "huber_loss"], [1643, 5, 1, "", "instance_norm"], [1644, 5, 1, "", "interpolate"], [1645, 5, 1, "", "kl_div"], [1646, 5, 1, "", "l1_loss"], [1647, 5, 1, "", "layer_norm"], [1648, 5, 1, "", "leaky_relu"], [1649, 5, 1, "", "leaky_relu_"], [1650, 5, 1, "", "linear"], [1651, 5, 1, "", "local_response_norm"], [1652, 5, 1, "", "log_softmax"], [1653, 5, 1, "", "logsigmoid"], [1654, 5, 1, "", "lp_pool1d"], [1655, 5, 1, "", "lp_pool2d"], [1656, 5, 1, "", "lp_pool3d"], [1657, 5, 1, "", "margin_ranking_loss"], [1658, 5, 1, "", "max_pool1d"], [1659, 5, 1, "", "max_pool2d"], [1660, 5, 1, "", "max_pool3d"], [1661, 5, 1, "", "max_unpool1d"], [1662, 5, 1, "", "max_unpool2d"], [1663, 5, 1, "", "max_unpool3d"], [1664, 5, 1, "", "mish"], [1665, 5, 1, "", "mse_loss"], [1666, 5, 1, "", "multi_margin_loss"], [1667, 5, 1, "", "multilabel_margin_loss"], [1668, 5, 1, "", "multilabel_soft_margin_loss"], [1669, 5, 1, "", "nll_loss"], [1670, 5, 1, "", "normalize"], [1671, 5, 1, "", "one_hot"], [1672, 5, 1, "", "pad"], [1673, 5, 1, "", "pairwise_distance"], [1674, 5, 1, "", "pdist"], [1675, 5, 1, "", "pixel_shuffle"], [1676, 5, 1, "", "pixel_unshuffle"], [1677, 5, 1, "", "poisson_nll_loss"], [1678, 5, 1, "", "prelu"], [1679, 5, 1, "", "relu"], [1680, 5, 1, "", "relu6"], [1681, 5, 1, "", "relu_"], [1682, 5, 1, "", "rms_norm"], [1683, 5, 1, "", "rrelu"], [1684, 5, 1, "", "rrelu_"], [1685, 5, 1, "", "scaled_dot_product_attention"], [1686, 5, 1, "", "selu"], [1687, 5, 1, "", "sigmoid"], [1688, 5, 1, "", "silu"], [1689, 5, 1, "", "smooth_l1_loss"], [1690, 5, 1, "", "soft_margin_loss"], [1691, 5, 1, "", "softmax"], [1692, 5, 1, "", "softmin"], [1693, 5, 1, "", "softplus"], [1694, 5, 1, "", "softshrink"], [1695, 5, 1, "", "softsign"], [1696, 5, 1, "", "tanh"], [1697, 5, 1, "", "tanhshrink"], [1698, 5, 1, "", "threshold"], [1699, 5, 1, 
"", "threshold_"], [1701, 5, 1, "", "triplet_margin_loss"], [1702, 5, 1, "", "triplet_margin_with_distance_loss"], [1703, 5, 1, "", "unfold"], [1704, 5, 1, "", "upsample"], [1705, 5, 1, "", "upsample_bilinear"], [1706, 5, 1, "", "upsample_nearest"]], "torch.nn.init": [[2041, 5, 1, "", "calculate_gain"], [2041, 5, 1, "", "constant_"], [2041, 5, 1, "", "dirac_"], [2041, 5, 1, "", "eye_"], [2041, 5, 1, "", "kaiming_normal_"], [2041, 5, 1, "", "kaiming_uniform_"], [2041, 5, 1, "", "normal_"], [2041, 5, 1, "", "ones_"], [2041, 5, 1, "", "orthogonal_"], [2041, 5, 1, "", "sparse_"], [2041, 5, 1, "", "trunc_normal_"], [2041, 5, 1, "", "uniform_"], [2041, 5, 1, "", "xavier_normal_"], [2041, 5, 1, "", "xavier_uniform_"], [2041, 5, 1, "", "zeros_"]], "torch.nn.intrinsic": [[2075, 0, 0, "-", "modules"], [2075, 0, 0, "-", "qat"], [2075, 0, 0, "-", "quantized"]], "torch.nn.intrinsic.modules": [[2072, 0, 0, "-", "fused"]], "torch.nn.intrinsic.qat": [[2075, 0, 0, "-", "modules"]], "torch.nn.intrinsic.qat.modules": [[2072, 0, 0, "-", "conv_fused"], [2072, 0, 0, "-", "linear_fused"], [2072, 0, 0, "-", "linear_relu"]], "torch.nn.intrinsic.quantized": [[2075, 0, 0, "-", "dynamic"], [2075, 0, 0, "-", "modules"]], "torch.nn.intrinsic.quantized.dynamic": [[2075, 0, 0, "-", "modules"]], "torch.nn.intrinsic.quantized.dynamic.modules": [[2072, 0, 0, "-", "linear_relu"]], "torch.nn.intrinsic.quantized.modules": [[2072, 0, 0, "-", "bn_relu"], [2072, 0, 0, "-", "conv_relu"], [2072, 0, 0, "-", "linear_relu"]], "torch.nn.modules": [[2037, 0, 0, "-", "activation"], [2037, 0, 0, "-", "adaptive"], [2037, 0, 0, "-", "batchnorm"], [2037, 0, 0, "-", "channelshuffle"], [2037, 0, 0, "-", "container"], [2037, 0, 0, "-", "conv"], [2037, 0, 0, "-", "distance"], [2037, 0, 0, "-", "dropout"], [2037, 0, 0, "-", "flatten"], [2037, 0, 0, "-", "fold"], [2037, 0, 0, "-", "instancenorm"], [2037, 0, 0, "-", "lazy"], [2037, 0, 0, "-", "linear"], [2037, 0, 0, "-", "loss"], [2037, 0, 0, "-", "module"], [2037, 0, 0, "-", "normalization"], [2037, 0, 0, "-", "padding"], [2037, 0, 0, "-", "pixelshuffle"], [2037, 0, 0, "-", "pooling"], [2037, 0, 0, "-", "rnn"], [2037, 0, 0, "-", "sparse"], [2037, 0, 0, "-", "transformer"], [2037, 0, 0, "-", "upsampling"], [2037, 0, 0, "-", "utils"]], "torch.nn.modules.lazy": [[1707, 1, 1, "", "LazyModuleMixin"]], "torch.nn.modules.lazy.LazyModuleMixin": [[1707, 3, 1, "", "has_uninitialized_params"], [1707, 3, 1, "", "initialize_parameters"]], "torch.nn.modules.module": [[1708, 5, 1, "", "register_module_backward_hook"], [1709, 5, 1, "", "register_module_buffer_registration_hook"], [1710, 5, 1, "", "register_module_forward_hook"], [1711, 5, 1, "", "register_module_forward_pre_hook"], [1712, 5, 1, "", "register_module_full_backward_hook"], [1713, 5, 1, "", "register_module_full_backward_pre_hook"], [1714, 5, 1, "", "register_module_module_registration_hook"], [1715, 5, 1, "", "register_module_parameter_registration_hook"]], "torch.nn.modules.normalization": [[1716, 1, 1, "", "RMSNorm"]], "torch.nn.modules.normalization.RMSNorm": [[1716, 3, 1, "", "extra_repr"], [1716, 3, 1, "", "forward"], [1716, 3, 1, "", "reset_parameters"]], "torch.nn.parallel": [[1717, 1, 1, "", "DistributedDataParallel"], [2037, 0, 0, "-", "comm"], [1700, 5, 1, "", "data_parallel"], [2037, 0, 0, "-", "distributed"], [2037, 0, 0, "-", "parallel_apply"], [2037, 0, 0, "-", "replicate"], [2037, 0, 0, "-", "scatter_gather"]], "torch.nn.parallel.DistributedDataParallel": [[1717, 3, 1, "", "join"], [1717, 3, 1, "", "join_hook"], [1717, 3, 1, "", 
"no_sync"], [1717, 3, 1, "", "register_comm_hook"]], "torch.nn.parameter": [[1718, 1, 1, "", "Parameter"], [1719, 1, 1, "", "UninitializedBuffer"], [1720, 1, 1, "", "UninitializedParameter"]], "torch.nn.parameter.UninitializedParameter": [[1720, 2, 1, "", "cls_to_become"]], "torch.nn.qat": [[2075, 0, 0, "-", "dynamic"], [2075, 0, 0, "-", "modules"]], "torch.nn.qat.dynamic": [[2075, 0, 0, "-", "modules"]], "torch.nn.qat.dynamic.modules": [[2072, 0, 0, "-", "linear"]], "torch.nn.qat.modules": [[2072, 0, 0, "-", "conv"], [2072, 0, 0, "-", "embedding_ops"], [2072, 0, 0, "-", "linear"]], "torch.nn.quantizable": [[2075, 0, 0, "-", "modules"]], "torch.nn.quantizable.modules": [[2072, 0, 0, "-", "activation"], [2072, 0, 0, "-", "rnn"]], "torch.nn.quantized": [[2075, 0, 0, "-", "dynamic"], [2072, 0, 0, "-", "functional"], [2075, 0, 0, "-", "modules"]], "torch.nn.quantized.dynamic": [[2075, 0, 0, "-", "modules"]], "torch.nn.quantized.dynamic.modules": [[2072, 0, 0, "-", "conv"], [2072, 0, 0, "-", "linear"], [2072, 0, 0, "-", "rnn"]], "torch.nn.quantized.modules": [[2072, 0, 0, "-", "activation"], [2072, 0, 0, "-", "batchnorm"], [2072, 0, 0, "-", "conv"], [2072, 0, 0, "-", "dropout"], [2072, 0, 0, "-", "embedding_ops"], [2072, 0, 0, "-", "functional_modules"], [2072, 0, 0, "-", "linear"], [2072, 0, 0, "-", "normalization"], [2072, 0, 0, "-", "rnn"], [2072, 0, 0, "-", "utils"]], "torch.nn.utils": [[2037, 0, 0, "-", "clip_grad"], [1721, 5, 1, "", "clip_grad_norm"], [1722, 5, 1, "", "clip_grad_norm_"], [1723, 5, 1, "", "clip_grad_value_"], [1724, 5, 1, "", "convert_conv2d_weight_memory_format"], [1725, 5, 1, "", "convert_conv3d_weight_memory_format"], [2037, 0, 0, "-", "convert_parameters"], [1726, 5, 1, "", "fuse_conv_bn_eval"], [1727, 5, 1, "", "fuse_conv_bn_weights"], [1728, 5, 1, "", "fuse_linear_bn_eval"], [1729, 5, 1, "", "fuse_linear_bn_weights"], [2037, 0, 0, "-", "fusion"], [2037, 0, 0, "-", "init"], [2037, 0, 0, "-", "memory_format"], [1730, 5, 1, "", "parameters_to_vector"], [2037, 0, 0, "-", "parametrizations"], [2037, 0, 0, "-", "parametrize"], [2037, 0, 0, "-", "prune"], [1756, 5, 1, "", "remove_spectral_norm"], [1757, 5, 1, "", "remove_weight_norm"], [2037, 0, 0, "-", "rnn"], [1765, 5, 1, "", "skip_init"], [1766, 5, 1, "", "spectral_norm"], [2037, 0, 0, "-", "stateless"], [1768, 5, 1, "", "vector_to_parameters"], [1769, 5, 1, "", "weight_norm"]], "torch.nn.utils.parametrizations": [[1731, 5, 1, "", "orthogonal"], [1732, 5, 1, "", "spectral_norm"], [1733, 5, 1, "", "weight_norm"]], "torch.nn.utils.parametrize": [[1734, 1, 1, "", "ParametrizationList"], [1735, 5, 1, "", "cached"], [1736, 5, 1, "", "is_parametrized"], [1737, 5, 1, "", "register_parametrization"], [1738, 5, 1, "", "remove_parametrizations"]], "torch.nn.utils.parametrize.ParametrizationList": [[1734, 3, 1, "", "right_inverse"]], "torch.nn.utils.prune": [[1739, 1, 1, "", "BasePruningMethod"], [1740, 1, 1, "", "CustomFromMask"], [1741, 1, 1, "", "Identity"], [1742, 1, 1, "", "L1Unstructured"], [1743, 1, 1, "", "LnStructured"], [1744, 1, 1, "", "PruningContainer"], [1745, 1, 1, "", "RandomStructured"], [1746, 1, 1, "", "RandomUnstructured"], [1747, 5, 1, "", "custom_from_mask"], [1748, 5, 1, "", "global_unstructured"], [1749, 5, 1, "", "identity"], [1750, 5, 1, "", "is_pruned"], [1751, 5, 1, "", "l1_unstructured"], [1752, 5, 1, "", "ln_structured"], [1753, 5, 1, "", "random_structured"], [1754, 5, 1, "", "random_unstructured"], [1755, 5, 1, "", "remove"]], "torch.nn.utils.prune.BasePruningMethod": [[1739, 3, 1, "", "apply"], 
[1739, 3, 1, "", "apply_mask"], [1739, 3, 1, "", "compute_mask"], [1739, 3, 1, "", "prune"], [1739, 3, 1, "", "remove"]], "torch.nn.utils.prune.CustomFromMask": [[1740, 3, 1, "", "apply"], [1740, 3, 1, "", "apply_mask"], [1740, 3, 1, "", "prune"], [1740, 3, 1, "", "remove"]], "torch.nn.utils.prune.Identity": [[1741, 3, 1, "", "apply"], [1741, 3, 1, "", "apply_mask"], [1741, 3, 1, "", "prune"], [1741, 3, 1, "", "remove"]], "torch.nn.utils.prune.L1Unstructured": [[1742, 3, 1, "", "apply"], [1742, 3, 1, "", "apply_mask"], [1742, 3, 1, "", "prune"], [1742, 3, 1, "", "remove"]], "torch.nn.utils.prune.LnStructured": [[1743, 3, 1, "", "apply"], [1743, 3, 1, "", "apply_mask"], [1743, 3, 1, "", "compute_mask"], [1743, 3, 1, "", "prune"], [1743, 3, 1, "", "remove"]], "torch.nn.utils.prune.PruningContainer": [[1744, 3, 1, "", "add_pruning_method"], [1744, 3, 1, "", "apply"], [1744, 3, 1, "", "apply_mask"], [1744, 3, 1, "", "compute_mask"], [1744, 3, 1, "", "prune"], [1744, 3, 1, "", "remove"]], "torch.nn.utils.prune.RandomStructured": [[1745, 3, 1, "", "apply"], [1745, 3, 1, "", "apply_mask"], [1745, 3, 1, "", "compute_mask"], [1745, 3, 1, "", "prune"], [1745, 3, 1, "", "remove"]], "torch.nn.utils.prune.RandomUnstructured": [[1746, 3, 1, "", "apply"], [1746, 3, 1, "", "apply_mask"], [1746, 3, 1, "", "prune"], [1746, 3, 1, "", "remove"]], "torch.nn.utils.rnn": [[1758, 1, 1, "", "PackedSequence"], [1759, 5, 1, "", "pack_padded_sequence"], [1760, 5, 1, "", "pack_sequence"], [1761, 5, 1, "", "pad_packed_sequence"], [1762, 5, 1, "", "pad_sequence"], [1763, 5, 1, "", "unpack_sequence"], [1764, 5, 1, "", "unpad_sequence"]], "torch.nn.utils.rnn.PackedSequence": [[1758, 2, 1, "", "batch_sizes"], [1758, 3, 1, "", "count"], [1758, 2, 1, "", "data"], [1758, 3, 1, "", "index"], [1758, 4, 1, "", "is_cuda"], [1758, 3, 1, "", "is_pinned"], [1758, 2, 1, "", "sorted_indices"], [1758, 3, 1, "", "to"], [1758, 2, 1, "", "unsorted_indices"]], "torch.nn.utils.stateless": [[1767, 5, 1, "", "functional_call"]], "torch.onnx": [[2065, 1, 1, "", "DiagnosticOptions"], [2065, 1, 1, "", "ExportOptions"], [2065, 1, 1, "", "InvalidExportOptionsError"], [1778, 1, 1, "", "JitScalarType"], [2065, 1, 1, "", "ONNXProgram"], [2065, 1, 1, "", "ONNXProgramSerializer"], [2065, 1, 1, "", "ONNXRuntimeOptions"], [2065, 1, 1, "", "OnnxExporterError"], [2065, 1, 1, "", "OnnxRegistry"], [2067, 5, 1, "", "disable_log"], [2065, 5, 1, "", "dynamo_export"], [2065, 5, 1, "", "enable_fake_mode"], [2067, 5, 1, "", "enable_log"], [2064, 0, 0, "-", "errors"], [2067, 5, 1, "", "export"], [2067, 5, 1, "", "export_to_pretty_string"], [2067, 5, 1, "", "is_in_onnx_export"], [2066, 5, 1, "", "is_onnxrt_backend_supported"], [2064, 0, 0, "-", "operators"], [2067, 5, 1, "", "register_custom_op_symbolic"], [2067, 5, 1, "", "select_model_mode_for_export"], [2064, 0, 0, "-", "symbolic_caffe2"], [2064, 0, 0, "-", "symbolic_helper"], [2064, 0, 0, "-", "symbolic_opset10"], [2064, 0, 0, "-", "symbolic_opset11"], [2064, 0, 0, "-", "symbolic_opset12"], [2064, 0, 0, "-", "symbolic_opset13"], [2064, 0, 0, "-", "symbolic_opset14"], [2064, 0, 0, "-", "symbolic_opset15"], [2064, 0, 0, "-", "symbolic_opset16"], [2064, 0, 0, "-", "symbolic_opset17"], [2064, 0, 0, "-", "symbolic_opset18"], [2064, 0, 0, "-", "symbolic_opset19"], [2064, 0, 0, "-", "symbolic_opset20"], [2064, 0, 0, "-", "symbolic_opset7"], [2064, 0, 0, "-", "symbolic_opset8"], [2064, 0, 0, "-", "symbolic_opset9"], [2067, 5, 1, "", "unregister_custom_op_symbolic"], [2064, 0, 0, "-", "utils"], [2064, 0, 0, "-", 
"verification"]], "torch.onnx.JitScalarType": [[1778, 3, 1, "", "dtype"], [1778, 3, 1, "", "from_dtype"], [1778, 3, 1, "", "from_onnx_type"], [1778, 3, 1, "", "from_value"], [1778, 3, 1, "", "onnx_compatible"], [1778, 3, 1, "", "onnx_type"], [1778, 3, 1, "", "scalar_name"], [1778, 3, 1, "", "torch_name"]], "torch.onnx.ONNXProgram": [[2065, 3, 1, "", "adapt_torch_inputs_to_onnx"], [2065, 3, 1, "", "adapt_torch_outputs_to_onnx"], [2065, 4, 1, "", "diagnostic_context"], [2065, 4, 1, "", "fake_context"], [2065, 4, 1, "", "model_proto"], [2065, 4, 1, "", "model_signature"], [2065, 3, 1, "", "save"], [2065, 3, 1, "", "save_diagnostics"]], "torch.onnx.ONNXProgramSerializer": [[2065, 3, 1, "", "serialize"]], "torch.onnx.OnnxRegistry": [[2065, 3, 1, "", "get_op_functions"], [2065, 3, 1, "", "is_registered_op"], [2065, 4, 1, "", "opset_version"], [2065, 3, 1, "", "register_op"]], "torch.onnx.verification": [[1779, 1, 1, "", "GraphInfo"], [1780, 1, 1, "", "VerificationOptions"], [2067, 5, 1, "", "find_mismatch"]], "torch.onnx.verification.GraphInfo": [[1779, 3, 1, "", "all_mismatch_leaf_graph_info"], [1779, 3, 1, "", "clear"], [1779, 3, 1, "", "essential_node_count"], [1779, 3, 1, "", "essential_node_kinds"], [1779, 3, 1, "", "export_repro"], [1779, 3, 1, "", "find_mismatch"], [1779, 3, 1, "", "find_partition"], [1779, 3, 1, "", "has_mismatch"], [1779, 3, 1, "", "pretty_print_mismatch"], [1779, 3, 1, "", "pretty_print_tree"], [1779, 3, 1, "", "verify_export"]], "torch.optim": [[1781, 1, 1, "", "ASGD"], [1782, 1, 1, "", "Adadelta"], [1783, 1, 1, "", "Adagrad"], [1784, 1, 1, "", "Adam"], [1785, 1, 1, "", "AdamW"], [1786, 1, 1, "", "Adamax"], [1787, 1, 1, "", "LBFGS"], [1788, 1, 1, "", "NAdam"], [2069, 1, 1, "", "Optimizer"], [1794, 1, 1, "", "RAdam"], [1795, 1, 1, "", "RMSprop"], [1796, 1, 1, "", "Rprop"], [1797, 1, 1, "", "SGD"], [1798, 1, 1, "", "SparseAdam"], [2069, 0, 0, "-", "adadelta"], [2069, 0, 0, "-", "adagrad"], [2069, 0, 0, "-", "adam"], [2069, 0, 0, "-", "adamax"], [2069, 0, 0, "-", "adamw"], [2069, 0, 0, "-", "asgd"], [2069, 0, 0, "-", "lbfgs"], [2069, 0, 0, "-", "lr_scheduler"], [2069, 0, 0, "-", "nadam"], [2069, 0, 0, "-", "optimizer"], [2069, 0, 0, "-", "radam"], [2069, 0, 0, "-", "rmsprop"], [2069, 0, 0, "-", "rprop"], [2069, 0, 0, "-", "sgd"], [2069, 0, 0, "-", "sparse_adam"], [2069, 0, 0, "-", "swa_utils"]], "torch.optim.ASGD": [[1781, 3, 1, "", "add_param_group"], [1781, 3, 1, "", "load_state_dict"], [1781, 3, 1, "", "register_load_state_dict_post_hook"], [1781, 3, 1, "", "register_load_state_dict_pre_hook"], [1781, 3, 1, "", "register_state_dict_post_hook"], [1781, 3, 1, "", "register_state_dict_pre_hook"], [1781, 3, 1, "", "register_step_post_hook"], [1781, 3, 1, "", "register_step_pre_hook"], [1781, 3, 1, "", "state_dict"], [1781, 3, 1, "", "step"], [1781, 3, 1, "", "zero_grad"]], "torch.optim.Adadelta": [[1782, 3, 1, "", "add_param_group"], [1782, 3, 1, "", "load_state_dict"], [1782, 3, 1, "", "register_load_state_dict_post_hook"], [1782, 3, 1, "", "register_load_state_dict_pre_hook"], [1782, 3, 1, "", "register_state_dict_post_hook"], [1782, 3, 1, "", "register_state_dict_pre_hook"], [1782, 3, 1, "", "register_step_post_hook"], [1782, 3, 1, "", "register_step_pre_hook"], [1782, 3, 1, "", "state_dict"], [1782, 3, 1, "", "step"], [1782, 3, 1, "", "zero_grad"]], "torch.optim.Adagrad": [[1783, 3, 1, "", "add_param_group"], [1783, 3, 1, "", "load_state_dict"], [1783, 3, 1, "", "register_load_state_dict_post_hook"], [1783, 3, 1, "", "register_load_state_dict_pre_hook"], [1783, 3, 1, 
"", "register_state_dict_post_hook"], [1783, 3, 1, "", "register_state_dict_pre_hook"], [1783, 3, 1, "", "register_step_post_hook"], [1783, 3, 1, "", "register_step_pre_hook"], [1783, 3, 1, "", "state_dict"], [1783, 3, 1, "", "step"], [1783, 3, 1, "", "zero_grad"]], "torch.optim.Adam": [[1784, 3, 1, "", "add_param_group"], [1784, 3, 1, "", "load_state_dict"], [1784, 3, 1, "", "register_load_state_dict_post_hook"], [1784, 3, 1, "", "register_load_state_dict_pre_hook"], [1784, 3, 1, "", "register_state_dict_post_hook"], [1784, 3, 1, "", "register_state_dict_pre_hook"], [1784, 3, 1, "", "register_step_post_hook"], [1784, 3, 1, "", "register_step_pre_hook"], [1784, 3, 1, "", "state_dict"], [1784, 3, 1, "", "step"], [1784, 3, 1, "", "zero_grad"]], "torch.optim.AdamW": [[1785, 3, 1, "", "add_param_group"], [1785, 3, 1, "", "load_state_dict"], [1785, 3, 1, "", "register_load_state_dict_post_hook"], [1785, 3, 1, "", "register_load_state_dict_pre_hook"], [1785, 3, 1, "", "register_state_dict_post_hook"], [1785, 3, 1, "", "register_state_dict_pre_hook"], [1785, 3, 1, "", "register_step_post_hook"], [1785, 3, 1, "", "register_step_pre_hook"], [1785, 3, 1, "", "state_dict"], [1785, 3, 1, "", "step"], [1785, 3, 1, "", "zero_grad"]], "torch.optim.Adamax": [[1786, 3, 1, "", "add_param_group"], [1786, 3, 1, "", "load_state_dict"], [1786, 3, 1, "", "register_load_state_dict_post_hook"], [1786, 3, 1, "", "register_load_state_dict_pre_hook"], [1786, 3, 1, "", "register_state_dict_post_hook"], [1786, 3, 1, "", "register_state_dict_pre_hook"], [1786, 3, 1, "", "register_step_post_hook"], [1786, 3, 1, "", "register_step_pre_hook"], [1786, 3, 1, "", "state_dict"], [1786, 3, 1, "", "step"], [1786, 3, 1, "", "zero_grad"]], "torch.optim.LBFGS": [[1787, 3, 1, "", "add_param_group"], [1787, 3, 1, "", "load_state_dict"], [1787, 3, 1, "", "register_load_state_dict_post_hook"], [1787, 3, 1, "", "register_load_state_dict_pre_hook"], [1787, 3, 1, "", "register_state_dict_post_hook"], [1787, 3, 1, "", "register_state_dict_pre_hook"], [1787, 3, 1, "", "register_step_post_hook"], [1787, 3, 1, "", "register_step_pre_hook"], [1787, 3, 1, "", "state_dict"], [1787, 3, 1, "", "step"], [1787, 3, 1, "", "zero_grad"]], "torch.optim.NAdam": [[1788, 3, 1, "", "add_param_group"], [1788, 3, 1, "", "load_state_dict"], [1788, 3, 1, "", "register_load_state_dict_post_hook"], [1788, 3, 1, "", "register_load_state_dict_pre_hook"], [1788, 3, 1, "", "register_state_dict_post_hook"], [1788, 3, 1, "", "register_state_dict_pre_hook"], [1788, 3, 1, "", "register_step_post_hook"], [1788, 3, 1, "", "register_step_pre_hook"], [1788, 3, 1, "", "state_dict"], [1788, 3, 1, "", "step"], [1788, 3, 1, "", "zero_grad"]], "torch.optim.Optimizer": [[1789, 3, 1, "", "add_param_group"], [1790, 3, 1, "", "load_state_dict"], [1791, 3, 1, "", "state_dict"], [1792, 3, 1, "", "step"], [1793, 3, 1, "", "zero_grad"]], "torch.optim.RAdam": [[1794, 3, 1, "", "add_param_group"], [1794, 3, 1, "", "load_state_dict"], [1794, 3, 1, "", "register_load_state_dict_post_hook"], [1794, 3, 1, "", "register_load_state_dict_pre_hook"], [1794, 3, 1, "", "register_state_dict_post_hook"], [1794, 3, 1, "", "register_state_dict_pre_hook"], [1794, 3, 1, "", "register_step_post_hook"], [1794, 3, 1, "", "register_step_pre_hook"], [1794, 3, 1, "", "state_dict"], [1794, 3, 1, "", "step"], [1794, 3, 1, "", "zero_grad"]], "torch.optim.RMSprop": [[1795, 3, 1, "", "add_param_group"], [1795, 3, 1, "", "load_state_dict"], [1795, 3, 1, "", "register_load_state_dict_post_hook"], [1795, 3, 1, "", 
"register_load_state_dict_pre_hook"], [1795, 3, 1, "", "register_state_dict_post_hook"], [1795, 3, 1, "", "register_state_dict_pre_hook"], [1795, 3, 1, "", "register_step_post_hook"], [1795, 3, 1, "", "register_step_pre_hook"], [1795, 3, 1, "", "state_dict"], [1795, 3, 1, "", "step"], [1795, 3, 1, "", "zero_grad"]], "torch.optim.Rprop": [[1796, 3, 1, "", "add_param_group"], [1796, 3, 1, "", "load_state_dict"], [1796, 3, 1, "", "register_load_state_dict_post_hook"], [1796, 3, 1, "", "register_load_state_dict_pre_hook"], [1796, 3, 1, "", "register_state_dict_post_hook"], [1796, 3, 1, "", "register_state_dict_pre_hook"], [1796, 3, 1, "", "register_step_post_hook"], [1796, 3, 1, "", "register_step_pre_hook"], [1796, 3, 1, "", "state_dict"], [1796, 3, 1, "", "step"], [1796, 3, 1, "", "zero_grad"]], "torch.optim.SGD": [[1797, 3, 1, "", "add_param_group"], [1797, 3, 1, "", "load_state_dict"], [1797, 3, 1, "", "register_load_state_dict_post_hook"], [1797, 3, 1, "", "register_load_state_dict_pre_hook"], [1797, 3, 1, "", "register_state_dict_post_hook"], [1797, 3, 1, "", "register_state_dict_pre_hook"], [1797, 3, 1, "", "register_step_post_hook"], [1797, 3, 1, "", "register_step_pre_hook"], [1797, 3, 1, "", "state_dict"], [1797, 3, 1, "", "step"], [1797, 3, 1, "", "zero_grad"]], "torch.optim.SparseAdam": [[1798, 3, 1, "", "add_param_group"], [1798, 3, 1, "", "load_state_dict"], [1798, 3, 1, "", "register_load_state_dict_post_hook"], [1798, 3, 1, "", "register_load_state_dict_pre_hook"], [1798, 3, 1, "", "register_state_dict_post_hook"], [1798, 3, 1, "", "register_state_dict_pre_hook"], [1798, 3, 1, "", "register_step_post_hook"], [1798, 3, 1, "", "register_step_pre_hook"], [1798, 3, 1, "", "state_dict"], [1798, 3, 1, "", "step"], [1798, 3, 1, "", "zero_grad"]], "torch.optim.lr_scheduler": [[1799, 1, 1, "", "ChainedScheduler"], [1800, 1, 1, "", "ConstantLR"], [1801, 1, 1, "", "CosineAnnealingLR"], [1802, 1, 1, "", "CosineAnnealingWarmRestarts"], [1803, 1, 1, "", "CyclicLR"], [1804, 1, 1, "", "ExponentialLR"], [1805, 1, 1, "", "LambdaLR"], [1806, 1, 1, "", "LinearLR"], [1807, 1, 1, "", "MultiStepLR"], [1808, 1, 1, "", "MultiplicativeLR"], [1809, 1, 1, "", "OneCycleLR"], [1810, 1, 1, "", "PolynomialLR"], [1811, 1, 1, "", "ReduceLROnPlateau"], [1812, 1, 1, "", "SequentialLR"], [1813, 1, 1, "", "StepLR"]], "torch.optim.lr_scheduler.ChainedScheduler": [[1799, 3, 1, "", "get_last_lr"], [1799, 3, 1, "", "load_state_dict"], [1799, 3, 1, "", "print_lr"], [1799, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.ConstantLR": [[1800, 3, 1, "", "get_last_lr"], [1800, 3, 1, "", "load_state_dict"], [1800, 3, 1, "", "print_lr"], [1800, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.CosineAnnealingLR": [[1801, 3, 1, "", "get_last_lr"], [1801, 3, 1, "", "load_state_dict"], [1801, 3, 1, "", "print_lr"], [1801, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts": [[1802, 3, 1, "", "get_last_lr"], [1802, 3, 1, "", "load_state_dict"], [1802, 3, 1, "", "print_lr"], [1802, 3, 1, "", "state_dict"], [1802, 3, 1, "", "step"]], "torch.optim.lr_scheduler.CyclicLR": [[1803, 3, 1, "", "get_last_lr"], [1803, 3, 1, "", "get_lr"], [1803, 3, 1, "", "print_lr"]], "torch.optim.lr_scheduler.ExponentialLR": [[1804, 3, 1, "", "get_last_lr"], [1804, 3, 1, "", "load_state_dict"], [1804, 3, 1, "", "print_lr"], [1804, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.LambdaLR": [[1805, 3, 1, "", "get_last_lr"], [1805, 3, 1, "", "load_state_dict"], [1805, 3, 1, "", "print_lr"], [1805, 3, 1, "", 
"state_dict"]], "torch.optim.lr_scheduler.LinearLR": [[1806, 3, 1, "", "get_last_lr"], [1806, 3, 1, "", "load_state_dict"], [1806, 3, 1, "", "print_lr"], [1806, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.MultiStepLR": [[1807, 3, 1, "", "get_last_lr"], [1807, 3, 1, "", "load_state_dict"], [1807, 3, 1, "", "print_lr"], [1807, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.MultiplicativeLR": [[1808, 3, 1, "", "get_last_lr"], [1808, 3, 1, "", "load_state_dict"], [1808, 3, 1, "", "print_lr"], [1808, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.OneCycleLR": [[1809, 3, 1, "", "get_last_lr"], [1809, 3, 1, "", "load_state_dict"], [1809, 3, 1, "", "print_lr"], [1809, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.PolynomialLR": [[1810, 3, 1, "", "get_last_lr"], [1810, 3, 1, "", "load_state_dict"], [1810, 3, 1, "", "print_lr"], [1810, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.ReduceLROnPlateau": [[1811, 3, 1, "", "get_last_lr"], [1811, 3, 1, "", "print_lr"]], "torch.optim.lr_scheduler.SequentialLR": [[1812, 3, 1, "", "get_last_lr"], [1812, 3, 1, "", "load_state_dict"], [1812, 3, 1, "", "print_lr"], [1812, 3, 1, "", "state_dict"]], "torch.optim.lr_scheduler.StepLR": [[1813, 3, 1, "", "get_last_lr"], [1813, 3, 1, "", "load_state_dict"], [1813, 3, 1, "", "print_lr"], [1813, 3, 1, "", "state_dict"]], "torch.overrides": [[2114, 5, 1, "", "get_ignored_functions"], [2114, 5, 1, "", "get_overridable_functions"], [2114, 5, 1, "", "get_testing_overrides"], [2114, 5, 1, "", "handle_torch_function"], [2114, 5, 1, "", "has_torch_function"], [2114, 5, 1, "", "is_tensor_like"], [2114, 5, 1, "", "is_tensor_method_or_property"], [2114, 5, 1, "", "resolve_name"], [2114, 5, 1, "", "wrap_torch_function"]], "torch.package": [[2070, 1, 1, "", "Directory"], [2070, 1, 1, "", "EmptyMatchError"], [2070, 1, 1, "", "PackageExporter"], [2070, 1, 1, "", "PackageImporter"], [2070, 1, 1, "", "PackagingError"], [2070, 0, 0, "-", "analyze"], [2070, 0, 0, "-", "file_structure_representation"], [2070, 0, 0, "-", "find_file_dependencies"], [2070, 0, 0, "-", "glob_group"], [2070, 0, 0, "-", "importer"], [2070, 0, 0, "-", "package_exporter"], [2070, 0, 0, "-", "package_importer"]], "torch.package.Directory": [[2070, 3, 1, "", "has_file"]], "torch.package.PackageExporter": [[2070, 3, 1, "", "__init__"], [2070, 3, 1, "", "add_dependency"], [2070, 3, 1, "", "all_paths"], [2070, 3, 1, "", "close"], [2070, 3, 1, "", "denied_modules"], [2070, 3, 1, "", "deny"], [2070, 3, 1, "", "dependency_graph_string"], [2070, 3, 1, "", "extern"], [2070, 3, 1, "", "externed_modules"], [2070, 3, 1, "", "get_rdeps"], [2070, 3, 1, "", "get_unique_id"], [2070, 3, 1, "", "intern"], [2070, 3, 1, "", "interned_modules"], [2070, 3, 1, "", "mock"], [2070, 3, 1, "", "mocked_modules"], [2070, 3, 1, "", "register_extern_hook"], [2070, 3, 1, "", "register_intern_hook"], [2070, 3, 1, "", "register_mock_hook"], [2070, 3, 1, "", "save_binary"], [2070, 3, 1, "", "save_module"], [2070, 3, 1, "", "save_pickle"], [2070, 3, 1, "", "save_source_file"], [2070, 3, 1, "", "save_source_string"], [2070, 3, 1, "", "save_text"]], "torch.package.PackageImporter": [[2070, 3, 1, "", "__init__"], [2070, 3, 1, "", "file_structure"], [2070, 3, 1, "", "id"], [2070, 3, 1, "", "import_module"], [2070, 3, 1, "", "load_binary"], [2070, 3, 1, "", "load_pickle"], [2070, 3, 1, "", "load_text"], [2070, 3, 1, "", "python_version"]], "torch.package.analyze": [[2070, 0, 0, "-", "find_first_use_of_broken_modules"], [2070, 0, 0, "-", "is_from_package"], [2070, 0, 0, "-", 
"trace_dependencies"]], "torch.profiler": [[2071, 1, 1, "", "ProfilerAction"], [2071, 1, 1, "", "ProfilerActivity"], [2071, 1, 1, "", "_KinetoProfile"], [2071, 0, 0, "-", "itt"], [2071, 1, 1, "", "profile"], [2071, 0, 0, "-", "profiler"], [2071, 0, 0, "-", "python_tracer"], [2071, 5, 1, "", "schedule"], [2071, 5, 1, "", "tensorboard_trace_handler"]], "torch.profiler.ProfilerActivity": [[2071, 4, 1, "", "name"]], "torch.profiler._KinetoProfile": [[2071, 3, 1, "", "add_metadata"], [2071, 3, 1, "", "add_metadata_json"], [2071, 3, 1, "", "events"], [2071, 3, 1, "", "export_chrome_trace"], [2071, 3, 1, "", "export_memory_timeline"], [2071, 3, 1, "", "export_stacks"], [2071, 3, 1, "", "key_averages"], [2071, 3, 1, "", "preset_metadata_json"]], "torch.profiler.itt": [[2071, 5, 1, "", "is_available"], [2071, 5, 1, "", "mark"], [2071, 5, 1, "", "range_pop"], [2071, 5, 1, "", "range_push"]], "torch.profiler.profile": [[2071, 3, 1, "", "step"]], "torch.quantization": [[2072, 0, 0, "-", "fake_quantize"], [2072, 0, 0, "-", "fuse_modules"], [2072, 0, 0, "-", "fuser_method_mappings"], [2075, 0, 0, "-", "fx"], [2072, 0, 0, "-", "observer"], [2072, 0, 0, "-", "qconfig"], [2072, 0, 0, "-", "quant_type"], [2072, 0, 0, "-", "quantization_mappings"], [2072, 0, 0, "-", "quantize"], [2072, 0, 0, "-", "quantize_fx"], [2072, 0, 0, "-", "quantize_jit"], [2072, 0, 0, "-", "stubs"], [2072, 0, 0, "-", "utils"]], "torch.quantization.fx": [[2072, 0, 0, "-", "convert"], [2072, 0, 0, "-", "fuse"], [2072, 0, 0, "-", "fusion_patterns"], [2072, 0, 0, "-", "graph_module"], [2072, 0, 0, "-", "match_utils"], [2072, 0, 0, "-", "pattern_utils"], [2072, 0, 0, "-", "prepare"], [2072, 0, 0, "-", "quantization_patterns"], [2072, 0, 0, "-", "quantization_types"], [2072, 0, 0, "-", "utils"]], "torch.quasirandom": [[1834, 1, 1, "", "SobolEngine"]], "torch.quasirandom.SobolEngine": [[1834, 3, 1, "", "draw"], [1834, 3, 1, "", "draw_base2"], [1834, 3, 1, "", "fast_forward"], [1834, 3, 1, "", "reset"]], "torch.random": [[2076, 5, 1, "", "fork_rng"], [2076, 5, 1, "", "get_rng_state"], [2076, 5, 1, "", "initial_seed"], [2076, 5, 1, "", "manual_seed"], [2076, 5, 1, "", "seed"], [2076, 5, 1, "", "set_rng_state"]], "torch.serialization": [[2062, 5, 1, "", "add_safe_globals"], [2062, 5, 1, "", "clear_safe_globals"], [2062, 5, 1, "", "get_default_load_endianness"], [2062, 5, 1, "", "get_default_mmap_options"], [2062, 5, 1, "", "get_safe_globals"], [2062, 5, 1, "", "register_package"], [2062, 5, 1, "", "set_default_load_endianness"], [2062, 5, 1, "", "set_default_mmap_options"]], "torch.signal": [[2080, 0, 0, "-", "windows"]], "torch.signal.windows": [[1881, 5, 1, "", "bartlett"], [1882, 5, 1, "", "blackman"], [1883, 5, 1, "", "cosine"], [1884, 5, 1, "", "exponential"], [1885, 5, 1, "", "gaussian"], [1886, 5, 1, "", "general_cosine"], [1887, 5, 1, "", "general_hamming"], [1888, 5, 1, "", "hamming"], [1889, 5, 1, "", "hann"], [1890, 5, 1, "", "kaiser"], [1891, 5, 1, "", "nuttall"], [2091, 0, 0, "-", "windows"]], "torch.sparse": [[1901, 5, 1, "", "addmm"], [1902, 5, 1, "", "as_sparse_gradcheck"], [1903, 1, 1, "", "check_sparse_tensor_invariants"], [1904, 5, 1, "", "log_softmax"], [1905, 5, 1, "", "mm"], [1906, 5, 1, "", "sampled_addmm"], [2091, 0, 0, "-", "semi_structured"], [1907, 5, 1, "", "softmax"], [1908, 5, 1, "", "spdiags"], [1909, 5, 1, "", "sum"]], "torch.sparse.check_sparse_tensor_invariants": [[1903, 3, 1, "", "disable"], [1903, 3, 1, "", "enable"], [1903, 3, 1, "", "is_enabled"]], "torch.special": [[2083, 5, 1, "", "airy_ai"], [2083, 5, 
1, "", "bessel_j0"], [2083, 5, 1, "", "bessel_j1"], [2083, 5, 1, "", "digamma"], [2083, 5, 1, "", "entr"], [2083, 5, 1, "", "erf"], [2083, 5, 1, "", "erfc"], [2083, 5, 1, "", "erfcx"], [2083, 5, 1, "", "erfinv"], [2083, 5, 1, "", "exp2"], [2083, 5, 1, "", "expit"], [2083, 5, 1, "", "expm1"], [2083, 5, 1, "", "gammainc"], [2083, 5, 1, "", "gammaincc"], [2083, 5, 1, "", "gammaln"], [2083, 5, 1, "", "i0"], [2083, 5, 1, "", "i0e"], [2083, 5, 1, "", "i1"], [2083, 5, 1, "", "i1e"], [2083, 5, 1, "", "log1p"], [2083, 5, 1, "", "log_ndtr"], [2083, 5, 1, "", "log_softmax"], [2083, 5, 1, "", "logit"], [2083, 5, 1, "", "logsumexp"], [2083, 5, 1, "", "multigammaln"], [2083, 5, 1, "", "ndtr"], [2083, 5, 1, "", "ndtri"], [2083, 5, 1, "", "polygamma"], [2083, 5, 1, "", "psi"], [2083, 5, 1, "", "round"], [2083, 5, 1, "", "scaled_modified_bessel_k0"], [2083, 5, 1, "", "scaled_modified_bessel_k1"], [2083, 5, 1, "", "sinc"], [2083, 5, 1, "", "softmax"], [2083, 5, 1, "", "spherical_bessel_j0"], [2083, 5, 1, "", "xlog1py"], [2083, 5, 1, "", "xlogy"], [2083, 5, 1, "", "zeta"]], "torch.testing": [[2089, 5, 1, "", "assert_allclose"], [2089, 5, 1, "", "assert_close"], [2089, 5, 1, "", "make_tensor"]], "torch.torch": [[2091, 2, 1, "", "default_generator"], [2118, 1, 1, "", "finfo"], [2118, 1, 1, "", "iinfo"]], "torch.utils": [[2091, 0, 0, "-", "backcompat"], [2119, 0, 0, "-", "backend_registration"], [3, 0, 0, "-", "benchmark"], [4, 0, 0, "-", "bottleneck"], [2119, 0, 0, "-", "bundled_inputs"], [2119, 0, 0, "-", "checkpoint"], [2119, 0, 0, "-", "collect_env"], [2119, 0, 0, "-", "cpp_backtrace"], [2119, 0, 0, "-", "cpp_extension"], [23, 0, 0, "-", "data"], [27, 0, 0, "-", "deterministic"], [2119, 0, 0, "-", "dlpack"], [2119, 0, 0, "-", "file_baton"], [2119, 0, 0, "-", "flop_counter"], [1966, 5, 1, "", "generate_methods_for_privateuse1_backend"], [1967, 5, 1, "", "get_cpp_backtrace"], [2091, 0, 0, "-", "hipify"], [2119, 0, 0, "-", "hooks"], [2020, 0, 0, "-", "jit"], [2119, 0, 0, "-", "mkldnn"], [2119, 0, 0, "-", "mobile_optimizer"], [2091, 0, 0, "-", "model_dump"], [2028, 0, 0, "-", "model_zoo"], [2029, 0, 0, "-", "module_tracker"], [1968, 5, 1, "", "rename_privateuse1_backend"], [1969, 5, 1, "", "set_module"], [2119, 0, 0, "-", "show_pickle"], [1970, 5, 1, "", "swap_tensors"], [2087, 0, 0, "-", "tensorboard"], [2119, 0, 0, "-", "throughput_benchmark"], [2091, 0, 0, "-", "viz"], [2119, 0, 0, "-", "weak"]], "torch.utils.benchmark": [[3, 1, 1, "", "CallgrindStats"], [3, 1, 1, "", "Compare"], [3, 1, 1, "", "FunctionCounts"], [3, 1, 1, "", "Measurement"], [3, 1, 1, "", "Timer"], [3, 0, 0, "-", "examples"], [3, 0, 0, "-", "op_fuzzers"], [3, 0, 0, "-", "utils"]], "torch.utils.benchmark.CallgrindStats": [[3, 3, 1, "", "as_standardized"], [3, 3, 1, "", "counts"], [3, 3, 1, "", "delta"], [3, 3, 1, "", "stats"]], "torch.utils.benchmark.Compare": [[3, 3, 1, "", "colorize"], [3, 3, 1, "", "extend_results"], [3, 3, 1, "", "highlight_warnings"], [3, 3, 1, "", "print"], [3, 3, 1, "", "trim_significant_figures"]], "torch.utils.benchmark.FunctionCounts": [[3, 3, 1, "", "denoise"], [3, 3, 1, "", "filter"], [3, 3, 1, "", "transform"]], "torch.utils.benchmark.Measurement": [[3, 3, 1, "", "merge"], [3, 4, 1, "", "significant_figures"]], "torch.utils.benchmark.Timer": [[3, 3, 1, "", "adaptive_autorange"], [3, 3, 1, "", "blocked_autorange"], [3, 3, 1, "", "collect_callgrind"], [3, 3, 1, "", "timeit"]], "torch.utils.benchmark.examples": [[2119, 0, 0, "-", "blas_compare_setup"], [2119, 0, 0, "-", "compare"], [2119, 0, 0, "-", "fuzzer"], 
[2119, 0, 0, "-", "op_benchmark"], [2119, 0, 0, "-", "simple_timeit"], [2119, 0, 0, "-", "spectral_ops_fuzz_test"]], "torch.utils.benchmark.op_fuzzers": [[2119, 0, 0, "-", "binary"], [2119, 0, 0, "-", "sparse_binary"], [2119, 0, 0, "-", "sparse_unary"], [2119, 0, 0, "-", "spectral"], [2119, 0, 0, "-", "unary"]], "torch.utils.benchmark.utils": [[2119, 0, 0, "-", "common"], [2119, 0, 0, "-", "compare"], [2119, 0, 0, "-", "compile"], [2119, 0, 0, "-", "cpp_jit"], [2119, 0, 0, "-", "fuzzer"], [2119, 0, 0, "-", "sparse_fuzzer"], [2119, 0, 0, "-", "timer"], [3, 0, 0, "-", "valgrind_wrapper"]], "torch.utils.benchmark.utils.valgrind_wrapper": [[2119, 0, 0, "-", "timer_interface"]], "torch.utils.checkpoint": [[5, 5, 1, "", "checkpoint"], [5, 5, 1, "", "checkpoint_sequential"], [5, 5, 1, "", "set_checkpoint_debug_enabled"]], "torch.utils.cpp_extension": [[14, 5, 1, "", "BuildExtension"], [14, 5, 1, "", "CUDAExtension"], [14, 5, 1, "", "CppExtension"], [14, 5, 1, "", "get_compiler_abi_compatibility_and_version"], [14, 5, 1, "", "include_paths"], [14, 5, 1, "", "is_ninja_available"], [14, 5, 1, "", "load"], [14, 5, 1, "", "load_inline"], [14, 5, 1, "", "verify_ninja_availability"]], "torch.utils.data": [[23, 1, 1, "", "BatchSampler"], [23, 1, 1, "", "ChainDataset"], [23, 1, 1, "", "ConcatDataset"], [23, 1, 1, "", "DataLoader"], [23, 1, 1, "", "Dataset"], [23, 1, 1, "", "IterableDataset"], [23, 1, 1, "", "RandomSampler"], [23, 1, 1, "", "Sampler"], [23, 1, 1, "", "SequentialSampler"], [23, 1, 1, "", "StackDataset"], [23, 1, 1, "", "Subset"], [23, 1, 1, "", "SubsetRandomSampler"], [23, 1, 1, "", "TensorDataset"], [23, 1, 1, "", "WeightedRandomSampler"], [2119, 0, 0, "-", "backward_compatibility"], [2119, 0, 0, "-", "dataloader"], [23, 0, 0, "-", "datapipes"], [2119, 0, 0, "-", "dataset"], [23, 5, 1, "", "default_collate"], [23, 5, 1, "", "default_convert"], [2119, 0, 0, "-", "distributed"], [23, 5, 1, "", "get_worker_info"], [2119, 0, 0, "-", "graph"], [2119, 0, 0, "-", "graph_settings"], [23, 5, 1, "", "random_split"], [2119, 0, 0, "-", "sampler"]], "torch.utils.data._utils.collate": [[23, 5, 1, "", "collate"]], "torch.utils.data.datapipes": [[23, 0, 0, "-", "dataframe"], [2119, 0, 0, "-", "datapipe"], [2119, 0, 0, "-", "gen_pyi"], [23, 0, 0, "-", "iter"], [23, 0, 0, "-", "map"], [23, 0, 0, "-", "utils"]], "torch.utils.data.datapipes.dataframe": [[2119, 0, 0, "-", "dataframe_wrapper"], [2119, 0, 0, "-", "dataframes"], [2119, 0, 0, "-", "datapipes"], [2119, 0, 0, "-", "structures"]], "torch.utils.data.datapipes.iter": [[2119, 0, 0, "-", "callable"], [2119, 0, 0, "-", "combinatorics"], [2119, 0, 0, "-", "combining"], [2119, 0, 0, "-", "filelister"], [2119, 0, 0, "-", "fileopener"], [2119, 0, 0, "-", "grouping"], [2119, 0, 0, "-", "routeddecoder"], [2119, 0, 0, "-", "selecting"], [2119, 0, 0, "-", "sharding"], [2119, 0, 0, "-", "streamreader"], [2119, 0, 0, "-", "utils"]], "torch.utils.data.datapipes.map": [[2119, 0, 0, "-", "callable"], [2119, 0, 0, "-", "combinatorics"], [2119, 0, 0, "-", "combining"], [2119, 0, 0, "-", "grouping"], [2119, 0, 0, "-", "utils"]], "torch.utils.data.datapipes.utils": [[2119, 0, 0, "-", "common"], [2119, 0, 0, "-", "decoder"], [2119, 0, 0, "-", "snapshot"]], "torch.utils.data.distributed": [[23, 1, 1, "", "DistributedSampler"]], "torch.utils.deterministic": [[27, 2, 1, "", "fill_uninitialized_memory"]], "torch.utils.dlpack": [[36, 5, 1, "", "from_dlpack"], [36, 5, 1, "", "to_dlpack"]], "torch.utils.hipify": [[2119, 0, 0, "-", "constants"], [2119, 0, 0, "-", 
"cuda_to_hip_mappings"], [2119, 0, 0, "-", "hipify_python"], [2119, 0, 0, "-", "version"]], "torch.utils.jit": [[2119, 0, 0, "-", "log_extract"]], "torch.utils.mobile_optimizer": [[2027, 5, 1, "", "optimize_for_mobile"]], "torch.utils.model_zoo": [[2028, 5, 1, "", "load_url"]], "torch.utils.module_tracker": [[2029, 1, 1, "", "ModuleTracker"]], "torch.utils.tensorboard": [[2119, 0, 0, "-", "summary"], [2119, 0, 0, "-", "writer"]], "torch.utils.tensorboard.writer": [[2087, 1, 1, "", "SummaryWriter"]], "torch.utils.tensorboard.writer.SummaryWriter": [[2087, 3, 1, "", "__init__"], [2087, 3, 1, "", "add_audio"], [2087, 3, 1, "", "add_custom_scalars"], [2087, 3, 1, "", "add_embedding"], [2087, 3, 1, "", "add_figure"], [2087, 3, 1, "", "add_graph"], [2087, 3, 1, "", "add_histogram"], [2087, 3, 1, "", "add_hparams"], [2087, 3, 1, "", "add_image"], [2087, 3, 1, "", "add_images"], [2087, 3, 1, "", "add_mesh"], [2087, 3, 1, "", "add_pr_curve"], [2087, 3, 1, "", "add_scalar"], [2087, 3, 1, "", "add_scalars"], [2087, 3, 1, "", "add_text"], [2087, 3, 1, "", "add_video"], [2087, 3, 1, "", "close"], [2087, 3, 1, "", "flush"]], "torch.xpu": [[1982, 1, 1, "", "Event"], [1983, 1, 1, "", "Stream"], [1984, 1, 1, "", "StreamContext"], [1985, 5, 1, "", "current_device"], [1986, 5, 1, "", "current_stream"], [1987, 1, 1, "", "device"], [1988, 5, 1, "", "device_count"], [1989, 1, 1, "", "device_of"], [1990, 5, 1, "", "empty_cache"], [1991, 5, 1, "", "get_device_capability"], [1992, 5, 1, "", "get_device_name"], [1993, 5, 1, "", "get_device_properties"], [1994, 5, 1, "", "get_rng_state"], [1995, 5, 1, "", "get_rng_state_all"], [1996, 5, 1, "", "init"], [1997, 5, 1, "", "initial_seed"], [1998, 5, 1, "", "is_available"], [1999, 5, 1, "", "is_initialized"], [2000, 5, 1, "", "manual_seed"], [2001, 5, 1, "", "manual_seed_all"], [2120, 0, 0, "-", "random"], [2002, 5, 1, "", "seed"], [2003, 5, 1, "", "seed_all"], [2004, 5, 1, "", "set_device"], [2005, 5, 1, "", "set_rng_state"], [2006, 5, 1, "", "set_rng_state_all"], [2007, 5, 1, "", "set_stream"], [2008, 5, 1, "", "stream"], [2120, 0, 0, "-", "streams"], [2009, 5, 1, "", "synchronize"]], "torch.xpu.Event": [[1982, 3, 1, "", "elapsed_time"], [1982, 3, 1, "", "query"], [1982, 3, 1, "", "record"], [1982, 3, 1, "", "synchronize"], [1982, 3, 1, "", "wait"]], "torch.xpu.Stream": [[1983, 3, 1, "", "query"], [1983, 3, 1, "", "record_event"], [1983, 3, 1, "", "synchronize"], [1983, 3, 1, "", "wait_event"], [1983, 3, 1, "", "wait_stream"]]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:attribute", "3": "py:method", "4": "py:property", "5": "py:function", "6": "py:exception", "7": "std:envvar"}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "method", "Python method"], "4": ["py", "property", "Python property"], "5": ["py", "function", "Python function"], "6": ["py", "exception", "Python exception"], "7": ["std", "envvar", "environment variable"]}, "titleterms": {"automat": [0, 1, 23, 33, 2014, 2042, 2050], "mix": [0, 2014, 2042], "precis": [0, 2042, 2046, 2060], "packag": [0, 1, 15, 28, 2033, 2063, 2070], "torch": [0, 1, 2, 3, 4, 5, 10, 12, 13, 14, 16, 17, 23, 26, 27, 28, 30, 31, 33, 34, 35, 36, 48, 52, 53, 54, 56, 57, 59, 60, 61, 62, 63, 64, 65, 74, 75, 76, 77, 78, 79, 80, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 
132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 869, 870, 871, 872, 873, 874, 875, 876, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 900, 901, 902, 903, 905, 906, 907, 908, 911, 912, 913, 914, 915, 916, 917, 918, 922, 923, 924, 925, 926, 927, 928, 929, 930, 933, 934, 935, 936, 937, 938, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1013, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 
1027, 1028, 1029, 1031, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110, 1111, 1112, 1114, 1115, 1116, 1117, 1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269, 1270, 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, 1283, 1284, 1285, 1286, 1287, 1289, 1290, 1291, 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1366, 1367, 1368, 1369, 1370, 1371, 1372, 1373, 1374, 1375, 1376, 1377, 1378, 1379, 1380, 1381, 1382, 1383, 1384, 1385, 1387, 1388, 1389, 1390, 1391, 1392, 1393, 1394, 1395, 1396, 1397, 1401, 1402, 1403, 1405, 1406, 1407, 1408, 1409, 1410, 1411, 1412, 1413, 1414, 1415, 1416, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1425, 1426, 1427, 1587, 1589, 1590, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1721, 1722, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1733, 1735, 1736, 1737, 1738, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 1769, 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1789, 1790, 1791, 1792, 1793, 1814, 1815, 1816, 1817, 1818, 1819, 1820, 1821, 1822, 1823, 1824, 1825, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, 1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1856, 
1857, 1858, 1859, 1860, 1861, 1862, 1863, 1864, 1865, 1866, 1867, 1868, 1869, 1870, 1871, 1872, 1873, 1874, 1875, 1876, 1877, 1878, 1879, 1880, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1900, 1901, 1902, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1921, 1922, 1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1931, 1932, 1933, 1934, 1935, 1936, 1937, 1938, 1939, 1940, 1941, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1955, 1956, 1957, 1958, 1959, 1960, 1961, 1962, 1963, 1964, 1965, 1966, 1967, 1968, 1969, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, 1985, 1986, 1988, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2017, 2019, 2020, 2021, 2022, 2023, 2024, 2027, 2028, 2029, 2030, 2031, 2032, 2033, 2036, 2037, 2038, 2039, 2040, 2041, 2046, 2049, 2050, 2053, 2055, 2060, 2062, 2064, 2067, 2069, 2070, 2071, 2075, 2076, 2080, 2081, 2082, 2083, 2084, 2085, 2087, 2088, 2089, 2091, 2092, 2093, 2094, 2095, 2096, 2104, 2105, 2111, 2114, 2116, 2118, 2119, 2120], "amp": [0, 2046, 2053], "autocast": [0, 2042], "gradient": [0, 1, 61, 1227, 2042, 2043, 2050, 2091, 2104], "scale": [0, 2042, 2056], "op": [0, 12, 86, 2019, 2021, 2042, 2067, 2091], "refer": [0, 12, 18, 19, 33, 52, 53, 57, 64, 2014, 2016, 2017, 2018, 2030, 2035, 2055, 2065, 2070, 2071, 2072, 2075, 2079, 2088, 2096, 2115], "elig": 0, "cuda": [0, 2, 10, 17, 18, 20, 211, 1013, 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1031, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 2033, 2046, 2051, 2055, 2059, 2061, 2063, 2098, 2104, 2111, 2115], "specif": [0, 23, 53, 2082], "behavior": [0, 19, 23, 2043, 2054], "can": [0, 2043, 2051, 2104, 2110], "float16": 0, "float32": 0, "promot": [0, 7, 88], "widest": 0, "input": [0, 19, 52, 2034, 2042, 2054], "type": [0, 23, 53, 88, 605, 2016, 2017, 2049, 2067, 2070, 2088, 2118], "prefer": 0, "binary_cross_entropy_with_logit": [0, 1606], "over": [0, 8, 2016], "binary_cross_entropi": [0, 1605], "xpu": [0, 625, 1985, 1986, 1988, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2120], "experiment": [0, 65, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 2022], "cpu": [0, 2, 10, 16, 208, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 2043, 2045, 2059, 2072, 2097], "bfloat16": [0, 157], "differenti": [1, 2043], "autograd": [1, 10, 11, 15, 60, 893, 894, 895, 896, 897, 900, 901, 902, 903, 905, 906, 907, 908, 911, 912, 913, 914, 915, 916, 917, 918, 922, 923, 924, 925, 926, 927, 928, 929, 930, 933, 934, 935, 936, 937, 938, 2035, 2042, 2043, 2049, 2050, 2067, 2077, 2078], "forward": [1, 894, 2049, 2078], "mode": [1, 48, 58, 416, 1379, 2043, 2049, 2054, 2067, 2072, 2078], "function": [1, 28, 35, 54, 56, 57, 59, 61, 64, 84, 87, 893, 894, 895, 896, 905, 906, 907, 908, 911, 912, 913, 914, 915, 916, 917, 1167, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 
1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1706, 2014, 2015, 2016, 2017, 2019, 2022, 2024, 2034, 2036, 2037, 2040, 2042, 2043, 2050, 2054, 2062, 2067, 2075, 2082, 2083, 2104, 2114], "higher": 1, "level": [1, 10, 86, 2021, 2075], "api": [1, 10, 12, 15, 18, 19, 31, 33, 41, 52, 57, 60, 64, 2013, 2014, 2017, 2021, 2030, 2035, 2045, 2046, 2049, 2055, 2056, 2065, 2067, 2070, 2071, 2072, 2075, 2096, 2100, 2101, 2103, 2104, 2105, 2115], "local": [1, 2017, 2043, 2091, 2110, 2112], "disabl": [1, 23, 982, 2014, 2043, 2091, 2104, 2105], "comput": [1, 61, 2043, 2060, 2078, 2091, 2104], "default": [1, 23, 24, 2016, 2043, 2046, 2054, 2074, 2101], "layout": [1, 2085], "manual": [1, 33], "In": [1, 8, 2043, 2044, 2091], "place": [1, 60, 2034, 2043, 2044, 2067, 2091], "oper": [1, 10, 19, 24, 28, 52, 60, 80, 89, 2016, 2017, 2022, 2024, 2034, 2035, 2036, 2043, 2047, 2049, 2056, 2063, 2067, 2068, 2072, 2082, 2088, 2091, 2103, 2111], "tensor": [1, 11, 15, 34, 52, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 
506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 1943, 2015, 2019, 2022, 2025, 2033, 2034, 2035, 2036, 2043, 2049, 2062, 2067, 2072, 2075, 2082, 2085, 2086, 2088, 2091, 2103], "correct": [1, 64, 2043], "check": [1, 37, 64, 2014, 2043, 2053, 2055, 2110], "variabl": [1, 20, 25, 28, 48, 2014, 2016, 2017, 2026, 2090, 2107, 2116, 2117], "deprec": 1, "context": [1, 19, 29, 45, 70, 2078], "method": [1, 40, 41, 44, 50, 2015, 2016, 2019, 2067, 2075, 2082], "mixin": 1, "custom": [1, 39, 50, 64, 2017, 2021, 2042, 2046, 2047, 2050, 2057, 2067, 2069, 2070, 2072, 2099], "util": [1, 3, 4, 5, 10, 14, 23, 27, 28, 33, 36, 57, 59, 1087, 1721, 1722, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1733, 1735, 1736, 1737, 1738, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 1769, 1966, 1967, 1968, 1969, 1970, 2020, 2027, 2028, 2029, 2037, 2038, 2062, 2075, 2087, 2091, 2093, 2112, 2119], "numer": [1, 2054, 2060, 2073], "profil": [1, 28, 933, 934, 935, 936, 937, 938, 1389, 1390, 1391, 2031, 2056, 2057, 2071, 2107, 2111, 2113], "debug": [1, 24, 25, 28, 50, 64, 86, 2014, 2072, 2073, 2099, 2104, 2113], "anomali": 1, "detect": 1, "graph": [1, 17, 52, 53, 64, 81, 925, 926, 927, 928, 929, 930, 1043, 2014, 2043, 2046, 2072, 2098, 2101, 2104, 2111, 2112, 2113], "backend": [2, 28, 47, 48, 2014, 2055, 2058, 2066, 2072, 2074, 2077, 2094, 2097, 2099, 2113], "cudnn": 2, "mha": 2, "mp": [2, 10, 1382, 1383, 1384, 1385, 1387, 1388, 1389, 1390, 1391, 1392, 1393, 1394, 1395, 2031, 2058], "mkl": 2, "mkldnn": [2, 10], "nnpack": 2, "openmp": 2, "opt_einsum": 2, "xeon": 2, "benchmark": [3, 2061, 2107], "bottleneck": 4, "checkpoint": [5, 24, 30], "pytorch": [6, 7, 8, 9, 10, 15, 26, 28, 52, 60, 2013, 2014, 2015, 2019, 2043, 2046, 2047, 2049, 2053, 2061, 2062, 2067, 2072, 2075, 2094, 2098, 2109, 2110, 2113, 2117], "govern": [6, 9, 10], "build": [6, 7, 10, 33, 2045, 2053, 2056, 2057, 2063], "ci": [6, 10], "how": [6, 24, 33, 58, 2012, 2043, 2049, 2069, 2070, 2102, 2103, 2104, 2110], "add": [6, 9, 99, 688], "new": [6, 7, 9, 2021, 2049], "maintain": [6, 9, 10], "contribut": [7, 2064], "guid": 7, "process": [7, 9, 23, 45, 2042], "get": [7, 31, 2053, 2067, 2094, 2104, 2106], "start": [7, 31, 45, 1390, 2053, 2094, 2106, 2113], "propos": 7, "featur": [7, 2056, 2057, 2104], "report": [7, 2051], "issu": [7, 2014, 2065, 2111], "implement": [7, 33, 37, 47, 50, 2012, 2048, 2054, 2067, 2073, 2075, 2079, 2101, 2103], "fix": [7, 48, 58, 263, 1147], "bug": 7, "ad": [7, 2049, 2067, 2101], "tutori": [7, 10, 2070, 2077], "improv": [7, 2057], "document": [7, 31, 2013], "particip": 7, "onlin": 7, "discuss": 7, "submit": 7, "pull": 7, "request": 7, "open": 7, "review": 7, "code": [7, 64, 2014, 2046, 2053, 2070, 2104], "readabl": 7, "test": [7, 2017, 2021, 2049, 2070, 2089, 2110], "case": [7, 2014], "make": [7, 9, 2098, 2101], "codebas": 7, "more": [7, 52, 56, 2077, 2094], "robust": 7, "triag": 7, "about": [7, 2043, 2077, 
2103], "sourc": [7, 2053, 2061, 2063, 2070], "develop": [7, 52, 2013, 2064, 2094], "common": [7, 28, 64, 2056, 2072], "mistak": 7, "To": 7, "avoid": [7, 2059, 2061, 2067, 2070], "frequent": [7, 2014, 2051, 2067, 2072, 2104], "ask": [7, 2014, 2051, 2067, 2072, 2104], "question": [7, 2014, 2051, 2067, 2072, 2104], "On": [7, 24, 2016], "python": [7, 8, 28, 52, 60, 67, 68, 69, 70, 71, 72, 73, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2021, 2049, 2061, 2067, 2101], "doc": [7, 10, 2055], "c": [7, 10, 15, 2021, 2043, 2055, 2062, 2067, 2095], "overview": [7, 18, 19, 52, 64, 2064, 2065, 2070, 2071, 2082, 2099, 2102], "design": [8, 2017, 2048, 2077, 2078, 2079], "philosophi": 8, "principl": [8, 9], "1": [8, 33, 48, 58, 2014, 2050, 2101], "usabl": 8, "perform": [8, 10, 2057, 2062, 2103, 2110, 2111, 2113], "2": [8, 33, 58, 2014, 2046, 2050, 2051, 2053, 2072, 2075, 2109, 2110, 2113], "simpl": [8, 2016, 2017, 2057, 2065, 2078], "easi": 8, "3": [8, 48, 58], "first": [8, 2070], "best": [8, 2046, 2059, 2072, 2097], "class": [8, 40, 64, 2014, 2016, 2017, 2019, 2067, 2069, 2070, 2088], "languag": [8, 2013, 2014, 2016, 2017, 2018], "interoper": 8, "mechan": [9, 2040, 2043, 2054], "summari": [9, 2072], "modul": [9, 10, 57, 59, 64, 1527, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 2014, 2015, 2016, 2017, 2019, 2037, 2043, 2049, 2057, 2062, 2070, 2072, 2109], "core": [9, 10, 2108], "lead": [9, 10], "bdfl": [9, 10], "nomin": [9, 2017], "confirm": 9, "remov": [9, 1755, 2034], "The": [9, 64, 2017, 2043, 2100], "re": [9, 2070], "scope": 9, "project": 9, "decis": 9, "uncontroversi": 9, "chang": [9, 48, 58, 2053, 2104], "controversi": 9, "gener": [9, 17, 29, 60, 64, 90, 2044, 2050, 2061, 2072, 2073, 2091, 2101, 2102, 2115, 2120], "polici": [9, 2100], "faq": [9, 2063], "respons": 10, "nn": [10, 57, 59, 1587, 1589, 1590, 1591, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, 1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, 1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1721, 1722, 1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1733, 1735, 1736, 1737, 1738, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1759, 1760, 1761, 1762, 1763, 1764, 1765, 1766, 1767, 1768, 1769, 2016, 2017, 2037, 2038, 2039, 2040, 2041, 2043, 2046, 2049, 2062, 2075, 2082, 2109], "optim": [10, 11, 32, 1789, 1790, 1791, 1792, 1793, 2042, 2043, 2046, 2069, 2077, 2078, 2091], "compil": [10, 59, 976, 978, 979, 980, 981, 982, 983, 984, 985, 986, 2046, 2053, 2094, 2095, 2096, 2104, 2105, 2111, 2113], "jit": [10, 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, 1283, 1284, 1285, 1286, 1287, 1289, 1290, 1291, 1292, 2014, 2017, 2020], "torchscript": [10, 15, 2014, 2015, 2016, 2017, 2019, 2045, 2056, 2064, 2067, 2068, 2070], "fx": [10, 64, 65, 81, 82, 83, 85, 88, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 2057, 2072, 2075, 2093], "torchdynamo": [10, 2048, 2064, 2065, 
2066, 2098, 2105, 2113], "distribut": [10, 28, 30, 31, 32, 33, 34, 35, 48, 2037, 2040, 2048, 2055, 2057, 2077, 2078, 2104], "rng": 10, "multiprocess": [10, 45, 2033, 2046, 2059, 2063], "dataload": [10, 2061], "linear": [10, 11, 735, 736, 759, 767, 784, 1174, 1514, 1650, 2037, 2040, 2060, 2082], "algebra": [10, 11, 2060, 2082], "linalg": [10, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1318, 1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1342, 1343, 2022, 2060], "spars": [10, 1901, 1902, 1904, 1905, 1906, 1907, 1908, 1909, 2037, 2040, 2082], "nestedtensor": 10, "nest": [10, 2036], "maskedtensor": [10, 2024], "mask": [10, 2024], "fast": [10, 54, 2054, 2078], "fourier": [10, 54], "transform": [10, 35, 54, 56, 57, 59, 61, 64, 1571, 2037, 2049, 2057, 2104, 2112], "fft": [10, 54, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146], "inductor": 10, "gpu": [10, 28, 2037, 2040, 2042, 2051, 2053, 2107], "triton": [10, 2107], "nvfuser": 10, "amd": [10, 2060], "rocm": [10, 19, 2055], "hip": [10, 2055], "tool": [10, 17, 2073, 2082], "c10": 10, "dispatch": 10, "onnx": [10, 81, 82, 89, 2064, 2065, 2066, 2067, 2068], "export": [10, 52, 53, 2064, 2065, 2067, 2070, 2072, 2075, 2091, 2095, 2104], "mobil": 10, "edg": [10, 2014, 2070], "model": [10, 15, 33, 52, 73, 2012, 2042, 2051, 2056, 2065, 2069, 2070, 2072, 2075, 2095, 2100, 2106, 2107], "compress": [10, 2082], "window": [10, 1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2063, 2080], "appl": 10, "m1": 10, "powerpc": 10, "aarch64": 10, "librari": [10, 2013, 2021, 2061], "xla": 10, "torchserv": 10, "torchvis": [10, 58], "torchtext": 10, "torchaudio": 10, "torchrec": 10, "torchx": 10, "torchdata": 10, "torcharrow": 10, "complex": [11, 987, 2043, 2054], "number": [11, 17, 48, 2043, 2045, 2051, 2061, 2091, 2120], "creat": [11, 2014, 2021, 2035], "transit": [11, 48], "from": [11, 48, 59, 64, 2012, 2021, 2034, 2053, 2063, 2067, 2070], "old": 11, "represent": [11, 81], "access": [11, 2016, 2070, 2113], "real": [11, 485, 1845, 2054], "imag": [11, 313, 1244], "angl": [11, 119, 701], "ab": [11, 91, 684], "serial": [11, 52, 2062, 2091], "control": [12, 38, 52, 60, 64, 71, 2061, 2091, 2105], "flow": [12, 52, 60, 64, 71, 2072, 2091], "cond": [12, 74, 990, 1305], "exampl": [12, 33, 42, 52, 64, 2042, 2048, 2049, 2050, 2053, 2065, 2067, 2078, 2099], "invari": 12, "higher_ord": 12, "__config__": 13, "cpp_extens": 14, "extend": [15, 37, 2021, 2049, 2050, 2113], "extens": [15, 17, 2056, 2063], "author": [15, 64], "stream": [16, 17, 18, 999, 1006, 1014, 1084, 1399, 1410, 1983, 2008, 2032, 2046, 2120], "event": [16, 17, 39, 41, 1011, 1386, 1398, 1982, 2031, 2032, 2120], "random": [17, 23, 60, 2051, 2061, 2076, 2091, 2120], "commun": [17, 24, 28, 2013, 2052], "collect": [17, 28], "beta": [17, 35], "memori": [17, 23, 2046, 2051, 2055, 2057, 2061, 2115], "manag": [17, 29, 70, 2033, 2046, 2055, 2070, 2112], "nvidia": [17, 2060], "nvtx": [17, 1068, 1069, 1070, 1071], "jiter": [17, 1051, 1052], "tunableop": [17, 19], "sanit": [17, 18], "prototyp": [17, 2072, 2073], "usag": [18, 31, 48, 2046, 2050, 2056, 2057, 2063, 2109, 2115], "enabl": [19, 2055], "tune": [19, 2045], "separ": [19, 2049], "file": [19, 28, 2033, 2062, 2070], "output": [19, 53, 2054, 2101], "A": [19, 64, 2057, 2065, 2101], "note": [19, 48, 81, 2013, 2017, 2046, 2052, 2072, 2077], 
"current": [19, 2035], "tunabl": 19, "tunablegemm": 19, "environ": [20, 25, 28, 48, 2026, 2053, 2056, 2070, 2090, 2107, 2116, 2117], "data": [23, 52, 60, 72, 2017, 2048, 2051, 2067, 2073, 2088], "dataset": 23, "map": [23, 78, 2055], "style": 23, "iter": [23, 2016], "load": [23, 1281, 1345, 2012, 2062, 2070, 2072], "order": [23, 2043], "sampler": 23, "batch": [23, 58, 2060, 2069], "non": [23, 52, 64, 2037, 2040, 2043, 2046, 2060, 2070, 2072], "work": [23, 57, 2025, 2042, 2051, 2082, 2103, 2104, 2111], "collate_fn": 23, "singl": [23, 48, 2042], "multi": [23, 28, 48, 2037, 2040], "platform": 23, "pin": [23, 2046], "ddp": 24, "hook": [24, 2043, 2057, 2109], "us": [24, 28, 33, 64, 2016, 2043, 2046, 2049, 2054, 2065, 2067, 2069, 2070, 2072, 2082, 2104, 2106, 2111, 2113, 2115], "what": [24, 33, 53, 56, 58, 61, 2024, 2043, 2070, 2102, 2104, 2110, 2111], "doe": [24, 33, 2043, 2103, 2104], "powersgd": 24, "state": [24, 2057, 2070, 2115], "acknowledg": 24, "deploi": 26, "ha": 26, "been": 26, "move": 26, "multipi": 26, "determinist": 27, "come": [28, 2043], "which": [28, 2043, 2104], "choos": 28, "network": [28, 2046, 2051, 2057], "interfac": [28, 1279, 2055], "other": [28, 2037, 2061, 2070, 2082, 2091, 2103], "nccl": [28, 2046], "basic": [28, 2017, 2035, 2050, 2077, 2088, 2111], "initi": [28, 2037, 2057, 2088], "tcp": 28, "share": [28, 2033, 2046, 2070, 2079], "system": [28, 2017, 2033, 2050], "post": [28, 2072], "shutdown": 28, "reiniti": 28, "kei": [28, 81, 2063], "valu": [28, 76, 616, 2016, 2017, 2060, 2074, 2079], "store": [28, 47], "group": 28, "devicemesh": 28, "point": [28, 2056], "synchron": [28, 1007, 1085, 1395, 1411, 2009], "asynchron": [28, 2017, 2046, 2059], "third": 28, "parti": 28, "launch": [28, 48, 2111], "spawn": [28, 2033], "applic": 28, "breakpoint": 28, "monitor": [28, 2030], "barrier": 28, "torch_distributed_debug": 28, "log": [28, 50, 371, 1347, 2056], "join": [29, 2091], "elast": [31, 37, 48], "advanc": [31, 2057], "plugin": 31, "pipelin": 33, "parallel": [33, 34, 1700, 2046, 2048, 2051, 2091], "why": [33, 56, 61, 2046, 2054, 2070, 2082, 2104], "i": [33, 53, 61, 2024, 2043, 2070, 2102, 2103, 2104, 2110], "step": [33, 1792, 2069, 2106], "pipelinestag": 33, "execut": [33, 2017, 2043, 2046, 2070, 2104], "pipelineschedul": 33, "option": [33, 58, 2016, 2045, 2063, 2069], "split": [33, 549, 1916], "hug": 33, "face": 33, "technic": 33, "deep": [33, 52, 2094, 2101], "dive": [33, 52, 2094, 2101], "your": [33, 2070], "own": [33, 2043], "schedul": [33, 2069], "microbatch": 33, "stage": 33, "probabl": 35, "score": 35, "pathwis": 35, "deriv": [35, 2043], "exponentialfamili": 35, "bernoulli": [35, 155, 946], "binomi": 35, "categor": 35, "cauchi": 35, "chi2": 35, "continuousbernoulli": 35, "dirichlet": 35, "exponenti": [35, 1884], "fishersnedecor": 35, "gamma": 35, "geometr": 35, "gumbel": 35, "halfcauchi": 35, "halfnorm": 35, "independ": 35, "inversegamma": 35, "kumaraswami": 35, "lkjcholeski": 35, "laplac": 35, "lognorm": 35, "lowrankmultivariatenorm": 35, "mixturesamefamili": 35, "multinomi": [35, 423, 1413], "multivariatenorm": 35, "negativebinomi": 35, "normal": [35, 1670, 1773, 2037, 2069], "onehotcategor": 35, "pareto": 35, "poisson": [35, 1820], "relaxedbernoulli": 35, "logitrelaxedbernoulli": 35, "relaxedonehotcategor": 35, "studentt": 35, "transformeddistribut": 35, "uniform": 35, "vonmis": 35, "weibul": 35, "wishart": 35, "kl": 35, "diverg": [35, 2019], "constraint": [35, 2046], "registri": [35, 47, 89], "dlpack": 36, "agent": 37, "server": [37, 47, 50], "concept": 37, "watchdog": 
37, "health": 37, "plane": 38, "launcher": 39, "rendezv": [39, 47, 48], "handler": [39, 44, 47, 2051], "metric": [39, 44], "error": [40, 2051, 2063, 2072, 2073, 2113], "propag": [40, 2035], "object": [41, 73, 2070], "torchelast": 43, "kubernet": 43, "multipl": [45, 2042, 2046, 2049], "worker": [45, 48, 2051], "quickstart": 46, "dataclass": 47, "except": [47, 2051], "dynam": [47, 52, 60, 64, 75, 76, 2072, 2075, 2100, 2103], "c10d": 47, "etcd": 47, "legaci": 47, "torchrun": 48, "node": [48, 53, 82, 85, 88, 925, 926, 927, 928, 929, 2043], "stack": [48, 1921, 2072], "fault": 48, "toler": 48, "size": [48, 539, 2036, 2052, 2081], "failur": 48, "min": [48, 413, 1376], "max": [48, 409, 1371], "4": [48, 58, 2053], "up": [48, 2053, 2104], "membership": [48, 2017], "definit": [48, 2017], "deploy": [48, 2056], "import": [48, 2012, 2063, 2070, 2103], "notic": [48, 2012], "subprocess": [49, 2033], "handl": [49, 2104], "retriev": 49, "subprocesshandl": 49, "expir": 50, "timer": 50, "client": 50, "write": [50, 64, 2043, 2067, 2112], "info": [50, 2118], "train": [51, 2042, 2053, 2057, 2059, 2072, 2104], "script": [51, 1285, 2014, 2063, 2067], "exist": 52, "framework": [52, 2070, 2077], "an": [52, 2012, 2067, 2069, 2070], "strict": 52, "express": [52, 2016, 2017], "special": [52, 2017, 2043, 2083, 2101], "shape": [52, 60, 75, 525, 2067, 2100, 2101, 2103], "primit": [52, 2017], "contain": [52, 2037], "limit": [52, 60, 64, 2012, 2050, 2067, 2098, 2105, 2109], "break": [52, 2016, 2017, 2101, 2104, 2111, 2113], "depend": [52, 60, 2065, 2070, 2078], "miss": 52, "fake": [52, 2103], "meta": [52, 2017, 2025], "abstract": 52, "kernel": [52, 941, 2055, 2072, 2107, 2111], "read": [52, 56, 2067, 2094, 2110], "addit": [52, 81], "link": 52, "user": [52, 2070, 2079, 2094], "ir": [53, 2108, 2112], "assumpt": [53, 2079], "exportedprogram": 53, "call_funct": 53, "metadata": [53, 925, 2056], "placehold": 53, "get_attr": 53, "symint": [53, 2100], "faketensor": 53, "pytre": 53, "abl": 53, "helper": 54, "fullyshardeddataparallel": 55, "func": [56, 57, 59, 61, 1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173, 1174, 1175, 1176, 1177, 1178, 2049, 2050, 2104], "ar": [56, 61, 2012, 2043, 2070, 2101, 2104], "compos": [56, 61, 2099], "patch": [58, 2070], "norm": [58, 455, 1330, 1772], "": [58, 2070, 2104, 2110], "happen": 58, "batchnorm": 58, "paramet": [58, 1718, 2016, 2069], "functorch": [58, 59], "eval": [58, 2043], "migrat": [59, 2014], "make_funct": 59, "combine_state_for_ensembl": 59, "ux": 60, "vmap": [60, 61, 896, 1178, 1977, 2049, 2050, 2104], "mutat": [60, 79, 2091], "arbitrari": [60, 2070], "structur": [60, 72, 2017, 2082], "out": [60, 2034, 2051], "item": [60, 354], "nonzero": [60, 454, 1771], "friend": 60, "whirlwind": 61, "tour": 61, "grad": [61, 292, 918, 1168, 2043, 2046, 2104], "auto": 61, "vector": 61, "vjp": [61, 917, 1177], "jacobian": [61, 914], "product": [61, 2022, 2104], "jvp": [61, 895, 915, 1173, 2050], "jacrev": [61, 1172], "jacfwd": [61, 1171], "hessian": [61, 912, 1170], "__future__": 62, "futur": 63, "quick": 64, "primer": 64, "manipul": [64, 2035], "direct": 64, "subgraph": [64, 2112], "rewrit": [64, 2112], "With": [64, 2019], "replace_pattern": 64, "proxi": 64, "retrac": 64, "interpret": [64, 2014], "pattern": [64, 2016, 2067, 2070], "introduct": [64, 2024, 2036, 2072, 2101], "pitfal": [64, 2067], "pdb": 64, "print": [64, 2016, 2017], "to_fold": 64, "graphmodul": 64, "avail": [64, 2053], "debugg": 64, "symbol": [64, 84, 87, 2067, 2072, 2091, 2101], "trace": [64, 593, 1289, 1948, 2014, 2067, 2071, 2072, 
2104, 2105, 2111], "static": [64, 2067, 2072, 2101], "tracer": [64, 2014], "leaf": 64, "miscellanea": 64, "symbolic_shap": [65, 1195, 1196, 1197, 1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213], "exportdb": 66, "tag": [66, 2091], "support": [66, 2015, 2017, 2024, 2034, 2035, 2036, 2050, 2067, 2068, 2072, 2082, 2104, 2109], "assume_constant_result": [66, 77, 979], "autograd_funct": 66, "class_method": 66, "cond_branch_class_method": [66, 74, 75], "cond_branch_nested_funct": [66, 74, 75], "cond_branch_nonlocal_vari": [66, 74, 75], "cond_closed_over_vari": [66, 69, 74], "cond_operand": [66, 74, 75], "cond_pred": [66, 74, 75], "constrain_as_size_exampl": [66, 76, 77], "constrain_as_value_exampl": [66, 76, 77], "decor": 66, "dictionari": [66, 72, 2017], "dynamic_shape_assert": [66, 67], "dynamic_shape_constructor": [66, 75], "dynamic_shape_if_guard": [66, 71, 75], "dynamic_shape_map": [66, 75, 78], "dynamic_shape_sl": [66, 75], "dynamic_shape_view": [66, 75], "fn_with_kwarg": [66, 72], "list_contain": [66, 67, 72, 75], "list_unpack": [66, 71, 72], "nested_funct": [66, 69], "null_context_manag": [66, 70], "pytree_flatten": 66, "scalar_output": [66, 75], "specialized_attribut": 66, "static_for_loop": [66, 71], "static_if": [66, 71], "tensor_setattr": [66, 68], "type_reflection_method": [66, 68], "user_input_mut": [66, 79], "Not": [66, 2019], "yet": 66, "dynamic_shape_round": [66, 68, 75], "model_attr_mut": [66, 73], "optional_input": [66, 73], "torch_sym_min": [66, 80], "assert": [67, 2017, 2055], "builtin": [68, 2015], "closur": [69, 2069], "escap": [77, 2104], "hatch": [77, 2104], "fxe0007": 81, "fxe0008": 82, "fxe0010": 83, "pass": [83, 2017, 2046, 2059, 2072, 2078, 2112], "fxe0011": 84, "call": [84, 2016, 2017, 2050, 2104], "fxe0012": 85, "unsupport": [85, 2016, 2017, 2019, 2067, 2068], "analysi": 85, "fxe0013": 86, "fxe0014": 87, "find": [87, 89, 2070, 2111], "opschema": 87, "match": [87, 2016, 2035], "fxe0015": 88, "insert": 88, "fxe0016": 89, "overload": 89, "abs_": 92, "absolut": [93, 685], "absolute_": 94, "aco": [95, 686], "acos_": 96, "acosh": [97, 687], "acosh_": 98, "add_": 100, "addbmm": [101, 689], "addbmm_": 102, "addcdiv": [103, 690], "addcdiv_": 104, "addcmul": [105, 691], "addcmul_": 106, "addmm": [107, 692, 1901], "addmm_": 108, "addmv": [109, 693], "addmv_": 110, "addr": [111, 694], "addr_": 112, "adjoint": [113, 695], "all": [114, 696, 2049, 2067, 2069], "allclos": [115, 697], "amax": [116, 698], "amin": [117, 699], "aminmax": [118, 700], "ani": [120, 702, 2017, 2104, 2110], "apply_": 121, "arcco": [122, 870], "arccos_": 123, "arccosh": [124, 871], "arccosh_": 125, "arcsin": [126, 872], "arcsin_": 127, "arcsinh": [128, 873], "arcsinh_": 129, "arctan": [130, 874], "arctan2": [131, 875], "arctan2_": 132, "arctan_": 133, "arctanh": [134, 876], "arctanh_": 135, "argmax": [136, 878], "argmin": [137, 879], "argsort": [138, 880], "argwher": [139, 881], "as_strid": [140, 882], "as_subclass": 141, "asin": [142, 885], "asin_": 143, "asinh": [144, 886], "asinh_": 145, "atan": [146, 887], "atan2": [147, 888], "atan2_": 148, "atan_": 149, "atanh": [150, 889], "atanh_": 151, "backward": [152, 893, 897, 2043, 2044, 2046, 2054, 2078], "baddbmm": [153, 944], "baddbmm_": 154, "bernoulli_": 156, "bincount": [158, 947], "bitwise_and": [159, 948], "bitwise_and_": 160, "bitwise_left_shift": [161, 949], "bitwise_left_shift_": 162, "bitwise_not": [163, 950], "bitwise_not_": 164, "bitwise_or": [165, 951], "bitwise_or_": 166, "bitwise_right_shift": 
[167, 952], "bitwise_right_shift_": 168, "bitwise_xor": [169, 953], "bitwise_xor_": 170, "bmm": [171, 956], "bool": 172, "broadcast_to": [173, 959], "byte": 174, "cauchy_": 175, "ccol_indic": 176, "cdoubl": 177, "ceil": [178, 965], "ceil_": 179, "cfloat": 180, "chalf": 181, "char": 182, "choleski": [183, 967, 1303], "cholesky_invers": [184, 968], "cholesky_solv": [185, 969], "chunk": [186, 970], "clamp": [187, 774, 971], "clamp_": 188, "clip": [189, 972, 2042], "clip_": 190, "clone": [191, 973], "coalesc": 192, "col_indic": 193, "conj": [194, 991], "conj_phys": [195, 992], "conj_physical_": 196, "contigu": 197, "copy_": 198, "copysign": [199, 993], "copysign_": 200, "corrcoef": [201, 994], "co": [202, 995], "cos_": 203, "cosh": [204, 996], "cosh_": 205, "count_nonzero": [206, 997], "cov": [207, 998], "cross": [209, 1008, 1306, 2043], "crow_indic": 210, "cummax": [212, 1088], "cummin": [213, 1089], "cumprod": [214, 1090], "cumprod_": 215, "cumsum": [216, 1091], "cumsum_": 217, "data_ptr": 218, "deg2rad": [219, 1093], "dense_dim": 220, "dequant": [221, 1094, 2072], "det": [222, 1095, 1307], "detach": 223, "detach_": 224, "devic": [225, 1030, 1404, 1987, 2025, 2046, 2060, 2085], "diag": [226, 1096], "diag_emb": [227, 1097], "diagflat": [228, 1098], "diagon": [229, 1099, 1308], "diagonal_scatt": [230, 1100], "diff": [231, 1101], "digamma": [232, 1102], "digamma_": 233, "dim": [234, 2034], "dim_ord": 235, "dist": [236, 1103], "div": [237, 1104, 2062], "div_": 238, "divid": [239, 1105], "divide_": 240, "dot": [241, 1106], "doubl": 242, "dsplit": [243, 1107], "element_s": 244, "eq": [245, 1114], "eq_": 246, "equal": [247, 1115], "erf": [248, 1116], "erf_": 249, "erfc": [250, 1117], "erfc_": 251, "erfinv": [252, 1118], "erfinv_": 253, "exp": [254, 1119], "exp_": 255, "expand": 256, "expand_a": 257, "expm1": [258, 1121], "expm1_": 259, "exponential_": 260, "fill_": 261, "fill_diagonal_": 262, "fix_": 264, "flatten": [265, 1148, 1472], "flip": [266, 1149], "fliplr": [267, 1150], "flipud": [268, 1151], "float": [269, 2062], "float_pow": [270, 1152], "float_power_": 271, "floor": [272, 1153], "floor_": 273, "floor_divid": [274, 1154], "floor_divide_": 275, "fmax": [276, 1155], "fmin": [277, 1156], "fmod": [278, 1157], "fmod_": 279, "frac": [280, 1158], "frac_": 281, "frexp": [282, 1159], "gather": [283, 1023, 1214], "gcd": [284, 1215], "gcd_": 285, "ge": [286, 1216], "ge_": 287, "geometric_": 288, "geqrf": [289, 1217], "ger": [290, 1218], "get_devic": 291, "greater": [293, 1228], "greater_": 294, "greater_equ": [295, 1229], "greater_equal_": 296, "gt": [297, 1230], "gt_": 298, "half": 299, "hardshrink": [300, 1482, 1636], "heavisid": [301, 1233], "histc": [302, 1234], "histogram": [303, 1235], "hsplit": [304, 1237], "hypot": [305, 1240], "hypot_": 306, "i0": [307, 1241], "i0_": 308, "igamma": [309, 1242], "igamma_": 310, "igammac": [311, 1243], "igammac_": 312, "index_add": [314, 1245], "index_add_": 315, "index_copi": [316, 1246], "index_copy_": 317, "index_fil": 318, "index_fill_": 319, "index_put": 320, "index_put_": 321, "index_reduc": [322, 1247], "index_reduce_": 323, "index_select": [324, 1248], "indic": [325, 2013], "inner": [326, 1250], "int": [327, 2101], "int_repr": 328, "invers": [329, 1251, 2022], "is_coalesc": 330, "is_complex": [331, 1252], "is_conj": [332, 1253], "is_contigu": 333, "is_cuda": 334, "is_floating_point": [335, 1255], "is_infer": 336, "is_leaf": 337, "is_meta": 338, "is_pin": 339, "is_quant": 340, "is_set_to": 341, "is_shar": 342, "is_sign": 343, "is_spars": 344, 
"is_sparse_csr": 345, "isclos": [346, 1262], "isfinit": [347, 1263], "isinf": [348, 1265], "isnan": [349, 1266], "isneginf": [350, 1267], "isposinf": [351, 1268], "isreal": [352, 1269], "istft": [353, 1270], "items": 355, "kthvalu": [356, 1295], "lcm": [357, 1296], "lcm_": 358, "ldexp": [359, 1297], "ldexp_": 360, "le": [361, 1298], "le_": 362, "lerp": [363, 1299], "lerp_": 364, "less": [365, 1300], "less_": 366, "less_equ": [367, 1301], "less_equal_": 368, "lgamma": [369, 1302], "lgamma_": 370, "log10": [372, 1348], "log10_": 373, "log1p": [374, 1349], "log1p_": 375, "log2": [376, 1350], "log2_": 377, "log_": 378, "log_normal_": 379, "logaddexp": [380, 1351], "logaddexp2": [381, 1352], "logcumsumexp": [382, 1353], "logdet": [383, 1354], "logical_and": [384, 1355], "logical_and_": 385, "logical_not": [386, 1356], "logical_not_": 387, "logical_or": [388, 1357], "logical_or_": 389, "logical_xor": [390, 1358], "logical_xor_": 391, "logit": [392, 1359], "logit_": 393, "logsumexp": [394, 1361], "long": 395, "lt": [396, 1362], "lt_": 397, "lu": [398, 1320, 1363], "lu_solv": [399, 1323, 1364], "map_": 400, "masked_fil": 401, "masked_fill_": 402, "masked_scatt": 403, "masked_scatter_": 404, "masked_select": [405, 1367], "matmul": [406, 1324, 1368], "matrix_exp": [407, 1325, 1369], "matrix_pow": [408, 1327, 1370], "maximum": [410, 1372], "mean": [411, 1373], "median": [412, 1374], "minimum": [414, 1377, 2053], "mm": [415, 1378, 1905], "module_load": 417, "moveaxi": [418, 1380], "movedim": [419, 1381], "msort": [420, 1396], "mul": [421, 1412], "mul_": 422, "multipli": [424, 1414], "multiply_": 425, "mv": [426, 1415], "mvlgamma": [427, 1416], "mvlgamma_": 428, "nan_to_num": [429, 1417], "nan_to_num_": 430, "nanmean": [431, 1418], "nanmedian": [432, 1419], "nanquantil": [433, 1420], "nansum": [434, 1421], "narrow": [435, 1422], "narrow_copi": [436, 1423], "nbyte": 437, "ndim": 438, "ndimens": 439, "ne": [440, 1424], "ne_": 441, "neg": [442, 444, 1425, 1426], "neg_": 443, "negative_": 445, "nelement": 446, "new_empti": 447, "new_ful": 448, "new_on": 449, "new_tensor": 450, "new_zero": 451, "nextaft": [452, 1427], "nextafter_": 453, "normal_": 456, "not_equ": [457, 1774], "not_equal_": 458, "numel": [459, 1775], "numpi": [460, 2067, 2104], "orgqr": [461, 1814], "ormqr": [462, 1815], "outer": [463, 1816], "permut": [464, 1818, 2034], "pin_memori": 465, "pinvers": [466, 1819], "polygamma": [467, 1822], "polygamma_": 468, "posit": [469, 1823], "pow": [470, 1824], "pow_": 471, "prod": [472, 1825], "put_": 473, "q_per_channel_axi": 474, "q_per_channel_scal": 475, "q_per_channel_zero_point": 476, "q_scale": 477, "q_zero_point": 478, "qr": [479, 1332, 1827], "qscheme": 480, "quantil": [481, 1828], "rad2deg": [482, 1835], "random_": 483, "ravel": [484, 1844], "reciproc": [486, 1846], "reciprocal_": 487, "record_stream": 488, "register_hook": [489, 928], "register_post_accumulate_grad_hook": 490, "remaind": [491, 1847], "remainder_": 492, "renorm": [493, 1848], "renorm_": 494, "repeat": 495, "repeat_interleav": [496, 1849], "requires_grad": [497, 2043], "requires_grad_": 498, "reshap": [499, 1850], "reshape_a": 500, "resize_": 501, "resize_as_": 502, "resolve_conj": [503, 1851], "resolve_neg": [504, 1852], "retain_grad": 505, "retains_grad": 506, "roll": [507, 1854], "rot90": [508, 1855], "round": [509, 1856], "round_": 510, "row_indic": 511, "rsqrt": [512, 1858], "rsqrt_": 513, "scatter": [514, 1025, 1860], "scatter_": 515, "scatter_add": [516, 1861], "scatter_add_": 517, "scatter_reduc": [518, 1862], 
"scatter_reduce_": 519, "select": [520, 1865, 2024], "select_scatt": [521, 1866], "set_": 522, "sgn": [523, 1878], "sgn_": 524, "share_memory_": 526, "short": 527, "sigmoid": [528, 762, 1558, 1687, 1879], "sigmoid_": 529, "sign": [530, 1880], "sign_": 531, "signbit": [532, 1892], "sin": [533, 1893], "sin_": 534, "sinc": [535, 1894], "sinc_": 536, "sinh": [537, 1895], "sinh_": 538, "slice_scatt": [540, 1896], "slogdet": [541, 1333, 1897], "smm": [542, 1898], "softmax": [543, 1561, 1691, 1899, 1907], "sort": [544, 1900], "sparse_dim": 545, "sparse_mask": 546, "sparse_resize_": 547, "sparse_resize_and_clear_": 548, "sqrt": [550, 1917], "sqrt_": 551, "squar": [552, 1918], "square_": 553, "squeez": [554, 1919], "squeeze_": 555, "sspaddmm": [556, 1920], "std": [557, 1922], "stft": [558, 1924], "storag": [559, 2084], "storage_offset": 560, "storage_typ": 561, "stride": 562, "sub": [563, 1925], "sub_": 564, "subtract": [565, 1926], "subtract_": 566, "sum": [567, 1909, 1927, 2037], "sum_to_s": 568, "svd": [569, 1337, 1928], "swapax": [570, 1930], "swapdim": [571, 1931], "t": [572, 1938, 2017, 2051, 2104], "t_": 573, "take": [574, 1939, 2069], "take_along_dim": [575, 1940], "tan": [576, 1941], "tan_": 577, "tanh": [578, 1568, 1696, 1942], "tanh_": 579, "tensor_split": [580, 1944], "tile": [581, 1946], "to_dens": 583, "to_mkldnn": 584, "to_spars": 585, "to_sparse_bsc": 586, "to_sparse_bsr": 587, "to_sparse_coo": 588, "to_sparse_csc": 589, "to_sparse_csr": 590, "tolist": 591, "topk": [592, 1947], "transpos": [594, 1949], "transpose_": 595, "triangular_solv": [596, 1952], "tril": [597, 1953], "tril_": 598, "triu": [599, 1955], "triu_": 600, "true_divid": [601, 1957], "true_divide_": 602, "trunc": [603, 1958], "trunc_": 604, "type_a": 606, "unbind": [607, 1959, 2036], "unflatten": [608, 1578, 1960], "unfold": [609, 1579, 1703], "uniform_": 610, "uniqu": [611, 1961], "unique_consecut": [612, 1962], "unsqueez": [613, 1964], "unsqueeze_": 614, "untyped_storag": 615, "var": [617, 1972], "vdot": [618, 1974], "view": [619, 2024, 2062, 2086, 2111], "view_a": 620, "vsplit": [621, 1978], "where": [622, 1980, 2012], "xlogi": [623, 1981], "xlogy_": 624, "zero_": 626, "_assert": 627, "_foreach_ab": 628, "_foreach_abs_": 629, "_foreach_aco": 630, "_foreach_acos_": 631, "_foreach_asin": 632, "_foreach_asin_": 633, "_foreach_atan": 634, "_foreach_atan_": 635, "_foreach_ceil": 636, "_foreach_ceil_": 637, "_foreach_co": 638, "_foreach_cos_": 639, "_foreach_cosh": 640, "_foreach_cosh_": 641, "_foreach_erf": 642, "_foreach_erf_": 643, "_foreach_erfc": 644, "_foreach_erfc_": 645, "_foreach_exp": 646, "_foreach_exp_": 647, "_foreach_expm1": 648, "_foreach_expm1_": 649, "_foreach_floor": 650, "_foreach_floor_": 651, "_foreach_frac": 652, "_foreach_frac_": 653, "_foreach_lgamma": 654, "_foreach_lgamma_": 655, "_foreach_log": 656, "_foreach_log10": 657, "_foreach_log10_": 658, "_foreach_log1p": 659, "_foreach_log1p_": 660, "_foreach_log2": 661, "_foreach_log2_": 662, "_foreach_log_": 663, "_foreach_neg": 664, "_foreach_neg_": 665, "_foreach_reciproc": 666, "_foreach_reciprocal_": 667, "_foreach_round": 668, "_foreach_round_": 669, "_foreach_sigmoid": 670, "_foreach_sigmoid_": 671, "_foreach_sin": 672, "_foreach_sin_": 673, "_foreach_sinh": 674, "_foreach_sinh_": 675, "_foreach_sqrt": 676, "_foreach_sqrt_": 677, "_foreach_tan": 678, "_foreach_tan_": 679, "_foreach_trunc": 680, "_foreach_trunc_": 681, "_foreach_zero_": 682, "_log": [683, 2023], "set_log": 683, "bnrelu2d": [703, 726], "bnrelu3d": [704, 727], "convbn1d": [705, 
715], "convbn2d": [706, 716], "convbn3d": [707, 717], "convbnrelu1d": [708, 718], "convbnrelu2d": [709, 719], "convbnrelu3d": [710, 720], "convrelu1d": [711, 728], "convrelu2d": [712, 721, 729], "convrelu3d": [713, 722, 730], "linearrelu": [714, 723, 731, 732], "freeze_bn_stat": 724, "update_bn_stat": 725, "conv2d": [733, 742, 776, 1455, 1609], "conv3d": [734, 743, 777, 1456, 1610], "lstm": [737, 765, 1497, 2061], "multiheadattent": [738, 1533], "batchnorm2d": [739, 1442], "batchnorm3d": [740, 1443], "conv1d": [741, 775, 1454, 1608], "convtranspose1d": [744, 1457], "convtranspose2d": [745, 1458], "convtranspose3d": [746, 1459], "elu": [747, 778, 1468, 1622], "embed": [748, 1469, 1624], "embeddingbag": [749, 1470], "fxfloatfunct": 750, "floatfunct": 751, "groupnorm": [752, 1481], "hardswish": [753, 780, 1484, 1638], "instancenorm1d": [754, 1489], "instancenorm2d": [755, 1490], "instancenorm3d": [756, 1491], "layernorm": [757, 1499], "leakyrelu": [758, 1513], "qfunction": 760, "relu6": [761, 1548, 1680], "gru": [763, 1478], "grucel": [764, 1479], "lstmcell": [766, 1498], "rnncell": [768, 1545], "adaptive_avg_pool2d": [769, 1593], "adaptive_avg_pool3d": [770, 1594], "avg_pool2d": [771, 1601], "avg_pool3d": [772, 1602], "celu": [773, 1445, 1607], "hardsigmoid": [779, 1483, 1637], "hardtanh": [781, 1485, 1639], "interpol": [782, 1644], "leaky_relu": [783, 1648], "max_pool1d": [785, 1658], "max_pool2d": [786, 1659], "threshold": [787, 1570, 1698], "upsampl": [788, 1580, 1704], "upsample_bilinear": [789, 1705], "upsample_nearest": [790, 1706], "dequantstub": 791, "quantstub": 792, "quantwrapp": 793, "add_quant_dequ": 794, "backendconfig": 795, "backendpatternconfig": 796, "dtypeconfig": 797, "dtypewithconstraint": 798, "observationtyp": 799, "convert": [800, 2103], "default_eval_fn": 801, "fakequant": [802, 2072], "fakequantizebas": 803, "fixedqparamsfakequant": 804, "fusedmovingavgobsfakequant": 805, "default_fake_qu": 806, "default_fused_act_fake_qu": 807, "default_fused_per_channel_wt_fake_qu": 808, "default_fused_wt_fake_qu": 809, "default_histogram_fake_qu": 810, "default_per_channel_weight_fake_qu": 811, "default_weight_fake_qu": 812, "disable_fake_qu": 813, "disable_observ": 814, "enable_fake_qu": 815, "enable_observ": 816, "fuse_modul": 817, "convertcustomconfig": 818, "fusecustomconfig": 819, "preparecustomconfig": 820, "standalonemoduleconfigentri": 821, "histogramobserv": 822, "minmaxobserv": 823, "movingaverageminmaxobserv": 824, "movingaverageperchannelminmaxobserv": 825, "noopobserv": 826, "observerbas": 827, "perchannelminmaxobserv": 828, "placeholderobserv": 829, "recordingobserv": 830, "default_debug_observ": 831, "default_dynamic_quant_observ": 832, "default_float_qparams_observ": 833, "default_histogram_observ": 834, "default_observ": 835, "default_per_channel_weight_observ": 836, "default_placeholder_observ": 837, "default_weight_observ": 838, "get_observer_state_dict": 839, "load_observer_state_dict": 840, "prepar": [841, 2072, 2075], "prepare_qat": 842, "propagate_qconfig": 843, "model_is_export": 844, "qconfig": [845, 2072, 2075], "default_activation_only_qconfig": 846, "default_debug_qconfig": 847, "default_dynamic_qconfig": 848, "default_per_channel_qconfig": 849, "default_qat_qconfig": 850, "default_qat_qconfig_v2": 851, "default_qconfig": 852, "default_weight_only_qconfig": 853, "float16_dynamic_qconfig": 854, "float16_static_qconfig": 855, "float_qparams_weight_only_qconfig": 856, "per_channel_dynamic_qconfig": 857, "qconfigmap": 858, "get_default_qat_qconfig_map": 
859, "get_default_qconfig_map": 860, "quantiz": [861, 2037, 2057, 2067, 2072, 2073, 2074, 2075], "quantize_dynam": 862, "convert_fx": 863, "fuse_fx": 864, "prepare_fx": 865, "prepare_qat_fx": 866, "quantize_qat": 867, "swap_modul": 868, "arang": 869, "are_deterministic_algorithms_en": 877, "as_tensor": 883, "asarrai": 884, "atleast_1d": 890, "atleast_2d": 891, "atleast_3d": 892, "unpackeddualtensor": 898, "dual_level": 899, "forward_ad": [900, 901, 902, 903], "enter_dual_level": 900, "exit_dual_level": 901, "make_du": 902, "unpack_du": 903, "backwardcfunct": 904, "functionctx": [905, 906, 907, 908], "mark_dirti": 905, "mark_non_differenti": 906, "save_for_backward": 907, "set_materialize_grad": 908, "inplacefunct": 909, "nestediofunct": 910, "once_differenti": 911, "hvp": 913, "vhp": 916, "inference_mod": 919, "set_grad_en": 920, "set_multithreading_en": 921, "gradcheck": [922, 923, 924, 2054], "gradcheckerror": 922, "gradgradcheck": [924, 2054], "name": [926, 2016, 2034, 2035], "next_funct": 927, "register_prehook": 929, "increment_vers": 930, "enforceuniqu": 931, "kinetosteptrack": 932, "load_nvprof": 933, "parse_nvprof_trac": 934, "export_chrome_trac": 935, "key_averag": 936, "self_cpu_time_tot": 937, "total_averag": 938, "record_funct": 939, "interv": 940, "memrecordsacc": 942, "stringtabl": 943, "bartlett_window": 945, "blackman_window": 954, "block_diag": 955, "broadcast_shap": 957, "broadcast_tensor": 958, "bucket": 960, "can_cast": 961, "cartesian_prod": 962, "cat": 963, "cdist": 964, "chain_matmul": 966, "column_stack": 974, "combin": [975, 2049], "compiled_with_cxx11_abi": 977, "allow_in_graph": [978, 2105], "cudagraph_mark_step_begin": 981, "is_compil": 983, "is_dynamo_compil": 984, "list_backend": 985, "reset": 986, "concat": 988, "concaten": 989, "streamcontext": [1000, 1015, 1400, 1984], "current_devic": [1001, 1027, 1401, 1985], "current_stream": [1002, 1028, 1402, 1986], "device_count": [1003, 1031, 1383, 1405, 1988], "is_avail": [1004, 1048, 1407, 1998], "set_devic": [1005, 1078, 2004], "cudagraph": [1009, 2098], "cudapluggablealloc": 1010, "externalstream": 1012, "outofmemoryerror": 1013, "caching_allocator_alloc": 1016, "caching_allocator_delet": 1017, "can_device_access_p": 1018, "change_current_alloc": 1019, "clock_rat": 1020, "comm": [1021, 1022, 1023, 1024, 1025], "broadcast": [1021, 2044], "broadcast_coalesc": 1022, "reduce_add": 1024, "current_blas_handl": 1026, "default_stream": [1029, 1403], "device_of": [1032, 1989], "empty_cach": [1033, 1385, 1990], "get_allocator_backend": 1034, "get_arch_list": 1035, "get_device_cap": [1036, 1991], "get_device_nam": [1037, 1992], "get_device_properti": [1038, 1993], "get_gencode_flag": 1039, "get_rng_stat": [1040, 1226, 1387, 1994], "get_rng_state_al": [1041, 1995], "get_sync_debug_mod": 1042, "graph_pool_handl": 1044, "init": [1045, 1406, 1996, 2041], "initial_se": [1046, 1249, 1997], "ipc_collect": 1047, "is_current_stream_captur": 1049, "is_initi": [1050, 1408, 1999], "_create_jit_fn": 1051, "_create_multi_output_jit_fn": 1052, "list_gpu_process": 1053, "make_graphed_cal": 1054, "manual_se": [1055, 1366, 1388, 2000], "manual_seed_al": [1056, 2001], "max_memory_alloc": 1057, "max_memory_cach": 1058, "max_memory_reserv": 1059, "mem_get_info": 1060, "memory_alloc": 1061, "memory_cach": 1062, "memory_reserv": 1063, "memory_snapshot": 1064, "memory_stat": 1065, "memory_summari": 1066, "memory_usag": 1067, "mark": 1068, "rang": [1069, 1843, 2016], "range_pop": 1070, "range_push": 1071, "power_draw": 1072, 
"reset_max_memory_alloc": 1073, "reset_max_memory_cach": 1074, "reset_peak_memory_stat": 1075, "seed": [1076, 1392, 1864, 2002], "seed_al": [1077, 2003], "set_per_process_memory_fract": [1079, 1393], "set_rng_stat": [1080, 1394, 1876, 2005], "set_rng_state_al": [1081, 2006], "set_stream": [1082, 1409, 2007], "set_sync_debug_mod": 1083, "temperatur": 1086, "cumulative_trapezoid": 1092, "dstack": 1108, "einsum": 1109, "empti": 1110, "empty_lik": 1111, "empty_strid": 1112, "enable_grad": 1113, "exp2": 1120, "ey": 1122, "fake_quantize_per_channel_affin": 1123, "fake_quantize_per_tensor_affin": 1124, "fft2": 1126, "fftfreq": 1127, "fftn": 1128, "fftshift": 1129, "hfft": 1130, "hfft2": 1131, "hfftn": 1132, "ifft": 1133, "ifft2": 1134, "ifftn": 1135, "ifftshift": 1136, "ihfft": 1137, "ihfft2": 1138, "ihfftn": 1139, "irfft": 1140, "irfft2": 1141, "irfftn": 1142, "rfft": 1143, "rfft2": 1144, "rfftfreq": 1145, "rfftn": 1146, "from_dlpack": 1160, "from_fil": 1161, "from_numpi": 1162, "frombuff": 1163, "full": [1164, 2062], "full_lik": 1165, "functional_cal": [1166, 1767], "grad_and_valu": 1169, "replace_all_batch_norm_modules_": 1175, "stack_module_st": 1176, "callmethodkei": 1179, "convertintkei": 1180, "dimconstraint": 1181, "dimdynam": [1182, 2100], "dividebykei": 1183, "equalityconstraint": 1184, "innertensorkei": 1185, "propagateunbackedsymint": 1186, "relaxedunspecconstraint": 1187, "shapeenv": 1188, "shapeenvset": 1189, "statefulsymboliccontext": 1190, "statelesssymboliccontext": 1191, "strictminmaxconstraint": 1192, "subclasssymboliccontext": 1193, "symboliccontext": 1194, "canonicalize_bool_expr": 1195, "check_consist": 1196, "compute_unbacked_bind": 1197, "constrain_rang": 1198, "constrain_unifi": 1199, "definitely_fals": 1200, "definitely_tru": 1201, "guard_size_oblivi": 1202, "has_free_symbol": 1203, "hint_int": 1204, "is_concrete_bool": 1205, "is_concrete_int": 1206, "lru_cach": 1207, "parallel_and": 1208, "parallel_or": 1209, "rebind_unback": 1210, "resolve_unbacked_bind": 1211, "statically_known_tru": 1212, "sym_eq": 1213, "get_default_devic": 1219, "get_default_dtyp": 1220, "get_deterministic_debug_mod": 1221, "get_device_modul": 1222, "get_float32_matmul_precis": 1223, "get_num_interop_thread": 1224, "get_num_thread": 1225, "hamming_window": 1231, "hann_window": 1232, "histogramdd": 1236, "hspmm": 1238, "hstack": 1239, "is_deterministic_algorithms_warn_only_en": 1254, "is_grad_en": 1256, "is_inference_mode_en": 1257, "is_nonzero": 1258, "is_storag": 1259, "is_tensor": 1260, "is_warn_always_en": 1261, "isin": 1264, "attribut": [1271, 2014, 2016, 2017, 2019, 2085], "scriptfunct": 1272, "scriptmodul": [1273, 2062], "annot": [1274, 2017], "enable_onednn_fus": 1275, "fork": 1276, "freez": 1277, "ignor": 1278, "isinst": 1280, "onednn_fusion_en": 1282, "optimize_for_infer": 1283, "save": [1284, 1859, 2012, 2043, 2056, 2062, 2072], "script_if_trac": 1286, "set_fusion_strategi": 1287, "strict_fus": 1288, "trace_modul": 1290, "unus": 1291, "wait": 1292, "kaiser_window": 1293, "kron": 1294, "cholesky_ex": 1304, "eig": 1309, "eigh": 1310, "eigval": 1311, "eigvalsh": 1312, "householder_product": 1313, "inv": 1314, "inv_ex": 1315, "ldl_factor": 1316, "ldl_factor_ex": 1317, "ldl_solv": 1318, "lstsq": 1319, "lu_factor": 1321, "lu_factor_ex": 1322, "matrix_norm": 1326, "matrix_rank": 1328, "multi_dot": 1329, "pinv": 1331, "solv": 1334, "solve_ex": 1335, "solve_triangular": 1336, "svdval": 1338, "tensorinv": 1339, "tensorsolv": 1340, "vander": [1341, 1971], "vecdot": 1342, "vector_norm": 1343, 
"linspac": 1344, "lobpcg": 1346, "logspac": 1360, "lu_unpack": 1365, "meshgrid": 1375, "current_allocated_memori": 1382, "driver_allocated_memori": 1384, "stop": 1391, "mtia": [1397, 1401, 1402, 1403, 1405, 1406, 1407, 1408, 1409, 1410, 1411, 2032], "deferredmtiacallerror": 1397, "adaptiveavgpool1d": 1428, "adaptiveavgpool2d": 1429, "adaptiveavgpool3d": 1430, "adaptivelogsoftmaxwithloss": 1431, "adaptivemaxpool1d": 1432, "adaptivemaxpool2d": 1433, "adaptivemaxpool3d": 1434, "alphadropout": 1435, "avgpool1d": 1436, "avgpool2d": 1437, "avgpool3d": 1438, "bceloss": 1439, "bcewithlogitsloss": 1440, "batchnorm1d": 1441, "bilinear": [1444, 1604], "ctcloss": 1446, "channelshuffl": 1447, "circularpad1d": 1448, "circularpad2d": 1449, "circularpad3d": 1450, "constantpad1d": 1451, "constantpad2d": 1452, "constantpad3d": 1453, "cosineembeddingloss": 1460, "cosinesimilar": 1461, "crossentropyloss": 1462, "dataparallel": [1463, 2037, 2040, 2042, 2046], "dropout": [1464, 1618, 2037, 2040], "dropout1d": [1465, 1619], "dropout2d": [1466, 1620], "dropout3d": [1467, 1621], "featurealphadropout": 1471, "fold": [1473, 1627], "fractionalmaxpool2d": 1474, "fractionalmaxpool3d": 1475, "gelu": [1476, 1631], "glu": [1477, 1632], "gaussiannllloss": 1480, "hingeembeddingloss": 1486, "huberloss": 1487, "ident": [1488, 1741, 1749, 2017, 2051], "kldivloss": 1492, "l1loss": 1493, "lppool1d": 1494, "lppool2d": 1495, "lppool3d": 1496, "lazybatchnorm1d": 1500, "lazybatchnorm2d": 1501, "lazybatchnorm3d": 1502, "lazyconv1d": 1503, "lazyconv2d": 1504, "lazyconv3d": 1505, "lazyconvtranspose1d": 1506, "lazyconvtranspose2d": 1507, "lazyconvtranspose3d": 1508, "lazyinstancenorm1d": 1509, "lazyinstancenorm2d": 1510, "lazyinstancenorm3d": 1511, "lazylinear": 1512, "localresponsenorm": 1515, "logsigmoid": [1516, 1653], "logsoftmax": 1517, "mseloss": 1518, "marginrankingloss": 1519, "maxpool1d": 1520, "maxpool2d": 1521, "maxpool3d": 1522, "maxunpool1d": 1523, "maxunpool2d": 1524, "maxunpool3d": 1525, "mish": [1526, 1664], "moduledict": [1528, 2017], "modulelist": [1529, 2016, 2017], "multilabelmarginloss": 1530, "multilabelsoftmarginloss": 1531, "multimarginloss": 1532, "nllloss": 1534, "prelu": [1535, 1678], "pairwisedist": 1536, "parameterdict": 1537, "parameterlist": 1538, "pixelshuffl": 1539, "pixelunshuffl": 1540, "poissonnllloss": 1541, "rmsnorm": [1542, 1716], "rnn": [1543, 1759, 1760, 1761, 1762, 1763, 1764, 2061], "rnnbase": 1544, "rrelu": [1546, 1683], "relu": [1547, 1679], "reflectionpad1d": 1549, "reflectionpad2d": 1550, "reflectionpad3d": 1551, "replicationpad1d": 1552, "replicationpad2d": 1553, "replicationpad3d": 1554, "selu": [1555, 1686], "sequenti": 1556, "silu": [1557, 1688], "smoothl1loss": 1559, "softmarginloss": 1560, "softmax2d": 1562, "softmin": [1563, 1692], "softplu": [1564, 1693], "softshrink": [1565, 1694], "softsign": [1566, 1695], "syncbatchnorm": 1567, "tanhshrink": [1569, 1697], "transformerdecod": 1572, "transformerdecoderlay": 1573, "transformerencod": 1574, "transformerencoderlay": 1575, "tripletmarginloss": 1576, "tripletmarginwithdistanceloss": 1577, "upsamplingbilinear2d": 1581, "upsamplingnearest2d": 1582, "zeropad1d": 1583, "zeropad2d": 1584, "zeropad3d": 1585, "sdpbackend": 1586, "attent": [1587, 1589, 1590, 1591, 2038, 2039, 2040], "bia": [1587, 1589, 1590, 2039], "causalbia": [1587, 2039], "causalvari": 1588, "causal_lower_right": 1589, "causal_upper_left": 1590, "sdpa_kernel": 1591, "adaptive_avg_pool1d": 1592, "adaptive_max_pool1d": 1595, "adaptive_max_pool2d": 1596, "adaptive_max_pool3d": 
1597, "affine_grid": 1598, "alpha_dropout": 1599, "avg_pool1d": 1600, "batch_norm": 1603, "conv_transpose1d": 1611, "conv_transpose2d": 1612, "conv_transpose3d": 1613, "cosine_embedding_loss": 1614, "cosine_similar": 1615, "cross_entropi": 1616, "ctc_loss": 1617, "elu_": 1623, "embedding_bag": 1625, "feature_alpha_dropout": 1626, "fractional_max_pool2d": 1628, "fractional_max_pool3d": 1629, "gaussian_nll_loss": 1630, "grid_sampl": 1633, "group_norm": 1634, "gumbel_softmax": 1635, "hardtanh_": 1640, "hinge_embedding_loss": 1641, "huber_loss": 1642, "instance_norm": 1643, "kl_div": 1645, "l1_loss": 1646, "layer_norm": 1647, "leaky_relu_": 1649, "local_response_norm": 1651, "log_softmax": [1652, 1904], "lp_pool1d": 1654, "lp_pool2d": 1655, "lp_pool3d": 1656, "margin_ranking_loss": 1657, "max_pool3d": 1660, "max_unpool1d": 1661, "max_unpool2d": 1662, "max_unpool3d": 1663, "mse_loss": 1665, "multi_margin_loss": 1666, "multilabel_margin_loss": 1667, "multilabel_soft_margin_loss": 1668, "nll_loss": 1669, "one_hot": 1671, "pad": [1672, 2037], "pairwise_dist": 1673, "pdist": 1674, "pixel_shuffl": 1675, "pixel_unshuffl": 1676, "poisson_nll_loss": 1677, "relu_": 1681, "rms_norm": 1682, "rrelu_": 1684, "scaled_dot_product_attent": 1685, "smooth_l1_loss": 1689, "soft_margin_loss": 1690, "threshold_": 1699, "data_parallel": [1700, 2040], "triplet_margin_loss": 1701, "triplet_margin_with_distance_loss": 1702, "lazymodulemixin": 1707, "register_module_backward_hook": 1708, "register_module_buffer_registration_hook": 1709, "register_module_forward_hook": 1710, "register_module_forward_pre_hook": 1711, "register_module_full_backward_hook": 1712, "register_module_full_backward_pre_hook": 1713, "register_module_module_registration_hook": 1714, "register_module_parameter_registration_hook": 1715, "distributeddataparallel": [1717, 2042, 2046, 2048], "uninitializedbuff": 1719, "uninitializedparamet": 1720, "clip_grad_norm": 1721, "clip_grad_norm_": 1722, "clip_grad_value_": 1723, "convert_conv2d_weight_memory_format": 1724, "convert_conv3d_weight_memory_format": 1725, "fuse_conv_bn_ev": 1726, "fuse_conv_bn_weight": 1727, "fuse_linear_bn_ev": 1728, "fuse_linear_bn_weight": 1729, "parameters_to_vector": 1730, "parametr": [1731, 1732, 1733, 1735, 1736, 1737, 1738, 2057], "orthogon": 1731, "spectral_norm": [1732, 1766], "weight_norm": [1733, 1769], "parametrizationlist": 1734, "cach": [1735, 2012, 2046, 2055, 2113], "is_parametr": 1736, "register_parametr": 1737, "remove_parametr": 1738, "basepruningmethod": 1739, "customfrommask": 1740, "l1unstructur": 1742, "lnstructur": 1743, "pruningcontain": 1744, "randomstructur": 1745, "randomunstructur": 1746, "prune": [1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 2057], "custom_from_mask": 1747, "global_unstructur": 1748, "is_prun": 1750, "l1_unstructur": 1751, "ln_structur": 1752, "random_structur": 1753, "random_unstructur": 1754, "remove_spectral_norm": 1756, "remove_weight_norm": 1757, "packedsequ": 1758, "pack_padded_sequ": 1759, "pack_sequ": 1760, "pad_packed_sequ": 1761, "pad_sequ": 1762, "unpack_sequ": 1763, "unpad_sequ": 1764, "skip_init": 1765, "stateless": 1767, "vector_to_paramet": 1768, "no_grad": 1770, "ones": [1776, 2104], "ones_lik": 1777, "jitscalartyp": 1778, "graphinfo": 1779, "verificationopt": 1780, "asgd": 1781, "adadelta": 1782, "adagrad": 1783, "adam": 1784, "adamw": 1785, "adamax": 1786, "lbfg": 1787, "nadam": 1788, "add_param_group": 1789, "load_state_dict": 1790, "state_dict": [1791, 2109], "zero_grad": 1793, "radam": 1794, "rmsprop": 
1795, "rprop": 1796, "sgd": 1797, "sparseadam": 1798, "chainedschedul": 1799, "constantlr": 1800, "cosineannealinglr": 1801, "cosineannealingwarmrestart": 1802, "cycliclr": 1803, "exponentiallr": 1804, "lambdalr": 1805, "linearlr": 1806, "multisteplr": 1807, "multiplicativelr": 1808, "onecyclelr": 1809, "polynomiallr": 1810, "reducelronplateau": 1811, "sequentiallr": 1812, "steplr": 1813, "pca_lowrank": 1817, "polar": 1821, "promote_typ": 1826, "quantize_per_channel": 1829, "quantize_per_tensor": 1830, "quantized_batch_norm": 1831, "quantized_max_pool1d": 1832, "quantized_max_pool2d": 1833, "sobolengin": 1834, "rand": 1836, "rand_lik": 1837, "randint": 1838, "randint_lik": 1839, "randn": 1840, "randn_lik": 1841, "randperm": 1842, "result_typ": 1853, "row_stack": 1857, "searchsort": 1863, "set_default_devic": 1867, "set_default_dtyp": 1868, "set_default_tensor_typ": 1869, "set_deterministic_debug_mod": 1870, "set_float32_matmul_precis": 1871, "set_flush_denorm": 1872, "set_num_interop_thread": 1873, "set_num_thread": 1874, "set_printopt": 1875, "set_warn_alwai": 1877, "signal": [1881, 1882, 1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 2080], "bartlett": 1881, "blackman": 1882, "cosin": 1883, "gaussian": 1885, "general_cosin": 1886, "general_ham": 1887, "ham": 1888, "hann": 1889, "kaiser": 1890, "nuttal": 1891, "as_sparse_gradcheck": 1902, "check_sparse_tensor_invari": 1903, "sampled_addmm": 1906, "spdiag": 1908, "sparse_bsc_tensor": 1910, "sparse_bsr_tensor": 1911, "sparse_compressed_tensor": 1912, "sparse_coo_tensor": 1913, "sparse_csc_tensor": 1914, "sparse_csr_tensor": 1915, "std_mean": 1923, "svd_lowrank": 1929, "sym_float": 1932, "sym_int": 1933, "sym_it": 1934, "sym_max": 1935, "sym_min": 1936, "sym_not": 1937, "tensordot": 1945, "trapezoid": 1950, "trapz": 1951, "tril_indic": 1954, "triu_indic": 1956, "unravel_index": 1963, "use_deterministic_algorithm": 1965, "generate_methods_for_privateuse1_backend": 1966, "get_cpp_backtrac": 1967, "rename_privateuse1_backend": 1968, "set_modul": 1969, "swap_tensor": 1970, "var_mean": 1973, "view_as_complex": 1975, "view_as_r": 1976, "vstack": 1979, "zero": 2010, "zeros_lik": 2011, "hub": 2012, "publish": 2012, "entrypoint": 2012, "run": [2012, 2110], "my": [2012, 2043, 2051, 2070, 2104, 2110], "download": 2012, "logic": [2012, 2016], "known": [2012, 2014], "bind": 2013, "tabl": [2013, 2062], "built": [2014, 2015, 2017, 2067], "comparison": [2014, 2016, 2017, 2091, 2098], "inspect": [2014, 2065, 2102], "warn": 2014, "appendix": [2014, 2017], "recurs": 2014, "constant": [2014, 2016], "fusion": 2014, "math": [2015, 2091], "construct": [2016, 2017, 2019, 2036, 2069, 2082], "refin": [2016, 2017], "enum": [2016, 2017], "tupl": [2016, 2017], "liter": [2016, 2017], "list": [2016, 2017, 2067], "dict": 2016, "arithmet": [2016, 2017], "subscript": [2016, 2017], "slice": [2016, 2017, 2060, 2091], "ternari": [2016, 2017], "cast": 2016, "statement": [2016, 2017], "assign": [2016, 2017], "If": 2016, "while": [2016, 2017], "loop": 2016, "For": 2016, "continu": [2016, 2017], "return": [2016, 2017, 2051, 2079], "resolut": [2016, 2017], "lookup": 2016, "defin": [2016, 2049, 2050], "terminologi": 2017, "instanc": 2017, "when": [2017, 2043, 2049, 2067, 2072, 2082, 2104], "signatur": 2017, "expr": 2017, "convers": [2017, 2036], "atom": 2017, "identifi": [2017, 2104, 2113], "parenthes": 2017, "form": 2017, "displai": 2017, "primari": 2017, "power": 2017, "unari": [2017, 2024, 2082], "bitwis": 2017, "binari": [2017, 2024], "shift": 2017, "boolean": 2017, 
"condit": 2017, "augment": 2017, "rais": 2017, "del": 2017, "compound": 2017, "els": 2017, "getattr": 2017, "hasattr": 2017, "zip": [2017, 2070], "enumer": 2017, "rule": [2017, 2035, 2050, 2065], "remot": [2017, 2079], "procedur": 2017, "program": 2017, "coverag": [2018, 2034, 2049], "properti": [2019, 2022], "correctli": 2019, "bound": 2019, "schema": 2019, "between": [2019, 2070, 2104], "low": 2021, "matrix": [2022, 2072], "decomposit": 2022, "solver": 2022, "misc": 2022, "motiv": [2024, 2100, 2103], "reduct": [2024, 2046, 2060, 2091], "idiom": 2025, "miscellan": 2026, "mobile_optim": 2027, "model_zoo": 2028, "module_track": 2029, "strategi": [2033, 2069], "descriptor": 2033, "file_descriptor": 2033, "file_system": 2033, "keep": [2034, 2070], "dimens": [2034, 2035], "unifi": 2034, "contract": 2034, "awai": 2034, "factori": 2034, "variant": 2034, "semant": [2035, 2044, 2046, 2055, 2062], "infer": [2035, 2043, 2045, 2053, 2062, 2095], "explicit": 2035, "align": 2035, "subsystem": 2035, "constructor": 2036, "convolut": [2037, 2040, 2060, 2061], "layer": 2037, "pool": [2037, 2040], "activ": [2037, 2040, 2115], "weight": [2037, 2069], "nonlinear": 2037, "recurr": [2037, 2051], "distanc": [2037, 2040], "loss": [2037, 2040, 2042], "vision": [2037, 2040], "shuffl": 2037, "lazi": 2037, "alias": 2037, "submodul": 2038, "typic": 2042, "unscal": 2042, "accumul": 2042, "penalti": 2042, "one": 2042, "per": [2042, 2069], "need": [2042, 2104], "particular": [2042, 2043], "dtype": [2042, 2062, 2075, 2085], "encod": 2043, "histori": [2043, 2115], "set": [2043, 2053, 2067], "No": 2043, "evalu": [2043, 2054, 2101], "multithread": 2043, "concurr": 2043, "determin": [2043, 2061], "retain": 2043, "thread": [2043, 2045, 2090], "safeti": 2043, "wirting": 2043, "calculu": 2043, "pictur": 2043, "conjug": 2043, "formula": 2043, "domain": 2043, "regist": [2043, 2099], "whether": [2043, 2070], "fire": 2043, "differ": [2043, 2067, 2104], "modifi": 2043, "compat": 2044, "runtim": [2045, 2051, 2113], "tensorfloat": [2046, 2055, 2060], "32": [2046, 2055, 2060, 2063], "tf32": [2046, 2055, 2060], "amper": [2046, 2060], "later": [2046, 2060, 2070], "reduc": [2046, 2060], "fp16": [2046, 2060], "gemm": [2046, 2060], "bf16": [2046, 2060], "bc": 2046, "pytorch_cuda_alloc_conf": 2046, "alloc": [2046, 2051, 2115], "cubla": 2046, "workspac": 2046, "cufft": 2046, "plan": [2046, 2055], "just": 2046, "time": [2046, 2095, 2107, 2111, 2113], "practic": [2046, 2059, 2072, 2097], "agnost": 2046, "buffer": [2046, 2052, 2059], "instead": 2046, "whole": [2046, 2104], "captur": 2046, "partial": 2046, "9": 2046, "6": 2046, "across": [2046, 2062], "land": 2047, "page": 2047, "intern": [2048, 2067, 2070, 2100, 2102], "processgroup": 2048, "ddpoptim": 2048, "setup_context": 2049, "like": [2049, 2070], "subclass": [2049, 2103], "wrapper": 2049, "__torch_function__": 2049, "overrid": [2049, 2114], "nativ": [2049, 2072, 2074], "anoth": 2050, "specifi": 2050, "gotcha": 2050, "staticmethod": 2050, "isn": 2051, "freed": 2051, "properli": 2051, "loader": 2051, "doesn": 2051, "fsdp": 2052, "prefetch": 2052, "nuanc": 2052, "payload": 2052, "intel": [2053, 2071], "hardwar": [2053, 2072], "prerequisit": 2053, "softwar": 2053, "fp32": 2053, "notat": 2054, "background": [2054, 2078, 2079, 2098], "inform": [2054, 2077, 2112], "analyt": 2054, "u": 2054, "reus": [2055, 2059], "hipfft": 2055, "rocfft": 2055, "larg": 2056, "fleet": 2056, "wide": 2056, "attach": 2056, "consider": 2056, "block": 2057, "neural": 2057, "tip": [2059, 2073], "fight": 2059, "deadlock": 
2059, "through": 2059, "queue": 2059, "e": 2059, "g": 2059, "hogwild": 2059, "oversubscript": 2059, "accuraci": [2060, 2072, 2073, 2113], "extrem": 2060, "finit": 2060, "instinct": 2060, "mi200": 2060, "reproduc": 2061, "nondeterminist": 2061, "algorithm": [2061, 2069, 2078], "fill": 2061, "uniniti": 2061, "content": [2062, 2070], "preserv": 2062, "format": [2062, 2070], "them": [2062, 2070], "version": 2062, "integ": 2062, "divis": 2062, "alwai": [2062, 2101], "includ": [2063, 2070], "compon": 2063, "speed": [2063, 2104], "One": [2063, 2112], "instal": 2063, "cffi": 2063, "cpp": 2063, "found": 2063, "win": 2063, "channel": 2063, "without": 2063, "claus": 2063, "protect": 2063, "broken": 2063, "pipe": 2063, "driver": 2063, "shut": 2063, "down": 2063, "ipc": 2063, "base": [2064, 2065, 2067, 2069, 2112], "gui": 2065, "diagnos": [2065, 2113], "sarif": 2065, "diagnost": 2065, "alexnet": 2067, "v": 2067, "index": [2067, 2091], "aten": [2067, 2108, 2112], "inlin": 2067, "discov": 2067, "unconvert": 2067, "onc": 2067, "adjust": 2069, "learn": 2069, "rate": 2069, "averag": 2069, "swa": 2069, "ema": 2069, "care": 2069, "put": 2069, "togeth": 2069, "do": [2070, 2102, 2104], "see": [2070, 2104], "insid": [2070, 2104], "treat": 2070, "archiv": 2070, "file_structur": 2070, "given": 2070, "wa": 2070, "resourc": [2070, 2103], "distinguish": 2070, "explan": 2070, "analyz": 2070, "extern": 2070, "mock": 2070, "refactor": 2070, "sharp": 2070, "global": 2070, "isol": 2070, "each": [2070, 2103], "mangl": 2070, "instrument": 2071, "technologi": 2071, "eager": 2072, "awar": 2072, "mainten": 2072, "engin": 2072, "observ": [2072, 2075], "configur": [2072, 2074], "insensit": 2073, "int8": 2073, "sensit": 2073, "ao": [2075, 2092, 2093], "top": 2075, "quantize_fx": 2075, "qconfig_map": 2075, "backend_config": 2075, "custom_config": 2075, "pt2e": 2075, "0": [2075, 2101, 2109, 2110, 2113], "export_util": 2075, "relat": [2075, 2103], "fake_quant": 2075, "intrins": 2075, "qat": 2075, "scheme": 2075, "rpc": 2077, "tensorpip": 2077, "rref": [2077, 2079], "remotemodul": 2077, "record": 2078, "dure": 2078, "smart": 2078, "end": 2078, "protocol": 2079, "lifetim": 2079, "reason": 2079, "scenario": 2079, "owner": 2079, "argument": 2079, "sparsiti": 2082, "semi": 2082, "acceler": 2082, "coo": 2082, "hybrid": 2082, "uncoalesc": 2082, "csr": 2082, "csc": 2082, "bsr": 2082, "bsc": 2082, "memory_format": 2085, "tensorboard": 2087, "creation": 2091, "sampl": 2091, "quasi": 2091, "pointwis": 2091, "spectral": 2091, "bla": 2091, "lapack": 2091, "foreach": 2091, "path": 2091, "n": [2092, 2093], "_numeric_suit": 2092, "_numeric_suite_fx": 2093, "howto": 2094, "vendor": 2094, "aotinductor": 2095, "ahead": 2095, "Of": 2095, "ed": 2095, "x86": 2097, "tree": 2098, "integr": 2098, "callabl": 2098, "previou": 2098, "after": 2099, "aotautograd": 2099, "speedi": 2099, "abridg": 2100, "public": 2100, "guard": [2100, 2101, 2102], "overal": [2100, 2103], "architectur": [2100, 2103], "unback": 2100, "dynamo": [2101, 2102], "gentl": 2101, "pep": 2101, "523": 2101, "frame": 2101, "cpython": 2101, "sound": 2101, "duck": 2101, "complet": 2101, "conclus": 2101, "footnot": 2101, "artifact": 2102, "bit": 2103, "detail": 2103, "individu": [2103, 2107], "characterist": 2103, "interact": 2103, "you": 2104, "still": 2104, "crash": 2104, "slow": 2104, "recompil": [2104, 2113], "am": 2104, "speedup": 2104, "caus": [2104, 2113], "didn": 2104, "incorrect": 2104, "result": 2104, "oom": 2104, "besid": 2104, "via": 2104, "under": 2104, "some": 2104, "did": 2104, 
"fine": [2104, 2105], "grain": [2104, 2105], "_dynamo": [2104, 2105], "disallow_in_graph": [2104, 2105], "_dynamo_skip": 2104, "pretrain": 2106, "next": 2106, "torchinductor": [2107, 2110, 2113], "relev": 2107, "breakdown": 2107, "prim": 2108, "nnmodul": 2109, "__call__": 2109, "dashboard": 2110, "measur": 2110, "pr": 2110, "affect": 2110, "befor": 2110, "merg": 2110, "understand": [2111, 2115], "around": 2111, "region": 2111, "compiledfunct": 2111, "overhead": 2111, "x": 2112, "none": 2112, "partition": 2112, "matcher": 2112, "capabl": 2112, "troubleshoot": 2113, "titl": 2113, "minifi": 2113, "torch_compile_debug": 2113, "excess": 2113, "cold": 2113, "corrupt": 2113, "snapshot": 2115, "visual": 2115, "timelin": 2115, "processgroupnccl": 2117, "finfo": 2118, "iinfo": 2118}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx.ext.todo": 2, "sphinx.ext.viewcode": 1, "sphinx": 57}, "alltitles": {"torch.Tensor.histogram": [[303, "torch-tensor-histogram"]], "torch.Tensor.ger": [[290, "torch-tensor-ger"]], "torch.Tensor.flip": [[266, "torch-tensor-flip"]], "torch.Tensor.fmod": [[278, "torch-tensor-fmod"]], "torch.Tensor.flatten": [[265, "torch-tensor-flatten"]], "torch.Tensor.gt_": [[298, "torch-tensor-gt"]], "torch.Tensor.frac_": [[281, "torch-tensor-frac"]], "torch.Tensor.expm1": [[258, "torch-tensor-expm1"]], "torch.Tensor.greater_equal": [[295, "torch-tensor-greater-equal"]], "torch.Tensor.histc": [[302, "torch-tensor-histc"]], "torch.Tensor.get_device": [[291, "torch-tensor-get-device"]], "torch.Tensor.float_power_": [[271, "torch-tensor-float-power"]], "torch.Tensor.frexp": [[282, "torch-tensor-frexp"]], "torch.Tensor.floor_": [[273, "torch-tensor-floor"]], "torch.Tensor.hsplit": [[304, "torch-tensor-hsplit"]], "torch.Tensor.heaviside": [[301, "torch-tensor-heaviside"]], "torch.Tensor.floor_divide": [[274, "torch-tensor-floor-divide"]], "torch.Tensor.floor": [[272, "torch-tensor-floor"]], "torch.Tensor.ge": [[286, "torch-tensor-ge"]], "torch.Tensor.hypot": [[305, "torch-tensor-hypot"]], "torch.Tensor.half": [[299, "torch-tensor-half"]], "torch.Tensor.gather": [[283, "torch-tensor-gather"]], "torch.Tensor.fliplr": [[267, "torch-tensor-fliplr"]], "torch.Tensor.float": [[269, "torch-tensor-float"]], "torch.Tensor.fill_diagonal_": [[262, "torch-tensor-fill-diagonal"]], "torch.Tensor.expm1_": [[259, "torch-tensor-expm1"]], "torch.Tensor.float_power": [[270, "torch-tensor-float-power"]], "torch.Tensor.frac": [[280, "torch-tensor-frac"]], "torch.Tensor.fmod_": [[279, "torch-tensor-fmod"]], "torch.Tensor.greater_equal_": [[296, "torch-tensor-greater-equal"]], "torch.Tensor.expand": [[256, "torch-tensor-expand"]], "torch.Tensor.flipud": [[268, "torch-tensor-flipud"]], "torch.Tensor.geometric_": [[288, "torch-tensor-geometric"]], "torch.Tensor.exp_": [[255, "torch-tensor-exp"]], "torch.Tensor.expand_as": [[257, "torch-tensor-expand-as"]], "torch.Tensor.fix_": [[264, "torch-tensor-fix"]], "torch.Tensor.fix": [[263, "torch-tensor-fix"]], "torch.Tensor.gcd": [[284, "torch-tensor-gcd"]], "torch.Tensor.fmax": [[276, "torch-tensor-fmax"]], "torch.Tensor.ge_": [[287, "torch-tensor-ge"]], "torch.Tensor.fmin": [[277, "torch-tensor-fmin"]], "torch.Tensor.gt": [[297, "torch-tensor-gt"]], "torch.Tensor.hardshrink": [[300, 
"torch-tensor-hardshrink"]], "torch.Tensor.greater_": [[294, "torch-tensor-greater"]], "torch.Tensor.grad": [[292, "torch-tensor-grad"]], "torch.Tensor.floor_divide_": [[275, "torch-tensor-floor-divide"]], "torch.Tensor.greater": [[293, "torch-tensor-greater"]], "torch.Tensor.fill_": [[261, "torch-tensor-fill"]], "torch.Tensor.geqrf": [[289, "torch-tensor-geqrf"]], "torch.Tensor.exponential_": [[260, "torch-tensor-exponential"]], "torch.Tensor.gcd_": [[285, "torch-tensor-gcd"]], "torch.Tensor.byte": [[174, "torch-tensor-byte"]], "torch.Tensor.bernoulli_": [[156, "torch-tensor-bernoulli"]], "torch.Tensor.cholesky_solve": [[185, "torch-tensor-cholesky-solve"]], "torch.Tensor.col_indices": [[193, "torch-tensor-col-indices"]], "torch.Tensor.bitwise_left_shift": [[161, "torch-tensor-bitwise-left-shift"]], "torch.Tensor.ceil_": [[179, "torch-tensor-ceil"]], "torch.Tensor.ceil": [[178, "torch-tensor-ceil"]], "torch.Tensor.bitwise_not_": [[164, "torch-tensor-bitwise-not"]], "torch.Tensor.cholesky_inverse": [[184, "torch-tensor-cholesky-inverse"]], "torch.Tensor.clip": [[189, "torch-tensor-clip"]], "torch.Tensor.cos": [[202, "torch-tensor-cos"]], "torch.Tensor.coalesce": [[192, "torch-tensor-coalesce"]], "torch.Tensor.bincount": [[158, "torch-tensor-bincount"]], "torch.Tensor.chunk": [[186, "torch-tensor-chunk"]], "torch.Tensor.bool": [[172, "torch-tensor-bool"]], "torch.Tensor.conj": [[194, "torch-tensor-conj"]], "torch.Tensor.contiguous": [[197, "torch-tensor-contiguous"]], "torch.Tensor.cdouble": [[177, "torch-tensor-cdouble"]], "torch.Tensor.baddbmm": [[153, "torch-tensor-baddbmm"]], "torch.Tensor.bitwise_left_shift_": [[162, "torch-tensor-bitwise-left-shift"]], "torch.Tensor.bitwise_or": [[165, "torch-tensor-bitwise-or"]], "torch.Tensor.bernoulli": [[155, "torch-tensor-bernoulli"]], "torch.Tensor.bitwise_right_shift": [[167, "torch-tensor-bitwise-right-shift"]], "torch.Tensor.cholesky": [[183, "torch-tensor-cholesky"]], "torch.Tensor.clip_": [[190, "torch-tensor-clip"]], "torch.Tensor.corrcoef": [[201, "torch-tensor-corrcoef"]], "torch.Tensor.clamp": [[187, "torch-tensor-clamp"]], "torch.Tensor.chalf": [[181, "torch-tensor-chalf"]], "torch.Tensor.copysign": [[199, "torch-tensor-copysign"]], "torch.Tensor.bitwise_and_": [[160, "torch-tensor-bitwise-and"]], "torch.Tensor.bfloat16": [[157, "torch-tensor-bfloat16"]], "torch.Tensor.cos_": [[203, "torch-tensor-cos"]], "torch.Tensor.clamp_": [[188, "torch-tensor-clamp"]], "torch.Tensor.conj_physical": [[195, "torch-tensor-conj-physical"]], "torch.Tensor.broadcast_to": [[173, "torch-tensor-broadcast-to"]], "torch.Tensor.copy_": [[198, "torch-tensor-copy"]], "torch.Tensor.bitwise_and": [[159, "torch-tensor-bitwise-and"]], "torch.Tensor.bitwise_xor": [[169, "torch-tensor-bitwise-xor"]], "torch.Tensor.cauchy_": [[175, "torch-tensor-cauchy"]], "torch.Tensor.bitwise_or_": [[166, "torch-tensor-bitwise-or"]], "torch.Tensor.char": [[182, "torch-tensor-char"]], "torch.Tensor.clone": [[191, "torch-tensor-clone"]], "torch.Tensor.baddbmm_": [[154, "torch-tensor-baddbmm"]], "torch.Tensor.conj_physical_": [[196, "torch-tensor-conj-physical"]], "torch.Tensor.copysign_": [[200, "torch-tensor-copysign"]], "torch.Tensor.bitwise_xor_": [[170, "torch-tensor-bitwise-xor"]], "torch.Tensor.bitwise_right_shift_": [[168, "torch-tensor-bitwise-right-shift"]], "torch.Tensor.bitwise_not": [[163, "torch-tensor-bitwise-not"]], "torch.Tensor.ccol_indices": [[176, "torch-tensor-ccol-indices"]], "torch.Tensor.cfloat": [[180, "torch-tensor-cfloat"]], "torch.Tensor.bmm": [[171, 
"torch-tensor-bmm"]], "torch.Tensor.atan2_": [[148, "torch-tensor-atan2"]], "torch.Tensor.addcmul_": [[106, "torch-tensor-addcmul"]], "torch.Tensor.angle": [[119, "torch-tensor-angle"]], "torch.Tensor.arctan": [[130, "torch-tensor-arctan"]], "torch.Tensor.arccosh": [[124, "torch-tensor-arccosh"]], "torch.Tensor.argmin": [[137, "torch-tensor-argmin"]], "torch.Tensor.argwhere": [[139, "torch-tensor-argwhere"]], "torch.Tensor.addmv_": [[110, "torch-tensor-addmv"]], "torch.Tensor.arcsin": [[126, "torch-tensor-arcsin"]], "torch.Tensor.addmv": [[109, "torch-tensor-addmv"]], "torch.Tensor.argmax": [[136, "torch-tensor-argmax"]], "torch.Tensor.atan": [[146, "torch-tensor-atan"]], "torch.Tensor.asinh": [[144, "torch-tensor-asinh"]], "torch.Tensor.addcdiv": [[103, "torch-tensor-addcdiv"]], "torch.Tensor.arcsinh_": [[129, "torch-tensor-arcsinh"]], "torch.Tensor.arctan2_": [[132, "torch-tensor-arctan2"]], "torch.Tensor.amin": [[117, "torch-tensor-amin"]], "torch.Tensor.argsort": [[138, "torch-tensor-argsort"]], "torch.Tensor.atanh_": [[151, "torch-tensor-atanh"]], "torch.Tensor.addr_": [[112, "torch-tensor-addr"]], "torch.Tensor.atan2": [[147, "torch-tensor-atan2"]], "torch.Tensor.addr": [[111, "torch-tensor-addr"]], "torch.Tensor.any": [[120, "torch-tensor-any"]], "torch.Tensor.backward": [[152, "torch-tensor-backward"]], "torch.Tensor.arctan2": [[131, "torch-tensor-arctan2"]], "torch.Tensor.arctan_": [[133, "torch-tensor-arctan"]], "torch.Tensor.arctanh_": [[135, "torch-tensor-arctanh"]], "torch.Tensor.all": [[114, "torch-tensor-all"]], "torch.Tensor.apply_": [[121, "torch-tensor-apply"]], "torch.Tensor.addmm": [[107, "torch-tensor-addmm"]], "torch.Tensor.arccosh_": [[125, "torch-tensor-arccosh"]], "torch.Tensor.amax": [[116, "torch-tensor-amax"]], "torch.Tensor.addcdiv_": [[104, "torch-tensor-addcdiv"]], "torch.Tensor.arccos_": [[123, "torch-tensor-arccos"]], "torch.Tensor.asin": [[142, "torch-tensor-asin"]], "torch.Tensor.atan_": [[149, "torch-tensor-atan"]], "torch.Tensor.arctanh": [[134, "torch-tensor-arctanh"]], "torch.Tensor.addmm_": [[108, "torch-tensor-addmm"]], "torch.Tensor.asinh_": [[145, "torch-tensor-asinh"]], "torch.Tensor.atanh": [[150, "torch-tensor-atanh"]], "torch.Tensor.aminmax": [[118, "torch-tensor-aminmax"]], "torch.Tensor.adjoint": [[113, "torch-tensor-adjoint"]], "torch.Tensor.arcsin_": [[127, "torch-tensor-arcsin"]], "torch.Tensor.as_subclass": [[141, "torch-tensor-as-subclass"]], "torch.Tensor.allclose": [[115, "torch-tensor-allclose"]], "torch.Tensor.arccos": [[122, "torch-tensor-arccos"]], "torch.Tensor.arcsinh": [[128, "torch-tensor-arcsinh"]], "torch.Tensor.asin_": [[143, "torch-tensor-asin"]], "torch.Tensor.as_strided": [[140, "torch-tensor-as-strided"]], "torch.Tensor.addcmul": [[105, "torch-tensor-addcmul"]], "torch.Tensor.addbmm_": [[102, "torch-tensor-addbmm"]], "torch.Tensor.cosh": [[204, "torch-tensor-cosh"]], "torch.Tensor.digamma": [[232, "torch-tensor-digamma"]], "torch.Tensor.div_": [[238, "torch-tensor-div"]], "torch.Tensor.diag": [[226, "torch-tensor-diag"]], "torch.Tensor.dequantize": [[221, "torch-tensor-dequantize"]], "torch.Tensor.deg2rad": [[219, "torch-tensor-deg2rad"]], "torch.Tensor.dim": [[234, "torch-tensor-dim"]], "torch.Tensor.detach": [[223, "torch-tensor-detach"]], "torch.Tensor.data_ptr": [[218, "torch-tensor-data-ptr"]], "torch.Tensor.dense_dim": [[220, "torch-tensor-dense-dim"]], "torch.Tensor.detach_": [[224, "torch-tensor-detach"]], "torch.Tensor.cpu": [[208, "torch-tensor-cpu"]], "torch.Tensor.cov": [[207, "torch-tensor-cov"]], 
"torch.Tensor.equal": [[247, "torch-tensor-equal"]], "torch.Tensor.exp": [[254, "torch-tensor-exp"]], "torch.Tensor.cosh_": [[205, "torch-tensor-cosh"]], "torch.Tensor.dist": [[236, "torch-tensor-dist"]], "torch.Tensor.crow_indices": [[210, "torch-tensor-crow-indices"]], "torch.Tensor.cummax": [[212, "torch-tensor-cummax"]], "torch.Tensor.device": [[225, "torch-tensor-device"]], "torch.Tensor.diff": [[231, "torch-tensor-diff"]], "torch.Tensor.cumprod_": [[215, "torch-tensor-cumprod"]], "torch.Tensor.divide": [[239, "torch-tensor-divide"]], "torch.Tensor.divide_": [[240, "torch-tensor-divide"]], "torch.Tensor.div": [[237, "torch-tensor-div"]], "torch.Tensor.diag_embed": [[227, "torch-tensor-diag-embed"]], "torch.Tensor.diagonal": [[229, "torch-tensor-diagonal"]], "torch.Tensor.eq": [[245, "torch-tensor-eq"]], "torch.Tensor.eq_": [[246, "torch-tensor-eq"]], "torch.Tensor.erfinv_": [[253, "torch-tensor-erfinv"]], "torch.Tensor.digamma_": [[233, "torch-tensor-digamma"]], "torch.Tensor.element_size": [[244, "torch-tensor-element-size"]], "torch.Tensor.count_nonzero": [[206, "torch-tensor-count-nonzero"]], "torch.Tensor.double": [[242, "torch-tensor-double"]], "torch.Tensor.erfc": [[250, "torch-tensor-erfc"]], "torch.Tensor.cumprod": [[214, "torch-tensor-cumprod"]], "torch.Tensor.erf": [[248, "torch-tensor-erf"]], "torch.Tensor.dsplit": [[243, "torch-tensor-dsplit"]], "torch.Tensor.erfc_": [[251, "torch-tensor-erfc"]], "torch.Tensor.cummin": [[213, "torch-tensor-cummin"]], "torch.Tensor.cumsum_": [[217, "torch-tensor-cumsum"]], "torch.Tensor.det": [[222, "torch-tensor-det"]], "torch.Tensor.dim_order": [[235, "torch-tensor-dim-order"]], "torch.Tensor.diagonal_scatter": [[230, "torch-tensor-diagonal-scatter"]], "torch.Tensor.erf_": [[249, "torch-tensor-erf"]], "torch.Tensor.cross": [[209, "torch-tensor-cross"]], "torch.Tensor.cumsum": [[216, "torch-tensor-cumsum"]], "torch.Tensor.diagflat": [[228, "torch-tensor-diagflat"]], "torch.Tensor.erfinv": [[252, "torch-tensor-erfinv"]], "torch.Tensor.dot": [[241, "torch-tensor-dot"]], "torch.Tensor.cuda": [[211, "torch-tensor-cuda"]], "torch.Tensor.igamma": [[309, "torch-tensor-igamma"]], "torch.Tensor.is_quantized": [[340, "torch-tensor-is-quantized"]], "torch.Tensor.is_inference": [[336, "torch-tensor-is-inference"]], "torch.Tensor.int_repr": [[328, "torch-tensor-int-repr"]], "torch.Tensor.is_sparse_csr": [[345, "torch-tensor-is-sparse-csr"]], "torch.Tensor.indices": [[325, "torch-tensor-indices"]], "torch.Tensor.is_sparse": [[344, "torch-tensor-is-sparse"]], "torch.Tensor.kthvalue": [[356, "torch-tensor-kthvalue"]], "torch.Tensor.index_reduce": [[322, "torch-tensor-index-reduce"]], "torch.Tensor.itemsize": [[355, "torch-tensor-itemsize"]], "torch.Tensor.is_pinned": [[339, "torch-tensor-is-pinned"]], "torch.Tensor.index_put_": [[321, "torch-tensor-index-put"]], "torch.Tensor.is_meta": [[338, "torch-tensor-is-meta"]], "torch.Tensor.isposinf": [[351, "torch-tensor-isposinf"]], "torch.Tensor.igammac_": [[312, "torch-tensor-igammac"]], "torch.Tensor.index_select": [[324, "torch-tensor-index-select"]], "torch.Tensor.index_add_": [[315, "torch-tensor-index-add"]], "torch.Tensor.index_copy_": [[317, "torch-tensor-index-copy"]], "torch.Tensor.inner": [[326, "torch-tensor-inner"]], "torch.Tensor.int": [[327, "torch-tensor-int"]], "torch.Tensor.is_conj": [[332, "torch-tensor-is-conj"]], "torch.Tensor.istft": [[353, "torch-tensor-istft"]], "torch.Tensor.i0": [[307, "torch-tensor-i0"]], "torch.Tensor.isreal": [[352, "torch-tensor-isreal"]], "torch.Tensor.index_add": 
[[314, "torch-tensor-index-add"]], "torch.Tensor.hypot_": [[306, "torch-tensor-hypot"]], "torch.Tensor.is_shared": [[342, "torch-tensor-is-shared"]], "torch.Tensor.igamma_": [[310, "torch-tensor-igamma"]], "torch.Tensor.is_floating_point": [[335, "torch-tensor-is-floating-point"]], "torch.Tensor.isinf": [[348, "torch-tensor-isinf"]], "torch.Tensor.index_copy": [[316, "torch-tensor-index-copy"]], "torch.Tensor.i0_": [[308, "torch-tensor-i0"]], "torch.Tensor.imag": [[313, "torch-tensor-imag"]], "torch.Tensor.isclose": [[346, "torch-tensor-isclose"]], "torch.Tensor.is_leaf": [[337, "torch-tensor-is-leaf"]], "torch.Tensor.isneginf": [[350, "torch-tensor-isneginf"]], "torch.Tensor.index_fill_": [[319, "torch-tensor-index-fill"]], "torch.Tensor.is_cuda": [[334, "torch-tensor-is-cuda"]], "torch.Tensor.isfinite": [[347, "torch-tensor-isfinite"]], "torch.Tensor.index_fill": [[318, "torch-tensor-index-fill"]], "torch.Tensor.is_signed": [[343, "torch-tensor-is-signed"]], "torch.Tensor.isnan": [[349, "torch-tensor-isnan"]], "torch.Tensor.item": [[354, "torch-tensor-item"]], "torch.Tensor.is_complex": [[331, "torch-tensor-is-complex"]], "torch.Tensor.index_put": [[320, "torch-tensor-index-put"]], "torch.Tensor.inverse": [[329, "torch-tensor-inverse"]], "torch.Tensor.index_reduce_": [[323, "torch-tensor-index-reduce"]], "torch.Tensor.is_set_to": [[341, "torch-tensor-is-set-to"]], "torch.Tensor.is_coalesced": [[330, "torch-tensor-is-coalesced"]], "torch.Tensor.igammac": [[311, "torch-tensor-igammac"]], "torch.Tensor.is_contiguous": [[333, "torch-tensor-is-contiguous"]], "torch.Tensor.less": [[365, "torch-tensor-less"]], "torch.Tensor.logcumsumexp": [[382, "torch-tensor-logcumsumexp"]], "torch.Tensor.ldexp_": [[360, "torch-tensor-ldexp"]], "torch.Tensor.lt": [[396, "torch-tensor-lt"]], "torch.Tensor.lu": [[398, "torch-tensor-lu"]], "torch.Tensor.lcm_": [[358, "torch-tensor-lcm"]], "torch.Tensor.less_equal_": [[368, "torch-tensor-less-equal"]], "torch.Tensor.ldexp": [[359, "torch-tensor-ldexp"]], "torch.Tensor.log1p_": [[375, "torch-tensor-log1p"]], "torch.Tensor.masked_fill": [[401, "torch-tensor-masked-fill"]], "torch.Tensor.lerp": [[363, "torch-tensor-lerp"]], "torch.Tensor.log": [[371, "torch-tensor-log"]], "torch.Tensor.logical_xor": [[390, "torch-tensor-logical-xor"]], "torch.Tensor.map_": [[400, "torch-tensor-map"]], "torch.Tensor.long": [[395, "torch-tensor-long"]], "torch.Tensor.logsumexp": [[394, "torch-tensor-logsumexp"]], "torch.Tensor.log_": [[378, "torch-tensor-log"]], "torch.Tensor.matmul": [[406, "torch-tensor-matmul"]], "torch.Tensor.log2": [[376, "torch-tensor-log2"]], "torch.Tensor.logical_and": [[384, "torch-tensor-logical-and"]], "torch.Tensor.le": [[361, "torch-tensor-le"]], "torch.Tensor.masked_fill_": [[402, "torch-tensor-masked-fill"]], "torch.Tensor.logical_not": [[386, "torch-tensor-logical-not"]], "torch.Tensor.logical_not_": [[387, "torch-tensor-logical-not"]], "torch.Tensor.logit_": [[393, "torch-tensor-logit"]], "torch.Tensor.logaddexp2": [[381, "torch-tensor-logaddexp2"]], "torch.Tensor.lerp_": [[364, "torch-tensor-lerp"]], "torch.Tensor.logaddexp": [[380, "torch-tensor-logaddexp"]], "torch.Tensor.logical_or": [[388, "torch-tensor-logical-or"]], "torch.Tensor.masked_select": [[405, "torch-tensor-masked-select"]], "torch.Tensor.less_equal": [[367, "torch-tensor-less-equal"]], "torch.Tensor.log10": [[372, "torch-tensor-log10"]], "torch.Tensor.log10_": [[373, "torch-tensor-log10"]], "torch.Tensor.masked_scatter": [[403, "torch-tensor-masked-scatter"]], "torch.Tensor.lu_solve": 
[[399, "torch-tensor-lu-solve"]], "torch.Tensor.lgamma_": [[370, "torch-tensor-lgamma"]], "torch.Tensor.logdet": [[383, "torch-tensor-logdet"]], "torch.Tensor.lgamma": [[369, "torch-tensor-lgamma"]], "torch.Tensor.log_normal_": [[379, "torch-tensor-log-normal"]], "torch.Tensor.matrix_exp": [[407, "torch-tensor-matrix-exp"]], "torch.Tensor.less_": [[366, "torch-tensor-less"]], "torch.Tensor.logit": [[392, "torch-tensor-logit"]], "torch.Tensor.lt_": [[397, "torch-tensor-lt"]], "torch.Tensor.logical_and_": [[385, "torch-tensor-logical-and"]], "torch.Tensor.logical_or_": [[389, "torch-tensor-logical-or"]], "torch.Tensor.logical_xor_": [[391, "torch-tensor-logical-xor"]], "torch.Tensor.le_": [[362, "torch-tensor-le"]], "torch.Tensor.log2_": [[377, "torch-tensor-log2"]], "torch.Tensor.log1p": [[374, "torch-tensor-log1p"]], "torch.Tensor.lcm": [[357, "torch-tensor-lcm"]], "torch.Tensor.masked_scatter_": [[404, "torch-tensor-masked-scatter"]], "torch.xpu.current_device": [[1985, "torch-xpu-current-device"]], "torch.trapezoid": [[1950, "torch-trapezoid"]], "torch.unflatten": [[1960, "torch-unflatten"]], "torch.var": [[1972, "torch-var"]], "torch.unsqueeze": [[1964, "torch-unsqueeze"]], "torch.xlogy": [[1981, "torch-xlogy"]], "torch.utils.set_module": [[1969, "torch-utils-set-module"]], "torch.where": [[1980, "torch-where"]], "torch.unbind": [[1959, "torch-unbind"]], "torch.utils.rename_privateuse1_backend": [[1968, "torch-utils-rename-privateuse1-backend"]], "torch.tensor": [[1943, "torch-tensor"]], "Stream": [[1983, "stream"], [1399, "stream"], [999, "stream"], [1014, "stream"]], "torch.unique": [[1961, "torch-unique"]], "torch.utils.swap_tensors": [[1970, "torch-utils-swap-tensors"]], "torch.tan": [[1941, "torch-tan"]], "torch.view_as_real": [[1976, "torch-view-as-real"]], "torch.vstack": [[1979, "torch-vstack"]], "torch.tril": [[1953, "torch-tril"]], "torch.triu": [[1955, "torch-triu"]], "torch.vmap": [[1977, "torch-vmap"]], "torch.xpu.current_stream": [[1986, "torch-xpu-current-stream"]], "torch.tile": [[1946, "torch-tile"]], "torch.tensor_split": [[1944, "torch-tensor-split"]], "torch.vdot": [[1974, "torch-vdot"]], "torch.t": [[1938, "torch-t"]], "torch.transpose": [[1949, "torch-transpose"]], "StreamContext": [[1984, "streamcontext"], [1400, "streamcontext"], [1000, "streamcontext"], [1015, "streamcontext"]], "torch.tril_indices": [[1954, "torch-tril-indices"]], "torch.vander": [[1971, "torch-vander"]], "torch.xpu.device_count": [[1988, "torch-xpu-device-count"]], "torch.take_along_dim": [[1940, "torch-take-along-dim"]], "Event": [[1982, "event"], [1398, "event"], [1386, "event"], [1011, "event"]], "torch.view_as_complex": [[1975, "torch-view-as-complex"]], "torch.vsplit": [[1978, "torch-vsplit"]], "torch.tanh": [[1942, "torch-tanh"]], "torch.triangular_solve": [[1952, "torch-triangular-solve"]], "torch.trapz": [[1951, "torch-trapz"]], "torch.utils.get_cpp_backtrace": [[1967, "torch-utils-get-cpp-backtrace"]], "torch.trunc": [[1958, "torch-trunc"]], "device": [[1987, "device"], [1404, "device"], [1030, "device"]], "torch.tensordot": [[1945, "torch-tensordot"]], "torch.unravel_index": [[1963, "torch-unravel-index"]], "torch.take": [[1939, "torch-take"]], "torch.unique_consecutive": [[1962, "torch-unique-consecutive"]], "torch.use_deterministic_algorithms": [[1965, "torch-use-deterministic-algorithms"]], "torch.utils.generate_methods_for_privateuse1_backend": [[1966, "torch-utils-generate-methods-for-privateuse1-backend"]], "torch.triu_indices": [[1956, "torch-triu-indices"]], "torch.trace": 
[[1948, "torch-trace"]], "torch.topk": [[1947, "torch-topk"]], "torch.true_divide": [[1957, "torch-true-divide"]], "torch.var_mean": [[1973, "torch-var-mean"]], "torch.reciprocal": [[1846, "torch-reciprocal"]], "torch.set_flush_denormal": [[1872, "torch-set-flush-denormal"]], "torch.randn_like": [[1841, "torch-randn-like"]], "torch.randperm": [[1842, "torch-randperm"]], "torch.renorm": [[1848, "torch-renorm"]], "torch.select": [[1865, "torch-select"]], "torch.set_num_interop_threads": [[1873, "torch-set-num-interop-threads"]], "torch.scatter": [[1860, "torch-scatter"]], "torch.sgn": [[1878, "torch-sgn"]], "torch.resolve_conj": [[1851, "torch-resolve-conj"]], "torch.signal.windows.general_cosine": [[1886, "torch-signal-windows-general-cosine"]], "torch.roll": [[1854, "torch-roll"]], "torch.set_default_tensor_type": [[1869, "torch-set-default-tensor-type"]], "torch.randint": [[1838, "torch-randint"]], "torch.set_warn_always": [[1877, "torch-set-warn-always"]], "torch.signal.windows.exponential": [[1884, "torch-signal-windows-exponential"]], "torch.reshape": [[1850, "torch-reshape"]], "torch.sigmoid": [[1879, "torch-sigmoid"]], "torch.range": [[1843, "torch-range"]], "torch.set_float32_matmul_precision": [[1871, "torch-set-float32-matmul-precision"]], "torch.set_num_threads": [[1874, "torch-set-num-threads"]], "torch.rand_like": [[1837, "torch-rand-like"]], "torch.set_rng_state": [[1876, "torch-set-rng-state"]], "torch.select_scatter": [[1866, "torch-select-scatter"]], "torch.rsqrt": [[1858, "torch-rsqrt"]], "torch.randint_like": [[1839, "torch-randint-like"]], "torch.scatter_reduce": [[1862, "torch-scatter-reduce"]], "torch.set_printoptions": [[1875, "torch-set-printoptions"]], "torch.set_deterministic_debug_mode": [[1870, "torch-set-deterministic-debug-mode"]], "torch.signal.windows.bartlett": [[1881, "torch-signal-windows-bartlett"]], "torch.save": [[1859, "torch-save"]], "torch.remainder": [[1847, "torch-remainder"]], "torch.rand": [[1836, "torch-rand"]], "torch.result_type": [[1853, "torch-result-type"]], "torch.randn": [[1840, "torch-randn"]], "torch.set_default_device": [[1867, "torch-set-default-device"]], "torch.signal.windows.cosine": [[1883, "torch-signal-windows-cosine"]], "torch.seed": [[1864, "torch-seed"]], "torch.real": [[1845, "torch-real"]], "torch.row_stack": [[1857, "torch-row-stack"]], "torch.round": [[1856, "torch-round"]], "torch.set_default_dtype": [[1868, "torch-set-default-dtype"]], "torch.signal.windows.blackman": [[1882, "torch-signal-windows-blackman"]], "torch.searchsorted": [[1863, "torch-searchsorted"]], "torch.signal.windows.gaussian": [[1885, "torch-signal-windows-gaussian"]], "torch.ravel": [[1844, "torch-ravel"]], "torch.sign": [[1880, "torch-sign"]], "torch.scatter_add": [[1861, "torch-scatter-add"]], "torch.rot90": [[1855, "torch-rot90"]], "torch.repeat_interleave": [[1849, "torch-repeat-interleave"]], "torch.resolve_neg": [[1852, "torch-resolve-neg"]], "torch.sparse.as_sparse_gradcheck": [[1902, "torch-sparse-as-sparse-gradcheck"]], "torch.sparse_bsc_tensor": [[1910, "torch-sparse-bsc-tensor"]], "torch.square": [[1918, "torch-square"]], "torch.sparse_csr_tensor": [[1915, "torch-sparse-csr-tensor"]], "torch.sparse_bsr_tensor": [[1911, "torch-sparse-bsr-tensor"]], "torch.softmax": [[1899, "torch-softmax"]], "torch.signal.windows.nuttall": [[1891, "torch-signal-windows-nuttall"]], "torch.sparse_compressed_tensor": [[1912, "torch-sparse-compressed-tensor"]], "torch.sqrt": [[1917, "torch-sqrt"]], "torch.sym_float": [[1932, "torch-sym-float"]], 
"torch.signal.windows.general_hamming": [[1887, "torch-signal-windows-general-hamming"]], "torch.std_mean": [[1923, "torch-std-mean"]], "torch.sparse.softmax": [[1907, "torch-sparse-softmax"]], "torch.swapdims": [[1931, "torch-swapdims"]], "torch.sym_not": [[1937, "torch-sym-not"]], "torch.svd_lowrank": [[1929, "torch-svd-lowrank"]], "torch.stft": [[1924, "torch-stft"]], "torch.smm": [[1898, "torch-smm"]], "torch.sym_min": [[1936, "torch-sym-min"]], "check_sparse_tensor_invariants": [[1903, "check-sparse-tensor-invariants"]], "torch.sinh": [[1895, "torch-sinh"]], "torch.stack": [[1921, "torch-stack"]], "torch.sparse.sum": [[1909, "torch-sparse-sum"]], "torch.slice_scatter": [[1896, "torch-slice-scatter"]], "torch.squeeze": [[1919, "torch-squeeze"]], "torch.sort": [[1900, "torch-sort"]], "torch.sin": [[1893, "torch-sin"]], "torch.sinc": [[1894, "torch-sinc"]], "torch.sparse.addmm": [[1901, "torch-sparse-addmm"]], "torch.signal.windows.hann": [[1889, "torch-signal-windows-hann"]], "torch.sspaddmm": [[1920, "torch-sspaddmm"]], "torch.swapaxes": [[1930, "torch-swapaxes"]], "torch.sparse.mm": [[1905, "torch-sparse-mm"]], "torch.svd": [[1928, "torch-svd"]], "torch.slogdet": [[1897, "torch-slogdet"]], "torch.signbit": [[1892, "torch-signbit"]], "torch.sparse_csc_tensor": [[1914, "torch-sparse-csc-tensor"]], "torch.signal.windows.kaiser": [[1890, "torch-signal-windows-kaiser"]], "torch.sparse.sampled_addmm": [[1906, "torch-sparse-sampled-addmm"]], "torch.sym_int": [[1933, "torch-sym-int"]], "torch.signal.windows.hamming": [[1888, "torch-signal-windows-hamming"]], "torch.sym_max": [[1935, "torch-sym-max"]], "torch.std": [[1922, "torch-std"]], "torch.sparse.log_softmax": [[1904, "torch-sparse-log-softmax"]], "torch.sparse_coo_tensor": [[1913, "torch-sparse-coo-tensor"]], "torch.sym_ite": [[1934, "torch-sym-ite"]], "torch.split": [[1916, "torch-split"]], "torch.subtract": [[1926, "torch-subtract"]], "torch.sub": [[1925, "torch-sub"]], "torch.sparse.spdiags": [[1908, "torch-sparse-spdiags"]], "torch.sum": [[1927, "torch-sum"]], "torch.nn.functional.scaled_dot_product_attention": [[1685, "torch-nn-functional-scaled-dot-product-attention"]], "torch.nn.utils.parametrizations.spectral_norm": [[1732, "torch-nn-utils-parametrizations-spectral-norm"]], "torch.nn.functional.softshrink": [[1694, "torch-nn-functional-softshrink"]], "torch.nn.functional.torch.nn.parallel.data_parallel": [[1700, "torch-nn-functional-torch-nn-parallel-data-parallel"]], "torch.nn.functional.upsample_nearest": [[1706, "torch-nn-functional-upsample-nearest"]], "torch.nn.functional.threshold": [[1698, "torch-nn-functional-threshold"]], "torch.nn.functional.tanhshrink": [[1697, "torch-nn-functional-tanhshrink"]], "torch.nn.functional.softsign": [[1695, "torch-nn-functional-softsign"]], "torch.nn.functional.threshold_": [[1699, "torch-nn-functional-threshold"]], "torch.nn.utils.fuse_linear_bn_eval": [[1728, "torch-nn-utils-fuse-linear-bn-eval"]], "torch.nn.functional.smooth_l1_loss": [[1689, "torch-nn-functional-smooth-l1-loss"]], "torch.nn.modules.module.register_module_parameter_registration_hook": [[1715, "torch-nn-modules-module-register-module-parameter-registration-hook"]], "torch.nn.functional.softplus": [[1693, "torch-nn-functional-softplus"]], "torch.nn.modules.module.register_module_backward_hook": [[1708, "torch-nn-modules-module-register-module-backward-hook"]], "Parameter": [[1718, "parameter"]], "torch.nn.utils.clip_grad_norm": [[1721, "torch-nn-utils-clip-grad-norm"]], "torch.nn.utils.parametrizations.weight_norm": 
[[1733, "torch-nn-utils-parametrizations-weight-norm"]], "torch.nn.functional.triplet_margin_loss": [[1701, "torch-nn-functional-triplet-margin-loss"]], "torch.nn.modules.module.register_module_forward_pre_hook": [[1711, "torch-nn-modules-module-register-module-forward-pre-hook"]], "torch.nn.functional.softmin": [[1692, "torch-nn-functional-softmin"]], "torch.nn.modules.module.register_module_full_backward_pre_hook": [[1713, "torch-nn-modules-module-register-module-full-backward-pre-hook"]], "torch.nn.functional.upsample_bilinear": [[1705, "torch-nn-functional-upsample-bilinear"]], "torch.nn.utils.convert_conv3d_weight_memory_format": [[1725, "torch-nn-utils-convert-conv3d-weight-memory-format"]], "torch.nn.functional.upsample": [[1704, "torch-nn-functional-upsample"]], "torch.nn.modules.module.register_module_buffer_registration_hook": [[1709, "torch-nn-modules-module-register-module-buffer-registration-hook"]], "torch.nn.functional.unfold": [[1703, "torch-nn-functional-unfold"]], "UninitializedParameter": [[1720, "uninitializedparameter"]], "torch.nn.modules.module.register_module_module_registration_hook": [[1714, "torch-nn-modules-module-register-module-module-registration-hook"]], "torch.nn.functional.softmax": [[1691, "torch-nn-functional-softmax"]], "torch.nn.functional.selu": [[1686, "torch-nn-functional-selu"]], "torch.nn.utils.convert_conv2d_weight_memory_format": [[1724, "torch-nn-utils-convert-conv2d-weight-memory-format"]], "torch.nn.functional.sigmoid": [[1687, "torch-nn-functional-sigmoid"]], "torch.nn.utils.fuse_linear_bn_weights": [[1729, "torch-nn-utils-fuse-linear-bn-weights"]], "torch.nn.utils.clip_grad_norm_": [[1722, "torch-nn-utils-clip-grad-norm"]], "torch.nn.functional.rrelu_": [[1684, "torch-nn-functional-rrelu"]], "torch.nn.functional.tanh": [[1696, "torch-nn-functional-tanh"]], "torch.nn.modules.module.register_module_forward_hook": [[1710, "torch-nn-modules-module-register-module-forward-hook"]], "torch.nn.utils.fuse_conv_bn_eval": [[1726, "torch-nn-utils-fuse-conv-bn-eval"]], "LazyModuleMixin": [[1707, "lazymodulemixin"]], "torch.nn.functional.triplet_margin_with_distance_loss": [[1702, "torch-nn-functional-triplet-margin-with-distance-loss"]], "UninitializedBuffer": [[1719, "uninitializedbuffer"]], "torch.nn.functional.rrelu": [[1683, "torch-nn-functional-rrelu"]], "torch.nn.utils.fuse_conv_bn_weights": [[1727, "torch-nn-utils-fuse-conv-bn-weights"]], "torch.nn.functional.soft_margin_loss": [[1690, "torch-nn-functional-soft-margin-loss"]], "torch.nn.utils.clip_grad_value_": [[1723, "torch-nn-utils-clip-grad-value"]], "torch.nn.utils.parametrizations.orthogonal": [[1731, "torch-nn-utils-parametrizations-orthogonal"]], "torch.nn.functional.silu": [[1688, "torch-nn-functional-silu"]], "RMSNorm": [[1716, "rmsnorm"], [1542, "rmsnorm"]], "torch.nn.utils.parameters_to_vector": [[1730, "torch-nn-utils-parameters-to-vector"]], "DistributedDataParallel": [[1717, "distributeddataparallel"], [2048, "distributeddataparallel"]], "torch.nn.modules.module.register_module_full_backward_hook": [[1712, "torch-nn-modules-module-register-module-full-backward-hook"]], "FXE0015:fx-node-insert-type-promotion": [[88, "fxe0015-fx-node-insert-type-promotion"]], "torch.fx.experimental": [[65, "torch-fx-experimental"]], "torch.fx.experimental.symbolic_shapes": [[65, "module-torch.fx.experimental.symbolic_shapes"]], "Train script": [[51, "train-script"]], "python.assert": [[67, "python-assert"]], "dynamic_shape_assert": [[67, "dynamic-shape-assert"], [66, "dynamic-shape-assert"]], 
"list_contains": [[67, "list-contains"], [66, "list-contains"], [72, "list-contains"], [75, "list-contains"]], "torch.fft": [[54, "torch-fft"]], "Fast Fourier Transforms": [[54, "fast-fourier-transforms"]], "Helper Functions": [[54, "helper-functions"]], "FXE0010:fx-pass": [[83, "fxe0010-fx-pass"]], "python.object-model": [[73, "python-object-model"]], "model_attr_mutation": [[73, "model-attr-mutation"], [66, "model-attr-mutation"]], "optional_input": [[73, "optional-input"], [66, "optional-input"]], "FXE0014:find-opschema-matched-symbolic-function": [[87, "fxe0014-find-opschema-matched-symbolic-function"]], "FXE0016:find-operator-overloads-in-onnx-registry": [[89, "fxe0016-find-operator-overloads-in-onnx-registry"]], "ExportDB": [[66, "exportdb"]], "Tags": [[66, null]], "Supported": [[66, "supported"]], "assume_constant_result": [[66, "assume-constant-result"], [77, "assume-constant-result"]], "autograd_function": [[66, "autograd-function"]], "class_method": [[66, "class-method"]], "cond_branch_class_method": [[66, "cond-branch-class-method"], [74, "cond-branch-class-method"], [75, "cond-branch-class-method"]], "cond_branch_nested_function": [[66, "cond-branch-nested-function"], [74, "cond-branch-nested-function"], [75, "cond-branch-nested-function"]], "cond_branch_nonlocal_variables": [[66, "cond-branch-nonlocal-variables"], [74, "cond-branch-nonlocal-variables"], [75, "cond-branch-nonlocal-variables"]], "cond_closed_over_variable": [[66, "cond-closed-over-variable"], [69, "cond-closed-over-variable"], [74, "cond-closed-over-variable"]], "cond_operands": [[66, "cond-operands"], [74, "cond-operands"], [75, "cond-operands"]], "cond_predicate": [[66, "cond-predicate"], [74, "cond-predicate"], [75, "cond-predicate"]], "constrain_as_size_example": [[66, "constrain-as-size-example"], [76, "constrain-as-size-example"], [77, "constrain-as-size-example"]], "constrain_as_value_example": [[66, "constrain-as-value-example"], [76, "constrain-as-value-example"], [77, "constrain-as-value-example"]], "decorator": [[66, "decorator"]], "dictionary": [[66, "dictionary"], [72, "dictionary"]], "dynamic_shape_constructor": [[66, "dynamic-shape-constructor"], [75, "dynamic-shape-constructor"]], "dynamic_shape_if_guard": [[66, "dynamic-shape-if-guard"], [75, "dynamic-shape-if-guard"], [71, "dynamic-shape-if-guard"]], "dynamic_shape_map": [[66, "dynamic-shape-map"], [78, "dynamic-shape-map"], [75, "dynamic-shape-map"]], "dynamic_shape_slicing": [[66, "dynamic-shape-slicing"], [75, "dynamic-shape-slicing"]], "dynamic_shape_view": [[66, "dynamic-shape-view"], [75, "dynamic-shape-view"]], "fn_with_kwargs": [[66, "fn-with-kwargs"], [72, "fn-with-kwargs"]], "list_unpack": [[66, "list-unpack"], [72, "list-unpack"], [71, "list-unpack"]], "nested_function": [[66, "nested-function"], [69, "nested-function"]], "null_context_manager": [[66, "null-context-manager"], [70, "null-context-manager"]], "pytree_flatten": [[66, "pytree-flatten"]], "scalar_output": [[66, "scalar-output"], [75, "scalar-output"]], "specialized_attribute": [[66, "specialized-attribute"]], "static_for_loop": [[66, "static-for-loop"], [71, "static-for-loop"]], "static_if": [[66, "static-if"], [71, "static-if"]], "tensor_setattr": [[66, "tensor-setattr"], [68, "tensor-setattr"]], "type_reflection_method": [[66, "type-reflection-method"], [68, "type-reflection-method"]], "user_input_mutation": [[66, "user-input-mutation"], [79, "user-input-mutation"]], "Not Supported Yet": [[66, "not-supported-yet"]], "dynamic_shape_round": [[66, "dynamic-shape-round"], 
[75, "dynamic-shape-round"], [68, "dynamic-shape-round"]], "torch_sym_min": [[66, "torch-sym-min"], [80, "torch-sym-min"]], "Generator": [[90, "generator"]], "torch.func API Reference": [[57, "module-torch.func"]], "Function Transforms": [[57, "function-transforms"]], "Utilities for working with torch.nn.Modules": [[57, "utilities-for-working-with-torch-nn-modules"]], "FXE0013:op-level-debugging": [[86, "fxe0013-op-level-debugging"]], "torch.operator": [[80, "torch-operator"]], "torch.Tensor.abs": [[91, "torch-tensor-abs"]], "torch.Tensor.absolute": [[93, "torch-tensor-absolute"]], "torch.func": [[56, "torch-func"]], "What are composable function transforms?": [[56, "what-are-composable-function-transforms"]], "Why composable function transforms?": [[56, "why-composable-function-transforms"], [61, "why-composable-function-transforms"]], "Read More": [[56, "read-more"], [52, "read-more"], [2094, "read-more"]], "torch.Tensor.addbmm": [[101, "torch-tensor-addbmm"]], "torch.dynamic-value": [[76, "torch-dynamic-value"]], "torch.map": [[78, "torch-map"]], "torch.Tensor.acosh": [[97, "torch-tensor-acosh"]], "FullyShardedDataParallel": [[55, "module-torch.distributed.fsdp"]], "torch.Tensor.acos_": [[96, "torch-tensor-acos"]], "torch.escape-hatch": [[77, "torch-escape-hatch"]], "torch.Tensor.acos": [[95, "torch-tensor-acos"]], "torch.__future__": [[62, "module-torch.__future__"]], "Patching Batch Norm": [[58, "patching-batch-norm"]], "What\u2019s happening?": [[58, "what-s-happening"]], "How to fix": [[58, "how-to-fix"]], "Option 1: Change the BatchNorm": [[58, "option-1-change-the-batchnorm"]], "Option 2: torchvision parameter": [[58, "option-2-torchvision-parameter"]], "Option 3: functorch\u2019s patching": [[58, "option-3-functorch-s-patching"]], "Option 4: eval mode": [[58, "option-4-eval-mode"]], "FXE0008:fx-node-to-onnx": [[82, "fxe0008-fx-node-to-onnx"]], "python.data-structure": [[72, "python-data-structure"]], "torch.export IR Specification": [[53, "torch-export-ir-specification"]], "Assumptions": [[53, "assumptions"], [2079, "assumptions"]], "What is Export IR": [[53, "what-is-export-ir"]], "ExportedProgram": [[53, "exportedprogram"]], "Graph": [[53, "graph"]], "Node": [[53, "node"]], "call_function": [[53, "call-function"]], "Metadata": [[53, "metadata"]], "placeholder": [[53, "placeholder"]], "output": [[53, "output"]], "get_attr": [[53, "get-attr"]], "References": [[53, "references"], [2014, "references"]], "SymInt": [[53, "symint"]], "FakeTensor": [[53, "faketensor"]], "Pytree-able Types": [[53, "pytree-able-types"]], "torch.Tensor.absolute_": [[94, "torch-tensor-absolute"]], "torch.futures": [[63, "torch-futures"]], "torch.fx": [[64, "torch-fx"]], "Overview": [[64, "module-torch.fx"], [52, "overview"], [2099, "overview"], [18, "module-torch.cuda._sanitizer"], [19, "module-torch.cuda.tunable"], [2064, "overview"], [2065, "overview"], [2071, "module-torch.profiler"]], "Writing Transformations": [[64, "writing-transformations"]], "A Quick Primer on Graphs": [[64, "a-quick-primer-on-graphs"]], "Graph Manipulation": [[64, "graph-manipulation"]], "Direct Graph Manipulation": [[64, "direct-graph-manipulation"]], "Subgraph Rewriting With replace_pattern()": [[64, "subgraph-rewriting-with-replace-pattern"]], "Graph Manipulation Examples": [[64, "graph-manipulation-examples"]], "Proxy/Retracing": [[64, "proxy-retracing"]], "The Interpreter Pattern": [[64, "the-interpreter-pattern"]], "Examples of the Interpreter Pattern": [[64, "examples-of-the-interpreter-pattern"]], "Debugging": [[64, 
"debugging"], [2014, "debugging"]], "Introduction": [[64, "introduction"], [2024, "introduction"], [2036, "introduction"]], "Common Pitfalls in Transform Authoring": [[64, "common-pitfalls-in-transform-authoring"]], "Checking Correctness of Modules": [[64, "checking-correctness-of-modules"]], "Debugging the Generated Code": [[64, "debugging-the-generated-code"]], "Use pdb": [[64, "use-pdb"]], "Print the Generated Code": [[64, "print-the-generated-code"]], "Use the to_folder Function From GraphModule": [[64, "use-the-to-folder-function-from-graphmodule"]], "Debugging the Transformation": [[64, "debugging-the-transformation"]], "Available Debuggers": [[64, "available-debuggers"]], "Limitations of Symbolic Tracing": [[64, "limitations-of-symbolic-tracing"]], "Dynamic Control Flow": [[64, "dynamic-control-flow"]], "Static Control Flow": [[64, "static-control-flow"]], "Non-torch Functions": [[64, "non-torch-functions"]], "Customizing Tracing with the Tracer class": [[64, "customizing-tracing-with-the-tracer-class"]], "Leaf Modules": [[64, "leaf-modules"]], "Miscellanea": [[64, "miscellanea"]], "API Reference": [[64, "api-reference"], [52, "module-torch.export"], [2030, "module-torch.monitor"], [18, "api-reference"], [19, "api-reference"], [33, "module-torch.distributed.pipelining"], [12, "api-reference"], [2070, "api-reference"], [2065, "api-reference"], [2071, "api-reference"]], "FXE0012:unsupported-fx-node-analysis": [[85, "fxe0012-unsupported-fx-node-analysis"]], "torch.Tensor.add_": [[100, "torch-tensor-add"]], "torch.mutation": [[79, "torch-mutation"]], "FXE0007:fx-graph-to-onnx": [[81, "fxe0007-fx-graph-to-onnx"]], "Key Representations:": [[81, "key-representations"]], "Additional Notes:": [[81, "additional-notes"]], "torch.export": [[52, "torch-export"]], "Existing frameworks": [[52, "existing-frameworks"]], "Exporting a PyTorch Model": [[52, "exporting-a-pytorch-model"]], "An Example": [[52, "an-example"]], "Non-Strict Export": [[52, "non-strict-export"]], "Expressing Dynamism": [[52, "expressing-dynamism"]], "Serialization": [[52, "serialization"], [2091, "serialization"], [11, "serialization"]], "Specializations": [[52, "specializations"]], "Input Tensor Shapes": [[52, "input-tensor-shapes"]], "Python Primitives": [[52, "python-primitives"]], "Python Containers": [[52, "python-containers"]], "Limitations of torch.export": [[52, "limitations-of-torch-export"]], "Graph Breaks": [[52, "graph-breaks"], [2113, "graph-breaks"], [2104, "graph-breaks"]], "Data/Shape-Dependent Control Flow": [[52, "data-shape-dependent-control-flow"]], "Missing Fake/Meta/Abstract Kernels for Operators": [[52, "missing-fake-meta-abstract-kernels-for-operators"]], "Additional Links for Export Users": [[52, null]], "Deep Dive for PyTorch Developers": [[52, null], [2094, null]], "python.closure": [[69, "python-closure"]], "torch.cond": [[74, "torch-cond"], [990, "torch-cond"]], "torch.Tensor.abs_": [[92, "torch-tensor-abs"]], "torch.func Whirlwind Tour": [[61, "torch-func-whirlwind-tour"]], "What is torch.func?": [[61, "what-is-torch-func"]], "What are the transforms?": [[61, "what-are-the-transforms"]], "grad() (gradient computation)": [[61, "grad-gradient-computation"]], "vmap() (auto-vectorization)": [[61, "vmap-auto-vectorization"]], "vjp() (vector-Jacobian product)": [[61, "vjp-vector-jacobian-product"]], "jvp() (Jacobian-vector product)": [[61, "jvp-jacobian-vector-product"]], "jacrev(), jacfwd(), and hessian()": [[61, "jacrev-jacfwd-and-hessian"]], "Migrating from functorch to torch.func": [[59, 
"migrating-from-functorch-to-torch-func"]], "function transforms": [[59, "function-transforms"]], "NN module utilities": [[59, "nn-module-utilities"]], "functorch.make_functional": [[59, "functorch-make-functional"]], "functorch.combine_state_for_ensemble": [[59, "functorch-combine-state-for-ensemble"]], "functorch.compile": [[59, "functorch-compile"]], "python.context-manager": [[70, "python-context-manager"]], "torch.dynamic-shape": [[75, "torch-dynamic-shape"]], "torch.Tensor.add": [[99, "torch-tensor-add"]], "python.control-flow": [[71, "python-control-flow"]], "torch.Tensor.acosh_": [[98, "torch-tensor-acosh"]], "python.builtin": [[68, "python-builtin"]], "UX Limitations": [[60, "ux-limitations"]], "General limitations": [[60, "general-limitations"]], "torch.autograd APIs": [[60, "torch-autograd-apis"]], "vmap limitations": [[60, "vmap-limitations"]], "Mutation: Arbitrary mutation of Python data structures": [[60, "mutation-arbitrary-mutation-of-python-data-structures"]], "Mutation: in-place PyTorch Operations": [[60, "mutation-in-place-pytorch-operations"]], "Mutation: out= PyTorch Operations": [[60, "mutation-out-pytorch-operations"]], "Data-dependent Python control flow": [[60, "data-dependent-python-control-flow"]], "Data-dependent operations (.item())": [[60, "data-dependent-operations-item"]], "Dynamic shape operations (nonzero and friends)": [[60, "dynamic-shape-operations-nonzero-and-friends"]], "Randomness": [[60, "randomness"]], "FXE0011:no-symbolic-function-for-call-function": [[84, "fxe0011-no-symbolic-function-for-call-function"]], "GraphInfo": [[1779, "graphinfo"]], "CustomFromMask": [[1740, "customfrommask"]], "JitScalarType": [[1778, "jitscalartype"]], "torch.not_equal": [[1774, "torch-not-equal"]], "torch.nn.utils.rnn.pack_sequence": [[1760, "torch-nn-utils-rnn-pack-sequence"]], "torch.ones": [[1776, "torch-ones"]], "torch.nn.utils.parametrize.remove_parametrizations": [[1738, "torch-nn-utils-parametrize-remove-parametrizations"]], "Identity": [[1741, "identity"], [1488, "identity"]], "Adagrad": [[1783, "adagrad"]], "torch.nn.utils.parametrize.cached": [[1735, "torch-nn-utils-parametrize-cached"]], "BasePruningMethod": [[1739, "basepruningmethod"]], "torch.nn.utils.prune.is_pruned": [[1750, "torch-nn-utils-prune-is-pruned"]], "torch.nn.utils.prune.custom_from_mask": [[1747, "torch-nn-utils-prune-custom-from-mask"]], "no_grad": [[1770, "no-grad"]], "torch.nn.utils.spectral_norm": [[1766, "torch-nn-utils-spectral-norm"]], "Adadelta": [[1782, "adadelta"]], "torch.nn.utils.prune.ln_structured": [[1752, "torch-nn-utils-prune-ln-structured"]], "torch.nn.utils.prune.random_structured": [[1753, "torch-nn-utils-prune-random-structured"]], "torch.nn.utils.skip_init": [[1765, "torch-nn-utils-skip-init"]], "torch.nn.utils.rnn.unpack_sequence": [[1763, "torch-nn-utils-rnn-unpack-sequence"]], "L1Unstructured": [[1742, "l1unstructured"]], "PackedSequence": [[1758, "packedsequence"]], "torch.nn.utils.vector_to_parameters": [[1768, "torch-nn-utils-vector-to-parameters"]], "torch.nn.utils.weight_norm": [[1769, "torch-nn-utils-weight-norm"]], "torch.ones_like": [[1777, "torch-ones-like"]], "ParametrizationList": [[1734, "parametrizationlist"]], "RandomUnstructured": [[1746, "randomunstructured"]], "torch.nn.utils.prune.global_unstructured": [[1748, "torch-nn-utils-prune-global-unstructured"]], "torch.nn.utils.remove_spectral_norm": [[1756, "torch-nn-utils-remove-spectral-norm"]], "VerificationOptions": [[1780, "verificationoptions"]], "torch.nn.utils.rnn.pack_padded_sequence": [[1759, 
"torch-nn-utils-rnn-pack-padded-sequence"]], "torch.nn.utils.rnn.pad_packed_sequence": [[1761, "torch-nn-utils-rnn-pad-packed-sequence"]], "Adam": [[1784, "adam"]], "torch.nn.utils.rnn.pad_sequence": [[1762, "torch-nn-utils-rnn-pad-sequence"]], "torch.nn.utils.rnn.unpad_sequence": [[1764, "torch-nn-utils-rnn-unpad-sequence"]], "torch.nn.utils.parametrize.is_parametrized": [[1736, "torch-nn-utils-parametrize-is-parametrized"]], "torch.nn.utils.prune.random_unstructured": [[1754, "torch-nn-utils-prune-random-unstructured"]], "torch.nn.utils.prune.remove": [[1755, "torch-nn-utils-prune-remove"]], "torch.nn.utils.stateless.functional_call": [[1767, "torch-nn-utils-stateless-functional-call"]], "torch.numel": [[1775, "torch-numel"]], "torch.norm": [[1772, "torch-norm"]], "ASGD": [[1781, "asgd"]], "torch.normal": [[1773, "torch-normal"]], "LnStructured": [[1743, "lnstructured"]], "torch.nonzero": [[1771, "torch-nonzero"]], "RandomStructured": [[1745, "randomstructured"]], "PruningContainer": [[1744, "pruningcontainer"]], "torch.nn.utils.remove_weight_norm": [[1757, "torch-nn-utils-remove-weight-norm"]], "torch.nn.utils.prune.identity": [[1749, "torch-nn-utils-prune-identity"]], "torch.nn.utils.parametrize.register_parametrization": [[1737, "torch-nn-utils-parametrize-register-parametrization"]], "torch.nn.utils.prune.l1_unstructured": [[1751, "torch-nn-utils-prune-l1-unstructured"]], "ConstantLR": [[1800, "constantlr"]], "torch.optim.Optimizer.state_dict": [[1791, "torch-optim-optimizer-state-dict"]], "torch.polar": [[1821, "torch-polar"]], "Rprop": [[1796, "rprop"]], "torch.outer": [[1816, "torch-outer"]], "SequentialLR": [[1812, "sequentiallr"]], "torch.polygamma": [[1822, "torch-polygamma"]], "torch.pinverse": [[1819, "torch-pinverse"]], "torch.quantize_per_channel": [[1829, "torch-quantize-per-channel"]], "CosineAnnealingLR": [[1801, "cosineannealinglr"]], "RMSprop": [[1795, "rmsprop"]], "SobolEngine": [[1834, "sobolengine"]], "torch.optim.Optimizer.add_param_group": [[1789, "torch-optim-optimizer-add-param-group"]], "SparseAdam": [[1798, "sparseadam"]], "torch.quantile": [[1828, "torch-quantile"]], "torch.quantized_max_pool2d": [[1833, "torch-quantized-max-pool2d"]], "CosineAnnealingWarmRestarts": [[1802, "cosineannealingwarmrestarts"]], "MultiplicativeLR": [[1808, "multiplicativelr"]], "torch.promote_types": [[1826, "torch-promote-types"]], "RAdam": [[1794, "radam"]], "torch.quantized_max_pool1d": [[1832, "torch-quantized-max-pool1d"]], "torch.quantize_per_tensor": [[1830, "torch-quantize-per-tensor"]], "CyclicLR": [[1803, "cycliclr"]], "torch.pca_lowrank": [[1817, "torch-pca-lowrank"]], "torch.quantized_batch_norm": [[1831, "torch-quantized-batch-norm"]], "PolynomialLR": [[1810, "polynomiallr"]], "StepLR": [[1813, "steplr"]], "ReduceLROnPlateau": [[1811, "reducelronplateau"]], "NAdam": [[1788, "nadam"]], "torch.optim.Optimizer.step": [[1792, "torch-optim-optimizer-step"]], "ExponentialLR": [[1804, "exponentiallr"]], "torch.permute": [[1818, "torch-permute"]], "SGD": [[1797, "sgd"]], "torch.positive": [[1823, "torch-positive"]], "torch.qr": [[1827, "torch-qr"]], "torch.prod": [[1825, "torch-prod"]], "MultiStepLR": [[1807, "multisteplr"]], "torch.poisson": [[1820, "torch-poisson"]], "OneCycleLR": [[1809, "onecyclelr"]], "torch.rad2deg": [[1835, "torch-rad2deg"]], "torch.orgqr": [[1814, "torch-orgqr"]], "torch.ormqr": [[1815, "torch-ormqr"]], "torch.optim.Optimizer.load_state_dict": [[1790, "torch-optim-optimizer-load-state-dict"]], "torch.optim.Optimizer.zero_grad": [[1793, 
"torch-optim-optimizer-zero-grad"]], "LambdaLR": [[1805, "lambdalr"]], "LinearLR": [[1806, "linearlr"]], "LBFGS": [[1787, "lbfgs"]], "Adamax": [[1786, "adamax"]], "torch.pow": [[1824, "torch-pow"]], "AdamW": [[1785, "adamw"]], "ChainedScheduler": [[1799, "chainedscheduler"]], "torch.nn.functional.instance_norm": [[1643, "torch-nn-functional-instance-norm"]], "torch.nn.functional.pad": [[1672, "torch-nn-functional-pad"]], "torch.nn.functional.logsigmoid": [[1653, "torch-nn-functional-logsigmoid"]], "torch.nn.functional.grid_sample": [[1633, "torch-nn-functional-grid-sample"]], "torch.nn.functional.hardswish": [[1638, "torch-nn-functional-hardswish"]], "torch.nn.functional.normalize": [[1670, "torch-nn-functional-normalize"]], "torch.nn.functional.glu": [[1632, "torch-nn-functional-glu"]], "torch.nn.functional.hardsigmoid": [[1637, "torch-nn-functional-hardsigmoid"]], "torch.nn.functional.nll_loss": [[1669, "torch-nn-functional-nll-loss"]], "torch.nn.functional.max_unpool2d": [[1662, "torch-nn-functional-max-unpool2d"]], "torch.nn.functional.interpolate": [[1644, "torch-nn-functional-interpolate"]], "torch.nn.functional.pixel_shuffle": [[1675, "torch-nn-functional-pixel-shuffle"]], "torch.nn.functional.one_hot": [[1671, "torch-nn-functional-one-hot"]], "torch.nn.functional.lp_pool1d": [[1654, "torch-nn-functional-lp-pool1d"]], "torch.nn.functional.rms_norm": [[1682, "torch-nn-functional-rms-norm"]], "torch.nn.functional.hardshrink": [[1636, "torch-nn-functional-hardshrink"]], "torch.nn.functional.multilabel_margin_loss": [[1667, "torch-nn-functional-multilabel-margin-loss"]], "torch.nn.functional.lp_pool2d": [[1655, "torch-nn-functional-lp-pool2d"]], "torch.nn.functional.multilabel_soft_margin_loss": [[1668, "torch-nn-functional-multilabel-soft-margin-loss"]], "torch.nn.functional.group_norm": [[1634, "torch-nn-functional-group-norm"]], "torch.nn.functional.gumbel_softmax": [[1635, "torch-nn-functional-gumbel-softmax"]], "torch.nn.functional.pairwise_distance": [[1673, "torch-nn-functional-pairwise-distance"]], "torch.nn.functional.prelu": [[1678, "torch-nn-functional-prelu"]], "torch.nn.functional.huber_loss": [[1642, "torch-nn-functional-huber-loss"]], "torch.nn.functional.relu_": [[1681, "torch-nn-functional-relu"]], "torch.nn.functional.poisson_nll_loss": [[1677, "torch-nn-functional-poisson-nll-loss"]], "torch.nn.functional.hardtanh": [[1639, "torch-nn-functional-hardtanh"]], "torch.nn.functional.pixel_unshuffle": [[1676, "torch-nn-functional-pixel-unshuffle"]], "torch.nn.functional.l1_loss": [[1646, "torch-nn-functional-l1-loss"]], "torch.nn.functional.linear": [[1650, "torch-nn-functional-linear"]], "torch.nn.functional.mse_loss": [[1665, "torch-nn-functional-mse-loss"]], "torch.nn.functional.hardtanh_": [[1640, "torch-nn-functional-hardtanh"]], "torch.nn.functional.relu6": [[1680, "torch-nn-functional-relu6"]], "torch.nn.functional.hinge_embedding_loss": [[1641, "torch-nn-functional-hinge-embedding-loss"]], "torch.nn.functional.max_unpool1d": [[1661, "torch-nn-functional-max-unpool1d"]], "torch.nn.functional.margin_ranking_loss": [[1657, "torch-nn-functional-margin-ranking-loss"]], "torch.nn.functional.max_pool3d": [[1660, "torch-nn-functional-max-pool3d"]], "torch.nn.functional.lp_pool3d": [[1656, "torch-nn-functional-lp-pool3d"]], "torch.nn.functional.mish": [[1664, "torch-nn-functional-mish"]], "torch.nn.functional.max_unpool3d": [[1663, "torch-nn-functional-max-unpool3d"]], "torch.nn.functional.leaky_relu": [[1648, "torch-nn-functional-leaky-relu"]], 
"torch.nn.functional.max_pool1d": [[1658, "torch-nn-functional-max-pool1d"]], "torch.nn.functional.relu": [[1679, "torch-nn-functional-relu"]], "torch.nn.functional.max_pool2d": [[1659, "torch-nn-functional-max-pool2d"]], "torch.nn.functional.multi_margin_loss": [[1666, "torch-nn-functional-multi-margin-loss"]], "torch.nn.functional.pdist": [[1674, "torch-nn-functional-pdist"]], "torch.nn.functional.leaky_relu_": [[1649, "torch-nn-functional-leaky-relu"]], "torch.nn.functional.kl_div": [[1645, "torch-nn-functional-kl-div"]], "torch.nn.functional.log_softmax": [[1652, "torch-nn-functional-log-softmax"]], "torch.nn.functional.local_response_norm": [[1651, "torch-nn-functional-local-response-norm"]], "torch.nn.functional.layer_norm": [[1647, "torch-nn-functional-layer-norm"]], "Fake tensor": [[2103, "fake-tensor"]], "Motivation": [[2103, "motivation"], [2100, "motivation"], [2024, "motivation"]], "Related work": [[2103, "related-work"]], "Overall architecture": [[2103, "overall-architecture"], [2100, "overall-architecture"]], "API: the important bits": [[2103, "api-the-important-bits"]], "Details": [[2103, "details"]], "About the tensor subclass": [[2103, "about-the-tensor-subclass"]], "How is each individual operator implemented?": [[2103, "how-is-each-individual-operator-implemented"]], "How does the converter work?": [[2103, "how-does-the-converter-work"]], "Performance characteristics": [[2103, "performance-characteristics"]], "Fake tensor of fake tensor?": [[2103, "fake-tensor-of-fake-tensor"]], "Interaction with dynamic shapes": [[2103, "interaction-with-dynamic-shapes"]], "Other resources": [[2103, "other-resources"]], "torch": [[2091, "module-torch"]], "Tensors": [[2091, "tensors"]], "Creation Ops": [[2091, "creation-ops"]], "Indexing, Slicing, Joining, Mutating Ops": [[2091, "indexing-slicing-joining-mutating-ops"]], "Generators": [[2091, "generators"]], "Random sampling": [[2091, "random-sampling"]], "In-place random sampling": [[2091, "in-place-random-sampling"]], "Quasi-random sampling": [[2091, "quasi-random-sampling"]], "Parallelism": [[2091, "parallelism"]], "Locally disabling gradient computation": [[2091, "locally-disabling-gradient-computation"], [1, "locally-disabling-gradient-computation"], [2043, "locally-disabling-gradient-computation"]], "Math operations": [[2091, "math-operations"]], "Pointwise Ops": [[2091, "pointwise-ops"]], "Reduction Ops": [[2091, "reduction-ops"]], "Comparison Ops": [[2091, "comparison-ops"]], "Spectral Ops": [[2091, "spectral-ops"]], "Other Operations": [[2091, "other-operations"]], "BLAS and LAPACK Operations": [[2091, "blas-and-lapack-operations"]], "Foreach Operations": [[2091, "foreach-operations"]], "Utilities": [[2091, "utilities"], [2037, "module-torch.nn.utils"]], "Symbolic Numbers": [[2091, "symbolic-numbers"]], "Export Path": [[2091, "export-path"]], "Control Flow": [[2091, "control-flow"]], "Optimizations": [[2091, "optimizations"]], "Operator Tags": [[2091, "operator-tags"]], "AOTInductor: Ahead-Of-Time Compilation for Torch.Export-ed Models": [[2095, "aotinductor-ahead-of-time-compilation-for-torch-export-ed-models"]], "Model Compilation": [[2095, "model-compilation"]], "Inference in C++": [[2095, "inference-in-c"]], "Writing Graph Transformations on ATen IR": [[2112, "writing-graph-transformations-on-aten-ir"]], "Passes": [[2112, "passes"]], "Transformer": [[2112, "transformer"], [1571, "transformer"]], "One-to-One Pass": [[2112, "one-to-one-pass"]], "One-to-X Pass": [[2112, "one-to-x-pass"]], "One-to-None Pass": [[2112, 
"one-to-none-pass"]], "Utilizing Local Information": [[2112, "utilizing-local-information"]], "Subgraph Rewriter": [[2112, "subgraph-rewriter"]], "Pass Manager": [[2112, "pass-manager"]], "Partitioner": [[2112, "partitioner"]], "Subgraph Matcher": [[2112, "subgraph-matcher"]], "Capability Based Partitioner": [[2112, "capability-based-partitioner"]], "torch.utils": [[2119, "module-torch.utils"]], "Dynamo Deep-Dive": [[2101, "dynamo-deep-dive"]], "A Gentle Introduction to Dynamo": [[2101, "a-gentle-introduction-to-dynamo"]], "PEP 523: Adding a frame evaluation API to CPython": [[2101, "pep-523-adding-a-frame-evaluation-api-to-cpython"]], "Implementing CPython in Python": [[2101, "implementing-cpython-in-python"]], "Generating the Output Graph": [[2101, "generating-the-output-graph"]], "Making Dynamo Sound: Guards": [[2101, "making-dynamo-sound-guards"]], "Symbolic Shapes": [[2101, "symbolic-shapes"]], "Static by default": [[2101, "static-by-default"]], "0, 1 are always specialized": [[2101, "are-always-specialized"]], "Duck shaping": [[2101, "duck-shaping"]], "Guards on symbolic ints": [[2101, "guards-on-symbolic-ints"]], "Making Dynamo Complete: Graph Breaks": [[2101, "making-dynamo-complete-graph-breaks"]], "Conclusion": [[2101, "conclusion"]], "Footnotes": [[2101, "footnotes"]], "PyTorch 2.0 NNModule Support": [[2109, "pytorch-2-0-nnmodule-support"]], "NNModule Hooks Support": [[2109, "nnmodule-hooks-support"]], "nn.Module.__call__ Hooks Usage and limitations": [[2109, "nn-module-call-hooks-usage-and-limitations"]], "state_dict Hooks": [[2109, "state-dict-hooks"]], "Understanding CUDA Memory Usage": [[2115, "understanding-cuda-memory-usage"]], "Generating a Snapshot": [[2115, "generating-a-snapshot"]], "Using the visualizer": [[2115, "using-the-visualizer"]], "Active Memory Timeline": [[2115, "active-memory-timeline"]], "Allocator State History": [[2115, "allocator-state-history"]], "Snapshot API Reference": [[2115, "snapshot-api-reference"]], "IRs": [[2108, "irs"]], "Core Aten IR": [[2108, "core-aten-ir"]], "Prims IR": [[2108, "prims-ir"]], "Getting Started": [[2106, "getting-started"], [7, "getting-started"]], "Using a pretrained model": [[2106, "using-a-pretrained-model"]], "Next Steps": [[2106, "next-steps"]], "CUDAGraph Trees": [[2098, "cudagraph-trees"]], "CUDAGraph Background": [[2098, "cudagraph-background"]], "PyTorch CUDAGraph Integration": [[2098, "pytorch-cudagraph-integration"]], "Make Graphed Callables": [[2098, "make-graphed-callables"]], "TorchDynamo Previous CUDA Graphs Integration": [[2098, "torchdynamo-previous-cuda-graphs-integration"]], "CUDAGraph Trees Integration": [[2098, "cudagraph-trees-integration"]], "Limitations": [[2098, "limitations"], [2105, "limitations"], [2067, "limitations"]], "Comparisons": [[2098, "comparisons"], [2017, "comparisons"]], "Dynamic shapes": [[2100, "dynamic-shapes"]], "Abridged public API": [[2100, "abridged-public-api"]], "The Guard Model": [[2100, "the-guard-model"]], "Abridged internal API": [[2100, "abridged-internal-api"]], "DimDynamic policy": [[2100, "dimdynamic-policy"]], "Unbacked SymInts": [[2100, "unbacked-symints"]], "PYTORCH ProcessGroupNCCL Environment Variables": [[2117, "pytorch-processgroupnccl-environment-variables"]], "torch.ao.ns._numeric_suite_fx": [[2093, "torch-ao-ns-numeric-suite-fx"]], "torch.ao.ns.fx.utils": [[2093, "torch-ao-ns-fx-utils"]], "Torch Environment Variables": [[2116, "torch-environment-variables"]], "Best Practices for Backends": [[2097, "best-practices-for-backends"]], "x86 CPU": [[2097, 
"x86-cpu"]], "TorchInductor GPU Profiling": [[2107, "torchinductor-gpu-profiling"]], "Relevant Environment Variables": [[2107, "relevant-environment-variables"]], "Breakdown Model GPU Time": [[2107, "breakdown-model-gpu-time"]], "Benchmark Individual Triton Kernel": [[2107, "benchmark-individual-triton-kernel"]], "Profiling to understand torch.compile performance": [[2111, "profiling-to-understand-torch-compile-performance"]], "What to use torch.profiler for:": [[2111, "what-to-use-torch-profiler-for"]], "Basics of using torch.profiler and viewing traces": [[2111, "basics-of-using-torch-profiler-and-viewing-traces"]], "Working around CUDA Graph profiling issues": [[2111, "working-around-cuda-graph-profiling-issues"]], "Understanding compilation time": [[2111, "understanding-compilation-time"]], "Finding graph breaks: \u201cTorch-Compiled Region\u201d and \u201cCompiledFunction\u201d": [[2111, "finding-graph-breaks-torch-compiled-region-and-compiledfunction"]], "Operator Kernels": [[2111, "operator-kernels"]], "Launch overhead": [[2111, "launch-overhead"]], "PyTorch 2.0 Performance Dashboard": [[2110, "pytorch-2-0-performance-dashboard"]], "How to read the dashboard?": [[2110, "how-to-read-the-dashboard"]], "What is measured on the dashboard?": [[2110, "what-is-measured-on-the-dashboard"]], "Can I check if my PR affects TorchInductor\u2019s performance on the dashboard before merging?": [[2110, "can-i-check-if-my-pr-affects-torchinductor-s-performance-on-the-dashboard-before-merging"]], "How can I run any performance test locally?": [[2110, "how-can-i-run-any-performance-test-locally"]], "torch.compiler": [[2094, "torch-compiler"]], "Getting Started for PyTorch Users": [[2094, null]], "HowTo for PyTorch Backend Vendors": [[2094, null]], "torch.compiler API reference": [[2096, "torch-compiler-api-reference"]], "torch.ao.ns._numeric_suite": [[2092, "torch-ao-ns-numeric-suite"]], "Type Info": [[2118, "type-info"]], "torch.finfo": [[2118, "torch-finfo"]], "torch.iinfo": [[2118, "torch-iinfo"]], "torch.overrides": [[2114, "module-torch.overrides"]], "Functions": [[2114, "functions"], [2016, "functions"], [2014, "functions"], [2083, "functions"], [2067, "functions"]], "Custom Backends": [[2099, "custom-backends"]], "Registering Custom Backends": [[2099, "registering-custom-backends"]], "Custom Backends after AOTAutograd": [[2099, "custom-backends-after-aotautograd"]], "Examples": [[2099, "examples"], [42, "examples"], [12, "examples"], [2053, "examples"]], "Debugging Backend": [[2099, "debugging-backend"]], "Speedy Backend": [[2099, "speedy-backend"]], "Composable Backends": [[2099, "composable-backends"]], "torch.xpu": [[2120, "module-torch.xpu"]], "Random Number Generator": [[2120, "random-number-generator"], [17, "random-number-generator"]], "Streams and events": [[2120, "streams-and-events"], [2032, "streams-and-events"], [16, "streams-and-events"], [17, "streams-and-events"]], "PyTorch 2.0 Troubleshooting": [[2113, "pytorch-2-0-troubleshooting"]], "Title": [[2113, "id1"]], "Diagnosing Runtime Errors": [[2113, "diagnosing-runtime-errors"]], "Torchdynamo Errors": [[2113, "torchdynamo-errors"]], "Diagnosing TorchInductor Errors": [[2113, "diagnosing-torchinductor-errors"]], "Minifying TorchInductor Errors": [[2113, "minifying-torchinductor-errors"]], "Minifying Backend Compiler Errors": [[2113, "minifying-backend-compiler-errors"]], "Performance Profiling": [[2113, "performance-profiling"]], "Accessing TorchDynamo Profiler": [[2113, "accessing-torchdynamo-profiler"]], "TorchInductor Debugging 
using TORCH_COMPILE_DEBUG": [[2113, "torchinductor-debugging-using-torch-compile-debug"]], "Identifying the Cause of a Graph Break": [[2113, "identifying-the-cause-of-a-graph-break"]], "Excessive Recompilation": [[2113, "excessive-recompilation"]], "Accuracy Debugging": [[2113, "accuracy-debugging"]], "Extended Debugging": [[2113, "extended-debugging"]], "Cold Start Timing and Cache Corruption Debugging": [[2113, "cold-start-timing-and-cache-corruption-debugging"]], "Frequently Asked Questions": [[2104, "frequently-asked-questions"], [2014, "frequently-asked-questions"], [7, "frequently-asked-questions"], [2072, "frequently-asked-questions"], [2051, "frequently-asked-questions"], [2067, "frequently-asked-questions"]], "Does torch.compile support training?": [[2104, "does-torch-compile-support-training"]], "Do you support Distributed code?": [[2104, "do-you-support-distributed-code"]], "Do I still need to export whole graphs?": [[2104, "do-i-still-need-to-export-whole-graphs"]], "Why is my code crashing?": [[2104, "why-is-my-code-crashing"]], "Why is compilation slow?": [[2104, "why-is-compilation-slow"]], "Why are you recompiling in production?": [[2104, "why-are-you-recompiling-in-production"]], "How are you speeding up my code?": [[2104, "how-are-you-speeding-up-my-code"]], "Why am I not seeing speedups?": [[2104, "why-am-i-not-seeing-speedups"]], "Identifying the cause of a graph break": [[2104, "identifying-the-cause-of-a-graph-break"]], "Why didn\u2019t my code recompile when I changed it?": [[2104, "why-didnt-my-code-recompile-when-i-changed-it"]], "Why am I getting incorrect results?": [[2104, "why-am-i-getting-incorrect-results"]], "Why am I getting OOMs?": [[2104, "why-am-i-getting-ooms"]], "Does torch.func work with torch.compile (for grad and vmap transforms)?": [[2104, "does-torch-func-work-with-torch-compile-for-grad-and-vmap-transforms"]], "Calling torch.func transform inside of a function handled with torch.compile": [[2104, "calling-torch-func-transform-inside-of-a-function-handled-with-torch-compile"]], "Compiling torch.func.grad with torch.compile": [[2104, "compiling-torch-func-grad-with-torch-compile"]], "Compiling torch.vmap with torch.compile": [[2104, "compiling-torch-vmap-with-torch-compile"]], "Compiling functions besides the ones which are supported (escape hatch)": [[2104, "compiling-functions-besides-the-ones-which-are-supported-escape-hatch"]], "Does NumPy work with torch.compile?": [[2104, "does-numpy-work-with-torch-compile"]], "Which NumPy features does torch.compile support?": [[2104, "which-numpy-features-does-torch-compile-support"]], "Can I compile NumPy code using torch.compile?": [[2104, "can-i-compile-numpy-code-using-torch-compile"]], "Can I execute NumPy code on CUDA and compute gradients via torch.compile?": [[2104, "can-i-execute-numpy-code-on-cuda-and-compute-gradients-via-torch-compile"]], "How do I debug NumPy code under torch.compile?": [[2104, "how-do-i-debug-numpy-code-under-torch-compile"]], "I torch.compile some NumPy code and I did not see any speed-up.": [[2104, "i-torch-compile-some-numpy-code-and-i-did-not-see-any-speed-up"]], "Which API to use for fine grain tracing?": [[2104, "which-api-to-use-for-fine-grain-tracing"]], "How do I graph break on a function?": [[2104, "how-do-i-graph-break-on-a-function"]], "What\u2019s the difference between torch._dynamo.disable and torch._dynamo.disallow_in_graph": [[2104, "what-s-the-difference-between-torch-dynamo-disable-and-torch-dynamo-disallow-in-graph"]], "What\u2019s the difference between 
torch._dynamo.disable and torch._dynamo_skip": [[2104, "what-s-the-difference-between-torch-dynamo-disable-and-torch-dynamo-skip"]], "TorchDynamo APIs for fine-grained tracing": [[2105, "torchdynamo-apis-for-fine-grained-tracing"]], "TorchDynamo APIs to control fine-grained tracing": [[2105, "id1"]], "torch.compiler.disable": [[2105, "torch-compiler-disable"], [982, "torch-compiler-disable"]], "torch._dynamo.disallow_in_graph": [[2105, "torch-dynamo-disallow-in-graph"]], "torch.compiler.allow_in_graph": [[2105, "torch-compiler-allow-in-graph"], [978, "torch-compiler-allow-in-graph"]], "Dynamo Overview": [[2102, "dynamo-overview"]], "Dynamo Internals": [[2102, "dynamo-internals"]], "What is a guard?": [[2102, "what-is-a-guard"]], "What is Dynamo doing?": [[2102, "what-is-dynamo-doing"]], "How to inspect artifacts generated by Dynamo?": [[2102, "how-to-inspect-artifacts-generated-by-dynamo"]], "CausalVariant": [[1588, "causalvariant"]], "torch.nn.functional.alpha_dropout": [[1599, "torch-nn-functional-alpha-dropout"]], "torch.nn.functional.adaptive_max_pool2d": [[1596, "torch-nn-functional-adaptive-max-pool2d"]], "torch.nn.functional.elu_": [[1623, "torch-nn-functional-elu"]], "torch.nn.functional.cross_entropy": [[1616, "torch-nn-functional-cross-entropy"]], "torch.nn.functional.conv3d": [[1610, "torch-nn-functional-conv3d"]], "torch.nn.functional.fractional_max_pool3d": [[1629, "torch-nn-functional-fractional-max-pool3d"]], "UpsamplingNearest2d": [[1582, "upsamplingnearest2d"]], "torch.nn.functional.feature_alpha_dropout": [[1626, "torch-nn-functional-feature-alpha-dropout"]], "UpsamplingBilinear2d": [[1581, "upsamplingbilinear2d"]], "torch.nn.functional.batch_norm": [[1603, "torch-nn-functional-batch-norm"]], "torch.nn.functional.gelu": [[1631, "torch-nn-functional-gelu"]], "torch.nn.functional.conv1d": [[1608, "torch-nn-functional-conv1d"]], "torch.nn.functional.adaptive_avg_pool1d": [[1592, "torch-nn-functional-adaptive-avg-pool1d"]], "torch.nn.functional.binary_cross_entropy": [[1605, "torch-nn-functional-binary-cross-entropy"]], "torch.nn.functional.adaptive_max_pool1d": [[1595, "torch-nn-functional-adaptive-max-pool1d"]], "ZeroPad2d": [[1584, "zeropad2d"]], "torch.nn.functional.cosine_embedding_loss": [[1614, "torch-nn-functional-cosine-embedding-loss"]], "ZeroPad3d": [[1585, "zeropad3d"]], "torch.nn.functional.dropout2d": [[1620, "torch-nn-functional-dropout2d"]], "torch.nn.functional.conv_transpose1d": [[1611, "torch-nn-functional-conv-transpose1d"]], "torch.nn.functional.ctc_loss": [[1617, "torch-nn-functional-ctc-loss"]], "torch.nn.functional.elu": [[1622, "torch-nn-functional-elu"]], "torch.nn.functional.fold": [[1627, "torch-nn-functional-fold"]], "torch.nn.functional.adaptive_max_pool3d": [[1597, "torch-nn-functional-adaptive-max-pool3d"]], "torch.nn.functional.cosine_similarity": [[1615, "torch-nn-functional-cosine-similarity"]], "torch.nn.functional.affine_grid": [[1598, "torch-nn-functional-affine-grid"]], "torch.nn.functional.adaptive_avg_pool3d": [[1594, "torch-nn-functional-adaptive-avg-pool3d"]], "torch.nn.functional.bilinear": [[1604, "torch-nn-functional-bilinear"]], "torch.nn.functional.embedding": [[1624, "torch-nn-functional-embedding"]], "torch.nn.functional.dropout1d": [[1619, "torch-nn-functional-dropout1d"]], "torch.nn.functional.adaptive_avg_pool2d": [[1593, "torch-nn-functional-adaptive-avg-pool2d"]], "torch.nn.attention.bias.causal_lower_right": [[1589, "torch-nn-attention-bias-causal-lower-right"]], "torch.nn.functional.embedding_bag": [[1625, 
"torch-nn-functional-embedding-bag"]], "torch.nn.functional.conv_transpose2d": [[1612, "torch-nn-functional-conv-transpose2d"]], "torch.nn.functional.dropout": [[1618, "torch-nn-functional-dropout"]], "torch.nn.functional.conv_transpose3d": [[1613, "torch-nn-functional-conv-transpose3d"]], "ZeroPad1d": [[1583, "zeropad1d"]], "torch.nn.functional.gaussian_nll_loss": [[1630, "torch-nn-functional-gaussian-nll-loss"]], "SDPBackend": [[1586, "sdpbackend"]], "torch.nn.attention.bias.causal_upper_left": [[1590, "torch-nn-attention-bias-causal-upper-left"]], "torch.nn.functional.dropout3d": [[1621, "torch-nn-functional-dropout3d"]], "torch.nn.functional.fractional_max_pool2d": [[1628, "torch-nn-functional-fractional-max-pool2d"]], "torch.nn.functional.binary_cross_entropy_with_logits": [[1606, "torch-nn-functional-binary-cross-entropy-with-logits"]], "torch.nn.functional.celu": [[1607, "torch-nn-functional-celu"]], "torch.nn.functional.avg_pool2d": [[1601, "torch-nn-functional-avg-pool2d"]], "torch.nn.functional.conv2d": [[1609, "torch-nn-functional-conv2d"]], "torch.nn.functional.avg_pool3d": [[1602, "torch-nn-functional-avg-pool3d"]], "torch.nn.attention.bias.CausalBias": [[1587, "torch-nn-attention-bias-causalbias"]], "torch.nn.attention.sdpa_kernel": [[1591, "torch-nn-attention-sdpa-kernel"]], "torch.nn.functional.avg_pool1d": [[1600, "torch-nn-functional-avg-pool1d"]], "torch.mps": [[2031, "module-torch.mps"]], "MPS Profiler": [[2031, "mps-profiler"]], "MPS Event": [[2031, "mps-event"]], "TorchScript Language Reference": [[2017, "torchscript-language-reference"], [2016, "torchscript-language-reference"]], "Terminology": [[2017, "terminology"]], "Type System": [[2017, "id1"]], "TorchScript Types": [[2017, "torchscript-types"]], "Meta Types": [[2017, "meta-types"]], "Any Type": [[2017, "any-type"]], "Operators Supported for Any Type": [[2017, "operators-supported-for-any-type"]], "Design Notes": [[2017, "design-notes"], [2077, "design-notes"]], "Primitive Types": [[2017, "primitive-types"]], "Structural Types": [[2017, "structural-types"]], "Nominal Types": [[2017, "nominal-types"]], "Built-in Class": [[2017, "built-in-class"]], "Special Note on torch.nn.ModuleList and torch.nn.ModuleDict": [[2017, "special-note-on-torch-nn-modulelist-and-torch-nn-moduledict"]], "Custom Class": [[2017, "custom-class"]], "Enum Type": [[2017, "enum-type"]], "TorchScript Module Class": [[2017, "torchscript-module-class"]], "Module Instance Class": [[2017, "module-instance-class"]], "Type Annotation": [[2017, "type-annotation"]], "When to Annotate Types": [[2017, "when-to-annotate-types"]], "Annotate Function Signature": [[2017, "annotate-function-signature"]], "Annotate Variables and Data Attributes": [[2017, "annotate-variables-and-data-attributes"]], "Local Variables": [[2017, "local-variables"]], "Instance Data Attributes": [[2017, "instance-data-attributes"]], "Type Annotation APIs": [[2017, "type-annotation-apis"]], "torch.jit.annotate(T, expr)": [[2017, "torch-jit-annotate-t-expr"]], "Type Annotation Appendix": [[2017, "type-annotation-appendix"]], "TorchScript Type System Definition": [[2017, "torchscript-type-system-definition"]], "Unsupported Typing Constructs": [[2017, "unsupported-typing-constructs"], [2016, "unsupported-typing-constructs"]], "Expressions": [[2017, "expressions"], [2016, "expressions"]], "Arithmetic Conversions": [[2017, "arithmetic-conversions"]], "Atoms": [[2017, "atoms"]], "Identifiers": [[2017, "identifiers"]], "Literals": [[2017, "literals"], [2016, "literals"]], "Parenthesized 
Forms": [[2017, "parenthesized-forms"]], "List and Dictionary Displays": [[2017, "list-and-dictionary-displays"]], "Primaries": [[2017, "primaries"]], "Attribute References": [[2017, "attribute-references"]], "Subscriptions": [[2017, "subscriptions"]], "Slicings": [[2017, "slicings"]], "Calls": [[2017, "calls"]], "Power Operator": [[2017, "power-operator"]], "Unary and Arithmetic Bitwise Operations": [[2017, "unary-and-arithmetic-bitwise-operations"]], "Binary Arithmetic Operations": [[2017, "binary-arithmetic-operations"]], "Shifting Operations": [[2017, "shifting-operations"]], "Binary Bitwise Operations": [[2017, "binary-bitwise-operations"]], "Value Comparisons": [[2017, "value-comparisons"]], "Membership Test Operations": [[2017, "membership-test-operations"]], "Identity Comparisons": [[2017, "identity-comparisons"]], "Boolean Operations": [[2017, "boolean-operations"]], "Conditional Expressions": [[2017, "conditional-expressions"]], "Expression Lists": [[2017, "expression-lists"]], "Simple Statements": [[2017, "simple-statements"]], "Expression Statements": [[2017, "expression-statements"]], "Assignment Statements": [[2017, "assignment-statements"]], "Augmented Assignment Statements": [[2017, "augmented-assignment-statements"]], "Annotated Assignment Statements": [[2017, "annotated-assignment-statements"]], "The raise Statement": [[2017, "the-raise-statement"]], "The assert Statement": [[2017, "the-assert-statement"]], "The return Statement": [[2017, "the-return-statement"]], "The del Statement": [[2017, "the-del-statement"]], "The pass Statement": [[2017, "the-pass-statement"]], "The print Statement": [[2017, "the-print-statement"]], "The break Statement": [[2017, "the-break-statement"]], "The continue Statement:": [[2017, "the-continue-statement"]], "Compound Statements": [[2017, "compound-statements"]], "The if Statement": [[2017, "the-if-statement"]], "Basic if/else Statement": [[2017, "basic-if-else-statement"]], "Ternary if/else Statement": [[2017, "ternary-if-else-statement"]], "The while Statement": [[2017, "the-while-statement"]], "The for-in Statement": [[2017, "the-for-in-statement"]], "The with Statement": [[2017, "the-with-statement"]], "The tuple Statement": [[2017, "the-tuple-statement"]], "The getattr Statement": [[2017, "the-getattr-statement"]], "The hasattr Statement": [[2017, "the-hasattr-statement"]], "The zip Statement": [[2017, "the-zip-statement"]], "The enumerate Statement": [[2017, "the-enumerate-statement"]], "Python Values": [[2017, "python-values"]], "Resolution Rules": [[2017, "resolution-rules"]], "Python Built-in Functions Support": [[2017, "python-built-in-functions-support"]], "TorchScript Support for Python Built-in Functions": [[2017, "id5"]], "Python Built-in Values Support": [[2017, "python-built-in-values-support"]], "TorchScript Support for Python Built-in Values": [[2017, "id6"]], "torch.* APIs": [[2017, "torch-apis"]], "Remote Procedure Calls": [[2017, "remote-procedure-calls"]], "Asynchronous Execution": [[2017, "asynchronous-execution"]], "Type Annotations": [[2017, "type-annotations"]], "Meta Programming": [[2017, "meta-programming"]], "Type Refinement": [[2017, "type-refinement"]], "torch.linalg": [[2022, "torch-linalg"]], "Matrix Properties": [[2022, "matrix-properties"]], "Decompositions": [[2022, "decompositions"]], "Solvers": [[2022, "solvers"]], "Inverses": [[2022, "inverses"]], "Matrix Functions": [[2022, "matrix-functions"]], "Matrix Products": [[2022, "matrix-products"]], "Tensor Operations": [[2022, "tensor-operations"]], "Misc": 
[[2022, "misc"]], "Experimental Functions": [[2022, "experimental-functions"]], "JIT Utils - torch.utils.jit": [[2020, "module-torch.utils.jit"]], "torch.mtia": [[2032, "torch-mtia"]], "torch.hub": [[2012, "torch-hub"]], "Publishing models": [[2012, "publishing-models"]], "How to implement an entrypoint?": [[2012, "how-to-implement-an-entrypoint"]], "Important Notice": [[2012, "important-notice"]], "Loading models from Hub": [[2012, "loading-models-from-hub"]], "Running a loaded model:": [[2012, "running-a-loaded-model"]], "Where are my downloaded models saved?": [[2012, "where-are-my-downloaded-models-saved"]], "Caching logic": [[2012, "caching-logic"]], "Known limitations:": [[2012, "known-limitations"]], "torch.xpu.synchronize": [[2009, "torch-xpu-synchronize"]], "torch.xpu.get_device_name": [[1992, "torch-xpu-get-device-name"]], "torch.xpu.set_stream": [[2007, "torch-xpu-set-stream"]], "Types": [[2016, "supported-type"], [2067, "types"]], "Default Types": [[2016, "default-types"]], "Optional Type Refinement": [[2016, "optional-type-refinement"]], "TorchScript Classes": [[2016, "id2"], [2014, "torchscript-classes"]], "TorchScript Enums": [[2016, "id4"]], "Named Tuples": [[2016, "named-tuples"]], "Iterables": [[2016, "iterables"]], "List Construction": [[2016, "list-construction"]], "Tuple Construction": [[2016, "tuple-construction"]], "Dict Construction": [[2016, "dict-construction"]], "Variables": [[2016, "variables"], [2014, "variables"]], "Arithmetic Operators": [[2016, "arithmetic-operators"]], "Comparison Operators": [[2016, "comparison-operators"]], "Logical Operators": [[2016, "logical-operators"]], "Subscripts and Slicing": [[2016, "subscripts-and-slicing"]], "Function Calls": [[2016, "function-calls"]], "Method Calls": [[2016, "method-calls"]], "Ternary Expressions": [[2016, "ternary-expressions"]], "Casts": [[2016, "casts"]], "Accessing Module Parameters": [[2016, "accessing-module-parameters"]], "Statements": [[2016, "statements"]], "Simple Assignments": [[2016, "simple-assignments"]], "Pattern Matching Assignments": [[2016, "pattern-matching-assignments"]], "Print Statements": [[2016, "print-statements"]], "If Statements": [[2016, "if-statements"]], "While Loops": [[2016, "while-loops"]], "For loops with range": [[2016, "for-loops-with-range"]], "For loops over tuples": [[2016, "for-loops-over-tuples"]], "For loops over constant nn.ModuleList": [[2016, "for-loops-over-constant-nn-modulelist"]], "Break and Continue": [[2016, "break-and-continue"]], "Return": [[2016, "return"]], "Variable Resolution": [[2016, "variable-resolution"]], "Use of Python Values": [[2016, "use-of-python-values"]], "Attribute Lookup On Python Modules": [[2016, "attribute-lookup-on-python-modules"]], "Python-defined Constants": [[2016, "python-defined-constants"]], "Module Attributes": [[2016, "module-attributes"]], "TorchScript Builtins": [[2015, "torchscript-builtins"]], "Supported Tensor Methods": [[2015, "supported-tensor-methods"]], "Supported PyTorch Functions": [[2015, "supported-pytorch-functions"]], "TorchScript Builtin Functions": [[2015, "torchscript-builtin-functions"]], "Python Built-in Functions": [[2015, "python-built-in-functions"]], "math Module": [[2015, "math-module"]], "torch._logging": [[2023, "torch-logging"]], "torch.utils.mobile_optimizer": [[2027, "torch-utils-mobile-optimizer"]], "torch.monitor": [[2030, "torch-monitor"]], "torch.xpu.is_available": [[1998, "torch-xpu-is-available"]], "Named Tensors operator coverage": [[2034, "named-tensors-operator-coverage"]], "Supported 
Operations": [[2034, "id1"]], "Keeps input names": [[2034, "keeps-input-names"]], "Removes dimensions": [[2034, "removes-dimensions"]], "Unifies names from inputs": [[2034, "unifies-names-from-inputs"]], "Permutes dimensions": [[2034, "permutes-dimensions"]], "Contracts away dims": [[2034, "contracts-away-dims"]], "Factory functions": [[2034, "factory-functions"]], "out function and in-place variants": [[2034, "out-function-and-in-place-variants"]], "Named Tensors": [[2035, "named-tensors"]], "Creating named tensors": [[2035, "creating-named-tensors"]], "Named dimensions": [[2035, "named-dimensions"]], "Name propagation semantics": [[2035, "name-propagation-semantics"]], "match semantics": [[2035, "match-semantics"]], "Basic name inference rules": [[2035, "basic-name-inference-rules"]], "Explicit alignment by names": [[2035, "explicit-alignment-by-names"]], "Manipulating dimensions": [[2035, "manipulating-dimensions"]], "Autograd support": [[2035, "autograd-support"]], "Currently supported operations and subsystems": [[2035, "currently-supported-operations-and-subsystems"]], "Operators": [[2035, "operators"]], "Subsystems": [[2035, "subsystems"]], "Named tensor API reference": [[2035, "named-tensor-api-reference"]], "torch.xpu.empty_cache": [[1990, "torch-xpu-empty-cache"]], "torch.nn.attention.bias": [[2039, "module-torch.nn.attention.bias"]], "CausalBias": [[2039, "causalbias"]], "PyTorch documentation": [[2013, "pytorch-documentation"]], "Community": [[2013, null]], "Developer Notes": [[2013, null]], "Language Bindings": [[2013, null]], "Python API": [[2013, null], [2067, "module-torch.onnx"]], "Libraries": [[2013, null]], "Indices and tables": [[2013, "indices-and-tables"]], "torch.zeros_like": [[2011, "torch-zeros-like"]], "Meta device": [[2025, "meta-device"]], "Idioms for working with meta tensors": [[2025, "idioms-for-working-with-meta-tensors"]], "torch.xpu.get_rng_state_all": [[1995, "torch-xpu-get-rng-state-all"]], "torch.xpu.manual_seed_all": [[2001, "torch-xpu-manual-seed-all"]], "torch.xpu.set_rng_state_all": [[2006, "torch-xpu-set-rng-state-all"]], "torch.nn": [[2037, "module-torch.nn"], [2037, "id1"]], "Containers": [[2037, "containers"]], "Convolution Layers": [[2037, "convolution-layers"]], "Pooling layers": [[2037, "pooling-layers"]], "Padding Layers": [[2037, "padding-layers"]], "Non-linear Activations (weighted sum, nonlinearity)": [[2037, "non-linear-activations-weighted-sum-nonlinearity"]], "Non-linear Activations (other)": [[2037, "non-linear-activations-other"]], "Normalization Layers": [[2037, "normalization-layers"]], "Recurrent Layers": [[2037, "recurrent-layers"]], "Transformer Layers": [[2037, "transformer-layers"]], "Linear Layers": [[2037, "linear-layers"]], "Dropout Layers": [[2037, "dropout-layers"]], "Sparse Layers": [[2037, "sparse-layers"]], "Distance Functions": [[2037, "distance-functions"]], "Loss Functions": [[2037, "loss-functions"]], "Vision Layers": [[2037, "vision-layers"]], "Shuffle Layers": [[2037, "shuffle-layers"]], "DataParallel Layers (multi-GPU, distributed)": [[2037, "module-torch.nn.parallel"]], "Quantized Functions": [[2037, "quantized-functions"]], "Lazy Modules Initialization": [[2037, "lazy-modules-initialization"]], "Aliases": [[2037, "aliases"]], "torch.nn.attention": [[2038, "module-torch.nn.attention"]], "Utils": [[2038, "utils"]], "Submodules": [[2038, "submodules"]], "device_of": [[1989, "device-of"], [1032, "device-of"]], "torch.xpu.initial_seed": [[1997, "torch-xpu-initial-seed"]], "torch.xpu.seed": [[2002, 
"torch-xpu-seed"]], "Multiprocessing package - torch.multiprocessing": [[2033, "module-torch.multiprocessing"]], "Strategy management": [[2033, "strategy-management"]], "Sharing CUDA tensors": [[2033, "sharing-cuda-tensors"]], "Sharing strategies": [[2033, "sharing-strategies"]], "File descriptor - file_descriptor": [[2033, "file-descriptor-file-descriptor"]], "File system - file_system": [[2033, "file-system-file-system"]], "Spawning subprocesses": [[2033, "spawning-subprocesses"]], "torch.masked": [[2024, "torch-masked"]], "What is a MaskedTensor?": [[2024, "what-is-a-maskedtensor"]], "Supported Operators": [[2024, "supported-operators"]], "Unary Operators": [[2024, "unary-operators"]], "Binary Operators": [[2024, "binary-operators"]], "Reductions": [[2024, "reductions"]], "View and select functions": [[2024, "view-and-select-functions"]], "torch.utils.module_tracker": [[2029, "module-torch.utils.module_tracker"]], "torch.xpu.is_initialized": [[1999, "torch-xpu-is-initialized"]], "torch.zeros": [[2010, "torch-zeros"]], "TorchScript Unsupported PyTorch Constructs": [[2019, "torchscript-unsupported-pytorch-constructs"]], "Torch and Tensor Unsupported Attributes": [[2019, "torch-and-tensor-unsupported-attributes"]], "Unsupported Tensor Methods": [[2019, "unsupported-tensor-methods"]], "Unsupported Tensor Properties": [[2019, "unsupported-tensor-properties"]], "Functions Not Correctly Bound on Torch": [[2019, "functions-not-correctly-bound-on-torch"]], "Ops With Divergent Schemas Between Torch & Python": [[2019, "ops-with-divergent-schemas-between-torch-python"]], "PyTorch Unsupported Modules and Classes": [[2019, "pytorch-unsupported-modules-and-classes"]], "torch.xpu.get_device_properties": [[1993, "torch-xpu-get-device-properties"]], "torch.nested": [[2036, "module-torch.nested"]], "Construction": [[2036, "construction"], [2082, "construction"]], "size": [[2036, "size"]], "unbind": [[2036, "unbind"]], "Nested tensor constructor and conversion functions": [[2036, "nested-tensor-constructor-and-conversion-functions"]], "Supported operations": [[2036, "supported-operations"], [2082, "supported-operations"]], "torch.xpu.manual_seed": [[2000, "torch-xpu-manual-seed"]], "torch.xpu.seed_all": [[2003, "torch-xpu-seed-all"]], "torch.xpu.get_device_capability": [[1991, "torch-xpu-get-device-capability"]], "torch.utils.model_zoo": [[2028, "torch-utils-model-zoo"]], "torch.xpu.init": [[1996, "torch-xpu-init"]], "TorchScript": [[2014, "torchscript"]], "Creating TorchScript Code": [[2014, "creating-torchscript-code"]], "Mixing Tracing and Scripting": [[2014, "mixing-tracing-and-scripting"]], "TorchScript Language": [[2014, "torchscript-language"]], "Built-in Functions and Modules": [[2014, "built-in-functions-and-modules"]], "PyTorch Functions and Modules": [[2014, "pytorch-functions-and-modules"]], "Python Functions and Modules": [[2014, "python-functions-and-modules"]], "Python Language Reference Comparison": [[2014, "python-language-reference-comparison"]], "Disable JIT for Debugging": [[2014, "disable-jit-for-debugging"]], "Inspecting Code": [[2014, "inspecting-code"]], "Interpreting Graphs": [[2014, "interpreting-graphs"]], "Tracer": [[2014, "tracer"]], "Tracing Edge Cases": [[2014, "tracing-edge-cases"]], "Automatic Trace Checking": [[2014, "automatic-trace-checking"]], "Tracer Warnings": [[2014, "tracer-warnings"]], "Known Issues": [[2014, "known-issues"]], "Appendix": [[2014, "appendix"]], "Migrating to PyTorch 1.2 Recursive Scripting API": [[2014, 
"migrating-to-pytorch-1-2-recursive-scripting-api"]], "Modules": [[2014, "modules"], [2057, "modules"]], "Attributes": [[2014, "attributes"]], "Constants": [[2014, "constants"]], "Fusion Backends": [[2014, "fusion-backends"]], "torch.xpu.stream": [[2008, "torch-xpu-stream"]], "torch.xpu.set_device": [[2004, "torch-xpu-set-device"]], "torch.xpu.get_rng_state": [[1994, "torch-xpu-get-rng-state"]], "Python Language Reference Coverage": [[2018, "python-language-reference-coverage"]], "torch.library": [[2021, "module-torch.library"]], "Testing custom ops": [[2021, "testing-custom-ops"]], "Creating new custom ops in Python": [[2021, "creating-new-custom-ops-in-python"]], "Extending custom ops (created from Python or C++)": [[2021, "extending-custom-ops-created-from-python-or-c"]], "Low-level APIs": [[2021, "low-level-apis"]], "Miscellaneous Environment Variables": [[2026, "miscellaneous-environment-variables"]], "torch.xpu.set_rng_state": [[2005, "torch-xpu-set-rng-state"]], "Sigmoid": [[1558, "sigmoid"], [762, "sigmoid"]], "MultiMarginLoss": [[1532, "multimarginloss"]], "Unfold": [[1579, "unfold"]], "Tanh": [[1568, "tanh"]], "PReLU": [[1535, "prelu"]], "RReLU": [[1546, "rrelu"]], "Softmax2d": [[1562, "softmax2d"]], "SyncBatchNorm": [[1567, "syncbatchnorm"]], "SmoothL1Loss": [[1559, "smoothl1loss"]], "PixelShuffle": [[1539, "pixelshuffle"]], "Softsign": [[1566, "softsign"]], "SoftMarginLoss": [[1560, "softmarginloss"]], "ReplicationPad3d": [[1554, "replicationpad3d"]], "SELU": [[1555, "selu"]], "MultiheadAttention": [[1533, "multiheadattention"], [738, "multiheadattention"]], "NLLLoss": [[1534, "nllloss"]], "ReLU": [[1547, "relu"]], "Threshold": [[1570, "threshold"]], "ParameterDict": [[1537, "parameterdict"]], "Tanhshrink": [[1569, "tanhshrink"]], "ReflectionPad1d": [[1549, "reflectionpad1d"]], "Upsample": [[1580, "upsample"]], "ReplicationPad1d": [[1552, "replicationpad1d"]], "TransformerEncoder": [[1574, "transformerencoder"]], "Softplus": [[1564, "softplus"]], "TransformerDecoder": [[1572, "transformerdecoder"]], "PairwiseDistance": [[1536, "pairwisedistance"]], "RNN": [[1543, "rnn"]], "ReLU6": [[1548, "relu6"], [761, "relu6"]], "TransformerDecoderLayer": [[1573, "transformerdecoderlayer"]], "Softmax": [[1561, "softmax"]], "RNNBase": [[1544, "rnnbase"]], "Unflatten": [[1578, "unflatten"]], "SiLU": [[1557, "silu"]], "TransformerEncoderLayer": [[1575, "transformerencoderlayer"]], "PixelUnshuffle": [[1540, "pixelunshuffle"]], "ReplicationPad2d": [[1553, "replicationpad2d"]], "Softshrink": [[1565, "softshrink"]], "TripletMarginWithDistanceLoss": [[1577, "tripletmarginwithdistanceloss"]], "TripletMarginLoss": [[1576, "tripletmarginloss"]], "ReflectionPad2d": [[1550, "reflectionpad2d"]], "RNNCell": [[1545, "rnncell"], [768, "rnncell"]], "ParameterList": [[1538, "parameterlist"]], "PoissonNLLLoss": [[1541, "poissonnllloss"]], "Sequential": [[1556, "sequential"]], "MultiLabelMarginLoss": [[1530, "multilabelmarginloss"]], "Softmin": [[1563, "softmin"]], "ReflectionPad3d": [[1551, "reflectionpad3d"]], "MultiLabelSoftMarginLoss": [[1531, "multilabelsoftmarginloss"]], "torch.mtia.current_stream": [[1402, "torch-mtia-current-stream"]], "torch.mps.profiler.profile": [[1389, "torch-mps-profiler-profile"]], "torch.mps.seed": [[1392, "torch-mps-seed"]], "torch.mps.empty_cache": [[1385, "torch-mps-empty-cache"]], "torch.mtia.synchronize": [[1411, "torch-mtia-synchronize"]], "torch.mps.manual_seed": [[1388, "torch-mps-manual-seed"]], "torch.moveaxis": [[1380, "torch-moveaxis"]], "torch.mode": [[1379, 
"torch-mode"]], "torch.nansum": [[1421, "torch-nansum"]], "torch.mvlgamma": [[1416, "torch-mvlgamma"]], "torch.mv": [[1415, "torch-mv"]], "torch.mps.get_rng_state": [[1387, "torch-mps-get-rng-state"]], "torch.mps.synchronize": [[1395, "torch-mps-synchronize"]], "torch.mtia.default_stream": [[1403, "torch-mtia-default-stream"]], "torch.mtia.device_count": [[1405, "torch-mtia-device-count"]], "torch.mtia.current_device": [[1401, "torch-mtia-current-device"]], "torch.mps.device_count": [[1383, "torch-mps-device-count"]], "torch.nextafter": [[1427, "torch-nextafter"]], "torch.ne": [[1424, "torch-ne"]], "torch.msort": [[1396, "torch-msort"]], "torch.multiply": [[1414, "torch-multiply"]], "torch.mm": [[1378, "torch-mm"]], "torch.mtia.DeferredMtiaCallError": [[1397, "torch-mtia-deferredmtiacallerror"]], "torch.mtia.is_available": [[1407, "torch-mtia-is-available"]], "torch.nanquantile": [[1420, "torch-nanquantile"]], "torch.mtia.init": [[1406, "torch-mtia-init"]], "torch.mtia.stream": [[1410, "torch-mtia-stream"]], "torch.mps.profiler.start": [[1390, "torch-mps-profiler-start"]], "torch.narrow": [[1422, "torch-narrow"]], "torch.mps.driver_allocated_memory": [[1384, "torch-mps-driver-allocated-memory"]], "torch.multinomial": [[1413, "torch-multinomial"]], "torch.mps.set_per_process_memory_fraction": [[1393, "torch-mps-set-per-process-memory-fraction"]], "torch.minimum": [[1377, "torch-minimum"]], "torch.neg": [[1425, "torch-neg"]], "torch.negative": [[1426, "torch-negative"]], "torch.mps.set_rng_state": [[1394, "torch-mps-set-rng-state"]], "torch.nanmean": [[1418, "torch-nanmean"]], "torch.mps.profiler.stop": [[1391, "torch-mps-profiler-stop"]], "torch.nanmedian": [[1419, "torch-nanmedian"]], "torch.mul": [[1412, "torch-mul"]], "torch.nan_to_num": [[1417, "torch-nan-to-num"]], "torch.mtia.is_initialized": [[1408, "torch-mtia-is-initialized"]], "torch.narrow_copy": [[1423, "torch-narrow-copy"]], "torch.mps.current_allocated_memory": [[1382, "torch-mps-current-allocated-memory"]], "torch.movedim": [[1381, "torch-movedim"]], "torch.mtia.set_stream": [[1409, "torch-mtia-set-stream"]], "CosineEmbeddingLoss": [[1460, "cosineembeddingloss"]], "AvgPool1d": [[1436, "avgpool1d"]], "Bilinear": [[1444, "bilinear"]], "AdaptiveLogSoftmaxWithLoss": [[1431, "adaptivelogsoftmaxwithloss"]], "Conv1d": [[1454, "conv1d"], [741, "conv1d"]], "BatchNorm1d": [[1441, "batchnorm1d"]], "EmbeddingBag": [[1470, "embeddingbag"], [749, "embeddingbag"]], "Flatten": [[1472, "flatten"]], "FractionalMaxPool3d": [[1475, "fractionalmaxpool3d"]], "CircularPad1d": [[1448, "circularpad1d"]], "CircularPad3d": [[1450, "circularpad3d"]], "CrossEntropyLoss": [[1462, "crossentropyloss"]], "Dropout3d": [[1467, "dropout3d"]], "ConstantPad1d": [[1451, "constantpad1d"]], "ConvTranspose2d": [[1458, "convtranspose2d"], [745, "convtranspose2d"]], "Dropout2d": [[1466, "dropout2d"]], "BatchNorm2d": [[1442, "batchnorm2d"], [739, "batchnorm2d"]], "ConstantPad3d": [[1453, "constantpad3d"]], "Embedding": [[1469, "embedding"], [748, "embedding"]], "Conv3d": [[1456, "conv3d"], [743, "conv3d"], [734, "conv3d"]], "AdaptiveMaxPool1d": [[1432, "adaptivemaxpool1d"]], "CosineSimilarity": [[1461, "cosinesimilarity"]], "AvgPool3d": [[1438, "avgpool3d"]], "GLU": [[1477, "glu"]], "BCEWithLogitsLoss": [[1440, "bcewithlogitsloss"]], "CircularPad2d": [[1449, "circularpad2d"]], "BatchNorm3d": [[1443, "batchnorm3d"], [740, "batchnorm3d"]], "AdaptiveAvgPool3d": [[1430, "adaptiveavgpool3d"]], "ConstantPad2d": [[1452, "constantpad2d"]], "AvgPool2d": [[1437, "avgpool2d"]], 
"Fold": [[1473, "fold"]], "AdaptiveAvgPool1d": [[1428, "adaptiveavgpool1d"]], "AdaptiveAvgPool2d": [[1429, "adaptiveavgpool2d"]], "GELU": [[1476, "gelu"]], "CTCLoss": [[1446, "ctcloss"]], "ELU": [[1468, "elu"], [747, "elu"]], "FeatureAlphaDropout": [[1471, "featurealphadropout"]], "AdaptiveMaxPool3d": [[1434, "adaptivemaxpool3d"]], "Dropout": [[1464, "dropout"]], "CELU": [[1445, "celu"]], "AlphaDropout": [[1435, "alphadropout"]], "Dropout1d": [[1465, "dropout1d"]], "AdaptiveMaxPool2d": [[1433, "adaptivemaxpool2d"]], "GRU": [[1478, "gru"], [763, "gru"]], "FractionalMaxPool2d": [[1474, "fractionalmaxpool2d"]], "BCELoss": [[1439, "bceloss"]], "Conv2d": [[1455, "conv2d"], [733, "conv2d"], [742, "conv2d"]], "ConvTranspose1d": [[1457, "convtranspose1d"], [744, "convtranspose1d"]], "ChannelShuffle": [[1447, "channelshuffle"]], "DataParallel": [[1463, "dataparallel"]], "ConvTranspose3d": [[1459, "convtranspose3d"], [746, "convtranspose3d"]], "LSTMCell": [[1498, "lstmcell"], [766, "lstmcell"]], "Mish": [[1526, "mish"]], "MarginRankingLoss": [[1519, "marginrankingloss"]], "GRUCell": [[1479, "grucell"], [764, "grucell"]], "LogSoftmax": [[1517, "logsoftmax"]], "ModuleDict": [[1528, "moduledict"]], "MaxPool2d": [[1521, "maxpool2d"]], "KLDivLoss": [[1492, "kldivloss"]], "Hardswish": [[1484, "hardswish"], [753, "hardswish"]], "LazyInstanceNorm3d": [[1511, "lazyinstancenorm3d"]], "LeakyReLU": [[1513, "leakyrelu"], [758, "leakyrelu"]], "LazyBatchNorm3d": [[1502, "lazybatchnorm3d"]], "LPPool2d": [[1495, "lppool2d"]], "LazyConvTranspose1d": [[1506, "lazyconvtranspose1d"]], "LazyConvTranspose3d": [[1508, "lazyconvtranspose3d"]], "LazyInstanceNorm1d": [[1509, "lazyinstancenorm1d"]], "Module": [[1527, "module"]], "MaxUnpool3d": [[1525, "maxunpool3d"]], "MaxUnpool2d": [[1524, "maxunpool2d"]], "HuberLoss": [[1487, "huberloss"]], "MSELoss": [[1518, "mseloss"]], "InstanceNorm2d": [[1490, "instancenorm2d"], [755, "instancenorm2d"]], "LazyBatchNorm2d": [[1501, "lazybatchnorm2d"]], "InstanceNorm3d": [[1491, "instancenorm3d"], [756, "instancenorm3d"]], "LazyBatchNorm1d": [[1500, "lazybatchnorm1d"]], "GaussianNLLLoss": [[1480, "gaussiannllloss"]], "MaxPool1d": [[1520, "maxpool1d"]], "Hardshrink": [[1482, "hardshrink"]], "MaxPool3d": [[1522, "maxpool3d"]], "LSTM": [[1497, "lstm"], [765, "lstm"], [737, "lstm"]], "InstanceNorm1d": [[1489, "instancenorm1d"], [754, "instancenorm1d"]], "LazyConv3d": [[1505, "lazyconv3d"]], "LPPool3d": [[1496, "lppool3d"]], "ModuleList": [[1529, "modulelist"]], "Linear": [[1514, "linear"], [767, "linear"], [736, "linear"], [735, "linear"], [759, "linear"]], "LocalResponseNorm": [[1515, "localresponsenorm"]], "LazyConv2d": [[1504, "lazyconv2d"]], "LazyLinear": [[1512, "lazylinear"]], "Hardsigmoid": [[1483, "hardsigmoid"]], "LazyConvTranspose2d": [[1507, "lazyconvtranspose2d"]], "LazyConv1d": [[1503, "lazyconv1d"]], "LogSigmoid": [[1516, "logsigmoid"]], "MaxUnpool1d": [[1523, "maxunpool1d"]], "HingeEmbeddingLoss": [[1486, "hingeembeddingloss"]], "LPPool1d": [[1494, "lppool1d"]], "GroupNorm": [[1481, "groupnorm"], [752, "groupnorm"]], "LazyInstanceNorm2d": [[1510, "lazyinstancenorm2d"]], "L1Loss": [[1493, "l1loss"]], "Hardtanh": [[1485, "hardtanh"]], "LayerNorm": [[1499, "layernorm"], [757, "layernorm"]], "torch.matrix_exp": [[1369, "torch-matrix-exp"]], "torch.lobpcg": [[1346, "torch-lobpcg"]], "torch.lu": [[1363, "torch-lu"]], "torch.linalg.matrix_power": [[1327, "torch-linalg-matrix-power"]], "torch.linalg.matrix_rank": [[1328, "torch-linalg-matrix-rank"]], "torch.linalg.svdvals": [[1338, 
"torch-linalg-svdvals"]], "torch.max": [[1371, "torch-max"]], "torch.meshgrid": [[1375, "torch-meshgrid"]], "torch.log": [[1347, "torch-log"]], "torch.lu_solve": [[1364, "torch-lu-solve"]], "torch.mean": [[1373, "torch-mean"]], "torch.linalg.pinv": [[1331, "torch-linalg-pinv"]], "torch.linalg.tensorsolve": [[1340, "torch-linalg-tensorsolve"]], "torch.linalg.norm": [[1330, "torch-linalg-norm"]], "torch.linalg.solve_ex": [[1335, "torch-linalg-solve-ex"]], "torch.log2": [[1350, "torch-log2"]], "torch.log10": [[1348, "torch-log10"]], "torch.lt": [[1362, "torch-lt"]], "torch.min": [[1376, "torch-min"]], "torch.linalg.matrix_norm": [[1326, "torch-linalg-matrix-norm"]], "torch.median": [[1374, "torch-median"]], "torch.matrix_power": [[1370, "torch-matrix-power"]], "torch.logdet": [[1354, "torch-logdet"]], "torch.linalg.solve_triangular": [[1336, "torch-linalg-solve-triangular"]], "torch.linalg.slogdet": [[1333, "torch-linalg-slogdet"]], "torch.linalg.tensorinv": [[1339, "torch-linalg-tensorinv"]], "torch.logcumsumexp": [[1353, "torch-logcumsumexp"]], "torch.logspace": [[1360, "torch-logspace"]], "torch.logical_and": [[1355, "torch-logical-and"]], "torch.linalg.multi_dot": [[1329, "torch-linalg-multi-dot"]], "torch.logical_or": [[1357, "torch-logical-or"]], "torch.linalg.solve": [[1334, "torch-linalg-solve"]], "torch.matmul": [[1368, "torch-matmul"]], "torch.linalg.svd": [[1337, "torch-linalg-svd"]], "torch.logical_xor": [[1358, "torch-logical-xor"]], "torch.linalg.vector_norm": [[1343, "torch-linalg-vector-norm"]], "torch.log1p": [[1349, "torch-log1p"]], "torch.manual_seed": [[1366, "torch-manual-seed"]], "torch.logical_not": [[1356, "torch-logical-not"]], "torch.linspace": [[1344, "torch-linspace"]], "torch.logaddexp2": [[1352, "torch-logaddexp2"]], "torch.load": [[1345, "torch-load"]], "torch.maximum": [[1372, "torch-maximum"]], "torch.linalg.vander": [[1341, "torch-linalg-vander"]], "torch.logit": [[1359, "torch-logit"]], "torch.lu_unpack": [[1365, "torch-lu-unpack"]], "torch.logsumexp": [[1361, "torch-logsumexp"]], "torch.linalg.qr": [[1332, "torch-linalg-qr"]], "torch.linalg.vecdot": [[1342, "torch-linalg-vecdot"]], "torch.masked_select": [[1367, "torch-masked-select"]], "torch.logaddexp": [[1351, "torch-logaddexp"]], "torch.linalg.inv_ex": [[1315, "torch-linalg-inv-ex"]], "torch.linalg.eigh": [[1310, "torch-linalg-eigh"]], "torch.jit.script_if_tracing": [[1286, "torch-jit-script-if-tracing"]], "torch.jit.script": [[1285, "torch-jit-script"]], "torch.linalg.cross": [[1306, "torch-linalg-cross"]], "torch.jit.freeze": [[1277, "torch-jit-freeze"]], "torch.linalg.cond": [[1305, "torch-linalg-cond"]], "torch.jit.trace": [[1289, "torch-jit-trace"]], "torch.linalg.ldl_factor": [[1316, "torch-linalg-ldl-factor"]], "torch.jit.set_fusion_strategy": [[1287, "torch-jit-set-fusion-strategy"]], "torch.linalg.matmul": [[1324, "torch-linalg-matmul"]], "torch.jit.enable_onednn_fusion": [[1275, "torch-jit-enable-onednn-fusion"]], "torch.jit.unused": [[1291, "torch-jit-unused"]], "torch.lerp": [[1299, "torch-lerp"]], "torch.less": [[1300, "torch-less"]], "torch.linalg.inv": [[1314, "torch-linalg-inv"]], "torch.linalg.cholesky": [[1303, "torch-linalg-cholesky"]], "torch.linalg.householder_product": [[1313, "torch-linalg-householder-product"]], "torch.jit.onednn_fusion_enabled": [[1282, "torch-jit-onednn-fusion-enabled"]], "torch.linalg.eigvals": [[1311, "torch-linalg-eigvals"]], "torch.linalg.ldl_factor_ex": [[1317, "torch-linalg-ldl-factor-ex"]], "torch.linalg.lu_factor_ex": [[1322, 
"torch-linalg-lu-factor-ex"]], "torch.linalg.diagonal": [[1308, "torch-linalg-diagonal"]], "torch.kaiser_window": [[1293, "torch-kaiser-window"]], "torch.jit.fork": [[1276, "torch-jit-fork"]], "torch.jit.wait": [[1292, "torch-jit-wait"]], "torch.less_equal": [[1301, "torch-less-equal"]], "torch.jit.trace_module": [[1290, "torch-jit-trace-module"]], "torch.lcm": [[1296, "torch-lcm"]], "torch.linalg.cholesky_ex": [[1304, "torch-linalg-cholesky-ex"]], "torch.jit.save": [[1284, "torch-jit-save"]], "torch.jit.load": [[1281, "torch-jit-load"]], "torch.linalg.ldl_solve": [[1318, "torch-linalg-ldl-solve"]], "torch.jit.ignore": [[1278, "torch-jit-ignore"]], "torch.kron": [[1294, "torch-kron"]], "torch.linalg.lstsq": [[1319, "torch-linalg-lstsq"]], "torch.linalg.det": [[1307, "torch-linalg-det"]], "torch.kthvalue": [[1295, "torch-kthvalue"]], "torch.linalg.eig": [[1309, "torch-linalg-eig"]], "strict_fusion": [[1288, "strict-fusion"]], "torch.linalg.eigvalsh": [[1312, "torch-linalg-eigvalsh"]], "torch.lgamma": [[1302, "torch-lgamma"]], "torch.ldexp": [[1297, "torch-ldexp"]], "torch.linalg.lu_factor": [[1321, "torch-linalg-lu-factor"]], "torch.linalg.matrix_exp": [[1325, "torch-linalg-matrix-exp"]], "torch.le": [[1298, "torch-le"]], "torch.linalg.lu": [[1320, "torch-linalg-lu"]], "torch.jit.isinstance": [[1280, "torch-jit-isinstance"]], "torch.jit.optimize_for_inference": [[1283, "torch-jit-optimize-for-inference"]], "torch.linalg.lu_solve": [[1323, "torch-linalg-lu-solve"]], "torch.jit.interface": [[1279, "torch-jit-interface"]], "torch.index_copy": [[1246, "torch-index-copy"]], "torch.is_complex": [[1252, "torch-is-complex"]], "torch.is_tensor": [[1260, "torch-is-tensor"]], "torch.histc": [[1234, "torch-histc"]], "torch.hstack": [[1239, "torch-hstack"]], "torch.is_deterministic_algorithms_warn_only_enabled": [[1254, "torch-is-deterministic-algorithms-warn-only-enabled"]], "torch.get_num_interop_threads": [[1224, "torch-get-num-interop-threads"]], "torch.histogram": [[1235, "torch-histogram"]], "torch.imag": [[1244, "torch-imag"]], "torch.greater_equal": [[1229, "torch-greater-equal"]], "torch.i0": [[1241, "torch-i0"]], "torch.initial_seed": [[1249, "torch-initial-seed"]], "torch.isneginf": [[1267, "torch-isneginf"]], "torch.gt": [[1230, "torch-gt"]], "torch.index_add": [[1245, "torch-index-add"]], "ScriptModule": [[1273, "scriptmodule"]], "torch.get_num_threads": [[1225, "torch-get-num-threads"]], "torch.get_rng_state": [[1226, "torch-get-rng-state"]], "torch.is_nonzero": [[1258, "torch-is-nonzero"]], "torch.is_floating_point": [[1255, "torch-is-floating-point"]], "torch.is_conj": [[1253, "torch-is-conj"]], "torch.is_storage": [[1259, "torch-is-storage"]], "torch.inner": [[1250, "torch-inner"]], "torch.histogramdd": [[1236, "torch-histogramdd"]], "torch.index_reduce": [[1247, "torch-index-reduce"]], "ScriptFunction": [[1272, "scriptfunction"]], "torch.hspmm": [[1238, "torch-hspmm"]], "torch.greater": [[1228, "torch-greater"]], "torch.index_select": [[1248, "torch-index-select"]], "torch.hann_window": [[1232, "torch-hann-window"]], "torch.isinf": [[1265, "torch-isinf"]], "torch.is_grad_enabled": [[1256, "torch-is-grad-enabled"]], "torch.isin": [[1264, "torch-isin"]], "torch.is_warn_always_enabled": [[1261, "torch-is-warn-always-enabled"]], "torch.isposinf": [[1268, "torch-isposinf"]], "torch.is_inference_mode_enabled": [[1257, "torch-is-inference-mode-enabled"]], "torch.isnan": [[1266, "torch-isnan"]], "torch.istft": [[1270, "torch-istft"]], "Attribute": [[1271, "attribute"]], "torch.hamming_window": 
[[1231, "torch-hamming-window"]], "torch.gradient": [[1227, "torch-gradient"]], "torch.isreal": [[1269, "torch-isreal"]], "torch.isfinite": [[1263, "torch-isfinite"]], "torch.hsplit": [[1237, "torch-hsplit"]], "torch.isclose": [[1262, "torch-isclose"]], "torch.jit.annotate": [[1274, "torch-jit-annotate"]], "torch.hypot": [[1240, "torch-hypot"]], "torch.inverse": [[1251, "torch-inverse"]], "torch.igammac": [[1243, "torch-igammac"]], "torch.igamma": [[1242, "torch-igamma"]], "torch.heaviside": [[1233, "torch-heaviside"]], "torch.fx.experimental.symbolic_shapes.lru_cache": [[1207, "torch-fx-experimental-symbolic-shapes-lru-cache"]], "torch.gcd": [[1215, "torch-gcd"]], "RelaxedUnspecConstraint": [[1187, "relaxedunspecconstraint"]], "InnerTensorKey": [[1185, "innertensorkey"]], "torch.func.stack_module_state": [[1176, "torch-func-stack-module-state"]], "DivideByKey": [[1183, "dividebykey"]], "torch.fx.experimental.symbolic_shapes.parallel_or": [[1209, "torch-fx-experimental-symbolic-shapes-parallel-or"]], "StrictMinMaxConstraint": [[1192, "strictminmaxconstraint"]], "EqualityConstraint": [[1184, "equalityconstraint"]], "torch.fx.experimental.symbolic_shapes.constrain_unify": [[1199, "torch-fx-experimental-symbolic-shapes-constrain-unify"]], "torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings": [[1197, "torch-fx-experimental-symbolic-shapes-compute-unbacked-bindings"]], "torch.get_deterministic_debug_mode": [[1221, "torch-get-deterministic-debug-mode"]], "torch.fx.experimental.symbolic_shapes.check_consistent": [[1196, "torch-fx-experimental-symbolic-shapes-check-consistent"]], "ConvertIntKey": [[1180, "convertintkey"]], "torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings": [[1211, "torch-fx-experimental-symbolic-shapes-resolve-unbacked-bindings"]], "torch.fx.experimental.symbolic_shapes.statically_known_true": [[1212, "torch-fx-experimental-symbolic-shapes-statically-known-true"]], "torch.get_default_device": [[1219, "torch-get-default-device"]], "torch.fx.experimental.symbolic_shapes.sym_eq": [[1213, "torch-fx-experimental-symbolic-shapes-sym-eq"]], "torch.func.jvp": [[1173, "torch-func-jvp"]], "torch.ge": [[1216, "torch-ge"]], "torch.get_default_dtype": [[1220, "torch-get-default-dtype"]], "torch.func.vjp": [[1177, "torch-func-vjp"]], "torch.func.vmap": [[1178, "torch-func-vmap"]], "PropagateUnbackedSymInts": [[1186, "propagateunbackedsymints"]], "ShapeEnvSettings": [[1189, "shapeenvsettings"]], "SubclassSymbolicContext": [[1193, "subclasssymboliccontext"]], "torch.fx.experimental.symbolic_shapes.is_concrete_bool": [[1205, "torch-fx-experimental-symbolic-shapes-is-concrete-bool"]], "SymbolicContext": [[1194, "symboliccontext"]], "torch.fx.experimental.symbolic_shapes.definitely_false": [[1200, "torch-fx-experimental-symbolic-shapes-definitely-false"]], "torch.get_float32_matmul_precision": [[1223, "torch-get-float32-matmul-precision"]], "torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr": [[1195, "torch-fx-experimental-symbolic-shapes-canonicalize-bool-expr"]], "StatelessSymbolicContext": [[1191, "statelesssymboliccontext"]], "torch.func.replace_all_batch_norm_modules_": [[1175, "torch-func-replace-all-batch-norm-modules"]], "DimDynamic": [[1182, "dimdynamic"]], "torch.get_device_module": [[1222, "torch-get-device-module"]], "torch.fx.experimental.symbolic_shapes.definitely_true": [[1201, "torch-fx-experimental-symbolic-shapes-definitely-true"]], "torch.ger": [[1218, "torch-ger"]], "torch.fx.experimental.symbolic_shapes.rebind_unbacked": [[1210, 
"torch-fx-experimental-symbolic-shapes-rebind-unbacked"]], "StatefulSymbolicContext": [[1190, "statefulsymboliccontext"]], "ShapeEnv": [[1188, "shapeenv"]], "torch.fx.experimental.symbolic_shapes.constrain_range": [[1198, "torch-fx-experimental-symbolic-shapes-constrain-range"]], "torch.fx.experimental.symbolic_shapes.parallel_and": [[1208, "torch-fx-experimental-symbolic-shapes-parallel-and"]], "torch.fx.experimental.symbolic_shapes.guard_size_oblivious": [[1202, "torch-fx-experimental-symbolic-shapes-guard-size-oblivious"]], "torch.geqrf": [[1217, "torch-geqrf"]], "torch.fx.experimental.symbolic_shapes.hint_int": [[1204, "torch-fx-experimental-symbolic-shapes-hint-int"]], "torch.fx.experimental.symbolic_shapes.is_concrete_int": [[1206, "torch-fx-experimental-symbolic-shapes-is-concrete-int"]], "torch.fx.experimental.symbolic_shapes.has_free_symbols": [[1203, "torch-fx-experimental-symbolic-shapes-has-free-symbols"]], "torch.gather": [[1214, "torch-gather"]], "CallMethodKey": [[1179, "callmethodkey"]], "torch.func.linearize": [[1174, "torch-func-linearize"]], "DimConstraints": [[1181, "dimconstraints"]], "torch.func.functional_call": [[1166, "torch-func-functional-call"]], "torch.func.functionalize": [[1167, "torch-func-functionalize"]], "torch.fft.hfft2": [[1131, "torch-fft-hfft2"]], "torch.func.jacrev": [[1172, "torch-func-jacrev"]], "torch.fft.irfftn": [[1142, "torch-fft-irfftn"]], "torch.fft.fftfreq": [[1127, "torch-fft-fftfreq"]], "torch.floor_divide": [[1154, "torch-floor-divide"]], "torch.frac": [[1158, "torch-frac"]], "torch.float_power": [[1152, "torch-float-power"]], "torch.fix": [[1147, "torch-fix"]], "torch.from_file": [[1161, "torch-from-file"]], "torch.fft.ifft": [[1133, "torch-fft-ifft"]], "torch.fft.fftshift": [[1129, "torch-fft-fftshift"]], "torch.full": [[1164, "torch-full"]], "torch.full_like": [[1165, "torch-full-like"]], "torch.fft.irfft": [[1140, "torch-fft-irfft"]], "torch.fft.hfftn": [[1132, "torch-fft-hfftn"]], "torch.flipud": [[1151, "torch-flipud"]], "torch.fft.irfft2": [[1141, "torch-fft-irfft2"]], "torch.fft.rfftfreq": [[1145, "torch-fft-rfftfreq"]], "torch.fft.fftn": [[1128, "torch-fft-fftn"]], "torch.fmax": [[1155, "torch-fmax"]], "torch.fft.rfft2": [[1144, "torch-fft-rfft2"]], "torch.func.grad": [[1168, "torch-func-grad"]], "torch.fmod": [[1157, "torch-fmod"]], "torch.fft.fft": [[1125, "torch-fft-fft"]], "torch.from_dlpack": [[1160, "torch-from-dlpack"]], "torch.flatten": [[1148, "torch-flatten"]], "torch.eye": [[1122, "torch-eye"]], "torch.frombuffer": [[1163, "torch-frombuffer"]], "torch.fft.ifftn": [[1135, "torch-fft-ifftn"]], "torch.fft.ihfft": [[1137, "torch-fft-ihfft"]], "torch.fft.ihfftn": [[1139, "torch-fft-ihfftn"]], "torch.fft.rfft": [[1143, "torch-fft-rfft"]], "torch.fft.fft2": [[1126, "torch-fft-fft2"]], "torch.fft.ifft2": [[1134, "torch-fft-ifft2"]], "torch.fake_quantize_per_tensor_affine": [[1124, "torch-fake-quantize-per-tensor-affine"]], "torch.fft.rfftn": [[1146, "torch-fft-rfftn"]], "torch.fliplr": [[1150, "torch-fliplr"]], "torch.fft.ihfft2": [[1138, "torch-fft-ihfft2"]], "torch.from_numpy": [[1162, "torch-from-numpy"]], "torch.fake_quantize_per_channel_affine": [[1123, "torch-fake-quantize-per-channel-affine"]], "torch.func.grad_and_value": [[1169, "torch-func-grad-and-value"]], "torch.func.hessian": [[1170, "torch-func-hessian"]], "torch.fmin": [[1156, "torch-fmin"]], "torch.func.jacfwd": [[1171, "torch-func-jacfwd"]], "torch.floor": [[1153, "torch-floor"]], "torch.fft.ifftshift": [[1136, "torch-fft-ifftshift"]], "torch.fft.hfft": 
[[1130, "torch-fft-hfft"]], "torch.flip": [[1149, "torch-flip"]], "torch.frexp": [[1159, "torch-frexp"]], "torch.cuda.set_per_process_memory_fraction": [[1079, "torch-cuda-set-per-process-memory-fraction"]], "torch.equal": [[1115, "torch-equal"]], "torch.dot": [[1106, "torch-dot"]], "torch.cuda.stream": [[1084, "torch-cuda-stream"]], "torch.empty": [[1110, "torch-empty"]], "torch.diag": [[1096, "torch-diag"]], "torch.exp2": [[1120, "torch-exp2"]], "torch.cumprod": [[1090, "torch-cumprod"]], "torch.erfc": [[1117, "torch-erfc"]], "torch.cuda.seed": [[1076, "torch-cuda-seed"]], "torch.cummin": [[1089, "torch-cummin"]], "torch.diff": [[1101, "torch-diff"]], "torch.cuda.reset_peak_memory_stats": [[1075, "torch-cuda-reset-peak-memory-stats"]], "torch.cuda.set_rng_state": [[1080, "torch-cuda-set-rng-state"]], "torch.dequantize": [[1094, "torch-dequantize"]], "torch.cuda.set_device": [[1078, "torch-cuda-set-device"]], "torch.cumsum": [[1091, "torch-cumsum"]], "torch.diagflat": [[1098, "torch-diagflat"]], "torch.diagonal": [[1099, "torch-diagonal"]], "torch.divide": [[1105, "torch-divide"]], "torch.exp": [[1119, "torch-exp"]], "torch.digamma": [[1102, "torch-digamma"]], "torch.div": [[1104, "torch-div"]], "torch.cuda.reset_max_memory_cached": [[1074, "torch-cuda-reset-max-memory-cached"]], "torch.det": [[1095, "torch-det"]], "torch.cumulative_trapezoid": [[1092, "torch-cumulative-trapezoid"]], "torch.cuda.nvtx.range_push": [[1071, "torch-cuda-nvtx-range-push"]], "torch.dsplit": [[1107, "torch-dsplit"]], "torch.empty_strided": [[1112, "torch-empty-strided"]], "torch.cuda.temperature": [[1086, "torch-cuda-temperature"]], "torch.erf": [[1116, "torch-erf"]], "torch.einsum": [[1109, "torch-einsum"]], "torch.cuda.utilization": [[1087, "torch-cuda-utilization"]], "torch.eq": [[1114, "torch-eq"]], "torch.cuda.reset_max_memory_allocated": [[1073, "torch-cuda-reset-max-memory-allocated"]], "torch.cuda.set_rng_state_all": [[1081, "torch-cuda-set-rng-state-all"]], "torch.diag_embed": [[1097, "torch-diag-embed"]], "torch.cuda.set_stream": [[1082, "torch-cuda-set-stream"]], "torch.erfinv": [[1118, "torch-erfinv"]], "torch.dstack": [[1108, "torch-dstack"]], "torch.cuda.set_sync_debug_mode": [[1083, "torch-cuda-set-sync-debug-mode"]], "torch.cuda.seed_all": [[1077, "torch-cuda-seed-all"]], "torch.cuda.synchronize": [[1085, "torch-cuda-synchronize"]], "torch.expm1": [[1121, "torch-expm1"]], "torch.dist": [[1103, "torch-dist"]], "torch.cummax": [[1088, "torch-cummax"]], "torch.cuda.power_draw": [[1072, "torch-cuda-power-draw"]], "torch.deg2rad": [[1093, "torch-deg2rad"]], "torch.diagonal_scatter": [[1100, "torch-diagonal-scatter"]], "enable_grad": [[1113, "enable-grad"]], "torch.empty_like": [[1111, "torch-empty-like"]], "torch.cuda.manual_seed_all": [[1056, "torch-cuda-manual-seed-all"]], "torch.cuda.get_rng_state_all": [[1041, "torch-cuda-get-rng-state-all"]], "torch.cuda.comm.reduce_add": [[1024, "torch-cuda-comm-reduce-add"]], "torch.cuda.get_device_name": [[1037, "torch-cuda-get-device-name"]], "torch.cuda.manual_seed": [[1055, "torch-cuda-manual-seed"]], "torch.cuda.comm.gather": [[1023, "torch-cuda-comm-gather"]], "torch.cuda.device_count": [[1031, "torch-cuda-device-count"]], "torch.cuda.memory_reserved": [[1063, "torch-cuda-memory-reserved"]], "torch.cuda.memory_stats": [[1065, "torch-cuda-memory-stats"]], "torch.cuda.current_stream": [[1028, "torch-cuda-current-stream"]], "torch.cuda.memory_usage": [[1067, "torch-cuda-memory-usage"]], "torch.cuda.is_available": [[1048, "torch-cuda-is-available"]], 
"torch.cuda.mem_get_info": [[1060, "torch-cuda-mem-get-info"]], "torch.cuda.clock_rate": [[1020, "torch-cuda-clock-rate"]], "torch.cuda.is_initialized": [[1050, "torch-cuda-is-initialized"]], "torch.cuda.jiterator._create_jit_fn": [[1051, "torch-cuda-jiterator-create-jit-fn"]], "torch.cuda.nvtx.range": [[1069, "torch-cuda-nvtx-range"]], "torch.cuda.current_device": [[1027, "torch-cuda-current-device"]], "torch.cuda.max_memory_cached": [[1058, "torch-cuda-max-memory-cached"]], "torch.cuda.get_arch_list": [[1035, "torch-cuda-get-arch-list"]], "torch.cuda.memory_snapshot": [[1064, "torch-cuda-memory-snapshot"]], "torch.cuda.memory_summary": [[1066, "torch-cuda-memory-summary"]], "torch.cuda.init": [[1045, "torch-cuda-init"]], "torch.cuda.is_current_stream_capturing": [[1049, "torch-cuda-is-current-stream-capturing"]], "graph": [[1043, "graph"]], "torch.cuda.initial_seed": [[1046, "torch-cuda-initial-seed"]], "torch.cuda.list_gpu_processes": [[1053, "torch-cuda-list-gpu-processes"]], "torch.cuda.nvtx.range_pop": [[1070, "torch-cuda-nvtx-range-pop"]], "torch.cuda.comm.broadcast": [[1021, "torch-cuda-comm-broadcast"]], "torch.cuda.make_graphed_callables": [[1054, "torch-cuda-make-graphed-callables"]], "torch.cuda.comm.broadcast_coalesced": [[1022, "torch-cuda-comm-broadcast-coalesced"]], "torch.cuda.jiterator._create_multi_output_jit_fn": [[1052, "torch-cuda-jiterator-create-multi-output-jit-fn"]], "torch.cuda.max_memory_allocated": [[1057, "torch-cuda-max-memory-allocated"]], "torch.cuda.current_blas_handle": [[1026, "torch-cuda-current-blas-handle"]], "torch.cuda.memory_allocated": [[1061, "torch-cuda-memory-allocated"]], "torch.cuda.default_stream": [[1029, "torch-cuda-default-stream"]], "torch.cuda.get_rng_state": [[1040, "torch-cuda-get-rng-state"]], "torch.cuda.get_device_properties": [[1038, "torch-cuda-get-device-properties"]], "torch.cuda.graph_pool_handle": [[1044, "torch-cuda-graph-pool-handle"]], "torch.cuda.empty_cache": [[1033, "torch-cuda-empty-cache"]], "torch.cuda.get_allocator_backend": [[1034, "torch-cuda-get-allocator-backend"]], "torch.cuda.comm.scatter": [[1025, "torch-cuda-comm-scatter"]], "torch.cuda.ipc_collect": [[1047, "torch-cuda-ipc-collect"]], "torch.cuda.nvtx.mark": [[1068, "torch-cuda-nvtx-mark"]], "torch.cuda.get_gencode_flags": [[1039, "torch-cuda-get-gencode-flags"]], "torch.cuda.get_sync_debug_mode": [[1042, "torch-cuda-get-sync-debug-mode"]], "torch.cuda.memory_cached": [[1062, "torch-cuda-memory-cached"]], "torch.cuda.max_memory_reserved": [[1059, "torch-cuda-max-memory-reserved"]], "torch.cuda.get_device_capability": [[1036, "torch-cuda-get-device-capability"]], "torch.cos": [[995, "torch-cos"]], "torch.compiler.compile": [[980, "torch-compiler-compile"]], "torch.cholesky_solve": [[969, "torch-cholesky-solve"]], "torch.cov": [[998, "torch-cov"]], "torch.cpu.current_stream": [[1002, "torch-cpu-current-stream"]], "torch.clamp": [[971, "torch-clamp"]], "torch.compile": [[976, "torch-compile"]], "torch.corrcoef": [[994, "torch-corrcoef"]], "torch.cuda.OutOfMemoryError": [[1013, "torch-cuda-outofmemoryerror"]], "torch.column_stack": [[974, "torch-column-stack"]], "torch.compiler.reset": [[986, "torch-compiler-reset"]], "torch.compiler.is_dynamo_compiling": [[984, "torch-compiler-is-dynamo-compiling"]], "torch.compiled_with_cxx11_abi": [[977, "torch-compiled-with-cxx11-abi"]], "torch.conj_physical": [[992, "torch-conj-physical"]], "torch.cuda.can_device_access_peer": [[1018, "torch-cuda-can-device-access-peer"]], "torch.conj": [[991, "torch-conj"]], 
"torch.cuda.caching_allocator_alloc": [[1016, "torch-cuda-caching-allocator-alloc"]], "torch.clone": [[973, "torch-clone"]], "torch.clip": [[972, "torch-clip"]], "torch.compiler.is_compiling": [[983, "torch-compiler-is-compiling"]], "torch.compiler.list_backends": [[985, "torch-compiler-list-backends"]], "torch.cosh": [[996, "torch-cosh"]], "torch.cpu.synchronize": [[1007, "torch-cpu-synchronize"]], "torch.cuda.change_current_allocator": [[1019, "torch-cuda-change-current-allocator"]], "torch.cross": [[1008, "torch-cross"]], "torch.cpu.current_device": [[1001, "torch-cpu-current-device"]], "torch.combinations": [[975, "torch-combinations"]], "torch.concat": [[988, "torch-concat"]], "torch.chunk": [[970, "torch-chunk"]], "torch.compiler.cudagraph_mark_step_begin": [[981, "torch-compiler-cudagraph-mark-step-begin"]], "torch.cuda.caching_allocator_delete": [[1017, "torch-cuda-caching-allocator-delete"]], "torch.count_nonzero": [[997, "torch-count-nonzero"]], "torch.cpu.stream": [[1006, "torch-cpu-stream"]], "torch.complex": [[987, "torch-complex"]], "CUDAGraph": [[1009, "cudagraph"]], "ExternalStream": [[1012, "externalstream"]], "torch.concatenate": [[989, "torch-concatenate"]], "torch.cpu.is_available": [[1004, "torch-cpu-is-available"]], "torch.cpu.set_device": [[1005, "torch-cpu-set-device"]], "torch.cpu.device_count": [[1003, "torch-cpu-device-count"]], "torch.copysign": [[993, "torch-copysign"]], "CUDAPluggableAllocator": [[1010, "cudapluggableallocator"]], "torch.compiler.assume_constant_result": [[979, "torch-compiler-assume-constant-result"]], "Events": [[41, "module-torch.distributed.elastic.events"]], "API Methods": [[41, "api-methods"]], "Event Objects": [[41, "event-objects"]], "torch.utils.data": [[23, "module-torch.utils.data"]], "Dataset Types": [[23, "dataset-types"]], "Map-style datasets": [[23, "map-style-datasets"]], "Iterable-style datasets": [[23, "iterable-style-datasets"]], "Data Loading Order and Sampler": [[23, "data-loading-order-and-sampler"]], "Loading Batched and Non-Batched Data": [[23, "loading-batched-and-non-batched-data"]], "Automatic batching (default)": [[23, "automatic-batching-default"]], "Disable automatic batching": [[23, "disable-automatic-batching"]], "Working with collate_fn": [[23, "working-with-collate-fn"]], "Single- and Multi-process Data Loading": [[23, "single-and-multi-process-data-loading"]], "Single-process data loading (default)": [[23, "single-process-data-loading-default"]], "Multi-process data loading": [[23, "multi-process-data-loading"]], "Platform-specific behaviors": [[23, "platform-specific-behaviors"]], "Randomness in multi-process data loading": [[23, "randomness-in-multi-process-data-loading"]], "Memory Pinning": [[23, "memory-pinning"]], "torch.utils.deterministic": [[27, "module-torch.utils.deterministic"]], "PyTorch Governance | Build + CI": [[6, "pytorch-governance-build-ci"]], "How to Add a New Maintainer": [[6, "how-to-add-a-new-maintainer"]], "Control Plane": [[38, "module-torch.distributed.elastic.control_plane"]], "torchrun (Elastic Launch)": [[48, "module-torch.distributed.run"]], "Transitioning from torch.distributed.launch to torchrun": [[48, "transitioning-from-torch-distributed-launch-to-torchrun"]], "Usage": [[48, "usage"], [18, "usage"], [31, null]], "Single-node multi-worker": [[48, "single-node-multi-worker"]], "Stacked single-node multi-worker": [[48, "stacked-single-node-multi-worker"]], "Fault tolerant (fixed sized number of workers, no elasticity, tolerates 3 failures)": [[48, 
"fault-tolerant-fixed-sized-number-of-workers-no-elasticity-tolerates-3-failures"]], "Elastic (min=1, max=4, tolerates up to 3 membership changes or failures)": [[48, "elastic-min-1-max-4-tolerates-up-to-3-membership-changes-or-failures"]], "Note on rendezvous backend": [[48, "note-on-rendezvous-backend"]], "Definitions": [[48, "definitions"]], "Environment Variables": [[48, "environment-variables"]], "Deployment": [[48, "deployment"]], "Failure Modes": [[48, "failure-modes"]], "Membership Changes": [[48, "membership-changes"]], "Important Notices": [[48, "important-notices"]], "torch.cpu": [[16, "module-torch.cpu"]], "Automatic Mixed Precision package - torch.amp": [[0, "automatic-mixed-precision-package-torch-amp"]], "Autocasting": [[0, "autocasting"]], "Gradient Scaling": [[0, "gradient-scaling"]], "Autocast Op Reference": [[0, "autocast-op-reference"]], "Op Eligibility": [[0, "op-eligibility"]], "CUDA Op-Specific Behavior": [[0, "cuda-op-specific-behavior"]], "CUDA Ops that can autocast to float16": [[0, "cuda-ops-that-can-autocast-to-float16"]], "CUDA Ops that can autocast to float32": [[0, "cuda-ops-that-can-autocast-to-float32"]], "CUDA Ops that promote to the widest input type": [[0, "cuda-ops-that-promote-to-the-widest-input-type"]], "Prefer binary_cross_entropy_with_logits over binary_cross_entropy": [[0, "prefer-binary-cross-entropy-with-logits-over-binary-cross-entropy"]], "XPU Op-Specific Behavior (Experimental)": [[0, "xpu-op-specific-behavior-experimental"]], "XPU Ops that can autocast to float16": [[0, "xpu-ops-that-can-autocast-to-float16"]], "XPU Ops that can autocast to float32": [[0, "xpu-ops-that-can-autocast-to-float32"]], "XPU Ops that promote to the widest input type": [[0, "xpu-ops-that-promote-to-the-widest-input-type"]], "CPU Op-Specific Behavior": [[0, "cpu-op-specific-behavior"]], "CPU Ops that can autocast to bfloat16": [[0, "cpu-ops-that-can-autocast-to-bfloat16"]], "CPU Ops that can autocast to float32": [[0, "cpu-ops-that-can-autocast-to-float32"]], "CPU Ops that promote to the widest input type": [[0, "cpu-ops-that-promote-to-the-widest-input-type"]], "torch.cuda": [[17, "module-torch.cuda"]], "Communication collectives": [[17, "communication-collectives"]], "Graphs (beta)": [[17, "graphs-beta"]], "Memory management": [[17, "memory-management"], [2046, "memory-management"], [2055, "memory-management"]], "NVIDIA Tools Extension (NVTX)": [[17, "nvidia-tools-extension-nvtx"]], "Jiterator (beta)": [[17, "jiterator-beta"]], "TunableOp": [[17, "tunableop"], [19, "tunableop"]], "Stream Sanitizer (prototype)": [[17, "stream-sanitizer-prototype"]], "torch::deploy has been moved to pytorch/multipy": [[26, "torch-deploy-has-been-moved-to-pytorch-multipy"]], "torch.utils.dlpack": [[36, "torch-utils-dlpack"]], "DDP Communication Hooks": [[24, "ddp-communication-hooks"]], "How to Use a Communication Hook?": [[24, "how-to-use-a-communication-hook"]], "What Does a Communication Hook Operate On?": [[24, "what-does-a-communication-hook-operate-on"]], "Default Communication Hooks": [[24, "default-communication-hooks"]], "PowerSGD Communication Hook": [[24, "powersgd-communication-hook"]], "PowerSGD State": [[24, "powersgd-state"]], "PowerSGD Hooks": [[24, "powersgd-hooks"]], "Debugging Communication Hooks": [[24, "debugging-communication-hooks"]], "Checkpointing of Communication Hooks": [[24, "checkpointing-of-communication-hooks"]], "Acknowledgements": [[24, "acknowledgements"]], "Automatic differentiation package - torch.autograd": [[1, "module-torch.autograd"]], 
"Forward-mode Automatic Differentiation": [[1, "forward-mode-automatic-differentiation"]], "Functional higher level API": [[1, "functional-higher-level-api"]], "Default gradient layouts": [[1, "default-gradient-layouts"]], "Manual gradient layouts": [[1, "manual-gradient-layouts"]], "In-place operations on Tensors": [[1, "in-place-operations-on-tensors"]], "In-place correctness checks": [[1, "in-place-correctness-checks"], [2043, "in-place-correctness-checks"]], "Variable (deprecated)": [[1, "variable-deprecated"]], "Tensor autograd functions": [[1, "tensor-autograd-functions"]], "Function": [[1, "function"]], "Context method mixins": [[1, "context-method-mixins"]], "Custom Function utilities": [[1, "custom-function-utilities"]], "Numerical gradient checking": [[1, "module-torch.autograd.gradcheck"]], "Profiler": [[1, "profiler"]], "Debugging and anomaly detection": [[1, "debugging-and-anomaly-detection"]], "Autograd graph": [[1, "autograd-graph"]], "Error Propagation": [[40, "module-torch.distributed.elastic.multiprocessing.errors"]], "Methods and Classes": [[40, "methods-and-classes"]], "torch.__config__": [[13, "module-torch.__config__"]], "Generic Join Context Manager": [[29, "generic-join-context-manager"]], "Tensor Parallelism - torch.distributed.tensor.parallel": [[34, "tensor-parallelism-torch-distributed-tensor-parallel"]], "Benchmark Utils - torch.utils.benchmark": [[3, "module-torch.utils.benchmark"]], "Distributed Checkpoint - torch.distributed.checkpoint": [[30, "distributed-checkpoint-torch-distributed-checkpoint"]], "Customization": [[39, "customization"]], "Launcher": [[39, "launcher"]], "Rendezvous Handler": [[39, "rendezvous-handler"]], "Metric Handler": [[39, "metric-handler"]], "Events Handler": [[39, "events-handler"]], "Multiprocessing": [[45, "module-torch.distributed.elastic.multiprocessing"]], "Starting Multiple Workers": [[45, "starting-multiple-workers"]], "Process Context": [[45, "process-context"]], "PyTorch Governance | Mechanics": [[9, "pytorch-governance-mechanics"]], "Summary": [[9, "summary"]], "Module Maintainers": [[9, "module-maintainers"]], "Core Maintainers": [[9, "core-maintainers"], [10, "core-maintainers"]], "Lead Core Maintainer (BDFL)": [[9, "lead-core-maintainer-bdfl"], [10, "lead-core-maintainer-bdfl"]], "Nominating, Confirming and Removing Maintainers": [[9, "nominating-confirming-and-removing-maintainers"]], "The Principles": [[9, "the-principles"]], "The Process for Nomination": [[9, "the-process-for-nomination"]], "The Process for Removal": [[9, "the-process-for-removal"]], "Nominating Core Maintainers": [[9, "nominating-core-maintainers"]], "Removing the Lead Core Maintainer and Nominating a New Lead Core Maintainer": [[9, "removing-the-lead-core-maintainer-and-nominating-a-new-lead-core-maintainer"]], "Add, Remove, and Re-Scope Modules and Projects": [[9, "add-remove-and-re-scope-modules-and-projects"]], "Decision Making": [[9, "decision-making"]], "Uncontroversial Changes": [[9, "uncontroversial-changes"]], "Controversial Decision Process": [[9, "controversial-decision-process"]], "General Project Policies": [[9, "general-project-policies"]], "FAQ": [[9, "faq"]], "torch.backends": [[2, "module-torch.backends"]], "torch.backends.cpu": [[2, "module-torch.backends.cpu"]], "torch.backends.cuda": [[2, "module-torch.backends.cuda"]], "torch.backends.cudnn": [[2, "module-torch.backends.cudnn"]], "torch.backends.mha": [[2, "module-torch.backends.mha"]], "torch.backends.mps": [[2, "module-torch.backends.mps"]], "torch.backends.mkl": [[2, 
"module-torch.backends.mkl"]], "torch.backends.mkldnn": [[2, "module-torch.backends.mkldnn"]], "torch.backends.nnpack": [[2, "module-torch.backends.nnpack"]], "torch.backends.openmp": [[2, "module-torch.backends.openmp"]], "torch.backends.opt_einsum": [[2, "module-torch.backends.opt_einsum"]], "torch.backends.xeon": [[2, "module-torch.backends.xeon"]], "CUDA Stream Sanitizer": [[18, "cuda-stream-sanitizer"]], "Expiration Timers": [[50, "module-torch.distributed.elastic.timer"]], "Client Methods": [[50, "client-methods"]], "Server/Client Implementations": [[50, "server-client-implementations"]], "Writing a custom timer server/client": [[50, "writing-a-custom-timer-server-client"]], "Debug info logging": [[50, "module-torch.distributed.elastic.timer.debug_info_logging"]], "Distributed communication package - torch.distributed": [[28, "distributed-communication-package-torch-distributed"]], "Backends": [[28, "backends"], [2077, "backends"]], "Backends that come with PyTorch": [[28, "backends-that-come-with-pytorch"]], "Which backend to use?": [[28, "which-backend-to-use"]], "Common environment variables": [[28, "common-environment-variables"]], "Choosing the network interface to use": [[28, "choosing-the-network-interface-to-use"]], "Other NCCL environment variables": [[28, "other-nccl-environment-variables"]], "Basics": [[28, "basics"], [2077, "basics"]], "Initialization": [[28, "initialization"]], "TCP initialization": [[28, "tcp-initialization"]], "Shared file-system initialization": [[28, "shared-file-system-initialization"]], "Environment variable initialization": [[28, "environment-variable-initialization"]], "Post-Initialization": [[28, "post-initialization"]], "Shutdown": [[28, "shutdown"]], "Reinitialization": [[28, "reinitialization"]], "Distributed Key-Value Store": [[28, "distributed-key-value-store"]], "Groups": [[28, "groups"]], "DeviceMesh": [[28, "devicemesh"]], "Point-to-point communication": [[28, "point-to-point-communication"]], "Synchronous and asynchronous collective operations": [[28, "synchronous-and-asynchronous-collective-operations"]], "Collective functions": [[28, "collective-functions"]], "Profiling Collective Communication": [[28, "profiling-collective-communication"]], "Multi-GPU collective functions": [[28, "multi-gpu-collective-functions"]], "Third-party backends": [[28, "third-party-backends"]], "Launch utility": [[28, "launch-utility"]], "Spawn utility": [[28, "spawn-utility"]], "Debugging torch.distributed applications": [[28, "debugging-torch-distributed-applications"]], "Python Breakpoint": [[28, "python-breakpoint"]], "Monitored Barrier": [[28, "monitored-barrier"]], "TORCH_DISTRIBUTED_DEBUG": [[28, "torch-distributed-debug"]], "Logging": [[28, "logging"]], "TorchElastic Kubernetes": [[43, "torchelastic-kubernetes"]], "PyTorch Contribution Guide": [[7, "pytorch-contribution-guide"]], "Contribution Process": [[7, "contribution-process"]], "Proposing New Features": [[7, "proposing-new-features"]], "Reporting Issues": [[7, "reporting-issues"]], "Implementing Features or Fixing Bugs": [[7, "implementing-features-or-fixing-bugs"]], "Adding Tutorials": [[7, "adding-tutorials"]], "Improving Documentation & Tutorials": [[7, "improving-documentation-tutorials"]], "Participating in Online Discussions": [[7, "participating-in-online-discussions"]], "Submitting Pull Requests to Fix Open Issues": [[7, "submitting-pull-requests-to-fix-open-issues"]], "Reviewing Open Pull Requests": [[7, "reviewing-open-pull-requests"]], "Improving Code Readability": [[7, 
"improving-code-readability"]], "Adding Test Cases to Make the Codebase More Robust": [[7, "adding-test-cases-to-make-the-codebase-more-robust"]], "Promoting PyTorch": [[7, "promoting-pytorch"]], "Triaging Issues": [[7, "triaging-issues"]], "About Open Source Development": [[7, "about-open-source-development"]], "Common Mistakes To Avoid": [[7, "common-mistakes-to-avoid"]], "On Documentation": [[7, "on-documentation"]], "Python Docs": [[7, "python-docs"]], "C++ Docs": [[7, "c-docs"]], "Tutorials": [[7, "tutorials"], [2070, "tutorials"], [2077, "tutorials"]], "Tutorials Build Overview": [[7, "tutorials-build-overview"]], "Contributing a New Tutorial": [[7, "contributing-a-new-tutorial"]], "Debugging Environment Variables": [[25, "debugging-environment-variables"]], "PyTorch Design Philosophy": [[8, "pytorch-design-philosophy"]], "Design Principles": [[8, "design-principles"]], "Principle 1: Usability over Performance": [[8, "principle-1-usability-over-performance"]], "Principle 2: Simple Over Easy": [[8, "principle-2-simple-over-easy"]], "Principle 3: Python First with Best In Class Language Interoperability": [[8, "principle-3-python-first-with-best-in-class-language-interoperability"]], "CUDA Environment Variables": [[20, "cuda-environment-variables"]], "Distributed Optimizers": [[32, "distributed-optimizers"]], "Elastic Agent": [[37, "module-torch.distributed.elastic.agent"]], "Server": [[37, "module-torch.distributed.elastic.agent.server"]], "Concepts": [[37, "concepts"]], "Implementations": [[37, "implementations"], [47, "implementations"]], "Extending the Agent": [[37, "extending-the-agent"]], "Watchdog in the Agent": [[37, "watchdog-in-the-agent"]], "Health Check Server": [[37, "health-check-server"]], "PyTorch Governance | Maintainers": [[10, "pytorch-governance-maintainers"]], "Responsibilities": [[10, "responsibilities"]], "Module-level maintainers": [[10, "module-level-maintainers"]], "NN APIs (torch.nn)": [[10, "nn-apis-torch-nn"]], "Optimizers (torch.optim)": [[10, "optimizers-torch-optim"]], "Autograd (torch.autograd)": [[10, "autograd-torch-autograd"]], "Compilers (JIT / TorchScript / FX / TorchDynamo)": [[10, "compilers-jit-torchscript-fx-torchdynamo"]], "Distributions & RNG": [[10, "distributions-rng"]], "Distributed": [[10, "distributed"]], "Multiprocessing and DataLoaders": [[10, "multiprocessing-and-dataloaders"]], "Linear Algebra (torch.linalg)": [[10, "linear-algebra-torch-linalg"]], "Sparse (torch.sparse)": [[10, "sparse-torch-sparse"]], "NestedTensor (torch.nested)": [[10, "nestedtensor-torch-nested"]], "MaskedTensor (torch.masked)": [[10, "maskedtensor-torch-masked"]], "Fast Fourier Transform (torch.fft)": [[10, "fast-fourier-transform-torch-fft"]], "CPU Performance (Torch Inductor / MKLDNN)": [[10, "cpu-performance-torch-inductor-mkldnn"]], "GPU Performance (Torch Inductor / Triton / CUDA)": [[10, "gpu-performance-torch-inductor-triton-cuda"]], "NVFuser": [[10, "nvfuser"]], "AMD/ROCm/HIP": [[10, "amd-rocm-hip"]], "Build + CI": [[10, "build-ci"]], "Performance Tools": [[10, "performance-tools"]], "C++ API": [[10, "c-api"]], "C10 utils and operator dispatch": [[10, "c10-utils-and-operator-dispatch"]], "ONNX exporter": [[10, "onnx-exporter"]], "Mobile / Edge": [[10, "mobile-edge"]], "Model Compression & Optimization": [[10, "model-compression-optimization"]], "Windows": [[10, "windows"]], "Apple M1/MPS": [[10, "apple-m1-mps"]], "PowerPC": [[10, "powerpc"]], "AArch64 CPU": [[10, "aarch64-cpu"]], "Docs / Tutorials": [[10, "docs-tutorials"]], "Library-level maintainers": 
[[10, "library-level-maintainers"]], "XLA": [[10, "xla"]], "TorchServe": [[10, "torchserve"]], "TorchVision": [[10, "torchvision"]], "TorchText": [[10, "torchtext"]], "TorchAudio": [[10, "torchaudio"]], "TorchRec": [[10, "torchrec"]], "TorchX": [[10, "torchx"]], "TorchData / TorchArrow": [[10, "torchdata-torcharrow"]], "torch.utils.cpp_extension": [[14, "torch-utils-cpp-extension"]], "Quickstart": [[46, "quickstart"]], "torch.utils.checkpoint": [[5, "torch-utils-checkpoint"]], "Enabling TunableOp and Tuning Separately": [[19, "enabling-tunableop-and-tuning-separately"]], "File Input and Output": [[19, "file-input-and-output"]], "A Note on Tuning Behavior": [[19, "a-note-on-tuning-behavior"]], "Current Tunable Operators": [[19, "current-tunable-operators"]], "TunableGemm for ROCm": [[19, "tunablegemm-for-rocm"]], "Tuning Context": [[19, "tuning-context"]], "Rendezvous": [[47, "module-torch.distributed.elastic.rendezvous"]], "Registry": [[47, "registry"]], "Handler": [[47, "handler"]], "Dataclasses": [[47, "dataclasses"]], "Exceptions": [[47, "exceptions"]], "Dynamic Rendezvous": [[47, "dynamic-rendezvous"]], "C10d Backend": [[47, "c10d-backend"]], "Etcd Backend": [[47, "etcd-backend"]], "Etcd Rendezvous (Legacy)": [[47, "etcd-rendezvous-legacy"]], "Etcd Store": [[47, "etcd-store"]], "Etcd Server": [[47, "etcd-server"]], "Subprocess Handling": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"]], "Retrieve SubprocessHandler": [[49, "retrieve-subprocesshandler"]], "SubprocessHandler": [[49, "subprocesshandler"]], "Pipeline Parallelism": [[33, "pipeline-parallelism"]], "Why Pipeline Parallel?": [[33, "why-pipeline-parallel"]], "What is torch.distributed.pipelining?": [[33, "what-is-torch-distributed-pipelining"]], "Step 1: build PipelineStage for execution": [[33, "step-1-build-pipelinestage-for-execution"]], "Step 2: use PipelineSchedule for execution": [[33, "step-2-use-pipelineschedule-for-execution"]], "Options for Splitting a Model": [[33, "options-for-splitting-a-model"]], "Option 1: splitting a model manually": [[33, "option-1-splitting-a-model-manually"]], "Option 2: splitting a model automatically": [[33, "option-2-splitting-a-model-automatically"]], "Hugging Face Examples": [[33, "hugging-face-examples"]], "Technical Deep Dive": [[33, "technical-deep-dive"]], "How does the pipeline API split a model?": [[33, "how-does-the-pipeline-api-split-a-model"]], "Implementing Your Own Schedule": [[33, "implementing-your-own-schedule"]], "Model Split APIs": [[33, "model-split-apis"]], "Microbatch Utilities": [[33, "module-torch.distributed.pipelining.microbatch"]], "Pipeline Stages": [[33, "module-torch.distributed.pipelining.stage"]], "Pipeline Schedules": [[33, "module-torch.distributed.pipelining.schedules"]], "Torch Distributed Elastic": [[31, "torch-distributed-elastic"]], "Get Started": [[31, "get-started"]], "Documentation": [[31, "documentation"]], "API": [[31, null]], "Advanced": [[31, null]], "Plugins": [[31, null]], "C++": [[15, "c"]], "TorchScript C++ API": [[15, "torchscript-c-api"]], "Extending PyTorch and TorchScript with C++ Extensions": [[15, "extending-pytorch-and-torchscript-with-c-extensions"]], "Tensor and Autograd in C++": [[15, "tensor-and-autograd-in-c"]], "Authoring Models in C++": [[15, "authoring-models-in-c"]], "Packaging for C++": [[15, "packaging-for-c"]], "Metrics": [[44, "module-torch.distributed.elastic.metrics"]], "Metric Handlers": [[44, "metric-handlers"]], "Methods": [[44, "methods"]], "Control Flow - Cond": [[12, 
"control-flow-cond"]], "Invariants of torch.ops.higher_order.cond": [[12, "invariants-of-torch-ops-higher-order-cond"]], "Probability distributions - torch.distributions": [[35, "module-torch.distributions"]], "Score function": [[35, "score-function"]], "Pathwise derivative": [[35, "pathwise-derivative"]], "Distribution": [[35, "distribution"]], "ExponentialFamily": [[35, "exponentialfamily"]], "Bernoulli": [[35, "bernoulli"]], "Beta": [[35, "beta"]], "Binomial": [[35, "binomial"]], "Categorical": [[35, "categorical"]], "Cauchy": [[35, "cauchy"]], "Chi2": [[35, "chi2"]], "ContinuousBernoulli": [[35, "continuousbernoulli"]], "Dirichlet": [[35, "dirichlet"]], "Exponential": [[35, "exponential"]], "FisherSnedecor": [[35, "fishersnedecor"]], "Gamma": [[35, "gamma"]], "Geometric": [[35, "geometric"]], "Gumbel": [[35, "gumbel"]], "HalfCauchy": [[35, "halfcauchy"]], "HalfNormal": [[35, "halfnormal"]], "Independent": [[35, "independent"]], "InverseGamma": [[35, "inversegamma"]], "Kumaraswamy": [[35, "kumaraswamy"]], "LKJCholesky": [[35, "lkjcholesky"]], "Laplace": [[35, "laplace"]], "LogNormal": [[35, "lognormal"]], "LowRankMultivariateNormal": [[35, "lowrankmultivariatenormal"]], "MixtureSameFamily": [[35, "mixturesamefamily"]], "Multinomial": [[35, "multinomial"]], "MultivariateNormal": [[35, "multivariatenormal"]], "NegativeBinomial": [[35, "negativebinomial"]], "Normal": [[35, "normal"]], "OneHotCategorical": [[35, "onehotcategorical"]], "Pareto": [[35, "pareto"]], "Poisson": [[35, "poisson"]], "RelaxedBernoulli": [[35, "relaxedbernoulli"]], "LogitRelaxedBernoulli": [[35, "logitrelaxedbernoulli"]], "RelaxedOneHotCategorical": [[35, "relaxedonehotcategorical"]], "StudentT": [[35, "studentt"]], "TransformedDistribution": [[35, "transformeddistribution"]], "Uniform": [[35, "uniform"]], "VonMises": [[35, "vonmises"]], "Weibull": [[35, "weibull"]], "Wishart": [[35, "wishart"]], "KL Divergence": [[35, "module-torch.distributions.kl"]], "Transforms": [[35, "module-torch.distributions.transforms"]], "Constraints": [[35, "module-torch.distributions.constraints"], [2046, "constraints"]], "Constraint Registry": [[35, "module-torch.distributions.constraint_registry"]], "Complex Numbers": [[11, "complex-numbers"]], "Creating Complex Tensors": [[11, "creating-complex-tensors"]], "Transition from the old representation": [[11, "transition-from-the-old-representation"]], "Accessing real and imag": [[11, "accessing-real-and-imag"]], "Angle and abs": [[11, "angle-and-abs"]], "Linear Algebra": [[11, "linear-algebra"]], "Autograd": [[11, "autograd"]], "Optimizers": [[11, "optimizers"]], "torch.utils.bottleneck": [[4, "module-torch.utils.bottleneck"]], "torch.bincount": [[947, "torch-bincount"]], "torch.bitwise_or": [[951, "torch-bitwise-or"]], "torch.broadcast_tensors": [[958, "torch-broadcast-tensors"]], "torch.autograd.grad": [[918, "torch-autograd-grad"]], "torch.cholesky_inverse": [[968, "torch-cholesky-inverse"]], "torch.broadcast_shapes": [[957, "torch-broadcast-shapes"]], "torch.bitwise_right_shift": [[952, "torch-bitwise-right-shift"]], "torch.baddbmm": [[944, "torch-baddbmm"]], "torch.autograd.graph.Node.name": [[926, "torch-autograd-graph-node-name"]], "torch.autograd.graph.increment_version": [[930, "torch-autograd-graph-increment-version"]], "Kernel": [[941, "kernel"]], "torch.autograd.profiler.profile.export_chrome_trace": [[935, "torch-autograd-profiler-profile-export-chrome-trace"]], "KinetoStepTracker": [[932, "kinetosteptracker"]], "StringTable": [[943, "stringtable"]], "MemRecordsAcc": [[942, 
"memrecordsacc"]], "torch.bartlett_window": [[945, "torch-bartlett-window"]], "torch.bernoulli": [[946, "torch-bernoulli"]], "torch.bitwise_xor": [[953, "torch-bitwise-xor"]], "torch.can_cast": [[961, "torch-can-cast"]], "EnforceUnique": [[931, "enforceunique"]], "torch.cartesian_prod": [[962, "torch-cartesian-prod"]], "record_function": [[939, "record-function"]], "torch.autograd.profiler.profile.total_average": [[938, "torch-autograd-profiler-profile-total-average"]], "torch.autograd.graph.Node.register_prehook": [[929, "torch-autograd-graph-node-register-prehook"]], "torch.autograd.profiler.profile.key_averages": [[936, "torch-autograd-profiler-profile-key-averages"]], "set_multithreading_enabled": [[921, "set-multithreading-enabled"]], "torch.bitwise_not": [[950, "torch-bitwise-not"]], "torch.autograd.graph.Node.register_hook": [[928, "torch-autograd-graph-node-register-hook"]], "torch.autograd.gradcheck.gradgradcheck": [[924, "torch-autograd-gradcheck-gradgradcheck"]], "torch.autograd.graph.Node.next_functions": [[927, "torch-autograd-graph-node-next-functions"]], "torch.autograd.profiler.profile.self_cpu_time_total": [[937, "torch-autograd-profiler-profile-self-cpu-time-total"]], "torch.blackman_window": [[954, "torch-blackman-window"]], "torch.autograd.gradcheck.gradcheck": [[923, "torch-autograd-gradcheck-gradcheck"]], "Interval": [[940, "interval"]], "torch.broadcast_to": [[959, "torch-broadcast-to"]], "torch.cdist": [[964, "torch-cdist"]], "torch.cholesky": [[967, "torch-cholesky"]], "torch.autograd.gradcheck.GradcheckError": [[922, "torch-autograd-gradcheck-gradcheckerror"]], "torch.autograd.profiler.load_nvprof": [[933, "torch-autograd-profiler-load-nvprof"]], "torch.bucketize": [[960, "torch-bucketize"]], "torch.bitwise_and": [[948, "torch-bitwise-and"]], "torch.autograd.graph.Node.metadata": [[925, "torch-autograd-graph-node-metadata"]], "torch.block_diag": [[955, "torch-block-diag"]], "set_grad_enabled": [[920, "set-grad-enabled"]], "torch.bitwise_left_shift": [[949, "torch-bitwise-left-shift"]], "torch.cat": [[963, "torch-cat"]], "torch.autograd.profiler.parse_nvprof_trace": [[934, "torch-autograd-profiler-parse-nvprof-trace"]], "torch.ceil": [[965, "torch-ceil"]], "torch.chain_matmul": [[966, "torch-chain-matmul"]], "inference_mode": [[919, "inference-mode"]], "torch.bmm": [[956, "torch-bmm"]], "torch.onnx": [[2064, "torch-onnx"]], "TorchDynamo-based ONNX Exporter": [[2064, "torchdynamo-based-onnx-exporter"], [2065, "torchdynamo-based-onnx-exporter"]], "TorchScript-based ONNX Exporter": [[2064, "torchscript-based-onnx-exporter"], [2067, "torchscript-based-onnx-exporter"]], "Contributing / Developing": [[2064, "contributing-developing"]], "Threading Environment Variables": [[2090, "threading-environment-variables"]], "Quantization": [[2072, "module-torch.ao.quantization"]], "Introduction to Quantization": [[2072, "introduction-to-quantization"]], "Quantization API Summary": [[2072, "quantization-api-summary"]], "Eager Mode Quantization": [[2072, "eager-mode-quantization"]], "Post Training Dynamic Quantization": [[2072, "post-training-dynamic-quantization"]], "Post Training Static Quantization": [[2072, "post-training-static-quantization"]], "Quantization Aware Training for Static Quantization": [[2072, "quantization-aware-training-for-static-quantization"]], "Model Preparation for Eager Mode Static Quantization": [[2072, "model-preparation-for-eager-mode-static-quantization"]], "(Prototype - maintenance mode) FX Graph Mode Quantization": [[2072, 
"prototype-maintenance-mode-fx-graph-mode-quantization"]], "(Prototype) PyTorch 2 Export Quantization": [[2072, "prototype-pytorch-2-export-quantization"]], "Quantization Stack": [[2072, "quantization-stack"]], "Quantized Model": [[2072, "quantized-model"]], "Quantized Tensor": [[2072, "quantized-tensor"]], "Quantize and Dequantize": [[2072, "quantize-and-dequantize"]], "Quantized Operators/Modules": [[2072, "quantized-operators-modules"]], "Quantized Engine": [[2072, "quantized-engine"]], "Quantization Flow": [[2072, "quantization-flow"]], "Observer and FakeQuantize": [[2072, "observer-and-fakequantize"]], "QConfig": [[2072, "qconfig"], [845, "qconfig"]], "General Quantization Flow": [[2072, "general-quantization-flow"]], "Quantization Support Matrix": [[2072, "quantization-support-matrix"]], "Quantization Mode Support": [[2072, "quantization-mode-support"]], "Quantization Flow Support": [[2072, "quantization-flow-support"]], "Backend/Hardware Support": [[2072, "backend-hardware-support"]], "Note for native CPU backends": [[2072, "note-for-native-cpu-backends"]], "Operator Support": [[2072, "operator-support"]], "Quantization API Reference": [[2072, "quantization-api-reference"], [2075, "quantization-api-reference"]], "Quantization Backend Configuration": [[2072, "quantization-backend-configuration"], [2074, "quantization-backend-configuration"]], "Quantization Accuracy Debugging": [[2072, "quantization-accuracy-debugging"], [2073, "quantization-accuracy-debugging"]], "Quantization Customizations": [[2072, "quantization-customizations"]], "Quantization Custom Module API": [[2072, "quantization-custom-module-api"]], "Best Practices": [[2072, "best-practices"]], "Common Errors": [[2072, "common-errors"]], "Passing a non-quantized Tensor into a quantized kernel": [[2072, "passing-a-non-quantized-tensor-into-a-quantized-kernel"]], "Passing a quantized Tensor into a non-quantized kernel": [[2072, "passing-a-quantized-tensor-into-a-non-quantized-kernel"]], "Saving and Loading Quantized models": [[2072, "saving-and-loading-quantized-models"]], "Symbolic Trace Error when using FX Graph Mode Quantization": [[2072, "symbolic-trace-error-when-using-fx-graph-mode-quantization"]], "torch.sparse": [[2082, "torch-sparse"]], "Why and when to use sparsity": [[2082, "why-and-when-to-use-sparsity"]], "Functionality overview": [[2082, "functionality-overview"]], "Operator overview": [[2082, "operator-overview"]], "Sparse Semi-Structured Tensors": [[2082, "sparse-semi-structured-tensors"]], "Constructing Sparse Semi-Structured Tensors": [[2082, "constructing-sparse-semi-structured-tensors"]], "Sparse Semi-Structured Tensor Operations": [[2082, "sparse-semi-structured-tensor-operations"]], "Accelerating nn.Linear with semi-structured sparsity": [[2082, "accelerating-nn-linear-with-semi-structured-sparsity"]], "Sparse COO tensors": [[2082, "sparse-coo-tensors"]], "Sparse hybrid COO tensors": [[2082, "sparse-hybrid-coo-tensors"]], "Uncoalesced sparse COO tensors": [[2082, "uncoalesced-sparse-coo-tensors"]], "Working with sparse COO tensors": [[2082, "working-with-sparse-coo-tensors"]], "Sparse Compressed Tensors": [[2082, "sparse-compressed-tensors"]], "Sparse CSR Tensor": [[2082, "sparse-csr-tensor"]], "Construction of CSR tensors": [[2082, "construction-of-csr-tensors"]], "CSR Tensor Operations": [[2082, "csr-tensor-operations"]], "Sparse CSC Tensor": [[2082, "sparse-csc-tensor"]], "Construction of CSC tensors": [[2082, "construction-of-csc-tensors"]], "Sparse BSR Tensor": [[2082, "sparse-bsr-tensor"]], 
"Construction of BSR tensors": [[2082, "construction-of-bsr-tensors"]], "Sparse BSC Tensor": [[2082, "sparse-bsc-tensor"]], "Construction of BSC tensors": [[2082, "construction-of-bsc-tensors"]], "Tools for working with sparse compressed tensors": [[2082, "tools-for-working-with-sparse-compressed-tensors"]], "Construction of sparse compressed tensors": [[2082, "construction-of-sparse-compressed-tensors"]], "Linear Algebra operations": [[2082, "linear-algebra-operations"]], "Tensor methods and sparse": [[2082, "tensor-methods-and-sparse"]], "Torch functions specific to sparse Tensors": [[2082, "torch-functions-specific-to-sparse-tensors"]], "Other functions": [[2082, "other-functions"]], "Unary functions": [[2082, "unary-functions"]], "torch.Storage": [[2084, "torch-storage"]], "Tensor Attributes": [[2085, "tensor-attributes"]], "torch.dtype": [[2085, "torch-dtype"]], "torch.device": [[2085, "torch-device"]], "torch.layout": [[2085, "torch-layout"]], "torch.memory_format": [[2085, "torch-memory-format"]], "CUDA semantics": [[2046, "cuda-semantics"]], "TensorFloat-32 (TF32) on Ampere (and later) devices": [[2046, "tensorfloat-32-tf32-on-ampere-and-later-devices"]], "Reduced Precision Reduction in FP16 GEMMs": [[2046, "reduced-precision-reduction-in-fp16-gemms"]], "Reduced Precision Reduction in BF16 GEMMs": [[2046, "reduced-precision-reduction-in-bf16-gemms"]], "Asynchronous execution": [[2046, "asynchronous-execution"]], "CUDA streams": [[2046, "cuda-streams"]], "Stream semantics of backward passes": [[2046, "stream-semantics-of-backward-passes"]], "BC note: Using grads on the default stream": [[2046, "bc-note-using-grads-on-the-default-stream"]], "Optimizing memory usage with PYTORCH_CUDA_ALLOC_CONF": [[2046, "optimizing-memory-usage-with-pytorch-cuda-alloc-conf"]], "Using custom memory allocators for CUDA": [[2046, "using-custom-memory-allocators-for-cuda"]], "cuBLAS workspaces": [[2046, "cublas-workspaces"]], "cuFFT plan cache": [[2046, "cufft-plan-cache"]], "Just-in-Time Compilation": [[2046, "just-in-time-compilation"]], "Best practices": [[2046, "best-practices"]], "Device-agnostic code": [[2046, "device-agnostic-code"]], "Use pinned memory buffers": [[2046, "use-pinned-memory-buffers"]], "Use nn.parallel.DistributedDataParallel instead of multiprocessing or nn.DataParallel": [[2046, "use-nn-parallel-distributeddataparallel-instead-of-multiprocessing-or-nn-dataparallel"]], "CUDA Graphs": [[2046, "cuda-graphs"]], "Why CUDA Graphs?": [[2046, "why-cuda-graphs"]], "PyTorch API": [[2046, "pytorch-api"]], "Non-constraints": [[2046, "non-constraints"]], "Whole-network capture": [[2046, "whole-network-capture"]], "Partial-network capture": [[2046, "partial-network-capture"]], "Usage with torch.cuda.amp": [[2046, "usage-with-torch-cuda-amp"]], "Usage with multiple streams": [[2046, "usage-with-multiple-streams"]], "Usage with DistributedDataParallel": [[2046, "usage-with-distributeddataparallel"]], "NCCL < 2.9.6": [[2046, "nccl-2-9-6"]], "NCCL >= 2.9.6": [[2046, "id5"]], "Graph memory management": [[2046, "graph-memory-management"]], "Sharing memory across captures": [[2046, "sharing-memory-across-captures"]], "torch.nn.init": [[2041, "torch-nn-init"]], "Reproducibility": [[2061, "reproducibility"]], "Controlling sources of randomness": [[2061, "controlling-sources-of-randomness"]], "PyTorch random number generator": [[2061, "pytorch-random-number-generator"]], "Python": [[2061, "python"]], "Random number generators in other libraries": [[2061, "random-number-generators-in-other-libraries"]], 
"CUDA convolution benchmarking": [[2061, "cuda-convolution-benchmarking"]], "Avoiding nondeterministic algorithms": [[2061, "avoiding-nondeterministic-algorithms"]], "CUDA convolution determinism": [[2061, "cuda-convolution-determinism"]], "CUDA RNN and LSTM": [[2061, "cuda-rnn-and-lstm"]], "Filling uninitialized memory": [[2061, "filling-uninitialized-memory"]], "DataLoader": [[2061, "dataloader"]], "torch.package": [[2070, "torch-package"]], "Packaging your first model": [[2070, "packaging-your-first-model"]], "How do I\u2026": [[2070, "how-do-i"]], "See what is inside a package?": [[2070, "see-what-is-inside-a-package"]], "Treat the package like a ZIP archive": [[2070, "treat-the-package-like-a-zip-archive"]], "Use the file_structure() API": [[2070, "use-the-file-structure-api"]], "See why a given module was included as a dependency?": [[2070, "see-why-a-given-module-was-included-as-a-dependency"]], "Include arbitrary resources with my package and access them later?": [[2070, "include-arbitrary-resources-with-my-package-and-access-them-later"]], "Customize how a class is packaged?": [[2070, "customize-how-a-class-is-packaged"]], "Test in my source code whether or not it is executing inside a package?": [[2070, "test-in-my-source-code-whether-or-not-it-is-executing-inside-a-package"]], "Patch code into a package?": [[2070, "patch-code-into-a-package"]], "Access package contents from packaged code?": [[2070, "access-package-contents-from-packaged-code"]], "Distinguish between packaged code and non-packaged code?": [[2070, "distinguish-between-packaged-code-and-non-packaged-code"]], "Re-export an imported object?": [[2070, "re-export-an-imported-object"]], "Package a TorchScript module?": [[2070, "package-a-torchscript-module"]], "Explanation": [[2070, "explanation"]], "torch.package Format Overview": [[2070, "torch-package-format-overview"]], "Framework files": [[2070, "framework-files"]], "User files": [[2070, "user-files"]], "How torch.package finds your code\u2019s dependencies": [[2070, "how-torch-package-finds-your-code-s-dependencies"]], "Analyzing an object\u2019s dependencies": [[2070, "analyzing-an-object-s-dependencies"]], "Analyzing a module\u2019s dependencies": [[2070, "analyzing-a-module-s-dependencies"]], "Dependency Management": [[2070, "dependency-management"]], "intern": [[2070, "intern"]], "extern": [[2070, "extern"]], "mock": [[2070, "mock"]], "Refactoring": [[2070, "refactoring"]], "Patterns": [[2070, "patterns"]], "torch.package sharp edges": [[2070, "torch-package-sharp-edges"]], "Avoid global state in your modules": [[2070, "avoid-global-state-in-your-modules"]], "Types are not shared between packages and the loading environment": [[2070, "types-are-not-shared-between-packages-and-the-loading-environment"]], "How torch.package keeps packages isolated from each other": [[2070, "how-torch-package-keeps-packages-isolated-from-each-other"]], "Mangling": [[2070, "mangling"]], "torch.special": [[2083, "torch-special"]], "PyTorch Custom Operators Landing Page": [[2047, "pytorch-custom-operators-landing-page"]], "Windows FAQ": [[2063, "windows-faq"]], "Building from source": [[2063, "building-from-source"]], "Include optional components": [[2063, "include-optional-components"]], "Speeding CUDA build for Windows": [[2063, "speeding-cuda-build-for-windows"]], "One key install script": [[2063, "one-key-install-script"]], "Extension": [[2063, "extension"]], "CFFI Extension": [[2063, "cffi-extension"]], "Cpp Extension": [[2063, "cpp-extension"]], "Installation": [[2063, 
"installation"]], "Package not found in win-32 channel.": [[2063, "package-not-found-in-win-32-channel"]], "Import error": [[2063, "import-error"]], "Usage (multiprocessing)": [[2063, "usage-multiprocessing"]], "Multiprocessing error without if-clause protection": [[2063, "multiprocessing-error-without-if-clause-protection"]], "Multiprocessing error \u201cBroken pipe\u201d": [[2063, "multiprocessing-error-broken-pipe"]], "Multiprocessing error \u201cdriver shut down\u201d": [[2063, "multiprocessing-error-driver-shut-down"]], "CUDA IPC operations": [[2063, "cuda-ipc-operations"]], "torch.optim": [[2069, "module-torch.optim"]], "How to use an optimizer": [[2069, "how-to-use-an-optimizer"]], "Constructing it": [[2069, "constructing-it"]], "Per-parameter options": [[2069, "per-parameter-options"]], "Taking an optimization step": [[2069, "taking-an-optimization-step"]], "optimizer.step()": [[2069, "optimizer-step"]], "optimizer.step(closure)": [[2069, "optimizer-step-closure"]], "Base class": [[2069, "base-class"]], "Algorithms": [[2069, "algorithms"]], "How to adjust learning rate": [[2069, "how-to-adjust-learning-rate"]], "Weight Averaging (SWA and EMA)": [[2069, "weight-averaging-swa-and-ema"]], "Constructing averaged models": [[2069, "constructing-averaged-models"]], "Custom averaging strategies": [[2069, "custom-averaging-strategies"]], "SWA learning rate schedules": [[2069, "swa-learning-rate-schedules"]], "Taking care of batch normalization": [[2069, "taking-care-of-batch-normalization"]], "Putting it all together: SWA": [[2069, "putting-it-all-together-swa"]], "Putting it all together: EMA": [[2069, "putting-it-all-together-ema"]], "Data insensitive error": [[2073, "data-insensitive-error"]], "General tips": [[2073, "general-tips"]], "Int8 quantization tips": [[2073, "int8-quantization-tips"]], "Data sensitive error": [[2073, "data-sensitive-error"]], "Implementation error": [[2073, "implementation-error"]], "Numerical Debugging Tooling (prototype)": [[2073, "numerical-debugging-tooling-prototype"]], "Serialization semantics": [[2062, "serialization-semantics"]], "Table of Contents": [[2062, "table-of-contents"]], "Saving and loading tensors": [[2062, "saving-and-loading-tensors"]], "Saving and loading tensors preserves views": [[2062, "saving-and-loading-tensors-preserves-views"]], "Saving and loading torch.nn.Modules": [[2062, "saving-and-loading-torch-nn-modules"]], "Serialized file format for torch.save": [[2062, "serialized-file-format-for-torch-save"]], "Serializing torch.nn.Modules and loading them in C++": [[2062, "serializing-torch-nn-modules-and-loading-them-in-c"]], "Saving and loading ScriptModules across PyTorch versions": [[2062, "saving-and-loading-scriptmodules-across-pytorch-versions"]], "torch.div performing integer division": [[2062, "torch-div-performing-integer-division"]], "torch.full always inferring a float dtype": [[2062, "torch-full-always-inferring-a-float-dtype"]], "Utility functions": [[2062, "utility-functions"], [2075, "utility-functions"]], "FSDP Notes": [[2052, "fsdp-notes"]], "FSDP Prefetch Nuances": [[2052, "fsdp-prefetch-nuances"]], "Communication payload size": [[2052, "communication-payload-size"]], "FSDP buffers sizes": [[2052, "fsdp-buffers-sizes"]], "ONNX Backend for TorchDynamo": [[2066, "onnx-backend-for-torchdynamo"]], "Distributed RPC Framework": [[2077, "distributed-rpc-framework"]], "RPC": [[2077, "rpc"]], "TensorPipe Backend": [[2077, "tensorpipe-backend"]], "RRef": [[2077, "rref"]], "More Information about RRef": [[2077, null]], 
"RemoteModule": [[2077, "remotemodule"]], "Distributed Autograd Framework": [[2077, "distributed-autograd-framework"]], "More Information about RPC Autograd": [[2077, null]], "Distributed Optimizer": [[2077, "distributed-optimizer"], [2078, "distributed-optimizer"]], "My model reports \u201ccuda runtime error(2): out of memory\u201d": [[2051, "my-model-reports-cuda-runtime-error-2-out-of-memory"]], "My GPU memory isn\u2019t freed properly": [[2051, "my-gpu-memory-isn-t-freed-properly"]], "My out of memory exception handler can\u2019t allocate memory": [[2051, "my-out-of-memory-exception-handler-can-t-allocate-memory"]], "My data loader workers return identical random numbers": [[2051, "my-data-loader-workers-return-identical-random-numbers"]], "My recurrent network doesn\u2019t work with data parallelism": [[2051, "my-recurrent-network-doesn-t-work-with-data-parallelism"]], "torch.testing": [[2089, "module-torch.testing"]], "torch.signal": [[2080, "module-torch.signal"]], "torch.signal.windows": [[2080, "module-torch.signal.windows"]], "Dependencies": [[2065, "dependencies"]], "A simple example": [[2065, "a-simple-example"]], "Inspecting the ONNX model using GUI": [[2065, "inspecting-the-onnx-model-using-gui"]], "Diagnosing issues with SARIF": [[2065, "diagnosing-issues-with-sarif"]], "ONNX Diagnostic SARIF Rules": [[2065, null]], "Default values for native configurations": [[2074, "default-values-for-native-configurations"]], "torch.random": [[2076, "module-torch.random"]], "Extending torch.func with autograd.Function": [[2050, "extending-torch-func-with-autograd-function"]], "Basic Usage": [[2050, "basic-usage"]], "Example 1: autograd.Function calls into another system": [[2050, "example-1-autograd-function-calls-into-another-system"]], "Example 2: autograd.Function specifies custom gradient rules": [[2050, "example-2-autograd-function-specifies-custom-gradient-rules"]], "Limitations and gotchas": [[2050, "limitations-and-gotchas"]], "torch.vmap() Support": [[2050, "torch-vmap-support"]], "Automatically generate a vmap rule": [[2050, "automatically-generate-a-vmap-rule"]], "Defining the vmap staticmethod": [[2050, "defining-the-vmap-staticmethod"]], "torch.func.jvp() Support": [[2050, "torch-func-jvp-support"]], "Autograd mechanics": [[2043, "autograd-mechanics"]], "How autograd encodes the history": [[2043, "how-autograd-encodes-the-history"]], "Saved tensors": [[2043, "saved-tensors"]], "Gradients for non-differentiable functions": [[2043, "gradients-for-non-differentiable-functions"]], "Setting requires_grad": [[2043, "setting-requires-grad"]], "Grad Modes": [[2043, "grad-modes"]], "Default Mode (Grad Mode)": [[2043, "default-mode-grad-mode"]], "No-grad Mode": [[2043, "no-grad-mode"]], "Inference Mode": [[2043, "inference-mode"]], "Evaluation Mode (nn.Module.eval())": [[2043, "evaluation-mode-nn-module-eval"]], "In-place operations with autograd": [[2043, "in-place-operations-with-autograd"]], "Multithreaded Autograd": [[2043, "multithreaded-autograd"]], "Concurrency on CPU": [[2043, "concurrency-on-cpu"]], "Non-determinism": [[2043, "non-determinism"]], "Graph retaining": [[2043, "graph-retaining"]], "Thread Safety on Autograd Node": [[2043, "thread-safety-on-autograd-node"]], "No thread safety on C++ hooks": [[2043, "no-thread-safety-on-c-hooks"]], "Autograd for Complex Numbers": [[2043, "autograd-for-complex-numbers"]], "What are complex derivatives?": [[2043, "what-are-complex-derivatives"]], "Wirtinger Calculus comes into the picture \u2026": [[2043, 
"wirtinger-calculus-comes-into-the-picture"]], "How is Wirtinger Calculus useful in optimization?": [[2043, "how-is-wirtinger-calculus-useful-in-optimization"]], "How does PyTorch compute the conjugate Wirtinger derivative?": [[2043, "how-does-pytorch-compute-the-conjugate-wirtinger-derivative"]], "How can I write my own derivative formula for a complex function?": [[2043, "how-can-i-write-my-own-derivative-formula-for-a-complex-function"]], "What about cross-domain functions?": [[2043, "what-about-cross-domain-functions"]], "Hooks for saved tensors": [[2043, "hooks-for-saved-tensors"]], "Registering hooks for a saved tensor": [[2043, "registering-hooks-for-a-saved-tensor"]], "Registering default hooks for saved tensors": [[2043, "registering-default-hooks-for-saved-tensors"]], "Backward Hooks execution": [[2043, "backward-hooks-execution"]], "Whether a particular hook will be fired": [[2043, "whether-a-particular-hook-will-be-fired"]], "The order in which the different hooks are fired": [[2043, "the-order-in-which-the-different-hooks-are-fired"]], "Special hooks": [[2043, "special-hooks"]], "Behavior of Tensor hooks when Tensor is modified in-place": [[2043, "behavior-of-tensor-hooks-when-tensor-is-modified-in-place"]], "MPS backend": [[2058, "mps-backend"]], "ONNX supported TorchScript operators": [[2068, "onnx-supported-torchscript-operators"]], "Supported operators": [[2068, "supported-operators"]], "ONNX support for TorchScript operators": [[2068, "id1"]], "Unsupported operators": [[2068, "unsupported-operators"], [2068, "id2"]], "torch.profiler": [[2071, "torch-profiler"]], "Intel Instrumentation and Tracing Technology APIs": [[2071, "intel-instrumentation-and-tracing-technology-apis"]], "Remote Reference Protocol": [[2079, "remote-reference-protocol"]], "Background": [[2079, "background"], [2078, "background"]], "RRef Lifetime": [[2079, "rref-lifetime"]], "Design Reasoning": [[2079, "design-reasoning"]], "Implementation": [[2079, "implementation"], [2048, "implementation"]], "Protocol Scenarios": [[2079, "protocol-scenarios"]], "User Share RRef with Owner as Return Value": [[2079, "user-share-rref-with-owner-as-return-value"]], "User Share RRef with Owner as Argument": [[2079, "user-share-rref-with-owner-as-argument"]], "Owner Share RRef with User": [[2079, "owner-share-rref-with-user"]], "User Share RRef with User": [[2079, "user-share-rref-with-user"]], "CPU threading and TorchScript inference": [[2045, "cpu-threading-and-torchscript-inference"]], "Build options": [[2045, "build-options"]], "Runtime API": [[2045, "runtime-api"]], "Tuning the number of threads": [[2045, "tuning-the-number-of-threads"]], "Numerical accuracy": [[2060, "numerical-accuracy"]], "Batched computations or slice computations": [[2060, "batched-computations-or-slice-computations"]], "Extremal values": [[2060, "extremal-values"]], "Linear algebra (torch.linalg)": [[2060, "linear-algebra-torch-linalg"]], "Non-finite values": [[2060, "non-finite-values"]], "Extremal values in linalg": [[2060, "extremal-values-in-linalg"]], "TensorFloat-32(TF32) on Nvidia Ampere (and later) devices": [[2060, "tensorfloat-32-tf32-on-nvidia-ampere-and-later-devices"]], "Reduced Precision Reduction for FP16 and BF16 GEMMs": [[2060, "reduced-precision-reduction-for-fp16-and-bf16-gemms"]], "Reduced Precision FP16 and BF16 GEMMs and Convolutions on AMD Instinct MI200 devices": [[2060, "reduced-precision-fp16-and-bf16-gemms-and-convolutions-on-amd-instinct-mi200-devices"]], "Distributed Data Parallel": [[2048, 
"distributed-data-parallel"]], "Example": [[2048, "example"], [2049, "example"]], "Internal Design": [[2048, "internal-design"]], "ProcessGroup": [[2048, "processgroup"]], "TorchDynamo DDPOptimizer": [[2048, "id1"]], "Extending PyTorch": [[2049, "extending-pytorch"]], "Adding new operators": [[2049, "adding-new-operators"]], "Extending torch.autograd": [[2049, "extending-torch-autograd"]], "When to use": [[2049, "when-to-use"]], "When not to use": [[2049, "when-not-to-use"]], "How to use": [[2049, "how-to-use"]], "Combined or separate forward() and setup_context()": [[2049, "combined-or-separate-forward-and-setup-context"]], "Forward mode AD": [[2049, "forward-mode-ad"]], "torch.func transforms and/or torch.vmap()": [[2049, "torch-func-transforms-and-or-torch-vmap"]], "Extending torch.nn": [[2049, "extending-torch-nn"]], "Adding a Module": [[2049, "adding-a-module"]], "Extending torch Python API": [[2049, "extending-torch-python-api"]], "Extending torch with a Tensor-like type": [[2049, "extending-torch-with-a-tensor-like-type"]], "Subclassing torch.Tensor": [[2049, "subclassing-torch-tensor"]], "Extending torch with a Tensor wrapper type": [[2049, "extending-torch-with-a-tensor-wrapper-type"]], "Operations on multiple types that define __torch_function__": [[2049, "operations-on-multiple-types-that-define-torch-function"]], "Testing Coverage of Overrides for the PyTorch API": [[2049, "testing-coverage-of-overrides-for-the-pytorch-api"]], "Extending torch native API": [[2049, "extending-torch-native-api"]], "Extending all torch API with Modes": [[2049, "extending-all-torch-api-with-modes"]], "torch.ao.quantization": [[2075, "torch-ao-quantization"]], "Top level APIs": [[2075, "top-level-apis"]], "Preparing model for quantization": [[2075, "preparing-model-for-quantization"]], "torch.ao.quantization.quantize_fx": [[2075, "torch-ao-quantization-quantize-fx"]], "torch.ao.quantization.qconfig_mapping": [[2075, "torch-ao-quantization-qconfig-mapping"]], "torch.ao.quantization.backend_config": [[2075, "torch-ao-quantization-backend-config"]], "torch.ao.quantization.fx.custom_config": [[2075, "torch-ao-quantization-fx-custom-config"]], "torch.ao.quantization.quantizer": [[2075, "module-torch.ao.quantization.quantizer"]], "torch.ao.quantization.pt2e (quantization in pytorch 2.0 export implementation)": [[2075, "module-torch.ao.quantization.pt2e"]], "torch.ao.quantization.pt2e.export_utils": [[2075, "torch-ao-quantization-pt2e-export-utils"]], "torch (quantization related functions)": [[2075, "torch-quantization-related-functions"]], "torch.Tensor (quantization related methods)": [[2075, "torch-tensor-quantization-related-methods"]], "torch.ao.quantization.observer": [[2075, "torch-ao-quantization-observer"]], "torch.ao.quantization.fake_quantize": [[2075, "torch-ao-quantization-fake-quantize"]], "torch.ao.quantization.qconfig": [[2075, "torch-ao-quantization-qconfig"]], "torch.ao.nn.intrinsic": [[2075, "module-torch.ao.nn.intrinsic"]], "torch.ao.nn.intrinsic.qat": [[2075, "module-torch.ao.nn.intrinsic.qat"]], "torch.ao.nn.intrinsic.quantized": [[2075, "module-torch.ao.nn.intrinsic.quantized"]], "torch.ao.nn.intrinsic.quantized.dynamic": [[2075, "module-torch.ao.nn.intrinsic.quantized.dynamic"]], "torch.ao.nn.qat": [[2075, "module-torch.ao.nn.qat"]], "torch.ao.nn.qat.dynamic": [[2075, "module-torch.ao.nn.qat.dynamic"]], "torch.ao.nn.quantized": [[2075, "module-torch.ao.nn.quantized.modules"]], "torch.ao.nn.quantized.functional": [[2075, "module-torch.ao.nn.quantized.functional"]], 
"torch.ao.nn.quantizable": [[2075, "torch-ao-nn-quantizable"]], "torch.ao.nn.quantized.dynamic": [[2075, "module-torch.ao.nn.quantized.dynamic"]], "Quantized dtypes and quantization schemes": [[2075, "quantized-dtypes-and-quantization-schemes"]], "Automatic Mixed Precision examples": [[2042, "automatic-mixed-precision-examples"]], "Typical Mixed Precision Training": [[2042, "typical-mixed-precision-training"]], "Working with Unscaled Gradients": [[2042, "working-with-unscaled-gradients"]], "Gradient clipping": [[2042, "gradient-clipping"]], "Working with Scaled Gradients": [[2042, "working-with-scaled-gradients"]], "Gradient accumulation": [[2042, "gradient-accumulation"]], "Gradient penalty": [[2042, "gradient-penalty"]], "Working with Multiple Models, Losses, and Optimizers": [[2042, "working-with-multiple-models-losses-and-optimizers"]], "Working with Multiple GPUs": [[2042, "working-with-multiple-gpus"]], "DataParallel in a single process": [[2042, "dataparallel-in-a-single-process"]], "DistributedDataParallel, one GPU per process": [[2042, "distributeddataparallel-one-gpu-per-process"]], "DistributedDataParallel, multiple GPUs per process": [[2042, "distributeddataparallel-multiple-gpus-per-process"]], "Autocast and Custom Autograd Functions": [[2042, "autocast-and-custom-autograd-functions"]], "Functions with multiple inputs or autocastable ops": [[2042, "functions-with-multiple-inputs-or-autocastable-ops"]], "Functions that need a particular dtype": [[2042, "functions-that-need-a-particular-dtype"]], "torch.nn.functional": [[2040, "torch-nn-functional"]], "Convolution functions": [[2040, "convolution-functions"]], "Pooling functions": [[2040, "pooling-functions"]], "Attention Mechanisms": [[2040, "attention-mechanisms"]], "Non-linear activation functions": [[2040, "non-linear-activation-functions"]], "Linear functions": [[2040, "linear-functions"]], "Dropout functions": [[2040, "dropout-functions"]], "Sparse functions": [[2040, "sparse-functions"]], "Distance functions": [[2040, "distance-functions"]], "Loss functions": [[2040, "loss-functions"]], "Vision functions": [[2040, "vision-functions"]], "DataParallel functions (multi-GPU, distributed)": [[2040, "dataparallel-functions-multi-gpu-distributed"]], "data_parallel": [[2040, "data-parallel"]], "Example: AlexNet from PyTorch to ONNX": [[2067, "example-alexnet-from-pytorch-to-onnx"]], "Tracing vs Scripting": [[2067, "tracing-vs-scripting"]], "Avoiding Pitfalls": [[2067, "avoiding-pitfalls"]], "Avoid NumPy and built-in Python types": [[2067, "avoid-numpy-and-built-in-python-types"]], "Avoid Tensor.data": [[2067, "avoid-tensor-data"]], "Avoid in-place operations when using tensor.shape in tracing mode": [[2067, "avoid-in-place-operations-when-using-tensor-shape-in-tracing-mode"]], "Differences in Operator Implementations": [[2067, "differences-in-operator-implementations"]], "Unsupported Tensor Indexing Patterns": [[2067, "unsupported-tensor-indexing-patterns"]], "Reads / Gets": [[2067, "reads-gets"]], "Writes / Sets": [[2067, "writes-sets"]], "Adding support for operators": [[2067, "adding-support-for-operators"]], "ONNX exporter internals": [[2067, "onnx-exporter-internals"]], "ATen operators": [[2067, "aten-operators"]], "List of supported operators": [[2067, "list-of-supported-operators"]], "Adding support for an aten or quantized operator": [[2067, "adding-support-for-an-aten-or-quantized-operator"]], "torch.autograd.Functions": [[2067, "torch-autograd-functions"]], "Static Symbolic Method": [[2067, "static-symbolic-method"]], 
"Inline Autograd Function": [[2067, "inline-autograd-function"]], "Custom operators": [[2067, "custom-operators"]], "ONNX-script functions": [[2067, "onnx-script-functions"]], "C++ Operators": [[2067, "c-operators"]], "Discovering all unconvertible ATen ops at once": [[2067, "discovering-all-unconvertible-aten-ops-at-once"]], "Classes": [[2067, "classes"]], "Tensor Views": [[2086, "tensor-views"]], "torch.utils.tensorboard": [[2087, "module-torch.utils.tensorboard"]], "Gradcheck mechanics": [[2054, "gradcheck-mechanics"]], "Notations and background information": [[2054, "notations-and-background-information"]], "Default backward mode gradcheck behavior": [[2054, "default-backward-mode-gradcheck-behavior"]], "Real-to-real functions": [[2054, "real-to-real-functions"]], "Default real input numerical evaluation": [[2054, "default-real-input-numerical-evaluation"]], "Default real input analytical evaluation": [[2054, "default-real-input-analytical-evaluation"]], "Complex-to-real functions": [[2054, "complex-to-real-functions"]], "Default complex input numerical evaluation": [[2054, "default-complex-input-numerical-evaluation"]], "Default complex input analytical evaluation": [[2054, "default-complex-input-analytical-evaluation"]], "Functions with complex outputs": [[2054, "functions-with-complex-outputs"]], "Fast backward mode gradcheck": [[2054, "fast-backward-mode-gradcheck"]], "Fast gradcheck for real-to-real functions": [[2054, "fast-gradcheck-for-real-to-real-functions"]], "Fast gradcheck for complex-to-real functions": [[2054, "fast-gradcheck-for-complex-to-real-functions"]], "Fast complex input numerical evaluation": [[2054, "fast-complex-input-numerical-evaluation"]], "Fast complex input analytical evaluation": [[2054, "fast-complex-input-analytical-evaluation"]], "Why not use a complex u": [[2054, "why-not-use-a-complex-u"]], "Fast gradcheck for functions with complex outputs": [[2054, "fast-gradcheck-for-functions-with-complex-outputs"]], "Gradgradcheck implementation": [[2054, "gradgradcheck-implementation"]], "torch.Size": [[2081, "torch-size"]], "Pytorch 2.4: Getting Started on Intel GPU": [[2053, "pytorch-2-4-getting-started-on-intel-gpu"]], "Hardware Prerequisites": [[2053, "hardware-prerequisites"]], "Software Prerequisites": [[2053, "software-prerequisites"]], "Set up Environment": [[2053, "set-up-environment"]], "Build from source": [[2053, "build-from-source"]], "Check availability for Intel GPU": [[2053, "check-availability-for-intel-gpu"]], "Minimum Code Change": [[2053, "minimum-code-change"]], "Inference Examples": [[2053, "inference-examples"]], "Inference with FP32": [[2053, "inference-with-fp32"]], "Inference with AMP": [[2053, "inference-with-amp"]], "Inference with torch.compile": [[2053, "inference-with-torch-compile"]], "Training Examples": [[2053, "training-examples"]], "Train with FP32": [[2053, "train-with-fp32"]], "Train with AMP": [[2053, "train-with-amp"]], "A Simple Custom Module": [[2057, "a-simple-custom-module"]], "Modules as Building Blocks": [[2057, "modules-as-building-blocks"]], "Neural Network Training with Modules": [[2057, "neural-network-training-with-modules"]], "Module State": [[2057, "module-state"]], "Module Initialization": [[2057, "module-initialization"]], "Module Hooks": [[2057, "module-hooks"]], "Advanced Features": [[2057, "advanced-features"]], "Distributed Training": [[2057, "distributed-training"]], "Profiling Performance": [[2057, "profiling-performance"]], "Improving Performance with Quantization": [[2057, 
"improving-performance-with-quantization"]], "Improving Memory Usage with Pruning": [[2057, "improving-memory-usage-with-pruning"]], "Parametrizations": [[2057, "parametrizations"]], "Transforming Modules with FX": [[2057, "transforming-modules-with-fx"]], "Broadcasting semantics": [[2044, "broadcasting-semantics"]], "General semantics": [[2044, "general-semantics"]], "In-place semantics": [[2044, "in-place-semantics"]], "Backwards compatibility": [[2044, "backwards-compatibility"]], "Multiprocessing best practices": [[2059, "multiprocessing-best-practices"]], "CUDA in multiprocessing": [[2059, "cuda-in-multiprocessing"]], "Best practices and tips": [[2059, "best-practices-and-tips"]], "Avoiding and fighting deadlocks": [[2059, "avoiding-and-fighting-deadlocks"]], "Reuse buffers passed through a Queue": [[2059, "reuse-buffers-passed-through-a-queue"]], "Asynchronous multiprocess training (e.g. Hogwild)": [[2059, "asynchronous-multiprocess-training-e-g-hogwild"]], "Hogwild": [[2059, "hogwild"]], "CPU in multiprocessing": [[2059, "cpu-in-multiprocessing"]], "CPU oversubscription": [[2059, "cpu-oversubscription"]], "Avoid CPU oversubscription": [[2059, "avoid-cpu-oversubscription"]], "torch.Tensor": [[2088, "torch-tensor"]], "Data types": [[2088, "data-types"]], "Initializing and basic operations": [[2088, "initializing-and-basic-operations"]], "Tensor class reference": [[2088, "tensor-class-reference"]], "Distributed Autograd Design": [[2078, "distributed-autograd-design"]], "Autograd recording during the forward pass": [[2078, "autograd-recording-during-the-forward-pass"]], "Distributed Autograd Context": [[2078, "distributed-autograd-context"]], "Distributed Backward Pass": [[2078, "distributed-backward-pass"]], "Computing dependencies": [[2078, "computing-dependencies"]], "FAST mode algorithm": [[2078, "fast-mode-algorithm"]], "SMART mode algorithm": [[2078, "smart-mode-algorithm"]], "Simple end to end example": [[2078, "simple-end-to-end-example"]], "Features for large-scale deployments": [[2056, "features-for-large-scale-deployments"]], "Fleet-wide operator profiling": [[2056, "fleet-wide-operator-profiling"]], "API usage logging": [[2056, "api-usage-logging"]], "Attaching metadata to saved TorchScript models": [[2056, "attaching-metadata-to-saved-torchscript-models"]], "Build environment considerations": [[2056, "build-environment-considerations"]], "Common extension points": [[2056, "common-extension-points"]], "HIP (ROCm) semantics": [[2055, "hip-rocm-semantics"]], "HIP Interfaces Reuse the CUDA Interfaces": [[2055, "hip-interfaces-reuse-the-cuda-interfaces"]], "Checking for HIP": [[2055, "checking-for-hip"]], "TensorFloat-32(TF32) on ROCm": [[2055, "tensorfloat-32-tf32-on-rocm"]], "hipFFT/rocFFT plan cache": [[2055, "hipfft-rocfft-plan-cache"]], "torch.distributed backends": [[2055, "torch-distributed-backends"]], "CUDA API to HIP API mappings in C++": [[2055, "cuda-api-to-hip-api-mappings-in-c"]], "Refer to CUDA Semantics doc": [[2055, "refer-to-cuda-semantics-doc"]], "Enabling kernel asserts": [[2055, "enabling-kernel-asserts"]], "torch.arccosh": [[871, "torch-arccosh"]], "torch.atanh": [[889, "torch-atanh"]], "torch.autograd.backward": [[897, "torch-autograd-backward"]], "torch.autograd.forward_ad.enter_dual_level": [[900, "torch-autograd-forward-ad-enter-dual-level"]], "torch.autograd.functional.vhp": [[916, "torch-autograd-functional-vhp"]], "torch.argwhere": [[881, "torch-argwhere"]], "torch.argmax": [[878, "torch-argmax"]], "torch.atleast_1d": [[890, "torch-atleast-1d"]], 
"UnpackedDualTensor": [[898, "unpackeddualtensor"]], "torch.asinh": [[886, "torch-asinh"]], "swap_module": [[868, "swap-module"]], "torch.autograd.functional.hvp": [[913, "torch-autograd-functional-hvp"]], "torch.autograd.function.FunctionCtx.set_materialize_grads": [[908, "torch-autograd-function-functionctx-set-materialize-grads"]], "torch.argsort": [[880, "torch-argsort"]], "torch.autograd.function.FunctionCtx.save_for_backward": [[907, "torch-autograd-function-functionctx-save-for-backward"]], "torch.atleast_2d": [[891, "torch-atleast-2d"]], "torch.as_strided": [[882, "torch-as-strided"]], "torch.are_deterministic_algorithms_enabled": [[877, "torch-are-deterministic-algorithms-enabled"]], "torch.atleast_3d": [[892, "torch-atleast-3d"]], "torch.arctan2": [[875, "torch-arctan2"]], "torch.autograd.Function.vmap": [[896, "torch-autograd-function-vmap"]], "torch.autograd.forward_ad.make_dual": [[902, "torch-autograd-forward-ad-make-dual"]], "torch.arange": [[869, "torch-arange"]], "torch.autograd.forward_ad.unpack_dual": [[903, "torch-autograd-forward-ad-unpack-dual"]], "torch.asin": [[885, "torch-asin"]], "torch.arccos": [[870, "torch-arccos"]], "torch.autograd.Function.jvp": [[895, "torch-autograd-function-jvp"]], "torch.autograd.function.FunctionCtx.mark_non_differentiable": [[906, "torch-autograd-function-functionctx-mark-non-differentiable"]], "torch.arcsinh": [[873, "torch-arcsinh"]], "torch.atan2": [[888, "torch-atan2"]], "NestedIOFunction": [[910, "nestediofunction"]], "torch.argmin": [[879, "torch-argmin"]], "torch.autograd.functional.hessian": [[912, "torch-autograd-functional-hessian"]], "torch.autograd.functional.jacobian": [[914, "torch-autograd-functional-jacobian"]], "dual_level": [[899, "dual-level"]], "torch.autograd.function.FunctionCtx.mark_dirty": [[905, "torch-autograd-function-functionctx-mark-dirty"]], "torch.autograd.function.once_differentiable": [[911, "torch-autograd-function-once-differentiable"]], "torch.arcsin": [[872, "torch-arcsin"]], "torch.autograd.Function.backward": [[893, "torch-autograd-function-backward"]], "quantize_qat": [[867, "quantize-qat"]], "torch.autograd.Function.forward": [[894, "torch-autograd-function-forward"]], "torch.autograd.functional.jvp": [[915, "torch-autograd-functional-jvp"]], "torch.asarray": [[884, "torch-asarray"]], "torch.as_tensor": [[883, "torch-as-tensor"]], "BackwardCFunction": [[904, "backwardcfunction"]], "torch.arctan": [[874, "torch-arctan"]], "torch.autograd.forward_ad.exit_dual_level": [[901, "torch-autograd-forward-ad-exit-dual-level"]], "torch.atan": [[887, "torch-atan"]], "torch.autograd.functional.vjp": [[917, "torch-autograd-functional-vjp"]], "InplaceFunction": [[909, "inplacefunction"]], "torch.arctanh": [[876, "torch-arctanh"]], "ConvertCustomConfig": [[818, "convertcustomconfig"]], "get_observer_state_dict": [[839, "get-observer-state-dict"]], "default_weight_observer": [[838, "default-weight-observer"]], "PrepareCustomConfig": [[820, "preparecustomconfig"]], "get_default_qconfig_mapping": [[860, "get-default-qconfig-mapping"]], "default_weight_only_qconfig": [[853, "default-weight-only-qconfig"]], "default_per_channel_qconfig": [[849, "default-per-channel-qconfig"]], "default_activation_only_qconfig": [[846, "default-activation-only-qconfig"]], "MinMaxObserver": [[823, "minmaxobserver"]], "PerChannelMinMaxObserver": [[828, "perchannelminmaxobserver"]], "default_dynamic_quant_observer": [[832, "default-dynamic-quant-observer"]], "per_channel_dynamic_qconfig": [[857, "per-channel-dynamic-qconfig"]], 
"QConfigMapping": [[858, "qconfigmapping"]], "default_qconfig": [[852, "default-qconfig"]], "prepare": [[841, "prepare"]], "float16_dynamic_qconfig": [[854, "float16-dynamic-qconfig"]], "convert_fx": [[863, "convert-fx"]], "ObserverBase": [[827, "observerbase"]], "enable_observer": [[816, "enable-observer"]], "prepare_qat": [[842, "prepare-qat"]], "default_qat_qconfig": [[850, "default-qat-qconfig"]], "MovingAveragePerChannelMinMaxObserver": [[825, "movingaverageperchannelminmaxobserver"]], "RecordingObserver": [[830, "recordingobserver"]], "HistogramObserver": [[822, "histogramobserver"]], "load_observer_state_dict": [[840, "load-observer-state-dict"]], "prepare_fx": [[865, "prepare-fx"]], "default_histogram_observer": [[834, "default-histogram-observer"]], "default_observer": [[835, "default-observer"]], "StandaloneModuleConfigEntry": [[821, "standalonemoduleconfigentry"]], "FuseCustomConfig": [[819, "fusecustomconfig"]], "default_debug_observer": [[831, "default-debug-observer"]], "get_default_qat_qconfig_mapping": [[859, "get-default-qat-qconfig-mapping"]], "default_dynamic_qconfig": [[848, "default-dynamic-qconfig"]], "float16_static_qconfig": [[855, "float16-static-qconfig"]], "default_debug_qconfig": [[847, "default-debug-qconfig"]], "quantize": [[861, "quantize"]], "quantize_dynamic": [[862, "quantize-dynamic"]], "MovingAverageMinMaxObserver": [[824, "movingaverageminmaxobserver"]], "PlaceholderObserver": [[829, "placeholderobserver"]], "prepare_qat_fx": [[866, "prepare-qat-fx"]], "model_is_exported": [[844, "model-is-exported"]], "NoopObserver": [[826, "noopobserver"]], "default_float_qparams_observer": [[833, "default-float-qparams-observer"]], "fuse_modules": [[817, "fuse-modules"]], "propagate_qconfig": [[843, "propagate-qconfig"]], "default_placeholder_observer": [[837, "default-placeholder-observer"]], "fuse_fx": [[864, "fuse-fx"]], "default_qat_qconfig_v2": [[851, "default-qat-qconfig-v2"]], "default_per_channel_weight_observer": [[836, "default-per-channel-weight-observer"]], "float_qparams_weight_only_qconfig": [[856, "float-qparams-weight-only-qconfig"]], "default_weight_fake_quant": [[812, "default-weight-fake-quant"]], "default_per_channel_weight_fake_quant": [[811, "default-per-channel-weight-fake-quant"]], "hardswish": [[780, "hardswish"]], "add_quant_dequant": [[794, "add-quant-dequant"]], "avg_pool2d": [[771, "avg-pool2d"]], "conv3d": [[777, "conv3d"]], "default_fused_act_fake_quant": [[807, "default-fused-act-fake-quant"]], "FakeQuantize": [[802, "fakequantize"]], "disable_observer": [[814, "disable-observer"]], "adaptive_avg_pool3d": [[770, "adaptive-avg-pool3d"]], "default_histogram_fake_quant": [[810, "default-histogram-fake-quant"]], "FakeQuantizeBase": [[803, "fakequantizebase"]], "default_eval_fn": [[801, "default-eval-fn"]], "disable_fake_quant": [[813, "disable-fake-quant"]], "linear": [[784, "linear"]], "hardsigmoid": [[779, "hardsigmoid"]], "upsample_nearest": [[790, "upsample-nearest"]], "default_fake_quant": [[806, "default-fake-quant"]], "DTypeWithConstraints": [[798, "dtypewithconstraints"]], "default_fused_wt_fake_quant": [[809, "default-fused-wt-fake-quant"]], "celu": [[773, "celu"]], "upsample_bilinear": [[789, "upsample-bilinear"]], "leaky_relu": [[783, "leaky-relu"]], "DTypeConfig": [[797, "dtypeconfig"]], "ObservationType": [[799, "observationtype"]], "adaptive_avg_pool2d": [[769, "adaptive-avg-pool2d"]], "hardtanh": [[781, "hardtanh"]], "BackendConfig": [[795, "backendconfig"]], "upsample": [[788, "upsample"]], "FixedQParamsFakeQuantize": 
[[804, "fixedqparamsfakequantize"]], "FusedMovingAvgObsFakeQuantize": [[805, "fusedmovingavgobsfakequantize"]], "enable_fake_quant": [[815, "enable-fake-quant"]], "convert": [[800, "convert"]], "conv2d": [[776, "conv2d"]], "DeQuantStub": [[791, "dequantstub"]], "elu": [[778, "elu"]], "QuantWrapper": [[793, "quantwrapper"]], "clamp": [[774, "clamp"]], "conv1d": [[775, "conv1d"]], "avg_pool3d": [[772, "avg-pool3d"]], "max_pool1d": [[785, "max-pool1d"]], "max_pool2d": [[786, "max-pool2d"]], "threshold": [[787, "threshold"]], "interpolate": [[782, "interpolate"]], "QuantStub": [[792, "quantstub"]], "BackendPatternConfig": [[796, "backendpatternconfig"]], "default_fused_per_channel_wt_fake_quant": [[808, "default-fused-per-channel-wt-fake-quant"]], "ConvBnReLU2d": [[719, "convbnrelu2d"], [709, "convbnrelu2d"]], "FloatFunctional": [[751, "floatfunctional"]], "ConvBn1d": [[715, "convbn1d"], [705, "convbn1d"]], "BNReLU3d": [[727, "bnrelu3d"], [704, "bnrelu3d"]], "ConvReLU2d": [[729, "convrelu2d"], [721, "convrelu2d"], [712, "convrelu2d"]], "ConvReLU1d": [[728, "convrelu1d"], [711, "convrelu1d"]], "FXFloatFunctional": [[750, "fxfloatfunctional"]], "LinearReLU": [[714, "linearrelu"], [731, "linearrelu"], [723, "linearrelu"], [732, "linearrelu"]], "freeze_bn_stats": [[724, "freeze-bn-stats"]], "ConvBnReLU3d": [[720, "convbnrelu3d"], [710, "convbnrelu3d"]], "ConvReLU3d": [[730, "convrelu3d"], [722, "convrelu3d"], [713, "convrelu3d"]], "update_bn_stats": [[725, "update-bn-stats"]], "ConvBn3d": [[717, "convbn3d"], [707, "convbn3d"]], "QFunctional": [[760, "qfunctional"]], "BNReLU2d": [[726, "bnrelu2d"], [703, "bnrelu2d"]], "ConvBn2d": [[716, "convbn2d"], [706, "convbn2d"]], "ConvBnReLU1d": [[718, "convbnrelu1d"], [708, "convbnrelu1d"]], "torch._foreach_zero_": [[682, "torch-foreach-zero"]], "torch._foreach_sinh": [[674, "torch-foreach-sinh"]], "torch._foreach_neg": [[664, "torch-foreach-neg"]], "torch._foreach_sinh_": [[675, "torch-foreach-sinh"]], "torch._foreach_tan_": [[679, "torch-foreach-tan"]], "torch.addr": [[694, "torch-addr"]], "torch.adjoint": [[695, "torch-adjoint"]], "torch.allclose": [[697, "torch-allclose"]], "torch.aminmax": [[700, "torch-aminmax"]], "torch.amax": [[698, "torch-amax"]], "torch._foreach_round_": [[669, "torch-foreach-round"]], "torch._foreach_tan": [[678, "torch-foreach-tan"]], "torch._logging.set_logs": [[683, "torch-logging-set-logs"]], "torch._foreach_log_": [[663, "torch-foreach-log"]], "torch.addbmm": [[689, "torch-addbmm"]], "torch._foreach_sin": [[672, "torch-foreach-sin"]], "torch.amin": [[699, "torch-amin"]], "torch._foreach_sqrt_": [[677, "torch-foreach-sqrt"]], "torch.any": [[702, "torch-any"]], "torch.addcmul": [[691, "torch-addcmul"]], "torch.addmv": [[693, "torch-addmv"]], "torch.absolute": [[685, "torch-absolute"]], "torch.addmm": [[692, "torch-addmm"]], "torch.abs": [[684, "torch-abs"]], "torch.addcdiv": [[690, "torch-addcdiv"]], "torch._foreach_sin_": [[673, "torch-foreach-sin"]], "torch._foreach_reciprocal": [[666, "torch-foreach-reciprocal"]], "torch._foreach_neg_": [[665, "torch-foreach-neg"]], "torch._foreach_trunc": [[680, "torch-foreach-trunc"]], "torch.acos": [[686, "torch-acos"]], "torch._foreach_round": [[668, "torch-foreach-round"]], "torch.angle": [[701, "torch-angle"]], "torch._foreach_sigmoid": [[670, "torch-foreach-sigmoid"]], "torch.acosh": [[687, "torch-acosh"]], "torch.all": [[696, "torch-all"]], "torch._foreach_sigmoid_": [[671, "torch-foreach-sigmoid"]], "torch._foreach_reciprocal_": [[667, "torch-foreach-reciprocal"]], 
"torch._foreach_sqrt": [[676, "torch-foreach-sqrt"]], "torch.add": [[688, "torch-add"]], "torch._foreach_trunc_": [[681, "torch-foreach-trunc"]], "torch._foreach_exp": [[646, "torch-foreach-exp"]], "torch._foreach_acos": [[630, "torch-foreach-acos"]], "torch.Tensor.unsqueeze": [[613, "torch-tensor-unsqueeze"]], "torch._foreach_asin_": [[633, "torch-foreach-asin"]], "torch.Tensor.vdot": [[618, "torch-tensor-vdot"]], "torch._foreach_acos_": [[631, "torch-foreach-acos"]], "torch.Tensor.xlogy": [[623, "torch-tensor-xlogy"]], "torch._foreach_expm1_": [[649, "torch-foreach-expm1"]], "torch.Tensor.xlogy_": [[624, "torch-tensor-xlogy"]], "torch._foreach_erfc_": [[645, "torch-foreach-erfc"]], "torch._foreach_log2_": [[662, "torch-foreach-log2"]], "torch.Tensor.untyped_storage": [[615, "torch-tensor-untyped-storage"]], "torch._foreach_cosh_": [[641, "torch-foreach-cosh"]], "torch._foreach_atan_": [[635, "torch-foreach-atan"]], "torch.Tensor.view": [[619, "torch-tensor-view"]], "torch._foreach_erf_": [[643, "torch-foreach-erf"]], "torch._foreach_lgamma": [[654, "torch-foreach-lgamma"]], "torch.Tensor.view_as": [[620, "torch-tensor-view-as"]], "torch.Tensor.zero_": [[626, "torch-tensor-zero"]], "torch.Tensor.var": [[617, "torch-tensor-var"]], "torch.Tensor.unsqueeze_": [[614, "torch-tensor-unsqueeze"]], "torch._foreach_log2": [[661, "torch-foreach-log2"]], "torch.Tensor.xpu": [[625, "torch-tensor-xpu"]], "torch._foreach_atan": [[634, "torch-foreach-atan"]], "torch._foreach_log10": [[657, "torch-foreach-log10"]], "torch._assert": [[627, "torch-assert"]], "torch._foreach_log1p": [[659, "torch-foreach-log1p"]], "torch.Tensor.unique_consecutive": [[612, "torch-tensor-unique-consecutive"]], "torch._foreach_cosh": [[640, "torch-foreach-cosh"]], "torch._foreach_frac_": [[653, "torch-foreach-frac"]], "torch.Tensor.values": [[616, "torch-tensor-values"]], "torch._foreach_frac": [[652, "torch-foreach-frac"]], "torch._foreach_log": [[656, "torch-foreach-log"]], "torch._foreach_abs": [[628, "torch-foreach-abs"]], "torch._foreach_floor": [[650, "torch-foreach-floor"]], "torch._foreach_cos_": [[639, "torch-foreach-cos"]], "torch._foreach_erfc": [[644, "torch-foreach-erfc"]], "torch.Tensor.where": [[622, "torch-tensor-where"]], "torch.Tensor.vsplit": [[621, "torch-tensor-vsplit"]], "torch._foreach_erf": [[642, "torch-foreach-erf"]], "torch._foreach_expm1": [[648, "torch-foreach-expm1"]], "torch._foreach_log10_": [[658, "torch-foreach-log10"]], "torch._foreach_floor_": [[651, "torch-foreach-floor"]], "torch._foreach_exp_": [[647, "torch-foreach-exp"]], "torch._foreach_cos": [[638, "torch-foreach-cos"]], "torch._foreach_lgamma_": [[655, "torch-foreach-lgamma"]], "torch._foreach_ceil_": [[637, "torch-foreach-ceil"]], "torch._foreach_abs_": [[629, "torch-foreach-abs"]], "torch._foreach_log1p_": [[660, "torch-foreach-log1p"]], "torch._foreach_asin": [[632, "torch-foreach-asin"]], "torch._foreach_ceil": [[636, "torch-foreach-ceil"]], "torch.Tensor.tan": [[576, "torch-tensor-tan"]], "torch.Tensor.unfold": [[609, "torch-tensor-unfold"]], "torch.Tensor.storage_type": [[561, "torch-tensor-storage-type"]], "torch.Tensor.type": [[605, "torch-tensor-type"]], "torch.Tensor.trunc_": [[604, "torch-tensor-trunc"]], "torch.Tensor.true_divide_": [[602, "torch-tensor-true-divide"]], "torch.Tensor.subtract": [[565, "torch-tensor-subtract"]], "torch.Tensor.to_dense": [[583, "torch-tensor-to-dense"]], "torch.Tensor.subtract_": [[566, "torch-tensor-subtract"]], "torch.Tensor.to_sparse_bsr": [[587, "torch-tensor-to-sparse-bsr"]], 
"torch.Tensor.trace": [[593, "torch-tensor-trace"]], "torch.Tensor.trunc": [[603, "torch-tensor-trunc"]], "torch.Tensor.unique": [[611, "torch-tensor-unique"]], "torch.Tensor.transpose_": [[595, "torch-tensor-transpose"]], "torch.Tensor.sub": [[563, "torch-tensor-sub"]], "torch.Tensor.tolist": [[591, "torch-tensor-tolist"]], "torch.Tensor.transpose": [[594, "torch-tensor-transpose"]], "torch.Tensor.tril_": [[598, "torch-tensor-tril"]], "torch.Tensor.svd": [[569, "torch-tensor-svd"]], "torch.Tensor.tanh_": [[579, "torch-tensor-tanh"]], "torch.Tensor.unbind": [[607, "torch-tensor-unbind"]], "torch.Tensor.sum_to_size": [[568, "torch-tensor-sum-to-size"]], "torch.Tensor.tensor_split": [[580, "torch-tensor-tensor-split"]], "torch.Tensor.take_along_dim": [[575, "torch-tensor-take-along-dim"]], "torch.Tensor.to_mkldnn": [[584, "torch-tensor-to-mkldnn"]], "torch.Tensor.take": [[574, "torch-tensor-take"]], "torch.Tensor.tril": [[597, "torch-tensor-tril"]], "torch.Tensor.to_sparse_bsc": [[586, "torch-tensor-to-sparse-bsc"]], "torch.Tensor.sub_": [[564, "torch-tensor-sub"]], "torch.Tensor.type_as": [[606, "torch-tensor-type-as"]], "torch.Tensor.tile": [[581, "torch-tensor-tile"]], "torch.Tensor.to": [[582, "torch-tensor-to"]], "torch.Tensor.true_divide": [[601, "torch-tensor-true-divide"]], "torch.Tensor.tan_": [[577, "torch-tensor-tan"]], "torch.Tensor.uniform_": [[610, "torch-tensor-uniform"]], "torch.Tensor.to_sparse_coo": [[588, "torch-tensor-to-sparse-coo"]], "torch.Tensor.triu": [[599, "torch-tensor-triu"]], "torch.Tensor.topk": [[592, "torch-tensor-topk"]], "torch.Tensor.unflatten": [[608, "torch-tensor-unflatten"]], "torch.Tensor.swapdims": [[571, "torch-tensor-swapdims"]], "torch.Tensor.to_sparse": [[585, "torch-tensor-to-sparse"]], "torch.Tensor.to_sparse_csr": [[590, "torch-tensor-to-sparse-csr"]], "torch.Tensor.triu_": [[600, "torch-tensor-triu"]], "torch.Tensor.tanh": [[578, "torch-tensor-tanh"]], "torch.Tensor.sum": [[567, "torch-tensor-sum"]], "torch.Tensor.stride": [[562, "torch-tensor-stride"]], "torch.Tensor.to_sparse_csc": [[589, "torch-tensor-to-sparse-csc"]], "torch.Tensor.t_": [[573, "torch-tensor-t"]], "torch.Tensor.triangular_solve": [[596, "torch-tensor-triangular-solve"]], "torch.Tensor.t": [[572, "torch-tensor-t"]], "torch.Tensor.swapaxes": [[570, "torch-tensor-swapaxes"]], "torch.Tensor.sin": [[533, "torch-tensor-sin"]], "torch.Tensor.softmax": [[543, "torch-tensor-softmax"]], "torch.Tensor.stft": [[558, "torch-tensor-stft"]], "torch.Tensor.slogdet": [[541, "torch-tensor-slogdet"]], "torch.Tensor.sgn_": [[524, "torch-tensor-sgn"]], "torch.Tensor.sinc": [[535, "torch-tensor-sinc"]], "torch.Tensor.sqrt": [[550, "torch-tensor-sqrt"]], "torch.Tensor.select_scatter": [[521, "torch-tensor-select-scatter"]], "torch.Tensor.sspaddmm": [[556, "torch-tensor-sspaddmm"]], "torch.Tensor.select": [[520, "torch-tensor-select"]], "torch.Tensor.scatter_reduce_": [[519, "torch-tensor-scatter-reduce"]], "torch.Tensor.storage_offset": [[560, "torch-tensor-storage-offset"]], "torch.Tensor.sparse_resize_and_clear_": [[548, "torch-tensor-sparse-resize-and-clear"]], "torch.Tensor.share_memory_": [[526, "torch-tensor-share-memory"]], "torch.Tensor.squeeze": [[554, "torch-tensor-squeeze"]], "torch.Tensor.std": [[557, "torch-tensor-std"]], "torch.Tensor.short": [[527, "torch-tensor-short"]], "torch.Tensor.signbit": [[532, "torch-tensor-signbit"]], "torch.Tensor.scatter_add": [[516, "torch-tensor-scatter-add"]], "torch.Tensor.size": [[539, "torch-tensor-size"]], "torch.Tensor.scatter_reduce": [[518, 
"torch-tensor-scatter-reduce"]], "torch.Tensor.sinh": [[537, "torch-tensor-sinh"]], "torch.Tensor.sign_": [[531, "torch-tensor-sign"]], "torch.Tensor.smm": [[542, "torch-tensor-smm"]], "torch.Tensor.scatter_": [[515, "torch-tensor-scatter"]], "torch.Tensor.round_": [[510, "torch-tensor-round"]], "torch.Tensor.sinh_": [[538, "torch-tensor-sinh"]], "torch.Tensor.sin_": [[534, "torch-tensor-sin"]], "torch.Tensor.scatter_add_": [[517, "torch-tensor-scatter-add"]], "torch.Tensor.scatter": [[514, "torch-tensor-scatter"]], "torch.Tensor.split": [[549, "torch-tensor-split"]], "torch.Tensor.sort": [[544, "torch-tensor-sort"]], "torch.Tensor.sqrt_": [[551, "torch-tensor-sqrt"]], "torch.Tensor.storage": [[559, "torch-tensor-storage"]], "torch.Tensor.sgn": [[523, "torch-tensor-sgn"]], "torch.Tensor.square_": [[553, "torch-tensor-square"]], "torch.Tensor.square": [[552, "torch-tensor-square"]], "torch.Tensor.row_indices": [[511, "torch-tensor-row-indices"]], "torch.Tensor.rsqrt": [[512, "torch-tensor-rsqrt"]], "torch.Tensor.sinc_": [[536, "torch-tensor-sinc"]], "torch.Tensor.slice_scatter": [[540, "torch-tensor-slice-scatter"]], "torch.Tensor.sparse_mask": [[546, "torch-tensor-sparse-mask"]], "torch.Tensor.squeeze_": [[555, "torch-tensor-squeeze"]], "torch.Tensor.sigmoid_": [[529, "torch-tensor-sigmoid"]], "torch.Tensor.rsqrt_": [[513, "torch-tensor-rsqrt"]], "torch.Tensor.sign": [[530, "torch-tensor-sign"]], "torch.Tensor.sparse_resize_": [[547, "torch-tensor-sparse-resize"]], "torch.Tensor.sparse_dim": [[545, "torch-tensor-sparse-dim"]], "torch.Tensor.shape": [[525, "torch-tensor-shape"]], "torch.Tensor.sigmoid": [[528, "torch-tensor-sigmoid"]], "torch.Tensor.set_": [[522, "torch-tensor-set"]], "torch.Tensor.resolve_conj": [[503, "torch-tensor-resolve-conj"]], "torch.Tensor.pin_memory": [[465, "torch-tensor-pin-memory"]], "torch.Tensor.reciprocal_": [[487, "torch-tensor-reciprocal"]], "torch.Tensor.real": [[485, "torch-tensor-real"]], "torch.Tensor.renorm": [[493, "torch-tensor-renorm"]], "torch.Tensor.numpy": [[460, "torch-tensor-numpy"]], "torch.Tensor.q_scale": [[477, "torch-tensor-q-scale"]], "torch.Tensor.ormqr": [[462, "torch-tensor-ormqr"]], "torch.Tensor.q_zero_point": [[478, "torch-tensor-q-zero-point"]], "torch.Tensor.repeat": [[495, "torch-tensor-repeat"]], "torch.Tensor.put_": [[473, "torch-tensor-put"]], "torch.Tensor.polygamma": [[467, "torch-tensor-polygamma"]], "torch.Tensor.quantile": [[481, "torch-tensor-quantile"]], "torch.Tensor.q_per_channel_scales": [[475, "torch-tensor-q-per-channel-scales"]], "torch.Tensor.pow": [[470, "torch-tensor-pow"]], "torch.Tensor.requires_grad_": [[498, "torch-tensor-requires-grad"]], "torch.Tensor.resize_as_": [[502, "torch-tensor-resize-as"]], "torch.Tensor.retain_grad": [[505, "torch-tensor-retain-grad"]], "torch.Tensor.qr": [[479, "torch-tensor-qr"]], "torch.Tensor.reciprocal": [[486, "torch-tensor-reciprocal"]], "torch.Tensor.round": [[509, "torch-tensor-round"]], "torch.Tensor.resolve_neg": [[504, "torch-tensor-resolve-neg"]], "torch.Tensor.q_per_channel_zero_points": [[476, "torch-tensor-q-per-channel-zero-points"]], "torch.Tensor.outer": [[463, "torch-tensor-outer"]], "torch.Tensor.ravel": [[484, "torch-tensor-ravel"]], "torch.Tensor.reshape": [[499, "torch-tensor-reshape"]], "torch.Tensor.positive": [[469, "torch-tensor-positive"]], "torch.Tensor.roll": [[507, "torch-tensor-roll"]], "torch.Tensor.register_post_accumulate_grad_hook": [[490, "torch-tensor-register-post-accumulate-grad-hook"]], "torch.Tensor.requires_grad": [[497, 
"torch-tensor-requires-grad"]], "torch.Tensor.retains_grad": [[506, "torch-tensor-retains-grad"]], "torch.Tensor.record_stream": [[488, "torch-tensor-record-stream"]], "torch.Tensor.remainder": [[491, "torch-tensor-remainder"]], "torch.Tensor.rad2deg": [[482, "torch-tensor-rad2deg"]], "torch.Tensor.qscheme": [[480, "torch-tensor-qscheme"]], "torch.Tensor.pow_": [[471, "torch-tensor-pow"]], "torch.Tensor.permute": [[464, "torch-tensor-permute"]], "torch.Tensor.register_hook": [[489, "torch-tensor-register-hook"]], "torch.Tensor.repeat_interleave": [[496, "torch-tensor-repeat-interleave"]], "torch.Tensor.random_": [[483, "torch-tensor-random"]], "torch.Tensor.pinverse": [[466, "torch-tensor-pinverse"]], "torch.Tensor.polygamma_": [[468, "torch-tensor-polygamma"]], "torch.Tensor.resize_": [[501, "torch-tensor-resize"]], "torch.Tensor.orgqr": [[461, "torch-tensor-orgqr"]], "torch.Tensor.numel": [[459, "torch-tensor-numel"]], "torch.Tensor.remainder_": [[492, "torch-tensor-remainder"]], "torch.Tensor.reshape_as": [[500, "torch-tensor-reshape-as"]], "torch.Tensor.rot90": [[508, "torch-tensor-rot90"]], "torch.Tensor.renorm_": [[494, "torch-tensor-renorm"]], "torch.Tensor.prod": [[472, "torch-tensor-prod"]], "torch.Tensor.q_per_channel_axis": [[474, "torch-tensor-q-per-channel-axis"]], "torch.Tensor.movedim": [[419, "torch-tensor-movedim"]], "torch.Tensor.negative_": [[445, "torch-tensor-negative"]], "torch.Tensor.mvlgamma": [[427, "torch-tensor-mvlgamma"]], "torch.Tensor.minimum": [[414, "torch-tensor-minimum"]], "torch.Tensor.not_equal_": [[458, "torch-tensor-not-equal"]], "torch.Tensor.max": [[409, "torch-tensor-max"]], "torch.Tensor.narrow": [[435, "torch-tensor-narrow"]], "torch.Tensor.mul": [[421, "torch-tensor-mul"]], "torch.Tensor.neg": [[442, "torch-tensor-neg"]], "torch.Tensor.nansum": [[434, "torch-tensor-nansum"]], "torch.Tensor.new_zeros": [[451, "torch-tensor-new-zeros"]], "torch.Tensor.ndim": [[438, "torch-tensor-ndim"]], "torch.Tensor.multiply": [[424, "torch-tensor-multiply"]], "torch.Tensor.moveaxis": [[418, "torch-tensor-moveaxis"]], "torch.Tensor.median": [[412, "torch-tensor-median"]], "torch.Tensor.new_tensor": [[450, "torch-tensor-new-tensor"]], "torch.Tensor.nan_to_num_": [[430, "torch-tensor-nan-to-num"]], "torch.Tensor.nelement": [[446, "torch-tensor-nelement"]], "torch.Tensor.narrow_copy": [[436, "torch-tensor-narrow-copy"]], "torch.Tensor.normal_": [[456, "torch-tensor-normal"]], "torch.Tensor.mul_": [[422, "torch-tensor-mul"]], "torch.Tensor.nextafter_": [[453, "torch-tensor-nextafter"]], "torch.Tensor.maximum": [[410, "torch-tensor-maximum"]], "torch.Tensor.mvlgamma_": [[428, "torch-tensor-mvlgamma"]], "torch.Tensor.negative": [[444, "torch-tensor-negative"]], "torch.Tensor.ne": [[440, "torch-tensor-ne"]], "torch.Tensor.multiply_": [[425, "torch-tensor-multiply"]], "torch.Tensor.nanquantile": [[433, "torch-tensor-nanquantile"]], "torch.Tensor.not_equal": [[457, "torch-tensor-not-equal"]], "torch.Tensor.nanmedian": [[432, "torch-tensor-nanmedian"]], "torch.Tensor.matrix_power": [[408, "torch-tensor-matrix-power"]], "torch.Tensor.module_load": [[417, "torch-tensor-module-load"]], "torch.Tensor.multinomial": [[423, "torch-tensor-multinomial"]], "torch.Tensor.new_ones": [[449, "torch-tensor-new-ones"]], "torch.Tensor.ne_": [[441, "torch-tensor-ne"]], "torch.Tensor.mean": [[411, "torch-tensor-mean"]], "torch.Tensor.ndimension": [[439, "torch-tensor-ndimension"]], "torch.Tensor.neg_": [[443, "torch-tensor-neg"]], "torch.Tensor.new_empty": [[447, "torch-tensor-new-empty"]], 
"torch.Tensor.nextafter": [[452, "torch-tensor-nextafter"]], "torch.Tensor.nonzero": [[454, "torch-tensor-nonzero"]], "torch.Tensor.min": [[413, "torch-tensor-min"]], "torch.Tensor.nanmean": [[431, "torch-tensor-nanmean"]], "torch.Tensor.nan_to_num": [[429, "torch-tensor-nan-to-num"]], "torch.Tensor.mv": [[426, "torch-tensor-mv"]], "torch.Tensor.mode": [[416, "torch-tensor-mode"]], "torch.Tensor.new_full": [[448, "torch-tensor-new-full"]], "torch.Tensor.msort": [[420, "torch-tensor-msort"]], "torch.Tensor.nbytes": [[437, "torch-tensor-nbytes"]], "torch.Tensor.norm": [[455, "torch-tensor-norm"]], "torch.Tensor.mm": [[415, "torch-tensor-mm"]]}, "indexentries": {"gradscaler (class in torch.cuda.amp)": [[0, "torch.cuda.amp.GradScaler"]], "autocast (class in torch)": [[0, "torch.autocast"]], "autocast (class in torch.cpu.amp)": [[0, "torch.cpu.amp.autocast"]], "autocast (class in torch.cuda.amp)": [[0, "torch.cuda.amp.autocast"]], "custom_bwd() (in module torch.amp)": [[0, "torch.amp.custom_bwd"]], "custom_bwd() (in module torch.cuda.amp)": [[0, "torch.cuda.amp.custom_bwd"]], "custom_fwd() (in module torch.amp)": [[0, "torch.amp.custom_fwd"]], "custom_fwd() (in module torch.cuda.amp)": [[0, "torch.cuda.amp.custom_fwd"]], "is_autocast_available() (in module torch.amp.autocast_mode)": [[0, "torch.amp.autocast_mode.is_autocast_available"]], "module": [[0, "module-torch.amp"], [0, "module-torch.amp.autocast_mode"], [0, "module-torch.amp.grad_scaler"], [0, "module-torch.cpu.amp"], [0, "module-torch.cpu.amp.autocast_mode"], [0, "module-torch.cpu.amp.grad_scaler"], [0, "module-torch.cuda.amp"], [0, "module-torch.cuda.amp.autocast_mode"], [0, "module-torch.cuda.amp.common"], [0, "module-torch.cuda.amp.grad_scaler"], [1, "module-torch.autograd"], [1, "module-torch.autograd.anomaly_mode"], [1, "module-torch.autograd.forward_ad"], [1, "module-torch.autograd.function"], [1, "module-torch.autograd.functional"], [1, "module-torch.autograd.grad_mode"], [1, "module-torch.autograd.gradcheck"], [1, "module-torch.autograd.graph"], [1, "module-torch.autograd.profiler"], [1, "module-torch.autograd.profiler_legacy"], [1, "module-torch.autograd.profiler_util"], [1, "module-torch.autograd.variable"], [2, "module-torch.backends"], [2, "module-torch.backends.cpu"], [2, "module-torch.backends.cuda"], [2, "module-torch.backends.cudnn"], [2, "module-torch.backends.cudnn.rnn"], [2, "module-torch.backends.mha"], [2, "module-torch.backends.mkl"], [2, "module-torch.backends.mkldnn"], [2, "module-torch.backends.mps"], [2, "module-torch.backends.nnpack"], [2, "module-torch.backends.openmp"], [2, "module-torch.backends.opt_einsum"], [2, "module-torch.backends.quantized"], [2, "module-torch.backends.xeon"], [2, "module-torch.backends.xeon.run_cpu"], [2, "module-torch.backends.xnnpack"], [3, "module-torch.utils.benchmark"], [3, "module-torch.utils.benchmark.examples"], [3, "module-torch.utils.benchmark.op_fuzzers"], [3, "module-torch.utils.benchmark.utils"], [3, "module-torch.utils.benchmark.utils.valgrind_wrapper"], [4, "module-torch.utils.bottleneck"], [13, "module-torch.__config__"], [16, "module-torch.cpu"], [17, "module-torch.cuda"], [17, "module-torch.cuda.comm"], [17, "module-torch.cuda.error"], [17, "module-torch.cuda.graphs"], [17, "module-torch.cuda.jiterator"], [17, "module-torch.cuda.memory"], [17, "module-torch.cuda.nccl"], [17, "module-torch.cuda.nvtx"], [17, "module-torch.cuda.profiler"], [17, "module-torch.cuda.random"], [17, "module-torch.cuda.sparse"], [17, "module-torch.cuda.streams"], [18, 
"module-torch.cuda._sanitizer"], [19, "module-torch.cuda.tunable"], [23, "module-torch.utils.data"], [23, "module-torch.utils.data.datapipes"], [23, "module-torch.utils.data.datapipes.dataframe"], [23, "module-torch.utils.data.datapipes.iter"], [23, "module-torch.utils.data.datapipes.map"], [23, "module-torch.utils.data.datapipes.utils"], [27, "module-torch.utils.deterministic"], [28, "module-torch.distributed"], [28, "module-torch.distributed.algorithms"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.default_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.post_localSGD_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook"], [28, "module-torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks"], [28, "module-torch.distributed.algorithms.join"], [28, "module-torch.distributed.algorithms.model_averaging"], [28, "module-torch.distributed.algorithms.model_averaging.averagers"], [28, "module-torch.distributed.algorithms.model_averaging.hierarchical_model_averager"], [28, "module-torch.distributed.algorithms.model_averaging.utils"], [28, "module-torch.distributed.argparse_util"], [28, "module-torch.distributed.c10d_logger"], [28, "module-torch.distributed.checkpoint.api"], [28, "module-torch.distributed.checkpoint.default_planner"], [28, "module-torch.distributed.checkpoint.filesystem"], [28, "module-torch.distributed.checkpoint.metadata"], [28, "module-torch.distributed.checkpoint.optimizer"], [28, "module-torch.distributed.checkpoint.planner"], [28, "module-torch.distributed.checkpoint.planner_helpers"], [28, "module-torch.distributed.checkpoint.resharding"], [28, "module-torch.distributed.checkpoint.state_dict"], [28, "module-torch.distributed.checkpoint.state_dict_loader"], [28, "module-torch.distributed.checkpoint.state_dict_saver"], [28, "module-torch.distributed.checkpoint.stateful"], [28, "module-torch.distributed.checkpoint.storage"], [28, "module-torch.distributed.checkpoint.utils"], [28, "module-torch.distributed.collective_utils"], [28, "module-torch.distributed.constants"], [28, "module-torch.distributed.device_mesh"], [28, "module-torch.distributed.distributed_c10d"], [28, "module-torch.distributed.elastic"], [28, "module-torch.distributed.elastic.agent.server.api"], [28, "module-torch.distributed.elastic.agent.server.local_elastic_agent"], [28, "module-torch.distributed.elastic.events.api"], [28, "module-torch.distributed.elastic.events.handlers"], [28, "module-torch.distributed.elastic.metrics.api"], [28, "module-torch.distributed.elastic.multiprocessing.api"], [28, "module-torch.distributed.elastic.multiprocessing.errors.error_handler"], [28, "module-torch.distributed.elastic.multiprocessing.errors.handlers"], [28, "module-torch.distributed.elastic.multiprocessing.redirects"], [28, "module-torch.distributed.elastic.multiprocessing.tail_log"], [28, "module-torch.distributed.elastic.rendezvous.api"], [28, "module-torch.distributed.elastic.rendezvous.c10d_rendezvous_backend"], [28, "module-torch.distributed.elastic.rendezvous.dynamic_rendezvous"], [28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous"], [28, 
"module-torch.distributed.elastic.rendezvous.etcd_rendezvous_backend"], [28, "module-torch.distributed.elastic.rendezvous.etcd_server"], [28, "module-torch.distributed.elastic.rendezvous.etcd_store"], [28, "module-torch.distributed.elastic.rendezvous.static_tcp_rendezvous"], [28, "module-torch.distributed.elastic.rendezvous.utils"], [28, "module-torch.distributed.elastic.timer.api"], [28, "module-torch.distributed.elastic.timer.file_based_local_timer"], [28, "module-torch.distributed.elastic.timer.local_timer"], [28, "module-torch.distributed.elastic.utils"], [28, "module-torch.distributed.elastic.utils.api"], [28, "module-torch.distributed.elastic.utils.data"], [28, "module-torch.distributed.elastic.utils.data.cycling_iterator"], [28, "module-torch.distributed.elastic.utils.data.elastic_distributed_sampler"], [28, "module-torch.distributed.elastic.utils.distributed"], [28, "module-torch.distributed.elastic.utils.log_level"], [28, "module-torch.distributed.elastic.utils.logging"], [28, "module-torch.distributed.elastic.utils.store"], [28, "module-torch.distributed.fsdp.api"], [28, "module-torch.distributed.fsdp.fully_sharded_data_parallel"], [28, "module-torch.distributed.fsdp.sharded_grad_scaler"], [28, "module-torch.distributed.fsdp.wrap"], [28, "module-torch.distributed.launch"], [28, "module-torch.distributed.launcher"], [28, "module-torch.distributed.launcher.api"], [28, "module-torch.distributed.logging_handlers"], [28, "module-torch.distributed.nn"], [28, "module-torch.distributed.nn.api"], [28, "module-torch.distributed.nn.api.remote_module"], [28, "module-torch.distributed.nn.functional"], [28, "module-torch.distributed.nn.jit"], [28, "module-torch.distributed.nn.jit.instantiator"], [28, "module-torch.distributed.nn.jit.templates"], [28, "module-torch.distributed.nn.jit.templates.remote_module_template"], [28, "module-torch.distributed.optim.apply_optimizer_in_backward"], [28, "module-torch.distributed.optim.functional_adadelta"], [28, "module-torch.distributed.optim.functional_adagrad"], [28, "module-torch.distributed.optim.functional_adam"], [28, "module-torch.distributed.optim.functional_adamax"], [28, "module-torch.distributed.optim.functional_adamw"], [28, "module-torch.distributed.optim.functional_rmsprop"], [28, "module-torch.distributed.optim.functional_rprop"], [28, "module-torch.distributed.optim.functional_sgd"], [28, "module-torch.distributed.optim.named_optimizer"], [28, "module-torch.distributed.optim.optimizer"], [28, "module-torch.distributed.optim.post_localSGD_optimizer"], [28, "module-torch.distributed.optim.utils"], [28, "module-torch.distributed.optim.zero_redundancy_optimizer"], [28, "module-torch.distributed.remote_device"], [28, "module-torch.distributed.rendezvous"], [28, "module-torch.distributed.rpc.api"], [28, "module-torch.distributed.rpc.backend_registry"], [28, "module-torch.distributed.rpc.constants"], [28, "module-torch.distributed.rpc.functions"], [28, "module-torch.distributed.rpc.internal"], [28, "module-torch.distributed.rpc.options"], [28, "module-torch.distributed.rpc.rref_proxy"], [28, "module-torch.distributed.rpc.server_process_global_profiler"], [28, "module-torch.distributed.tensor"], [28, "module-torch.distributed.tensor.parallel.api"], [28, "module-torch.distributed.tensor.parallel.ddp"], [28, "module-torch.distributed.tensor.parallel.fsdp"], [28, "module-torch.distributed.tensor.parallel.input_reshard"], [28, "module-torch.distributed.tensor.parallel.loss"], [28, "module-torch.distributed.tensor.parallel.style"], [28, 
"module-torch.distributed.utils"], [30, "module-torch.distributed.checkpoint"], [30, "module-torch.distributed.checkpoint.format_utils"], [30, "module-torch.distributed.checkpoint.logger"], [30, "module-torch.distributed.checkpoint.logging_handlers"], [30, "module-torch.distributed.checkpoint.staging"], [32, "module-torch.distributed.optim"], [33, "module-torch.distributed.pipelining"], [33, "module-torch.distributed.pipelining.microbatch"], [33, "module-torch.distributed.pipelining.schedules"], [33, "module-torch.distributed.pipelining.stage"], [34, "module-torch.distributed.tensor.parallel"], [35, "module-torch.distributions"], [35, "module-torch.distributions.bernoulli"], [35, "module-torch.distributions.beta"], [35, "module-torch.distributions.binomial"], [35, "module-torch.distributions.categorical"], [35, "module-torch.distributions.cauchy"], [35, "module-torch.distributions.chi2"], [35, "module-torch.distributions.constraint_registry"], [35, "module-torch.distributions.constraints"], [35, "module-torch.distributions.continuous_bernoulli"], [35, "module-torch.distributions.dirichlet"], [35, "module-torch.distributions.distribution"], [35, "module-torch.distributions.exp_family"], [35, "module-torch.distributions.exponential"], [35, "module-torch.distributions.fishersnedecor"], [35, "module-torch.distributions.gamma"], [35, "module-torch.distributions.geometric"], [35, "module-torch.distributions.gumbel"], [35, "module-torch.distributions.half_cauchy"], [35, "module-torch.distributions.half_normal"], [35, "module-torch.distributions.independent"], [35, "module-torch.distributions.inverse_gamma"], [35, "module-torch.distributions.kl"], [35, "module-torch.distributions.kumaraswamy"], [35, "module-torch.distributions.laplace"], [35, "module-torch.distributions.lkj_cholesky"], [35, "module-torch.distributions.log_normal"], [35, "module-torch.distributions.logistic_normal"], [35, "module-torch.distributions.lowrank_multivariate_normal"], [35, "module-torch.distributions.mixture_same_family"], [35, "module-torch.distributions.multinomial"], [35, "module-torch.distributions.multivariate_normal"], [35, "module-torch.distributions.negative_binomial"], [35, "module-torch.distributions.normal"], [35, "module-torch.distributions.one_hot_categorical"], [35, "module-torch.distributions.pareto"], [35, "module-torch.distributions.poisson"], [35, "module-torch.distributions.relaxed_bernoulli"], [35, "module-torch.distributions.relaxed_categorical"], [35, "module-torch.distributions.studentT"], [35, "module-torch.distributions.transformed_distribution"], [35, "module-torch.distributions.transforms"], [35, "module-torch.distributions.uniform"], [35, "module-torch.distributions.utils"], [35, "module-torch.distributions.von_mises"], [35, "module-torch.distributions.weibull"], [35, "module-torch.distributions.wishart"], [37, "module-torch.distributed.elastic.agent"], [37, "module-torch.distributed.elastic.agent.server"], [37, "module-torch.distributed.elastic.agent.server.health_check_server"], [38, "module-torch.distributed.elastic.control_plane"], [40, "module-torch.distributed.elastic.multiprocessing.errors"], [41, "module-torch.distributed.elastic.events"], [44, "module-torch.distributed.elastic.metrics"], [45, "module-torch.distributed.elastic.multiprocessing"], [47, "module-torch.distributed.elastic.rendezvous"], [47, "module-torch.distributed.elastic.rendezvous.registry"], [48, "module-torch.distributed.run"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"], [49, 
"module-torch.distributed.elastic.multiprocessing.subprocess_handler.handlers"], [49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler"], [50, "module-torch.distributed.elastic.timer"], [50, "module-torch.distributed.elastic.timer.debug_info_logging"], [52, "module-torch.export"], [52, "module-torch.export.custom_obj"], [52, "module-torch.export.dynamic_shapes"], [52, "module-torch.export.exported_program"], [52, "module-torch.export.graph_signature"], [52, "module-torch.export.unflatten"], [54, "module-torch.fft"], [55, "module-torch.distributed.fsdp"], [57, "module-torch.func"], [62, "module-torch.__future__"], [63, "module-torch.futures"], [64, "module-torch.fx"], [64, "module-torch.fx.annotate"], [64, "module-torch.fx.config"], [64, "module-torch.fx.experimental"], [64, "module-torch.fx.experimental.accelerator_partitioner"], [64, "module-torch.fx.experimental.const_fold"], [64, "module-torch.fx.experimental.debug"], [64, "module-torch.fx.experimental.graph_gradual_typechecker"], [64, "module-torch.fx.experimental.merge_matmul"], [64, "module-torch.fx.experimental.meta_tracer"], [64, "module-torch.fx.experimental.migrate_gradual_types"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint_generator"], [64, "module-torch.fx.experimental.migrate_gradual_types.constraint_transformation"], [64, "module-torch.fx.experimental.migrate_gradual_types.operation"], [64, "module-torch.fx.experimental.migrate_gradual_types.transform_to_z3"], [64, "module-torch.fx.experimental.migrate_gradual_types.util"], [64, "module-torch.fx.experimental.migrate_gradual_types.z3_types"], [64, "module-torch.fx.experimental.normalize"], [64, "module-torch.fx.experimental.optimization"], [64, "module-torch.fx.experimental.partitioner_utils"], [64, "module-torch.fx.experimental.proxy_tensor"], [64, "module-torch.fx.experimental.recording"], [64, "module-torch.fx.experimental.refinement_types"], [64, "module-torch.fx.experimental.rewriter"], [64, "module-torch.fx.experimental.schema_type_annotation"], [64, "module-torch.fx.experimental.sym_node"], [64, "module-torch.fx.experimental.unification"], [64, "module-torch.fx.experimental.unification.core"], [64, "module-torch.fx.experimental.unification.dispatch"], [64, "module-torch.fx.experimental.unification.match"], [64, "module-torch.fx.experimental.unification.more"], [64, "module-torch.fx.experimental.unification.multipledispatch"], [64, "module-torch.fx.experimental.unification.multipledispatch.conflict"], [64, "module-torch.fx.experimental.unification.multipledispatch.core"], [64, "module-torch.fx.experimental.unification.multipledispatch.dispatcher"], [64, "module-torch.fx.experimental.unification.multipledispatch.utils"], [64, "module-torch.fx.experimental.unification.multipledispatch.variadic"], [64, "module-torch.fx.experimental.unification.unification_tools"], [64, "module-torch.fx.experimental.unification.utils"], [64, "module-torch.fx.experimental.unification.variable"], [64, "module-torch.fx.experimental.unify_refinements"], [64, "module-torch.fx.experimental.validator"], [64, "module-torch.fx.graph"], [64, "module-torch.fx.graph_module"], [64, "module-torch.fx.immutable_collections"], [64, "module-torch.fx.interpreter"], [64, "module-torch.fx.node"], [64, "module-torch.fx.operator_schemas"], [64, "module-torch.fx.passes"], [64, "module-torch.fx.passes.annotate_getitem_nodes"], [64, "module-torch.fx.passes.backends"], [64, 
"module-torch.fx.passes.backends.cudagraphs"], [64, "module-torch.fx.passes.dialect"], [64, "module-torch.fx.passes.dialect.common"], [64, "module-torch.fx.passes.dialect.common.cse_pass"], [64, "module-torch.fx.passes.fake_tensor_prop"], [64, "module-torch.fx.passes.graph_drawer"], [64, "module-torch.fx.passes.graph_manipulation"], [64, "module-torch.fx.passes.graph_transform_observer"], [64, "module-torch.fx.passes.infra"], [64, "module-torch.fx.passes.infra.partitioner"], [64, "module-torch.fx.passes.infra.pass_base"], [64, "module-torch.fx.passes.infra.pass_manager"], [64, "module-torch.fx.passes.net_min_base"], [64, "module-torch.fx.passes.operator_support"], [64, "module-torch.fx.passes.param_fetch"], [64, "module-torch.fx.passes.pass_manager"], [64, "module-torch.fx.passes.reinplace"], [64, "module-torch.fx.passes.runtime_assert"], [64, "module-torch.fx.passes.shape_prop"], [64, "module-torch.fx.passes.split_module"], [64, "module-torch.fx.passes.split_utils"], [64, "module-torch.fx.passes.splitter_base"], [64, "module-torch.fx.passes.tests"], [64, "module-torch.fx.passes.tests.test_pass_manager"], [64, "module-torch.fx.passes.tools_common"], [64, "module-torch.fx.passes.utils"], [64, "module-torch.fx.passes.utils.common"], [64, "module-torch.fx.passes.utils.fuser_utils"], [64, "module-torch.fx.passes.utils.matcher_utils"], [64, "module-torch.fx.passes.utils.matcher_with_name_node_map_utils"], [64, "module-torch.fx.passes.utils.source_matcher_utils"], [64, "module-torch.fx.proxy"], [64, "module-torch.fx.subgraph_rewriter"], [64, "module-torch.fx.tensor_type"], [64, "module-torch.fx.traceback"], [65, "module-torch.fx.experimental.symbolic_shapes"], [2012, "module-torch.hub"], [2014, "module-torch.jit"], [2014, "module-torch.jit.annotations"], [2014, "module-torch.jit.frontend"], [2014, "module-torch.jit.generate_bytecode"], [2014, "module-torch.jit.mobile"], [2014, "module-torch.jit.quantized"], [2015, "module-torch.jit.supported_ops"], [2019, "module-torch.jit.unsupported_tensor_ops"], [2020, "module-torch.utils.jit"], [2021, "module-torch.library"], [2022, "module-torch.linalg"], [2023, "module-torch._logging"], [2024, "module-torch.masked"], [2024, "module-torch.masked.maskedtensor"], [2024, "module-torch.masked.maskedtensor.binary"], [2024, "module-torch.masked.maskedtensor.core"], [2024, "module-torch.masked.maskedtensor.creation"], [2024, "module-torch.masked.maskedtensor.passthrough"], [2024, "module-torch.masked.maskedtensor.reductions"], [2024, "module-torch.masked.maskedtensor.unary"], [2028, "module-torch.utils.model_zoo"], [2029, "module-torch.utils.module_tracker"], [2030, "module-torch.monitor"], [2031, "module-torch.mps"], [2031, "module-torch.mps.event"], [2031, "module-torch.mps.profiler"], [2032, "module-torch.mtia"], [2033, "module-torch.multiprocessing"], [2033, "module-torch.multiprocessing.pool"], [2033, "module-torch.multiprocessing.queue"], [2033, "module-torch.multiprocessing.reductions"], [2033, "module-torch.multiprocessing.spawn"], [2036, "module-torch.nested"], [2037, "module-torch.nn"], [2037, "module-torch.nn.backends"], [2037, "module-torch.nn.backends.thnn"], [2037, "module-torch.nn.common_types"], [2037, "module-torch.nn.cpp"], [2037, "module-torch.nn.functional"], [2037, "module-torch.nn.grad"], [2037, "module-torch.nn.init"], [2037, "module-torch.nn.modules"], [2037, "module-torch.nn.modules.activation"], [2037, "module-torch.nn.modules.adaptive"], [2037, "module-torch.nn.modules.batchnorm"], [2037, "module-torch.nn.modules.channelshuffle"], [2037, 
"module-torch.nn.modules.container"], [2037, "module-torch.nn.modules.conv"], [2037, "module-torch.nn.modules.distance"], [2037, "module-torch.nn.modules.dropout"], [2037, "module-torch.nn.modules.flatten"], [2037, "module-torch.nn.modules.fold"], [2037, "module-torch.nn.modules.instancenorm"], [2037, "module-torch.nn.modules.lazy"], [2037, "module-torch.nn.modules.linear"], [2037, "module-torch.nn.modules.loss"], [2037, "module-torch.nn.modules.module"], [2037, "module-torch.nn.modules.normalization"], [2037, "module-torch.nn.modules.padding"], [2037, "module-torch.nn.modules.pixelshuffle"], [2037, "module-torch.nn.modules.pooling"], [2037, "module-torch.nn.modules.rnn"], [2037, "module-torch.nn.modules.sparse"], [2037, "module-torch.nn.modules.transformer"], [2037, "module-torch.nn.modules.upsampling"], [2037, "module-torch.nn.modules.utils"], [2037, "module-torch.nn.parallel"], [2037, "module-torch.nn.parallel.comm"], [2037, "module-torch.nn.parallel.distributed"], [2037, "module-torch.nn.parallel.parallel_apply"], [2037, "module-torch.nn.parallel.replicate"], [2037, "module-torch.nn.parallel.scatter_gather"], [2037, "module-torch.nn.parameter"], [2037, "module-torch.nn.utils"], [2037, "module-torch.nn.utils.clip_grad"], [2037, "module-torch.nn.utils.convert_parameters"], [2037, "module-torch.nn.utils.fusion"], [2037, "module-torch.nn.utils.init"], [2037, "module-torch.nn.utils.memory_format"], [2037, "module-torch.nn.utils.parametrizations"], [2037, "module-torch.nn.utils.parametrize"], [2037, "module-torch.nn.utils.prune"], [2037, "module-torch.nn.utils.rnn"], [2037, "module-torch.nn.utils.stateless"], [2038, "module-torch.nn.attention"], [2039, "module-torch.nn.attention.bias"], [2064, "module-torch.onnx.errors"], [2064, "module-torch.onnx.operators"], [2064, "module-torch.onnx.symbolic_caffe2"], [2064, "module-torch.onnx.symbolic_helper"], [2064, "module-torch.onnx.symbolic_opset10"], [2064, "module-torch.onnx.symbolic_opset11"], [2064, "module-torch.onnx.symbolic_opset12"], [2064, "module-torch.onnx.symbolic_opset13"], [2064, "module-torch.onnx.symbolic_opset14"], [2064, "module-torch.onnx.symbolic_opset15"], [2064, "module-torch.onnx.symbolic_opset16"], [2064, "module-torch.onnx.symbolic_opset17"], [2064, "module-torch.onnx.symbolic_opset18"], [2064, "module-torch.onnx.symbolic_opset19"], [2064, "module-torch.onnx.symbolic_opset20"], [2064, "module-torch.onnx.symbolic_opset7"], [2064, "module-torch.onnx.symbolic_opset8"], [2064, "module-torch.onnx.symbolic_opset9"], [2064, "module-torch.onnx.utils"], [2064, "module-torch.onnx.verification"], [2067, "module-torch.onnx"], [2069, "module-torch.optim"], [2069, "module-torch.optim.adadelta"], [2069, "module-torch.optim.adagrad"], [2069, "module-torch.optim.adam"], [2069, "module-torch.optim.adamax"], [2069, "module-torch.optim.adamw"], [2069, "module-torch.optim.asgd"], [2069, "module-torch.optim.lbfgs"], [2069, "module-torch.optim.lr_scheduler"], [2069, "module-torch.optim.nadam"], [2069, "module-torch.optim.optimizer"], [2069, "module-torch.optim.radam"], [2069, "module-torch.optim.rmsprop"], [2069, "module-torch.optim.rprop"], [2069, "module-torch.optim.sgd"], [2069, "module-torch.optim.sparse_adam"], [2069, "module-torch.optim.swa_utils"], [2070, "module-torch.package"], [2070, "module-torch.package.analyze"], [2070, "module-torch.package.analyze.find_first_use_of_broken_modules"], [2070, "module-torch.package.analyze.is_from_package"], [2070, "module-torch.package.analyze.trace_dependencies"], [2070, 
"module-torch.package.file_structure_representation"], [2070, "module-torch.package.find_file_dependencies"], [2070, "module-torch.package.glob_group"], [2070, "module-torch.package.importer"], [2070, "module-torch.package.package_exporter"], [2070, "module-torch.package.package_importer"], [2071, "module-torch.profiler"], [2071, "module-torch.profiler.itt"], [2071, "module-torch.profiler.profiler"], [2071, "module-torch.profiler.python_tracer"], [2072, "module-torch.ao"], [2072, "module-torch.ao.nn"], [2072, "module-torch.ao.nn.intrinsic.modules.fused"], [2072, "module-torch.ao.nn.intrinsic.qat.modules.conv_fused"], [2072, "module-torch.ao.nn.intrinsic.qat.modules.linear_fused"], [2072, "module-torch.ao.nn.intrinsic.qat.modules.linear_relu"], [2072, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu"], [2072, "module-torch.ao.nn.intrinsic.quantized.modules.bn_relu"], [2072, "module-torch.ao.nn.intrinsic.quantized.modules.conv_add"], [2072, "module-torch.ao.nn.intrinsic.quantized.modules.conv_relu"], [2072, "module-torch.ao.nn.intrinsic.quantized.modules.linear_relu"], [2072, "module-torch.ao.nn.qat.dynamic.modules.linear"], [2072, "module-torch.ao.nn.qat.modules.conv"], [2072, "module-torch.ao.nn.qat.modules.embedding_ops"], [2072, "module-torch.ao.nn.qat.modules.linear"], [2072, "module-torch.ao.nn.quantizable"], [2072, "module-torch.ao.nn.quantizable.modules"], [2072, "module-torch.ao.nn.quantizable.modules.activation"], [2072, "module-torch.ao.nn.quantizable.modules.rnn"], [2072, "module-torch.ao.nn.quantized"], [2072, "module-torch.ao.nn.quantized.dynamic.modules.conv"], [2072, "module-torch.ao.nn.quantized.dynamic.modules.linear"], [2072, "module-torch.ao.nn.quantized.dynamic.modules.rnn"], [2072, "module-torch.ao.nn.quantized.modules.activation"], [2072, "module-torch.ao.nn.quantized.modules.batchnorm"], [2072, "module-torch.ao.nn.quantized.modules.conv"], [2072, "module-torch.ao.nn.quantized.modules.dropout"], [2072, "module-torch.ao.nn.quantized.modules.embedding_ops"], [2072, "module-torch.ao.nn.quantized.modules.functional_modules"], [2072, "module-torch.ao.nn.quantized.modules.linear"], [2072, "module-torch.ao.nn.quantized.modules.normalization"], [2072, "module-torch.ao.nn.quantized.modules.rnn"], [2072, "module-torch.ao.nn.quantized.modules.utils"], [2072, "module-torch.ao.nn.quantized.reference"], [2072, "module-torch.ao.nn.quantized.reference.modules"], [2072, "module-torch.ao.nn.quantized.reference.modules.conv"], [2072, "module-torch.ao.nn.quantized.reference.modules.linear"], [2072, "module-torch.ao.nn.quantized.reference.modules.rnn"], [2072, "module-torch.ao.nn.quantized.reference.modules.sparse"], [2072, "module-torch.ao.nn.quantized.reference.modules.utils"], [2072, "module-torch.ao.nn.sparse"], [2072, "module-torch.ao.nn.sparse.quantized"], [2072, "module-torch.ao.nn.sparse.quantized.dynamic"], [2072, "module-torch.ao.nn.sparse.quantized.dynamic.linear"], [2072, "module-torch.ao.nn.sparse.quantized.linear"], [2072, "module-torch.ao.nn.sparse.quantized.utils"], [2072, "module-torch.ao.ns"], [2072, "module-torch.ao.ns.fx"], [2072, "module-torch.ao.ns.fx.graph_matcher"], [2072, "module-torch.ao.ns.fx.graph_passes"], [2072, "module-torch.ao.ns.fx.mappings"], [2072, "module-torch.ao.ns.fx.n_shadows_utils"], [2072, "module-torch.ao.ns.fx.ns_types"], [2072, "module-torch.ao.ns.fx.pattern_utils"], [2072, "module-torch.ao.ns.fx.qconfig_multi_mapping"], [2072, "module-torch.ao.ns.fx.utils"], [2072, "module-torch.ao.ns.fx.weight_utils"], [2072, 
"module-torch.ao.pruning"], [2072, "module-torch.ao.pruning.scheduler"], [2072, "module-torch.ao.pruning.scheduler.base_scheduler"], [2072, "module-torch.ao.pruning.scheduler.cubic_scheduler"], [2072, "module-torch.ao.pruning.scheduler.lambda_scheduler"], [2072, "module-torch.ao.pruning.sparsifier"], [2072, "module-torch.ao.pruning.sparsifier.base_sparsifier"], [2072, "module-torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier"], [2072, "module-torch.ao.pruning.sparsifier.utils"], [2072, "module-torch.ao.pruning.sparsifier.weight_norm_sparsifier"], [2072, "module-torch.ao.quantization"], [2072, "module-torch.ao.quantization.backend_config"], [2072, "module-torch.ao.quantization.backend_config.backend_config"], [2072, "module-torch.ao.quantization.backend_config.executorch"], [2072, "module-torch.ao.quantization.backend_config.fbgemm"], [2072, "module-torch.ao.quantization.backend_config.native"], [2072, "module-torch.ao.quantization.backend_config.observation_type"], [2072, "module-torch.ao.quantization.backend_config.onednn"], [2072, "module-torch.ao.quantization.backend_config.qnnpack"], [2072, "module-torch.ao.quantization.backend_config.tensorrt"], [2072, "module-torch.ao.quantization.backend_config.utils"], [2072, "module-torch.ao.quantization.backend_config.x86"], [2072, "module-torch.ao.quantization.fake_quantize"], [2072, "module-torch.ao.quantization.fuse_modules"], [2072, "module-torch.ao.quantization.fuser_method_mappings"], [2072, "module-torch.ao.quantization.fx"], [2072, "module-torch.ao.quantization.fx.convert"], [2072, "module-torch.ao.quantization.fx.custom_config"], [2072, "module-torch.ao.quantization.fx.fuse"], [2072, "module-torch.ao.quantization.fx.fuse_handler"], [2072, "module-torch.ao.quantization.fx.graph_module"], [2072, "module-torch.ao.quantization.fx.lower_to_fbgemm"], [2072, "module-torch.ao.quantization.fx.lower_to_qnnpack"], [2072, "module-torch.ao.quantization.fx.lstm_utils"], [2072, "module-torch.ao.quantization.fx.match_utils"], [2072, "module-torch.ao.quantization.fx.pattern_utils"], [2072, "module-torch.ao.quantization.fx.prepare"], [2072, "module-torch.ao.quantization.fx.qconfig_mapping_utils"], [2072, "module-torch.ao.quantization.fx.quantize_handler"], [2072, "module-torch.ao.quantization.fx.tracer"], [2072, "module-torch.ao.quantization.fx.utils"], [2072, "module-torch.ao.quantization.observer"], [2072, "module-torch.ao.quantization.pt2e.duplicate_dq_pass"], [2072, "module-torch.ao.quantization.pt2e.export_utils"], [2072, "module-torch.ao.quantization.pt2e.graph_utils"], [2072, "module-torch.ao.quantization.pt2e.port_metadata_pass"], [2072, "module-torch.ao.quantization.pt2e.prepare"], [2072, "module-torch.ao.quantization.pt2e.qat_utils"], [2072, "module-torch.ao.quantization.pt2e.representation.rewrite"], [2072, "module-torch.ao.quantization.pt2e.utils"], [2072, "module-torch.ao.quantization.qconfig"], [2072, "module-torch.ao.quantization.qconfig_mapping"], [2072, "module-torch.ao.quantization.quant_type"], [2072, "module-torch.ao.quantization.quantization_mappings"], [2072, "module-torch.ao.quantization.quantize_fx"], [2072, "module-torch.ao.quantization.quantize_jit"], [2072, "module-torch.ao.quantization.quantize_pt2e"], [2072, "module-torch.ao.quantization.quantizer.composable_quantizer"], [2072, "module-torch.ao.quantization.quantizer.embedding_quantizer"], [2072, "module-torch.ao.quantization.quantizer.quantizer"], [2072, "module-torch.ao.quantization.quantizer.utils"], [2072, 
"module-torch.ao.quantization.quantizer.x86_inductor_quantizer"], [2072, "module-torch.ao.quantization.quantizer.xnnpack_quantizer"], [2072, "module-torch.ao.quantization.quantizer.xnnpack_quantizer_utils"], [2072, "module-torch.ao.quantization.stubs"], [2072, "module-torch.ao.quantization.utils"], [2072, "module-torch.nn.intrinsic.modules.fused"], [2072, "module-torch.nn.intrinsic.qat.modules.conv_fused"], [2072, "module-torch.nn.intrinsic.qat.modules.linear_fused"], [2072, "module-torch.nn.intrinsic.qat.modules.linear_relu"], [2072, "module-torch.nn.intrinsic.quantized.dynamic.modules.linear_relu"], [2072, "module-torch.nn.intrinsic.quantized.modules.bn_relu"], [2072, "module-torch.nn.intrinsic.quantized.modules.conv_relu"], [2072, "module-torch.nn.intrinsic.quantized.modules.linear_relu"], [2072, "module-torch.nn.qat.dynamic.modules.linear"], [2072, "module-torch.nn.qat.modules.conv"], [2072, "module-torch.nn.qat.modules.embedding_ops"], [2072, "module-torch.nn.qat.modules.linear"], [2072, "module-torch.nn.quantizable.modules.activation"], [2072, "module-torch.nn.quantizable.modules.rnn"], [2072, "module-torch.nn.quantized.dynamic.modules.conv"], [2072, "module-torch.nn.quantized.dynamic.modules.linear"], [2072, "module-torch.nn.quantized.dynamic.modules.rnn"], [2072, "module-torch.nn.quantized.functional"], [2072, "module-torch.nn.quantized.modules.activation"], [2072, "module-torch.nn.quantized.modules.batchnorm"], [2072, "module-torch.nn.quantized.modules.conv"], [2072, "module-torch.nn.quantized.modules.dropout"], [2072, "module-torch.nn.quantized.modules.embedding_ops"], [2072, "module-torch.nn.quantized.modules.functional_modules"], [2072, "module-torch.nn.quantized.modules.linear"], [2072, "module-torch.nn.quantized.modules.normalization"], [2072, "module-torch.nn.quantized.modules.rnn"], [2072, "module-torch.nn.quantized.modules.utils"], [2072, "module-torch.quantization.fake_quantize"], [2072, "module-torch.quantization.fuse_modules"], [2072, "module-torch.quantization.fuser_method_mappings"], [2072, "module-torch.quantization.fx.convert"], [2072, "module-torch.quantization.fx.fuse"], [2072, "module-torch.quantization.fx.fusion_patterns"], [2072, "module-torch.quantization.fx.graph_module"], [2072, "module-torch.quantization.fx.match_utils"], [2072, "module-torch.quantization.fx.pattern_utils"], [2072, "module-torch.quantization.fx.prepare"], [2072, "module-torch.quantization.fx.quantization_patterns"], [2072, "module-torch.quantization.fx.quantization_types"], [2072, "module-torch.quantization.fx.utils"], [2072, "module-torch.quantization.observer"], [2072, "module-torch.quantization.qconfig"], [2072, "module-torch.quantization.quant_type"], [2072, "module-torch.quantization.quantization_mappings"], [2072, "module-torch.quantization.quantize"], [2072, "module-torch.quantization.quantize_fx"], [2072, "module-torch.quantization.quantize_jit"], [2072, "module-torch.quantization.stubs"], [2072, "module-torch.quantization.utils"], [2075, "module-torch.ao.nn.intrinsic"], [2075, "module-torch.ao.nn.intrinsic.modules"], [2075, "module-torch.ao.nn.intrinsic.qat"], [2075, "module-torch.ao.nn.intrinsic.qat.modules"], [2075, "module-torch.ao.nn.intrinsic.quantized"], [2075, "module-torch.ao.nn.intrinsic.quantized.dynamic"], [2075, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules"], [2075, "module-torch.ao.nn.intrinsic.quantized.modules"], [2075, "module-torch.ao.nn.qat"], [2075, "module-torch.ao.nn.qat.dynamic"], [2075, "module-torch.ao.nn.qat.dynamic.modules"], [2075, 
"module-torch.ao.nn.qat.modules"], [2075, "module-torch.ao.nn.quantized.dynamic"], [2075, "module-torch.ao.nn.quantized.dynamic.modules"], [2075, "module-torch.ao.nn.quantized.functional"], [2075, "module-torch.ao.nn.quantized.modules"], [2075, "module-torch.ao.quantization.pt2e"], [2075, "module-torch.ao.quantization.pt2e.generate_numeric_debug_handle"], [2075, "module-torch.ao.quantization.pt2e.representation"], [2075, "module-torch.ao.quantization.quantizer"], [2075, "module-torch.nn.intrinsic"], [2075, "module-torch.nn.intrinsic.modules"], [2075, "module-torch.nn.intrinsic.qat"], [2075, "module-torch.nn.intrinsic.qat.modules"], [2075, "module-torch.nn.intrinsic.quantized"], [2075, "module-torch.nn.intrinsic.quantized.dynamic"], [2075, "module-torch.nn.intrinsic.quantized.dynamic.modules"], [2075, "module-torch.nn.intrinsic.quantized.modules"], [2075, "module-torch.nn.qat"], [2075, "module-torch.nn.qat.dynamic"], [2075, "module-torch.nn.qat.dynamic.modules"], [2075, "module-torch.nn.qat.modules"], [2075, "module-torch.nn.quantizable"], [2075, "module-torch.nn.quantizable.modules"], [2075, "module-torch.nn.quantized"], [2075, "module-torch.nn.quantized.dynamic"], [2075, "module-torch.nn.quantized.dynamic.modules"], [2075, "module-torch.nn.quantized.modules"], [2075, "module-torch.quantization"], [2075, "module-torch.quantization.fx"], [2076, "module-torch.random"], [2077, "module-torch.distributed.autograd"], [2077, "module-torch.distributed.rpc"], [2080, "module-torch.signal"], [2080, "module-torch.signal.windows"], [2082, "module-torch.sparse"], [2083, "module-torch.special"], [2087, "module-torch.utils.tensorboard"], [2089, "module-torch.testing"], [2091, "module-torch"], [2091, "module-torch.contrib"], [2091, "module-torch.functional"], [2091, "module-torch.quasirandom"], [2091, "module-torch.return_types"], [2091, "module-torch.serialization"], [2091, "module-torch.signal.windows.windows"], [2091, "module-torch.sparse.semi_structured"], [2091, "module-torch.storage"], [2091, "module-torch.torch_version"], [2091, "module-torch.types"], [2091, "module-torch.utils.backcompat"], [2091, "module-torch.utils.hipify"], [2091, "module-torch.utils.model_dump"], [2091, "module-torch.utils.viz"], [2091, "module-torch.version"], [2092, "module-torch.ao.ns._numeric_suite"], [2093, "module-torch.ao.ns._numeric_suite_fx"], [2096, "module-torch.compiler"], [2114, "module-torch.overrides"], [2119, "module-torch.utils"], [2119, "module-torch.utils.backend_registration"], [2119, "module-torch.utils.benchmark.examples.blas_compare_setup"], [2119, "module-torch.utils.benchmark.examples.compare"], [2119, "module-torch.utils.benchmark.examples.fuzzer"], [2119, "module-torch.utils.benchmark.examples.op_benchmark"], [2119, "module-torch.utils.benchmark.examples.simple_timeit"], [2119, "module-torch.utils.benchmark.examples.spectral_ops_fuzz_test"], [2119, "module-torch.utils.benchmark.op_fuzzers.binary"], [2119, "module-torch.utils.benchmark.op_fuzzers.sparse_binary"], [2119, "module-torch.utils.benchmark.op_fuzzers.sparse_unary"], [2119, "module-torch.utils.benchmark.op_fuzzers.spectral"], [2119, "module-torch.utils.benchmark.op_fuzzers.unary"], [2119, "module-torch.utils.benchmark.utils.common"], [2119, "module-torch.utils.benchmark.utils.compare"], [2119, "module-torch.utils.benchmark.utils.compile"], [2119, "module-torch.utils.benchmark.utils.cpp_jit"], [2119, "module-torch.utils.benchmark.utils.fuzzer"], [2119, "module-torch.utils.benchmark.utils.sparse_fuzzer"], [2119, 
"module-torch.utils.benchmark.utils.timer"], [2119, "module-torch.utils.benchmark.utils.valgrind_wrapper.timer_interface"], [2119, "module-torch.utils.bundled_inputs"], [2119, "module-torch.utils.checkpoint"], [2119, "module-torch.utils.collect_env"], [2119, "module-torch.utils.cpp_backtrace"], [2119, "module-torch.utils.cpp_extension"], [2119, "module-torch.utils.data.backward_compatibility"], [2119, "module-torch.utils.data.dataloader"], [2119, "module-torch.utils.data.datapipes.dataframe.dataframe_wrapper"], [2119, "module-torch.utils.data.datapipes.dataframe.dataframes"], [2119, "module-torch.utils.data.datapipes.dataframe.datapipes"], [2119, "module-torch.utils.data.datapipes.dataframe.structures"], [2119, "module-torch.utils.data.datapipes.datapipe"], [2119, "module-torch.utils.data.datapipes.gen_pyi"], [2119, "module-torch.utils.data.datapipes.iter.callable"], [2119, "module-torch.utils.data.datapipes.iter.combinatorics"], [2119, "module-torch.utils.data.datapipes.iter.combining"], [2119, "module-torch.utils.data.datapipes.iter.filelister"], [2119, "module-torch.utils.data.datapipes.iter.fileopener"], [2119, "module-torch.utils.data.datapipes.iter.grouping"], [2119, "module-torch.utils.data.datapipes.iter.routeddecoder"], [2119, "module-torch.utils.data.datapipes.iter.selecting"], [2119, "module-torch.utils.data.datapipes.iter.sharding"], [2119, "module-torch.utils.data.datapipes.iter.streamreader"], [2119, "module-torch.utils.data.datapipes.iter.utils"], [2119, "module-torch.utils.data.datapipes.map.callable"], [2119, "module-torch.utils.data.datapipes.map.combinatorics"], [2119, "module-torch.utils.data.datapipes.map.combining"], [2119, "module-torch.utils.data.datapipes.map.grouping"], [2119, "module-torch.utils.data.datapipes.map.utils"], [2119, "module-torch.utils.data.datapipes.utils.common"], [2119, "module-torch.utils.data.datapipes.utils.decoder"], [2119, "module-torch.utils.data.datapipes.utils.snapshot"], [2119, "module-torch.utils.data.dataset"], [2119, "module-torch.utils.data.distributed"], [2119, "module-torch.utils.data.graph"], [2119, "module-torch.utils.data.graph_settings"], [2119, "module-torch.utils.data.sampler"], [2119, "module-torch.utils.dlpack"], [2119, "module-torch.utils.file_baton"], [2119, "module-torch.utils.flop_counter"], [2119, "module-torch.utils.hipify.constants"], [2119, "module-torch.utils.hipify.cuda_to_hip_mappings"], [2119, "module-torch.utils.hipify.hipify_python"], [2119, "module-torch.utils.hipify.version"], [2119, "module-torch.utils.hooks"], [2119, "module-torch.utils.jit.log_extract"], [2119, "module-torch.utils.mkldnn"], [2119, "module-torch.utils.mobile_optimizer"], [2119, "module-torch.utils.show_pickle"], [2119, "module-torch.utils.tensorboard.summary"], [2119, "module-torch.utils.tensorboard.writer"], [2119, "module-torch.utils.throughput_benchmark"], [2119, "module-torch.utils.weak"], [2120, "module-torch.xpu"], [2120, "module-torch.xpu.random"], [2120, "module-torch.xpu.streams"]], "torch.amp": [[0, "module-torch.amp"]], "torch.amp.autocast_mode": [[0, "module-torch.amp.autocast_mode"]], "torch.amp.grad_scaler": [[0, "module-torch.amp.grad_scaler"]], "torch.cpu.amp": [[0, "module-torch.cpu.amp"]], "torch.cpu.amp.autocast_mode": [[0, "module-torch.cpu.amp.autocast_mode"]], "torch.cpu.amp.grad_scaler": [[0, "module-torch.cpu.amp.grad_scaler"]], "torch.cuda.amp": [[0, "module-torch.cuda.amp"]], "torch.cuda.amp.autocast_mode": [[0, "module-torch.cuda.amp.autocast_mode"]], "torch.cuda.amp.common": [[0, 
"module-torch.cuda.amp.common"]], "torch.cuda.amp.grad_scaler": [[0, "module-torch.cuda.amp.grad_scaler"]], "function (class in torch.autograd)": [[1, "torch.autograd.Function"]], "gradientedge (class in torch.autograd.graph)": [[1, "torch.autograd.graph.GradientEdge"]], "allow_mutation_on_saved_tensors (class in torch.autograd.graph)": [[1, "torch.autograd.graph.allow_mutation_on_saved_tensors"]], "detect_anomaly (class in torch.autograd)": [[1, "torch.autograd.detect_anomaly"]], "disable_saved_tensors_hooks (class in torch.autograd.graph)": [[1, "torch.autograd.graph.disable_saved_tensors_hooks"]], "emit_itt (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.emit_itt"]], "emit_nvtx (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.emit_nvtx"]], "get_gradient_edge() (in module torch.autograd.graph)": [[1, "torch.autograd.graph.get_gradient_edge"]], "profile (class in torch.autograd.profiler)": [[1, "torch.autograd.profiler.profile"]], "register_multi_grad_hook (class in torch.autograd.graph)": [[1, "torch.autograd.graph.register_multi_grad_hook"]], "save_on_cpu (class in torch.autograd.graph)": [[1, "torch.autograd.graph.save_on_cpu"]], "saved_tensors_hooks (class in torch.autograd.graph)": [[1, "torch.autograd.graph.saved_tensors_hooks"]], "set_detect_anomaly (class in torch.autograd)": [[1, "torch.autograd.set_detect_anomaly"]], "torch.autograd": [[1, "module-torch.autograd"]], "torch.autograd.anomaly_mode": [[1, "module-torch.autograd.anomaly_mode"]], "torch.autograd.forward_ad": [[1, "module-torch.autograd.forward_ad"]], "torch.autograd.function": [[1, "module-torch.autograd.function"]], "torch.autograd.functional": [[1, "module-torch.autograd.functional"]], "torch.autograd.grad_mode": [[1, "module-torch.autograd.grad_mode"]], "torch.autograd.gradcheck": [[1, "module-torch.autograd.gradcheck"]], "torch.autograd.graph": [[1, "module-torch.autograd.graph"]], "torch.autograd.profiler": [[1, "module-torch.autograd.profiler"]], "torch.autograd.profiler_legacy": [[1, "module-torch.autograd.profiler_legacy"]], "torch.autograd.profiler_util": [[1, "module-torch.autograd.profiler_util"]], "torch.autograd.variable": [[1, "module-torch.autograd.variable"]], "sdpaparams (class in torch.backends.cuda)": [[2, "torch.backends.cuda.SDPAParams"]], "allow_bf16_reduced_precision_reduction (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_bf16_reduced_precision_reduction"]], "allow_fp16_reduced_precision_reduction (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_fp16_reduced_precision_reduction"]], "allow_tf32 (in module torch.backends.cuda.matmul)": [[2, "torch.backends.cuda.matmul.allow_tf32"]], "allow_tf32 (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.allow_tf32"]], "benchmark (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.benchmark"]], "benchmark_limit (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.benchmark_limit"]], "can_use_efficient_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_efficient_attention"]], "can_use_flash_attention() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.can_use_flash_attention"]], "clear() (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.clear"]], "cudnn_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.cudnn_sdp_enabled"]], "cufft_plan_cache (in module torch.backends.cuda)": [[2, "torch.backends.cuda.cufft_plan_cache"]], 
"deterministic (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.deterministic"]], "enable_cudnn_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_cudnn_sdp"]], "enable_flash_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_flash_sdp"]], "enable_math_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_math_sdp"]], "enable_mem_efficient_sdp() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.enable_mem_efficient_sdp"]], "enabled (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.enabled"]], "enabled (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.enabled"]], "flags() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.flags"]], "flash_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.flash_sdp_enabled"]], "get_cpu_capability() (in module torch.backends.cpu)": [[2, "torch.backends.cpu.get_cpu_capability"]], "get_fastpath_enabled() (in module torch.backends.mha)": [[2, "torch.backends.mha.get_fastpath_enabled"]], "get_opt_einsum() (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.get_opt_einsum"]], "is_available() (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.is_available"]], "is_available() (in module torch.backends.mkl)": [[2, "torch.backends.mkl.is_available"]], "is_available() (in module torch.backends.mkldnn)": [[2, "torch.backends.mkldnn.is_available"]], "is_available() (in module torch.backends.mps)": [[2, "torch.backends.mps.is_available"]], "is_available() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.is_available"]], "is_available() (in module torch.backends.openmp)": [[2, "torch.backends.openmp.is_available"]], "is_available() (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.is_available"]], "is_built() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.is_built"]], "is_built() (in module torch.backends.mps)": [[2, "torch.backends.mps.is_built"]], "math_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.math_sdp_enabled"]], "max_size (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.max_size"]], "mem_efficient_sdp_enabled() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.mem_efficient_sdp_enabled"]], "preferred_blas_library() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.preferred_blas_library"]], "preferred_linalg_library() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.preferred_linalg_library"]], "sdp_kernel() (in module torch.backends.cuda)": [[2, "torch.backends.cuda.sdp_kernel"]], "set_fastpath_enabled() (in module torch.backends.mha)": [[2, "torch.backends.mha.set_fastpath_enabled"]], "set_flags() (in module torch.backends.nnpack)": [[2, "torch.backends.nnpack.set_flags"]], "size (in module torch.backends.cuda.cufft_plan_cache)": [[2, "torch.backends.cuda.cufft_plan_cache.size"]], "strategy (in module torch.backends.opt_einsum)": [[2, "torch.backends.opt_einsum.strategy"]], "torch.backends": [[2, "module-torch.backends"]], "torch.backends.cpu": [[2, "module-torch.backends.cpu"]], "torch.backends.cuda": [[2, "module-torch.backends.cuda"]], "torch.backends.cudnn": [[2, "module-torch.backends.cudnn"]], "torch.backends.cudnn.rnn": [[2, "module-torch.backends.cudnn.rnn"]], "torch.backends.mha": [[2, "module-torch.backends.mha"]], "torch.backends.mkl": [[2, "module-torch.backends.mkl"]], "torch.backends.mkldnn": [[2, 
"module-torch.backends.mkldnn"]], "torch.backends.mps": [[2, "module-torch.backends.mps"]], "torch.backends.nnpack": [[2, "module-torch.backends.nnpack"]], "torch.backends.openmp": [[2, "module-torch.backends.openmp"]], "torch.backends.opt_einsum": [[2, "module-torch.backends.opt_einsum"]], "torch.backends.quantized": [[2, "module-torch.backends.quantized"]], "torch.backends.xeon": [[2, "module-torch.backends.xeon"]], "torch.backends.xeon.run_cpu": [[2, "module-torch.backends.xeon.run_cpu"]], "torch.backends.xnnpack": [[2, "module-torch.backends.xnnpack"]], "verbose (class in torch.backends.mkl)": [[2, "torch.backends.mkl.verbose"]], "verbose (class in torch.backends.mkldnn)": [[2, "torch.backends.mkldnn.verbose"]], "version() (in module torch.backends.cudnn)": [[2, "torch.backends.cudnn.version"]], "callgrindstats (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.CallgrindStats"]], "compare (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Compare"]], "functioncounts (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.FunctionCounts"]], "measurement (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Measurement"]], "timer (class in torch.utils.benchmark)": [[3, "torch.utils.benchmark.Timer"]], "adaptive_autorange() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.adaptive_autorange"]], "as_standardized() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.as_standardized"]], "blocked_autorange() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.blocked_autorange"]], "collect_callgrind() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.collect_callgrind"]], "colorize() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.colorize"]], "counts() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.counts"]], "delta() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.delta"]], "denoise() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.denoise"]], "extend_results() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.extend_results"]], "filter() (torch.utils.benchmark.functioncounts method)": [[3, "torch.utils.benchmark.FunctionCounts.filter"]], "highlight_warnings() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.highlight_warnings"]], "merge() (torch.utils.benchmark.measurement static method)": [[3, "torch.utils.benchmark.Measurement.merge"]], "print() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.print"]], "significant_figures (torch.utils.benchmark.measurement property)": [[3, "torch.utils.benchmark.Measurement.significant_figures"]], "stats() (torch.utils.benchmark.callgrindstats method)": [[3, "torch.utils.benchmark.CallgrindStats.stats"]], "timeit() (torch.utils.benchmark.timer method)": [[3, "torch.utils.benchmark.Timer.timeit"]], "torch.utils.benchmark": [[3, "module-torch.utils.benchmark"]], "torch.utils.benchmark.examples": [[3, "module-torch.utils.benchmark.examples"]], "torch.utils.benchmark.op_fuzzers": [[3, "module-torch.utils.benchmark.op_fuzzers"]], "torch.utils.benchmark.utils": [[3, "module-torch.utils.benchmark.utils"]], "torch.utils.benchmark.utils.valgrind_wrapper": [[3, "module-torch.utils.benchmark.utils.valgrind_wrapper"]], "transform() (torch.utils.benchmark.functioncounts method)": 
[[3, "torch.utils.benchmark.FunctionCounts.transform"]], "trim_significant_figures() (torch.utils.benchmark.compare method)": [[3, "torch.utils.benchmark.Compare.trim_significant_figures"]], "torch.utils.bottleneck": [[4, "module-torch.utils.bottleneck"]], "checkpoint() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.checkpoint"]], "checkpoint_sequential() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.checkpoint_sequential"]], "set_checkpoint_debug_enabled() (in module torch.utils.checkpoint)": [[5, "torch.utils.checkpoint.set_checkpoint_debug_enabled"]], "cond() (in module torch._higher_order_ops.cond)": [[12, "torch._higher_order_ops.cond.cond"]], "parallel_info() (in module torch.__config__)": [[13, "torch.__config__.parallel_info"]], "show() (in module torch.__config__)": [[13, "torch.__config__.show"]], "torch.__config__": [[13, "module-torch.__config__"]], "buildextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.BuildExtension"]], "cudaextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.CUDAExtension"]], "cppextension() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.CppExtension"]], "get_compiler_abi_compatibility_and_version() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.get_compiler_abi_compatibility_and_version"]], "include_paths() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.include_paths"]], "is_ninja_available() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.is_ninja_available"]], "load() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.load"]], "load_inline() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.load_inline"]], "verify_ninja_availability() (in module torch.utils.cpp_extension)": [[14, "torch.utils.cpp_extension.verify_ninja_availability"]], "torch.cpu": [[16, "module-torch.cpu"]], "torch.cuda": [[17, "module-torch.cuda"]], "torch.cuda.comm": [[17, "module-torch.cuda.comm"]], "torch.cuda.error": [[17, "module-torch.cuda.error"]], "torch.cuda.graphs": [[17, "module-torch.cuda.graphs"]], "torch.cuda.jiterator": [[17, "module-torch.cuda.jiterator"]], "torch.cuda.memory": [[17, "module-torch.cuda.memory"]], "torch.cuda.nccl": [[17, "module-torch.cuda.nccl"]], "torch.cuda.nvtx": [[17, "module-torch.cuda.nvtx"]], "torch.cuda.profiler": [[17, "module-torch.cuda.profiler"]], "torch.cuda.random": [[17, "module-torch.cuda.random"]], "torch.cuda.sparse": [[17, "module-torch.cuda.sparse"]], "torch.cuda.streams": [[17, "module-torch.cuda.streams"]], "enable_cuda_sanitizer() (in module torch.cuda._sanitizer)": [[18, "torch.cuda._sanitizer.enable_cuda_sanitizer"]], "torch.cuda._sanitizer": [[18, "module-torch.cuda._sanitizer"]], "enable() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.enable"]], "get_filename() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_filename"]], "get_max_tuning_duration() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_max_tuning_duration"]], "get_max_tuning_iterations() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_max_tuning_iterations"]], "get_results() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_results"]], "get_validators() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.get_validators"]], "is_enabled() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.is_enabled"]], "read_file() (in module 
torch.cuda.tunable)": [[19, "torch.cuda.tunable.read_file"]], "set_filename() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_filename"]], "set_max_tuning_duration() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_max_tuning_duration"]], "set_max_tuning_iterations() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.set_max_tuning_iterations"]], "torch.cuda.tunable": [[19, "module-torch.cuda.tunable"]], "tuning_enable() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.tuning_enable"]], "tuning_is_enabled() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.tuning_is_enabled"]], "write_file() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.write_file"]], "write_file_on_exit() (in module torch.cuda.tunable)": [[19, "torch.cuda.tunable.write_file_on_exit"]], "batchsampler (class in torch.utils.data)": [[23, "torch.utils.data.BatchSampler"]], "chaindataset (class in torch.utils.data)": [[23, "torch.utils.data.ChainDataset"]], "concatdataset (class in torch.utils.data)": [[23, "torch.utils.data.ConcatDataset"]], "dataloader (class in torch.utils.data)": [[23, "torch.utils.data.DataLoader"]], "dataset (class in torch.utils.data)": [[23, "torch.utils.data.Dataset"]], "distributedsampler (class in torch.utils.data.distributed)": [[23, "torch.utils.data.distributed.DistributedSampler"]], "iterabledataset (class in torch.utils.data)": [[23, "torch.utils.data.IterableDataset"]], "randomsampler (class in torch.utils.data)": [[23, "torch.utils.data.RandomSampler"]], "sampler (class in torch.utils.data)": [[23, "torch.utils.data.Sampler"]], "sequentialsampler (class in torch.utils.data)": [[23, "torch.utils.data.SequentialSampler"]], "stackdataset (class in torch.utils.data)": [[23, "torch.utils.data.StackDataset"]], "subset (class in torch.utils.data)": [[23, "torch.utils.data.Subset"]], "subsetrandomsampler (class in torch.utils.data)": [[23, "torch.utils.data.SubsetRandomSampler"]], "tensordataset (class in torch.utils.data)": [[23, "torch.utils.data.TensorDataset"]], "weightedrandomsampler (class in torch.utils.data)": [[23, "torch.utils.data.WeightedRandomSampler"]], "collate() (in module torch.utils.data._utils.collate)": [[23, "torch.utils.data._utils.collate.collate"]], "default_collate() (in module torch.utils.data)": [[23, "torch.utils.data.default_collate"]], "default_convert() (in module torch.utils.data)": [[23, "torch.utils.data.default_convert"]], "get_worker_info() (in module torch.utils.data)": [[23, "torch.utils.data.get_worker_info"]], "random_split() (in module torch.utils.data)": [[23, "torch.utils.data.random_split"]], "torch.utils.data": [[23, "module-torch.utils.data"]], "torch.utils.data.datapipes": [[23, "module-torch.utils.data.datapipes"]], "torch.utils.data.datapipes.dataframe": [[23, "module-torch.utils.data.datapipes.dataframe"]], "torch.utils.data.datapipes.iter": [[23, "module-torch.utils.data.datapipes.iter"]], "torch.utils.data.datapipes.map": [[23, "module-torch.utils.data.datapipes.map"]], "torch.utils.data.datapipes.utils": [[23, "module-torch.utils.data.datapipes.utils"]], "gradbucket (class in torch.distributed)": [[24, "torch.distributed.GradBucket"]], "powersgdstate (class in torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState"]], "__getstate__() (torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook.powersgdstate method)": [[24, 
"torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState.__getstate__"]], "__setstate__() (torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook.powersgdstate method)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.PowerSGDState.__setstate__"]], "allreduce_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.allreduce_hook"]], "batched_powersgd_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.batched_powerSGD_hook"]], "bf16_compress_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.bf16_compress_hook"]], "bf16_compress_wrapper() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.bf16_compress_wrapper"]], "buffer() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.buffer"]], "fp16_compress_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_hook"]], "fp16_compress_wrapper() (in module torch.distributed.algorithms.ddp_comm_hooks.default_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_wrapper"]], "gradients() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.gradients"]], "index() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.index"]], "is_last() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.is_last"]], "noop_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks.noop_hook"]], "parameters() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.parameters"]], "powersgd_hook() (in module torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook)": [[24, "torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook.powerSGD_hook"]], "set_buffer() (in module torch.distributed.gradbucket)": [[24, "torch.distributed.GradBucket.set_buffer"]], "fill_uninitialized_memory (in module torch.utils.deterministic)": [[27, "torch.utils.deterministic.fill_uninitialized_memory"]], "torch.utils.deterministic": [[27, "module-torch.utils.deterministic"]], "backend (class in torch.distributed)": [[28, "torch.distributed.Backend"]], "devicemesh (class in torch.distributed.device_mesh)": [[28, "torch.distributed.device_mesh.DeviceMesh"]], "distbackenderror (class in torch.distributed)": [[28, "torch.distributed.DistBackendError"]], "disterror (class in torch.distributed)": [[28, "torch.distributed.DistError"]], "distnetworkerror (class in torch.distributed)": [[28, "torch.distributed.DistNetworkError"]], "diststoreerror (class in torch.distributed)": [[28, "torch.distributed.DistStoreError"]], "filestore (class in torch.distributed)": [[28, "torch.distributed.FileStore"]], "hashstore (class in torch.distributed)": [[28, "torch.distributed.HashStore"]], "p2pop (class in torch.distributed)": [[28, "torch.distributed.P2POp"]], "prefixstore (class in torch.distributed)": [[28, "torch.distributed.PrefixStore"]], "reduceop (class in torch.distributed)": [[28, "torch.distributed.ReduceOp"]], "store (class in torch.distributed)": [[28, 
"torch.distributed.Store"]], "tcpstore (class in torch.distributed)": [[28, "torch.distributed.TCPStore"]], "work (class in torch.distributed)": [[28, "torch.distributed.Work"]], "add() (in module torch.distributed.store)": [[28, "torch.distributed.Store.add"]], "all_gather() (in module torch.distributed)": [[28, "torch.distributed.all_gather"]], "all_gather_into_tensor() (in module torch.distributed)": [[28, "torch.distributed.all_gather_into_tensor"]], "all_gather_object() (in module torch.distributed)": [[28, "torch.distributed.all_gather_object"]], "all_reduce() (in module torch.distributed)": [[28, "torch.distributed.all_reduce"]], "all_to_all() (in module torch.distributed)": [[28, "torch.distributed.all_to_all"]], "all_to_all_single() (in module torch.distributed)": [[28, "torch.distributed.all_to_all_single"]], "barrier() (in module torch.distributed)": [[28, "torch.distributed.barrier"]], "batch_isend_irecv() (in module torch.distributed)": [[28, "torch.distributed.batch_isend_irecv"]], "breakpoint() (in module torch.distributed)": [[28, "torch.distributed.breakpoint"]], "broadcast() (in module torch.distributed)": [[28, "torch.distributed.broadcast"]], "broadcast_object_list() (in module torch.distributed)": [[28, "torch.distributed.broadcast_object_list"]], "compare_set() (in module torch.distributed.store)": [[28, "torch.distributed.Store.compare_set"]], "delete_key() (in module torch.distributed.store)": [[28, "torch.distributed.Store.delete_key"]], "gather() (in module torch.distributed)": [[28, "torch.distributed.gather"]], "gather_object() (in module torch.distributed)": [[28, "torch.distributed.gather_object"]], "get() (in module torch.distributed.store)": [[28, "torch.distributed.Store.get"]], "get_backend() (in module torch.distributed)": [[28, "torch.distributed.get_backend"]], "get_global_rank() (in module torch.distributed)": [[28, "torch.distributed.get_global_rank"]], "get_group_rank() (in module torch.distributed)": [[28, "torch.distributed.get_group_rank"]], "get_process_group_ranks() (in module torch.distributed)": [[28, "torch.distributed.get_process_group_ranks"]], "get_rank() (in module torch.distributed)": [[28, "torch.distributed.get_rank"]], "get_world_size() (in module torch.distributed)": [[28, "torch.distributed.get_world_size"]], "init_device_mesh() (in module torch.distributed.device_mesh)": [[28, "torch.distributed.device_mesh.init_device_mesh"]], "init_process_group() (in module torch.distributed)": [[28, "torch.distributed.init_process_group"]], "irecv() (in module torch.distributed)": [[28, "torch.distributed.irecv"]], "is_available() (in module torch.distributed)": [[28, "torch.distributed.is_available"]], "is_gloo_available() (in module torch.distributed)": [[28, "torch.distributed.is_gloo_available"]], "is_initialized() (in module torch.distributed)": [[28, "torch.distributed.is_initialized"]], "is_mpi_available() (in module torch.distributed)": [[28, "torch.distributed.is_mpi_available"]], "is_nccl_available() (in module torch.distributed)": [[28, "torch.distributed.is_nccl_available"]], "is_torchelastic_launched() (in module torch.distributed)": [[28, "torch.distributed.is_torchelastic_launched"]], "isend() (in module torch.distributed)": [[28, "torch.distributed.isend"]], "monitored_barrier() (in module torch.distributed)": [[28, "torch.distributed.monitored_barrier"]], "new_group() (in module torch.distributed)": [[28, "torch.distributed.new_group"]], "num_keys() (in module torch.distributed.store)": [[28, 
"torch.distributed.Store.num_keys"]], "recv() (in module torch.distributed)": [[28, "torch.distributed.recv"]], "recv_object_list() (in module torch.distributed)": [[28, "torch.distributed.recv_object_list"]], "reduce() (in module torch.distributed)": [[28, "torch.distributed.reduce"]], "reduce_op (class in torch.distributed)": [[28, "torch.distributed.reduce_op"]], "reduce_scatter() (in module torch.distributed)": [[28, "torch.distributed.reduce_scatter"]], "reduce_scatter_tensor() (in module torch.distributed)": [[28, "torch.distributed.reduce_scatter_tensor"]], "register_backend() (torch.distributed.backend class method)": [[28, "torch.distributed.Backend.register_backend"]], "scatter() (in module torch.distributed)": [[28, "torch.distributed.scatter"]], "scatter_object_list() (in module torch.distributed)": [[28, "torch.distributed.scatter_object_list"]], "send() (in module torch.distributed)": [[28, "torch.distributed.send"]], "send_object_list() (in module torch.distributed)": [[28, "torch.distributed.send_object_list"]], "set() (in module torch.distributed.store)": [[28, "torch.distributed.Store.set"]], "set_timeout() (in module torch.distributed.store)": [[28, "torch.distributed.Store.set_timeout"]], "torch.distributed": [[28, "module-torch.distributed"]], "torch.distributed.algorithms": [[28, "module-torch.distributed.algorithms"]], "torch.distributed.algorithms.ddp_comm_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.ddp_zero_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.debugging_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.default_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.default_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.mixed_precision_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.optimizer_overlap_hooks"]], "torch.distributed.algorithms.ddp_comm_hooks.post_localsgd_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.post_localSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.powersgd_hook": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.powerSGD_hook"]], "torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks": [[28, "module-torch.distributed.algorithms.ddp_comm_hooks.quantization_hooks"]], "torch.distributed.algorithms.join": [[28, "module-torch.distributed.algorithms.join"]], "torch.distributed.algorithms.model_averaging": [[28, "module-torch.distributed.algorithms.model_averaging"]], "torch.distributed.algorithms.model_averaging.averagers": [[28, "module-torch.distributed.algorithms.model_averaging.averagers"]], "torch.distributed.algorithms.model_averaging.hierarchical_model_averager": [[28, "module-torch.distributed.algorithms.model_averaging.hierarchical_model_averager"]], "torch.distributed.algorithms.model_averaging.utils": [[28, "module-torch.distributed.algorithms.model_averaging.utils"]], "torch.distributed.argparse_util": [[28, "module-torch.distributed.argparse_util"]], "torch.distributed.c10d_logger": [[28, "module-torch.distributed.c10d_logger"]], "torch.distributed.checkpoint.api": [[28, "module-torch.distributed.checkpoint.api"]], "torch.distributed.checkpoint.default_planner": 
[[28, "module-torch.distributed.checkpoint.default_planner"]], "torch.distributed.checkpoint.filesystem": [[28, "module-torch.distributed.checkpoint.filesystem"]], "torch.distributed.checkpoint.metadata": [[28, "module-torch.distributed.checkpoint.metadata"]], "torch.distributed.checkpoint.optimizer": [[28, "module-torch.distributed.checkpoint.optimizer"]], "torch.distributed.checkpoint.planner": [[28, "module-torch.distributed.checkpoint.planner"]], "torch.distributed.checkpoint.planner_helpers": [[28, "module-torch.distributed.checkpoint.planner_helpers"]], "torch.distributed.checkpoint.resharding": [[28, "module-torch.distributed.checkpoint.resharding"]], "torch.distributed.checkpoint.state_dict": [[28, "module-torch.distributed.checkpoint.state_dict"]], "torch.distributed.checkpoint.state_dict_loader": [[28, "module-torch.distributed.checkpoint.state_dict_loader"]], "torch.distributed.checkpoint.state_dict_saver": [[28, "module-torch.distributed.checkpoint.state_dict_saver"]], "torch.distributed.checkpoint.stateful": [[28, "module-torch.distributed.checkpoint.stateful"]], "torch.distributed.checkpoint.storage": [[28, "module-torch.distributed.checkpoint.storage"]], "torch.distributed.checkpoint.utils": [[28, "module-torch.distributed.checkpoint.utils"]], "torch.distributed.collective_utils": [[28, "module-torch.distributed.collective_utils"]], "torch.distributed.constants": [[28, "module-torch.distributed.constants"]], "torch.distributed.device_mesh": [[28, "module-torch.distributed.device_mesh"]], "torch.distributed.distributed_c10d": [[28, "module-torch.distributed.distributed_c10d"]], "torch.distributed.elastic": [[28, "module-torch.distributed.elastic"]], "torch.distributed.elastic.agent.server.api": [[28, "module-torch.distributed.elastic.agent.server.api"]], "torch.distributed.elastic.agent.server.local_elastic_agent": [[28, "module-torch.distributed.elastic.agent.server.local_elastic_agent"]], "torch.distributed.elastic.events.api": [[28, "module-torch.distributed.elastic.events.api"]], "torch.distributed.elastic.events.handlers": [[28, "module-torch.distributed.elastic.events.handlers"]], "torch.distributed.elastic.metrics.api": [[28, "module-torch.distributed.elastic.metrics.api"]], "torch.distributed.elastic.multiprocessing.api": [[28, "module-torch.distributed.elastic.multiprocessing.api"]], "torch.distributed.elastic.multiprocessing.errors.error_handler": [[28, "module-torch.distributed.elastic.multiprocessing.errors.error_handler"]], "torch.distributed.elastic.multiprocessing.errors.handlers": [[28, "module-torch.distributed.elastic.multiprocessing.errors.handlers"]], "torch.distributed.elastic.multiprocessing.redirects": [[28, "module-torch.distributed.elastic.multiprocessing.redirects"]], "torch.distributed.elastic.multiprocessing.tail_log": [[28, "module-torch.distributed.elastic.multiprocessing.tail_log"]], "torch.distributed.elastic.rendezvous.api": [[28, "module-torch.distributed.elastic.rendezvous.api"]], "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend": [[28, "module-torch.distributed.elastic.rendezvous.c10d_rendezvous_backend"]], "torch.distributed.elastic.rendezvous.dynamic_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.dynamic_rendezvous"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous"]], "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend": [[28, "module-torch.distributed.elastic.rendezvous.etcd_rendezvous_backend"]], 
"torch.distributed.elastic.rendezvous.etcd_server": [[28, "module-torch.distributed.elastic.rendezvous.etcd_server"]], "torch.distributed.elastic.rendezvous.etcd_store": [[28, "module-torch.distributed.elastic.rendezvous.etcd_store"]], "torch.distributed.elastic.rendezvous.static_tcp_rendezvous": [[28, "module-torch.distributed.elastic.rendezvous.static_tcp_rendezvous"]], "torch.distributed.elastic.rendezvous.utils": [[28, "module-torch.distributed.elastic.rendezvous.utils"]], "torch.distributed.elastic.timer.api": [[28, "module-torch.distributed.elastic.timer.api"]], "torch.distributed.elastic.timer.file_based_local_timer": [[28, "module-torch.distributed.elastic.timer.file_based_local_timer"]], "torch.distributed.elastic.timer.local_timer": [[28, "module-torch.distributed.elastic.timer.local_timer"]], "torch.distributed.elastic.utils": [[28, "module-torch.distributed.elastic.utils"]], "torch.distributed.elastic.utils.api": [[28, "module-torch.distributed.elastic.utils.api"]], "torch.distributed.elastic.utils.data": [[28, "module-torch.distributed.elastic.utils.data"]], "torch.distributed.elastic.utils.data.cycling_iterator": [[28, "module-torch.distributed.elastic.utils.data.cycling_iterator"]], "torch.distributed.elastic.utils.data.elastic_distributed_sampler": [[28, "module-torch.distributed.elastic.utils.data.elastic_distributed_sampler"]], "torch.distributed.elastic.utils.distributed": [[28, "module-torch.distributed.elastic.utils.distributed"]], "torch.distributed.elastic.utils.log_level": [[28, "module-torch.distributed.elastic.utils.log_level"]], "torch.distributed.elastic.utils.logging": [[28, "module-torch.distributed.elastic.utils.logging"]], "torch.distributed.elastic.utils.store": [[28, "module-torch.distributed.elastic.utils.store"]], "torch.distributed.fsdp.api": [[28, "module-torch.distributed.fsdp.api"]], "torch.distributed.fsdp.fully_sharded_data_parallel": [[28, "module-torch.distributed.fsdp.fully_sharded_data_parallel"]], "torch.distributed.fsdp.sharded_grad_scaler": [[28, "module-torch.distributed.fsdp.sharded_grad_scaler"]], "torch.distributed.fsdp.wrap": [[28, "module-torch.distributed.fsdp.wrap"]], "torch.distributed.launch": [[28, "module-torch.distributed.launch"]], "torch.distributed.launcher": [[28, "module-torch.distributed.launcher"]], "torch.distributed.launcher.api": [[28, "module-torch.distributed.launcher.api"]], "torch.distributed.logging_handlers": [[28, "module-torch.distributed.logging_handlers"]], "torch.distributed.nn": [[28, "module-torch.distributed.nn"]], "torch.distributed.nn.api": [[28, "module-torch.distributed.nn.api"]], "torch.distributed.nn.api.remote_module": [[28, "module-torch.distributed.nn.api.remote_module"]], "torch.distributed.nn.functional": [[28, "module-torch.distributed.nn.functional"]], "torch.distributed.nn.jit": [[28, "module-torch.distributed.nn.jit"]], "torch.distributed.nn.jit.instantiator": [[28, "module-torch.distributed.nn.jit.instantiator"]], "torch.distributed.nn.jit.templates": [[28, "module-torch.distributed.nn.jit.templates"]], "torch.distributed.nn.jit.templates.remote_module_template": [[28, "module-torch.distributed.nn.jit.templates.remote_module_template"]], "torch.distributed.optim.apply_optimizer_in_backward": [[28, "module-torch.distributed.optim.apply_optimizer_in_backward"]], "torch.distributed.optim.functional_adadelta": [[28, "module-torch.distributed.optim.functional_adadelta"]], "torch.distributed.optim.functional_adagrad": [[28, "module-torch.distributed.optim.functional_adagrad"]], 
"torch.distributed.optim.functional_adam": [[28, "module-torch.distributed.optim.functional_adam"]], "torch.distributed.optim.functional_adamax": [[28, "module-torch.distributed.optim.functional_adamax"]], "torch.distributed.optim.functional_adamw": [[28, "module-torch.distributed.optim.functional_adamw"]], "torch.distributed.optim.functional_rmsprop": [[28, "module-torch.distributed.optim.functional_rmsprop"]], "torch.distributed.optim.functional_rprop": [[28, "module-torch.distributed.optim.functional_rprop"]], "torch.distributed.optim.functional_sgd": [[28, "module-torch.distributed.optim.functional_sgd"]], "torch.distributed.optim.named_optimizer": [[28, "module-torch.distributed.optim.named_optimizer"]], "torch.distributed.optim.optimizer": [[28, "module-torch.distributed.optim.optimizer"]], "torch.distributed.optim.post_localsgd_optimizer": [[28, "module-torch.distributed.optim.post_localSGD_optimizer"]], "torch.distributed.optim.utils": [[28, "module-torch.distributed.optim.utils"]], "torch.distributed.optim.zero_redundancy_optimizer": [[28, "module-torch.distributed.optim.zero_redundancy_optimizer"]], "torch.distributed.remote_device": [[28, "module-torch.distributed.remote_device"]], "torch.distributed.rendezvous": [[28, "module-torch.distributed.rendezvous"]], "torch.distributed.rpc.api": [[28, "module-torch.distributed.rpc.api"]], "torch.distributed.rpc.backend_registry": [[28, "module-torch.distributed.rpc.backend_registry"]], "torch.distributed.rpc.constants": [[28, "module-torch.distributed.rpc.constants"]], "torch.distributed.rpc.functions": [[28, "module-torch.distributed.rpc.functions"]], "torch.distributed.rpc.internal": [[28, "module-torch.distributed.rpc.internal"]], "torch.distributed.rpc.options": [[28, "module-torch.distributed.rpc.options"]], "torch.distributed.rpc.rref_proxy": [[28, "module-torch.distributed.rpc.rref_proxy"]], "torch.distributed.rpc.server_process_global_profiler": [[28, "module-torch.distributed.rpc.server_process_global_profiler"]], "torch.distributed.tensor": [[28, "module-torch.distributed.tensor"]], "torch.distributed.tensor.parallel.api": [[28, "module-torch.distributed.tensor.parallel.api"]], "torch.distributed.tensor.parallel.ddp": [[28, "module-torch.distributed.tensor.parallel.ddp"]], "torch.distributed.tensor.parallel.fsdp": [[28, "module-torch.distributed.tensor.parallel.fsdp"]], "torch.distributed.tensor.parallel.input_reshard": [[28, "module-torch.distributed.tensor.parallel.input_reshard"]], "torch.distributed.tensor.parallel.loss": [[28, "module-torch.distributed.tensor.parallel.loss"]], "torch.distributed.tensor.parallel.style": [[28, "module-torch.distributed.tensor.parallel.style"]], "torch.distributed.utils": [[28, "module-torch.distributed.utils"]], "wait() (in module torch.distributed.store)": [[28, "torch.distributed.Store.wait"]], "join (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.Join"]], "joinhook (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.JoinHook"]], "joinable (class in torch.distributed.algorithms)": [[29, "torch.distributed.algorithms.Joinable"]], "join_device (torch.distributed.algorithms.joinable property)": [[29, "torch.distributed.algorithms.Joinable.join_device"]], "join_hook() (torch.distributed.algorithms.joinable method)": [[29, "torch.distributed.algorithms.Joinable.join_hook"]], "join_process_group (torch.distributed.algorithms.joinable property)": [[29, "torch.distributed.algorithms.Joinable.join_process_group"]], "main_hook() 
(torch.distributed.algorithms.joinhook method)": [[29, "torch.distributed.algorithms.JoinHook.main_hook"]], "notify_join_context() (torch.distributed.algorithms.join static method)": [[29, "torch.distributed.algorithms.Join.notify_join_context"]], "post_hook() (torch.distributed.algorithms.joinhook method)": [[29, "torch.distributed.algorithms.JoinHook.post_hook"]], "asyncstager (class in torch.distributed.checkpoint.staging)": [[30, "torch.distributed.checkpoint.staging.AsyncStager"]], "blockingasyncstager (class in torch.distributed.checkpoint.staging)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager"]], "broadcastingtorchsavereader (class in torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader"]], "defaultloadplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner"]], "defaultsaveplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner"]], "dynamicmetaloadplanner (class in torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner"]], "filesystemreader (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.FileSystemReader"]], "filesystemwriter (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.FileSystemWriter"]], "loadplan (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.LoadPlan"]], "loadplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.LoadPlanner"]], "readitem (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.ReadItem"]], "saveplan (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.SavePlan"]], "saveplanner (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.SavePlanner"]], "statedictoptions (class in torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.StateDictOptions"]], "stateful (class in torch.distributed.checkpoint.stateful)": [[30, "torch.distributed.checkpoint.stateful.Stateful"]], "storagereader (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.StorageReader"]], "storagewriter (class in torch.distributed.checkpoint)": [[30, "torch.distributed.checkpoint.StorageWriter"]], "writeitem (class in torch.distributed.checkpoint.planner)": [[30, "torch.distributed.checkpoint.planner.WriteItem"]], "async_save() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.async_save"]], "checkpoint_id (torch.distributed.checkpoint.filesystemreader property)": [[30, "torch.distributed.checkpoint.FileSystemReader.checkpoint_id"]], "commit_tensor() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.commit_tensor"]], "create_global_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.create_global_plan"]], "create_global_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.create_global_plan"]], "create_local_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.create_local_plan"]], "create_local_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.create_local_plan"]], 
"dcp_to_torch_save() (in module torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.dcp_to_torch_save"]], "finish() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.finish"]], "finish_plan() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.finish_plan"]], "finish_plan() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.finish_plan"]], "get_model_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_model_state_dict"]], "get_optimizer_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_optimizer_state_dict"]], "get_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.get_state_dict"]], "load() (in module torch.distributed.checkpoint.state_dict_loader)": [[30, "torch.distributed.checkpoint.state_dict_loader.load"]], "load_bytes() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.load_bytes"]], "load_state_dict() (in module torch.distributed.checkpoint.state_dict_loader)": [[30, "torch.distributed.checkpoint.state_dict_loader.load_state_dict"]], "load_state_dict() (torch.distributed.checkpoint.stateful.stateful method)": [[30, "torch.distributed.checkpoint.stateful.Stateful.load_state_dict"]], "lookup_object() (torch.distributed.checkpoint.defaultsaveplanner method)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner.lookup_object"]], "lookup_tensor() (torch.distributed.checkpoint.defaultloadplanner method)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner.lookup_tensor"]], "prepare_global_plan() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.prepare_global_plan"]], "prepare_global_plan() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.prepare_global_plan"]], "prepare_global_plan() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.prepare_global_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.prepare_local_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.prepare_local_plan"]], "prepare_local_plan() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.prepare_local_plan"]], "read_data() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.read_data"]], "read_data() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.read_data"]], "read_metadata() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.read_metadata"]], "read_metadata() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.read_metadata"]], "reset() (torch.distributed.checkpoint.storagereader 
method)": [[30, "torch.distributed.checkpoint.StorageReader.reset"]], "reset() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.reset"]], "reset() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.reset"]], "resolve_bytes() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.resolve_bytes"]], "resolve_data() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.resolve_data"]], "resolve_tensor() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.resolve_tensor"]], "save() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.save"]], "save_state_dict() (in module torch.distributed.checkpoint.state_dict_saver)": [[30, "torch.distributed.checkpoint.state_dict_saver.save_state_dict"]], "set_model_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_model_state_dict"]], "set_optimizer_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_optimizer_state_dict"]], "set_state_dict() (in module torch.distributed.checkpoint.state_dict)": [[30, "torch.distributed.checkpoint.state_dict.set_state_dict"]], "set_up_planner() (torch.distributed.checkpoint.loadplanner method)": [[30, "torch.distributed.checkpoint.LoadPlanner.set_up_planner"]], "set_up_planner() (torch.distributed.checkpoint.saveplanner method)": [[30, "torch.distributed.checkpoint.SavePlanner.set_up_planner"]], "set_up_planner() (torch.distributed.checkpoint.format_utils.dynamicmetaloadplanner method)": [[30, "torch.distributed.checkpoint.format_utils.DynamicMetaLoadPlanner.set_up_planner"]], "set_up_storage_reader() (torch.distributed.checkpoint.storagereader method)": [[30, "torch.distributed.checkpoint.StorageReader.set_up_storage_reader"]], "set_up_storage_reader() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.set_up_storage_reader"]], "set_up_storage_writer() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.set_up_storage_writer"]], "should_synchronize_after_execute (torch.distributed.checkpoint.staging.asyncstager property)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.should_synchronize_after_execute"]], "stage() (torch.distributed.checkpoint.filesystemwriter method)": [[30, "torch.distributed.checkpoint.FileSystemWriter.stage"]], "stage() (torch.distributed.checkpoint.staging.asyncstager method)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.stage"]], "stage() (torch.distributed.checkpoint.staging.blockingasyncstager method)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager.stage"]], "state_dict() (torch.distributed.checkpoint.stateful.stateful method)": [[30, "torch.distributed.checkpoint.stateful.Stateful.state_dict"]], "storage_meta() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.storage_meta"]], "synchronize_staging() (torch.distributed.checkpoint.staging.asyncstager method)": [[30, "torch.distributed.checkpoint.staging.AsyncStager.synchronize_staging"]], "synchronize_staging() 
(torch.distributed.checkpoint.staging.blockingasyncstager method)": [[30, "torch.distributed.checkpoint.staging.BlockingAsyncStager.synchronize_staging"]], "tensor_storage_size() (torch.distributed.checkpoint.planner.writeitem method)": [[30, "torch.distributed.checkpoint.planner.WriteItem.tensor_storage_size"]], "torch.distributed.checkpoint": [[30, "module-torch.distributed.checkpoint"]], "torch.distributed.checkpoint.format_utils": [[30, "module-torch.distributed.checkpoint.format_utils"]], "torch.distributed.checkpoint.logger": [[30, "module-torch.distributed.checkpoint.logger"]], "torch.distributed.checkpoint.logging_handlers": [[30, "module-torch.distributed.checkpoint.logging_handlers"]], "torch.distributed.checkpoint.staging": [[30, "module-torch.distributed.checkpoint.staging"]], "torch_save_to_dcp() (in module torch.distributed.checkpoint.format_utils)": [[30, "torch.distributed.checkpoint.format_utils.torch_save_to_dcp"]], "transform_object() (torch.distributed.checkpoint.defaultsaveplanner method)": [[30, "torch.distributed.checkpoint.DefaultSavePlanner.transform_object"]], "transform_tensor() (torch.distributed.checkpoint.defaultloadplanner method)": [[30, "torch.distributed.checkpoint.DefaultLoadPlanner.transform_tensor"]], "validate_checkpoint_id() (torch.distributed.checkpoint.storagereader class method)": [[30, "torch.distributed.checkpoint.StorageReader.validate_checkpoint_id"]], "validate_checkpoint_id() (torch.distributed.checkpoint.storagewriter class method)": [[30, "torch.distributed.checkpoint.StorageWriter.validate_checkpoint_id"]], "validate_checkpoint_id() (torch.distributed.checkpoint.format_utils.broadcastingtorchsavereader class method)": [[30, "torch.distributed.checkpoint.format_utils.BroadcastingTorchSaveReader.validate_checkpoint_id"]], "write_data() (torch.distributed.checkpoint.storagewriter method)": [[30, "torch.distributed.checkpoint.StorageWriter.write_data"]], "distributedoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.DistributedOptimizer"]], "postlocalsgdoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer"]], "zeroredundancyoptimizer (class in torch.distributed.optim)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer"]], "add_param_group() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.add_param_group"]], "consolidate_state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.consolidate_state_dict"]], "join_device (torch.distributed.optim.zeroredundancyoptimizer property)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_device"]], "join_hook() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_hook"]], "join_process_group (torch.distributed.optim.zeroredundancyoptimizer property)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.join_process_group"]], "load_state_dict() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.load_state_dict"]], "load_state_dict() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.load_state_dict"]], "state_dict() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.state_dict"]], "state_dict() 
(torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.state_dict"]], "step() (torch.distributed.optim.distributedoptimizer method)": [[32, "torch.distributed.optim.DistributedOptimizer.step"]], "step() (torch.distributed.optim.postlocalsgdoptimizer method)": [[32, "torch.distributed.optim.PostLocalSGDOptimizer.step"]], "step() (torch.distributed.optim.zeroredundancyoptimizer method)": [[32, "torch.distributed.optim.ZeroRedundancyOptimizer.step"]], "torch.distributed.optim": [[32, "module-torch.distributed.optim"]], "pipe (class in torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.Pipe"]], "pipelineschedulemulti (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleMulti"]], "pipelineschedulesingle (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleSingle"]], "pipelinestage (class in torch.distributed.pipelining.stage)": [[33, "torch.distributed.pipelining.stage.PipelineStage"]], "schedule1f1b (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.Schedule1F1B"]], "schedulegpipe (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleGPipe"]], "scheduleinterleaved1f1b (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleInterleaved1F1B"]], "scheduleloopedbfs (class in torch.distributed.pipelining.schedules)": [[33, "torch.distributed.pipelining.schedules.ScheduleLoopedBFS"]], "splitpoint (class in torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.SplitPoint"]], "tensorchunkspec (class in torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.TensorChunkSpec"]], "build_stage() (in module torch.distributed.pipelining.stage)": [[33, "torch.distributed.pipelining.stage.build_stage"]], "merge_chunks() (in module torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.merge_chunks"]], "pipe_split() (in module torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.pipe_split"]], "pipeline() (in module torch.distributed.pipelining)": [[33, "torch.distributed.pipelining.pipeline"]], "split_args_kwargs_into_chunks() (in module torch.distributed.pipelining.microbatch)": [[33, "torch.distributed.pipelining.microbatch.split_args_kwargs_into_chunks"]], "step() (torch.distributed.pipelining.schedules.pipelineschedulemulti method)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleMulti.step"]], "step() (torch.distributed.pipelining.schedules.pipelineschedulesingle method)": [[33, "torch.distributed.pipelining.schedules.PipelineScheduleSingle.step"]], "torch.distributed.pipelining": [[33, "module-torch.distributed.pipelining"]], "torch.distributed.pipelining.microbatch": [[33, "module-torch.distributed.pipelining.microbatch"]], "torch.distributed.pipelining.schedules": [[33, "module-torch.distributed.pipelining.schedules"]], "torch.distributed.pipelining.stage": [[33, "module-torch.distributed.pipelining.stage"]], "colwiseparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.ColwiseParallel"]], "preparemoduleinput (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.PrepareModuleInput"]], "preparemoduleoutput (class in torch.distributed.tensor.parallel)": [[34, 
"torch.distributed.tensor.parallel.PrepareModuleOutput"]], "rowwiseparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.RowwiseParallel"]], "sequenceparallel (class in torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.SequenceParallel"]], "loss_parallel() (in module torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.loss_parallel"]], "parallelize_module() (in module torch.distributed.tensor.parallel)": [[34, "torch.distributed.tensor.parallel.parallelize_module"]], "torch.distributed.tensor.parallel": [[34, "module-torch.distributed.tensor.parallel"]], "abstransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.AbsTransform"]], "affinetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.AffineTransform"]], "bernoulli (class in torch.distributions.bernoulli)": [[35, "torch.distributions.bernoulli.Bernoulli"]], "beta (class in torch.distributions.beta)": [[35, "torch.distributions.beta.Beta"]], "binomial (class in torch.distributions.binomial)": [[35, "torch.distributions.binomial.Binomial"]], "cattransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CatTransform"]], "categorical (class in torch.distributions.categorical)": [[35, "torch.distributions.categorical.Categorical"]], "cauchy (class in torch.distributions.cauchy)": [[35, "torch.distributions.cauchy.Cauchy"]], "chi2 (class in torch.distributions.chi2)": [[35, "torch.distributions.chi2.Chi2"]], "composetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ComposeTransform"]], "constraint (class in torch.distributions.constraints)": [[35, "torch.distributions.constraints.Constraint"]], "constraintregistry (class in torch.distributions.constraint_registry)": [[35, "torch.distributions.constraint_registry.ConstraintRegistry"]], "continuousbernoulli (class in torch.distributions.continuous_bernoulli)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli"]], "corrcholeskytransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CorrCholeskyTransform"]], "cumulativedistributiontransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.CumulativeDistributionTransform"]], "dirichlet (class in torch.distributions.dirichlet)": [[35, "torch.distributions.dirichlet.Dirichlet"]], "distribution (class in torch.distributions.distribution)": [[35, "torch.distributions.distribution.Distribution"]], "exptransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ExpTransform"]], "exponential (class in torch.distributions.exponential)": [[35, "torch.distributions.exponential.Exponential"]], "exponentialfamily (class in torch.distributions.exp_family)": [[35, "torch.distributions.exp_family.ExponentialFamily"]], "fishersnedecor (class in torch.distributions.fishersnedecor)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor"]], "gamma (class in torch.distributions.gamma)": [[35, "torch.distributions.gamma.Gamma"]], "geometric (class in torch.distributions.geometric)": [[35, "torch.distributions.geometric.Geometric"]], "gumbel (class in torch.distributions.gumbel)": [[35, "torch.distributions.gumbel.Gumbel"]], "halfcauchy (class in torch.distributions.half_cauchy)": [[35, "torch.distributions.half_cauchy.HalfCauchy"]], "halfnormal (class in torch.distributions.half_normal)": [[35, 
"torch.distributions.half_normal.HalfNormal"]], "independent (class in torch.distributions.independent)": [[35, "torch.distributions.independent.Independent"]], "independenttransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.IndependentTransform"]], "inversegamma (class in torch.distributions.inverse_gamma)": [[35, "torch.distributions.inverse_gamma.InverseGamma"]], "kumaraswamy (class in torch.distributions.kumaraswamy)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy"]], "lkjcholesky (class in torch.distributions.lkj_cholesky)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky"]], "laplace (class in torch.distributions.laplace)": [[35, "torch.distributions.laplace.Laplace"]], "lognormal (class in torch.distributions.log_normal)": [[35, "torch.distributions.log_normal.LogNormal"]], "logitrelaxedbernoulli (class in torch.distributions.relaxed_bernoulli)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli"]], "lowrankmultivariatenormal (class in torch.distributions.lowrank_multivariate_normal)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal"]], "lowercholeskytransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.LowerCholeskyTransform"]], "mixturesamefamily (class in torch.distributions.mixture_same_family)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily"]], "multinomial (class in torch.distributions.multinomial)": [[35, "torch.distributions.multinomial.Multinomial"]], "multivariatenormal (class in torch.distributions.multivariate_normal)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal"]], "negativebinomial (class in torch.distributions.negative_binomial)": [[35, "torch.distributions.negative_binomial.NegativeBinomial"]], "normal (class in torch.distributions.normal)": [[35, "torch.distributions.normal.Normal"]], "onehotcategorical (class in torch.distributions.one_hot_categorical)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical"]], "pareto (class in torch.distributions.pareto)": [[35, "torch.distributions.pareto.Pareto"]], "poisson (class in torch.distributions.poisson)": [[35, "torch.distributions.poisson.Poisson"]], "positivedefinitetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.PositiveDefiniteTransform"]], "powertransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.PowerTransform"]], "relaxedbernoulli (class in torch.distributions.relaxed_bernoulli)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli"]], "relaxedonehotcategorical (class in torch.distributions.relaxed_categorical)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical"]], "reshapetransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.ReshapeTransform"]], "sigmoidtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SigmoidTransform"]], "softmaxtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SoftmaxTransform"]], "softplustransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.SoftplusTransform"]], "stacktransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.StackTransform"]], "stickbreakingtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.StickBreakingTransform"]], "studentt (class in 
torch.distributions.studentt)": [[35, "torch.distributions.studentT.StudentT"]], "tanhtransform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.TanhTransform"]], "transform (class in torch.distributions.transforms)": [[35, "torch.distributions.transforms.Transform"]], "transformeddistribution (class in torch.distributions.transformed_distribution)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution"]], "uniform (class in torch.distributions.uniform)": [[35, "torch.distributions.uniform.Uniform"]], "vonmises (class in torch.distributions.von_mises)": [[35, "torch.distributions.von_mises.VonMises"]], "weibull (class in torch.distributions.weibull)": [[35, "torch.distributions.weibull.Weibull"]], "wishart (class in torch.distributions.wishart)": [[35, "torch.distributions.wishart.Wishart"]], "arg_constraints (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.arg_constraints"]], "arg_constraints (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.arg_constraints"]], "arg_constraints (torch.distributions.binomial.binomial attribute)": [[35, "torch.distributions.binomial.Binomial.arg_constraints"]], "arg_constraints (torch.distributions.categorical.categorical attribute)": [[35, "torch.distributions.categorical.Categorical.arg_constraints"]], "arg_constraints (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.arg_constraints"]], "arg_constraints (torch.distributions.chi2.chi2 attribute)": [[35, "torch.distributions.chi2.Chi2.arg_constraints"]], "arg_constraints (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.arg_constraints"]], "arg_constraints (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.arg_constraints"]], "arg_constraints (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.arg_constraints"]], "arg_constraints (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.arg_constraints"]], "arg_constraints (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.arg_constraints"]], "arg_constraints (torch.distributions.geometric.geometric attribute)": [[35, "torch.distributions.geometric.Geometric.arg_constraints"]], "arg_constraints (torch.distributions.gumbel.gumbel attribute)": [[35, "torch.distributions.gumbel.Gumbel.arg_constraints"]], "arg_constraints (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.arg_constraints"]], "arg_constraints (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.arg_constraints"]], "arg_constraints (torch.distributions.independent.independent attribute)": [[35, "torch.distributions.independent.Independent.arg_constraints"]], "arg_constraints (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.arg_constraints"]], "arg_constraints (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.arg_constraints"]], "arg_constraints 
(torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.arg_constraints"]], "arg_constraints (torch.distributions.lkj_cholesky.lkjcholesky attribute)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.arg_constraints"]], "arg_constraints (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.arg_constraints"]], "arg_constraints (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.arg_constraints"]], "arg_constraints (torch.distributions.mixture_same_family.mixturesamefamily attribute)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.arg_constraints"]], "arg_constraints (torch.distributions.multinomial.multinomial attribute)": [[35, "torch.distributions.multinomial.Multinomial.arg_constraints"]], "arg_constraints (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.arg_constraints"]], "arg_constraints (torch.distributions.negative_binomial.negativebinomial attribute)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.arg_constraints"]], "arg_constraints (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.arg_constraints"]], "arg_constraints (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.arg_constraints"]], "arg_constraints (torch.distributions.pareto.pareto attribute)": [[35, "torch.distributions.pareto.Pareto.arg_constraints"]], "arg_constraints (torch.distributions.poisson.poisson attribute)": [[35, "torch.distributions.poisson.Poisson.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.arg_constraints"]], "arg_constraints (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.arg_constraints"]], "arg_constraints (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.arg_constraints"]], "arg_constraints (torch.distributions.transformed_distribution.transformeddistribution attribute)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.arg_constraints"]], "arg_constraints (torch.distributions.uniform.uniform attribute)": [[35, "torch.distributions.uniform.Uniform.arg_constraints"]], "arg_constraints (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.arg_constraints"]], "arg_constraints (torch.distributions.weibull.weibull attribute)": [[35, "torch.distributions.weibull.Weibull.arg_constraints"]], "arg_constraints (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.arg_constraints"]], "batch_shape (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.batch_shape"]], "cat (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.cat"]], "cdf() (torch.distributions.cauchy.cauchy method)": [[35, 
"torch.distributions.cauchy.Cauchy.cdf"]], "cdf() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.cdf"]], "cdf() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.cdf"]], "cdf() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.cdf"]], "cdf() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.cdf"]], "cdf() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.cdf"]], "cdf() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.cdf"]], "cdf() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.cdf"]], "cdf() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.cdf"]], "cdf() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.cdf"]], "cdf() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.cdf"]], "cdf() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.cdf"]], "check() (torch.distributions.constraints.constraint method)": [[35, "torch.distributions.constraints.Constraint.check"]], "component_distribution (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.component_distribution"]], "concentration (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.concentration"]], "concentration0 (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.concentration0"]], "concentration1 (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.concentration1"]], "covariance_matrix (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.covariance_matrix"]], "covariance_matrix (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.covariance_matrix"]], "covariance_matrix (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.covariance_matrix"]], "dependent_property (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.dependent_property"]], "df (torch.distributions.chi2.chi2 property)": [[35, "torch.distributions.chi2.Chi2.df"]], "entropy() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.entropy"]], "entropy() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.entropy"]], "entropy() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.entropy"]], "entropy() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.entropy"]], "entropy() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.entropy"]], "entropy() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, 
"torch.distributions.continuous_bernoulli.ContinuousBernoulli.entropy"]], "entropy() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.entropy"]], "entropy() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.entropy"]], "entropy() (torch.distributions.exp_family.exponentialfamily method)": [[35, "torch.distributions.exp_family.ExponentialFamily.entropy"]], "entropy() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.entropy"]], "entropy() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.entropy"]], "entropy() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.entropy"]], "entropy() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.entropy"]], "entropy() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.entropy"]], "entropy() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.entropy"]], "entropy() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.entropy"]], "entropy() (torch.distributions.inverse_gamma.inversegamma method)": [[35, "torch.distributions.inverse_gamma.InverseGamma.entropy"]], "entropy() (torch.distributions.kumaraswamy.kumaraswamy method)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.entropy"]], "entropy() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.entropy"]], "entropy() (torch.distributions.log_normal.lognormal method)": [[35, "torch.distributions.log_normal.LogNormal.entropy"]], "entropy() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.entropy"]], "entropy() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.entropy"]], "entropy() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.entropy"]], "entropy() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.entropy"]], "entropy() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.entropy"]], "entropy() (torch.distributions.pareto.pareto method)": [[35, "torch.distributions.pareto.Pareto.entropy"]], "entropy() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.entropy"]], "entropy() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.entropy"]], "entropy() (torch.distributions.weibull.weibull method)": [[35, "torch.distributions.weibull.Weibull.entropy"]], "entropy() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.entropy"]], "enumerate_support() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.enumerate_support"]], "enumerate_support() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.enumerate_support"]], "enumerate_support() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.enumerate_support"]], 
"enumerate_support() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.enumerate_support"]], "enumerate_support() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.enumerate_support"]], "enumerate_support() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.enumerate_support"]], "event_shape (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.event_shape"]], "expand() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.expand"]], "expand() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.expand"]], "expand() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.expand"]], "expand() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.expand"]], "expand() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.expand"]], "expand() (torch.distributions.chi2.chi2 method)": [[35, "torch.distributions.chi2.Chi2.expand"]], "expand() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.expand"]], "expand() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.expand"]], "expand() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.expand"]], "expand() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.expand"]], "expand() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.expand"]], "expand() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.expand"]], "expand() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.expand"]], "expand() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.expand"]], "expand() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.expand"]], "expand() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.expand"]], "expand() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.expand"]], "expand() (torch.distributions.inverse_gamma.inversegamma method)": [[35, "torch.distributions.inverse_gamma.InverseGamma.expand"]], "expand() (torch.distributions.kumaraswamy.kumaraswamy method)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.expand"]], "expand() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.expand"]], "expand() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.expand"]], "expand() (torch.distributions.log_normal.lognormal method)": [[35, "torch.distributions.log_normal.LogNormal.expand"]], "expand() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.expand"]], "expand() (torch.distributions.mixture_same_family.mixturesamefamily 
method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.expand"]], "expand() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.expand"]], "expand() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.expand"]], "expand() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.expand"]], "expand() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.expand"]], "expand() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.expand"]], "expand() (torch.distributions.pareto.pareto method)": [[35, "torch.distributions.pareto.Pareto.expand"]], "expand() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.expand"]], "expand() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.expand"]], "expand() (torch.distributions.relaxed_bernoulli.relaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.expand"]], "expand() (torch.distributions.relaxed_categorical.relaxedonehotcategorical method)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.expand"]], "expand() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.expand"]], "expand() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.expand"]], "expand() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.expand"]], "expand() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.expand"]], "expand() (torch.distributions.weibull.weibull method)": [[35, "torch.distributions.weibull.Weibull.expand"]], "expand() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.expand"]], "forward_shape() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.forward_shape"]], "greater_than (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.greater_than"]], "greater_than_eq (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.greater_than_eq"]], "half_open_interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.half_open_interval"]], "has_enumerate_support (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.has_enumerate_support"]], "has_enumerate_support (torch.distributions.binomial.binomial attribute)": [[35, "torch.distributions.binomial.Binomial.has_enumerate_support"]], "has_enumerate_support (torch.distributions.categorical.categorical attribute)": [[35, "torch.distributions.categorical.Categorical.has_enumerate_support"]], "has_enumerate_support (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.has_enumerate_support"]], "has_enumerate_support (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.has_enumerate_support"]], "has_rsample 
(torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.has_rsample"]], "has_rsample (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.has_rsample"]], "has_rsample (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.has_rsample"]], "has_rsample (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.has_rsample"]], "has_rsample (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.has_rsample"]], "has_rsample (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.has_rsample"]], "has_rsample (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.has_rsample"]], "has_rsample (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.has_rsample"]], "has_rsample (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.has_rsample"]], "has_rsample (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.has_rsample"]], "has_rsample (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.has_rsample"]], "has_rsample (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.has_rsample"]], "has_rsample (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.has_rsample"]], "has_rsample (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.has_rsample"]], "has_rsample (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.has_rsample"]], "has_rsample (torch.distributions.mixture_same_family.mixturesamefamily attribute)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.has_rsample"]], "has_rsample (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.has_rsample"]], "has_rsample (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.has_rsample"]], "has_rsample (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.has_rsample"]], "has_rsample (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.has_rsample"]], "has_rsample (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.has_rsample"]], "has_rsample (torch.distributions.transformed_distribution.transformeddistribution property)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.has_rsample"]], "has_rsample (torch.distributions.uniform.uniform attribute)": [[35, "torch.distributions.uniform.Uniform.has_rsample"]], "has_rsample (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.has_rsample"]], "has_rsample (torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.has_rsample"]], "icdf() 
(torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.icdf"]], "icdf() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.icdf"]], "icdf() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.icdf"]], "icdf() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.icdf"]], "icdf() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.icdf"]], "icdf() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.icdf"]], "icdf() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.icdf"]], "icdf() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.icdf"]], "icdf() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.icdf"]], "icdf() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.icdf"]], "independent (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.independent"]], "integer_interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.integer_interval"]], "interval (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.interval"]], "inv (torch.distributions.transforms.transform property)": [[35, "torch.distributions.transforms.Transform.inv"]], "inverse_shape() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.inverse_shape"]], "kl_divergence() (in module torch.distributions.kl)": [[35, "torch.distributions.kl.kl_divergence"]], "less_than (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.less_than"]], "loc (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.loc"]], "log_abs_det_jacobian() (torch.distributions.transforms.transform method)": [[35, "torch.distributions.transforms.Transform.log_abs_det_jacobian"]], "log_prob() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.log_prob"]], "log_prob() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.log_prob"]], "log_prob() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.log_prob"]], "log_prob() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.log_prob"]], "log_prob() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.log_prob"]], "log_prob() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.log_prob"]], "log_prob() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.log_prob"]], "log_prob() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.log_prob"]], "log_prob() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.log_prob"]], "log_prob() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, 
"torch.distributions.fishersnedecor.FisherSnedecor.log_prob"]], "log_prob() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.log_prob"]], "log_prob() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.log_prob"]], "log_prob() (torch.distributions.gumbel.gumbel method)": [[35, "torch.distributions.gumbel.Gumbel.log_prob"]], "log_prob() (torch.distributions.half_cauchy.halfcauchy method)": [[35, "torch.distributions.half_cauchy.HalfCauchy.log_prob"]], "log_prob() (torch.distributions.half_normal.halfnormal method)": [[35, "torch.distributions.half_normal.HalfNormal.log_prob"]], "log_prob() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.log_prob"]], "log_prob() (torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.log_prob"]], "log_prob() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.log_prob"]], "log_prob() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.log_prob"]], "log_prob() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.log_prob"]], "log_prob() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.log_prob"]], "log_prob() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.log_prob"]], "log_prob() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.log_prob"]], "log_prob() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.log_prob"]], "log_prob() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.log_prob"]], "log_prob() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.log_prob"]], "log_prob() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.log_prob"]], "log_prob() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.log_prob"]], "log_prob() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.log_prob"]], "log_prob() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.log_prob"]], "log_prob() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.log_prob"]], "log_prob() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.log_prob"]], "logits (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.logits"]], "logits (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.logits"]], "logits (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.logits"]], "logits (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, 
"torch.distributions.continuous_bernoulli.ContinuousBernoulli.logits"]], "logits (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.logits"]], "logits (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.logits"]], "logits (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.logits"]], "logits (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.logits"]], "logits (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.logits"]], "logits (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.logits"]], "logits (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.logits"]], "mean (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.mean"]], "mean (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.mean"]], "mean (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.mean"]], "mean (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.mean"]], "mean (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.mean"]], "mean (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.mean"]], "mean (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.mean"]], "mean (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.mean"]], "mean (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.mean"]], "mean (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.mean"]], "mean (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.mean"]], "mean (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.mean"]], "mean (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.mean"]], "mean (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.mean"]], "mean (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.mean"]], "mean (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.mean"]], "mean (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.mean"]], "mean (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.mean"]], "mean (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.mean"]], "mean (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.mean"]], "mean (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": 
[[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mean"]], "mean (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.mean"]], "mean (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.mean"]], "mean (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.mean"]], "mean (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.mean"]], "mean (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.mean"]], "mean (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.mean"]], "mean (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.mean"]], "mean (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.mean"]], "mean (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.mean"]], "mean (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.mean"]], "mean (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.mean"]], "mean (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.mean"]], "mean (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.mean"]], "mixture_distribution (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.mixture_distribution"]], "mode (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.mode"]], "mode (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.mode"]], "mode (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.mode"]], "mode (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.mode"]], "mode (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.mode"]], "mode (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.mode"]], "mode (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.mode"]], "mode (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.mode"]], "mode (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.mode"]], "mode (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.mode"]], "mode (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.mode"]], "mode (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.mode"]], "mode (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.mode"]], "mode (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.mode"]], "mode (torch.distributions.independent.independent property)": [[35, 
"torch.distributions.independent.Independent.mode"]], "mode (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.mode"]], "mode (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.mode"]], "mode (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.mode"]], "mode (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.mode"]], "mode (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.mode"]], "mode (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.mode"]], "mode (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.mode"]], "mode (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.mode"]], "mode (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.mode"]], "mode (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.mode"]], "mode (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.mode"]], "mode (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.mode"]], "mode (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.mode"]], "mode (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.mode"]], "mode (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.mode"]], "mode (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.mode"]], "multinomial (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.multinomial"]], "param_shape (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.param_shape"]], "param_shape (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.param_shape"]], "param_shape (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.param_shape"]], "param_shape (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.param_shape"]], "param_shape (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.param_shape"]], "param_shape (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.param_shape"]], "param_shape (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.param_shape"]], "param_shape (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.param_shape"]], "perplexity() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.perplexity"]], "precision_matrix 
(torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.precision_matrix"]], "precision_matrix (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.precision_matrix"]], "precision_matrix (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.precision_matrix"]], "probs (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.probs"]], "probs (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.probs"]], "probs (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.probs"]], "probs (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.probs"]], "probs (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.probs"]], "probs (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.probs"]], "probs (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.probs"]], "probs (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.probs"]], "probs (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.probs"]], "probs (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.probs"]], "probs (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.probs"]], "rate (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.rate"]], "register() (torch.distributions.constraint_registry.constraintregistry method)": [[35, "torch.distributions.constraint_registry.ConstraintRegistry.register"]], "register_kl() (in module torch.distributions.kl)": [[35, "torch.distributions.kl.register_kl"]], "rsample() (torch.distributions.beta.beta method)": [[35, "torch.distributions.beta.Beta.rsample"]], "rsample() (torch.distributions.cauchy.cauchy method)": [[35, "torch.distributions.cauchy.Cauchy.rsample"]], "rsample() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.rsample"]], "rsample() (torch.distributions.dirichlet.dirichlet method)": [[35, "torch.distributions.dirichlet.Dirichlet.rsample"]], "rsample() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.rsample"]], "rsample() (torch.distributions.exponential.exponential method)": [[35, "torch.distributions.exponential.Exponential.rsample"]], "rsample() (torch.distributions.fishersnedecor.fishersnedecor method)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.rsample"]], "rsample() (torch.distributions.gamma.gamma method)": [[35, "torch.distributions.gamma.Gamma.rsample"]], "rsample() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.rsample"]], "rsample() 
(torch.distributions.laplace.laplace method)": [[35, "torch.distributions.laplace.Laplace.rsample"]], "rsample() (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal method)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.rsample"]], "rsample() (torch.distributions.multivariate_normal.multivariatenormal method)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.rsample"]], "rsample() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.rsample"]], "rsample() (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli method)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.rsample"]], "rsample() (torch.distributions.studentt.studentt method)": [[35, "torch.distributions.studentT.StudentT.rsample"]], "rsample() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.rsample"]], "rsample() (torch.distributions.uniform.uniform method)": [[35, "torch.distributions.uniform.Uniform.rsample"]], "rsample() (torch.distributions.wishart.wishart method)": [[35, "torch.distributions.wishart.Wishart.rsample"]], "sample() (torch.distributions.bernoulli.bernoulli method)": [[35, "torch.distributions.bernoulli.Bernoulli.sample"]], "sample() (torch.distributions.binomial.binomial method)": [[35, "torch.distributions.binomial.Binomial.sample"]], "sample() (torch.distributions.categorical.categorical method)": [[35, "torch.distributions.categorical.Categorical.sample"]], "sample() (torch.distributions.continuous_bernoulli.continuousbernoulli method)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.sample"]], "sample() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.sample"]], "sample() (torch.distributions.geometric.geometric method)": [[35, "torch.distributions.geometric.Geometric.sample"]], "sample() (torch.distributions.independent.independent method)": [[35, "torch.distributions.independent.Independent.sample"]], "sample() (torch.distributions.lkj_cholesky.lkjcholesky method)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.sample"]], "sample() (torch.distributions.mixture_same_family.mixturesamefamily method)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.sample"]], "sample() (torch.distributions.multinomial.multinomial method)": [[35, "torch.distributions.multinomial.Multinomial.sample"]], "sample() (torch.distributions.negative_binomial.negativebinomial method)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.sample"]], "sample() (torch.distributions.normal.normal method)": [[35, "torch.distributions.normal.Normal.sample"]], "sample() (torch.distributions.one_hot_categorical.onehotcategorical method)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.sample"]], "sample() (torch.distributions.poisson.poisson method)": [[35, "torch.distributions.poisson.Poisson.sample"]], "sample() (torch.distributions.transformed_distribution.transformeddistribution method)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.sample"]], "sample() (torch.distributions.von_mises.vonmises method)": [[35, "torch.distributions.von_mises.VonMises.sample"]], "sample_n() (torch.distributions.distribution.distribution method)": [[35, "torch.distributions.distribution.Distribution.sample_n"]], "scale (torch.distributions.half_cauchy.halfcauchy 
property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.scale"]], "scale (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.scale"]], "scale (torch.distributions.log_normal.lognormal property)": [[35, "torch.distributions.log_normal.LogNormal.scale"]], "scale_tril (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.scale_tril"]], "scale_tril (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.scale_tril"]], "scale_tril (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.scale_tril"]], "set_default_validate_args() (torch.distributions.distribution.distribution static method)": [[35, "torch.distributions.distribution.Distribution.set_default_validate_args"]], "sign (torch.distributions.transforms.transform property)": [[35, "torch.distributions.transforms.Transform.sign"]], "stack (in module torch.distributions.constraints)": [[35, "torch.distributions.constraints.stack"]], "stddev (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.stddev"]], "stddev (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.stddev"]], "stddev (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.stddev"]], "stddev (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.stddev"]], "stddev (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.stddev"]], "stddev (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.stddev"]], "stddev (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.stddev"]], "support (torch.distributions.bernoulli.bernoulli attribute)": [[35, "torch.distributions.bernoulli.Bernoulli.support"]], "support (torch.distributions.beta.beta attribute)": [[35, "torch.distributions.beta.Beta.support"]], "support (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.support"]], "support (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.support"]], "support (torch.distributions.cauchy.cauchy attribute)": [[35, "torch.distributions.cauchy.Cauchy.support"]], "support (torch.distributions.continuous_bernoulli.continuousbernoulli attribute)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.support"]], "support (torch.distributions.dirichlet.dirichlet attribute)": [[35, "torch.distributions.dirichlet.Dirichlet.support"]], "support (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.support"]], "support (torch.distributions.exponential.exponential attribute)": [[35, "torch.distributions.exponential.Exponential.support"]], "support (torch.distributions.fishersnedecor.fishersnedecor attribute)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.support"]], "support (torch.distributions.gamma.gamma attribute)": [[35, "torch.distributions.gamma.Gamma.support"]], "support (torch.distributions.geometric.geometric attribute)": [[35, "torch.distributions.geometric.Geometric.support"]], "support 
(torch.distributions.gumbel.gumbel attribute)": [[35, "torch.distributions.gumbel.Gumbel.support"]], "support (torch.distributions.half_cauchy.halfcauchy attribute)": [[35, "torch.distributions.half_cauchy.HalfCauchy.support"]], "support (torch.distributions.half_normal.halfnormal attribute)": [[35, "torch.distributions.half_normal.HalfNormal.support"]], "support (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.support"]], "support (torch.distributions.inverse_gamma.inversegamma attribute)": [[35, "torch.distributions.inverse_gamma.InverseGamma.support"]], "support (torch.distributions.kumaraswamy.kumaraswamy attribute)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.support"]], "support (torch.distributions.laplace.laplace attribute)": [[35, "torch.distributions.laplace.Laplace.support"]], "support (torch.distributions.lkj_cholesky.lkjcholesky attribute)": [[35, "torch.distributions.lkj_cholesky.LKJCholesky.support"]], "support (torch.distributions.log_normal.lognormal attribute)": [[35, "torch.distributions.log_normal.LogNormal.support"]], "support (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal attribute)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.support"]], "support (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.support"]], "support (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.support"]], "support (torch.distributions.multivariate_normal.multivariatenormal attribute)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.support"]], "support (torch.distributions.negative_binomial.negativebinomial attribute)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.support"]], "support (torch.distributions.normal.normal attribute)": [[35, "torch.distributions.normal.Normal.support"]], "support (torch.distributions.one_hot_categorical.onehotcategorical attribute)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.support"]], "support (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.support"]], "support (torch.distributions.poisson.poisson attribute)": [[35, "torch.distributions.poisson.Poisson.support"]], "support (torch.distributions.relaxed_bernoulli.logitrelaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.LogitRelaxedBernoulli.support"]], "support (torch.distributions.relaxed_bernoulli.relaxedbernoulli attribute)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.support"]], "support (torch.distributions.relaxed_categorical.relaxedonehotcategorical attribute)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.support"]], "support (torch.distributions.studentt.studentt attribute)": [[35, "torch.distributions.studentT.StudentT.support"]], "support (torch.distributions.transformed_distribution.transformeddistribution property)": [[35, "torch.distributions.transformed_distribution.TransformedDistribution.support"]], "support (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.support"]], "support (torch.distributions.von_mises.vonmises attribute)": [[35, "torch.distributions.von_mises.VonMises.support"]], "support (torch.distributions.weibull.weibull attribute)": [[35, "torch.distributions.weibull.Weibull.support"]], "support 
(torch.distributions.wishart.wishart attribute)": [[35, "torch.distributions.wishart.Wishart.support"]], "temperature (torch.distributions.relaxed_bernoulli.relaxedbernoulli property)": [[35, "torch.distributions.relaxed_bernoulli.RelaxedBernoulli.temperature"]], "temperature (torch.distributions.relaxed_categorical.relaxedonehotcategorical property)": [[35, "torch.distributions.relaxed_categorical.RelaxedOneHotCategorical.temperature"]], "torch.distributions": [[35, "module-torch.distributions"]], "torch.distributions.bernoulli": [[35, "module-torch.distributions.bernoulli"]], "torch.distributions.beta": [[35, "module-torch.distributions.beta"]], "torch.distributions.binomial": [[35, "module-torch.distributions.binomial"]], "torch.distributions.categorical": [[35, "module-torch.distributions.categorical"]], "torch.distributions.cauchy": [[35, "module-torch.distributions.cauchy"]], "torch.distributions.chi2": [[35, "module-torch.distributions.chi2"]], "torch.distributions.constraint_registry": [[35, "module-torch.distributions.constraint_registry"]], "torch.distributions.constraints": [[35, "module-torch.distributions.constraints"]], "torch.distributions.continuous_bernoulli": [[35, "module-torch.distributions.continuous_bernoulli"]], "torch.distributions.dirichlet": [[35, "module-torch.distributions.dirichlet"]], "torch.distributions.distribution": [[35, "module-torch.distributions.distribution"]], "torch.distributions.exp_family": [[35, "module-torch.distributions.exp_family"]], "torch.distributions.exponential": [[35, "module-torch.distributions.exponential"]], "torch.distributions.fishersnedecor": [[35, "module-torch.distributions.fishersnedecor"]], "torch.distributions.gamma": [[35, "module-torch.distributions.gamma"]], "torch.distributions.geometric": [[35, "module-torch.distributions.geometric"]], "torch.distributions.gumbel": [[35, "module-torch.distributions.gumbel"]], "torch.distributions.half_cauchy": [[35, "module-torch.distributions.half_cauchy"]], "torch.distributions.half_normal": [[35, "module-torch.distributions.half_normal"]], "torch.distributions.independent": [[35, "module-torch.distributions.independent"]], "torch.distributions.inverse_gamma": [[35, "module-torch.distributions.inverse_gamma"]], "torch.distributions.kl": [[35, "module-torch.distributions.kl"]], "torch.distributions.kumaraswamy": [[35, "module-torch.distributions.kumaraswamy"]], "torch.distributions.laplace": [[35, "module-torch.distributions.laplace"]], "torch.distributions.lkj_cholesky": [[35, "module-torch.distributions.lkj_cholesky"]], "torch.distributions.log_normal": [[35, "module-torch.distributions.log_normal"]], "torch.distributions.logistic_normal": [[35, "module-torch.distributions.logistic_normal"]], "torch.distributions.lowrank_multivariate_normal": [[35, "module-torch.distributions.lowrank_multivariate_normal"]], "torch.distributions.mixture_same_family": [[35, "module-torch.distributions.mixture_same_family"]], "torch.distributions.multinomial": [[35, "module-torch.distributions.multinomial"]], "torch.distributions.multivariate_normal": [[35, "module-torch.distributions.multivariate_normal"]], "torch.distributions.negative_binomial": [[35, "module-torch.distributions.negative_binomial"]], "torch.distributions.normal": [[35, "module-torch.distributions.normal"]], "torch.distributions.one_hot_categorical": [[35, "module-torch.distributions.one_hot_categorical"]], "torch.distributions.pareto": [[35, "module-torch.distributions.pareto"]], "torch.distributions.poisson": [[35, 
"module-torch.distributions.poisson"]], "torch.distributions.relaxed_bernoulli": [[35, "module-torch.distributions.relaxed_bernoulli"]], "torch.distributions.relaxed_categorical": [[35, "module-torch.distributions.relaxed_categorical"]], "torch.distributions.studentt": [[35, "module-torch.distributions.studentT"]], "torch.distributions.transformed_distribution": [[35, "module-torch.distributions.transformed_distribution"]], "torch.distributions.transforms": [[35, "module-torch.distributions.transforms"]], "torch.distributions.uniform": [[35, "module-torch.distributions.uniform"]], "torch.distributions.utils": [[35, "module-torch.distributions.utils"]], "torch.distributions.von_mises": [[35, "module-torch.distributions.von_mises"]], "torch.distributions.weibull": [[35, "module-torch.distributions.weibull"]], "torch.distributions.wishart": [[35, "module-torch.distributions.wishart"]], "total_count (torch.distributions.multinomial.multinomial attribute)": [[35, "torch.distributions.multinomial.Multinomial.total_count"]], "variance (torch.distributions.bernoulli.bernoulli property)": [[35, "torch.distributions.bernoulli.Bernoulli.variance"]], "variance (torch.distributions.beta.beta property)": [[35, "torch.distributions.beta.Beta.variance"]], "variance (torch.distributions.binomial.binomial property)": [[35, "torch.distributions.binomial.Binomial.variance"]], "variance (torch.distributions.categorical.categorical property)": [[35, "torch.distributions.categorical.Categorical.variance"]], "variance (torch.distributions.cauchy.cauchy property)": [[35, "torch.distributions.cauchy.Cauchy.variance"]], "variance (torch.distributions.continuous_bernoulli.continuousbernoulli property)": [[35, "torch.distributions.continuous_bernoulli.ContinuousBernoulli.variance"]], "variance (torch.distributions.dirichlet.dirichlet property)": [[35, "torch.distributions.dirichlet.Dirichlet.variance"]], "variance (torch.distributions.distribution.distribution property)": [[35, "torch.distributions.distribution.Distribution.variance"]], "variance (torch.distributions.exponential.exponential property)": [[35, "torch.distributions.exponential.Exponential.variance"]], "variance (torch.distributions.fishersnedecor.fishersnedecor property)": [[35, "torch.distributions.fishersnedecor.FisherSnedecor.variance"]], "variance (torch.distributions.gamma.gamma property)": [[35, "torch.distributions.gamma.Gamma.variance"]], "variance (torch.distributions.geometric.geometric property)": [[35, "torch.distributions.geometric.Geometric.variance"]], "variance (torch.distributions.gumbel.gumbel property)": [[35, "torch.distributions.gumbel.Gumbel.variance"]], "variance (torch.distributions.half_cauchy.halfcauchy property)": [[35, "torch.distributions.half_cauchy.HalfCauchy.variance"]], "variance (torch.distributions.half_normal.halfnormal property)": [[35, "torch.distributions.half_normal.HalfNormal.variance"]], "variance (torch.distributions.independent.independent property)": [[35, "torch.distributions.independent.Independent.variance"]], "variance (torch.distributions.inverse_gamma.inversegamma property)": [[35, "torch.distributions.inverse_gamma.InverseGamma.variance"]], "variance (torch.distributions.kumaraswamy.kumaraswamy property)": [[35, "torch.distributions.kumaraswamy.Kumaraswamy.variance"]], "variance (torch.distributions.laplace.laplace property)": [[35, "torch.distributions.laplace.Laplace.variance"]], "variance (torch.distributions.log_normal.lognormal property)": [[35, 
"torch.distributions.log_normal.LogNormal.variance"]], "variance (torch.distributions.lowrank_multivariate_normal.lowrankmultivariatenormal property)": [[35, "torch.distributions.lowrank_multivariate_normal.LowRankMultivariateNormal.variance"]], "variance (torch.distributions.mixture_same_family.mixturesamefamily property)": [[35, "torch.distributions.mixture_same_family.MixtureSameFamily.variance"]], "variance (torch.distributions.multinomial.multinomial property)": [[35, "torch.distributions.multinomial.Multinomial.variance"]], "variance (torch.distributions.multivariate_normal.multivariatenormal property)": [[35, "torch.distributions.multivariate_normal.MultivariateNormal.variance"]], "variance (torch.distributions.negative_binomial.negativebinomial property)": [[35, "torch.distributions.negative_binomial.NegativeBinomial.variance"]], "variance (torch.distributions.normal.normal property)": [[35, "torch.distributions.normal.Normal.variance"]], "variance (torch.distributions.one_hot_categorical.onehotcategorical property)": [[35, "torch.distributions.one_hot_categorical.OneHotCategorical.variance"]], "variance (torch.distributions.pareto.pareto property)": [[35, "torch.distributions.pareto.Pareto.variance"]], "variance (torch.distributions.poisson.poisson property)": [[35, "torch.distributions.poisson.Poisson.variance"]], "variance (torch.distributions.studentt.studentt property)": [[35, "torch.distributions.studentT.StudentT.variance"]], "variance (torch.distributions.uniform.uniform property)": [[35, "torch.distributions.uniform.Uniform.variance"]], "variance (torch.distributions.von_mises.vonmises property)": [[35, "torch.distributions.von_mises.VonMises.variance"]], "variance (torch.distributions.weibull.weibull property)": [[35, "torch.distributions.weibull.Weibull.variance"]], "variance (torch.distributions.wishart.wishart property)": [[35, "torch.distributions.wishart.Wishart.variance"]], "from_dlpack() (in module torch.utils.dlpack)": [[36, "torch.utils.dlpack.from_dlpack"]], "to_dlpack() (in module torch.utils.dlpack)": [[36, "torch.utils.dlpack.to_dlpack"]], "elasticagent (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent"]], "healthcheckserver (class in torch.distributed.elastic.agent.server.health_check_server)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer"]], "localelasticagent (class in torch.distributed.elastic.agent.server.local_elastic_agent)": [[37, "torch.distributed.elastic.agent.server.local_elastic_agent.LocalElasticAgent"]], "runresult (class in torch.distributed.elastic.agent.server.api)": [[37, "torch.distributed.elastic.agent.server.api.RunResult"]], "simpleelasticagent (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent"]], "worker (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.Worker"]], "workergroup (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerGroup"]], "workerspec (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerSpec"]], "workerstate (class in torch.distributed.elastic.agent.server)": [[37, "torch.distributed.elastic.agent.server.WorkerState"]], "_assign_worker_ranks() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._assign_worker_ranks"]], 
"_exit_barrier() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._exit_barrier"]], "_initialize_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._initialize_workers"]], "_monitor_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._monitor_workers"]], "_rendezvous() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._rendezvous"]], "_restart_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._restart_workers"]], "_shutdown() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._shutdown"]], "_start_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._start_workers"]], "_stop_workers() (torch.distributed.elastic.agent.server.simpleelasticagent method)": [[37, "torch.distributed.elastic.agent.server.SimpleElasticAgent._stop_workers"]], "create_healthcheck_server() (in module torch.distributed.elastic.agent.server.health_check_server)": [[37, "torch.distributed.elastic.agent.server.health_check_server.create_healthcheck_server"]], "get_entrypoint_name() (torch.distributed.elastic.agent.server.workerspec method)": [[37, "torch.distributed.elastic.agent.server.WorkerSpec.get_entrypoint_name"]], "get_worker_group() (torch.distributed.elastic.agent.server.elasticagent method)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent.get_worker_group"]], "is_running() (torch.distributed.elastic.agent.server.workerstate static method)": [[37, "torch.distributed.elastic.agent.server.WorkerState.is_running"]], "run() (torch.distributed.elastic.agent.server.elasticagent method)": [[37, "torch.distributed.elastic.agent.server.ElasticAgent.run"]], "start() (torch.distributed.elastic.agent.server.health_check_server.healthcheckserver method)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer.start"]], "stop() (torch.distributed.elastic.agent.server.health_check_server.healthcheckserver method)": [[37, "torch.distributed.elastic.agent.server.health_check_server.HealthCheckServer.stop"]], "torch.distributed.elastic.agent": [[37, "module-torch.distributed.elastic.agent"]], "torch.distributed.elastic.agent.server": [[37, "module-torch.distributed.elastic.agent.server"]], "torch.distributed.elastic.agent.server.health_check_server": [[37, "module-torch.distributed.elastic.agent.server.health_check_server"]], "torch.distributed.elastic.control_plane": [[38, "module-torch.distributed.elastic.control_plane"]], "worker_main() (in module torch.distributed.elastic.control_plane)": [[38, "torch.distributed.elastic.control_plane.worker_main"]], "childfailederror (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ChildFailedError"]], "errorhandler (class in torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.ErrorHandler"]], "processfailure (class in torch.distributed.elastic.multiprocessing.errors)": [[40, 
"torch.distributed.elastic.multiprocessing.errors.ProcessFailure"]], "record() (in module torch.distributed.elastic.multiprocessing.errors)": [[40, "torch.distributed.elastic.multiprocessing.errors.record"]], "torch.distributed.elastic.multiprocessing.errors": [[40, "module-torch.distributed.elastic.multiprocessing.errors"]], "event (class in torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.Event"]], "eventmetadatavalue (in module torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.EventMetadataValue"]], "eventsource (class in torch.distributed.elastic.events.api)": [[41, "torch.distributed.elastic.events.api.EventSource"]], "get_logging_handler() (in module torch.distributed.elastic.events)": [[41, "torch.distributed.elastic.events.get_logging_handler"]], "record() (in module torch.distributed.elastic.events)": [[41, "torch.distributed.elastic.events.record"]], "torch.distributed.elastic.events": [[41, "module-torch.distributed.elastic.events"]], "consolemetrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.ConsoleMetricHandler"]], "metrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.MetricHandler"]], "nullmetrichandler (class in torch.distributed.elastic.metrics.api)": [[44, "torch.distributed.elastic.metrics.api.NullMetricHandler"]], "configure() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.configure"]], "prof() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.prof"]], "put_metric() (in module torch.distributed.elastic.metrics)": [[44, "torch.distributed.elastic.metrics.put_metric"]], "torch.distributed.elastic.metrics": [[44, "module-torch.distributed.elastic.metrics"]], "defaultlogsspecs (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs"]], "logsdest (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsDest"]], "logsspecs (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsSpecs"]], "multiprocesscontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.MultiprocessContext"]], "pcontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.PContext"]], "runprocsresult (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.RunProcsResult"]], "subprocesscontext (class in torch.distributed.elastic.multiprocessing.api)": [[45, "torch.distributed.elastic.multiprocessing.api.SubprocessContext"]], "reify() (torch.distributed.elastic.multiprocessing.api.defaultlogsspecs method)": [[45, "torch.distributed.elastic.multiprocessing.api.DefaultLogsSpecs.reify"]], "reify() (torch.distributed.elastic.multiprocessing.api.logsspecs method)": [[45, "torch.distributed.elastic.multiprocessing.api.LogsSpecs.reify"]], "start_processes() (in module torch.distributed.elastic.multiprocessing)": [[45, "torch.distributed.elastic.multiprocessing.start_processes"]], "torch.distributed.elastic.multiprocessing": [[45, "module-torch.distributed.elastic.multiprocessing"]], "c10drendezvousbackend (class in torch.distributed.elastic.rendezvous.c10d_rendezvous_backend)": [[47, 
"torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend"]], "dynamicrendezvoushandler (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler"]], "etcdrendezvousbackend (class in torch.distributed.elastic.rendezvous.etcd_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend"]], "etcdrendezvoushandler (class in torch.distributed.elastic.rendezvous.etcd_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous.EtcdRendezvousHandler"]], "etcdserver (class in torch.distributed.elastic.rendezvous.etcd_server)": [[47, "torch.distributed.elastic.rendezvous.etcd_server.EtcdServer"]], "etcdstore (class in torch.distributed.elastic.rendezvous.etcd_store)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore"]], "rendezvousbackend (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend"]], "rendezvousclosederror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousClosedError"]], "rendezvousconnectionerror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousConnectionError"]], "rendezvouserror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousError"]], "rendezvousgracefulexiterror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousGracefulExitError"]], "rendezvoushandler (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler"]], "rendezvoushandlerregistry (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandlerRegistry"]], "rendezvousinfo (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousInfo"]], "rendezvousparameters (class in torch.distributed.elastic.rendezvous)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters"]], "rendezvousstateerror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStateError"]], "rendezvousstoreinfo (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo"]], "rendezvoustimeout (class in torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout"]], "rendezvoustimeouterror (class in torch.distributed.elastic.rendezvous.api)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousTimeoutError"]], "add() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.add"]], "build() (torch.distributed.elastic.rendezvous.api.rendezvousstoreinfo static method)": [[47, "torch.distributed.elastic.rendezvous.api.RendezvousStoreInfo.build"]], "check() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.check"]], "close (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.close"]], 
"create_backend() (in module torch.distributed.elastic.rendezvous.c10d_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.create_backend"]], "create_backend() (in module torch.distributed.elastic.rendezvous.etcd_rendezvous_backend)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.create_backend"]], "create_handler() (in module torch.distributed.elastic.rendezvous.dynamic_rendezvous)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.create_handler"]], "from_backend() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.dynamicrendezvoushandler class method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.DynamicRendezvousHandler.from_backend"]], "get() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get"]], "get() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.get"]], "get_as_bool() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get_as_bool"]], "get_as_int() (torch.distributed.elastic.rendezvous.rendezvousparameters method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousParameters.get_as_int"]], "get_backend() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.get_backend"]], "get_run_id() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.get_run_id"]], "get_state() (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.get_state"]], "get_state() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.get_state"]], "get_state() (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.get_state"]], "heartbeat (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.heartbeat"]], "is_closed() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.is_closed"]], "join (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.join"]], "last_call (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvoustimeout property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousTimeout.last_call"]], "name (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.name"]], "name (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend property)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.name"]], "name (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend property)": [[47, 
"torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.name"]], "next_rendezvous() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.next_rendezvous"]], "num_nodes_waiting() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.num_nodes_waiting"]], "set() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.set"]], "set_closed() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.set_closed"]], "set_state() (torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.c10drendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.c10d_rendezvous_backend.C10dRendezvousBackend.set_state"]], "set_state() (torch.distributed.elastic.rendezvous.dynamic_rendezvous.rendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.dynamic_rendezvous.RendezvousBackend.set_state"]], "set_state() (torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.etcdrendezvousbackend method)": [[47, "torch.distributed.elastic.rendezvous.etcd_rendezvous_backend.EtcdRendezvousBackend.set_state"]], "shutdown() (torch.distributed.elastic.rendezvous.rendezvoushandler method)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.shutdown"]], "torch.distributed.elastic.rendezvous": [[47, "module-torch.distributed.elastic.rendezvous"]], "torch.distributed.elastic.rendezvous.registry": [[47, "module-torch.distributed.elastic.rendezvous.registry"]], "use_agent_store (torch.distributed.elastic.rendezvous.rendezvoushandler property)": [[47, "torch.distributed.elastic.rendezvous.RendezvousHandler.use_agent_store"]], "wait() (torch.distributed.elastic.rendezvous.etcd_store.etcdstore method)": [[47, "torch.distributed.elastic.rendezvous.etcd_store.EtcdStore.wait"]], "torch.distributed.run": [[48, "module-torch.distributed.run"]], "subprocesshandler (class in torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler)": [[49, "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler.SubprocessHandler"]], "get_subprocess_handler() (in module torch.distributed.elastic.multiprocessing.subprocess_handler.handlers)": [[49, "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers.get_subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.handlers": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.handlers"]], "torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler": [[49, "module-torch.distributed.elastic.multiprocessing.subprocess_handler.subprocess_handler"]], "filetimerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.FileTimerClient"]], "filetimerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.FileTimerServer"]], "localtimerclient (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.LocalTimerClient"]], "localtimerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.LocalTimerServer"]], "timerclient (class in 
torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerClient"]], "timerrequest (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerRequest"]], "timerserver (class in torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.TimerServer"]], "acquire() (torch.distributed.elastic.timer.timerclient method)": [[50, "torch.distributed.elastic.timer.TimerClient.acquire"]], "clear_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.clear_timers"]], "configure() (in module torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.configure"]], "expires() (in module torch.distributed.elastic.timer)": [[50, "torch.distributed.elastic.timer.expires"]], "get_expired_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.get_expired_timers"]], "log_debug_info_for_expired_timers() (in module torch.distributed.elastic.timer.debug_info_logging)": [[50, "torch.distributed.elastic.timer.debug_info_logging.log_debug_info_for_expired_timers"]], "register_timers() (torch.distributed.elastic.timer.timerserver method)": [[50, "torch.distributed.elastic.timer.TimerServer.register_timers"]], "release() (torch.distributed.elastic.timer.timerclient method)": [[50, "torch.distributed.elastic.timer.TimerClient.release"]], "torch.distributed.elastic.timer": [[50, "module-torch.distributed.elastic.timer"]], "torch.distributed.elastic.timer.debug_info_logging": [[50, "module-torch.distributed.elastic.timer.debug_info_logging"]], "constraint (in module torch.export)": [[52, "torch.export.Constraint"]], "customobjargument (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.CustomObjArgument"]], "dim() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.Dim"]], "exportbackwardsignature (class in torch.export)": [[52, "torch.export.ExportBackwardSignature"]], "exportgraphsignature (class in torch.export)": [[52, "torch.export.ExportGraphSignature"]], "exportgraphsignature (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.ExportGraphSignature"]], "exportedprogram (class in torch.export)": [[52, "torch.export.ExportedProgram"]], "flatargsadapter (class in torch.export.unflatten)": [[52, "torch.export.unflatten.FlatArgsAdapter"]], "inputkind (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.InputKind"]], "inputspec (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.InputSpec"]], "interpretermodule (class in torch.export.unflatten)": [[52, "torch.export.unflatten.InterpreterModule"]], "modulecallentry (class in torch.export)": [[52, "torch.export.ModuleCallEntry"]], "modulecallsignature (class in torch.export)": [[52, "torch.export.ModuleCallSignature"]], "outputkind (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.OutputKind"]], "outputspec (class in torch.export.graph_signature)": [[52, "torch.export.graph_signature.OutputSpec"]], "shapescollection (class in torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.ShapesCollection"]], "adapt() (torch.export.unflatten.flatargsadapter method)": [[52, "torch.export.unflatten.FlatArgsAdapter.adapt"]], "buffers() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.buffers"]], "dims() (in module torch.export)": [[52, "torch.export.dims"]], "dynamic_dim() (in module 
torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.dynamic_dim"]], "dynamic_shapes() (torch.export.dynamic_shapes.shapescollection method)": [[52, "torch.export.dynamic_shapes.ShapesCollection.dynamic_shapes"]], "export() (in module torch.export)": [[52, "torch.export.export"]], "get_replace_hook() (torch.export.graph_signature.exportgraphsignature method)": [[52, "torch.export.graph_signature.ExportGraphSignature.get_replace_hook"]], "load() (in module torch.export)": [[52, "torch.export.load"]], "module() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.module"]], "named_buffers() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.named_buffers"]], "named_parameters() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.named_parameters"]], "parameters() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.parameters"]], "refine_dynamic_shapes_from_suggested_fixes() (in module torch.export.dynamic_shapes)": [[52, "torch.export.dynamic_shapes.refine_dynamic_shapes_from_suggested_fixes"]], "register_dataclass() (in module torch.export)": [[52, "torch.export.register_dataclass"]], "replace_all_uses() (torch.export.graph_signature.exportgraphsignature method)": [[52, "torch.export.graph_signature.ExportGraphSignature.replace_all_uses"]], "run_decompositions() (torch.export.exportedprogram method)": [[52, "torch.export.ExportedProgram.run_decompositions"]], "save() (in module torch.export)": [[52, "torch.export.save"]], "torch.export": [[52, "module-torch.export"]], "torch.export.custom_obj": [[52, "module-torch.export.custom_obj"]], "torch.export.dynamic_shapes": [[52, "module-torch.export.dynamic_shapes"]], "torch.export.exported_program": [[52, "module-torch.export.exported_program"]], "torch.export.graph_signature": [[52, "module-torch.export.graph_signature"]], "torch.export.unflatten": [[52, "module-torch.export.unflatten"]], "unflatten() (in module torch.export.unflatten)": [[52, "torch.export.unflatten.unflatten"]], "torch.fft": [[54, "module-torch.fft"]], "backwardprefetch (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.BackwardPrefetch"]], "cpuoffload (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.CPUOffload"]], "fulloptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullOptimStateDictConfig"]], "fullstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullStateDictConfig"]], "fullyshardeddataparallel (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel"]], "localoptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.LocalOptimStateDictConfig"]], "localstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.LocalStateDictConfig"]], "mixedprecision (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.MixedPrecision"]], "optimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.OptimStateDictConfig"]], "shardedoptimstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardedOptimStateDictConfig"]], "shardedstatedictconfig (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardedStateDictConfig"]], "shardingstrategy (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.ShardingStrategy"]], "statedictconfig (class in torch.distributed.fsdp)": [[55, 
"torch.distributed.fsdp.StateDictConfig"]], "statedictsettings (class in torch.distributed.fsdp)": [[55, "torch.distributed.fsdp.StateDictSettings"]], "apply() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.apply"]], "check_is_root() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.check_is_root"]], "clip_grad_norm_() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.clip_grad_norm_"]], "flatten_sharded_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.flatten_sharded_optim_state_dict"]], "forward() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.forward"]], "fsdp_modules() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.fsdp_modules"]], "full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.full_optim_state_dict"]], "get_state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.get_state_dict_type"]], "module (torch.distributed.fsdp.fullyshardeddataparallel property)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.module"]], "named_buffers() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.named_buffers"]], "named_parameters() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.named_parameters"]], "no_sync() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.no_sync"]], "optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.optim_state_dict"]], "optim_state_dict_to_load() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.optim_state_dict_to_load"]], "register_comm_hook() (torch.distributed.fsdp.fullyshardeddataparallel method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.register_comm_hook"]], "rekey_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.rekey_optim_state_dict"]], "scatter_full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.scatter_full_optim_state_dict"]], "set_state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.set_state_dict_type"]], "shard_full_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.shard_full_optim_state_dict"]], "sharded_optim_state_dict() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.sharded_optim_state_dict"]], "state_dict_type() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.state_dict_type"]], 
"summon_full_params() (torch.distributed.fsdp.fullyshardeddataparallel static method)": [[55, "torch.distributed.fsdp.FullyShardedDataParallel.summon_full_params"]], "torch.distributed.fsdp": [[55, "module-torch.distributed.fsdp"]], "torch.func": [[57, "module-torch.func"]], "get_overwrite_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.get_overwrite_module_params_on_conversion"]], "get_swap_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.get_swap_module_params_on_conversion"]], "set_overwrite_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.set_overwrite_module_params_on_conversion"]], "set_swap_module_params_on_conversion() (in module torch.__future__)": [[62, "torch.__future__.set_swap_module_params_on_conversion"]], "torch.__future__": [[62, "module-torch.__future__"]], "future (class in torch.futures)": [[63, "torch.futures.Future"]], "add_done_callback() (torch.futures.future method)": [[63, "torch.futures.Future.add_done_callback"]], "collect_all() (in module torch.futures)": [[63, "torch.futures.collect_all"]], "done() (torch.futures.future method)": [[63, "torch.futures.Future.done"]], "set_exception() (torch.futures.future method)": [[63, "torch.futures.Future.set_exception"]], "set_result() (torch.futures.future method)": [[63, "torch.futures.Future.set_result"]], "then() (torch.futures.future method)": [[63, "torch.futures.Future.then"]], "torch.futures": [[63, "module-torch.futures"]], "value() (torch.futures.future method)": [[63, "torch.futures.Future.value"]], "wait() (torch.futures.future method)": [[63, "torch.futures.Future.wait"]], "wait_all() (in module torch.futures)": [[63, "torch.futures.wait_all"]], "graph (class in torch.fx)": [[64, "torch.fx.Graph"]], "graphmodule (class in torch.fx)": [[64, "torch.fx.GraphModule"]], "interpreter (class in torch.fx)": [[64, "torch.fx.Interpreter"]], "node (class in torch.fx)": [[64, "torch.fx.Node"]], "proxy (class in torch.fx)": [[64, "torch.fx.Proxy"]], "tracer (class in torch.fx)": [[64, "torch.fx.Tracer"]], "transformer (class in torch.fx)": [[64, "torch.fx.Transformer"]], "__init__() (torch.fx.graph method)": [[64, "torch.fx.Graph.__init__"]], "__init__() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.__init__"]], "add_submodule() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.add_submodule"]], "all_input_nodes (torch.fx.node property)": [[64, "torch.fx.Node.all_input_nodes"]], "append() (torch.fx.node method)": [[64, "torch.fx.Node.append"]], "args (torch.fx.node property)": [[64, "torch.fx.Node.args"]], "boxed_run() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.boxed_run"]], "call_function() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_function"]], "call_function() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_function"]], "call_function() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.call_function"]], "call_method() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_method"]], "call_method() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_method"]], "call_module() (torch.fx.graph method)": [[64, "torch.fx.Graph.call_module"]], "call_module() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.call_module"]], "call_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.call_module"]], "call_module() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.call_module"]], "code (torch.fx.graphmodule 
property)": [[64, "torch.fx.GraphModule.code"]], "create_arg() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_arg"]], "create_args_for_root() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_args_for_root"]], "create_node() (torch.fx.graph method)": [[64, "torch.fx.Graph.create_node"]], "create_node() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_node"]], "create_proxy() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.create_proxy"]], "delete_all_unused_submodules() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.delete_all_unused_submodules"]], "delete_submodule() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.delete_submodule"]], "eliminate_dead_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.eliminate_dead_code"]], "erase_node() (torch.fx.graph method)": [[64, "torch.fx.Graph.erase_node"]], "fetch_args_kwargs_from_env() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.fetch_args_kwargs_from_env"]], "fetch_attr() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.fetch_attr"]], "find_nodes() (torch.fx.graph method)": [[64, "torch.fx.Graph.find_nodes"]], "format_node() (torch.fx.node method)": [[64, "torch.fx.Node.format_node"]], "get_attr() (torch.fx.graph method)": [[64, "torch.fx.Graph.get_attr"]], "get_attr() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.get_attr"]], "get_attr() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.get_attr"]], "get_fresh_qualname() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.get_fresh_qualname"]], "getattr() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.getattr"]], "graph (torch.fx.graphmodule property)": [[64, "torch.fx.GraphModule.graph"]], "graph_copy() (torch.fx.graph method)": [[64, "torch.fx.Graph.graph_copy"]], "insert_arg() (torch.fx.node method)": [[64, "torch.fx.Node.insert_arg"]], "inserting_after() (torch.fx.graph method)": [[64, "torch.fx.Graph.inserting_after"]], "inserting_before() (torch.fx.graph method)": [[64, "torch.fx.Graph.inserting_before"]], "is_impure() (torch.fx.node method)": [[64, "torch.fx.Node.is_impure"]], "is_leaf_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.is_leaf_module"]], "iter() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.iter"]], "keys() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.keys"]], "kwargs (torch.fx.node property)": [[64, "torch.fx.Node.kwargs"]], "lint() (torch.fx.graph method)": [[64, "torch.fx.Graph.lint"]], "map_nodes_to_values() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.map_nodes_to_values"]], "next (torch.fx.node property)": [[64, "torch.fx.Node.next"]], "node_copy() (torch.fx.graph method)": [[64, "torch.fx.Graph.node_copy"]], "nodes (torch.fx.graph property)": [[64, "torch.fx.Graph.nodes"]], "normalized_arguments() (torch.fx.node method)": [[64, "torch.fx.Node.normalized_arguments"]], "on_generate_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.on_generate_code"]], "output() (torch.fx.graph method)": [[64, "torch.fx.Graph.output"]], "output() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.output"]], "path_of_module() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.path_of_module"]], "placeholder() (torch.fx.graph method)": [[64, "torch.fx.Graph.placeholder"]], "placeholder() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.placeholder"]], "placeholder() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.placeholder"]], "prepend() (torch.fx.node method)": [[64, "torch.fx.Node.prepend"]], "prev 
(torch.fx.node property)": [[64, "torch.fx.Node.prev"]], "print_readable() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.print_readable"]], "print_tabular() (torch.fx.graph method)": [[64, "torch.fx.Graph.print_tabular"]], "process_inputs() (torch.fx.graph method)": [[64, "torch.fx.Graph.process_inputs"]], "process_outputs() (torch.fx.graph method)": [[64, "torch.fx.Graph.process_outputs"]], "proxy() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.proxy"]], "python_code() (torch.fx.graph method)": [[64, "torch.fx.Graph.python_code"]], "recompile() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.recompile"]], "replace_all_uses_with() (torch.fx.node method)": [[64, "torch.fx.Node.replace_all_uses_with"]], "replace_input_with() (torch.fx.node method)": [[64, "torch.fx.Node.replace_input_with"]], "replace_pattern() (in module torch.fx)": [[64, "torch.fx.replace_pattern"]], "run() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.run"]], "run_node() (torch.fx.interpreter method)": [[64, "torch.fx.Interpreter.run_node"]], "set_codegen() (torch.fx.graph method)": [[64, "torch.fx.Graph.set_codegen"]], "stack_trace (torch.fx.node property)": [[64, "torch.fx.Node.stack_trace"]], "symbolic_trace() (in module torch.fx)": [[64, "torch.fx.symbolic_trace"]], "to_bool() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.to_bool"]], "to_folder() (torch.fx.graphmodule method)": [[64, "torch.fx.GraphModule.to_folder"]], "torch.fx": [[64, "module-torch.fx"]], "torch.fx.annotate": [[64, "module-torch.fx.annotate"]], "torch.fx.config": [[64, "module-torch.fx.config"]], "torch.fx.experimental": [[64, "module-torch.fx.experimental"]], "torch.fx.experimental.accelerator_partitioner": [[64, "module-torch.fx.experimental.accelerator_partitioner"]], "torch.fx.experimental.const_fold": [[64, "module-torch.fx.experimental.const_fold"]], "torch.fx.experimental.debug": [[64, "module-torch.fx.experimental.debug"]], "torch.fx.experimental.graph_gradual_typechecker": [[64, "module-torch.fx.experimental.graph_gradual_typechecker"]], "torch.fx.experimental.merge_matmul": [[64, "module-torch.fx.experimental.merge_matmul"]], "torch.fx.experimental.meta_tracer": [[64, "module-torch.fx.experimental.meta_tracer"]], "torch.fx.experimental.migrate_gradual_types": [[64, "module-torch.fx.experimental.migrate_gradual_types"]], "torch.fx.experimental.migrate_gradual_types.constraint": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint"]], "torch.fx.experimental.migrate_gradual_types.constraint_generator": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint_generator"]], "torch.fx.experimental.migrate_gradual_types.constraint_transformation": [[64, "module-torch.fx.experimental.migrate_gradual_types.constraint_transformation"]], "torch.fx.experimental.migrate_gradual_types.operation": [[64, "module-torch.fx.experimental.migrate_gradual_types.operation"]], "torch.fx.experimental.migrate_gradual_types.transform_to_z3": [[64, "module-torch.fx.experimental.migrate_gradual_types.transform_to_z3"]], "torch.fx.experimental.migrate_gradual_types.util": [[64, "module-torch.fx.experimental.migrate_gradual_types.util"]], "torch.fx.experimental.migrate_gradual_types.z3_types": [[64, "module-torch.fx.experimental.migrate_gradual_types.z3_types"]], "torch.fx.experimental.normalize": [[64, "module-torch.fx.experimental.normalize"]], "torch.fx.experimental.optimization": [[64, "module-torch.fx.experimental.optimization"]], "torch.fx.experimental.partitioner_utils": [[64, 
"module-torch.fx.experimental.partitioner_utils"]], "torch.fx.experimental.proxy_tensor": [[64, "module-torch.fx.experimental.proxy_tensor"]], "torch.fx.experimental.recording": [[64, "module-torch.fx.experimental.recording"]], "torch.fx.experimental.refinement_types": [[64, "module-torch.fx.experimental.refinement_types"]], "torch.fx.experimental.rewriter": [[64, "module-torch.fx.experimental.rewriter"]], "torch.fx.experimental.schema_type_annotation": [[64, "module-torch.fx.experimental.schema_type_annotation"]], "torch.fx.experimental.sym_node": [[64, "module-torch.fx.experimental.sym_node"]], "torch.fx.experimental.unification": [[64, "module-torch.fx.experimental.unification"]], "torch.fx.experimental.unification.core": [[64, "module-torch.fx.experimental.unification.core"]], "torch.fx.experimental.unification.dispatch": [[64, "module-torch.fx.experimental.unification.dispatch"]], "torch.fx.experimental.unification.match": [[64, "module-torch.fx.experimental.unification.match"]], "torch.fx.experimental.unification.more": [[64, "module-torch.fx.experimental.unification.more"]], "torch.fx.experimental.unification.multipledispatch": [[64, "module-torch.fx.experimental.unification.multipledispatch"]], "torch.fx.experimental.unification.multipledispatch.conflict": [[64, "module-torch.fx.experimental.unification.multipledispatch.conflict"]], "torch.fx.experimental.unification.multipledispatch.core": [[64, "module-torch.fx.experimental.unification.multipledispatch.core"]], "torch.fx.experimental.unification.multipledispatch.dispatcher": [[64, "module-torch.fx.experimental.unification.multipledispatch.dispatcher"]], "torch.fx.experimental.unification.multipledispatch.utils": [[64, "module-torch.fx.experimental.unification.multipledispatch.utils"]], "torch.fx.experimental.unification.multipledispatch.variadic": [[64, "module-torch.fx.experimental.unification.multipledispatch.variadic"]], "torch.fx.experimental.unification.unification_tools": [[64, "module-torch.fx.experimental.unification.unification_tools"]], "torch.fx.experimental.unification.utils": [[64, "module-torch.fx.experimental.unification.utils"]], "torch.fx.experimental.unification.variable": [[64, "module-torch.fx.experimental.unification.variable"]], "torch.fx.experimental.unify_refinements": [[64, "module-torch.fx.experimental.unify_refinements"]], "torch.fx.experimental.validator": [[64, "module-torch.fx.experimental.validator"]], "torch.fx.graph": [[64, "module-torch.fx.graph"]], "torch.fx.graph_module": [[64, "module-torch.fx.graph_module"]], "torch.fx.immutable_collections": [[64, "module-torch.fx.immutable_collections"]], "torch.fx.interpreter": [[64, "module-torch.fx.interpreter"]], "torch.fx.node": [[64, "module-torch.fx.node"]], "torch.fx.operator_schemas": [[64, "module-torch.fx.operator_schemas"]], "torch.fx.passes": [[64, "module-torch.fx.passes"]], "torch.fx.passes.annotate_getitem_nodes": [[64, "module-torch.fx.passes.annotate_getitem_nodes"]], "torch.fx.passes.backends": [[64, "module-torch.fx.passes.backends"]], "torch.fx.passes.backends.cudagraphs": [[64, "module-torch.fx.passes.backends.cudagraphs"]], "torch.fx.passes.dialect": [[64, "module-torch.fx.passes.dialect"]], "torch.fx.passes.dialect.common": [[64, "module-torch.fx.passes.dialect.common"]], "torch.fx.passes.dialect.common.cse_pass": [[64, "module-torch.fx.passes.dialect.common.cse_pass"]], "torch.fx.passes.fake_tensor_prop": [[64, "module-torch.fx.passes.fake_tensor_prop"]], "torch.fx.passes.graph_drawer": [[64, "module-torch.fx.passes.graph_drawer"]], 
"torch.fx.passes.graph_manipulation": [[64, "module-torch.fx.passes.graph_manipulation"]], "torch.fx.passes.graph_transform_observer": [[64, "module-torch.fx.passes.graph_transform_observer"]], "torch.fx.passes.infra": [[64, "module-torch.fx.passes.infra"]], "torch.fx.passes.infra.partitioner": [[64, "module-torch.fx.passes.infra.partitioner"]], "torch.fx.passes.infra.pass_base": [[64, "module-torch.fx.passes.infra.pass_base"]], "torch.fx.passes.infra.pass_manager": [[64, "module-torch.fx.passes.infra.pass_manager"]], "torch.fx.passes.net_min_base": [[64, "module-torch.fx.passes.net_min_base"]], "torch.fx.passes.operator_support": [[64, "module-torch.fx.passes.operator_support"]], "torch.fx.passes.param_fetch": [[64, "module-torch.fx.passes.param_fetch"]], "torch.fx.passes.pass_manager": [[64, "module-torch.fx.passes.pass_manager"]], "torch.fx.passes.reinplace": [[64, "module-torch.fx.passes.reinplace"]], "torch.fx.passes.runtime_assert": [[64, "module-torch.fx.passes.runtime_assert"]], "torch.fx.passes.shape_prop": [[64, "module-torch.fx.passes.shape_prop"]], "torch.fx.passes.split_module": [[64, "module-torch.fx.passes.split_module"]], "torch.fx.passes.split_utils": [[64, "module-torch.fx.passes.split_utils"]], "torch.fx.passes.splitter_base": [[64, "module-torch.fx.passes.splitter_base"]], "torch.fx.passes.tests": [[64, "module-torch.fx.passes.tests"]], "torch.fx.passes.tests.test_pass_manager": [[64, "module-torch.fx.passes.tests.test_pass_manager"]], "torch.fx.passes.tools_common": [[64, "module-torch.fx.passes.tools_common"]], "torch.fx.passes.utils": [[64, "module-torch.fx.passes.utils"]], "torch.fx.passes.utils.common": [[64, "module-torch.fx.passes.utils.common"]], "torch.fx.passes.utils.fuser_utils": [[64, "module-torch.fx.passes.utils.fuser_utils"]], "torch.fx.passes.utils.matcher_utils": [[64, "module-torch.fx.passes.utils.matcher_utils"]], "torch.fx.passes.utils.matcher_with_name_node_map_utils": [[64, "module-torch.fx.passes.utils.matcher_with_name_node_map_utils"]], "torch.fx.passes.utils.source_matcher_utils": [[64, "module-torch.fx.passes.utils.source_matcher_utils"]], "torch.fx.proxy": [[64, "module-torch.fx.proxy"]], "torch.fx.subgraph_rewriter": [[64, "module-torch.fx.subgraph_rewriter"]], "torch.fx.tensor_type": [[64, "module-torch.fx.tensor_type"]], "torch.fx.traceback": [[64, "module-torch.fx.traceback"]], "trace() (torch.fx.tracer method)": [[64, "torch.fx.Tracer.trace"]], "transform() (torch.fx.transformer method)": [[64, "torch.fx.Transformer.transform"]], "update_arg() (torch.fx.node method)": [[64, "torch.fx.Node.update_arg"]], "update_kwarg() (torch.fx.node method)": [[64, "torch.fx.Node.update_kwarg"]], "wrap() (in module torch.fx)": [[64, "torch.fx.wrap"]], "torch.fx.experimental.symbolic_shapes": [[65, "module-torch.fx.experimental.symbolic_shapes"]], "generator (class in torch)": [[90, "torch.Generator"]], "clone_state() (torch.generator method)": [[90, "torch.Generator.clone_state"]], "device (torch.generator attribute)": [[90, "torch.Generator.device"]], "get_state() (torch.generator method)": [[90, "torch.Generator.get_state"]], "graphsafe_get_state() (torch.generator method)": [[90, "torch.Generator.graphsafe_get_state"]], "graphsafe_set_state() (torch.generator method)": [[90, "torch.Generator.graphsafe_set_state"]], "initial_seed() (torch.generator method)": [[90, "torch.Generator.initial_seed"]], "manual_seed() (torch.generator method)": [[90, "torch.Generator.manual_seed"]], "seed() (torch.generator method)": [[90, "torch.Generator.seed"]], 
"set_state() (torch.generator method)": [[90, "torch.Generator.set_state"]], "abs() (torch.tensor method)": [[91, "torch.Tensor.abs"]], "abs_() (torch.tensor method)": [[92, "torch.Tensor.abs_"]], "absolute() (torch.tensor method)": [[93, "torch.Tensor.absolute"]], "absolute_() (torch.tensor method)": [[94, "torch.Tensor.absolute_"]], "acos() (torch.tensor method)": [[95, "torch.Tensor.acos"]], "acos_() (torch.tensor method)": [[96, "torch.Tensor.acos_"]], "acosh() (torch.tensor method)": [[97, "torch.Tensor.acosh"]], "acosh_() (torch.tensor method)": [[98, "torch.Tensor.acosh_"]], "add() (torch.tensor method)": [[99, "torch.Tensor.add"]], "add_() (torch.tensor method)": [[100, "torch.Tensor.add_"]], "addbmm() (torch.tensor method)": [[101, "torch.Tensor.addbmm"]], "addbmm_() (torch.tensor method)": [[102, "torch.Tensor.addbmm_"]], "addcdiv() (torch.tensor method)": [[103, "torch.Tensor.addcdiv"]], "addcdiv_() (torch.tensor method)": [[104, "torch.Tensor.addcdiv_"]], "addcmul() (torch.tensor method)": [[105, "torch.Tensor.addcmul"]], "addcmul_() (torch.tensor method)": [[106, "torch.Tensor.addcmul_"]], "addmm() (torch.tensor method)": [[107, "torch.Tensor.addmm"]], "addmm_() (torch.tensor method)": [[108, "torch.Tensor.addmm_"]], "addmv() (torch.tensor method)": [[109, "torch.Tensor.addmv"]], "addmv_() (torch.tensor method)": [[110, "torch.Tensor.addmv_"]], "addr() (torch.tensor method)": [[111, "torch.Tensor.addr"]], "addr_() (torch.tensor method)": [[112, "torch.Tensor.addr_"]], "adjoint() (torch.tensor method)": [[113, "torch.Tensor.adjoint"]], "all() (torch.tensor method)": [[114, "torch.Tensor.all"]], "allclose() (torch.tensor method)": [[115, "torch.Tensor.allclose"]], "amax() (torch.tensor method)": [[116, "torch.Tensor.amax"]], "amin() (torch.tensor method)": [[117, "torch.Tensor.amin"]], "aminmax() (torch.tensor method)": [[118, "torch.Tensor.aminmax"]], "angle() (torch.tensor method)": [[119, "torch.Tensor.angle"]], "any() (torch.tensor method)": [[120, "torch.Tensor.any"]], "apply_() (torch.tensor method)": [[121, "torch.Tensor.apply_"]], "arccos() (torch.tensor method)": [[122, "torch.Tensor.arccos"]], "arccos_() (torch.tensor method)": [[123, "torch.Tensor.arccos_"]], "arccosh() (torch.tensor method)": [[124, "torch.Tensor.arccosh"]], "arccosh_() (torch.tensor method)": [[125, "torch.Tensor.arccosh_"]], "arcsin() (torch.tensor method)": [[126, "torch.Tensor.arcsin"]], "arcsin_() (torch.tensor method)": [[127, "torch.Tensor.arcsin_"]], "arcsinh() (torch.tensor method)": [[128, "torch.Tensor.arcsinh"]], "arcsinh_() (torch.tensor method)": [[129, "torch.Tensor.arcsinh_"]], "arctan() (torch.tensor method)": [[130, "torch.Tensor.arctan"]], "arctan2() (torch.tensor method)": [[131, "torch.Tensor.arctan2"]], "arctan2_() (torch.tensor method)": [[132, "torch.Tensor.arctan2_"]], "arctan_() (torch.tensor method)": [[133, "torch.Tensor.arctan_"]], "arctanh() (torch.tensor method)": [[134, "torch.Tensor.arctanh"]], "arctanh_() (torch.tensor method)": [[135, "torch.Tensor.arctanh_"]], "argmax() (torch.tensor method)": [[136, "torch.Tensor.argmax"]], "argmin() (torch.tensor method)": [[137, "torch.Tensor.argmin"]], "argsort() (torch.tensor method)": [[138, "torch.Tensor.argsort"]], "argwhere() (torch.tensor method)": [[139, "torch.Tensor.argwhere"]], "as_strided() (torch.tensor method)": [[140, "torch.Tensor.as_strided"]], "as_subclass() (torch.tensor method)": [[141, "torch.Tensor.as_subclass"]], "asin() (torch.tensor method)": [[142, "torch.Tensor.asin"]], "asin_() (torch.tensor method)": 
[[143, "torch.Tensor.asin_"]], "asinh() (torch.tensor method)": [[144, "torch.Tensor.asinh"]], "asinh_() (torch.tensor method)": [[145, "torch.Tensor.asinh_"]], "atan() (torch.tensor method)": [[146, "torch.Tensor.atan"]], "atan2() (torch.tensor method)": [[147, "torch.Tensor.atan2"]], "atan2_() (torch.tensor method)": [[148, "torch.Tensor.atan2_"]], "atan_() (torch.tensor method)": [[149, "torch.Tensor.atan_"]], "atanh() (torch.tensor method)": [[150, "torch.Tensor.atanh"]], "atanh_() (torch.tensor method)": [[151, "torch.Tensor.atanh_"]], "backward() (torch.tensor method)": [[152, "torch.Tensor.backward"]], "baddbmm() (torch.tensor method)": [[153, "torch.Tensor.baddbmm"]], "baddbmm_() (torch.tensor method)": [[154, "torch.Tensor.baddbmm_"]], "bernoulli() (torch.tensor method)": [[155, "torch.Tensor.bernoulli"]], "bernoulli_() (torch.tensor method)": [[156, "torch.Tensor.bernoulli_"]], "bfloat16() (torch.tensor method)": [[157, "torch.Tensor.bfloat16"]], "bincount() (torch.tensor method)": [[158, "torch.Tensor.bincount"]], "bitwise_and() (torch.tensor method)": [[159, "torch.Tensor.bitwise_and"]], "bitwise_and_() (torch.tensor method)": [[160, "torch.Tensor.bitwise_and_"]], "bitwise_left_shift() (torch.tensor method)": [[161, "torch.Tensor.bitwise_left_shift"]], "bitwise_left_shift_() (torch.tensor method)": [[162, "torch.Tensor.bitwise_left_shift_"]], "bitwise_not() (torch.tensor method)": [[163, "torch.Tensor.bitwise_not"]], "bitwise_not_() (torch.tensor method)": [[164, "torch.Tensor.bitwise_not_"]], "bitwise_or() (torch.tensor method)": [[165, "torch.Tensor.bitwise_or"]], "bitwise_or_() (torch.tensor method)": [[166, "torch.Tensor.bitwise_or_"]], "bitwise_right_shift() (torch.tensor method)": [[167, "torch.Tensor.bitwise_right_shift"]], "bitwise_right_shift_() (torch.tensor method)": [[168, "torch.Tensor.bitwise_right_shift_"]], "bitwise_xor() (torch.tensor method)": [[169, "torch.Tensor.bitwise_xor"]], "bitwise_xor_() (torch.tensor method)": [[170, "torch.Tensor.bitwise_xor_"]], "bmm() (torch.tensor method)": [[171, "torch.Tensor.bmm"]], "bool() (torch.tensor method)": [[172, "torch.Tensor.bool"]], "broadcast_to() (torch.tensor method)": [[173, "torch.Tensor.broadcast_to"]], "byte() (torch.tensor method)": [[174, "torch.Tensor.byte"]], "cauchy_() (torch.tensor method)": [[175, "torch.Tensor.cauchy_"]], "ccol_indices() (torch.tensor method)": [[176, "torch.Tensor.ccol_indices"]], "cdouble() (torch.tensor method)": [[177, "torch.Tensor.cdouble"]], "ceil() (torch.tensor method)": [[178, "torch.Tensor.ceil"]], "ceil_() (torch.tensor method)": [[179, "torch.Tensor.ceil_"]], "cfloat() (torch.tensor method)": [[180, "torch.Tensor.cfloat"]], "chalf() (torch.tensor method)": [[181, "torch.Tensor.chalf"]], "char() (torch.tensor method)": [[182, "torch.Tensor.char"]], "cholesky() (torch.tensor method)": [[183, "torch.Tensor.cholesky"]], "cholesky_inverse() (torch.tensor method)": [[184, "torch.Tensor.cholesky_inverse"]], "cholesky_solve() (torch.tensor method)": [[185, "torch.Tensor.cholesky_solve"]], "chunk() (torch.tensor method)": [[186, "torch.Tensor.chunk"]], "clamp() (torch.tensor method)": [[187, "torch.Tensor.clamp"]], "clamp_() (torch.tensor method)": [[188, "torch.Tensor.clamp_"]], "clip() (torch.tensor method)": [[189, "torch.Tensor.clip"]], "clip_() (torch.tensor method)": [[190, "torch.Tensor.clip_"]], "clone() (torch.tensor method)": [[191, "torch.Tensor.clone"]], "coalesce() (torch.tensor method)": [[192, "torch.Tensor.coalesce"]], "col_indices() (torch.tensor method)": [[193, 
"torch.Tensor.col_indices"]], "conj() (torch.tensor method)": [[194, "torch.Tensor.conj"]], "conj_physical() (torch.tensor method)": [[195, "torch.Tensor.conj_physical"]], "conj_physical_() (torch.tensor method)": [[196, "torch.Tensor.conj_physical_"]], "contiguous() (torch.tensor method)": [[197, "torch.Tensor.contiguous"]], "copy_() (torch.tensor method)": [[198, "torch.Tensor.copy_"]], "copysign() (torch.tensor method)": [[199, "torch.Tensor.copysign"]], "copysign_() (torch.tensor method)": [[200, "torch.Tensor.copysign_"]], "corrcoef() (torch.tensor method)": [[201, "torch.Tensor.corrcoef"]], "cos() (torch.tensor method)": [[202, "torch.Tensor.cos"]], "cos_() (torch.tensor method)": [[203, "torch.Tensor.cos_"]], "cosh() (torch.tensor method)": [[204, "torch.Tensor.cosh"]], "cosh_() (torch.tensor method)": [[205, "torch.Tensor.cosh_"]], "count_nonzero() (torch.tensor method)": [[206, "torch.Tensor.count_nonzero"]], "cov() (torch.tensor method)": [[207, "torch.Tensor.cov"]], "cpu() (torch.tensor method)": [[208, "torch.Tensor.cpu"]], "cross() (torch.tensor method)": [[209, "torch.Tensor.cross"]], "crow_indices() (torch.tensor method)": [[210, "torch.Tensor.crow_indices"]], "cuda() (torch.tensor method)": [[211, "torch.Tensor.cuda"]], "cummax() (torch.tensor method)": [[212, "torch.Tensor.cummax"]], "cummin() (torch.tensor method)": [[213, "torch.Tensor.cummin"]], "cumprod() (torch.tensor method)": [[214, "torch.Tensor.cumprod"]], "cumprod_() (torch.tensor method)": [[215, "torch.Tensor.cumprod_"]], "cumsum() (torch.tensor method)": [[216, "torch.Tensor.cumsum"]], "cumsum_() (torch.tensor method)": [[217, "torch.Tensor.cumsum_"]], "data_ptr() (torch.tensor method)": [[218, "torch.Tensor.data_ptr"]], "deg2rad() (torch.tensor method)": [[219, "torch.Tensor.deg2rad"]], "dense_dim() (torch.tensor method)": [[220, "torch.Tensor.dense_dim"]], "dequantize() (torch.tensor method)": [[221, "torch.Tensor.dequantize"]], "det() (torch.tensor method)": [[222, "torch.Tensor.det"]], "detach() (torch.tensor method)": [[223, "torch.Tensor.detach"]], "detach_() (torch.tensor method)": [[224, "torch.Tensor.detach_"]], "device (torch.tensor attribute)": [[225, "torch.Tensor.device"]], "diag() (torch.tensor method)": [[226, "torch.Tensor.diag"]], "diag_embed() (torch.tensor method)": [[227, "torch.Tensor.diag_embed"]], "diagflat() (torch.tensor method)": [[228, "torch.Tensor.diagflat"]], "diagonal() (torch.tensor method)": [[229, "torch.Tensor.diagonal"]], "diagonal_scatter() (torch.tensor method)": [[230, "torch.Tensor.diagonal_scatter"]], "diff() (torch.tensor method)": [[231, "torch.Tensor.diff"]], "digamma() (torch.tensor method)": [[232, "torch.Tensor.digamma"]], "digamma_() (torch.tensor method)": [[233, "torch.Tensor.digamma_"]], "dim() (torch.tensor method)": [[234, "torch.Tensor.dim"]], "dim_order() (torch.tensor method)": [[235, "torch.Tensor.dim_order"]], "dist() (torch.tensor method)": [[236, "torch.Tensor.dist"]], "div() (torch.tensor method)": [[237, "torch.Tensor.div"]], "div_() (torch.tensor method)": [[238, "torch.Tensor.div_"]], "divide() (torch.tensor method)": [[239, "torch.Tensor.divide"]], "divide_() (torch.tensor method)": [[240, "torch.Tensor.divide_"]], "dot() (torch.tensor method)": [[241, "torch.Tensor.dot"]], "double() (torch.tensor method)": [[242, "torch.Tensor.double"]], "dsplit() (torch.tensor method)": [[243, "torch.Tensor.dsplit"]], "element_size() (torch.tensor method)": [[244, "torch.Tensor.element_size"]], "eq() (torch.tensor method)": [[245, "torch.Tensor.eq"]], "eq_() 
(torch.tensor method)": [[246, "torch.Tensor.eq_"]], "equal() (torch.tensor method)": [[247, "torch.Tensor.equal"]], "erf() (torch.tensor method)": [[248, "torch.Tensor.erf"]], "erf_() (torch.tensor method)": [[249, "torch.Tensor.erf_"]], "erfc() (torch.tensor method)": [[250, "torch.Tensor.erfc"]], "erfc_() (torch.tensor method)": [[251, "torch.Tensor.erfc_"]], "erfinv() (torch.tensor method)": [[252, "torch.Tensor.erfinv"]], "erfinv_() (torch.tensor method)": [[253, "torch.Tensor.erfinv_"]], "exp() (torch.tensor method)": [[254, "torch.Tensor.exp"]], "exp_() (torch.tensor method)": [[255, "torch.Tensor.exp_"]], "expand() (torch.tensor method)": [[256, "torch.Tensor.expand"]], "expand_as() (torch.tensor method)": [[257, "torch.Tensor.expand_as"]], "expm1() (torch.tensor method)": [[258, "torch.Tensor.expm1"]], "expm1_() (torch.tensor method)": [[259, "torch.Tensor.expm1_"]], "exponential_() (torch.tensor method)": [[260, "torch.Tensor.exponential_"]], "fill_() (torch.tensor method)": [[261, "torch.Tensor.fill_"]], "fill_diagonal_() (torch.tensor method)": [[262, "torch.Tensor.fill_diagonal_"]], "fix() (torch.tensor method)": [[263, "torch.Tensor.fix"]], "fix_() (torch.tensor method)": [[264, "torch.Tensor.fix_"]], "flatten() (torch.tensor method)": [[265, "torch.Tensor.flatten"]], "flip() (torch.tensor method)": [[266, "torch.Tensor.flip"]], "fliplr() (torch.tensor method)": [[267, "torch.Tensor.fliplr"]], "flipud() (torch.tensor method)": [[268, "torch.Tensor.flipud"]], "float() (torch.tensor method)": [[269, "torch.Tensor.float"]], "float_power() (torch.tensor method)": [[270, "torch.Tensor.float_power"]], "float_power_() (torch.tensor method)": [[271, "torch.Tensor.float_power_"]], "floor() (torch.tensor method)": [[272, "torch.Tensor.floor"]], "floor_() (torch.tensor method)": [[273, "torch.Tensor.floor_"]], "floor_divide() (torch.tensor method)": [[274, "torch.Tensor.floor_divide"]], "floor_divide_() (torch.tensor method)": [[275, "torch.Tensor.floor_divide_"]], "fmax() (torch.tensor method)": [[276, "torch.Tensor.fmax"]], "fmin() (torch.tensor method)": [[277, "torch.Tensor.fmin"]], "fmod() (torch.tensor method)": [[278, "torch.Tensor.fmod"]], "fmod_() (torch.tensor method)": [[279, "torch.Tensor.fmod_"]], "frac() (torch.tensor method)": [[280, "torch.Tensor.frac"]], "frac_() (torch.tensor method)": [[281, "torch.Tensor.frac_"]], "frexp() (torch.tensor method)": [[282, "torch.Tensor.frexp"]], "gather() (torch.tensor method)": [[283, "torch.Tensor.gather"]], "gcd() (torch.tensor method)": [[284, "torch.Tensor.gcd"]], "gcd_() (torch.tensor method)": [[285, "torch.Tensor.gcd_"]], "ge() (torch.tensor method)": [[286, "torch.Tensor.ge"]], "ge_() (torch.tensor method)": [[287, "torch.Tensor.ge_"]], "geometric_() (torch.tensor method)": [[288, "torch.Tensor.geometric_"]], "geqrf() (torch.tensor method)": [[289, "torch.Tensor.geqrf"]], "ger() (torch.tensor method)": [[290, "torch.Tensor.ger"]], "get_device() (torch.tensor method)": [[291, "torch.Tensor.get_device"]], "grad (torch.tensor attribute)": [[292, "torch.Tensor.grad"]], "greater() (torch.tensor method)": [[293, "torch.Tensor.greater"]], "greater_() (torch.tensor method)": [[294, "torch.Tensor.greater_"]], "greater_equal() (torch.tensor method)": [[295, "torch.Tensor.greater_equal"]], "greater_equal_() (torch.tensor method)": [[296, "torch.Tensor.greater_equal_"]], "gt() (torch.tensor method)": [[297, "torch.Tensor.gt"]], "gt_() (torch.tensor method)": [[298, "torch.Tensor.gt_"]], "half() (torch.tensor method)": [[299, 
"torch.Tensor.half"]], "hardshrink() (torch.tensor method)": [[300, "torch.Tensor.hardshrink"]], "heaviside() (torch.tensor method)": [[301, "torch.Tensor.heaviside"]], "histc() (torch.tensor method)": [[302, "torch.Tensor.histc"]], "histogram() (torch.tensor method)": [[303, "torch.Tensor.histogram"]], "hsplit() (torch.tensor method)": [[304, "torch.Tensor.hsplit"]], "hypot() (torch.tensor method)": [[305, "torch.Tensor.hypot"]], "hypot_() (torch.tensor method)": [[306, "torch.Tensor.hypot_"]], "i0() (torch.tensor method)": [[307, "torch.Tensor.i0"]], "i0_() (torch.tensor method)": [[308, "torch.Tensor.i0_"]], "igamma() (torch.tensor method)": [[309, "torch.Tensor.igamma"]], "igamma_() (torch.tensor method)": [[310, "torch.Tensor.igamma_"]], "igammac() (torch.tensor method)": [[311, "torch.Tensor.igammac"]], "igammac_() (torch.tensor method)": [[312, "torch.Tensor.igammac_"]], "imag (torch.tensor attribute)": [[313, "torch.Tensor.imag"]], "index_add() (torch.tensor method)": [[314, "torch.Tensor.index_add"]], "index_add_() (torch.tensor method)": [[315, "torch.Tensor.index_add_"]], "index_copy() (torch.tensor method)": [[316, "torch.Tensor.index_copy"]], "index_copy_() (torch.tensor method)": [[317, "torch.Tensor.index_copy_"]], "index_fill() (torch.tensor method)": [[318, "torch.Tensor.index_fill"]], "index_fill_() (torch.tensor method)": [[319, "torch.Tensor.index_fill_"]], "index_put() (torch.tensor method)": [[320, "torch.Tensor.index_put"]], "index_put_() (torch.tensor method)": [[321, "torch.Tensor.index_put_"]], "index_reduce() (torch.tensor method)": [[322, "torch.Tensor.index_reduce"]], "index_reduce_() (torch.tensor method)": [[323, "torch.Tensor.index_reduce_"]], "index_select() (torch.tensor method)": [[324, "torch.Tensor.index_select"]], "indices() (torch.tensor method)": [[325, "torch.Tensor.indices"]], "inner() (torch.tensor method)": [[326, "torch.Tensor.inner"]], "int() (torch.tensor method)": [[327, "torch.Tensor.int"]], "int_repr() (torch.tensor method)": [[328, "torch.Tensor.int_repr"]], "inverse() (torch.tensor method)": [[329, "torch.Tensor.inverse"]], "is_coalesced() (torch.tensor method)": [[330, "torch.Tensor.is_coalesced"]], "is_complex() (torch.tensor method)": [[331, "torch.Tensor.is_complex"]], "is_conj() (torch.tensor method)": [[332, "torch.Tensor.is_conj"]], "is_contiguous() (torch.tensor method)": [[333, "torch.Tensor.is_contiguous"]], "is_cuda (torch.tensor attribute)": [[334, "torch.Tensor.is_cuda"]], "is_floating_point() (torch.tensor method)": [[335, "torch.Tensor.is_floating_point"]], "is_inference() (torch.tensor method)": [[336, "torch.Tensor.is_inference"]], "is_leaf (torch.tensor attribute)": [[337, "torch.Tensor.is_leaf"]], "is_meta (torch.tensor attribute)": [[338, "torch.Tensor.is_meta"]], "is_pinned() (torch.tensor method)": [[339, "torch.Tensor.is_pinned"]], "is_quantized (torch.tensor attribute)": [[340, "torch.Tensor.is_quantized"]], "is_set_to() (torch.tensor method)": [[341, "torch.Tensor.is_set_to"]], "is_shared() (torch.tensor method)": [[342, "torch.Tensor.is_shared"]], "is_signed() (torch.tensor method)": [[343, "torch.Tensor.is_signed"]], "is_sparse (torch.tensor attribute)": [[344, "torch.Tensor.is_sparse"]], "is_sparse_csr (torch.tensor attribute)": [[345, "torch.Tensor.is_sparse_csr"]], "isclose() (torch.tensor method)": [[346, "torch.Tensor.isclose"]], "isfinite() (torch.tensor method)": [[347, "torch.Tensor.isfinite"]], "isinf() (torch.tensor method)": [[348, "torch.Tensor.isinf"]], "isnan() (torch.tensor method)": [[349, 
"torch.Tensor.isnan"]], "isneginf() (torch.tensor method)": [[350, "torch.Tensor.isneginf"]], "isposinf() (torch.tensor method)": [[351, "torch.Tensor.isposinf"]], "isreal() (torch.tensor method)": [[352, "torch.Tensor.isreal"]], "istft() (torch.tensor method)": [[353, "torch.Tensor.istft"]], "item() (torch.tensor method)": [[354, "torch.Tensor.item"]], "itemsize (torch.tensor attribute)": [[355, "torch.Tensor.itemsize"]], "kthvalue() (torch.tensor method)": [[356, "torch.Tensor.kthvalue"]], "lcm() (torch.tensor method)": [[357, "torch.Tensor.lcm"]], "lcm_() (torch.tensor method)": [[358, "torch.Tensor.lcm_"]], "ldexp() (torch.tensor method)": [[359, "torch.Tensor.ldexp"]], "ldexp_() (torch.tensor method)": [[360, "torch.Tensor.ldexp_"]], "le() (torch.tensor method)": [[361, "torch.Tensor.le"]], "le_() (torch.tensor method)": [[362, "torch.Tensor.le_"]], "lerp() (torch.tensor method)": [[363, "torch.Tensor.lerp"]], "lerp_() (torch.tensor method)": [[364, "torch.Tensor.lerp_"]], "less() (torch.tensor method)": [[365, "torch.Tensor.less"]], "less_() (torch.tensor method)": [[366, "torch.Tensor.less_"]], "less_equal() (torch.tensor method)": [[367, "torch.Tensor.less_equal"]], "less_equal_() (torch.tensor method)": [[368, "torch.Tensor.less_equal_"]], "lgamma() (torch.tensor method)": [[369, "torch.Tensor.lgamma"]], "lgamma_() (torch.tensor method)": [[370, "torch.Tensor.lgamma_"]], "log() (torch.tensor method)": [[371, "torch.Tensor.log"]], "log10() (torch.tensor method)": [[372, "torch.Tensor.log10"]], "log10_() (torch.tensor method)": [[373, "torch.Tensor.log10_"]], "log1p() (torch.tensor method)": [[374, "torch.Tensor.log1p"]], "log1p_() (torch.tensor method)": [[375, "torch.Tensor.log1p_"]], "log2() (torch.tensor method)": [[376, "torch.Tensor.log2"]], "log2_() (torch.tensor method)": [[377, "torch.Tensor.log2_"]], "log_() (torch.tensor method)": [[378, "torch.Tensor.log_"]], "log_normal_() (torch.tensor method)": [[379, "torch.Tensor.log_normal_"]], "logaddexp() (torch.tensor method)": [[380, "torch.Tensor.logaddexp"]], "logaddexp2() (torch.tensor method)": [[381, "torch.Tensor.logaddexp2"]], "logcumsumexp() (torch.tensor method)": [[382, "torch.Tensor.logcumsumexp"]], "logdet() (torch.tensor method)": [[383, "torch.Tensor.logdet"]], "logical_and() (torch.tensor method)": [[384, "torch.Tensor.logical_and"]], "logical_and_() (torch.tensor method)": [[385, "torch.Tensor.logical_and_"]], "logical_not() (torch.tensor method)": [[386, "torch.Tensor.logical_not"]], "logical_not_() (torch.tensor method)": [[387, "torch.Tensor.logical_not_"]], "logical_or() (torch.tensor method)": [[388, "torch.Tensor.logical_or"]], "logical_or_() (torch.tensor method)": [[389, "torch.Tensor.logical_or_"]], "logical_xor() (torch.tensor method)": [[390, "torch.Tensor.logical_xor"]], "logical_xor_() (torch.tensor method)": [[391, "torch.Tensor.logical_xor_"]], "logit() (torch.tensor method)": [[392, "torch.Tensor.logit"]], "logit_() (torch.tensor method)": [[393, "torch.Tensor.logit_"]], "logsumexp() (torch.tensor method)": [[394, "torch.Tensor.logsumexp"]], "long() (torch.tensor method)": [[395, "torch.Tensor.long"]], "lt() (torch.tensor method)": [[396, "torch.Tensor.lt"]], "lt_() (torch.tensor method)": [[397, "torch.Tensor.lt_"]], "lu() (torch.tensor method)": [[398, "torch.Tensor.lu"]], "lu_solve() (torch.tensor method)": [[399, "torch.Tensor.lu_solve"]], "map_() (torch.tensor method)": [[400, "torch.Tensor.map_"]], "masked_fill() (torch.tensor method)": [[401, "torch.Tensor.masked_fill"]], "masked_fill_() 
(torch.tensor method)": [[402, "torch.Tensor.masked_fill_"]], "masked_scatter() (torch.tensor method)": [[403, "torch.Tensor.masked_scatter"]], "masked_scatter_() (torch.tensor method)": [[404, "torch.Tensor.masked_scatter_"]], "masked_select() (torch.tensor method)": [[405, "torch.Tensor.masked_select"]], "matmul() (torch.tensor method)": [[406, "torch.Tensor.matmul"]], "matrix_exp() (torch.tensor method)": [[407, "torch.Tensor.matrix_exp"]], "matrix_power() (torch.tensor method)": [[408, "torch.Tensor.matrix_power"]], "max() (torch.tensor method)": [[409, "torch.Tensor.max"]], "maximum() (torch.tensor method)": [[410, "torch.Tensor.maximum"]], "mean() (torch.tensor method)": [[411, "torch.Tensor.mean"]], "median() (torch.tensor method)": [[412, "torch.Tensor.median"]], "min() (torch.tensor method)": [[413, "torch.Tensor.min"]], "minimum() (torch.tensor method)": [[414, "torch.Tensor.minimum"]], "mm() (torch.tensor method)": [[415, "torch.Tensor.mm"]], "mode() (torch.tensor method)": [[416, "torch.Tensor.mode"]], "module_load() (torch.tensor method)": [[417, "torch.Tensor.module_load"]], "moveaxis() (torch.tensor method)": [[418, "torch.Tensor.moveaxis"]], "movedim() (torch.tensor method)": [[419, "torch.Tensor.movedim"]], "msort() (torch.tensor method)": [[420, "torch.Tensor.msort"]], "mul() (torch.tensor method)": [[421, "torch.Tensor.mul"]], "mul_() (torch.tensor method)": [[422, "torch.Tensor.mul_"]], "multinomial() (torch.tensor method)": [[423, "torch.Tensor.multinomial"]], "multiply() (torch.tensor method)": [[424, "torch.Tensor.multiply"]], "multiply_() (torch.tensor method)": [[425, "torch.Tensor.multiply_"]], "mv() (torch.tensor method)": [[426, "torch.Tensor.mv"]], "mvlgamma() (torch.tensor method)": [[427, "torch.Tensor.mvlgamma"]], "mvlgamma_() (torch.tensor method)": [[428, "torch.Tensor.mvlgamma_"]], "nan_to_num() (torch.tensor method)": [[429, "torch.Tensor.nan_to_num"]], "nan_to_num_() (torch.tensor method)": [[430, "torch.Tensor.nan_to_num_"]], "nanmean() (torch.tensor method)": [[431, "torch.Tensor.nanmean"]], "nanmedian() (torch.tensor method)": [[432, "torch.Tensor.nanmedian"]], "nanquantile() (torch.tensor method)": [[433, "torch.Tensor.nanquantile"]], "nansum() (torch.tensor method)": [[434, "torch.Tensor.nansum"]], "narrow() (torch.tensor method)": [[435, "torch.Tensor.narrow"]], "narrow_copy() (torch.tensor method)": [[436, "torch.Tensor.narrow_copy"]], "nbytes (torch.tensor attribute)": [[437, "torch.Tensor.nbytes"]], "ndim (torch.tensor attribute)": [[438, "torch.Tensor.ndim"]], "ndimension() (torch.tensor method)": [[439, "torch.Tensor.ndimension"]], "ne() (torch.tensor method)": [[440, "torch.Tensor.ne"]], "ne_() (torch.tensor method)": [[441, "torch.Tensor.ne_"]], "neg() (torch.tensor method)": [[442, "torch.Tensor.neg"]], "neg_() (torch.tensor method)": [[443, "torch.Tensor.neg_"]], "negative() (torch.tensor method)": [[444, "torch.Tensor.negative"]], "negative_() (torch.tensor method)": [[445, "torch.Tensor.negative_"]], "nelement() (torch.tensor method)": [[446, "torch.Tensor.nelement"]], "new_empty() (torch.tensor method)": [[447, "torch.Tensor.new_empty"]], "new_full() (torch.tensor method)": [[448, "torch.Tensor.new_full"]], "new_ones() (torch.tensor method)": [[449, "torch.Tensor.new_ones"]], "new_tensor() (torch.tensor method)": [[450, "torch.Tensor.new_tensor"]], "new_zeros() (torch.tensor method)": [[451, "torch.Tensor.new_zeros"]], "nextafter() (torch.tensor method)": [[452, "torch.Tensor.nextafter"]], "nextafter_() (torch.tensor method)": [[453, 
"torch.Tensor.nextafter_"]], "nonzero() (torch.tensor method)": [[454, "torch.Tensor.nonzero"]], "norm() (torch.tensor method)": [[455, "torch.Tensor.norm"]], "normal_() (torch.tensor method)": [[456, "torch.Tensor.normal_"]], "not_equal() (torch.tensor method)": [[457, "torch.Tensor.not_equal"]], "not_equal_() (torch.tensor method)": [[458, "torch.Tensor.not_equal_"]], "numel() (torch.tensor method)": [[459, "torch.Tensor.numel"]], "numpy() (torch.tensor method)": [[460, "torch.Tensor.numpy"]], "orgqr() (torch.tensor method)": [[461, "torch.Tensor.orgqr"]], "ormqr() (torch.tensor method)": [[462, "torch.Tensor.ormqr"]], "outer() (torch.tensor method)": [[463, "torch.Tensor.outer"]], "permute() (torch.tensor method)": [[464, "torch.Tensor.permute"]], "pin_memory() (torch.tensor method)": [[465, "torch.Tensor.pin_memory"]], "pinverse() (torch.tensor method)": [[466, "torch.Tensor.pinverse"]], "polygamma() (torch.tensor method)": [[467, "torch.Tensor.polygamma"]], "polygamma_() (torch.tensor method)": [[468, "torch.Tensor.polygamma_"]], "positive() (torch.tensor method)": [[469, "torch.Tensor.positive"]], "pow() (torch.tensor method)": [[470, "torch.Tensor.pow"]], "pow_() (torch.tensor method)": [[471, "torch.Tensor.pow_"]], "prod() (torch.tensor method)": [[472, "torch.Tensor.prod"]], "put_() (torch.tensor method)": [[473, "torch.Tensor.put_"]], "q_per_channel_axis() (torch.tensor method)": [[474, "torch.Tensor.q_per_channel_axis"]], "q_per_channel_scales() (torch.tensor method)": [[475, "torch.Tensor.q_per_channel_scales"]], "q_per_channel_zero_points() (torch.tensor method)": [[476, "torch.Tensor.q_per_channel_zero_points"]], "q_scale() (torch.tensor method)": [[477, "torch.Tensor.q_scale"]], "q_zero_point() (torch.tensor method)": [[478, "torch.Tensor.q_zero_point"]], "qr() (torch.tensor method)": [[479, "torch.Tensor.qr"]], "qscheme() (torch.tensor method)": [[480, "torch.Tensor.qscheme"]], "quantile() (torch.tensor method)": [[481, "torch.Tensor.quantile"]], "rad2deg() (torch.tensor method)": [[482, "torch.Tensor.rad2deg"]], "random_() (torch.tensor method)": [[483, "torch.Tensor.random_"]], "ravel() (torch.tensor method)": [[484, "torch.Tensor.ravel"]], "real (torch.tensor attribute)": [[485, "torch.Tensor.real"]], "reciprocal() (torch.tensor method)": [[486, "torch.Tensor.reciprocal"]], "reciprocal_() (torch.tensor method)": [[487, "torch.Tensor.reciprocal_"]], "record_stream() (torch.tensor method)": [[488, "torch.Tensor.record_stream"]], "register_hook() (torch.tensor method)": [[489, "torch.Tensor.register_hook"]], "register_post_accumulate_grad_hook() (torch.tensor method)": [[490, "torch.Tensor.register_post_accumulate_grad_hook"]], "remainder() (torch.tensor method)": [[491, "torch.Tensor.remainder"]], "remainder_() (torch.tensor method)": [[492, "torch.Tensor.remainder_"]], "renorm() (torch.tensor method)": [[493, "torch.Tensor.renorm"]], "renorm_() (torch.tensor method)": [[494, "torch.Tensor.renorm_"]], "repeat() (torch.tensor method)": [[495, "torch.Tensor.repeat"]], "repeat_interleave() (torch.tensor method)": [[496, "torch.Tensor.repeat_interleave"]], "requires_grad (torch.tensor attribute)": [[497, "torch.Tensor.requires_grad"]], "requires_grad_() (torch.tensor method)": [[498, "torch.Tensor.requires_grad_"]], "reshape() (torch.tensor method)": [[499, "torch.Tensor.reshape"]], "reshape_as() (torch.tensor method)": [[500, "torch.Tensor.reshape_as"]], "resize_() (torch.tensor method)": [[501, "torch.Tensor.resize_"]], "resize_as_() (torch.tensor method)": [[502, 
"torch.Tensor.resize_as_"]], "resolve_conj() (torch.tensor method)": [[503, "torch.Tensor.resolve_conj"]], "resolve_neg() (torch.tensor method)": [[504, "torch.Tensor.resolve_neg"]], "retain_grad() (torch.tensor method)": [[505, "torch.Tensor.retain_grad"]], "retains_grad (torch.tensor attribute)": [[506, "torch.Tensor.retains_grad"]], "roll() (torch.tensor method)": [[507, "torch.Tensor.roll"]], "rot90() (torch.tensor method)": [[508, "torch.Tensor.rot90"]], "round() (torch.tensor method)": [[509, "torch.Tensor.round"]], "round_() (torch.tensor method)": [[510, "torch.Tensor.round_"]], "row_indices() (torch.tensor method)": [[511, "torch.Tensor.row_indices"]], "rsqrt() (torch.tensor method)": [[512, "torch.Tensor.rsqrt"]], "rsqrt_() (torch.tensor method)": [[513, "torch.Tensor.rsqrt_"]], "scatter() (torch.tensor method)": [[514, "torch.Tensor.scatter"]], "scatter_() (torch.tensor method)": [[515, "torch.Tensor.scatter_"]], "scatter_add() (torch.tensor method)": [[516, "torch.Tensor.scatter_add"]], "scatter_add_() (torch.tensor method)": [[517, "torch.Tensor.scatter_add_"]], "scatter_reduce() (torch.tensor method)": [[518, "torch.Tensor.scatter_reduce"]], "scatter_reduce_() (torch.tensor method)": [[519, "torch.Tensor.scatter_reduce_"]], "select() (torch.tensor method)": [[520, "torch.Tensor.select"]], "select_scatter() (torch.tensor method)": [[521, "torch.Tensor.select_scatter"]], "set_() (torch.tensor method)": [[522, "torch.Tensor.set_"]], "sgn() (torch.tensor method)": [[523, "torch.Tensor.sgn"]], "sgn_() (torch.tensor method)": [[524, "torch.Tensor.sgn_"]], "shape (torch.tensor attribute)": [[525, "torch.Tensor.shape"]], "share_memory_() (torch.tensor method)": [[526, "torch.Tensor.share_memory_"]], "short() (torch.tensor method)": [[527, "torch.Tensor.short"]], "sigmoid() (torch.tensor method)": [[528, "torch.Tensor.sigmoid"]], "sigmoid_() (torch.tensor method)": [[529, "torch.Tensor.sigmoid_"]], "sign() (torch.tensor method)": [[530, "torch.Tensor.sign"]], "sign_() (torch.tensor method)": [[531, "torch.Tensor.sign_"]], "signbit() (torch.tensor method)": [[532, "torch.Tensor.signbit"]], "sin() (torch.tensor method)": [[533, "torch.Tensor.sin"]], "sin_() (torch.tensor method)": [[534, "torch.Tensor.sin_"]], "sinc() (torch.tensor method)": [[535, "torch.Tensor.sinc"]], "sinc_() (torch.tensor method)": [[536, "torch.Tensor.sinc_"]], "sinh() (torch.tensor method)": [[537, "torch.Tensor.sinh"]], "sinh_() (torch.tensor method)": [[538, "torch.Tensor.sinh_"]], "size() (torch.tensor method)": [[539, "torch.Tensor.size"]], "slice_scatter() (torch.tensor method)": [[540, "torch.Tensor.slice_scatter"]], "slogdet() (torch.tensor method)": [[541, "torch.Tensor.slogdet"]], "smm() (torch.tensor method)": [[542, "torch.Tensor.smm"]], "softmax() (torch.tensor method)": [[543, "torch.Tensor.softmax"]], "sort() (torch.tensor method)": [[544, "torch.Tensor.sort"]], "sparse_dim() (torch.tensor method)": [[545, "torch.Tensor.sparse_dim"]], "sparse_mask() (torch.tensor method)": [[546, "torch.Tensor.sparse_mask"]], "sparse_resize_() (torch.tensor method)": [[547, "torch.Tensor.sparse_resize_"]], "sparse_resize_and_clear_() (torch.tensor method)": [[548, "torch.Tensor.sparse_resize_and_clear_"]], "split() (torch.tensor method)": [[549, "torch.Tensor.split"]], "sqrt() (torch.tensor method)": [[550, "torch.Tensor.sqrt"]], "sqrt_() (torch.tensor method)": [[551, "torch.Tensor.sqrt_"]], "square() (torch.tensor method)": [[552, "torch.Tensor.square"]], "square_() (torch.tensor method)": [[553, 
"torch.Tensor.square_"]], "squeeze() (torch.tensor method)": [[554, "torch.Tensor.squeeze"]], "squeeze_() (torch.tensor method)": [[555, "torch.Tensor.squeeze_"]], "sspaddmm() (torch.tensor method)": [[556, "torch.Tensor.sspaddmm"]], "std() (torch.tensor method)": [[557, "torch.Tensor.std"]], "stft() (torch.tensor method)": [[558, "torch.Tensor.stft"]], "storage() (torch.tensor method)": [[559, "torch.Tensor.storage"]], "storage_offset() (torch.tensor method)": [[560, "torch.Tensor.storage_offset"]], "storage_type() (torch.tensor method)": [[561, "torch.Tensor.storage_type"]], "stride() (torch.tensor method)": [[562, "torch.Tensor.stride"]], "sub() (torch.tensor method)": [[563, "torch.Tensor.sub"]], "sub_() (torch.tensor method)": [[564, "torch.Tensor.sub_"]], "subtract() (torch.tensor method)": [[565, "torch.Tensor.subtract"]], "subtract_() (torch.tensor method)": [[566, "torch.Tensor.subtract_"]], "sum() (torch.tensor method)": [[567, "torch.Tensor.sum"]], "sum_to_size() (torch.tensor method)": [[568, "torch.Tensor.sum_to_size"]], "svd() (torch.tensor method)": [[569, "torch.Tensor.svd"]], "swapaxes() (torch.tensor method)": [[570, "torch.Tensor.swapaxes"]], "swapdims() (torch.tensor method)": [[571, "torch.Tensor.swapdims"]], "t() (torch.tensor method)": [[572, "torch.Tensor.t"]], "t_() (torch.tensor method)": [[573, "torch.Tensor.t_"]], "take() (torch.tensor method)": [[574, "torch.Tensor.take"]], "take_along_dim() (torch.tensor method)": [[575, "torch.Tensor.take_along_dim"]], "tan() (torch.tensor method)": [[576, "torch.Tensor.tan"]], "tan_() (torch.tensor method)": [[577, "torch.Tensor.tan_"]], "tanh() (torch.tensor method)": [[578, "torch.Tensor.tanh"]], "tanh_() (torch.tensor method)": [[579, "torch.Tensor.tanh_"]], "tensor_split() (torch.tensor method)": [[580, "torch.Tensor.tensor_split"]], "tile() (torch.tensor method)": [[581, "torch.Tensor.tile"]], "to() (torch.tensor method)": [[582, "torch.Tensor.to"]], "to_dense() (torch.tensor method)": [[583, "torch.Tensor.to_dense"]], "to_mkldnn() (torch.tensor method)": [[584, "torch.Tensor.to_mkldnn"]], "to_sparse() (torch.tensor method)": [[585, "torch.Tensor.to_sparse"]], "to_sparse_bsc() (torch.tensor method)": [[586, "torch.Tensor.to_sparse_bsc"]], "to_sparse_bsr() (torch.tensor method)": [[587, "torch.Tensor.to_sparse_bsr"]], "to_sparse_coo() (torch.tensor method)": [[588, "torch.Tensor.to_sparse_coo"]], "to_sparse_csc() (torch.tensor method)": [[589, "torch.Tensor.to_sparse_csc"]], "to_sparse_csr() (torch.tensor method)": [[590, "torch.Tensor.to_sparse_csr"]], "tolist() (torch.tensor method)": [[591, "torch.Tensor.tolist"]], "topk() (torch.tensor method)": [[592, "torch.Tensor.topk"]], "trace() (torch.tensor method)": [[593, "torch.Tensor.trace"]], "transpose() (torch.tensor method)": [[594, "torch.Tensor.transpose"]], "transpose_() (torch.tensor method)": [[595, "torch.Tensor.transpose_"]], "triangular_solve() (torch.tensor method)": [[596, "torch.Tensor.triangular_solve"]], "tril() (torch.tensor method)": [[597, "torch.Tensor.tril"]], "tril_() (torch.tensor method)": [[598, "torch.Tensor.tril_"]], "triu() (torch.tensor method)": [[599, "torch.Tensor.triu"]], "triu_() (torch.tensor method)": [[600, "torch.Tensor.triu_"]], "true_divide() (torch.tensor method)": [[601, "torch.Tensor.true_divide"]], "true_divide_() (torch.tensor method)": [[602, "torch.Tensor.true_divide_"]], "trunc() (torch.tensor method)": [[603, "torch.Tensor.trunc"]], "trunc_() (torch.tensor method)": [[604, "torch.Tensor.trunc_"]], "type() (torch.tensor 
method)": [[605, "torch.Tensor.type"]], "type_as() (torch.tensor method)": [[606, "torch.Tensor.type_as"]], "unbind() (torch.tensor method)": [[607, "torch.Tensor.unbind"]], "unflatten() (torch.tensor method)": [[608, "torch.Tensor.unflatten"]], "unfold() (torch.tensor method)": [[609, "torch.Tensor.unfold"]], "uniform_() (torch.tensor method)": [[610, "torch.Tensor.uniform_"]], "unique() (torch.tensor method)": [[611, "torch.Tensor.unique"]], "unique_consecutive() (torch.tensor method)": [[612, "torch.Tensor.unique_consecutive"]], "unsqueeze() (torch.tensor method)": [[613, "torch.Tensor.unsqueeze"]], "unsqueeze_() (torch.tensor method)": [[614, "torch.Tensor.unsqueeze_"]], "untyped_storage() (torch.tensor method)": [[615, "torch.Tensor.untyped_storage"]], "values() (torch.tensor method)": [[616, "torch.Tensor.values"]], "var() (torch.tensor method)": [[617, "torch.Tensor.var"]], "vdot() (torch.tensor method)": [[618, "torch.Tensor.vdot"]], "view() (torch.tensor method)": [[619, "torch.Tensor.view"]], "view_as() (torch.tensor method)": [[620, "torch.Tensor.view_as"]], "vsplit() (torch.tensor method)": [[621, "torch.Tensor.vsplit"]], "where() (torch.tensor method)": [[622, "torch.Tensor.where"]], "xlogy() (torch.tensor method)": [[623, "torch.Tensor.xlogy"]], "xlogy_() (torch.tensor method)": [[624, "torch.Tensor.xlogy_"]], "xpu() (torch.tensor method)": [[625, "torch.Tensor.xpu"]], "zero_() (torch.tensor method)": [[626, "torch.Tensor.zero_"]], "_assert() (in module torch)": [[627, "torch._assert"]], "_foreach_abs() (in module torch)": [[628, "torch._foreach_abs"]], "_foreach_abs_() (in module torch)": [[629, "torch._foreach_abs_"]], "_foreach_acos() (in module torch)": [[630, "torch._foreach_acos"]], "_foreach_acos_() (in module torch)": [[631, "torch._foreach_acos_"]], "_foreach_asin() (in module torch)": [[632, "torch._foreach_asin"]], "_foreach_asin_() (in module torch)": [[633, "torch._foreach_asin_"]], "_foreach_atan() (in module torch)": [[634, "torch._foreach_atan"]], "_foreach_atan_() (in module torch)": [[635, "torch._foreach_atan_"]], "_foreach_ceil() (in module torch)": [[636, "torch._foreach_ceil"]], "_foreach_ceil_() (in module torch)": [[637, "torch._foreach_ceil_"]], "_foreach_cos() (in module torch)": [[638, "torch._foreach_cos"]], "_foreach_cos_() (in module torch)": [[639, "torch._foreach_cos_"]], "_foreach_cosh() (in module torch)": [[640, "torch._foreach_cosh"]], "_foreach_cosh_() (in module torch)": [[641, "torch._foreach_cosh_"]], "_foreach_erf() (in module torch)": [[642, "torch._foreach_erf"]], "_foreach_erf_() (in module torch)": [[643, "torch._foreach_erf_"]], "_foreach_erfc() (in module torch)": [[644, "torch._foreach_erfc"]], "_foreach_erfc_() (in module torch)": [[645, "torch._foreach_erfc_"]], "_foreach_exp() (in module torch)": [[646, "torch._foreach_exp"]], "_foreach_exp_() (in module torch)": [[647, "torch._foreach_exp_"]], "_foreach_expm1() (in module torch)": [[648, "torch._foreach_expm1"]], "_foreach_expm1_() (in module torch)": [[649, "torch._foreach_expm1_"]], "_foreach_floor() (in module torch)": [[650, "torch._foreach_floor"]], "_foreach_floor_() (in module torch)": [[651, "torch._foreach_floor_"]], "_foreach_frac() (in module torch)": [[652, "torch._foreach_frac"]], "_foreach_frac_() (in module torch)": [[653, "torch._foreach_frac_"]], "_foreach_lgamma() (in module torch)": [[654, "torch._foreach_lgamma"]], "_foreach_lgamma_() (in module torch)": [[655, "torch._foreach_lgamma_"]], "_foreach_log() (in module torch)": [[656, "torch._foreach_log"]], 
"_foreach_log10() (in module torch)": [[657, "torch._foreach_log10"]], "_foreach_log10_() (in module torch)": [[658, "torch._foreach_log10_"]], "_foreach_log1p() (in module torch)": [[659, "torch._foreach_log1p"]], "_foreach_log1p_() (in module torch)": [[660, "torch._foreach_log1p_"]], "_foreach_log2() (in module torch)": [[661, "torch._foreach_log2"]], "_foreach_log2_() (in module torch)": [[662, "torch._foreach_log2_"]], "_foreach_log_() (in module torch)": [[663, "torch._foreach_log_"]], "_foreach_neg() (in module torch)": [[664, "torch._foreach_neg"]], "_foreach_neg_() (in module torch)": [[665, "torch._foreach_neg_"]], "_foreach_reciprocal() (in module torch)": [[666, "torch._foreach_reciprocal"]], "_foreach_reciprocal_() (in module torch)": [[667, "torch._foreach_reciprocal_"]], "_foreach_round() (in module torch)": [[668, "torch._foreach_round"]], "_foreach_round_() (in module torch)": [[669, "torch._foreach_round_"]], "_foreach_sigmoid() (in module torch)": [[670, "torch._foreach_sigmoid"]], "_foreach_sigmoid_() (in module torch)": [[671, "torch._foreach_sigmoid_"]], "_foreach_sin() (in module torch)": [[672, "torch._foreach_sin"]], "_foreach_sin_() (in module torch)": [[673, "torch._foreach_sin_"]], "_foreach_sinh() (in module torch)": [[674, "torch._foreach_sinh"]], "_foreach_sinh_() (in module torch)": [[675, "torch._foreach_sinh_"]], "_foreach_sqrt() (in module torch)": [[676, "torch._foreach_sqrt"]], "_foreach_sqrt_() (in module torch)": [[677, "torch._foreach_sqrt_"]], "_foreach_tan() (in module torch)": [[678, "torch._foreach_tan"]], "_foreach_tan_() (in module torch)": [[679, "torch._foreach_tan_"]], "_foreach_trunc() (in module torch)": [[680, "torch._foreach_trunc"]], "_foreach_trunc_() (in module torch)": [[681, "torch._foreach_trunc_"]], "_foreach_zero_() (in module torch)": [[682, "torch._foreach_zero_"]], "set_logs() (in module torch._logging)": [[683, "torch._logging.set_logs"]], "abs() (in module torch)": [[684, "torch.abs"]], "absolute() (in module torch)": [[685, "torch.absolute"]], "acos() (in module torch)": [[686, "torch.acos"]], "acosh() (in module torch)": [[687, "torch.acosh"]], "add() (in module torch)": [[688, "torch.add"]], "addbmm() (in module torch)": [[689, "torch.addbmm"]], "addcdiv() (in module torch)": [[690, "torch.addcdiv"]], "addcmul() (in module torch)": [[691, "torch.addcmul"]], "addmm() (in module torch)": [[692, "torch.addmm"]], "addmv() (in module torch)": [[693, "torch.addmv"]], "addr() (in module torch)": [[694, "torch.addr"]], "adjoint() (in module torch)": [[695, "torch.adjoint"]], "all() (in module torch)": [[696, "torch.all"]], "allclose() (in module torch)": [[697, "torch.allclose"]], "amax() (in module torch)": [[698, "torch.amax"]], "amin() (in module torch)": [[699, "torch.amin"]], "aminmax() (in module torch)": [[700, "torch.aminmax"]], "angle() (in module torch)": [[701, "torch.angle"]], "any() (in module torch)": [[702, "torch.any"]], "bnrelu2d (class in torch.ao.nn.intrinsic)": [[703, "torch.ao.nn.intrinsic.BNReLU2d"]], "bnrelu3d (class in torch.ao.nn.intrinsic)": [[704, "torch.ao.nn.intrinsic.BNReLU3d"]], "convbn1d (class in torch.ao.nn.intrinsic)": [[705, "torch.ao.nn.intrinsic.ConvBn1d"]], "convbn2d (class in torch.ao.nn.intrinsic)": [[706, "torch.ao.nn.intrinsic.ConvBn2d"]], "convbn3d (class in torch.ao.nn.intrinsic)": [[707, "torch.ao.nn.intrinsic.ConvBn3d"]], "convbnrelu1d (class in torch.ao.nn.intrinsic)": [[708, "torch.ao.nn.intrinsic.ConvBnReLU1d"]], "convbnrelu2d (class in torch.ao.nn.intrinsic)": [[709, 
"torch.ao.nn.intrinsic.ConvBnReLU2d"]], "convbnrelu3d (class in torch.ao.nn.intrinsic)": [[710, "torch.ao.nn.intrinsic.ConvBnReLU3d"]], "convrelu1d (class in torch.ao.nn.intrinsic)": [[711, "torch.ao.nn.intrinsic.ConvReLU1d"]], "convrelu2d (class in torch.ao.nn.intrinsic)": [[712, "torch.ao.nn.intrinsic.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic)": [[713, "torch.ao.nn.intrinsic.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic)": [[714, "torch.ao.nn.intrinsic.LinearReLU"]], "convbn1d (class in torch.ao.nn.intrinsic.qat)": [[715, "torch.ao.nn.intrinsic.qat.ConvBn1d"]], "convbn2d (class in torch.ao.nn.intrinsic.qat)": [[716, "torch.ao.nn.intrinsic.qat.ConvBn2d"]], "convbn3d (class in torch.ao.nn.intrinsic.qat)": [[717, "torch.ao.nn.intrinsic.qat.ConvBn3d"]], "convbnrelu1d (class in torch.ao.nn.intrinsic.qat)": [[718, "torch.ao.nn.intrinsic.qat.ConvBnReLU1d"]], "convbnrelu2d (class in torch.ao.nn.intrinsic.qat)": [[719, "torch.ao.nn.intrinsic.qat.ConvBnReLU2d"]], "convbnrelu3d (class in torch.ao.nn.intrinsic.qat)": [[720, "torch.ao.nn.intrinsic.qat.ConvBnReLU3d"]], "convrelu2d (class in torch.ao.nn.intrinsic.qat)": [[721, "torch.ao.nn.intrinsic.qat.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic.qat)": [[722, "torch.ao.nn.intrinsic.qat.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic.qat)": [[723, "torch.ao.nn.intrinsic.qat.LinearReLU"]], "freeze_bn_stats (class in torch.ao.nn.intrinsic.qat)": [[724, "torch.ao.nn.intrinsic.qat.freeze_bn_stats"]], "update_bn_stats (class in torch.ao.nn.intrinsic.qat)": [[725, "torch.ao.nn.intrinsic.qat.update_bn_stats"]], "bnrelu2d (class in torch.ao.nn.intrinsic.quantized)": [[726, "torch.ao.nn.intrinsic.quantized.BNReLU2d"]], "bnrelu3d (class in torch.ao.nn.intrinsic.quantized)": [[727, "torch.ao.nn.intrinsic.quantized.BNReLU3d"]], "convrelu1d (class in torch.ao.nn.intrinsic.quantized)": [[728, "torch.ao.nn.intrinsic.quantized.ConvReLU1d"]], "convrelu2d (class in torch.ao.nn.intrinsic.quantized)": [[729, "torch.ao.nn.intrinsic.quantized.ConvReLU2d"]], "convrelu3d (class in torch.ao.nn.intrinsic.quantized)": [[730, "torch.ao.nn.intrinsic.quantized.ConvReLU3d"]], "linearrelu (class in torch.ao.nn.intrinsic.quantized)": [[731, "torch.ao.nn.intrinsic.quantized.LinearReLU"]], "linearrelu (class in torch.ao.nn.intrinsic.quantized.dynamic)": [[732, "torch.ao.nn.intrinsic.quantized.dynamic.LinearReLU"]], "conv2d (class in torch.ao.nn.qat)": [[733, "torch.ao.nn.qat.Conv2d"]], "conv3d (class in torch.ao.nn.qat)": [[734, "torch.ao.nn.qat.Conv3d"]], "linear (class in torch.ao.nn.qat)": [[735, "torch.ao.nn.qat.Linear"]], "from_float() (torch.ao.nn.qat.linear class method)": [[735, "torch.ao.nn.qat.Linear.from_float"]], "linear (class in torch.ao.nn.qat.dynamic)": [[736, "torch.ao.nn.qat.dynamic.Linear"]], "lstm (class in torch.ao.nn.quantizable)": [[737, "torch.ao.nn.quantizable.LSTM"]], "multiheadattention (class in torch.ao.nn.quantizable)": [[738, "torch.ao.nn.quantizable.MultiheadAttention"]], "dequantize() (torch.ao.nn.quantizable.multiheadattention method)": [[738, "torch.ao.nn.quantizable.MultiheadAttention.dequantize"]], "forward() (torch.ao.nn.quantizable.multiheadattention method)": [[738, "torch.ao.nn.quantizable.MultiheadAttention.forward"]], "batchnorm2d (class in torch.ao.nn.quantized)": [[739, "torch.ao.nn.quantized.BatchNorm2d"]], "batchnorm3d (class in torch.ao.nn.quantized)": [[740, "torch.ao.nn.quantized.BatchNorm3d"]], "conv1d (class in torch.ao.nn.quantized)": [[741, "torch.ao.nn.quantized.Conv1d"]], 
"from_float() (torch.ao.nn.quantized.conv1d class method)": [[741, "torch.ao.nn.quantized.Conv1d.from_float"]], "conv2d (class in torch.ao.nn.quantized)": [[742, "torch.ao.nn.quantized.Conv2d"]], "from_float() (torch.ao.nn.quantized.conv2d class method)": [[742, "torch.ao.nn.quantized.Conv2d.from_float"]], "conv3d (class in torch.ao.nn.quantized)": [[743, "torch.ao.nn.quantized.Conv3d"]], "from_float() (torch.ao.nn.quantized.conv3d class method)": [[743, "torch.ao.nn.quantized.Conv3d.from_float"]], "convtranspose1d (class in torch.ao.nn.quantized)": [[744, "torch.ao.nn.quantized.ConvTranspose1d"]], "convtranspose2d (class in torch.ao.nn.quantized)": [[745, "torch.ao.nn.quantized.ConvTranspose2d"]], "convtranspose3d (class in torch.ao.nn.quantized)": [[746, "torch.ao.nn.quantized.ConvTranspose3d"]], "elu (class in torch.ao.nn.quantized)": [[747, "torch.ao.nn.quantized.ELU"]], "embedding (class in torch.ao.nn.quantized)": [[748, "torch.ao.nn.quantized.Embedding"]], "from_float() (torch.ao.nn.quantized.embedding class method)": [[748, "torch.ao.nn.quantized.Embedding.from_float"]], "embeddingbag (class in torch.ao.nn.quantized)": [[749, "torch.ao.nn.quantized.EmbeddingBag"]], "from_float() (torch.ao.nn.quantized.embeddingbag class method)": [[749, "torch.ao.nn.quantized.EmbeddingBag.from_float"]], "fxfloatfunctional (class in torch.ao.nn.quantized)": [[750, "torch.ao.nn.quantized.FXFloatFunctional"]], "floatfunctional (class in torch.ao.nn.quantized)": [[751, "torch.ao.nn.quantized.FloatFunctional"]], "groupnorm (class in torch.ao.nn.quantized)": [[752, "torch.ao.nn.quantized.GroupNorm"]], "hardswish (class in torch.ao.nn.quantized)": [[753, "torch.ao.nn.quantized.Hardswish"]], "instancenorm1d (class in torch.ao.nn.quantized)": [[754, "torch.ao.nn.quantized.InstanceNorm1d"]], "instancenorm2d (class in torch.ao.nn.quantized)": [[755, "torch.ao.nn.quantized.InstanceNorm2d"]], "instancenorm3d (class in torch.ao.nn.quantized)": [[756, "torch.ao.nn.quantized.InstanceNorm3d"]], "layernorm (class in torch.ao.nn.quantized)": [[757, "torch.ao.nn.quantized.LayerNorm"]], "leakyrelu (class in torch.ao.nn.quantized)": [[758, "torch.ao.nn.quantized.LeakyReLU"]], "linear (class in torch.ao.nn.quantized)": [[759, "torch.ao.nn.quantized.Linear"]], "from_float() (torch.ao.nn.quantized.linear class method)": [[759, "torch.ao.nn.quantized.Linear.from_float"]], "from_reference() (torch.ao.nn.quantized.linear class method)": [[759, "torch.ao.nn.quantized.Linear.from_reference"]], "qfunctional (class in torch.ao.nn.quantized)": [[760, "torch.ao.nn.quantized.QFunctional"]], "relu6 (class in torch.ao.nn.quantized)": [[761, "torch.ao.nn.quantized.ReLU6"]], "sigmoid (class in torch.ao.nn.quantized)": [[762, "torch.ao.nn.quantized.Sigmoid"]], "gru (class in torch.ao.nn.quantized.dynamic)": [[763, "torch.ao.nn.quantized.dynamic.GRU"]], "grucell (class in torch.ao.nn.quantized.dynamic)": [[764, "torch.ao.nn.quantized.dynamic.GRUCell"]], "lstm (class in torch.ao.nn.quantized.dynamic)": [[765, "torch.ao.nn.quantized.dynamic.LSTM"]], "lstmcell (class in torch.ao.nn.quantized.dynamic)": [[766, "torch.ao.nn.quantized.dynamic.LSTMCell"]], "linear (class in torch.ao.nn.quantized.dynamic)": [[767, "torch.ao.nn.quantized.dynamic.Linear"]], "from_float() (torch.ao.nn.quantized.dynamic.linear class method)": [[767, "torch.ao.nn.quantized.dynamic.Linear.from_float"]], "from_reference() (torch.ao.nn.quantized.dynamic.linear class method)": [[767, "torch.ao.nn.quantized.dynamic.Linear.from_reference"]], "rnncell (class in 
torch.ao.nn.quantized.dynamic)": [[768, "torch.ao.nn.quantized.dynamic.RNNCell"]], "adaptive_avg_pool2d (class in torch.ao.nn.quantized.functional)": [[769, "torch.ao.nn.quantized.functional.adaptive_avg_pool2d"]], "adaptive_avg_pool3d (class in torch.ao.nn.quantized.functional)": [[770, "torch.ao.nn.quantized.functional.adaptive_avg_pool3d"]], "avg_pool2d (class in torch.ao.nn.quantized.functional)": [[771, "torch.ao.nn.quantized.functional.avg_pool2d"]], "avg_pool3d (class in torch.ao.nn.quantized.functional)": [[772, "torch.ao.nn.quantized.functional.avg_pool3d"]], "celu (class in torch.ao.nn.quantized.functional)": [[773, "torch.ao.nn.quantized.functional.celu"]], "clamp (class in torch.ao.nn.quantized.functional)": [[774, "torch.ao.nn.quantized.functional.clamp"]], "conv1d (class in torch.ao.nn.quantized.functional)": [[775, "torch.ao.nn.quantized.functional.conv1d"]], "conv2d (class in torch.ao.nn.quantized.functional)": [[776, "torch.ao.nn.quantized.functional.conv2d"]], "conv3d (class in torch.ao.nn.quantized.functional)": [[777, "torch.ao.nn.quantized.functional.conv3d"]], "elu (class in torch.ao.nn.quantized.functional)": [[778, "torch.ao.nn.quantized.functional.elu"]], "hardsigmoid (class in torch.ao.nn.quantized.functional)": [[779, "torch.ao.nn.quantized.functional.hardsigmoid"]], "hardswish (class in torch.ao.nn.quantized.functional)": [[780, "torch.ao.nn.quantized.functional.hardswish"]], "hardtanh (class in torch.ao.nn.quantized.functional)": [[781, "torch.ao.nn.quantized.functional.hardtanh"]], "interpolate (class in torch.ao.nn.quantized.functional)": [[782, "torch.ao.nn.quantized.functional.interpolate"]], "leaky_relu (class in torch.ao.nn.quantized.functional)": [[783, "torch.ao.nn.quantized.functional.leaky_relu"]], "linear (class in torch.ao.nn.quantized.functional)": [[784, "torch.ao.nn.quantized.functional.linear"]], "max_pool1d (class in torch.ao.nn.quantized.functional)": [[785, "torch.ao.nn.quantized.functional.max_pool1d"]], "max_pool2d (class in torch.ao.nn.quantized.functional)": [[786, "torch.ao.nn.quantized.functional.max_pool2d"]], "threshold (class in torch.ao.nn.quantized.functional)": [[787, "torch.ao.nn.quantized.functional.threshold"]], "upsample (class in torch.ao.nn.quantized.functional)": [[788, "torch.ao.nn.quantized.functional.upsample"]], "upsample_bilinear (class in torch.ao.nn.quantized.functional)": [[789, "torch.ao.nn.quantized.functional.upsample_bilinear"]], "upsample_nearest (class in torch.ao.nn.quantized.functional)": [[790, "torch.ao.nn.quantized.functional.upsample_nearest"]], "dequantstub (class in torch.ao.quantization)": [[791, "torch.ao.quantization.DeQuantStub"]], "quantstub (class in torch.ao.quantization)": [[792, "torch.ao.quantization.QuantStub"]], "quantwrapper (class in torch.ao.quantization)": [[793, "torch.ao.quantization.QuantWrapper"]], "add_quant_dequant (class in torch.ao.quantization)": [[794, "torch.ao.quantization.add_quant_dequant"]], "backendconfig (class in torch.ao.quantization.backend_config)": [[795, "torch.ao.quantization.backend_config.BackendConfig"]], "configs (torch.ao.quantization.backend_config.backendconfig property)": [[795, "torch.ao.quantization.backend_config.BackendConfig.configs"]], "from_dict() (torch.ao.quantization.backend_config.backendconfig class method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.from_dict"]], "set_backend_pattern_config() (torch.ao.quantization.backend_config.backendconfig method)": [[795, 
"torch.ao.quantization.backend_config.BackendConfig.set_backend_pattern_config"]], "set_backend_pattern_configs() (torch.ao.quantization.backend_config.backendconfig method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.set_backend_pattern_configs"]], "set_name() (torch.ao.quantization.backend_config.backendconfig method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.set_name"]], "to_dict() (torch.ao.quantization.backend_config.backendconfig method)": [[795, "torch.ao.quantization.backend_config.BackendConfig.to_dict"]], "backendpatternconfig (class in torch.ao.quantization.backend_config)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig"]], "add_dtype_config() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.add_dtype_config"]], "from_dict() (torch.ao.quantization.backend_config.backendpatternconfig class method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.from_dict"]], "set_dtype_configs() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_dtype_configs"]], "set_fused_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_fused_module"]], "set_fuser_method() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_fuser_method"]], "set_observation_type() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_observation_type"]], "set_pattern() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_pattern"]], "set_qat_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_qat_module"]], "set_reference_quantized_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_reference_quantized_module"]], "set_root_module() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.set_root_module"]], "to_dict() (torch.ao.quantization.backend_config.backendpatternconfig method)": [[796, "torch.ao.quantization.backend_config.BackendPatternConfig.to_dict"]], "dtypeconfig (class in torch.ao.quantization.backend_config)": [[797, "torch.ao.quantization.backend_config.DTypeConfig"]], "from_dict() (torch.ao.quantization.backend_config.dtypeconfig class method)": [[797, "torch.ao.quantization.backend_config.DTypeConfig.from_dict"]], "to_dict() (torch.ao.quantization.backend_config.dtypeconfig method)": [[797, "torch.ao.quantization.backend_config.DTypeConfig.to_dict"]], "dtypewithconstraints (class in torch.ao.quantization.backend_config)": [[798, "torch.ao.quantization.backend_config.DTypeWithConstraints"]], "input_output_not_observed (torch.ao.quantization.backend_config.observationtype attribute)": [[799, "torch.ao.quantization.backend_config.ObservationType.INPUT_OUTPUT_NOT_OBSERVED"]], "output_share_observer_with_input (torch.ao.quantization.backend_config.observationtype attribute)": [[799, 
"torch.ao.quantization.backend_config.ObservationType.OUTPUT_SHARE_OBSERVER_WITH_INPUT"]], "output_use_different_observer_as_input (torch.ao.quantization.backend_config.observationtype attribute)": [[799, "torch.ao.quantization.backend_config.ObservationType.OUTPUT_USE_DIFFERENT_OBSERVER_AS_INPUT"]], "observationtype (class in torch.ao.quantization.backend_config)": [[799, "torch.ao.quantization.backend_config.ObservationType"]], "convert (class in torch.ao.quantization)": [[800, "torch.ao.quantization.convert"]], "default_eval_fn (class in torch.ao.quantization)": [[801, "torch.ao.quantization.default_eval_fn"]], "fakequantize (class in torch.ao.quantization.fake_quantize)": [[802, "torch.ao.quantization.fake_quantize.FakeQuantize"]], "fakequantizebase (class in torch.ao.quantization.fake_quantize)": [[803, "torch.ao.quantization.fake_quantize.FakeQuantizeBase"]], "fixedqparamsfakequantize (class in torch.ao.quantization.fake_quantize)": [[804, "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize"]], "extra_repr() (torch.ao.quantization.fake_quantize.fixedqparamsfakequantize method)": [[804, "torch.ao.quantization.fake_quantize.FixedQParamsFakeQuantize.extra_repr"]], "fusedmovingavgobsfakequantize (class in torch.ao.quantization.fake_quantize)": [[805, "torch.ao.quantization.fake_quantize.FusedMovingAvgObsFakeQuantize"]], "default_fake_quant (in module torch.ao.quantization.fake_quantize)": [[806, "torch.ao.quantization.fake_quantize.default_fake_quant"]], "default_fused_act_fake_quant (in module torch.ao.quantization.fake_quantize)": [[807, "torch.ao.quantization.fake_quantize.default_fused_act_fake_quant"]], "default_fused_per_channel_wt_fake_quant (in module torch.ao.quantization.fake_quantize)": [[808, "torch.ao.quantization.fake_quantize.default_fused_per_channel_wt_fake_quant"]], "default_fused_wt_fake_quant (in module torch.ao.quantization.fake_quantize)": [[809, "torch.ao.quantization.fake_quantize.default_fused_wt_fake_quant"]], "default_histogram_fake_quant (in module torch.ao.quantization.fake_quantize)": [[810, "torch.ao.quantization.fake_quantize.default_histogram_fake_quant"]], "default_per_channel_weight_fake_quant (in module torch.ao.quantization.fake_quantize)": [[811, "torch.ao.quantization.fake_quantize.default_per_channel_weight_fake_quant"]], "default_weight_fake_quant (in module torch.ao.quantization.fake_quantize)": [[812, "torch.ao.quantization.fake_quantize.default_weight_fake_quant"]], "disable_fake_quant (class in torch.ao.quantization.fake_quantize)": [[813, "torch.ao.quantization.fake_quantize.disable_fake_quant"]], "disable_observer (class in torch.ao.quantization.fake_quantize)": [[814, "torch.ao.quantization.fake_quantize.disable_observer"]], "enable_fake_quant (class in torch.ao.quantization.fake_quantize)": [[815, "torch.ao.quantization.fake_quantize.enable_fake_quant"]], "enable_observer (class in torch.ao.quantization.fake_quantize)": [[816, "torch.ao.quantization.fake_quantize.enable_observer"]], "fuse_modules (class in torch.ao.quantization.fuse_modules)": [[817, "torch.ao.quantization.fuse_modules.fuse_modules"]], "convertcustomconfig (class in torch.ao.quantization.fx.custom_config)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.convertcustomconfig class method)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.from_dict"]], "set_observed_to_quantized_mapping() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[818, 
"torch.ao.quantization.fx.custom_config.ConvertCustomConfig.set_observed_to_quantized_mapping"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.set_preserved_attributes"]], "to_dict() (torch.ao.quantization.fx.custom_config.convertcustomconfig method)": [[818, "torch.ao.quantization.fx.custom_config.ConvertCustomConfig.to_dict"]], "fusecustomconfig (class in torch.ao.quantization.fx.custom_config)": [[819, "torch.ao.quantization.fx.custom_config.FuseCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.fusecustomconfig class method)": [[819, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.from_dict"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.fusecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.set_preserved_attributes"]], "to_dict() (torch.ao.quantization.fx.custom_config.fusecustomconfig method)": [[819, "torch.ao.quantization.fx.custom_config.FuseCustomConfig.to_dict"]], "preparecustomconfig (class in torch.ao.quantization.fx.custom_config)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig"]], "from_dict() (torch.ao.quantization.fx.custom_config.preparecustomconfig class method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.from_dict"]], "set_float_to_observed_mapping() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_float_to_observed_mapping"]], "set_input_quantized_indexes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_input_quantized_indexes"]], "set_non_traceable_module_classes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_non_traceable_module_classes"]], "set_non_traceable_module_names() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_non_traceable_module_names"]], "set_output_quantized_indexes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_output_quantized_indexes"]], "set_preserved_attributes() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_preserved_attributes"]], "set_standalone_module_class() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_standalone_module_class"]], "set_standalone_module_name() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.set_standalone_module_name"]], "to_dict() (torch.ao.quantization.fx.custom_config.preparecustomconfig method)": [[820, "torch.ao.quantization.fx.custom_config.PrepareCustomConfig.to_dict"]], "standalonemoduleconfigentry (class in torch.ao.quantization.fx.custom_config)": [[821, "torch.ao.quantization.fx.custom_config.StandaloneModuleConfigEntry"]], "histogramobserver (class in torch.ao.quantization.observer)": [[822, "torch.ao.quantization.observer.HistogramObserver"]], "minmaxobserver (class in torch.ao.quantization.observer)": [[823, 
"torch.ao.quantization.observer.MinMaxObserver"]], "calculate_qparams() (torch.ao.quantization.observer.minmaxobserver method)": [[823, "torch.ao.quantization.observer.MinMaxObserver.calculate_qparams"]], "forward() (torch.ao.quantization.observer.minmaxobserver method)": [[823, "torch.ao.quantization.observer.MinMaxObserver.forward"]], "reset_min_max_vals() (torch.ao.quantization.observer.minmaxobserver method)": [[823, "torch.ao.quantization.observer.MinMaxObserver.reset_min_max_vals"]], "movingaverageminmaxobserver (class in torch.ao.quantization.observer)": [[824, "torch.ao.quantization.observer.MovingAverageMinMaxObserver"]], "movingaverageperchannelminmaxobserver (class in torch.ao.quantization.observer)": [[825, "torch.ao.quantization.observer.MovingAveragePerChannelMinMaxObserver"]], "noopobserver (class in torch.ao.quantization.observer)": [[826, "torch.ao.quantization.observer.NoopObserver"]], "observerbase (class in torch.ao.quantization.observer)": [[827, "torch.ao.quantization.observer.ObserverBase"]], "with_args() (torch.ao.quantization.observer.observerbase class method)": [[827, "torch.ao.quantization.observer.ObserverBase.with_args"]], "with_callable_args() (torch.ao.quantization.observer.observerbase class method)": [[827, "torch.ao.quantization.observer.ObserverBase.with_callable_args"]], "perchannelminmaxobserver (class in torch.ao.quantization.observer)": [[828, "torch.ao.quantization.observer.PerChannelMinMaxObserver"]], "reset_min_max_vals() (torch.ao.quantization.observer.perchannelminmaxobserver method)": [[828, "torch.ao.quantization.observer.PerChannelMinMaxObserver.reset_min_max_vals"]], "placeholderobserver (class in torch.ao.quantization.observer)": [[829, "torch.ao.quantization.observer.PlaceholderObserver"]], "recordingobserver (class in torch.ao.quantization.observer)": [[830, "torch.ao.quantization.observer.RecordingObserver"]], "default_debug_observer (in module torch.ao.quantization.observer)": [[831, "torch.ao.quantization.observer.default_debug_observer"]], "default_dynamic_quant_observer (in module torch.ao.quantization.observer)": [[832, "torch.ao.quantization.observer.default_dynamic_quant_observer"]], "default_float_qparams_observer (in module torch.ao.quantization.observer)": [[833, "torch.ao.quantization.observer.default_float_qparams_observer"]], "default_histogram_observer (in module torch.ao.quantization.observer)": [[834, "torch.ao.quantization.observer.default_histogram_observer"]], "default_observer (in module torch.ao.quantization.observer)": [[835, "torch.ao.quantization.observer.default_observer"]], "default_per_channel_weight_observer (in module torch.ao.quantization.observer)": [[836, "torch.ao.quantization.observer.default_per_channel_weight_observer"]], "default_placeholder_observer (in module torch.ao.quantization.observer)": [[837, "torch.ao.quantization.observer.default_placeholder_observer"]], "default_weight_observer (in module torch.ao.quantization.observer)": [[838, "torch.ao.quantization.observer.default_weight_observer"]], "get_observer_state_dict (class in torch.ao.quantization.observer)": [[839, "torch.ao.quantization.observer.get_observer_state_dict"]], "load_observer_state_dict (class in torch.ao.quantization.observer)": [[840, "torch.ao.quantization.observer.load_observer_state_dict"]], "prepare (class in torch.ao.quantization)": [[841, "torch.ao.quantization.prepare"]], "prepare_qat (class in torch.ao.quantization)": [[842, "torch.ao.quantization.prepare_qat"]], "propagate_qconfig_ (class in torch.ao.quantization)": 
[[843, "torch.ao.quantization.propagate_qconfig_"]], "model_is_exported (class in torch.ao.quantization.pt2e.export_utils)": [[844, "torch.ao.quantization.pt2e.export_utils.model_is_exported"]], "qconfig (class in torch.ao.quantization.qconfig)": [[845, "torch.ao.quantization.qconfig.QConfig"]], "default_activation_only_qconfig (in module torch.ao.quantization.qconfig)": [[846, "torch.ao.quantization.qconfig.default_activation_only_qconfig"]], "default_debug_qconfig (in module torch.ao.quantization.qconfig)": [[847, "torch.ao.quantization.qconfig.default_debug_qconfig"]], "default_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[848, "torch.ao.quantization.qconfig.default_dynamic_qconfig"]], "default_per_channel_qconfig (in module torch.ao.quantization.qconfig)": [[849, "torch.ao.quantization.qconfig.default_per_channel_qconfig"]], "default_qat_qconfig (in module torch.ao.quantization.qconfig)": [[850, "torch.ao.quantization.qconfig.default_qat_qconfig"]], "default_qat_qconfig_v2 (in module torch.ao.quantization.qconfig)": [[851, "torch.ao.quantization.qconfig.default_qat_qconfig_v2"]], "default_qconfig (in module torch.ao.quantization.qconfig)": [[852, "torch.ao.quantization.qconfig.default_qconfig"]], "default_weight_only_qconfig (in module torch.ao.quantization.qconfig)": [[853, "torch.ao.quantization.qconfig.default_weight_only_qconfig"]], "float16_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[854, "torch.ao.quantization.qconfig.float16_dynamic_qconfig"]], "float16_static_qconfig (in module torch.ao.quantization.qconfig)": [[855, "torch.ao.quantization.qconfig.float16_static_qconfig"]], "float_qparams_weight_only_qconfig (in module torch.ao.quantization.qconfig)": [[856, "torch.ao.quantization.qconfig.float_qparams_weight_only_qconfig"]], "per_channel_dynamic_qconfig (in module torch.ao.quantization.qconfig)": [[857, "torch.ao.quantization.qconfig.per_channel_dynamic_qconfig"]], "qconfigmapping (class in torch.ao.quantization.qconfig_mapping)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping"]], "from_dict() (torch.ao.quantization.qconfig_mapping.qconfigmapping class method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.from_dict"]], "set_global() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_global"]], "set_module_name() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name"]], "set_module_name_object_type_order() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name_object_type_order"]], "set_module_name_regex() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_module_name_regex"]], "set_object_type() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.set_object_type"]], "to_dict() (torch.ao.quantization.qconfig_mapping.qconfigmapping method)": [[858, "torch.ao.quantization.qconfig_mapping.QConfigMapping.to_dict"]], "get_default_qat_qconfig_mapping (class in torch.ao.quantization.qconfig_mapping)": [[859, "torch.ao.quantization.qconfig_mapping.get_default_qat_qconfig_mapping"]], "get_default_qconfig_mapping (class in torch.ao.quantization.qconfig_mapping)": [[860, 
"torch.ao.quantization.qconfig_mapping.get_default_qconfig_mapping"]], "quantize (class in torch.ao.quantization)": [[861, "torch.ao.quantization.quantize"]], "quantize_dynamic (class in torch.ao.quantization)": [[862, "torch.ao.quantization.quantize_dynamic"]], "convert_fx (class in torch.ao.quantization.quantize_fx)": [[863, "torch.ao.quantization.quantize_fx.convert_fx"]], "fuse_fx (class in torch.ao.quantization.quantize_fx)": [[864, "torch.ao.quantization.quantize_fx.fuse_fx"]], "prepare_fx (class in torch.ao.quantization.quantize_fx)": [[865, "torch.ao.quantization.quantize_fx.prepare_fx"]], "prepare_qat_fx (class in torch.ao.quantization.quantize_fx)": [[866, "torch.ao.quantization.quantize_fx.prepare_qat_fx"]], "quantize_qat (class in torch.ao.quantization)": [[867, "torch.ao.quantization.quantize_qat"]], "swap_module (class in torch.ao.quantization)": [[868, "torch.ao.quantization.swap_module"]], "arange() (in module torch)": [[869, "torch.arange"]], "arccos() (in module torch)": [[870, "torch.arccos"]], "arccosh() (in module torch)": [[871, "torch.arccosh"]], "arcsin() (in module torch)": [[872, "torch.arcsin"]], "arcsinh() (in module torch)": [[873, "torch.arcsinh"]], "arctan() (in module torch)": [[874, "torch.arctan"]], "arctan2() (in module torch)": [[875, "torch.arctan2"]], "arctanh() (in module torch)": [[876, "torch.arctanh"]], "are_deterministic_algorithms_enabled() (in module torch)": [[877, "torch.are_deterministic_algorithms_enabled"]], "argmax() (in module torch)": [[878, "torch.argmax"]], "argmin() (in module torch)": [[879, "torch.argmin"]], "argsort() (in module torch)": [[880, "torch.argsort"]], "argwhere() (in module torch)": [[881, "torch.argwhere"]], "as_strided() (in module torch)": [[882, "torch.as_strided"]], "as_tensor() (in module torch)": [[883, "torch.as_tensor"]], "asarray() (in module torch)": [[884, "torch.asarray"]], "asin() (in module torch)": [[885, "torch.asin"]], "asinh() (in module torch)": [[886, "torch.asinh"]], "atan() (in module torch)": [[887, "torch.atan"]], "atan2() (in module torch)": [[888, "torch.atan2"]], "atanh() (in module torch)": [[889, "torch.atanh"]], "atleast_1d() (in module torch)": [[890, "torch.atleast_1d"]], "atleast_2d() (in module torch)": [[891, "torch.atleast_2d"]], "atleast_3d() (in module torch)": [[892, "torch.atleast_3d"]], "backward() (torch.autograd.function static method)": [[893, "torch.autograd.Function.backward"]], "forward() (torch.autograd.function static method)": [[894, "torch.autograd.Function.forward"]], "jvp() (torch.autograd.function static method)": [[895, "torch.autograd.Function.jvp"]], "vmap() (torch.autograd.function static method)": [[896, "torch.autograd.Function.vmap"]], "backward() (in module torch.autograd)": [[897, "torch.autograd.backward"]], "unpackeddualtensor (class in torch.autograd.forward_ad)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor"]], "count() (torch.autograd.forward_ad.unpackeddualtensor method)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor.count"]], "index() (torch.autograd.forward_ad.unpackeddualtensor method)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor.index"]], "primal (torch.autograd.forward_ad.unpackeddualtensor attribute)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor.primal"]], "tangent (torch.autograd.forward_ad.unpackeddualtensor attribute)": [[898, "torch.autograd.forward_ad.UnpackedDualTensor.tangent"]], "dual_level (class in torch.autograd.forward_ad)": [[899, "torch.autograd.forward_ad.dual_level"]], "enter_dual_level() (in 
module torch.autograd.forward_ad)": [[900, "torch.autograd.forward_ad.enter_dual_level"]], "exit_dual_level() (in module torch.autograd.forward_ad)": [[901, "torch.autograd.forward_ad.exit_dual_level"]], "make_dual() (in module torch.autograd.forward_ad)": [[902, "torch.autograd.forward_ad.make_dual"]], "unpack_dual() (in module torch.autograd.forward_ad)": [[903, "torch.autograd.forward_ad.unpack_dual"]], "backwardcfunction (class in torch.autograd.function)": [[904, "torch.autograd.function.BackwardCFunction"]], "apply() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.apply"]], "apply_jvp() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.apply_jvp"]], "mark_dirty() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.save_for_forward"]], "set_materialize_grads() (torch.autograd.function.backwardcfunction method)": [[904, "torch.autograd.function.BackwardCFunction.set_materialize_grads"]], "mark_dirty() (torch.autograd.function.functionctx method)": [[905, "torch.autograd.function.FunctionCtx.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.functionctx method)": [[906, "torch.autograd.function.FunctionCtx.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.functionctx method)": [[907, "torch.autograd.function.FunctionCtx.save_for_backward"]], "set_materialize_grads() (torch.autograd.function.functionctx method)": [[908, "torch.autograd.function.FunctionCtx.set_materialize_grads"]], "inplacefunction (class in torch.autograd.function)": [[909, "torch.autograd.function.InplaceFunction"]], "backward() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.backward"]], "forward() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.forward"]], "jvp() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.jvp"]], "mark_dirty() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.save_for_forward"]], "set_materialize_grads() (torch.autograd.function.inplacefunction method)": [[909, "torch.autograd.function.InplaceFunction.set_materialize_grads"]], "setup_context() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.setup_context"]], "vjp() (torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.vjp"]], "vmap() 
(torch.autograd.function.inplacefunction static method)": [[909, "torch.autograd.function.InplaceFunction.vmap"]], "nestediofunction (class in torch.autograd.function)": [[910, "torch.autograd.function.NestedIOFunction"]], "backward() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.backward"]], "backward_extended() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.backward_extended"]], "forward() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.forward"]], "forward_extended() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.forward_extended"]], "jvp() (torch.autograd.function.nestediofunction static method)": [[910, "torch.autograd.function.NestedIOFunction.jvp"]], "mark_dirty() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.mark_dirty"]], "mark_non_differentiable() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.mark_non_differentiable"]], "save_for_backward() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.save_for_backward"]], "save_for_forward() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.save_for_forward"]], "saved_tensors (torch.autograd.function.nestediofunction property)": [[910, "torch.autograd.function.NestedIOFunction.saved_tensors"]], "set_materialize_grads() (torch.autograd.function.nestediofunction method)": [[910, "torch.autograd.function.NestedIOFunction.set_materialize_grads"]], "setup_context() (torch.autograd.function.nestediofunction static method)": [[910, "torch.autograd.function.NestedIOFunction.setup_context"]], "vjp() (torch.autograd.function.nestediofunction static method)": [[910, "torch.autograd.function.NestedIOFunction.vjp"]], "vmap() (torch.autograd.function.nestediofunction static method)": [[910, "torch.autograd.function.NestedIOFunction.vmap"]], "once_differentiable() (in module torch.autograd.function)": [[911, "torch.autograd.function.once_differentiable"]], "hessian() (in module torch.autograd.functional)": [[912, "torch.autograd.functional.hessian"]], "hvp() (in module torch.autograd.functional)": [[913, "torch.autograd.functional.hvp"]], "jacobian() (in module torch.autograd.functional)": [[914, "torch.autograd.functional.jacobian"]], "jvp() (in module torch.autograd.functional)": [[915, "torch.autograd.functional.jvp"]], "vhp() (in module torch.autograd.functional)": [[916, "torch.autograd.functional.vhp"]], "vjp() (in module torch.autograd.functional)": [[917, "torch.autograd.functional.vjp"]], "grad() (in module torch.autograd)": [[918, "torch.autograd.grad"]], "clone() (torch.autograd.grad_mode.inference_mode method)": [[919, "torch.autograd.grad_mode.inference_mode.clone"]], "inference_mode (class in torch.autograd.grad_mode)": [[919, "torch.autograd.grad_mode.inference_mode"]], "clone() (torch.autograd.grad_mode.set_grad_enabled method)": [[920, "torch.autograd.grad_mode.set_grad_enabled.clone"]], "set_grad_enabled (class in torch.autograd.grad_mode)": [[920, "torch.autograd.grad_mode.set_grad_enabled"]], "clone() (torch.autograd.grad_mode.set_multithreading_enabled method)": [[921, "torch.autograd.grad_mode.set_multithreading_enabled.clone"]], "set_multithreading_enabled (class in torch.autograd.grad_mode)": [[921, 
"torch.autograd.grad_mode.set_multithreading_enabled"]], "gradcheckerror": [[922, "torch.autograd.gradcheck.GradcheckError"]], "gradcheck() (in module torch.autograd.gradcheck)": [[923, "torch.autograd.gradcheck.gradcheck"]], "gradgradcheck() (in module torch.autograd.gradcheck)": [[924, "torch.autograd.gradcheck.gradgradcheck"]], "metadata() (torch.autograd.graph.node method)": [[925, "torch.autograd.graph.Node.metadata"]], "name() (torch.autograd.graph.node method)": [[926, "torch.autograd.graph.Node.name"]], "next_functions (torch.autograd.graph.node property)": [[927, "torch.autograd.graph.Node.next_functions"]], "register_hook() (torch.autograd.graph.node method)": [[928, "torch.autograd.graph.Node.register_hook"]], "register_prehook() (torch.autograd.graph.node method)": [[929, "torch.autograd.graph.Node.register_prehook"]], "increment_version() (in module torch.autograd.graph)": [[930, "torch.autograd.graph.increment_version"]], "enforceunique (class in torch.autograd.profiler)": [[931, "torch.autograd.profiler.EnforceUnique"]], "see() (torch.autograd.profiler.enforceunique method)": [[931, "torch.autograd.profiler.EnforceUnique.see"]], "kinetosteptracker (class in torch.autograd.profiler)": [[932, "torch.autograd.profiler.KinetoStepTracker"]], "current_step() (torch.autograd.profiler.kinetosteptracker class method)": [[932, "torch.autograd.profiler.KinetoStepTracker.current_step"]], "erase_step_count() (torch.autograd.profiler.kinetosteptracker class method)": [[932, "torch.autograd.profiler.KinetoStepTracker.erase_step_count"]], "increment_step() (torch.autograd.profiler.kinetosteptracker class method)": [[932, "torch.autograd.profiler.KinetoStepTracker.increment_step"]], "init_step_count() (torch.autograd.profiler.kinetosteptracker class method)": [[932, "torch.autograd.profiler.KinetoStepTracker.init_step_count"]], "load_nvprof() (in module torch.autograd.profiler)": [[933, "torch.autograd.profiler.load_nvprof"]], "parse_nvprof_trace() (in module torch.autograd.profiler)": [[934, "torch.autograd.profiler.parse_nvprof_trace"]], "export_chrome_trace() (torch.autograd.profiler.profile method)": [[935, "torch.autograd.profiler.profile.export_chrome_trace"]], "key_averages() (torch.autograd.profiler.profile method)": [[936, "torch.autograd.profiler.profile.key_averages"]], "self_cpu_time_total (torch.autograd.profiler.profile property)": [[937, "torch.autograd.profiler.profile.self_cpu_time_total"]], "total_average() (torch.autograd.profiler.profile method)": [[938, "torch.autograd.profiler.profile.total_average"]], "record_function (class in torch.autograd.profiler)": [[939, "torch.autograd.profiler.record_function"]], "interval (class in torch.autograd.profiler_util)": [[940, "torch.autograd.profiler_util.Interval"]], "elapsed_us() (torch.autograd.profiler_util.interval method)": [[940, "torch.autograd.profiler_util.Interval.elapsed_us"]], "kernel (class in torch.autograd.profiler_util)": [[941, "torch.autograd.profiler_util.Kernel"]], "count() (torch.autograd.profiler_util.kernel method)": [[941, "torch.autograd.profiler_util.Kernel.count"]], "device (torch.autograd.profiler_util.kernel attribute)": [[941, "torch.autograd.profiler_util.Kernel.device"]], "duration (torch.autograd.profiler_util.kernel attribute)": [[941, "torch.autograd.profiler_util.Kernel.duration"]], "index() (torch.autograd.profiler_util.kernel method)": [[941, "torch.autograd.profiler_util.Kernel.index"]], "name (torch.autograd.profiler_util.kernel attribute)": [[941, 
"torch.autograd.profiler_util.Kernel.name"]], "memrecordsacc (class in torch.autograd.profiler_util)": [[942, "torch.autograd.profiler_util.MemRecordsAcc"]], "in_interval() (torch.autograd.profiler_util.memrecordsacc method)": [[942, "torch.autograd.profiler_util.MemRecordsAcc.in_interval"]], "stringtable (class in torch.autograd.profiler_util)": [[943, "torch.autograd.profiler_util.StringTable"]], "clear() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.clear"]], "copy() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.copy"]], "default_factory (torch.autograd.profiler_util.stringtable attribute)": [[943, "torch.autograd.profiler_util.StringTable.default_factory"]], "fromkeys() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.fromkeys"]], "get() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.get"]], "items() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.items"]], "keys() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.keys"]], "pop() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.pop"]], "popitem() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.popitem"]], "setdefault() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.setdefault"]], "update() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.update"]], "values() (torch.autograd.profiler_util.stringtable method)": [[943, "torch.autograd.profiler_util.StringTable.values"]], "baddbmm() (in module torch)": [[944, "torch.baddbmm"]], "bartlett_window() (in module torch)": [[945, "torch.bartlett_window"]], "bernoulli() (in module torch)": [[946, "torch.bernoulli"]], "bincount() (in module torch)": [[947, "torch.bincount"]], "bitwise_and() (in module torch)": [[948, "torch.bitwise_and"]], "bitwise_left_shift() (in module torch)": [[949, "torch.bitwise_left_shift"]], "bitwise_not() (in module torch)": [[950, "torch.bitwise_not"]], "bitwise_or() (in module torch)": [[951, "torch.bitwise_or"]], "bitwise_right_shift() (in module torch)": [[952, "torch.bitwise_right_shift"]], "bitwise_xor() (in module torch)": [[953, "torch.bitwise_xor"]], "blackman_window() (in module torch)": [[954, "torch.blackman_window"]], "block_diag() (in module torch)": [[955, "torch.block_diag"]], "bmm() (in module torch)": [[956, "torch.bmm"]], "broadcast_shapes() (in module torch)": [[957, "torch.broadcast_shapes"]], "broadcast_tensors() (in module torch)": [[958, "torch.broadcast_tensors"]], "broadcast_to() (in module torch)": [[959, "torch.broadcast_to"]], "bucketize() (in module torch)": [[960, "torch.bucketize"]], "can_cast() (in module torch)": [[961, "torch.can_cast"]], "cartesian_prod() (in module torch)": [[962, "torch.cartesian_prod"]], "cat() (in module torch)": [[963, "torch.cat"]], "cdist() (in module torch)": [[964, "torch.cdist"]], "ceil() (in module torch)": [[965, "torch.ceil"]], "chain_matmul() (in module torch)": [[966, "torch.chain_matmul"]], "cholesky() (in module torch)": [[967, "torch.cholesky"]], "cholesky_inverse() (in module torch)": [[968, "torch.cholesky_inverse"]], "cholesky_solve() (in module torch)": [[969, 
"torch.cholesky_solve"]], "chunk() (in module torch)": [[970, "torch.chunk"]], "clamp() (in module torch)": [[971, "torch.clamp"]], "clip() (in module torch)": [[972, "torch.clip"]], "clone() (in module torch)": [[973, "torch.clone"]], "column_stack() (in module torch)": [[974, "torch.column_stack"]], "combinations() (in module torch)": [[975, "torch.combinations"]], "compile() (in module torch)": [[976, "torch.compile"]], "compiled_with_cxx11_abi() (in module torch)": [[977, "torch.compiled_with_cxx11_abi"]], "allow_in_graph() (in module torch.compiler)": [[978, "torch.compiler.allow_in_graph"]], "assume_constant_result() (in module torch.compiler)": [[979, "torch.compiler.assume_constant_result"]], "compile() (in module torch.compiler)": [[980, "torch.compiler.compile"]], "cudagraph_mark_step_begin() (in module torch.compiler)": [[981, "torch.compiler.cudagraph_mark_step_begin"]], "disable() (in module torch.compiler)": [[982, "torch.compiler.disable"]], "is_compiling() (in module torch.compiler)": [[983, "torch.compiler.is_compiling"]], "is_dynamo_compiling() (in module torch.compiler)": [[984, "torch.compiler.is_dynamo_compiling"]], "list_backends() (in module torch.compiler)": [[985, "torch.compiler.list_backends"]], "reset() (in module torch.compiler)": [[986, "torch.compiler.reset"]], "complex() (in module torch)": [[987, "torch.complex"]], "concat() (in module torch)": [[988, "torch.concat"]], "concatenate() (in module torch)": [[989, "torch.concatenate"]], "cond() (in module torch)": [[990, "torch.cond"]], "conj() (in module torch)": [[991, "torch.conj"]], "conj_physical() (in module torch)": [[992, "torch.conj_physical"]], "copysign() (in module torch)": [[993, "torch.copysign"]], "corrcoef() (in module torch)": [[994, "torch.corrcoef"]], "cos() (in module torch)": [[995, "torch.cos"]], "cosh() (in module torch)": [[996, "torch.cosh"]], "count_nonzero() (in module torch)": [[997, "torch.count_nonzero"]], "cov() (in module torch)": [[998, "torch.cov"]], "stream (class in torch.cpu)": [[999, "torch.cpu.Stream"]], "streamcontext (class in torch.cpu)": [[1000, "torch.cpu.StreamContext"]], "current_device() (in module torch.cpu)": [[1001, "torch.cpu.current_device"]], "current_stream() (in module torch.cpu)": [[1002, "torch.cpu.current_stream"]], "device_count() (in module torch.cpu)": [[1003, "torch.cpu.device_count"]], "is_available() (in module torch.cpu)": [[1004, "torch.cpu.is_available"]], "set_device() (in module torch.cpu)": [[1005, "torch.cpu.set_device"]], "stream() (in module torch.cpu)": [[1006, "torch.cpu.stream"]], "synchronize() (in module torch.cpu)": [[1007, "torch.cpu.synchronize"]], "cross() (in module torch)": [[1008, "torch.cross"]], "cudagraph (class in torch.cuda)": [[1009, "torch.cuda.CUDAGraph"]], "capture_begin() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.capture_begin"]], "capture_end() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.capture_end"]], "debug_dump() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.debug_dump"]], "enable_debug_mode() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.enable_debug_mode"]], "pool() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.pool"]], "replay() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.replay"]], "reset() (torch.cuda.cudagraph method)": [[1009, "torch.cuda.CUDAGraph.reset"]], "cudapluggableallocator (class in torch.cuda)": [[1010, "torch.cuda.CUDAPluggableAllocator"]], "event (class in torch.cuda)": [[1011, 
"torch.cuda.Event"]], "elapsed_time() (torch.cuda.event method)": [[1011, "torch.cuda.Event.elapsed_time"]], "from_ipc_handle() (torch.cuda.event class method)": [[1011, "torch.cuda.Event.from_ipc_handle"]], "ipc_handle() (torch.cuda.event method)": [[1011, "torch.cuda.Event.ipc_handle"]], "query() (torch.cuda.event method)": [[1011, "torch.cuda.Event.query"]], "record() (torch.cuda.event method)": [[1011, "torch.cuda.Event.record"]], "synchronize() (torch.cuda.event method)": [[1011, "torch.cuda.Event.synchronize"]], "wait() (torch.cuda.event method)": [[1011, "torch.cuda.Event.wait"]], "externalstream (class in torch.cuda)": [[1012, "torch.cuda.ExternalStream"]], "query() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.query"]], "record_event() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.record_event"]], "synchronize() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.synchronize"]], "wait_event() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.wait_event"]], "wait_stream() (torch.cuda.externalstream method)": [[1012, "torch.cuda.ExternalStream.wait_stream"]], "outofmemoryerror": [[1013, "torch.cuda.OutOfMemoryError"]], "stream (class in torch.cuda)": [[1014, "torch.cuda.Stream"]], "query() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.query"]], "record_event() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.record_event"]], "synchronize() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.synchronize"]], "wait_event() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.wait_event"]], "wait_stream() (torch.cuda.stream method)": [[1014, "torch.cuda.Stream.wait_stream"]], "streamcontext (class in torch.cuda)": [[1015, "torch.cuda.StreamContext"]], "caching_allocator_alloc() (in module torch.cuda)": [[1016, "torch.cuda.caching_allocator_alloc"]], "caching_allocator_delete() (in module torch.cuda)": [[1017, "torch.cuda.caching_allocator_delete"]], "can_device_access_peer() (in module torch.cuda)": [[1018, "torch.cuda.can_device_access_peer"]], "change_current_allocator() (in module torch.cuda)": [[1019, "torch.cuda.change_current_allocator"]], "clock_rate() (in module torch.cuda)": [[1020, "torch.cuda.clock_rate"]], "broadcast() (in module torch.cuda.comm)": [[1021, "torch.cuda.comm.broadcast"]], "broadcast_coalesced() (in module torch.cuda.comm)": [[1022, "torch.cuda.comm.broadcast_coalesced"]], "gather() (in module torch.cuda.comm)": [[1023, "torch.cuda.comm.gather"]], "reduce_add() (in module torch.cuda.comm)": [[1024, "torch.cuda.comm.reduce_add"]], "scatter() (in module torch.cuda.comm)": [[1025, "torch.cuda.comm.scatter"]], "current_blas_handle() (in module torch.cuda)": [[1026, "torch.cuda.current_blas_handle"]], "current_device() (in module torch.cuda)": [[1027, "torch.cuda.current_device"]], "current_stream() (in module torch.cuda)": [[1028, "torch.cuda.current_stream"]], "default_stream() (in module torch.cuda)": [[1029, "torch.cuda.default_stream"]], "device (class in torch.cuda)": [[1030, "torch.cuda.device"]], "device_count() (in module torch.cuda)": [[1031, "torch.cuda.device_count"]], "device_of (class in torch.cuda)": [[1032, "torch.cuda.device_of"]], "empty_cache() (in module torch.cuda)": [[1033, "torch.cuda.empty_cache"]], "get_allocator_backend() (in module torch.cuda)": [[1034, "torch.cuda.get_allocator_backend"]], "get_arch_list() (in module torch.cuda)": [[1035, "torch.cuda.get_arch_list"]], "get_device_capability() (in module torch.cuda)": 
[[1036, "torch.cuda.get_device_capability"]], "get_device_name() (in module torch.cuda)": [[1037, "torch.cuda.get_device_name"]], "get_device_properties() (in module torch.cuda)": [[1038, "torch.cuda.get_device_properties"]], "get_gencode_flags() (in module torch.cuda)": [[1039, "torch.cuda.get_gencode_flags"]], "get_rng_state() (in module torch.cuda)": [[1040, "torch.cuda.get_rng_state"]], "get_rng_state_all() (in module torch.cuda)": [[1041, "torch.cuda.get_rng_state_all"]], "get_sync_debug_mode() (in module torch.cuda)": [[1042, "torch.cuda.get_sync_debug_mode"]], "graph (class in torch.cuda)": [[1043, "torch.cuda.graph"]], "graph_pool_handle() (in module torch.cuda)": [[1044, "torch.cuda.graph_pool_handle"]], "init() (in module torch.cuda)": [[1045, "torch.cuda.init"]], "initial_seed() (in module torch.cuda)": [[1046, "torch.cuda.initial_seed"]], "ipc_collect() (in module torch.cuda)": [[1047, "torch.cuda.ipc_collect"]], "is_available() (in module torch.cuda)": [[1048, "torch.cuda.is_available"]], "is_current_stream_capturing() (in module torch.cuda)": [[1049, "torch.cuda.is_current_stream_capturing"]], "is_initialized() (in module torch.cuda)": [[1050, "torch.cuda.is_initialized"]], "_create_jit_fn() (in module torch.cuda.jiterator)": [[1051, "torch.cuda.jiterator._create_jit_fn"]], "_create_multi_output_jit_fn() (in module torch.cuda.jiterator)": [[1052, "torch.cuda.jiterator._create_multi_output_jit_fn"]], "list_gpu_processes() (in module torch.cuda)": [[1053, "torch.cuda.list_gpu_processes"]], "make_graphed_callables() (in module torch.cuda)": [[1054, "torch.cuda.make_graphed_callables"]], "manual_seed() (in module torch.cuda)": [[1055, "torch.cuda.manual_seed"]], "manual_seed_all() (in module torch.cuda)": [[1056, "torch.cuda.manual_seed_all"]], "max_memory_allocated() (in module torch.cuda)": [[1057, "torch.cuda.max_memory_allocated"]], "max_memory_cached() (in module torch.cuda)": [[1058, "torch.cuda.max_memory_cached"]], "max_memory_reserved() (in module torch.cuda)": [[1059, "torch.cuda.max_memory_reserved"]], "mem_get_info() (in module torch.cuda)": [[1060, "torch.cuda.mem_get_info"]], "memory_allocated() (in module torch.cuda)": [[1061, "torch.cuda.memory_allocated"]], "memory_cached() (in module torch.cuda)": [[1062, "torch.cuda.memory_cached"]], "memory_reserved() (in module torch.cuda)": [[1063, "torch.cuda.memory_reserved"]], "memory_snapshot() (in module torch.cuda)": [[1064, "torch.cuda.memory_snapshot"]], "memory_stats() (in module torch.cuda)": [[1065, "torch.cuda.memory_stats"]], "memory_summary() (in module torch.cuda)": [[1066, "torch.cuda.memory_summary"]], "memory_usage() (in module torch.cuda)": [[1067, "torch.cuda.memory_usage"]], "mark() (in module torch.cuda.nvtx)": [[1068, "torch.cuda.nvtx.mark"]], "range() (in module torch.cuda.nvtx)": [[1069, "torch.cuda.nvtx.range"]], "range_pop() (in module torch.cuda.nvtx)": [[1070, "torch.cuda.nvtx.range_pop"]], "range_push() (in module torch.cuda.nvtx)": [[1071, "torch.cuda.nvtx.range_push"]], "power_draw() (in module torch.cuda)": [[1072, "torch.cuda.power_draw"]], "reset_max_memory_allocated() (in module torch.cuda)": [[1073, "torch.cuda.reset_max_memory_allocated"]], "reset_max_memory_cached() (in module torch.cuda)": [[1074, "torch.cuda.reset_max_memory_cached"]], "reset_peak_memory_stats() (in module torch.cuda)": [[1075, "torch.cuda.reset_peak_memory_stats"]], "seed() (in module torch.cuda)": [[1076, "torch.cuda.seed"]], "seed_all() (in module torch.cuda)": [[1077, "torch.cuda.seed_all"]], "set_device() (in 
module torch.cuda)": [[1078, "torch.cuda.set_device"]], "set_per_process_memory_fraction() (in module torch.cuda)": [[1079, "torch.cuda.set_per_process_memory_fraction"]], "set_rng_state() (in module torch.cuda)": [[1080, "torch.cuda.set_rng_state"]], "set_rng_state_all() (in module torch.cuda)": [[1081, "torch.cuda.set_rng_state_all"]], "set_stream() (in module torch.cuda)": [[1082, "torch.cuda.set_stream"]], "set_sync_debug_mode() (in module torch.cuda)": [[1083, "torch.cuda.set_sync_debug_mode"]], "stream() (in module torch.cuda)": [[1084, "torch.cuda.stream"]], "synchronize() (in module torch.cuda)": [[1085, "torch.cuda.synchronize"]], "temperature() (in module torch.cuda)": [[1086, "torch.cuda.temperature"]], "utilization() (in module torch.cuda)": [[1087, "torch.cuda.utilization"]], "cummax() (in module torch)": [[1088, "torch.cummax"]], "cummin() (in module torch)": [[1089, "torch.cummin"]], "cumprod() (in module torch)": [[1090, "torch.cumprod"]], "cumsum() (in module torch)": [[1091, "torch.cumsum"]], "cumulative_trapezoid() (in module torch)": [[1092, "torch.cumulative_trapezoid"]], "deg2rad() (in module torch)": [[1093, "torch.deg2rad"]], "dequantize() (in module torch)": [[1094, "torch.dequantize"]], "det() (in module torch)": [[1095, "torch.det"]], "diag() (in module torch)": [[1096, "torch.diag"]], "diag_embed() (in module torch)": [[1097, "torch.diag_embed"]], "diagflat() (in module torch)": [[1098, "torch.diagflat"]], "diagonal() (in module torch)": [[1099, "torch.diagonal"]], "diagonal_scatter() (in module torch)": [[1100, "torch.diagonal_scatter"]], "diff() (in module torch)": [[1101, "torch.diff"]], "digamma() (in module torch)": [[1102, "torch.digamma"]], "dist() (in module torch)": [[1103, "torch.dist"]], "div() (in module torch)": [[1104, "torch.div"]], "divide() (in module torch)": [[1105, "torch.divide"]], "dot() (in module torch)": [[1106, "torch.dot"]], "dsplit() (in module torch)": [[1107, "torch.dsplit"]], "dstack() (in module torch)": [[1108, "torch.dstack"]], "einsum() (in module torch)": [[1109, "torch.einsum"]], "empty() (in module torch)": [[1110, "torch.empty"]], "empty_like() (in module torch)": [[1111, "torch.empty_like"]], "empty_strided() (in module torch)": [[1112, "torch.empty_strided"]], "enable_grad (class in torch)": [[1113, "torch.enable_grad"]], "eq() (in module torch)": [[1114, "torch.eq"]], "equal() (in module torch)": [[1115, "torch.equal"]], "erf() (in module torch)": [[1116, "torch.erf"]], "erfc() (in module torch)": [[1117, "torch.erfc"]], "erfinv() (in module torch)": [[1118, "torch.erfinv"]], "exp() (in module torch)": [[1119, "torch.exp"]], "exp2() (in module torch)": [[1120, "torch.exp2"]], "expm1() (in module torch)": [[1121, "torch.expm1"]], "eye() (in module torch)": [[1122, "torch.eye"]], "fake_quantize_per_channel_affine() (in module torch)": [[1123, "torch.fake_quantize_per_channel_affine"]], "fake_quantize_per_tensor_affine() (in module torch)": [[1124, "torch.fake_quantize_per_tensor_affine"]], "fft() (in module torch.fft)": [[1125, "torch.fft.fft"]], "fft2() (in module torch.fft)": [[1126, "torch.fft.fft2"]], "fftfreq() (in module torch.fft)": [[1127, "torch.fft.fftfreq"]], "fftn() (in module torch.fft)": [[1128, "torch.fft.fftn"]], "fftshift() (in module torch.fft)": [[1129, "torch.fft.fftshift"]], "hfft() (in module torch.fft)": [[1130, "torch.fft.hfft"]], "hfft2() (in module torch.fft)": [[1131, "torch.fft.hfft2"]], "hfftn() (in module torch.fft)": [[1132, "torch.fft.hfftn"]], "ifft() (in module torch.fft)": [[1133, 
"torch.fft.ifft"]], "ifft2() (in module torch.fft)": [[1134, "torch.fft.ifft2"]], "ifftn() (in module torch.fft)": [[1135, "torch.fft.ifftn"]], "ifftshift() (in module torch.fft)": [[1136, "torch.fft.ifftshift"]], "ihfft() (in module torch.fft)": [[1137, "torch.fft.ihfft"]], "ihfft2() (in module torch.fft)": [[1138, "torch.fft.ihfft2"]], "ihfftn() (in module torch.fft)": [[1139, "torch.fft.ihfftn"]], "irfft() (in module torch.fft)": [[1140, "torch.fft.irfft"]], "irfft2() (in module torch.fft)": [[1141, "torch.fft.irfft2"]], "irfftn() (in module torch.fft)": [[1142, "torch.fft.irfftn"]], "rfft() (in module torch.fft)": [[1143, "torch.fft.rfft"]], "rfft2() (in module torch.fft)": [[1144, "torch.fft.rfft2"]], "rfftfreq() (in module torch.fft)": [[1145, "torch.fft.rfftfreq"]], "rfftn() (in module torch.fft)": [[1146, "torch.fft.rfftn"]], "fix() (in module torch)": [[1147, "torch.fix"]], "flatten() (in module torch)": [[1148, "torch.flatten"]], "flip() (in module torch)": [[1149, "torch.flip"]], "fliplr() (in module torch)": [[1150, "torch.fliplr"]], "flipud() (in module torch)": [[1151, "torch.flipud"]], "float_power() (in module torch)": [[1152, "torch.float_power"]], "floor() (in module torch)": [[1153, "torch.floor"]], "floor_divide() (in module torch)": [[1154, "torch.floor_divide"]], "fmax() (in module torch)": [[1155, "torch.fmax"]], "fmin() (in module torch)": [[1156, "torch.fmin"]], "fmod() (in module torch)": [[1157, "torch.fmod"]], "frac() (in module torch)": [[1158, "torch.frac"]], "frexp() (in module torch)": [[1159, "torch.frexp"]], "from_dlpack() (in module torch)": [[1160, "torch.from_dlpack"]], "from_file() (in module torch)": [[1161, "torch.from_file"]], "from_numpy() (in module torch)": [[1162, "torch.from_numpy"]], "frombuffer() (in module torch)": [[1163, "torch.frombuffer"]], "full() (in module torch)": [[1164, "torch.full"]], "full_like() (in module torch)": [[1165, "torch.full_like"]], "functional_call() (in module torch.func)": [[1166, "torch.func.functional_call"]], "functionalize() (in module torch.func)": [[1167, "torch.func.functionalize"]], "grad() (in module torch.func)": [[1168, "torch.func.grad"]], "grad_and_value() (in module torch.func)": [[1169, "torch.func.grad_and_value"]], "hessian() (in module torch.func)": [[1170, "torch.func.hessian"]], "jacfwd() (in module torch.func)": [[1171, "torch.func.jacfwd"]], "jacrev() (in module torch.func)": [[1172, "torch.func.jacrev"]], "jvp() (in module torch.func)": [[1173, "torch.func.jvp"]], "linearize() (in module torch.func)": [[1174, "torch.func.linearize"]], "replace_all_batch_norm_modules_() (in module torch.func)": [[1175, "torch.func.replace_all_batch_norm_modules_"]], "stack_module_state() (in module torch.func)": [[1176, "torch.func.stack_module_state"]], "vjp() (in module torch.func)": [[1177, "torch.func.vjp"]], "vmap() (in module torch.func)": [[1178, "torch.func.vmap"]], "callmethodkey (class in torch.fx.experimental.symbolic_shapes)": [[1179, "torch.fx.experimental.symbolic_shapes.CallMethodKey"]], "get() (torch.fx.experimental.symbolic_shapes.callmethodkey method)": [[1179, "torch.fx.experimental.symbolic_shapes.CallMethodKey.get"]], "convertintkey (class in torch.fx.experimental.symbolic_shapes)": [[1180, "torch.fx.experimental.symbolic_shapes.ConvertIntKey"]], "get() (torch.fx.experimental.symbolic_shapes.convertintkey method)": [[1180, "torch.fx.experimental.symbolic_shapes.ConvertIntKey.get"]], "dimconstraints (class in torch.fx.experimental.symbolic_shapes)": [[1181, 
"torch.fx.experimental.symbolic_shapes.DimConstraints"]], "add() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.add"]], "add_equality() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.add_equality"]], "forced_specializations() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.forced_specializations"]], "prettify_results() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.prettify_results"]], "remove_redundant_dynamic_results() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.remove_redundant_dynamic_results"]], "rewrite_with_congruences() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.rewrite_with_congruences"]], "solve() (torch.fx.experimental.symbolic_shapes.dimconstraints method)": [[1181, "torch.fx.experimental.symbolic_shapes.DimConstraints.solve"]], "dimdynamic (class in torch.fx.experimental.symbolic_shapes)": [[1182, "torch.fx.experimental.symbolic_shapes.DimDynamic"]], "dividebykey (class in torch.fx.experimental.symbolic_shapes)": [[1183, "torch.fx.experimental.symbolic_shapes.DivideByKey"]], "get() (torch.fx.experimental.symbolic_shapes.dividebykey method)": [[1183, "torch.fx.experimental.symbolic_shapes.DivideByKey.get"]], "equalityconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1184, "torch.fx.experimental.symbolic_shapes.EqualityConstraint"]], "innertensorkey (class in torch.fx.experimental.symbolic_shapes)": [[1185, "torch.fx.experimental.symbolic_shapes.InnerTensorKey"]], "get() (torch.fx.experimental.symbolic_shapes.innertensorkey method)": [[1185, "torch.fx.experimental.symbolic_shapes.InnerTensorKey.get"]], "propagateunbackedsymints (class in torch.fx.experimental.symbolic_shapes)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts"]], "boxed_run() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.boxed_run"]], "call_function() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_function"]], "call_method() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_method"]], "call_module() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.call_module"]], "fetch_args_kwargs_from_env() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.fetch_args_kwargs_from_env"]], "fetch_attr() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.fetch_attr"]], "get_attr() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.get_attr"]], "map_nodes_to_values() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, 
"torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.map_nodes_to_values"]], "output() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.output"]], "placeholder() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.placeholder"]], "run() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.run"]], "run_node() (torch.fx.experimental.symbolic_shapes.propagateunbackedsymints method)": [[1186, "torch.fx.experimental.symbolic_shapes.PropagateUnbackedSymInts.run_node"]], "relaxedunspecconstraint (class in torch.fx.experimental.symbolic_shapes)": [[1187, "torch.fx.experimental.symbolic_shapes.RelaxedUnspecConstraint"]], "shapeenv (class in torch.fx.experimental.symbolic_shapes)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv"]], "add_var_to_val() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.add_var_to_val"]], "bind_symbols() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.bind_symbols"]], "bound_sympy() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.bound_sympy"]], "check_equal() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.check_equal"]], "cleanup() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.cleanup"]], "create_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symbol"]], "create_symbolic_sizes_strides_storage_offset() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symbolic_sizes_strides_storage_offset"]], "create_symboolnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symboolnode"]], "create_symfloatnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symfloatnode"]], "create_symintnode() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_symintnode"]], "create_unbacked_symbool() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symbool"]], "create_unbacked_symfloat() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symfloat"]], "create_unbacked_symint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unbacked_symint"]], "create_unspecified_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unspecified_symbol"]], "create_unspecified_symint_and_symbol() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.create_unspecified_symint_and_symbol"]], "defer_runtime_assert() (torch.fx.experimental.symbolic_shapes.shapeenv method)": 
[[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.defer_runtime_assert"]], "evaluate_expr() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_expr"]], "evaluate_guards_expression() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_guards_expression"]], "evaluate_guards_for_args() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.evaluate_guards_for_args"]], "format_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.format_guards"]], "freeze() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.freeze"]], "freeze_runtime_asserts() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.freeze_runtime_asserts"]], "get_axioms() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_axioms"]], "get_implications() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_implications"]], "get_nontrivial_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_nontrivial_guards"]], "get_pruned_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.get_pruned_guards"]], "ignore_fresh_unbacked_symbols() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.ignore_fresh_unbacked_symbols"]], "is_unbacked_symint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.is_unbacked_symint"]], "produce_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.produce_guards"]], "produce_guards_expression() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.produce_guards_expression"]], "replace() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.replace"]], "set_unbacked_var_to_val() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.set_unbacked_var_to_val"]], "simplify() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.simplify"]], "size_hint() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.size_hint"]], "suppress_guards() (torch.fx.experimental.symbolic_shapes.shapeenv method)": [[1188, "torch.fx.experimental.symbolic_shapes.ShapeEnv.suppress_guards"]], "shapeenvsettings (class in torch.fx.experimental.symbolic_shapes)": [[1189, "torch.fx.experimental.symbolic_shapes.ShapeEnvSettings"]], "statefulsymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1190, "torch.fx.experimental.symbolic_shapes.StatefulSymbolicContext"]], "statelesssymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1191, "torch.fx.experimental.symbolic_shapes.StatelessSymbolicContext"]], "strictminmaxconstraint (class in 
torch.fx.experimental.symbolic_shapes)": [[1192, "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint"]], "render() (torch.fx.experimental.symbolic_shapes.strictminmaxconstraint method)": [[1192, "torch.fx.experimental.symbolic_shapes.StrictMinMaxConstraint.render"]], "subclasssymboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1193, "torch.fx.experimental.symbolic_shapes.SubclassSymbolicContext"]], "symboliccontext (class in torch.fx.experimental.symbolic_shapes)": [[1194, "torch.fx.experimental.symbolic_shapes.SymbolicContext"]], "canonicalize_bool_expr() (in module torch.fx.experimental.symbolic_shapes)": [[1195, "torch.fx.experimental.symbolic_shapes.canonicalize_bool_expr"]], "check_consistent() (in module torch.fx.experimental.symbolic_shapes)": [[1196, "torch.fx.experimental.symbolic_shapes.check_consistent"]], "compute_unbacked_bindings() (in module torch.fx.experimental.symbolic_shapes)": [[1197, "torch.fx.experimental.symbolic_shapes.compute_unbacked_bindings"]], "constrain_range() (in module torch.fx.experimental.symbolic_shapes)": [[1198, "torch.fx.experimental.symbolic_shapes.constrain_range"]], "constrain_unify() (in module torch.fx.experimental.symbolic_shapes)": [[1199, "torch.fx.experimental.symbolic_shapes.constrain_unify"]], "definitely_false() (in module torch.fx.experimental.symbolic_shapes)": [[1200, "torch.fx.experimental.symbolic_shapes.definitely_false"]], "definitely_true() (in module torch.fx.experimental.symbolic_shapes)": [[1201, "torch.fx.experimental.symbolic_shapes.definitely_true"]], "guard_size_oblivious() (in module torch.fx.experimental.symbolic_shapes)": [[1202, "torch.fx.experimental.symbolic_shapes.guard_size_oblivious"]], "has_free_symbols() (in module torch.fx.experimental.symbolic_shapes)": [[1203, "torch.fx.experimental.symbolic_shapes.has_free_symbols"]], "hint_int() (in module torch.fx.experimental.symbolic_shapes)": [[1204, "torch.fx.experimental.symbolic_shapes.hint_int"]], "is_concrete_bool() (in module torch.fx.experimental.symbolic_shapes)": [[1205, "torch.fx.experimental.symbolic_shapes.is_concrete_bool"]], "is_concrete_int() (in module torch.fx.experimental.symbolic_shapes)": [[1206, "torch.fx.experimental.symbolic_shapes.is_concrete_int"]], "lru_cache() (in module torch.fx.experimental.symbolic_shapes)": [[1207, "torch.fx.experimental.symbolic_shapes.lru_cache"]], "parallel_and() (in module torch.fx.experimental.symbolic_shapes)": [[1208, "torch.fx.experimental.symbolic_shapes.parallel_and"]], "parallel_or() (in module torch.fx.experimental.symbolic_shapes)": [[1209, "torch.fx.experimental.symbolic_shapes.parallel_or"]], "rebind_unbacked() (in module torch.fx.experimental.symbolic_shapes)": [[1210, "torch.fx.experimental.symbolic_shapes.rebind_unbacked"]], "resolve_unbacked_bindings() (in module torch.fx.experimental.symbolic_shapes)": [[1211, "torch.fx.experimental.symbolic_shapes.resolve_unbacked_bindings"]], "statically_known_true() (in module torch.fx.experimental.symbolic_shapes)": [[1212, "torch.fx.experimental.symbolic_shapes.statically_known_true"]], "sym_eq() (in module torch.fx.experimental.symbolic_shapes)": [[1213, "torch.fx.experimental.symbolic_shapes.sym_eq"]], "gather() (in module torch)": [[1214, "torch.gather"]], "gcd() (in module torch)": [[1215, "torch.gcd"]], "ge() (in module torch)": [[1216, "torch.ge"]], "geqrf() (in module torch)": [[1217, "torch.geqrf"]], "ger() (in module torch)": [[1218, "torch.ger"]], "get_default_device() (in module torch)": [[1219, "torch.get_default_device"]], 
"get_default_dtype() (in module torch)": [[1220, "torch.get_default_dtype"]], "get_deterministic_debug_mode() (in module torch)": [[1221, "torch.get_deterministic_debug_mode"]], "get_device_module() (in module torch)": [[1222, "torch.get_device_module"]], "get_float32_matmul_precision() (in module torch)": [[1223, "torch.get_float32_matmul_precision"]], "get_num_interop_threads() (in module torch)": [[1224, "torch.get_num_interop_threads"]], "get_num_threads() (in module torch)": [[1225, "torch.get_num_threads"]], "get_rng_state() (in module torch)": [[1226, "torch.get_rng_state"]], "gradient() (in module torch)": [[1227, "torch.gradient"]], "greater() (in module torch)": [[1228, "torch.greater"]], "greater_equal() (in module torch)": [[1229, "torch.greater_equal"]], "gt() (in module torch)": [[1230, "torch.gt"]], "hamming_window() (in module torch)": [[1231, "torch.hamming_window"]], "hann_window() (in module torch)": [[1232, "torch.hann_window"]], "heaviside() (in module torch)": [[1233, "torch.heaviside"]], "histc() (in module torch)": [[1234, "torch.histc"]], "histogram() (in module torch)": [[1235, "torch.histogram"]], "histogramdd() (in module torch)": [[1236, "torch.histogramdd"]], "hsplit() (in module torch)": [[1237, "torch.hsplit"]], "hspmm() (in module torch)": [[1238, "torch.hspmm"]], "hstack() (in module torch)": [[1239, "torch.hstack"]], "hypot() (in module torch)": [[1240, "torch.hypot"]], "i0() (in module torch)": [[1241, "torch.i0"]], "igamma() (in module torch)": [[1242, "torch.igamma"]], "igammac() (in module torch)": [[1243, "torch.igammac"]], "imag() (in module torch)": [[1244, "torch.imag"]], "index_add() (in module torch)": [[1245, "torch.index_add"]], "index_copy() (in module torch)": [[1246, "torch.index_copy"]], "index_reduce() (in module torch)": [[1247, "torch.index_reduce"]], "index_select() (in module torch)": [[1248, "torch.index_select"]], "initial_seed() (in module torch)": [[1249, "torch.initial_seed"]], "inner() (in module torch)": [[1250, "torch.inner"]], "inverse() (in module torch)": [[1251, "torch.inverse"]], "is_complex() (in module torch)": [[1252, "torch.is_complex"]], "is_conj() (in module torch)": [[1253, "torch.is_conj"]], "is_deterministic_algorithms_warn_only_enabled() (in module torch)": [[1254, "torch.is_deterministic_algorithms_warn_only_enabled"]], "is_floating_point() (in module torch)": [[1255, "torch.is_floating_point"]], "is_grad_enabled() (in module torch)": [[1256, "torch.is_grad_enabled"]], "is_inference_mode_enabled() (in module torch)": [[1257, "torch.is_inference_mode_enabled"]], "is_nonzero() (in module torch)": [[1258, "torch.is_nonzero"]], "is_storage() (in module torch)": [[1259, "torch.is_storage"]], "is_tensor() (in module torch)": [[1260, "torch.is_tensor"]], "is_warn_always_enabled() (in module torch)": [[1261, "torch.is_warn_always_enabled"]], "isclose() (in module torch)": [[1262, "torch.isclose"]], "isfinite() (in module torch)": [[1263, "torch.isfinite"]], "isin() (in module torch)": [[1264, "torch.isin"]], "isinf() (in module torch)": [[1265, "torch.isinf"]], "isnan() (in module torch)": [[1266, "torch.isnan"]], "isneginf() (in module torch)": [[1267, "torch.isneginf"]], "isposinf() (in module torch)": [[1268, "torch.isposinf"]], "isreal() (in module torch)": [[1269, "torch.isreal"]], "istft() (in module torch)": [[1270, "torch.istft"]], "attribute (class in torch.jit)": [[1271, "torch.jit.Attribute"]], "count() (torch.jit.attribute method)": [[1271, "torch.jit.Attribute.count"]], "index() (torch.jit.attribute 
method)": [[1271, "torch.jit.Attribute.index"]], "type (torch.jit.attribute attribute)": [[1271, "torch.jit.Attribute.type"]], "value (torch.jit.attribute attribute)": [[1271, "torch.jit.Attribute.value"]], "scriptfunction (class in torch.jit)": [[1272, "torch.jit.ScriptFunction"]], "get_debug_state() (torch.jit.scriptfunction method)": [[1272, "torch.jit.ScriptFunction.get_debug_state"]], "save() (torch.jit.scriptfunction method)": [[1272, "torch.jit.ScriptFunction.save"]], "save_to_buffer() (torch.jit.scriptfunction method)": [[1272, "torch.jit.ScriptFunction.save_to_buffer"]], "scriptmodule (class in torch.jit)": [[1273, "torch.jit.ScriptModule"]], "add_module() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.add_module"]], "apply() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.apply"]], "bfloat16() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.bfloat16"]], "buffers() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.buffers"]], "children() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.children"]], "code (torch.jit.scriptmodule property)": [[1273, "torch.jit.ScriptModule.code"]], "code_with_constants (torch.jit.scriptmodule property)": [[1273, "torch.jit.ScriptModule.code_with_constants"]], "compile() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.compile"]], "cpu() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.cpu"]], "cuda() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.cuda"]], "double() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.double"]], "eval() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.eval"]], "extra_repr() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.extra_repr"]], "float() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.float"]], "get_buffer() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.get_buffer"]], "get_extra_state() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.get_extra_state"]], "get_parameter() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.get_parameter"]], "get_submodule() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.get_submodule"]], "graph (torch.jit.scriptmodule property)": [[1273, "torch.jit.ScriptModule.graph"]], "half() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.half"]], "inlined_graph (torch.jit.scriptmodule property)": [[1273, "torch.jit.ScriptModule.inlined_graph"]], "ipu() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.ipu"]], "load_state_dict() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.load_state_dict"]], "modules() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.modules"]], "named_buffers() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.named_buffers"]], "named_children() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.named_children"]], "named_modules() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.named_modules"]], "named_parameters() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.named_parameters"]], "parameters() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.parameters"]], "register_backward_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_backward_hook"]], "register_buffer() (torch.jit.scriptmodule method)": [[1273, 
"torch.jit.ScriptModule.register_buffer"]], "register_forward_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_forward_hook"]], "register_forward_pre_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_forward_pre_hook"]], "register_full_backward_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_full_backward_hook"]], "register_full_backward_pre_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_full_backward_pre_hook"]], "register_load_state_dict_post_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_load_state_dict_post_hook"]], "register_module() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_module"]], "register_parameter() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_parameter"]], "register_state_dict_pre_hook() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.register_state_dict_pre_hook"]], "requires_grad_() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.requires_grad_"]], "save() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.save"]], "set_extra_state() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.set_extra_state"]], "share_memory() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.share_memory"]], "state_dict() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.state_dict"]], "to() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.to"]], "to_empty() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.to_empty"]], "train() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.train"]], "type() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.type"]], "xpu() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.xpu"]], "zero_grad() (torch.jit.scriptmodule method)": [[1273, "torch.jit.ScriptModule.zero_grad"]], "annotate() (in module torch.jit)": [[1274, "torch.jit.annotate"]], "enable_onednn_fusion() (in module torch.jit)": [[1275, "torch.jit.enable_onednn_fusion"]], "fork() (in module torch.jit)": [[1276, "torch.jit.fork"]], "freeze() (in module torch.jit)": [[1277, "torch.jit.freeze"]], "ignore() (in module torch.jit)": [[1278, "torch.jit.ignore"]], "interface() (in module torch.jit)": [[1279, "torch.jit.interface"]], "isinstance() (in module torch.jit)": [[1280, "torch.jit.isinstance"]], "load() (in module torch.jit)": [[1281, "torch.jit.load"]], "onednn_fusion_enabled() (in module torch.jit)": [[1282, "torch.jit.onednn_fusion_enabled"]], "optimize_for_inference() (in module torch.jit)": [[1283, "torch.jit.optimize_for_inference"]], "save() (in module torch.jit)": [[1284, "torch.jit.save"]], "script() (in module torch.jit)": [[1285, "torch.jit.script"]], "script_if_tracing() (in module torch.jit)": [[1286, "torch.jit.script_if_tracing"]], "set_fusion_strategy() (in module torch.jit)": [[1287, "torch.jit.set_fusion_strategy"]], "strict_fusion (class in torch.jit)": [[1288, "torch.jit.strict_fusion"]], "trace() (in module torch.jit)": [[1289, "torch.jit.trace"]], "trace_module() (in module torch.jit)": [[1290, "torch.jit.trace_module"]], "unused() (in module torch.jit)": [[1291, "torch.jit.unused"]], "wait() (in module torch.jit)": [[1292, "torch.jit.wait"]], "kaiser_window() (in module torch)": [[1293, "torch.kaiser_window"]], "kron() (in module torch)": [[1294, "torch.kron"]], "kthvalue() (in 
module torch)": [[1295, "torch.kthvalue"]], "lcm() (in module torch)": [[1296, "torch.lcm"]], "ldexp() (in module torch)": [[1297, "torch.ldexp"]], "le() (in module torch)": [[1298, "torch.le"]], "lerp() (in module torch)": [[1299, "torch.lerp"]], "less() (in module torch)": [[1300, "torch.less"]], "less_equal() (in module torch)": [[1301, "torch.less_equal"]], "lgamma() (in module torch)": [[1302, "torch.lgamma"]], "cholesky() (in module torch.linalg)": [[1303, "torch.linalg.cholesky"]], "cholesky_ex() (in module torch.linalg)": [[1304, "torch.linalg.cholesky_ex"]], "cond() (in module torch.linalg)": [[1305, "torch.linalg.cond"]], "cross() (in module torch.linalg)": [[1306, "torch.linalg.cross"]], "det() (in module torch.linalg)": [[1307, "torch.linalg.det"]], "diagonal() (in module torch.linalg)": [[1308, "torch.linalg.diagonal"]], "eig() (in module torch.linalg)": [[1309, "torch.linalg.eig"]], "eigh() (in module torch.linalg)": [[1310, "torch.linalg.eigh"]], "eigvals() (in module torch.linalg)": [[1311, "torch.linalg.eigvals"]], "eigvalsh() (in module torch.linalg)": [[1312, "torch.linalg.eigvalsh"]], "householder_product() (in module torch.linalg)": [[1313, "torch.linalg.householder_product"]], "inv() (in module torch.linalg)": [[1314, "torch.linalg.inv"]], "inv_ex() (in module torch.linalg)": [[1315, "torch.linalg.inv_ex"]], "ldl_factor() (in module torch.linalg)": [[1316, "torch.linalg.ldl_factor"]], "ldl_factor_ex() (in module torch.linalg)": [[1317, "torch.linalg.ldl_factor_ex"]], "ldl_solve() (in module torch.linalg)": [[1318, "torch.linalg.ldl_solve"]], "lstsq() (in module torch.linalg)": [[1319, "torch.linalg.lstsq"]], "lu() (in module torch.linalg)": [[1320, "torch.linalg.lu"]], "lu_factor() (in module torch.linalg)": [[1321, "torch.linalg.lu_factor"]], "lu_factor_ex() (in module torch.linalg)": [[1322, "torch.linalg.lu_factor_ex"]], "lu_solve() (in module torch.linalg)": [[1323, "torch.linalg.lu_solve"]], "matmul() (in module torch.linalg)": [[1324, "torch.linalg.matmul"]], "matrix_exp() (in module torch.linalg)": [[1325, "torch.linalg.matrix_exp"]], "matrix_norm() (in module torch.linalg)": [[1326, "torch.linalg.matrix_norm"]], "matrix_power() (in module torch.linalg)": [[1327, "torch.linalg.matrix_power"]], "matrix_rank() (in module torch.linalg)": [[1328, "torch.linalg.matrix_rank"]], "multi_dot() (in module torch.linalg)": [[1329, "torch.linalg.multi_dot"]], "norm() (in module torch.linalg)": [[1330, "torch.linalg.norm"]], "pinv() (in module torch.linalg)": [[1331, "torch.linalg.pinv"]], "qr() (in module torch.linalg)": [[1332, "torch.linalg.qr"]], "slogdet() (in module torch.linalg)": [[1333, "torch.linalg.slogdet"]], "solve() (in module torch.linalg)": [[1334, "torch.linalg.solve"]], "solve_ex() (in module torch.linalg)": [[1335, "torch.linalg.solve_ex"]], "solve_triangular() (in module torch.linalg)": [[1336, "torch.linalg.solve_triangular"]], "svd() (in module torch.linalg)": [[1337, "torch.linalg.svd"]], "svdvals() (in module torch.linalg)": [[1338, "torch.linalg.svdvals"]], "tensorinv() (in module torch.linalg)": [[1339, "torch.linalg.tensorinv"]], "tensorsolve() (in module torch.linalg)": [[1340, "torch.linalg.tensorsolve"]], "vander() (in module torch.linalg)": [[1341, "torch.linalg.vander"]], "vecdot() (in module torch.linalg)": [[1342, "torch.linalg.vecdot"]], "vector_norm() (in module torch.linalg)": [[1343, "torch.linalg.vector_norm"]], "linspace() (in module torch)": [[1344, "torch.linspace"]], "load() (in module torch)": [[1345, "torch.load"]], "lobpcg() (in 
module torch)": [[1346, "torch.lobpcg"]], "log() (in module torch)": [[1347, "torch.log"]], "log10() (in module torch)": [[1348, "torch.log10"]], "log1p() (in module torch)": [[1349, "torch.log1p"]], "log2() (in module torch)": [[1350, "torch.log2"]], "logaddexp() (in module torch)": [[1351, "torch.logaddexp"]], "logaddexp2() (in module torch)": [[1352, "torch.logaddexp2"]], "logcumsumexp() (in module torch)": [[1353, "torch.logcumsumexp"]], "logdet() (in module torch)": [[1354, "torch.logdet"]], "logical_and() (in module torch)": [[1355, "torch.logical_and"]], "logical_not() (in module torch)": [[1356, "torch.logical_not"]], "logical_or() (in module torch)": [[1357, "torch.logical_or"]], "logical_xor() (in module torch)": [[1358, "torch.logical_xor"]], "logit() (in module torch)": [[1359, "torch.logit"]], "logspace() (in module torch)": [[1360, "torch.logspace"]], "logsumexp() (in module torch)": [[1361, "torch.logsumexp"]], "lt() (in module torch)": [[1362, "torch.lt"]], "lu() (in module torch)": [[1363, "torch.lu"]], "lu_solve() (in module torch)": [[1364, "torch.lu_solve"]], "lu_unpack() (in module torch)": [[1365, "torch.lu_unpack"]], "manual_seed() (in module torch)": [[1366, "torch.manual_seed"]], "masked_select() (in module torch)": [[1367, "torch.masked_select"]], "matmul() (in module torch)": [[1368, "torch.matmul"]], "matrix_exp() (in module torch)": [[1369, "torch.matrix_exp"]], "matrix_power() (in module torch)": [[1370, "torch.matrix_power"]], "max() (in module torch)": [[1371, "torch.max"]], "maximum() (in module torch)": [[1372, "torch.maximum"]], "mean() (in module torch)": [[1373, "torch.mean"]], "median() (in module torch)": [[1374, "torch.median"]], "meshgrid() (in module torch)": [[1375, "torch.meshgrid"]], "min() (in module torch)": [[1376, "torch.min"]], "minimum() (in module torch)": [[1377, "torch.minimum"]], "mm() (in module torch)": [[1378, "torch.mm"]], "mode() (in module torch)": [[1379, "torch.mode"]], "moveaxis() (in module torch)": [[1380, "torch.moveaxis"]], "movedim() (in module torch)": [[1381, "torch.movedim"]], "current_allocated_memory() (in module torch.mps)": [[1382, "torch.mps.current_allocated_memory"]], "device_count() (in module torch.mps)": [[1383, "torch.mps.device_count"]], "driver_allocated_memory() (in module torch.mps)": [[1384, "torch.mps.driver_allocated_memory"]], "empty_cache() (in module torch.mps)": [[1385, "torch.mps.empty_cache"]], "event (class in torch.mps.event)": [[1386, "torch.mps.event.Event"]], "elapsed_time() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.elapsed_time"]], "query() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.query"]], "record() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.record"]], "synchronize() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.synchronize"]], "wait() (torch.mps.event.event method)": [[1386, "torch.mps.event.Event.wait"]], "get_rng_state() (in module torch.mps)": [[1387, "torch.mps.get_rng_state"]], "manual_seed() (in module torch.mps)": [[1388, "torch.mps.manual_seed"]], "profile() (in module torch.mps.profiler)": [[1389, "torch.mps.profiler.profile"]], "start() (in module torch.mps.profiler)": [[1390, "torch.mps.profiler.start"]], "stop() (in module torch.mps.profiler)": [[1391, "torch.mps.profiler.stop"]], "seed() (in module torch.mps)": [[1392, "torch.mps.seed"]], "set_per_process_memory_fraction() (in module torch.mps)": [[1393, "torch.mps.set_per_process_memory_fraction"]], "set_rng_state() (in module torch.mps)": 
[[1394, "torch.mps.set_rng_state"]], "synchronize() (in module torch.mps)": [[1395, "torch.mps.synchronize"]], "msort() (in module torch)": [[1396, "torch.msort"]], "deferredmtiacallerror": [[1397, "torch.mtia.DeferredMtiaCallError"]], "event (class in torch.mtia)": [[1398, "torch.mtia.Event"]], "stream (class in torch.mtia)": [[1399, "torch.mtia.Stream"]], "streamcontext (class in torch.mtia)": [[1400, "torch.mtia.StreamContext"]], "current_device() (in module torch.mtia)": [[1401, "torch.mtia.current_device"]], "current_stream() (in module torch.mtia)": [[1402, "torch.mtia.current_stream"]], "default_stream() (in module torch.mtia)": [[1403, "torch.mtia.default_stream"]], "device (class in torch.mtia)": [[1404, "torch.mtia.device"]], "device_count() (in module torch.mtia)": [[1405, "torch.mtia.device_count"]], "init() (in module torch.mtia)": [[1406, "torch.mtia.init"]], "is_available() (in module torch.mtia)": [[1407, "torch.mtia.is_available"]], "is_initialized() (in module torch.mtia)": [[1408, "torch.mtia.is_initialized"]], "set_stream() (in module torch.mtia)": [[1409, "torch.mtia.set_stream"]], "stream() (in module torch.mtia)": [[1410, "torch.mtia.stream"]], "synchronize() (in module torch.mtia)": [[1411, "torch.mtia.synchronize"]], "mul() (in module torch)": [[1412, "torch.mul"]], "multinomial() (in module torch)": [[1413, "torch.multinomial"]], "multiply() (in module torch)": [[1414, "torch.multiply"]], "mv() (in module torch)": [[1415, "torch.mv"]], "mvlgamma() (in module torch)": [[1416, "torch.mvlgamma"]], "nan_to_num() (in module torch)": [[1417, "torch.nan_to_num"]], "nanmean() (in module torch)": [[1418, "torch.nanmean"]], "nanmedian() (in module torch)": [[1419, "torch.nanmedian"]], "nanquantile() (in module torch)": [[1420, "torch.nanquantile"]], "nansum() (in module torch)": [[1421, "torch.nansum"]], "narrow() (in module torch)": [[1422, "torch.narrow"]], "narrow_copy() (in module torch)": [[1423, "torch.narrow_copy"]], "ne() (in module torch)": [[1424, "torch.ne"]], "neg() (in module torch)": [[1425, "torch.neg"]], "negative() (in module torch)": [[1426, "torch.negative"]], "nextafter() (in module torch)": [[1427, "torch.nextafter"]], "adaptiveavgpool1d (class in torch.nn)": [[1428, "torch.nn.AdaptiveAvgPool1d"]], "adaptiveavgpool2d (class in torch.nn)": [[1429, "torch.nn.AdaptiveAvgPool2d"]], "adaptiveavgpool3d (class in torch.nn)": [[1430, "torch.nn.AdaptiveAvgPool3d"]], "adaptivelogsoftmaxwithloss (class in torch.nn)": [[1431, "torch.nn.AdaptiveLogSoftmaxWithLoss"]], "log_prob() (torch.nn.adaptivelogsoftmaxwithloss method)": [[1431, "torch.nn.AdaptiveLogSoftmaxWithLoss.log_prob"]], "predict() (torch.nn.adaptivelogsoftmaxwithloss method)": [[1431, "torch.nn.AdaptiveLogSoftmaxWithLoss.predict"]], "adaptivemaxpool1d (class in torch.nn)": [[1432, "torch.nn.AdaptiveMaxPool1d"]], "adaptivemaxpool2d (class in torch.nn)": [[1433, "torch.nn.AdaptiveMaxPool2d"]], "adaptivemaxpool3d (class in torch.nn)": [[1434, "torch.nn.AdaptiveMaxPool3d"]], "alphadropout (class in torch.nn)": [[1435, "torch.nn.AlphaDropout"]], "avgpool1d (class in torch.nn)": [[1436, "torch.nn.AvgPool1d"]], "avgpool2d (class in torch.nn)": [[1437, "torch.nn.AvgPool2d"]], "avgpool3d (class in torch.nn)": [[1438, "torch.nn.AvgPool3d"]], "bceloss (class in torch.nn)": [[1439, "torch.nn.BCELoss"]], "bcewithlogitsloss (class in torch.nn)": [[1440, "torch.nn.BCEWithLogitsLoss"]], "batchnorm1d (class in torch.nn)": [[1441, "torch.nn.BatchNorm1d"]], "batchnorm2d (class in torch.nn)": [[1442, 
"torch.nn.BatchNorm2d"]], "batchnorm3d (class in torch.nn)": [[1443, "torch.nn.BatchNorm3d"]], "bilinear (class in torch.nn)": [[1444, "torch.nn.Bilinear"]], "celu (class in torch.nn)": [[1445, "torch.nn.CELU"]], "ctcloss (class in torch.nn)": [[1446, "torch.nn.CTCLoss"]], "channelshuffle (class in torch.nn)": [[1447, "torch.nn.ChannelShuffle"]], "circularpad1d (class in torch.nn)": [[1448, "torch.nn.CircularPad1d"]], "circularpad2d (class in torch.nn)": [[1449, "torch.nn.CircularPad2d"]], "circularpad3d (class in torch.nn)": [[1450, "torch.nn.CircularPad3d"]], "constantpad1d (class in torch.nn)": [[1451, "torch.nn.ConstantPad1d"]], "constantpad2d (class in torch.nn)": [[1452, "torch.nn.ConstantPad2d"]], "constantpad3d (class in torch.nn)": [[1453, "torch.nn.ConstantPad3d"]], "conv1d (class in torch.nn)": [[1454, "torch.nn.Conv1d"]], "conv2d (class in torch.nn)": [[1455, "torch.nn.Conv2d"]], "conv3d (class in torch.nn)": [[1456, "torch.nn.Conv3d"]], "convtranspose1d (class in torch.nn)": [[1457, "torch.nn.ConvTranspose1d"]], "convtranspose2d (class in torch.nn)": [[1458, "torch.nn.ConvTranspose2d"]], "convtranspose3d (class in torch.nn)": [[1459, "torch.nn.ConvTranspose3d"]], "cosineembeddingloss (class in torch.nn)": [[1460, "torch.nn.CosineEmbeddingLoss"]], "cosinesimilarity (class in torch.nn)": [[1461, "torch.nn.CosineSimilarity"]], "crossentropyloss (class in torch.nn)": [[1462, "torch.nn.CrossEntropyLoss"]], "dataparallel (class in torch.nn)": [[1463, "torch.nn.DataParallel"]], "dropout (class in torch.nn)": [[1464, "torch.nn.Dropout"]], "dropout1d (class in torch.nn)": [[1465, "torch.nn.Dropout1d"]], "dropout2d (class in torch.nn)": [[1466, "torch.nn.Dropout2d"]], "dropout3d (class in torch.nn)": [[1467, "torch.nn.Dropout3d"]], "elu (class in torch.nn)": [[1468, "torch.nn.ELU"]], "embedding (class in torch.nn)": [[1469, "torch.nn.Embedding"]], "from_pretrained() (torch.nn.embedding class method)": [[1469, "torch.nn.Embedding.from_pretrained"]], "embeddingbag (class in torch.nn)": [[1470, "torch.nn.EmbeddingBag"]], "forward() (torch.nn.embeddingbag method)": [[1470, "torch.nn.EmbeddingBag.forward"]], "from_pretrained() (torch.nn.embeddingbag class method)": [[1470, "torch.nn.EmbeddingBag.from_pretrained"]], "featurealphadropout (class in torch.nn)": [[1471, "torch.nn.FeatureAlphaDropout"]], "flatten (class in torch.nn)": [[1472, "torch.nn.Flatten"]], "fold (class in torch.nn)": [[1473, "torch.nn.Fold"]], "fractionalmaxpool2d (class in torch.nn)": [[1474, "torch.nn.FractionalMaxPool2d"]], "fractionalmaxpool3d (class in torch.nn)": [[1475, "torch.nn.FractionalMaxPool3d"]], "gelu (class in torch.nn)": [[1476, "torch.nn.GELU"]], "glu (class in torch.nn)": [[1477, "torch.nn.GLU"]], "gru (class in torch.nn)": [[1478, "torch.nn.GRU"]], "grucell (class in torch.nn)": [[1479, "torch.nn.GRUCell"]], "gaussiannllloss (class in torch.nn)": [[1480, "torch.nn.GaussianNLLLoss"]], "groupnorm (class in torch.nn)": [[1481, "torch.nn.GroupNorm"]], "hardshrink (class in torch.nn)": [[1482, "torch.nn.Hardshrink"]], "hardsigmoid (class in torch.nn)": [[1483, "torch.nn.Hardsigmoid"]], "hardswish (class in torch.nn)": [[1484, "torch.nn.Hardswish"]], "hardtanh (class in torch.nn)": [[1485, "torch.nn.Hardtanh"]], "hingeembeddingloss (class in torch.nn)": [[1486, "torch.nn.HingeEmbeddingLoss"]], "huberloss (class in torch.nn)": [[1487, "torch.nn.HuberLoss"]], "identity (class in torch.nn)": [[1488, "torch.nn.Identity"]], "instancenorm1d (class in torch.nn)": [[1489, "torch.nn.InstanceNorm1d"]], "instancenorm2d 
(class in torch.nn)": [[1490, "torch.nn.InstanceNorm2d"]], "instancenorm3d (class in torch.nn)": [[1491, "torch.nn.InstanceNorm3d"]], "kldivloss (class in torch.nn)": [[1492, "torch.nn.KLDivLoss"]], "l1loss (class in torch.nn)": [[1493, "torch.nn.L1Loss"]], "lppool1d (class in torch.nn)": [[1494, "torch.nn.LPPool1d"]], "lppool2d (class in torch.nn)": [[1495, "torch.nn.LPPool2d"]], "lppool3d (class in torch.nn)": [[1496, "torch.nn.LPPool3d"]], "lstm (class in torch.nn)": [[1497, "torch.nn.LSTM"]], "lstmcell (class in torch.nn)": [[1498, "torch.nn.LSTMCell"]], "layernorm (class in torch.nn)": [[1499, "torch.nn.LayerNorm"]], "lazybatchnorm1d (class in torch.nn)": [[1500, "torch.nn.LazyBatchNorm1d"]], "cls_to_become (torch.nn.lazybatchnorm1d attribute)": [[1500, "torch.nn.LazyBatchNorm1d.cls_to_become"]], "lazybatchnorm2d (class in torch.nn)": [[1501, "torch.nn.LazyBatchNorm2d"]], "cls_to_become (torch.nn.lazybatchnorm2d attribute)": [[1501, "torch.nn.LazyBatchNorm2d.cls_to_become"]], "lazybatchnorm3d (class in torch.nn)": [[1502, "torch.nn.LazyBatchNorm3d"]], "cls_to_become (torch.nn.lazybatchnorm3d attribute)": [[1502, "torch.nn.LazyBatchNorm3d.cls_to_become"]], "lazyconv1d (class in torch.nn)": [[1503, "torch.nn.LazyConv1d"]], "cls_to_become (torch.nn.lazyconv1d attribute)": [[1503, "torch.nn.LazyConv1d.cls_to_become"]], "lazyconv2d (class in torch.nn)": [[1504, "torch.nn.LazyConv2d"]], "cls_to_become (torch.nn.lazyconv2d attribute)": [[1504, "torch.nn.LazyConv2d.cls_to_become"]], "lazyconv3d (class in torch.nn)": [[1505, "torch.nn.LazyConv3d"]], "cls_to_become (torch.nn.lazyconv3d attribute)": [[1505, "torch.nn.LazyConv3d.cls_to_become"]], "lazyconvtranspose1d (class in torch.nn)": [[1506, "torch.nn.LazyConvTranspose1d"]], "cls_to_become (torch.nn.lazyconvtranspose1d attribute)": [[1506, "torch.nn.LazyConvTranspose1d.cls_to_become"]], "lazyconvtranspose2d (class in torch.nn)": [[1507, "torch.nn.LazyConvTranspose2d"]], "cls_to_become (torch.nn.lazyconvtranspose2d attribute)": [[1507, "torch.nn.LazyConvTranspose2d.cls_to_become"]], "lazyconvtranspose3d (class in torch.nn)": [[1508, "torch.nn.LazyConvTranspose3d"]], "cls_to_become (torch.nn.lazyconvtranspose3d attribute)": [[1508, "torch.nn.LazyConvTranspose3d.cls_to_become"]], "lazyinstancenorm1d (class in torch.nn)": [[1509, "torch.nn.LazyInstanceNorm1d"]], "cls_to_become (torch.nn.lazyinstancenorm1d attribute)": [[1509, "torch.nn.LazyInstanceNorm1d.cls_to_become"]], "lazyinstancenorm2d (class in torch.nn)": [[1510, "torch.nn.LazyInstanceNorm2d"]], "cls_to_become (torch.nn.lazyinstancenorm2d attribute)": [[1510, "torch.nn.LazyInstanceNorm2d.cls_to_become"]], "lazyinstancenorm3d (class in torch.nn)": [[1511, "torch.nn.LazyInstanceNorm3d"]], "cls_to_become (torch.nn.lazyinstancenorm3d attribute)": [[1511, "torch.nn.LazyInstanceNorm3d.cls_to_become"]], "lazylinear (class in torch.nn)": [[1512, "torch.nn.LazyLinear"]], "cls_to_become (torch.nn.lazylinear attribute)": [[1512, "torch.nn.LazyLinear.cls_to_become"]], "leakyrelu (class in torch.nn)": [[1513, "torch.nn.LeakyReLU"]], "linear (class in torch.nn)": [[1514, "torch.nn.Linear"]], "localresponsenorm (class in torch.nn)": [[1515, "torch.nn.LocalResponseNorm"]], "logsigmoid (class in torch.nn)": [[1516, "torch.nn.LogSigmoid"]], "logsoftmax (class in torch.nn)": [[1517, "torch.nn.LogSoftmax"]], "mseloss (class in torch.nn)": [[1518, "torch.nn.MSELoss"]], "marginrankingloss (class in torch.nn)": [[1519, "torch.nn.MarginRankingLoss"]], "maxpool1d (class in torch.nn)": [[1520, 
"torch.nn.MaxPool1d"]], "maxpool2d (class in torch.nn)": [[1521, "torch.nn.MaxPool2d"]], "maxpool3d (class in torch.nn)": [[1522, "torch.nn.MaxPool3d"]], "maxunpool1d (class in torch.nn)": [[1523, "torch.nn.MaxUnpool1d"]], "maxunpool2d (class in torch.nn)": [[1524, "torch.nn.MaxUnpool2d"]], "maxunpool3d (class in torch.nn)": [[1525, "torch.nn.MaxUnpool3d"]], "mish (class in torch.nn)": [[1526, "torch.nn.Mish"]], "module (class in torch.nn)": [[1527, "torch.nn.Module"]], "add_module() (torch.nn.module method)": [[1527, "torch.nn.Module.add_module"]], "apply() (torch.nn.module method)": [[1527, "torch.nn.Module.apply"]], "bfloat16() (torch.nn.module method)": [[1527, "torch.nn.Module.bfloat16"]], "buffers() (torch.nn.module method)": [[1527, "torch.nn.Module.buffers"]], "children() (torch.nn.module method)": [[1527, "torch.nn.Module.children"]], "compile() (torch.nn.module method)": [[1527, "torch.nn.Module.compile"]], "cpu() (torch.nn.module method)": [[1527, "torch.nn.Module.cpu"]], "cuda() (torch.nn.module method)": [[1527, "torch.nn.Module.cuda"]], "double() (torch.nn.module method)": [[1527, "torch.nn.Module.double"]], "eval() (torch.nn.module method)": [[1527, "torch.nn.Module.eval"]], "extra_repr() (torch.nn.module method)": [[1527, "torch.nn.Module.extra_repr"]], "float() (torch.nn.module method)": [[1527, "torch.nn.Module.float"]], "forward() (torch.nn.module method)": [[1527, "torch.nn.Module.forward"]], "get_buffer() (torch.nn.module method)": [[1527, "torch.nn.Module.get_buffer"]], "get_extra_state() (torch.nn.module method)": [[1527, "torch.nn.Module.get_extra_state"]], "get_parameter() (torch.nn.module method)": [[1527, "torch.nn.Module.get_parameter"]], "get_submodule() (torch.nn.module method)": [[1527, "torch.nn.Module.get_submodule"]], "half() (torch.nn.module method)": [[1527, "torch.nn.Module.half"]], "ipu() (torch.nn.module method)": [[1527, "torch.nn.Module.ipu"]], "load_state_dict() (torch.nn.module method)": [[1527, "torch.nn.Module.load_state_dict"]], "modules() (torch.nn.module method)": [[1527, "torch.nn.Module.modules"]], "named_buffers() (torch.nn.module method)": [[1527, "torch.nn.Module.named_buffers"]], "named_children() (torch.nn.module method)": [[1527, "torch.nn.Module.named_children"]], "named_modules() (torch.nn.module method)": [[1527, "torch.nn.Module.named_modules"]], "named_parameters() (torch.nn.module method)": [[1527, "torch.nn.Module.named_parameters"]], "parameters() (torch.nn.module method)": [[1527, "torch.nn.Module.parameters"]], "register_backward_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_backward_hook"]], "register_buffer() (torch.nn.module method)": [[1527, "torch.nn.Module.register_buffer"]], "register_forward_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_forward_hook"]], "register_forward_pre_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_forward_pre_hook"]], "register_full_backward_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_full_backward_hook"]], "register_full_backward_pre_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_full_backward_pre_hook"]], "register_load_state_dict_post_hook() (torch.nn.module method)": [[1527, "torch.nn.Module.register_load_state_dict_post_hook"]], "register_module() (torch.nn.module method)": [[1527, "torch.nn.Module.register_module"]], "register_parameter() (torch.nn.module method)": [[1527, "torch.nn.Module.register_parameter"]], "register_state_dict_pre_hook() (torch.nn.module method)": [[1527, 
"torch.nn.Module.register_state_dict_pre_hook"]], "requires_grad_() (torch.nn.module method)": [[1527, "torch.nn.Module.requires_grad_"]], "set_extra_state() (torch.nn.module method)": [[1527, "torch.nn.Module.set_extra_state"]], "share_memory() (torch.nn.module method)": [[1527, "torch.nn.Module.share_memory"]], "state_dict() (torch.nn.module method)": [[1527, "torch.nn.Module.state_dict"]], "to() (torch.nn.module method)": [[1527, "torch.nn.Module.to"]], "to_empty() (torch.nn.module method)": [[1527, "torch.nn.Module.to_empty"]], "train() (torch.nn.module method)": [[1527, "torch.nn.Module.train"]], "type() (torch.nn.module method)": [[1527, "torch.nn.Module.type"]], "xpu() (torch.nn.module method)": [[1527, "torch.nn.Module.xpu"]], "zero_grad() (torch.nn.module method)": [[1527, "torch.nn.Module.zero_grad"]], "moduledict (class in torch.nn)": [[1528, "torch.nn.ModuleDict"]], "clear() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.clear"]], "items() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.items"]], "keys() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.keys"]], "pop() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.pop"]], "update() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.update"]], "values() (torch.nn.moduledict method)": [[1528, "torch.nn.ModuleDict.values"]], "modulelist (class in torch.nn)": [[1529, "torch.nn.ModuleList"]], "append() (torch.nn.modulelist method)": [[1529, "torch.nn.ModuleList.append"]], "extend() (torch.nn.modulelist method)": [[1529, "torch.nn.ModuleList.extend"]], "insert() (torch.nn.modulelist method)": [[1529, "torch.nn.ModuleList.insert"]], "multilabelmarginloss (class in torch.nn)": [[1530, "torch.nn.MultiLabelMarginLoss"]], "multilabelsoftmarginloss (class in torch.nn)": [[1531, "torch.nn.MultiLabelSoftMarginLoss"]], "multimarginloss (class in torch.nn)": [[1532, "torch.nn.MultiMarginLoss"]], "multiheadattention (class in torch.nn)": [[1533, "torch.nn.MultiheadAttention"]], "forward() (torch.nn.multiheadattention method)": [[1533, "torch.nn.MultiheadAttention.forward"]], "merge_masks() (torch.nn.multiheadattention method)": [[1533, "torch.nn.MultiheadAttention.merge_masks"]], "nllloss (class in torch.nn)": [[1534, "torch.nn.NLLLoss"]], "prelu (class in torch.nn)": [[1535, "torch.nn.PReLU"]], "pairwisedistance (class in torch.nn)": [[1536, "torch.nn.PairwiseDistance"]], "parameterdict (class in torch.nn)": [[1537, "torch.nn.ParameterDict"]], "clear() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.clear"]], "copy() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.copy"]], "fromkeys() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.fromkeys"]], "get() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.get"]], "items() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.items"]], "keys() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.keys"]], "pop() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.pop"]], "popitem() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.popitem"]], "setdefault() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.setdefault"]], "update() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.update"]], "values() (torch.nn.parameterdict method)": [[1537, "torch.nn.ParameterDict.values"]], "parameterlist (class in torch.nn)": [[1538, "torch.nn.ParameterList"]], "append() (torch.nn.parameterlist method)": 
[[1538, "torch.nn.ParameterList.append"]], "extend() (torch.nn.parameterlist method)": [[1538, "torch.nn.ParameterList.extend"]], "pixelshuffle (class in torch.nn)": [[1539, "torch.nn.PixelShuffle"]], "pixelunshuffle (class in torch.nn)": [[1540, "torch.nn.PixelUnshuffle"]], "poissonnllloss (class in torch.nn)": [[1541, "torch.nn.PoissonNLLLoss"]], "rmsnorm (class in torch.nn)": [[1542, "torch.nn.RMSNorm"]], "extra_repr() (torch.nn.rmsnorm method)": [[1542, "torch.nn.RMSNorm.extra_repr"]], "forward() (torch.nn.rmsnorm method)": [[1542, "torch.nn.RMSNorm.forward"]], "reset_parameters() (torch.nn.rmsnorm method)": [[1542, "torch.nn.RMSNorm.reset_parameters"]], "rnn (class in torch.nn)": [[1543, "torch.nn.RNN"]], "rnnbase (class in torch.nn)": [[1544, "torch.nn.RNNBase"]], "flatten_parameters() (torch.nn.rnnbase method)": [[1544, "torch.nn.RNNBase.flatten_parameters"]], "rnncell (class in torch.nn)": [[1545, "torch.nn.RNNCell"]], "rrelu (class in torch.nn)": [[1546, "torch.nn.RReLU"]], "relu (class in torch.nn)": [[1547, "torch.nn.ReLU"]], "relu6 (class in torch.nn)": [[1548, "torch.nn.ReLU6"]], "reflectionpad1d (class in torch.nn)": [[1549, "torch.nn.ReflectionPad1d"]], "reflectionpad2d (class in torch.nn)": [[1550, "torch.nn.ReflectionPad2d"]], "reflectionpad3d (class in torch.nn)": [[1551, "torch.nn.ReflectionPad3d"]], "replicationpad1d (class in torch.nn)": [[1552, "torch.nn.ReplicationPad1d"]], "replicationpad2d (class in torch.nn)": [[1553, "torch.nn.ReplicationPad2d"]], "replicationpad3d (class in torch.nn)": [[1554, "torch.nn.ReplicationPad3d"]], "selu (class in torch.nn)": [[1555, "torch.nn.SELU"]], "sequential (class in torch.nn)": [[1556, "torch.nn.Sequential"]], "append() (torch.nn.sequential method)": [[1556, "torch.nn.Sequential.append"]], "silu (class in torch.nn)": [[1557, "torch.nn.SiLU"]], "sigmoid (class in torch.nn)": [[1558, "torch.nn.Sigmoid"]], "smoothl1loss (class in torch.nn)": [[1559, "torch.nn.SmoothL1Loss"]], "softmarginloss (class in torch.nn)": [[1560, "torch.nn.SoftMarginLoss"]], "softmax (class in torch.nn)": [[1561, "torch.nn.Softmax"]], "softmax2d (class in torch.nn)": [[1562, "torch.nn.Softmax2d"]], "softmin (class in torch.nn)": [[1563, "torch.nn.Softmin"]], "softplus (class in torch.nn)": [[1564, "torch.nn.Softplus"]], "softshrink (class in torch.nn)": [[1565, "torch.nn.Softshrink"]], "softsign (class in torch.nn)": [[1566, "torch.nn.Softsign"]], "syncbatchnorm (class in torch.nn)": [[1567, "torch.nn.SyncBatchNorm"]], "convert_sync_batchnorm() (torch.nn.syncbatchnorm class method)": [[1567, "torch.nn.SyncBatchNorm.convert_sync_batchnorm"]], "tanh (class in torch.nn)": [[1568, "torch.nn.Tanh"]], "tanhshrink (class in torch.nn)": [[1569, "torch.nn.Tanhshrink"]], "threshold (class in torch.nn)": [[1570, "torch.nn.Threshold"]], "transformer (class in torch.nn)": [[1571, "torch.nn.Transformer"]], "forward() (torch.nn.transformer method)": [[1571, "torch.nn.Transformer.forward"]], "generate_square_subsequent_mask() (torch.nn.transformer static method)": [[1571, "torch.nn.Transformer.generate_square_subsequent_mask"]], "transformerdecoder (class in torch.nn)": [[1572, "torch.nn.TransformerDecoder"]], "forward() (torch.nn.transformerdecoder method)": [[1572, "torch.nn.TransformerDecoder.forward"]], "transformerdecoderlayer (class in torch.nn)": [[1573, "torch.nn.TransformerDecoderLayer"]], "forward() (torch.nn.transformerdecoderlayer method)": [[1573, "torch.nn.TransformerDecoderLayer.forward"]], "transformerencoder (class in torch.nn)": [[1574, 
"torch.nn.TransformerEncoder"]], "forward() (torch.nn.transformerencoder method)": [[1574, "torch.nn.TransformerEncoder.forward"]], "transformerencoderlayer (class in torch.nn)": [[1575, "torch.nn.TransformerEncoderLayer"]], "forward() (torch.nn.transformerencoderlayer method)": [[1575, "torch.nn.TransformerEncoderLayer.forward"]], "tripletmarginloss (class in torch.nn)": [[1576, "torch.nn.TripletMarginLoss"]], "tripletmarginwithdistanceloss (class in torch.nn)": [[1577, "torch.nn.TripletMarginWithDistanceLoss"]], "unflatten (class in torch.nn)": [[1578, "torch.nn.Unflatten"]], "unfold (class in torch.nn)": [[1579, "torch.nn.Unfold"]], "upsample (class in torch.nn)": [[1580, "torch.nn.Upsample"]], "upsamplingbilinear2d (class in torch.nn)": [[1581, "torch.nn.UpsamplingBilinear2d"]], "upsamplingnearest2d (class in torch.nn)": [[1582, "torch.nn.UpsamplingNearest2d"]], "zeropad1d (class in torch.nn)": [[1583, "torch.nn.ZeroPad1d"]], "zeropad2d (class in torch.nn)": [[1584, "torch.nn.ZeroPad2d"]], "zeropad3d (class in torch.nn)": [[1585, "torch.nn.ZeroPad3d"]], "sdpbackend (class in torch.nn.attention)": [[1586, "torch.nn.attention.SDPBackend"]], "name (torch.nn.attention.sdpbackend property)": [[1586, "torch.nn.attention.SDPBackend.name"]], "causalbias (class in torch.nn.attention.bias)": [[1587, "torch.nn.attention.bias.CausalBias"]], "causalvariant (class in torch.nn.attention.bias)": [[1588, "torch.nn.attention.bias.CausalVariant"]], "causal_lower_right() (in module torch.nn.attention.bias)": [[1589, "torch.nn.attention.bias.causal_lower_right"]], "causal_upper_left() (in module torch.nn.attention.bias)": [[1590, "torch.nn.attention.bias.causal_upper_left"]], "sdpa_kernel() (in module torch.nn.attention)": [[1591, "torch.nn.attention.sdpa_kernel"]], "adaptive_avg_pool1d() (in module torch.nn.functional)": [[1592, "torch.nn.functional.adaptive_avg_pool1d"]], "adaptive_avg_pool2d() (in module torch.nn.functional)": [[1593, "torch.nn.functional.adaptive_avg_pool2d"]], "adaptive_avg_pool3d() (in module torch.nn.functional)": [[1594, "torch.nn.functional.adaptive_avg_pool3d"]], "adaptive_max_pool1d() (in module torch.nn.functional)": [[1595, "torch.nn.functional.adaptive_max_pool1d"]], "adaptive_max_pool2d() (in module torch.nn.functional)": [[1596, "torch.nn.functional.adaptive_max_pool2d"]], "adaptive_max_pool3d() (in module torch.nn.functional)": [[1597, "torch.nn.functional.adaptive_max_pool3d"]], "affine_grid() (in module torch.nn.functional)": [[1598, "torch.nn.functional.affine_grid"]], "alpha_dropout() (in module torch.nn.functional)": [[1599, "torch.nn.functional.alpha_dropout"]], "avg_pool1d() (in module torch.nn.functional)": [[1600, "torch.nn.functional.avg_pool1d"]], "avg_pool2d() (in module torch.nn.functional)": [[1601, "torch.nn.functional.avg_pool2d"]], "avg_pool3d() (in module torch.nn.functional)": [[1602, "torch.nn.functional.avg_pool3d"]], "batch_norm() (in module torch.nn.functional)": [[1603, "torch.nn.functional.batch_norm"]], "bilinear() (in module torch.nn.functional)": [[1604, "torch.nn.functional.bilinear"]], "binary_cross_entropy() (in module torch.nn.functional)": [[1605, "torch.nn.functional.binary_cross_entropy"]], "binary_cross_entropy_with_logits() (in module torch.nn.functional)": [[1606, "torch.nn.functional.binary_cross_entropy_with_logits"]], "celu() (in module torch.nn.functional)": [[1607, "torch.nn.functional.celu"]], "conv1d() (in module torch.nn.functional)": [[1608, "torch.nn.functional.conv1d"]], "conv2d() (in module torch.nn.functional)": [[1609, 
"torch.nn.functional.conv2d"]], "conv3d() (in module torch.nn.functional)": [[1610, "torch.nn.functional.conv3d"]], "conv_transpose1d() (in module torch.nn.functional)": [[1611, "torch.nn.functional.conv_transpose1d"]], "conv_transpose2d() (in module torch.nn.functional)": [[1612, "torch.nn.functional.conv_transpose2d"]], "conv_transpose3d() (in module torch.nn.functional)": [[1613, "torch.nn.functional.conv_transpose3d"]], "cosine_embedding_loss() (in module torch.nn.functional)": [[1614, "torch.nn.functional.cosine_embedding_loss"]], "cosine_similarity() (in module torch.nn.functional)": [[1615, "torch.nn.functional.cosine_similarity"]], "cross_entropy() (in module torch.nn.functional)": [[1616, "torch.nn.functional.cross_entropy"]], "ctc_loss() (in module torch.nn.functional)": [[1617, "torch.nn.functional.ctc_loss"]], "dropout() (in module torch.nn.functional)": [[1618, "torch.nn.functional.dropout"]], "dropout1d() (in module torch.nn.functional)": [[1619, "torch.nn.functional.dropout1d"]], "dropout2d() (in module torch.nn.functional)": [[1620, "torch.nn.functional.dropout2d"]], "dropout3d() (in module torch.nn.functional)": [[1621, "torch.nn.functional.dropout3d"]], "elu() (in module torch.nn.functional)": [[1622, "torch.nn.functional.elu"]], "elu_() (in module torch.nn.functional)": [[1623, "torch.nn.functional.elu_"]], "embedding() (in module torch.nn.functional)": [[1624, "torch.nn.functional.embedding"]], "embedding_bag() (in module torch.nn.functional)": [[1625, "torch.nn.functional.embedding_bag"]], "feature_alpha_dropout() (in module torch.nn.functional)": [[1626, "torch.nn.functional.feature_alpha_dropout"]], "fold() (in module torch.nn.functional)": [[1627, "torch.nn.functional.fold"]], "fractional_max_pool2d() (in module torch.nn.functional)": [[1628, "torch.nn.functional.fractional_max_pool2d"]], "fractional_max_pool3d() (in module torch.nn.functional)": [[1629, "torch.nn.functional.fractional_max_pool3d"]], "gaussian_nll_loss() (in module torch.nn.functional)": [[1630, "torch.nn.functional.gaussian_nll_loss"]], "gelu() (in module torch.nn.functional)": [[1631, "torch.nn.functional.gelu"]], "glu() (in module torch.nn.functional)": [[1632, "torch.nn.functional.glu"]], "grid_sample() (in module torch.nn.functional)": [[1633, "torch.nn.functional.grid_sample"]], "group_norm() (in module torch.nn.functional)": [[1634, "torch.nn.functional.group_norm"]], "gumbel_softmax() (in module torch.nn.functional)": [[1635, "torch.nn.functional.gumbel_softmax"]], "hardshrink() (in module torch.nn.functional)": [[1636, "torch.nn.functional.hardshrink"]], "hardsigmoid() (in module torch.nn.functional)": [[1637, "torch.nn.functional.hardsigmoid"]], "hardswish() (in module torch.nn.functional)": [[1638, "torch.nn.functional.hardswish"]], "hardtanh() (in module torch.nn.functional)": [[1639, "torch.nn.functional.hardtanh"]], "hardtanh_() (in module torch.nn.functional)": [[1640, "torch.nn.functional.hardtanh_"]], "hinge_embedding_loss() (in module torch.nn.functional)": [[1641, "torch.nn.functional.hinge_embedding_loss"]], "huber_loss() (in module torch.nn.functional)": [[1642, "torch.nn.functional.huber_loss"]], "instance_norm() (in module torch.nn.functional)": [[1643, "torch.nn.functional.instance_norm"]], "interpolate() (in module torch.nn.functional)": [[1644, "torch.nn.functional.interpolate"]], "kl_div() (in module torch.nn.functional)": [[1645, "torch.nn.functional.kl_div"]], "l1_loss() (in module torch.nn.functional)": [[1646, "torch.nn.functional.l1_loss"]], "layer_norm() (in module 
torch.nn.functional)": [[1647, "torch.nn.functional.layer_norm"]], "leaky_relu() (in module torch.nn.functional)": [[1648, "torch.nn.functional.leaky_relu"]], "leaky_relu_() (in module torch.nn.functional)": [[1649, "torch.nn.functional.leaky_relu_"]], "linear() (in module torch.nn.functional)": [[1650, "torch.nn.functional.linear"]], "local_response_norm() (in module torch.nn.functional)": [[1651, "torch.nn.functional.local_response_norm"]], "log_softmax() (in module torch.nn.functional)": [[1652, "torch.nn.functional.log_softmax"]], "logsigmoid() (in module torch.nn.functional)": [[1653, "torch.nn.functional.logsigmoid"]], "lp_pool1d() (in module torch.nn.functional)": [[1654, "torch.nn.functional.lp_pool1d"]], "lp_pool2d() (in module torch.nn.functional)": [[1655, "torch.nn.functional.lp_pool2d"]], "lp_pool3d() (in module torch.nn.functional)": [[1656, "torch.nn.functional.lp_pool3d"]], "margin_ranking_loss() (in module torch.nn.functional)": [[1657, "torch.nn.functional.margin_ranking_loss"]], "max_pool1d() (in module torch.nn.functional)": [[1658, "torch.nn.functional.max_pool1d"]], "max_pool2d() (in module torch.nn.functional)": [[1659, "torch.nn.functional.max_pool2d"]], "max_pool3d() (in module torch.nn.functional)": [[1660, "torch.nn.functional.max_pool3d"]], "max_unpool1d() (in module torch.nn.functional)": [[1661, "torch.nn.functional.max_unpool1d"]], "max_unpool2d() (in module torch.nn.functional)": [[1662, "torch.nn.functional.max_unpool2d"]], "max_unpool3d() (in module torch.nn.functional)": [[1663, "torch.nn.functional.max_unpool3d"]], "mish() (in module torch.nn.functional)": [[1664, "torch.nn.functional.mish"]], "mse_loss() (in module torch.nn.functional)": [[1665, "torch.nn.functional.mse_loss"]], "multi_margin_loss() (in module torch.nn.functional)": [[1666, "torch.nn.functional.multi_margin_loss"]], "multilabel_margin_loss() (in module torch.nn.functional)": [[1667, "torch.nn.functional.multilabel_margin_loss"]], "multilabel_soft_margin_loss() (in module torch.nn.functional)": [[1668, "torch.nn.functional.multilabel_soft_margin_loss"]], "nll_loss() (in module torch.nn.functional)": [[1669, "torch.nn.functional.nll_loss"]], "normalize() (in module torch.nn.functional)": [[1670, "torch.nn.functional.normalize"]], "one_hot() (in module torch.nn.functional)": [[1671, "torch.nn.functional.one_hot"]], "pad() (in module torch.nn.functional)": [[1672, "torch.nn.functional.pad"]], "pairwise_distance() (in module torch.nn.functional)": [[1673, "torch.nn.functional.pairwise_distance"]], "pdist() (in module torch.nn.functional)": [[1674, "torch.nn.functional.pdist"]], "pixel_shuffle() (in module torch.nn.functional)": [[1675, "torch.nn.functional.pixel_shuffle"]], "pixel_unshuffle() (in module torch.nn.functional)": [[1676, "torch.nn.functional.pixel_unshuffle"]], "poisson_nll_loss() (in module torch.nn.functional)": [[1677, "torch.nn.functional.poisson_nll_loss"]], "prelu() (in module torch.nn.functional)": [[1678, "torch.nn.functional.prelu"]], "relu() (in module torch.nn.functional)": [[1679, "torch.nn.functional.relu"]], "relu6() (in module torch.nn.functional)": [[1680, "torch.nn.functional.relu6"]], "relu_() (in module torch.nn.functional)": [[1681, "torch.nn.functional.relu_"]], "rms_norm() (in module torch.nn.functional)": [[1682, "torch.nn.functional.rms_norm"]], "rrelu() (in module torch.nn.functional)": [[1683, "torch.nn.functional.rrelu"]], "rrelu_() (in module torch.nn.functional)": [[1684, "torch.nn.functional.rrelu_"]], "scaled_dot_product_attention() (in module 
torch.nn.functional)": [[1685, "torch.nn.functional.scaled_dot_product_attention"]], "selu() (in module torch.nn.functional)": [[1686, "torch.nn.functional.selu"]], "sigmoid() (in module torch.nn.functional)": [[1687, "torch.nn.functional.sigmoid"]], "silu() (in module torch.nn.functional)": [[1688, "torch.nn.functional.silu"]], "smooth_l1_loss() (in module torch.nn.functional)": [[1689, "torch.nn.functional.smooth_l1_loss"]], "soft_margin_loss() (in module torch.nn.functional)": [[1690, "torch.nn.functional.soft_margin_loss"]], "softmax() (in module torch.nn.functional)": [[1691, "torch.nn.functional.softmax"]], "softmin() (in module torch.nn.functional)": [[1692, "torch.nn.functional.softmin"]], "softplus() (in module torch.nn.functional)": [[1693, "torch.nn.functional.softplus"]], "softshrink() (in module torch.nn.functional)": [[1694, "torch.nn.functional.softshrink"]], "softsign() (in module torch.nn.functional)": [[1695, "torch.nn.functional.softsign"]], "tanh() (in module torch.nn.functional)": [[1696, "torch.nn.functional.tanh"]], "tanhshrink() (in module torch.nn.functional)": [[1697, "torch.nn.functional.tanhshrink"]], "threshold() (in module torch.nn.functional)": [[1698, "torch.nn.functional.threshold"]], "threshold_() (in module torch.nn.functional)": [[1699, "torch.nn.functional.threshold_"]], "data_parallel() (in module torch.nn.parallel)": [[1700, "torch.nn.parallel.data_parallel"]], "triplet_margin_loss() (in module torch.nn.functional)": [[1701, "torch.nn.functional.triplet_margin_loss"]], "triplet_margin_with_distance_loss() (in module torch.nn.functional)": [[1702, "torch.nn.functional.triplet_margin_with_distance_loss"]], "unfold() (in module torch.nn.functional)": [[1703, "torch.nn.functional.unfold"]], "upsample() (in module torch.nn.functional)": [[1704, "torch.nn.functional.upsample"]], "upsample_bilinear() (in module torch.nn.functional)": [[1705, "torch.nn.functional.upsample_bilinear"]], "upsample_nearest() (in module torch.nn.functional)": [[1706, "torch.nn.functional.upsample_nearest"]], "lazymodulemixin (class in torch.nn.modules.lazy)": [[1707, "torch.nn.modules.lazy.LazyModuleMixin"]], "has_uninitialized_params() (torch.nn.modules.lazy.lazymodulemixin method)": [[1707, "torch.nn.modules.lazy.LazyModuleMixin.has_uninitialized_params"]], "initialize_parameters() (torch.nn.modules.lazy.lazymodulemixin method)": [[1707, "torch.nn.modules.lazy.LazyModuleMixin.initialize_parameters"]], "register_module_backward_hook() (in module torch.nn.modules.module)": [[1708, "torch.nn.modules.module.register_module_backward_hook"]], "register_module_buffer_registration_hook() (in module torch.nn.modules.module)": [[1709, "torch.nn.modules.module.register_module_buffer_registration_hook"]], "register_module_forward_hook() (in module torch.nn.modules.module)": [[1710, "torch.nn.modules.module.register_module_forward_hook"]], "register_module_forward_pre_hook() (in module torch.nn.modules.module)": [[1711, "torch.nn.modules.module.register_module_forward_pre_hook"]], "register_module_full_backward_hook() (in module torch.nn.modules.module)": [[1712, "torch.nn.modules.module.register_module_full_backward_hook"]], "register_module_full_backward_pre_hook() (in module torch.nn.modules.module)": [[1713, "torch.nn.modules.module.register_module_full_backward_pre_hook"]], "register_module_module_registration_hook() (in module torch.nn.modules.module)": [[1714, "torch.nn.modules.module.register_module_module_registration_hook"]], "register_module_parameter_registration_hook() (in 
module torch.nn.modules.module)": [[1715, "torch.nn.modules.module.register_module_parameter_registration_hook"]], "rmsnorm (class in torch.nn.modules.normalization)": [[1716, "torch.nn.modules.normalization.RMSNorm"]], "extra_repr() (torch.nn.modules.normalization.rmsnorm method)": [[1716, "torch.nn.modules.normalization.RMSNorm.extra_repr"]], "forward() (torch.nn.modules.normalization.rmsnorm method)": [[1716, "torch.nn.modules.normalization.RMSNorm.forward"]], "reset_parameters() (torch.nn.modules.normalization.rmsnorm method)": [[1716, "torch.nn.modules.normalization.RMSNorm.reset_parameters"]], "distributeddataparallel (class in torch.nn.parallel)": [[1717, "torch.nn.parallel.DistributedDataParallel"]], "join() (torch.nn.parallel.distributeddataparallel method)": [[1717, "torch.nn.parallel.DistributedDataParallel.join"]], "join_hook() (torch.nn.parallel.distributeddataparallel method)": [[1717, "torch.nn.parallel.DistributedDataParallel.join_hook"]], "no_sync() (torch.nn.parallel.distributeddataparallel method)": [[1717, "torch.nn.parallel.DistributedDataParallel.no_sync"]], "register_comm_hook() (torch.nn.parallel.distributeddataparallel method)": [[1717, "torch.nn.parallel.DistributedDataParallel.register_comm_hook"]], "parameter (class in torch.nn.parameter)": [[1718, "torch.nn.parameter.Parameter"]], "uninitializedbuffer (class in torch.nn.parameter)": [[1719, "torch.nn.parameter.UninitializedBuffer"]], "uninitializedparameter (class in torch.nn.parameter)": [[1720, "torch.nn.parameter.UninitializedParameter"]], "cls_to_become (torch.nn.parameter.uninitializedparameter attribute)": [[1720, "torch.nn.parameter.UninitializedParameter.cls_to_become"]], "clip_grad_norm() (in module torch.nn.utils)": [[1721, "torch.nn.utils.clip_grad_norm"]], "clip_grad_norm_() (in module torch.nn.utils)": [[1722, "torch.nn.utils.clip_grad_norm_"]], "clip_grad_value_() (in module torch.nn.utils)": [[1723, "torch.nn.utils.clip_grad_value_"]], "convert_conv2d_weight_memory_format() (in module torch.nn.utils)": [[1724, "torch.nn.utils.convert_conv2d_weight_memory_format"]], "convert_conv3d_weight_memory_format() (in module torch.nn.utils)": [[1725, "torch.nn.utils.convert_conv3d_weight_memory_format"]], "fuse_conv_bn_eval() (in module torch.nn.utils)": [[1726, "torch.nn.utils.fuse_conv_bn_eval"]], "fuse_conv_bn_weights() (in module torch.nn.utils)": [[1727, "torch.nn.utils.fuse_conv_bn_weights"]], "fuse_linear_bn_eval() (in module torch.nn.utils)": [[1728, "torch.nn.utils.fuse_linear_bn_eval"]], "fuse_linear_bn_weights() (in module torch.nn.utils)": [[1729, "torch.nn.utils.fuse_linear_bn_weights"]], "parameters_to_vector() (in module torch.nn.utils)": [[1730, "torch.nn.utils.parameters_to_vector"]], "orthogonal() (in module torch.nn.utils.parametrizations)": [[1731, "torch.nn.utils.parametrizations.orthogonal"]], "spectral_norm() (in module torch.nn.utils.parametrizations)": [[1732, "torch.nn.utils.parametrizations.spectral_norm"]], "weight_norm() (in module torch.nn.utils.parametrizations)": [[1733, "torch.nn.utils.parametrizations.weight_norm"]], "parametrizationlist (class in torch.nn.utils.parametrize)": [[1734, "torch.nn.utils.parametrize.ParametrizationList"]], "right_inverse() (torch.nn.utils.parametrize.parametrizationlist method)": [[1734, "torch.nn.utils.parametrize.ParametrizationList.right_inverse"]], "cached() (in module torch.nn.utils.parametrize)": [[1735, "torch.nn.utils.parametrize.cached"]], "is_parametrized() (in module torch.nn.utils.parametrize)": [[1736, 
"torch.nn.utils.parametrize.is_parametrized"]], "register_parametrization() (in module torch.nn.utils.parametrize)": [[1737, "torch.nn.utils.parametrize.register_parametrization"]], "remove_parametrizations() (in module torch.nn.utils.parametrize)": [[1738, "torch.nn.utils.parametrize.remove_parametrizations"]], "basepruningmethod (class in torch.nn.utils.prune)": [[1739, "torch.nn.utils.prune.BasePruningMethod"]], "apply() (torch.nn.utils.prune.basepruningmethod class method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.apply"]], "apply_mask() (torch.nn.utils.prune.basepruningmethod method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.apply_mask"]], "compute_mask() (torch.nn.utils.prune.basepruningmethod method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.compute_mask"]], "prune() (torch.nn.utils.prune.basepruningmethod method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.prune"]], "remove() (torch.nn.utils.prune.basepruningmethod method)": [[1739, "torch.nn.utils.prune.BasePruningMethod.remove"]], "customfrommask (class in torch.nn.utils.prune)": [[1740, "torch.nn.utils.prune.CustomFromMask"]], "apply() (torch.nn.utils.prune.customfrommask class method)": [[1740, "torch.nn.utils.prune.CustomFromMask.apply"]], "apply_mask() (torch.nn.utils.prune.customfrommask method)": [[1740, "torch.nn.utils.prune.CustomFromMask.apply_mask"]], "prune() (torch.nn.utils.prune.customfrommask method)": [[1740, "torch.nn.utils.prune.CustomFromMask.prune"]], "remove() (torch.nn.utils.prune.customfrommask method)": [[1740, "torch.nn.utils.prune.CustomFromMask.remove"]], "identity (class in torch.nn.utils.prune)": [[1741, "torch.nn.utils.prune.Identity"]], "apply() (torch.nn.utils.prune.identity class method)": [[1741, "torch.nn.utils.prune.Identity.apply"]], "apply_mask() (torch.nn.utils.prune.identity method)": [[1741, "torch.nn.utils.prune.Identity.apply_mask"]], "prune() (torch.nn.utils.prune.identity method)": [[1741, "torch.nn.utils.prune.Identity.prune"]], "remove() (torch.nn.utils.prune.identity method)": [[1741, "torch.nn.utils.prune.Identity.remove"]], "l1unstructured (class in torch.nn.utils.prune)": [[1742, "torch.nn.utils.prune.L1Unstructured"]], "apply() (torch.nn.utils.prune.l1unstructured class method)": [[1742, "torch.nn.utils.prune.L1Unstructured.apply"]], "apply_mask() (torch.nn.utils.prune.l1unstructured method)": [[1742, "torch.nn.utils.prune.L1Unstructured.apply_mask"]], "prune() (torch.nn.utils.prune.l1unstructured method)": [[1742, "torch.nn.utils.prune.L1Unstructured.prune"]], "remove() (torch.nn.utils.prune.l1unstructured method)": [[1742, "torch.nn.utils.prune.L1Unstructured.remove"]], "lnstructured (class in torch.nn.utils.prune)": [[1743, "torch.nn.utils.prune.LnStructured"]], "apply() (torch.nn.utils.prune.lnstructured class method)": [[1743, "torch.nn.utils.prune.LnStructured.apply"]], "apply_mask() (torch.nn.utils.prune.lnstructured method)": [[1743, "torch.nn.utils.prune.LnStructured.apply_mask"]], "compute_mask() (torch.nn.utils.prune.lnstructured method)": [[1743, "torch.nn.utils.prune.LnStructured.compute_mask"]], "prune() (torch.nn.utils.prune.lnstructured method)": [[1743, "torch.nn.utils.prune.LnStructured.prune"]], "remove() (torch.nn.utils.prune.lnstructured method)": [[1743, "torch.nn.utils.prune.LnStructured.remove"]], "pruningcontainer (class in torch.nn.utils.prune)": [[1744, "torch.nn.utils.prune.PruningContainer"]], "add_pruning_method() (torch.nn.utils.prune.pruningcontainer method)": [[1744, 
"torch.nn.utils.prune.PruningContainer.add_pruning_method"]], "apply() (torch.nn.utils.prune.pruningcontainer class method)": [[1744, "torch.nn.utils.prune.PruningContainer.apply"]], "apply_mask() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.apply_mask"]], "compute_mask() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.compute_mask"]], "prune() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.prune"]], "remove() (torch.nn.utils.prune.pruningcontainer method)": [[1744, "torch.nn.utils.prune.PruningContainer.remove"]], "randomstructured (class in torch.nn.utils.prune)": [[1745, "torch.nn.utils.prune.RandomStructured"]], "apply() (torch.nn.utils.prune.randomstructured class method)": [[1745, "torch.nn.utils.prune.RandomStructured.apply"]], "apply_mask() (torch.nn.utils.prune.randomstructured method)": [[1745, "torch.nn.utils.prune.RandomStructured.apply_mask"]], "compute_mask() (torch.nn.utils.prune.randomstructured method)": [[1745, "torch.nn.utils.prune.RandomStructured.compute_mask"]], "prune() (torch.nn.utils.prune.randomstructured method)": [[1745, "torch.nn.utils.prune.RandomStructured.prune"]], "remove() (torch.nn.utils.prune.randomstructured method)": [[1745, "torch.nn.utils.prune.RandomStructured.remove"]], "randomunstructured (class in torch.nn.utils.prune)": [[1746, "torch.nn.utils.prune.RandomUnstructured"]], "apply() (torch.nn.utils.prune.randomunstructured class method)": [[1746, "torch.nn.utils.prune.RandomUnstructured.apply"]], "apply_mask() (torch.nn.utils.prune.randomunstructured method)": [[1746, "torch.nn.utils.prune.RandomUnstructured.apply_mask"]], "prune() (torch.nn.utils.prune.randomunstructured method)": [[1746, "torch.nn.utils.prune.RandomUnstructured.prune"]], "remove() (torch.nn.utils.prune.randomunstructured method)": [[1746, "torch.nn.utils.prune.RandomUnstructured.remove"]], "custom_from_mask() (in module torch.nn.utils.prune)": [[1747, "torch.nn.utils.prune.custom_from_mask"]], "global_unstructured() (in module torch.nn.utils.prune)": [[1748, "torch.nn.utils.prune.global_unstructured"]], "identity() (in module torch.nn.utils.prune)": [[1749, "torch.nn.utils.prune.identity"]], "is_pruned() (in module torch.nn.utils.prune)": [[1750, "torch.nn.utils.prune.is_pruned"]], "l1_unstructured() (in module torch.nn.utils.prune)": [[1751, "torch.nn.utils.prune.l1_unstructured"]], "ln_structured() (in module torch.nn.utils.prune)": [[1752, "torch.nn.utils.prune.ln_structured"]], "random_structured() (in module torch.nn.utils.prune)": [[1753, "torch.nn.utils.prune.random_structured"]], "random_unstructured() (in module torch.nn.utils.prune)": [[1754, "torch.nn.utils.prune.random_unstructured"]], "remove() (in module torch.nn.utils.prune)": [[1755, "torch.nn.utils.prune.remove"]], "remove_spectral_norm() (in module torch.nn.utils)": [[1756, "torch.nn.utils.remove_spectral_norm"]], "remove_weight_norm() (in module torch.nn.utils)": [[1757, "torch.nn.utils.remove_weight_norm"]], "packedsequence (class in torch.nn.utils.rnn)": [[1758, "torch.nn.utils.rnn.PackedSequence"]], "batch_sizes (torch.nn.utils.rnn.packedsequence attribute)": [[1758, "torch.nn.utils.rnn.PackedSequence.batch_sizes"]], "count() (torch.nn.utils.rnn.packedsequence method)": [[1758, "torch.nn.utils.rnn.PackedSequence.count"]], "data (torch.nn.utils.rnn.packedsequence attribute)": [[1758, "torch.nn.utils.rnn.PackedSequence.data"]], "index() 
(torch.nn.utils.rnn.packedsequence method)": [[1758, "torch.nn.utils.rnn.PackedSequence.index"]], "is_cuda (torch.nn.utils.rnn.packedsequence property)": [[1758, "torch.nn.utils.rnn.PackedSequence.is_cuda"]], "is_pinned() (torch.nn.utils.rnn.packedsequence method)": [[1758, "torch.nn.utils.rnn.PackedSequence.is_pinned"]], "sorted_indices (torch.nn.utils.rnn.packedsequence attribute)": [[1758, "torch.nn.utils.rnn.PackedSequence.sorted_indices"]], "to() (torch.nn.utils.rnn.packedsequence method)": [[1758, "torch.nn.utils.rnn.PackedSequence.to"]], "unsorted_indices (torch.nn.utils.rnn.packedsequence attribute)": [[1758, "torch.nn.utils.rnn.PackedSequence.unsorted_indices"]], "pack_padded_sequence() (in module torch.nn.utils.rnn)": [[1759, "torch.nn.utils.rnn.pack_padded_sequence"]], "pack_sequence() (in module torch.nn.utils.rnn)": [[1760, "torch.nn.utils.rnn.pack_sequence"]], "pad_packed_sequence() (in module torch.nn.utils.rnn)": [[1761, "torch.nn.utils.rnn.pad_packed_sequence"]], "pad_sequence() (in module torch.nn.utils.rnn)": [[1762, "torch.nn.utils.rnn.pad_sequence"]], "unpack_sequence() (in module torch.nn.utils.rnn)": [[1763, "torch.nn.utils.rnn.unpack_sequence"]], "unpad_sequence() (in module torch.nn.utils.rnn)": [[1764, "torch.nn.utils.rnn.unpad_sequence"]], "skip_init() (in module torch.nn.utils)": [[1765, "torch.nn.utils.skip_init"]], "spectral_norm() (in module torch.nn.utils)": [[1766, "torch.nn.utils.spectral_norm"]], "functional_call() (in module torch.nn.utils.stateless)": [[1767, "torch.nn.utils.stateless.functional_call"]], "vector_to_parameters() (in module torch.nn.utils)": [[1768, "torch.nn.utils.vector_to_parameters"]], "weight_norm() (in module torch.nn.utils)": [[1769, "torch.nn.utils.weight_norm"]], "no_grad (class in torch)": [[1770, "torch.no_grad"]], "nonzero() (in module torch)": [[1771, "torch.nonzero"]], "norm() (in module torch)": [[1772, "torch.norm"]], "normal() (in module torch)": [[1773, "torch.normal"]], "not_equal() (in module torch)": [[1774, "torch.not_equal"]], "numel() (in module torch)": [[1775, "torch.numel"]], "ones() (in module torch)": [[1776, "torch.ones"]], "ones_like() (in module torch)": [[1777, "torch.ones_like"]], "jitscalartype (class in torch.onnx)": [[1778, "torch.onnx.JitScalarType"]], "dtype() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.dtype"]], "from_dtype() (torch.onnx.jitscalartype class method)": [[1778, "torch.onnx.JitScalarType.from_dtype"]], "from_onnx_type() (torch.onnx.jitscalartype class method)": [[1778, "torch.onnx.JitScalarType.from_onnx_type"]], "from_value() (torch.onnx.jitscalartype class method)": [[1778, "torch.onnx.JitScalarType.from_value"]], "onnx_compatible() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.onnx_compatible"]], "onnx_type() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.onnx_type"]], "scalar_name() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.scalar_name"]], "torch_name() (torch.onnx.jitscalartype method)": [[1778, "torch.onnx.JitScalarType.torch_name"]], "graphinfo (class in torch.onnx.verification)": [[1779, "torch.onnx.verification.GraphInfo"]], "all_mismatch_leaf_graph_info() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.all_mismatch_leaf_graph_info"]], "clear() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.clear"]], "essential_node_count() (torch.onnx.verification.graphinfo method)": [[1779, 
"torch.onnx.verification.GraphInfo.essential_node_count"]], "essential_node_kinds() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.essential_node_kinds"]], "export_repro() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.export_repro"]], "find_mismatch() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.find_mismatch"]], "find_partition() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.find_partition"]], "has_mismatch() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.has_mismatch"]], "pretty_print_mismatch() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.pretty_print_mismatch"]], "pretty_print_tree() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.pretty_print_tree"]], "verify_export() (torch.onnx.verification.graphinfo method)": [[1779, "torch.onnx.verification.GraphInfo.verify_export"]], "verificationoptions (class in torch.onnx.verification)": [[1780, "torch.onnx.verification.VerificationOptions"]], "asgd (class in torch.optim)": [[1781, "torch.optim.ASGD"]], "add_param_group() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.add_param_group"]], "load_state_dict() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.register_step_pre_hook"]], "state_dict() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.state_dict"]], "step() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.step"]], "zero_grad() (torch.optim.asgd method)": [[1781, "torch.optim.ASGD.zero_grad"]], "adadelta (class in torch.optim)": [[1782, "torch.optim.Adadelta"]], "add_param_group() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.add_param_group"]], "load_state_dict() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.register_step_pre_hook"]], "state_dict() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.state_dict"]], "step() (torch.optim.adadelta 
method)": [[1782, "torch.optim.Adadelta.step"]], "zero_grad() (torch.optim.adadelta method)": [[1782, "torch.optim.Adadelta.zero_grad"]], "adagrad (class in torch.optim)": [[1783, "torch.optim.Adagrad"]], "add_param_group() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.add_param_group"]], "load_state_dict() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.register_step_pre_hook"]], "state_dict() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.state_dict"]], "step() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.step"]], "zero_grad() (torch.optim.adagrad method)": [[1783, "torch.optim.Adagrad.zero_grad"]], "adam (class in torch.optim)": [[1784, "torch.optim.Adam"]], "add_param_group() (torch.optim.adam method)": [[1784, "torch.optim.Adam.add_param_group"]], "load_state_dict() (torch.optim.adam method)": [[1784, "torch.optim.Adam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adam method)": [[1784, "torch.optim.Adam.register_step_pre_hook"]], "state_dict() (torch.optim.adam method)": [[1784, "torch.optim.Adam.state_dict"]], "step() (torch.optim.adam method)": [[1784, "torch.optim.Adam.step"]], "zero_grad() (torch.optim.adam method)": [[1784, "torch.optim.Adam.zero_grad"]], "adamw (class in torch.optim)": [[1785, "torch.optim.AdamW"]], "add_param_group() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.add_param_group"]], "load_state_dict() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adamw method)": [[1785, 
"torch.optim.AdamW.register_step_pre_hook"]], "state_dict() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.state_dict"]], "step() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.step"]], "zero_grad() (torch.optim.adamw method)": [[1785, "torch.optim.AdamW.zero_grad"]], "adamax (class in torch.optim)": [[1786, "torch.optim.Adamax"]], "add_param_group() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.add_param_group"]], "load_state_dict() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.register_step_pre_hook"]], "state_dict() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.state_dict"]], "step() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.step"]], "zero_grad() (torch.optim.adamax method)": [[1786, "torch.optim.Adamax.zero_grad"]], "lbfgs (class in torch.optim)": [[1787, "torch.optim.LBFGS"]], "add_param_group() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.add_param_group"]], "load_state_dict() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.register_step_pre_hook"]], "state_dict() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.state_dict"]], "step() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.step"]], "zero_grad() (torch.optim.lbfgs method)": [[1787, "torch.optim.LBFGS.zero_grad"]], "nadam (class in torch.optim)": [[1788, "torch.optim.NAdam"]], "add_param_group() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.add_param_group"]], "load_state_dict() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_state_dict_pre_hook"]], "register_step_post_hook() 
(torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.register_step_pre_hook"]], "state_dict() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.state_dict"]], "step() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.step"]], "zero_grad() (torch.optim.nadam method)": [[1788, "torch.optim.NAdam.zero_grad"]], "add_param_group() (torch.optim.optimizer method)": [[1789, "torch.optim.Optimizer.add_param_group"]], "load_state_dict() (torch.optim.optimizer method)": [[1790, "torch.optim.Optimizer.load_state_dict"]], "state_dict() (torch.optim.optimizer method)": [[1791, "torch.optim.Optimizer.state_dict"]], "step() (torch.optim.optimizer method)": [[1792, "torch.optim.Optimizer.step"]], "zero_grad() (torch.optim.optimizer method)": [[1793, "torch.optim.Optimizer.zero_grad"]], "radam (class in torch.optim)": [[1794, "torch.optim.RAdam"]], "add_param_group() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.add_param_group"]], "load_state_dict() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.register_step_pre_hook"]], "state_dict() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.state_dict"]], "step() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.step"]], "zero_grad() (torch.optim.radam method)": [[1794, "torch.optim.RAdam.zero_grad"]], "rmsprop (class in torch.optim)": [[1795, "torch.optim.RMSprop"]], "add_param_group() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.add_param_group"]], "load_state_dict() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.register_step_pre_hook"]], "state_dict() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.state_dict"]], "step() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.step"]], "zero_grad() (torch.optim.rmsprop method)": [[1795, "torch.optim.RMSprop.zero_grad"]], "rprop (class in torch.optim)": [[1796, "torch.optim.Rprop"]], "add_param_group() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.add_param_group"]], 
"load_state_dict() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.register_step_pre_hook"]], "state_dict() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.state_dict"]], "step() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.step"]], "zero_grad() (torch.optim.rprop method)": [[1796, "torch.optim.Rprop.zero_grad"]], "sgd (class in torch.optim)": [[1797, "torch.optim.SGD"]], "add_param_group() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.add_param_group"]], "load_state_dict() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.register_step_pre_hook"]], "state_dict() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.state_dict"]], "step() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.step"]], "zero_grad() (torch.optim.sgd method)": [[1797, "torch.optim.SGD.zero_grad"]], "sparseadam (class in torch.optim)": [[1798, "torch.optim.SparseAdam"]], "add_param_group() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.add_param_group"]], "load_state_dict() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.load_state_dict"]], "register_load_state_dict_post_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_load_state_dict_post_hook"]], "register_load_state_dict_pre_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_load_state_dict_pre_hook"]], "register_state_dict_post_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_state_dict_post_hook"]], "register_state_dict_pre_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_state_dict_pre_hook"]], "register_step_post_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_step_post_hook"]], "register_step_pre_hook() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.register_step_pre_hook"]], "state_dict() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.state_dict"]], "step() (torch.optim.sparseadam method)": [[1798, "torch.optim.SparseAdam.step"]], "zero_grad() (torch.optim.sparseadam method)": [[1798, 
"torch.optim.SparseAdam.zero_grad"]], "chainedscheduler (class in torch.optim.lr_scheduler)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler"]], "get_last_lr() (torch.optim.lr_scheduler.chainedscheduler method)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.chainedscheduler method)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.chainedscheduler method)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler.print_lr"]], "state_dict() (torch.optim.lr_scheduler.chainedscheduler method)": [[1799, "torch.optim.lr_scheduler.ChainedScheduler.state_dict"]], "constantlr (class in torch.optim.lr_scheduler)": [[1800, "torch.optim.lr_scheduler.ConstantLR"]], "get_last_lr() (torch.optim.lr_scheduler.constantlr method)": [[1800, "torch.optim.lr_scheduler.ConstantLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.constantlr method)": [[1800, "torch.optim.lr_scheduler.ConstantLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.constantlr method)": [[1800, "torch.optim.lr_scheduler.ConstantLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.constantlr method)": [[1800, "torch.optim.lr_scheduler.ConstantLR.state_dict"]], "cosineannealinglr (class in torch.optim.lr_scheduler)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR"]], "get_last_lr() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.cosineannealinglr method)": [[1801, "torch.optim.lr_scheduler.CosineAnnealingLR.state_dict"]], "cosineannealingwarmrestarts (class in torch.optim.lr_scheduler)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts"]], "get_last_lr() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.print_lr"]], "state_dict() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.state_dict"]], "step() (torch.optim.lr_scheduler.cosineannealingwarmrestarts method)": [[1802, "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts.step"]], "cycliclr (class in torch.optim.lr_scheduler)": [[1803, "torch.optim.lr_scheduler.CyclicLR"]], "get_last_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1803, "torch.optim.lr_scheduler.CyclicLR.get_last_lr"]], "get_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1803, "torch.optim.lr_scheduler.CyclicLR.get_lr"]], "print_lr() (torch.optim.lr_scheduler.cycliclr method)": [[1803, "torch.optim.lr_scheduler.CyclicLR.print_lr"]], "exponentiallr (class in torch.optim.lr_scheduler)": [[1804, "torch.optim.lr_scheduler.ExponentialLR"]], "get_last_lr() (torch.optim.lr_scheduler.exponentiallr method)": [[1804, "torch.optim.lr_scheduler.ExponentialLR.get_last_lr"]], "load_state_dict() 
(torch.optim.lr_scheduler.exponentiallr method)": [[1804, "torch.optim.lr_scheduler.ExponentialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.exponentiallr method)": [[1804, "torch.optim.lr_scheduler.ExponentialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.exponentiallr method)": [[1804, "torch.optim.lr_scheduler.ExponentialLR.state_dict"]], "lambdalr (class in torch.optim.lr_scheduler)": [[1805, "torch.optim.lr_scheduler.LambdaLR"]], "get_last_lr() (torch.optim.lr_scheduler.lambdalr method)": [[1805, "torch.optim.lr_scheduler.LambdaLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.lambdalr method)": [[1805, "torch.optim.lr_scheduler.LambdaLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.lambdalr method)": [[1805, "torch.optim.lr_scheduler.LambdaLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.lambdalr method)": [[1805, "torch.optim.lr_scheduler.LambdaLR.state_dict"]], "linearlr (class in torch.optim.lr_scheduler)": [[1806, "torch.optim.lr_scheduler.LinearLR"]], "get_last_lr() (torch.optim.lr_scheduler.linearlr method)": [[1806, "torch.optim.lr_scheduler.LinearLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.linearlr method)": [[1806, "torch.optim.lr_scheduler.LinearLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.linearlr method)": [[1806, "torch.optim.lr_scheduler.LinearLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.linearlr method)": [[1806, "torch.optim.lr_scheduler.LinearLR.state_dict"]], "multisteplr (class in torch.optim.lr_scheduler)": [[1807, "torch.optim.lr_scheduler.MultiStepLR"]], "get_last_lr() (torch.optim.lr_scheduler.multisteplr method)": [[1807, "torch.optim.lr_scheduler.MultiStepLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.multisteplr method)": [[1807, "torch.optim.lr_scheduler.MultiStepLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.multisteplr method)": [[1807, "torch.optim.lr_scheduler.MultiStepLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.multisteplr method)": [[1807, "torch.optim.lr_scheduler.MultiStepLR.state_dict"]], "multiplicativelr (class in torch.optim.lr_scheduler)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR"]], "get_last_lr() (torch.optim.lr_scheduler.multiplicativelr method)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.multiplicativelr method)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.multiplicativelr method)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.multiplicativelr method)": [[1808, "torch.optim.lr_scheduler.MultiplicativeLR.state_dict"]], "onecyclelr (class in torch.optim.lr_scheduler)": [[1809, "torch.optim.lr_scheduler.OneCycleLR"]], "get_last_lr() (torch.optim.lr_scheduler.onecyclelr method)": [[1809, "torch.optim.lr_scheduler.OneCycleLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.onecyclelr method)": [[1809, "torch.optim.lr_scheduler.OneCycleLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.onecyclelr method)": [[1809, "torch.optim.lr_scheduler.OneCycleLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.onecyclelr method)": [[1809, "torch.optim.lr_scheduler.OneCycleLR.state_dict"]], "polynomiallr (class in torch.optim.lr_scheduler)": [[1810, "torch.optim.lr_scheduler.PolynomialLR"]], "get_last_lr() (torch.optim.lr_scheduler.polynomiallr method)": [[1810, 
"torch.optim.lr_scheduler.PolynomialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.polynomiallr method)": [[1810, "torch.optim.lr_scheduler.PolynomialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.polynomiallr method)": [[1810, "torch.optim.lr_scheduler.PolynomialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.polynomiallr method)": [[1810, "torch.optim.lr_scheduler.PolynomialLR.state_dict"]], "reducelronplateau (class in torch.optim.lr_scheduler)": [[1811, "torch.optim.lr_scheduler.ReduceLROnPlateau"]], "get_last_lr() (torch.optim.lr_scheduler.reducelronplateau method)": [[1811, "torch.optim.lr_scheduler.ReduceLROnPlateau.get_last_lr"]], "print_lr() (torch.optim.lr_scheduler.reducelronplateau method)": [[1811, "torch.optim.lr_scheduler.ReduceLROnPlateau.print_lr"]], "sequentiallr (class in torch.optim.lr_scheduler)": [[1812, "torch.optim.lr_scheduler.SequentialLR"]], "get_last_lr() (torch.optim.lr_scheduler.sequentiallr method)": [[1812, "torch.optim.lr_scheduler.SequentialLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.sequentiallr method)": [[1812, "torch.optim.lr_scheduler.SequentialLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.sequentiallr method)": [[1812, "torch.optim.lr_scheduler.SequentialLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.sequentiallr method)": [[1812, "torch.optim.lr_scheduler.SequentialLR.state_dict"]], "steplr (class in torch.optim.lr_scheduler)": [[1813, "torch.optim.lr_scheduler.StepLR"]], "get_last_lr() (torch.optim.lr_scheduler.steplr method)": [[1813, "torch.optim.lr_scheduler.StepLR.get_last_lr"]], "load_state_dict() (torch.optim.lr_scheduler.steplr method)": [[1813, "torch.optim.lr_scheduler.StepLR.load_state_dict"]], "print_lr() (torch.optim.lr_scheduler.steplr method)": [[1813, "torch.optim.lr_scheduler.StepLR.print_lr"]], "state_dict() (torch.optim.lr_scheduler.steplr method)": [[1813, "torch.optim.lr_scheduler.StepLR.state_dict"]], "orgqr() (in module torch)": [[1814, "torch.orgqr"]], "ormqr() (in module torch)": [[1815, "torch.ormqr"]], "outer() (in module torch)": [[1816, "torch.outer"]], "pca_lowrank() (in module torch)": [[1817, "torch.pca_lowrank"]], "permute() (in module torch)": [[1818, "torch.permute"]], "pinverse() (in module torch)": [[1819, "torch.pinverse"]], "poisson() (in module torch)": [[1820, "torch.poisson"]], "polar() (in module torch)": [[1821, "torch.polar"]], "polygamma() (in module torch)": [[1822, "torch.polygamma"]], "positive() (in module torch)": [[1823, "torch.positive"]], "pow() (in module torch)": [[1824, "torch.pow"]], "prod() (in module torch)": [[1825, "torch.prod"]], "promote_types() (in module torch)": [[1826, "torch.promote_types"]], "qr() (in module torch)": [[1827, "torch.qr"]], "quantile() (in module torch)": [[1828, "torch.quantile"]], "quantize_per_channel() (in module torch)": [[1829, "torch.quantize_per_channel"]], "quantize_per_tensor() (in module torch)": [[1830, "torch.quantize_per_tensor"]], "quantized_batch_norm() (in module torch)": [[1831, "torch.quantized_batch_norm"]], "quantized_max_pool1d() (in module torch)": [[1832, "torch.quantized_max_pool1d"]], "quantized_max_pool2d() (in module torch)": [[1833, "torch.quantized_max_pool2d"]], "sobolengine (class in torch.quasirandom)": [[1834, "torch.quasirandom.SobolEngine"]], "draw() (torch.quasirandom.sobolengine method)": [[1834, "torch.quasirandom.SobolEngine.draw"]], "draw_base2() (torch.quasirandom.sobolengine method)": [[1834, "torch.quasirandom.SobolEngine.draw_base2"]], 
"fast_forward() (torch.quasirandom.sobolengine method)": [[1834, "torch.quasirandom.SobolEngine.fast_forward"]], "reset() (torch.quasirandom.sobolengine method)": [[1834, "torch.quasirandom.SobolEngine.reset"]], "rad2deg() (in module torch)": [[1835, "torch.rad2deg"]], "rand() (in module torch)": [[1836, "torch.rand"]], "rand_like() (in module torch)": [[1837, "torch.rand_like"]], "randint() (in module torch)": [[1838, "torch.randint"]], "randint_like() (in module torch)": [[1839, "torch.randint_like"]], "randn() (in module torch)": [[1840, "torch.randn"]], "randn_like() (in module torch)": [[1841, "torch.randn_like"]], "randperm() (in module torch)": [[1842, "torch.randperm"]], "range() (in module torch)": [[1843, "torch.range"]], "ravel() (in module torch)": [[1844, "torch.ravel"]], "real() (in module torch)": [[1845, "torch.real"]], "reciprocal() (in module torch)": [[1846, "torch.reciprocal"]], "remainder() (in module torch)": [[1847, "torch.remainder"]], "renorm() (in module torch)": [[1848, "torch.renorm"]], "repeat_interleave() (in module torch)": [[1849, "torch.repeat_interleave"]], "reshape() (in module torch)": [[1850, "torch.reshape"]], "resolve_conj() (in module torch)": [[1851, "torch.resolve_conj"]], "resolve_neg() (in module torch)": [[1852, "torch.resolve_neg"]], "result_type() (in module torch)": [[1853, "torch.result_type"]], "roll() (in module torch)": [[1854, "torch.roll"]], "rot90() (in module torch)": [[1855, "torch.rot90"]], "round() (in module torch)": [[1856, "torch.round"]], "row_stack() (in module torch)": [[1857, "torch.row_stack"]], "rsqrt() (in module torch)": [[1858, "torch.rsqrt"]], "save() (in module torch)": [[1859, "torch.save"]], "scatter() (in module torch)": [[1860, "torch.scatter"]], "scatter_add() (in module torch)": [[1861, "torch.scatter_add"]], "scatter_reduce() (in module torch)": [[1862, "torch.scatter_reduce"]], "searchsorted() (in module torch)": [[1863, "torch.searchsorted"]], "seed() (in module torch)": [[1864, "torch.seed"]], "select() (in module torch)": [[1865, "torch.select"]], "select_scatter() (in module torch)": [[1866, "torch.select_scatter"]], "set_default_device() (in module torch)": [[1867, "torch.set_default_device"]], "set_default_dtype() (in module torch)": [[1868, "torch.set_default_dtype"]], "set_default_tensor_type() (in module torch)": [[1869, "torch.set_default_tensor_type"]], "set_deterministic_debug_mode() (in module torch)": [[1870, "torch.set_deterministic_debug_mode"]], "set_float32_matmul_precision() (in module torch)": [[1871, "torch.set_float32_matmul_precision"]], "set_flush_denormal() (in module torch)": [[1872, "torch.set_flush_denormal"]], "set_num_interop_threads() (in module torch)": [[1873, "torch.set_num_interop_threads"]], "set_num_threads() (in module torch)": [[1874, "torch.set_num_threads"]], "set_printoptions() (in module torch)": [[1875, "torch.set_printoptions"]], "set_rng_state() (in module torch)": [[1876, "torch.set_rng_state"]], "set_warn_always() (in module torch)": [[1877, "torch.set_warn_always"]], "sgn() (in module torch)": [[1878, "torch.sgn"]], "sigmoid() (in module torch)": [[1879, "torch.sigmoid"]], "sign() (in module torch)": [[1880, "torch.sign"]], "bartlett() (in module torch.signal.windows)": [[1881, "torch.signal.windows.bartlett"]], "blackman() (in module torch.signal.windows)": [[1882, "torch.signal.windows.blackman"]], "cosine() (in module torch.signal.windows)": [[1883, "torch.signal.windows.cosine"]], "exponential() (in module torch.signal.windows)": [[1884, 
"torch.signal.windows.exponential"]], "gaussian() (in module torch.signal.windows)": [[1885, "torch.signal.windows.gaussian"]], "general_cosine() (in module torch.signal.windows)": [[1886, "torch.signal.windows.general_cosine"]], "general_hamming() (in module torch.signal.windows)": [[1887, "torch.signal.windows.general_hamming"]], "hamming() (in module torch.signal.windows)": [[1888, "torch.signal.windows.hamming"]], "hann() (in module torch.signal.windows)": [[1889, "torch.signal.windows.hann"]], "kaiser() (in module torch.signal.windows)": [[1890, "torch.signal.windows.kaiser"]], "nuttall() (in module torch.signal.windows)": [[1891, "torch.signal.windows.nuttall"]], "signbit() (in module torch)": [[1892, "torch.signbit"]], "sin() (in module torch)": [[1893, "torch.sin"]], "sinc() (in module torch)": [[1894, "torch.sinc"]], "sinh() (in module torch)": [[1895, "torch.sinh"]], "slice_scatter() (in module torch)": [[1896, "torch.slice_scatter"]], "slogdet() (in module torch)": [[1897, "torch.slogdet"]], "smm() (in module torch)": [[1898, "torch.smm"]], "softmax() (in module torch)": [[1899, "torch.softmax"]], "sort() (in module torch)": [[1900, "torch.sort"]], "addmm() (in module torch.sparse)": [[1901, "torch.sparse.addmm"]], "as_sparse_gradcheck() (in module torch.sparse)": [[1902, "torch.sparse.as_sparse_gradcheck"]], "check_sparse_tensor_invariants (class in torch.sparse)": [[1903, "torch.sparse.check_sparse_tensor_invariants"]], "disable() (torch.sparse.check_sparse_tensor_invariants static method)": [[1903, "torch.sparse.check_sparse_tensor_invariants.disable"]], "enable() (torch.sparse.check_sparse_tensor_invariants static method)": [[1903, "torch.sparse.check_sparse_tensor_invariants.enable"]], "is_enabled() (torch.sparse.check_sparse_tensor_invariants static method)": [[1903, "torch.sparse.check_sparse_tensor_invariants.is_enabled"]], "log_softmax() (in module torch.sparse)": [[1904, "torch.sparse.log_softmax"]], "mm() (in module torch.sparse)": [[1905, "torch.sparse.mm"]], "sampled_addmm() (in module torch.sparse)": [[1906, "torch.sparse.sampled_addmm"]], "softmax() (in module torch.sparse)": [[1907, "torch.sparse.softmax"]], "spdiags() (in module torch.sparse)": [[1908, "torch.sparse.spdiags"]], "sum() (in module torch.sparse)": [[1909, "torch.sparse.sum"]], "sparse_bsc_tensor() (in module torch)": [[1910, "torch.sparse_bsc_tensor"]], "sparse_bsr_tensor() (in module torch)": [[1911, "torch.sparse_bsr_tensor"]], "sparse_compressed_tensor() (in module torch)": [[1912, "torch.sparse_compressed_tensor"]], "sparse_coo_tensor() (in module torch)": [[1913, "torch.sparse_coo_tensor"]], "sparse_csc_tensor() (in module torch)": [[1914, "torch.sparse_csc_tensor"]], "sparse_csr_tensor() (in module torch)": [[1915, "torch.sparse_csr_tensor"]], "split() (in module torch)": [[1916, "torch.split"]], "sqrt() (in module torch)": [[1917, "torch.sqrt"]], "square() (in module torch)": [[1918, "torch.square"]], "squeeze() (in module torch)": [[1919, "torch.squeeze"]], "sspaddmm() (in module torch)": [[1920, "torch.sspaddmm"]], "stack() (in module torch)": [[1921, "torch.stack"]], "std() (in module torch)": [[1922, "torch.std"]], "std_mean() (in module torch)": [[1923, "torch.std_mean"]], "stft() (in module torch)": [[1924, "torch.stft"]], "sub() (in module torch)": [[1925, "torch.sub"]], "subtract() (in module torch)": [[1926, "torch.subtract"]], "sum() (in module torch)": [[1927, "torch.sum"]], "svd() (in module torch)": [[1928, "torch.svd"]], "svd_lowrank() (in module torch)": [[1929, 
"torch.svd_lowrank"]], "swapaxes() (in module torch)": [[1930, "torch.swapaxes"]], "swapdims() (in module torch)": [[1931, "torch.swapdims"]], "sym_float() (in module torch)": [[1932, "torch.sym_float"]], "sym_int() (in module torch)": [[1933, "torch.sym_int"]], "sym_ite() (in module torch)": [[1934, "torch.sym_ite"]], "sym_max() (in module torch)": [[1935, "torch.sym_max"]], "sym_min() (in module torch)": [[1936, "torch.sym_min"]], "sym_not() (in module torch)": [[1937, "torch.sym_not"]], "t() (in module torch)": [[1938, "torch.t"]], "take() (in module torch)": [[1939, "torch.take"]], "take_along_dim() (in module torch)": [[1940, "torch.take_along_dim"]], "tan() (in module torch)": [[1941, "torch.tan"]], "tanh() (in module torch)": [[1942, "torch.tanh"]], "tensor() (in module torch)": [[1943, "torch.tensor"]], "tensor_split() (in module torch)": [[1944, "torch.tensor_split"]], "tensordot() (in module torch)": [[1945, "torch.tensordot"]], "tile() (in module torch)": [[1946, "torch.tile"]], "topk() (in module torch)": [[1947, "torch.topk"]], "trace() (in module torch)": [[1948, "torch.trace"]], "transpose() (in module torch)": [[1949, "torch.transpose"]], "trapezoid() (in module torch)": [[1950, "torch.trapezoid"]], "trapz() (in module torch)": [[1951, "torch.trapz"]], "triangular_solve() (in module torch)": [[1952, "torch.triangular_solve"]], "tril() (in module torch)": [[1953, "torch.tril"]], "tril_indices() (in module torch)": [[1954, "torch.tril_indices"]], "triu() (in module torch)": [[1955, "torch.triu"]], "triu_indices() (in module torch)": [[1956, "torch.triu_indices"]], "true_divide() (in module torch)": [[1957, "torch.true_divide"]], "trunc() (in module torch)": [[1958, "torch.trunc"]], "unbind() (in module torch)": [[1959, "torch.unbind"]], "unflatten() (in module torch)": [[1960, "torch.unflatten"]], "unique() (in module torch)": [[1961, "torch.unique"]], "unique_consecutive() (in module torch)": [[1962, "torch.unique_consecutive"]], "unravel_index() (in module torch)": [[1963, "torch.unravel_index"]], "unsqueeze() (in module torch)": [[1964, "torch.unsqueeze"]], "use_deterministic_algorithms() (in module torch)": [[1965, "torch.use_deterministic_algorithms"]], "generate_methods_for_privateuse1_backend() (in module torch.utils)": [[1966, "torch.utils.generate_methods_for_privateuse1_backend"]], "get_cpp_backtrace() (in module torch.utils)": [[1967, "torch.utils.get_cpp_backtrace"]], "rename_privateuse1_backend() (in module torch.utils)": [[1968, "torch.utils.rename_privateuse1_backend"]], "set_module() (in module torch.utils)": [[1969, "torch.utils.set_module"]], "swap_tensors() (in module torch.utils)": [[1970, "torch.utils.swap_tensors"]], "vander() (in module torch)": [[1971, "torch.vander"]], "var() (in module torch)": [[1972, "torch.var"]], "var_mean() (in module torch)": [[1973, "torch.var_mean"]], "vdot() (in module torch)": [[1974, "torch.vdot"]], "view_as_complex() (in module torch)": [[1975, "torch.view_as_complex"]], "view_as_real() (in module torch)": [[1976, "torch.view_as_real"]], "vmap() (in module torch)": [[1977, "torch.vmap"]], "vsplit() (in module torch)": [[1978, "torch.vsplit"]], "vstack() (in module torch)": [[1979, "torch.vstack"]], "where() (in module torch)": [[1980, "torch.where"]], "xlogy() (in module torch)": [[1981, "torch.xlogy"]], "event (class in torch.xpu)": [[1982, "torch.xpu.Event"]], "elapsed_time() (torch.xpu.event method)": [[1982, "torch.xpu.Event.elapsed_time"]], "query() (torch.xpu.event method)": [[1982, "torch.xpu.Event.query"]], 
"record() (torch.xpu.event method)": [[1982, "torch.xpu.Event.record"]], "synchronize() (torch.xpu.event method)": [[1982, "torch.xpu.Event.synchronize"]], "wait() (torch.xpu.event method)": [[1982, "torch.xpu.Event.wait"]], "stream (class in torch.xpu)": [[1983, "torch.xpu.Stream"]], "query() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.query"]], "record_event() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.record_event"]], "synchronize() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.synchronize"]], "wait_event() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.wait_event"]], "wait_stream() (torch.xpu.stream method)": [[1983, "torch.xpu.Stream.wait_stream"]], "streamcontext (class in torch.xpu)": [[1984, "torch.xpu.StreamContext"]], "current_device() (in module torch.xpu)": [[1985, "torch.xpu.current_device"]], "current_stream() (in module torch.xpu)": [[1986, "torch.xpu.current_stream"]], "device (class in torch.xpu)": [[1987, "torch.xpu.device"]], "device_count() (in module torch.xpu)": [[1988, "torch.xpu.device_count"]], "device_of (class in torch.xpu)": [[1989, "torch.xpu.device_of"]], "empty_cache() (in module torch.xpu)": [[1990, "torch.xpu.empty_cache"]], "get_device_capability() (in module torch.xpu)": [[1991, "torch.xpu.get_device_capability"]], "get_device_name() (in module torch.xpu)": [[1992, "torch.xpu.get_device_name"]], "get_device_properties() (in module torch.xpu)": [[1993, "torch.xpu.get_device_properties"]], "get_rng_state() (in module torch.xpu)": [[1994, "torch.xpu.get_rng_state"]], "get_rng_state_all() (in module torch.xpu)": [[1995, "torch.xpu.get_rng_state_all"]], "init() (in module torch.xpu)": [[1996, "torch.xpu.init"]], "initial_seed() (in module torch.xpu)": [[1997, "torch.xpu.initial_seed"]], "is_available() (in module torch.xpu)": [[1998, "torch.xpu.is_available"]], "is_initialized() (in module torch.xpu)": [[1999, "torch.xpu.is_initialized"]], "manual_seed() (in module torch.xpu)": [[2000, "torch.xpu.manual_seed"]], "manual_seed_all() (in module torch.xpu)": [[2001, "torch.xpu.manual_seed_all"]], "seed() (in module torch.xpu)": [[2002, "torch.xpu.seed"]], "seed_all() (in module torch.xpu)": [[2003, "torch.xpu.seed_all"]], "set_device() (in module torch.xpu)": [[2004, "torch.xpu.set_device"]], "set_rng_state() (in module torch.xpu)": [[2005, "torch.xpu.set_rng_state"]], "set_rng_state_all() (in module torch.xpu)": [[2006, "torch.xpu.set_rng_state_all"]], "set_stream() (in module torch.xpu)": [[2007, "torch.xpu.set_stream"]], "stream() (in module torch.xpu)": [[2008, "torch.xpu.stream"]], "synchronize() (in module torch.xpu)": [[2009, "torch.xpu.synchronize"]], "zeros() (in module torch)": [[2010, "torch.zeros"]], "zeros_like() (in module torch)": [[2011, "torch.zeros_like"]], "download_url_to_file() (in module torch.hub)": [[2012, "torch.hub.download_url_to_file"]], "get_dir() (in module torch.hub)": [[2012, "torch.hub.get_dir"]], "help() (in module torch.hub)": [[2012, "torch.hub.help"]], "list() (in module torch.hub)": [[2012, "torch.hub.list"]], "load() (in module torch.hub)": [[2012, "torch.hub.load"]], "load_state_dict_from_url() (in module torch.hub)": [[2012, "torch.hub.load_state_dict_from_url"]], "set_dir() (in module torch.hub)": [[2012, "torch.hub.set_dir"]], "torch.hub": [[2012, "module-torch.hub"]], "pytorch_jit": [[2014, "envvar-PYTORCH_JIT"]], "environment variable": [[2014, "envvar-PYTORCH_JIT"]], "export() (in module torch.jit)": [[2014, "torch.jit.export"]], "torch.jit": [[2014, "module-torch.jit"]], 
"torch.jit.annotations": [[2014, "module-torch.jit.annotations"]], "torch.jit.frontend": [[2014, "module-torch.jit.frontend"]], "torch.jit.generate_bytecode": [[2014, "module-torch.jit.generate_bytecode"]], "torch.jit.mobile": [[2014, "module-torch.jit.mobile"]], "torch.jit.quantized": [[2014, "module-torch.jit.quantized"]], "torch.jit.supported_ops": [[2015, "module-torch.jit.supported_ops"]], "is_scripting() (in module torch.jit)": [[2016, "torch.jit.is_scripting"]], "is_tracing() (in module torch.jit)": [[2016, "torch.jit.is_tracing"]], "torch.jit.unsupported_tensor_ops": [[2019, "module-torch.jit.unsupported_tensor_ops"]], "torch.utils.jit": [[2020, "module-torch.utils.jit"]], "library (class in torch.library)": [[2021, "torch.library.Library"]], "custom_op() (in module torch.library)": [[2021, "torch.library.custom_op"]], "define() (in module torch.library)": [[2021, "torch.library.define"]], "define() (torch.library.library method)": [[2021, "torch.library.Library.define"]], "fallthrough_kernel() (in module torch.library)": [[2021, "torch.library.fallthrough_kernel"]], "get_ctx() (in module torch.library)": [[2021, "torch.library.get_ctx"]], "impl() (in module torch.library)": [[2021, "torch.library.impl"]], "impl() (torch.library.library method)": [[2021, "torch.library.Library.impl"]], "impl_abstract() (in module torch.library)": [[2021, "torch.library.impl_abstract"]], "opcheck() (in module torch.library)": [[2021, "torch.library.opcheck"]], "register_autograd() (in module torch.library)": [[2021, "torch.library.register_autograd"]], "register_fake() (in module torch.library)": [[2021, "torch.library.register_fake"]], "register_kernel() (in module torch.library)": [[2021, "torch.library.register_kernel"]], "torch.library": [[2021, "module-torch.library"]], "torch.linalg": [[2022, "module-torch.linalg"]], "torch._logging": [[2023, "module-torch._logging"]], "torch.masked": [[2024, "module-torch.masked"]], "torch.masked.maskedtensor": [[2024, "module-torch.masked.maskedtensor"]], "torch.masked.maskedtensor.binary": [[2024, "module-torch.masked.maskedtensor.binary"]], "torch.masked.maskedtensor.core": [[2024, "module-torch.masked.maskedtensor.core"]], "torch.masked.maskedtensor.creation": [[2024, "module-torch.masked.maskedtensor.creation"]], "torch.masked.maskedtensor.passthrough": [[2024, "module-torch.masked.maskedtensor.passthrough"]], "torch.masked.maskedtensor.reductions": [[2024, "module-torch.masked.maskedtensor.reductions"]], "torch.masked.maskedtensor.unary": [[2024, "module-torch.masked.maskedtensor.unary"]], "optimize_for_mobile() (in module torch.utils.mobile_optimizer)": [[2027, "torch.utils.mobile_optimizer.optimize_for_mobile"]], "load_url() (in module torch.utils.model_zoo)": [[2028, "torch.utils.model_zoo.load_url"]], "torch.utils.model_zoo": [[2028, "module-torch.utils.model_zoo"]], "moduletracker (class in torch.utils.module_tracker)": [[2029, "torch.utils.module_tracker.ModuleTracker"]], "torch.utils.module_tracker": [[2029, "module-torch.utils.module_tracker"]], "aggregation (class in torch.monitor)": [[2030, "torch.monitor.Aggregation"]], "event (class in torch.monitor)": [[2030, "torch.monitor.Event"]], "eventhandlerhandle (class in torch.monitor)": [[2030, "torch.monitor.EventHandlerHandle"]], "stat (class in torch.monitor)": [[2030, "torch.monitor.Stat"]], "tensorboardeventhandler (class in torch.monitor)": [[2030, "torch.monitor.TensorboardEventHandler"]], "__init__() (torch.monitor.event method)": [[2030, "torch.monitor.Event.__init__"]], "__init__() 
(torch.monitor.stat method)": [[2030, "torch.monitor.Stat.__init__"]], "__init__() (torch.monitor.tensorboardeventhandler method)": [[2030, "torch.monitor.TensorboardEventHandler.__init__"]], "add() (torch.monitor.stat method)": [[2030, "torch.monitor.Stat.add"]], "count (torch.monitor.stat property)": [[2030, "torch.monitor.Stat.count"]], "data (torch.monitor.event property)": [[2030, "torch.monitor.Event.data"]], "data_value_t (class in torch.monitor)": [[2030, "torch.monitor.data_value_t"]], "get() (torch.monitor.stat method)": [[2030, "torch.monitor.Stat.get"]], "log_event() (in module torch.monitor)": [[2030, "torch.monitor.log_event"]], "name (torch.monitor.aggregation property)": [[2030, "torch.monitor.Aggregation.name"]], "name (torch.monitor.event property)": [[2030, "torch.monitor.Event.name"]], "name (torch.monitor.stat property)": [[2030, "torch.monitor.Stat.name"]], "register_event_handler() (in module torch.monitor)": [[2030, "torch.monitor.register_event_handler"]], "timestamp (torch.monitor.event property)": [[2030, "torch.monitor.Event.timestamp"]], "torch.monitor": [[2030, "module-torch.monitor"]], "unregister_event_handler() (in module torch.monitor)": [[2030, "torch.monitor.unregister_event_handler"]], "torch.mps": [[2031, "module-torch.mps"]], "torch.mps.event": [[2031, "module-torch.mps.event"]], "torch.mps.profiler": [[2031, "module-torch.mps.profiler"]], "torch.mtia": [[2032, "module-torch.mtia"]], "spawncontext (class in torch.multiprocessing)": [[2033, "torch.multiprocessing.SpawnContext"]], "get_all_sharing_strategies() (in module torch.multiprocessing)": [[2033, "torch.multiprocessing.get_all_sharing_strategies"]], "get_sharing_strategy() (in module torch.multiprocessing)": [[2033, "torch.multiprocessing.get_sharing_strategy"]], "join() (torch.multiprocessing.spawncontext method)": [[2033, "torch.multiprocessing.SpawnContext.join"]], "set_sharing_strategy() (in module torch.multiprocessing)": [[2033, "torch.multiprocessing.set_sharing_strategy"]], "spawn() (in module torch.multiprocessing.spawn)": [[2033, "torch.multiprocessing.spawn.spawn"]], "torch.multiprocessing": [[2033, "module-torch.multiprocessing"]], "torch.multiprocessing.pool": [[2033, "module-torch.multiprocessing.pool"]], "torch.multiprocessing.queue": [[2033, "module-torch.multiprocessing.queue"]], "torch.multiprocessing.reductions": [[2033, "module-torch.multiprocessing.reductions"]], "torch.multiprocessing.spawn": [[2033, "module-torch.multiprocessing.spawn"]], "align_as() (torch.tensor method)": [[2035, "torch.Tensor.align_as"]], "align_to() (torch.tensor method)": [[2035, "torch.Tensor.align_to"]], "names (torch.tensor attribute)": [[2035, "torch.Tensor.names"]], "refine_names() (torch.tensor method)": [[2035, "torch.Tensor.refine_names"]], "rename() (torch.tensor method)": [[2035, "torch.Tensor.rename"]], "rename_() (torch.tensor method)": [[2035, "torch.Tensor.rename_"]], "as_nested_tensor() (in module torch.nested)": [[2036, "torch.nested.as_nested_tensor"]], "nested_tensor() (in module torch.nested)": [[2036, "torch.nested.nested_tensor"]], "to_padded_tensor() (in module torch.nested)": [[2036, "torch.nested.to_padded_tensor"]], "torch.nested": [[2036, "module-torch.nested"]], "torch.nn": [[2037, "module-torch.nn"]], "torch.nn.backends": [[2037, "module-torch.nn.backends"]], "torch.nn.backends.thnn": [[2037, "module-torch.nn.backends.thnn"]], "torch.nn.common_types": [[2037, "module-torch.nn.common_types"]], "torch.nn.cpp": [[2037, "module-torch.nn.cpp"]], "torch.nn.functional": [[2037, 
"module-torch.nn.functional"]], "torch.nn.grad": [[2037, "module-torch.nn.grad"]], "torch.nn.init": [[2037, "module-torch.nn.init"]], "torch.nn.modules": [[2037, "module-torch.nn.modules"]], "torch.nn.modules.activation": [[2037, "module-torch.nn.modules.activation"]], "torch.nn.modules.adaptive": [[2037, "module-torch.nn.modules.adaptive"]], "torch.nn.modules.batchnorm": [[2037, "module-torch.nn.modules.batchnorm"]], "torch.nn.modules.channelshuffle": [[2037, "module-torch.nn.modules.channelshuffle"]], "torch.nn.modules.container": [[2037, "module-torch.nn.modules.container"]], "torch.nn.modules.conv": [[2037, "module-torch.nn.modules.conv"]], "torch.nn.modules.distance": [[2037, "module-torch.nn.modules.distance"]], "torch.nn.modules.dropout": [[2037, "module-torch.nn.modules.dropout"]], "torch.nn.modules.flatten": [[2037, "module-torch.nn.modules.flatten"]], "torch.nn.modules.fold": [[2037, "module-torch.nn.modules.fold"]], "torch.nn.modules.instancenorm": [[2037, "module-torch.nn.modules.instancenorm"]], "torch.nn.modules.lazy": [[2037, "module-torch.nn.modules.lazy"]], "torch.nn.modules.linear": [[2037, "module-torch.nn.modules.linear"]], "torch.nn.modules.loss": [[2037, "module-torch.nn.modules.loss"]], "torch.nn.modules.module": [[2037, "module-torch.nn.modules.module"]], "torch.nn.modules.normalization": [[2037, "module-torch.nn.modules.normalization"]], "torch.nn.modules.padding": [[2037, "module-torch.nn.modules.padding"]], "torch.nn.modules.pixelshuffle": [[2037, "module-torch.nn.modules.pixelshuffle"]], "torch.nn.modules.pooling": [[2037, "module-torch.nn.modules.pooling"]], "torch.nn.modules.rnn": [[2037, "module-torch.nn.modules.rnn"]], "torch.nn.modules.sparse": [[2037, "module-torch.nn.modules.sparse"]], "torch.nn.modules.transformer": [[2037, "module-torch.nn.modules.transformer"]], "torch.nn.modules.upsampling": [[2037, "module-torch.nn.modules.upsampling"]], "torch.nn.modules.utils": [[2037, "module-torch.nn.modules.utils"]], "torch.nn.parallel": [[2037, "module-torch.nn.parallel"]], "torch.nn.parallel.comm": [[2037, "module-torch.nn.parallel.comm"]], "torch.nn.parallel.distributed": [[2037, "module-torch.nn.parallel.distributed"]], "torch.nn.parallel.parallel_apply": [[2037, "module-torch.nn.parallel.parallel_apply"]], "torch.nn.parallel.replicate": [[2037, "module-torch.nn.parallel.replicate"]], "torch.nn.parallel.scatter_gather": [[2037, "module-torch.nn.parallel.scatter_gather"]], "torch.nn.parameter": [[2037, "module-torch.nn.parameter"]], "torch.nn.utils": [[2037, "module-torch.nn.utils"]], "torch.nn.utils.clip_grad": [[2037, "module-torch.nn.utils.clip_grad"]], "torch.nn.utils.convert_parameters": [[2037, "module-torch.nn.utils.convert_parameters"]], "torch.nn.utils.fusion": [[2037, "module-torch.nn.utils.fusion"]], "torch.nn.utils.init": [[2037, "module-torch.nn.utils.init"]], "torch.nn.utils.memory_format": [[2037, "module-torch.nn.utils.memory_format"]], "torch.nn.utils.parametrizations": [[2037, "module-torch.nn.utils.parametrizations"]], "torch.nn.utils.parametrize": [[2037, "module-torch.nn.utils.parametrize"]], "torch.nn.utils.prune": [[2037, "module-torch.nn.utils.prune"]], "torch.nn.utils.rnn": [[2037, "module-torch.nn.utils.rnn"]], "torch.nn.utils.stateless": [[2037, "module-torch.nn.utils.stateless"]], "torch.nn.attention": [[2038, "module-torch.nn.attention"]], "torch.nn.attention.bias": [[2039, "module-torch.nn.attention.bias"]], "calculate_gain() (in module torch.nn.init)": [[2041, "torch.nn.init.calculate_gain"]], "constant_() (in module 
torch.nn.init)": [[2041, "torch.nn.init.constant_"]], "dirac_() (in module torch.nn.init)": [[2041, "torch.nn.init.dirac_"]], "eye_() (in module torch.nn.init)": [[2041, "torch.nn.init.eye_"]], "kaiming_normal_() (in module torch.nn.init)": [[2041, "torch.nn.init.kaiming_normal_"]], "kaiming_uniform_() (in module torch.nn.init)": [[2041, "torch.nn.init.kaiming_uniform_"]], "normal_() (in module torch.nn.init)": [[2041, "torch.nn.init.normal_"]], "ones_() (in module torch.nn.init)": [[2041, "torch.nn.init.ones_"]], "orthogonal_() (in module torch.nn.init)": [[2041, "torch.nn.init.orthogonal_"]], "sparse_() (in module torch.nn.init)": [[2041, "torch.nn.init.sparse_"]], "trunc_normal_() (in module torch.nn.init)": [[2041, "torch.nn.init.trunc_normal_"]], "uniform_() (in module torch.nn.init)": [[2041, "torch.nn.init.uniform_"]], "xavier_normal_() (in module torch.nn.init)": [[2041, "torch.nn.init.xavier_normal_"]], "xavier_uniform_() (in module torch.nn.init)": [[2041, "torch.nn.init.xavier_uniform_"]], "zeros_() (in module torch.nn.init)": [[2041, "torch.nn.init.zeros_"]], "add_safe_globals() (in module torch.serialization)": [[2062, "torch.serialization.add_safe_globals"]], "clear_safe_globals() (in module torch.serialization)": [[2062, "torch.serialization.clear_safe_globals"]], "get_default_load_endianness() (in module torch.serialization)": [[2062, "torch.serialization.get_default_load_endianness"]], "get_default_mmap_options() (in module torch.serialization)": [[2062, "torch.serialization.get_default_mmap_options"]], "get_safe_globals() (in module torch.serialization)": [[2062, "torch.serialization.get_safe_globals"]], "register_package() (in module torch.serialization)": [[2062, "torch.serialization.register_package"]], "set_default_load_endianness() (in module torch.serialization)": [[2062, "torch.serialization.set_default_load_endianness"]], "set_default_mmap_options() (in module torch.serialization)": [[2062, "torch.serialization.set_default_mmap_options"]], "torch.onnx.errors": [[2064, "module-torch.onnx.errors"]], "torch.onnx.operators": [[2064, "module-torch.onnx.operators"]], "torch.onnx.symbolic_caffe2": [[2064, "module-torch.onnx.symbolic_caffe2"]], "torch.onnx.symbolic_helper": [[2064, "module-torch.onnx.symbolic_helper"]], "torch.onnx.symbolic_opset10": [[2064, "module-torch.onnx.symbolic_opset10"]], "torch.onnx.symbolic_opset11": [[2064, "module-torch.onnx.symbolic_opset11"]], "torch.onnx.symbolic_opset12": [[2064, "module-torch.onnx.symbolic_opset12"]], "torch.onnx.symbolic_opset13": [[2064, "module-torch.onnx.symbolic_opset13"]], "torch.onnx.symbolic_opset14": [[2064, "module-torch.onnx.symbolic_opset14"]], "torch.onnx.symbolic_opset15": [[2064, "module-torch.onnx.symbolic_opset15"]], "torch.onnx.symbolic_opset16": [[2064, "module-torch.onnx.symbolic_opset16"]], "torch.onnx.symbolic_opset17": [[2064, "module-torch.onnx.symbolic_opset17"]], "torch.onnx.symbolic_opset18": [[2064, "module-torch.onnx.symbolic_opset18"]], "torch.onnx.symbolic_opset19": [[2064, "module-torch.onnx.symbolic_opset19"]], "torch.onnx.symbolic_opset20": [[2064, "module-torch.onnx.symbolic_opset20"]], "torch.onnx.symbolic_opset7": [[2064, "module-torch.onnx.symbolic_opset7"]], "torch.onnx.symbolic_opset8": [[2064, "module-torch.onnx.symbolic_opset8"]], "torch.onnx.symbolic_opset9": [[2064, "module-torch.onnx.symbolic_opset9"]], "torch.onnx.utils": [[2064, "module-torch.onnx.utils"]], "torch.onnx.verification": [[2064, "module-torch.onnx.verification"]], "diagnosticoptions (class in torch.onnx)": 
[[2065, "torch.onnx.DiagnosticOptions"]], "exportoptions (class in torch.onnx)": [[2065, "torch.onnx.ExportOptions"]], "invalidexportoptionserror (class in torch.onnx)": [[2065, "torch.onnx.InvalidExportOptionsError"]], "onnxprogram (class in torch.onnx)": [[2065, "torch.onnx.ONNXProgram"]], "onnxprogramserializer (class in torch.onnx)": [[2065, "torch.onnx.ONNXProgramSerializer"]], "onnxruntimeoptions (class in torch.onnx)": [[2065, "torch.onnx.ONNXRuntimeOptions"]], "onnxexportererror (class in torch.onnx)": [[2065, "torch.onnx.OnnxExporterError"]], "onnxregistry (class in torch.onnx)": [[2065, "torch.onnx.OnnxRegistry"]], "adapt_torch_inputs_to_onnx() (torch.onnx.onnxprogram method)": [[2065, "torch.onnx.ONNXProgram.adapt_torch_inputs_to_onnx"]], "adapt_torch_outputs_to_onnx() (torch.onnx.onnxprogram method)": [[2065, "torch.onnx.ONNXProgram.adapt_torch_outputs_to_onnx"]], "diagnostic_context (torch.onnx.onnxprogram property)": [[2065, "torch.onnx.ONNXProgram.diagnostic_context"]], "dynamo_export() (in module torch.onnx)": [[2065, "torch.onnx.dynamo_export"]], "enable_fake_mode() (in module torch.onnx)": [[2065, "torch.onnx.enable_fake_mode"]], "fake_context (torch.onnx.onnxprogram property)": [[2065, "torch.onnx.ONNXProgram.fake_context"]], "get_op_functions() (torch.onnx.onnxregistry method)": [[2065, "torch.onnx.OnnxRegistry.get_op_functions"]], "is_registered_op() (torch.onnx.onnxregistry method)": [[2065, "torch.onnx.OnnxRegistry.is_registered_op"]], "model_proto (torch.onnx.onnxprogram property)": [[2065, "torch.onnx.ONNXProgram.model_proto"]], "model_signature (torch.onnx.onnxprogram property)": [[2065, "torch.onnx.ONNXProgram.model_signature"]], "opset_version (torch.onnx.onnxregistry property)": [[2065, "torch.onnx.OnnxRegistry.opset_version"]], "register_op() (torch.onnx.onnxregistry method)": [[2065, "torch.onnx.OnnxRegistry.register_op"]], "save() (torch.onnx.onnxprogram method)": [[2065, "torch.onnx.ONNXProgram.save"]], "save_diagnostics() (torch.onnx.onnxprogram method)": [[2065, "torch.onnx.ONNXProgram.save_diagnostics"]], "serialize() (torch.onnx.onnxprogramserializer method)": [[2065, "torch.onnx.ONNXProgramSerializer.serialize"]], "is_onnxrt_backend_supported() (in module torch.onnx)": [[2066, "torch.onnx.is_onnxrt_backend_supported"]], "disable_log() (in module torch.onnx)": [[2067, "torch.onnx.disable_log"]], "enable_log() (in module torch.onnx)": [[2067, "torch.onnx.enable_log"]], "export() (in module torch.onnx)": [[2067, "torch.onnx.export"]], "export_to_pretty_string() (in module torch.onnx)": [[2067, "torch.onnx.export_to_pretty_string"]], "find_mismatch() (in module torch.onnx.verification)": [[2067, "torch.onnx.verification.find_mismatch"]], "is_in_onnx_export() (in module torch.onnx)": [[2067, "torch.onnx.is_in_onnx_export"]], "register_custom_op_symbolic() (in module torch.onnx)": [[2067, "torch.onnx.register_custom_op_symbolic"]], "select_model_mode_for_export() (in module torch.onnx)": [[2067, "torch.onnx.select_model_mode_for_export"]], "torch.onnx": [[2067, "module-torch.onnx"]], "unregister_custom_op_symbolic() (in module torch.onnx)": [[2067, "torch.onnx.unregister_custom_op_symbolic"]], "optimizer (class in torch.optim)": [[2069, "torch.optim.Optimizer"]], "torch.optim": [[2069, "module-torch.optim"]], "torch.optim.adadelta": [[2069, "module-torch.optim.adadelta"]], "torch.optim.adagrad": [[2069, "module-torch.optim.adagrad"]], "torch.optim.adam": [[2069, "module-torch.optim.adam"]], "torch.optim.adamax": [[2069, "module-torch.optim.adamax"]], 
"torch.optim.adamw": [[2069, "module-torch.optim.adamw"]], "torch.optim.asgd": [[2069, "module-torch.optim.asgd"]], "torch.optim.lbfgs": [[2069, "module-torch.optim.lbfgs"]], "torch.optim.lr_scheduler": [[2069, "module-torch.optim.lr_scheduler"]], "torch.optim.nadam": [[2069, "module-torch.optim.nadam"]], "torch.optim.optimizer": [[2069, "module-torch.optim.optimizer"]], "torch.optim.radam": [[2069, "module-torch.optim.radam"]], "torch.optim.rmsprop": [[2069, "module-torch.optim.rmsprop"]], "torch.optim.rprop": [[2069, "module-torch.optim.rprop"]], "torch.optim.sgd": [[2069, "module-torch.optim.sgd"]], "torch.optim.sparse_adam": [[2069, "module-torch.optim.sparse_adam"]], "torch.optim.swa_utils": [[2069, "module-torch.optim.swa_utils"]], "directory (class in torch.package)": [[2070, "torch.package.Directory"]], "emptymatcherror (class in torch.package)": [[2070, "torch.package.EmptyMatchError"]], "packageexporter (class in torch.package)": [[2070, "torch.package.PackageExporter"]], "packageimporter (class in torch.package)": [[2070, "torch.package.PackageImporter"]], "packagingerror (class in torch.package)": [[2070, "torch.package.PackagingError"]], "__init__() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.__init__"]], "__init__() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.__init__"]], "add_dependency() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.add_dependency"]], "all_paths() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.all_paths"]], "close() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.close"]], "denied_modules() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.denied_modules"]], "deny() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.deny"]], "dependency_graph_string() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.dependency_graph_string"]], "extern() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.extern"]], "externed_modules() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.externed_modules"]], "file_structure() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.file_structure"]], "get_rdeps() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.get_rdeps"]], "get_unique_id() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.get_unique_id"]], "has_file() (torch.package.directory method)": [[2070, "torch.package.Directory.has_file"]], "id() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.id"]], "import_module() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.import_module"]], "intern() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.intern"]], "interned_modules() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.interned_modules"]], "load_binary() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.load_binary"]], "load_pickle() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.load_pickle"]], "load_text() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.load_text"]], "mock() (torch.package.packageexporter method)": [[2070, 
"torch.package.PackageExporter.mock"]], "mocked_modules() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.mocked_modules"]], "python_version() (torch.package.packageimporter method)": [[2070, "torch.package.PackageImporter.python_version"]], "register_extern_hook() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.register_extern_hook"]], "register_intern_hook() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.register_intern_hook"]], "register_mock_hook() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.register_mock_hook"]], "save_binary() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_binary"]], "save_module() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_module"]], "save_pickle() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_pickle"]], "save_source_file() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_source_file"]], "save_source_string() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_source_string"]], "save_text() (torch.package.packageexporter method)": [[2070, "torch.package.PackageExporter.save_text"]], "torch.package": [[2070, "module-torch.package"]], "torch.package.analyze": [[2070, "module-torch.package.analyze"]], "torch.package.analyze.find_first_use_of_broken_modules": [[2070, "module-torch.package.analyze.find_first_use_of_broken_modules"]], "torch.package.analyze.is_from_package": [[2070, "module-torch.package.analyze.is_from_package"]], "torch.package.analyze.trace_dependencies": [[2070, "module-torch.package.analyze.trace_dependencies"]], "torch.package.file_structure_representation": [[2070, "module-torch.package.file_structure_representation"]], "torch.package.find_file_dependencies": [[2070, "module-torch.package.find_file_dependencies"]], "torch.package.glob_group": [[2070, "module-torch.package.glob_group"]], "torch.package.importer": [[2070, "module-torch.package.importer"]], "torch.package.package_exporter": [[2070, "module-torch.package.package_exporter"]], "torch.package.package_importer": [[2070, "module-torch.package.package_importer"]], "profileraction (class in torch.profiler)": [[2071, "torch.profiler.ProfilerAction"]], "profileractivity (class in torch.profiler)": [[2071, "torch.profiler.ProfilerActivity"]], "_kinetoprofile (class in torch.profiler)": [[2071, "torch.profiler._KinetoProfile"]], "add_metadata() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.add_metadata"]], "add_metadata_json() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.add_metadata_json"]], "events() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.events"]], "export_chrome_trace() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.export_chrome_trace"]], "export_memory_timeline() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.export_memory_timeline"]], "export_stacks() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.export_stacks"]], "is_available() (in module torch.profiler.itt)": [[2071, "torch.profiler.itt.is_available"]], "key_averages() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.key_averages"]], "mark() (in module torch.profiler.itt)": [[2071, 
"torch.profiler.itt.mark"]], "name (torch.profiler.profileractivity property)": [[2071, "torch.profiler.ProfilerActivity.name"]], "preset_metadata_json() (torch.profiler._kinetoprofile method)": [[2071, "torch.profiler._KinetoProfile.preset_metadata_json"]], "profile (class in torch.profiler)": [[2071, "torch.profiler.profile"]], "range_pop() (in module torch.profiler.itt)": [[2071, "torch.profiler.itt.range_pop"]], "range_push() (in module torch.profiler.itt)": [[2071, "torch.profiler.itt.range_push"]], "schedule() (in module torch.profiler)": [[2071, "torch.profiler.schedule"]], "step() (torch.profiler.profile method)": [[2071, "torch.profiler.profile.step"]], "tensorboard_trace_handler() (in module torch.profiler)": [[2071, "torch.profiler.tensorboard_trace_handler"]], "torch.profiler": [[2071, "module-torch.profiler"]], "torch.profiler.itt": [[2071, "module-torch.profiler.itt"]], "torch.profiler.profiler": [[2071, "module-torch.profiler.profiler"]], "torch.profiler.python_tracer": [[2071, "module-torch.profiler.python_tracer"]], "torch.ao": [[2072, "module-torch.ao"]], "torch.ao.nn": [[2072, "module-torch.ao.nn"]], "torch.ao.nn.intrinsic.modules.fused": [[2072, "module-torch.ao.nn.intrinsic.modules.fused"]], "torch.ao.nn.intrinsic.qat.modules.conv_fused": [[2072, "module-torch.ao.nn.intrinsic.qat.modules.conv_fused"]], "torch.ao.nn.intrinsic.qat.modules.linear_fused": [[2072, "module-torch.ao.nn.intrinsic.qat.modules.linear_fused"]], "torch.ao.nn.intrinsic.qat.modules.linear_relu": [[2072, "module-torch.ao.nn.intrinsic.qat.modules.linear_relu"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu": [[2072, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules.linear_relu"]], "torch.ao.nn.intrinsic.quantized.modules.bn_relu": [[2072, "module-torch.ao.nn.intrinsic.quantized.modules.bn_relu"]], "torch.ao.nn.intrinsic.quantized.modules.conv_add": [[2072, "module-torch.ao.nn.intrinsic.quantized.modules.conv_add"]], "torch.ao.nn.intrinsic.quantized.modules.conv_relu": [[2072, "module-torch.ao.nn.intrinsic.quantized.modules.conv_relu"]], "torch.ao.nn.intrinsic.quantized.modules.linear_relu": [[2072, "module-torch.ao.nn.intrinsic.quantized.modules.linear_relu"]], "torch.ao.nn.qat.dynamic.modules.linear": [[2072, "module-torch.ao.nn.qat.dynamic.modules.linear"]], "torch.ao.nn.qat.modules.conv": [[2072, "module-torch.ao.nn.qat.modules.conv"]], "torch.ao.nn.qat.modules.embedding_ops": [[2072, "module-torch.ao.nn.qat.modules.embedding_ops"]], "torch.ao.nn.qat.modules.linear": [[2072, "module-torch.ao.nn.qat.modules.linear"]], "torch.ao.nn.quantizable": [[2072, "module-torch.ao.nn.quantizable"]], "torch.ao.nn.quantizable.modules": [[2072, "module-torch.ao.nn.quantizable.modules"]], "torch.ao.nn.quantizable.modules.activation": [[2072, "module-torch.ao.nn.quantizable.modules.activation"]], "torch.ao.nn.quantizable.modules.rnn": [[2072, "module-torch.ao.nn.quantizable.modules.rnn"]], "torch.ao.nn.quantized": [[2072, "module-torch.ao.nn.quantized"]], "torch.ao.nn.quantized.dynamic.modules.conv": [[2072, "module-torch.ao.nn.quantized.dynamic.modules.conv"]], "torch.ao.nn.quantized.dynamic.modules.linear": [[2072, "module-torch.ao.nn.quantized.dynamic.modules.linear"]], "torch.ao.nn.quantized.dynamic.modules.rnn": [[2072, "module-torch.ao.nn.quantized.dynamic.modules.rnn"]], "torch.ao.nn.quantized.modules.activation": [[2072, "module-torch.ao.nn.quantized.modules.activation"]], "torch.ao.nn.quantized.modules.batchnorm": [[2072, "module-torch.ao.nn.quantized.modules.batchnorm"]], 
"torch.ao.nn.quantized.modules.conv": [[2072, "module-torch.ao.nn.quantized.modules.conv"]], "torch.ao.nn.quantized.modules.dropout": [[2072, "module-torch.ao.nn.quantized.modules.dropout"]], "torch.ao.nn.quantized.modules.embedding_ops": [[2072, "module-torch.ao.nn.quantized.modules.embedding_ops"]], "torch.ao.nn.quantized.modules.functional_modules": [[2072, "module-torch.ao.nn.quantized.modules.functional_modules"]], "torch.ao.nn.quantized.modules.linear": [[2072, "module-torch.ao.nn.quantized.modules.linear"]], "torch.ao.nn.quantized.modules.normalization": [[2072, "module-torch.ao.nn.quantized.modules.normalization"]], "torch.ao.nn.quantized.modules.rnn": [[2072, "module-torch.ao.nn.quantized.modules.rnn"]], "torch.ao.nn.quantized.modules.utils": [[2072, "module-torch.ao.nn.quantized.modules.utils"]], "torch.ao.nn.quantized.reference": [[2072, "module-torch.ao.nn.quantized.reference"]], "torch.ao.nn.quantized.reference.modules": [[2072, "module-torch.ao.nn.quantized.reference.modules"]], "torch.ao.nn.quantized.reference.modules.conv": [[2072, "module-torch.ao.nn.quantized.reference.modules.conv"]], "torch.ao.nn.quantized.reference.modules.linear": [[2072, "module-torch.ao.nn.quantized.reference.modules.linear"]], "torch.ao.nn.quantized.reference.modules.rnn": [[2072, "module-torch.ao.nn.quantized.reference.modules.rnn"]], "torch.ao.nn.quantized.reference.modules.sparse": [[2072, "module-torch.ao.nn.quantized.reference.modules.sparse"]], "torch.ao.nn.quantized.reference.modules.utils": [[2072, "module-torch.ao.nn.quantized.reference.modules.utils"]], "torch.ao.nn.sparse": [[2072, "module-torch.ao.nn.sparse"]], "torch.ao.nn.sparse.quantized": [[2072, "module-torch.ao.nn.sparse.quantized"]], "torch.ao.nn.sparse.quantized.dynamic": [[2072, "module-torch.ao.nn.sparse.quantized.dynamic"]], "torch.ao.nn.sparse.quantized.dynamic.linear": [[2072, "module-torch.ao.nn.sparse.quantized.dynamic.linear"]], "torch.ao.nn.sparse.quantized.linear": [[2072, "module-torch.ao.nn.sparse.quantized.linear"]], "torch.ao.nn.sparse.quantized.utils": [[2072, "module-torch.ao.nn.sparse.quantized.utils"]], "torch.ao.ns": [[2072, "module-torch.ao.ns"]], "torch.ao.ns.fx": [[2072, "module-torch.ao.ns.fx"]], "torch.ao.ns.fx.graph_matcher": [[2072, "module-torch.ao.ns.fx.graph_matcher"]], "torch.ao.ns.fx.graph_passes": [[2072, "module-torch.ao.ns.fx.graph_passes"]], "torch.ao.ns.fx.mappings": [[2072, "module-torch.ao.ns.fx.mappings"]], "torch.ao.ns.fx.n_shadows_utils": [[2072, "module-torch.ao.ns.fx.n_shadows_utils"]], "torch.ao.ns.fx.ns_types": [[2072, "module-torch.ao.ns.fx.ns_types"]], "torch.ao.ns.fx.pattern_utils": [[2072, "module-torch.ao.ns.fx.pattern_utils"]], "torch.ao.ns.fx.qconfig_multi_mapping": [[2072, "module-torch.ao.ns.fx.qconfig_multi_mapping"]], "torch.ao.ns.fx.utils": [[2072, "module-torch.ao.ns.fx.utils"]], "torch.ao.ns.fx.weight_utils": [[2072, "module-torch.ao.ns.fx.weight_utils"]], "torch.ao.pruning": [[2072, "module-torch.ao.pruning"]], "torch.ao.pruning.scheduler": [[2072, "module-torch.ao.pruning.scheduler"]], "torch.ao.pruning.scheduler.base_scheduler": [[2072, "module-torch.ao.pruning.scheduler.base_scheduler"]], "torch.ao.pruning.scheduler.cubic_scheduler": [[2072, "module-torch.ao.pruning.scheduler.cubic_scheduler"]], "torch.ao.pruning.scheduler.lambda_scheduler": [[2072, "module-torch.ao.pruning.scheduler.lambda_scheduler"]], "torch.ao.pruning.sparsifier": [[2072, "module-torch.ao.pruning.sparsifier"]], "torch.ao.pruning.sparsifier.base_sparsifier": [[2072, 
"module-torch.ao.pruning.sparsifier.base_sparsifier"]], "torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier": [[2072, "module-torch.ao.pruning.sparsifier.nearly_diagonal_sparsifier"]], "torch.ao.pruning.sparsifier.utils": [[2072, "module-torch.ao.pruning.sparsifier.utils"]], "torch.ao.pruning.sparsifier.weight_norm_sparsifier": [[2072, "module-torch.ao.pruning.sparsifier.weight_norm_sparsifier"]], "torch.ao.quantization": [[2072, "module-torch.ao.quantization"]], "torch.ao.quantization.backend_config": [[2072, "module-torch.ao.quantization.backend_config"]], "torch.ao.quantization.backend_config.backend_config": [[2072, "module-torch.ao.quantization.backend_config.backend_config"]], "torch.ao.quantization.backend_config.executorch": [[2072, "module-torch.ao.quantization.backend_config.executorch"]], "torch.ao.quantization.backend_config.fbgemm": [[2072, "module-torch.ao.quantization.backend_config.fbgemm"]], "torch.ao.quantization.backend_config.native": [[2072, "module-torch.ao.quantization.backend_config.native"]], "torch.ao.quantization.backend_config.observation_type": [[2072, "module-torch.ao.quantization.backend_config.observation_type"]], "torch.ao.quantization.backend_config.onednn": [[2072, "module-torch.ao.quantization.backend_config.onednn"]], "torch.ao.quantization.backend_config.qnnpack": [[2072, "module-torch.ao.quantization.backend_config.qnnpack"]], "torch.ao.quantization.backend_config.tensorrt": [[2072, "module-torch.ao.quantization.backend_config.tensorrt"]], "torch.ao.quantization.backend_config.utils": [[2072, "module-torch.ao.quantization.backend_config.utils"]], "torch.ao.quantization.backend_config.x86": [[2072, "module-torch.ao.quantization.backend_config.x86"]], "torch.ao.quantization.fake_quantize": [[2072, "module-torch.ao.quantization.fake_quantize"]], "torch.ao.quantization.fuse_modules": [[2072, "module-torch.ao.quantization.fuse_modules"]], "torch.ao.quantization.fuser_method_mappings": [[2072, "module-torch.ao.quantization.fuser_method_mappings"]], "torch.ao.quantization.fx": [[2072, "module-torch.ao.quantization.fx"]], "torch.ao.quantization.fx.convert": [[2072, "module-torch.ao.quantization.fx.convert"]], "torch.ao.quantization.fx.custom_config": [[2072, "module-torch.ao.quantization.fx.custom_config"]], "torch.ao.quantization.fx.fuse": [[2072, "module-torch.ao.quantization.fx.fuse"]], "torch.ao.quantization.fx.fuse_handler": [[2072, "module-torch.ao.quantization.fx.fuse_handler"]], "torch.ao.quantization.fx.graph_module": [[2072, "module-torch.ao.quantization.fx.graph_module"]], "torch.ao.quantization.fx.lower_to_fbgemm": [[2072, "module-torch.ao.quantization.fx.lower_to_fbgemm"]], "torch.ao.quantization.fx.lower_to_qnnpack": [[2072, "module-torch.ao.quantization.fx.lower_to_qnnpack"]], "torch.ao.quantization.fx.lstm_utils": [[2072, "module-torch.ao.quantization.fx.lstm_utils"]], "torch.ao.quantization.fx.match_utils": [[2072, "module-torch.ao.quantization.fx.match_utils"]], "torch.ao.quantization.fx.pattern_utils": [[2072, "module-torch.ao.quantization.fx.pattern_utils"]], "torch.ao.quantization.fx.prepare": [[2072, "module-torch.ao.quantization.fx.prepare"]], "torch.ao.quantization.fx.qconfig_mapping_utils": [[2072, "module-torch.ao.quantization.fx.qconfig_mapping_utils"]], "torch.ao.quantization.fx.quantize_handler": [[2072, "module-torch.ao.quantization.fx.quantize_handler"]], "torch.ao.quantization.fx.tracer": [[2072, "module-torch.ao.quantization.fx.tracer"]], "torch.ao.quantization.fx.utils": [[2072, "module-torch.ao.quantization.fx.utils"]], 
"torch.ao.quantization.observer": [[2072, "module-torch.ao.quantization.observer"]], "torch.ao.quantization.pt2e.duplicate_dq_pass": [[2072, "module-torch.ao.quantization.pt2e.duplicate_dq_pass"]], "torch.ao.quantization.pt2e.export_utils": [[2072, "module-torch.ao.quantization.pt2e.export_utils"]], "torch.ao.quantization.pt2e.graph_utils": [[2072, "module-torch.ao.quantization.pt2e.graph_utils"]], "torch.ao.quantization.pt2e.port_metadata_pass": [[2072, "module-torch.ao.quantization.pt2e.port_metadata_pass"]], "torch.ao.quantization.pt2e.prepare": [[2072, "module-torch.ao.quantization.pt2e.prepare"]], "torch.ao.quantization.pt2e.qat_utils": [[2072, "module-torch.ao.quantization.pt2e.qat_utils"]], "torch.ao.quantization.pt2e.representation.rewrite": [[2072, "module-torch.ao.quantization.pt2e.representation.rewrite"]], "torch.ao.quantization.pt2e.utils": [[2072, "module-torch.ao.quantization.pt2e.utils"]], "torch.ao.quantization.qconfig": [[2072, "module-torch.ao.quantization.qconfig"]], "torch.ao.quantization.qconfig_mapping": [[2072, "module-torch.ao.quantization.qconfig_mapping"]], "torch.ao.quantization.quant_type": [[2072, "module-torch.ao.quantization.quant_type"]], "torch.ao.quantization.quantization_mappings": [[2072, "module-torch.ao.quantization.quantization_mappings"]], "torch.ao.quantization.quantize_fx": [[2072, "module-torch.ao.quantization.quantize_fx"]], "torch.ao.quantization.quantize_jit": [[2072, "module-torch.ao.quantization.quantize_jit"]], "torch.ao.quantization.quantize_pt2e": [[2072, "module-torch.ao.quantization.quantize_pt2e"]], "torch.ao.quantization.quantizer.composable_quantizer": [[2072, "module-torch.ao.quantization.quantizer.composable_quantizer"]], "torch.ao.quantization.quantizer.embedding_quantizer": [[2072, "module-torch.ao.quantization.quantizer.embedding_quantizer"]], "torch.ao.quantization.quantizer.quantizer": [[2072, "module-torch.ao.quantization.quantizer.quantizer"]], "torch.ao.quantization.quantizer.utils": [[2072, "module-torch.ao.quantization.quantizer.utils"]], "torch.ao.quantization.quantizer.x86_inductor_quantizer": [[2072, "module-torch.ao.quantization.quantizer.x86_inductor_quantizer"]], "torch.ao.quantization.quantizer.xnnpack_quantizer": [[2072, "module-torch.ao.quantization.quantizer.xnnpack_quantizer"]], "torch.ao.quantization.quantizer.xnnpack_quantizer_utils": [[2072, "module-torch.ao.quantization.quantizer.xnnpack_quantizer_utils"]], "torch.ao.quantization.stubs": [[2072, "module-torch.ao.quantization.stubs"]], "torch.ao.quantization.utils": [[2072, "module-torch.ao.quantization.utils"]], "torch.nn.intrinsic.modules.fused": [[2072, "module-torch.nn.intrinsic.modules.fused"]], "torch.nn.intrinsic.qat.modules.conv_fused": [[2072, "module-torch.nn.intrinsic.qat.modules.conv_fused"]], "torch.nn.intrinsic.qat.modules.linear_fused": [[2072, "module-torch.nn.intrinsic.qat.modules.linear_fused"]], "torch.nn.intrinsic.qat.modules.linear_relu": [[2072, "module-torch.nn.intrinsic.qat.modules.linear_relu"]], "torch.nn.intrinsic.quantized.dynamic.modules.linear_relu": [[2072, "module-torch.nn.intrinsic.quantized.dynamic.modules.linear_relu"]], "torch.nn.intrinsic.quantized.modules.bn_relu": [[2072, "module-torch.nn.intrinsic.quantized.modules.bn_relu"]], "torch.nn.intrinsic.quantized.modules.conv_relu": [[2072, "module-torch.nn.intrinsic.quantized.modules.conv_relu"]], "torch.nn.intrinsic.quantized.modules.linear_relu": [[2072, "module-torch.nn.intrinsic.quantized.modules.linear_relu"]], "torch.nn.qat.dynamic.modules.linear": [[2072, 
"module-torch.nn.qat.dynamic.modules.linear"]], "torch.nn.qat.modules.conv": [[2072, "module-torch.nn.qat.modules.conv"]], "torch.nn.qat.modules.embedding_ops": [[2072, "module-torch.nn.qat.modules.embedding_ops"]], "torch.nn.qat.modules.linear": [[2072, "module-torch.nn.qat.modules.linear"]], "torch.nn.quantizable.modules.activation": [[2072, "module-torch.nn.quantizable.modules.activation"]], "torch.nn.quantizable.modules.rnn": [[2072, "module-torch.nn.quantizable.modules.rnn"]], "torch.nn.quantized.dynamic.modules.conv": [[2072, "module-torch.nn.quantized.dynamic.modules.conv"]], "torch.nn.quantized.dynamic.modules.linear": [[2072, "module-torch.nn.quantized.dynamic.modules.linear"]], "torch.nn.quantized.dynamic.modules.rnn": [[2072, "module-torch.nn.quantized.dynamic.modules.rnn"]], "torch.nn.quantized.functional": [[2072, "module-torch.nn.quantized.functional"]], "torch.nn.quantized.modules.activation": [[2072, "module-torch.nn.quantized.modules.activation"]], "torch.nn.quantized.modules.batchnorm": [[2072, "module-torch.nn.quantized.modules.batchnorm"]], "torch.nn.quantized.modules.conv": [[2072, "module-torch.nn.quantized.modules.conv"]], "torch.nn.quantized.modules.dropout": [[2072, "module-torch.nn.quantized.modules.dropout"]], "torch.nn.quantized.modules.embedding_ops": [[2072, "module-torch.nn.quantized.modules.embedding_ops"]], "torch.nn.quantized.modules.functional_modules": [[2072, "module-torch.nn.quantized.modules.functional_modules"]], "torch.nn.quantized.modules.linear": [[2072, "module-torch.nn.quantized.modules.linear"]], "torch.nn.quantized.modules.normalization": [[2072, "module-torch.nn.quantized.modules.normalization"]], "torch.nn.quantized.modules.rnn": [[2072, "module-torch.nn.quantized.modules.rnn"]], "torch.nn.quantized.modules.utils": [[2072, "module-torch.nn.quantized.modules.utils"]], "torch.quantization.fake_quantize": [[2072, "module-torch.quantization.fake_quantize"]], "torch.quantization.fuse_modules": [[2072, "module-torch.quantization.fuse_modules"]], "torch.quantization.fuser_method_mappings": [[2072, "module-torch.quantization.fuser_method_mappings"]], "torch.quantization.fx.convert": [[2072, "module-torch.quantization.fx.convert"]], "torch.quantization.fx.fuse": [[2072, "module-torch.quantization.fx.fuse"]], "torch.quantization.fx.fusion_patterns": [[2072, "module-torch.quantization.fx.fusion_patterns"]], "torch.quantization.fx.graph_module": [[2072, "module-torch.quantization.fx.graph_module"]], "torch.quantization.fx.match_utils": [[2072, "module-torch.quantization.fx.match_utils"]], "torch.quantization.fx.pattern_utils": [[2072, "module-torch.quantization.fx.pattern_utils"]], "torch.quantization.fx.prepare": [[2072, "module-torch.quantization.fx.prepare"]], "torch.quantization.fx.quantization_patterns": [[2072, "module-torch.quantization.fx.quantization_patterns"]], "torch.quantization.fx.quantization_types": [[2072, "module-torch.quantization.fx.quantization_types"]], "torch.quantization.fx.utils": [[2072, "module-torch.quantization.fx.utils"]], "torch.quantization.observer": [[2072, "module-torch.quantization.observer"]], "torch.quantization.qconfig": [[2072, "module-torch.quantization.qconfig"]], "torch.quantization.quant_type": [[2072, "module-torch.quantization.quant_type"]], "torch.quantization.quantization_mappings": [[2072, "module-torch.quantization.quantization_mappings"]], "torch.quantization.quantize": [[2072, "module-torch.quantization.quantize"]], "torch.quantization.quantize_fx": [[2072, "module-torch.quantization.quantize_fx"]], 
"torch.quantization.quantize_jit": [[2072, "module-torch.quantization.quantize_jit"]], "torch.quantization.stubs": [[2072, "module-torch.quantization.stubs"]], "torch.quantization.utils": [[2072, "module-torch.quantization.utils"]], "torch.ao.nn.intrinsic": [[2075, "module-torch.ao.nn.intrinsic"]], "torch.ao.nn.intrinsic.modules": [[2075, "module-torch.ao.nn.intrinsic.modules"]], "torch.ao.nn.intrinsic.qat": [[2075, "module-torch.ao.nn.intrinsic.qat"]], "torch.ao.nn.intrinsic.qat.modules": [[2075, "module-torch.ao.nn.intrinsic.qat.modules"]], "torch.ao.nn.intrinsic.quantized": [[2075, "module-torch.ao.nn.intrinsic.quantized"]], "torch.ao.nn.intrinsic.quantized.dynamic": [[2075, "module-torch.ao.nn.intrinsic.quantized.dynamic"]], "torch.ao.nn.intrinsic.quantized.dynamic.modules": [[2075, "module-torch.ao.nn.intrinsic.quantized.dynamic.modules"]], "torch.ao.nn.intrinsic.quantized.modules": [[2075, "module-torch.ao.nn.intrinsic.quantized.modules"]], "torch.ao.nn.qat": [[2075, "module-torch.ao.nn.qat"]], "torch.ao.nn.qat.dynamic": [[2075, "module-torch.ao.nn.qat.dynamic"]], "torch.ao.nn.qat.dynamic.modules": [[2075, "module-torch.ao.nn.qat.dynamic.modules"]], "torch.ao.nn.qat.modules": [[2075, "module-torch.ao.nn.qat.modules"]], "torch.ao.nn.quantized.dynamic": [[2075, "module-torch.ao.nn.quantized.dynamic"]], "torch.ao.nn.quantized.dynamic.modules": [[2075, "module-torch.ao.nn.quantized.dynamic.modules"]], "torch.ao.nn.quantized.functional": [[2075, "module-torch.ao.nn.quantized.functional"]], "torch.ao.nn.quantized.modules": [[2075, "module-torch.ao.nn.quantized.modules"]], "torch.ao.quantization.pt2e": [[2075, "module-torch.ao.quantization.pt2e"]], "torch.ao.quantization.pt2e.generate_numeric_debug_handle": [[2075, "module-torch.ao.quantization.pt2e.generate_numeric_debug_handle"]], "torch.ao.quantization.pt2e.representation": [[2075, "module-torch.ao.quantization.pt2e.representation"]], "torch.ao.quantization.quantizer": [[2075, "module-torch.ao.quantization.quantizer"]], "torch.nn.intrinsic": [[2075, "module-torch.nn.intrinsic"]], "torch.nn.intrinsic.modules": [[2075, "module-torch.nn.intrinsic.modules"]], "torch.nn.intrinsic.qat": [[2075, "module-torch.nn.intrinsic.qat"]], "torch.nn.intrinsic.qat.modules": [[2075, "module-torch.nn.intrinsic.qat.modules"]], "torch.nn.intrinsic.quantized": [[2075, "module-torch.nn.intrinsic.quantized"]], "torch.nn.intrinsic.quantized.dynamic": [[2075, "module-torch.nn.intrinsic.quantized.dynamic"]], "torch.nn.intrinsic.quantized.dynamic.modules": [[2075, "module-torch.nn.intrinsic.quantized.dynamic.modules"]], "torch.nn.intrinsic.quantized.modules": [[2075, "module-torch.nn.intrinsic.quantized.modules"]], "torch.nn.qat": [[2075, "module-torch.nn.qat"]], "torch.nn.qat.dynamic": [[2075, "module-torch.nn.qat.dynamic"]], "torch.nn.qat.dynamic.modules": [[2075, "module-torch.nn.qat.dynamic.modules"]], "torch.nn.qat.modules": [[2075, "module-torch.nn.qat.modules"]], "torch.nn.quantizable": [[2075, "module-torch.nn.quantizable"]], "torch.nn.quantizable.modules": [[2075, "module-torch.nn.quantizable.modules"]], "torch.nn.quantized": [[2075, "module-torch.nn.quantized"]], "torch.nn.quantized.dynamic": [[2075, "module-torch.nn.quantized.dynamic"]], "torch.nn.quantized.dynamic.modules": [[2075, "module-torch.nn.quantized.dynamic.modules"]], "torch.nn.quantized.modules": [[2075, "module-torch.nn.quantized.modules"]], "torch.quantization": [[2075, "module-torch.quantization"]], "torch.quantization.fx": [[2075, "module-torch.quantization.fx"]], "fork_rng() (in module 
torch.random)": [[2076, "torch.random.fork_rng"]], "get_rng_state() (in module torch.random)": [[2076, "torch.random.get_rng_state"]], "initial_seed() (in module torch.random)": [[2076, "torch.random.initial_seed"]], "manual_seed() (in module torch.random)": [[2076, "torch.random.manual_seed"]], "seed() (in module torch.random)": [[2076, "torch.random.seed"]], "set_rng_state() (in module torch.random)": [[2076, "torch.random.set_rng_state"]], "torch.random": [[2076, "module-torch.random"]], "backendtype (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.BackendType"]], "pyrref (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.PyRRef"]], "remotemodule (class in torch.distributed.nn.api.remote_module)": [[2077, "torch.distributed.nn.api.remote_module.RemoteModule"]], "rpcbackendoptions (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.RpcBackendOptions"]], "tensorpiperpcbackendoptions (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions"]], "workerinfo (class in torch.distributed.rpc)": [[2077, "torch.distributed.rpc.WorkerInfo"]], "async_execution() (in module torch.distributed.rpc.functions)": [[2077, "torch.distributed.rpc.functions.async_execution"]], "backward() (in module torch.distributed.autograd)": [[2077, "torch.distributed.autograd.backward"]], "backward() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.backward"]], "confirmed_by_owner() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.confirmed_by_owner"]], "context (class in torch.distributed.autograd)": [[2077, "torch.distributed.autograd.context"]], "device_maps (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.device_maps"]], "devices (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.devices"]], "get_gradients() (in module torch.distributed.autograd)": [[2077, "torch.distributed.autograd.get_gradients"]], "get_module_rref() (torch.distributed.nn.api.remote_module.remotemodule method)": [[2077, "torch.distributed.nn.api.remote_module.RemoteModule.get_module_rref"]], "get_worker_info() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.get_worker_info"]], "id (torch.distributed.rpc.workerinfo property)": [[2077, "torch.distributed.rpc.WorkerInfo.id"]], "init_method (torch.distributed.rpc.rpcbackendoptions property)": [[2077, "torch.distributed.rpc.RpcBackendOptions.init_method"]], "init_method (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.init_method"]], "init_rpc() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.init_rpc"]], "is_owner() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.is_owner"]], "local_value() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.local_value"]], "name (torch.distributed.rpc.workerinfo property)": [[2077, "torch.distributed.rpc.WorkerInfo.name"]], "num_worker_threads (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.num_worker_threads"]], "owner() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.owner"]], "owner_name() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.owner_name"]], "remote() (in module 
torch.distributed.rpc)": [[2077, "torch.distributed.rpc.remote"]], "remote() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.remote"]], "remote_parameters() (torch.distributed.nn.api.remote_module.remotemodule method)": [[2077, "torch.distributed.nn.api.remote_module.RemoteModule.remote_parameters"]], "rpc_async() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.rpc_async"]], "rpc_async() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.rpc_async"]], "rpc_sync() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.rpc_sync"]], "rpc_sync() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.rpc_sync"]], "rpc_timeout (torch.distributed.rpc.rpcbackendoptions property)": [[2077, "torch.distributed.rpc.RpcBackendOptions.rpc_timeout"]], "rpc_timeout (torch.distributed.rpc.tensorpiperpcbackendoptions property)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.rpc_timeout"]], "set_device_map() (torch.distributed.rpc.tensorpiperpcbackendoptions method)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.set_device_map"]], "set_devices() (torch.distributed.rpc.tensorpiperpcbackendoptions method)": [[2077, "torch.distributed.rpc.TensorPipeRpcBackendOptions.set_devices"]], "shutdown() (in module torch.distributed.rpc)": [[2077, "torch.distributed.rpc.shutdown"]], "to_here() (torch.distributed.rpc.pyrref method)": [[2077, "torch.distributed.rpc.PyRRef.to_here"]], "torch.distributed.autograd": [[2077, "module-torch.distributed.autograd"]], "torch.distributed.rpc": [[2077, "module-torch.distributed.rpc"]], "torch.signal": [[2080, "module-torch.signal"]], "torch.signal.windows": [[2080, "module-torch.signal.windows"]], "size (class in torch)": [[2081, "torch.Size"]], "count() (torch.size method)": [[2081, "torch.Size.count"]], "index() (torch.size method)": [[2081, "torch.Size.index"]], "numel() (torch.size method)": [[2081, "torch.Size.numel"]], "torch.sparse": [[2082, "module-torch.sparse"]], "airy_ai() (in module torch.special)": [[2083, "torch.special.airy_ai"]], "bessel_j0() (in module torch.special)": [[2083, "torch.special.bessel_j0"]], "bessel_j1() (in module torch.special)": [[2083, "torch.special.bessel_j1"]], "digamma() (in module torch.special)": [[2083, "torch.special.digamma"]], "entr() (in module torch.special)": [[2083, "torch.special.entr"]], "erf() (in module torch.special)": [[2083, "torch.special.erf"]], "erfc() (in module torch.special)": [[2083, "torch.special.erfc"]], "erfcx() (in module torch.special)": [[2083, "torch.special.erfcx"]], "erfinv() (in module torch.special)": [[2083, "torch.special.erfinv"]], "exp2() (in module torch.special)": [[2083, "torch.special.exp2"]], "expit() (in module torch.special)": [[2083, "torch.special.expit"]], "expm1() (in module torch.special)": [[2083, "torch.special.expm1"]], "gammainc() (in module torch.special)": [[2083, "torch.special.gammainc"]], "gammaincc() (in module torch.special)": [[2083, "torch.special.gammaincc"]], "gammaln() (in module torch.special)": [[2083, "torch.special.gammaln"]], "i0() (in module torch.special)": [[2083, "torch.special.i0"]], "i0e() (in module torch.special)": [[2083, "torch.special.i0e"]], "i1() (in module torch.special)": [[2083, "torch.special.i1"]], "i1e() (in module torch.special)": [[2083, "torch.special.i1e"]], "log1p() (in module torch.special)": [[2083, "torch.special.log1p"]], "log_ndtr() (in module torch.special)": [[2083, "torch.special.log_ndtr"]], "log_softmax() 
(in module torch.special)": [[2083, "torch.special.log_softmax"]], "logit() (in module torch.special)": [[2083, "torch.special.logit"]], "logsumexp() (in module torch.special)": [[2083, "torch.special.logsumexp"]], "multigammaln() (in module torch.special)": [[2083, "torch.special.multigammaln"]], "ndtr() (in module torch.special)": [[2083, "torch.special.ndtr"]], "ndtri() (in module torch.special)": [[2083, "torch.special.ndtri"]], "polygamma() (in module torch.special)": [[2083, "torch.special.polygamma"]], "psi() (in module torch.special)": [[2083, "torch.special.psi"]], "round() (in module torch.special)": [[2083, "torch.special.round"]], "scaled_modified_bessel_k0() (in module torch.special)": [[2083, "torch.special.scaled_modified_bessel_k0"]], "scaled_modified_bessel_k1() (in module torch.special)": [[2083, "torch.special.scaled_modified_bessel_k1"]], "sinc() (in module torch.special)": [[2083, "torch.special.sinc"]], "softmax() (in module torch.special)": [[2083, "torch.special.softmax"]], "spherical_bessel_j0() (in module torch.special)": [[2083, "torch.special.spherical_bessel_j0"]], "torch.special": [[2083, "module-torch.special"]], "xlog1py() (in module torch.special)": [[2083, "torch.special.xlog1py"]], "xlogy() (in module torch.special)": [[2083, "torch.special.xlogy"]], "zeta() (in module torch.special)": [[2083, "torch.special.zeta"]], "bfloat16storage (class in torch)": [[2084, "torch.BFloat16Storage"]], "boolstorage (class in torch)": [[2084, "torch.BoolStorage"]], "bytestorage (class in torch)": [[2084, "torch.ByteStorage"]], "charstorage (class in torch)": [[2084, "torch.CharStorage"]], "complexdoublestorage (class in torch)": [[2084, "torch.ComplexDoubleStorage"]], "complexfloatstorage (class in torch)": [[2084, "torch.ComplexFloatStorage"]], "doublestorage (class in torch)": [[2084, "torch.DoubleStorage"]], "floatstorage (class in torch)": [[2084, "torch.FloatStorage"]], "halfstorage (class in torch)": [[2084, "torch.HalfStorage"]], "intstorage (class in torch)": [[2084, "torch.IntStorage"]], "longstorage (class in torch)": [[2084, "torch.LongStorage"]], "qint32storage (class in torch)": [[2084, "torch.QInt32Storage"]], "qint8storage (class in torch)": [[2084, "torch.QInt8Storage"]], "quint2x4storage (class in torch)": [[2084, "torch.QUInt2x4Storage"]], "quint4x2storage (class in torch)": [[2084, "torch.QUInt4x2Storage"]], "quint8storage (class in torch)": [[2084, "torch.QUInt8Storage"]], "shortstorage (class in torch)": [[2084, "torch.ShortStorage"]], "typedstorage (class in torch)": [[2084, "torch.TypedStorage"]], "untypedstorage (class in torch)": [[2084, "torch.UntypedStorage"]], "bfloat16() (torch.typedstorage method)": [[2084, "torch.TypedStorage.bfloat16"]], "bfloat16() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.bfloat16"]], "bool() (torch.typedstorage method)": [[2084, "torch.TypedStorage.bool"]], "bool() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.bool"]], "byte() (torch.typedstorage method)": [[2084, "torch.TypedStorage.byte"]], "byte() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.byte"]], "byteswap() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.byteswap"]], "char() (torch.typedstorage method)": [[2084, "torch.TypedStorage.char"]], "char() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.char"]], "clone() (torch.typedstorage method)": [[2084, "torch.TypedStorage.clone"]], "clone() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.clone"]], "complex_double() 
(torch.typedstorage method)": [[2084, "torch.TypedStorage.complex_double"]], "complex_double() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.complex_double"]], "complex_float() (torch.typedstorage method)": [[2084, "torch.TypedStorage.complex_float"]], "complex_float() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.complex_float"]], "copy_() (torch.typedstorage method)": [[2084, "torch.TypedStorage.copy_"]], "copy_() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.copy_"]], "cpu() (torch.typedstorage method)": [[2084, "torch.TypedStorage.cpu"]], "cpu() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.cpu"]], "cuda() (torch.typedstorage method)": [[2084, "torch.TypedStorage.cuda"]], "cuda() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.cuda"]], "data_ptr() (torch.typedstorage method)": [[2084, "torch.TypedStorage.data_ptr"]], "data_ptr() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.data_ptr"]], "device (torch.typedstorage property)": [[2084, "torch.TypedStorage.device"]], "device (torch.untypedstorage attribute)": [[2084, "torch.UntypedStorage.device"]], "double() (torch.typedstorage method)": [[2084, "torch.TypedStorage.double"]], "double() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.double"]], "dtype (torch.bfloat16storage attribute)": [[2084, "torch.BFloat16Storage.dtype"]], "dtype (torch.boolstorage attribute)": [[2084, "torch.BoolStorage.dtype"]], "dtype (torch.bytestorage attribute)": [[2084, "torch.ByteStorage.dtype"]], "dtype (torch.charstorage attribute)": [[2084, "torch.CharStorage.dtype"]], "dtype (torch.complexdoublestorage attribute)": [[2084, "torch.ComplexDoubleStorage.dtype"]], "dtype (torch.complexfloatstorage attribute)": [[2084, "torch.ComplexFloatStorage.dtype"]], "dtype (torch.doublestorage attribute)": [[2084, "torch.DoubleStorage.dtype"]], "dtype (torch.floatstorage attribute)": [[2084, "torch.FloatStorage.dtype"]], "dtype (torch.halfstorage attribute)": [[2084, "torch.HalfStorage.dtype"]], "dtype (torch.intstorage attribute)": [[2084, "torch.IntStorage.dtype"]], "dtype (torch.longstorage attribute)": [[2084, "torch.LongStorage.dtype"]], "dtype (torch.qint32storage attribute)": [[2084, "torch.QInt32Storage.dtype"]], "dtype (torch.qint8storage attribute)": [[2084, "torch.QInt8Storage.dtype"]], "dtype (torch.quint2x4storage attribute)": [[2084, "torch.QUInt2x4Storage.dtype"]], "dtype (torch.quint4x2storage attribute)": [[2084, "torch.QUInt4x2Storage.dtype"]], "dtype (torch.quint8storage attribute)": [[2084, "torch.QUInt8Storage.dtype"]], "dtype (torch.shortstorage attribute)": [[2084, "torch.ShortStorage.dtype"]], "dtype (torch.typedstorage attribute)": [[2084, "torch.TypedStorage.dtype"]], "element_size() (torch.typedstorage method)": [[2084, "torch.TypedStorage.element_size"]], "element_size() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.element_size"]], "filename (torch.typedstorage property)": [[2084, "torch.TypedStorage.filename"]], "filename (torch.untypedstorage property)": [[2084, "torch.UntypedStorage.filename"]], "fill_() (torch.typedstorage method)": [[2084, "torch.TypedStorage.fill_"]], "fill_() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.fill_"]], "float() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float"]], "float() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float"]], "float8_e4m3fn() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float8_e4m3fn"]], "float8_e4m3fn() 
(torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float8_e4m3fn"]], "float8_e4m3fnuz() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float8_e4m3fnuz"]], "float8_e4m3fnuz() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float8_e4m3fnuz"]], "float8_e5m2() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float8_e5m2"]], "float8_e5m2() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float8_e5m2"]], "float8_e5m2fnuz() (torch.typedstorage method)": [[2084, "torch.TypedStorage.float8_e5m2fnuz"]], "float8_e5m2fnuz() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.float8_e5m2fnuz"]], "from_buffer() (torch.typedstorage class method)": [[2084, "torch.TypedStorage.from_buffer"]], "from_buffer() (torch.untypedstorage static method)": [[2084, "torch.UntypedStorage.from_buffer"]], "from_file() (torch.typedstorage class method)": [[2084, "torch.TypedStorage.from_file"]], "from_file() (torch.untypedstorage static method)": [[2084, "torch.UntypedStorage.from_file"]], "get_device() (torch.typedstorage method)": [[2084, "torch.TypedStorage.get_device"]], "get_device() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.get_device"]], "half() (torch.typedstorage method)": [[2084, "torch.TypedStorage.half"]], "half() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.half"]], "hpu() (torch.typedstorage method)": [[2084, "torch.TypedStorage.hpu"]], "hpu() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.hpu"]], "int() (torch.typedstorage method)": [[2084, "torch.TypedStorage.int"]], "int() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.int"]], "is_cuda (torch.typedstorage property)": [[2084, "torch.TypedStorage.is_cuda"]], "is_cuda (torch.untypedstorage property)": [[2084, "torch.UntypedStorage.is_cuda"]], "is_hpu (torch.typedstorage property)": [[2084, "torch.TypedStorage.is_hpu"]], "is_hpu (torch.untypedstorage property)": [[2084, "torch.UntypedStorage.is_hpu"]], "is_pinned() (torch.typedstorage method)": [[2084, "torch.TypedStorage.is_pinned"]], "is_pinned() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.is_pinned"]], "is_shared() (torch.typedstorage method)": [[2084, "torch.TypedStorage.is_shared"]], "is_shared() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.is_shared"]], "is_sparse (torch.typedstorage attribute)": [[2084, "torch.TypedStorage.is_sparse"]], "is_sparse (torch.untypedstorage attribute)": [[2084, "torch.UntypedStorage.is_sparse"]], "is_sparse_csr (torch.untypedstorage attribute)": [[2084, "torch.UntypedStorage.is_sparse_csr"]], "long() (torch.typedstorage method)": [[2084, "torch.TypedStorage.long"]], "long() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.long"]], "mps() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.mps"]], "nbytes() (torch.typedstorage method)": [[2084, "torch.TypedStorage.nbytes"]], "nbytes() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.nbytes"]], "new() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.new"]], "pickle_storage_type() (torch.typedstorage method)": [[2084, "torch.TypedStorage.pickle_storage_type"]], "pin_memory() (torch.typedstorage method)": [[2084, "torch.TypedStorage.pin_memory"]], "pin_memory() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.pin_memory"]], "resizable() (torch.typedstorage method)": [[2084, "torch.TypedStorage.resizable"]], "resizable() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.resizable"]], 
"resize_() (torch.typedstorage method)": [[2084, "torch.TypedStorage.resize_"]], "resize_() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.resize_"]], "share_memory_() (torch.typedstorage method)": [[2084, "torch.TypedStorage.share_memory_"]], "share_memory_() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.share_memory_"]], "short() (torch.typedstorage method)": [[2084, "torch.TypedStorage.short"]], "short() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.short"]], "size() (torch.typedstorage method)": [[2084, "torch.TypedStorage.size"]], "size() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.size"]], "to() (torch.typedstorage method)": [[2084, "torch.TypedStorage.to"]], "to() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.to"]], "tolist() (torch.typedstorage method)": [[2084, "torch.TypedStorage.tolist"]], "tolist() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.tolist"]], "type() (torch.typedstorage method)": [[2084, "torch.TypedStorage.type"]], "type() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.type"]], "untyped() (torch.typedstorage method)": [[2084, "torch.TypedStorage.untyped"]], "untyped() (torch.untypedstorage method)": [[2084, "torch.UntypedStorage.untyped"]], "device (class in torch)": [[2085, "torch.device"]], "dtype (class in torch)": [[2085, "torch.dtype"]], "layout (class in torch)": [[2085, "torch.layout"]], "memory_format (class in torch)": [[2085, "torch.memory_format"]], "summarywriter (class in torch.utils.tensorboard.writer)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter"]], "__init__() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.__init__"]], "add_audio() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_audio"]], "add_custom_scalars() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_custom_scalars"]], "add_embedding() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_embedding"]], "add_figure() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_figure"]], "add_graph() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_graph"]], "add_histogram() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_histogram"]], "add_hparams() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_hparams"]], "add_image() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_image"]], "add_images() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_images"]], "add_mesh() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_mesh"]], "add_pr_curve() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_pr_curve"]], "add_scalar() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_scalar"]], "add_scalars() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, 
"torch.utils.tensorboard.writer.SummaryWriter.add_scalars"]], "add_text() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_text"]], "add_video() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.add_video"]], "close() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.close"]], "flush() (torch.utils.tensorboard.writer.summarywriter method)": [[2087, "torch.utils.tensorboard.writer.SummaryWriter.flush"]], "torch.utils.tensorboard": [[2087, "module-torch.utils.tensorboard"]], "h (torch.tensor attribute)": [[2088, "torch.Tensor.H"]], "t (torch.tensor attribute)": [[2088, "torch.Tensor.T"]], "tensor (class in torch)": [[2088, "torch.Tensor"]], "__init__() (torch.tensor method)": [[2088, "torch.Tensor.__init__"]], "mh (torch.tensor attribute)": [[2088, "torch.Tensor.mH"]], "mt (torch.tensor attribute)": [[2088, "torch.Tensor.mT"]], "assert_allclose() (in module torch.testing)": [[2089, "torch.testing.assert_allclose"]], "assert_close() (in module torch.testing)": [[2089, "torch.testing.assert_close"]], "make_tensor() (in module torch.testing)": [[2089, "torch.testing.make_tensor"]], "torch.testing": [[2089, "module-torch.testing"]], "symbool (class in torch)": [[2091, "torch.SymBool"]], "symfloat (class in torch)": [[2091, "torch.SymFloat"]], "symint (class in torch)": [[2091, "torch.SymInt"]], "tag (class in torch)": [[2091, "torch.Tag"]], "default_generator (torch.torch attribute)": [[2091, "torch.torch.default_generator"]], "is_integer() (torch.symfloat method)": [[2091, "torch.SymFloat.is_integer"]], "name (torch.tag property)": [[2091, "torch.Tag.name"]], "torch": [[2091, "module-torch"]], "torch.contrib": [[2091, "module-torch.contrib"]], "torch.functional": [[2091, "module-torch.functional"]], "torch.quasirandom": [[2091, "module-torch.quasirandom"]], "torch.return_types": [[2091, "module-torch.return_types"]], "torch.serialization": [[2091, "module-torch.serialization"]], "torch.signal.windows.windows": [[2091, "module-torch.signal.windows.windows"]], "torch.sparse.semi_structured": [[2091, "module-torch.sparse.semi_structured"]], "torch.storage": [[2091, "module-torch.storage"]], "torch.torch_version": [[2091, "module-torch.torch_version"]], "torch.types": [[2091, "module-torch.types"]], "torch.utils.backcompat": [[2091, "module-torch.utils.backcompat"]], "torch.utils.hipify": [[2091, "module-torch.utils.hipify"]], "torch.utils.model_dump": [[2091, "module-torch.utils.model_dump"]], "torch.utils.viz": [[2091, "module-torch.utils.viz"]], "torch.version": [[2091, "module-torch.version"]], "logger (class in torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.Logger"]], "outputlogger (class in torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.OutputLogger"]], "shadow (class in torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.Shadow"]], "shadowlogger (class in torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.ShadowLogger"]], "add() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.add"]], "add_relu() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.add_relu"]], "add_scalar() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.add_scalar"]], "cat() (torch.ao.ns._numeric_suite.shadow method)": [[2092, 
"torch.ao.ns._numeric_suite.Shadow.cat"]], "compare_model_outputs() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.compare_model_outputs"]], "compare_model_stub() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.compare_model_stub"]], "compare_weights() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.compare_weights"]], "forward() (torch.ao.ns._numeric_suite.logger method)": [[2092, "torch.ao.ns._numeric_suite.Logger.forward"]], "forward() (torch.ao.ns._numeric_suite.outputlogger method)": [[2092, "torch.ao.ns._numeric_suite.OutputLogger.forward"]], "forward() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.forward"]], "forward() (torch.ao.ns._numeric_suite.shadowlogger method)": [[2092, "torch.ao.ns._numeric_suite.ShadowLogger.forward"]], "get_logger_dict() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.get_logger_dict"]], "get_matching_activations() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.get_matching_activations"]], "mul() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.mul"]], "mul_scalar() (torch.ao.ns._numeric_suite.shadow method)": [[2092, "torch.ao.ns._numeric_suite.Shadow.mul_scalar"]], "prepare_model_outputs() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.prepare_model_outputs"]], "prepare_model_with_stubs() (in module torch.ao.ns._numeric_suite)": [[2092, "torch.ao.ns._numeric_suite.prepare_model_with_stubs"]], "torch.ao.ns._numeric_suite": [[2092, "module-torch.ao.ns._numeric_suite"]], "nstracer (class in torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.NSTracer"]], "outputcomparisonlogger (class in torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger"]], "outputlogger (class in torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.OutputLogger"]], "add_loggers() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.add_loggers"]], "add_shadow_loggers() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.add_shadow_loggers"]], "compute_cosine_similarity() (in module torch.ao.ns.fx.utils)": [[2093, "torch.ao.ns.fx.utils.compute_cosine_similarity"]], "compute_normalized_l2_error() (in module torch.ao.ns.fx.utils)": [[2093, "torch.ao.ns.fx.utils.compute_normalized_l2_error"]], "compute_sqnr() (in module torch.ao.ns.fx.utils)": [[2093, "torch.ao.ns.fx.utils.compute_sqnr"]], "convert_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.convert_n_shadows_model"]], "extend_logger_results_with_comparison() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extend_logger_results_with_comparison"]], "extract_logger_info() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extract_logger_info"]], "extract_results_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extract_results_n_shadows_model"]], "extract_shadow_logger_info() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extract_shadow_logger_info"]], "extract_weights() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.extract_weights"]], "forward() (torch.ao.ns._numeric_suite_fx.outputcomparisonlogger 
method)": [[2093, "torch.ao.ns._numeric_suite_fx.OutputComparisonLogger.forward"]], "forward() (torch.ao.ns._numeric_suite_fx.outputlogger method)": [[2093, "torch.ao.ns._numeric_suite_fx.OutputLogger.forward"]], "is_leaf_module() (torch.ao.ns._numeric_suite_fx.nstracer method)": [[2093, "torch.ao.ns._numeric_suite_fx.NSTracer.is_leaf_module"]], "loggers_set_enabled() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.loggers_set_enabled"]], "loggers_set_save_activations() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.loggers_set_save_activations"]], "prepare_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.prepare_n_shadows_model"]], "print_comparisons_n_shadows_model() (in module torch.ao.ns._numeric_suite_fx)": [[2093, "torch.ao.ns._numeric_suite_fx.print_comparisons_n_shadows_model"]], "torch.ao.ns._numeric_suite_fx": [[2093, "module-torch.ao.ns._numeric_suite_fx"]], "torch.compiler": [[2096, "module-torch.compiler"]], "get_ignored_functions() (in module torch.overrides)": [[2114, "torch.overrides.get_ignored_functions"]], "get_overridable_functions() (in module torch.overrides)": [[2114, "torch.overrides.get_overridable_functions"]], "get_testing_overrides() (in module torch.overrides)": [[2114, "torch.overrides.get_testing_overrides"]], "handle_torch_function() (in module torch.overrides)": [[2114, "torch.overrides.handle_torch_function"]], "has_torch_function() (in module torch.overrides)": [[2114, "torch.overrides.has_torch_function"]], "is_tensor_like() (in module torch.overrides)": [[2114, "torch.overrides.is_tensor_like"]], "is_tensor_method_or_property() (in module torch.overrides)": [[2114, "torch.overrides.is_tensor_method_or_property"]], "resolve_name() (in module torch.overrides)": [[2114, "torch.overrides.resolve_name"]], "torch.overrides": [[2114, "module-torch.overrides"]], "wrap_torch_function() (in module torch.overrides)": [[2114, "torch.overrides.wrap_torch_function"]], "_dump_snapshot() (in module torch.cuda.memory)": [[2115, "torch.cuda.memory._dump_snapshot"]], "_record_memory_history() (in module torch.cuda.memory)": [[2115, "torch.cuda.memory._record_memory_history"]], "_snapshot() (in module torch.cuda.memory)": [[2115, "torch.cuda.memory._snapshot"]], "torch.finfo (class in torch)": [[2118, "torch.torch.finfo"]], "torch.iinfo (class in torch)": [[2118, "torch.torch.iinfo"]], "torch.utils": [[2119, "module-torch.utils"]], "torch.utils.backend_registration": [[2119, "module-torch.utils.backend_registration"]], "torch.utils.benchmark.examples.blas_compare_setup": [[2119, "module-torch.utils.benchmark.examples.blas_compare_setup"]], "torch.utils.benchmark.examples.compare": [[2119, "module-torch.utils.benchmark.examples.compare"]], "torch.utils.benchmark.examples.fuzzer": [[2119, "module-torch.utils.benchmark.examples.fuzzer"]], "torch.utils.benchmark.examples.op_benchmark": [[2119, "module-torch.utils.benchmark.examples.op_benchmark"]], "torch.utils.benchmark.examples.simple_timeit": [[2119, "module-torch.utils.benchmark.examples.simple_timeit"]], "torch.utils.benchmark.examples.spectral_ops_fuzz_test": [[2119, "module-torch.utils.benchmark.examples.spectral_ops_fuzz_test"]], "torch.utils.benchmark.op_fuzzers.binary": [[2119, "module-torch.utils.benchmark.op_fuzzers.binary"]], "torch.utils.benchmark.op_fuzzers.sparse_binary": [[2119, "module-torch.utils.benchmark.op_fuzzers.sparse_binary"]], "torch.utils.benchmark.op_fuzzers.sparse_unary": 
[[2119, "module-torch.utils.benchmark.op_fuzzers.sparse_unary"]], "torch.utils.benchmark.op_fuzzers.spectral": [[2119, "module-torch.utils.benchmark.op_fuzzers.spectral"]], "torch.utils.benchmark.op_fuzzers.unary": [[2119, "module-torch.utils.benchmark.op_fuzzers.unary"]], "torch.utils.benchmark.utils.common": [[2119, "module-torch.utils.benchmark.utils.common"]], "torch.utils.benchmark.utils.compare": [[2119, "module-torch.utils.benchmark.utils.compare"]], "torch.utils.benchmark.utils.compile": [[2119, "module-torch.utils.benchmark.utils.compile"]], "torch.utils.benchmark.utils.cpp_jit": [[2119, "module-torch.utils.benchmark.utils.cpp_jit"]], "torch.utils.benchmark.utils.fuzzer": [[2119, "module-torch.utils.benchmark.utils.fuzzer"]], "torch.utils.benchmark.utils.sparse_fuzzer": [[2119, "module-torch.utils.benchmark.utils.sparse_fuzzer"]], "torch.utils.benchmark.utils.timer": [[2119, "module-torch.utils.benchmark.utils.timer"]], "torch.utils.benchmark.utils.valgrind_wrapper.timer_interface": [[2119, "module-torch.utils.benchmark.utils.valgrind_wrapper.timer_interface"]], "torch.utils.bundled_inputs": [[2119, "module-torch.utils.bundled_inputs"]], "torch.utils.checkpoint": [[2119, "module-torch.utils.checkpoint"]], "torch.utils.collect_env": [[2119, "module-torch.utils.collect_env"]], "torch.utils.cpp_backtrace": [[2119, "module-torch.utils.cpp_backtrace"]], "torch.utils.cpp_extension": [[2119, "module-torch.utils.cpp_extension"]], "torch.utils.data.backward_compatibility": [[2119, "module-torch.utils.data.backward_compatibility"]], "torch.utils.data.dataloader": [[2119, "module-torch.utils.data.dataloader"]], "torch.utils.data.datapipes.dataframe.dataframe_wrapper": [[2119, "module-torch.utils.data.datapipes.dataframe.dataframe_wrapper"]], "torch.utils.data.datapipes.dataframe.dataframes": [[2119, "module-torch.utils.data.datapipes.dataframe.dataframes"]], "torch.utils.data.datapipes.dataframe.datapipes": [[2119, "module-torch.utils.data.datapipes.dataframe.datapipes"]], "torch.utils.data.datapipes.dataframe.structures": [[2119, "module-torch.utils.data.datapipes.dataframe.structures"]], "torch.utils.data.datapipes.datapipe": [[2119, "module-torch.utils.data.datapipes.datapipe"]], "torch.utils.data.datapipes.gen_pyi": [[2119, "module-torch.utils.data.datapipes.gen_pyi"]], "torch.utils.data.datapipes.iter.callable": [[2119, "module-torch.utils.data.datapipes.iter.callable"]], "torch.utils.data.datapipes.iter.combinatorics": [[2119, "module-torch.utils.data.datapipes.iter.combinatorics"]], "torch.utils.data.datapipes.iter.combining": [[2119, "module-torch.utils.data.datapipes.iter.combining"]], "torch.utils.data.datapipes.iter.filelister": [[2119, "module-torch.utils.data.datapipes.iter.filelister"]], "torch.utils.data.datapipes.iter.fileopener": [[2119, "module-torch.utils.data.datapipes.iter.fileopener"]], "torch.utils.data.datapipes.iter.grouping": [[2119, "module-torch.utils.data.datapipes.iter.grouping"]], "torch.utils.data.datapipes.iter.routeddecoder": [[2119, "module-torch.utils.data.datapipes.iter.routeddecoder"]], "torch.utils.data.datapipes.iter.selecting": [[2119, "module-torch.utils.data.datapipes.iter.selecting"]], "torch.utils.data.datapipes.iter.sharding": [[2119, "module-torch.utils.data.datapipes.iter.sharding"]], "torch.utils.data.datapipes.iter.streamreader": [[2119, "module-torch.utils.data.datapipes.iter.streamreader"]], "torch.utils.data.datapipes.iter.utils": [[2119, "module-torch.utils.data.datapipes.iter.utils"]], "torch.utils.data.datapipes.map.callable": [[2119, 
"module-torch.utils.data.datapipes.map.callable"]], "torch.utils.data.datapipes.map.combinatorics": [[2119, "module-torch.utils.data.datapipes.map.combinatorics"]], "torch.utils.data.datapipes.map.combining": [[2119, "module-torch.utils.data.datapipes.map.combining"]], "torch.utils.data.datapipes.map.grouping": [[2119, "module-torch.utils.data.datapipes.map.grouping"]], "torch.utils.data.datapipes.map.utils": [[2119, "module-torch.utils.data.datapipes.map.utils"]], "torch.utils.data.datapipes.utils.common": [[2119, "module-torch.utils.data.datapipes.utils.common"]], "torch.utils.data.datapipes.utils.decoder": [[2119, "module-torch.utils.data.datapipes.utils.decoder"]], "torch.utils.data.datapipes.utils.snapshot": [[2119, "module-torch.utils.data.datapipes.utils.snapshot"]], "torch.utils.data.dataset": [[2119, "module-torch.utils.data.dataset"]], "torch.utils.data.distributed": [[2119, "module-torch.utils.data.distributed"]], "torch.utils.data.graph": [[2119, "module-torch.utils.data.graph"]], "torch.utils.data.graph_settings": [[2119, "module-torch.utils.data.graph_settings"]], "torch.utils.data.sampler": [[2119, "module-torch.utils.data.sampler"]], "torch.utils.dlpack": [[2119, "module-torch.utils.dlpack"]], "torch.utils.file_baton": [[2119, "module-torch.utils.file_baton"]], "torch.utils.flop_counter": [[2119, "module-torch.utils.flop_counter"]], "torch.utils.hipify.constants": [[2119, "module-torch.utils.hipify.constants"]], "torch.utils.hipify.cuda_to_hip_mappings": [[2119, "module-torch.utils.hipify.cuda_to_hip_mappings"]], "torch.utils.hipify.hipify_python": [[2119, "module-torch.utils.hipify.hipify_python"]], "torch.utils.hipify.version": [[2119, "module-torch.utils.hipify.version"]], "torch.utils.hooks": [[2119, "module-torch.utils.hooks"]], "torch.utils.jit.log_extract": [[2119, "module-torch.utils.jit.log_extract"]], "torch.utils.mkldnn": [[2119, "module-torch.utils.mkldnn"]], "torch.utils.mobile_optimizer": [[2119, "module-torch.utils.mobile_optimizer"]], "torch.utils.show_pickle": [[2119, "module-torch.utils.show_pickle"]], "torch.utils.tensorboard.summary": [[2119, "module-torch.utils.tensorboard.summary"]], "torch.utils.tensorboard.writer": [[2119, "module-torch.utils.tensorboard.writer"]], "torch.utils.throughput_benchmark": [[2119, "module-torch.utils.throughput_benchmark"]], "torch.utils.weak": [[2119, "module-torch.utils.weak"]], "torch.xpu": [[2120, "module-torch.xpu"]], "torch.xpu.random": [[2120, "module-torch.xpu.random"]], "torch.xpu.streams": [[2120, "module-torch.xpu.streams"]]}}) \ No newline at end of file