From 2ea961ed3ba1fe92e637145e56c5a6f0616570f8 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 31 Jul 2021 12:21:47 +0100 Subject: [PATCH 001/730] Document the new resolver --- docs/html/topics/deps.dot | 19 +++ docs/html/topics/deps.png | Bin 0 -> 27346 bytes docs/html/topics/index.md | 1 + .../html/topics/more-dependency-resolution.md | 160 ++++++++++++++++++ 4 files changed, 180 insertions(+) create mode 100644 docs/html/topics/deps.dot create mode 100644 docs/html/topics/deps.png create mode 100644 docs/html/topics/more-dependency-resolution.md diff --git a/docs/html/topics/deps.dot b/docs/html/topics/deps.dot new file mode 100644 index 00000000000..8c7ec10c9b2 --- /dev/null +++ b/docs/html/topics/deps.dot @@ -0,0 +1,19 @@ +digraph G { + graph [fontname = "Handlee"]; + node [fontname = "Handlee"]; + edge [fontname = "Handlee"]; + + bgcolor=transparent; + + A [color=blue fontcolor=blue]; + A -> B [color=red]; + A -> C [color=red]; + node [color=lightgrey fontcolor=lightgrey]; + edge [color=lightgrey]; + node [color=lightgrey]; + B -> B1; + B -> B2; + C -> C1; + C -> C2; + +} diff --git a/docs/html/topics/deps.png b/docs/html/topics/deps.png new file mode 100644 index 0000000000000000000000000000000000000000..cf9e0ca66839913c2e520bddfb3e3eb2ce436d30 GIT binary patch literal 27346 zcmZTvWkAzwxZglPhkyu3i%v>Xx=V(Lh;*k+K%^!ynh^&XEhwGBA(WQx5*SE|jE12g z-5q21kLTV`cOSPmp8P%WJ`2}+p-KT^f`C9E3bm&&9T13!4+H{xT_**;@eIsK0D!uzOu5sT^n2oe!NxV2qx(JHY&xP9#!XHRQV>U4H*O=s!P{#cynGY-~3A`ZT=x z!V7}^{||-IGMRh5m1#?P39gpJnu`Mp&H3@8>ue{z9&AoCYz~&YJG>6|2M@=B#{wKJ zK~pzY@PU`)!>nL)_X*E9G#cIaT#(Ft;~;@FgKXFw)XX$Y7|6c8C!ftZ5#NE`s`d0e zRj@clIYdhK1Ocnp-U~ohh?PPYwgXGITIG%N9`D}B<`0|$ttfzIDTb@fy7Dbio=C+c zl7sQUyA&(tfiYz|;0DkzMVQ@uI2v6zxY`t$X;FADitM1I1Fj3+Bt5u44{;OEUMBW= zn7s`0VaYxxZV<_4KJOcR9D=F7nXO86W_`cs!&VJ7@DT%>-yE(4W!%;yWZJ;k>gr4-Q4mYX=!PLUrI4@k)Ou5+EPVeaD<>n4CNT zvP&2EhAw6UI~3jW13{Y|M&y%S_UM9oI0PiuZuqqHxHi$EE$Y0%Z`?Vq!$bD-+)OfT=ra*^?9Z{hc=yQTW_Dxu95WOOO^qqNCh@b* 
z1WdR)v*=jV^I{TpMyx17a=@Uty@NOKc@zFn9!YT~HEZ0#qy8;O72X zZvCRBCh`^IiVwE|E#8$_I z=QX4&f|k_T|9+dnw`IaC&~*wY!pdCo*HQ<#NeVb#nn z8NdRx!&e>$=C|DPzj@l%lhpMB>vfR+IB#ic0x);_&&Cc)orLoZrglTXj2 zG$sT11@HvQ_6weH!ZC4$u9!UD?$Zy~XB$@@?(wV@yz;%Jt?F{UGH{Su#@KCGmpmbn z*qRGa+!tk^KtXPqxU%vl#SOgyn})GcsRsf86?yixfpbTe$I%&urz@VIgbsH9{jMv4 z=y4rLV8-jU6|-V83cXjELJbm4|2vAtr&^Guax4_ZD5LE*DhEkO4EgT|;?evWL&dU) z3|pN01I2A#+9L^~hNl~^@C9}x=R2I)B3&;4I%(rd8EK<*!RCC(oS=~qynGXQg)zXF zhv87K`^MkFpSoph%im6f)yTV%oE1^1!Q2-x(#=8nz3b|)8P$9w4; z$dTyk?_zof<4U16&}_H==xVP@NZyXfcxCk!n_!VuiFOG}Y2RNi=3QMck(_sy398(xRa8??qpPGNib#kH==@6Aqqa`Nzu39vcy6 zMBXWkYmMeXl&kGx|0?mFdw%w<3wwafQ8$Ub^V6R@woC$a%IV`V8Sel+cjwau zbC@O=8?K^AVAbF7Mx|61MRNaLQehuz)%)ip8C&){C*b0Rc$^s{R~$2SlYw3LmFV=~ zqmXuQYp&HS`_2@XgAA9Ul}=mrncbr%T(z_yk_oU(rnDN|f=R`PgP|&He2^nYdZ32q zjlJVs{VTA&zf!Qof@K-Bm>xHk9nIend>Le^{j#3@zn!|fp*T5Ur)JIY%hmup5Ua;} z=;g!0au&M(m61)$Ek9mld<%2`t)(D6_qCwRyLv{*sQ!>Ym0zK2(gzQ5$~l0%~ZGWKFcFqlXOJPR20pS()&Sqq9) z`aflf_&903^rjl_f^)rjK}XY7a_g_0GDf;U8MkN?W4m7b^gBS~PUdg__kF6kVXq$H z@M0o7RkaHmw)QTa1|X2?py6?QJ!%6MtywF`vj5>fk8_iz$`+)943}6JyfJIAUofi7 zjPqrcI)bvJ`aCb*NZVp_iwld z0zVXdd!ZlD;E9kmy)Xp3V<&pAaOhd=XB~S$^I!IUpvZc5vAB7Mo-44`b3HrGw#wLO zog-O^g>&&z;i4ue49STb?Zw;@9z;_t3)rlL3vn|hi zoFg*g0Z-^%Ifj(*)`EPvvjwlk`r}-}`e|e~cSelCa!kWRczG0fr9E(vXyvrMqV5q= z2jGb_-xqXtfA8U+AyRiXMYbD|_1?ftl9gW-qm=w|w8Ng{tncYb+?5qaal1g^c?NJe zO!5f-Y$&PjlF!Fs8(n~ z@m|EkA~@`|GpfB21<1-Z#aJ>`Fc7{H!Uc#{>;nT*=+w%WeCC4m7qpSd zFVm9TiO2w~2>nJeXw#1bM@KmPGLhnw+g78r^ybxuEO4G4F~n?voG^|bFM->oQj?TY z@$i4|`FmC8^B~WV4uS}YZA`{z!C+HmwGBRChiDY-L>v8?ao*C|M9SH-pxKZJEE{|o zh}}6abJ|EIn|LqR6Dvdx!-xdWX103<9|L;;J?tv+b~#k+A0!d&^X_;w53*iy6Y_5x zwk3Orho6AXu)jxJhg~L^1a(>eJ?#F!Z5~QWWsnO|WPXs2Rq%Y+vbEjSE%$TUJZY0z zW+)p3k@@XfxNYo9w%k{wu5sE}2ms5Xl8RQ)xr(z<4%&=BH75#Rd znFP5geRVU%;~*7?16c;0%&3-j8|k_Jl)M!1wT5O;u3wOR^+b6nOE%J6eD>;a&CCM4!z0@gNT^Vg8kwN4JsSYl{JD6Lm=v^e>eQpgqShX{t-uJ zF66Wgv}Uj314P(hVbb#v^1M+JO_yVNlp@Az}=J-o$>h&?TKgkkmr9k z(yWE+$i}tsBQs63tro0W81=2vCQ<-1D1?yw(ZYSt9-fm#=9}o?vf5i}|I@WTcR5PS 
zDmEsg0DG}gxZ5ry`RFfEJX+Zt+{K=;8C;|!l#rPAMo9l%86e70dxsiZT1IAYagxO2 zj4nj}(zyo_nZ8Rcqxl@-Xv*;L@WbCbQ!|Mau0xi~XiqFQo>x3XxFDXFMqL?f;p#(# zmzE@f7n$jk-DU!0tyw*TS6uz1^^)RFPKwGXu;50hAKss-pL1yS^G3#A}x-PVmk0j+euZYRX*EKrW{ygEmmOvCs(z6;k;SC5N zHG-&sgk+bpV*e*E36?Tulnml0bH!**esQHx0jXg*OZk$2(P}7edI)uew(AIIqmRe~(bU zo}Zk>AfukEl`U3S#lrIU%g+zXpK6t70@vkgS!Lkrz7Wa$mA9S=B)#KPf2VTLcc2J~ zhA1eg2a#JZJpf7(a)?=WTepbU;JI0Xw-c5s{OW7+D&6n>%hq0lFW#R?Gawsg|d_lu%U$NvD-@$<@ZGLeedlCN#CPb%O*i zj*q(rfmlo%N;H21f)Rp5ZGHL`e^|Csoke~aOVp}|``uA-`2&RyLk)&qo~v6g0}Ydi z$3Y6$=;CegaPo)cu|&DS9U?PxI=JGA$?v7lik>AA$#9nk>`drDo*xie9wBZ2Qd>7f z|8AT2ky}F6P|2y@#Rmup-#9fBhy=;Y@?Dd5qH!mk_hRU-A&!@@9SOGqOelh9rFHLV zLYBX2PzJ4+Nf3(nQ6-*&P*d*^?5UC42Z z9#h`UG7i%JN#U}kXXQasx;I3=?0EXJ|H9_qwkf67?Bfu4z3)!C)L3+1A5X;4$+9ei z2vE^#)20f(G8$o12_m8M(wTKE3gbqEpaz$f<|BVw+X{-z#~pp*;pa~~?d#)M2tGEug!_`_T;N#xO!oYV3> zgp6{Kaq#lSCL4jz)&S5$s;<>}cA#JTae`!DZL#c@guVpX(=>u-Bhh}>;N0trlI$VC zXiI{#%09~772yJS8+f=go{eaF0R%iQt)jO5%Uw-6uMxsDkIW<`Gf6;x8*}Kh@8a!4 zl_~EG!&5#0IFwI&VyN{fv!rdkr6azC@*>p5yVuqVuf5S9{URvq#GttR$Je3;ZPQ7| z+OY0HAg$07S^IO0fpppV<*}QpzWz$IeTMSnEr)G14LlC=4xWBHiA?{qPJx&BYxt=* zoX-o1JRRg!2tRSVzH~H7BDqE>C$w#gJh9`h=d^9#X$ma8 zjp^#lg7U)Fal>ug<#r1m(ydQWX9b`mP&u`kLjL5N1J7{uYldPljtE5PJV*!;ERxL! 
z%>|pv?!2ipcn=4&fUM|2rl1r4K-P=nRrIjN-#n)Fc%yJLh#IN=9q$_h~A&^ zeegM%J{x_kvZSqO=&o?i(#8O?J87-|af^+Y=^jbICvQSdum4503+SX~-ZnHX%K1s{ z2ivfsbVm@5_c`jai2`R_H~q1obM{wy0m>J^-Rc8hP;Bm=wLEyVHoJ)!$5RLC@hRNw z!k>c@_fVd`ATpwpq42iv#x_egtayA1Cr&nLvQVQ;fAXKXfsF8C_cGRa`OoVa!?v&O zhz1JOJ=-})l#M5W^0Sx6-M`GY91W#y)Ry_4W|Z0PxDk&FI`D$rD5m4?L38AdncD0T zB+AH7mqcx)R z?ndIoB->k^>3f<>CXzPY>IQ$;c2wW|S{;KfB&`WrPts{Bp$Q<|4y4h_l)qP$LAtVS zYqryL?tVG0{frHJNn{rEB&3#U#_8jYLfLM) z*}+qYBA40HDNEh6Z>5Okc~?+?*7S+d?Y1nC896TcG4?Y8Z{E?<@~-HB&)|1Dkgqw| zfFvHY4!02F9_m^6m4VCO6bE*`@~C^fgRGl=+^lcW4LacpRLNoIuZ_k@n;&DDgebR= zeE>7dUDnq&3v56ajHPt>M&1v%=528G$}myxo}91m1HcLNQ$@qF$G^uz9l+c%foC~8 zc#hYZaA5|Aw&*`=pYOGjIKMJ_z$bi4Q+KRG&vPs!o76&tcvTz-gV|KynTe{L?5ZLG z4&epyvGJSxrFBwt3K$r4II*XmGaUYL*DEWWA-3|YOL@;c_6{Tvx{glU^z*vi&fvj#y6OJd46=%Lu&Q{|+HSMbGOQ`|0#u#1QHD zxYgUb(BeCkgOXBw;|C=Wg&l+B|6Y__f35+ z@s5Lpx#G@UXKFWU>amQ|I8VRyh(6zSRan3NlcLT6Kc*Pc-b;UxK$71ok$pz`A zb1DIYQDiOyx3xd?*%lAju3dlz4p&>*G~udl?870hTUtx1Vz3${Q^ zf5Svjv~E8YxO! z!DEA(3;Fw0y!@?Ez;c%GzWcLgBR*dL_~%UgcnqUh2QAV!7A#25>1>un^yvpo|L$h2 z+SE1%`#y3`e@8FyVPu81F1%39|C(ox-jX7hnsoZ;$ouR@`IqZkL2rDBjAI192|f@sqCB3)7p8;t!0FelwisTLEpTD>=b5F{U69K4%dy5l z9U@V*afi2E@MJkbqo!}49%LJn9*Qosv{jPdU&(^nF&1{l89w%@tV3;uVaPMS>YF$R zeZREXxYz7O>)qa>yvT>Ime`AUR-O2OhLfI5B(E;J}9#H8C~ ze}#Ns@b|l%!PpV;PfN>vzmIChqSNzI>9pozYPdda(@hrt_%ZHQnmKbL$=`3|9t?&T zBHH?OXurr3GBOV#$C;S+N%!xY*KI6!Q|ff3V0ih`0M)U{+dC8lExHy_!PZ)`6gkmw`HEvO|si;_fY#JL&IxlY-@gMu}5A%e-v@W{k`!w*ehE(V|QSZ z7el^RT5$-qVb!BF`odaVBGOhrYna=qsr>~xjylkqnD1OGXGCcE(OD|#3pi%Cqju%QJRPtk!m+$I|R z?6vHS%K8Lg4)=@74?KJsm$u7x6n+}c=RSyIoBs{9Ta7L5Dq*x)9*+7WcvY^jwV7AF zhyjG~RsP{5U+p9^o-i4K@TUu)7;m`!g2fPYOuddOrhRlBBU zSfwK57WJWuftTf-D+3O>)MPRIGx-dRW39Vj1(=(BEkaVR0@zBNjq{VT_RVv;I5%KJ zuVh^$Hn$?==3|4G^%YsINwbvPzc)V>6O=qR?pa`e<${p0>3IrY8Dqas{Sar^1~xMO zcT>%mt)l<-t!}d;h%*oZHQsTClxul_DjrlhUENe&l~D0R+K2{sgfE{^0;CT$3qr19 zj7i^Q(6Mo@|A@I6hwXK3C+z}%jKg3C1OzRJ=8!7K5BOM{Yg5yfO=JXEXa7D% z2Fx4i^s9TdmM+e-4S(yD{U2kSS8o@WwI`suSZw$*?&uT8-vdpEwmJ?&2@U2mlEg^K 
zJ>+NB7NCJY7E4=ESF5tJNg? z>oII)_glGJX(jryOVDfle9^yL7t6-F;wt~SyqUCWvY0UMV~=MnEY{D98)Une^?KL) zRoef}0=(r3bb_m+o`ENN92MA##=aJ_-9sUg*Q zqgnq!7q*HAHwN0Ua+O6hhxs6c_xk!i#8v8vKm?{+FhhC51HC_PB9#LuS#L{`Q;!5T z1@{>wYW(j06zr`fHQqb;_P1U&msyP}h-Hz0XA0a_?S~~;JPx4b2SNp~oaLq`HS>CF z4ET}hPZ;eP*Iuyg^F=V0YN!vMsUS!u& zFe()-te|MK-uL3|?}N2Rbx}hgqubtDX$oC8RgCBp_zTl|fDkGwiBgr7mqLZ z$h*mEY#I9fOa|n8f2ieUzm~=ZGspsVDec~UwN}&wC-BA-+S=yCCZ_FXO4{~nv8?M2 zaJ-skgZvgJ(-G=iP3vrpJl3U8j${^>2M;mhN+y!F+|ZnbD)~JUl5G%zbdUUg zm@HN7_C3p8qXnJ_n`8EwmYF}TmdtH1g#7!39WG!W2dL$UFei4k(Zqh3=}e(HEvb9L(@!79Ec7n|?+U^ZKuuv6H4o?)n44(gvSbX2Pp)4elwcaR z4?(OL8Pp}|>7pwe(FBi0)w>GOK;$CH;7Z-wcQa-0QB#Cklp`Ie?7ZhLX}MPV>%3$C zI>c7V?#{yC#>_g1m<{` zk1+8A)Nld+t!2rpmDKJMR~z`^8iHl7#m-ZuUj;@xb_|wMAgm#j;Sg0dm~a#f&7nEf#6UHm^kyoo$QGIv6Z( z*IX~)tMAP=N=(7aA!%K4{FAb6q7#*G&nqr&C8$%q+CMK2ZL<76{W@nl z3Om*87pkJN7>iJD6I8b8I-;(dwxuGAoxNdoDha#*^}%_cD|ljFVq^$5m5$@ZwqHl9fFxc5{1og8DnwU?L zoR&muEFmhyQMX#@@%DF!+Rh=&*aSs|3t+zvJ)=AN^Mtq6R8v*ObEIv?V` z8UQV(lv^Fl7Dn6Poq3x?W7|E>G{@U>C`%cWz6CL^zz`f{!VzUjyrZ*&pNEGQ!O1^; zbs0)S)XHO(mP&Gqv@fUGzJpI5m=&oReKM>|tQ(!4o>rit)`)+#*05e`T3pC?n@gNp z`?oQ~dEyJva2LI$8JqvXNG~*poWDv7>-oUpkMqaDe4wmZWV)j$nCp5?UqW&m6Xkf* zigpivL_PDi`FaA$xe9`u2iwSjlm46quC=6jtikmGC*!+ zxwt?256N(4`|uiOvpW5%M`jMgr3km2nb^J?;Lc6Dx;*-$ zqt;q(GY6eI(U3v;^NpiWdW(~v^LNjr)#9BT8i8t$VvwQI;?dO0bo5H0cJ7pIicMl# zT$##8r!3l*R+_C$|K>CCz*(H@yLzrR{vA)#oe4wG3(zCqkN-x$RHgKIbMP{k7b z5$opoYi*9o2gj}C5;^uHpmm~&8>M24P!JV}A!OBM{h#SH1S>igyyH-(HV=|$!XrSM z`llIjAaU?a6CZZ!IOLc{X7V;QMd;w*V7`If^AP^;?FM*djMTSS)L zc;TVEUWMq)^=RKXO`k+3D&jh7XMC2mWyt9lyq&0(Lua+#EEq?85OB#o^L}@E!6G*d z$HQ>k-W=oEsV}`9SoCV{SJt@jxS3lsWSmDlj|%l*dhVSAb;Dif`yC}h{+1u~7ff!O zlDD37m~kw5Vz_>!J%Qb!j-#r47Fm6S1GC6Lp%7ySmt%VD2GO*zZGUnmw)+%lrMcA- zpAa?E%Nl|vSlT?l;8pqg`RA*!F|=_auvZ_^7p}J&DH_TQ-J=}1hIz@0(mz=jo)6k7(kvW0hf zhH_jB(q!A4m7y7>bfq0095X(OBI!?BD=Xc+se7HN>{Ii(j+Q_`?~=|wHFHI<(DF<( zxTdkOv5b|(?j0{^Fv!7nyF|;IqEsJl2U-UQRwE?{wT<}k59cSINuYAZz}61BCkq!s zFWEl!;N(N;c^_sG)sdFsQtD$-AKs0NgPflnKHFIaP2mQ!XmE*%6BMQU$L~M}bb#@| 
zRHGP1bXER+O$l-!Iw3Y&p7PahCV9;=^OZR5=Bt7zp>^BI3*5WM^_e@S}Wdm>zC3L=#| zmP@{F_|S*ql-;H4Szon08}GwIC$YL`y<;oQP+7V*0tf?-l{*SWuVCUH^kjPd3U_JK ze=P9M^wWiW-5qgqL-1$uHLht35<~{!yy#}v;J@|_Dqfk)@KWXoLK3?0Ap<;-K5i)3 zcf*Ad{tjLmrN6*)|Crc|DwOEgM&(c@@;+fdi!d{83ep8V?>_L?jmV1H1|uM5VK|qY zoh{l<{ZzGY4tf!zGX>mHf9j8>911d2hvd&4EeAiTAxgx@!IF^DYTJ>8DX_z)IM71I zES#AFE~%_$#z&~7Cp(p z6CgA12IKylhAGCE?PP3zvNEw-t%OSn5DUagu&(;6c+mPqBcB;B&J+GmHFnDg7(%=A zfQ(w(9*wDP58clY)yR!rQyX=sq)y?1O8XqFjDq<{$7x&d2X5k|Uzbuz?7fz%8?4^% zj5f0_x&a2k7B_?sXl|T2*-XzqcoK!7P*j7P zmfr?N9zge&fr+>gb~Q=k`_yqlwvFXBDzG)%XZ{YG9+R?0CqMycHRrtkA&RdTvi;i1-7+2PZSbap!R;+_#M$0{)xJYI=dQ6s?C$b# zK}F?(s?o?E>swV%PtT;B4i%tT5+$V02$vsvLd>Bi?6UD(1?uXZlDhx-5{;3%WbqL} zCSD46Uz800*t>8Wl9TYYUgOHQmqu+oA46lt+Plu$CcF#%V2hln zOZRVs!neAU;lR9HL1RP1pQwyT^y>)^clV2ah7Toh;fETd3D$ON;nhcH*`Z6n{@Z7d zqLgEn`L1Z}K(HvRieeIb^Rw1^8F+gZhwiEGIuM7WIn{)-J+$6*2?iG02qL)eQhRr1 zDIE6<3Nt`mUw!(qm;J>x^A7ZlEOc-BF;`3T)pI<&-$Ut7lJ)W@SM9Ns%3d;+#rz3x z((epzu(z)DSI?{OK2{@-cOKi7;C(lq`@f8;7slXQ0fNk6i#_d9DE|#vWhEFTzuyi{ z^b+=yAxh|+UI0M=Mg+==tLe!m6=E~O`nnh@-ykl5S;rwQp>7tP*Y*$n*MUi$dY{)Mt~hq@-)?!g#+^iuN7g7iyb7JnZP`N`d;X| zJRLPK)x5|;`_t}aD?j|X^2}{Wo_zyqK`y}i>hTnB9d+utAU?cOJ0EZ^Q{i@)giil+ zyg*DnQd(Nd)-?!Qx)1fewjJUCJa|)Lr~H8zo(A){RCHl*^-;uQvgst)tGJ1J(#LxV zNQDD_zR0T`+AHfL<+abFgna4e_6acnRfUGwDZ*K#L5=({*`ch^OcWY(y_Jo|kNkV^ z6(2?pv4ReSl1rW{&d=3H{a3=w|G?h60o@C1#qS#YM#h zcbr;>x}FDk)aQgY-nkmY0v@bIHB$5UBQYWiB{Y^87kZg#t8mQA094CuYF0U)15dbJ z&R#jzf{}L4@Y|p+IrOXlC&HA%)ywr`Q`70&-3+_*1)jl?KLsi(DMH!RD38uXRA5z& zjlMM#DTT7d)wL4e`9Eo;=9>cd^<3Sj!KW6l4_g#NDKVv)rvskjzu~ByCrR{JADDg% z?^A}|by$2k&%G$27stMecC#vB|$-|cgj0`MU zmpG9RX62_*U^UZ^Z4myF?7*A&8z^)ytynV*-NaxPbrhT7R2Xh*F1;SIgWQnS6TnUh zDTOYzR_uR!&)z$vvRH|eWVr=Q28e;FC-2wo$d0XKr59K8lF}s#RqSikqcQvK=Xv(@ zH=f788gP6AYTcx{TKtIm)Ev?7eUBRxi_UPWb9Z;=yS3p4%v|URbSH-e?=P25cw6uJ z&&l5?W%lg`n5V#)G}<9P)#n=f5MluzOU!Qb7De9Wp?BCEX!l+E1T1%Oaj~oiMg--q zc(~V{j1P`~=oG~?Ng1&XTxf6)MpUhgr~)ojAJd_vtFbab!jfJBzYV9gVTefTwucN#>wh6xh6sVhpRw&4mX|q^tqh 
z{5kuFus$r{O8#R)@c#O-KgMeDuL>q6ts!UIO$>l^)O~%=1uyFNQ(WYX#F6>pXud%26l=LZdb%*pX$tHCOIAfAYR_1A6uyt-hDYh3MS;R zd4ik33jmx&0GLKDaBcF(b}sBy%f#dw;ModPJV>+h=rfLV&Nhb5PrLZxsi}s->LdRc z1NJvM*r||^zGpSFw`2^P_lo*$=@Kiiz?wR4Glb~48MxtZHhO&WDX6g5$l;jZz@WGq zK;NQFM))5{m@hDUv-B%${R{f}u3H5kwT$j0U@hh7CT6{h05(8B6c6D27WzEAs^13n zNuhfRsQ$Tdz$Uqvu^Xz*<;1HXcMlJKhMFog<}SeV$RqCO)S3DQ$k>b`zK(i(X&D({ zLL;n?BUwN(TF7yP<=(u>p&EihMiSta>2Y_m26;k4Y;iSn&!9dhb_zvCGT3nCs|@f< znkGVKL`FeB?3EbxsjOsX4*;<+gX-?=j7XFFv@CdfOV7yDeeEVCJ?^;TRG>|YI7rf|2cQ;E|d$q&l7VQTzP-v8E2&V%;;G&{)#e&IFsZ zWXmc+2O+6z$u-vdlGOG5z7~dt0n#|&3PqRz z6)&$^{Kg#_T}BQyS62i}&sAyhZ}pKbwbVxaa%P${bRnd-HU#d|A*0Ax5jS3`@eK2kA&tgb;6f< zibOTWug5IW!ut=Q!uK^so2RFyS}Zp`zYSTxDQPamdS4}M)t49EqYQ41LVspOHIBCz z>OcIWJ3#gDZAO|tkLuGcCK-BdU2=LM(eCHFp`7%~%=+*XtWRi{&toBD<0V*)OU#MMW>&F$y930GKt69EDiCasn7GGBt;?x*DEt2DE zOaMT+5_`&&fi7lkB0VPAUM7dzJ>Zd}KKODI_hgm=+{b(Hq*%x7yW>EINuw?z?P6Yu z_8yB~hWVz1i+nQ-|Q7@FwvChEsz*0nQ^uP~QY`L$_F1s~fOA{p&c5QpV z>|*_}9(^H!bk1pDcdq$ayhE;fF++P)@Tgms7g}^W?>(as;IwfO-8bY~1a;LosdHKF zOROV64dlPL1aM!tC&UAZq9xk0ZivCnyV!4lz0qTCWTc{yaG8pePBXWxC3hbEEM!|U zQ6Ih*A@pawoq6#&=zU)}=IdtFhG%iL-D!{X2lf;>i?eP{D3#hHw(mgE*$QOOIy3B6 zmZy+E0~KyM{0Sp_)gGP-H}OcI>k%RKvBSWD+WQtLo_&(v zYhzGSEqv`GTK4^{3|-tQC_682DGfWd+s?d<#ho^i(y_cii$+=atmc(xwUc%y$3cbV zHAahbD>>Enawnud%L52dkiB&9VxDS~ahG*NWwCjJF)g85yg0cK{rad9ms;HQj8c2_ zjlF#emjhD`HZw^slfCy7bgu#3#A>z^b1N>dYk<7NL-h@8FQ$6?3{Du|pmBA1popHD zn!3YF8*vdgr*?+uNPoRhjj9?RfzAfx0n4%wbbqX#e&Y#jb8cBu1_sJ`3G* z$4cY@PD%#dv(S1Pv+e{HVe)~-mjw7*c+EE!6-CyDT&3|S((PHpXrW2p)5D3$)M#`u zKh#*&XLDs`g6G29&TiQRD%@@&)6MYVkme`5T4Gc}N}&J){75k1$L+XAl|>ZbYL%fw zba8aS)Vw@UHWLSe;ETOK4^E3aXhOzuy%Y6?m*JQ*CEhn)zAOI%H-xhdUf3fpsQBc$ z)Z)ddy`|LR{Z(UFPAPg-UFPT>h-lBg_d7BB@W~__rj2#N4}_rW-YeGCQ1tGJHz0XPsmWq zeG+|Uu&GNk2BtoBKrBp+QRVqc0Cxk6d4?2BU=}>I$uxj}8$jb}V+bxlX(8zsx%>k9 zb!egj&I|c!m+Adynw6DR{UrRicqP`^xdi_&V#_@o15{BN07wEolOnWZB9O2HWrLb# zQSfLRnmHwb^?WEwh_C`w2p~iP9lYNFJ4F>GL`U8ORIDyqy;7VnfX-(CA5`7c!LSNj 
z>KtVZ#0?HpQ&ZF<*F}uWw>}k(?CNR{C4~<;OmO)cY$-Q11L}HN_x5)St2{tIg~}oj zD`NQhr~#A@{jD&&QTwy5JBm>xw)*J0LkWL6nI$^y<3?9Ds!z-DgJ0Jl0f)_$?&Ri@ z>Q-)p_l2H7L7e?#S9^iM@_qj5yI)U;=0-cWTX9*;Hcx&bg|gV`Dk- z8V(NYIYN6nO6uz0B|fklf4xdtsEwx?Dz)R;P0<+2?qmhPHfZ9MqlJ1RFmLTsF9#Uh z(A7uU&Ji%h9r&P^a!>!!CZuv#y($-OSLvLd#*7 zTA)7g07?qUqunp60Y_7e&9*c@L}zot=I1#I88&p*FYHrG9ZNQAu=&s*0?;95c&1o) z)b8@LT?3Cg|CKV+WkxuqTKwX2_29f7pnlZlKT^uDt@u@!$e)ft2F4XKo5ik%@e&#o2&;158p*31mU6If;~c8(8f7>Nn( zPDTNxZ_8xBEUx;b&E(wRD%GE3WAbUy-xqb*T$eK=pA+ep1#or_@K*X)1!&A0AXod9 z^utT8&Upjs>^@0Lm>7?zDq>yR+LUcef(IbYmS`3v9%kYYd%U!` z)NTfBXh#JCm+{h@Akjzv4%_A{J$^ji~uL%N+oL=BeD1TSR>#+!_p zoClOsxkO<014zd_R2Ek|g_t;_6`zqO{LbQ61C5C{+Zhj$6Gw(6EtS3;yCRLNW^Ra)Jp7&lbwZrjr}tC zD@!B{een#)w6`b2^#??l2?!`ET2NlZd^8x@Kc9v+B;@(MMvAgad7> ztdQR&*vnRHXfLtj-N}_vLT(g2Z=Hd^UOIi#f6@8d-yxn|wYIjlY~x(QoC{i}UytzP zgVu&)f_^1Fk?hwTP*Ad+sZ!`-80CP{1Bv5;)*U@juU~(j)_+E9*2`&(YFuotlw4f1 z7ChKFLxwv9&sXN=KJaP%I+Cl3lF%5%S0=Kx`g1}J!!f-&ue-#$W%;1%J%i>h@$6E? zwN&l)sRE9s08zn{5Y33{_Nte~q_a5_3n82_S z-NjQ^eF&hlKyuZTO&^)p8FOKR|5@AGvH*_N1M&c%`l7>+pwdSr$c-b=on{w%M3 zx=Y?$jC=(GMIO+0!q;nGJc@G~GI0R@x$FFaBC^B~2+gL9@Gai-X$inPewl>N-MHIY zst}40VUR&?)LyhjubL>lB6Uz?gd5O}`^=4AA7XT3V=`QC>M`-Hs6O(7_s<0&df#-J zegJyCDMdhN6M0KF-V-JCfgH8(jdIT77(L#tA7QARYm2q*4aeAqYJ%wjz6Dmew9>ds z71WL@U%>lKu~Vsod_2&|6UrpJa7=ECt(jk3r&CkiADnqdWJ;Fgnzp47&*fmr2C+~T zkSv}CJX?5snI)88WHMT0S+vRnkO8prokE|I+PI;)8D9n!eg=ye=wCQ`UyW2f;%5l> zCBGqir|F2MxSEOqemCy1LkdA_7igQD0Jy`qj;%7}Z zlD-T&mduXFiu$&`-xh%Pe-_5;QO0treRbmB8>6t?Pd6y~zLj)J?B(aBgT{p57^QA^D; zR95~hS9SwB3}1gqYs!x*mkM@Kc;anJgvH3={m83wm+1EZPg%Pg)YUqko%!kwAaSBT z46t>eKm0b7GG78{a5}O>lSpIVlC1Io_)33ULPxBzWgcjoy!^4h`IydAeFAum=uYkg z(vYge@$~dK9=ldpOB)jeV>dlC}ZCvjO?TAYl_5RFe9?d zGTE{ZipoUR(O4$?J>B~k-1GB!J?A{z=Xo!m&p9WxQ{IcV-@DjQSB|h$Bgk(SS8S^Y z(090-K!9_(&6PpE8`U}T>sn6$Z1C2~N*aa{Oz0O3-}bC2hzu7&^pSFSW6_cuK3DdX zeDuoE`B`Nv0RW2eA5EgzUT#$C{R;UnH$PcU(ArZ55WHWJtOo{dKH@%bR=?-4S`@wf z(MC67T2k=(Lw`hMCEV-@-|&uLR~>visI|p%HJRcwGrqe7bpA?2bf;PM?$peTO5;;h 
zj)rwm%~x^_e*E(X0OTV3Gk*Z6T%lKeEpP|QJ|h4EIOg6!zbB8f zvn^ixmkxaJ*VOC~d^t%Rp!hw>oWvBPtRfM7cjSUa4VF$0ohPS^_0?>>30}5pkMa`! zr#nrh7P8bB1j4>N`1XOw8kk(*-wy=GSOB2k^QiZhIM91j$3wlyJ|i43RU!)Bw)UiL zcOw$TLV))J%BHMDe|n3x+5fU?Ltr(aCgT@f-1sCH8o$qbFksPqGh<_2Da{&N)2}K+ z-o5+ko-`v7&>I3zvNqB!WKJ3XFXC_IHF%XA;tz=jF7F-j@@Lvpj&#p!KSIX2+I&Ns z&q}@Qm7>2i+#6y0)IDd6JzXb;zxnAp3AaE^jqOe{9b8g<{OBLbxAK(9T=Ah(2k4D` zMv8^2i1&_F&-1%&BEK$pZoI&4UyM&lNnsdM(3rA(*tP9iht7viY@dBq@PR=uahxj* z(8$~Y5qUjqcbI#-@%x8XiGAO*_Cu<+)q<{8Rqwt&O0w1rAaIW<6fAI~9{jyCx*+;l z)Fi9~*zn~z6e}8?e`TWg-WDyRoAs_TFnEvF^YKb9pU^pg>Sie+=6CS7t2Sbl?);?%U05H^*y5C_N&v@1QOeCK_C`iQ*+GuX992;KdJaB z+GP%`tbgjd90J(Z&{))}xO03E$Q!G+DC#q$seYmn#MWlsyjJg{`B|C|IDTC_X_+SV zDRN6_?SVU7S(!{g-{tcMK5;{dHlaEzd3;R75BM?x2|L3D?_QP5z@n~Lk4g8+3iS7X z^}0Kvm8xS?znF88Sfi>J1_)Y{%uu`boHF;tol$P>pHtGGC8XREczzL2*C1?kcwZfz92Zr4p*U^utDs3HXj=wjeV)|6>4uw}*8>P4OCvc?Mq6pu z!d=ALZ`=P4^*h9r)T>#@%lfW`GC0m z<$n81UGb%o9Ka#9vv#}h>2+&=?isFLk>xxhm?NDXsMF;*&lrz3Wa7HPpTC~FDP}fw z(4fw(HmwlhzFk{K#iQ4+s9DbJix&V$-QUvEQYX3W?zkU+PK3vfR`2lSIbE_HaatoM z9|EfoGhEx;EHt717gwL|kF2Y~0a`ZYZtd$DIm9F>hq1Eus&VMLdNLsSxIX)?-lR*7 zy}Gy{wN=X5rM>)Asr+JcD5W)!uzm(W%s{Pc2?jzWh+1KR4e`$k0iI=OV{1E|;ned- ze&nSmw)GpOuL1u70P5g?Y6P$c=ef6EX_fc(4nw)Yb*A^d-vUxO<{uf1rmgPZ5mBDk ztpI`n3);{;;4+rObK{huD3g*@;8uHdqqsKu5b#C5k(#an#!v8hiv({UA1%PrIbrrz zjO5FQ^0{0~|G-gG|6;2j@Rhr`>+OTqUS0`g)$rWd>?9${H)7V9qfC%;rhj4 zo6s_?Uh}o;Z8I$ZriloOR)DGc!U{L1>lIA0(w&+@0lNaI{C?&5u)zffFoiZ8iv7~n z+W-d;bXi_ugAbbZW+Q)7KLPrg@9?0_nW@<;M^8^=x%D0%5^2%*N*qxd`9H+GpGk3~ zXOgTL??$IHg2&vGtR(`<5MKah^akA`*wBCkHkhMfZAb1xZiK}`$mOckJ?C}FWA>ej zFNpjnxG6{xeEsR^Ze}i{Z0}bAz%{_G2>Jj|)?(;8QvO$gj^Eq5ep>!7f_@sFFb4Fo z%yXRAkL%`dCjQWPvQS4I0i0Je48(}BW^^C8KNi3D`8t#-ZX${@mDZYCe!yljFhx$~ zh3~O>luu8SQ!fK%_{nnW@xZ?f1;7um^3*ziZ`Rj%=BrZ}9hBJ4==0WH)nMIN12^Sx zIbUWOFs>#6o`sbtEL*t?JOH3F7o$IarUxeW3P3vrX~@f6sQBYlKy{GH{${S!>v!=` z@D1}1$*!qcVPGIDyWSXceHtDI?3wuQ_#o+kLJuHF>ri1^wfhnlXBWItC){x3 zd+~=h{a*d62A4AX(CSlD7W0Cy;^sN|-aNgJv$en9kVWV2Z}(W?qXdN#=#Ah|G?62G 
z>(?m((XpeCg@>`~9IvcjEQ(>^O-H_I_J3Ek6sF#O3Qe)_`QVN|FlT$F#s}XhGF&djF0NejQsd!C7< zQT{qOK05*~F^$mHlGj|Y(xpBEt@$o~yBg)Mm+iO0JXAEOxLcDZijS^Ft81#N9xNQ} z>G5m(x(5}bjtg@BJb(6BVgcmW0UJWz;7Bi`(U?`chY{IWUM`PXqA)mPHl1Th^Zf&P ziqrSj1%uN7i3S`(Kha&;e9CI-ndja1%KP<@kA*T|L?g?=xEB5YvqU23~SxLk$J1{A8 z=lv>S=CWoWBrc<7YX-4se*w^1pb_6lSu#TV4BME3(JYx2j8kpeSJI|i*MO56MRI$37wVoORM@U2{)UW@GNv*gcEqMCGV<=*AlwB`CLSE}@_P zfFUpRgkjE5nb8OXfo(N3Y%5Eq8EHulCG^LAxxu)%#3Hn!E^w^kuF_9j8Js#B%W$kfz(I}-Hwk*VoCeY2uf*Y&aC zg{wrT(HAY^QEEE7HXy-7Y$Xp8*?kg;!&^1WSz#U>yrs-KiJPf0e>pow9~AybN(r?W zD~I>fZZGs{47Zg8sXr=o6pDTeE36I?NWh8|&mW3o%ww24q{lqyh&43qh>-PUhSr0M zhXLy=#L(NyGWZ&M|-4x&BBou7_*l7L`4qpR?N+`JAqgkAg8MPA0 zczK$$i+3q;o9`+pMJdjUvKs9!mHOo8&5(ef0%I#iCRezXi-gRaVq9N7vZcLU#EQyRo0oQf(&p&m&l3Yu5J+g=eJ@G=5^%lss=7pG=Dl;_ zx>^Cw7xFq?=*#HBpoBsQ>@tS!DiOpw&jGS!+%1^+`yTwM76QWrQ8vtW%0mKNFn!rq z*xH;P@HQNpt)-;;FU96gH%rxa`eqE1Xd?SQ@Baw8tkt3WXMQ-SEzb+uroOe0(VLsv z3F!8Axu4`#whY~J5CUtVj|{`>b?C~9Niwk(p7g?Mao8e)socy-rx zy14vCB|Y-28%=Ax@^WwM>-C3cYcp1S?ydex@L~3zEi7p0BW2~1&FTyMLS#a{9S?c~ zE~bGQzlp+1(Z%SNN!?t>YqnWE$X%0}k+XN~of;LP1NLdA*Oi6Y1_)%i8}COw+0zpT z@vQ1GRH??VRLSELCf!HLtG8UfsH(nQh#8S(0NTR5aX)wjN{uBnL>wP%z*mKW!ya48 z`CA!BYNUaVxc|s+;}Ao+VtKfdKn4??V=fSVJ)>F=x8#O^Io21Vpc44kxP*}xqcyhd zD;w4Lo26EWAe~sTRDh9u=`6;av@-Z98mBN3q^oob2cEE@qTt$DWBZ-}yFp8+#|5oq6*#LSPRXd(E>-^xh~t z2vUyCjpOT)5tA<$g29mHkrs{d006!EP5WPJGn{X;#oEeR6(&XLyYg!)NG}k{DT{ zjC5>YY~(1@YA)>?U>?OS<7M=#6)j%0K-0+%zv-;@*vD=6!&<$&|O)hCjJ*L*R4KDF7}E+%eL1!Z&6XW($S%(!Q>W#ozN z1%Brzf98uU8?2w+G;WHkb{o9rIqjD-8nk2KQ%|D1LyZqHef?`k&PH2(VgrXiYn&TX zt)A0)-CfZ;M`|!u(E^xkL(TUiOLc|$vbt6oVYOmn>%s-sI1!bW1G}wYUP+KrhVji; zpQ{NLy<{@k@zYc$AbYE*>~kHA9VOV@`>H~lAT^))#x1!7HECpGEyZCVP>C7UW1hF! 
zW#|?eZeuX@9MLZ<>}W==!LN%DG_f?0D)(=M?!UyJ!&gY(mx_fA6hV?Z=I$jkFKW2w zUh%L>;;^;izU@<2oS`m}@8{z3ljnaI?5El<&s;sKQ?y5Te)2Id-n)<%W_U4^->}W3 zrd_t8Jmbv|D$5*S<)nvmz-K>~+Q(Zq=OXeaAEeoNsLMcEH|&%=yo-NflNPh@2l|7u zA9l;%#kAjjTL*tUJ%9MZgXabLX(xlvO{tOC5X4e)yo>O=#x1&jIuFDQ_kLLJ<6xg4 zu0+f&X&B;V988bA>{UjxdLK7jrFpJ{YF8~6ikx_zPzfuvdw!-0Vd=D*e^_3aKY`>b zOnG%fLVuZ=KZ)AM%>OXDq|UXivoz28@Aa*Dt)Y;%LCK%dq*=)cWXnVlFGw&IE8fv7 zT_V+))-QMv?W5r337_8>(Jy>8!7cj3YQXM{s%QVzPL{>iNW2I0$EFi#nE<Ao8m$O&tw%P5Avp~@N<7GJZtVs_a0v|1eo3j4$@N!cXA zU~RqX>xm)0W15|kj=fdSgaVCM=#id;*Nk($5ex?YEX1tmovwaU`P{AMMFeue^XWnOB5c<$f<-NdM+Qf$vU}97f))m7fZ|9qJ74d{5*^`rv9`0 zMC=Ot4XXD}3Ba>=%mys{=N0NT)R71x7OnVevfCO(T4a z*(jd5CBE|LQRIc3#|~;3$8*zGSc`>;fh!9YV$#NC9Zlyf+)ubOx!U**PuRjmwH}rF z9k98qeZJx%tV^~P6739aw?*0qPskQbB4+oiXc0xPd0uRo)l8SW6_t$V<>!YXkoFME z_w3x!R0kegb7k(D&(W)qHV?2CY|38l3WP!s)C`k^b=6$|JGtqmjTwkoT~(ZD;0sYN z&k}1J%>SERSn%PECyCVRFGMt$xN++Sp;~=6BwS`PSdHlOJCf_Qiw6vY?pNN;Ion-) zX)@iwz@P|5P5DAW8~Q|3?;iA$NThK+a=>EFkr~p2j=kyDZ;3AX|6rK37Na8Kw^q$R zYDt}!{lh;(V#p!k0*${%d_GN0Fe~|b@>SZ{BTYnLU@#NIbn6LU&~b6olVN^$#`c9z zQ|9u@V5(xcgR^ItvqyYFe|qm<4Ix5dPYTLA0Vf_(r2UMHUt-DJJz}}5s;#@JJhGSq z_Bn<$-Bo*ms`wb!LaRx{dg8kEZ~NV)rT{Sh^qQ@?0OqvsvAQ&&5VK^pD9_zFe^}l# z#c?U!k|)L$@~>^Qu^8uiqS&xXqhs|Q0n)aNxH}6dbjFR{Y@3UNJ-BrBD;VrJr5_TE zv`^&0^pR)3*up(Mdi(qPhmpwSb}gXg8PZ0@#Ks>m6;EoWrX9T+ZHh!{B=WE~_3dc1 z&IrAXCtEK)TX2A|gvh0bvKA#{%lRMs!d@2W+Znb+A(#?rZTI!Wx6%EcLv@ofE^FfM z08@EIuJQlgl$|WkSCfjBV&mPdFvCpuPSslx-`<5CdKi#1<37i0I_0C7YSU#Fmq;PA zJ)x&^8x!7&GfqB9SWh`^-$6gl2aU3giRgTH#G~b2V0a4xML&HxP9w`JT10T#+Co9= zk1i~{B&)f2+9W-{$`zgnm`#+^7r8Zec$Zh(BAkf|NTe%kjET^D`9_wgJp53;Gs4xN zcM6~FBy6D1{CyiBrDvkn20F{QL8O>RCJR3tr#$fCCLa$zk`8R{WE|?~o@fAr+#7e1 zw@?VIr1ju*yCaAaD7;^^WYj)Jld&hKsY`R|DEP_b(M~pmuBVc@cH4da%_8V~U)gb0 z61JRI7Xpb3%O*SaP1SRciZl-iAzT5xtM;N_@!-%BzLuAG_bt^eVEuAB3h0J0P%lx& z7BUffnnl0wLOJ$Us+5en$wci*OsjqV+MqzE1s+Yd#J47eQo5yIm7u$MELv!$C5$?& ze+o#s&2KF#HL}PtA^dC{Qj-VTeJX*VsKXupHUQ-*nj(bDHVdC!ygdgpxnsNg@Dz?T 
zNu*P%tM7a2_qgM~p>t@598`%AI4k=w^w}`801pq(j0%C^O}CYCDF%LFzT0KZ1`RN} z2Fg3M9lz~30SvBT*vfVohYvUXj{V9>4`LPO**ts&llN@2cBWPjGSxrN8;>bRc`uN1 zIaML2lk?ji_~y{7LI}`ZSPDn$`)%3cm7MOV6ERFYQ)4CVgaxUf`saSpV0l~$;;1j>6g8notSFPk;!u} zXeS(yk^+0fXv@Iijmx%k+Mtf|p~LJK{kf#vkc&~u^*FdRN4hr04f}J**#>t5rMn() zAR|CxIZ)x2r|EHH@%GN_y4lK?9tKwWeLionuMW}#U_}Y2O$`VI`ZL0&;^B!Tq1xFQ ziClMuoYpi&-W*d1$DlSVAU;yhU1xRt@=99gUy^rm{uiM(G0Tp=w4FLq^8rItyQ?rq z*j;KoPEqP=J}O?mk}eYycD7>_Hg@Cj`1T%0Q;*|=-MKza{QW1x8!X?Y zQIV80wFXFg==Z^w3$%;lvj|B)slTa+95SW5J)z&@_>@K7=X_KmiXN%*+wU3egdV{) z)G*3xYuWy1{#3*bA$M$#wNSeSymZ_(nesk=Aif43pAID9eD#q9tY8a0%qbF?jJ46aUHxcee@iizm^~Bf%MXgB|n(( z`;Ca+dPh51#Nm57LQk9XvsN6~gB^kHlsnXD-JIAri+MBwL|hEyeAIM3ZhT#Ua$JJj z-D8o$1i5`+0}&=X@Li%^H-X9x>`}HB02y{j&9h}hhBsi}J(r}<7COJw5o(cTjW?Iw zqCvxQwAE7FPH&WT;q&;xA2o%;g&jsRzwMh}G z!Dqe~!Dd$}Tyc^0ydcAqmt-Rs5B{r=_KeV)h?Q2Y>%c7;iNSX#t>yA5eDzY9r|F(& z*)Zx}a)ZTdy9b+)5$L|K8D?SVBhl}hkE&Jz?IZ@L(08^Yk#53wLxdxJp!MHzB*Qt} zWN%pL!|3u6AK3Zn>dqO&ocD%2oY^U#tObl~9*`=tLDTgY4-59uuvGx0u*+wBN4vFca~QP85D<5k9b3DeRn1Zg-q6q zwYEg-&&4)tp8 z>e8&1DMsBJw|V25px}az?l1zbqRO|+kQHw8ZRTd4UyC@AlF)w#r|8X6T3RaOYVpXk zzNxwo7NOzu?)UATc3-xWBdgg&G%eyh%ttlR09DXuYA67`;NGdGH@;dVFjlghvXHJW zMd)>=>%f_)lC&@_Pf9)>>X9#DEPsjKQyyE_Kf5K3`ET37zK*+PVWT*s+!cBDR~($V zLvOh0RKOPAF0?-lG5tCCRq+SO4nRh!SX@n7SOU61-p&IksX!)g$jiz7xv>0|A)iMf z_Y60{?KNk3qkv?UWfj!MUsd*C&nnLwst_$s36*3>|P@#o^9DfdSfBF(ZSPcsJe&lwIH6L|YA;vJ&>(1Up-+{yV zv%dWE=Z_oxtp~bqh!=yc8jstQ3B&3TM;)pHW)0O`bSZrRwpnzVgl;K<(vHdx4xccJ zUCd5MNYL_e`Jg_S)9D}{_0Qxnb`k=f-j) zqIL;c2`rluNYfEe=aR7gLa&<)EWRM!hWJwx`x!w4m6NM? 
z05K_^lq)sd)SA<@TJtvHsafgq*wH80xQShQsEz%uJoQS2QfooeA$+69Vb< znTSKZSuY_JKUzFR++VGt@6ZP=W5V289U`Z$D+2f{>&#znqEG!k=Ep^l=Fis1yPLCR zEbnsRuTYz=F%#|g|IAZ9OFHdmHqGA4e*SJWs0HQS?twpOiCC+gUK;^7X%QDecJHqt zc$Np%y$BlY9Tk$><^KA3o8wFMvru!C!r4&u;yG-DK2?H!KNDT$%(?vJV*T6fHC-|z z0=g?fXTNG?eGr?oSToOagOWJ zAQPMr4*HwCsc!w&NS!GojQIEO-+G`sbhpxH))B9JR0tkofftM_6|N08Y2feA{RbZN z)38uWP|M!<{aI#acRR=qTIT?qsUt4NuWum%T&}Ns&xx}kB$9XJFWcY+vj5=wnIb*v zt>v%jAkFWsTG7@K?*lC=C%fswFf*H713=;IV2^9gqIW@1_-pH?6dNp|#{1bq_935P zp~BNSo!RJ<)p*=U&EH^eo~BOhZ7wi&UZ`>o>`Ul9kwr)p}7wtKJ{g&>*;1@ z94$a1&)&{PW#lZ<*%zs^bpZYdS5KRu$dI!VTr-Z$tZ3-gG~O))Hb0=WPX#RE`6u+q z76p^@mGimch&k8D5z&vPB-z)>A(WBrc?#E|!%L(Zm?{B?l@sghHDLGl=n@VFN`p6U^J|EeG%=se#swZ?0KUJ@e8L-byCsZ3< zzy_2(C(ejsZgPI!TaQiYZEMMxd9*Ky$)fw4PRL7!8046IS_9nxpH{0~pH6Gh)|DQe zjAJQ4IgYv(kNb;!zXJA!L^aZrZWcL|z9~*hP1Q;z^X8){)7WaqDLM~DVkO8+7wND)k>;3WLJNLL()jpdyvJCwlBESvhW-S8@vv zu2YT&xIFg;FnjMTDo5zI5FkP4%x?!0{Q07O@pF_Jb-P}_FhC9#K&XL;zOF7WPt^v6-dc9B=q-Nv|G65-r}SlbSs?-RWIgb&DiBQXo^G|a HQ}q7;A@Kf7 literal 0 HcmV?d00001 diff --git a/docs/html/topics/index.md b/docs/html/topics/index.md index 10eb5dddaee..5f4c49f0467 100644 --- a/docs/html/topics/index.md +++ b/docs/html/topics/index.md @@ -14,6 +14,7 @@ authentication caching configuration dependency-resolution +more-dependency-resolution repeatable-installs vcs-support ``` diff --git a/docs/html/topics/more-dependency-resolution.md b/docs/html/topics/more-dependency-resolution.md new file mode 100644 index 00000000000..e8609c986ad --- /dev/null +++ b/docs/html/topics/more-dependency-resolution.md @@ -0,0 +1,160 @@ +# More on Dependency Resolution + +This article goes into more detail about pip's dependency resolution algorithm. +In certain situations, pip can take a long time to determine what to install, +and this article is intended to help readers understand what is happening +"behind the scenes" during that process. 
+
+## The dependency resolution problem
+
+The process of finding a set of packages to install, given a set of dependencies
+between them, is known to be an [NP-hard](https://en.wikipedia.org/wiki/NP-hardness)
+problem. What this means in practice is roughly that the process scales
+*extremely* badly as the size of the problem increases. So when you have a lot
+of dependencies, working out what to install will, in the worst case, take a
+very long time.
+
+The practical implication of that is that there will always be some situations
+where pip cannot determine what to install in a reasonable length of time. We
+make every effort to ensure that such situations happen rarely, but eliminating
+them altogether isn't even theoretically possible. We'll discuss what options
+you have if you hit a problem situation like this a little later.
+
+## Python specific issues
+
+Many algorithms for handling dependency resolution assume that you know the
+full details of the problem at the start - that is, you know all of the
+dependencies up front. Unfortunately, that is not the case for Python packages.
+With the current package index structure, dependency metadata is only available
+by downloading the package file, and extracting the data from it. And in the
+case of source distributions, the situation is even worse as the project must
+be built after being downloaded in order to determine the dependencies.
+
+Work is ongoing to try to make metadata more readily available at lower cost,
+but at the time of writing, this has not been completed.
+
+As downloading projects is a costly operation, pip cannot pre-compute the full
+dependency tree. This means that we are unable to use a number of techniques
+for solving the dependency resolution problem. In practice, we have to use a
+*backtracking algorithm*.
+
+## Dependency metadata
+
+It is worth discussing precisely what metadata is needed in order to drive the
+package resolution process. 
There are essentially three key pieces of +information: + +* The project name +* The release version +* The dependencies themselves + +There are other pieces of data (e.g., extras, python version restrictions, wheel +compatibility tags) which are used as well, but they do not fundamentally +alter the process, so we will ignore them here. + +The most important information is the project name and version. Those two pieces +of information identify an individual "candidate" for installation, and must +uniquely identify such a candidate. Name and version must be available from the +moment the candidate object is created. This is not an issue for distribution +files (sdists and wheels) as that data is available from the filename, but for +unpackaged source trees, pip needs to call the build backend to ask for that +data. This is done before resolution proper starts. + +The dependency data is *not* requested in advance (as noted above, doing so +would be prohibitively costly, and for a backtracking algorithm it isn't +needed). Instead, pip requests dependency data "on demand", as the algorithm +starts to check that particular candidate. + +One particular implication of the lazy fetching of dependency data is that +often, pip *does not know* things that might be obvious to a human looking at +the dependency tree as a whole. For example, if package A depends on version +1.0 of package B, it's obvious to a human that there's no point in looking at +other versions of package B. But if pip starts looking at B before it has +considered A, it doesn't have access to A's dependency data, and so has no way +of knowing that looking at other versions of B is wasted work. And worse still, +pip cannot even know that there's vital information in A's dependencies. + +This latter point is a common theme with many cases where pip takes a long time +to complete a resolution - there's information pip doesn't know at the point +where it makes a "wrong" choice. 
Most of the heuristics added to the resolver
+to guide the algorithm are designed to guess correctly in the face of that
+lack of knowledge.
+
+## The resolver and the finder
+
+So far, we have been talking about the "resolver" as a single entity. While that
+is mostly true, the process of getting package data from an index is handled
+by another component of pip, the "finder". The finder is responsible for
+feeding candidates to the resolver, and has a key role to play in selecting
+suitable candidates.
+
+Note that the finder is *only* relevant for packages fetched from an index.
+Candidates coming from other sources (local source directories, PEP 508
+direct URL references) do *not* go through the finder, and are merged with the
+candidates provided by the finder as part of the resolver's "provider"
+implementation.
+
+As well as determining what versions exist in the index for a given project,
+the finder selects the best distribution file to use for that candidate. This
+may be a wheel or a source distribution, and precisely what is selected is
+controlled by wheel compatibility tags, pip's options (whether to prefer binary
+or source) and metadata supplied by the index. In particular, if a file is
+marked as only being for specific Python versions, the file will be ignored by
+the finder (and the resolver may never even see that version).
+
+The finder also provides candidates for a project to the resolver in order of
+preference - the provider implements the rule that later versions are preferred
+over older versions, for example.
+
+## The resolver algorithm
+
+The resolver itself is based on a separate package, [resolvelib](https://pypi.org/project/resolvelib/).
+This implements an abstract backtracking resolution algorithm, in a way that is
+independent of the specifics of Python packages - those specifics are abstracted
+away by pip before calling the resolver. 
+
+Pip's interface to resolvelib is in the form of a "provider", which is the
+interface between pip's model of packages and the resolution algorithm. The
+provider deals in "candidates" and "requirements" and implements the following
+operations:
+
+* `identify` - implements identity for candidates and requirements. It is this
+  operation that implements the rule that candidates are identified by their
+  name and version, for example.
+* `get_preference` - this provides information to the resolver to help it choose
+  which requirement to look at "next" when working through the resolution
+  process.
+* `find_matches` - given a set of constraints, determine what candidates exist
+  that satisfy them. This is essentially where the finder interacts with the
+  resolver.
+* `is_satisfied_by` - checks if a candidate satisfies a requirement. This is
+  basically the implementation of what a requirement means.
+* `get_dependencies` - get the dependency metadata for a candidate. This is
+  the implementation of the process of getting and reading package metadata.
+
+Of these methods, the only non-trivial one is the `get_preference` method. This
+implements the heuristics used to guide the resolution, telling it which
+requirement to try to satisfy next. It's this method that is responsible for
+trying to guess which route through the dependency tree will be most productive.
+As noted above, it's doing this with limited information. See the following
+diagram:
+
+![](deps.png)
+
+When the provider is asked to choose between the red requirements (A->B and
+A->C) it doesn't know anything about the dependencies of B or C (i.e., the
+grey parts of the graph).
+
+Pip's current implementation of the provider implements `get_preference` as
+follows:
+
+* Prefer if any of the known requirements is "direct", e.g. points to an
+  explicit URL.
+* If equal, prefer if any requirement is "pinned", i.e. contains
+  operator ``===`` or ``==``. 
+* If equal, calculate an approximate "depth" and resolve requirements + closer to the user-specified requirements first. +* Order user-specified requirements by the order they are specified. +* If equal, prefers "non-free" requirements, i.e. contains at least one + operator, such as ``>=`` or ``<``. +* If equal, order alphabetically for consistency (helps debuggability). From ece68dae568d0b73520a94bfdbcbfcf6c0a92b46 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 31 Jul 2021 12:30:05 +0100 Subject: [PATCH 002/730] Add new docs filetypes to manifest.in --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index f9b15403e84..266064db67e 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -24,6 +24,7 @@ exclude noxfile.py recursive-include src/pip/_vendor *.pem recursive-include src/pip/_vendor py.typed recursive-include docs *.css *.py *.rst *.md +recursive-include docs *.dot *.png exclude src/pip/_vendor/six exclude src/pip/_vendor/six/moves From ebee1cecf8a27fea74c29fb9323ccca2d61bd4e3 Mon Sep 17 00:00:00 2001 From: Pieter Degroote Date: Sun, 17 Oct 2021 19:19:52 +0200 Subject: [PATCH 003/730] Improve error message when egg-link does not match installed location Include the locations of the mismatched locations in the message, to provide more context. 
--- src/pip/_internal/req/req_uninstall.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 779e93b44af..b135017907b 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -530,10 +530,11 @@ def from_dist(cls, dist: Distribution) -> "UninstallPathSet": # develop egg with open(develop_egg_link) as fh: link_pointer = os.path.normcase(fh.readline().strip()) - assert ( - link_pointer == dist.location - ), "Egg-link {} does not match installed location of {} (at {})".format( - link_pointer, dist.project_name, dist.location + assert link_pointer == dist.location, ( + "Egg-link located at {} and pointing to {} does not match " + "installed location of {} at {}".format( + develop_egg_link, link_pointer, dist.project_name, dist.location + ) ) paths_to_remove.add(develop_egg_link) easy_install_pth = os.path.join( From ae9c0fd8a8cf42f70534ae7ec4ae865735389eb8 Mon Sep 17 00:00:00 2001 From: Pieter Degroote Date: Wed, 20 Oct 2021 20:24:33 +0200 Subject: [PATCH 004/730] Add news entry for improved error message Co-authored-by: Pradyun Gedam Co-authored-by: Tzu-ping Chung --- news/10476.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/10476.feature.rst diff --git a/news/10476.feature.rst b/news/10476.feature.rst new file mode 100644 index 00000000000..7c2757771a7 --- /dev/null +++ b/news/10476.feature.rst @@ -0,0 +1 @@ +Specify egg-link location in assertion message when it does not match installed location to provide better error message for debugging. 
From a57668ef12da8151a58e2438c23130a376265c37 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 11 Jul 2022 16:26:24 +0100 Subject: [PATCH 005/730] Add an option to the test suite to specify a zipapp to test --- tests/conftest.py | 16 +++++++++++++--- tests/lib/__init__.py | 9 ++++++++- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 1cf058d7000..096ef2c898a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -84,6 +84,12 @@ def pytest_addoption(parser: Parser) -> None: default=None, help="use given proxy in session network tests", ) + parser.addoption( + "--use-zipapp", + action="store", + default=None, + help="use given pip zipapp when running pip in tests", + ) def pytest_collection_modifyitems(config: Config, items: List[pytest.Function]) -> None: @@ -487,7 +493,7 @@ def with_wheel(virtualenv: VirtualEnvironment, wheel_install: Path) -> None: class ScriptFactory(Protocol): def __call__( - self, tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None + self, tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None, zipapp: Optional[str] = None ) -> PipTestEnvironment: ... 
@@ -497,7 +503,7 @@ def script_factory( virtualenv_factory: Callable[[Path], VirtualEnvironment], deprecated_python: bool ) -> ScriptFactory: def factory( - tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None + tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None, zipapp: Optional[str] = None, ) -> PipTestEnvironment: if virtualenv is None: virtualenv = virtualenv_factory(tmpdir.joinpath("venv")) @@ -516,6 +522,8 @@ def factory( assert_no_temp=True, # Deprecated python versions produce an extra deprecation warning pip_expect_warning=deprecated_python, + # Tell the Test Environment if we want to run pip via a zipapp + zipapp=zipapp, ) return factory @@ -523,6 +531,7 @@ def factory( @pytest.fixture def script( + request: pytest.FixtureRequest, tmpdir: Path, virtualenv: VirtualEnvironment, script_factory: ScriptFactory, @@ -533,7 +542,8 @@ def script( test function. The returned object is a ``tests.lib.PipTestEnvironment``. """ - return script_factory(tmpdir.joinpath("workspace"), virtualenv) + zipapp = request.config.getoption("--use-zipapp") + return script_factory(tmpdir.joinpath("workspace"), virtualenv, zipapp) @pytest.fixture(scope="session") diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 43624c16614..2750a552b6e 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -507,6 +507,7 @@ def __init__( *args: Any, virtualenv: VirtualEnvironment, pip_expect_warning: bool = False, + zipapp: Optional[str] = None, **kwargs: Any, ) -> None: # Store paths related to the virtual environment @@ -553,6 +554,9 @@ def __init__( # (useful for Python version deprecation) self.pip_expect_warning = pip_expect_warning + # The name of an (optional) zipapp to use when running pip + self.zipapp = zipapp + # Call the TestFileEnvironment __init__ super().__init__(base_path, *args, **kwargs) @@ -698,7 +702,10 @@ def pip( __tracebackhide__ = True if self.pip_expect_warning: kwargs["allow_stderr_warning"] = True - if use_module: + if 
self.zipapp: + exe = "python" + args = (self.zipapp, ) + args + elif use_module: exe = "python" args = ("-m", "pip") + args else: From ef999f4c7668339a8772f75aab67c7e286673493 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 11 Jul 2022 17:18:21 +0100 Subject: [PATCH 006/730] Ignore temporary extracted copies of cacert.pem when testing with a zipapp --- tests/lib/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 2750a552b6e..a8d74b6d44c 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -591,6 +591,10 @@ def __init__( def _ignore_file(self, fn: str) -> bool: if fn.endswith("__pycache__") or fn.endswith(".pyc"): result = True + elif self.zipapp and fn.endswith("cacert.pem"): + # Temporary copies of cacert.pem are extracted + # when running from a zipapp + result = True else: result = super()._ignore_file(fn) return result From 9a51fc8e0c58e8d93f33141d9c1e5e154ebedd51 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 11 Jul 2022 20:01:26 +0100 Subject: [PATCH 007/730] Make the zipapp in a fixture --- tests/conftest.py | 38 ++++++++++++++++++++++++++++++-------- 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 096ef2c898a..0cb047625dc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -86,9 +86,9 @@ def pytest_addoption(parser: Parser) -> None: ) parser.addoption( "--use-zipapp", - action="store", - default=None, - help="use given pip zipapp when running pip in tests", + action="store_true", + default=False, + help="use a zipapp when running pip in tests", ) @@ -493,17 +493,17 @@ def with_wheel(virtualenv: VirtualEnvironment, wheel_install: Path) -> None: class ScriptFactory(Protocol): def __call__( - self, tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None, zipapp: Optional[str] = None + self, tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None ) -> PipTestEnvironment: ... 
@pytest.fixture(scope="session") def script_factory( - virtualenv_factory: Callable[[Path], VirtualEnvironment], deprecated_python: bool + virtualenv_factory: Callable[[Path], VirtualEnvironment], deprecated_python: bool, zipapp: Optional[str] ) -> ScriptFactory: def factory( - tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None, zipapp: Optional[str] = None, + tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None, ) -> PipTestEnvironment: if virtualenv is None: virtualenv = virtualenv_factory(tmpdir.joinpath("venv")) @@ -529,6 +529,29 @@ def factory( return factory +@pytest.fixture(scope="session") +def zipapp(request: pytest.FixtureRequest, tmpdir_factory: pytest.TempPathFactory) -> Optional[str]: + """ + If the user requested for pip to be run from a zipapp, build that zipapp + and return its location. If the user didn't request a zipapp, return None. + + This fixture is session scoped, so the zipapp will only be created once. + """ + if not request.config.getoption("--use-zipapp"): + return None + + temp_location = tmpdir_factory.mktemp("zipapp") + pyz_file = temp_location / "pip.pyz" + # What we want to do here is `pip wheel --wheel-dir temp_location ` + # and then build a zipapp from that wheel. + # TODO: Remove hard coded file + za = "pip-22.2.dev0.pyz" + import warnings + warnings.warn(f"Copying {za} to {pyz_file}") + shutil.copyfile(za, pyz_file) + return str(pyz_file) + + @pytest.fixture def script( request: pytest.FixtureRequest, @@ -542,8 +565,7 @@ def script( test function. The returned object is a ``tests.lib.PipTestEnvironment``. 
""" - zipapp = request.config.getoption("--use-zipapp") - return script_factory(tmpdir.joinpath("workspace"), virtualenv, zipapp) + return script_factory(tmpdir.joinpath("workspace"), virtualenv) @pytest.fixture(scope="session") From b84e5f3d9976241400e56b83b40a5c4e4c40294c Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 11 Jul 2022 23:52:44 +0100 Subject: [PATCH 008/730] Actually build the zipapp --- tests/conftest.py | 39 ++++++++++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 0cb047625dc..aff7390f6e8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -20,6 +20,7 @@ Union, ) from unittest.mock import patch +from zipfile import ZipFile import pytest @@ -32,6 +33,7 @@ from _pytest.config.argparsing import Parser from setuptools.wheel import Wheel +from pip import __file__ as pip_location from pip._internal.cli.main import main as pip_entry_point from pip._internal.locations import _USE_SYSCONFIG from pip._internal.utils.temp_dir import global_tempdir_manager @@ -529,6 +531,35 @@ def factory( return factory +ZIPAPP_MAIN = """\ +#!/usr/bin/env python + +import os +import runpy +import sys + +lib = os.path.join(os.path.dirname(__file__), "lib") +sys.path.insert(0, lib) + +runpy.run_module("pip", run_name="__main__") +""" + +def make_zipapp_from_pip(zipapp_name: Path) -> None: + pip_dir = Path(pip_location).parent + with zipapp_name.open("wb") as zipapp_file: + zipapp_file.write(b"#!/usr/bin/env python\n") + with ZipFile(zipapp_file, "w") as zipapp: + for pip_file in pip_dir.rglob("*"): + if pip_file.suffix == ".pyc": + continue + if pip_file.name == "__pycache__": + continue + rel_name = pip_file.relative_to(pip_dir.parent) + zipapp.write(pip_file, arcname=f"lib/{rel_name}") + zipapp.writestr("__main__.py", ZIPAPP_MAIN) + + + @pytest.fixture(scope="session") def zipapp(request: pytest.FixtureRequest, tmpdir_factory: pytest.TempPathFactory) -> Optional[str]: """ @@ 
-542,13 +573,7 @@ def zipapp(request: pytest.FixtureRequest, tmpdir_factory: pytest.TempPathFactor temp_location = tmpdir_factory.mktemp("zipapp") pyz_file = temp_location / "pip.pyz" - # What we want to do here is `pip wheel --wheel-dir temp_location ` - # and then build a zipapp from that wheel. - # TODO: Remove hard coded file - za = "pip-22.2.dev0.pyz" - import warnings - warnings.warn(f"Copying {za} to {pyz_file}") - shutil.copyfile(za, pyz_file) + make_zipapp_from_pip(pyz_file) return str(pyz_file) From c7e7e426cb2a53127bae11492590f883db1779f4 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 12 Jul 2022 09:02:11 +0100 Subject: [PATCH 009/730] Apply black --- tests/conftest.py | 13 +++++++++---- tests/lib/__init__.py | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index aff7390f6e8..0523bdc20a3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -502,10 +502,13 @@ def __call__( @pytest.fixture(scope="session") def script_factory( - virtualenv_factory: Callable[[Path], VirtualEnvironment], deprecated_python: bool, zipapp: Optional[str] + virtualenv_factory: Callable[[Path], VirtualEnvironment], + deprecated_python: bool, + zipapp: Optional[str], ) -> ScriptFactory: def factory( - tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None, + tmpdir: Path, + virtualenv: Optional[VirtualEnvironment] = None, ) -> PipTestEnvironment: if virtualenv is None: virtualenv = virtualenv_factory(tmpdir.joinpath("venv")) @@ -544,6 +547,7 @@ def factory( runpy.run_module("pip", run_name="__main__") """ + def make_zipapp_from_pip(zipapp_name: Path) -> None: pip_dir = Path(pip_location).parent with zipapp_name.open("wb") as zipapp_file: @@ -559,9 +563,10 @@ def make_zipapp_from_pip(zipapp_name: Path) -> None: zipapp.writestr("__main__.py", ZIPAPP_MAIN) - @pytest.fixture(scope="session") -def zipapp(request: pytest.FixtureRequest, tmpdir_factory: pytest.TempPathFactory) -> Optional[str]: +def zipapp( + 
request: pytest.FixtureRequest, tmpdir_factory: pytest.TempPathFactory +) -> Optional[str]: """ If the user requested for pip to be run from a zipapp, build that zipapp and return its location. If the user didn't request a zipapp, return None. diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index a8d74b6d44c..be3e4c36e9a 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -708,7 +708,7 @@ def pip( kwargs["allow_stderr_warning"] = True if self.zipapp: exe = "python" - args = (self.zipapp, ) + args + args = (self.zipapp,) + args elif use_module: exe = "python" args = ("-m", "pip") + args From ea2318fbf9857834b2cc68dac960bc1536875733 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 12 Jul 2022 10:12:17 +0100 Subject: [PATCH 010/730] Minor zipapp-related fixes and skips for some tests --- tests/functional/test_cli.py | 3 +++ tests/functional/test_completion.py | 7 ++++++- tests/lib/test_lib.py | 4 ++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_cli.py b/tests/functional/test_cli.py index 3e8570359bb..a1b69b72106 100644 --- a/tests/functional/test_cli.py +++ b/tests/functional/test_cli.py @@ -16,6 +16,9 @@ ], ) def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None: + if script.zipapp: + pytest.skip("Zipapp does not include entrypoints") + fake_pkg = script.temp_path / "fake_pkg" fake_pkg.mkdir() fake_pkg.joinpath("setup.py").write_text( diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py index df4afab74b8..b02cd4fa317 100644 --- a/tests/functional/test_completion.py +++ b/tests/functional/test_completion.py @@ -107,7 +107,12 @@ def test_completion_for_supported_shells( Test getting completion for bash shell """ result = script_with_launchers.pip("completion", "--" + shell, use_module=False) - assert completion in result.stdout, str(result.stdout) + actual = str(result.stdout) + if script_with_launchers.zipapp: + # The zipapp reports its name 
as "pip.pyz", but the expected + # output assumes "pip" + actual = actual.replace("pip.pyz", "pip") + assert completion in actual, actual @pytest.fixture(scope="session") diff --git a/tests/lib/test_lib.py b/tests/lib/test_lib.py index 99514d5f92c..ea9baed54d3 100644 --- a/tests/lib/test_lib.py +++ b/tests/lib/test_lib.py @@ -41,6 +41,10 @@ def test_correct_pip_version(script: PipTestEnvironment) -> None: """ Check we are running proper version of pip in run_pip. """ + + if script.zipapp: + pytest.skip("Test relies on the pip under test being in the filesystem") + # output is like: # pip PIPVERSION from PIPDIRECTORY (python PYVERSION) result = script.pip("--version") From f7240d8691ee99cab6a77da13a7e43c717f6eab3 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 12 Jul 2022 10:27:52 +0100 Subject: [PATCH 011/730] Add a news file --- news/11250.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11250.feature.rst diff --git a/news/11250.feature.rst b/news/11250.feature.rst new file mode 100644 index 00000000000..a80c54699c8 --- /dev/null +++ b/news/11250.feature.rst @@ -0,0 +1 @@ +Add an option to run the test suite with pip built as a zipapp. 
From 81e813ac7948e9b3af7e0f3b3555405652dc7963 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 12 Jul 2022 10:28:34 +0100 Subject: [PATCH 012/730] Add testing with pip built as a zipapp to the CI --- .github/workflows/ci.yml | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e467b3e50b1..439b6fabb52 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -219,6 +219,35 @@ jobs: env: TEMP: "R:\\Temp" + tests-zipapp: + name: tests / zipapp + runs-on: ubuntu-latest + + needs: [pre-commit, packaging, determine-changes] + if: >- + needs.determine-changes.outputs.tests == 'true' || + github.event_name != 'pull_request' + + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.10" + + - name: Install Ubuntu dependencies + run: sudo apt-get install bzr + + - run: pip install nox 'virtualenv<20' 'setuptools != 60.6.0' + + # Main check + - name: Run integration tests + run: >- + nox -s test-3.10 -- + -m integration + --verbose --numprocesses auto --showlocals + --durations=5 + --use-zipapp + # TODO: Remove this when we add Python 3.11 to CI. tests-importlib-metadata: name: tests for importlib.metadata backend From 8f0d16e267f531f6d7055746ba2dd6febbca79a0 Mon Sep 17 00:00:00 2001 From: Kai Mueller Date: Wed, 13 Jul 2022 10:13:27 +0000 Subject: [PATCH 013/730] Clarify difference between pip-wheel and build Closes #11235 --- docs/html/cli/pip_wheel.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/html/cli/pip_wheel.rst b/docs/html/cli/pip_wheel.rst index 153d6925d4e..b78f08f0747 100644 --- a/docs/html/cli/pip_wheel.rst +++ b/docs/html/cli/pip_wheel.rst @@ -30,6 +30,15 @@ Description This is now covered in :doc:`../reference/build-system/index`. 
+Differences to `build` +---------------------- + +`build `_ is a simple tool which can among other things build +wheels for projects using PEP 517 +``pip wheel`` can do the same but also supports projects not using PEP 517. +In addition, it's e.g. also possible to include the dependencies of a project into the wheel. + + Options ======= From fcda0edff5e86f1f481115774a230245800b65a4 Mon Sep 17 00:00:00 2001 From: Federico Date: Mon, 18 Jul 2022 17:32:52 +0200 Subject: [PATCH 014/730] Suggest disabling pip cache in containers When building containers (like docker or podman) the layer system already handles the caching. Not disabling pip's cache could result in the duplication of the size of the images. I know this sujbject is not specially relevant to the documentation, so the comment is fairly small. Duplication of pip's cache is a recurrent problem in many Python images --- docs/html/topics/caching.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/topics/caching.md b/docs/html/topics/caching.md index 929ac3541df..08e4acba32c 100644 --- a/docs/html/topics/caching.md +++ b/docs/html/topics/caching.md @@ -140,6 +140,6 @@ The {ref}`pip cache` command can be used to manage pip's cache. pip's caching behaviour is disabled by passing the `--no-cache-dir` option. -It is, however, recommended to **NOT** disable pip's caching. Doing so can +It is, however, recommended to **NOT** disable pip's caching (except for building containerized appplications). Doing so can significantly slow down pip (due to repeated operations and package builds) and result in significantly more network usage. 
From 97abdbc040d878b4962e5bc9fc2d85692a42c87e Mon Sep 17 00:00:00 2001 From: Federico Date: Tue, 19 Jul 2022 10:26:06 +0200 Subject: [PATCH 015/730] Do not suggest caching if higher level caching Co-authored-by: Pradyun Gedam --- docs/html/topics/caching.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/topics/caching.md b/docs/html/topics/caching.md index 08e4acba32c..2ce7d6e0044 100644 --- a/docs/html/topics/caching.md +++ b/docs/html/topics/caching.md @@ -140,6 +140,6 @@ The {ref}`pip cache` command can be used to manage pip's cache. pip's caching behaviour is disabled by passing the `--no-cache-dir` option. -It is, however, recommended to **NOT** disable pip's caching (except for building containerized appplications). Doing so can +It is, however, recommended to **NOT** disable pip's caching unless you have caching at a higher level (eg: layered caches in container builds). Doing so can significantly slow down pip (due to repeated operations and package builds) and result in significantly more network usage. From 3fda91290bb0c6c2e00556812e073f53d6f83608 Mon Sep 17 00:00:00 2001 From: Kai Mueller Date: Tue, 19 Jul 2022 13:37:58 +0000 Subject: [PATCH 016/730] fix --- docs/html/cli/pip_wheel.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docs/html/cli/pip_wheel.rst b/docs/html/cli/pip_wheel.rst index b78f08f0747..82dd970aacb 100644 --- a/docs/html/cli/pip_wheel.rst +++ b/docs/html/cli/pip_wheel.rst @@ -34,10 +34,8 @@ Differences to `build` ---------------------- `build `_ is a simple tool which can among other things build -wheels for projects using PEP 517 -``pip wheel`` can do the same but also supports projects not using PEP 517. -In addition, it's e.g. also possible to include the dependencies of a project into the wheel. - +wheels for projects using PEP 517. It is comparable to the execution of ``pip wheel --no-deps .``. 
+``pip wheel`` coveres the wheel scope of ``build`` but offers many additional features. Options ======= From 271ed7bb7a0b137c4f5e22ab542847607abcb2be Mon Sep 17 00:00:00 2001 From: Kai Mueller Date: Tue, 19 Jul 2022 14:18:16 +0000 Subject: [PATCH 017/730] fix2 --- docs/html/cli/pip_wheel.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/cli/pip_wheel.rst b/docs/html/cli/pip_wheel.rst index 82dd970aacb..d645f29cd2a 100644 --- a/docs/html/cli/pip_wheel.rst +++ b/docs/html/cli/pip_wheel.rst @@ -35,7 +35,7 @@ Differences to `build` `build `_ is a simple tool which can among other things build wheels for projects using PEP 517. It is comparable to the execution of ``pip wheel --no-deps .``. -``pip wheel`` coveres the wheel scope of ``build`` but offers many additional features. +``pip wheel`` covers the wheel scope of ``build`` but offers many additional features. Options ======= From 5d7a1a68c7feb75136a0fd120de54b85df105bac Mon Sep 17 00:00:00 2001 From: Klaas van Schelven Date: Wed, 20 Jul 2022 15:55:17 +0200 Subject: [PATCH 018/730] Respect --no-index from the requirements file See #11276 SearchScope was extended with an extra parameter to be able to pass-on the value of no_index as we do with the other parameters. This allows us to respect its value regardless of the order in which options are evaluated. 
--- src/pip/_internal/index/collector.py | 1 + src/pip/_internal/models/search_scope.py | 6 +++++- src/pip/_internal/req/req_file.py | 9 ++++++--- tests/functional/test_build_env.py | 2 +- tests/lib/__init__.py | 6 +++++- tests/unit/resolution_resolvelib/conftest.py | 2 +- tests/unit/test_index.py | 18 +++++++++--------- tests/unit/test_search_scope.py | 2 ++ 8 files changed, 30 insertions(+), 16 deletions(-) diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index 6e5dac5ad3c..0291d54f7cf 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -558,6 +558,7 @@ def create( search_scope = SearchScope.create( find_links=find_links, index_urls=index_urls, + no_index=options.no_index, ) link_collector = LinkCollector( session=session, diff --git a/src/pip/_internal/models/search_scope.py b/src/pip/_internal/models/search_scope.py index e4e54c2f4c6..a64af73899d 100644 --- a/src/pip/_internal/models/search_scope.py +++ b/src/pip/_internal/models/search_scope.py @@ -20,13 +20,14 @@ class SearchScope: Encapsulates the locations that pip is configured to search. """ - __slots__ = ["find_links", "index_urls"] + __slots__ = ["find_links", "index_urls", "no_index"] @classmethod def create( cls, find_links: List[str], index_urls: List[str], + no_index: bool, ) -> "SearchScope": """ Create a SearchScope object after normalizing the `find_links`. 
@@ -60,15 +61,18 @@ def create( return cls( find_links=built_find_links, index_urls=index_urls, + no_index=no_index, ) def __init__( self, find_links: List[str], index_urls: List[str], + no_index: bool, ) -> None: self.find_links = find_links self.index_urls = index_urls + self.no_index = no_index def get_formatted_locations(self) -> str: lines = [] diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index 4550c72d607..06ea6f277aa 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -229,11 +229,13 @@ def handle_option_line( if finder: find_links = finder.find_links index_urls = finder.index_urls - if opts.index_url: - index_urls = [opts.index_url] + no_index = finder.search_scope.no_index if opts.no_index is True: + no_index = True index_urls = [] - if opts.extra_index_urls: + if opts.index_url and not no_index: + index_urls = [opts.index_url] + if opts.extra_index_urls and not no_index: index_urls.extend(opts.extra_index_urls) if opts.find_links: # FIXME: it would be nice to keep track of the source @@ -253,6 +255,7 @@ def handle_option_line( search_scope = SearchScope( find_links=find_links, index_urls=index_urls, + no_index=no_index, ) finder.search_scope = search_scope diff --git a/tests/functional/test_build_env.py b/tests/functional/test_build_env.py index 6936246183c..437adb99570 100644 --- a/tests/functional/test_build_env.py +++ b/tests/functional/test_build_env.py @@ -41,7 +41,7 @@ def run_with_build_env( link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope.create([{scratch!r}], []), + search_scope=SearchScope.create([{scratch!r}], [], False), ) selection_prefs = SelectionPreferences( allow_yanked=True, diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 8774d8bc144..d30deced1b2 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -87,7 +87,11 @@ def make_test_search_scope( if index_urls is None: index_urls = [] - return 
SearchScope.create(find_links=find_links, index_urls=index_urls) + return SearchScope.create( + find_links=find_links, + index_urls=index_urls, + no_index=False, + ) def make_test_link_collector( diff --git a/tests/unit/resolution_resolvelib/conftest.py b/tests/unit/resolution_resolvelib/conftest.py index 9ef9f8c5c18..a4ee32444e2 100644 --- a/tests/unit/resolution_resolvelib/conftest.py +++ b/tests/unit/resolution_resolvelib/conftest.py @@ -23,7 +23,7 @@ @pytest.fixture def finder(data: TestData) -> Iterator[PackageFinder]: session = PipSession() - scope = SearchScope([str(data.packages)], []) + scope = SearchScope([str(data.packages)], [], False) collector = LinkCollector(session, scope) prefs = SelectionPreferences(allow_yanked=False) finder = PackageFinder.create(collector, prefs) diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index cd3c748b7aa..78837b94e8b 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -593,7 +593,7 @@ def test_create__candidate_prefs( """ link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) selection_prefs = SelectionPreferences( allow_yanked=True, @@ -614,7 +614,7 @@ def test_create__link_collector(self) -> None: """ link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) finder = PackageFinder.create( link_collector=link_collector, @@ -629,7 +629,7 @@ def test_create__target_python(self) -> None: """ link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) target_python = TargetPython(py_version_info=(3, 7, 3)) finder = PackageFinder.create( @@ -649,7 +649,7 @@ def test_create__target_python_none(self) -> None: """ link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) finder = 
PackageFinder.create( link_collector=link_collector, @@ -668,7 +668,7 @@ def test_create__allow_yanked(self, allow_yanked: bool) -> None: """ link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) selection_prefs = SelectionPreferences(allow_yanked=allow_yanked) finder = PackageFinder.create( @@ -684,7 +684,7 @@ def test_create__ignore_requires_python(self, ignore_requires_python: bool) -> N """ link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) selection_prefs = SelectionPreferences( allow_yanked=True, @@ -702,7 +702,7 @@ def test_create__format_control(self) -> None: """ link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) format_control = FormatControl(set(), {":all:"}) selection_prefs = SelectionPreferences( @@ -743,7 +743,7 @@ def test_make_link_evaluator( link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) finder = PackageFinder( @@ -793,7 +793,7 @@ def test_make_candidate_evaluator( ) link_collector = LinkCollector( session=PipSession(), - search_scope=SearchScope([], []), + search_scope=SearchScope([], [], False), ) finder = PackageFinder( link_collector=link_collector, diff --git a/tests/unit/test_search_scope.py b/tests/unit/test_search_scope.py index ef21c10b820..d8128341659 100644 --- a/tests/unit/test_search_scope.py +++ b/tests/unit/test_search_scope.py @@ -16,6 +16,7 @@ def test_get_formatted_locations_basic_auth(self) -> None: search_scope = SearchScope( find_links=find_links, index_urls=index_urls, + no_index=False, ) result = search_scope.get_formatted_locations() @@ -29,6 +30,7 @@ def test_get_index_urls_locations(self) -> None: search_scope = SearchScope( find_links=[], index_urls=["file://index1/", "file://index2"], + 
no_index=False, ) req = install_req_from_line("Complex_Name") assert req.name is not None From 58c05735eaa773c6f5f2c344f90185131ee82f5c Mon Sep 17 00:00:00 2001 From: Klaas van Schelven Date: Wed, 20 Jul 2022 16:01:07 +0200 Subject: [PATCH 019/730] Add news entry --- news/11276.bugfix.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 news/11276.bugfix.rst diff --git a/news/11276.bugfix.rst b/news/11276.bugfix.rst new file mode 100644 index 00000000000..af8f518bef4 --- /dev/null +++ b/news/11276.bugfix.rst @@ -0,0 +1,2 @@ +Fix ``--no-index`` when ``--index-url`` or ``--extra-index-url`` is specified +inside a requirements file. From 2ec509728148d4abedb59c764ea1f4d90e02cb05 Mon Sep 17 00:00:00 2001 From: Klaas van Schelven Date: Wed, 27 Jul 2022 11:48:44 +0200 Subject: [PATCH 020/730] Add a test for 'respect --no-index' See #11276 --- tests/unit/test_req_file.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 8928fd1690f..1365e158415 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -395,6 +395,13 @@ def test_set_finder_no_index( line_processor("--no-index", "file", 1, finder=finder) assert finder.index_urls == [] + def test_set_finder_no_index_is_remembered_for_later_invocations( + self, line_processor: LineProcessor, finder: PackageFinder + ) -> None: + line_processor("--no-index", "file", 1, finder=finder) + line_processor("--index-url=url", "file", 1, finder=finder) + assert finder.index_urls == [] + def test_set_finder_index_url( self, line_processor: LineProcessor, finder: PackageFinder, session: PipSession ) -> None: From c88036882242578f464cef159ea3205c350a929a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 27 Jul 2022 19:18:15 +0200 Subject: [PATCH 021/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 
2451926bc52..a40148f008f 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "22.2.1" +__version__ = "22.3.dev0" def main(args: Optional[List[str]] = None) -> int: From 278141678e8bac4633a0589db83f13e17ebae6d7 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Thu, 28 Jul 2022 07:36:02 +0300 Subject: [PATCH 022/730] Check if binary_executable exists --- src/pip/_internal/utils/entrypoints.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/utils/entrypoints.py b/src/pip/_internal/utils/entrypoints.py index f292c64045b..15013693854 100644 --- a/src/pip/_internal/utils/entrypoints.py +++ b/src/pip/_internal/utils/entrypoints.py @@ -55,9 +55,14 @@ def get_best_invocation_for_this_pip() -> str: if exe_are_in_PATH: for exe_name in _EXECUTABLE_NAMES: found_executable = shutil.which(exe_name) - if found_executable and os.path.samefile( - found_executable, - os.path.join(binary_prefix, exe_name), + binary_executable = os.path.join(binary_prefix, exe_name) + if ( + found_executable + and os.path.exists(binary_executable) + and os.path.samefile( + found_executable, + binary_executable, + ) ): return exe_name From ee6c7caabdfcd0bddd9b92d05cddd8b6be7cbe10 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Thu, 28 Jul 2022 11:30:54 +0100 Subject: [PATCH 023/730] Fix test_runner_work_in_environments_with_no_pip to work under --use-zipapp --- tests/functional/test_pip_runner_script.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_pip_runner_script.py b/tests/functional/test_pip_runner_script.py index 26016d45a08..f2f879b824d 100644 --- a/tests/functional/test_pip_runner_script.py +++ b/tests/functional/test_pip_runner_script.py @@ -12,7 +12,9 @@ def test_runner_work_in_environments_with_no_pip( # Ensure there's no pip installed in the environment script.pip("uninstall", "pip", "--yes", 
use_module=True) - script.pip("--version", expect_error=True) + # We don't use script.pip to check here, as when testing a + # zipapp, script.pip will run pip from the zipapp. + script.run("python", "-c", "import pip", expect_error=True) # The runner script should still invoke a usable pip result = script.run("python", os.fspath(runner), "--version") From 79cd5998aa617f772e6b905177c93cb1f55634ec Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Thu, 28 Jul 2022 12:10:50 +0100 Subject: [PATCH 024/730] Add a --python option --- src/pip/_internal/cli/cmdoptions.py | 8 ++++++++ src/pip/_internal/cli/main_parser.py | 15 +++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 47ed92779e9..84e0e783869 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -189,6 +189,13 @@ class PipOption(Option): ), ) +python: Callable[..., Option] = partial( + Option, + "--python", + dest="python", + help="Run pip with the specified Python interpreter.", +) + verbose: Callable[..., Option] = partial( Option, "-v", @@ -1029,6 +1036,7 @@ def check_list_path_option(options: Values) -> None: debug_mode, isolated_mode, require_virtualenv, + python, verbose, version, quiet, diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py index 3666ab04ca6..8a79191c8b2 100644 --- a/src/pip/_internal/cli/main_parser.py +++ b/src/pip/_internal/cli/main_parser.py @@ -2,9 +2,11 @@ """ import os +import subprocess import sys from typing import List, Tuple +from pip._internal.build_env import _get_runnable_pip from pip._internal.cli import cmdoptions from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter from pip._internal.commands import commands_dict, get_similar_commands @@ -57,6 +59,19 @@ def parse_command(args: List[str]) -> Tuple[str, List[str]]: # args_else: ['install', '--user', 'INITools'] general_options, 
args_else = parser.parse_args(args) + # --python + if general_options.python: + if "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: + pip_cmd = [ + general_options.python, + _get_runnable_pip(), + ] + pip_cmd.extend(args) + # Block recursing indefinitely + os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1" + proc = subprocess.run(pip_cmd) + sys.exit(proc.returncode) + # --version if general_options.version: sys.stdout.write(parser.version) From 0f8243ff5e81d8f905422613ea9c0f45b120b84d Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Thu, 28 Jul 2022 14:23:38 +0300 Subject: [PATCH 025/730] Add news --- news/11309.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11309.bugfix.rst diff --git a/news/11309.bugfix.rst b/news/11309.bugfix.rst new file mode 100644 index 00000000000..f59d2516eee --- /dev/null +++ b/news/11309.bugfix.rst @@ -0,0 +1 @@ +Ensure that a binary executable of pip exists when checking for a new version of pip. From 95cf55bf185b41ce029b5349a8a8f016d6c5e8a5 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Thu, 28 Jul 2022 14:31:10 +0100 Subject: [PATCH 026/730] Add a news file --- news/11320.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11320.feature.rst diff --git a/news/11320.feature.rst b/news/11320.feature.rst new file mode 100644 index 00000000000..028f16c2bcf --- /dev/null +++ b/news/11320.feature.rst @@ -0,0 +1 @@ +Add a ``--python`` option to specify the Python environment to be managed by pip. 
From 42eae5033e33aace218186f8b76b731771268bbd Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Thu, 28 Jul 2022 15:04:35 +0100 Subject: [PATCH 027/730] More flexible handling of the --python argument --- src/pip/_internal/cli/main_parser.py | 72 +++++++++++++++++++++++----- 1 file changed, 60 insertions(+), 12 deletions(-) diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py index 8a79191c8b2..967d568e22c 100644 --- a/src/pip/_internal/cli/main_parser.py +++ b/src/pip/_internal/cli/main_parser.py @@ -2,15 +2,17 @@ """ import os +import shutil import subprocess import sys -from typing import List, Tuple +from typing import List, Optional, Tuple from pip._internal.build_env import _get_runnable_pip from pip._internal.cli import cmdoptions from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter from pip._internal.commands import commands_dict, get_similar_commands from pip._internal.exceptions import CommandError +from pip._internal.utils.compat import WINDOWS from pip._internal.utils.misc import get_pip_version, get_prog __all__ = ["create_main_parser", "parse_command"] @@ -47,6 +49,44 @@ def create_main_parser() -> ConfigOptionParser: return parser +def identify_python_interpreter(python: str) -> Optional[str]: + if python == "python" or python == "py": + # Run the active Python. + # We have to be very careful here, because: + # + # 1. On Unix, "python" is probably correct but there is a "py" launcher. + # 2. On Windows, "py" is the best option if it's present. + # 3. On Windows without "py", "python" might work, but it might also + # be the shim that launches the Windows store to allow you to install + # Python. + # + # We go with getting py on Windows, and if it's not present or we're + # on Unix, get python. We don't worry about the launcher on Unix or + # the installer stub on Windows. 
+ py = None + if WINDOWS: + py = shutil.which("py") + if py is None: + py = shutil.which("python") + if py: + return py + + # TODO: On Windows, `--python .venv/Scripts/python` won't pass the + # exists() check (no .exe extension supplied). But it's pretty + # obvious what the user intends. Should we allow this? + if os.path.exists(python): + if not os.path.isdir(python): + return python + # Might be a virtual environment + for exe in ("bin/python", "Scripts/python.exe"): + py = os.path.join(python, exe) + if os.path.exists(py): + return py + + # Could not find the interpreter specified + return None + + def parse_command(args: List[str]) -> Tuple[str, List[str]]: parser = create_main_parser() @@ -60,17 +100,25 @@ def parse_command(args: List[str]) -> Tuple[str, List[str]]: general_options, args_else = parser.parse_args(args) # --python - if general_options.python: - if "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: - pip_cmd = [ - general_options.python, - _get_runnable_pip(), - ] - pip_cmd.extend(args) - # Block recursing indefinitely - os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1" - proc = subprocess.run(pip_cmd) - sys.exit(proc.returncode) + if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: + # Re-invoke pip using the specified Python interpreter + interpreter = identify_python_interpreter(general_options.python) + if interpreter is None: + raise CommandError( + f"Could not locate Python interpreter {general_options.python}" + ) + + pip_cmd = [ + interpreter, + _get_runnable_pip(), + ] + pip_cmd.extend(args) + + # Set a flag so the child doesn't re-invoke itself, causing + # an infinite loop. 
+ os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1" + proc = subprocess.run(pip_cmd) + sys.exit(proc.returncode) # --version if general_options.version: From 78e7ea88e98a66a5e0d8dd6574ad3323e13c1a8e Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 29 Jul 2022 09:37:29 +0100 Subject: [PATCH 028/730] Make get_runnable_pip public --- src/pip/_internal/build_env.py | 4 ++-- src/pip/_internal/cli/main_parser.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index 6d4f6a56eb7..6213eedd14a 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -39,7 +39,7 @@ def __init__(self, path: str) -> None: self.lib_dirs = get_prefixed_libs(path) -def _get_runnable_pip() -> str: +def get_runnable_pip() -> str: """Get a file to pass to a Python executable, to run the currently-running pip. This is used to run a pip subprocess, for installing requirements into the build @@ -194,7 +194,7 @@ def install_requirements( if not requirements: return self._install_requirements( - _get_runnable_pip(), + get_runnable_pip(), finder, requirements, prefix, diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py index 967d568e22c..61dc42a1298 100644 --- a/src/pip/_internal/cli/main_parser.py +++ b/src/pip/_internal/cli/main_parser.py @@ -7,7 +7,7 @@ import sys from typing import List, Optional, Tuple -from pip._internal.build_env import _get_runnable_pip +from pip._internal.build_env import get_runnable_pip from pip._internal.cli import cmdoptions from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter from pip._internal.commands import commands_dict, get_similar_commands @@ -110,7 +110,7 @@ def parse_command(args: List[str]) -> Tuple[str, List[str]]: pip_cmd = [ interpreter, - _get_runnable_pip(), + get_runnable_pip(), ] pip_cmd.extend(args) From 24c22a3e5d0ad30dcb6fabf68047185496bd21d8 Mon Sep 17 00:00:00 2001 From: 
Paul Moore Date: Fri, 29 Jul 2022 09:44:14 +0100 Subject: [PATCH 029/730] Check the argument to --python is executable --- src/pip/_internal/cli/main_parser.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py index 61dc42a1298..6502c567794 100644 --- a/src/pip/_internal/cli/main_parser.py +++ b/src/pip/_internal/cli/main_parser.py @@ -71,17 +71,20 @@ def identify_python_interpreter(python: str) -> Optional[str]: if py: return py - # TODO: On Windows, `--python .venv/Scripts/python` won't pass the - # exists() check (no .exe extension supplied). But it's pretty - # obvious what the user intends. Should we allow this? + # If the named file exists, and is executable, use it. + # If it's a directory, assume it's a virtual environment and + # look for the environment's Python executable. if os.path.exists(python): - if not os.path.isdir(python): + # Do the directory check first because directories can be executable + if os.path.isdir(python): + # bin/python for Unix, Scripts/python.exe for Windows + # Try both in case of odd cases like cygwin. + for exe in ("bin/python", "Scripts/python.exe"): + py = os.path.join(python, exe) + if os.path.exists(py): + return py + elif os.access(python, os.X_OK): return python - # Might be a virtual environment - for exe in ("bin/python", "Scripts/python.exe"): - py = os.path.join(python, exe) - if os.path.exists(py): - return py # Could not find the interpreter specified return None From 3ebcc7122c4867b8f62b6ace6bbd34c12aaa4ec8 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 29 Jul 2022 12:53:18 +0100 Subject: [PATCH 030/730] Add note to divert away from adding content to user guide This guide is being broken up, and multiple folks have now tried to add content to it instead of adding dedicated pages for it. 
This note should help direct contributors away from adding more content on this page, to stop the bleeding and avoid regressing on the amount of content we'll have to move out of this page later. --- docs/html/user_guide.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index 70a28ab9988..4fbbd9eae00 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -2,6 +2,15 @@ User Guide ========== +.. Hello there! + + If you're thinking of adding content to this page... please take a moment + to consider if this content can live on its own, within a topic guide or a + reference page. + + There is active effort being put toward *reducing* the amount of content on + this specific page (https://github.com/pypa/pip/issues/9475) and moving it + into more focused single-page documents that cover that specific topic. Running pip =========== From dc1ea04e9210a5c7c3a9d9a89fa4e9a57b581aee Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 29 Jul 2022 12:39:40 +0100 Subject: [PATCH 031/730] Add a dedicated topic page for HTTPS certificates This makes further progress on moving content into dedeicated topic pages, away from dumping it into `pip install`'s documentation or as a part of the user guide. --- docs/html/cli/pip_install.rst | 14 +---- docs/html/topics/https-certificates.md | 71 +++++++++++++++++++++++ docs/html/topics/index.md | 1 + docs/html/user_guide.rst | 78 +------------------------- 4 files changed, 78 insertions(+), 86 deletions(-) create mode 100644 docs/html/topics/https-certificates.md diff --git a/docs/html/cli/pip_install.rst b/docs/html/cli/pip_install.rst index 384d393fdd7..7c17c264a30 100644 --- a/docs/html/cli/pip_install.rst +++ b/docs/html/cli/pip_install.rst @@ -219,18 +219,10 @@ details) is selected. See the :ref:`pip install Examples`. +.. _`0-ssl certificate verification`: +.. rubric:: SSL Certificate Verification -.. 
_`SSL Certificate Verification`: - -SSL Certificate Verification ----------------------------- - -Starting with v1.3, pip provides SSL certificate verification over HTTP, to -prevent man-in-the-middle attacks against PyPI downloads. This does not use -the system certificate store but instead uses a bundled CA certificate -store. The default bundled CA certificate store certificate store may be -overridden by using ``--cert`` option or by using ``PIP_CERT``, -``REQUESTS_CA_BUNDLE``, or ``CURL_CA_BUNDLE`` environment variables. +This is now covered in :doc:`../topics/https-certificates`. .. _`0-caching`: .. rubric:: Caching diff --git a/docs/html/topics/https-certificates.md b/docs/html/topics/https-certificates.md new file mode 100644 index 00000000000..b42c463e6cc --- /dev/null +++ b/docs/html/topics/https-certificates.md @@ -0,0 +1,71 @@ +(SSL Certificate Verification)= + +# HTTPS Certificates + +```{versionadded} 1.3 + +``` + +By default, pip will perform SSL certificate verification for network +connections it makes over HTTPS. These serve to prevent man-in-the-middle +attacks against package downloads. This does not use the system certificate +store but, instead, uses a bundled CA certificate store from {pypi}`certifi`. + +## Using a specific certificate store + +The `--cert` option (and the corresponding `PIP_CERT` environment variable) +allow users to specify a different certificate store/bundle for pip to use. It +is also possible to use `REQUESTS_CA_BUNDLE` or `CURL_CA_BUNDLE` environment +variables. + +## Using system certificate stores + +```{versionadded} 22.2 +Experimental support, behind `--use-feature=truststore`. +``` + +It is possible to use the system trust store, instead of the bundled certifi +certificates for verifying HTTPS certificates. This approach will typically +support corporate proxy certificates without additional configuration. + +In order to use system trust stores, you need to: + +- Use Python 3.10 or newer. 
+- Install the {pypi}`truststore` package, in the Python environment you're + running pip in. + + This is typically done by installing this package using a system package + manager or by using pip in {ref}`Hash-checking mode` for this package and + trusting the network using the `--trusted-host` flag. + + ```{pip-cli} + $ python -m pip install truststore + [...] + $ python -m pip install SomePackage --use-feature=truststore + [...] + Successfully installed SomePackage + ``` + +### When to use + +You should try using system trust stores when there is a custom certificate +chain configured for your system that pip isn't aware of. Typically, this +situation will manifest with an `SSLCertVerificationError` with the message +"certificate verify failed: unable to get local issuer certificate": + +```{pip-cli} +$ pip install -U SomePackage +[...] + SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (\_ssl.c:997)'))) - skipping +``` + +This error means that OpenSSL wasn't able to find a trust anchor to verify the +chain against. Using system trust stores instead of certifi will likely solve +this issue. + +If you encounter a TLS/SSL error when using the `truststore` feature you should +open an issue on the [truststore GitHub issue tracker] instead of pip's issue +tracker. The maintainers of truststore will help diagnose and fix the issue. 
+ +[truststore github issue tracker]: + https://github.com/sethmlarson/truststore/issues diff --git a/docs/html/topics/index.md b/docs/html/topics/index.md index 011205a111d..eb2b5f54d5b 100644 --- a/docs/html/topics/index.md +++ b/docs/html/topics/index.md @@ -14,6 +14,7 @@ authentication caching configuration dependency-resolution +https-certificates local-project-installs repeatable-installs secure-installs diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index 70a28ab9988..b31ca4f608a 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -1138,79 +1138,7 @@ announcements on the `low-traffic packaging announcements list`_ and .. _the official Python blog: https://blog.python.org/ .. _Python Windows launcher: https://docs.python.org/3/using/windows.html#launcher -Using system trust stores for verifying HTTPS -============================================= +.. _`0-using-system-trust-stores-for-verifying-https`: +.. rubric:: Using system trust stores for verifying HTTPS -pip 22.2 added **experimental** support for using system trust stores to verify HTTPS certificates -instead of certifi. Using system trust stores has advantages over certifi like automatically supporting -corporate proxy certificates without additional configuration. - -In order to use system trust stores you must be using Python 3.10+ and install the package `truststore`_ from PyPI. - -.. tab:: Unix/macOS - - .. code-block:: console - - # Requires Python 3.10 or later - $ python --version - Python 3.10.4 - - # Install the 'truststore' package from PyPI - $ python -m pip install truststore - [...] - - # Use '--use-feature=truststore' flag to enable - $ python -m pip install SomePackage --use-feature=truststore - [...] - Successfully installed SomePackage - -.. tab:: Windows - - .. code-block:: console - - # Requires Python 3.10 or later - C:\> py --version - Python 3.10.4 - - # Install the 'truststore' package from PyPI - C:\> py -m pip install truststore - [...] 
- - # Use '--use-feature=truststore' flag to enable - C:\> py -m pip install SomePackage --use-feature=truststore - [...] - Successfully installed SomePackage - -When to use system trust stores -------------------------------- - -You should try using system trust stores when there is a custom certificate chain configured for your -system that pip isn't aware of. Typically this situation will manifest with an ``SSLCertVerificationError`` -with the message "certificate verify failed: unable to get local issuer certificate": - -.. code-block:: console - - $ python -m pip install -U SomePackage - - [...] - - Could not fetch URL https://pypi.org/simple/SomePackage/: - There was a problem confirming the ssl certificate: - - [...] - - (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] - certificate verify failed: unable to get local issuer certificate (_ssl.c:997)'))) - skipping - -This error means that OpenSSL wasn't able to find a trust anchor to verify the chain against. -Using system trust stores instead of certifi will likely solve this issue. - -Follow up ---------- - -If you encounter a TLS/SSL error when using the ``truststore`` feature you should open an issue -on the `truststore GitHub issue tracker`_ instead of pip's issue tracker. The maintainers of truststore -will help diagnose and fix the issue. - -.. _truststore: https://truststore.readthedocs.io -.. _truststore GitHub issue tracker: https://github.com/sethmlarson/truststore/issues +This is now covered in :doc:`topics/https-certificates`. From 89983e9ad923826217cf06f30f7c52f36c1b6069 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 8 Dec 2021 18:44:11 +0000 Subject: [PATCH 032/730] Use `shell=True` for opening the editor with `pip config edit` This makes the behavior compatible with git and other tools that invoke the editor in this manner. 
--- news/10716.feature.rst | 1 + src/pip/_internal/commands/configuration.py | 8 +++++++- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 news/10716.feature.rst diff --git a/news/10716.feature.rst b/news/10716.feature.rst new file mode 100644 index 00000000000..ef09e1b8f58 --- /dev/null +++ b/news/10716.feature.rst @@ -0,0 +1 @@ +Use ``shell=True`` for opening the editor with ``pip config edit``. diff --git a/src/pip/_internal/commands/configuration.py b/src/pip/_internal/commands/configuration.py index e3837325986..84b134e490b 100644 --- a/src/pip/_internal/commands/configuration.py +++ b/src/pip/_internal/commands/configuration.py @@ -228,9 +228,15 @@ def open_in_editor(self, options: Values, args: List[str]) -> None: fname = self.configuration.get_file_to_edit() if fname is None: raise PipError("Could not determine appropriate file.") + elif '"' in fname: + # This shouldn't happen, unless we see a username like that. + # If that happens, we'd appreciate a pull request fixing this. + raise PipError( + f'Can not open an editor for a file name containing "\n{fname}' + ) try: - subprocess.check_call([editor, fname]) + subprocess.check_call(f'{editor} "{fname}"', shell=True) except FileNotFoundError as e: if not e.filename: e.filename = editor From 50eb337a0f04960c33a9eb8da20877aa718f7ff6 Mon Sep 17 00:00:00 2001 From: Brett Rosen Date: Fri, 29 Jul 2022 16:53:32 -0400 Subject: [PATCH 033/730] Ensure that removing shim in older setuptools does not error --- news/11314.bugfix.rst | 1 + src/pip/_internal/locations/_distutils.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 news/11314.bugfix.rst diff --git a/news/11314.bugfix.rst b/news/11314.bugfix.rst new file mode 100644 index 00000000000..02d78dc47ff --- /dev/null +++ b/news/11314.bugfix.rst @@ -0,0 +1 @@ +Avoid ``AttributeError`` when removing the setuptools-provided ``_distutils_hack`` and it is missing its implementation. 
diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index 1b8b42606d7..fbcb04f488f 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -11,7 +11,7 @@ # rationale for why this is done within pip. try: __import__("_distutils_hack").remove_shim() -except ImportError: +except (ImportError, AttributeError): pass import logging From db4751595867db1d938df7183a60dfb15fa0d708 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Fri, 29 Jul 2022 22:35:47 +0200 Subject: [PATCH 034/730] Import distutils only if needed, but sooner --- news/11319.bugfix.rst | 1 + src/pip/_internal/locations/__init__.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 8 deletions(-) create mode 100644 news/11319.bugfix.rst diff --git a/news/11319.bugfix.rst b/news/11319.bugfix.rst new file mode 100644 index 00000000000..31cd2a34b0b --- /dev/null +++ b/news/11319.bugfix.rst @@ -0,0 +1 @@ +Fix import error when reinstalling pip in user site. diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 3f6f0a58e16..60afe0a73b8 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -60,6 +60,12 @@ def _should_use_sysconfig() -> bool: _USE_SYSCONFIG = _should_use_sysconfig() +if not _USE_SYSCONFIG: + # Import distutils lazily to avoid deprecation warnings, + # but import it soon enough that it is in memory and available during + # a pip reinstall. + from . import _distutils + # Be noisy about incompatibilities if this platforms "should" be using # sysconfig, but is explicitly opting out and using distutils instead. if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG: @@ -241,8 +247,6 @@ def get_scheme( if _USE_SYSCONFIG: return new - from . import _distutils - old = _distutils.get_scheme( dist_name, user=user, @@ -407,8 +411,6 @@ def get_bin_prefix() -> str: if _USE_SYSCONFIG: return new - from . 
import _distutils - old = _distutils.get_bin_prefix() if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"): _log_context() @@ -442,8 +444,6 @@ def get_purelib() -> str: if _USE_SYSCONFIG: return new - from . import _distutils - old = _distutils.get_purelib() if _looks_like_deb_system_dist_packages(old): return old @@ -488,8 +488,6 @@ def get_prefixed_libs(prefix: str) -> List[str]: if _USE_SYSCONFIG: return _deduplicated(new_pure, new_plat) - from . import _distutils - old_pure, old_plat = _distutils.get_prefixed_libs(prefix) old_lib_paths = _deduplicated(old_pure, old_plat) From 01e122ed4125673c77bfd2aced75e70f6dfa2a7c Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 29 Jul 2022 10:42:34 +0100 Subject: [PATCH 035/730] Add tests --- tests/functional/test_python_option.py | 33 ++++++++++++++++++++++++++ tests/unit/test_cmdoptions.py | 31 ++++++++++++++++++++++++ 2 files changed, 64 insertions(+) create mode 100644 tests/functional/test_python_option.py diff --git a/tests/functional/test_python_option.py b/tests/functional/test_python_option.py new file mode 100644 index 00000000000..4fafde2a8b2 --- /dev/null +++ b/tests/functional/test_python_option.py @@ -0,0 +1,33 @@ +import json +import os +from pathlib import Path +from venv import EnvBuilder + +from tests.lib import PipTestEnvironment, TestData + + +def test_python_interpreter( + script: PipTestEnvironment, + tmpdir: Path, + shared_data: TestData, +) -> None: + env_path = os.fsdecode(tmpdir / "venv") + env = EnvBuilder(with_pip=False) + env.create(env_path) + + result = script.pip("--python", env_path, "list", "--format=json") + assert json.loads(result.stdout) == [] + script.pip( + "--python", + env_path, + "install", + "-f", + shared_data.find_links, + "--no-index", + "simplewheel==1.0", + ) + result = script.pip("--python", env_path, "list", "--format=json") + assert json.loads(result.stdout) == [{"name": "simplewheel", "version": "1.0"}] + script.pip("--python", env_path, 
"uninstall", "simplewheel", "--yes") + result = script.pip("--python", env_path, "list", "--format=json") + assert json.loads(result.stdout) == [] diff --git a/tests/unit/test_cmdoptions.py b/tests/unit/test_cmdoptions.py index 1e5ef995cd0..d5b4813822f 100644 --- a/tests/unit/test_cmdoptions.py +++ b/tests/unit/test_cmdoptions.py @@ -1,8 +1,12 @@ +import os +from pathlib import Path from typing import Optional, Tuple +from venv import EnvBuilder import pytest from pip._internal.cli.cmdoptions import _convert_python_version +from pip._internal.cli.main_parser import identify_python_interpreter @pytest.mark.parametrize( @@ -29,3 +33,30 @@ def test_convert_python_version( ) -> None: actual = _convert_python_version(value) assert actual == expected, f"actual: {actual!r}" + + +def test_identify_python_interpreter_py(monkeypatch: pytest.MonkeyPatch) -> None: + def which(cmd: str) -> str: + assert cmd == "py" or cmd == "python" + return "dummy_value" + + monkeypatch.setattr("shutil.which", which) + assert identify_python_interpreter("py") == "dummy_value" + assert identify_python_interpreter("python") == "dummy_value" + + +def test_identify_python_interpreter_venv(tmpdir: Path) -> None: + env_path = tmpdir / "venv" + env = EnvBuilder(with_pip=False) + env.create(env_path) + + # Passing a virtual environment returns the Python executable + interp = identify_python_interpreter(os.fsdecode(env_path)) + assert interp is not None + assert Path(interp).exists() + + # Passing an executable returns it + assert identify_python_interpreter(interp) == interp + + # Passing a non-existent file returns None + assert identify_python_interpreter(str(tmpdir / "nonexistent")) is None From b1eb91204e0673a61d9a3203a49675be3307abd2 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 29 Jul 2022 10:57:00 +0100 Subject: [PATCH 036/730] Added documentation --- docs/html/user_guide.rst | 43 ++++++++++++++++++++++++++++++++++++++++ news/11320.feature.rst | 3 ++- 2 files changed, 45 insertions(+), 
1 deletion(-) diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index 70a28ab9988..1f1a8660627 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -782,6 +782,49 @@ This is now covered in :doc:`../topics/repeatable-installs`. This is now covered in :doc:`../topics/dependency-resolution`. +.. _`Managing a different Python interpreter`: + +Managing a different Python interpreter +======================================= + +Occasionally, you may want to use pip to manage a Python installation other than +the one pip is installed into. In this case, you can use the ``--python`` option +to specify the interpreter you want to manage. This option can take one of three +values: + +#. The path to a Python executable. +#. The path to a virtual environment. +#. Either "py" or "python", referring to the currently active Python interpreter. + +In all 3 cases, pip will run exactly as if it had been invoked from that Python +environment. + +One example of where this might be useful is to manage a virtual environment +that does not have pip installed. + +.. tab:: Unix/macOS + + .. code-block:: console + + $ python -m venv .venv --without-pip + $ python -m pip --python .venv install SomePackage + [...] + Successfully installed SomePackage + +.. tab:: Windows + + .. code-block:: console + + C:\> py -m venv .venv --without-pip + C:\> py -m pip --python .venv install SomePackage + [...] + Successfully installed SomePackage + +You could also use ``--python .venv/bin/python`` (or on Windows, +``--python .venv\Scripts\python.exe``) if you wanted to be explicit, but the +virtual environment name is shorter and works exactly the same. + + .. _`Using pip from your program`: Using pip from your program diff --git a/news/11320.feature.rst b/news/11320.feature.rst index 028f16c2bcf..843eac7c9f4 100644 --- a/news/11320.feature.rst +++ b/news/11320.feature.rst @@ -1 +1,2 @@ -Add a ``--python`` option to specify the Python environment to be managed by pip. 
+Add a ``--python`` option to allow pip to manage Python environments other +than the one pip is installed in. From f86a140b124bef3c4a6beeaa5d6fff1a4cd62a18 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 29 Jul 2022 16:59:31 +0100 Subject: [PATCH 037/730] Move docs to a topic --- docs/html/topics/index.md | 1 + docs/html/topics/python-option.md | 38 +++++++++++++++++++++++++++ docs/html/user_guide.rst | 43 ------------------------------- 3 files changed, 39 insertions(+), 43 deletions(-) create mode 100644 docs/html/topics/python-option.md diff --git a/docs/html/topics/index.md b/docs/html/topics/index.md index 011205a111d..c5e4d36c95f 100644 --- a/docs/html/topics/index.md +++ b/docs/html/topics/index.md @@ -18,4 +18,5 @@ local-project-installs repeatable-installs secure-installs vcs-support +python-option ``` diff --git a/docs/html/topics/python-option.md b/docs/html/topics/python-option.md new file mode 100644 index 00000000000..242784dfbe8 --- /dev/null +++ b/docs/html/topics/python-option.md @@ -0,0 +1,38 @@ +# Managing a different Python interpreter + + +Occasionally, you may want to use pip to manage a Python installation other than +the one pip is installed into. In this case, you can use the `--python` option +to specify the interpreter you want to manage. This option can take one of three +values: + +1. The path to a Python executable. +2. The path to a virtual environment. +3. Either "py" or "python", referring to the currently active Python interpreter. + +In all 3 cases, pip will run exactly as if it had been invoked from that Python +environment. + +One example of where this might be useful is to manage a virtual environment +that does not have pip installed. + +````{tab} Unix/macOS +```{code-block} console +$ python -m venv .venv --without-pip +$ python -m pip --python .venv install SomePackage +[...] 
+Successfully installed SomePackage +``` +```` +````{tab} Windows +```{code-block} console +C:\> py -m venv .venv --without-pip +C:\> py -m pip --python .venv install SomePackage +[...] +Successfully installed SomePackage +``` +```` + +You could also use `--python .venv/bin/python` (or on Windows, +`--python .venv\Scripts\python.exe`) if you wanted to be explicit, but the +virtual environment name is shorter and works exactly the same. diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index 1f1a8660627..70a28ab9988 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -782,49 +782,6 @@ This is now covered in :doc:`../topics/repeatable-installs`. This is now covered in :doc:`../topics/dependency-resolution`. -.. _`Managing a different Python interpreter`: - -Managing a different Python interpreter -======================================= - -Occasionally, you may want to use pip to manage a Python installation other than -the one pip is installed into. In this case, you can use the ``--python`` option -to specify the interpreter you want to manage. This option can take one of three -values: - -#. The path to a Python executable. -#. The path to a virtual environment. -#. Either "py" or "python", referring to the currently active Python interpreter. - -In all 3 cases, pip will run exactly as if it had been invoked from that Python -environment. - -One example of where this might be useful is to manage a virtual environment -that does not have pip installed. - -.. tab:: Unix/macOS - - .. code-block:: console - - $ python -m venv .venv --without-pip - $ python -m pip --python .venv install SomePackage - [...] - Successfully installed SomePackage - -.. tab:: Windows - - .. code-block:: console - - C:\> py -m venv .venv --without-pip - C:\> py -m pip --python .venv install SomePackage - [...] 
- Successfully installed SomePackage - -You could also use ``--python .venv/bin/python`` (or on Windows, -``--python .venv\Scripts\python.exe``) if you wanted to be explicit, but the -virtual environment name is shorter and works exactly the same. - - .. _`Using pip from your program`: Using pip from your program From b5afdd604831f985427880537d37eb7a35addaa1 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 29 Jul 2022 12:54:13 +0100 Subject: [PATCH 038/730] Fix test to cater for packages leaked into venv --- tests/functional/test_python_option.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/tests/functional/test_python_option.py b/tests/functional/test_python_option.py index 4fafde2a8b2..8bf16d7a56b 100644 --- a/tests/functional/test_python_option.py +++ b/tests/functional/test_python_option.py @@ -11,12 +11,17 @@ def test_python_interpreter( tmpdir: Path, shared_data: TestData, ) -> None: - env_path = os.fsdecode(tmpdir / "venv") + env_path = os.fspath(tmpdir / "venv") env = EnvBuilder(with_pip=False) env.create(env_path) result = script.pip("--python", env_path, "list", "--format=json") - assert json.loads(result.stdout) == [] + before = json.loads(result.stdout) + + # Ideally we would assert that before==[], but there's a problem in CI + # that means this isn't true. See https://github.com/pypa/pip/pull/11326 + # for details. 
+ script.pip( "--python", env_path, @@ -26,8 +31,11 @@ def test_python_interpreter( "--no-index", "simplewheel==1.0", ) + result = script.pip("--python", env_path, "list", "--format=json") - assert json.loads(result.stdout) == [{"name": "simplewheel", "version": "1.0"}] + installed = json.loads(result.stdout) + assert {"name": "simplewheel", "version": "1.0"} in installed + script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") - assert json.loads(result.stdout) == [] + assert json.loads(result.stdout) == before From 61249ed9ee1811ef2693195e21432ebba738574a Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 30 Jul 2022 16:55:48 +0100 Subject: [PATCH 039/730] Update docs/html/topics/python-option.md Co-authored-by: Pradyun Gedam --- docs/html/topics/python-option.md | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/docs/html/topics/python-option.md b/docs/html/topics/python-option.md index 242784dfbe8..877f78b3041 100644 --- a/docs/html/topics/python-option.md +++ b/docs/html/topics/python-option.md @@ -16,22 +16,12 @@ environment. One example of where this might be useful is to manage a virtual environment that does not have pip installed. -````{tab} Unix/macOS -```{code-block} console +```{pip-cli} $ python -m venv .venv --without-pip -$ python -m pip --python .venv install SomePackage +$ pip --python .venv install SomePackage [...] Successfully installed SomePackage ``` -```` -````{tab} Windows -```{code-block} console -C:\> py -m venv .venv --without-pip -C:\> py -m pip --python .venv install SomePackage -[...] 
-Successfully installed SomePackage -``` -```` You could also use `--python .venv/bin/python` (or on Windows, `--python .venv\Scripts\python.exe`) if you wanted to be explicit, but the From d0b5a8f75dbac35896a394ab27fff5378ee23baf Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 30 Jul 2022 16:56:09 +0100 Subject: [PATCH 040/730] Update docs/html/topics/python-option.md Co-authored-by: Pradyun Gedam --- docs/html/topics/python-option.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/html/topics/python-option.md b/docs/html/topics/python-option.md index 877f78b3041..435d8ed6774 100644 --- a/docs/html/topics/python-option.md +++ b/docs/html/topics/python-option.md @@ -1,5 +1,7 @@ # Managing a different Python interpreter +```{versionadded} 22.3 +``` Occasionally, you may want to use pip to manage a Python installation other than the one pip is installed into. In this case, you can use the `--python` option From 7cc6445a7a3d456d4ed41ba514c11a02faddf0c9 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 30 Jul 2022 19:09:42 +0100 Subject: [PATCH 041/730] Add a clarifying note to the pip download docs --- docs/html/cli/pip_download.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/html/cli/pip_download.rst b/docs/html/cli/pip_download.rst index 4f15314d765..f1fe1769ee7 100644 --- a/docs/html/cli/pip_download.rst +++ b/docs/html/cli/pip_download.rst @@ -43,7 +43,9 @@ match the constraint of the current interpreter (but not your target one), it is recommended to specify all of these options if you are specifying one of them. Generic dependencies (e.g. universal wheels, or dependencies with no platform, abi, or implementation constraints) will still match an over- -constrained download requirement. +constrained download requirement. If some of your dependencies are not +available as binaries, you can build them manually for your target platform +and let pip download know where to find them using ``--find-links``. 
From b6be01aee8be13844fa054a5f8c56fa062cc2c21 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 31 Jul 2022 11:50:29 +0100 Subject: [PATCH 042/730] Catch errors from running the subprocess --- src/pip/_internal/cli/main_parser.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py index 6502c567794..06a61305d05 100644 --- a/src/pip/_internal/cli/main_parser.py +++ b/src/pip/_internal/cli/main_parser.py @@ -120,8 +120,13 @@ def parse_command(args: List[str]) -> Tuple[str, List[str]]: # Set a flag so the child doesn't re-invoke itself, causing # an infinite loop. os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1" - proc = subprocess.run(pip_cmd) - sys.exit(proc.returncode) + returncode = 0 + try: + proc = subprocess.run(pip_cmd) + returncode = proc.returncode + except (subprocess.SubprocessError, OSError) as exc: + raise CommandError(f"Failed to run pip under {interpreter}: {exc}") + sys.exit(returncode) # --version if general_options.version: From 333389133a9fc5f07280a1025a466b137386b18a Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 31 Jul 2022 12:12:49 +0100 Subject: [PATCH 043/730] Check python version in __pip-runner__.py --- setup.py | 2 ++ src/pip/__pip-runner__.py | 16 ++++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/setup.py b/setup.py index 9b7fdeb1134..2179d34d2bf 100644 --- a/setup.py +++ b/setup.py @@ -81,5 +81,7 @@ def get_version(rel_path: str) -> str: ], }, zip_safe=False, + # NOTE: python_requires is duplicated in __pip-runner__.py. + # When changing this value, please change the other copy as well. 
python_requires=">=3.7", ) diff --git a/src/pip/__pip-runner__.py b/src/pip/__pip-runner__.py index 280e99f2f08..28e4399b054 100644 --- a/src/pip/__pip-runner__.py +++ b/src/pip/__pip-runner__.py @@ -12,6 +12,8 @@ from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) +# Copied from setup.py +PYTHON_REQUIRES = ">=3.7" class PipImportRedirectingFinder: @@ -30,8 +32,22 @@ def find_spec( return spec +def check_python_version() -> None: + # Import here to ensure the imports happen after the sys.meta_path change. + from pip._vendor.packaging.specifiers import SpecifierSet + from pip._vendor.packaging.version import Version + + py_ver = Version("{0.major}.{0.minor}.{0.micro}".format(sys.version_info)) + if py_ver not in SpecifierSet(PYTHON_REQUIRES): + raise SystemExit( + f"This version of pip does not support python {py_ver} " + f"(requires {PYTHON_REQUIRES})" + ) + + # TODO https://github.com/pypa/pip/issues/11294 sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" +check_python_version() runpy.run_module("pip", run_name="__main__", alter_sys=True) From 4dc35b7399cee3668caf31ba200d3dcfc2bd7579 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 31 Jul 2022 16:01:57 +0100 Subject: [PATCH 044/730] Skip the executable check, as subprocess.run will catch this --- src/pip/_internal/cli/main_parser.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py index 06a61305d05..548174d8dfe 100644 --- a/src/pip/_internal/cli/main_parser.py +++ b/src/pip/_internal/cli/main_parser.py @@ -71,11 +71,10 @@ def identify_python_interpreter(python: str) -> Optional[str]: if py: return py - # If the named file exists, and is executable, use it. + # If the named file exists, use it. 
# If it's a directory, assume it's a virtual environment and # look for the environment's Python executable. if os.path.exists(python): - # Do the directory check first because directories can be executable if os.path.isdir(python): # bin/python for Unix, Scripts/python.exe for Windows # Try both in case of odd cases like cygwin. @@ -83,7 +82,7 @@ def identify_python_interpreter(python: str) -> Optional[str]: py = os.path.join(python, exe) if os.path.exists(py): return py - elif os.access(python, os.X_OK): + else: return python # Could not find the interpreter specified From 0f559adabb704ca5b09e1984965aa34233c5022a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 31 Jul 2022 17:21:19 +0200 Subject: [PATCH 045/730] Remove TODO --- src/pip/__pip-runner__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pip/__pip-runner__.py b/src/pip/__pip-runner__.py index 280e99f2f08..14026c0d131 100644 --- a/src/pip/__pip-runner__.py +++ b/src/pip/__pip-runner__.py @@ -30,7 +30,6 @@ def find_spec( return spec -# TODO https://github.com/pypa/pip/issues/11294 sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" From d5317f27785d9c623e3f5c74af16f1fb73d6fd50 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 31 Jul 2022 17:33:22 +0200 Subject: [PATCH 046/730] Revert "PipDeprecationWarning subclass DeprecationWarning" This reverts commit f1bc96a4a336e2b8889269aec046ac4044e4b46c. --- news/11330.bugfix.rst | 1 + src/pip/_internal/utils/deprecation.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 news/11330.bugfix.rst diff --git a/news/11330.bugfix.rst b/news/11330.bugfix.rst new file mode 100644 index 00000000000..e03501fe5ef --- /dev/null +++ b/news/11330.bugfix.rst @@ -0,0 +1 @@ +Show pip deprecation warnings by default. 
diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index 7964095ffde..72bd6f25a55 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -13,7 +13,7 @@ DEPRECATION_MSG_PREFIX = "DEPRECATION: " -class PipDeprecationWarning(DeprecationWarning): +class PipDeprecationWarning(Warning): pass From ebe491a82a13e6610697b22be00db363fb5ff5e3 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 1 Aug 2022 10:49:32 +0100 Subject: [PATCH 047/730] Get rid of the --python python/py shortcuts --- src/pip/_internal/cli/main_parser.py | 23 ----------------------- tests/unit/test_cmdoptions.py | 12 +----------- 2 files changed, 1 insertion(+), 34 deletions(-) diff --git a/src/pip/_internal/cli/main_parser.py b/src/pip/_internal/cli/main_parser.py index 548174d8dfe..5ade356b9c2 100644 --- a/src/pip/_internal/cli/main_parser.py +++ b/src/pip/_internal/cli/main_parser.py @@ -2,7 +2,6 @@ """ import os -import shutil import subprocess import sys from typing import List, Optional, Tuple @@ -12,7 +11,6 @@ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter from pip._internal.commands import commands_dict, get_similar_commands from pip._internal.exceptions import CommandError -from pip._internal.utils.compat import WINDOWS from pip._internal.utils.misc import get_pip_version, get_prog __all__ = ["create_main_parser", "parse_command"] @@ -50,27 +48,6 @@ def create_main_parser() -> ConfigOptionParser: def identify_python_interpreter(python: str) -> Optional[str]: - if python == "python" or python == "py": - # Run the active Python. - # We have to be very careful here, because: - # - # 1. On Unix, "python" is probably correct but there is a "py" launcher. - # 2. On Windows, "py" is the best option if it's present. - # 3. On Windows without "py", "python" might work, but it might also - # be the shim that launches the Windows store to allow you to install - # Python. 
- # - # We go with getting py on Windows, and if it's not present or we're - # on Unix, get python. We don't worry about the launcher on Unix or - # the installer stub on Windows. - py = None - if WINDOWS: - py = shutil.which("py") - if py is None: - py = shutil.which("python") - if py: - return py - # If the named file exists, use it. # If it's a directory, assume it's a virtual environment and # look for the environment's Python executable. diff --git a/tests/unit/test_cmdoptions.py b/tests/unit/test_cmdoptions.py index d5b4813822f..8c33ca8c18d 100644 --- a/tests/unit/test_cmdoptions.py +++ b/tests/unit/test_cmdoptions.py @@ -35,23 +35,13 @@ def test_convert_python_version( assert actual == expected, f"actual: {actual!r}" -def test_identify_python_interpreter_py(monkeypatch: pytest.MonkeyPatch) -> None: - def which(cmd: str) -> str: - assert cmd == "py" or cmd == "python" - return "dummy_value" - - monkeypatch.setattr("shutil.which", which) - assert identify_python_interpreter("py") == "dummy_value" - assert identify_python_interpreter("python") == "dummy_value" - - def test_identify_python_interpreter_venv(tmpdir: Path) -> None: env_path = tmpdir / "venv" env = EnvBuilder(with_pip=False) env.create(env_path) # Passing a virtual environment returns the Python executable - interp = identify_python_interpreter(os.fsdecode(env_path)) + interp = identify_python_interpreter(os.fspath(env_path)) assert interp is not None assert Path(interp).exists() From 6354192e2ef4ab19db5ba324dfd1ef4e2c840e07 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 1 Aug 2022 14:28:59 +0300 Subject: [PATCH 048/730] Fix news --- news/11309.bugfix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/11309.bugfix.rst b/news/11309.bugfix.rst index f59d2516eee..9ee54057da4 100644 --- a/news/11309.bugfix.rst +++ b/news/11309.bugfix.rst @@ -1 +1 @@ -Ensure that a binary executable of pip exists when checking for a new version of pip. 
+Ensure that the candidate ``pip`` executable exists, when checking for a new version of pip. From 9c22ee1ef11e216299a610fe3c3e01765ace9097 Mon Sep 17 00:00:00 2001 From: Godefroid Chapelle Date: Tue, 2 Aug 2022 12:08:33 +0200 Subject: [PATCH 049/730] Do not parse JSON content with HTML parser --- src/pip/_internal/index/collector.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index 6e5dac5ad3c..bc41737d972 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -345,6 +345,7 @@ def parse_links(page: "IndexContent") -> Iterable[Link]: yanked_reason=yanked_reason, hashes=file.get("hashes", {}), ) + return parser = HTMLLinkParser(page.url) encoding = page.encoding or "utf-8" From bb2894d7377b91664897b1fe11dbb79c9f546136 Mon Sep 17 00:00:00 2001 From: Philippe Ombredanne Date: Tue, 2 Aug 2022 16:14:01 +0200 Subject: [PATCH 050/730] Fix minor docstring typo Signed-off-by: Philippe Ombredanne --- src/pip/_internal/network/lazy_wheel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/network/lazy_wheel.py b/src/pip/_internal/network/lazy_wheel.py index 2d1ddaa8981..854a6fa1fdc 100644 --- a/src/pip/_internal/network/lazy_wheel.py +++ b/src/pip/_internal/network/lazy_wheel.py @@ -23,7 +23,7 @@ class HTTPRangeRequestUnsupported(Exception): def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution: """Return a distribution object from the given wheel URL. - This uses HTTP range requests to only fetch the potion of the wheel + This uses HTTP range requests to only fetch the portion of the wheel containing metadata, just enough for the object to be constructed. If such requests are not supported, HTTPRangeRequestUnsupported is raised. 
From 5befbe3b1a2077f9c29dcb762a45204a1a0cd484 Mon Sep 17 00:00:00 2001 From: Godefroid Chapelle Date: Tue, 2 Aug 2022 16:59:48 +0200 Subject: [PATCH 051/730] fix forgotten rename --- src/pip/_internal/index/collector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index bc41737d972..c8e120b519e 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -448,7 +448,7 @@ def _get_index_content( ) -> Optional["IndexContent"]: if session is None: raise TypeError( - "_get_html_page() missing 1 required keyword argument: 'session'" + "_get_index_content() missing 1 required keyword argument: 'session'" ) url = link.url.split("#", 1)[0] From c69ea02bff38d7d6c5cda41a344b2d6a71ad2c74 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 3 Aug 2022 10:12:29 +0100 Subject: [PATCH 052/730] Add a version check to __pip-runner__.py --- setup.py | 2 ++ src/pip/__pip-runner__.py | 36 +++++++++++++++++++++++++----------- 2 files changed, 27 insertions(+), 11 deletions(-) diff --git a/setup.py b/setup.py index 9b7fdeb1134..2179d34d2bf 100644 --- a/setup.py +++ b/setup.py @@ -81,5 +81,7 @@ def get_version(rel_path: str) -> str: ], }, zip_safe=False, + # NOTE: python_requires is duplicated in __pip-runner__.py. + # When changing this value, please change the other copy as well. python_requires=">=3.7", ) diff --git a/src/pip/__pip-runner__.py b/src/pip/__pip-runner__.py index 14026c0d131..49a148a097e 100644 --- a/src/pip/__pip-runner__.py +++ b/src/pip/__pip-runner__.py @@ -4,24 +4,38 @@ an import statement. """ -import runpy +# /!\ This version compatibility check section must be Python 2 compatible. 
/!\ + import sys -import types -from importlib.machinery import ModuleSpec, PathFinder -from os.path import dirname -from typing import Optional, Sequence, Union + +# Copied from setup.py +PYTHON_REQUIRES = (3, 7) + + +def version_str(version): # type: ignore + return ".".join(str(v) for v in version) + + +if sys.version_info[:2] < PYTHON_REQUIRES: + raise SystemExit( + "This version of pip does not support python {} (requires >={}).".format( + version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES) + ) + ) + +# From here on, we can use Python 3 features, but the syntax must remain +# Python 2 compatible. + +import runpy # noqa: E402 +from importlib.machinery import PathFinder # noqa: E402 +from os.path import dirname # noqa: E402 PIP_SOURCES_ROOT = dirname(dirname(__file__)) class PipImportRedirectingFinder: @classmethod - def find_spec( - self, - fullname: str, - path: Optional[Sequence[Union[bytes, str]]] = None, - target: Optional[types.ModuleType] = None, - ) -> Optional[ModuleSpec]: + def find_spec(self, fullname, path=None, target=None): # type: ignore if fullname != "pip": return None From b8aa21b5759c55bf53f69c185b6193a19e82cd20 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 3 Aug 2022 10:05:25 +0100 Subject: [PATCH 053/730] Revert __pip-runner__.py changes --- setup.py | 2 -- src/pip/__pip-runner__.py | 16 ---------------- 2 files changed, 18 deletions(-) diff --git a/setup.py b/setup.py index 2179d34d2bf..9b7fdeb1134 100644 --- a/setup.py +++ b/setup.py @@ -81,7 +81,5 @@ def get_version(rel_path: str) -> str: ], }, zip_safe=False, - # NOTE: python_requires is duplicated in __pip-runner__.py. - # When changing this value, please change the other copy as well. 
python_requires=">=3.7", ) diff --git a/src/pip/__pip-runner__.py b/src/pip/__pip-runner__.py index 41d7fe00474..14026c0d131 100644 --- a/src/pip/__pip-runner__.py +++ b/src/pip/__pip-runner__.py @@ -12,8 +12,6 @@ from typing import Optional, Sequence, Union PIP_SOURCES_ROOT = dirname(dirname(__file__)) -# Copied from setup.py -PYTHON_REQUIRES = ">=3.7" class PipImportRedirectingFinder: @@ -32,21 +30,7 @@ def find_spec( return spec -def check_python_version() -> None: - # Import here to ensure the imports happen after the sys.meta_path change. - from pip._vendor.packaging.specifiers import SpecifierSet - from pip._vendor.packaging.version import Version - - py_ver = Version("{0.major}.{0.minor}.{0.micro}".format(sys.version_info)) - if py_ver not in SpecifierSet(PYTHON_REQUIRES): - raise SystemExit( - f"This version of pip does not support python {py_ver} " - f"(requires {PYTHON_REQUIRES})" - ) - - sys.meta_path.insert(0, PipImportRedirectingFinder()) assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module" -check_python_version() runpy.run_module("pip", run_name="__main__", alter_sys=True) From ef4fc3c516d2b0709328a893b8c1d840923d6914 Mon Sep 17 00:00:00 2001 From: kasium <15907922+kasium@users.noreply.github.com> Date: Wed, 3 Aug 2022 11:39:59 +0200 Subject: [PATCH 054/730] Update docs/html/cli/pip_wheel.rst Co-authored-by: Tzu-ping Chung --- docs/html/cli/pip_wheel.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/html/cli/pip_wheel.rst b/docs/html/cli/pip_wheel.rst index d645f29cd2a..e93fee2e620 100644 --- a/docs/html/cli/pip_wheel.rst +++ b/docs/html/cli/pip_wheel.rst @@ -30,8 +30,8 @@ Description This is now covered in :doc:`../reference/build-system/index`. -Differences to `build` ----------------------- +Differences to ``build`` +------------------------ `build `_ is a simple tool which can among other things build wheels for projects using PEP 517. 
It is comparable to the execution of ``pip wheel --no-deps .``. From 905fa3f076d020239903730223b6eb057121c2f2 Mon Sep 17 00:00:00 2001 From: Kai Mueller Date: Wed, 3 Aug 2022 09:53:13 +0000 Subject: [PATCH 055/730] Comments --- docs/html/cli/pip_wheel.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/html/cli/pip_wheel.rst b/docs/html/cli/pip_wheel.rst index e93fee2e620..bfd19a0ccb1 100644 --- a/docs/html/cli/pip_wheel.rst +++ b/docs/html/cli/pip_wheel.rst @@ -35,6 +35,7 @@ Differences to ``build`` `build `_ is a simple tool which can among other things build wheels for projects using PEP 517. It is comparable to the execution of ``pip wheel --no-deps .``. +It can also build source distributions which is not possible with ``pip``. ``pip wheel`` covers the wheel scope of ``build`` but offers many additional features. Options From 5a770bd2d657a35f50152896a3b3691f9af5f56c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 3 Aug 2022 19:57:04 +0200 Subject: [PATCH 056/730] Handle a type error statically in collector --- src/pip/_internal/index/collector.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index c8e120b519e..fed018ac31d 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -443,14 +443,7 @@ def _make_index_content( ) -def _get_index_content( - link: Link, session: Optional[PipSession] = None -) -> Optional["IndexContent"]: - if session is None: - raise TypeError( - "_get_index_content() missing 1 required keyword argument: 'session'" - ) - +def _get_index_content(link: Link, session: PipSession) -> Optional["IndexContent"]: url = link.url.split("#", 1)[0] # Check for VCS schemes that do not support lookup as web pages. 
From 1294804204cda0ba3dabcc991d248685424d8e6b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 3 Aug 2022 19:58:30 +0200 Subject: [PATCH 057/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 3d4b45a0c99..a40148f008f 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "22.2.2" +__version__ = "22.3.dev0" def main(args: Optional[List[str]] = None) -> int: From 2009007cafcc3e8ddec399db2d222e6b63d1c36e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 3 Aug 2022 20:44:11 +0200 Subject: [PATCH 058/730] Remove 22.2.2 news files --- news/11314.bugfix.rst | 1 - news/11319.bugfix.rst | 1 - news/11330.bugfix.rst | 1 - 3 files changed, 3 deletions(-) delete mode 100644 news/11314.bugfix.rst delete mode 100644 news/11319.bugfix.rst delete mode 100644 news/11330.bugfix.rst diff --git a/news/11314.bugfix.rst b/news/11314.bugfix.rst deleted file mode 100644 index 02d78dc47ff..00000000000 --- a/news/11314.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid ``AttributeError`` when removing the setuptools-provided ``_distutils_hack`` and it is missing its implementation. diff --git a/news/11319.bugfix.rst b/news/11319.bugfix.rst deleted file mode 100644 index 31cd2a34b0b..00000000000 --- a/news/11319.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix import error when reinstalling pip in user site. diff --git a/news/11330.bugfix.rst b/news/11330.bugfix.rst deleted file mode 100644 index e03501fe5ef..00000000000 --- a/news/11330.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Show pip deprecation warnings by default. 
From 9b638ec6dcf3b28c8c57b0e08056ee19177f52fd Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 3 Aug 2022 20:25:40 +0100 Subject: [PATCH 059/730] Update docs to match behaviour --- docs/html/topics/python-option.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/html/topics/python-option.md b/docs/html/topics/python-option.md index 435d8ed6774..5ad46e7af9c 100644 --- a/docs/html/topics/python-option.md +++ b/docs/html/topics/python-option.md @@ -5,14 +5,13 @@ Occasionally, you may want to use pip to manage a Python installation other than the one pip is installed into. In this case, you can use the `--python` option -to specify the interpreter you want to manage. This option can take one of three +to specify the interpreter you want to manage. This option can take one of two values: 1. The path to a Python executable. 2. The path to a virtual environment. -3. Either "py" or "python", referring to the currently active Python interpreter. -In all 3 cases, pip will run exactly as if it had been invoked from that Python +In both cases, pip will run exactly as if it had been invoked from that Python environment. 
One example of where this might be useful is to manage a virtual environment From de49b52ec2522b78a93e5a2f77e59707dca6b22f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Thu, 4 Aug 2022 09:04:03 +0200 Subject: [PATCH 060/730] _get_index_content's session arg must be a kw arg Co-authored-by: Pradyun Gedam --- src/pip/_internal/index/collector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index fed018ac31d..3b96749db96 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -443,7 +443,7 @@ def _make_index_content( ) -def _get_index_content(link: Link, session: PipSession) -> Optional["IndexContent"]: +def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]: url = link.url.split("#", 1)[0] # Check for VCS schemes that do not support lookup as web pages. From b423c07ff49a8fb4089421d16c203549ba010b76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 29 Aug 2021 16:56:18 +0200 Subject: [PATCH 061/730] Detected indented ERROR and WARNING messages in tests --- tests/lib/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 79b240eeb24..c753768c939 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -447,6 +447,7 @@ def _check_stderr( lines = stderr.splitlines() for line in lines: + line = line.lstrip() # First check for logging errors, which we don't allow during # tests even if allow_stderr_error=True (since a logging error # would signal a bug in pip's code). 
From 58d8dc28cb841317453b7ce62680d7cf15761866 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 2 Aug 2022 12:19:13 +0200 Subject: [PATCH 062/730] Do not fail tests on our own deprecation warnings --- tests/lib/__init__.py | 3 +-- tests/lib/test_lib.py | 2 -- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index c753768c939..1dfaea7e0f2 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -39,7 +39,6 @@ from pip._internal.models.selection_prefs import SelectionPreferences from pip._internal.models.target_python import TargetPython from pip._internal.network.session import PipSession -from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX from tests.lib.venv import VirtualEnvironment from tests.lib.wheel import make_wheel @@ -474,7 +473,7 @@ def _check_stderr( if allow_stderr_warning: continue - if line.startswith("WARNING: ") or line.startswith(DEPRECATION_MSG_PREFIX): + if line.startswith("WARNING: "): reason = ( "stderr has an unexpected warning " "(pass allow_stderr_warning=True to permit this)" diff --git a/tests/lib/test_lib.py b/tests/lib/test_lib.py index ea9baed54d3..a541a0a204d 100644 --- a/tests/lib/test_lib.py +++ b/tests/lib/test_lib.py @@ -150,7 +150,6 @@ def test_run__allow_stderr_warning(self, script: PipTestEnvironment) -> None: @pytest.mark.parametrize( "prefix", ( - "DEPRECATION", "WARNING", "ERROR", ), @@ -167,7 +166,6 @@ def test_run__allow_stderr_error( @pytest.mark.parametrize( "prefix, expected_start", ( - ("DEPRECATION", "stderr has an unexpected warning"), ("WARNING", "stderr has an unexpected warning"), ("ERROR", "stderr has an unexpected error"), ), From 1800635e4c12f677cb063576c472f335d22662d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 2 Aug 2022 14:35:32 +0200 Subject: [PATCH 063/730] Fix tests with indented errors and warning --- tests/functional/test_download.py | 20 ++++++++++++++++-- 
tests/functional/test_install.py | 27 +++++++++++++++++++----- tests/functional/test_install_vcs_git.py | 22 ++++++++++++++----- tests/functional/test_uninstall.py | 2 +- 4 files changed, 58 insertions(+), 13 deletions(-) diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 93718ca42fe..89318b74553 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -1116,9 +1116,17 @@ def test_download_file_url_existing_bad_download( simple_pkg_bytes = simple_pkg.read_bytes() url = f"{simple_pkg.as_uri()}#sha256={sha256(simple_pkg_bytes).hexdigest()}" - shared_script.pip("download", "-d", str(download_dir), url) + result = shared_script.pip( + "download", + "-d", + str(download_dir), + url, + allow_stderr_warning=True, # bad hash + ) assert simple_pkg_bytes == downloaded_path.read_bytes() + assert "WARNING: Previously-downloaded file" in result.stderr + assert "has bad hash. Re-downloading." in result.stderr def test_download_http_url_bad_hash( @@ -1144,9 +1152,17 @@ def test_download_http_url_bad_hash( base_address = f"http://{mock_server.host}:{mock_server.port}" url = f"{base_address}/simple-1.0.tar.gz#sha256={digest}" - shared_script.pip("download", "-d", str(download_dir), url) + result = shared_script.pip( + "download", + "-d", + str(download_dir), + url, + allow_stderr_warning=True, # bad hash + ) assert simple_pkg_bytes == downloaded_path.read_bytes() + assert "WARNING: Previously-downloaded file" in result.stderr + assert "has bad hash. Re-downloading." in result.stderr mock_server.stop() requests = mock_server.get_requests() diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index e74477fe299..bc9ca9eaf96 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -183,7 +183,16 @@ def test_pep518_with_user_pip( non-isolated environment, and break pip in the system site-packages, so that isolated uses of pip will fail. 
""" - script.pip("install", "--ignore-installed", "-f", common_wheels, "--user", pip_src) + script.pip( + "install", + "--ignore-installed", + "-f", + common_wheels, + "--user", + pip_src, + # WARNING: The scripts pip, pip3, ... are installed in ... which is not on PATH + allow_stderr_warning=True, + ) system_pip_dir = script.site_packages_path / "pip" assert not system_pip_dir.exists() system_pip_dir.mkdir() @@ -1542,13 +1551,16 @@ def test_install_topological_sort(script: PipTestEnvironment, data: TestData) -> @pytest.mark.usefixtures("with_wheel") def test_install_wheel_broken(script: PipTestEnvironment) -> None: - res = script.pip_install_local("wheelbroken", expect_stderr=True) + res = script.pip_install_local("wheelbroken", allow_stderr_error=True) + assert "ERROR: Failed building wheel for wheelbroken" in res.stderr + # Fallback to setup.py install (https://github.com/pypa/pip/issues/8368) assert "Successfully installed wheelbroken-0.1" in str(res), str(res) @pytest.mark.usefixtures("with_wheel") def test_cleanup_after_failed_wheel(script: PipTestEnvironment) -> None: - res = script.pip_install_local("wheelbrokenafter", expect_stderr=True) + res = script.pip_install_local("wheelbrokenafter", allow_stderr_error=True) + assert "ERROR: Failed building wheel for wheelbrokenafter" in res.stderr # One of the effects of not cleaning up is broken scripts: script_py = script.bin_path / "script.py" assert script_py.exists(), script_py @@ -1577,7 +1589,12 @@ def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> No # vcs coverage. 
to_install = data.packages.joinpath("requires_wheelbroken_upper") res = script.pip( - "install", "--no-index", "-f", data.find_links, to_install, expect_stderr=True + "install", + "--no-index", + "-f", + data.find_links, + to_install, + allow_stderr_error=True, # error building wheelbroken ) expected = ( "Successfully installed requires-wheelbroken-upper-0" @@ -1620,7 +1637,7 @@ def test_install_no_binary_disables_building_wheels( "-f", data.find_links, to_install, - expect_stderr=True, + allow_stderr_error=True, # error building wheelbroken ) expected = ( "Successfully installed requires-wheelbroken-upper-0" diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index 4cdf71551b8..2171d3162b3 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -94,7 +94,7 @@ def _install_version_pkg_only( script: PipTestEnvironment, path: Path, rev: Optional[str] = None, - expect_stderr: bool = False, + allow_stderr_warning: bool = False, ) -> None: """ Install the version_pkg package in editable mode (without returning @@ -106,14 +106,16 @@ def _install_version_pkg_only( rev: an optional revision to install like a branch name or tag. 
""" version_pkg_url = _make_version_pkg_url(path, rev=rev) - script.pip("install", "-e", version_pkg_url, expect_stderr=expect_stderr) + script.pip( + "install", "-e", version_pkg_url, allow_stderr_warning=allow_stderr_warning + ) def _install_version_pkg( script: PipTestEnvironment, path: Path, rev: Optional[str] = None, - expect_stderr: bool = False, + allow_stderr_warning: bool = False, ) -> str: """ Install the version_pkg package in editable mode, and return the version @@ -128,7 +130,7 @@ def _install_version_pkg( script, path, rev=rev, - expect_stderr=expect_stderr, + allow_stderr_warning=allow_stderr_warning, ) result = script.run("version_pkg") version = result.stdout.strip() @@ -227,7 +229,13 @@ def test_git_with_short_sha1_revisions(script: PipTestEnvironment) -> None: "HEAD~1", cwd=version_pkg_path, ).stdout.strip()[:7] - version = _install_version_pkg(script, version_pkg_path, rev=sha1) + version = _install_version_pkg( + script, + version_pkg_path, + rev=sha1, + # WARNING: Did not find branch or tag ..., assuming revision or ref. + allow_stderr_warning=True, + ) assert "0.1" == version @@ -273,6 +281,8 @@ def test_git_install_ref(script: PipTestEnvironment) -> None: script, version_pkg_path, rev="refs/foo/bar", + # WARNING: Did not find branch or tag ..., assuming revision or ref. + allow_stderr_warning=True, ) assert "0.1" == version @@ -294,6 +304,8 @@ def test_git_install_then_install_ref(script: PipTestEnvironment) -> None: script, version_pkg_path, rev="refs/foo/bar", + # WARNING: Did not find branch or tag ..., assuming revision or ref. 
+ allow_stderr_warning=True, ) assert "0.1" == version diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index 2933a5ef99c..b0e12f6af59 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -684,7 +684,7 @@ def test_uninstall_editable_and_pip_install_easy_install_remove( os.remove(pip_test_fspkg_pth) # Uninstall will fail with given warning - uninstall = script.pip("uninstall", "FSPkg", "-y") + uninstall = script.pip("uninstall", "FSPkg", "-y", allow_stderr_warning=True) assert "Cannot remove entries from nonexistent file" in uninstall.stderr assert ( From 6817fbfb1fd389ad61009f0199db5670b146c8d3 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sat, 6 Aug 2022 06:18:59 +0800 Subject: [PATCH 064/730] Skip dist if metadata does not have a valid name --- news/11352.bugfix.rst | 2 ++ src/pip/_internal/metadata/importlib/_compat.py | 14 +++++++++++++- src/pip/_internal/metadata/importlib/_envs.py | 14 +++++++++++--- 3 files changed, 26 insertions(+), 4 deletions(-) create mode 100644 news/11352.bugfix.rst diff --git a/news/11352.bugfix.rst b/news/11352.bugfix.rst new file mode 100644 index 00000000000..78016c912ef --- /dev/null +++ b/news/11352.bugfix.rst @@ -0,0 +1,2 @@ +Ignore distributions with invalid ``Name`` in metadata instead of crashing, when +using the ``importlib.metadata`` backend. 
diff --git a/src/pip/_internal/metadata/importlib/_compat.py b/src/pip/_internal/metadata/importlib/_compat.py index e0879807ab9..593bff23ede 100644 --- a/src/pip/_internal/metadata/importlib/_compat.py +++ b/src/pip/_internal/metadata/importlib/_compat.py @@ -2,6 +2,15 @@ from typing import Any, Optional, Protocol, cast +class BadMetadata(ValueError): + def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None: + self.dist = dist + self.reason = reason + + def __str__(self) -> str: + return f"Bad metadata in {self.dist} ({self.reason})" + + class BasePath(Protocol): """A protocol that various path objects conform. @@ -40,4 +49,7 @@ def get_dist_name(dist: importlib.metadata.Distribution) -> str: The ``name`` attribute is only available in Python 3.10 or later. We are targeting exactly that, but Mypy does not know this. """ - return cast(Any, dist).name + name = cast(Any, dist).name + if not isinstance(name, str): + raise BadMetadata(dist, reason="invalid metadata entry 'name'") + return name diff --git a/src/pip/_internal/metadata/importlib/_envs.py b/src/pip/_internal/metadata/importlib/_envs.py index d5fcfdbfef2..cbec59e2c6d 100644 --- a/src/pip/_internal/metadata/importlib/_envs.py +++ b/src/pip/_internal/metadata/importlib/_envs.py @@ -1,5 +1,6 @@ import functools import importlib.metadata +import logging import os import pathlib import sys @@ -14,9 +15,11 @@ from pip._internal.utils.deprecation import deprecated from pip._internal.utils.filetypes import WHEEL_EXTENSION -from ._compat import BasePath, get_dist_name, get_info_location +from ._compat import BadMetadata, BasePath, get_dist_name, get_info_location from ._dists import Distribution +logger = logging.getLogger(__name__) + def _looks_like_wheel(location: str) -> bool: if not location.endswith(WHEEL_EXTENSION): @@ -56,11 +59,16 @@ def _find_impl(self, location: str) -> Iterator[FoundResult]: # To know exactly where we find a distribution, we have to feed in the # paths one by 
one, instead of dumping the list to importlib.metadata. for dist in importlib.metadata.distributions(path=[location]): - normalized_name = canonicalize_name(get_dist_name(dist)) + info_location = get_info_location(dist) + try: + raw_name = get_dist_name(dist) + except BadMetadata as e: + logger.warning("Skipping %s due to %s", info_location, e.reason) + continue + normalized_name = canonicalize_name(raw_name) if normalized_name in self._found_names: continue self._found_names.add(normalized_name) - info_location = get_info_location(dist) yield dist, info_location def find(self, location: str) -> Iterator[BaseDistribution]: From 27878a52af77c9e677ffaf63d64959514d1442e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 6 Aug 2022 19:26:58 +0200 Subject: [PATCH 065/730] Refactor legacy_install_reason --- src/pip/_internal/commands/install.py | 3 +- src/pip/_internal/req/req_install.py | 22 +++++--------- src/pip/_internal/utils/deprecation.py | 41 ++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 15 deletions(-) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 29907645c81..65d6362d77c 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -29,6 +29,7 @@ from pip._internal.req import install_given_reqs from pip._internal.req.req_install import InstallRequirement from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import LegacyInstallReasonFailedBdistWheel from pip._internal.utils.distutils_args import parse_distutils_args from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.logging import getLogger @@ -440,7 +441,7 @@ def run(self, options: Values, args: List[str]) -> int: # those. 
for r in build_failures: if not r.use_pep517: - r.legacy_install_reason = 8368 + r.legacy_install_reason = LegacyInstallReasonFailedBdistWheel to_install = resolver.get_installation_order(requirement_set) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index a1e376c893a..2b9cd992e7f 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -42,7 +42,7 @@ from pip._internal.operations.install.wheel import install_wheel from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path from pip._internal.req.req_uninstall import UninstallPathSet -from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.deprecation import LegacyInstallReason, deprecated from pip._internal.utils.direct_url_helpers import ( direct_url_for_editable, direct_url_from_link, @@ -96,7 +96,7 @@ def __init__( self.constraint = constraint self.editable = editable self.permit_editable_wheels = permit_editable_wheels - self.legacy_install_reason: Optional[int] = None + self.legacy_install_reason: Optional[LegacyInstallReason] = None # source_dir is the local directory where the linked requirement is # located, or unpacked. 
In case unpacking is needed, creating and @@ -836,18 +836,12 @@ def install( self.install_succeeded = success - if success and self.legacy_install_reason == 8368: - deprecated( - reason=( - "{} was installed using the legacy 'setup.py install' " - "method, because a wheel could not be built for it.".format( - self.name - ) - ), - replacement="to fix the wheel build issue reported above", - gone_in=None, - issue=8368, - ) + if ( + success + and self.legacy_install_reason is not None + and self.legacy_install_reason.emit_after_success + ): + self.legacy_install_reason.emit_deprecation(self.name) def check_invalid_constraint_type(req: InstallRequirement) -> str: diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index 72bd6f25a55..a15c52197b9 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -118,3 +118,44 @@ def deprecated( raise PipDeprecationWarning(message) warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) + + +class LegacyInstallReason: + def __init__( + self, + reason: str, + replacement: Optional[str], + gone_in: Optional[str], + feature_flag: Optional[str] = None, + issue: Optional[int] = None, + emit_after_success: bool = False, + emit_before_install: bool = False, + ): + self._reason = reason + self._replacement = replacement + self._gone_in = gone_in + self._feature_flag = feature_flag + self._issue = issue + self.emit_after_success = emit_after_success + self.emit_before_install = emit_before_install + + def emit_deprecation(self, name: str) -> None: + deprecated( + reason=self._reason.format(name=name), + replacement=self._replacement, + gone_in=self._gone_in, + feature_flag=self._feature_flag, + issue=self._issue, + ) + + +LegacyInstallReasonFailedBdistWheel = LegacyInstallReason( + reason=( + "{name} was installed using the legacy 'setup.py install' " + "method, because a wheel could not be built for it." 
+ ), + replacement="to fix the wheel build issue reported above", + gone_in=None, + issue=8368, + emit_after_success=True, +) From 0326b33a6df086c0d83e46ac0856dd1c7b6fe51e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 1 Aug 2022 17:06:55 +0200 Subject: [PATCH 066/730] Add missing with_wheel fixture --- tests/functional/test_install_index.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/test_install_index.py b/tests/functional/test_install_index.py index 71c0b3e6c60..c1f0ecbd7c6 100644 --- a/tests/functional/test_install_index.py +++ b/tests/functional/test_install_index.py @@ -23,6 +23,7 @@ def test_find_links_relative_path(script: PipTestEnvironment, data: TestData) -> result.did_create(initools_folder) +@pytest.mark.usefixtures("with_wheel") def test_find_links_no_doctype(script: PipTestEnvironment, data: TestData) -> None: shutil.copy(data.packages / "simple-1.0.tar.gz", script.scratch_path) html = script.scratch_path.joinpath("index.html") From afe136c42b3c374567acdfbe097da0376557421a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 2 Aug 2022 12:04:00 +0200 Subject: [PATCH 067/730] Add test for issue 8559 deprecation --- tests/functional/test_install.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index e74477fe299..740cd2f97a6 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -12,6 +12,7 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.models.index import PyPI, TestPyPI +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX from pip._internal.utils.misc import rmtree from tests.conftest import CertFactory from tests.lib import ( @@ -2286,3 +2287,32 @@ def test_install_dry_run(script: PipTestEnvironment, data: TestData) -> None: ) assert "Would install simple-3.0" in result.stdout assert 
"Successfully installed" not in result.stdout + + +def test_install_8559_missing_wheel_package( + script: PipTestEnvironment, shared_data: TestData +) -> None: + result = script.pip( + "install", + "--find-links", + shared_data.find_links, + "simple", + allow_stderr_warning=True, + ) + assert DEPRECATION_MSG_PREFIX in result.stderr + assert "'wheel' package is not installed" in result.stderr + assert "using the legacy 'setup.py install' method" in result.stderr + + +@pytest.mark.usefixtures("with_wheel") +def test_install_8559_wheel_package_present( + script: PipTestEnvironment, shared_data: TestData +) -> None: + result = script.pip( + "install", + "--find-links", + shared_data.find_links, + "simple", + allow_stderr_warning=False, + ) + assert DEPRECATION_MSG_PREFIX not in result.stderr From ae802e3e66e8a921141872fcd7b0cba9522cf5aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 31 Jul 2022 17:16:26 +0200 Subject: [PATCH 068/730] Deprecate setup.py install fallback when wheel package is absent --- news/8559.removal.rst | 2 ++ src/pip/_internal/req/req_install.py | 5 +++++ src/pip/_internal/utils/deprecation.py | 14 ++++++++++++++ src/pip/_internal/wheel_builder.py | 7 ++----- 4 files changed, 23 insertions(+), 5 deletions(-) create mode 100644 news/8559.removal.rst diff --git a/news/8559.removal.rst b/news/8559.removal.rst new file mode 100644 index 00000000000..aa9f814120d --- /dev/null +++ b/news/8559.removal.rst @@ -0,0 +1,2 @@ +Deprecate installation with 'setup.py install' when the 'wheel' package is absent for +source distributions without 'pyproject.toml'. 
diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 2b9cd992e7f..88d481dfe5c 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -811,6 +811,11 @@ def install( install_options = list(install_options) + self.install_options try: + if ( + self.legacy_install_reason is not None + and self.legacy_install_reason.emit_before_install + ): + self.legacy_install_reason.emit_deprecation(self.name) success = install_legacy( install_options=install_options, global_options=global_options, diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index a15c52197b9..7c7ace6ff4c 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -159,3 +159,17 @@ def emit_deprecation(self, name: str) -> None: issue=8368, emit_after_success=True, ) + + +LegacyInstallReasonMissingWheelPackage = LegacyInstallReason( + reason=( + "{name} is being installed using the legacy " + "'setup.py install' method, because it does not have a " + "'pyproject.toml' and the 'wheel' package " + "is not installed." 
+ ), + replacement="to enable the '--use-pep517' option", + gone_in=None, + issue=8559, + emit_before_install=True, +) diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index 77a17ff0f15..d2a7146edb7 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -19,6 +19,7 @@ from pip._internal.operations.build.wheel_editable import build_wheel_editable from pip._internal.operations.build.wheel_legacy import build_wheel_legacy from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.deprecation import LegacyInstallReasonMissingWheelPackage from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed from pip._internal.utils.setuptools_build import make_setuptools_clean_args @@ -86,11 +87,7 @@ def _should_build( if not is_wheel_installed(): # we don't build legacy requirements if wheel is not installed - logger.info( - "Using legacy 'setup.py install' for %s, " - "since package 'wheel' is not installed.", - req.name, - ) + req.legacy_install_reason = LegacyInstallReasonMissingWheelPackage return False return True From 77da6ae52c4a948ec75956ab39f8cc7955cd8f9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 7 Aug 2022 11:39:56 +0200 Subject: [PATCH 069/730] Mention --quiet in --report option help --- news/11357.doc.rst | 1 + src/pip/_internal/commands/install.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 news/11357.doc.rst diff --git a/news/11357.doc.rst b/news/11357.doc.rst new file mode 100644 index 00000000000..887928a086e --- /dev/null +++ b/news/11357.doc.rst @@ -0,0 +1 @@ +Mention that --quiet must be used when writing the installation report to stdout. 
diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 29907645c81..e4b90bde801 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -263,7 +263,9 @@ def add_options(self) -> None: "the provided requirements. " "Can be used in combination with --dry-run and --ignore-installed " "to 'resolve' the requirements. " - "When - is used as file name it writes to stdout." + "When - is used as file name it writes to stdout. " + "When writing to stdout, please combine with the --quiet option " + "to avoid mixing pip logging output with JSON output." ), ) From be718ff59ec093f22cc91eb710c53a7553b58ae3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Fri, 12 Aug 2022 12:57:39 +0200 Subject: [PATCH 070/730] Fix tests that relied on setuptools not supporting PEP 660 --- tests/functional/test_install.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 340d7205df0..f4f8d4efb0c 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -700,17 +700,27 @@ def test_editable_install__local_dir_no_setup_py( ) +@pytest.mark.network def test_editable_install__local_dir_no_setup_py_with_pyproject( script: PipTestEnvironment, ) -> None: """ Test installing in editable mode from a local directory with no setup.py - but that does have pyproject.toml. + but that does have pyproject.toml with a build backend that does not support + the build_editable hook. 
""" local_dir = script.scratch_path.joinpath("temp") local_dir.mkdir() pyproject_path = local_dir.joinpath("pyproject.toml") - pyproject_path.write_text("") + pyproject_path.write_text( + textwrap.dedent( + """ + [build-system] + requires = ["setuptools<64"] + build-backend = "setuptools.build_meta" + """ + ) + ) result = script.pip("install", "-e", local_dir, expect_error=True) assert not result.files_created @@ -1253,13 +1263,14 @@ def test_install_editable_with_prefix_setup_py(script: PipTestEnvironment) -> No _test_install_editable_with_prefix(script, {"setup.py": setup_py}) +@pytest.mark.network def test_install_editable_with_prefix_setup_cfg(script: PipTestEnvironment) -> None: setup_cfg = """[metadata] name = pkga version = 0.1 """ pyproject_toml = """[build-system] -requires = ["setuptools", "wheel"] +requires = ["setuptools<64", "wheel"] build-backend = "setuptools.build_meta" """ _test_install_editable_with_prefix( From df8a5011b6f4c35902d48f9c9940980f4d2a6ddf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 7 Aug 2022 15:57:52 +0200 Subject: [PATCH 071/730] Simplify should_build --- src/pip/_internal/wheel_builder.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index d2a7146edb7..a166146621b 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -5,7 +5,7 @@ import os.path import re import shutil -from typing import Any, Callable, Iterable, List, Optional, Tuple +from typing import Callable, Iterable, List, Optional, Tuple from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version from pip._vendor.packaging.version import InvalidVersion, Version @@ -47,7 +47,7 @@ def _contains_egg_info(s: str) -> bool: def _should_build( req: InstallRequirement, need_wheel: bool, - check_binary_allowed: BinaryAllowedPredicate, + check_binary_allowed: Optional[BinaryAllowedPredicate] 
= None, ) -> bool: """Return whether an InstallRequirement should be built into a wheel.""" if req.constraint: @@ -78,6 +78,7 @@ def _should_build( if req.use_pep517: return True + assert check_binary_allowed is not None if not check_binary_allowed(req): logger.info( "Skipping wheel build for %s, due to binaries being disabled for it.", @@ -96,7 +97,7 @@ def _should_build( def should_build_for_wheel_command( req: InstallRequirement, ) -> bool: - return _should_build(req, need_wheel=True, check_binary_allowed=_always_true) + return _should_build(req, need_wheel=True) def should_build_for_install_command( @@ -156,10 +157,6 @@ def _get_cache_dir( return cache_dir -def _always_true(_: Any) -> bool: - return True - - def _verify_one(req: InstallRequirement, wheel_path: str) -> None: canonical_name = canonicalize_name(req.name or "") w = Wheel(os.path.basename(wheel_path)) From d8e2d6605ab4bba341f80519cada310556204abe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 7 Aug 2022 16:09:01 +0200 Subject: [PATCH 072/730] Rename BinaryAllowedPredicate It really is a BdistWheelAllowedPredicate and this will make it easier to reason when --no-binary does not imply setup.py install anymore. 
--- src/pip/_internal/commands/install.py | 12 ++++++++---- src/pip/_internal/wheel_builder.py | 12 ++++++------ tests/unit/test_wheel_builder.py | 10 +++++----- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 91fe3b3b658..dcf5ce8c617 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -45,7 +45,7 @@ virtualenv_no_global, ) from pip._internal.wheel_builder import ( - BinaryAllowedPredicate, + BdistWheelAllowedPredicate, build, should_build_for_install_command, ) @@ -53,7 +53,9 @@ logger = getLogger(__name__) -def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate: +def get_check_bdist_wheel_allowed( + format_control: FormatControl, +) -> BdistWheelAllowedPredicate: def check_binary_allowed(req: InstallRequirement) -> bool: canonical_name = canonicalize_name(req.name or "") allowed_formats = format_control.get_allowed_formats(canonical_name) @@ -409,12 +411,14 @@ def run(self, options: Values, args: List[str]) -> int: modifying_pip = pip_req.satisfied_by is None protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) - check_binary_allowed = get_check_binary_allowed(finder.format_control) + check_bdist_wheel_allowed = get_check_bdist_wheel_allowed( + finder.format_control + ) reqs_to_build = [ r for r in requirement_set.requirements.values() - if should_build_for_install_command(r, check_binary_allowed) + if should_build_for_install_command(r, check_bdist_wheel_allowed) ] _, build_failures = build( diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index a166146621b..60db28e92c3 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -32,7 +32,7 @@ _egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE) -BinaryAllowedPredicate = Callable[[InstallRequirement], bool] 
+BdistWheelAllowedPredicate = Callable[[InstallRequirement], bool] BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] @@ -47,7 +47,7 @@ def _contains_egg_info(s: str) -> bool: def _should_build( req: InstallRequirement, need_wheel: bool, - check_binary_allowed: Optional[BinaryAllowedPredicate] = None, + check_bdist_wheel: Optional[BdistWheelAllowedPredicate] = None, ) -> bool: """Return whether an InstallRequirement should be built into a wheel.""" if req.constraint: @@ -78,8 +78,8 @@ def _should_build( if req.use_pep517: return True - assert check_binary_allowed is not None - if not check_binary_allowed(req): + assert check_bdist_wheel is not None + if not check_bdist_wheel(req): logger.info( "Skipping wheel build for %s, due to binaries being disabled for it.", req.name, @@ -102,10 +102,10 @@ def should_build_for_wheel_command( def should_build_for_install_command( req: InstallRequirement, - check_binary_allowed: BinaryAllowedPredicate, + check_bdist_wheel_allowed: BdistWheelAllowedPredicate, ) -> bool: return _should_build( - req, need_wheel=False, check_binary_allowed=check_binary_allowed + req, need_wheel=False, check_bdist_wheel=check_bdist_wheel_allowed ) diff --git a/tests/unit/test_wheel_builder.py b/tests/unit/test_wheel_builder.py index 2329899608a..5444056e790 100644 --- a/tests/unit/test_wheel_builder.py +++ b/tests/unit/test_wheel_builder.py @@ -58,7 +58,7 @@ def supports_pyproject_editable(self) -> bool: @pytest.mark.parametrize( - "req, disallow_binaries, expected", + "req, disallow_bdist_wheel, expected", [ # When binaries are allowed, we build. 
(ReqMock(use_pep517=True), False, True), @@ -110,11 +110,11 @@ def supports_pyproject_editable(self) -> bool: ], ) def test_should_build_for_install_command( - req: ReqMock, disallow_binaries: bool, expected: bool + req: ReqMock, disallow_bdist_wheel: bool, expected: bool ) -> None: should_build = wheel_builder.should_build_for_install_command( cast(InstallRequirement, req), - check_binary_allowed=lambda req: not disallow_binaries, + check_bdist_wheel_allowed=lambda req: not disallow_bdist_wheel, ) assert should_build is expected @@ -144,7 +144,7 @@ def test_should_build_legacy_wheel_not_installed(is_wheel_installed: mock.Mock) legacy_req = ReqMock(use_pep517=False) should_build = wheel_builder.should_build_for_install_command( cast(InstallRequirement, legacy_req), - check_binary_allowed=lambda req: True, + check_bdist_wheel_allowed=lambda req: True, ) assert not should_build @@ -155,7 +155,7 @@ def test_should_build_legacy_wheel_installed(is_wheel_installed: mock.Mock) -> N legacy_req = ReqMock(use_pep517=False) should_build = wheel_builder.should_build_for_install_command( cast(InstallRequirement, legacy_req), - check_binary_allowed=lambda req: True, + check_bdist_wheel_allowed=lambda req: True, ) assert should_build From b9ec5ddc297259cc81dfcb6c590da5700554ae8b Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Fri, 12 Aug 2022 18:44:48 -0700 Subject: [PATCH 073/730] Use --no-implicit-optional for type checking This makes type checking PEP 484 compliant (as of 2018). mypy will change its defaults soon. 
See: https://github.com/python/mypy/issues/9091 https://github.com/python/mypy/pull/13401 --- src/pip/_internal/cli/spinners.py | 4 ++-- src/pip/_internal/exceptions.py | 5 ++++- src/pip/_internal/locations/_distutils.py | 6 +++--- src/pip/_internal/operations/install/wheel.py | 4 +++- src/pip/_internal/utils/hashes.py | 2 +- src/pip/_internal/utils/setuptools_build.py | 2 +- src/pip/_internal/vcs/subversion.py | 2 +- 7 files changed, 15 insertions(+), 10 deletions(-) diff --git a/src/pip/_internal/cli/spinners.py b/src/pip/_internal/cli/spinners.py index a50e6adf263..cf2b976f377 100644 --- a/src/pip/_internal/cli/spinners.py +++ b/src/pip/_internal/cli/spinners.py @@ -3,7 +3,7 @@ import logging import sys import time -from typing import IO, Generator +from typing import IO, Generator, Optional from pip._internal.utils.compat import WINDOWS from pip._internal.utils.logging import get_indentation @@ -23,7 +23,7 @@ class InteractiveSpinner(SpinnerInterface): def __init__( self, message: str, - file: IO[str] = None, + file: Optional[IO[str]] = None, spin_chars: str = "-\\|/", # Empirically, 8 updates/second looks nice min_update_interval_seconds: float = 0.125, diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 97b9612a187..377cde52521 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -288,7 +288,10 @@ class NetworkConnectionError(PipError): """HTTP connection error""" def __init__( - self, error_msg: str, response: Response = None, request: Request = None + self, + error_msg: str, + response: Optional[Response] = None, + request: Optional[Request] = None, ) -> None: """ Initialize NetworkConnectionError with `request` and `response` diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index fbcb04f488f..c7712f016f5 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -35,10 +35,10 @@ def 
distutils_scheme( dist_name: str, user: bool = False, - home: str = None, - root: str = None, + home: Optional[str] = None, + root: Optional[str] = None, isolated: bool = False, - prefix: str = None, + prefix: Optional[str] = None, *, ignore_config_files: bool = False, ) -> Dict[str, str]: diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py index 1af8978d409..1650d59a374 100644 --- a/src/pip/_internal/operations/install/wheel.py +++ b/src/pip/_internal/operations/install/wheel.py @@ -420,7 +420,9 @@ def _raise_for_invalid_entrypoint(specification: str) -> None: class PipScriptMaker(ScriptMaker): - def make(self, specification: str, options: Dict[str, Any] = None) -> List[str]: + def make( + self, specification: str, options: Optional[Dict[str, Any]] = None + ) -> List[str]: _raise_for_invalid_entrypoint(specification) return super().make(specification, options) diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py index 0c1af327cc2..e79cfdb8c20 100644 --- a/src/pip/_internal/utils/hashes.py +++ b/src/pip/_internal/utils/hashes.py @@ -28,7 +28,7 @@ class Hashes: """ - def __init__(self, hashes: Dict[str, List[str]] = None) -> None: + def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None: """ :param hashes: A dict of algorithm names pointing to lists of allowed hex digests diff --git a/src/pip/_internal/utils/setuptools_build.py b/src/pip/_internal/utils/setuptools_build.py index f460c4003f3..01ef4a4ca59 100644 --- a/src/pip/_internal/utils/setuptools_build.py +++ b/src/pip/_internal/utils/setuptools_build.py @@ -48,7 +48,7 @@ def make_setuptools_shim_args( setup_py_path: str, - global_options: Sequence[str] = None, + global_options: Optional[Sequence[str]] = None, no_user_config: bool = False, unbuffered_output: bool = False, ) -> List[str]: diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 89c8754ce09..2cd6f0ae9d2 100644 
--- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -184,7 +184,7 @@ def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool: """Always assume the versions don't match""" return False - def __init__(self, use_interactive: bool = None) -> None: + def __init__(self, use_interactive: Optional[bool] = None) -> None: if use_interactive is None: use_interactive = is_console_interactive() self.use_interactive = use_interactive From 4d13842ec6e246e90718c371b74503cee8d061ad Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Fri, 12 Aug 2022 18:48:15 -0700 Subject: [PATCH 074/730] fixups --- setup.cfg | 1 + src/pip/_internal/utils/hashes.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index bdc224e6dd6..dae2f21b10d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -40,6 +40,7 @@ ignore_missing_imports = True disallow_untyped_defs = True disallow_any_generics = True warn_unused_ignores = True +no_implicit_optional = True [mypy-pip._vendor.*] ignore_errors = True diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py index e79cfdb8c20..76727306a4c 100644 --- a/src/pip/_internal/utils/hashes.py +++ b/src/pip/_internal/utils/hashes.py @@ -1,5 +1,5 @@ import hashlib -from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List +from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError from pip._internal.utils.misc import read_chunks From c0b86d338a69741fc791bd94575f8cfdecfc1c3e Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Fri, 12 Aug 2022 18:51:47 -0700 Subject: [PATCH 075/730] no news today --- news/5580954E-E089-4CDB-857A-868BA1F7435D.trivial.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 news/5580954E-E089-4CDB-857A-868BA1F7435D.trivial.rst diff --git a/news/5580954E-E089-4CDB-857A-868BA1F7435D.trivial.rst 
b/news/5580954E-E089-4CDB-857A-868BA1F7435D.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From edbfeae9fbd4618256091bceabb494749cfc2c94 Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Fri, 12 Aug 2022 19:01:26 -0700 Subject: [PATCH 076/730] fix tests --- tests/lib/__init__.py | 2 +- tests/unit/test_req_file.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 1dfaea7e0f2..1436f7a42c7 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -1168,7 +1168,7 @@ def create_basic_wheel_for_package( name: str, version: str, depends: Optional[List[str]] = None, - extras: Dict[str, List[str]] = None, + extras: Optional[Dict[str, List[str]]] = None, requires_python: Optional[str] = None, extra_files: Optional[Dict[str, Union[bytes, str]]] = None, ) -> pathlib.Path: diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 8928fd1690f..ef575f601d2 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -60,8 +60,8 @@ def options(session: PipSession) -> mock.Mock: def parse_reqfile( filename: Union[Path, str], session: PipSession, - finder: PackageFinder = None, - options: Values = None, + finder: Optional[PackageFinder] = None, + options: Optional[Values] = None, constraint: bool = False, isolated: bool = False, ) -> Iterator[InstallRequirement]: From 1413fae8eb15ba38751173cdd29258c4f8f23790 Mon Sep 17 00:00:00 2001 From: Kai Mueller Date: Sun, 14 Aug 2022 19:26:08 +0000 Subject: [PATCH 077/730] Add news --- news/11254.trivial.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 news/11254.trivial.rst diff --git a/news/11254.trivial.rst b/news/11254.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From 26b66a830fd9322dcc826fee2f1924670ea6c976 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 17 Aug 2022 06:41:27 +0800 Subject: [PATCH 078/730] Decrease timeout to make test less flaky --- 
tests/functional/test_requests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/test_requests.py b/tests/functional/test_requests.py index 0fbc4ae0e36..66050c518e5 100644 --- a/tests/functional/test_requests.py +++ b/tests/functional/test_requests.py @@ -7,7 +7,7 @@ def test_timeout(script: PipTestEnvironment) -> None: result = script.pip( "--timeout", - "0.0001", + "0.00001", "install", "-vvv", "INITools", From 5ec3f37bc87ea129790c9e9408d392343ef72161 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 17 Aug 2022 15:38:15 +0800 Subject: [PATCH 079/730] Don't retry to 'improve' possibility of failure --- tests/functional/test_requests.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/functional/test_requests.py b/tests/functional/test_requests.py index 66050c518e5..622b150aa83 100644 --- a/tests/functional/test_requests.py +++ b/tests/functional/test_requests.py @@ -6,6 +6,8 @@ @pytest.mark.network def test_timeout(script: PipTestEnvironment) -> None: result = script.pip( + "--retries", + "1", "--timeout", "0.00001", "install", From 72ce3ba0fe9b7ff740dc29a87d0863d9416b3cf8 Mon Sep 17 00:00:00 2001 From: Diego Ramirez Date: Thu, 18 Aug 2022 11:02:37 -0500 Subject: [PATCH 080/730] Delete the "good first issue" template Seems like we're not using this template, and other users are using it as "good first reporters". --- .github/ISSUE_TEMPLATE/~good-first-issue.yml | 38 -------------------- 1 file changed, 38 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/~good-first-issue.yml diff --git a/.github/ISSUE_TEMPLATE/~good-first-issue.yml b/.github/ISSUE_TEMPLATE/~good-first-issue.yml deleted file mode 100644 index 81e206a35f9..00000000000 --- a/.github/ISSUE_TEMPLATE/~good-first-issue.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: Good first issue -description: If you're a pip maintainer, use this to create a "good first issue" for new contributors. 
-labels: "good first issue" - -body: - - type: textarea - attributes: - label: Description - description: >- - A clear and concise description of what the task is. - validations: - required: true - - - type: textarea - attributes: - label: What needs to be done - description: >- - Describe what the contributor would need to do, describing the change. - See https://github.com/pypa/pip/issues/7661 for example. - validations: - required: true - - - type: textarea - attributes: - label: Guidance for potential contributors - description: >- - Usually, you don't have to modify the content here. - value: >- - This issue is a good starting point for first time contributors -- the - process of fixing this should be a good introduction to pip's - development workflow. If there is not a corresponding pull request for - this issue, it is up for grabs. For directions for getting set up, see our - [Getting Started Guide](https://pip.pypa.io/en/latest/development/getting-started/). - If you are working on this issue and have questions, feel free to ask - them here. If you've contributed code to pip before, we encourage you to - pick up an issue without this label. - validations: - required: true From 7e1bb71b050ce817c4a535cca999313ec4a550f1 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 26 Aug 2022 13:44:15 +0100 Subject: [PATCH 081/730] Mention pip config on the page about pip's configuration This is relevant to the topic page and should make this command more visible. --- docs/html/topics/configuration.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/html/topics/configuration.md b/docs/html/topics/configuration.md index 9b240ec7902..e4aafcd2b98 100644 --- a/docs/html/topics/configuration.md +++ b/docs/html/topics/configuration.md @@ -11,6 +11,10 @@ pip allows a user to change its behaviour via 3 mechanisms: This page explains how the configuration files and environment variables work, and how they are related to pip's various command line options. 
+```{seealso} +{doc}`../cli/pip_config` command, which helps manage pip's configuration. +``` + (config-file)= ## Configuration Files From 254e668eef34ca21005634a2bdba9d9a74deaa26 Mon Sep 17 00:00:00 2001 From: M00nL1ght <69127692+SCH227@users.noreply.github.com> Date: Tue, 30 Aug 2022 05:51:29 +0300 Subject: [PATCH 082/730] Fix vulnerable regex Implement exclusive RE searches to avoid backtracking --- src/pip/_internal/models/wheel.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/models/wheel.py b/src/pip/_internal/models/wheel.py index 35c70375539..a5dc12bdd63 100644 --- a/src/pip/_internal/models/wheel.py +++ b/src/pip/_internal/models/wheel.py @@ -13,8 +13,8 @@ class Wheel: """A wheel file""" wheel_file_re = re.compile( - r"""^(?P(?P.+?)-(?P.*?)) - ((-(?P\d[^-]*?))?-(?P.+?)-(?P.+?)-(?P.+?) + r"""^(?P(?P[^\s-]+?)-(?P[^\s-]*?)) + ((-(?P\d[^-]*?))?-(?P[^\s-]+?)-(?P[^\s-]+?)-(?P[^\s-]+?) \.whl|\.dist-info)$""", re.VERBOSE, ) From 321018fb930e95c85fc218e3ecf1ea1436309fc6 Mon Sep 17 00:00:00 2001 From: M00nL1ght <69127692+SCH227@users.noreply.github.com> Date: Tue, 30 Aug 2022 06:04:32 +0300 Subject: [PATCH 083/730] Create 11418.bugfix.rst --- news/11418.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11418.bugfix.rst diff --git a/news/11418.bugfix.rst b/news/11418.bugfix.rst new file mode 100644 index 00000000000..df32a0d0bc3 --- /dev/null +++ b/news/11418.bugfix.rst @@ -0,0 +1 @@ +Patch non-exploitable ReDoS vulnerability in wheel_file regex From 7485260b4e741ddf7b0fbcf5efe54feac768321e Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 2 Sep 2022 21:42:08 +0100 Subject: [PATCH 084/730] Update bug-report.yml Drop the "render", because our users are generally smart and GitHub is not. 
--- .github/ISSUE_TEMPLATE/bug-report.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index 51a290f50c7..e28e5408208 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -60,14 +60,12 @@ body: label: Output description: >- Provide the output of the steps above, including the commands - themselves and pip's output/traceback etc. If you're familiar with - Markdown, DO NOT add backticks. They're added automatically. + themselves and pip's output/traceback etc. If you want to present output from multiple commands, please prefix the line containing the command with `$ `. Please also ensure that the "How to reproduce" section contains matching instructions for reproducing this. - render: shell - type: checkboxes attributes: From 8856b5900e6f902ca2e72f0e625591dc9d75c0d3 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 8 Sep 2022 10:24:04 +0800 Subject: [PATCH 085/730] Further attempt to stablize timeout test --- tests/functional/test_requests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/test_requests.py b/tests/functional/test_requests.py index 622b150aa83..2ef121fedcb 100644 --- a/tests/functional/test_requests.py +++ b/tests/functional/test_requests.py @@ -7,7 +7,7 @@ def test_timeout(script: PipTestEnvironment) -> None: result = script.pip( "--retries", - "1", + "0", "--timeout", "0.00001", "install", From bad03ef931d9b3ff4f9e75f35f9c41f45839e2a1 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Sat, 10 Sep 2022 10:28:57 +0000 Subject: [PATCH 086/730] Use data-dist-info-metadata (PEP 658) to decouple resolution from downloading (#11111) Co-authored-by: Tzu-ping Chung --- news/11111.feature.rst | 1 + src/pip/_internal/exceptions.py | 11 +- src/pip/_internal/index/collector.py | 120 +----- 
src/pip/_internal/metadata/__init__.py | 22 ++ src/pip/_internal/metadata/base.py | 18 + .../_internal/metadata/importlib/_dists.py | 18 + src/pip/_internal/metadata/pkg_resources.py | 23 +- src/pip/_internal/models/link.py | 247 ++++++++++-- src/pip/_internal/operations/prepare.py | 73 +++- tests/functional/test_download.py | 355 +++++++++++++++++- tests/functional/test_new_resolver.py | 2 +- tests/lib/server.py | 8 - .../metadata/test_metadata_pkg_resources.py | 6 +- tests/unit/test_collector.py | 110 +++++- 14 files changed, 834 insertions(+), 180 deletions(-) create mode 100644 news/11111.feature.rst diff --git a/news/11111.feature.rst b/news/11111.feature.rst new file mode 100644 index 00000000000..39cb4b35c12 --- /dev/null +++ b/news/11111.feature.rst @@ -0,0 +1 @@ +Use the ``data-dist-info-metadata`` attribute from :pep:`658` to resolve distribution metadata without downloading the dist yet. diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 377cde52521..2ab1f591f12 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -335,8 +335,8 @@ class MetadataInconsistent(InstallationError): """Built metadata contains inconsistent information. This is raised when the metadata contains values (e.g. name and version) - that do not match the information previously obtained from sdist filename - or user-supplied ``#egg=`` value. + that do not match the information previously obtained from sdist filename, + user-supplied ``#egg=`` value, or an install requirement name. 
""" def __init__( @@ -348,11 +348,10 @@ def __init__( self.m_val = m_val def __str__(self) -> str: - template = ( - "Requested {} has inconsistent {}: " - "filename has {!r}, but metadata has {!r}" + return ( + f"Requested {self.ireq} has inconsistent {self.field}: " + f"expected {self.f_val!r}, but metadata has {self.m_val!r}" ) - return template.format(self.ireq, self.field, self.f_val, self.m_val) class LegacyInstallFailure(DiagnosticPipError): diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index f4e6e221f5d..0120610c758 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -9,10 +9,8 @@ import json import logging import os -import re import urllib.parse import urllib.request -import xml.etree.ElementTree from html.parser import HTMLParser from optparse import Values from typing import ( @@ -39,7 +37,7 @@ from pip._internal.network.session import PipSession from pip._internal.network.utils import raise_for_status from pip._internal.utils.filetypes import is_archive_file -from pip._internal.utils.misc import pairwise, redact_auth_from_url +from pip._internal.utils.misc import redact_auth_from_url from pip._internal.vcs import vcs from .sources import CandidatesFromPage, LinkSource, build_source @@ -51,7 +49,6 @@ logger = logging.getLogger(__name__) -HTMLElement = xml.etree.ElementTree.Element ResponseHeaders = MutableMapping[str, str] @@ -191,94 +188,6 @@ def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]: return None -def _clean_url_path_part(part: str) -> str: - """ - Clean a "part" of a URL path (i.e. after splitting on "@" characters). - """ - # We unquote prior to quoting to make sure nothing is double quoted. - return urllib.parse.quote(urllib.parse.unquote(part)) - - -def _clean_file_url_path(part: str) -> str: - """ - Clean the first part of a URL path that corresponds to a local - filesystem path (i.e. 
the first part after splitting on "@" characters). - """ - # We unquote prior to quoting to make sure nothing is double quoted. - # Also, on Windows the path part might contain a drive letter which - # should not be quoted. On Linux where drive letters do not - # exist, the colon should be quoted. We rely on urllib.request - # to do the right thing here. - return urllib.request.pathname2url(urllib.request.url2pathname(part)) - - -# percent-encoded: / -_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE) - - -def _clean_url_path(path: str, is_local_path: bool) -> str: - """ - Clean the path portion of a URL. - """ - if is_local_path: - clean_func = _clean_file_url_path - else: - clean_func = _clean_url_path_part - - # Split on the reserved characters prior to cleaning so that - # revision strings in VCS URLs are properly preserved. - parts = _reserved_chars_re.split(path) - - cleaned_parts = [] - for to_clean, reserved in pairwise(itertools.chain(parts, [""])): - cleaned_parts.append(clean_func(to_clean)) - # Normalize %xx escapes (e.g. %2f -> %2F) - cleaned_parts.append(reserved.upper()) - - return "".join(cleaned_parts) - - -def _clean_link(url: str) -> str: - """ - Make sure a link is fully quoted. - For example, if ' ' occurs in the URL, it will be replaced with "%20", - and without double-quoting other characters. - """ - # Split the URL into parts according to the general structure - # `scheme://netloc/path;parameters?query#fragment`. - result = urllib.parse.urlparse(url) - # If the netloc is empty, then the URL refers to a local filesystem path. - is_local_path = not result.netloc - path = _clean_url_path(result.path, is_local_path=is_local_path) - return urllib.parse.urlunparse(result._replace(path=path)) - - -def _create_link_from_element( - element_attribs: Dict[str, Optional[str]], - page_url: str, - base_url: str, -) -> Optional[Link]: - """ - Convert an anchor element's attributes in a simple repository page to a Link. 
- """ - href = element_attribs.get("href") - if not href: - return None - - url = _clean_link(urllib.parse.urljoin(base_url, href)) - pyrequire = element_attribs.get("data-requires-python") - yanked_reason = element_attribs.get("data-yanked") - - link = Link( - url, - comes_from=page_url, - requires_python=pyrequire, - yanked_reason=yanked_reason, - ) - - return link - - class CacheablePageContent: def __init__(self, page: "IndexContent") -> None: assert page.cache_link_parsing @@ -326,25 +235,10 @@ def parse_links(page: "IndexContent") -> Iterable[Link]: if content_type_l.startswith("application/vnd.pypi.simple.v1+json"): data = json.loads(page.content) for file in data.get("files", []): - file_url = file.get("url") - if file_url is None: + link = Link.from_json(file, page.url) + if link is None: continue - - # The Link.yanked_reason expects an empty string instead of a boolean. - yanked_reason = file.get("yanked") - if yanked_reason and not isinstance(yanked_reason, str): - yanked_reason = "" - # The Link.yanked_reason expects None instead of False - elif not yanked_reason: - yanked_reason = None - - yield Link( - _clean_link(urllib.parse.urljoin(page.url, file_url)), - comes_from=page.url, - requires_python=file.get("requires-python"), - yanked_reason=yanked_reason, - hashes=file.get("hashes", {}), - ) + yield link return parser = HTMLLinkParser(page.url) @@ -354,11 +248,7 @@ def parse_links(page: "IndexContent") -> Iterable[Link]: url = page.url base_url = parser.base_url or url for anchor in parser.anchors: - link = _create_link_from_element( - anchor, - page_url=url, - base_url=base_url, - ) + link = Link.from_element(anchor, page_url=url, base_url=base_url) if link is None: continue yield link diff --git a/src/pip/_internal/metadata/__init__.py b/src/pip/_internal/metadata/__init__.py index 8cd0fda6851..9f73ca7105f 100644 --- a/src/pip/_internal/metadata/__init__.py +++ b/src/pip/_internal/metadata/__init__.py @@ -103,3 +103,25 @@ def 
get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistributio :param canonical_name: Normalized project name of the given wheel. """ return select_backend().Distribution.from_wheel(wheel, canonical_name) + + +def get_metadata_distribution( + metadata_contents: bytes, + filename: str, + canonical_name: str, +) -> BaseDistribution: + """Get the dist representation of the specified METADATA file contents. + + This returns a Distribution instance from the chosen backend sourced from the data + in `metadata_contents`. + + :param metadata_contents: Contents of a METADATA file within a dist, or one served + via PEP 658. + :param filename: Filename for the dist this metadata represents. + :param canonical_name: Normalized project name of the given dist. + """ + return select_backend().Distribution.from_metadata_file_contents( + metadata_contents, + filename, + canonical_name, + ) diff --git a/src/pip/_internal/metadata/base.py b/src/pip/_internal/metadata/base.py index 151fd6d009e..cafb79fb3dc 100644 --- a/src/pip/_internal/metadata/base.py +++ b/src/pip/_internal/metadata/base.py @@ -113,6 +113,24 @@ def from_directory(cls, directory: str) -> "BaseDistribution": """ raise NotImplementedError() + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> "BaseDistribution": + """Load the distribution from the contents of a METADATA file. + + This is used to implement PEP 658 by generating a "shallow" dist object that can + be used for resolution without downloading or building the actual dist yet. + + :param metadata_contents: The contents of a METADATA file. + :param filename: File name for the dist with this metadata. + :param project_name: Name of the project this dist represents. + """ + raise NotImplementedError() + @classmethod def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution": """Load the distribution from a given wheel. 
diff --git a/src/pip/_internal/metadata/importlib/_dists.py b/src/pip/_internal/metadata/importlib/_dists.py index fbf9a93218a..65c043c87ef 100644 --- a/src/pip/_internal/metadata/importlib/_dists.py +++ b/src/pip/_internal/metadata/importlib/_dists.py @@ -28,6 +28,7 @@ ) from pip._internal.utils.misc import normalize_path from pip._internal.utils.packaging import safe_extra +from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file from ._compat import BasePath, get_dist_name @@ -109,6 +110,23 @@ def from_directory(cls, directory: str) -> BaseDistribution: dist = importlib.metadata.Distribution.at(info_location) return cls(dist, info_location, info_location.parent) + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> BaseDistribution: + # Generate temp dir to contain the metadata file, and write the file contents. + temp_dir = pathlib.Path( + TempDirectory(kind="metadata", globally_managed=True).path + ) + metadata_path = temp_dir / "METADATA" + metadata_path.write_bytes(metadata_contents) + # Construct dist pointing to the newly created directory. + dist = importlib.metadata.Distribution.at(metadata_path.parent) + return cls(dist, metadata_path.parent, None) + @classmethod def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: try: diff --git a/src/pip/_internal/metadata/pkg_resources.py b/src/pip/_internal/metadata/pkg_resources.py index bf79ba139c0..f330ef12a2c 100644 --- a/src/pip/_internal/metadata/pkg_resources.py +++ b/src/pip/_internal/metadata/pkg_resources.py @@ -33,7 +33,7 @@ class EntryPoint(NamedTuple): group: str -class WheelMetadata: +class InMemoryMetadata: """IMetadataProvider that reads metadata files from a dictionary. This also maps metadata decoding exceptions to our internal exception type. 
@@ -92,12 +92,29 @@ def from_directory(cls, directory: str) -> BaseDistribution: dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata) return cls(dist) + @classmethod + def from_metadata_file_contents( + cls, + metadata_contents: bytes, + filename: str, + project_name: str, + ) -> BaseDistribution: + metadata_dict = { + "METADATA": metadata_contents, + } + dist = pkg_resources.DistInfoDistribution( + location=filename, + metadata=InMemoryMetadata(metadata_dict, filename), + project_name=project_name, + ) + return cls(dist) + @classmethod def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: try: with wheel.as_zipfile() as zf: info_dir, _ = parse_wheel(zf, name) - metadata_text = { + metadata_dict = { path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path) for path in zf.namelist() if path.startswith(f"{info_dir}/") @@ -108,7 +125,7 @@ def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution: raise UnsupportedWheel(f"{name} has an invalid wheel, {e}") dist = pkg_resources.DistInfoDistribution( location=wheel.location, - metadata=WheelMetadata(metadata_text, wheel.location), + metadata=InMemoryMetadata(metadata_dict, wheel.location), project_name=name, ) return cls(dist) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 8fd1c3d9960..c792d128bcf 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -1,11 +1,14 @@ import functools +import itertools import logging import os import posixpath import re import urllib.parse +from dataclasses import dataclass from typing import ( TYPE_CHECKING, + Any, Dict, List, Mapping, @@ -18,6 +21,7 @@ from pip._internal.utils.filetypes import WHEEL_EXTENSION from pip._internal.utils.hashes import Hashes from pip._internal.utils.misc import ( + pairwise, redact_auth_from_url, split_auth_from_netloc, splitext, @@ -36,6 +40,119 @@ _SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5") +@dataclass(frozen=True) 
+class LinkHash: + """Links to content may have embedded hash values. This class parses those. + + `name` must be any member of `_SUPPORTED_HASHES`. + + This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to + be JSON-serializable to conform to PEP 610, this class contains the logic for + parsing a hash name and value for correctness, and then checking whether that hash + conforms to a schema with `.is_hash_allowed()`.""" + + name: str + value: str + + _hash_re = re.compile( + # NB: we do not validate that the second group (.*) is a valid hex + # digest. Instead, we simply keep that string in this class, and then check it + # against Hashes when hash-checking is needed. This is easier to debug than + # proactively discarding an invalid hex digest, as we handle incorrect hashes + # and malformed hashes in the same place. + r"({choices})=(.*)".format( + choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES) + ), + ) + + def __post_init__(self) -> None: + assert self._hash_re.match(f"{self.name}={self.value}") + + @classmethod + @functools.lru_cache(maxsize=None) + def split_hash_name_and_value(cls, url: str) -> Optional["LinkHash"]: + """Search a string for a checksum algorithm name and encoded output value.""" + match = cls._hash_re.search(url) + if match is None: + return None + name, value = match.groups() + return cls(name=name, value=value) + + def as_hashes(self) -> Hashes: + """Return a Hashes instance which checks only for the current hash.""" + return Hashes({self.name: [self.value]}) + + def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: + """ + Return True if the current hash is allowed by `hashes`. + """ + if hashes is None: + return False + return hashes.is_hash_allowed(self.name, hex_digest=self.value) + + +def _clean_url_path_part(part: str) -> str: + """ + Clean a "part" of a URL path (i.e. after splitting on "@" characters). 
+ """ + # We unquote prior to quoting to make sure nothing is double quoted. + return urllib.parse.quote(urllib.parse.unquote(part)) + + +def _clean_file_url_path(part: str) -> str: + """ + Clean the first part of a URL path that corresponds to a local + filesystem path (i.e. the first part after splitting on "@" characters). + """ + # We unquote prior to quoting to make sure nothing is double quoted. + # Also, on Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + return urllib.request.pathname2url(urllib.request.url2pathname(part)) + + +# percent-encoded: / +_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE) + + +def _clean_url_path(path: str, is_local_path: bool) -> str: + """ + Clean the path portion of a URL. + """ + if is_local_path: + clean_func = _clean_file_url_path + else: + clean_func = _clean_url_path_part + + # Split on the reserved characters prior to cleaning so that + # revision strings in VCS URLs are properly preserved. + parts = _reserved_chars_re.split(path) + + cleaned_parts = [] + for to_clean, reserved in pairwise(itertools.chain(parts, [""])): + cleaned_parts.append(clean_func(to_clean)) + # Normalize %xx escapes (e.g. %2f -> %2F) + cleaned_parts.append(reserved.upper()) + + return "".join(cleaned_parts) + + +def _ensure_quoted_url(url: str) -> str: + """ + Make sure a link is fully quoted. + For example, if ' ' occurs in the URL, it will be replaced with "%20", + and without double-quoting other characters. + """ + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. + result = urllib.parse.urlparse(url) + # If the netloc is empty, then the URL refers to a local filesystem path. 
+ is_local_path = not result.netloc + path = _clean_url_path(result.path, is_local_path=is_local_path) + return urllib.parse.urlunparse(result._replace(path=path)) + + class Link(KeyBasedCompareMixin): """Represents a parsed link from a Package Index's simple URL""" @@ -46,6 +163,8 @@ class Link(KeyBasedCompareMixin): "comes_from", "requires_python", "yanked_reason", + "dist_info_metadata", + "link_hash", "cache_link_parsing", ] @@ -55,6 +174,8 @@ def __init__( comes_from: Optional[Union[str, "IndexContent"]] = None, requires_python: Optional[str] = None, yanked_reason: Optional[str] = None, + dist_info_metadata: Optional[str] = None, + link_hash: Optional[LinkHash] = None, cache_link_parsing: bool = True, hashes: Optional[Mapping[str, str]] = None, ) -> None: @@ -72,6 +193,14 @@ def __init__( a simple repository HTML link. If the file has been yanked but no reason was provided, this should be the empty string. See PEP 592 for more information and the specification. + :param dist_info_metadata: the metadata attached to the file, or None if no such + metadata is provided. This is the value of the "data-dist-info-metadata" + attribute, if present, in a simple repository HTML link. This may be parsed + into its own `Link` by `self.metadata_link()`. See PEP 658 for more + information and the specification. + :param link_hash: a checksum for the content the link points to. If not + provided, this will be extracted from the link URL, if the URL has + any checksum. :param cache_link_parsing: A flag that is used elsewhere to determine whether resources retrieved from this link should be cached. 
PyPI index urls should @@ -94,11 +223,75 @@ def __init__( self.comes_from = comes_from self.requires_python = requires_python if requires_python else None self.yanked_reason = yanked_reason + self.dist_info_metadata = dist_info_metadata + self.link_hash = link_hash or LinkHash.split_hash_name_and_value(self._url) super().__init__(key=url, defining_class=Link) self.cache_link_parsing = cache_link_parsing + @classmethod + def from_json( + cls, + file_data: Dict[str, Any], + page_url: str, + ) -> Optional["Link"]: + """ + Convert an pypi json document from a simple repository page into a Link. + """ + file_url = file_data.get("url") + if file_url is None: + return None + + url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url)) + pyrequire = file_data.get("requires-python") + yanked_reason = file_data.get("yanked") + dist_info_metadata = file_data.get("dist-info-metadata") + hashes = file_data.get("hashes", {}) + + # The Link.yanked_reason expects an empty string instead of a boolean. + if yanked_reason and not isinstance(yanked_reason, str): + yanked_reason = "" + # The Link.yanked_reason expects None instead of False. + elif not yanked_reason: + yanked_reason = None + + return cls( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + hashes=hashes, + dist_info_metadata=dist_info_metadata, + ) + + @classmethod + def from_element( + cls, + anchor_attribs: Dict[str, Optional[str]], + page_url: str, + base_url: str, + ) -> Optional["Link"]: + """ + Convert an anchor element's attributes in a simple repository page to a Link. 
+ """ + href = anchor_attribs.get("href") + if not href: + return None + + url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href)) + pyrequire = anchor_attribs.get("data-requires-python") + yanked_reason = anchor_attribs.get("data-yanked") + dist_info_metadata = anchor_attribs.get("data-dist-info-metadata") + + return cls( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + dist_info_metadata=dist_info_metadata, + ) + def __str__(self) -> str: if self.requires_python: rp = f" (requires-python:{self.requires_python})" @@ -181,32 +374,36 @@ def subdirectory_fragment(self) -> Optional[str]: return None return match.group(1) - _hash_re = re.compile( - r"({choices})=([a-f0-9]+)".format(choices="|".join(_SUPPORTED_HASHES)) - ) + def metadata_link(self) -> Optional["Link"]: + """Implementation of PEP 658 parsing.""" + # Note that Link.from_element() parsing the "data-dist-info-metadata" attribute + # from an HTML anchor tag is typically how the Link.dist_info_metadata attribute + # gets set. + if self.dist_info_metadata is None: + return None + metadata_url = f"{self.url_without_fragment}.metadata" + link_hash: Optional[LinkHash] = None + # If data-dist-info-metadata="true" is set, then the metadata file exists, + # but there is no information about its checksum or anything else. 
+ if self.dist_info_metadata != "true": + link_hash = LinkHash.split_hash_name_and_value(self.dist_info_metadata) + return Link(metadata_url, link_hash=link_hash) + + def as_hashes(self) -> Optional[Hashes]: + if self.link_hash is not None: + return self.link_hash.as_hashes() + return None @property def hash(self) -> Optional[str]: - for hashname in _SUPPORTED_HASHES: - if hashname in self._hashes: - return self._hashes[hashname] - - match = self._hash_re.search(self._url) - if match: - return match.group(2) - + if self.link_hash is not None: + return self.link_hash.value return None @property def hash_name(self) -> Optional[str]: - for hashname in _SUPPORTED_HASHES: - if hashname in self._hashes: - return hashname - - match = self._hash_re.search(self._url) - if match: - return match.group(1) - + if self.link_hash is not None: + return self.link_hash.name return None @property @@ -236,19 +433,15 @@ def is_yanked(self) -> bool: @property def has_hash(self) -> bool: - return self.hash_name is not None + return self.link_hash is not None def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: """ - Return True if the link has a hash and it is allowed. + Return True if the link has a hash and it is allowed by `hashes`. """ - if hashes is None or not self.has_hash: + if self.link_hash is None: return False - # Assert non-None so mypy knows self.hash_name and self.hash are str. 
- assert self.hash_name is not None - assert self.hash is not None - - return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) + return self.link_hash.is_hash_allowed(hashes) class _CleanResult(NamedTuple): diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 80723fffe47..4bf414cb005 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -19,12 +19,13 @@ HashMismatch, HashUnpinned, InstallationError, + MetadataInconsistent, NetworkConnectionError, PreviousBuildDirError, VcsHashUnsupported, ) from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import BaseDistribution +from pip._internal.metadata import BaseDistribution, get_metadata_distribution from pip._internal.models.direct_url import ArchiveInfo from pip._internal.models.link import Link from pip._internal.models.wheel import Wheel @@ -346,19 +347,72 @@ def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: # showing the user what the hash should be. return req.hashes(trust_internet=False) or MissingHashes() + def _fetch_metadata_only( + self, + req: InstallRequirement, + ) -> Optional[BaseDistribution]: + if self.require_hashes: + logger.debug( + "Metadata-only fetching is not used as hash checking is required", + ) + return None + # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable. + return self._fetch_metadata_using_link_data_attr( + req + ) or self._fetch_metadata_using_lazy_wheel(req.link) + + def _fetch_metadata_using_link_data_attr( + self, + req: InstallRequirement, + ) -> Optional[BaseDistribution]: + """Fetch metadata from the data-dist-info-metadata attribute, if possible.""" + # (1) Get the link to the metadata file, if provided by the backend. 
+ metadata_link = req.link.metadata_link() + if metadata_link is None: + return None + assert req.req is not None + logger.info( + "Obtaining dependency information for %s from %s", + req.req, + metadata_link, + ) + # (2) Download the contents of the METADATA file, separate from the dist itself. + metadata_file = get_http_url( + metadata_link, + self._download, + hashes=metadata_link.as_hashes(), + ) + with open(metadata_file.path, "rb") as f: + metadata_contents = f.read() + # (3) Generate a dist just from those file contents. + metadata_dist = get_metadata_distribution( + metadata_contents, + req.link.filename, + req.req.name, + ) + # (4) Ensure the Name: field from the METADATA file matches the name from the + # install requirement. + # + # NB: raw_name will fall back to the name from the install requirement if + # the Name: field is not present, but it's noted in the raw_name docstring + # that that should NEVER happen anyway. + if metadata_dist.raw_name != req.req.name: + raise MetadataInconsistent( + req, "Name", req.req.name, metadata_dist.raw_name + ) + return metadata_dist + def _fetch_metadata_using_lazy_wheel( self, link: Link, ) -> Optional[BaseDistribution]: """Fetch metadata using lazy wheel, if possible.""" + # --use-feature=fast-deps must be provided. 
if not self.use_lazy_wheel: return None - if self.require_hashes: - logger.debug("Lazy wheel is not used as hash checking is required") - return None if link.is_file or not link.is_wheel: logger.debug( - "Lazy wheel is not used as %r does not points to a remote wheel", + "Lazy wheel is not used as %r does not point to a remote wheel", link, ) return None @@ -414,13 +468,12 @@ def prepare_linked_requirement( ) -> BaseDistribution: """Prepare a requirement to be obtained from req.link.""" assert req.link - link = req.link self._log_preparing_link(req) with indent_log(): # Check if the relevant file is already available # in the download directory file_path = None - if self.download_dir is not None and link.is_wheel: + if self.download_dir is not None and req.link.is_wheel: hashes = self._get_linked_req_hashes(req) file_path = _check_download_dir(req.link, self.download_dir, hashes) @@ -429,10 +482,10 @@ def prepare_linked_requirement( self._downloaded[req.link.url] = file_path else: # The file is not available, attempt to fetch only metadata - wheel_dist = self._fetch_metadata_using_lazy_wheel(link) - if wheel_dist is not None: + metadata_dist = self._fetch_metadata_only(req) + if metadata_dist is not None: req.needs_more_preparation = True - return wheel_dist + return metadata_dist # None of the optimizations worked, fully prepare the requirement return self._prepare_linked_requirement(req, parallel_builds) diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 89318b74553..ede2213aa70 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -1,17 +1,24 @@ import os +import re import shutil import textwrap +import uuid +from dataclasses import dataclass +from enum import Enum from hashlib import sha256 from pathlib import Path -from typing import List +from textwrap import dedent +from typing import Callable, Dict, List, Tuple import pytest from pip._internal.cli.status_codes import ERROR +from 
pip._internal.utils.urls import path_to_url from tests.conftest import MockServer, ScriptFactory from tests.lib import ( PipTestEnvironment, TestData, + TestPipResult, create_basic_sdist_for_package, create_really_basic_wheel, ) @@ -1230,3 +1237,349 @@ def test_download_use_pep517_propagation( downloads = os.listdir(download_dir) assert len(downloads) == 2 + + +class MetadataKind(Enum): + """All the types of values we might be provided for the data-dist-info-metadata + attribute from PEP 658.""" + + # Valid: will read metadata from the dist instead. + No = "none" + # Valid: will read the .metadata file, but won't check its hash. + Unhashed = "unhashed" + # Valid: will read the .metadata file and check its hash matches. + Sha256 = "sha256" + # Invalid: will error out after checking the hash. + WrongHash = "wrong-hash" + # Invalid: will error out after failing to fetch the .metadata file. + NoFile = "no-file" + + +@dataclass(frozen=True) +class Package: + """Mock package structure used to generate a PyPI repository. + + Package name and version should correspond to sdists (.tar.gz files) in our test + data.""" + + name: str + version: str + filename: str + metadata: MetadataKind + # This will override any dependencies specified in the actual dist's METADATA. + requires_dist: Tuple[str, ...] 
= () + + def metadata_filename(self) -> str: + """This is specified by PEP 658.""" + return f"{self.filename}.metadata" + + def generate_additional_tag(self) -> str: + """This gets injected into the tag in the generated PyPI index page for this + package.""" + if self.metadata == MetadataKind.No: + return "" + if self.metadata in [MetadataKind.Unhashed, MetadataKind.NoFile]: + return 'data-dist-info-metadata="true"' + if self.metadata == MetadataKind.WrongHash: + return 'data-dist-info-metadata="sha256=WRONG-HASH"' + assert self.metadata == MetadataKind.Sha256 + checksum = sha256(self.generate_metadata()).hexdigest() + return f'data-dist-info-metadata="sha256={checksum}"' + + def requires_str(self) -> str: + if not self.requires_dist: + return "" + joined = " and ".join(self.requires_dist) + return f"Requires-Dist: {joined}" + + def generate_metadata(self) -> bytes: + """This is written to `self.metadata_filename()` and will override the actual + dist's METADATA, unless `self.metadata == MetadataKind.NoFile`.""" + return dedent( + f"""\ + Metadata-Version: 2.1 + Name: {self.name} + Version: {self.version} + {self.requires_str()} + """ + ).encode("utf-8") + + +@pytest.fixture(scope="function") +def write_index_html_content(tmpdir: Path) -> Callable[[str], Path]: + """Generate a PyPI package index.html within a temporary local directory.""" + html_dir = tmpdir / "index_html_content" + html_dir.mkdir() + + def generate_index_html_subdir(index_html: str) -> Path: + """Create a new subdirectory after a UUID and write an index.html.""" + new_subdir = html_dir / uuid.uuid4().hex + new_subdir.mkdir() + + with open(new_subdir / "index.html", "w") as f: + f.write(index_html) + + return new_subdir + + return generate_index_html_subdir + + +@pytest.fixture(scope="function") +def html_index_for_packages( + shared_data: TestData, + write_index_html_content: Callable[[str], Path], +) -> Callable[..., Path]: + """Generate a PyPI HTML package index within a local directory pointing 
to + blank data.""" + + def generate_html_index_for_packages(packages: Dict[str, List[Package]]) -> Path: + """ + Produce a PyPI directory structure pointing to the specified packages. + """ + # (1) Generate the content for a PyPI index.html. + pkg_links = "\n".join( + f' {pkg}' for pkg in packages.keys() + ) + index_html = f"""\ + + + + + Simple index + + +{pkg_links} + +""" + # (2) Generate the index.html in a new subdirectory of the temp directory. + index_html_subdir = write_index_html_content(index_html) + + # (3) Generate subdirectories for individual packages, each with their own + # index.html. + for pkg, links in packages.items(): + pkg_subdir = index_html_subdir / pkg + pkg_subdir.mkdir() + + download_links: List[str] = [] + for package_link in links: + # (3.1) Generate the tag which pip can crawl pointing to this + # specific package version. + download_links.append( + f' {package_link.filename}
' # noqa: E501 + ) + # (3.2) Copy over the corresponding file in `shared_data.packages`. + shutil.copy( + shared_data.packages / package_link.filename, + pkg_subdir / package_link.filename, + ) + # (3.3) Write a metadata file, if applicable. + if package_link.metadata != MetadataKind.NoFile: + with open(pkg_subdir / package_link.metadata_filename(), "wb") as f: + f.write(package_link.generate_metadata()) + + # (3.4) After collating all the download links and copying over the files, + # write an index.html with the generated download links for each + # copied file for this specific package name. + download_links_str = "\n".join(download_links) + pkg_index_content = f"""\ + + + + + Links for {pkg} + + +

Links for {pkg}

+{download_links_str} + +""" + with open(pkg_subdir / "index.html", "w") as f: + f.write(pkg_index_content) + + return index_html_subdir + + return generate_html_index_for_packages + + +@pytest.fixture(scope="function") +def download_generated_html_index( + script: PipTestEnvironment, + html_index_for_packages: Callable[[Dict[str, List[Package]]], Path], + tmpdir: Path, +) -> Callable[..., Tuple[TestPipResult, Path]]: + """Execute `pip download` against a generated PyPI index.""" + download_dir = tmpdir / "download_dir" + + def run_for_generated_index( + packages: Dict[str, List[Package]], + args: List[str], + allow_error: bool = False, + ) -> Tuple[TestPipResult, Path]: + """ + Produce a PyPI directory structure pointing to the specified packages, then + execute `pip download -i ...` pointing to our generated index. + """ + index_dir = html_index_for_packages(packages) + pip_args = [ + "download", + "-d", + str(download_dir), + "-i", + path_to_url(str(index_dir)), + *args, + ] + result = script.pip(*pip_args, allow_error=allow_error) + return (result, download_dir) + + return run_for_generated_index + + +# The package database we generate for testing PEP 658 support. +_simple_packages: Dict[str, List[Package]] = { + "simple": [ + Package("simple", "1.0", "simple-1.0.tar.gz", MetadataKind.Sha256), + Package("simple", "2.0", "simple-2.0.tar.gz", MetadataKind.No), + # This will raise a hashing error. + Package("simple", "3.0", "simple-3.0.tar.gz", MetadataKind.WrongHash), + ], + "simple2": [ + # Override the dependencies here in order to force pip to download + # simple-1.0.tar.gz as well. + Package( + "simple2", + "1.0", + "simple2-1.0.tar.gz", + MetadataKind.Unhashed, + ("simple==1.0",), + ), + # This will raise an error when pip attempts to fetch the metadata file. + Package("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile), + ], + "colander": [ + # Ensure we can read the dependencies from a metadata file within a wheel + # *without* PEP 658 metadata. 
+ Package( + "colander", "0.9.9", "colander-0.9.9-py2.py3-none-any.whl", MetadataKind.No + ), + ], + "compilewheel": [ + # Ensure we can override the dependencies of a wheel file by injecting PEP + # 658 metadata. + Package( + "compilewheel", + "1.0", + "compilewheel-1.0-py2.py3-none-any.whl", + MetadataKind.Unhashed, + ("simple==1.0",), + ), + ], + "has-script": [ + # Ensure we check PEP 658 metadata hashing errors for wheel files. + Package( + "has-script", + "1.0", + "has.script-1.0-py2.py3-none-any.whl", + MetadataKind.WrongHash, + ), + ], + "translationstring": [ + Package( + "translationstring", "1.1", "translationstring-1.1.tar.gz", MetadataKind.No + ), + ], + "priority": [ + # Ensure we check for a missing metadata file for wheels. + Package( + "priority", "1.0", "priority-1.0-py2.py3-none-any.whl", MetadataKind.NoFile + ), + ], +} + + +@pytest.mark.parametrize( + "requirement_to_download, expected_outputs", + [ + ("simple2==1.0", ["simple-1.0.tar.gz", "simple2-1.0.tar.gz"]), + ("simple==2.0", ["simple-2.0.tar.gz"]), + ( + "colander", + ["colander-0.9.9-py2.py3-none-any.whl", "translationstring-1.1.tar.gz"], + ), + ( + "compilewheel", + ["compilewheel-1.0-py2.py3-none-any.whl", "simple-1.0.tar.gz"], + ), + ], +) +def test_download_metadata( + download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + requirement_to_download: str, + expected_outputs: List[str], +) -> None: + """Verify that if a data-dist-info-metadata attribute is present, then it is used + instead of the actual dist's METADATA.""" + _, download_dir = download_generated_html_index( + _simple_packages, + [requirement_to_download], + ) + assert sorted(os.listdir(download_dir)) == expected_outputs + + +@pytest.mark.parametrize( + "requirement_to_download, real_hash", + [ + ( + "simple==3.0", + "95e0f200b6302989bcf2cead9465cf229168295ea330ca30d1ffeab5c0fed996", + ), + ( + "has-script", + "16ba92d7f6f992f6de5ecb7d58c914675cf21f57f8e674fb29dcb4f4c9507e5b", + ), + ], +) +def 
test_incorrect_metadata_hash( + download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + requirement_to_download: str, + real_hash: str, +) -> None: + """Verify that if a hash for data-dist-info-metadata is provided, it must match the + actual hash of the metadata file.""" + result, _ = download_generated_html_index( + _simple_packages, + [requirement_to_download], + allow_error=True, + ) + assert result.returncode != 0 + expected_msg = f"""\ + Expected sha256 WRONG-HASH + Got {real_hash}""" + assert expected_msg in result.stderr + + +@pytest.mark.parametrize( + "requirement_to_download, expected_url", + [ + ("simple2==2.0", "simple2-2.0.tar.gz.metadata"), + ("priority", "priority-1.0-py2.py3-none-any.whl.metadata"), + ], +) +def test_metadata_not_found( + download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + requirement_to_download: str, + expected_url: str, +) -> None: + """Verify that if a data-dist-info-metadata attribute is provided, that pip will + fetch the .metadata file at the location specified by PEP 658, and error + if unavailable.""" + result, _ = download_generated_html_index( + _simple_packages, + [requirement_to_download], + allow_error=True, + ) + assert result.returncode != 0 + expected_re = re.escape(expected_url) + pattern = re.compile( + f"ERROR: 404 Client Error: FileNotFoundError for url:.*{expected_re}" + ) + assert pattern.search(result.stderr), (pattern, result.stderr) diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index efcae29289f..fc52ab9c8d8 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -1363,7 +1363,7 @@ def test_new_resolver_skip_inconsistent_metadata(script: PipTestEnvironment) -> ) assert ( - " inconsistent version: filename has '3', but metadata has '2'" + " inconsistent version: expected '3', but metadata has '2'" ) in result.stdout, str(result) script.assert_installed(a="1") diff --git 
a/tests/lib/server.py b/tests/lib/server.py index 4b5add345d3..4cc18452cb5 100644 --- a/tests/lib/server.py +++ b/tests/lib/server.py @@ -150,14 +150,6 @@ def html5_page(text: str) -> str: ) -def index_page(spec: Dict[str, str]) -> "WSGIApplication": - def link(name: str, value: str) -> str: - return '{}'.format(value, name) - - links = "".join(link(*kv) for kv in spec.items()) - return text_html_response(html5_page(links)) - - def package_page(spec: Dict[str, str]) -> "WSGIApplication": def link(name: str, value: str) -> str: return '{}'.format(value, name) diff --git a/tests/unit/metadata/test_metadata_pkg_resources.py b/tests/unit/metadata/test_metadata_pkg_resources.py index 6bb67156c9f..ab1a56107f4 100644 --- a/tests/unit/metadata/test_metadata_pkg_resources.py +++ b/tests/unit/metadata/test_metadata_pkg_resources.py @@ -11,7 +11,7 @@ from pip._internal.metadata.pkg_resources import ( Distribution, Environment, - WheelMetadata, + InMemoryMetadata, ) pkg_resources = pytest.importorskip("pip._vendor.pkg_resources") @@ -99,7 +99,7 @@ def test_wheel_metadata_works() -> None: dist = Distribution( pkg_resources.DistInfoDistribution( location="", - metadata=WheelMetadata({"METADATA": metadata.as_bytes()}, ""), + metadata=InMemoryMetadata({"METADATA": metadata.as_bytes()}, ""), project_name=name, ), ) @@ -116,7 +116,7 @@ def test_wheel_metadata_works() -> None: def test_wheel_metadata_throws_on_bad_unicode() -> None: - metadata = WheelMetadata({"METADATA": b"\xff"}, "") + metadata = InMemoryMetadata({"METADATA": b"\xff"}, "") with pytest.raises(UnsupportedWheel) as e: metadata.get_metadata("METADATA") diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 3afc5210dc7..55676a4fc5c 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -11,13 +11,12 @@ import pytest from pip._vendor import requests +from pip._vendor.packaging.requirements import Requirement from pip._internal.exceptions import NetworkConnectionError from 
pip._internal.index.collector import ( IndexContent, LinkCollector, - _clean_link, - _clean_url_path, _get_index_content, _get_simple_response, _make_index_content, @@ -28,7 +27,12 @@ from pip._internal.index.sources import _FlatDirectorySource, _IndexDirectorySource from pip._internal.models.candidate import InstallationCandidate from pip._internal.models.index import PyPI -from pip._internal.models.link import Link +from pip._internal.models.link import ( + Link, + LinkHash, + _clean_url_path, + _ensure_quoted_url, +) from pip._internal.network.session import PipSession from tests.lib import TestData, make_test_link_collector @@ -402,13 +406,13 @@ def test_clean_url_path_with_local_path(path: str, expected: str) -> None: ), ], ) -def test_clean_link(url: str, clean_url: str) -> None: - assert _clean_link(url) == clean_url +def test_ensure_quoted_url(url: str, clean_url: str) -> None: + assert _ensure_quoted_url(url) == clean_url def _test_parse_links_data_attribute( anchor_html: str, attr: str, expected: Optional[str] -) -> None: +) -> Link: html = ( "" '' @@ -427,6 +431,7 @@ def _test_parse_links_data_attribute( (link,) = links actual = getattr(link, attr) assert actual == expected + return link @pytest.mark.parametrize( @@ -454,6 +459,12 @@ def test_parse_links__requires_python( _test_parse_links_data_attribute(anchor_html, "requires_python", expected) +# TODO: this test generates its own examples to validate the json client implementation +# instead of sharing those examples with the html client testing. We expect this won't +# hide any bugs because operations like resolving PEP 658 metadata should use the same +# code for both types of indices, but it might be nice to explicitly have all our tests +# in test_download.py execute over both html and json indices with +# a pytest.mark.parameterize decorator to ensure nothing slips through the cracks. 
def test_parse_links_json() -> None: json_bytes = json.dumps( { @@ -474,6 +485,14 @@ def test_parse_links_json() -> None: "requires-python": ">=3.7", "dist-info-metadata": False, }, + # Same as above, but parsing dist-info-metadata. + { + "filename": "holygrail-1.0-py3-none-any.whl", + "url": "/files/holygrail-1.0-py3-none-any.whl", + "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + "requires-python": ">=3.7", + "dist-info-metadata": "sha512=aabdd41", + }, ], } ).encode("utf8") @@ -502,8 +521,25 @@ def test_parse_links_json() -> None: yanked_reason=None, hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, ), + Link( + "https://example.com/files/holygrail-1.0-py3-none-any.whl", + comes_from=page.url, + requires_python=">=3.7", + yanked_reason=None, + hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + dist_info_metadata="sha512=aabdd41", + ), ] + # Ensure the metadata info can be parsed into the correct link. + metadata_link = links[2].metadata_link() + assert metadata_link is not None + assert ( + metadata_link.url + == "https://example.com/files/holygrail-1.0-py3-none-any.whl.metadata" + ) + assert metadata_link.link_hash == LinkHash("sha512", "aabdd41") + @pytest.mark.parametrize( "anchor_html, expected", @@ -534,6 +570,48 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) - _test_parse_links_data_attribute(anchor_html, "yanked_reason", expected) +# Requirement objects do not == each other unless they point to the same instance! +_pkg1_requirement = Requirement("pkg1==1.0") + + +@pytest.mark.parametrize( + "anchor_html, expected, link_hash", + [ + # Test not present. + ( + '', + None, + None, + ), + # Test with value "true". + ( + '', + "true", + None, + ), + # Test with a provided hash value. + ( + '', # noqa: E501 + "sha256=aa113592bbe", + None, + ), + # Test with a provided hash value for both the requirement as well as metadata. 
+ ( + '', # noqa: E501 + "sha256=aa113592bbe", + LinkHash("sha512", "abc132409cb"), + ), + ], +) +def test_parse_links__dist_info_metadata( + anchor_html: str, + expected: Optional[str], + link_hash: Optional[LinkHash], +) -> None: + link = _test_parse_links_data_attribute(anchor_html, "dist_info_metadata", expected) + assert link.link_hash == link_hash + + def test_parse_links_caches_same_page_by_url() -> None: html = ( "" @@ -963,3 +1041,23 @@ def expand_path(path: str) -> str: expected_temp2_dir = os.path.normcase(temp2_dir) assert search_scope.find_links == ["~/temp1", expected_temp2_dir] assert search_scope.index_urls == ["default_url"] + + +@pytest.mark.parametrize( + "url, result", + [ + ( + "https://pypi.org/pip-18.0.tar.gz#sha256=aa113592bbe", + LinkHash("sha256", "aa113592bbe"), + ), + ( + "https://pypi.org/pip-18.0.tar.gz#md5=aa113592bbe", + LinkHash("md5", "aa113592bbe"), + ), + ("https://pypi.org/pip-18.0.tar.gz", None), + # We don't recognize the "sha500" algorithm, so we discard it. 
+ ("https://pypi.org/pip-18.0.tar.gz#sha500=aa113592bbe", None), + ], +) +def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None: + assert LinkHash.split_hash_name_and_value(url) == result From 08eb492641fecfaf0cdfbed5581bd405166a3816 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 7 Aug 2022 17:39:33 +0200 Subject: [PATCH 087/730] Deprecate --no-binary disabling the wheel cache --- news/11454.removal.rst | 1 + src/pip/_internal/cache.py | 6 +++++- src/pip/_internal/cli/cmdoptions.py | 7 ++++++- src/pip/_internal/commands/install.py | 26 +++++++++++++++++++++++--- src/pip/_internal/commands/wheel.py | 20 ++++++++++++++++++++ 5 files changed, 55 insertions(+), 5 deletions(-) create mode 100644 news/11454.removal.rst diff --git a/news/11454.removal.rst b/news/11454.removal.rst new file mode 100644 index 00000000000..14c4dc73ac7 --- /dev/null +++ b/news/11454.removal.rst @@ -0,0 +1 @@ +Deprecate ```--no-binary`` disabling the wheel cache. diff --git a/src/pip/_internal/cache.py b/src/pip/_internal/cache.py index e51edd5157e..c53b7f023a1 100644 --- a/src/pip/_internal/cache.py +++ b/src/pip/_internal/cache.py @@ -221,7 +221,11 @@ class WheelCache(Cache): when a certain link is not found in the simple wheel cache first. 
""" - def __init__(self, cache_dir: str, format_control: FormatControl) -> None: + def __init__( + self, cache_dir: str, format_control: Optional[FormatControl] = None + ) -> None: + if format_control is None: + format_control = FormatControl() super().__init__(cache_dir, format_control, {"binary"}) self._wheel_cache = SimpleWheelCache(cache_dir, format_control) self._ephem_cache = EphemWheelCache(format_control) diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 84e0e783869..f0950332115 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -1007,7 +1007,12 @@ def check_list_path_option(options: Values) -> None: metavar="feature", action="append", default=[], - choices=["2020-resolver", "fast-deps", "truststore"], + choices=[ + "2020-resolver", + "fast-deps", + "truststore", + "no-binary-enable-wheel-cache", + ], help="Enable new functionality, that may be backward incompatible.", ) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index dcf5ce8c617..b37303caade 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -29,7 +29,10 @@ from pip._internal.req import install_given_reqs from pip._internal.req.req_install import InstallRequirement from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.deprecation import LegacyInstallReasonFailedBdistWheel +from pip._internal.utils.deprecation import ( + LegacyInstallReasonFailedBdistWheel, + deprecated, +) from pip._internal.utils.distutils_args import parse_distutils_args from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.logging import getLogger @@ -326,8 +329,6 @@ def run(self, options: Values, args: List[str]) -> int: target_python=target_python, ignore_requires_python=options.ignore_requires_python, ) - wheel_cache = WheelCache(options.cache_dir, options.format_control) - build_tracker = 
self.enter_context(get_build_tracker()) directory = TempDirectory( @@ -339,6 +340,25 @@ def run(self, options: Values, args: List[str]) -> int: try: reqs = self.get_requirements(args, options, finder, session) + if "no-binary-enable-wheel-cache" in options.features_enabled: + # TODO: remove format_control from WheelCache when the deprecation cycle + # is over + wheel_cache = WheelCache(options.cache_dir) + else: + if options.format_control.no_binary: + deprecated( + reason=( + "--no-binary currently disables reading from " + "the cache of locally built wheels. In the future " + "--no-binary will not influence the wheel cache." + ), + replacement="to use the --no-cache-dir option", + feature_flag="no-binary-enable-wheel-cache", + issue=11453, + gone_in="23.1", + ) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + # Only when installing is it permitted to use PEP 660. # In other circumstances (pip wheel, pip download) we generate # regular (i.e. non editable) metadata and wheels. 
diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index 9dd6c82f210..5ddb3bd6ceb 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -11,6 +11,7 @@ from pip._internal.exceptions import CommandError from pip._internal.operations.build.build_tracker import get_build_tracker from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.deprecation import deprecated from pip._internal.utils.misc import ensure_dir, normalize_path from pip._internal.utils.temp_dir import TempDirectory from pip._internal.wheel_builder import build, should_build_for_wheel_command @@ -120,6 +121,25 @@ def run(self, options: Values, args: List[str]) -> int: reqs = self.get_requirements(args, options, finder, session) + if "no-binary-enable-wheel-cache" in options.features_enabled: + # TODO: remove format_control from WheelCache when the deprecation cycle + # is over + wheel_cache = WheelCache(options.cache_dir) + else: + if options.format_control.no_binary: + deprecated( + reason=( + "--no-binary currently disables reading from " + "the cache of locally built wheels. In the future " + "--no-binary will not influence the wheel cache." 
+ ), + replacement="to use the --no-cache-dir option", + feature_flag="no-binary-enable-wheel-cache", + issue=11453, + gone_in="23.1", + ) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + preparer = self.make_requirement_preparer( temp_build_dir=directory, options=options, From f39d38668add998395414064449f7fd0a68bd650 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 7 Aug 2022 16:21:20 +0200 Subject: [PATCH 088/730] Deprecate --no-binary implying setup.py install --- news/11452.removal.rst | 2 ++ src/pip/_internal/utils/deprecation.py | 17 +++++++++++++++-- src/pip/_internal/wheel_builder.py | 10 +++++----- 3 files changed, 22 insertions(+), 7 deletions(-) create mode 100644 news/11452.removal.rst diff --git a/news/11452.removal.rst b/news/11452.removal.rst new file mode 100644 index 00000000000..ac29324abc8 --- /dev/null +++ b/news/11452.removal.rst @@ -0,0 +1,2 @@ +Deprecate installation with 'setup.py install' when no-binary is enabled for +source distributions without 'pyproject.toml'. 
diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index 7c7ace6ff4c..a7acf07bb3a 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -124,8 +124,8 @@ class LegacyInstallReason: def __init__( self, reason: str, - replacement: Optional[str], - gone_in: Optional[str], + replacement: Optional[str] = None, + gone_in: Optional[str] = None, feature_flag: Optional[str] = None, issue: Optional[int] = None, emit_after_success: bool = False, @@ -173,3 +173,16 @@ def emit_deprecation(self, name: str) -> None: issue=8559, emit_before_install=True, ) + +LegacyInstallReasonNoBinaryForcesSetuptoolsInstall = LegacyInstallReason( + reason=( + "{name} is being installed using the legacy " + "'setup.py install' method, because the '--no-binary' option was enabled " + "for it and this currently disables local wheel building for projects that " + "don't have a 'pyproject.toml' file." + ), + replacement="to enable the '--use-pep517' option", + gone_in="23.1", + issue=11451, + emit_before_install=True, +) diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index 60db28e92c3..6f359421d80 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -19,7 +19,10 @@ from pip._internal.operations.build.wheel_editable import build_wheel_editable from pip._internal.operations.build.wheel_legacy import build_wheel_legacy from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.deprecation import LegacyInstallReasonMissingWheelPackage +from pip._internal.utils.deprecation import ( + LegacyInstallReasonMissingWheelPackage, + LegacyInstallReasonNoBinaryForcesSetuptoolsInstall, +) from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed from pip._internal.utils.setuptools_build import make_setuptools_clean_args @@ -80,10 +83,7 @@ def 
_should_build( assert check_bdist_wheel is not None if not check_bdist_wheel(req): - logger.info( - "Skipping wheel build for %s, due to binaries being disabled for it.", - req.name, - ) + req.legacy_install_reason = LegacyInstallReasonNoBinaryForcesSetuptoolsInstall return False if not is_wheel_installed(): From 24c8ebc85e0ab8097a879ad274e2b21e42349788 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 17 Sep 2022 15:43:40 +0200 Subject: [PATCH 089/730] Set deprecation deadlines --- src/pip/_internal/utils/deprecation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index 7c7ace6ff4c..51de0a5bde7 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -155,7 +155,7 @@ def emit_deprecation(self, name: str) -> None: "method, because a wheel could not be built for it." ), replacement="to fix the wheel build issue reported above", - gone_in=None, + gone_in="23.1", issue=8368, emit_after_success=True, ) @@ -169,7 +169,7 @@ def emit_deprecation(self, name: str) -> None: "is not installed." 
), replacement="to enable the '--use-pep517' option", - gone_in=None, + gone_in="23.1", issue=8559, emit_before_install=True, ) From 857df9059d24ec43f088da88b957665bd3b05d32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 18 Sep 2022 10:05:11 +0200 Subject: [PATCH 090/730] Add maintenance comment --- src/pip/_internal/wheel_builder.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index 6f359421d80..15b30af58e4 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -83,6 +83,11 @@ def _should_build( assert check_bdist_wheel is not None if not check_bdist_wheel(req): + # /!\ When we change this to unconditionally return True, we must also remove + # support for `--install-option`. Indeed, `--install-option` implies + # `--no-binary` so we can return False here and run `setup.py install`. + # `--global-option` and `--build-option` can remain until we drop support for + # building with `setup.py bdist_wheel`. req.legacy_install_reason = LegacyInstallReasonNoBinaryForcesSetuptoolsInstall return False From 410e953cc1e750f3c9bace46e6929653fefbe775 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 18 Sep 2022 10:59:57 +0100 Subject: [PATCH 091/730] Add documentation for the zipapp distribution --- docs/html/installation.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/docs/html/installation.md b/docs/html/installation.md index d9b9a2d9fef..110e46adc9e 100644 --- a/docs/html/installation.md +++ b/docs/html/installation.md @@ -45,6 +45,34 @@ More details about this script can be found in [pypa/get-pip]'s README. [pypa/get-pip]: https://github.com/pypa/get-pip +## Standalone zip application + +In addition to installing pip in your environment, pip is available as a +standalone [zip application](https://docs.python.org/3.11/library/zipapp.html). +This can be downloaded from . 
There are +also zip applications for specific pip versions, named `pip-X.Y.Z.pyz`. + +The zip application can be run using any supported version of Python: + +```{pip-cli} +$ python pip.pyz --help +``` + +If run directly: + +```{pip-cli} +$ pip.pyz --help +``` + +then the currently active Python interpreter will be used. + +```{note} +The zip application is currently experimental. We test that pip runs correctly +in this form, but it is possible that there could be issues in some situations. +We will accept bug reports in such cases, but for now the zip application should +not be used in production environments. +``` + ## Alternative Methods Depending on how you installed Python, there might be other mechanisms From 860f9bf57c19df590b699d29ff5cba5e472a8be2 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 18 Sep 2022 11:17:32 +0100 Subject: [PATCH 092/730] Add a news file --- news/11459.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11459.feature.rst diff --git a/news/11459.feature.rst b/news/11459.feature.rst new file mode 100644 index 00000000000..a4a11c09353 --- /dev/null +++ b/news/11459.feature.rst @@ -0,0 +1 @@ +Document the new (experimental) zipapp distribution of pip. From 318786c8f3831462417966d72a1871bc43ab1f73 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 18 Sep 2022 11:18:42 +0100 Subject: [PATCH 093/730] Fix header level --- docs/html/installation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/installation.md b/docs/html/installation.md index 110e46adc9e..367abc6bc01 100644 --- a/docs/html/installation.md +++ b/docs/html/installation.md @@ -45,7 +45,7 @@ More details about this script can be found in [pypa/get-pip]'s README. [pypa/get-pip]: https://github.com/pypa/get-pip -## Standalone zip application +### Standalone zip application In addition to installing pip in your environment, pip is available as a standalone [zip application](https://docs.python.org/3.11/library/zipapp.html). 
From 7e8263dde94136c3149acdb8d866093ca1ad245d Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 18 Sep 2022 12:19:23 +0100 Subject: [PATCH 094/730] Put the note at the top of the section --- docs/html/installation.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/html/installation.md b/docs/html/installation.md index 367abc6bc01..7db0d47ab23 100644 --- a/docs/html/installation.md +++ b/docs/html/installation.md @@ -47,6 +47,13 @@ More details about this script can be found in [pypa/get-pip]'s README. ### Standalone zip application +```{note} +The zip application is currently experimental. We test that pip runs correctly +in this form, but it is possible that there could be issues in some situations. +We will accept bug reports in such cases, but for now the zip application should +not be used in production environments. +``` + In addition to installing pip in your environment, pip is available as a standalone [zip application](https://docs.python.org/3.11/library/zipapp.html). This can be downloaded from . There are @@ -66,13 +73,6 @@ $ pip.pyz --help then the currently active Python interpreter will be used. -```{note} -The zip application is currently experimental. We test that pip runs correctly -in this form, but it is possible that there could be issues in some situations. -We will accept bug reports in such cases, but for now the zip application should -not be used in production environments. -``` - ## Alternative Methods Depending on how you installed Python, there might be other mechanisms From 7979dc00298bddac3a6741b750fe7befcda7d4cb Mon Sep 17 00:00:00 2001 From: Jaime Sanz Date: Thu, 22 Sep 2022 21:07:42 -0300 Subject: [PATCH 095/730] Fix typo in dependency-resolution.md (#11465) Removes a duplicated word. 
--- docs/html/topics/dependency-resolution.md | 2 +- news/5ba58886-1229-45ef-8a8a-dee90602ccdd.trivial.rst | 0 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 news/5ba58886-1229-45ef-8a8a-dee90602ccdd.trivial.rst diff --git a/docs/html/topics/dependency-resolution.md b/docs/html/topics/dependency-resolution.md index 6d02866a7ef..03f276baa2f 100644 --- a/docs/html/topics/dependency-resolution.md +++ b/docs/html/topics/dependency-resolution.md @@ -155,7 +155,7 @@ how to inspect: - their release notes and changelogs from past versions During deployment, you can create a lockfile stating the exact package and -version number for for each dependency of that package. You can create this +version number for each dependency of that package. You can create this with [pip-tools](https://github.com/jazzband/pip-tools/). This means the "work" is done once during development process, and thus diff --git a/news/5ba58886-1229-45ef-8a8a-dee90602ccdd.trivial.rst b/news/5ba58886-1229-45ef-8a8a-dee90602ccdd.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From 24773bc4f5ab49835294abfa2cb479e9733a4f66 Mon Sep 17 00:00:00 2001 From: Min RK Date: Fri, 23 Sep 2022 14:32:26 +0200 Subject: [PATCH 096/730] Fix build isolation on system Pythons use site.getsitepackages() where available instead of just purelib/platlib, which is often insufficient on e.g. System Pythons for Debian/macOS handle virtualenv < 20 overwriting site.py without getsitepackages() by preserving current behavior. --- news/6264.bugfix.rst | 1 + src/pip/_internal/build_env.py | 26 +++++++++++++++++++++++--- tests/functional/test_build_env.py | 15 ++++++++++++++- 3 files changed, 38 insertions(+), 4 deletions(-) create mode 100644 news/6264.bugfix.rst diff --git a/news/6264.bugfix.rst b/news/6264.bugfix.rst new file mode 100644 index 00000000000..66554a473f5 --- /dev/null +++ b/news/6264.bugfix.rst @@ -0,0 +1 @@ +Fix build environment isolation on some system Pythons. 
diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index 6213eedd14a..cc2b38bab79 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -4,6 +4,7 @@ import logging import os import pathlib +import site import sys import textwrap from collections import OrderedDict @@ -55,6 +56,26 @@ def get_runnable_pip() -> str: return os.fsdecode(source / "__pip-runner__.py") +def _get_system_sitepackages() -> Set[str]: + """Get system site packages + + Usually from site.getsitepackages, + but fallback on `get_purelib()/get_platlib()` if unavailable + (e.g. in a virtualenv created by virtualenv<20) + + Returns normalized set of strings. + """ + if hasattr(site, "getsitepackages"): + system_sites = site.getsitepackages() + else: + # virtualenv < 20 overwrites site.py without getsitepackages + # fallback on get_purelib/get_platlib. + # this is known to miss things, but shouldn't in the cases + # where getsitepackages() has been removed (inside a virtualenv) + system_sites = [get_purelib(), get_platlib()] + return {os.path.normcase(path) for path in system_sites} + + class BuildEnvironment: """Creates and manages an isolated environment to install build deps""" @@ -75,9 +96,8 @@ def __init__(self) -> None: # Customize site to: # - ensure .pth files are honored # - prevent access to system site packages - system_sites = { - os.path.normcase(site) for site in (get_purelib(), get_platlib()) - } + system_sites = _get_system_sitepackages() + self._site_dir = os.path.join(temp_dir.path, "site") if not os.path.exists(self._site_dir): os.mkdir(self._site_dir) diff --git a/tests/functional/test_build_env.py b/tests/functional/test_build_env.py index 437adb99570..869e8ad921d 100644 --- a/tests/functional/test_build_env.py +++ b/tests/functional/test_build_env.py @@ -4,7 +4,7 @@ import pytest -from pip._internal.build_env import BuildEnvironment +from pip._internal.build_env import BuildEnvironment, _get_system_sitepackages from tests.lib 
import ( PipTestEnvironment, TestPipResult, @@ -226,6 +226,10 @@ def test_build_env_isolation(script: PipTestEnvironment) -> None: script.pip_install_local("-t", target, pkg_whl) script.environ["PYTHONPATH"] = target + system_sites = _get_system_sitepackages() + # there should always be something to exclude + assert system_sites + run_with_build_env( script, "", @@ -247,5 +251,14 @@ def test_build_env_isolation(script: PipTestEnvironment) -> None: })), file=sys.stderr) print('sys.path:\n ' + '\n '.join(sys.path), file=sys.stderr) sys.exit(1) + """ + f""" + # second check: direct check of exclusion of system site packages + import os + + normalized_path = [os.path.normcase(path) for path in sys.path] + for system_path in {system_sites!r}: + assert system_path not in normalized_path, \ + f"{{system_path}} found in {{normalized_path}}" """, ) From a5621a8a819f59aad5f64a6617a14167d0b6a680 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 7 Aug 2022 14:51:21 +0200 Subject: [PATCH 097/730] Deprecate --install-option --- news/11358.removal.rst | 2 ++ src/pip/_internal/cli/cmdoptions.py | 25 ------------- src/pip/_internal/commands/download.py | 7 ++++ src/pip/_internal/commands/install.py | 10 ++++-- src/pip/_internal/commands/wheel.py | 11 ++++-- src/pip/_internal/req/req_file.py | 4 --- src/pip/_internal/req/req_install.py | 50 ++++++++++++++++++++++++++ 7 files changed, 75 insertions(+), 34 deletions(-) create mode 100644 news/11358.removal.rst diff --git a/news/11358.removal.rst b/news/11358.removal.rst new file mode 100644 index 00000000000..9767949b48b --- /dev/null +++ b/news/11358.removal.rst @@ -0,0 +1,2 @@ +Deprecate ``--install-options`` which forces pip to use the deprecated ``install`` +command of ``setuptools``. 
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index f0950332115..9ff7a3d07de 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -59,31 +59,6 @@ def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> Opti return option_group -def check_install_build_global( - options: Values, check_options: Optional[Values] = None -) -> None: - """Disable wheels if per-setup.py call options are set. - - :param options: The OptionParser options to update. - :param check_options: The options to check, if not supplied defaults to - options. - """ - if check_options is None: - check_options = options - - def getname(n: str) -> Optional[Any]: - return getattr(check_options, n, None) - - names = ["build_options", "global_options", "install_options"] - if any(map(getname, names)): - control = options.format_control - control.disallow_binaries() - logger.warning( - "Disabling all use of wheels due to the use of --build-option " - "/ --global-option / --install-option.", - ) - - def check_dist_restriction(options: Values, check_target: bool = False) -> None: """Function for determining if custom platform options are allowed. 
diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index 26a5080c790..4132e089883 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -8,6 +8,10 @@ from pip._internal.cli.req_command import RequirementCommand, with_cleanup from pip._internal.cli.status_codes import SUCCESS from pip._internal.operations.build.build_tracker import get_build_tracker +from pip._internal.req.req_install import ( + LegacySetupPyOptionsCheckMode, + check_legacy_setup_py_options, +) from pip._internal.utils.misc import ensure_dir, normalize_path, write_output from pip._internal.utils.temp_dir import TempDirectory @@ -105,6 +109,9 @@ def run(self, options: Values, args: List[str]) -> int: ) reqs = self.get_requirements(args, options, finder, session) + check_legacy_setup_py_options( + options, reqs, LegacySetupPyOptionsCheckMode.DOWNLOAD + ) preparer = self.make_requirement_preparer( temp_build_dir=directory, diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index b37303caade..1a36132b0dc 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -27,7 +27,11 @@ from pip._internal.operations.build.build_tracker import get_build_tracker from pip._internal.operations.check import ConflictDetails, check_install_conflicts from pip._internal.req import install_given_reqs -from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_install import ( + InstallRequirement, + LegacySetupPyOptionsCheckMode, + check_legacy_setup_py_options, +) from pip._internal.utils.compat import WINDOWS from pip._internal.utils.deprecation import ( LegacyInstallReasonFailedBdistWheel, @@ -280,7 +284,6 @@ def run(self, options: Values, args: List[str]) -> int: if options.use_user_site and options.target_dir is not None: raise CommandError("Can not combine '--user' and '--target'") - 
cmdoptions.check_install_build_global(options) upgrade_strategy = "to-satisfy-only" if options.upgrade: upgrade_strategy = options.upgrade_strategy @@ -339,6 +342,9 @@ def run(self, options: Values, args: List[str]) -> int: try: reqs = self.get_requirements(args, options, finder, session) + check_legacy_setup_py_options( + options, reqs, LegacySetupPyOptionsCheckMode.INSTALL + ) if "no-binary-enable-wheel-cache" in options.features_enabled: # TODO: remove format_control from WheelCache when the deprecation cycle diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index 5ddb3bd6ceb..1afbd562c65 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -10,7 +10,11 @@ from pip._internal.cli.status_codes import SUCCESS from pip._internal.exceptions import CommandError from pip._internal.operations.build.build_tracker import get_build_tracker -from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.req_install import ( + InstallRequirement, + LegacySetupPyOptionsCheckMode, + check_legacy_setup_py_options, +) from pip._internal.utils.deprecation import deprecated from pip._internal.utils.misc import ensure_dir, normalize_path from pip._internal.utils.temp_dir import TempDirectory @@ -101,8 +105,6 @@ def add_options(self) -> None: @with_cleanup def run(self, options: Values, args: List[str]) -> int: - cmdoptions.check_install_build_global(options) - session = self.get_default_session(options) finder = self._build_package_finder(options, session) @@ -120,6 +122,9 @@ def run(self, options: Values, args: List[str]) -> int: ) reqs = self.get_requirements(args, options, finder, session) + check_legacy_setup_py_options( + options, reqs, LegacySetupPyOptionsCheckMode.WHEEL + ) if "no-binary-enable-wheel-cache" in options.features_enabled: # TODO: remove format_control from WheelCache when the deprecation cycle diff --git a/src/pip/_internal/req/req_file.py 
b/src/pip/_internal/req/req_file.py index 06ea6f277aa..e4b4a99627c 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -186,10 +186,6 @@ def handle_requirement_line( constraint=line.constraint, ) else: - if options: - # Disable wheels if the user has specified build options - cmdoptions.check_install_build_global(options, line.opts) - # get the options that apply to requirements req_options = {} for dest in SUPPORTED_OPTIONS_REQ_DEST: diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 88d481dfe5c..ae7cd4f5678 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -8,6 +8,8 @@ import sys import uuid import zipfile +from enum import Enum +from optparse import Values from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union from pip._vendor.packaging.markers import Marker @@ -876,3 +878,51 @@ def check_invalid_constraint_type(req: InstallRequirement) -> str: ) return problem + + +def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool: + if getattr(options, option, None): + return True + for req in reqs: + if getattr(req, option, None): + return True + return False + + +class LegacySetupPyOptionsCheckMode(Enum): + INSTALL = 1 + WHEEL = 2 + DOWNLOAD = 3 + + +def check_legacy_setup_py_options( + options: Values, + reqs: List[InstallRequirement], + mode: LegacySetupPyOptionsCheckMode, +) -> None: + has_install_options = _has_option(options, reqs, "install_options") + has_build_options = _has_option(options, reqs, "build_options") + has_global_options = _has_option(options, reqs, "global_options") + legacy_setup_py_options_present = ( + has_install_options or has_build_options or has_global_options + ) + if not legacy_setup_py_options_present: + return + + options.format_control.disallow_binaries() + logger.warning( + "Implying --no-binary=:all: due to the presence of " + "--build-option / 
--global-option / --install-option. " + "Consider using --config-settings for more flexibility.", + ) + if mode == LegacySetupPyOptionsCheckMode.INSTALL and has_install_options: + deprecated( + reason=( + "--install-option is deprecated because " + "it forces pip to use the 'setup.py install' " + "command which is itself deprecated." + ), + issue=11358, + replacement="to use --config-settings", + gone_in="23.1", + ) From 65641f10ad302a83b4cd037ef5bc6cd7c98b733d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 7 Aug 2022 18:54:11 +0200 Subject: [PATCH 098/730] Fix test_install_requirements_with_options --- tests/unit/test_req_file.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 6b3712b0c7d..02b1ba230a3 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -881,5 +881,3 @@ def test_install_requirements_with_options( < args.index("install") < args.index(install_option) ) - assert options.format_control.no_binary == {":all:"} - assert options.format_control.only_binary == set() From 51c78b4b83546d4b02cf7a13fb03c758d533893f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Fri, 16 Sep 2022 23:10:28 +0200 Subject: [PATCH 099/730] Add tests for --xxx-options warnings and deprecation --- tests/functional/test_install.py | 2 ++ tests/functional/test_install_reqs.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index f4f8d4efb0c..f4e0a53bec4 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -842,6 +842,8 @@ def test_install_global_option(script: PipTestEnvironment) -> None: ) assert "INITools==0.1\n" in result.stdout assert not result.files_created + assert "Implying --no-binary=:all:" in result.stderr + assert "Consider using --config-settings" in result.stderr def test_install_with_hacked_egg_info( diff 
--git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index c5fc11dd7a5..19c526aab09 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -342,7 +342,7 @@ def test_install_option_in_requirements_file_overrides_cli( reqs_file = script.scratch_path.joinpath("reqs.txt") reqs_file.write_text("simple --install-option='-O0'") - script.pip( + result = script.pip( "install", "--no-index", "-f", @@ -355,6 +355,9 @@ def test_install_option_in_requirements_file_overrides_cli( simple_args = simple_sdist.args() assert "install" in simple_args assert simple_args.index("-O1") < simple_args.index("-O0") + assert "Implying --no-binary=:all:" in result.stderr + assert "Consider using --config-settings" in result.stderr + assert "--install-option is deprecated" in result.stderr def test_constraints_not_installed_by_default( From 77c83013dfe0caa10386369e340e39997f2d4c09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 25 Sep 2022 12:07:31 +0200 Subject: [PATCH 100/730] Pass global_options when building for install Before 'build' was never called in presence of install_option/global_option/build_option. Now that it can build in such cases, pass these options as well, for consistency with the wheel command. 
--- src/pip/_internal/commands/install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 1a36132b0dc..e081c27d2d2 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -452,7 +452,7 @@ def run(self, options: Values, args: List[str]) -> int: wheel_cache=wheel_cache, verify=True, build_options=[], - global_options=[], + global_options=global_options, ) # If we're using PEP 517, we cannot do a legacy setup.py install From fe58a42521074df368edecbdaaeb530695386246 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 25 Sep 2022 12:13:08 +0200 Subject: [PATCH 101/730] Warn when --install-option is ignored Due to building with pep 517. --- src/pip/_internal/req/req_install.py | 34 ++++++++++++++++++++-------- 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index ae7cd4f5678..5f29261c252 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -889,6 +889,15 @@ def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> return False +def _install_option_ignored( + install_options: List[str], reqs: List[InstallRequirement] +) -> bool: + for req in reqs: + if (install_options or req.install_options) and not req.use_pep517: + return False + return True + + class LegacySetupPyOptionsCheckMode(Enum): INSTALL = 1 WHEEL = 2 @@ -916,13 +925,18 @@ def check_legacy_setup_py_options( "Consider using --config-settings for more flexibility.", ) if mode == LegacySetupPyOptionsCheckMode.INSTALL and has_install_options: - deprecated( - reason=( - "--install-option is deprecated because " - "it forces pip to use the 'setup.py install' " - "command which is itself deprecated." 
- ), - issue=11358, - replacement="to use --config-settings", - gone_in="23.1", - ) + if _install_option_ignored(options.install_options, reqs): + logger.warning( + "Ignoring --install-option when building using PEP 517", + ) + else: + deprecated( + reason=( + "--install-option is deprecated because " + "it forces pip to use the 'setup.py install' " + "command which is itself deprecated." + ), + issue=11358, + replacement="to use --config-settings", + gone_in="23.1", + ) From a0fac9829c6059b66c0be6070b4d39a4ca90ae86 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Thu, 29 Sep 2022 01:44:25 +0200 Subject: [PATCH 102/730] Introduce a `check` CI job for branch protection (#11434) This patch adds a special check-job that produces a clear failure or success status based on how the dependent jobs are doing. It is possible to use it in GitHub's branch protection instead of having to manually add and remove individual job names via the repo settings. https://github.com/marketplace/actions/alls-green#why --- .github/workflows/ci.yml | 31 +++++++++++++++++++ ...3f-7001-4ff0-a100-c36496ff8758.trivial.rst | 0 2 files changed, 31 insertions(+) create mode 100644 news/c57bc03f-7001-4ff0-a100-c36496ff8758.trivial.rst diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c43b7dabbf3..0f0d0a9bb1e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -284,3 +284,34 @@ jobs: -m integration --verbose --numprocesses auto --showlocals --durations=5 + + check: # This job does nothing and is only used for the branch protection + if: always() + + needs: + - determine-changes + - docs + - packaging + - pre-commit + - tests-unix + - tests-windows + - tests-zipapp + - tests-importlib-metadata + - vendoring + + runs-on: ubuntu-latest + + steps: + - name: Decide whether the needed jobs succeeded or failed + uses: re-actors/alls-green@release/v1 + with: + allowed-skips: >- + ${{ + ( + needs.determine-changes.outputs.vendoring != 'true' + && 
github.event_name == 'pull_request' + ) + && 'vendoring' + || '' + }} + jobs: ${{ toJSON(needs) }} diff --git a/news/c57bc03f-7001-4ff0-a100-c36496ff8758.trivial.rst b/news/c57bc03f-7001-4ff0-a100-c36496ff8758.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From b5375b4df5449c224db1833646eb7f0031d3e34a Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Thu, 29 Sep 2022 10:24:48 +0200 Subject: [PATCH 103/730] Allow skips for the test jobs in branch protection (#11472) --- .github/workflows/ci.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0f0d0a9bb1e..1786dd47dc4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -314,4 +314,18 @@ jobs: && 'vendoring' || '' }} + , + ${{ + ( + needs.determine-changes.outputs.tests != 'true' + && github.event_name == 'pull_request' + ) + && ' + tests-unix, + tests-windows, + test-zipapp, + test-importlib-metadata, + ' + || '' + }} jobs: ${{ toJSON(needs) }} From 5d269d91d4a287b30de0e6dc1e8d7b646b4735f8 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Thu, 29 Sep 2022 14:54:42 +0200 Subject: [PATCH 104/730] Fix a typo in required CI jobs allowed to skip (#11474) test-zipapp -> tests-zipapp test-importlib-metadata -> tests-importlib-metadata --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1786dd47dc4..792a3e31153 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -323,8 +323,8 @@ jobs: && ' tests-unix, tests-windows, - test-zipapp, - test-importlib-metadata, + tests-zipapp, + tests-importlib-metadata, ' || '' }} From c24231d0d732ee0dc11632fca0ba44bfceaa924b Mon Sep 17 00:00:00 2001 From: Dominic Davis-Foster Date: Thu, 29 Sep 2022 13:58:45 +0100 Subject: [PATCH 105/730] Remove link to the distutils-sig mailing list. 
(#11470) The mailing list has been shutdown in favour of Discourse. --- README.rst | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.rst b/README.rst index 6810315526b..7e08f857c4c 100644 --- a/README.rst +++ b/README.rst @@ -33,7 +33,6 @@ If you want to get involved head over to GitHub to get the source code, look at * `GitHub page`_ * `Development documentation`_ -* `Development mailing list`_ * `Development IRC`_ Code of Conduct @@ -56,7 +55,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. .. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support .. _Issue tracking: https://github.com/pypa/pip/issues .. _Discourse channel: https://discuss.python.org/c/packaging -.. _Development mailing list: https://mail.python.org/mailman3/lists/distutils-sig.python.org/ .. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa .. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev .. 
_PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md From a5f4a9f89823df29f6b15cb313c5b3e21876174e Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 30 Sep 2022 15:43:20 +0100 Subject: [PATCH 106/730] Rename 11418.bugfix.rst to 11418.trivial.rst --- news/{11418.bugfix.rst => 11418.trivial.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{11418.bugfix.rst => 11418.trivial.rst} (100%) diff --git a/news/11418.bugfix.rst b/news/11418.trivial.rst similarity index 100% rename from news/11418.bugfix.rst rename to news/11418.trivial.rst From 550027d0168f89f2dd27d29d58f0eb2693448dcd Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Oct 2022 12:17:52 +0100 Subject: [PATCH 107/730] Add a 'work in progress' note to the document --- docs/html/topics/more-dependency-resolution.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/html/topics/more-dependency-resolution.md b/docs/html/topics/more-dependency-resolution.md index e8609c986ad..31967a6a920 100644 --- a/docs/html/topics/more-dependency-resolution.md +++ b/docs/html/topics/more-dependency-resolution.md @@ -5,6 +5,14 @@ In certain situations, pip can take a long time to determine what to install, and this article is intended to help readers understand what is happening "behind the scenes" during that process. +```{note} +This document is a work in progress. The details included are accurate (at the +time of writing), but there is additional information, in particular around +pip's interface with resolvelib, which have not yet been included. + +Contributions to improve this document are welcome. 
+``` + ## The dependency resolution problem The process of finding a set of packages to install, given a set of dependencies From f3b91157af4121796d0ac1b6773c01555c36dd8b Mon Sep 17 00:00:00 2001 From: Claudio Jolowicz Date: Tue, 27 Sep 2022 08:03:08 +0200 Subject: [PATCH 108/730] Fix typos in HTML docs --- docs/html/reference/inspect-report.md | 2 +- docs/html/topics/caching.md | 2 +- docs/html/topics/repeatable-installs.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/html/reference/inspect-report.md b/docs/html/reference/inspect-report.md index 4d367da9de8..50d83365ccf 100644 --- a/docs/html/reference/inspect-report.md +++ b/docs/html/reference/inspect-report.md @@ -44,7 +44,7 @@ the following properties: `.egg-info` directory. ```{warning} - This field may not necessary point to a directory, for instance, in the case of older + This field may not necessarily point to a directory, for instance, in the case of older `.egg` installs. ``` diff --git a/docs/html/topics/caching.md b/docs/html/topics/caching.md index 929ac3541df..351f960a6bf 100644 --- a/docs/html/topics/caching.md +++ b/docs/html/topics/caching.md @@ -96,7 +96,7 @@ In some cases, pip's caching behaviour can be undesirable. As an example, if you have package with optional C extensions, that generates a pure Python wheel when the C extension can’t be built, pip will use that cached wheel even when you later invoke it from an environment that could have built those optional C -extensions. This is because pip is seeing a cached wheel for that matches the +extensions. This is because pip is seeing a cached wheel that matches the package being built, and pip assumes that the result of building a package from a package index is deterministic. 
diff --git a/docs/html/topics/repeatable-installs.md b/docs/html/topics/repeatable-installs.md index d4609f96866..d3f06679587 100644 --- a/docs/html/topics/repeatable-installs.md +++ b/docs/html/topics/repeatable-installs.md @@ -20,7 +20,7 @@ specific version. ``` A requirements file, containing pinned package versions can be generated using -{ref}`pip freeze`. This would not only the top-level packages, but also all of +{ref}`pip freeze`. This would pin not only the top-level packages, but also all of their transitive dependencies. Performing the installation using {ref}`--no-deps ` would provide an extra dose of insurance against installing anything not explicitly listed. From 74dfed55c4b1fa9a7bbf91ea996734ed8a9267c6 Mon Sep 17 00:00:00 2001 From: Claudio Jolowicz Date: Mon, 3 Oct 2022 11:57:13 +0200 Subject: [PATCH 109/730] Mark as trivial --- news/A12E8588-9311-4DAB-BD89-6FBC3F296275.trivial.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 news/A12E8588-9311-4DAB-BD89-6FBC3F296275.trivial.rst diff --git a/news/A12E8588-9311-4DAB-BD89-6FBC3F296275.trivial.rst b/news/A12E8588-9311-4DAB-BD89-6FBC3F296275.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From e6e7c12b6dfbf520bcabed243071abaaaac00d44 Mon Sep 17 00:00:00 2001 From: Felipe Peter Date: Tue, 4 Oct 2022 17:11:46 +0800 Subject: [PATCH 110/730] Disallow freeze format with pip list --outdated (#11482) Co-authored-by: Tzu-ping Chung --- news/9789.removal.rst | 1 + src/pip/_internal/commands/list.py | 5 +++++ tests/functional/test_list.py | 8 ++++++-- 3 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 news/9789.removal.rst diff --git a/news/9789.removal.rst b/news/9789.removal.rst new file mode 100644 index 00000000000..90ded923507 --- /dev/null +++ b/news/9789.removal.rst @@ -0,0 +1 @@ +Remove the ability to use ``pip list --outdated`` in combination with ``--format=freeze``. 
diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index a9b08a0bc88..8e1426dbb6c 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -155,6 +155,11 @@ def run(self, options: Values, args: List[str]) -> int: if options.outdated and options.uptodate: raise CommandError("Options --outdated and --uptodate cannot be combined.") + if options.outdated and options.list_format == "freeze": + raise CommandError( + "List format 'freeze' can not be used with the --outdated option." + ) + cmdoptions.check_list_path_option(options) skip = set(stdlib_pkgs) diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index c799aabd255..c7fdec2f2fc 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -577,7 +577,7 @@ def test_outdated_formats(script: PipTestEnvironment, data: TestData) -> None: assert "Package Version Latest Type" in result.stdout assert "simple 1.0 1.1 wheel" in result.stdout - # Check freeze + # Check that freeze is not allowed result = script.pip( "list", "--no-index", @@ -585,8 +585,12 @@ def test_outdated_formats(script: PipTestEnvironment, data: TestData) -> None: wheelhouse_path, "--outdated", "--format=freeze", + expect_error=True, + ) + assert ( + "List format 'freeze' can not be used with the --outdated option." + in result.stderr ) - assert "simple==1.0" in result.stdout # Check json result = script.pip( From b777bcda819961092ae87ca7512e0b27c576d19c Mon Sep 17 00:00:00 2001 From: Phil Elson Date: Wed, 5 Oct 2022 18:19:28 +0200 Subject: [PATCH 111/730] Add the "base" config level concept, to allow configuration of all virtual environments sharing the same base. 
The new functionality serves a use case which was not previously possible with pip configuration files, namely the situation where you have a base Python installation and want to influence the pip configuration for all derivative virtual environments *without* changing the config for all other environments on a machine (global), or for all other environment run by the same user (user). Concretely, this could be used for a centrally managed network mounted filesystem based Python installation, from which multiple users can build virtual environments and inside which a specific pip configuration is needed (e.g. an index URL). --- docs/html/topics/configuration.md | 27 ++++++++++++++++-------- news/9752.feature.rst | 1 + src/pip/_internal/configuration.py | 18 +++++++++++++--- tests/unit/test_configuration.py | 33 ++++++++++++++++++++++++++++-- tests/unit/test_options.py | 2 +- 5 files changed, 67 insertions(+), 14 deletions(-) create mode 100644 news/9752.feature.rst diff --git a/docs/html/topics/configuration.md b/docs/html/topics/configuration.md index e4aafcd2b98..c544d0c700c 100644 --- a/docs/html/topics/configuration.md +++ b/docs/html/topics/configuration.md @@ -19,13 +19,14 @@ and how they are related to pip's various command line options. ## Configuration Files -Configuration files can change the default values for command line option. -They are written using a standard INI style configuration files. +Configuration files can change the default values for command line options. +They are written using standard INI style configuration files. -pip has 3 "levels" of configuration files: +pip has 4 "levels" of configuration files: -- `global`: system-wide configuration file, shared across users. -- `user`: per-user configuration file. +- `global`: system-wide configuration file, shared across all users. +- `user`: per-user configuration file, shared across all environments. 
+- `base` : per-base environment configuration file, shared across all virtualenvs with the same base. (added in pip `v23`) - `site`: per-environment configuration file; i.e. per-virtualenv. ### Location @@ -47,8 +48,11 @@ User The legacy "per-user" configuration file is also loaded, if it exists: {file}`$HOME/.pip/pip.conf`. +Base +: {file}`\{sys.base_prefix\}/pip.conf` + Site -: {file}`$VIRTUAL_ENV/pip.conf` +: {file}`\{sys.prefix\}/pip.conf` ``` ```{tab} MacOS @@ -63,8 +67,11 @@ User The legacy "per-user" configuration file is also loaded, if it exists: {file}`$HOME/.pip/pip.conf`. +Base +: {file}`\{sys.base_prefix\}/pip.conf` + Site -: {file}`$VIRTUAL_ENV/pip.conf` +: {file}`\{sys.prefix\}/pip.conf` ``` ```{tab} Windows @@ -81,8 +88,11 @@ User The legacy "per-user" configuration file is also loaded, if it exists: {file}`%HOME%\\pip\\pip.ini` +Base +: {file}`\{sys.base_prefix\}\\pip.ini` + Site -: {file}`%VIRTUAL_ENV%\\pip.ini` +: {file}`\{sys.prefix\}\\pip.ini` ``` ### `PIP_CONFIG_FILE` @@ -102,6 +112,7 @@ order: - `PIP_CONFIG_FILE`, if given. - Global - User +- Base - Site Each file read overrides any values read from previous files, so if the diff --git a/news/9752.feature.rst b/news/9752.feature.rst new file mode 100644 index 00000000000..d515267be21 --- /dev/null +++ b/news/9752.feature.rst @@ -0,0 +1 @@ +In the case of virtual environments, configuration files are now also included from the base installation. diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 8fd46c9b8e0..6cce8bcbcce 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -36,12 +36,20 @@ kinds = enum( USER="user", # User Specific GLOBAL="global", # System Wide - SITE="site", # [Virtual] Environment Specific + BASE="base", # Base environment specific (e.g. for all venvs with the same base) + SITE="site", # Environment Specific (e.g. 
per venv) ENV="env", # from PIP_CONFIG_FILE ENV_VAR="env-var", # from Environment Variables ) -OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR -VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE +OVERRIDE_ORDER = ( + kinds.GLOBAL, + kinds.USER, + kinds.BASE, + kinds.SITE, + kinds.ENV, + kinds.ENV_VAR, +) +VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.BASE, kinds.SITE logger = getLogger(__name__) @@ -70,6 +78,7 @@ def get_configuration_files() -> Dict[Kind, List[str]]: os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip") ] + base_config_file = os.path.join(sys.base_prefix, CONFIG_BASENAME) site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) legacy_config_file = os.path.join( os.path.expanduser("~"), @@ -78,6 +87,7 @@ def get_configuration_files() -> Dict[Kind, List[str]]: ) new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME) return { + kinds.BASE: [base_config_file], kinds.GLOBAL: global_config_files, kinds.SITE: [site_config_file], kinds.USER: [legacy_config_file, new_config_file], @@ -344,6 +354,8 @@ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: # The legacy config file is overridden by the new config file yield kinds.USER, config_files[kinds.USER] + yield kinds.BASE, config_files[kinds.BASE] + # finally virtualenv configuration first trumping others yield kinds.SITE, config_files[kinds.SITE] diff --git a/tests/unit/test_configuration.py b/tests/unit/test_configuration.py index c6b44d45aad..b0d655d8fb6 100644 --- a/tests/unit/test_configuration.py +++ b/tests/unit/test_configuration.py @@ -24,12 +24,18 @@ def test_user_loading(self) -> None: self.configuration.load() assert self.configuration.get_value("test.hello") == "2" - def test_site_loading(self) -> None: - self.patch_configuration(kinds.SITE, {"test.hello": "3"}) + def test_base_loading(self) -> None: + self.patch_configuration(kinds.BASE, {"test.hello": "3"}) self.configuration.load() 
assert self.configuration.get_value("test.hello") == "3" + def test_site_loading(self) -> None: + self.patch_configuration(kinds.SITE, {"test.hello": "4"}) + + self.configuration.load() + assert self.configuration.get_value("test.hello") == "4" + def test_environment_config_loading(self, monkeypatch: pytest.MonkeyPatch) -> None: contents = """ [test] @@ -107,6 +113,15 @@ def test_no_such_key_error_message_missing_option(self) -> None: with pytest.raises(ConfigurationError, match=pat): self.configuration.get_value("global.index-url") + def test_overrides_normalization(self) -> None: + # Check that normalized names are used in precedence calculations. + # Reminder: USER has higher precedence than GLOBAL. + self.patch_configuration(kinds.USER, {"test.hello-world": "1"}) + self.patch_configuration(kinds.GLOBAL, {"test.hello_world": "0"}) + self.configuration.load() + + assert self.configuration.get_value("test.hello_world") == "1" + class TestConfigurationPrecedence(ConfigurationMixin): # Tests for methods to that determine the order of precedence of @@ -133,6 +148,13 @@ def test_env_overides_global(self) -> None: assert self.configuration.get_value("test.hello") == "0" + def test_site_overides_base(self) -> None: + self.patch_configuration(kinds.BASE, {"test.hello": "2"}) + self.patch_configuration(kinds.SITE, {"test.hello": "1"}) + self.configuration.load() + + assert self.configuration.get_value("test.hello") == "1" + def test_site_overides_user(self) -> None: self.patch_configuration(kinds.USER, {"test.hello": "2"}) self.patch_configuration(kinds.SITE, {"test.hello": "1"}) @@ -147,6 +169,13 @@ def test_site_overides_global(self) -> None: assert self.configuration.get_value("test.hello") == "1" + def test_base_overides_user(self) -> None: + self.patch_configuration(kinds.USER, {"test.hello": "2"}) + self.patch_configuration(kinds.BASE, {"test.hello": "1"}) + self.configuration.load() + + assert self.configuration.get_value("test.hello") == "1" + def 
test_user_overides_global(self) -> None: self.patch_configuration(kinds.GLOBAL, {"test.hello": "3"}) self.patch_configuration(kinds.USER, {"test.hello": "2"}) diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py index ada5e1c3076..39396512a97 100644 --- a/tests/unit/test_options.py +++ b/tests/unit/test_options.py @@ -588,7 +588,7 @@ def test_venv_config_file_found(self, monkeypatch: pytest.MonkeyPatch) -> None: for _, val in cp.iter_config_files(): files.extend(val) - assert len(files) == 4 + assert len(files) == 5 @pytest.mark.parametrize( "args, expect", From 3ca52dc55c01ca0ba3de6ee7087277ea5d44ade5 Mon Sep 17 00:00:00 2001 From: Riccardo Schirone Date: Thu, 6 Oct 2022 14:09:48 +0200 Subject: [PATCH 112/730] Raise RequirementsFileParseError when missing closing quotation shlex.split, used to split options in requirements.txt files, might raise a ValueError when the input string is not well formed. Catch the ValueError and re-raise it as a RequirementsFileParseError instead. --- news/11491.bugfix.rst | 1 + src/pip/_internal/req/req_file.py | 7 ++++++- tests/unit/test_req_file.py | 14 ++++++++++++++ 3 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 news/11491.bugfix.rst diff --git a/news/11491.bugfix.rst b/news/11491.bugfix.rst new file mode 100644 index 00000000000..a8f53927fa1 --- /dev/null +++ b/news/11491.bugfix.rst @@ -0,0 +1 @@ +Raise RequirementsFileParseError when parsing malformed requirements options that can't be sucessfully parsed by shlex. 
diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index 06ea6f277aa..1f0a277e158 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -397,7 +397,12 @@ def parse_line(line: str) -> Tuple[str, Values]: args_str, options_str = break_args_options(line) - opts, _ = parser.parse_args(shlex.split(options_str), defaults) + try: + options = shlex.split(options_str) + except ValueError as e: + raise OptionParsingError(f"Could not split options: {options_str}") from e + + opts, _ = parser.parse_args(options, defaults) return args_str, opts diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 6b3712b0c7d..76fac40ad20 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -786,6 +786,20 @@ def test_req_file_parse_comment_start_of_line( assert not reqs + def test_invalid_options(self, tmpdir: Path, finder: PackageFinder) -> None: + """ + Test parsing invalid options such as missing closing quotation + """ + with open(tmpdir.joinpath("req1.txt"), "w") as fp: + fp.write("--'data\n") + + with pytest.raises(RequirementsFileParseError): + list( + parse_reqfile( + tmpdir.joinpath("req1.txt"), finder=finder, session=PipSession() + ) + ) + def test_req_file_parse_comment_end_of_line_with_url( self, tmpdir: Path, finder: PackageFinder ) -> None: From 6421ab59d57522714183c731c05179e530cfcbc5 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 7 Oct 2022 11:56:34 +0100 Subject: [PATCH 113/730] Remove the opt-in for `2020-resolver` This has been the default for quite some time now. 
--- src/pip/_internal/cli/base_command.py | 7 ------- src/pip/_internal/cli/cmdoptions.py | 1 - tests/unit/test_req_file.py | 12 ++++++++++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 0774f26081f..5bd7e67e649 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -151,13 +151,6 @@ def _main(self, args: List[str]) -> int: ) options.cache_dir = None - if "2020-resolver" in options.features_enabled: - logger.warning( - "--use-feature=2020-resolver no longer has any effect, " - "since it is now the default dependency resolver in pip. " - "This will become an error in pip 21.0." - ) - def intercepts_unhandled_exc( run_func: Callable[..., int] ) -> Callable[..., int]: diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 9ff7a3d07de..b4e2560dea2 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -983,7 +983,6 @@ def check_list_path_option(options: Values) -> None: action="append", default=[], choices=[ - "2020-resolver", "fast-deps", "truststore", "no-binary-enable-wheel-cache", diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index fde0cb2f712..228d0aaa49c 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -459,8 +459,16 @@ def test_use_feature( self, line_processor: LineProcessor, options: mock.Mock ) -> None: """--use-feature can be set in requirements files.""" - line_processor("--use-feature=2020-resolver", "filename", 1, options=options) - assert "2020-resolver" in options.features_enabled + line_processor("--use-feature=fast-deps", "filename", 1, options=options) + + def test_use_feature_with_error( + self, line_processor: LineProcessor, options: mock.Mock + ) -> None: + """--use-feature triggers error when parsing requirements files.""" + with pytest.raises(RequirementsFileParseError): + 
line_processor( + "--use-feature=2020-resolver", "filename", 1, options=options + ) def test_relative_local_find_links( self, From 7662593941cf5a3519741b5c99973f0a9c5b50b1 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 7 Oct 2022 11:59:08 +0100 Subject: [PATCH 114/730] :newspaper: --- news/11493.removal.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11493.removal.rst diff --git a/news/11493.removal.rst b/news/11493.removal.rst new file mode 100644 index 00000000000..de559e899bb --- /dev/null +++ b/news/11493.removal.rst @@ -0,0 +1 @@ +Remove ``--use-feature=2020-resolver`` opt-in flag. This was supposed to be removed in 21.0, but missed during that release cycle. From 2b9e3a958acf5810bb935f95a35ac260e5ac681b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 7 Oct 2022 12:04:14 +0100 Subject: [PATCH 115/730] Remove pre-commit from GitHub Actions This is a step in moving linting completely to pre-commit.ci. --- .github/workflows/ci.yml | 20 ++++---------------- .pre-commit-config.yaml | 1 - 2 files changed, 4 insertions(+), 17 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 792a3e31153..d3c64ee40a3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -55,17 +55,6 @@ jobs: - "noxfile.py" if: github.event_name == 'pull_request' - pre-commit: - name: pre-commit - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - - uses: pre-commit/action@v2.0.0 - with: - extra_args: --all-files --hook-stage=manual - packaging: name: packaging runs-on: ubuntu-latest @@ -103,7 +92,7 @@ jobs: name: tests / ${{ matrix.python }} / ${{ matrix.os }} runs-on: ${{ matrix.os }}-latest - needs: [pre-commit, packaging, determine-changes] + needs: [packaging, determine-changes] if: >- needs.determine-changes.outputs.tests == 'true' || github.event_name != 'pull_request' @@ -151,7 +140,7 @@ jobs: name: tests / ${{ matrix.python }} / ${{ matrix.os }} / ${{ 
matrix.group }} runs-on: ${{ matrix.os }}-latest - needs: [pre-commit, packaging, determine-changes] + needs: [packaging, determine-changes] if: >- needs.determine-changes.outputs.tests == 'true' || github.event_name != 'pull_request' @@ -225,7 +214,7 @@ jobs: name: tests / zipapp runs-on: ubuntu-latest - needs: [pre-commit, packaging, determine-changes] + needs: [packaging, determine-changes] if: >- needs.determine-changes.outputs.tests == 'true' || github.event_name != 'pull_request' @@ -257,7 +246,7 @@ jobs: env: _PIP_USE_IMPORTLIB_METADATA: 'true' - needs: [pre-commit, packaging, determine-changes] + needs: [packaging, determine-changes] if: >- needs.determine-changes.outputs.tests == 'true' || github.event_name != 'pull_request' @@ -292,7 +281,6 @@ jobs: - determine-changes - docs - packaging - - pre-commit - tests-unix - tests-windows - tests-zipapp diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6dde3d6ff2d..4d121315a0e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -77,4 +77,3 @@ repos: rev: '0.48' hooks: - id: check-manifest - stages: [manual] From 7257d5b31ae075894d363b0cd6e7ff8ca81f2658 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 7 Oct 2022 12:07:07 +0100 Subject: [PATCH 116/730] Tweak the pre-commit.ci configuration This avoids modifying PRs and slows down the frequency of auto-update pull requests. 
--- .pre-commit-config.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4d121315a0e..6e561f09c13 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -77,3 +77,8 @@ repos: rev: '0.48' hooks: - id: check-manifest + +ci: + autofix_prs: false + autoupdate_commit_msg: 'pre-commit autoupdate' + autoupdate_schedule: monthly From a8716b979dde361f2042278c7a3e973be7d32598 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 7 Oct 2022 15:28:12 +0100 Subject: [PATCH 117/730] Switch to the no-response GitHub Action The no-response bot has been turned off. --- .github/no-response.yml | 11 ----------- .github/workflows/no-response.yml | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+), 11 deletions(-) delete mode 100644 .github/no-response.yml create mode 100644 .github/workflows/no-response.yml diff --git a/.github/no-response.yml b/.github/no-response.yml deleted file mode 100644 index 47afde800e0..00000000000 --- a/.github/no-response.yml +++ /dev/null @@ -1,11 +0,0 @@ -# Number of days of inactivity before issue is closed for lack of response -daysUntilClose: 15 -# Label requiring a response -responseRequiredLabel: "S: awaiting response" -# Comment to post when closing an Issue for lack of response. Set to `false` to disable -closeComment: > - This issue has been automatically closed because there has been no response - to our request for more information from the original author. With only the - information that is currently in the issue, we don't have enough information - to take action. Please reach out if you have or find the answers we need so - that we can investigate further. 
diff --git a/.github/workflows/no-response.yml b/.github/workflows/no-response.yml new file mode 100644 index 00000000000..939290b93e5 --- /dev/null +++ b/.github/workflows/no-response.yml @@ -0,0 +1,19 @@ +name: No Response + +# Both `issue_comment` and `scheduled` event types are required for this Action +# to work properly. +on: + issue_comment: + types: [created] + schedule: + # Schedule for five minutes after the hour, every hour + - cron: '5 * * * *' + +jobs: + noResponse: + runs-on: ubuntu-latest + steps: + - uses: lee-dohm/no-response@v0.5.0 + with: + token: ${{ github.token }} + responseRequiredLabel: "S: awaiting response" From d37034ccf478ad34d7f8f26cbc62065d67952668 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?= Date: Fri, 7 Oct 2022 19:16:33 +0100 Subject: [PATCH 118/730] bazaar: Use lightweight checkouts rather than a full branch clone (#11264) --- news/5444.feature.rst | 1 + src/pip/_internal/vcs/bazaar.py | 15 +++++++++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 news/5444.feature.rst diff --git a/news/5444.feature.rst b/news/5444.feature.rst new file mode 100644 index 00000000000..19780e694ec --- /dev/null +++ b/news/5444.feature.rst @@ -0,0 +1 @@ +Use the much faster 'bzr co --lightweight' to obtain a copy of a Bazaar tree. 
diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py index a7b16e2e052..06c80e48a39 100644 --- a/src/pip/_internal/vcs/bazaar.py +++ b/src/pip/_internal/vcs/bazaar.py @@ -49,14 +49,25 @@ def fetch_new( flag = "" else: flag = f"-{'v'*verbosity}" - cmd_args = make_command("branch", flag, rev_options.to_args(), url, dest) + cmd_args = make_command( + "checkout", "--lightweight", flag, rev_options.to_args(), url, dest + ) self.run_command(cmd_args) def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: self.run_command(make_command("switch", url), cwd=dest) def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None: - cmd_args = make_command("pull", "-q", rev_options.to_args()) + output = self.run_command( + make_command("info"), show_stdout=False, stdout_only=True, cwd=dest + ) + if output.startswith("Standalone "): + # Older versions of pip used to create standalone branches. + # Convert the standalone branch to a checkout by calling "bzr bind". 
+ cmd_args = make_command("bind", "-q", url) + self.run_command(cmd_args, cwd=dest) + + cmd_args = make_command("update", "-q", rev_options.to_args()) self.run_command(cmd_args, cwd=dest) @classmethod From f86f52c1b757da998adabbf120ccef474806c3e8 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 7 Oct 2022 20:52:35 +0100 Subject: [PATCH 119/730] Get rid of the timeout test as it's too flaky to be useful --- tests/functional/test_requests.py | 22 ---------------------- 1 file changed, 22 deletions(-) delete mode 100644 tests/functional/test_requests.py diff --git a/tests/functional/test_requests.py b/tests/functional/test_requests.py deleted file mode 100644 index 2ef121fedcb..00000000000 --- a/tests/functional/test_requests.py +++ /dev/null @@ -1,22 +0,0 @@ -import pytest - -from tests.lib import PipTestEnvironment - - -@pytest.mark.network -def test_timeout(script: PipTestEnvironment) -> None: - result = script.pip( - "--retries", - "0", - "--timeout", - "0.00001", - "install", - "-vvv", - "INITools", - expect_error=True, - ) - assert ( - "Could not fetch URL https://pypi.org/simple/initools/: " - "connection error: HTTPSConnectionPool(host='pypi.org', port=443): " - "Max retries exceeded with url: /simple/initools/ " - ) in result.stdout From c39b2cc50c029dd20df62749b99b2bbb2372e9ae Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 7 Oct 2022 15:30:35 +0100 Subject: [PATCH 120/730] Run `check-manifest` without build isolation This is necessary to work in no-network environments like pre-commit.ci. 
--- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6e561f09c13..366d859dba5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -77,6 +77,7 @@ repos: rev: '0.48' hooks: - id: check-manifest + args: [--no-build-isolation] ci: autofix_prs: false From 430a846d3fa5957e77e0f82e6fc7a91f626f34a3 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 8 Oct 2022 15:56:57 +0100 Subject: [PATCH 121/730] Add a warning comment to the vendoring session in noxfile --- noxfile.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/noxfile.py b/noxfile.py index 8199b64a681..975f52c2571 100644 --- a/noxfile.py +++ b/noxfile.py @@ -172,6 +172,10 @@ def lint(session: nox.Session) -> None: session.run("pre-commit", "run", *args) +# NOTE: This session will COMMIT upgardes to vendored libraries. +# You should therefore not run it directly against main, as you +# will put your main branch out of sync with upstream. Always run +# it on a dedicated branch @nox.session def vendoring(session: nox.Session) -> None: session.install("vendoring~=1.2.0") From bed8c14b251468cbddae542823ad023cd1be3f46 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 8 Oct 2022 17:30:30 +0100 Subject: [PATCH 122/730] Update noxfile.py Co-authored-by: Pradyun Gedam --- noxfile.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/noxfile.py b/noxfile.py index 975f52c2571..2227c33eee1 100644 --- a/noxfile.py +++ b/noxfile.py @@ -172,10 +172,13 @@ def lint(session: nox.Session) -> None: session.run("pre-commit", "run", *args) -# NOTE: This session will COMMIT upgardes to vendored libraries. -# You should therefore not run it directly against main, as you -# will put your main branch out of sync with upstream. Always run -# it on a dedicated branch +# NOTE: This session will COMMIT upgrades to vendored libraries. +# You should therefore not run it directly against `main`. 
If you +# do (asusming you started with a clean main), you can run: +# +# git checkout -b vendoring-updates +# git checkout main +# git reset --hard origin/main @nox.session def vendoring(session: nox.Session) -> None: session.install("vendoring~=1.2.0") From 08b7a100cae368c544b1e353f285286f38fcded5 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 8 Oct 2022 17:47:22 +0100 Subject: [PATCH 123/730] Update noxfile.py --- noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index 2227c33eee1..7692bf4b507 100644 --- a/noxfile.py +++ b/noxfile.py @@ -174,7 +174,7 @@ def lint(session: nox.Session) -> None: # NOTE: This session will COMMIT upgrades to vendored libraries. # You should therefore not run it directly against `main`. If you -# do (asusming you started with a clean main), you can run: +# do (assuming you started with a clean main), you can run: # # git checkout -b vendoring-updates # git checkout main From c423e4290964ff8d10c29f39c1f46e1ee889bbce Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 10 Oct 2022 12:20:21 +0100 Subject: [PATCH 124/730] Upgrade distlib to 0.3.6 --- news/distlib.vendor.rst | 1 + src/pip/_vendor/distlib/__init__.py | 4 ++-- src/pip/_vendor/distlib/t32.exe | Bin 97792 -> 97792 bytes src/pip/_vendor/distlib/t64-arm.exe | Bin 182784 -> 182784 bytes src/pip/_vendor/distlib/t64.exe | Bin 107520 -> 108032 bytes src/pip/_vendor/distlib/w32.exe | Bin 91648 -> 91648 bytes src/pip/_vendor/distlib/w64-arm.exe | Bin 168448 -> 168448 bytes src/pip/_vendor/distlib/w64.exe | Bin 101888 -> 101888 bytes src/pip/_vendor/vendor.txt | 2 +- 9 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 news/distlib.vendor.rst diff --git a/news/distlib.vendor.rst b/news/distlib.vendor.rst new file mode 100644 index 00000000000..5fa036110fb --- /dev/null +++ b/news/distlib.vendor.rst @@ -0,0 +1 @@ +Upgrade distlib to 0.3.6 diff --git a/src/pip/_vendor/distlib/__init__.py 
b/src/pip/_vendor/distlib/__init__.py index 505556517f8..962173c8d0a 100644 --- a/src/pip/_vendor/distlib/__init__.py +++ b/src/pip/_vendor/distlib/__init__.py @@ -1,12 +1,12 @@ # -*- coding: utf-8 -*- # -# Copyright (C) 2012-2019 Vinay Sajip. +# Copyright (C) 2012-2022 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # import logging -__version__ = '0.3.5' +__version__ = '0.3.6' class DistlibException(Exception): pass diff --git a/src/pip/_vendor/distlib/t32.exe b/src/pip/_vendor/distlib/t32.exe index 0aaa386d75662d2f874aab78460dcd39b2ee43b0..52154f0be32cc2bdbf98af131d477900667d0abd 100644 GIT binary patch delta 15993 zcmd^mdsx&}_wV;RfPf4Fa=)X1ppwr0KFp|~XlP^DASU453AMb;cqs{Wz{C+lx2ctx zrlzIl1#kI!K~eFVmn_XoVK1+3!wS-rlAQC|-$8rc-#O27{yKjho@dQod+oJvYp=cb z+G~HA*XA>?&8OPH(`Cl2vS}A)o(|rS`V3wz(x;w5{_85g)O$i|OsyAE|I}SbkJ%)t z7m%I~{v>skKo4x^*S4pbLi+M+V{NQWuKmxF&2hQB4<`!zb<8tb11I8%A0N!$Bhh?s zFWCm3yvPsqKj6=C>smN2ZdsRDa)1xxN3QDAi;oIKwO;n|v+YP@u94sQZu}S$EtX2*Zi~&(u@6VJok6QLTj&yE@ebn+WW2XJ@yDWP zj87QjY{qyoq@8hj6dCFr%3mQDz2o?y`ttGwO4nO&$ zN=GZ*LwH|hr_Y624_&QnRIiaNU)}idXf|Q7{*>ctlb8Hae^&BM0LK*+yH2&YRO%

w zoc7J_=pSb0s_eer^a@Gv>pi45##`X5^LEyI>kIv+f5$pr8VOp_tHr_$tob%ph0XyH z&A3`=NCb)tDr#p3Sm@bU;I8H-FXvb8eEJIc(C;0k86D@9JaT_q@`vj>ejiq5x4FNq z%x6BC%Zu zPd=5ly4}tb!EPlrLh?#BLU(u^nB9B70+*}e5^FRZjTX?8_(dCkGfnn^{J93}ZB?)IL;qmZqtxGxOQ zGnnucRySA7s{Vm}hN^p6xjURg!Fg!I&^&9t%a}`Mh=Yi(# zqaO*~p%Uw>8*9?kUeg{WRBRFk(1V0_dY%6lSc*xe}R0po-18ZK1pY7h?Y=I<@l&e`@E5E6Kz-nd*HUCXs z3rgj`Anyis3I6aQt6w52_HTvhl!S>|L`@_JQvumu)c zY^Bq?vEenM2OnV06LIy~ZBL=H6sHZ$X(KF4-_UN+ld&unwT>t&U5WX)n%&`9pjc>s zGCwpi{<@UWFx-kqjwo$*n?01i0yd~jN196xhjzhH_CshIznb`lCGw9+TG#;b2T9LI z_8WOKY(~gh6HLYaW+npYwuLSwPs2utcP2h(^C9EH7kB=vKrchqDLfQ9cX$k8yw2{n=*OkTrnWc1J>qx3y9^Dj#d1CtD*NLDzCPZk}#tTe(|8*HI;Ad{$gevXG9EZJgah*5=X zkL=ay6gns z5SbmFA0CK-xlZWo>~Zzn`SZ}X5cE$PqgDJk5*uTP=ngw_#xq9e{a~nA)!=3<^^}+XzSrbmj) zqZFc;hYX}2jHMJipSbP5dGzE6wBs7wRxH2pDqY?3eBf5MvauI4%+;vF4oGO_65^MX z8oL?PE2* zJI9}XUd#~3i!S9<=z7H?dmlJGoLvtI(-|l$aJ4q2Td0kp`udqMt_~BNhS^3(2n-6& zhm~YQ4+)Nst3C3>=ip3MP322OR{9c=_Z%bgRgzUbSNI!c9A||;^Bz%Tr0~Cyf{YPZ zl06xz{3UWDqac8tv=~B1Secs47lQfF)je@Gp#MAFpY8e(JE(1GYTf%0xk#DDHYh z;aB2EPcklAjClyv{OKM>DB-CK*0Ml<$__(Sww}?}+aRqClksD5D@P%5D@Wp&ihmT2 z(8}xvt)tP=!9u%(n_BSG)XSB9-{t4>uN-{KGL-HbiB<~gT;r)L`xe-prHvfp4`Tei z@zZXZvDWm05A==R#)$8|_mo`jeKd=0rwuv1;eqnh8=JqN1RI0xEZd2{^EzrB)qX8jb8Mm`37L7bJkZ&roF@jn}PI#XEM$}BTH;J3*PjXq@8Z7gMNlD^sDEw60~qU*^~jg;R? zS~U5hq*O9U8y|`}o9!K(v$0`faB}-m*xTE0#gU2HqLd5Y;aKemkE60;4SuyB-NLV< z2Yn1X-hM{sl6%_Kd_m0uomAvK7Z$N*zafqn{V|4oV_XKGV`An)K8gIAX@mEeZ@S7? 
zZ^$roCB~jRouk90GWO9x#v@cpC@P$aHZVQDyh!XOJ=Yol2HwzYW71*nbAU zlg#fwgg-(q_0QnXlE3>W@}(qxKnf0>z5_M|m7HPyz{P3}!Cp!yj|OD$W|EwfnsGqT z9Y@gor(6mSdtWO(a+WDidU_wOa)8aH6c9%vPUJnrp7TOn3yLe_)3GO=PoFvx>U|i> zLx0`jwd8P4SN;%b$oZt_38CGfax03evMJo8@SW`>HAps|(fh?&E%XlAKCnOk0eLvE zEok&9VVq&MsLmK?B5@CTBdL9aA+G*5qi2k;j`bL0WsxnD{p+vD($FW7cK16;j^Eir@}j7?P5f2KEAN zahtufYk)U~7J#9_k_I!fvp(28*w0aKr96pxsfgc9X1yfk50cMb>T6wsnbx_(Pa`?* z&AHrg0jBn3F2H$ET+MJlW8KOetgFW1}aVJh-R(4Jf5-Jw0o#=!={H?MdURNe|XxcaBSJI0g;Fg{oR3{Ot$ z$tgU!o}Qe+lZ*G{%${73Czs{P@t$0cC-pw!IujoAuKlS3mo|BAah=trY$SZ1 z9!A%g*QFbi{{|-b^-PZSK3PCbBh6f;G8fnekDyPJ^1R;s60#@H;LChA8&^ag=e@`` zkbc87d?cAWOv&ey-NRl9VI5*F;kKwpf+yEVqFxyk-37+LO239%iOnwj))X*&4SDMo zv$=1!a3TkYqsWt2 zdWf!Pku+xi2}h8@-$E)@L_V3k(e_ zNLv2OqIePJ+4ylBHnVjV6O-MK<4hj%GU#{~cNL?oPKiMI1@ z#BHqfvnztDfY!zGS_}K5!LS!r+9csPGg>pu>|v$fBU9y_&E)Dj-SevwugibhPN0FF z-{o28c!&jeSWNM-?GT7Z!n`1%e0m)_o!!rB9h=;Hy`OeKtNHW<>0J;J)dwp2^BYCKiml|Fr^gZX0CV<^TpTf7mS`4orzxAWbNUY=xgMB zK^8<8GNL=LsL_v*@q8|M{Z%D=-Hos2g_>CltxmVw-WM8}2@T989Y?-^Qq9P8{yrH! zGA=UlOKg_b0w`RX)~6PCxtWRR`ypu<8AHApDdia^jUO`{`=}M`>2_pjv&HuRoj0d2 zCgF&{#pW)s1^*AWd4=rK@_C`Am-Q-;i8l>rgz|csAs#M^6gpA7#JIuH%$oer6 zJ&v5i#ySjzK}5^h`qVLi27u40>FOw;Jg`bllf@O4Z?zpEcgDo^ymr}Bm`X1ItBh)j zgR32jxDcxwOb>uq&vj+Z2a@!$!}wNGI#ynGerQ#0ieJyu&O1R~$70U}94ol@^F)q& zPW638y$Y&u7az*zXr=ap=p-BNp}1hn0!Jjhd5N{}kM^BI#7Rsfy|H1pnC*eIRw!a! 
zH~l+Lesc?NkEks2-Vz*C{toFiEUd3f4aQo~Nz$dmEq{u80g&Lno)E}GCK)QDVV z6ir6rnJJx34o&Fc&w1!~FOc6Rj2^x6g1|KwN&6~aM%zs)*YV4=a&WNz*mSSTY!)h@ zE$|v$*KsRuuDIz)po4@OOd;vZ0_w+v2|>Q@4a>)uF!v9ha2nY)akHqcnT)XQ321{- z!Wgj|9*KU<#FvqR*Ic6C&XT*YT{qBA$TaD)e(cdFqIz2asW+{D`P0CxkC+&&~X&(zzNEcqD}^o!lvEPxpVGf>TX7&eCAc z9-(!Jr-xV}c_zjNBN;L!8E-t(r}XEqkuRq#6t!ANpVz}gKUzrs>$8)NZ-m?Kh8I{_ zu>yxRe4?JT8QJzl%_hIS-VHH;h&R6GH`e_8Mudp3C;ap{K87Sr?;>i=t?4uU1s+cJ z8#9*iV#3>JU~wGw20nsV9WuVUrqc0*=U3NM%u3+BS0H3YemfIR4$aQOWs#pVh#yOG z=Tz}Ub)Xa4S*Z`}^v^OLAe(;$`$)RF> zuuI!_ONn-1{@`9`A(9c zw-Um?Y|pJl+P)SKHe&g@@m!A{X@?sAHEe6v<(E$5|yyQ5&ze_kg&OtF|xyG zHrr3+%ybtxf}Exv7@x^iM`I3??74H2M3^!)Vf%Qldcqn=kZoUs3iAEjouarEHPxjt zB2mqkHQUOPc)ka@FpuyP$msbC`C)|4ACJeDAq(Eag}iYA6MUpISR_h`Bfa`Y5K~2~ zs9O!uE*vR3y_>9B*dGzr?-wR}kK8TX?|P8l@90Gjc9F^NB*AsAdS`sMs!FyBRRg&i zmzk@nG;{v}m=~M5C4kj{?*ZSCs6}1IoZ0q#qzS9tZj9SqcJD5EaIBOk|~yG;MX%y8=bhvzzY?!Aj*w)-b(n|lR8MY*sFN=W;;a;;)RKA?; z-za8GmqC06qn#gMrEjw5o+^6>esPELTF1{N-UIS!M;s>18LBkeFcWxI=R7UsHOZ}X zAS%quG!|41=j~nD8|p1se(?H2P9`hd#vyUS_^}iNoFVnzUJvF$OsDbE#l#dzBui7H zTgsW3SmXaKrVZrvr6aJ@&n#`^HROwTyYk`W^1DN$S0BQ?i9OmLiPH7qta*##>p@N9B+nW3tPKGUew|f#3 zx}#0*K-4%6xEv8*cI{$ml#pky2k!7NB3?e5|Cv-Q?-xG!6LzdQJGrR@n8&;>B)6Ae z2%m)OfTv^#Fi*)wa+J(c3|xk}p2JN}U+3sTFEb@d;wls99$@uc6jp+M2Gqsf8wGVo zkl$rYAtftHlbAWOP(8{DXpq2x$Jcuka1Od14b0Q^eJ>@BG2v!P|M(be0)03rSm*8Z z5E<}ZD&kyI-YZtz#KjYK4o}lh9MMz{=J^^Yy0%-~D%!FVJIc*`tnt@z##UutXVj(3 zS4M|68ilpQt@^ld^s4CGnv9hl`4=uJSV3^War^oF{Cuz<1tIx3Ncy^<@}+33#H%X1 zku{WYk&In6G~(rFHG;|qVWOT|Msjvln(saAjshA$T35x#F872uSu4kJ;+)EUDr(9U zdYDZls}uPWVp+Wr0i8cqCx)e9nIP5Y2e8iec3g6Yg+ z!UA9yy7zO~3haF$^y6RnV}xHf4(&`>HwC{}*QxmZH3920cw{u63<@8C+xY){d_my* z6D@q#%b!e&T33tmHn`iidSCW9Ni)QE#qJbeovGO%->na#W8 z)`rpOBXgr!Rdn=!??XEcYl3z6ujg*xMzghkuIm*y?FXy8O_mZak9Ism_H67bp9^h+ zTISK8XP}Mj)`F_>_lt{O#kEd;fmTiaKXm*h@!ynP_9w=~Y*~wtis?Ne4W@U5G>m>H zq*3%TOI7r&kWHe;g*1h-DY<8+)7?UvK|dE#3H?M!74!okjibwjR6`dFsevv)>O5g~ zm(4WOSpqGKz9FPJbfS>v(qbXC(0m~sMqd)r;dFqIj-)0b9YxhbI*#@hQX5SZ(#bSY 
zNT)`;Ln>u#5c}q_&EJBodtW(2?+WN5hTas=B@DeJpzLl%PYUP? zh8`5q)ePMsplcYqQ9#!*^nC%{$k3$%x`m-m0X1vmnjq+V5nR`uQN1VKyNWLUO*cd+F3vw8R{#b6lfW3o5ZGlpOISx^eRK| z3TQJ!Zwlx`hF%iTR)(Gw&^Cr16i^m*r8@+a9T0S*fci1?eW1da#VqF%3W6CrZ&DfS zJdEKEfgHuq5&?~4Xt97MF?5)KrZ9A%fTlAvQ$RBqDi=@*L(>IR!O(aC)iAWPfEpO; zE1+h+7*IqjTJUlZC7)BEJMQ|@~xQFjhPPApc_=A*pONj9 zjUI|d?=!WXvV4SxWYGKA+e!006tmuEc01*D4<$?QGq;^G(nHD7`^;;n3=$|abM?w{ zfnuean&8i?d43LAzcm(tl*3yi8fb=ZlT}CX%qDOXu6&p)rGTOxD>X|3bewmxk_`fS}uf9U8uXwCpGY zK5b`=Sg{!oP1%i1k$xbnc8(WCd`zD1?1dMY7j}&R&!SyYekD1$D_Qf>R%ix`jYd@U zgF`Sq6W8uGs%@v*dmnR)ELDmF=nBWjhIE8iaVoitOGx*0;vF=^?{g~O>7RL-`w%h-t4Y+L;MaCz=)>_PD_m`z`h|*vPR@2JF%F-ww1sxUtfYo+5^$@c}9{?HR>GGVW*^{}x$tbVgDwrcM90 zLqZj6;I#`eieHg!-#PtAVr@4*oAj@3;;+=mjzx?3&uen)rto|+Ie20@|0@}M@(si@ z51+ilKOrBVViAffr~c&mvYPX!ExagdBS|>x;0KZwXM6M0$ceKr@cqc|XDz;7Ic(|H z5&q2xQhY8)eJz`bA4YM+MSNd*0lq2VKLF`h2p9pJfW0UazI=NALoS`m&dC^#wl2cy zt#^db%g}z$!n^8S*O~?i!G^Y>ZbvI5#-c`Wdchd#5gK&N-$Lt9r=aN~34Sk%IL?PA z?m?d2{0eVX7B&c9Z#_kH!p5jjgTZ9;`NS}DdoC4eyP~~GZk$g;bYFC#m$+`?bMiT2 zxRBVj8~Ey&h51iD(*Yqw;Pz?2Q6@Z&)8KbW>4k^*{(AaX{bTgd6VJ6~*-)(FsV2nz zqL}mFK(2h%J#5!njO2uvHbW8WxgJc3|3iWPk{Y9O84MFd)(XMO+*xwF-d5@xt1RB2aiGeuqB8m_EgfOER!(~rEic2#7IGK29 zT$s-ijHVee14kP)#jnJlc6f*Ua49qG+p%mU!ka?RmoPYlD&}EWd5G}ivnl=e_VFdg zJlcn3UCxfbhG?4SYoiu~j1cC}r=tbLJ_H|!etNk}+=;~){QGiWzBB1|B|UA$ez1AfS?C<_v#^K$seJ^_D|TefZZFHncgv0|J<@H0K|zpi z@IyBpFy@)u@@bhFjc6UM_D-zYG*W*>fyjK@m45gp-TZYgf07Gl4<*OQtgjRKShDKt zQ;DO{7_@0B5`0^YEf`6iI7&R{0B&$|uPQ`;-yyrN_Ik!T8LaY~_ZG6H3I(fEwC@(R2XW{%stBif6ygjx(FkG5c}_FBJ*_fBsT&kMz8z zhoVfpHj6(`ez=y-->UJu?$3)Cf6JE0?pG3le-McOF491egg}3iupw2yg{J0p5{37X!!u7yuT)D8N*}Jit=Gr+_1X z{g1%6xi|B|(9QucqG=KOG;{P9HJ@lqdj zgO70~Tb6Z+AeOc$uZam}@?u-O*XTq~n%>=$4(;Jd$MqIcY+3)m6N40Tbld(nRc0nH zwMFwLGW736em9wh--l!weut4=f5-Z4)tkAaC@Pt$l`w}$;D=J(H+enAjeSKN$xyN3<{iVX3WChcauD$ zs3zUriK1b1NDp^{sN*71;!g0*_|(jJ&`Ws0&CO=+S3rB-#{!3b7&+rk^fDe6W+*y% zM5w#@tdPt*$C~I~{hgWH!mh*Zf9KiS;1>Qc{2!Va%9i`yGCuPE_x&*Z|J8n2B@*$o 
z_g9PfslLl^#(dh5@06-&G;{w~*q<832U^Q&S`>Lm_tJJI1HR_Y>OPO_`=jvs`mm^QR_VyIlK`_K5bD)~y|`o1m-G9nsa7>Auqa zTlZAgSwB!eMPI5vuKz=?Hw-b1HcU4V!+yhU!z5#+@l)eIqdfD+%uDDS*`56?3V1`vfpJ-WdZWu@;>sH(1R=QdGlk`ui zQZ_<1QTDFvnCy(qSH3}UUU5ljQ4LqUrgEsNR6nVfY9h+CdD=p)L%UXcN!zNOpmXU~ z>o)0j=??4abl>Q1>!_}`UZKy_&xUNu^^5gxgk=H@A%;jpydlNV(;zcw4Q9gtNX%*| zGE9Kf-ZIQHEHqRb-ZyMC>@@5(oHU#@TrzxXxNmr15E+kVwqypIdYhJ;_L{Dlel)R= zo|)r@A&HWtOJ0_Yl}we)lDs2%U$R#6yQGs;D$SBklP;D1C_N-Q0iiyY4Us$K^W{qv zzbZK8VdbZ)y{b#9U$hean}%bCUkpLU#!NJ3;W#m@K!LPSI$Anj`kHhKT5(9rA)Rlf zx1@KaZ^;hGF3L=03X5W~VwGaOVu#{@;<)0Gf>#D8QTC6v^&Jg5Lmw<=o}t(<-tfA?ZYVWW8s0UmGL(H}_{^}waL91Va2`TxH3S;v#u>&r z#tp{Lj5~}cjkk?nnWdTQGCzm<{hTQ`X-zMghM7j1#+Y6+y$%Nk|z@-+Ei z`7mg;L%vSlSMiEsqT&n1Da9p4lj4EmPlc~iugp;vDwis^DjzFTRk^AHRk5m4wF3+G zgDOScQ*Bniqb^&fK7<|oyE;PCUo%{@Q1h8)hvqxYkD6aKsoGvzopy$Hu6B|3r1mTA z4ebMMm@ZzIr5mcV>c;6z`Z3tD)%qXw-3%`n)P{kEQG#e!8P;Oso-61+ zL5>}ubmvUmR6*T1iBv7kmW`DCUM5SHZ&4H}ZOS-RPt_+FUAcOxx|e3MX07IwhSQza zP10BDcj$u*3DEQ*Cbsc&Aki$zb!n_@fXpV#RcwTD+NRp6+M_z4I;5&qol^a+>ZtCa z&Qi}*uT?M7%+R{DJG2K3ZAP`}P1C2Q(`-qqIc_eJvR%?RMUo;#k*>&4NE8Z%Mln`t zPz_NXR{f>&SLdqp)uUnSO4aYHzfm)b_NDf;meY0Ay$BhM(oNIN)fu2zuNYp%>?atm z7_J*08sdyLquuBvM(dX$au-7|f=4Uo%)|$AT9LHkZI!U4Ak>n0^!$%pZ?5=!C zIa)bGSyrK3rTk3!FXaX056Tv$mnvM9rurP4?4kOJ+Dqf75o>}qT1_#wSh;4AW{IX+ zvqH04vqrN{^RuQ|^H9^OY153+PJxcRt6ia8tzDx%to=zFtxM9S=+bo=I*Cr9)94Jk zg}NoWYTXK$H@UtBu27Ux=FefXNIpVR8}@fHb&MWv&bLFLls`i3}uD#f$D<# zBh6QuM(ltr?Hz5XPOsaGv%#%X>P`By`Y8sNVV5D?_<}JH@0{EvVS96vBx9rxWGeX> zxl=wtc}4v#PQ~xlKdSGle}=(&Lo-+No~B0gn5~-&%By>E3+~4 Re&)l>w#@wrGx_g*{};Hht7QNH delta 15894 zcmd^md014(67M-9s|*5RmsJHsM4f#fW>i4j2Qh=F;I1QaiochD-uJ%m{q_EOd|yphS66S<)z#J2 zhvlu_%UiuG41CiDV`j;tOY>?gb|pW9G57Sz=a9c1=$HIJNR7!gLh7Hq59vuNNxp=% zw&MNdJpw(jonPCYW(ets>62`%P42(`ELj{kg!ks=n~Nv1qG`G{Nn57mM=Uj#iqFURmlQW4x0> z(mXkv7te9Ud>$(So;ItO=ZbvhXCy%^Md@Par#Jp7KDLKJv<+E!0 zNRf{zQ0yTKlquvBA5B6jWa`RlDi~LE?)1y9ht7v$YjJP4!%zOC%+W&MBD}A%(>kG6 
zv#W)TY9Pt<)eU+a#U?D)pLN`5>>z)V%}Txu;JAWuuCwheE%oE#gBtIdV1FZALLw~(KFhjjcc)XbIJeZA-=?9Qhaa>#EDpGh+O z6T*`*6Uv0|tWLDhNaS3~A!M$95kHn(^MA!^X2WCaviYPfDxfd7%cf_G)B8`iyP%-F z^xr!Tt|IsnM_f%AM9jr&0m44pp zcH7nq?G6XoeXs#!`WB;B3REo_7%;3P2hHVw!Fh39Zu#aWu#CvHw+Lp8rBgVv%hZ_A|Xn zWsMCH8VmL`c96;19nPWPB9tjN&zkQtS{|9XujYvRlFi~I{w?yQIE!DoF)+Nd5qpto zhyB^o~m#3t1ClUGkI^KqUP z<HoywfR- zPbamV4BwYIblBD8TqeL1vdp=o4P zaLDvv|bsxMd{IYiPZFA6&)tX0p z`Lh-zd2}Jj7ax2ArO^kj>e`och<-o)KpzkLCAL|DVH)>h|Xu0lWkeGEfi z;|QaJL9gMklR3Jx6IiMn7lXqg);|RYX$lD`+0%vN%Ch{x9Y{k2ZWto2@m$%`)~Q;( zBayatebz*ATX|NK#l{9e6JSs>JS*4&D=)Uvj1)GN28@OS<~$So-$SvG!qAlHIavJCYDbin zZou4KP3~~Ew=J|U*&dn@w_D0+m>R|xrOj%!hf)jJpnx6e08$s)g+ECC4DH@!vL2oJ zxU#+U-l?g5IeUaF+s_r4*rP93nllHpJx%(B4G?cmd?q~lE7=_OT1c@8w&-vZ6J*d~ zUke>gqQggv|B>*VjUy%D>s~t7A7Y88q5U|{=4Pwcjl@L6nCwh%TT0!Ilwxs_*88Xi z9S7#oOW?*{bbQ?COTWfKK$^#e#8oq{s4*;$?qv-06XJ+a^NHlsh$!BdoQ-elC9DC;oqZw*BSk3du$DN@gh_)#6tf?ygI9RCDC^_MD&K|aLzLn=fhxV zU-{4`h_Sr$VctfvV&?Gk$f1~td>o06jp0*?GSS@}y>1Mq;{2c<8;&a;QeN5sq!=plI!Q>_pT8ax zDaJau!`A{UFReoryyO`fj~2 zt?Av0_#vdWTPc2rC7p>LX+Y;?g{|5mKS#7}XAsAgXN%GHPSP`39l@5AZO;L)sHf$3 zTuc3mJJJZUAelLB?fK7P(x_k#&M^f7?GNvu5r)z1Q%TbR8!^ol(rgHmJN_i{MbpIm3myN2x+2Xc$bq{S=;Ook& zUmMjJQl3?B$)msONpLFR$B^BrsrbE=x?gl#L00ro^DmPxdRQ^B;50e!BAIFFc?lCZ zZr4Ldk!r`Y{khPeemssLj=?VF9H?~Z6MJ8HOPpQLtj}DO6}Va&Q!R8FL-jTDI=g(P zIt>eqju6-noFkjb=`;!d8fi|;6Q3`FxaRQ1A}bw9@_J4b)hWo~o}2tT$vDmm?`RVl znV!TqlZEM{`L*OmdNThl`7^yBfE~OTLPr>ts-j-OeCYI^INj0z{vOY^@+q>ScM`vc zeAs(vNghlylW}n@Erii5pe?YL0wI_V5eQQzVn4%MOG7)Dq+f9iJ(`N~yTf;*t6hcx z5Wpu)6qJeLXOAfSiv8%DjEfdw9zr#LTFMB;JRJ+;SfD>^hxI9$%V_V}Agx_9@MCc+ z$0Kno$KscU{3x8iWm)xFM}x!1LjUXzOVkBb8hg949=iNo{$)e&T5@USShNDuK83on z?tZTGLB71y0ku81aM6ZRAm(6Pau~?aJxX7oHBifz4k~jE%u| zmhC8<56`}G8W{rzLGiSb@!0&mxN3L!v+YvVEV;_V1^8MfgWj~r;&}s!l*hvgsN^~P zT~Z<+$nPNMY!DdU{>GDg!tTwN-JIb6FcvPyvwWdiCOHli!E0AjX=(B z+~8`#;o3NVpJ;lN-|eTLU!JP+cHdylcd8HxmZIhNQ7!`NL)MBDo}Ma zki4ZT4Ow&B!~2XnpY%~@^H<3{^_%e10yQPR^D@owv}X~b(T5!R9!puBsBdz3!3PMU 
zACmhTDZ8C%^F@!6NU=7K9MTpg@o1E4;jWaGZpW|ooxAvTq|p>4?QXXu82~+O^m$5+#nSTG}s&_gbal zI8xESH@||M=-<;Tg=K&1pDAh>vvEx%8Jrp5^`#o5(V0q!q$0CVw_IeM$0+)D7udZ@ z-6i*&$Ic+f*+bn28A87#KW56s#-`^Z+(^1)N#!dqGHYA$c8-}V%{Fs~0bv8o+yuZ( zzyZKflofM=56@Jtl9Xok#KEu+zjpF{)-oIhW3ykw-S%qsFu0@%1Je0F$dCaE{9ZC+ zKoY-(Y#8uC(2jGgANXM{A=oncBr7K!PR!h#Er9DfKv4 z>Z#8Ia(74?e}woB-R8yGAf1LTB=N)gXX(DiVjen*dC?YjJ{GuEA3z2texx?a5o)2) zFBoPC4DLgfjGKQ!e?mSSmT3L<4EDfk_z2vI<@o7YW1*)6($)6d0p#3=s*$4~1BHX? zT73gKV=DYKdyIA~>zH+Ew?fK?SP@-LVuvTei1ZyE5uAxi10NwBbek zO>%a)lz%{4hWE2pU}C4;;TMsd^5R@>9PD#?vV;99D6S^>kTGs$4wlh#)VrFVGu&Vd zXNyk1Hv|0aIE;YtwRkv5g(W(v4h+sfp|uB8Sj!y7BfWL zDYqYAL2`4u@;StwtLHb7%G~hi2G+nZFL#+wqra8bp2Sq-PoOOIj)cXG&y}6-$<6fS6rS8TPtM@U<$7{vPp-cwm+8qVJ-HlDuDd5U)>!Gs zHiCsloJ>bA#evih6Dx2|Y2{qYY^82{XAJN8wsCt@GM_#mRU`6!H&mkvy-GszQX}s& z;W0nhpDJ*NmRFBEZFMOrvE=Fb8)SW6mlP)dT}<%rF*(+FX98^+W9G_~LxAn_2>K$q zp4SJG>oC&b%RDjLzCw~caxmXOHjdQrgUN-FN_blQ%P)tp4l$Q-ThtT5Kci&Y%Y&jV z*!xy0!s%zF|ANbCr9T$1-wWjY%VzVdS;9$U?}+0cAq|`~1yqW&%eEZHn$3nE`W`cD z%*h&Q&9|+32b8^N?8tbO9iL1FS<^(mS!A9yLF99vY_S@!us5s=d#pn7ilq$|NL~wp z59eY-6S=QCvl_e_f2ql;-y|^9Zzc2cM~I^Ck@NYrq49-G(k*<@Dqq-#;tn*hkbE#o zDhj?%>PB@bNyh}g`b#97PYJe*ZZnr>rFGv34ggvg%d0KykD6c}tTY;zBr{qw%uBOU zF*4;|Sxm03+Mbt-J6!w2b_Nafyp?C68zB}%WiZ8Kwxb{(59Wy$m&AHlTw$tHJ3G zTax99o4tMFlfEOM?*$5bOW4N9%vb9pC(dZ8M3VI1-ob*CzG}tb`)A`ybxN z$>$R#A+J5BJXpi5_9W6YF(PgI1#F>XPz~pqC}&HXV*u5G&#CF^D4-HxWtv8dD=Ob= z+fF2tV$*%v3zKQ&lup93#;^x{$Xf$@o@sagw|w zw|q#FU(Z_S4?!JH#+(Z{S@58vjfDo=slLys&C@VR7V^kt^R&|4U!tQdxOn1%Ju4lN zG(hOXAALB7i4&Pv`e1wRG1~)a)759)H~y3-zq5z8N0b$L?FkNAwFW_g$=$nm+K;(0 zr_L^>A^|tU!3)IolkSIO#q0{4j$g^8$$doP0@5~lAYV#yrgY=qAv31P0uNq!F4T@+ zk$+6lgkQ&C3mU!Qf0p^U=S8{a`B~}Dq-9Evw6Bh|Yp4VPQxD!|Ps+E__mMf-;%zNA zlqP0Fe>RfAQ&;(KfkHbIIl6?@P3;mo6S=Y|nunx@V^gJjhzr;h2xJ*pbCQLYc zN#NRxq(n(`HZ)j&a&CvREOy_g|3=WwRfnr>Q|TQ?JY6KzU|LC63aB5G zCL}ov!P;WJ7(O1mfDItL?IRKQhBQ2sQD2U=Y|7vr@P+IE| z}BAWj> znO;4HoSHqKA6ccF6CpxVxpQM-v1ZNfB1#!rwP9{Ao*z$6ytV;1xm^2e{O9Crdp#dX zK6J?VO;xuXfAM^6)wlEGd9PF1xG{vB4=3Vzhd+Q?v)pB*?Q7WW 
z1FT^RhWf?g6tVpUeFZ&yTl`l}fulQn8|idrU^J9xF$%mK(uSS zOgz8$`4|(ny4@JDyX3(yP-A90F^kT0&kK&fF-Z@{X>jD@@N<{w@6QXTt6uQj0?> zQ0#Vuu`ppeIl6Xo;6o@34hZbuSlU8*l?_yX_K)WscX;0HD%5ig6_=i~-^YpNF3~!I z=Y>({3)VXFY1woP*x6(nVusp+9P(F&ccV%y;pMe{^z7U4V z3h!_@0wNw^3~+|jcy;)FIb>0bCn_e3IFkE%a`e+xOct#1|CGgU^2Y0<`A+25*BkgA zF|n@w>BbfMocO-ba+;^{$PHCz-{f_?(j#XT4gg+w6UWlSZmO-mD*1+q{B$_i+(z)_af z@?a{?KewZvIkc&plVTYYE}-ROV?n_>e@c%L+va5c9(jH9IK^JvF9okv|Cb}0 z8o<0l<3!hXi(5ti{U$b)n>koh>TtG}XVo$4IMU_K=+JMC!eZhce@ZxWRdi_8&^J5s zy-q4vL2$q+`^EhHeDGI-(CrjR`qM$HUPo)i9m=yBSWACjCabpOM)ZGHBWTqi%+yn> zH)-6`-S=1QfdVQf(OctUCV2vstd&zZYR+c0i5fG69tM)!tqIWnSGVrQ>q5j^31O*N zCy4d=iS51J?Iu5LjrK`wL%*fO_EvAn_SM*5#bLHN%o9HUHZGR!Pil^+XU|ZfbUNss zMLR*xzBK{S=+tde1mQ<-n@Ub>+kv=M(c8!U{q|s=;kdj@lDBu~3&_yz3VdR)a=Vic zuJUB{z0=l`n_7K`#;Je+_M9wG@<3c{r|U4CXDr zL^sa(KXiPY^!OmFqzz+&FG`;XshIvIq`~wTAq}JVgfxoYWT}c?5weN2Mo5z=o05B8 zDm^Tu>GX3UmC%obR6*Yr(pb7hNHz2gAvMr7NS$ZQ?vi2EHgiaPx z3mq$@Bk4#X9Yu!-=~$X6q~obUNGDT;klJXvkj|hmQkEko-CbUj1A6;O7wqUQy46GKl3=vIav5YX)m{a8SEGIWQ4 z?q(cX9F^gHQLMRAkXz42@tj92hFA~U644o~Yu?(Fopot7E z5YQxs4inH+hV~cGbcSjKRKn0+0;*tWR{_;9G*Unf3>6EgnI8w77w}9*euB?B+=p@) z+6Yt_^bnT&UMR3I^cw*k$tV`vi16Lq8-nd%6yEf`G6eUb79bH|+lM zs)ypI_cpgv#(5|by|=BMGE|_(P-1kso_1+`fDVIEyM7{T@cFI>CN|N4tY&+#+4@IN*p3_d*GTK8j=)LXj zq*4#XtoL5fPI=8k$<%uUB!}FpLPzP^M%kqqgN~kz8i3n zyz^-yeA45e_P{UwRGP$!(QhJ>RDcAa0hj?f01IFeF@82M)YJYF6%pnA;SkKtzF_LNf)A(~_Kfag$PUwcNc_Z z{OD@(;9&ej2^$TIUO)SU!)&EKxHUrv_Bdx-{k#is(sm(Fo0qXR#TXqz-1q>4{sh8{ zFIEV01qko|NH6>ceKDDSD20EMR2+)utH|evk~-fx#4LrFeGf9G{X32$4V|%;BJ!_8 zy3i0*FVoPcOP^0+2+&cggJDY!a3j;OJke}1$;A_vspWSYJnQ3u#a4URWkMMCG^P11;PX?_TFT&H$BfD5r zZemQUEXX6oLc)&B;;#_rkskQs;nO4C_&(&?5gD8muP;*hhosjRqwu@>iwXRvgnp5Z zm%8vT?}5_%u|ZIJ$xyIhL4wC6Bo$#lg1VF!+XRQT+UNpM?w)QrdhnesNq_ ze0#Qo(1g&%DAN0Qcf1!`kEaBFJnwnW`{uDQOiBnCJ=gmyQ?Z_`6AA0&065z_38h7( z_IP}V=|$yD6A&D`O`3W|li(BnS*NFBF`y|r`-(yJgXf~taT4I2(vA+U5kqUev z#a`(eIQB|!kETC6(9O_>GCx{NicZ7@XwZjeI8Vsx6W#H($7d&AORT~CsoM@wm2QWt 
zErb-BkZsraEYi0+g&#m{)s1{*)yR|4B7SextkW}jKAhY=vk_tR*+9GUHN0R~7h#@8cfl zv(*=Z#cuC<95}+uG=20s`SemVZzk_w&h9)I8sfQpyqAk*JKKn`ToiNB9~04)9$_1H zU?gY6lmlB0m&VlhF)>|92>2GNP~4S$X4jKUzcLKFpZ;?-3A&ci{g+8>BtqO@KOBQYL}59Gm4_%iK26f!vrj2D=20uLU(1U71#vFV z7d;#>gyQq*97K4~_b}-H&1+p^KUjwW?P9?Ie7|zLOrjM&^l^k1d(m|y^qYQscVhh} zwfnBaVC!%iU)O=3MJM!a_R%<1*uk=`2P8 zTG4DIB{&5Ft&6FCT9@-ySyofGQ)MqVk^IHtD=(*e}*?TiF zq-g;5?(>I-Xnd*g{mocjNB+E-75lOY9k)Lu3>5I^4+(#eG2iJCBiZuZeEv2Gs!QdM zRcY${dGWTJ!pis+N6_tL=&eX&Id&#=4-al5aGD@Y#jcR}{0gr+2gW=fj6a3}B`9PX zk6@}Y32W&NA5Y%9rG+})xs{~sT8U4iJzrk!)w1P7qqwprpaQ~LpFr(FfM~s3HH~87 ziI4_m6UFV7aqNn}s}dXM^n2`=J*bKCk^>*a9X^&^zumvX-X`q>&H!!#9s)#>JQoj80I~sAKrvtu0O3Dw8(CG|zPf3<8V=%mpk1tOslbR03)N-vI6b zngFeUj-7cf3eXjx0%QZm0%idg0oDU{1NL?<;klzgY5}(a&45lZ7%4yp7zvmRSOnMv zcptDIPz|^W_z~~~@DgS=1Hk?|V&Kh~IX*5G^8f44tgdiQM?uq%)+D z6~7=J+|9h9OLzzT*Gbh!-y7pYmX}?>bnq8*zY`OB7kv9*=Q3aN_%D6O%Dx>IkIt+<7lfkWV9kLTVsb3FIDoyjGnSF#5UnKeeoe&fzf6=Q&W+uZSv;>m- zcLJU>Ud8VXvH`yevhVL0ZLW9E8Gt|Z8tyHD^Q|D~+zB1xjtMgqZ96X1Ej=$JMHg5T zJ;vXH`_8Ve?SH>ueK_2hTf$$^Qm|f@%^Ud0|KIn-*#DP%VoMv(F9`p`D}Tatd-m+P zdDCZ2n=xhh#NsKF?1vp9{#D-(JMx{9y&BBi{}m1=NAZEyl1lj@d5!!hd6?o=MOUR& zc~I%2>Z0nUlBsf3xvG5C9Muw4xr(T^s*b6ytNv7lsJpB4)pONL)LYe`sPC#Dsaw=- z>JFL^O{6AH(@oPuBhzR!S(?Ebi)ONBu4caGg65tkP&;3{QoBogQrn<4>IUoP>UQb& z>Z)|LCAyz2F6GA5dArbVV^ zQ>%$f#Crf#F+>t8F-m`thRb4PX|n0E*|IX(R@n!#eX^6XTG@B9pJYw4FnNkxAs;Bu zlP{F-m478aBd796#bd=9jZFKY_ET-i741#!1l>a23Eef_KK)nvTKyA!tKQcjHuN$m z41Em)40XmK84ojxOs|?&nm#i9XnJhoBpk=0Re_RTlI4;;l9Q6}B^{+|X+P;4=`!gW zX^r$22Bnh?z>waPeJuN@%&FL`s8mW+8r4A6%c_~G8r3Y#6OCMJEYZHKU8b$n)@ieK zujm%&N_87_??5;Qb;oqIx*&a+K3+dcU#Op~FV^4J|EzD+Khi(d|E>2j1Q=d2L>m$f z-3`4BDnkaOHo`E*Fy1iLFvqaau*y(j*lhT~@QI<)aL91caNY2u;oXdD8BG~Mrn#og zro*OFrWO;2e+c3%n2Dc6Ch0G+Nb)7qBnu_WB;QDWlY~fhJ4@rGU8N~f zrL-^PQZC&hO~*nlmo>?Gg|EZ49BxsT~y)|-8Kh1Q_M$InGVND0^2<bU`U3nbOy5_$1uz=8nT*cD4B0qYFK0V*l^s? 
zU`R5i8AlpN8^;^x8DBRZF!s*K%@~tGGTzQ;$oMsbGkKc=O`)dFrbJVBQ!kU!WH9wL z4K$4~6`3YMIcGtz3rtH*PSaY`7SpGuD$`e{YE!N0lIdI1ZPO2?CTPxICT={(u?VA= zBuSDc=~p7*rM}W&X{1yu{Y-jDdQN%+%F`%)Ed5g&Axo60WPN30WK(6cWXoi$WSeD| zWp%PPnYTOvdYvo(UGAfZQjAl~SFBKMSL{}NqBx>>sQ5$CNoi7!Rqj#VRQ|2(s0vo; zRO7K&6{>GlcU6y68g)PQOm(Sxm%2t>@>K1wQENtP#%tDTh~{0*P0f8xqb5n)ORLq+ z)0Svm+TGf(wU@PzwIZFbE?n273x!Jd)mP}h)8Erm{h#^}L9jW7;Rc&wiD9L|1zk90 z5E-qI;^BJ0Tgb*yHX zW}c=}yFeGC*XqaVPv|e}AL!dMnpq>uJ&mlDUXVVOM#);`BUPhRV==eMDw}EsOw~Nq zr>ZZZG0mzJ^)U4`%?NF=_I2$R!yRLUslZf0`&p43HEHO_9N|m?E`I^PN_5LEV?ne7(+TNgw9|x zWEu7u4r7b`Wf)-0Gfp&4Gp;q38>@^rjXxRxhw-V=Z4AgzWEe6Yuu1IXxEV;MOKw0G zJ|)s9X-}z9x>Wj(bgp8P;*z3S5vGh+DwU&^OO{-k`Y^i{1>A64H_-%m_*Dla5(Y~Xt(f*<3bY40?omdyF z3)4mEVs%q>Gjwxwc9^$N{UE(X|FV9P-lm_apQ~S4qTi=Kj8*o5JiT8J3J`8Pyp-W&D=WoZ)5i$Cm7BN;Sz$ z8dE=04mRW%(^S(dCc9~wsmyf7bdOoOYK|KTfh>_arDf9l(x0V|q^UBqtmF!;+BNwj z`45VY%1O#=s(+{#YxZhR!I*T_p3pwhM(YSn@%I>coPLjfuwlC44a4t-0HYMol-yO0 zZD($v#4Nprqs9#Dn5nFU#vFkZtJP=Jwd#w|o*|kk8mH!j=AW8ZwXf>t>DK5!(92vYMg6aj5%I1-ZD~Sv+>=GCm8`IooSqjtLHccIw+J(mawhUPdZzAOuAaO zP5z#|o3dCrN4ZStQhuUbu3D{fsWzzISN)=5j#&=s%)nt}moAVlk*<}lKO8uZ|J`?k H#Or?ob9iVn diff --git a/src/pip/_vendor/distlib/t64-arm.exe b/src/pip/_vendor/distlib/t64-arm.exe index a759e270d0bf784d1cbd03acc9492cc25f7e4669..e1ab8f8f589eadabaf3efa068dce3ff620a01898 100644 GIT binary patch delta 21754 zcmeI4dstOv+W((>fx5v%ARs4|jmPa7&lss2kC0Y)46_4x3Oiy)Y??NhHFRdToEDlD zXc=KTG^87wX@Z)J&b&4ioi}QQO-(e@0h@~V`>gd{re5=Vuj~Em_s`39ZNB^S-1l=o z=Vv`@u{Wh#dX{eKS#g$lg45>)zPahApC1|FzSS#<^8IXU!3eEC{p_(dxBMYHeewF$MY=5I{+Lm6 zM#SNif0(HJ@pZ1dy4n)k;qHjE-OI^ojXvbbPZPj!3* z;-r_22!0=-YQwWtFj18r%0hrNr8d}QRL2FwiI1fG5kbN|{l{mi(#3+;RbI74vx+vK zNf?;026zOx$y<6T9U;OI{c3+_ewC0VHRgzZgA?Y{ZfvTeJ)!n8v#?*hq?6z^-b7%B;{gMCLU>J6U-m`jZKaD57{On8?urz zb6w&vId0yQI8)v;Ur+Q;%1#Kj-Kv7R!NbzkmbN(m@qb0Ak-9+*;3W}ivVI?jG72{( z1P`xP!G30T(xm7f&il<^(nNo$QcFGYYEfD@wV_$mQlF>} zb;qpZ1)`qT?+uJuPK^IRMBmcI=~B8lYGdEN_Si-FkLSjy1LlWGLuHw1BwsFV^;abi 
z7s)msNEwXZ&)B~|PRZ&$9o;T7-?6i0o5NDmVv!P#h_5{(u5lO)&N+dnO)Iw9D&Jo32R+I3m!B_ z49JKq>lZv=UNoS5v?K-}_!(0Jr8oZ4W!Y1vEHN0g)8cCXOR>M6of{4#O)r;t^P9BMp7a4| zPke0;bEx-{0necTyD%9T{0^(_IYWJ8yQ6p8&1$cAP}4wrseSxYmlZJ7#%rTha0_1h z@UkJE`F_Y8lBxt^peoOK*$YQ%Rr3W#QYmxmM?NeeF4w@62KFH3n z7EX4TjxqNS8XnU&NV|@N>jv$*%=~K5oL<#~gK7650Zqs@rw{h^DjX6_yUj_MWNsKd zX+rkU6Sddb1|w)EX1~+2VZpQrr{n>wkwT^49XuqW#k0)(W$>t>Xro)f!x0I!I)uPg zw>QctJ%swTcov!G4zW^X_z6$#_13@bCe}r?4L7@n43O34fT1Z)|bDiIpZr(fQO<84LJoZxi_sIH=v1Nu7n%9hVdoou|pXYVvTSEz-0mkJa+h;PuDl6-+T;5_BO_B98SB!+l*I&MJhRwS#>Syx{mweP$=G9kU zbG0FVtdG4OU0GD0e8X%*KB>Q`WQ`=q&|uLpbvzg98_<~9-qkfZN6cGp8e;yq@#_C< zt-s}_ojv_cLxSbVI1%c@PE&o0-z;A&)4~yPwFkppRum>*y;A$0(EX0!wl7rsX&R{bFeHm*WbBQCf4`9Yo#Ga&Gq-Z zgcgs#H=|cV+pqIQ{oFOVT%Q=K>u=b0l_C4gPq$B%KIVXjj>w&+Jp6(&_XBh9!hZE1 zKDO7m(>HcIUJEvt4{(q)R*l*e@-&**cp=U_1TZc>nXl` z*Kqv4W!Kjj)vF%=cTX&U%j#F+_rdy4VA%Ea`~hvff_I=>yoJ4jcjJPN`qW8!ZzoO6 z=zZ5>Io-1F>saZ2VxDUsPr7-_{#3-+wm+?Rwa-4q=s3PY^Wgrpp)w+eQp0?8q&FD~?WY>=jMHo_DKlZ>G-DGwBjK3sCpvdJ-^YQ!PCM=5j_nTJtgx`$U zFE_9bYTrzQ_bYEM5oxL)_+MXZ>GRfeI>Z}q|0}|1>u$dA-bgHof4nywds4smN5Ns% z`@;nemwhl8V&3~EbIeP`WE8V$-r~h^^?z{GruR+PgxUqJr8}U%(g9Q+GN%TRes4FGm){O0>PJ ze;Xkq?HuXm1xLqWFz-CN0JVGP=#sgG(N3(gXs2Ya3Kk&VPdG{Fl5r+uyzJv&V+HA* zSzn0@jG6Cr!?d)^Dl@P8bkw{iMryOm961}2N)aiHNV-ipL7o)JIXz)bjx0r_?rx;E z9_A;Xjx@J^8aJT|dH?#q$y;rn``NfyTy{|Jk!W$P-C~U6XL|b9ewKzg^~z^6`!?Ag zwI}+dt*4p#c?w*nd_EYzFZletSgaA<_!BN=<};t`Zu{c%1?JmF=bAUQ$MtP;e7NV^X6O#6d+ULSiC+xM_9KJQ_)^)}D==ed}u7ynbAEH~kInYrtqU6}E^{x!}( z(mUV1i+R8I`>FP%JwDKk?7Rm)_jmqVPvYad@&3#oC*u88Kkmf1_5Jr?Nw1&s@01AH zWG?z;I}HE$Wu)ZQCwC2wkl%%-8&YqGVSN)R1v1`R+)dt){h{>kf<*M6k$U4uoM?3lqZi?( zm-PxZMqKiaRk}{%tuKej&;jXDLA~b&`l#{g`mO?jeG+{jS06LP8aq_3k-b*+P`Ost zTM5I&i;kHx44o|BS>1ectMm`u;S+3P%t+ zjh45hDzsvZ%)u$cdU&jaAj`*LoXV`5#>o*h_~H!oSef;3hHUEl{nK3*7W<*?fA6w# z^dhegO&Bk4c9W)%F;zx(mz~y_>GHVz-gB+Kajtnu{qO2R|z!!2iTBE7A_4V0u6Y#PH zl8R>CzCeDF*R9(YN}i;L-dQM*8%A3d?1S0HQR`LYexH_n$oXy-saysE8-^QG2!^hvwgp`sO17=^KaWfev;+lpQ-m%!z+ 
z)smK6ZP*v0Fz&+ig{S~G!an;#WbIfjb0b@PZ4&zXYS}C9X~Ok{SBodx+PhXSqtM4| z<^67vdAqtSGh81ZYPno`MH)ZsvOc~VOY$k}hGIU)!PohT&t(f_%S_13cAppdDj1?w3abF=(GVqdC0wU7s`uYM~FG0D!nMLsr+x*eQM zWpa1y>s4LWKhOpG#!&Ab)uFe`q^}_@R?8;r1wY*?Phqp#yIIfY&>NfOrikR($b!q6 zeIvIAPag{Hqh-I^=~iL6+=cG@s$3q$esKRCdIqntw%&s+$={7*k66Df#q*Ky z^PW)g-Ex-5t=67{>^{LAjkcv&U*3n) zGvD{GkYrhHO{tIomfQXc1X>;XtU_u<&bEU0iz#EQp{9H;f46#Xk(cCc>+LPlrFVzl z%Lj6H=&1+ve$ZqM+lpzDZT)2{CT6-daGMNAZ_V2#pXlqruvric zQK9|Y zQj4fJQSYQaK;1!oh58QlDD|IEUB3E>34PW$1$(I()LGPXsjH~hP&ZTWrEa6{qW+os z4z+_Cz1CQv%UAt5RvFZJRNIJO134sPsdK2Vy=17uy~3|QB@>k23QjTN(`&{%mI9<` zkJj@XZQdz1cUZB{Nn(87K_|KNDJMB*krSZ5Rq&j|tnm3Avs*&(`;Hq{n*CEs#!1Nj zDL>6#Nygf?e`?6l)`90F4NuLTpVrgdj*@W{vwu2VyA{jCFK6(O7Z}5 z!)f+bhXcxW=se9K=_99}gBie2P9YbNz2tH-o)OzWHRNIBmecZg*o(hcZJec?em3;#AnR z`^ckc&nLU_OUPc@tH1$0>jp8QnGQa3CwU~<7j_CvCFhXcWuH%urM;N!uHVzwYiU?6 zwl^;~L*;1&G?3j%);_>9F(um2!vlf zb!YUyb}A=*=Wsr?IMZQQy}owhN<3vIO)aI7OMAaC%u4L zMGaHE9kf&Hs4?F#om%H;ifU#;nA$sTAanZ8bj{Q3MY}>>xY-@UXOg_82V?BV{OL7%49-cI{3+Pu-{UB0?7!s&ANlHEWD_lnv`cCV<-?Kbl`^Ym`zRF<2Ogdze{bWBmo18*syh=@yi-6Aatzr+_L8GqI|KG4XVBh@ z>?ijo=aKu63v5RJpTUG;I>eAm$+2WF3+zkwljF!0^p7W3krT*u5z zn0!9DgM0y5B{?l#OO7F5PWF&Wq%_4;pQLO(jRkWCr6Ph$YMF@>Q4#m(!txEbgH##c5$A zIfmSW>?KE$Gsw|oKe-nm<;4{{Ybid;vICO49M zkz2?q0Vaf*kVWn!OOjIoPpZ=cQRF0YG}%Y)Mb0FrkaNgcy(?c%2G2lcUJ3n}J+OmJG+g#`PyRxc=m3*ME}Z-|G64+coR?7tMrDH$$eA z!85?=foO6PS_y;n1n;ABoP-kBVWGq2F6TL1NsheP z;Tm!Zxgm7mXG!c8aLh`A1Fs6mzje&2m>jaozLvxl z<;1x(yZ88!Xjr?YmG z-CGs+<}8W!yDWTCE-}hSJlkpm1~Q2|nLCI4Iys;GB)N$EGP#6&Ke?Rz47rm0BDseA zw8Lks1}3~ihh}mMxs~kR`n8iAY40SzWqtFtBn8Gfo$+6^CXqiP`^fK-Gs&-zbIAM1 z`Q#>Y5&3WA67r+uKsgikFrkwC3Au)Bk{ifjax?i)VaC+cl zIwX_02p3gC%(E5&lgo*}r*|cJkyQxu`WoV5nv>YJG2{l?uOVmBKAzl6 zdkMLe_A|+?v~RLDptykh&}Amg?KIy(?j$cE`x$Nm*)zczij8FVIonWj6783gTlzZw zLmUn$A05`wA(MPB*~0=yk#lIjlAKSzm0U^x>Et5XZzq@2o@Mh&JnE#wW;$fhA;Zl; z`$ObP@{Qyg^7Z5f@-nLo9Xz6$_yEnVJ?ur@6SVQ+Oh|oc0yuia5u97P*r4 z+sHNK$H@)k8_3P%?GC4?QWls@hgLeQv^L<35ojl_p}CX%7qVxPQ=b>eN#r`RkK91c 
zB)>||A-_Z}XLV8>4yb%OTu6t80ZxHO$VIe2Pc9`tORgmEBG-|(k}FtXD!G~VZ8it+ z*%BsPM2DO>rvekn?X=%SuA|*g?xg)raxv|hWY1(LUMabR_Gy|?zDlA)867HF+(TvxK96cwGqFg}>UYtEU)2X|!aj(Per6 zBry+U8j&$GKizis<@26C_o39(rPn^ya?py;G_G6Wc}$f3McyG-+JhSNf++p?*GVhE zjfeiaFL?c$!s`l$OxDFW;?v+B=-7;m)$4F+Xm)w_;-y3KXJ>1l7OO4Oh@0Mes>yCh z_>{C<+r9m?G@aOCjh}`PoiHWo((LHDvvu^Cw^#!w%?_J%N?M*B)@zj^#IlT2P5IiE zd1{&+!hcHIQaeP>scE)f9@6TpW7CX3B%IVY_8iooj$Cf-In#I(pYQ!`mXVlH{AW=O zU0q$%R7Ol&^X6vbwxdUittYaKm5Y+5mmT)!&3Z_sS@o?#@$x~2LTBOG2&T9aoOX(y7Kmnii&ysog; z&M?M}(e-Xdy|Z*Dwy+bK_7N&jg#u5cTTL^JxB+gKRd^nY_^R#Y?z;DXhdxCAe>1~K zO?QVT*6!0^d!lt2K4Xbj*cvm_NFC}{wE{`obuxQm!pW7fuAFHkp6wQj{*CElcS_5q zAt=!ep{lWB@w$Q`(oZY7^HHVd;GiD)qV>j1qu;a>gYdXgf5pr9qB96#yze}TZ&evd zr#5ha|D@jBj%#KEURPM>_>D^gZf&+SqNFAyBWe-!jMHl4hdoEXSSo58({@|)DJ^Pw z5ue69h@qy>>3v!C62=xUHwNsNVFLf!+3hfXdy+$M6%*4|_BYYv=t*UC%w=iK*P{0~CnY)7C@TM&gO~M&iou$Lnd3j zfe%RIt`8RC#vqlV$abp78!S1kxH;j;^o@=pJva4?e3H8X{Xss5gT6( zK9T=g<&T+*kF<`PZLGyk_WRk!O2{{}jW|fRImY0T?&6t%sXj(e^|Di^`nQwu?Vc&9 z)j@0V9Al8Z^|WFMFaHl6dh<9@l^H1A+B(NrZM#;&H5I#5q8<9z>Eb^RVe!he(&rj+ zC;Dg2V!?L|MCBiJ7RWZNdVI!LwcU2=>@ctsJh@cVh08>h(DzHcSId9p_|y94X~b>8 z%kA4SzuKpu%dmf)FY0f2MRIWdU(@o#LUhL}d{@spHrGhCcPQP=Ns{4hI~4tLxwAvv z>WEIj4nan3v=b%gGJoqG!Uzn5<5t*7>X-lb>}UR73bo^h=nwfROCBzwM* z1}Su;3=->xx<+N#%c|IV3dzHCvc6a(>-LuYIs~nM!WjWJ^^#WW`}xMjDKYv3);KHp z`00N1ipoP;)gk?{>XiTc*1-RLYvBLqw+3)P!q5NuTLV`10^@inZ=q4wO&_EO?q6(_ z8_9n>?tEAgOQXw<%U^Y&>ZQgR5}WaZW3TIUv?|oJ%*cugT%^JZo07Un6-p^|nOY^S z(BrbsXc>;P#d(Hmz+j5>7b*1wDpY~n?0xEUBj*yQg$-0+fl?2l z;AZG{BRU9F}%G8pbDc4vXh5rQSwCVd#r$wNZmdS(A-*M*F>(B*y1P zMo_n8m9#^f5UA#UrM`n!nYw4fD*yMGJ1DU90ep%A1?E^-CGwe3Qi%$pCo;C`xR@K% z#%*XhhO+%Z#1F{&2;W0`Oba5bs7ZKGFc%I5k2qRWg_9ow`>N4(m{gfNbOA@Ccqi_@ zP=QLE&$IO$fj$ktg2$A4N`)n6m!3mML_Mx=TMYYbz7O13WKBAtmnFs=x)B=l3?8Z=PUfGm{!8$t^v`-(9Fe?doeijls{W!Yg7pp!>LGyY z0`s2J1+6t2p%lc~W!!psw)(R=Aob@+*sIS z480EAsX5~Fh>}A}9qayiMDu&j`uDw$NrilQ)Dq~;@UOrPM-wJ>!H4+YUf788K1KzM zV-dw+3=JwC$UBTKgJby-%<{-%k(upE{j4tSo_Wk^8NQjIUV|OqOhCmjc7NBC3zG<{ 
zHy(WT0b_bp@YM%<2h9Hp4Z#*$`nBHSR!IkR1^jY4^r~4O;r~Yat&%$E28H$S{ipp# zDxkL^aMHhY*B_M{sLoLJtzI2R1mC?tKokt^K!qy5$4<5!WBdbdS)h5iZ7D?8*FpE8 zfTH8tA3Gy-J=Fgr&cyI*hIYeN-tuqs(DDGfxJ$3^BStH9494Dr<8zG;4E3CEpi^-d z0?xplN;|X)`mzceMTWq?5Z^QxEVBZmJVNZ2bwclkUwfpe?b^SaU~4}j_|k{Hs_>-` z`=S=pLsWGDhB~~_1&66I_-9Yo%Mqg@3P)E|ptz^bSSXFum|mhbM4*e$5NuqZ8u>Az z*1)f#uV80f7m^!Qjsi;Y zi4uJf@!+N}as?^|)jL}Uv>KBaUjV{B5>_qHCTvvruLbDYqoQ!fwg`TCP#^MD2JogF zZ}OJt3&%~Wv4bnTH)!5rB^3jzSV3J;hK+wvIMfsu4m*7qm znuEK#X80GZ!av1>hSxennz>G}I*u3}&|>%nnlDAy;7#3Sq8^3%t`zkiwn}vs#x(L+ z_nK?$IbnFN)h%w5JZLlgI3_jBkD`|mEVK0hi2R> z_D+}sO_0?_!F{#{jFJjnqc+J1qQw}3y89i?*dn-Wt}{BJqY<#QQjhWH5lLIcK6o@h zbCBNh5atTB?qN~aL5m&{Y*4F=q$>0**t=73u)$6V)pMpD`l1}|UiX+@RRQdSyL3TE zr5QS1um8tI?ZMvOP=hWevOoC)>QAFWS9&%%Kt44s7Qy1v8L>Vs;#ek7n-KS9%~pMq)CPnfh_KOxfkEp(zb zBpq}T@WoQwpr2Q02mLHUSEx!e#!%l|cdj(7^cMLc)aMH0$7RWlJ6d+M@95akxkK&r z?DX#R?abJjw=;idftB{W(dJip-h|Bwl2=<$TU=XGTUuLQTTxqCTUA?A+fdtB+g#gH z+gcm0ZLjU9_3TdC?c1HPJ9D>xch2s{C!3$NX1rhw>4WSY=qpI$?&jTA@e4-(h^i-R ztovRtmPDw%9_!N=jAgxG#)Iqag2!MxLpguNUmHYOtuGogjK)1y(m`XPl!w+HG&V~w TKQ7>KZbgxGq2n(ZJ&gYbW8$WF delta 22110 zcmeI4X{o~nxPR-6Ah=R(*SCPbt)?JU?=0bersLJ)X(`pFP=Bgn?Ik=_S)b3zSnP_ z){;f>j;_T!x|U55Z*Xcx(Yx3DcKpR5?x%IU^D-a({wD&lr-G zq12kH&g$~#E?VW<+f;wm)##C^YDFnOKKSvPqvg+%YSv4Vl$k9OZ8R9cevvZlUy<+cr;8^HP2ANI92@KXe5xN)CN0`mgrz}+}@=K~J{xB80TOGXmSVZExZH@}OWEccp2dZolprQg_EL4Ryjj+xslMsm%o zdQFgl<|DoOjOq7ZQb|uyJ7Q7DT}EsWh5E7Cx-Ve`3KIic$wu?nUL*T-{x6pe!DYC_ zoDtVwHkxbW2FVZRZE@4(9rLZYz_^mwV9O0Es4LtzS?y?v4jlVAObyi)dKA1UOpVvy zqmf7MOR>QNt5vX@nI1o`FF>Js$CK2KoZma1G?a=Nht|-+`^;ec*!Y7=Ek@rrs`0aq z?MI*ZM+AU z{60xhur!CoRGnnq;##9mwm#Ztj6|9(ebOY|Oz3;HcecJx1@(0uGd#fq=B~cuWS;q9 z-z0BKY_M)=_%XXFyWE#>(Cn0$8J^xN*k~?DObxH^6@1yeF|l-bdR(yLIJz9U*ZtmM z*+Zc|F8Ej57G3qMYs-xf?ldR$8z4)}75zqapL9|c#+!Hb8{AiV2M^9LFc9Oa&~gix zB7v64DBeeb%~Q=E`}x9i6M}2Zcl*Wlvg?f+*L6NAk*Vg$q`2heuo{75y3cf-oPn7T zoQK3X@Yg1#bYGr+>}Z%&#i99IwwgPW#&%kfehlB@J#vc^%xucq7?oc1<4XsrLn&5%7VTo?I>Onm;9P>HW%YOi20%G197vkdThqYHsg8Gb*>g 
zeZl1jva2)N{Jj5QZ*qzb9g5HmI&`HOn=-RoOG+^5HW*OeIc8ppzuT?>!K51SJ&WJOwSyp#x*AuFFXsANZX~PYYnK0t&h}=PT3jTESsS)qUTJ!sn zL(S@uQ)O+<_apZjQexgUdX_wDwv6^AK9hc|RMZnWFLe)IzEbV*i5hnWYfbTVHYKT^vN z<0ohtJfXkrGS8VXF`_<9m#f-eZ{9q?4y>856vdC6I2mP{HSuzUecv2c^VY;~!y;mi zbXaF#869cXPu_-l&zka@yk66N>T@EyYW_YgTFhSpz0J>KI-9>u*KM#Ra5ZZG`Sf_R zNBYZ&>77m#t@J=(l3&yWJ?2rZYEN#>_vv9G$u&QnHCm*tX28q{q-l=KSZO|x5p7mv zbdubf2Q#{aq3s{dTq|_N2et&N-GiS~e=~HvhoQY!m;|E$2Ya-^o z(?#AiKVJO0>^65T8H}xN|B^S*Si6=)*HkWj-au>AtXnn|8e3ytFwN%om-n*y+m)a7 zl&qTl8!tDo8BV$qjlHoZ{i+#;9Im;saE-*K4-DoHQpaXP{fTvHtsNcXGtdA+7mmt+rq&^m^<8;hW zbNfxj`s$IVUtNK#|Ajf(>%XQ@bJm{*8B$rZemh$Ea`V!gQ?P9ZZ{8zQYsTMlfg#_U z!P{O(9hcpn+AUU$6CI_FcZ@I9`*(fKy?0$^$ctw8dnQV}nQ_lSW5++O-=|71GjQ)d zIk)D!d#lBe!8P|)ED?Fc{AJgksC`WxRu>fKKv+VR-k6JP9^Q>6oMt}0CnVmQ_4mgZ z=!h*34#vL^KKKK=cIQL?>592=N6iI#hSYok$JIWT*r9Q9c~GW9;zn<8gOW5rFPi95~q^drhPvzdhy}o*S_%Z zaqLa-H%g1&K7{Ce{Uzr2PbFof4+-kc)V_MxA35nI>1U;$u74*$duWaU z#WfYKKeuLKD9n)hnx#*l(Mi_TtbHLa4D)PD{n_~Uwff0u1LLLDU8SUE?OPWc$t_Mc z2g72A9+;tmEtoELMU>LLQifjn>77L)Ej6>={Xxrw_g~bBAN|MA$fS$;?Z-ng3wwPs z7^~B?Plh4jB78@{9sk6_S8qP`X$|0_&%Tz#^e%SO9YEnliWll!at!rkKKec;-Ms9J zD|+WT5xOI_7pu~E?e)}~zkU%iXSYO|FEtOdSBneK9p|)M8=F2hs3RhiJ;B4n9Ae5*eJ~#pa80Ex_P#yTdM9o;m2yoT&6jdp9@|CPR@*8A`YH z&?uzxw?lJqDEa-+qKI9QI;}F4>-;Zu`qzBf2RZNja+o>#t7!9sFJoc*=F2{Yc+4(e z4UI_e>V)T-XMUx_;lia?eKl$x_7A-=BC9GV!LGp>J{7D(;U2`fL~qpNv9zObjUIcf z#M)y;cdQRDJxR$kCczVH|~#=548Rj*KIlc4wm}) z-+bN0NbX@y{AoTK@TQ-1o>lm_-hBI~4vgKme;%cGwBLXE2=L+2iF&N{Xy1z_eWv|C z_V<3~2ghy$uljW?cHo`A?m;?d{wDMUF3qE3tsH7wqos+ z&NvBUY!AdlxGy1u_&Y_?t-nXeSV<549wC>7$wQ$_yW@^X=7f&-5HnnEw1Uxcb9kdC zVsI!aMy?8zdTVE#SdfwNf_hj(d&_;{*m0Yz(eV-;YVIvnfj&60siCvdhBE86X-BtA@*^K^p!Wu#neE$=U>pxgV)Gm>V_PLT^@*VlGf z@1sjR7#97p_v-mKCG>QP#K?$}?!ly{9vhmYOYhPf2S!b`!n%kx&iRfW4s*ZPTfYuK zanh|b2TC7Fww4Z*jZ$mvA1GJI71q>2;*;Faib3+WNQX7TFE_}b(Bpo&*pSIq;xNfJ zB*MCSn4BZI)=R@sgH#K1`enpEGF;x5s!+)YnJH3Yy*5%pkS(Jm1(n@BN)DnzH>D!$ zUDj)byy9-bbTTkf8!QpXUOG{7cykE5(^A9>P(hv-Qxqay~Q^mL;7rcC5$+GC`iP(ih0shzB0+ 
zutxcd-+R$|d?9+Hc7ZI{m&7eZZ(v9+l74cLRkBb9M`SZ{qmC@sL30-U5tL{>xkyGu zL?Gx~B$r}+w@BvdpvjBn??|ch66t5uKcZWBjG6OcOz4s&;uTqKUB6U5#eQO4aGuu0}G`_ zqCVYys=eAeycq){*E;hW={S+eqWk~vnOtFAe=RaeKP|D?(30!qF^PJ2*QuF2V8w2c zxtQ*&w#es(9%Uu6H|nE`4(oeVUf)FOMP_g4c!~5htbcEnSZlyGc^oU*huiS5s3dfB zn_Lqn^Q;Y}5+`R{+e_sZv{LMLxgUu>vt5ti%dGmFk@s2FhFj!Y`8l-wR=HZ_HtQd^ zp~tpcz$9zH?Xnyb^4C90f_3-pvJBsk-j4aa)|&NanFG1w&$7-57zG(xhQ)fFwW3Ul zB+vT13<<3Z^$23{$ztp2JH(WU);v?bmS3##JLGlw!aBY~I`lg77x|~L_#Qp3`-R$f z$`S)rnSUpSNUrtOow5=Wc-CDq7}N2xyW|Vpg;?|NMm_7T%kGwi@}5`2jf-IsE>Br1U=Nyp1HP)hU^1?q>?R%$yn2B$y$gCnf1 zXHv7MYpI3QQb&ta1r3iRjsi)O_kj>dn;M)H-St^&73oUmc~vSnXuki#n8=Mx9U1 zp>Cw!L@lRQQlFsKQCq1;sKy$@&R==)MgJsFhf}9f=Tmd2mr#qTWz^l&ebid&E7VV@ zN2opWj1_jqe)iFs)cMpkR67X&Okvl|rLLm7{;Yv2;5@8$ELqmOFG$>g;?a&dZH%MU z=Q>)kN9^#13<+2z6O5P`|7(sX`*Fw9nx$;d04wW7@y=EO$L;1<{D~8WmD>I(BjZSJ z|CFERuOvrUProQhqazvCL^F1M`=`|nBOf7mwj$~zsR+x2{p0(mle?RnpX?@^M()b^ zOtO3ZLNeWIF*%%u9I~5!zQcwR>*G2ZJbDxTZi^R^ox!Bkb*FK$g{Mt%QEp@HaNX82 z{Fg$MdsW2=^Y(h|qG8s4{T~iu9oA%6Ltc_m$!?BKjCJ`_m#xh&$@CLRww*|FKdw&4 z5!d`!Na$j;KIg#Ap zaFKEYT2Bk`eCE{4_4~;wOdyM#KrSHr$mL`_P_}<+$%DwvHe=;-6HuQ!8J@uaFWGI_ z7;-22eW%5zlHCF>CgZ-u{>dS`1Ghl4UO9U(pqK$}fhx#u3)GPZGQO2Ofb9LksetQG zBM+lLpX{bz3NF%v&c}di2J|O4k^N*(vy;J4avHf0Ih*XR0Qux7`isf#RL0c3?39(R z?6eCi$!-P>WOtA?lie0lElvS@GsDx@hH`6{o`%iVf|n%DYN{7+Z+Ft=aQM07-51Jz zO@26|sP>Ih7t8oJPvy2F4yXR&XlB}}+WxJ><^OUtKg%g%Ih7?<`Aj#bv*WLJw7<&! z#<{S9>U;In46>{rKa{w1H(xis*3-&aNr1EbDp{W&P%`tBc95W|LjL zknC#CY5s!K{KcpF%TD!M(_3+Vu4@(F?BWimB-^N^)LW@Ls1?*oYAy8@YNIveu$*1= z8&N64o)qC!57kSJrw*VFr~ZrAk0Vc^&ZK5hbEx^$LPtlcZ8Vfp%cvF9TBWEc31K?vb$HdlijsL^>p&% zN?)WrG`MFPFWFrP@K{;WP7X8dt50E?n+)pc319lvU@zJAopVimEhB8_DkG*+f2ZTp$l-2Cd{F{@9zph!N0H;n6UaXDM6#cpMouM9BBzn3x%mfZn9hJqa)6vgPA6xRXOMHqGs*en zS>yup+2lgljoQvXAT`r;;OFKNIXq4$$9? 
zoJHI(k;}=G$d%+X$+hIkg`jEM-6& z`8=|UcWQh-*-OqN<2RY?F?2CGm3#>~KrSI?k;OidVE*OM5Y8h?fb+Twxq$u%axpoQ zTt@Cjt{{8K)#PY$9XUp`p8t(B^kP6WIf2|pP9%GJJ2f6ojweqhr;?|U1LPUxEb_%* zoxjSbL3qY0B!`nr$z90hqxVi_~NqJbhW?0Zsz(_k6b~1m|RVMnp{VIh1^K~)MoT)GYvBs&_;fftVTF3@FLku zZXkQuV(uN3kN&^XpGLlz?B222w|II?Wiep28^GIe-QD^phkgzPRY1PpajRl-i0nQF za32$t)1N`V`xqb`tn*iu3~=vwYRP}%1@3c#PUHsqITV%q+`xUV&_w@k#>ohGhfKsq97Y4p2yXde1K^k>q)hkKgJrr|9HY6`mE>2+wd5zr4df5)_J^^FhDHXolIM`y$#rDUD5u5VCwp1L2y#6A z?~?uG&&X-yqK|0Eq~Y)6Z1R)jd~!Xxko*C;lzczAocsv6lKcg^*5M*$($K(wW^xnx zS#m4+ZE`!gh3rXnTI8SPc=G3DKlx*Fn$2kc*JyB`l%$X|c|ji8%R%Tq8Of&q68haI zC+?G$eERe0k7pM}k_)w8_y1fPQklU>aw!wIg6v}gUCHJ2?;ux_A0XF~A0;=C=aHMh z_C4iOG_*3{IdU}%G>zO&e<|60GBT0u8SQj|JE;9Ea5wtn>7P$di{?G$C>s0>*hDU6 z0z3$*H2NQ-Ka+euIhFC<$=URuOIECqpPWCs$SLtfG`Nr3lE{TjAeWrR40@1D>CYku z=s$y8PXAxXmE;BF%pzXkrJ1t2=?^6P>0jqC?kO2CfB|U?SWC_%-%gHa zhQr9&^j}KOC*MGJpRP?M7t()|%^0fXOmH#-N*SA#U&OMZylK)#CHM84bI|I3(RZw9n7-~w_Z{r$=9^sga%#yJIgfgDeMnd~Q5lheqx zcKgGaNy8fq$R@u|E@uG}$oceNNNz}U3UH8INdHUZGV=4}N^;SIG}O^>C%J+d_8~XX ze;2uxyqugJ?G#`vxt;!N$aVAw$e!_be381DhGGV!k>i~JRZK3We-hbGe+jvg1@22u zqkjcClYAMu+Kxy2C(@A3fR*HYW;otu`mZCq{}baQ7t+6qT+0IWBbU;jL(b9Z>;4}{ zLpcNPC0CQLCATxd!Q=+|*OQybmy=t`Vea+h17yzx-cy#*5D$ak`!2Gd{5NtMxr&@g zo<+_M{TOGA?KjzdciDjbeC?ZyZ)uv;iT9fInIHzA3*o!)51+Mif)SJk>&FR3$Idh( z+pPc|-C(1vAKcV_t*{JQ2?rF7$KZO6xh}cj;fz+2PmU@I7_@A3RcJ zdAr+5?GJC*I>=5=S86!!xbpFFuPem&GR;=^X+~U>n``_+B!|y7Yy31L>4dprky88c znQ7%sGe(Wj$c6|DZ_4ah5*Q67jfZgTphfs@GoldC5_r{a> z29+9rY6TMmC$;7#Y?o#D%(UhOjCDnBX)dWlPWa43s)2g^r{HO&iHFapKg<=S^|svD za7v9TUsdV|KKYC}y)6r0N8jS(rUCy&7{Grtb{(8=o)pksdA%(qABgTJx1f!0VKU&O zE3Xx6o~H$~VU{Y0Z@J;UF7Mu?$DaShP(JhtGW@4fw_5k48*wM<`{@BJ9oX1!Wm$Xb z`kajM@ z)BM@Ew&JAguTTMR27XC+)C?mJE7@OW7#Bc(oMA*mI?XgvhPsny42Jp$J=9B19qMo5 z1;61FP^wq0g)@!*_R`aYDZKQ*H0a_{q6$(`I_u7v#%epX9HD)%O2yep|1n(R&qcEM zc&+3NBl<-9Oj#%@8=utIoC$IlW<7puTD9DG>gX^qg0?Njo8)*|FpIIT;Cs_a9CKRR z+=_H7@p0RJ#2@|%Xfmu{=ZV^f&!?Q#|I@I1A{Whp&#l(s45N>|Lg{LblT=^Jdy)G~ 
zofYa9XN5|hb<*ZB_ms_J-Ylcf5Z(Uz>IrE7Zn~y7oZ9~Wb*Ldeg;sEuafR-+vyI7+ z^s|j5NUkF#kSI5)jVjfiR+-l0Fb~pZy;=0u^)30M3#y+Lbp)R#wBM0ut)pifD-uTP zUs=alA(ih8f4+|~O^-+Q|NK6NHEE77 zH0$z>i`HMZ?vlK7S6!61dQ)ihT;uUhPc1Y`jovREbAJC5bEd=2kpI+$b}cr}kf^}F z9e-WBqt&7MCC224qVcL(wGB{exyqI7fl4h=tE3uw3~zc?4#v)ZuA$2CsI3~BIZWaI zQSyw!5jZn~(?%=RTNSG;Tborr883&ZL)wqCWpAXP1x!Ib)>u|mA zOr`SFAt|NmJ$;w4KD>OoQd1GvG#$5Y1NDUg?7X<34ysGmMD?a)qg-I9RA{UmlFYM| z+Kd9!L3M#VxFuME_#Eh^D$i(yZbEz(&a!#3N@_Eds+YrIg=ga+3wyKW zf;AQ4%cpSE;dJ^X98Ea!-mf#nDfc@h=)+UbATq%FH7Z$J4ZJ<0u0)N>7U0fnAu_`8 zG8YA@hW-Uzmx*(0ChT?4&G-%U%*FU^LvSX}j)6t!N@xtU3HmAQMLFl;%#SZ^*-Aa2 z4odklN9*uR;9(Rfe+B*&hWgs@t#oQwMpZdV-GhuPpm*c?vRvGpLCaSw^@v(yWUg_v z9eN$?@$1k<#wy9ijc#{n!Np1qF;*2xG4B0t)J=&yUfs0C&{v_E1xjs)HbS42W>vP) z(X35MJq`9=kc)HPWpb;8)Li)4Km+7W%46JAJdgJvKg z?LJ3~D{yQ?!fF?q4ui_OTW4@kGWX!N3xDhW;0IMtMd#bU89;hrWq)(&{jKpgHyU z6C;{mLf1emp?9hS((tm=Ql3{(U{uieYLTAD&8qZO+#4YfFRQ81Do3)Rw;|(psBVeu z*KwD-1oIb~3T=fxuUE+ZSderVzF{vYQV2~~IR;*2P$O`ided1?@S=uY5xiJouY`C3 z!mdbbk^Kdu9d{ObAu53C0@XsZkWm}d?&G)h{U7=eS`zkkSk`+=9q#;fSjETA{8yh~ zP~kd1H4Azj;`4DQQjbBM_9^~28Wtk|=O}=2I4l!oYeB(_{0Gov2+TQ%Q67Fc+}ou#{K+2Q9R; z2u=LEp5F(J>JHs!InoTR)CuE!?K#g-wa^BzM?|$k3!(c}vk@=^@wrj}{SF!Dgo$0V zM(C}Gs|y!(w~p^5SlSN?UZ%0f5?-dUH?>yit|B-}yXb^+q?XzWy;2Su`4OW0C_rXc zePOPYQ(K`M!qCKL2o|m{jZ`o0uMn5tQ?N3w4fElT-zCbrus|&COJMi(!bu#e@Tbp4 z;|-hQXAXDA-;P8i9Ux4Ge#hU>dG9YN5}g z#)UrA5D8WE7quNt>K}kZJGfyW{$`2J5Px5)0a}JXBlxN25u&|{s1FOU{22YJ(kH;TB$%dAW$LSuLg?l;-8mWUOqkzrOc-RXj33k7O zM$TkW10~NWog(&nl06l7mf9acfuI#LP#|dOOx!_0i!<Gq*+x$ z>#UTOqZxdJVDeLT~K<5qn;gt zZF8;B2px_Dv&(fKe;w8a)d!EVJ4I!}UU?733bgcIQJbKF`veQrDx(cr3U1#mIM`sN z+~bUyI_Rr%sB`H9dR7(TtcNeJqHii56dVdM|DlgyZ7-`tlOus_{3!-~FlmC0hI;l2 z7M2z%fG&r&Rcpq<<#9cW@{EESQHRkoIZx<8eNa44;vuX~=P9&6_*X{hGdiDURq-qa zVG$hqUUw1<`jP8!976Tu)NH7JcDfI$AJ~=SOsF3VY25~$f&+(s7B~+3y1w<-ifY+@ z{j5No^`j%LZ$igvM;i^^*Bpm_kf8(gLkeA>Le1zyebe23fnh~8 z%D+QBE-`*x64$W1X?NT1_T6faXODM}e^2V3v^_a{@~z~TjFxEyI9Fh-DjWe-PE|ow 
zaaCzmSyg#eMO9^0byaOuLser{Q&n?SYgJoSdzIQ7zt_JvZEs+2=H9Hm*?Xar1*1c_eaW2{C+2`FCzt6YNzb|!P+P=WP%zf4SYWG>waE(>>vN0&E z{Sjpye%V;kBj>SvoLltMZj2liI_DMq--PwqtHxBLc9+%nHDj*a8e0FFu}!@D5EVfC QLvSS8TNgU^y3xh>KTZ|9h5!Hn diff --git a/src/pip/_vendor/distlib/t64.exe b/src/pip/_vendor/distlib/t64.exe index 82fe2d99e115fe361782ac4e84f8a4e67a243768..e8bebdba6d8f242244bf397ab067965d47c5093e 100644 GIT binary patch delta 23759 zcmeHvc~}%j_ip#V;IKKM3_G$bn~LHBf(i<;(NT#j?kEO#L(Jf=gBgjA?HIM9nAJ5= zqe+Yy#WlD@P;rS{FdBm~OB-A;U!oX|ec#jFn0)vC{=EO(=ehGd#e3@1slDpdsii5d zaw)EMS((N+tm>LM=C8b&zrNQry3tv#td0H!{%0;9MW0k~OY|xQS4Q6l-oO1p^pC(_ z-!n#+EBpO;2Z@O^2=lyY<_N(V2ICq^J$JK+eTV>8? z-h5l9)mwC|C*P6(iOu0#@ri6BpQNkMtqKoh^}L}|_Gh_T% z%UF)fOeQYRaa{m^ur|6@d>b|de|xZn_}hfZR_o%+H)D@oTBQ9D zqn6+wxmy8^atGbec4#TF#<=FF)b4Lt?&la8FZWITeny~ipT3;E+$4zK$)+{Q;5V_) zn{*FvK@l8-q9zIsb5yR>7G)4b@`~_^HPU@%G<0#f6Gk_WUSE6kXxZ;m&8dk?~<>(Dgu$_oG zUR_Aj#L`&uP&zl@%Sw8bo%**$pPl-Bkyp~i{f`h;Hg!V0zT*At z)MpfOLkXcQWK*yb@{5rA+@cp!Zx(s6QEslhj!kfjZ~N#D$Gu466g+)PezF8-+n&i5 z-)ym3HeFE#)ReZdqi)T;_oyr?v20ob598nUV{JDWn6rDdbyflzKk0;EFR68ZPv`8D4!cxS8S^@msTf zWK@`J+9pvSSlm+}SfqFKZ>ZBL$<9Wy($)sqN=YRk(jw0R_{$|KsiZchBAa560E#gQ z@@UH-RdPd4?3;Nz_KITwyATA(%ZK=71~Ll4QOEb>t^psQt5 zXG&Pi#pJ7zW0yfjYbwpF>$9#h0Kl3C?usN7$`H6EDuzCo}T?Go$@ z$_4wV3c;Zt$E+U7?is&Q?Th-zruFQYM?zv8{AP!v78ONy)p{O7;6&0X=_1C2Aet`q zMq7)XCjheP(J$yJH_s^dkDpTEEPl>XHB0eKvaV~1*tX*a!B%O&$eD44=DW=o!)}uz zIrzeUB#ac+BL%sts}nr4+@KaO$fgru2@cOSf64Lzup#t`!ZGzh6tr)lQV>tQQUZ#P z;?!sJ7{e8|(-=M;zi=_c`yM=+`#G|ynVLYnnm{5F!2Ab)H*3{2(yay9g7`!>MYBOo zTUqxvR~jS$wf_;iRdDn%$R-J`f?j~~DXJDqu3Ox*oo))^<2&b3KnR|KU`DwIc`$MF zG0;@S_6_(Y6=t!LwEAp}c)>Bt-?*=70aS^1hImynBeVTfOC;>1#OZAM8Icf0$|iZ+ zNu4I|LGmGL;Uq!4S&*61(h^|Y=T3ZmbJou2XT1yMj-hNQ$h1eI{dJ_ky)abN((I<{ zD8<28qiJ#lBdx&JJ)$VKz?K@J3LSlni%(;7i<;I#(m$A)_NB!deWmzM}dKQ!PV?&oy+rFU8LDDQCHVIL%eL< zS0yd*R!Y+}+xE3C!?-VtD}d~66uP_@6&gi@%8*UHZ}+8)x^4znykJ3M9oK=Za|f{s zgkbv(HFXp^K`O)EVaQ4S>`pwFBQ-~bu-Z9HzhT7wvl4}|*4`c5z5LYVs7^m4Mz|8=LoG)8f5-4-G0ozvYbjsq@LJ0H?n7D~=0~NA=lWvVcbK+P 
zOaxp)z(J)y9iHwwQjS>sFH0-+RLG+FP?`6#kdVWXhvqv#Z7Ai1(zsU`vZ)eX=NQ<$ zAR}c^REwgxY$&@h$ME}7&0O+?tP!I{+!j)FEf3 zNgL9QYffU|o~uWB?Ng(Y&2~#vqS@ievz5*L_!PFSd5b`-t}lKC1;)_D7&%BL)$?fC z<>oheth8mOjzNPu^H_^y)1I43Rs5v3F*IsxWYYlVJr|y?CCp z_=?3S^9jH2|io&Gky)nmIAW><1FHka34|-BLJkueBnMOtY=sI!g(E>#Y zDf(Z#pe#*Emxr^kfN0&*Y}P9vs_EE3o;$8u6{7xaHYXs$`q~JrQZX3Lf?c20M`^;M zC`XO~aqS_4D9~LEjL`y{l*8`g>Siiy+=0zrvkS`T_bQQ#JZw*8OA`#MLiTfHYM>>Omhqbi$GWA=i_=8g^&?o9s05$S z;5p#u%vC`Dl1+DM0N9)bZEhdy7$KqmI@$PIu%)q8~DF~e;gYca9>aOM;{s%atx#|ru1ptaLeSYDhDdn0y`?rJiti495rp$EsUw9QZQ z;EDoK=_IcM8=%k72xnk$)dXFP{7cSE^4RZ2Q?Cp&xzUiZ+1ivU?Vz+zP}uiWs7st^}1vsUvK(~ ztSirpN7%Ct?X2rzYoh-mY+WtQcut4u^<``5-e4L4ec~In@Tyku{t?|Mnyra0BT+9B z4SOM4K%#f1qRXW^1fxOR_G23dogPO6ZOK#-BPrvB5Uq8kLU43;bJPRZEZYccoYGvSA5brFKNI7-ROt{+dyJ2bDY0$UaR zLy9iydn+014|i;t zlJp*%ncALT&bFsU@N-x-{!V8PQu`U#7gAcrl505Qko6hNoHn>cQlg@P?DM0-QoD{$ zhVACKOUdLvMn&7OuhL>8o!ctW7VlD$H~k4?MG$>{$DZUWMvPeux!sDnca0n090P)M zdOo&v`cFv8?$JOKcQCE2n({ohj}j|tfwg>zl90y}sh~zlW)}R}wy39ET@(_Sq=q(7 zT-g+fXvk?0d#7s>Z(wJ;2J&AsscUxY?sV*d1YMy+zx@jX04E(4UdmaCG8#kw6@HM7 z?&cT%6qZV3jMBy!K6ZcUM+CqYv9h(@2JnIGPPYR79?R+8KH3e7=?oZ^9MldcEoK`t=J65> z=@HJKEbHFGz`M34OB}1npDd?m&wyKG>Wcpe8}~L99D`R%PuR|$8~JZp zzsxgyJ`2oRteZZ$Y(v&&o?pZ=dkx|<**m?Wct3WeSB!4-WG3~R%Ga~OLRWq_V?tuY zSSS{h$(%2hR4k09xi;Il&m1+L&MniWcr0M-XJMGmr3XvTj^?+P6=X;9&bD-pD|7T7 z$?JluSnIVR%wP`n>&v5n&_%JbsmFGXOLKWW5Tl?o>t=q5zrjk(F?=Qa%1%Ty z%=ngl!}zAGSKo1bB>SZARDK2X>zAe*HHi)G*IL(U0$bcKNykrM`}=L;x3iJ`JMq6W z*1vu7SNAY~deA}=R4=<1`C?$qze6L_j5RN{3R*k*B(%DyUkgFOz64qWf%>KF@BY1- zn-OG-Zud8Mwx`dFqAT?gY-mm_-@q2-1fi541@}(`UH%g2D^T; zLU`EquMj~idKK$BXo65HQPKuV+Ve7KHR%CIQGI*JaV5|RDQ5U#-o)D)?h?RxuUL>`s1wA;NfVSRf9i(Mb&ml0qnH3L*j)# zv}}rE3{}3#Bx+s}7B@Qol*QMU!WDT-vHy~oGf|Ns^(cA&i?9ERxekr6-ok>61h7Nk zzA8o&2=V8ovnnr#5{37O>P@5+cB^zQ1v&HyzXH(dfwuj5;850o=Yu7RBp{WLuw!2C z5u6r~_(M9=N$FK@X(ot9Dz2N&1?AOCqu8#Y0enw(dT6LF@(8ONn!(4g*17GlB^;C6 z9s_iBZhEudFwDs8_LNQEz7Nu~hW(Tq32VwQERhdpDZ_g5$!x)}4E{O$YFHe9iT#GZ 
z4a_urjCI{Xs$?I5^3+SqR9~*@o3HvNs=n!}uTb^nslM5&Z-C+xl~z&`%1{{_)Y`qb zM{Nwf6bt@W%6K|2h2VIH{bzWfS1;^mU(6C0sh)+5Xze~34QPS+tLJ}VgGWU2*dLB) z&wI0-Bf|Ly?A(Zabb0%c0z&7Fv^4+oLt55;U5A7)(do4764Q-pi>kICmE7vhuc?w} zq~DYdKO=po;q(H^_zaYG*oZqB1#H!*;$FYwn2{5&S^TjO{t8-Z|B|v08pJcQ=_)Zc zVhKeC$0#$0Cj&?eF*LCj42NIDE?OEriRF%ti4-XQg0F2pe( zj{Me8Kn=oj*_tsk_&dyy*Mh&vTICh`Hp9P+{9j(I)uBg*M4j&P(a7jPteiq>;%{}9&t z7zrw;Q3dvnhf(Hi@rrCZy&7D{{HR|`Dw-F?A&K9%kdC0U)3=er>Ty01Cyv2TBSsw= z&~k-fYe<{fix}U5fhguq=OS3lt%g#YXqv1VOGv~i1VJx z!N4)qZl+>+h0*GCR+{??$K63Cu%?`%4#1(ifRmUo$wbNi?@aFBOjQnR^4sJ3>hk-u z+v7+td*-(e{2jTWT`hk5$pn&atC@$`x)!oQ`H{`OtmioGfCJYi=%p>pp8vX0dXC!a zId?S2S-a;E<~}|V3rX5|f8N3dk57vqf$#&lN}r#V#?g5aM$JUg6iV?NODUpYZ{l>t zZt`S@#%Fm~V%4Ct>C{W}m)5z6A^TXfSGw@)Sl?F$L33<+WiMaIh84V&@D8+j(mm{w zGHlhD59g!~yJ=q3Ao-#gY$Bt`%1eF=#p*hpohg_Z@E9FCGeSHso4#0q0qIyTp_-Yg zAwRI;6NV?0=ObG+c3T0_<5KjmsS(b}rg$a7hgyVTtaifK1o{7r@Kc?V10sCrpd1<_ zRI_Ol`$gJe`7C%v$o~(P$x#@af-S9{v*ZY(NByG=f1AZi>d9_S^lh0)VPL8z+waTC z=NY6jA4-4iRIEIA7C33RanOFM@<6Xlu@W1KkC|;!q3e}$>_4icrc>CnNm=cj+{qR# zM)RBVZzJPNFx7h7(rP)&#NMJ$?V}#Zcr=vB)?Ko^65(ZR#^jzJuacDL^CJQ-u%nYl z;9w|ZO1RGD4ojaho?pp6ni6K}^$GG;EVJWYf_$#%vfv2GXWvap<6AJlsqOf9mOZs4 z%*(>55&SvErgn1axgAU1YIc6A$u$NfvV#)1H#Ld(Vd2xJ`1r2T_N8Kn;I=D>Eq>Li z!?}~ba68*uIJ?=VFVMt&{Du7bM@Qc%@{{7uvh>#ibxqH*k*{UMH`zuFQvoZf#?n-J zvQ_cr!B^ytQF=60ndFLDFpX8c)?U|X9DDX!6kpB4XZV@6T)=5Wh9S)2mvm7)hf6A* zq<@w5oe{$yESou_8}G3P_b6K1Ok)+VCpSx*j*>SK#0)RN&eIh~C3CYRX0QDW#cz-; zX~eQCp68afzcsQ8qRE% z_3j0ype1E{vefkyYh4uJvka5ZXj_N%9_ONFu>mK*Wo&p+yuk^A5WB$~MNrHL9QDZo9E$}!AJ$wuVY2CSK-*I$(-4`lnHFnoR+a)&&4bh zYoQQKvZ)px%!_*`&@N}>JEx)TQ;&&Fjb`{%wE zz5<%|fE&#phyLnpT)LzTuCe&mc{K8lu|D(qx0?GdQhoY7!R2j zB$Yw=R&uUX?<#h5USPyoFzzJ4{!6N;3zXKwhX$Mdk5s$u?C*IQX}!Tox{eK1_=})B zN03U=^^#M@r5}Jw?K&NXmeIC~N+VT;mir#Pc zDLXtryu&hutCVWJB8N6X==x7FvX7%!6pqOG|A+-|W6c(1we$w(AWaa2p2FCMbXs3% z^kzcpr5T=Bx&&M%U0@3q1fnCnv+w)JA_f3%ADkPP5<| ztAtiUGKnHL)5JH$u(8(Wx=agOaW$L|XX~sTJcNHD@`&ii9$3fnwe00$oZ7Rg#dWUV 
zoKTZ^hvhALNw@3-`=%z49a|K_2e8^jmDuEMdt)NMtIXRrf!7_n#pW&!^SyZ#OSkQ$ zA;Wm&nUMcTzW+~yD<357dC_hg-;?F^ zda!O=1RI?mh=CX8ZQOT98DCe8`_9T|q%|}SJF^6_DYnF^q7Z3chEX;MhQoiI7xV-} z8)eVdv5!Ry--!h+kK(<{(wFz;UGvM;p%Ty5I2Agb)@5|)DCuWU1z#je5WP0eeK@q?(cDymZAn68lEddG7vt56YPthvZe1t zo6lDx(B6c&vlYE^g$DgY=mjYBm&rsF@=pqm;GVc`k1X1Qh;u$wJ7yzR1+zVRGQ4Jc zt4jSVr&;ia{%p>x?kxCSFVh7(Y-CJAALJ?ZS7i+>lKes_al9;m0eO2k(Z|^rZT;A#cZ2wM*@Jfng#6kH2fDbH(_R?S!5OR&*a~?0 z5YI-x7p2>DxNPxzr+M$#N;DJp!`=2#P9gu<&c=KY#J|MmeGq8<73-whChvT%wn+s< zKH82oFI6!Y&qxLz@^p0+@|HG2{{6;7O=%}ZRa@-!e_CuNY#X##pT-t*szE|U`;cf3 z5+$`+JPYw`(dqDVO$1vj?Av;X*;ZQtec!YoD>a53RZ@+3ePb*L4j7 zJF;Hv+y>f{D%2BGU2t?=hmq5GqmSl0sb7XEfiaytm@*6rCtYjr`rS@eykRJ&PR)jP zo}oJtMLv^Mk!cSxGT)6IF?h`zhj=v|1X(&Yv%E|jpFV8&#`uIw1C=Vdli9!ibu1|2 zA4bq8KzmMxk7>F$9^EoTKu;n+HL=E&B39vz)OQfZyTe-9^b={B>qXn8 zC2YXfmL5LE2vA~tcYrP4I(AH}TPR&t6%{o=azV63$u<=l=zcuS=5J4N=>ruZwO|LghvP}i_uE780P^Yf7(cxl%eeSIN)gl~>pQY8 zpVUGkXh&<)^4>_7ir!H+?Glu>I=q^V+7YfhGM^Ri=*(xZ@*UAlpRW3+o62Uhx*gN} zoiRmlYcEVWg{v6nW5weQhUcPDTEhx=E(^W6lKP1j<+>gvr)IZF_3VHt_IgVppB~wv zsXq<#SpPys%4J!fhUtE=vk9NJNIqzxIoY}bBMIk{r}n5V(h*Zc+JGSJf-EuQVI+rr zfwm!7%)CBp!LEPm9yeT(k}XPa~c z&DYuZ&)V>DjD41&+jf*){cLd4c4;)2=!`k3TFPW8pGO%#UyCD4vcPeHN%qy$&qJH; z$A*tI?I~VJ@f^?I`Ft|3XU@Ck;23S;u5LIbJiM#7?n)2l`bD(vyYVdX3qiM%Aers{ zqDFUMFrk5q*0(4VZOWmFf zcDn2(Oc3uqU3E7PvYb7^y83)JeNPA7cj;`)o^BmZW3XY_aP+BPl=A)eiavZRkJ`Tf zKa7Q7h$k@-3_{XbY2s+sbYBAN@ntjJo;)_>%XRz^_TbAD-JY&2Zf~#PgS5S;l0n1J zN&FX;PFl&n8Ot1dxt`QG3ZLsy!278Bkb=Vjvghj=N2)Sn$}tgZhp&YPo=JIw9A zc<68z?+iTmb=MslUDj{kAt&qH;nX>OyrkDu!e5%G5@FI9m57mss6?XFS0UV`ER~Ti zrKyA1~y+AbP1vR}pQUuX?cm=A=TEt|5B7 zO4kydtI`tDW|gidI$fn3i0+`$bm}R^s5HF_k%CoPIiL4ZX@BB7tF#p-dz{petJGdO z^1rLn%5i+HN+*-dWtC1Rx=N+#R9`xz(sY_H?NaGNqPM8@Y@*kxbTQG(ReCAW#korR ziz~@vQ3KWyJyE465wer3#fc5WPpGy@=kX(*8uhuhL;euT<$6 zq8Ed^p4-B_=$!mYuo$D{^_A-I{! 
zaZ))k;22cS!@*RWJ~$EceEz0Vlpj=1GB|&zoUNsg{EPTp7K-&d;LuQ_7U^}qw`kkyX zlpc0?eneio=f^5+Rbd-|%suQ=)(}UIZh^=oKQam@eU>Qyp&5fR24}FH$Gcna&3Umq z`xpyMT{;%}5Q-&w`U37ON8!K>w^=9q9j`Yg~tL~C?Z8Z#fyK@xe zn?OF0P5F9a=Seu^I*vwCP<>^2z<^vFz5ONo1}btYOB=KGxq}MKpR=m>DHxh>}_S-7Mab zPQX}${htzY=`94~;cdwkYG;S%W`PRXm#U%kg>f;XesGxPB0wp}M#>5oR*UB<9R$Ps zL4qP7C=%yT{e?(WHq9nMD_Q2}RROOT0tJddE(uWh=W6(cUgW0)`YGYbjp1)0+>FZ+ zJY%KeqG=*fBD*S%uRR*&CQv$dy+4HNyl|Ufe2Z(x)#0tI-UTeYJZVDFv6&iYPW{1x zcVoyqjpB%&_f`&5Kv+9GsTjs?o{o8CNMFpb$wJA+q9)Re-k?Vcw!dm;^+X}g zENcA9sN}j}%&bw~51Z+(m7bz$lk3$Keg0Bt!5rFpe^5%>7E8Qfds;j5FmXh1r0HzR znc(=BXVAdLsJ6r?JBcT+(H^euij>?KFhUKeeU0rt(=p&7!qp>o94&XvHhzxFuu~R; zl(M=yUia`L*1Ec*$7wnug-wZPW0TqR>ejKN5Gg!a>ZHW=#UPX(U?Cy}aMXqR?5SDA zBYpk5YuJhEzE;2HD9IV=@()yOdzRl-L5qf|cu%>Rfk-D#<*!l~HcwX^*AITPdbB^B5UQE!{zQq<^4DkIrLb1Z#ov_!*Onq)GuQSM| zT=wb3P62;TZrr^|Zd1^O{&&is&~o6)o?Q&d>8V6Sqv4ejJ^>K$XmSghSVE*bvmqzo zbzTlMLlDg!(fO#-$mtgMjN%1cpmA|~h)DC49D=F$R}?c$8+0wYzQ9=r0PKl{1rxx zSA;8{h8ENnq(CtgOw#ugslBAV0CoSXmgXYTLTQ>yAG`>VhV*9@9sJpzD=l;<$Knnl ziZxsbjm%0%^LRn(zwZ$yP2LjLPA!HmM;GGQtXrczH@VrUy1 zY0j^}omD1tT#dlZ!mg_wJcWOf!ZYQES7-6N*rabm_zP^ww@+LvrctMzk(M%h%}e~8 z%@=BT++TllZ6d$Etjl+}OW3gtcL~8kxWMx~JDJXfEC2gGAu)h`u{gce3WVWPCx053 z|I>v+ff8QDGH!(6#o3q}b9KLNp!)<}i;e7jNwD)yENOJ5zA(KtJ6@&w3`>lx7 zMK9t=Ye6Cz`hpW9?`gS{+_ScW?+<7n@VRx5w%lL^k}VK-~?I&HO10?`2AV z%8Tnb>_r>j?+!JA>w0MFaD2^aTznh!0c`zYq*UX|CH&`5W9~%Klz(R!->wu=I}Tv| ze~$GTw1^r2mW^ugIRsZ}KU@BDQrBw}P~DP8jhAZr{qVyS%ccc4Q#!Z@(d_yYV^O79 zWwZ#)nInC?TUo>krFQJu&z;f&ixrvvNX@UZTNJk6fWk?CS{0!#n$U-; zP&}LdOOmeF7i{w{iFiA3`Ik7{fyuvQ1r92p6J;f)osvgk=GDzWvZ-wJuLIEAU;Nsl z`ziEPX3Es4*_I-z{D-?_`G^jg2@Vez+)CbDh=`~%;trhgM5_^WPd^Qag7ip3y*&Y6J&hWC(A|A(gCREN{(`p&MR6P zX2?ZXw)D5ox`(sa(cjJnd^q66ARL+^%jKj^>)b><_1sz)>wTgVvKgqnkvR4S4sy-x zR$U0b<(KPvgtkIU{_{d2K=tJ{zL2=Z^6qxW>Hmhi(E)Wu>Y*sUXi^SFoji`=t%Jk! 
zIJRIst=$`ZphoF;s zc9doHp_79WQ1)rK>I#rn=e(#_pUza!DmZ*JZWfP1NiQ9PlgEq1z?SSdETjl*(h8K< z`L?^s5ZEw`I_JsrA`fXl^^@qig4NxN;rlY5`w6jWpWxG3bxaC)tx+#kwxWJe24x$; zF@#s{ZK|aA*`)g+{7JUte!GzZ^Z}l99YAX7nNXZzvT0Nkq-@uJoJ)C~5Y%r5kkuN6 zfya~h9`VU@i;HX$^@>Q$aXDH7DM$aN{Eb4%mCFRL2cz)%clv`s-puR|LPHYf(`E^m z4Wo`%r{l@_-9$nBZ9lTZtJ3*w(ZlAf=0PO?C3}V#M_Awbz+ifV@n12*C@G|=~R`1xy?pkbPZW(`MbI9bCtG<;XXm}Dg#E9a{coQsBcJF6M|p$T&I zB@lid8b)cDreQw~Cu_J=!}m4ZrQvA}Z)$j7K^(R{*BH$@su{J^u%Cu`8cx@6v4*QP z+@Rr34fkqzOvB3>{-|NShJO(vf6fhGOVKY(!vqbxX*fW`(Ha(NxKzWH8g9^Vr-nx~ ztkLjy4Gmfgw*XT9T(ZUxG#strYz^PkaIJ=0G~A=%Ne#c#u!Gi>ls5gUwXWQ&VNGM# za$ITeJl@FabxngeM04rX4nAX5%)6*!^;RyuG$W7q@i079ImInij(;g0&%2tm|H(qC zq%T}8zWS^6EgSylLQiX+7q6sAtEKYO{B(UpzYRt;9ExB0`#;ylr?`tlYMc{ z{An-mHS^;Se_z!r>zzlQ2cQ2iJdrQeoA}qWZuGwcj&_;j(Z9xm)`Q_+rEet*NNl$z^y0^_v1q&8VjA>nd1uKwrhp1 zhsdgA6zpl~+#o)*bdn$6>O~saX&jdeCzoyn`$fe^!QlJ@?k6n`T{n*NgKPAA8=X00 zhbEk{y?bF})$+S@+%!13)YG5$OKD{9Nawioa1Z`QPu4kt;pg zf=_Bgy&o2Y{xummQwMj>)ZV++c)_#6z0fVMF}ZhYIqnP`s@c?^_kEH3lv^D4IoyNN z>`)$GYL`w6<%3={&i;EGcMlG+-VNnfcvDV^&gkj{9T&*sUmBIBgz+}O;V`~cCkkB& z3ya6UZ*az_4#StKjlXTmqQP?&jb;r3B=9!qXm#-Y|7MTcRR7=X`G3Of`A;N2YhQ5` zU)%)7Pn*37{7SddU7dLU?6fB8f}5;i2MrT7jL|SmLw^ko8aBA7@$Z&;cjo)Y9@4z! 
z8t&0>mxkLkTvxiNGw)}u*1YpIv}ib0!#oXhH54>V)-X}S1P$>YDbi_N8Z>OstA2c= ztZ1^v$z8AVOO45VIA2v7o6Ls>tkh&@YnZ2DPU)m%evEgmvzkQb7M$Gf6Sb4tro_!{p# zA62T}U&X@Gja~Rqe*C_pUHH$PtPRanwxnUB_&8rp{E2Gu7XDvC*hw6@`T7+ZM%hO2tC{Zqq<3izbu%v_m z|Ard`nhWQ+fpC^_WiNJ=;Yel|Fti=V9g0KC0lUFb`sqL!PHElt9Jd#CF?Ef#9E4W~Gz)|X55qlciIo;O zxg*EbfIk&j3rCR&OFCio1%DT?JQ>FU(Y%5eQ&0iWmx1A_FsDIB0eiqvAu@rF;3)kD z;EXi%|56B9(m1YdSG5Zgf$d>4mxJE{xE_xBasx0Bb~4pC82AQU17sEh)6-Qxp;HFO z1tSAz;Jh9<2?aeLxB`wUuoCzkHXReeztPhQ6DbQNB@b{v%;x2w4*}_)B5eab5;z}@ zA``x^(S$$3QQ^2g=xVqNkWT~(a5bQFfG@+{0G$UM3s(zzJg}n~8v`pm$sk^Z`vY_# zkRFf*qyIdBTrT()l2;Ko-t zt_u7uz?cHHaD-+!Y9T`F1pL!e(8WN0qQbXw&LFPAB_rSl&@@Sn;16u0(E?D^=;gq# z;4&d!0ek{SE%6MvW3o!`1ky(uIgqgc--V+>Q~{%3#S8)+1Dx_I`k!iGfoCfm32g(; zoxyQCLC**N1y=?78PNK=N*4q57L_&tSHe-35q2p;A45J3_$3@^(sH2lOf`MNdN@NY z(zFtD76uGFgiU6veHjLHnS*`;-vBhjQ7sAEW4}(*tOIcVJk$mn?b>1vsQ%h?0Wd09=Mvxd3`O@Mf`EgIb`|QjWU}zB4e? z&T)4^hXJ?3Q7yLt@552X^}vi}_?Nb5l}z9@IBKCAz@=+2Wm}`Q)?og(T&qTo0X8Y+ zxDF690K3CcLg~O&aFo#7!26rj8Bq`P-@5Js`}gm{Nz)#7Bbrj-0+#Y zz!28JQI`>VeXi1k$Kh%rpR)@Uv*OQPc-{u4mE)ij+gie=`=L-kdjWgEQNI=f&%;qc zFA#kY;|4MpfG&sB48nkG4&xsdgTDdzGaSu(3E1|CN~Z%QxMIjyiP(LV;}*lS2iW@< znhvxX_{nk981znH+zGS*=yaf^5~>$8;kYwsA<*N27vN~oBK)pe<=+5yJ*#$Q4v;&C zk&m(B1^o6L`riwldU!rMkN%`V1Uv&547wUf|G<)J9DIS}`dvgTfsf-sZZjN>k1fE@ zE^*vc@OJ?#;goRzyl@5T8hpaRS5^63V5@IcI_6u9@QLtH1j5uBbugv@k7@J?U{o#U zIuasux`i$S?F_sQ=Z}$m1GwX^I;wX9Gw!L~kqK;dAKd|&7~niOD)f9{dOh@p6#+yH zf}=hr{0ole^fTbTKQO2uQw~h}6NLjU0BawiK%fafdyK9Dy$kpQ995VH^0kedZ1P9;g2Wt7-)& delta 23636 zcmeHvd016d6z@3~E;C*bF7rGH0xF6F2q-A#K`%;zsqu8~%I$yzhH_-@d=K*Is*GXYaMw*@u!E zyOI-jOH+7hS!zb{?6Gg&@z+H*2jk%K$lHiNeP=`DaT!00TqfhP$os(id8;FT0p9U% z5&5pn|5TMYYy%7B z+u2Ne%CmudC;l&H=A-yDww+JVoX{)_^;@oS==|UwMBNZ5o2`yfInC_O%xx;^6{I-mf+XGcH?(##m|~Bo}K00mfQ9Y*c8KYl8>I_43>#YP30#rPwm)VUBE!b_<;mjiMFjf>RS+SA3N;DmB~P?~;!-u+uMG^k*BVvH$#Wz4 zL$>Hly7*(J_90I{jCkl{cJmOfc3Qzrpa>iq+DbmP%RamO~uZ z_@oRKHKyx?r@ay?#UG_2wdf4-Xr}n1AfA@q@+0}vf_PS%iDsdxXy|D6mR)e4R!Sw5 
z8v7jeNNvy;xjka9M!Ee7l{UTZ3b9QnnO93Th{sVfS}WP0WTm)3R%hqMhp|U?K`C3K zmCn7C+wR*eSM7wc!$^s>x|N5e_Rp-=ewLB3)~w`f)BSXNv?tiB_Wt}_Hq}0zpU$@0 zcMok#87zY$3IvNGB1f{KY(b=`h^SaA9b!6%u68#aF;$0He+NBZ$R;=h$CsTn>P?S% zqnjYQ8vFB{>6F!~XAK;;7h}x{reo1ZYtjY^rn@16c-gA2XSEKk_&&_)5EE_hPgPV4 zVv|&Yc9&JobP-QTfg!}LZLk$g)fn(IQXPBMv3-~w6jng;J64+SCg~x@5Be1^70HZR z=~uSFadh(Mzj55Yzriwyl2yNoVt>i8I;(yWVwgc`9A!mOKckD%;O279vQ#Hu|8bDX z67OdwKQ(GIldl%JC7hRwv+4yl&B>Q<%S5NxfnktB!b6m7)&E_OEM>Qh0hy+!R-;#@ zSYy@miU2jls=ojMq3oeCKrlUr0Cd2ruLBPyP}{8f3Pjl0H@sPha~u96*3a21GlkMm zLVX(SlzB93HyNPum`nX*8Ufc25j9vwMr41p6a)ROBYq4P?FMi&#A@+JgLz~`2SfI{Y^|q)?u(*EzkJY*#YzX}Z zA=Jx|1!K7pl?tN1Qy_C zp<|Rn2cb|ro#0<$eOrV(4FFpZA6xaWvY9QSinn;k1LBKTcu|U_uY*YCzTCMq}?qNl( z^G(TNiqr#0p##*WpP-vBiZWGcsD0?aH=3Nj7i)st|6#07=hNN?JQPW}mLV6~L%oiq z)+Q;9@}-g*bl~XroRA_|Yqyc^c~NLl!ZE>Iw$zUFfwA}9LRu}sew*+x!;NgLqUBf57u#Fwc#M zHkijpgh*{zjYm+6{Cr(-DRc#eCA&VTu=V_z=SQ=f`cD3(U3jeTR{hVO;5gkL9jyu>x;?d0PxjC|mw%lN_1TuWvLki;WKoFJw-27lAQURy z=ncwBR1|Vz(J6!w6q=fN?Fw-kNgv~qwBE^ROABnv(lD0o8>va?!`||ZXtB(X=Z-4- zxTrnGHu;7Xe>wu2N^TalQd`tl9_*qBOSS{@ngdHIv8R%lqbAz#g%!g!yeaQ%;V4ir zd-8(mNW5S=>>yaHjgIJSEQA|wV+F+Y_(P_bMX<>fPZ%uva{|w0ij~6QCavj4a3<^H zm+bXPHpi8uE=+(j7`0OSesbS-u#fzFU1$2Dg5=B7yJ3TP$`1R*^4aX3pP#S(Cyskz zk^E$O>^aZFbl6P_8o)yQeZ6m9MrsY&ZS_8s|J3wO$Ul{3`?qQREhUK04&DUq^|SK9pF@KKIYiI0dk~{_Xj478DS#u^Ys)16FIeLF|4&zJ?oEF(fdb z*MtmWhl1YL1dL`qf=&D!c04$Uzr}tH7Wl2KZO9x=!hnjeLhSffBhq14{SqupKA)3m z<9P!6Oj0OD$sI&I2thoDhodw5F?2Nl9_t*|kB?_%VIlmx?DMcpjW&`!2n!FW^Ms(h z9Mxe7N*9~NKg7$X$5sqmI*SkY>hykp+Ac>%6bhDJZrIe^$bt|Aa|Z1%kIx}d{LBI4 zOgfhaBL`C??PhbrllV8;uJFz=!yuAyQf#v7dxIi7L9psOf`neVm062iOi!#vd+60j zrjJPW+YnA$pcjdXXXFP%L$p~dvPltf-nY-<8TYp3Dq*OR5s6`~28*T7_h6L~PHr*C zo-SU_6q^L`4=k<~KSdPt{2n&ERXE0AeydM455}?3s1A6-21Uj47FH6q#r0@co|`d7 z$a^eU+Wf$}MhElD+3@Hnz9)M-dSDBGN_|bpyNuM>uIy%XB!81Rwr&lfgw~^432yt>VkyH|RKrlJ^*}G%dAFSU(w|@WPRPN`n_yIOly8VdoL!CpUJkw#0AYEp~@nA@p%1i zj5@~c)gGuK0V}4~JCQw#$?|D}$WqfBR2hoBy}8DiQ*30cU()e3j_Xx+uV_3aS@q>7 
zIqqRk2j1wS#NR`_K@d!*tt8a_IFbZ&0@P?U+Zx+l6V!z@#)brkB-518Nv^EYI77($ z5dsqSl}xkkS(ElKYZq6*f5ukDRV-j661ye4Cx1|iToc7@W zg|$6~6z$t@fhXFH9=8EZ23P5pE@g!B)frNKKSehb_Bl5$%Mq<%-W*>Sg#uZIC#*Ue0B|XMH5sb1oK82BB03W7TjHkc3~(3(SECb z4-Vhh7#uv%22_|M?+t?@a^-`mG<7fa8;g`+nj4YL8PkQTjEF>z{nXj7_=LNXCB#5h zvOz>_ zRc}GWlxhbduErVI|6951gxu!2I8QrpBco~1JpaI1;u18*AO?Cwv*gs6@qJrU^(WwWLDrvO+6ZQsCVFrO(+;aHp8xEj z!is`muk0Za(dDs}Un>{$1>%{eh$mb_1Pb^-No}IMR{aQMLrtaZWNHF@RnJoW_`jJ? zx6C$gk;4H6I${mAe*y)7kD<~{_EE^%IP_oX4Qxp_@6d#QajO1Nb9kD4BtIm;CfUl4 zcN@S5G2ia_{2Er$y+dSI?2(Up`nb^iO@KPh}T@sX9tA<+Yyh@>TAL<&D-fLF*A~XqhfSs zIB#2l17t;6pOL)gW)<7?e&BLLFyD)X81#HQ*4gk1zk$s&MEiV;M-BET2EeM1-Kb1; z8&m2;cF>RuV<;#qgtueev&QlMY+cqA{(YwFm!i2?z_R+a(R`iH=Jrd_%*tom`>o@r zu|fSi^S?2%e}}|9_wb1IpuNVw(dt~}1>G^$lkys{Ehe9!h5b7^+iRCVP%yuXg$0S) z683lh-X5OEF}abgpMht3^1LXrO6$k^XSc>U&&u}aFR+!_UHN2$c@SRxU ztNUP�&^>9T`F@HfSn-0;AdB0mpE5@f&F7-PwkLkv?;Ng-g=vSmZ8HfrxIo>bPgq1v=Mgz zm_$F*zFcsq>Y_ZbM3DrfEE2ZN$vK1*Jc-w{r9-__B8h7%ZsLp{g4iDi`IAN`YPME# zmQ$)i1ncRdLl__0@8Si~7?DU1^_cpGgY4$ebUuVd=CsGEF*K(Gba7EmTFYOcQDls} zTJ=A!2I+d1UCjySi+mtfx!=%8^%bZU zT4{h1%TZz(N~}PMbyi}9N-SE5%~WE3a!i!RPcFz&VXRg9y?&=MOj_x|eyjCcS8%bS z2g3Qm8ixD1b;lw1#q(e%?PdBAZJftpXpOLiwU2*gStG*vk!;$C4!k4VFd~${!;X*0 zNPEe+USZ&pxCWbGI8y#qGkAtvuseomV zjt)0b9%C?a%qD?UC5H}{M#9N-OSZ~SBg~*d;cGJeFB2ZSQvQ+ zWZ4y6*##Od(X|c^Bdb2oioD*7HS8)?m1xOGp`-i{+7(%S4NLD|uiZ|<}UUC9(LTJ@)v zgKL=^ai^@(ttbXX{IP)+5AunQWNXHHhb=w=lZ~PiJELrCrC@4Gd9yb$hJb+#U$9GK zgKTT@__DQQJ&QY%JlG>q4jemZjTDJAbR!)e7M0_naG*DO3FDQ%*DZv$o$sYT#&cXf zIsxbCNt&1Q@FKuR6Cz#1a)ra_f0-x#%~a(0U2O5VEX|dE?8>;-czCpVZTu#oHjJy$ zdml!LlSU|&2bxj~m?1B`<(5W{Q=J}od4Wk<%@*X%(A7Ohf3=)5hU0s)>uzQ4m{w;P4aziKoFn3{tofx>4IZf!wFJ-+Z4C0g7hZA=51?<)QSK^jo&q=t4LsYt{ z2Ft){>FIV_4rU9YldrO@0&o5pn^4ftbpec?c@u=Z zKe7D{W|al{RA;;l!oFd&vsS@qj4(a%oZB9W(a=*EbB&6*_Qzt1xq2wf2hx$vvV~{E zTD~@17XbT55C^(th*daD1hF}<6*{iji?avb1WjUpz1FM!6DP7mOE9*EyqhSz8fID_ zQ%W6YEa)S8);;XuV=|7$R`tdTzhaXn^>mp?QljTCNLa=8O&Yv5$ycr6yA&JrnKk7S^AV#IP(@v34<{qPU#%s 
zya{{X@N;BTxu8J_4;x%F%_E<24R%uFM!9;|I`-?71m1=DPMz!(qD9Z)8qrJ3@l2|Q zSYP(x)I5#74YN*N>9Y6?#=`OnkCjQw*1jGSJOm6=VwkS%U6d~gh?#vF=1bi{K;{6e ze?2Mb`Dt0EC_(xQRE9aEp*2p6(Z}tf>(7edw+RIpRg7En*|ySgSd? zo;`bG6HWx56c)D}2Xzwr`UrWA4-daxU5rp3bxhi@Z8H*Ea<8Ms_JWx1CYX78J@GF4XGVPHkgpXbOCeUK z9C>bRN0Agn$h&0eG#{&)kXI`lexj9Hftw-LrkidCXHCA6W3UPMxxpSbRv$LQ_=$6? zlUNp2@wn9j5c1_0J_gV+S0@Osv#DAwWV35)f#E0RCV;@&#w?=fEDsNL$yWz(hMx{ zRh%QK(n7XJOom zgezqybqm*kN=_Ypo2FqIwNAWa5FcgAZkW_OHfe6G&wAuT_PKI)QssuBnEeNv*_OGX z9s9f}R2n18VN8%ZW(zd?D4Ip-$Q=Gs?CKXxGp|>xbFgi{q0*3QhHgVTB@4@TOCkA( z>8|oi0lfBNQ|I~d%hunEO6G24xJ`BX-GAer7>*3%=e zpK=ttWO9`;8qc;d+>loZMO(=B6^G#}#ns}DE+b)*s-@^zK*`L2Mz}LfH*^hnNq0)f zJ0j%aEwkr|9=by-oZQ&9k_JcpcS;4L*oFnKXigtv&uaac-P?hDdlvC_mFByn?D^%f zte?rw#TZB1@p+gMrJLEbpSo3iYzpEv4!^K7R|5R)u%?F{!B%fN?vSoK^i;@uDB$1; z4@V)hyPD>3^DtR_y$V^st3D3j$dSlG_WIQT+jmhcTXOX++l~D}G?jI+ok~Q;wRW06 zMzNx6vFyEf2Kw&r2Q4>7q|um(y|&SYs)I)Xd&HA>LNt+6S>U^InySTY%FkhJ>btxB zQd`r445?W10Tt2(m9+FlHPXSqS?+Z>c`GI^bmaY??tC#^yFRj7(-MnT)qjlFtft5! zCYpl>o&SWEJRF7Gq&>1Isk`ZfwP8Spcm{Tfcu988ygLPrYa%(ka{U93e}g8W5_LPG zf!`uuLydeoIEo33xXhlL`?XI4*A#-wGcQW5$F|Ltot!x%IrXDjj+-%A5U(4=-!l(C zg9EY?j=YQf`5gAaqR>t|Dw>}gsR|^DQFQz`8VjxUm$oZh$^25V2m5`IFG~~KX)<@Q zX<{$UqRlKmJ&xTH{lhOVf~OYv6RKb`6H}+aRk4n;=tl8 z-tqcgMRlTB%O$<=ROBoP=0n-cB~$qXc6&*crsp2E^}U7s$%@3KCSH?%ksVwXp&7c1 zrS|s0KKgW7R|mgcq!n}XShwYY4)<{Euo79c14R7oq3J%tBO}vj^+7J*~a&SU9RE5RU7?2`{Dh5uBMlalqIZf zc{p!k1Inj3{bG>U#3<<+J5fH$w*l=aqQg;`+d!9KaBJlNQq{9;?9&;nv0~`sjFD}hkjvyZ&6119<~I7Ir285 z26I%E_QomYK$1|2%Ow76)rW5S0L&y*&2?!7n)Z<*U&;=C7>aqj{$V_P4{jgnJv;)b zKwJZWSvZmUBpziq+Ih47ANljQ+2oG~;6=zG&|BG0A4O4+*XXsJVj{mR(AE{VohWpHtLfr8X=Q) zT`P9^@|e=gDm<2gCDn6x^HB-M5Anz2T08g}p`!-|Q+z4Mv%Bi``onuL)ntg4YN3 zbXkej=S|W>xJKU{NaesOVbvQ!!VJnxxPseA176aDNCeV}&56kaS@HT<-}hHwEr0Zb z!5p{(6cv&tJ2j57s`X40md!?Pcwf_^H~VWthR@OuXsd_AQpyL_yom;tu9g}wh&Fgi7+UhF}dEaVXkZ3{+jaj0z)L(>& z^ZRqu>kMz`g>*W5bNS|Z7o>AF*y}5cyz@=-F8CF>33-E0K>kh|SxZ*qG0|B*#Y%QJ zDvoV*;yv1SL8GAzHO&cuUD%CHzMARP?CGW?yN}H1-|H;-%TTyYM|~N{_hWOujP@SZ 
zSQf3;$9r~R@=3fju{}~U-ofFysKYh8#?6a@+b*MdqP?%72ezoT z$*w!GV9sbITN8SW^p+5p-+RZL{F%dcLh2rYv=PAI+$q%)C8P6ElHr z+b-~RZ0q(U)?r7jrfV538M7LvMtmBFdoxwL(+1J!yoIRwp3G9AF z0)Lc+?QEq<>B0K!dI|DRnIm3f)9>sX~*dQktpIcteGEzzQvU)pHfv zhxmgOx)^RZ_!pFj?5IysXxWJ#uh5Ak6QR&)MEfW-`QjxPh0Z3LQ|LmX|H_d^VkXg& zLYEMIRiPIWeKtoPe{m^AjwuPNh~BHv1w?OF=#50LRp@O*uTbc{L@!k6N}}f~^a-L1 z75XgE;}v=>(K!lT%NHYJP$G4dkfzWQ(H#}Kk?3fJZX!BBp*h%rlAA(15N)f_ZbUZ? zm3!|)^j(DxA-YbXqlvx%x){n5PmvQ!LL$)z6grLQZ3-y{dxPfy+;p|X2IpBEXxkIkC3I|>$S679z zgjIbTTD)4xFhNNULGp5iGf?3~gR@ZKq$?b_tz73ToOTK)5gen!2~;>~;7n3D&I(5W zCs*M-9xQjD0G!uVp*sqz5UlZv&_#ta6P)1+=cvq)mlQCw6z10o6FuUrR-c~glomTmmBtEra2tP&#q{9c)q6C`aU&kA*~zqYRA@ku7Q)BVi1$B;?sL> zI10~M@u$Di9&Fr5AbU+A>WO2G{2ZA{eq;pPamEOr zq3MIt2dA^RW8I5WXTLZwJ;VmnkcNGJ4MQ%`^?gA7Vl)n{4;2;zG~jwC?$pR4b0|JV znz@oX(g>TV$N>lJ`ssMY1aawF>X>-Skf(hF%O+`}#;KI^ZHm%0@QeyH{KIqA>9suAhX6TKB$e z`*-2qQXjPA2Wc#W8E&?XfV@5A*Rh803cA$86Ll1WB%^XIQa8-X-`?%n%&WuIs_H>hHBBmsTC(q8z`*>W)Wgv{B5lDhwxuxGI0) z;93-j9W_NbY_$Ps z^(~%Mh-`EZFe5@p7bS!Ehe52D46x@CuHgFBKqwmyX}YqDG|m>+1elCg{Y5HHo(o+m zqj|9Cosb~6qcx?G3uv}HK`^`DY9mY33M6qFZG8S76~d8VG1=uq6@j%c1a8X$b;PFh zE=qdti}cfSdS!FEHwXh>Q<$~iA%TV_3@!3i#PQWfv)u1-NHuF$L8!qEuK)Mmd6%-oc{a|FeBFo(Ua0#R;co6Sd|LiX%rCN5R? zK4oy#|BL@UTYG92ngyS;Ej687hHpTjFn2|5&SZAKrcLWD)2!Ce zMCl!X+R^UVKWPOx3@5xnd}0u9!jtXyKFjz%t9Y^pno=!YxK8cXm7c;bgT!4pTl5ia z>(U=)*gKTyN-JpdX$g^fWYRFtKB7xqrs$&6tvG7cd$%S7R#!R? zxnSlxrjZrwF!wXQnyyhS?o1c{F&lp-40j^mJu?PMaOX^zrh5=`J{zrR%~+?ikv=;j z(5u?|yQmZAY(sqm2o%!5rk{ORlee5*I~S(u9m#^v9pD$RtLMTs6CxOY{xtt9J9|D( zle?5XKkw_66)xN1&NjvfAtU*D@!kdpob*|nA3FQ?noNUB$2Vy>NV8A<-p8~dEMs$j z2+XbyQ~F7B;xFQhw`fNu(cp~YUk;HkfYr{sU~Yu;MI<}dQ&SFx2BLOcKQ z+KYDDD;X8XrXBWT*|GTm0#w||mx6C+v&Ka}%=V%mzla52^wB#~CG+4s#b+++?ah`; znBk(|xZ{Jc5@(9Ge?83b!PVGU90|LFC9eZ-htePv;nhq@Fii%np-=|B)CN`Ijey;Y z8kHu6VXsD$JfwRCFWUThf7aO1lQqt3r)j+rPF7s1o!^50gIOn!>~Q^@Ck{69Yb!a%maIF!#~S1)yR&3IYNvn&?) 
z<6FU%0sr>;9~LYwm6>^!g3XCnbh}F(>sZB)j~(w$rA47y>d1E0zQTXD{%I}GuV&9K z7w~;6@_(Ah^ViwwE1^){#w(uuc&5FY?y!FnJrx1I6@++oc4~1G zjb^X42J(WEQwf(akK$+`!PQi`0$v@IP~_abNOLSnP^SbcG$lw2rS>-S;3!2(Hs{6)yy z054!Q+@+Y5-xt7-@5S2R=;G*t6w=CDY}yS^{lo>(zt{icyf0h;CHG?6ZrJh>Y}bvj zF7rmdP@3f+k(s`Jgg!2%_CT)!q}d8z8t_uonYH*Oti|35G;NoatA%f{Zoj+}v~T*s*~2UUw%n&Kl&c@uPfxLI2ebV)Q+{f+1~jXj;xk3VbJeAY~0Q1 z{TJhk54!7NnitLK$`Y_f(82#xtom!L`YJba-OSdN`OEx=crykg32(;mSqV8XtCpaO zZYc4aFw}rDAF}XUW4p&;9i~k(FM~cVg;`csIUjyaY@BB#H>9D05Y!ofQIE!_jHEFa z>GLhRc52l>WJho1y0piEQhiQ-W9@#$WpI}DYp|}gfR^BA>ADqFLUO11Y|gK(Jr^#Z z>R>Y|rldu1lNFb|z>hC)qKr{K9UNN&L_i=Cb7Y6vlPc~W|yVv z1cNiW@GV_*wG4$xbfOpC#~cc#AWG;@QG00#7FqQLxe27GeEfR9J}PtQFHnAX16_nw zLM+UMEA#yuu1J{>G9_w@oTXqnrz`cM1szJ)8jhz$Q`CAuF;7oS!x9@#F z9PmQT1}0gpdud^Aa1yos*bfb@-77nz?E~e@5l7yJYgk~ecLVtt7JIixa5%>8<<$ti z63cDA8ew6J?{?>TR&zJf*ZWPy&x+5QWM8X|%Mo01u(0EXh=*Y|?i>SW=6Pb^nD+=qR~Qa{`SR6#)7?Y})C{BXfBd}2McPMG zA-e8o-uI(ni=^F;Yk7AQzJXQLQNT5gM(HS}v(Fs=-IGi(aaebc9vh zZ=X|&g#yg|C^RjDHVK#{_Nc(DJ)c8W9ut)B22kT+nSm>lF4Pj-Fw%dkl|`bDS|bC{ zXlrDU{F{ScmiGH7{uB1`?|ytK+xL5L;P|<81~VNC8HKOYapnAOydeIz4^?qe=3KV@ zfd_MF4Ch~CagBbsb2+ckFW}0Y|0zcZ<-kJ(_Z$h(cQB2?d>Px@7_J#Rhh_in>6DiR zA&jVg0DICHtMMYP-r4mK-nPIJvq8Ju8$H>`2PwRfeej^8_Zv7j|EE<{2h8PL4-Cak z?O{{m&#r@lJ}S0SF-^q*Do#?dM8#Dq?o{!(idR+qL&X;KEgk+MRP3x^G53nf7_Qoj~PD(KWD#odprs5zK=c>3w#nmeAP_a_Q(<=U?;w=>$Rn*{fDEt+3ZVJH#sTi$d zqKf@g9IoON6-!iHtm4Ni?pE=Til)89Wd!L*9rQfJz(V|n^G-Ut5^q=j!`B$r0*3mB_l z-8lub*@K7Pj#vL_j=4VyUl#A+)%@>GX!*!OXbj`IM$d{qkLJ4;7rOCsQM5VIpPL$| zV+{Rm(<$j#>*c>(HI9u)`BUTWYIJQ2MII)O{Fkl9Vd2Stjjl?%j}k5BN>qlgDiE*6 z{nU7+i;@AlD*t7xaXJObe`-8PjqY_;- ze3lx|R^zz;CI7{%@uzA$T8&%PxP!wUKRf=EoBOlD%MK>=jqH11`L3Ld)IXowm;dd> z&*-(eC$<&HP7mR@l?ZN6l#CyO{`y~WcRyRsuY*^iKG!pz^3a5TfL0$xh$-LV&3ES= z%I|seechW=SHWen9U-GU(}xd!!Om&Jab9gvQTZ$%{NEaf@=yPrdN_gOsu9kVpY!3{ zy~y-WCyw*)%yFa3-F^As=;lWI;)QA&!YsAXl@J-5h=!^0@ zffDRA*jO|#Chy(x9JdA`wfwL@?=gz9kskDe-I+r{%{CSE3WS+zoL7p;cK5L5JUZHQ z9vyUYw?9=+;+waOx2>>hSaaPMp8djq@L` z{MEQgeKDbSD#jl*_IF}zdZj;(e-yjE0x7@kT@vMEN 
zw$L`$CR-ElP`*8cPZ&%Z6ylHh*E?|fj?SFEgL|Fs?0oDE~*b>G^b*9v% zu*P`&mj_!s3-S1>wfXOoyvy)ht!fYv-wT`nMh)#cUjN@v`u~QKqAdSU7)s|O`4YW@ zcIepGjIX>gec)@aPnJ*q`uWbB-RLoY zvf@>YE^Kr#kndLs#s9| zX%g?jn~G~y3{l0Gs__yP3suZfQBW~nMIRMiRJ2vG(MGB7go1Y5HX_Qq z(HExie`J^MP3CLe3q2Jf&R4;b@=sIvVBWara0j*<3Ai=Bjj!)gn*WS{b4Adz-Q&zz-1mBmks_3;5<+4 zUZ6{WUm)BCy%AWCFf*F$b&l%>AH?1Wo=XGTdUISUWC&wnE3X9|2TTm$xGA84#oW$d z7+Q!B9zvi(&jNo($i@HqXas&Ag8zXC`YbR!RH-ZjcoAVO__aWN7;oKzKn)rR ze5TS|IChH&)CGAT;5LN2ZHsxC$d6Qdj6(~T-Ac(!_!a^+XeRKA%D)P9jZ$P*0Zpwr z?kWl`1)f2$MI&l~{~)-5ehN&0#p?sQBk&4BJm{;y$Hn-`B_Uu8W~Tsj9B?@T^=1Wd zKLR!47*H3_ajU?01NKFrp*8^LA&^W7@OOlo7>pdSMSG=uH{clrd31sAz?!D1DP9Po z4&fLIBD8db#ny`FmIAxNzLvmG0d7N}%!GqGb6gYnCBTJ=FrOoN8P_G@5^rms+W>sf z6`cY9FJQ}Lr4gRM;|Ns#31E*D%zp`lGE(3dK_I2$QemcbLxtem0^dNOxhw?!hCn^O z3mniLstlPyK({o7Pxu3ZCn~rA?AwFmQa~GkqYzVZUg92pk@Yo2HF<*u1YTix*8R}8*mi@O&MX2 zBFs1BGk^yXNWm+CE^n$GE(Xzrz~Qm6eGBu4kOrDiH&dC*Xkd$37(Vdb2oY$$3A@f# z9=Q}?=^RuJ8Nzi4w61Ib+Rs%gBilL>fjSutEJG;Xh76S;TFgTar~=@c`DiKVZNLvP zLUo{51MiopdI0oT2)7UTp1^iy%q8e};2s3(?PSJcLaP@kQ+Xy~k{05~r2B7o0JTM1z##lDPfn*Xat5X5!iMMMg^6{ z10%jxHWk8=2sAZ>D^!}W_f{-ckUtA_--gkmh5`>1?}3Lbmgf!t>AyL21ich^1cBzS z5!ip9(#vdM|8H=RhD@E=B0_#CaOi2+8K4V+k!LXf(TGH!!K#T62igF94S}?DHSqK~^agzR54rI3N}&h0XwO zxQ=HR1reI+FlV4kfg^5UDTVfq1a3hfCEW%rzN=_;2~dAenHnG969gmVp91^eR~nHG z^lZfHS&RfCq7i7m2~Q)?b9@#k{)L?oGK+!sf1`1r1AsdpqH&-J-+qMY0bL5LK%k!O z1onK4MuVRXT#7(^!jmdp11ygG2iqqSqJe7=Xgyd9yn^5ZJ{+~&a|FsvUceZHzo1-% z87fUU8et>0V?uJgkqqGql_uvK`O=6_NDec|7s~{ByeNUt{;5I}l9!73gsCb`NUkK} z6V6a+az!zfCM1^-$q-&w={rDj0TG`t-b$Z F{{uwys8#>~ diff --git a/src/pip/_vendor/distlib/w32.exe b/src/pip/_vendor/distlib/w32.exe index f2e73aa0ce958737d95b1968e94a03a815cadff6..4ee2d3a31b59e8b50f433ecdf0be9e496e8cc3b8 100644 GIT binary patch delta 13492 zcmeHud3;UR*Y`P@gd~zkuFR4U67!vhoI4>QNa_-91PN;BZHgN1H5Cb05Las06s@+V zwyBmPq0|(jLQAXtsnU8Xt(2`Xq^hKqpYwe8xuL)3`MiI;|Godb_mg$@+H0>pt-bcz zYp-)sa^An>y#E>_-?Y}0Sy*D;@bbN>&mbUm_SE&rzy2_DYL%2upZcbhW=y?-bW8Zc 
zsq2t#c=`U+KP0-fh2Pnd&XUq$FHdl=HrFOcuWbzU4GOup_DKRiki_u4eP2%UA%ppW zt*5l+xaw~>E}n#+iYJHp2)<-vpZ0uoFvoGdoRj7{k*1U1`BZ)+iSe1k$5yWOd5K@= z7sL-I!G5uPHc9m}#Sgz|KlrH`Ci$AaIioL!VoD$k2e~u#Y-Roc1DKbUmbPW66>Lb1%Ozj&q;x;mN7@ z&8aKmy5>;Pg}-L^PaN&zt#E=hmd9G#M(zbzLK;(~b|lIOlHN+6_;M=8c`_UG#}=Ne z8R5C(z7uRO>Lt1YRgcPCO|&Cf+e*`Rn$*}sPZJxgfm~{pUVJr{^&Kpnab0TgRXys@ zO18JjA&BXY=|GEG+eBF~ zB(18p7I=pdZ_iUbvg0SpCVm5P+wJ9<_26V8mD|C@GnAeny@ERMa*~7R zVlpKtRh$181eH-e1_Q zlPG!))S=8yKWP?4$3t{vqjeyQ2kJqln;A9w4x@fSIt33&$U<{@hdDov8(O}h5iG-o z+VUJ6bWN0{kk!Fa#djZzqWg4M%MxhVutixCs)+}_0>7v9B5ODj4d>I-c$Swo{#z7n z4u9yPC^LdF%j64uPDo3MC>sZ25%1^;Ig7F}$T-$ZL;4Y2*{S*o>o8c<++k}zST+$1 zc}3zg9f(J^yz~w_wLK%Hu_mgdCN`sq{Cx2MD;H%P3eH2%hvwSzJf@LkdPsNvPqHN> zQyv7q^4Fq6KW*hc9X%Op8c`Hzqnq1#m)hwM)ok!lRp&Y~?lNIrBjIgju|+M#u3MAc z7S8k7858S!SS6 z>k6TB$?xqVQztxUDSBF-*=TdH=IAnPedtxgHk;@QL(ie4M?_58KJeCHqtj!pum&|A z5$a<1xB^Qbxgux)3X9qomdUA&OpWNwUnV3%2w!#4$_$&#aSav+lg=jBBf6$fN@Mkl z0;wA|mNjIjUAtp%HrnZL-8jxpf5A~?r(a9>XVUW%RI$@dq;I4(<5dM}q185$?HaD@ z!{yToImbD+q7%moJm@w^k{Ke`a2P4wP*769aHs@j$Cu)fPs9uv`7tsa>w7mcDR5^; zj6RU&6PZlQuOZp89+Bs}a-5-+^RDn@rdia9mpxbLVX{hQ2;7PKix<}KB^P8dfmKqb z?hv^r({z}D;~-xW5VXp@FpP5sqS4WeT0;+#KJACMzKgLl4lQ}7eQL~lNj$D-JKdbb z<}b>ak+IQ%~uX~a4}z|n@xW@iK)2{tx!I)ubVCG&@gF)B5Ai-1)P@bvc+ z{O7jr!#Ue~`UiSKrnkYFk~JNDJt0e@1_Zwi^=MIso8e4P`#O6Iu5F9dTXsMFBKeI^M<-4=9QSm4z;PiCOIDRNv~k%F4@>__Iuo#zY5fcR$qSCWx1t9>mt^Eq$(?mJ;n z`|_MdI;`?Sg2M-V+9WOiCYhQvj^A1NRg#X6-ixM+APQ0TF|hK|E7*+aw1OmbOib$A z_`EHtSCKItJI5Ub0W}UGDfIUHoV9XG$8UZ3&g9k3J^3Q?cIQIg zMw&a9;<=>DHNKjRP1Rw>8&d^-EBROI8=w@WrL<)tdeq;KcgE#n_LW=G1RofdE8Wug z;gx*%zC5aD^iaVqo7;vzFa|%pd zA7??bo~DL08@)rW_lV+S$>SbNgNJ}zKAkV}vD1+|miOfO7_y>Q3S6_@y+-k?NMP^I z{8iGccYY8%n4lkRpx~A3dUxdI@*8Y}7St4tbOb)Y_M|wW%QH7YAnepuJu+z0QeoGE z?C|Wc{AWqMd`WTAuc#!-PJ?4d3X18{!I(uno5cyt2eV-N?eBVUQ9Xtx5n9vRj8MeW z>R*rwXPnUW%M#Cl0UY-z^EB@a2>mqYY=du6oBW|RyMyB}p!e%Uu`ILB;Hq~8*ytGW z)BbKe8+v&%?|1?|t;=$**@n`t{UI2buu0UDc@6yTh4mb((3e$s5s%u;=BF@5p3FLW 
z!5{XMI!Jfr=lovsyD~DE>5jwiTXwcrkZ@I8;4`JDRI4uXd|%?PQS!M&uj#<6Ngkdu zGFy|yJID#m_UKt?JwJb24+z5JxA&1X*zF9q+2O3r)9SiaO9Gb)T@j*sFTHUc+)}}=2q_5{F$4Q?&aV%b=P{Rx&ItD+=yph+_^&)VfD{V- zq5SU&Dn6K86$S@+=j)2fqq9kGV=OKMbB!kTSK;CDBP&h~J?E_yl1+m;xXPv}n zk;PV_w`3(Q!oo)v;kavp=DFGAJx~|={jxWw_U1-=b4G72$D6Zya~5wd!<$ohb6MV8CvR?yX(Y9Btl4&qP!vMH z`WvZxQZwgSELKh)i@kg z)njYy9!)kW=qvDbWOLu-E=;I9m^8Q51FQ8v4Rp$2D_5pD4{V25dRItY-%cSx-(W{+ z7=uSsMZ)?S1DM<#J;|_s*?c2;w_iuB;HiEJ7|)0OvP0QA?#PCl!0fN7iVVq6;1F7z z(GAzqk2AVL`7dXvL1~!)A$UpmsOa{ez)ggQbi9%SqABpC8*8)Fale7X&`jK1`F7PNqBOa#}=wCjHbR{ zumc9e8p_NM#_5zHlR?&HseGC*lG-dgzln4h_<-j~_@LKtR+JA)b?$r;7_Jgp&1URaRS z(s8(_zr*D1+D6yaqU*9kN3O?2NrxdDqgt3Eq~SbgyGqUv(eOKnk1Z4XC(HJxALAoK zZ1c$2q5U#_K8Gw19>tL8TM%-7zGqn#GD3|r$`I{}u+g7SKW7M+7{r5n8TYw==*P*e zp-J|)LGUa)kFpabcyzC|(UlVEVoPp;RHGU>Is+)Kf1YKRkTcDN|K*G`Y+@a=E~Bgv z&_Py2vq-_Pl(bG@&o_*1cDC~j@War8Ff{D3U`BS=hKV_Wu39_&cMbV)*mB-Z6uFae zCE1kwI*e56a3zo6%%~9>7|S&yaw9UBb~L#~(b*UK(F{?KB#%b)L}`jWjSnL^_V}n1 zr!bYKd^m5Z2LCiH+3Z9tX(TDJ59Zl3mfW;Q@n4gt_PCb3Ixm**PKM;gx8#@QIodL_ zEFB~4Q2CHF=S}37lJO&7!oE8ZJ*T_^isriBoxgIw_y@3ZGB@nzXWQ68e=EgN0VID%Va<;$iUjzf|nY z98ZN~Lela*@D6If!Zs@1g5wA$Nq2e(*%tqtCsmHU{KLvgFUR@tk(G;IN#^-uWXBZ3 z|3!3Dmto%?pE`+uL{g@0z~26Bnv!2rd3Rcv4}Y}s>GV(@YSMP50?T5Z*`4o7rpz3` zFCbMjJE4s$GxdBW37^#)o?)L^$-IS3m^G-Qb1-t3l{*HAo^9nO0;U5F z0(O&ovyyuat$9AmH!+Dr*ha;-Z$hbY5oGt2Slas^j=99-jE)cbhcFyfZm$Nn=!#_i z^+#leb7DxpK5RvzA@O)xOnz|=4AwwZTV%0_=v;}xZT=zTk{4auV$@ir+}tA3mYXYn zUzXW~{nMw&$A;SD>;zmw zTFvRqyGYMDQ9&I(!gZP*^0G8CWKPe7t3N&4E+L?$ur;k6JG!t24htPmHqS|d;rVQi zE$MaaS5ezNj3Jb{nsK#| zM3LFt(3;_-Y+=8yRwf)*v&x0*kP)~FmmKD^uv9PQ*(H<3RsLK!mro?q7WI>zg6fn- zyW5KNJz(C*!F_UO(FIxEr_V}ifO$(oNJa4+_5Keq*Dv7)2{o={+8?FvB(AIj?Fy`x zLlhF;9Z(N-!JMN8orWY*4`@|JP_Z|?%#d?~mci~}?QbDi0uvT=Ds<@}Ow`NpEva;;wi*U4&Zi~hXLkZ( zOCqYMNX*hi{zGC|x)ZUrpOz+uXJAPnL1x}CWgDGG+Lk8scZs~Tm)!4d?1G|5N76$p zN6q`8+G5#4D3!+*Ej&gv3!$S3`d|B=txGC-y>z^fxR3l^Du*HNxU4W%x?Vqfcpdx^ zY;KOTqW(?@L4#gDE2xtOuAO`}%0G&3HJu9;! 
zyj>VU$N&rnI7n;H@E(t_tStu7vJtX4D;BUw95hD5VHaa1v8K39`_TI z<7>I|@Sz&QZFDU8Y_$R2z{AzA@C(VfHSPKBWX_swqZ|{!rp%)`9%=hMq!~awnN9iS zlj_G7j)w26x(+DXIqd`&{Nqwa>nm%!ZPfH)N zWOfq0Bc&;{UP{yGH7V^uFG;DKo|jTJJuRj2^r)2TX_b^3>BmUjr>$b~Y%AR+(K6^} zDb1p5r8JwCNhyNhQaYT@lhToNhLnz>FH7k-I$laA(tIg((4kT~g$|U`>9miOI;lZQ z=Te1~E}&hJ7PCPdEP=MrI7U9Wl%Zh~x`Ltp61s|^e=lJSYZ!W8Lf132UP3o9^s0nz zVdz&9x|N|d61tP2ha|Lup}QsYXfeaLOL#RyH%jPfhOUy(vkYAl0_#H-$lh8{H4U^DDhWbnBLx%nh zEfo(oG4#HKHZ!zdLfJ)$UX@UG2-B}5G?1Y+K&3GUv)loxfNfj4YjH7aFp}Z#O5|vU zu9whwhL%Yvu9h5KB%vt`oh6}Z40T9o4~7;yZ7~6;gfW;f$@KOQ=e`^b6sh1)b{2eWnnG$8TTF^|9D0VuIXx`{tobKfe6a2GU zIMck8NWnk5h0@+ji5C2AEfha5C0_6!-a>f**<(~mg8#@CN}ZRIBKVJKp?u|~=mr1j zEtI39yd#LY(cYBqS)z+M78nd zpikd@W}SKR!N%@*Bl@q6&BX)R@Cy)Hk(_bdodW6pg&?V_8*=g*{j_fOmv|k+@-z*%gjy~bs*LCjV(~Nzgn!;y)WUO+Q}79yu(=EW1zEYdAFg0uZO)CzM0uH>c1O~3s5y!H&B2kkm@k{e(ep@K3S@-5 zDe&Rs>o+^_CFI_liMZCZed|7a)2DBx;To8_WhCEJS+WIJuqJZv?W=g6edlWwZ+cfA z#bPFyMzHUJv|mM0_^J5IhG2H5yZ){fd`Vk7AnIn>ni_xi4c1?4wn_g4S|)F>hzBxI zh5q#hS+X_7wp(I|XAB2`VvNk!4)&Dl!PLD}jwd^y#;Pt9(k(OCAyO3Pure3wDA+v( zf$k?yU2$}tw=OLtkG6J@4Qt8ikhb(*vX~^vyYQb7+qN#@|Fz*c{a2vwBr8AD3cPSV!YnM-(hA9YxS}&#Pjbx3^*3r9aLM6cPfv^xAdkdp1G{CHBa{k!C~vmnfG2xMEj<=-tA-c!i7ta4B>;Xjduv%!uJLQKAY zu+>#)ry=C@jt0K4a_`P5{)oLL>|z&P+pg(;{3UX7&%`bRCooaNv%0z8%H7y&vbR_8j%Cj}$MyH<-o)ep@JX*#56@-`D!o<}{Uh;KL;ARY=8)#SnW^obSHoqq z2A^2KD7GvoQ-l@;vJ%I>%-)?|MMFobgT1lmUUu4!5!_8rogM8o0Qku!e|x#HGzscJ zA5A0I_9b@xZkog*I>COBAx(IyN$)Bx|6%3is<-)}bD?cJIE?H04cg<1EPb>HcW_r2 zUG*LYiH}M=nenC7i-HiliKcTU8eUh$VxbUc$J?L_A#*e*4Su!W<{Iw^M$9*QTW7$ePttWDPu1>HXQ1fHr=}gL)|$KMY?4cR{xSz53ZU2T;Rc# zTaFIq@ktpymKECxA#m?~f9gc=pJ~8KMl%opb28|7*T{h%U<9Xw>6N+9BtS2a<;N3) zcY{#Wk$rpccmmmfJcpOBJCOj}-r+z0W<@`aDPkz zWCIETPCyA@6QBZ61vmw`2>1c;0O0ckg9FF`#wT#>2LZ7I#sQ`P76H}(HUTOCRe&>q zI>6t6FbuQ{zzVPdCIdV&UjXb^fpk4!DPShx5a0}elS{?_JZ^Tz zRo!OgtYfTP?^9Mzx77N)uLzZM)xu-A&jCn1&a5FA6wY|8*;_)x;TAmXk)Zm&DM-R#EGo zQ(Aqq`#J8jYY&N~9n4|JtlS-TUuyYHWb1JeVuRK{`&|ZY;x}09|DZ);ZHIpYvHt(P 
z^Z$15kg319`M=5UzeQHYHT)Vtl1|0$e~{*%T7|IH{{`Ud024UGu}0-Oqr(6=H6zbxyU@-{F=GMyvjlFv=|#c^!am`g@IXj0W*a{_|Ht>zERd&~#T$IWNV-2 z{VZV?nWeL(n?-KXTCy#lf6ozqz$%bi$1BTBG4-J1AxOD&Y;PmkH z#PpK%vh>yIQ9>6%BlH!rgqMVA!hE4bSS73z-V(M6yM%+nQE1l}!e!x_a9j98_)G9L z7I!wL8^;;98s9fwG+s7dgR-?T$xXXV7fj!qLd{X;apuY91?I)(GV^Nl8|E$M9p(?s z`^<;T$IPFbzcT+|zHY8J|7N~teh6uakXMAItEIO^XE9nbECVe=Ep|(RWr}5nWsar9 zveojw<+$Z%3$-*_I2*^EM~sUbC!ZvrCSNGuDt}MDOMX%=Q^YC+g;g<0@rq)GB3hZC z?5)gEj#s{h5?2V=~L2u1eI{e zIL$QI)Wa;8_nS|c*IVAPaMR(~U;t+M0{I^KWqBAn+M(PFW&91hDnm6+Jy6E28$u1;4Y`JQ z4Idd!7=AI_H#{-;rU#`*rCZap(*G1(#yaC4#&*S~6qC+0&{Ssn(1Zc3kq%>S5wzi| zoKqwzx+(g=VwEYh%A?9;RZrD>suQZSs=rl()C<(>)o-bHs4LVT!(d-ki|Pbyury5% zja;MF=&{wTnw6S0n)RAZkfDz@M4P7VtyOFFTBEi=yIgx%Tdh5Iz82K=q>;J~K48*ofFpMFQ?DGP9HU&KT(5jzc}V$%@|yCVk}FoV zQ>Cc1sw~wgRd(%#dC>K^C|g%^d%Q0!???b%T7 z`$DrIGpdZmuNpTRlTD?j3ezdm4U@n51@lI8A6Po%gfZ`h zaXzK`QuUK6LETlYS7)g2tNX(+uhe{^*{!Y8%5^&3I^79f2xQe@Xfi~l_ZD)5ZN>`Y rMbl-|kEWkZ4@_D!bGouPt}VApu|~08u}QH-QK9(w)6WZdRp5UE#B{f?5X6kc22k;(dR08_mo-{}2Cf@0Yh9&aG3YPF0;c zb?Vfqx|anP{1#mBTWRE*R+)0ktfsZEWlens{^Ew zuwzqKBVGGiuc^OC^aXAF``XfKDINOSs}9yC_dh>dF308be%vu@$tzM(44(Cz4QXKpGI!` zCi3%%zn_j*5VKz*pGwB~#qm5T_3NMJ3FbJ}d7&<3%}GZ+stuuXQ4~vj3Po2djZNb? 
zmcB$9{ZhhyN|kaBU!LPi_ySN9{pI{EGQ>YA)Z;C)m++#inf}z7yzOt!x);K6?$eo` zK@IH&)t7MH22s(4zgG7*98L08IKdh#WUVbEKlxjNFQrI*NE8jp4$vp`PveF9t~s0-83%Q$YOKZHzT)hKs)u&`#7M_w z=+tgsmfK(xBj+IVXPwF3qLM!tr`<84(aOmk)A2+#r&Q)?u@`!4PDj2_j4VacHX@8g zj2uO-2j+M9A;QWna|Za*Op?{U*WgO1zsOzd>#p+^iUVifU}K=LrJ~U=C;t4m@FGDhyG~=o41-o8=ZxmM>C8}>M)vjkh2|LkLknI!^~yvNyh?=aGN7G z2R8FB(myDkR}%-Go5dP}Q@)9%Xj=X{#tYn$eMsK52nr?r#8@ZeY}y`;2;ihXqF>>O8h@aM!w;4yMp{<~ zM@$)yhDvxxPv};R9E*%&jno-y$WHT~u;yfK7NcPr5F=B-C2Huy_ts;!`}Gf-7CvC<#Kg|s8)qTtbWM=83B6-&4g3-7jIj3H}lY_v@3ICP?0 zNOq;6irbuA5VEGmQ6-5rj-2Xf?Uv{j4qY(G(wG3N!rBHBBX!d#iCao~giPgw$;OcG zyquf~F-9F_b9G%+Q}H!Ab2xrtLk<3oM2BklugIX#uF?8S%<-veR9_3fGL~Hu%T}^5 zG?nMbN1;ifBbzbDMI}CCynTF0u7|!5oGuZ)ln+T@n3iuKmas_v3o<;cN7$k`$!%b| zFv+mfv1CumpJBufqtB`RbIwuOurv#mnDMfG3nWYMQVHJHiL4#L9UMsv1vm zpztk366#zL^b@S1IxY(KO&^k5osyF~yufjlxq&DQrf*1vYU}VI%VqFS&CYd>il7<<=jP80wFb_E8#sGO~sxO>uC}r^FguEKICy!Lf32H^)z7u zDjvFG47sh&a9RL1HflP6Xrhx~2!+w<{JW$qI)Q&kK8)@wTMu(+o9ge#FVSy=bray* z4mK^zZL(o)+ORdviWw=}9siu|5xE(&G*HE?&`-#f&blt28i5yW%S@MIg7^|& zd!@#TX`<`M3te2HN(;v=Fa?I>aGcYRJ~fh^T{Kxck~nVRU>UanRU13F&n9B?+6-An z7sEcV@p^It-7OLGqTDs1HaeFC#dhX5lhoL(j)hQ5QE@HXFvQ61WL#`gc*cK`&`5DD z*%*6)f0ay%Yvq3=u=+t)hY$GdiCX?cvMg~dzq9(M zL>(Wq53Q6yF=AvDuw@n3;knV@E@DbfNbKA6ye+$^iNV0c$TeVNdm`rb5GhSgjjaIz zHI5<~?d|tDYxU9OD?WU8vLdx7!lh%WqxoSZF0BI3t!X#;)1)k2hru397x?Yu*YtIu zEX_#i$h7k?&zE<`7GU7jM>7N;1XwNIGk8UHQjZsTemEJQse)Cn&&(9I4B)tJiI9vQ z>Cen)ZhP88LRsE0zC-4DMAF#I_(H*Fzy}D^@);Xfj|et{^)?IL!P7;;VSOEcG@jiIL~Pu`LC~bm2gZ zB94vXD8_?P1Y+@u4d9|0h$a!*)1Mfjgr{e|M=G3k!t%eCc=qbL$PR2A98$jsri*%X!DMq+IUpP)I3{xD~gkka)kMqoeA z`Q~ti9&?5040q3mZ3h>M|0tySWWJCG<3A&)_%QNV7#QRouPeHczC~U#cHzg85@T+M zRtQ-nYT-G^VWXzgZ5RoS$4*-7_^=;S`k~dPiOkKL4&j+-?!ud@1#>WuOn*zf{SYLm z6(esVIpxb?R}wWTerzq*gW_p|Cyo_0=fa<@ZfsZ27#bME+0nII62b3o5uH&sQFF=5 zNsd`M^ORh(BxBe9-D2hM6TvzGA>$US&`a_Xm*CZ7O0d1PU|%hQ!8fBh_h4VK(!a62 zou2{!uR6g8ga<`uyryg_vUwB`l z81NSV4*6t20-VXk0V!}GzYO>bFC*&*D*5lo=L36YKA*Rz-g&dr6KK9@%bfN(TxDN} 
zM5(nyidE+Wb5G4#I}yZJb4b2w`&`Hw+>xbj&>AyO!tZTrFWy-n8GD- zp0j;J?!TnrcamgVE|!nOw$YdIk!iNMq-;p<+<-IC<$dK8JW_e5zQdj=mtrr6`6PoKh2T8a?Kr5pS*FlDn6Fr_hMvwa}$cQ)I*5dXqx z{sf6HjBU#gE9}C@l4*r;ZTTIAj*iSLOS=f$Q+AWM5#xC$SvX=6Ur+9hDC7^5oTBmk zsp=g?(|Ooh^vfA|T3>#Tk0Uo<&Wj#Uz;Q#C<);MpIOqN@pxw#X^Fb$z?x~4k)V%Z_ z$DqENM4`zkqX`mj6`EYZqDw~S55pv4`2^T$cXDe~FSyU(;sN{_GNSkeel963p3R3> zHy6kGz!!#%{fJK_hsW*@N|X#xiaE&KaTfkta(LX0EI;f`Y?g8xdK5QZrZ~;W3Pe{^ z3Fo-?2Qy(k23VQfjNa4;R!Ge&G5pWu#w$nutGN$f9fsYf`PBh@ezj%%6dv2nClg-h z`;*QSmv&4+ztUu|i}h$|C+t^4*jP{5{fl>RJS(x27ujHPxZh zLVfs8tCMDg@cdGun5lpoUY*&4&m=2m_C)}BZRQJn4S6zC&mSP_H+u1fWc(XRd|$HY zjR|Q-K47k7S%1zk8!psk4k3Ng%kD`4%SD}qJR)ntuxe6Ti$DL)3vjiYZ9KUaU59#+{l^o3-kD?t|gC&5lC zTw6M`2`MNj5p4wp;x*h1VX5Sl_}Eb2GAo(CO2IQVfoBL8-`_0e}1Wdd5!DG65qasIJsuM8J9c(B@4`^qGcm^XEM8WoN!YT zJ^;d7>;*QOO(wq?%jc3MZ>I7IWcQoVd@VWq=18B%W|I8Y4_Mj$rOB|*ZlyyypZNza znAr8@aXA*ZyV? zHQjP=qA#<@dU!|33M1^}(URb*n1rX^vo^bLn8K<^JkpWnq3aXus|5>Fv9{+f9HZXQ zz)Xc*#mB z1P4dZv#a%Qci=NeWJ>-h=#=w9VPPTYeL;BP6kL>08&bX;t(CNsu4=Wg#i{P0{J+Ra zcLaRjSMGGna9DB?byv4(4L8vD#=rth;!|uov4tf*XwQaI4-*)zokHYwHutGdqgm=n zON`|Sh!jSZZ^xAqUy%^j4|4}qGtUPN*>DA*s7QkK7gS_*40s=_rX<3V*lgu!4p~tV z9aO*P*>rU$dn(5H`0OPSi{#j*au$+1Pf_&V}nks1fruo3yrXE-lMazK}J@iY&(bLBnhTG0aL|KSxvSEi1AJ|M1wPYeZ?Yt$?@y<0^e<{nR zueEY3kv1c{vK)^pz|}Qm?~-&d{%c7pKX!Fx*OakfgNrLf(g$D!r~#RP6hIsxg5*^8 z4`+-Xk+nJpP!3p6-mjb>YhB0ak8Yr=U8LjEE|DAv$T0fKP(0LtQN*+~(WiAC8M8El zC6!C%sf+|KZgNCD`H!80t=f7f#d`O>>k z#W}IK7J)2GNW)>BW2P`?I7j$7n&u| z>fADYIeg2kKq{9~8O@N=P?{*E5i~|hV`wN#wY0sIO{Bb(rqIU-%fwk3^k*r}r1zv$ zPQQ~gjnYHPVwv-KVW$*(@tPAkq5J-BQ|*?vT=a`hk?%=sGDKMwd(J z2)am0N74CGI+o6o((!bvlsf1{DVy*Kx!~t$qb!yvv($oSvP7{{7n!gwwd^G?XQ<%UuZ`2>r9=pR`E8UQUP_GMXKSN$ z_EO>mzhP~Z052s`@Eg%a`3Rq&hB zM)|}`=_B}+wo%^mQu+yg3)(1^5@lAtpedIqb~>B9`tEB%bss)+&tc@+yFGBx+HQSo z*=QzgF>X~{>>UBlO@Xv-9!RR1#zBRRa?toZ`+w~m@31X`UF|=+*SF%lxdT7E&E6DbLGSevaJy#RK+dUHQ{>WZb6-nw)uN!t=1 z`AS=^FVeQ&S1u%0c^b~&3$~<%Jy`pk{u@wtB3ZdL(l#92wP>+`1}ws?u#U#Ej#|Ld 
z)=@a!`n)RSytf5%SA0DuQ0N+%mcz~LMBUGe=>-zEH9hYMwkhvbUckdX{TO*%UUB7*@ zA8#asK4u5q1s_lM#o71g-Q&|nzsgjN(CWZmD_31*bMH^Ya zC%2bs8X7uMBlBK6XW3~oBe+|hI+N`*3i!zuKl?IcMIy|B`cEUWy$RiJPnB3iC)jUz zaf$sR@c(2#MuzQ8?6&DYxL5uMclrNtJIVWdi}}Ii$zBWYXoPATe~!FUJs!c@&(&JI z4$au71wIDP!uT;=zV2&;5a+B%q^s8 z65BaBuJ96HYA32gFXEG`S%)ξjV{Wnl)tJqd^gqQu}&F2b_tG;k_Adk<+VvhIgqQdpJ z_vAlsJowKxVkTo)g#Rz{=J9S3MIS+e(=tluJktQZMGhWMkR1b|B$<6?5HOy6e|!+! zzx+g3IQPLP68Yt1+6i^mlyUGqxp=E3E86)sPVM#Z2i`T~9xO}5T?EJ7Ga(e&X>|re zk(W>827g_I?%M7mza%fzhzbAPhmT;qZ#wS`XR{{V0XP$2X}Bf&n&g~};H|`dazR&i zhTc{M8>rpFp6u<9jIF-_D9k~AJ=we6rAyZ8A-_E0%cei%xcPvUfDZtN0A~T$0Zjn^ zM;sRq$OhyCUIEMjtO0xsI10D`s0TCwIK-Wu0LcI)zzP@vm=3VvcGC(-14IB?TDdZ= z0nzCdz$w5!K=xyf8w?l?C;`j@EC=iW90FVg`~vXD=`jk>9bg7b1k3>}2W$lF1)KtW z0k{eH5zq{fJ>j@GfD$m^2_kztknw=&fHwh)0P6uC1NH(=0lom#1AYfYK*;U@D_|s` z1h4?G5wH{R3P#3$@er~TpcNx$zf(we13mz}1GoaHAAraI{ytB&a#L(pE^!nN_$P4~ zFSkBtsk&(8O4I(#KP$j@mDP1;D&oTy7Iyey?zDXC(#MNtzaHV<6+ns`dUpuz!*T2d z(dN~wlA=gWLkvHF+-OK>Cr`GLBMn{KIWoQJdWDqYs@W&IZtSRX?i`=>!8_;KlmE|N!R+7#%A)tBHb4tLLPoCpGZ|){tXuP@p zsFl0V&QEQ>`D|t`aojL0f@i-b&>WXJuFrqavM|LXE<>&VZz=uXT}ouakBcK8`xcIS z&6ziCX2EMyCrzI4(kmqsUUgPC{`kay?=KmC>E*c7`u_psq|cb@-H+^}`OtU$?0o++ zt2$3TMm<^WRKKaNR4-SrRsTi3OMO&*UEN+YQ4_5lsQp;`L>r|`)}`xAx;$OJZh~%x z?rq%?-7?)?-8tRAb^iJ|eZJnIpQ&H2->ko)zpMXA->iSE?`ViHbTK3v(hR)}8pDf* zeuhDYafWFIr{SF8YeR=@XLeck=CbVL+23Vn3;l#?!e-&GLX~hvs27@r9>$l9V~pF4 z*Nk0Fa#L^9Fq6x)!F0kj$o#sw(!9YOWjSa0!g9^>jRh8*$Z^}5A1Ujn}4X)mp1|kamXFtzD(vp{>$>rERZ^(Ix3} zbxz%I{ash2|yZwdVKCOgN4^jU*GoSrvOB!dHqX z3T>IPzjB;%q4KJ-RT-&Ls79+is&%T<_<-YURjj(F`ZY|+d+J*CZ|XmA5WlA7biVp> z{X6>i^?%i$)W;f5We*S*3#*{HD}t{v)40)i!uW;pE8`Ewe;c0|c~iJ4)|6(NVw!0x zHC;4W%(>=)<{{=nbFq0mG&$2;HrKqsyx6?lyw3cAd53wg`G~p3eAax)e8b#eZZiL7 z_Oo=bgj%93nHH7hMGI$zi@2plUl0#K(}4@ ziLOT1pi9xu(XZEU)!)#6t8dW%qJN}s)$@ihgVm5{7;IQ;*l4(FxM8R_+&0`d{A>^n zW3yk&o|gS6+h2$jl7$SRuP|5`DU1~+3p0hcgoVOVVV$r^*e-l591xBQ=Y@X=H-sN> z8`8yCW;HG_E;3db4;YUcuNZ$chMJa|J~16M{oC}&WHslRN14Z&Uo%fL&oY;q-7vjX 
z=Jn?H&3`fPHvipx!hG8NnfVLzRcPy5=<8Q=drOEV+7f3;v2?R2ELw}nl5ZJeDYT5W zEU+xHd|){Vv%O^DY=|si3s!k^H>wA zU7%Z~`&@Tj_r0!3_gL3XuhYMXWjaAWP5-ujhyIqnlOYwFEZb{%VkpV3%zm2PLFg*< z5(WrEp}HAR*$&}TVZU%(sD)`g7LtuBV_zdNesAn?)?BEnvnsU=qj_C5 zTeT8~@^{sDs-IN9s(AHHwW#i(>8UBzJk%!Wj_48%=>|=9e)jj-;lg_3VABYb%p7Z8 zZT`^gw9L0~(>bmm1c;Z9k}sDZkT)svm2Txq7~^MHRVgZyIzwaCjMq%j?=jrZ?j^h| z%o8S>E?fR-xnU{0W%+j7tHfH@KjqZCPu;%7xWMzv~)XnM^EHh5ys|nP|G@+W2 znz0zOLo-=(MRQBjq7gN|+CZ&L+f!Spt%SI1wClB-v}K#M+qFBjyR;9rt<1UT0(EnA z^RWQ?>WAt_z^hNu&(|-4!&#@_1hd(tKcGLM|4jd-{<{8q{R4dl{Q4+EefHh#2iak; zn_q=mqscVZ^wM zYF-np>7yB?Y19nRKF|j1{?MiA%k&Qnb=mI<=Y(&B1f$vbwK3SFF@0*ffew0^4dyh< zdCOf3SA_r^9R<1EP1UAUaM`?OM(Pf%>+HznHnHY;{1st#T(=2e0ChoAogkrJ(5 diff --git a/src/pip/_vendor/distlib/w64-arm.exe b/src/pip/_vendor/distlib/w64-arm.exe index b998321c461a422a6f03deb404a299c2d288c617..951d5817c9e6d81c94a173a0d9fead7f1f143331 100644 GIT binary patch delta 10631 zcmeI2dsvj!_Q%)S130MI0D_=$zla(*ctuSaykeT7d7;c?@P?(OrA8igK+RCoq}O`w zs0C_@ghq`z+Nlnl($GxN%BZ6@BVMqhhI;BqW&FPTop)4x`u+a@JqRnn5r=#vs6nTGjmFgY>V`N8H0U^rk46pM7wKB z3l8Hdr6YeTZi&LEw!TU8rsd9q;4H;obl_9=-ohYwm%X1?n^q#Costw7t1Ju{DMUNv zNI*8LN@Adedn?Zbb`E{S(mAC2K`P9ns#yMCA#Sln(-*hD>hZw4*nLM1FX#`hOLTJLOOX&A2z3yui7f>Ee*p=bU>LE+A(ZyT7v;L5D1Lg36z7N$<0?j2#w*H z$|?L#Q+^HY%(IpFwjJ22jBMK(zYE(w&6W0xNa4XstM)PY?W+BbZ{OE1%{`i0MT3>M z+DG^!#c60=dZ+rq$}vxB;JvATwC~DWl;p4!u2h~1d%-kJmZL&hu5qBTaG`QNtUHfZ z;=_}8t1>QpIImNR!@Kb^<(u%;oTXSJ0)r;rZd9Fyz|z0{(Wt_Y>e<@7aJ;fIB9v4~A$;yurJ)_#9gr;khrJ+(-8Y;#Q(!9~t7p;G5c`-#vjEw0Mq178)IvDk8 z(&$r#&pnm(LmZ`H&M`1&z@5HMQ{IXUZr;Mv$T;Pb$QbByE;5xLSE8dv@@ZvmR5w1Z zyc0D!Y#GEfth6rl8#4e-EYmryW#LXRl9h!C!+Ej&P(rF#H})=U ze1IB!Z~su`g#Sm%#oz%;gB0xO@+z!rK@ADWlSZwBzP^hrUnx%{hA0_{L0qg%Pkfc5 zlwT5~`GyjjbQ=EoOVSIz#YtL232DlLF2{L^vfJEAnPcw9OYFPN?+VUW9`81UJ1NE8 z`WUK>imBmr2U)R)y(4+>%y(v!|wf;&dgnvcm=Vzqcq)a z>`@@CSgh>Jf0o%EzjV6bFO)UQdvUCCaCxmLa;eX>W2p2}8Bes&UID!iDDzit_1}NC zQEdiSI%jBGDp##n>>XEOtR7a9S39`9{qpM80weavnhyAlU;7h!8~kdE78w2LzfQ#O 
zlE40dj3+k^4X^YoT!|GiGR?2>mBvOD>a1gKkt_E%0#Ebn_XnZ;=hKVf~6u8cwJlqEp8 zbde|DCG54EOMTd8U%PFi9Ij3!$NeynzwV8IPqf|lD8o_g&pNJYoP6LDjIuraqnie- znE@4@g{zs8S`ja2g^c*v6+|HMVPy=%5zqd^4tu}$=_MIic(5IQ-#GYuXp~Xwq_n8) zR3uZ69BjZCpy)#m_|Nx;`l80*&nGJ19r9O}e*Szbm$C42XWscVr2gU(ilJ%%1cp=% z6)vOlYSmI0FW~TT*!tYzr|~=T$ZGt4e`Gk?i~r&;VNH!tDvjWcQPtYCDxZAus9VCl zWk?g}K>}uWOH?XJNB?h$eC4g96Vc+YM`1Lj?Uz!A6<@}nC#7G$itj$hhAVMj3|5XD zOGT{t@r?p~*m3+A47H%z=F4gJU%pHALeaB7gezmuuEE#2v%hKack#Q**$u1RC(f_T+zIp@_FzuR#2a^3v75 zd`zjlI?>;8uu)yQ#G zNXN)wr{p+fpuP|JkXq@*6NCQq_eS+AkvGN&Ye!>kl0I?7k?hTR9L~)P<>a%qqU0!f z>_AomH$N~;p4D8w>Tw^Q%R|((X8b&}y0;lea+Kp#Gd{+=Mt#SZm+>A)$L9RB;KSws%t8M+c7KwlO zqu=>z{{UVZu=b-yRUTiet~9E$TSlwnWB@w^r#V&yaUXB)rygv}JGhHFyB!bZj_Tfa zoPZk7w&UsI!r#&|K- zcg8?iuqEU)nPN4(BmcoO)UZyRAzTK>vz>UmfEK}tJQ0?BE|Ia+6{`!AxFB=e0H`S}%_%ufFNO#@{k?B47 zJ*jF>#*9;Y_T*CT?{M|xNF!uKDh8`k9gxbCdt)a}cTsQ8c5XV;sP1WOZ1YQm6yHn{STBjExyJP_c(Vp z@D_FSB!1Wb?>igSYp{Vl>B;G)sD4lK>d^O4>fg92;Pf?j7f$?EtP=9ZveWVYliZob ze@fM}lQG+Gs(x9#2#&cb3py=P|H$G|z(^~`cZvE}tLDPUYpb@;hT$~1w(5v%4rQA< zJ)3QC=8M@doXrtD1>=T^SUHsy4CAzZXazf9a&K9{KMG#0u6dDf z@_0x7Le4O7rQ_sc_AzjUU+334f7JP_ zPSb;W{W`nq9N?yn9@i6cbmr<@r*n(WeL6qWc}nMboi}tIN_IE6PiKkFO*+@=%++Ya zeO*r&qccTkxK1yfpC!60KCAQJI-4iC(_?k^(V3}pmd=GbSLu91@*$Xho1XB2&X06{ zt@Db`=3U$kMeFRYzQ39yY|rV@6*@QREZ6xDohNmk)%l~&0JFQ=aGek8?4xs#&ha{P zbmqEgqg8ss8#+sMeyH=P&Qm(i>Aa>>^FREo&<*;A&N7{z{1JM*kIs0FJ&A(#geP>) z(&;H!A5RwcL_lMAvZ($nTF0hzbLZ6StQhT%-xKtznZe4RCB(!`A|A<`0^-weG%n&OhDxB|52(F0XUWNq5}zm757k?)EZudb;7% z>#ez>%U#tNN7dEtgpd<%TGqPhX(%Mb-KC&3Ps@6hS$18}(}X9k%jN1Vrh9tS$FZuI zJ5BD3$F6KyM!M1C2zep6ky1 zx{JlPrix9ab+7bmJ#6*$m%rWCZd_+|y%6&rOMjk8drOO*{S0cPj6$aNUZkhx5*0Ds zLhDI0r&{M(V|z#$G(p(7D9LP|GJOU?H!za++pk%Dg#E)3hpb zo1~4^s>nl{Ngd}UJ`K9Z4&9{|ajF__9jUA48fFQ2cl@XP0G zYgG0Tfn1{o`G_fTl43WDW$O`%MmI(adoqgXcH_&8Es_t!ceC`9HOmL)FVEk ze~>3{>Hwr8Y?IZfVa>#+Xvoz}1p0d5qFZy z#}R#mP^Nxo6dmp+PaygXp-lDn746W2L?Fmx(5_@QxvG~}iI=1?C;5st(K4$FwvM7X zM%S`vO*`3-lvjwBBSb3d8@?jCw@2%vYth?Ri3Ta|-WK4y=$`m8GKaVh`&6M3j94w2 
zx$b+9+JiO_Wg(QQQO(5+o5!>xcAz&11*kQ$gy@0$w7In%Dj|QW-YT>;xn8PT8jB`8 zbwrjyGlVi-#{a%L>URS`A;?N@@{ks!6V>DCX*}wYnI*cG?Kmt`Xk8BOSZ)1 zbmgC~9YS+LT+2>2JF!9za>5-mK2b@u0ijx*+d{Ot)8Da&@gRXvt?L>f`*UAYb=7F& z#9jaBA!U{|?X&qbQ58bHUQhCUeKwv!^9ZK(+Gq`+zOs_lO)L2t0?W_hPU9BB`XC!> z+V3(aQ4K=9I;^GWd#4|{7g0Zg$7F%`wV!c`CyA zWHAlK4r%ZcJ2kH^f$I*CUR@=Yx!tYOPsDV*>+l(-L^=2JtDE@B`Zi4eM#xnCT8S7; zljK(SO!W5tOeq0OQ`K>;MCveE*#@-JjxvmGw94*IlbQ#at{~*Z5i z63-)dvk5oH8ug4xL;$w|e~%fTBeOPTur~RNh&Y)nM=?}(C;zTxEl6LEV0j%>=lY8& ziN-|O4O?hUCuu+TzlBJvz!tjyp0xc)D^fWC6Yjrvi2vRp+|Kg<^bQgDCwBa{NBJej3;#D3LYkhv34 zd$9gNqC_!GM0O#15b>I>xcA9)FqHDh*iF(&ks9*|mlDd9`Fc98D-?$OgzjXc9As4C z;3W$%=YmJ$F7j zQuHW}Ly*fFOJs#272xX_%1n9ill6fwl8ZCP6FCvLW#ZUZkB=JA51Q3wVr=o^8i~_* zJ{m}$NR$Q^fFTirOp~zXgGEmgy+gH}KH1GVQ}mwd%$Q2F844Ju6YZj@V!zIu85kOo z%_TF5I*M!?@1I4q5`vc5_+J3*Bzl(U09Y{xmJm~faUKo=XfWdiqMc~)md>JFm|mRY ziiK`E7ZH7fdiF0SI!IGQ-V)i+bWxFqAr+T|ahX&kn=8Od)RVWI=x4Ika_$P48Vy)p zM#Iqb7I+X%R}{Eeuo7bm6IZWtGvpPbMbZM``{d+|HF8L58TV3*J)8_Ik@+~^?nQde zI=3?xflj3Fe@&ZR!ld9DXG20RJ|02ADlm!}zJkyZ-zBV1X z9!#Swftxxdq9AVW+H}NQ1S+yJ-VU_+Pj4IEZWHdrE}Nq z!Te80Li%1L7%myA_7h$9xnu~ba8I9HupM4|3P#e^SJ0UC?GBy zDh^=?pmE_UQM(Gqy&t^0#*HuTpuvDr{v0JXc+f_&!ABqSO=ElcyElx*qxOMWeNI0dNZ5}oQ8%IoJ2Fh zb>M@L3vm&7VfRf&z+$A+HFv{iomt>~ zt`*Lou%x4W`E^;&W@WnJcA^Y$u3@HE#m{mfxga+Dg8OU?mdsmnL7HwT_!VQ!GYloa zp~py{^E;*!m~$K6EwI8h$|1p?fot9w^t>J{luJ4@eTzXmm0jqp*W(6}_;7Bjyt#HW(>mR^>^8s&}aKVVhkl>QBldK{=kTJH|Tn)w|-5G@U z2`~d!lq-ru4by0vz@M?;WY~|v(dbbfSb>6OTwEgK;FO?TK+MTZ=aDWiJaRt| z!R6%uOk53GAXk8kOfi<^gq}>>5Wm$2^?>GwFfqWaey|Ma?9Y@X^HXh11^7r9i089- zf$J612(WGl(?ej^FsARYvZXwXG4s0Y;~Xg!wF=89*+Ul>fLV|;k7oD`4j^MBYuO2o z#u%H&LJ{OgW@2c;ikVFFz?`R8Tfp*c_&A6jq(9Btc3B601_7Fbo`O!W9^5dOu|8yr z^yiqoI$((AGq&GL2E6NPV;i!7wONn@9)u;$FE9==Sv&{aC0EoJ;Ug$lw@?m+m2>_g zEox=k27F9IPiPTi$#ijEzKmPhw3M+>OcS)6=`wm&56bO#!%IxvWkoB{1FwrBWVLJv zODis2W05W|X1x)Y?`lOjImt(}qo8~si^M^v>@}vAIB&@JE6_$)@KKHgc>&#o-Ea^n z1>}WQ#^vLVq78Qm?5cAM?2K5euvCNOGE1#b+th7RjKswUM%hWzH6U-e2x1XD<;Hvej=WKvkevo!IidQ9>HBiP?(Y&5UV`xKS8v2 
zQx5y*@){*6z{JCpg#o=oCz-Air=K`)pz5l%;_7*#eTRr@5-iF)0gpf@BJg&dmv88- zgHCSXYM!QC3Y?1H(LvcfM|m}$%ocAbuwMOPZ`^_N6=~HT97M=EIJ-V zMaP>AU8_eZ&vhMwI`4Ij2|A#ci;@bxJg7QO`MK+O{!tkg92T_W-_-X~9t$2HwD#YW zTvm<*PvlWb_mFh%smu%+-|KWYTHhRCy85eu5@oNJgwguR1{xrre^PkRr@P1%_D$15xPbg8L{X$xZrWl#pXu#;JaMZelB-2% zd=whSPbuGq#_(gxpZLvJqPz9xMaqP3@tmhD!*iapt=l6UW$O|a&pcKc7#@S)hqT|n zgb(o<<(iLrm^O}8J`L~T2T7A5uv*TNF8!xl8gQ#Lu6tkJrA+Id#!<@4-JkTmU)G~C zS8j(wky_koGK|pSd5f_OWlv;qR6eB8Y%X<;LF-j>=dvJJ)M`yr&PK+d zD;=ZKd6qIRY672CUXAL<-zZ0-9+oj49mek~W257_Mp+a+kzcdbMW-;YRDO;L=5dO5 z?D&p@t~u2zjMmsknH4*b3*M@f^c&6nl|%jR zF|-+r70>>O?5p(YAJ0CvY5l#JTWppACWftT13&Yx<1+8--`7VXXwbU`BYP^=L2>BM zjzKB-Juzqye!HZ_aGlaOZIHL6lWe*np;>t%EgmHs)25<>zoZR8v&n-W=Q`V#!NS9P zNNwkyLwV5q!f3Si|_W~lOA#tW!DH}gq!=DW;)utj-) zL>OFhVuXP?!}jgSAp-4oz4zfzi>KC2a!^?$_c$T9NOZTxSZrH+ua^N;HjMM^z+tw{ z6Zy9y7VlkBwoHt$jhqz1ycm-HwzBLOg7}&(bp8mzi)}L(eDA{#DZ>h5c&}n9 z+{B5t>xHYC&B}{US8}>7>6zZlLzQXI9^{U;sOPFYf}+F?U(t2vfR)l;b;@_ek7IRD zD48qx@5;XAgE(2axV(j{m1EEE7kSOftn99~T`SrI?^iazSn2oicTUv{v2fbZts47C z_6FOeRhX11%CyyXj4ov`*YhBo}KojPHD$8DUB-yIvjN5P6s_eM1P zl)Z>eBhuni_L9@7qBrX}s*6j;U6da;Jpsj8uU+=Dc-`o>R6zUowihaa;fdz?VF^pv#Y-FiK=s1Z7~ETo*qGE&Dcw1 z5H%4En6Ej;mig{v#@yRf`z4Jpy!R2N(>D5pUkwQHdmOz580O_R;)V713iej)^(D}{ z=g=4U?RfYR{4P7Z8o%PR@#xru&z`~a;m;7N%Gu8*#D)(%@$k4x7aP}lv&@siQkUj|DiocD6vgz@bpeoKU6;3REbWmYL0>T_B3}h zaJzD-^)(dFJ(I|v+RDxZ;H+@Sw&VOef}gcb`!R(*Qa^R7YNQv3C}ibNRCUv2d>Jz&ljGXpTi+21YYBdv!}RR7=#e(fl~@U<}VjtMA9~ByO{Jj^*wK zWE11L2N&DN#q;;l4~e}Pt6m-5i~rZnA{66|4D6M2V#-Qh_*9sbyu#Mq4L)NRQ; zP)xC_3&S}=ZBOPv?{vFUO-U#^K205)%0tww6rPD{ds5&b%tjxM5HQv)mE*jBdDo@c zS!$L;>2&H1CAO>g_u&-ZO6W|4wGTn(;*`%+M=uUnO$bGGS07AphI+gYe+J82`f>zM zw}03d;lS6`p#FRk6Z1=d-h{%n1Nbf3_kkE_ojPkESBo+Gv_9Jt1~D6pYVP8QdzT~();G4~~EVlH=6KR5Ft!5Y%rb$v_JnW=Mz&O)6lbZ*gE zqZ6+|d9CAG`N@iV9sB?wR4LbMebm;s_=V_h4=@dz>27Pt5 z_HpIE()p>*_jT^lXeHX93rchr=zLJ;P@T~_PwNd_)9Ih=YA8Wxrp_Fl%XF^NS+286 zvOiq6R~LM&vq9&#I)BmGF-31kJr*E(SPS&*vpU!5+^+L|okw*x>1@+^O{X!{Wj0mk zP@Us+=IG4Txx_^)tyF*4^RP~L`2sCJ 
znkYvv7^$>ET81cH!NjMZdNnaaByJZ zE|=I*>=HL*y9|V@>(;QZIGt*Tr z$QyF|s;!*(@DP0S$(40vFg=7+6e6fdxI)Wa)*13Zq?0H+d9&=5%c9JKDqUUbuGYNA zM%A$iFV@{GzV_YrdU5a0iE}e{ZaDLBhksrFU0pB4l4+5PnY2$zv5zf;MM@}R3ilvg zR!CHV`7|J(KgY7f5<5WZpb5hMNXe?s1cMh)^5vwJoUcg<@8s%OEK(%F(Lk;>h9b+e}k?kZIoZKMt&`Uxousy_`< z4||H?LGH4lnUEtrE6u3gy~KVr)a)e!{M;JtqljX$!k4QZj3Q>5yZzP)_jmDA29ezL%y=%8d9~Omf^W1+Q(zA16LqIi z^td5j`2f*+q~+>2MiGBgJeBA*q~)rgw+O=sl7Jw$L;F&yZ*#-)dhxt;=1gx994(9L z;p-?WFgBM)YrT`>@O_DB22%UW>IQESJ=opr?Q1dGzY>KjuF;zDTzE^qlu|>Q4|&$3 z5zJT%n)%uLmTrfa6Qv<7SED+Lc~-Y;W8c7Nkj${!4&HS~ZyI(GH6jhwTSaeuo1d>Q ziA57`8|`nRXS-lp*YUfyA@9MkklYGB$2z2Xb!=xb1JP%4xI88J%HPVr68hCn_8DdZJU zUF0WbC&kI56JDWBiPC?rzjDaxw=p?xk)437N@ag6xc}TF{&SOX1A#1mcRxgxR`US#2djEUHo!1}w1l0=?pPbRt>`I^4i#HAbzr6Ow5*hKAm z9(NVWk>y}0+QTh|x`Vh&T4_ECDsUo`1+`!!@(BZFN1A9Q*edflmhA%D!NKTYY8uff zun-&zIzVZ#2|Pu)!uKA0A(!>RX=L^|4?c%{(Ga{bV8=L=D34mXU?^_W@W{&x@G;60 zwcvT^r;SE?oXZvW64mhqgV{P3Uq4W=ay-!ynk#bdb9Kl8W<{W3ocYc`?wEjs9`f?+ zco7DTlL-I8Ya(3xD2mSu-0Ns522wZ~XI$u|;ZSHnN6NuhFqNq|4Spc)flpC0r#^_w zB=Xi&oCn+RPy_m)XO0}qExxa|Pa`Tu1Ci5-GC(sJ(nHYs8JIS(a2C<))WVSuyEtaH zK2n_tbBH#h15J5EyJ?PCH`hheJWP$q<)VC|ULx1Z>mDU~5sD#?;lJeEM2!oH-UG`E z;0ZBXG%d!d84V^pNwgCUHs~xYgzLp=E`Qp^+GmOW4m<0fBRW8{Mb2MjLvuxW5vEj} z6HO(u1G!vY3fIF<&T^tFWNqQB6>v2g2zdbwqo)nv0W@8{(na%&m{Yj8YL$y^t8ovI z9(al9ZEE6#HF8Q?7&lhTJ%S9ZlI1w~?t|R4&J~P>U=!r)Ue;EZI1jd4A;?mQ?nlK6 zFoW|9P3v)Ai4;_!;w=Vdl@obmxixN-t0I@pn}{Z1L^Yd<7GcDZTZmreR#E$3M9qkS z@(LO4xg4@p&UG%Y1Bb#+?KYzOK-%tF-Z`LEuJc+vlz~~V6RiQ8z(w#t)(+WVE7yP( zsMrP;f+1BnpvpWr12peM2k0~xRl8!v0Zzhz8gb}$pd23|s1KRB3S5H@kHJA(4!CMJ z)_})d_&gT+H&N2FoVnjS9nT7MO^8%{Q_SpV93Jws-UfZ zT>Ui<;gbM}=m+I8c7Sqfj5w6Xq8$?`gWIte>wgRck^3MpoHJC^5?$~-XK4Fx*Ye47 zAWETFsIvmhf_`nC45Ha$<;O%nV;vWMf)0oahVoA_1?YHTJv;@ynnMUWj|(2=qeRzf zo~QW>*TC?#gDOzoX2ruOxIFEc45k)756X3m?t=fZ38L#5+lI|RrPndKVWW8 z$eqo?1>hCRH_ZQ9>g94HxCk86=wdDSybQu`r0ZL`0hIdn-^%@>g{}CY1A}N7`~*F6 zoWxEAuEe?D7>&96PIkyF8o_-qyzYCcZ(%rv^1=Dw-Oy_TJ76blZ-$4EH=U8otwqpT zIb~)}1)qg{C3q4vp2J-vF^OAU||P%5e?T|aE&bT2iyRm=ldgW 
z6;dBOgO1}%k`_!**4_q1n`t?i+6h+*neT$zH&}lc z(_XLvH;=SfObKouJINv<0~m9g%T-`3o||dte7N z4rOWt(}uw_VC`@wvn)@y;%b5i8i7wwae^Ab`#{HNT&2LYu}t4!XY(D0IrF&SSvx^? z)FMJA$`LlR88k!BIEfK6IDlwu;o5BJiB>jFK}S&DJ{407mgh4q0Zj{7+rV(T*!0)@uMF;cL$0M|pXU=d?~$Q6-KFnPpdik327zvm42+^fxP8z{F_(=urf z4L`{^$e4KyxLfY1g@_TE0~(bQl<+q zT)$ifT?-dH&(vQUT7eOGoE2@WWJB0maT}WgxxBFrMqYk!tH8-geogxvlwZywanM&D#uisE9X}h*uz$d HGhY7#nG&?2 diff --git a/src/pip/_vendor/distlib/w64.exe b/src/pip/_vendor/distlib/w64.exe index 9a6b894db58c868b27c1859614c479a4d640a6ce..5763076d2878093971a0ef9870e1cde7f556b18b 100644 GIT binary patch delta 14555 zcmeHtd3;UR*Z1BxxtVWnLQV!5Wey1;hD3x!f)g$^#nh-6O4QKcM%B>Z#&CPw3LDX) zRXS*^Ev^bm%}LZeRg@pvrgaGwLyIEk{qA#4Jiq68-}k@gzvs#4v+j58wb$Nz?P=|` z_sP6bjd)9nq7G`^xm!(T|>8ynPcQNh55#R?`iya2wK-@M@= z@X=Fy!wN+nqUpzL*jm8>6UG$sI>~?e>(`x-v5Q=YyW!8#N>&hl>q#W>)Oiq6gT!{V zgjWyNgtl_pc$Ttkx?Jbxw7hvUR!`mNZuXSM(FOMAx=VD0^DEvNtq2iqh7KB zaE_vWY`*gc-U(xrj5nV1j5#bpA4NshM?ZtkXGioiX)?=jiSi&xP?GwjIx5&3E}>zU z6A3XP+1)lLn*QKS3CWT!%6_CCr`VvL;I@`dR1?ABsf`#m8(&eCy5 zxxZRhmhA`h>DfYAl>WFxJGiT~-gZJ_y8?4XDzQ z+msNcX{mcXHx0#|y5A97K0zgSTdMY%q)Jo*!yMsZY_V&XrfdJNjFYU!HHQ|mP`4~P znvHbp+oTRUqBJPFP?Sv3L!}zW&uqV2SNbKB-2}Rn3GU$~mZv(ihvljBj5e01E^j4@ zlHs#1l!yzDnInvQ+8+aNFxm9mfmyrD(Dgm;_>qM#ekNoW28)Q6Bk_k%oH&s=P@I2< zD|Ame2K!Uar5uxm^nXFYW{NIo6k@)(NHXN2Ld%b`tP>*Ga`*Or+n;bv;V=NYysYCY z``Nvj$C0aC|F|riXASN_bSew^9(I|K-G4!3k{q(& z!&8rx)D2nCA%zKceAYy*>Ol)a9Pjf4ucwyH^$ZIB6$`qL^k;7RbF)4-{c5pK$~mPv zS*T{)JcH;r?6_ykfnP!k8GoZ@St#a37S@_WaxKqgvwyC1LKdv50yjh!1|cCX{M#HN zS{k5$bt4Ofssgu77E+L4hQa_=;FUz1viH3Fb9Y=q3&${`5C7tY4~SO1Z#pH$RR?BEbI8-#~$sbISbZ*t!)u8btT4v1aEqsS_b^$aEL`C=W5Zw#&wLJ{O&Fy%;-Xk}+A&&{PE@7!7+S0vhe?&r zRi!_(&kmrOGVSU_BL|5EouCd*(V_6Q#$uPQz>{ z{`C;vbC(xSVBbk#vZND`zkqz<0h3f^TzSxuk4Y76Zv`4FyvM=I_-08zIdah0Zr*oP z?4*<_5q!}qia3(EzNJ#9>3eZ~K{d`HO4r8aq{W(pj1`_-{=!eG!b7wi)*&OR_@>Jp|2QpO2OZ7)7*u#4r!>!FwjSAv5)fpi%R#ZFU z+1tMPO^yqg!;~YUbzvE{h%7ui%WnFH$F6q9uKqj6HqMPtfxF3;lwVIsU6jdIsH=fXCj&&ok~!ubm}iTv|prgwh!^3bywHTjvSJ07kUjhW)l8Cun6^ 
z+YPSNdtnjvbG!nJvFU7rzdAzLetdYG*~d-Vxz{~VdgiEN*P65lNJR_MDL5LN@GoSD zW1eu$MJ&8&FdfIzn+Exx;QCkq;(b}>-Bpb3$lhog;{T(T<^S(ldbX}9RQx zenU3F%ly-x_tWgg56VBeAO-@I)?sHt=R7KOyaQ*7RWlsv)9sL+g&%mS&v|3EjLm>L zZ&7kWx6L~F40ef|n0|fQUD1-U2{jUPI0}Bi>$WvjTQl-={#cf+pH3DQFiT*!KC$7H zP-7{DdX>HjPoQ@>OJZEXF1%t~c!76TG7!{)NAw7_V)Arc-O;qN3S`bcriQ_+^xku7Q&uKW8L z`@Cf={g$0-InYoNi|h&F7z|U=GS(_FmiA-*5z;b9#a}<}60>0=tzs zfxf{8CUwyLR>7pC7@E(vB_-&7jb>MqLYzY?2wCyEAMw^Ff={ixgoRYgQyj$G05?P`(1y1b{d)BHeUBJ>?xA!zfQIczQK4a0x z*k`vM?40P`29qsHp>>EhMCrb<)WSE|!YjooVp?b=gyv#L`XFWS+}MkhK>DrSzuhGr zJUyl(7O?$^h}f@#4-96Y;kH=+MQLUwxG?}gVaub;ufX#<-u6r zLkBDTK9-qQK$ozO)24WTjmxAgbVS8i+G|*7r=D~ud%aW3l4zN?QX0L4cgS+U4b#63 zy3q3AIadAb7d%xOUB(rx`Z>s~(hq>p9ZRE1Zvw|%6AB!=!!>gF6-j?oFPlm3AvbTGcZ^#3kIYt8LOd1iak`t7EqLhcF5Sn+frP280(t8C-RW8 z_rkC&c7t;~@>N{6w-WP-(n(p^$bvg(&}jRx&c2j-*e7PJqx5U$)-{uwS#H-zI*Lug z?_jp9Yd+PpJ6%Wnz88&<-Uh32-abYLrm&HjsZHB+HzoN!Kn(WlHGW=(MzJq5Q-|Dc z3c1zm3BTOs!D}S%*?<#M7N&io%ybwWLKRn$yq-FyT~-QKAf0Q8en#q|QT-Qc?h{@X zA^9eLx`nP}X<2<~d$uGi4;$fHRyekGLspc(>rJd#T*40cSpyyC5XHa=-TQ-e?Diea zWOutY_kQOem~Hc_7Oh6ONfwelG;OR-n#UirW!cRH3HhSb zTj_zPC7l+#$U+tSAv@mKgXcIMgYDJW@)7K>>?j({f^uSYcWYTzj!AcY16!7pJWs)>{Qd=ve+JR^efi4N0)$9Rn!KspRY-T z2?gB3O+DrqPVT~ZoVA*waZVg%i9Ms~I@Z7EI|-prU;*sIKsXYz&>T643o+!bO4^sz_MRK_aE+p0ianOgZ_2+ZX(#mp^OKr6 zk}Wa~bi1+#nzoD!>=#qAn_W$8;R&CVYnTiVOok|V%EF6sHmuJ!dX9Pa?M#cA*tZAu zXUqC_i~q3(Zp2uck7=7PX+JP>q@1g^e+I0E9xNDMgEuEE&t#+5YqCEH-e!=#YCCAXY9PMJS7tL&|m5E|h-uvja|2`WpLaP}{VY zS1_;L_&qSVUiK{Z#|%4^3!(`DU3xXb4BJtDW!3ATC|VcqQU+B2=x64UXA1ZoAvUo* zYbcJX(*FdWuYFlq!Y1d1hi-Y99t;m&cfs=Pd9j};C4y`d7bD%R-4yS_u?}1 z9h~F;`7*wEjHQ3@25|F@kWS0OEH-iQIi36`>pCQXuCq@VVy3!#=h*e3ZPM!SI6{pJ z??U|t1AY*@@2->OGAY~Hs*i$L%67Br!xYBLst@AKB}~prmOE^GmVZxb`|akd+2M{&QlU7a6+<(Bf zjm+`=ieF(YRc`6$=PapnkwVupmx4s&@KR+jb{EIo!(latWfip6JuhX`3ZiHnTV0Sw zSFtk%{`50;vmlw~vA|L7TnAxs%ffaxa8xEe%T|txb{&LDq>Rtl{!uf6TGtXXBa%*AP)SY^I!@0i5cS}Yx$0`=+{_1=7*GMrY0_Z_o#D#zM>yZ6{Q>iao@1*2^q z2Fx_(H`K70^>{s;JFVBVX_WoT*C#sZ_$3185l=QzzQzl4hipXkMj$A*edUWe*j 
zF*;-Im*0rd(I1#RDGEVN)a1j$9`x2|S%VV~alc7=f{Wm93)!>D7pWIJJEa!&?|d@` z*OIGm?hCdQD@QMatYgy^Zd}^jtL9NyYYA*eQJ&ZN1&E)Fb*>Op+l8rZX~K)C7zODD z+=UG=l~pz19Pi7ab+04-(1rqA_v znTO~sEw9GxU+1-lyE#<eH%-#xTqV-W*ko^3oJnaG zdv!(+Z_Ar#`-$a=T#SbyT%Try>gunsZ!7&-?TkFX@!0ayCyLT_lXNfl;0wfw?ThSv z-nvK;tVX?kUH8u-<~%EkUbM&0>O-k3n=!i&-EBWIJCD+d_GWWabci?mmIPsW7nXE# zjZ~s76Z@hh%=IsXgR&53KUH#AkL&P?g?sSi-Dgn@{PXlh9q3y2<)SINJ6l<^#iC*K zR!jo+z+ObbtJ#~2hc?UL5}YvGR`9!eOe-W!*0?JDi>vIH#p#C4-{KZIQq5b(5|%XA z-5$YAOZwAg?87CW>u$bk?`geB{VOkHU+W$BPI31Zy5tz@wCQVDjx@z53t2qx4K**F z9g?!VxA71bb{v5A-LIJM(t#msxCn;$y@xt1F-wrdR*XZK`dST431n-QhWR%jjC35e zDH`{|lUoF?gxwe?*ne4if_Cfy_rn`|^Egy&8C!9kXngY#rAw9ijojtpv3Lk?EA-Y; zfHmX6W>)*2)zEP>l;z_o2Xd^TactJI_HZ&=mkp*}m}^<5IH$qN!tvw}Whw8yUxeQC3)%M2&29Sy;CxtKq3<@XH!v!%d1>K4;6o=14WUDOY6S>Y)GN zi?|B3QyVjfu)oU^Jr|&w+{fol*3fzOq~+gHx`+L?;%nX20c-^uC@dbpyU_iH$k$yb zaSVUJI?S?GhO`{qPuU7}U&GaYh94>Q)>>z7E1phqZ-u8Fa=qQ4u`R{L|Z`)Hoc;Csp z5Tl5gMZIaRWk0XKp}W0=?fo#cWD|1uw9o1e-1E~GHh2|q=zFz zF>^w9Fg#-x&)|l43crcK!T zQXx!HAdMAV0%O9Hid8RgbFoAtp*}%X$DKPzYv@~Ez^ZqAjyd*$r@YP?ycX$y0)tDy zC4iV-Wgcxxzi!6$uv%^+geZ*|_VFzKqVL6SebmdVzoL2C+)fsDvaTD1*i=o>R2Ek8 z=3oPV^}q|1^3>ywY+0CXpS58l^{HQlW}8cT2heKxbrnn8*oAgt6F0t13)t@)*U`gl z!KPR`iG8uDCyi$JH!a68YWV~=Wc%SyJSkp*PJfz)33UJLtH81hz9`HBKK>QtNC)sp zm&KQKfBT8g4m;;&NXk3O?J0b5^u|k=Aw`wcs1hGY5>!d0Dj5pNNL8{!l@vgd$7;Tc zFR6W3eRWag5s?3^O2(^_cu3Buk~~#{0LoCQO0rc+DkOVVNgGv?3CVU<60S-_NH(hy zgDNS6WM_#gdNx_G82-Gs^pR)k>(LTpN3Vc@Q^B8q6(L&!mm|fnJS#E z3fFL<l|Ud3cN|z9JfPYLo{u6MwNh)=pU>nEK*3)ASu# zi>*C^pQkAM*?qYzTb=4IIz%wabI)%nOxk)75l^3Oc{p)5Z5vC!V{Y3MlBErVqz&`y zC3)S*mAs6`m4{_vSzA8I#?rY6h7hN^XO{kLEGag3 z%HG3Q648RET5KMDA68^2WJmEG`dS<2uzy7p*vcKTLEBry(wbX$P^*+5 zZMszl2F2>uv-3L^>Bg*Q89O6!Gk)j0efSRDd1n-TofYgnO$C-*k%(nGqM}(qpH|9R z^mH;ui#h2HCA;cyrrtq!g-)DW_YQ`Sz z+T`cXbLZmZ#fKT~g{BGnYIjG^+bwt_#fUZL+3)WDl+q@QeP7mMcSn4cSokTQ7aT`d zCSaItg7e!r9MUVz@s=kI<|xyc0|;WuSf9O-bh5o@?-}j{vwrBS`)ej!{)4Q$^Ok+Z 
z{`=HF;SKn1gp*muAD)Y2{)VI8B@6lN{exMqw*31J6bi~V8(Ks9xIOFUqDM)TD1wE@)uRdx|L0WyRyZ&+jms&r`~Nwz5EzBDL=ZE<((ST!BfdQV8#tSUX{oxeYyw-b>^8-7`TmRa1%Y!1j_q~53Y4X z;5e(h^P8P3YdE{g7=$d7bQD7V68$RMc&>%c;mb~)i>EW#<8!I}@^Jo-q;C*Z4Ri1~&HsjK zSg4^{!+9F6(QvDVhcvvV;d2dr@D|ShqBZQI;Q)?kpN!T7GcA7n zLpQv|^FMzLBQ#9b5YK2z{s0YM)6fj$?UMzXpiIMc8h)nXb`6hc_>+d$H2hP;e>C*c zMlx2zObrKXI7!1r@l2I=3L1ytUFHDvIM&9Lln;2YBtRRZ}4?NoHdiA1D03mVNQ{ z7va7~=jZ*Qi=Cad<=TQ8{Z;Fml&zAdH=op#5FB)*NhBfdLG`t&ft|qj<>^%kv}F^n z#T))DnvnZ3gn0bEY8>rCPptZs_V#Ve9NUJFxu90%l+ti|qP#h!aW8X6bR=XvXm|NA zN?X4auTCSR0wk8dk3uia`K}WoS3zTbSIx->{~{fpsr)jfJztjot}7u&K)cJ^=>Dzr zu}ne=vfwbZ(o?`&c)EP5jy6qeH28EkLN0@LYiho}F2uK;2k~udIN(|2QP8LtfOs|? zw4r>Tjs^&g;*Q;6Bj`MeUoF3-qXS>sHO@rHBHTG@%KJFcSKBw1Sc}WbWzc=CL3d|L zoMAv?{;>Xpi~#YW9#Q@;CmLMRn00>uHsU~B=l@U##Mi@_c(ijN9&KHUxQ-6uN?gm% zfRZ$w-@$v~de*10OjBplw4Ezy+Sd58%p_b8-v>38`3n56AgxUKTh6rk%WmDq1#~MO zrV7iyaHhd82O@4bA;UrQ(9ZDk%g)N=KLx~`FcVUwCq%W@^`*7EgTEFa&KpI@#_|k3 zjlyynrKjO-8v850PRJwB#zxy+bi}2tF1az^j??%UXkYm@JstAW!pb)Z=~ToAKFH;$A2+`x|E-Ep)FsEZFtYx59(6x?y6Xtg>N0K462@XLi4bd-?Zpe{GH$^8E;ygKN# zuH*~P;7(sbEhF4%@XLlLpC{yFP?vHm#K`^Bo!%4pkS9A664ch2L{N7cR1}zeqWlXF z#R}?)|G`_lKO8l{(neY%cpwMfd4&+|MwvNAAJzN z>rLOQXl115U2q(SRZxHWk*kS(sUExk9oTqm_p0a{Mjv>Vye(ieuMlHz`%`j2HG`# z2QWJppSYki2e=W$>;DA!3uqp13m6l}BQGT40=C7? 
z_yYd10bvqB5TxjH%m(pZ<^Vs}wjTDey~B39obun2GIGrAfd@1W|mBqk(fkol$^eLMj>#pd=Ca6Q}_48lVZ$6*q$8 zeNYkPAqc0UJL6yh&#_Ae?iS!PfwnG$JON+U1(EkRNN^*z0!Jgz;th-iUIY2~Q*r~i zHd7t;b-)j^aCktz0qEIHodPf5A`tKK65yYpnb4^RZpl{V9N)|#WF6#1K=&R9?EFh8 z@d6VD;ys7}zR{DAJ5b_rz=T|^QqBW&5pnszI1_LaBCka7cYt1f)v_EDHJ;-%5MNJa zz#X8W&_4jI0TqD10sN%|KVy-o2i^x2f`0^@iL2@)@Em0jAC*AFbq%1E;7NbHG=O-I zIgSS%fV>d65X3vP3wV1Vh8=R}L4dFUA`iUw{5@k%+K04X~H zC=W)1kdq z6gUGM@fJn^d=YT?OvIev^MT*Its1lyczzzv0LU)@@6E?>gRcj^wLsOG2lTh7d?4^K zs1`a;fUQeagOY*sLHIw}5>g5#Y$1jMJ%|9#D#K!g^O*zuWH}-2z;jGpq4r=9Fo>bo zkcR;KuT*;U;(M2)Yb|;Pxf^gUNZGHz??GcB$4d;k z2I7_S2NjRsQ69bL*h=F$ilEI{Hyrs(2iM{Fj>eZ*!0<;5p23m7RPX{E`LhF;bL0;S zoae}22sqD?-|9KfQPg;TvoF+mj_+tZzrFL@HrMC4?+-FSNq`|WS_VgciRMZigKw&Q v6p-I$xt!w=jpw)l#Es^-yW&MV>Y3*mu_=C2@}|^H{WcA)X!-2vB8uy=> delta 14582 zcmeHtd0bUh*ZrEtpj<{L1Wgc85YU5O6mzUFL5$SLf%HX} z50+Nul%6!WU-89*M}IW8(5u@gH2;b8ov|-8pI2~X^Lzy-Ha`VEom|3qKml+erw8-v~cEpppQo|k&c|hO+Yt9o)>o!mq5D3gx>&l>5wE*Q7bhCta~!WBs%hSJ3i`W7Ebn3&>TSa*^bbykSm*T8p6Iz9Z z??BRmWR887neKL_gk(w=bEOm6(jn*1;cRWIb_3Qjlqi;S+dmau9^aY|qrM@;$5lG! 
zd?P@uE7QK4kh_noWl_4DX%e60B=372$|yLHWsQR{<1Y~PG3 zo$-EzC{0T_ce!aO?vis`Y~O@RPPA9;vq*@f5fvt~0N0f$|$BB}~JW{H6?quH?degU=Y!K)~Cb&n8sy=EJDj!j+k0=?p^rEEl zds!A5e;}k5LqWvK19AJ0XABYN-d==cPV!OMmV74ps4TpC1=)6sxv)hj3sx?YjFVcb zv9PV~-TW^!aYf-K0IIyG{}tBk9%8=xBUe8r3o>Ws9^fs!wEsA7uJU%IC|#0;4_Qx- z82URa_DG0}2vgKWsmZx4l#uE@)4il4&Tj3vw7$_*tUQL{KjmyYW{#!Sm&`{+9^Z@h3zU$tepxdFp|ZYLJCaNMSympSM=4dYC|n z^FyBC^*Grlp20frOKg{CQ0RktLh_^=Vp_9RFQ)xe=97FzsYe!mV0S#*=U+fRL=RE5 zEPTS1tFK$zF@qh%Kg}blDS45^hlyoq!9w8h%7iS z@Np52LLH<24hkfnQC3H`z0WwO?q!~fwIT}IN6{Hm*WwW&riGoa-1q`<=2t36* zig^=d!3{~iRj_)eKY`FLRI807C{~?vj!~uWs#1SNddz82rJtzMW_HKeMrZ87jNVC} z?y$9bPuT!jn8NydcWCqXdEOg)1Nw#9JD^0KbRhYt^F2%lTjy=|-1(TdYYlXD-e%{$ zlgn%4pjo-!AXXkWN^*rB}tAlv)FheRO&aadL!|HbEQKu}-LId)VbQdJv@DS~J zb+WJ-BKWY@f6MYth@n)YNQBx4$_Oq++t?e&!MnnT&JW; zc{$0r7zyl6W&55`^XKqF_Mzr{Svbojc43xUc`7Q0Lx8-P?KYV`CPOSrk7VIbcH0yi zuvJjTCm02ic#eI5K^AIRyifS}V5Bo^y5*1D4H z)TpwXrtO|Q+sS_R@k=YkRI63#xS2}(SV4THU%4;KC6&F~mBp7-rnOU*j8KAkyvg0H zvv0u2WC${AOx} zTA8-4!w=~bqbbREPJ_kRJa(bSePxKSjrjP)v4gF zT)DW3=Y4(;BWbhF$`^4!=9i>e&G}Z$EIND6If0G#3l6yZhcd3tiO}SOpmZUPz2ny= z;CC&n$3L^&*&e@y@*BufrmTUt^58Ca!L!U6(#brWqITggUJ@CXk@1Z(MRwy$_Xyk# z&eC(vT?S<(&4S;ucjc?xUWjqp$p_0>&=xhT0_>_YgxH7pm1L$4HHVia+F$(L znfj8B_ZBVf#`wb!Y`9Pk(JpdYr0<`j#~YEHD+jf0_Kk@kzyS!@`KZI(ICVjF9YbiTqegqYUH z@mEq}im1XGM1^U_*Bn-1CT(-2jxU+sCl$-Ad?S7X3C=TF_?meJ#?VHV6nM2fArIC` zm#jhXBoW}9e|J;Xqk$i)-dqq30ZJ#F;$VD?3Y~M{S+Vq9MEX=$q~F93{MISnm_5A? 
z(C95nE*KK4POekjT-w#CzloLU`z>hB8i9g4!8z@IYHKDQ5**F6=j&u)6nD;L@9rmlij-;<=IpOURw)VxquFv8v>EGwu8v15?z5z#pEo>Rj zl+?^wY;m}M@*Ai@wE0u9a$llYxz`}d$E@zKa1eJ5H!v?4g(=Xg1zRlPvgw-G{G^H^1ffpTcU8g_E4O=|{5*u^j?GM`CB7E7^lR!+VC7--U<`UJ$@K$GQ15 zo8VieOSyU`taZwVWX{bDJZHd-uxB-%TUVC&jP=mMUw z!|h|}MfQ99A;ynmkUd^3zJ%k@j0DmE!)sBnpUyX9piPnX!fLI8`tdZFfH;Gf0o#(3ti5}bQ+70_i(3#gyL<4 zJj^mss}~MoA6a+_=|)kkJSpc?|5n!f5VAyDGMwNw7TUQtUBzDN+|6@h6eYPf*Sbo5 zj$?V};jSG}DIcaVPd-)BJyTUB-(l64%aX;^up5XNj~WqCR`N^bLGzc4}B9(CV6exv`5_tpA#Qn$nxT!fvJ{&>pOHY7hTXl)>C>4#ff= zIb7kdu)@?r4Ab7!$=)ZLxjP)a0U46MmZkUTN2jyu9_`DsW!_4ac>(WG7@R2C+DPKZta zBlMK$#rb@idK~wMTuihZ9k5&$x*=7Wz71NW%tKX7fTAoEAY&(QOYz-(+32*L?XD@C zFC6RQ5;*4rU&Uv8D`B80osflnEUjlcwK!(<^rbYxA*HXN^aP9Rok5qd@x9y8xojbR zUuR$SE}+rOD`SlBmu3X`c36#5`!96h9af%^;x~-@Darp2L~7T5;^%8v7W+OUWrPQp z+q{}YgfYY5HWGKP#0e`4AFNSkIvgJ1rXMfr^~gE(l2Y&@(z%u9$E4AW>W`|qO}wlu zxS$y7-ojqU97u<;^_j!46Phz4=l~YpCn_NJCe|!&XM6l@LC!OXh2VuuH`$0jTWKLP zX0`SH=x>;9_c|ilOz@MeXV%Ej`d@Iap(E|Z>ffxPl{2ve1Z8L*{fg*LW_G zqu*wZoN?{%Jwie<{tCFY5OadP@w+Oz31~#n5`95UpJILc#?oVKa^GHoBURC0z;}F2 zB33Bm7Vhsm%lKO@#^baNu?rR_v-C65UF@}fbK=vRU;*s!K?K^9^@WI&O0#E5Jbv#`hh6w^#q%h9&+^=tTr zC;o`_&TSVy04Y(bgLx}Ba}2RUK=K*q-DZ9}ox{p=1L@1GCU>VUbSoRuKZ(v~Z}so# z@%gVfbV6`WWU}M^2fN+;fNw1uGY@DR{%V6(;lCk7ZQ-~*;`ZZ(gBMl^(D4$ zIS90KrSr0ov=mZA*f(+DCqQ=$dudP~UEpf=@t|}>KUW6D(veJ`|02y}h4}?|*7`Po zI6cDr2d8wYU!wG-?VkV)UA!#phNNe!X(1R9Pe?6xgjSh5TwxdwMV7JIgA0O|KR{rd zZ$Nm0ryQmfE$djB{uR47crN{bO?!S!8`F7+l?R4Wl*P#*<+^J3$)1jT&!3=l4*PCs zQtE)qnAbiyP^_VMWzVtz%&VL!-kvHEsU6m9csl>ycJUuFrzEP;BA z2dRAATo!#;{}8b_U;DDKiP?umgdKmDo&yiwc&_sClQMr%$~H$H)^B1*hxMR8GuPqm zbzwiSF2l0}PQA+)kE!YgZvYqHNa++0Nu}XubP;#h=n?UBm%}!~N_D2QOdi=K)l-?b z>ffNg+lU_o@H-l1d6AUmYSU*yEM*yN`d$j-Wz(l}<{~C%3mgB!tKk#zjBr;rl=)(q z3&H269YzZ`}zOkflihA@8GN1>5ffuSbvI*dGSRYZ{NQ7 zA-%y~dZ`57a^Fj5Xf0b>(234w2MU%@c+^o{=+{gdl|;XC>>d?Q=@-oTa-8Wlzn@XVnJ4fgs02Wb#L^X+;_I9@|o6cAJGtr1V4VYSHV#gRT?udOJQT zeh?!>&_;RnG}2!9tpYFs#BEQkF?y)?7vC6uzAB#K0xWW>g0(aFjYTI0%GTy;@g7BX6&27rDcGb~oY%KLXj9|fJ 
zufTv=ivNQeK4m4ZL~y6|N*2v>eE-V7Ty(Ls9o@%&MuXcpu~>hO5cf9e#eW?gDY|&= zYjnftR5>0^h}O}cSj40#1T|Tc_6t9tx2DQvIPnnpTcjr323_7_p_9+k4(#6K>-1fA zVM;XaB~PY&8@i!PIeHOf-J7OxC8@K2v=(8l<+5|7!@Ta#L;PfFbc4X@==NF?jgOv+ zQIHzoE(BS~KmyNhU!0O83^cD_4p1()T-hlx0x7 zs?U7AulI&2XuGMhNiM@H5$;c~hv|HNWTy@VFxNMR`On9epY|_Nx@wW`q~QS?uT_$KBsmi>EX9=+taK64nQlB37$WF6wo$>qUxD_cx}JZCTmT@^Cjn;RiW>FW;}nefaC@opb@4^j0+eo3Xc^qx;$SZ%x*DZDu{@i^h4I zFbUWLyATC$WADx%88V7XaKdbUk6+lM2O?>)#U9p&U14-Vn(@e2xJHgv^LDY^1#NY$ zUuF{*45r)J?ggLfOa+ctZ8vGajZ4_qdgq;!+`WaZKZ-i-`X)A3n(R}AES@((&3lPm zlQO+e^AHwx?18p%BkQtoNSj(Esy*iPe+P9`Mz2Q_TQL@4>YRF*62xj3h6jX0>O5#y zG<>%rlYhWSep{Av zr3bik$-?7dysATu$Uy2hwqQ|5+L>)z6y7JO2G06*eqlp+cuY|%;A}0}Y^gps`LZmy z{)1n}RiK^Hl3C6I7I*Mm^=~-Uzs_20VJjVZi?>qx19M?t=v;@euh|gctLJ$aavDUw z^m>TJ{3Yx^tY}G__H*);-O%_2yzM&tNGZ3LyK-Ccd@>Jt=*$3BA3kkK8~^T#KHisC zL0OdY8W6X$#Y<9YlH=Ht8W&vLikEftHV;GA9(sEPuF?FCxEOQHW3#>I!)Q zUO_KKA+DtY8H@$bD)?N$&BZEhhxTl$I_}*qdKWok(;J}Fh>u2k`BN}>G15N-gL}YV z05R>bb&MtLsuefHBXVmYTPbY3!n61je^=(dvcK1BismV6S6TRpjb16ljMM~vvhZ!T zY6E}yz$=vU+!K#%Sy=7(VC5?6 z`I4UDxV>(_YwjpXc`NyKGG81640xp*qDtyji4P>Xs^pL=83{?bD%qk+3Lz7q(9AUUH-B2RdP#}OowEjD!Hggqzb-f(~MhH;XYNkKozc6g3oq|L){=6g3;z8j zuZCR7%Vb)zUlz6}@kusS%|<+gP}M!7>Y=Hs6bV^a!{&b7h4yETufGcqN!HF)Q`JCa z@8LU%ScxZF>^}V@w#X4e$FL2K6kYJg>=#E{U8^K!*s=lV<(4fm!9R3{rS-RNqgJWF zY`N742E{x-V)wVarK|jqjoQ|ZPGKKy{g!^h3bsX2JFD1sil(#t?Hy4NNp_rhV`jkEj-4h=6^e0T|Z!x-w0v; zJ68M0^W51ud_`QQ)km5nc5Fv?&(`gEBV`CT);o+l*HW6q9N#QTxYQk=H>%h2dBM?m zc|3;ME|_QFa7a63jjL>GwnkZs_aKhh#3t=(N9~S9yXv?TEZRL#hnxH6-LlSWItK9` z4al7U-;Ibe)3p1ESo{!!BZ*2?IO4A(I_V3dr z9Ere|&NioTXA4(A!<7+n#}b$@6Q3Axxj!c2N!>1VW+xB*%l}jCsA-e`TZplVH%?O@zC$N5?zP9r=!W4;}^Qy+^D(O2t&8 zg?;kCGaoQkRf$FV!7O6giwSXYD~bzfay8gRQcl)ocBxY><><2;hX zFL#|;_?e}qOk`Q4gAnp7U?tmkCPC-Zn*Dwzj()@f&!+JE!`T}hPa;mVQ9cm-yMGB@ z9Pp9zb_zo0S;gpO<)h{@FK1ijFf~qK#pmK^s$=1~i4=bJ(fKy?V@JS+X*77+%>R3k ze3`Af_;8qOyz-9#xM!1$Y|L|{EVz54a+^M6G;ia8xc@jB#dsOr6#-r;ym-@Ayz&Hk zDJpUM<>)nF<>+$(UahF5L zIt^xKqOe8rEreO~VHoKGD!i8@V726Ew`$ 
z@MR6H8onJzmG;RRO|V77?=`%x;U5|<(xyo127dZ#L)=3{vxY7CziMmYf`*4Q+@j&% zW`jB;hUl8i{xk?X!n;kI1d_p}V@IeMSE6FdQj)>7{JmCOS0zuNsg%S2V7cJmxE%AZ zK8^5QGN*b_;;0X@YHw7%wr<)Dy&GF9Pit%GM+go+@<&@jgm#3~;UdicLcn+8>80_s zeQU18+um;`5%&xu`eA>@L+*Fh2LSO3EQeq}t-VV@d ztwDEJN?c(;OMdELLefEesMBk%xX{q@maNmbL5&_l$Q`YyuZJt~=-P^SB(*B#Iyw|t zH-ShHDAM?AA|9UeGBO8LMC zxBAC|81ouN2Xt1eEgt-LAeWjKTG8TXr3>f6>43s(PPC%!pNS{oiR)ufc#XT8VrfS= z`tGyboVA3E19h)C;YQn|ACKHf>2R6??WWyn=t$EFp2L8{X{6B~C{~XE$ zhw?mc`VKYJ_?oEyc9V&oYqgyQ(2w0L4WFqe@c$1~PyYNn{kIPcr+;{sj}Wl2@lQD; zfgS;bObn;Q3z!EYZ73NETm{;Mk7#Rv5OZA73xd`&ID}) zUsVof9jF$G^}wr%gzN#|0G!{2kb3Y7fNMd8Xm~x)J4x+<5AZn*aAL|VK7j;BGTkUs&s_9SE_c#h$CU~2#$37pi6kS6e@y%3SVg9JBX5wH(} zF5W;ka5u;&fRa7HnHlP^&jQZLBqRm$3Sc9M5A|(eNguVxTo=JK;8f>8llQ@XCR+y{CeQmBVj0Xwg8WTcmv0Qt}m!{a_p(` z9H+j3`R9!hgn5~uUEoWBCkhDp5qurcZyGaCY04dW4JPJ~#0~lXM z$WXZILSXrq_!)-+n}F$WVDEy@2lkkbwE(^+@SPc|L5qNn3Rn*L7T~G5xF>J$PWYJ3y$VkOQ9=+pyERjNTgz#@=;82tZOFwLMi^ng@jDJ;Tbg!3r?RxKu^D|n7U z@2NfL3VZ|-A%6nwvPA8DSKtm1UkAH@_d&e(4}jg4s%3GhCHWf9F|r2p&kID>@Y|lc zpjH5nFH_wl$K@ZW-f#u*z2$hQN5e~i3s$IS1jn=wVJzf5f!EjIIs=~Lwza4ed@b-- z5TBZ$b*K--r=zqS%$m>O=b^;$3W#Tr&k0!%YJi;Mrj4i=JjZTds69&ox_zng2A~My zWjVIqjI#i}_XmCsN(8?V=;1&Z4&Dp+HmH0Y5(~k+xg9PO{4AgoQ~{pjAGNCd0dUw3 zwXsrQ(9a_~KY^FVz5s(|M~SS+L-IO(vuR!V`r&!9JuX8_NGxG$*(esdOH74ls`(>b*s zAK(;_GVGkc0Hedo;pxCUy~raN!~yk)OhDXGhpol&4^Sld2f(NXwP7=GHYg7A3Sf5mHDn->4IByL3xwm%|Db1(JAtFF zYx@sYXRpu@*6(qImT%`zvYV>&+%oA=QnwN^XB>-KfFPnR}x@I zt(L)&U#Phf$HyA)a#Ow8ayh@@b_a2uV+DvC&GG&1PrFjjVV-fT6IZ9K&R9Kib>Viu Jx9M*#{|kF4kbwXI diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 375b6411af6..4053e5ab8c7 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,6 +1,6 @@ CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for this. 
colorama==0.4.5 -distlib==0.3.5 +distlib==0.3.6 distro==1.7.0 msgpack==1.0.4 packaging==21.3 From 77a0a61722099ab479d29e8e191dea972ac3996f Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 10 Oct 2022 12:20:36 +0100 Subject: [PATCH 125/730] Upgrade pep517 to 0.13.0 --- news/pep517.vendor.rst | 1 + src/pip/_vendor/pep517/__init__.py | 2 +- src/pip/_vendor/pep517/_compat.py | 8 +++ src/pip/_vendor/pep517/build.py | 13 ++--- src/pip/_vendor/pep517/check.py | 16 +++--- src/pip/_vendor/pep517/colorlog.py | 2 - src/pip/_vendor/pep517/compat.py | 51 ----------------- src/pip/_vendor/pep517/dirtools.py | 27 +-------- src/pip/_vendor/pep517/envbuild.py | 15 +++-- src/pip/_vendor/pep517/in_process/__init__.py | 17 ++++-- .../_vendor/pep517/in_process/_in_process.py | 36 ++++-------- src/pip/_vendor/pep517/meta.py | 13 +++-- src/pip/_vendor/pep517/wrappers.py | 55 +++++++------------ src/pip/_vendor/vendor.txt | 2 +- 14 files changed, 86 insertions(+), 172 deletions(-) create mode 100644 news/pep517.vendor.rst create mode 100644 src/pip/_vendor/pep517/_compat.py delete mode 100644 src/pip/_vendor/pep517/compat.py diff --git a/news/pep517.vendor.rst b/news/pep517.vendor.rst new file mode 100644 index 00000000000..e18c1d87bb0 --- /dev/null +++ b/news/pep517.vendor.rst @@ -0,0 +1 @@ +Upgrade pep517 to 0.13.0 diff --git a/src/pip/_vendor/pep517/__init__.py b/src/pip/_vendor/pep517/__init__.py index 2b6b8856790..38ea0f5f11f 100644 --- a/src/pip/_vendor/pep517/__init__.py +++ b/src/pip/_vendor/pep517/__init__.py @@ -1,6 +1,6 @@ """Wrappers to build Python packages using PEP 517 hooks """ -__version__ = '0.12.0' +__version__ = '0.13.0' from .wrappers import * # noqa: F401, F403 diff --git a/src/pip/_vendor/pep517/_compat.py b/src/pip/_vendor/pep517/_compat.py new file mode 100644 index 00000000000..95e509c0143 --- /dev/null +++ b/src/pip/_vendor/pep517/_compat.py @@ -0,0 +1,8 @@ +__all__ = ("tomllib",) + +import sys + +if sys.version_info >= (3, 11): + import tomllib +else: + 
from pip._vendor import tomli as tomllib diff --git a/src/pip/_vendor/pep517/build.py b/src/pip/_vendor/pep517/build.py index bc463b2ba6d..b30909c8704 100644 --- a/src/pip/_vendor/pep517/build.py +++ b/src/pip/_vendor/pep517/build.py @@ -1,15 +1,14 @@ """Build a project using PEP 517 hooks. """ import argparse -import io import logging import os import shutil +import tempfile +from ._compat import tomllib from .envbuild import BuildEnvironment from .wrappers import Pep517HookCaller -from .dirtools import tempdir, mkdir_p -from .compat import FileNotFoundError, toml_load log = logging.getLogger(__name__) @@ -31,8 +30,8 @@ def load_system(source_dir): Load the build system from a source dir (pyproject.toml). """ pyproject = os.path.join(source_dir, 'pyproject.toml') - with io.open(pyproject, 'rb') as f: - pyproject_data = toml_load(f) + with open(pyproject, 'rb') as f: + pyproject_data = tomllib.load(f) return pyproject_data['build-system'] @@ -64,7 +63,7 @@ def _do_build(hooks, env, dist, dest): env.pip_install(reqs) log.info('Installed dynamic build dependencies') - with tempdir() as td: + with tempfile.TemporaryDirectory() as td: log.info('Trying to build %s in %s', dist, td) build_name = 'build_{dist}'.format(**locals()) build = getattr(hooks, build_name) @@ -76,7 +75,7 @@ def _do_build(hooks, env, dist, dest): def build(source_dir, dist, dest=None, system=None): system = system or load_system(source_dir) dest = os.path.join(source_dir, dest or 'dist') - mkdir_p(dest) + os.makedirs(dest, exist_ok=True) validate_system(system) hooks = Pep517HookCaller( diff --git a/src/pip/_vendor/pep517/check.py b/src/pip/_vendor/pep517/check.py index bf3c722641e..b79f6270b40 100644 --- a/src/pip/_vendor/pep517/check.py +++ b/src/pip/_vendor/pep517/check.py @@ -1,19 +1,19 @@ """Check a project and backend by attempting to build using PEP 517 hooks. 
""" import argparse -import io import logging import os -from os.path import isfile, join as pjoin import shutil -from subprocess import CalledProcessError import sys import tarfile -from tempfile import mkdtemp import zipfile +from os.path import isfile +from os.path import join as pjoin +from subprocess import CalledProcessError +from tempfile import mkdtemp +from ._compat import tomllib from .colorlog import enable_colourful_output -from .compat import TOMLDecodeError, toml_load from .envbuild import BuildEnvironment from .wrappers import Pep517HookCaller @@ -142,15 +142,15 @@ def check(source_dir): return False try: - with io.open(pyproject, 'rb') as f: - pyproject_data = toml_load(f) + with open(pyproject, 'rb') as f: + pyproject_data = tomllib.load(f) # Ensure the mandatory data can be loaded buildsys = pyproject_data['build-system'] requires = buildsys['requires'] backend = buildsys['build-backend'] backend_path = buildsys.get('backend-path') log.info('Loaded pyproject.toml') - except (TOMLDecodeError, KeyError): + except (tomllib.TOMLDecodeError, KeyError): log.error("Invalid pyproject.toml", exc_info=True) return False diff --git a/src/pip/_vendor/pep517/colorlog.py b/src/pip/_vendor/pep517/colorlog.py index 69c8a59d3d4..66310a79a99 100644 --- a/src/pip/_vendor/pep517/colorlog.py +++ b/src/pip/_vendor/pep517/colorlog.py @@ -73,8 +73,6 @@ def __init__(self, color=True, datefmt=None): # right conversion in python 3. 
fg_color = (curses.tigetstr("setaf") or curses.tigetstr("setf") or "") - if (3, 0) < sys.version_info < (3, 2, 3): - fg_color = str(fg_color, "ascii") for levelno, code in self.DEFAULT_COLORS.items(): self._colors[levelno] = str( diff --git a/src/pip/_vendor/pep517/compat.py b/src/pip/_vendor/pep517/compat.py deleted file mode 100644 index 730ef5ffaa1..00000000000 --- a/src/pip/_vendor/pep517/compat.py +++ /dev/null @@ -1,51 +0,0 @@ -"""Python 2/3 compatibility""" -import io -import json -import sys - - -# Handle reading and writing JSON in UTF-8, on Python 3 and 2. - -if sys.version_info[0] >= 3: - # Python 3 - def write_json(obj, path, **kwargs): - with open(path, 'w', encoding='utf-8') as f: - json.dump(obj, f, **kwargs) - - def read_json(path): - with open(path, 'r', encoding='utf-8') as f: - return json.load(f) - -else: - # Python 2 - def write_json(obj, path, **kwargs): - with open(path, 'wb') as f: - json.dump(obj, f, encoding='utf-8', **kwargs) - - def read_json(path): - with open(path, 'rb') as f: - return json.load(f) - - -# FileNotFoundError - -try: - FileNotFoundError = FileNotFoundError -except NameError: - FileNotFoundError = IOError - - -if sys.version_info < (3, 6): - from toml import load as _toml_load # noqa: F401 - - def toml_load(f): - w = io.TextIOWrapper(f, encoding="utf8", newline="") - try: - return _toml_load(w) - finally: - w.detach() - - from toml import TomlDecodeError as TOMLDecodeError # noqa: F401 -else: - from pip._vendor.tomli import load as toml_load # noqa: F401 - from pip._vendor.tomli import TOMLDecodeError # noqa: F401 diff --git a/src/pip/_vendor/pep517/dirtools.py b/src/pip/_vendor/pep517/dirtools.py index 58c6ca0c56b..3eff4d801ba 100644 --- a/src/pip/_vendor/pep517/dirtools.py +++ b/src/pip/_vendor/pep517/dirtools.py @@ -1,33 +1,8 @@ -import os import io -import contextlib -import tempfile -import shutil -import errno +import os import zipfile -@contextlib.contextmanager -def tempdir(): - """Create a temporary directory in a 
context manager.""" - td = tempfile.mkdtemp() - try: - yield td - finally: - shutil.rmtree(td) - - -def mkdir_p(*args, **kwargs): - """Like `mkdir`, but does not raise an exception if the - directory already exists. - """ - try: - return os.mkdir(*args, **kwargs) - except OSError as exc: - if exc.errno != errno.EEXIST: - raise - - def dir_to_zipfile(root): """Construct an in-memory zip file for a directory.""" buffer = io.BytesIO() diff --git a/src/pip/_vendor/pep517/envbuild.py b/src/pip/_vendor/pep517/envbuild.py index fe8873c64a9..c0415c4d738 100644 --- a/src/pip/_vendor/pep517/envbuild.py +++ b/src/pip/_vendor/pep517/envbuild.py @@ -1,27 +1,26 @@ """Build wheels/sdists by installing build deps to a temporary environment. """ -import io -import os import logging +import os import shutil -from subprocess import check_call import sys +from subprocess import check_call from sysconfig import get_paths from tempfile import mkdtemp -from .compat import toml_load -from .wrappers import Pep517HookCaller, LoggerWrapper +from ._compat import tomllib +from .wrappers import LoggerWrapper, Pep517HookCaller log = logging.getLogger(__name__) def _load_pyproject(source_dir): - with io.open( + with open( os.path.join(source_dir, 'pyproject.toml'), 'rb', ) as f: - pyproject_data = toml_load(f) + pyproject_data = tomllib.load(f) buildsys = pyproject_data['build-system'] return ( buildsys['requires'], @@ -30,7 +29,7 @@ def _load_pyproject(source_dir): ) -class BuildEnvironment(object): +class BuildEnvironment: """Context manager to install build deps in a simple temporary environment Based on code I wrote for pip, which is MIT licensed. 
diff --git a/src/pip/_vendor/pep517/in_process/__init__.py b/src/pip/_vendor/pep517/in_process/__init__.py index c932313b328..281a356cfe2 100644 --- a/src/pip/_vendor/pep517/in_process/__init__.py +++ b/src/pip/_vendor/pep517/in_process/__init__.py @@ -3,15 +3,24 @@ The subpackage should stay as empty as possible to avoid shadowing modules that the backend might import. """ -from os.path import dirname, abspath, join as pjoin from contextlib import contextmanager +from os.path import abspath, dirname +from os.path import join as pjoin try: import importlib.resources as resources - - def _in_proc_script_path(): - return resources.path(__package__, '_in_process.py') + try: + resources.files + except AttributeError: + # Python 3.8 compatibility + def _in_proc_script_path(): + return resources.path(__package__, '_in_process.py') + else: + def _in_proc_script_path(): + return resources.as_file( + resources.files(__package__).joinpath('_in_process.py')) except ImportError: + # Python 3.6 compatibility @contextmanager def _in_proc_script_path(): yield pjoin(dirname(abspath(__file__)), '_in_process.py') diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pep517/in_process/_in_process.py index 954a4ab05e9..ae4cf9e9cee 100644 --- a/src/pip/_vendor/pep517/in_process/_in_process.py +++ b/src/pip/_vendor/pep517/in_process/_in_process.py @@ -12,41 +12,29 @@ - control_dir/output.json - {"return_val": ...} """ -from glob import glob -from importlib import import_module import json import os import os.path -from os.path import join as pjoin import re import shutil import sys import traceback +from glob import glob +from importlib import import_module +from os.path import join as pjoin -# This file is run as a script, and `import compat` is not zip-safe, so we -# include write_json() and read_json() from compat.py. -# -# Handle reading and writing JSON in UTF-8, on Python 3 and 2. 
+# This file is run as a script, and `import wrappers` is not zip-safe, so we +# include write_json() and read_json() from wrappers.py. -if sys.version_info[0] >= 3: - # Python 3 - def write_json(obj, path, **kwargs): - with open(path, 'w', encoding='utf-8') as f: - json.dump(obj, f, **kwargs) - def read_json(path): - with open(path, 'r', encoding='utf-8') as f: - return json.load(f) +def write_json(obj, path, **kwargs): + with open(path, 'w', encoding='utf-8') as f: + json.dump(obj, f, **kwargs) -else: - # Python 2 - def write_json(obj, path, **kwargs): - with open(path, 'wb') as f: - json.dump(obj, f, encoding='utf-8', **kwargs) - def read_json(path): - with open(path, 'rb') as f: - return json.load(f) +def read_json(path): + with open(path, encoding='utf-8') as f: + return json.load(f) class BackendUnavailable(Exception): @@ -64,7 +52,7 @@ def __init__(self, message): class HookMissing(Exception): """Raised if a hook is missing and we are not executing the fallback""" def __init__(self, hook_name=None): - super(HookMissing, self).__init__(hook_name) + super().__init__(hook_name) self.hook_name = hook_name diff --git a/src/pip/_vendor/pep517/meta.py b/src/pip/_vendor/pep517/meta.py index d525de5c6c8..4afc3c047a7 100644 --- a/src/pip/_vendor/pep517/meta.py +++ b/src/pip/_vendor/pep517/meta.py @@ -1,10 +1,11 @@ """Build metadata for a project using PEP 517 hooks. 
""" import argparse +import functools import logging import os import shutil -import functools +import tempfile try: import importlib.metadata as imp_meta @@ -16,10 +17,10 @@ except ImportError: from zipp import Path +from .build import compat_system, load_system, validate_system +from .dirtools import dir_to_zipfile from .envbuild import BuildEnvironment from .wrappers import Pep517HookCaller, quiet_subprocess_runner -from .dirtools import tempdir, mkdir_p, dir_to_zipfile -from .build import validate_system, load_system, compat_system log = logging.getLogger(__name__) @@ -31,7 +32,7 @@ def _prep_meta(hooks, env, dest): env.pip_install(reqs) log.info('Installed dynamic build dependencies') - with tempdir() as td: + with tempfile.TemporaryDirectory() as td: log.info('Trying to build metadata in %s', td) filename = hooks.prepare_metadata_for_build_wheel(td, {}) source = os.path.join(td, filename) @@ -41,7 +42,7 @@ def _prep_meta(hooks, env, dest): def build(source_dir='.', dest=None, system=None): system = system or load_system(source_dir) dest = os.path.join(source_dir, dest or 'dist') - mkdir_p(dest) + os.makedirs(dest, exist_ok=True) validate_system(system) hooks = Pep517HookCaller( source_dir, system['build-backend'], system.get('backend-path') @@ -54,7 +55,7 @@ def build(source_dir='.', dest=None, system=None): def build_as_zip(builder=build): - with tempdir() as out_dir: + with tempfile.TemporaryDirectory() as out_dir: builder(dest=out_dir) return dir_to_zipfile(out_dir) diff --git a/src/pip/_vendor/pep517/wrappers.py b/src/pip/_vendor/pep517/wrappers.py index e031ed70875..987a62aaa99 100644 --- a/src/pip/_vendor/pep517/wrappers.py +++ b/src/pip/_vendor/pep517/wrappers.py @@ -1,13 +1,13 @@ -import threading -from contextlib import contextmanager +import json import os -from os.path import abspath, join as pjoin -import shutil -from subprocess import check_call, check_output, STDOUT import sys -from tempfile import mkdtemp +import tempfile +import threading 
+from contextlib import contextmanager +from os.path import abspath +from os.path import join as pjoin +from subprocess import STDOUT, check_call, check_output -from . import compat from .in_process import _in_proc_script_path __all__ = [ @@ -21,13 +21,14 @@ ] -@contextmanager -def tempdir(): - td = mkdtemp() - try: - yield td - finally: - shutil.rmtree(td) +def write_json(obj, path, **kwargs): + with open(path, 'w', encoding='utf-8') as f: + json.dump(obj, f, **kwargs) + + +def read_json(path): + with open(path, encoding='utf-8') as f: + return json.load(f) class BackendUnavailable(Exception): @@ -47,7 +48,7 @@ def __init__(self, backend_name, backend_path, message): class HookMissing(Exception): """Will be raised on missing hooks.""" def __init__(self, hook_name): - super(HookMissing, self).__init__(hook_name) + super().__init__(hook_name) self.hook_name = hook_name @@ -99,7 +100,7 @@ def norm_and_check(source_tree, requested): return abs_requested -class Pep517HookCaller(object): +class Pep517HookCaller: """A wrapper around a source directory to be built with a PEP 517 backend. :param source_dir: The path to the source directory, containing @@ -292,29 +293,15 @@ def build_sdist(self, sdist_directory, config_settings=None): }) def _call_hook(self, hook_name, kwargs): - # On Python 2, pytoml returns Unicode values (which is correct) but the - # environment passed to check_call needs to contain string values. We - # convert here by encoding using ASCII (the backend can only contain - # letters, digits and _, . and : characters, and will be used as a - # Python identifier, so non-ASCII content is wrong on Python 2 in - # any case). - # For backend_path, we use sys.getfilesystemencoding. 
- if sys.version_info[0] == 2: - build_backend = self.build_backend.encode('ASCII') - else: - build_backend = self.build_backend - extra_environ = {'PEP517_BUILD_BACKEND': build_backend} + extra_environ = {'PEP517_BUILD_BACKEND': self.build_backend} if self.backend_path: backend_path = os.pathsep.join(self.backend_path) - if sys.version_info[0] == 2: - backend_path = backend_path.encode(sys.getfilesystemencoding()) extra_environ['PEP517_BACKEND_PATH'] = backend_path - with tempdir() as td: + with tempfile.TemporaryDirectory() as td: hook_input = {'kwargs': kwargs} - compat.write_json(hook_input, pjoin(td, 'input.json'), - indent=2) + write_json(hook_input, pjoin(td, 'input.json'), indent=2) # Run the hook in a subprocess with _in_proc_script_path() as script: @@ -325,7 +312,7 @@ def _call_hook(self, hook_name, kwargs): extra_environ=extra_environ ) - data = compat.read_json(pjoin(td, 'output.json')) + data = read_json(pjoin(td, 'output.json')) if data.get('unsupported'): raise UnsupportedOperation(data.get('traceback', '')) if data.get('no_backend'): diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 4053e5ab8c7..f9018960e2c 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -4,7 +4,7 @@ distlib==0.3.6 distro==1.7.0 msgpack==1.0.4 packaging==21.3 -pep517==0.12.0 +pep517==0.13.0 platformdirs==2.5.2 pyparsing==3.0.9 requests==2.28.1 From b922e0f66028701690322aedb4deecf0e401057e Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 10 Oct 2022 12:20:52 +0100 Subject: [PATCH 126/730] Upgrade idna to 3.4 --- news/idna.vendor.rst | 1 + src/pip/_vendor/idna/core.py | 5 +- src/pip/_vendor/idna/idnadata.py | 34 ++- src/pip/_vendor/idna/package_data.py | 2 +- src/pip/_vendor/idna/uts46data.py | 306 +++++++++++++++++---------- src/pip/_vendor/vendor.txt | 2 +- 6 files changed, 228 insertions(+), 122 deletions(-) create mode 100644 news/idna.vendor.rst diff --git a/news/idna.vendor.rst b/news/idna.vendor.rst new file mode 100644 
index 00000000000..087598cbf5c --- /dev/null +++ b/news/idna.vendor.rst @@ -0,0 +1 @@ +Upgrade idna to 3.4 diff --git a/src/pip/_vendor/idna/core.py b/src/pip/_vendor/idna/core.py index 55ab9678850..4f300371102 100644 --- a/src/pip/_vendor/idna/core.py +++ b/src/pip/_vendor/idna/core.py @@ -339,7 +339,10 @@ def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False def encode(s: Union[str, bytes, bytearray], strict: bool = False, uts46: bool = False, std3_rules: bool = False, transitional: bool = False) -> bytes: if isinstance(s, (bytes, bytearray)): - s = s.decode('ascii') + try: + s = s.decode('ascii') + except UnicodeDecodeError: + raise IDNAError('should pass a unicode string to the function rather than a byte string.') if uts46: s = uts46_remap(s, std3_rules, transitional) trailing_dot = False diff --git a/src/pip/_vendor/idna/idnadata.py b/src/pip/_vendor/idna/idnadata.py index 1b5805d15e5..67db4625829 100644 --- a/src/pip/_vendor/idna/idnadata.py +++ b/src/pip/_vendor/idna/idnadata.py @@ -1,6 +1,6 @@ # This file is automatically generated by tools/idna-data -__version__ = '14.0.0' +__version__ = '15.0.0' scripts = { 'Greek': ( 0x37000000374, @@ -55,12 +55,13 @@ 0x16fe200016fe4, 0x16ff000016ff2, 0x200000002a6e0, - 0x2a7000002b739, + 0x2a7000002b73a, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x2f8000002fa1e, 0x300000003134b, + 0x31350000323b0, ), 'Hebrew': ( 0x591000005c8, @@ -77,6 +78,7 @@ 0x304100003097, 0x309d000030a0, 0x1b0010001b120, + 0x1b1320001b133, 0x1b1500001b153, 0x1f2000001f201, ), @@ -93,6 +95,7 @@ 0x1affd0001afff, 0x1b0000001b001, 0x1b1200001b123, + 0x1b1550001b156, 0x1b1640001b168, ), } @@ -1331,7 +1334,7 @@ 0xcdd00000cdf, 0xce000000ce4, 0xce600000cf0, - 0xcf100000cf3, + 0xcf100000cf4, 0xd0000000d0d, 0xd0e00000d11, 0xd1200000d45, @@ -1366,7 +1369,7 @@ 0xeb400000ebe, 0xec000000ec5, 0xec600000ec7, - 0xec800000ece, + 0xec800000ecf, 0xed000000eda, 0xede00000ee0, 0xf0000000f01, @@ -1859,7 +1862,7 @@ 0xab200000ab27, 
0xab280000ab2f, 0xab300000ab5b, - 0xab600000ab6a, + 0xab600000ab69, 0xabc00000abeb, 0xabec0000abee, 0xabf00000abfa, @@ -1943,7 +1946,7 @@ 0x10e8000010eaa, 0x10eab00010ead, 0x10eb000010eb2, - 0x10f0000010f1d, + 0x10efd00010f1d, 0x10f2700010f28, 0x10f3000010f51, 0x10f7000010f86, @@ -1966,7 +1969,7 @@ 0x111dc000111dd, 0x1120000011212, 0x1121300011238, - 0x1123e0001123f, + 0x1123e00011242, 0x1128000011287, 0x1128800011289, 0x1128a0001128e, @@ -2047,11 +2050,16 @@ 0x11d9300011d99, 0x11da000011daa, 0x11ee000011ef7, + 0x11f0000011f11, + 0x11f1200011f3b, + 0x11f3e00011f43, + 0x11f5000011f5a, 0x11fb000011fb1, 0x120000001239a, 0x1248000012544, 0x12f9000012ff1, - 0x130000001342f, + 0x1300000013430, + 0x1344000013456, 0x1440000014647, 0x1680000016a39, 0x16a4000016a5f, @@ -2079,7 +2087,9 @@ 0x1aff50001affc, 0x1affd0001afff, 0x1b0000001b123, + 0x1b1320001b133, 0x1b1500001b153, + 0x1b1550001b156, 0x1b1640001b168, 0x1b1700001b2fc, 0x1bc000001bc6b, @@ -2096,17 +2106,21 @@ 0x1da9b0001daa0, 0x1daa10001dab0, 0x1df000001df1f, + 0x1df250001df2b, 0x1e0000001e007, 0x1e0080001e019, 0x1e01b0001e022, 0x1e0230001e025, 0x1e0260001e02b, + 0x1e0300001e06e, + 0x1e08f0001e090, 0x1e1000001e12d, 0x1e1300001e13e, 0x1e1400001e14a, 0x1e14e0001e14f, 0x1e2900001e2af, 0x1e2c00001e2fa, + 0x1e4d00001e4fa, 0x1e7e00001e7e7, 0x1e7e80001e7ec, 0x1e7ed0001e7ef, @@ -2115,13 +2129,13 @@ 0x1e8d00001e8d7, 0x1e9220001e94c, 0x1e9500001e95a, - 0x1fbf00001fbfa, 0x200000002a6e0, - 0x2a7000002b739, + 0x2a7000002b73a, 0x2b7400002b81e, 0x2b8200002cea2, 0x2ceb00002ebe1, 0x300000003134b, + 0x31350000323b0, ), 'CONTEXTJ': ( 0x200c0000200e, diff --git a/src/pip/_vendor/idna/package_data.py b/src/pip/_vendor/idna/package_data.py index f5ea87c12bd..8501893bd15 100644 --- a/src/pip/_vendor/idna/package_data.py +++ b/src/pip/_vendor/idna/package_data.py @@ -1,2 +1,2 @@ -__version__ = '3.3' +__version__ = '3.4' diff --git a/src/pip/_vendor/idna/uts46data.py b/src/pip/_vendor/idna/uts46data.py index 8f65705ee91..186796c17b2 100644 --- 
a/src/pip/_vendor/idna/uts46data.py +++ b/src/pip/_vendor/idna/uts46data.py @@ -7,7 +7,7 @@ """IDNA Mapping Table from UTS46.""" -__version__ = '14.0.0' +__version__ = '15.0.0' def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ (0x0, '3'), @@ -1300,7 +1300,7 @@ def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xCE6, 'V'), (0xCF0, 'X'), (0xCF1, 'V'), - (0xCF3, 'X'), + (0xCF4, 'X'), (0xD00, 'V'), (0xD0D, 'X'), (0xD0E, 'V'), @@ -1368,7 +1368,7 @@ def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0xEC6, 'V'), (0xEC7, 'X'), (0xEC8, 'V'), - (0xECE, 'X'), + (0xECF, 'X'), (0xED0, 'V'), (0xEDA, 'X'), (0xEDC, 'M', 'ຫນ'), @@ -5917,7 +5917,7 @@ def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x10EAE, 'X'), (0x10EB0, 'V'), (0x10EB2, 'X'), - (0x10F00, 'V'), + (0x10EFD, 'V'), (0x10F28, 'X'), (0x10F30, 'V'), (0x10F5A, 'X'), @@ -5956,7 +5956,7 @@ def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x11200, 'V'), (0x11212, 'X'), (0x11213, 'V'), - (0x1123F, 'X'), + (0x11242, 'X'), (0x11280, 'V'), (0x11287, 'X'), (0x11288, 'V'), @@ -6097,6 +6097,8 @@ def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x11AA3, 'X'), (0x11AB0, 'V'), (0x11AF9, 'X'), + (0x11B00, 'V'), + (0x11B0A, 'X'), (0x11C00, 'V'), (0x11C09, 'X'), (0x11C0A, 'V'), @@ -6139,13 +6141,19 @@ def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x11DAA, 'X'), (0x11EE0, 'V'), (0x11EF9, 'X'), - (0x11FB0, 'V'), - (0x11FB1, 'X'), - (0x11FC0, 'V'), + (0x11F00, 'V'), ] def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: return [ + (0x11F11, 'X'), + (0x11F12, 'V'), + (0x11F3B, 'X'), + (0x11F3E, 'V'), + (0x11F5A, 'X'), + (0x11FB0, 'V'), + (0x11FB1, 'X'), + (0x11FC0, 'V'), (0x11FF2, 'X'), (0x11FFF, 'V'), (0x1239A, 'X'), @@ -6158,7 +6166,9 @@ def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x12F90, 'V'), (0x12FF3, 'X'), (0x13000, 'V'), - (0x1342F, 'X'), + 
(0x13430, 'X'), + (0x13440, 'V'), + (0x13456, 'X'), (0x14400, 'V'), (0x14647, 'X'), (0x16800, 'V'), @@ -6236,6 +6246,10 @@ def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x18D00, 'V'), (0x18D09, 'X'), (0x1AFF0, 'V'), + ] + +def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1AFF4, 'X'), (0x1AFF5, 'V'), (0x1AFFC, 'X'), @@ -6243,13 +6257,13 @@ def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1AFFF, 'X'), (0x1B000, 'V'), (0x1B123, 'X'), + (0x1B132, 'V'), + (0x1B133, 'X'), (0x1B150, 'V'), (0x1B153, 'X'), + (0x1B155, 'V'), + (0x1B156, 'X'), (0x1B164, 'V'), - ] - -def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1B168, 'X'), (0x1B170, 'V'), (0x1B2FC, 'X'), @@ -6295,6 +6309,8 @@ def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D1EB, 'X'), (0x1D200, 'V'), (0x1D246, 'X'), + (0x1D2C0, 'V'), + (0x1D2D4, 'X'), (0x1D2E0, 'V'), (0x1D2F4, 'X'), (0x1D300, 'V'), @@ -6334,6 +6350,10 @@ def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D41E, 'M', 'e'), (0x1D41F, 'M', 'f'), (0x1D420, 'M', 'g'), + ] + +def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D421, 'M', 'h'), (0x1D422, 'M', 'i'), (0x1D423, 'M', 'j'), @@ -6350,10 +6370,6 @@ def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D42E, 'M', 'u'), (0x1D42F, 'M', 'v'), (0x1D430, 'M', 'w'), - ] - -def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D431, 'M', 'x'), (0x1D432, 'M', 'y'), (0x1D433, 'M', 'z'), @@ -6438,6 +6454,10 @@ def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D482, 'M', 'a'), (0x1D483, 'M', 'b'), (0x1D484, 'M', 'c'), + ] + +def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D485, 'M', 'd'), (0x1D486, 'M', 'e'), (0x1D487, 'M', 'f'), @@ -6454,10 +6474,6 @@ def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, 
str]]]: (0x1D492, 'M', 'q'), (0x1D493, 'M', 'r'), (0x1D494, 'M', 's'), - ] - -def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D495, 'M', 't'), (0x1D496, 'M', 'u'), (0x1D497, 'M', 'v'), @@ -6542,6 +6558,10 @@ def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D4E9, 'M', 'z'), (0x1D4EA, 'M', 'a'), (0x1D4EB, 'M', 'b'), + ] + +def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D4EC, 'M', 'c'), (0x1D4ED, 'M', 'd'), (0x1D4EE, 'M', 'e'), @@ -6558,10 +6578,6 @@ def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D4F9, 'M', 'p'), (0x1D4FA, 'M', 'q'), (0x1D4FB, 'M', 'r'), - ] - -def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D4FC, 'M', 's'), (0x1D4FD, 'M', 't'), (0x1D4FE, 'M', 'u'), @@ -6646,6 +6662,10 @@ def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D550, 'M', 'y'), (0x1D551, 'X'), (0x1D552, 'M', 'a'), + ] + +def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D553, 'M', 'b'), (0x1D554, 'M', 'c'), (0x1D555, 'M', 'd'), @@ -6662,10 +6682,6 @@ def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D560, 'M', 'o'), (0x1D561, 'M', 'p'), (0x1D562, 'M', 'q'), - ] - -def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D563, 'M', 'r'), (0x1D564, 'M', 's'), (0x1D565, 'M', 't'), @@ -6750,6 +6766,10 @@ def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D5B4, 'M', 'u'), (0x1D5B5, 'M', 'v'), (0x1D5B6, 'M', 'w'), + ] + +def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D5B7, 'M', 'x'), (0x1D5B8, 'M', 'y'), (0x1D5B9, 'M', 'z'), @@ -6766,10 +6786,6 @@ def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D5C4, 'M', 'k'), (0x1D5C5, 'M', 'l'), (0x1D5C6, 'M', 'm'), - ] - -def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D5C7, 'M', 
'n'), (0x1D5C8, 'M', 'o'), (0x1D5C9, 'M', 'p'), @@ -6854,6 +6870,10 @@ def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D618, 'M', 'q'), (0x1D619, 'M', 'r'), (0x1D61A, 'M', 's'), + ] + +def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D61B, 'M', 't'), (0x1D61C, 'M', 'u'), (0x1D61D, 'M', 'v'), @@ -6870,10 +6890,6 @@ def _seg_65() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D628, 'M', 'g'), (0x1D629, 'M', 'h'), (0x1D62A, 'M', 'i'), - ] - -def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D62B, 'M', 'j'), (0x1D62C, 'M', 'k'), (0x1D62D, 'M', 'l'), @@ -6958,6 +6974,10 @@ def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D67C, 'M', 'm'), (0x1D67D, 'M', 'n'), (0x1D67E, 'M', 'o'), + ] + +def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D67F, 'M', 'p'), (0x1D680, 'M', 'q'), (0x1D681, 'M', 'r'), @@ -6974,10 +6994,6 @@ def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D68C, 'M', 'c'), (0x1D68D, 'M', 'd'), (0x1D68E, 'M', 'e'), - ] - -def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D68F, 'M', 'f'), (0x1D690, 'M', 'g'), (0x1D691, 'M', 'h'), @@ -7062,6 +7078,10 @@ def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D6E2, 'M', 'α'), (0x1D6E3, 'M', 'β'), (0x1D6E4, 'M', 'γ'), + ] + +def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D6E5, 'M', 'δ'), (0x1D6E6, 'M', 'ε'), (0x1D6E7, 'M', 'ζ'), @@ -7078,10 +7098,6 @@ def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D6F2, 'M', 'ρ'), (0x1D6F3, 'M', 'θ'), (0x1D6F4, 'M', 'σ'), - ] - -def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D6F5, 'M', 'τ'), (0x1D6F6, 'M', 'υ'), (0x1D6F7, 'M', 'φ'), @@ -7166,6 +7182,10 @@ def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D747, 'M', 'σ'), 
(0x1D749, 'M', 'τ'), (0x1D74A, 'M', 'υ'), + ] + +def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D74B, 'M', 'φ'), (0x1D74C, 'M', 'χ'), (0x1D74D, 'M', 'ψ'), @@ -7182,10 +7202,6 @@ def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D758, 'M', 'γ'), (0x1D759, 'M', 'δ'), (0x1D75A, 'M', 'ε'), - ] - -def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D75B, 'M', 'ζ'), (0x1D75C, 'M', 'η'), (0x1D75D, 'M', 'θ'), @@ -7270,6 +7286,10 @@ def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D7AD, 'M', 'δ'), (0x1D7AE, 'M', 'ε'), (0x1D7AF, 'M', 'ζ'), + ] + +def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1D7B0, 'M', 'η'), (0x1D7B1, 'M', 'θ'), (0x1D7B2, 'M', 'ι'), @@ -7286,10 +7306,6 @@ def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1D7BE, 'M', 'υ'), (0x1D7BF, 'M', 'φ'), (0x1D7C0, 'M', 'χ'), - ] - -def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1D7C1, 'M', 'ψ'), (0x1D7C2, 'M', 'ω'), (0x1D7C3, 'M', '∂'), @@ -7359,6 +7375,8 @@ def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1DAB0, 'X'), (0x1DF00, 'V'), (0x1DF1F, 'X'), + (0x1DF25, 'V'), + (0x1DF2B, 'X'), (0x1E000, 'V'), (0x1E007, 'X'), (0x1E008, 'V'), @@ -7369,6 +7387,75 @@ def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1E025, 'X'), (0x1E026, 'V'), (0x1E02B, 'X'), + (0x1E030, 'M', 'а'), + (0x1E031, 'M', 'б'), + (0x1E032, 'M', 'в'), + ] + +def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ + (0x1E033, 'M', 'г'), + (0x1E034, 'M', 'д'), + (0x1E035, 'M', 'е'), + (0x1E036, 'M', 'ж'), + (0x1E037, 'M', 'з'), + (0x1E038, 'M', 'и'), + (0x1E039, 'M', 'к'), + (0x1E03A, 'M', 'л'), + (0x1E03B, 'M', 'м'), + (0x1E03C, 'M', 'о'), + (0x1E03D, 'M', 'п'), + (0x1E03E, 'M', 'р'), + (0x1E03F, 'M', 'с'), + (0x1E040, 'M', 'т'), + (0x1E041, 'M', 'у'), + (0x1E042, 'M', 'ф'), + 
(0x1E043, 'M', 'х'), + (0x1E044, 'M', 'ц'), + (0x1E045, 'M', 'ч'), + (0x1E046, 'M', 'ш'), + (0x1E047, 'M', 'ы'), + (0x1E048, 'M', 'э'), + (0x1E049, 'M', 'ю'), + (0x1E04A, 'M', 'ꚉ'), + (0x1E04B, 'M', 'ә'), + (0x1E04C, 'M', 'і'), + (0x1E04D, 'M', 'ј'), + (0x1E04E, 'M', 'ө'), + (0x1E04F, 'M', 'ү'), + (0x1E050, 'M', 'ӏ'), + (0x1E051, 'M', 'а'), + (0x1E052, 'M', 'б'), + (0x1E053, 'M', 'в'), + (0x1E054, 'M', 'г'), + (0x1E055, 'M', 'д'), + (0x1E056, 'M', 'е'), + (0x1E057, 'M', 'ж'), + (0x1E058, 'M', 'з'), + (0x1E059, 'M', 'и'), + (0x1E05A, 'M', 'к'), + (0x1E05B, 'M', 'л'), + (0x1E05C, 'M', 'о'), + (0x1E05D, 'M', 'п'), + (0x1E05E, 'M', 'с'), + (0x1E05F, 'M', 'у'), + (0x1E060, 'M', 'ф'), + (0x1E061, 'M', 'х'), + (0x1E062, 'M', 'ц'), + (0x1E063, 'M', 'ч'), + (0x1E064, 'M', 'ш'), + (0x1E065, 'M', 'ъ'), + (0x1E066, 'M', 'ы'), + (0x1E067, 'M', 'ґ'), + (0x1E068, 'M', 'і'), + (0x1E069, 'M', 'ѕ'), + (0x1E06A, 'M', 'џ'), + (0x1E06B, 'M', 'ҫ'), + (0x1E06C, 'M', 'ꙑ'), + (0x1E06D, 'M', 'ұ'), + (0x1E06E, 'X'), + (0x1E08F, 'V'), + (0x1E090, 'X'), (0x1E100, 'V'), (0x1E12D, 'X'), (0x1E130, 'V'), @@ -7383,6 +7470,8 @@ def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1E2FA, 'X'), (0x1E2FF, 'V'), (0x1E300, 'X'), + (0x1E4D0, 'V'), + (0x1E4FA, 'X'), (0x1E7E0, 'V'), (0x1E7E7, 'X'), (0x1E7E8, 'V'), @@ -7390,10 +7479,6 @@ def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1E7ED, 'V'), (0x1E7EF, 'X'), (0x1E7F0, 'V'), - ] - -def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1E7FF, 'X'), (0x1E800, 'V'), (0x1E8C5, 'X'), @@ -7409,6 +7494,10 @@ def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1E907, 'M', '𞤩'), (0x1E908, 'M', '𞤪'), (0x1E909, 'M', '𞤫'), + ] + +def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1E90A, 'M', '𞤬'), (0x1E90B, 'M', '𞤭'), (0x1E90C, 'M', '𞤮'), @@ -7494,10 +7583,6 @@ def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1EE31, 'M', 
'ص'), (0x1EE32, 'M', 'ق'), (0x1EE33, 'X'), - ] - -def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1EE34, 'M', 'ش'), (0x1EE35, 'M', 'ت'), (0x1EE36, 'M', 'ث'), @@ -7513,6 +7598,10 @@ def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1EE48, 'X'), (0x1EE49, 'M', 'ي'), (0x1EE4A, 'X'), + ] + +def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1EE4B, 'M', 'ل'), (0x1EE4C, 'X'), (0x1EE4D, 'M', 'ن'), @@ -7598,10 +7687,6 @@ def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1EEA3, 'M', 'د'), (0x1EEA4, 'X'), (0x1EEA5, 'M', 'و'), - ] - -def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1EEA6, 'M', 'ز'), (0x1EEA7, 'M', 'ح'), (0x1EEA8, 'M', 'ط'), @@ -7617,6 +7702,10 @@ def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1EEB2, 'M', 'ق'), (0x1EEB3, 'M', 'ر'), (0x1EEB4, 'M', 'ش'), + ] + +def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1EEB5, 'M', 'ت'), (0x1EEB6, 'M', 'ث'), (0x1EEB7, 'M', 'خ'), @@ -7702,10 +7791,6 @@ def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1F141, 'M', 'r'), (0x1F142, 'M', 's'), (0x1F143, 'M', 't'), - ] - -def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1F144, 'M', 'u'), (0x1F145, 'M', 'v'), (0x1F146, 'M', 'w'), @@ -7721,6 +7806,10 @@ def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1F150, 'V'), (0x1F16A, 'M', 'mc'), (0x1F16B, 'M', 'md'), + ] + +def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1F16C, 'M', 'mr'), (0x1F16D, 'V'), (0x1F190, 'M', 'dj'), @@ -7793,23 +7882,19 @@ def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1F266, 'X'), (0x1F300, 'V'), (0x1F6D8, 'X'), - (0x1F6DD, 'V'), + (0x1F6DC, 'V'), (0x1F6ED, 'X'), (0x1F6F0, 'V'), (0x1F6FD, 'X'), (0x1F700, 'V'), - (0x1F774, 'X'), - (0x1F780, 'V'), - (0x1F7D9, 'X'), + (0x1F777, 
'X'), + (0x1F77B, 'V'), + (0x1F7DA, 'X'), (0x1F7E0, 'V'), (0x1F7EC, 'X'), (0x1F7F0, 'V'), (0x1F7F1, 'X'), (0x1F800, 'V'), - ] - -def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x1F80C, 'X'), (0x1F810, 'V'), (0x1F848, 'X'), @@ -7825,24 +7910,24 @@ def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x1FA54, 'X'), (0x1FA60, 'V'), (0x1FA6E, 'X'), + ] + +def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x1FA70, 'V'), - (0x1FA75, 'X'), - (0x1FA78, 'V'), (0x1FA7D, 'X'), (0x1FA80, 'V'), - (0x1FA87, 'X'), + (0x1FA89, 'X'), (0x1FA90, 'V'), - (0x1FAAD, 'X'), - (0x1FAB0, 'V'), - (0x1FABB, 'X'), - (0x1FAC0, 'V'), + (0x1FABE, 'X'), + (0x1FABF, 'V'), (0x1FAC6, 'X'), - (0x1FAD0, 'V'), - (0x1FADA, 'X'), + (0x1FACE, 'V'), + (0x1FADC, 'X'), (0x1FAE0, 'V'), - (0x1FAE8, 'X'), + (0x1FAE9, 'X'), (0x1FAF0, 'V'), - (0x1FAF7, 'X'), + (0x1FAF9, 'X'), (0x1FB00, 'V'), (0x1FB93, 'X'), (0x1FB94, 'V'), @@ -7861,7 +7946,7 @@ def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x20000, 'V'), (0x2A6E0, 'X'), (0x2A700, 'V'), - (0x2B739, 'X'), + (0x2B73A, 'X'), (0x2B740, 'V'), (0x2B81E, 'X'), (0x2B820, 'V'), @@ -7910,10 +7995,6 @@ def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F827, 'M', '勤'), (0x2F828, 'M', '勺'), (0x2F829, 'M', '包'), - ] - -def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2F82A, 'M', '匆'), (0x2F82B, 'M', '北'), (0x2F82C, 'M', '卉'), @@ -7933,6 +8014,10 @@ def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F83C, 'M', '咞'), (0x2F83D, 'M', '吸'), (0x2F83E, 'M', '呈'), + ] + +def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F83F, 'M', '周'), (0x2F840, 'M', '咢'), (0x2F841, 'M', '哶'), @@ -8014,10 +8099,6 @@ def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F88F, 'M', '𪎒'), (0x2F890, 'M', '廾'), (0x2F891, 'M', '𢌱'), - ] - -def _seg_77() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2F893, 'M', '舁'), (0x2F894, 'M', '弢'), (0x2F896, 'M', '㣇'), @@ -8037,6 +8118,10 @@ def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F8A4, 'M', '𢛔'), (0x2F8A5, 'M', '惇'), (0x2F8A6, 'M', '慈'), + ] + +def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F8A7, 'M', '慌'), (0x2F8A8, 'M', '慎'), (0x2F8A9, 'M', '慌'), @@ -8118,10 +8203,6 @@ def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F8F5, 'M', '殺'), (0x2F8F6, 'M', '殻'), (0x2F8F7, 'M', '𣪍'), - ] - -def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2F8F8, 'M', '𡴋'), (0x2F8F9, 'M', '𣫺'), (0x2F8FA, 'M', '汎'), @@ -8141,6 +8222,10 @@ def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F908, 'M', '港'), (0x2F909, 'M', '湮'), (0x2F90A, 'M', '㴳'), + ] + +def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F90B, 'M', '滋'), (0x2F90C, 'M', '滇'), (0x2F90D, 'M', '𣻑'), @@ -8222,10 +8307,6 @@ def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F95B, 'M', '穏'), (0x2F95C, 'M', '𥥼'), (0x2F95D, 'M', '𥪧'), - ] - -def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2F95F, 'X'), (0x2F960, 'M', '䈂'), (0x2F961, 'M', '𥮫'), @@ -8245,6 +8326,10 @@ def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F96F, 'M', '縂'), (0x2F970, 'M', '繅'), (0x2F971, 'M', '䌴'), + ] + +def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F972, 'M', '𦈨'), (0x2F973, 'M', '𦉇'), (0x2F974, 'M', '䍙'), @@ -8326,10 +8411,6 @@ def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F9C0, 'M', '蟡'), (0x2F9C1, 'M', '蠁'), (0x2F9C2, 'M', '䗹'), - ] - -def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ (0x2F9C3, 'M', '衠'), (0x2F9C4, 'M', '衣'), (0x2F9C5, 'M', '𧙧'), @@ -8349,6 +8430,10 @@ def _seg_80() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2F9D3, 'M', '𧲨'), (0x2F9D4, 'M', '貫'), (0x2F9D5, 'M', '賁'), + ] + +def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + return [ (0x2F9D6, 'M', '贛'), (0x2F9D7, 'M', '起'), (0x2F9D8, 'M', '𧼯'), @@ -8423,6 +8508,8 @@ def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: (0x2FA1E, 'X'), (0x30000, 'V'), (0x3134B, 'X'), + (0x31350, 'V'), + (0x323B0, 'X'), (0xE0100, 'I'), (0xE01F0, 'X'), ] @@ -8509,4 +8596,5 @@ def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: + _seg_78() + _seg_79() + _seg_80() + + _seg_81() ) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index f9018960e2c..a1a80610f8a 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -10,7 +10,7 @@ pyparsing==3.0.9 requests==2.28.1 certifi==2022.06.15 chardet==5.0.0 - idna==3.3 + idna==3.4 urllib3==1.26.10 rich==12.5.1 pygments==2.12.0 From 07c86c7947fcb973905116b4af795a7d353eed5c Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 10 Oct 2022 12:21:49 +0100 Subject: [PATCH 127/730] Upgrade tenacity to 8.1.0 --- news/tenacity.vendor.rst | 1 + src/pip/_vendor/tenacity/__init__.py | 2 + src/pip/_vendor/tenacity/retry.py | 27 +++++++++++ src/pip/_vendor/tenacity/wait.py | 71 ++++++++++++++++++++++------ src/pip/_vendor/vendor.txt | 2 +- 5 files changed, 87 insertions(+), 16 deletions(-) create mode 100644 news/tenacity.vendor.rst diff --git a/news/tenacity.vendor.rst b/news/tenacity.vendor.rst new file mode 100644 index 00000000000..45921e5bff1 --- /dev/null +++ b/news/tenacity.vendor.rst @@ -0,0 +1 @@ +Upgrade tenacity to 8.1.0 diff --git a/src/pip/_vendor/tenacity/__init__.py b/src/pip/_vendor/tenacity/__init__.py index 086ad46e1d6..ab3be3bf63d 100644 --- a/src/pip/_vendor/tenacity/__init__.py +++ b/src/pip/_vendor/tenacity/__init__.py @@ -33,6 +33,7 @@ from .retry import retry_any # noqa from .retry 
import retry_if_exception # noqa from .retry import retry_if_exception_type # noqa +from .retry import retry_if_exception_cause_type # noqa from .retry import retry_if_not_exception_type # noqa from .retry import retry_if_not_result # noqa from .retry import retry_if_result # noqa @@ -63,6 +64,7 @@ from .wait import wait_random # noqa from .wait import wait_random_exponential # noqa from .wait import wait_random_exponential as wait_full_jitter # noqa +from .wait import wait_exponential_jitter # noqa # Import all built-in before strategies for easier usage. from .before import before_log # noqa diff --git a/src/pip/_vendor/tenacity/retry.py b/src/pip/_vendor/tenacity/retry.py index 1d727e9b346..9ebeb62d5c9 100644 --- a/src/pip/_vendor/tenacity/retry.py +++ b/src/pip/_vendor/tenacity/retry.py @@ -117,6 +117,33 @@ def __call__(self, retry_state: "RetryCallState") -> bool: return self.predicate(retry_state.outcome.exception()) +class retry_if_exception_cause_type(retry_base): + """Retries if any of the causes of the raised exception is of one or more types. 
+ + The check on the type of the cause of the exception is done recursively (until finding + an exception in the chain that has no `__cause__`) + """ + + def __init__( + self, + exception_types: typing.Union[ + typing.Type[BaseException], + typing.Tuple[typing.Type[BaseException], ...], + ] = Exception, + ) -> None: + self.exception_cause_types = exception_types + + def __call__(self, retry_state: "RetryCallState") -> bool: + if retry_state.outcome.failed: + exc = retry_state.outcome.exception() + while exc is not None: + if isinstance(exc.__cause__, self.exception_cause_types): + return True + exc = exc.__cause__ + + return False + + class retry_if_result(retry_base): """Retries if the result verifies a predicate.""" diff --git a/src/pip/_vendor/tenacity/wait.py b/src/pip/_vendor/tenacity/wait.py index 6ed97a7bcdc..8fdfc8f9d4e 100644 --- a/src/pip/_vendor/tenacity/wait.py +++ b/src/pip/_vendor/tenacity/wait.py @@ -17,12 +17,19 @@ import abc import random import typing +from datetime import timedelta from pip._vendor.tenacity import _utils if typing.TYPE_CHECKING: from pip._vendor.tenacity import RetryCallState +wait_unit_type = typing.Union[int, float, timedelta] + + +def to_seconds(wait_unit: wait_unit_type) -> float: + return float(wait_unit.total_seconds() if isinstance(wait_unit, timedelta) else wait_unit) + class wait_base(abc.ABC): """Abstract base class for wait strategies.""" @@ -44,8 +51,8 @@ def __radd__(self, other: "wait_base") -> typing.Union["wait_combine", "wait_bas class wait_fixed(wait_base): """Wait strategy that waits a fixed amount of time between each retry.""" - def __init__(self, wait: float) -> None: - self.wait_fixed = wait + def __init__(self, wait: wait_unit_type) -> None: + self.wait_fixed = to_seconds(wait) def __call__(self, retry_state: "RetryCallState") -> float: return self.wait_fixed @@ -61,9 +68,9 @@ def __init__(self) -> None: class wait_random(wait_base): """Wait strategy that waits a random amount of time between min/max.""" - 
def __init__(self, min: typing.Union[int, float] = 0, max: typing.Union[int, float] = 1) -> None: # noqa - self.wait_random_min = min - self.wait_random_max = max + def __init__(self, min: wait_unit_type = 0, max: wait_unit_type = 1) -> None: # noqa + self.wait_random_min = to_seconds(min) + self.wait_random_max = to_seconds(max) def __call__(self, retry_state: "RetryCallState") -> float: return self.wait_random_min + (random.random() * (self.wait_random_max - self.wait_random_min)) @@ -113,13 +120,13 @@ class wait_incrementing(wait_base): def __init__( self, - start: typing.Union[int, float] = 0, - increment: typing.Union[int, float] = 100, - max: typing.Union[int, float] = _utils.MAX_WAIT, # noqa + start: wait_unit_type = 0, + increment: wait_unit_type = 100, + max: wait_unit_type = _utils.MAX_WAIT, # noqa ) -> None: - self.start = start - self.increment = increment - self.max = max + self.start = to_seconds(start) + self.increment = to_seconds(increment) + self.max = to_seconds(max) def __call__(self, retry_state: "RetryCallState") -> float: result = self.start + (self.increment * (retry_state.attempt_number - 1)) @@ -142,13 +149,13 @@ class wait_exponential(wait_base): def __init__( self, multiplier: typing.Union[int, float] = 1, - max: typing.Union[int, float] = _utils.MAX_WAIT, # noqa + max: wait_unit_type = _utils.MAX_WAIT, # noqa exp_base: typing.Union[int, float] = 2, - min: typing.Union[int, float] = 0, # noqa + min: wait_unit_type = 0, # noqa ) -> None: self.multiplier = multiplier - self.min = min - self.max = max + self.min = to_seconds(min) + self.max = to_seconds(max) self.exp_base = exp_base def __call__(self, retry_state: "RetryCallState") -> float: @@ -189,3 +196,37 @@ class wait_random_exponential(wait_exponential): def __call__(self, retry_state: "RetryCallState") -> float: high = super().__call__(retry_state=retry_state) return random.uniform(0, high) + + +class wait_exponential_jitter(wait_base): + """Wait strategy that applies exponential 
backoff and jitter. + + It allows for a customized initial wait, maximum wait and jitter. + + This implements the strategy described here: + https://cloud.google.com/storage/docs/retry-strategy + + The wait time is min(initial * (2**n + random.uniform(0, jitter)), maximum) + where n is the retry count. + """ + + def __init__( + self, + initial: float = 1, + max: float = _utils.MAX_WAIT, # noqa + exp_base: float = 2, + jitter: float = 1, + ) -> None: + self.initial = initial + self.max = max + self.exp_base = exp_base + self.jitter = jitter + + def __call__(self, retry_state: "RetryCallState") -> float: + jitter = random.uniform(0, self.jitter) + try: + exp = self.exp_base ** (retry_state.attempt_number - 1) + result = self.initial * exp + jitter + except OverflowError: + result = self.max + return max(0, min(result, self.max)) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index a1a80610f8a..17fa0cd0f1b 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -18,6 +18,6 @@ rich==12.5.1 resolvelib==0.8.1 setuptools==44.0.0 six==1.16.0 -tenacity==8.0.1 +tenacity==8.1.0 tomli==2.0.1 webencodings==0.5.1 From 2e7f88e71d41442e66e97cffdaabd49be3093405 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 10 Oct 2022 02:10:08 +0100 Subject: [PATCH 128/730] Upgrade urllib3 to 1.26.12 --- news/urllib3.vendor.rst | 1 + src/pip/_vendor/urllib3/__init__.py | 17 ++++++ src/pip/_vendor/urllib3/_version.py | 2 +- src/pip/_vendor/urllib3/contrib/pyopenssl.py | 9 ++++ src/pip/_vendor/urllib3/response.py | 54 +++++++++++++++++-- src/pip/_vendor/vendor.txt | 2 +- .../patches/urllib3-disable-brotli.patch | 6 +-- 7 files changed, 83 insertions(+), 8 deletions(-) create mode 100644 news/urllib3.vendor.rst diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst new file mode 100644 index 00000000000..5942d465180 --- /dev/null +++ b/news/urllib3.vendor.rst @@ -0,0 +1 @@ +Upgrade urllib3 to 1.26.12 diff --git 
a/src/pip/_vendor/urllib3/__init__.py b/src/pip/_vendor/urllib3/__init__.py index fe86b59d782..c6fa38212fb 100644 --- a/src/pip/_vendor/urllib3/__init__.py +++ b/src/pip/_vendor/urllib3/__init__.py @@ -19,6 +19,23 @@ from .util.timeout import Timeout from .util.url import get_host +# === NOTE TO REPACKAGERS AND VENDORS === +# Please delete this block, this logic is only +# for urllib3 being distributed via PyPI. +# See: https://github.com/urllib3/urllib3/issues/2680 +try: + import urllib3_secure_extra # type: ignore # noqa: F401 +except ImportError: + pass +else: + warnings.warn( + "'urllib3[secure]' extra is deprecated and will be removed " + "in a future release of urllib3 2.x. Read more in this issue: " + "https://github.com/urllib3/urllib3/issues/2680", + category=DeprecationWarning, + stacklevel=2, + ) + __author__ = "Andrey Petrov (andrey.petrov@shazow.net)" __license__ = "MIT" __version__ = __version__ diff --git a/src/pip/_vendor/urllib3/_version.py b/src/pip/_vendor/urllib3/_version.py index c8ac29d0824..6fbc84b30f2 100644 --- a/src/pip/_vendor/urllib3/_version.py +++ b/src/pip/_vendor/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.10" +__version__ = "1.26.12" diff --git a/src/pip/_vendor/urllib3/contrib/pyopenssl.py b/src/pip/_vendor/urllib3/contrib/pyopenssl.py index 5f1d2d0b7af..528764a0334 100644 --- a/src/pip/_vendor/urllib3/contrib/pyopenssl.py +++ b/src/pip/_vendor/urllib3/contrib/pyopenssl.py @@ -73,11 +73,20 @@ class UnsupportedExtension(Exception): import logging import ssl import sys +import warnings from .. import util from ..packages import six from ..util.ssl_ import PROTOCOL_TLS_CLIENT +warnings.warn( + "'urllib3.contrib.pyopenssl' module is deprecated and will be removed " + "in a future release of urllib3 2.x. 
Read more in this issue: " + "https://github.com/urllib3/urllib3/issues/2680", + category=DeprecationWarning, + stacklevel=2, +) + __all__ = ["inject_into_urllib3", "extract_from_urllib3"] # SNI always works. diff --git a/src/pip/_vendor/urllib3/response.py b/src/pip/_vendor/urllib3/response.py index 776e49dd2b2..4969b70e3ef 100644 --- a/src/pip/_vendor/urllib3/response.py +++ b/src/pip/_vendor/urllib3/response.py @@ -2,6 +2,7 @@ import io import logging +import sys import zlib from contextlib import contextmanager from socket import error as SocketError @@ -9,6 +10,7 @@ brotli = None +from . import util from ._collections import HTTPHeaderDict from .connection import BaseSSLError, HTTPException from .exceptions import ( @@ -475,6 +477,54 @@ def _error_catcher(self): if self._original_response and self._original_response.isclosed(): self.release_conn() + def _fp_read(self, amt): + """ + Read a response with the thought that reading the number of bytes + larger than can fit in a 32-bit int at a time via SSL in some + known cases leads to an overflow error that has to be prevented + if `amt` or `self.length_remaining` indicate that a problem may + happen. + + The known cases: + * 3.8 <= CPython < 3.9.7 because of a bug + https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900. + * urllib3 injected with pyOpenSSL-backed SSL-support. + * CPython < 3.10 only when `amt` does not fit 32-bit int. + """ + assert self._fp + c_int_max = 2 ** 31 - 1 + if ( + ( + (amt and amt > c_int_max) + or (self.length_remaining and self.length_remaining > c_int_max) + ) + and not util.IS_SECURETRANSPORT + and (util.IS_PYOPENSSL or sys.version_info < (3, 10)) + ): + buffer = io.BytesIO() + # Besides `max_chunk_amt` being a maximum chunk size, it + # affects memory overhead of reading a response by this + # method in CPython. + # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum + # chunk size that does not lead to an overflow error, but + # 256 MiB is a compromise. 
+ max_chunk_amt = 2 ** 28 + while amt is None or amt != 0: + if amt is not None: + chunk_amt = min(amt, max_chunk_amt) + amt -= chunk_amt + else: + chunk_amt = max_chunk_amt + data = self._fp.read(chunk_amt) + if not data: + break + buffer.write(data) + del data # to reduce peak memory usage by `max_chunk_amt`. + return buffer.getvalue() + else: + # StringIO doesn't like amt=None + return self._fp.read(amt) if amt is not None else self._fp.read() + def read(self, amt=None, decode_content=None, cache_content=False): """ Similar to :meth:`http.client.HTTPResponse.read`, but with two additional @@ -507,13 +557,11 @@ def read(self, amt=None, decode_content=None, cache_content=False): fp_closed = getattr(self._fp, "closed", False) with self._error_catcher(): + data = self._fp_read(amt) if not fp_closed else b"" if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() if not fp_closed else b"" flush_decoder = True else: cache_content = False - data = self._fp.read(amt) if not fp_closed else b"" if ( amt != 0 and not data ): # Platform-specific: Buggy versions of Python. 
diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 17fa0cd0f1b..733e665d2a7 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -11,7 +11,7 @@ requests==2.28.1 certifi==2022.06.15 chardet==5.0.0 idna==3.4 - urllib3==1.26.10 + urllib3==1.26.12 rich==12.5.1 pygments==2.12.0 typing_extensions==4.3.0 diff --git a/tools/vendoring/patches/urllib3-disable-brotli.patch b/tools/vendoring/patches/urllib3-disable-brotli.patch index 1058ac479ef..7a8eaa306c5 100644 --- a/tools/vendoring/patches/urllib3-disable-brotli.patch +++ b/tools/vendoring/patches/urllib3-disable-brotli.patch @@ -1,8 +1,8 @@ diff --git a/src/pip/_vendor/urllib3/response.py b/src/pip/_vendor/urllib3/response.py -index fdb50ddb2..db259d6ce 100644 +index 01f08eee8..4969b70e3 100644 --- a/src/pip/_vendor/urllib3/response.py +++ b/src/pip/_vendor/urllib3/response.py -@@ -7,13 +7,7 @@ +@@ -8,13 +8,7 @@ from socket import error as SocketError from socket import timeout as SocketTimeout @@ -15,8 +15,8 @@ index fdb50ddb2..db259d6ce 100644 - brotli = None +brotli = None + from . 
import util from ._collections import HTTPHeaderDict - from .connection import BaseSSLError, HTTPException diff --git a/src/pip/_vendor/urllib3/util/request.py b/src/pip/_vendor/urllib3/util/request.py index b574b081e..330766ef4 100644 --- a/src/pip/_vendor/urllib3/util/request.py From 99eab68bf959e4c71c2688e4b1675ce9147ee785 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 10 Oct 2022 02:10:31 +0100 Subject: [PATCH 129/730] Upgrade pygments to 2.13.0 --- news/pygments.vendor.rst | 1 + src/pip/_vendor/pygments/__init__.py | 19 +++--- src/pip/_vendor/pygments/cmdline.py | 9 ++- src/pip/_vendor/pygments/filters/__init__.py | 7 +- .../_vendor/pygments/formatters/__init__.py | 16 +---- .../_vendor/pygments/formatters/_mapping.py | 67 +------------------ src/pip/_vendor/pygments/formatters/img.py | 12 ++-- src/pip/_vendor/pygments/lexers/__init__.py | 20 ++---- src/pip/_vendor/pygments/lexers/_mapping.py | 67 ++----------------- src/pip/_vendor/pygments/lexers/python.py | 27 ++++++-- src/pip/_vendor/pygments/plugin.py | 35 +++++++--- src/pip/_vendor/pygments/styles/__init__.py | 4 ++ src/pip/_vendor/pygments/token.py | 1 + src/pip/_vendor/vendor.txt | 2 +- 14 files changed, 100 insertions(+), 187 deletions(-) create mode 100644 news/pygments.vendor.rst diff --git a/news/pygments.vendor.rst b/news/pygments.vendor.rst new file mode 100644 index 00000000000..0e5f7c580c6 --- /dev/null +++ b/news/pygments.vendor.rst @@ -0,0 +1 @@ +Upgrade pygments to 2.13.0 diff --git a/src/pip/_vendor/pygments/__init__.py b/src/pip/_vendor/pygments/__init__.py index 52ff035dd46..7185e537694 100644 --- a/src/pip/_vendor/pygments/__init__.py +++ b/src/pip/_vendor/pygments/__init__.py @@ -26,7 +26,7 @@ """ from io import StringIO, BytesIO -__version__ = '2.12.0' +__version__ = '2.13.0' __docformat__ = 'restructuredtext' __all__ = ['lex', 'format', 'highlight'] @@ -38,10 +38,10 @@ def lex(code, lexer): """ try: return lexer.get_tokens(code) - except TypeError as err: - if 
(isinstance(err.args[0], str) and - ('unbound method get_tokens' in err.args[0] or - 'missing 1 required positional argument' in err.args[0])): + except TypeError: + # Heuristic to catch a common mistake. + from pip._vendor.pygments.lexer import RegexLexer + if isinstance(lexer, type) and issubclass(lexer, RegexLexer): raise TypeError('lex() argument must be a lexer instance, ' 'not a class') raise @@ -62,10 +62,10 @@ def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builti return realoutfile.getvalue() else: formatter.format(tokens, outfile) - except TypeError as err: - if (isinstance(err.args[0], str) and - ('unbound method format' in err.args[0] or - 'missing 1 required positional argument' in err.args[0])): + except TypeError: + # Heuristic to catch a common mistake. + from pip._vendor.pygments.formatter import Formatter + if isinstance(formatter, type) and issubclass(formatter, Formatter): raise TypeError('format() argument must be a formatter instance, ' 'not a class') raise @@ -80,4 +80,3 @@ def highlight(code, lexer, formatter, outfile=None): it is returned as a string. 
""" return format(lex(code, lexer), formatter, outfile) - diff --git a/src/pip/_vendor/pygments/cmdline.py b/src/pip/_vendor/pygments/cmdline.py index 349c626f196..de73b06b4cf 100644 --- a/src/pip/_vendor/pygments/cmdline.py +++ b/src/pip/_vendor/pygments/cmdline.py @@ -25,7 +25,7 @@ from pip._vendor.pygments.formatters import get_all_formatters, get_formatter_by_name, \ load_formatter_from_file, get_formatter_for_filename, find_formatter_class from pip._vendor.pygments.formatters.terminal import TerminalFormatter -from pip._vendor.pygments.formatters.terminal256 import Terminal256Formatter +from pip._vendor.pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter from pip._vendor.pygments.filters import get_all_filters, find_filter_class from pip._vendor.pygments.styles import get_all_styles, get_style_by_name @@ -445,7 +445,9 @@ def is_only_option(opt): return 1 else: if not fmter: - if '256' in os.environ.get('TERM', ''): + if os.environ.get('COLORTERM','') in ('truecolor', '24bit'): + fmter = TerminalTrueColorFormatter(**parsed_opts) + elif '256' in os.environ.get('TERM', ''): fmter = Terminal256Formatter(**parsed_opts) else: fmter = TerminalFormatter(**parsed_opts) @@ -636,6 +638,9 @@ def main(args=sys.argv): try: return main_inner(parser, argns) + except BrokenPipeError: + # someone closed our stdout, e.g. by quitting a pager. + return 0 except Exception: if argns.v: print(file=sys.stderr) diff --git a/src/pip/_vendor/pygments/filters/__init__.py b/src/pip/_vendor/pygments/filters/__init__.py index 5c99ce2714a..c302a6c0c53 100644 --- a/src/pip/_vendor/pygments/filters/__init__.py +++ b/src/pip/_vendor/pygments/filters/__init__.py @@ -69,13 +69,16 @@ class CodeTagFilter(Filter): `codetags` : list of strings A list of strings that are flagged as code tags. The default is to - highlight ``XXX``, ``TODO``, ``BUG`` and ``NOTE``. + highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``. + + .. 
versionchanged:: 2.13 + Now recognizes ``FIXME`` by default. """ def __init__(self, **options): Filter.__init__(self, **options) tags = get_list_opt(options, 'codetags', - ['XXX', 'TODO', 'BUG', 'NOTE']) + ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE']) self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([ re.escape(tag) for tag in tags if tag ])) diff --git a/src/pip/_vendor/pygments/formatters/__init__.py b/src/pip/_vendor/pygments/formatters/__init__.py index 7023aae4a05..43c4c89aacf 100644 --- a/src/pip/_vendor/pygments/formatters/__init__.py +++ b/src/pip/_vendor/pygments/formatters/__init__.py @@ -11,7 +11,7 @@ import re import sys import types -import fnmatch +from fnmatch import fnmatch from os.path import basename from pip._vendor.pygments.formatters._mapping import FORMATTERS @@ -22,16 +22,6 @@ 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS) _formatter_cache = {} # classes by name -_pattern_cache = {} - - -def _fn_matches(fn, glob): - """Return whether the supplied file name fn matches pattern filename.""" - if glob not in _pattern_cache: - pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) - return pattern.match(fn) - return _pattern_cache[glob].match(fn) - def _load_formatters(module_name): """Load a formatter (and all others in the module too).""" @@ -122,13 +112,13 @@ def get_formatter_for_filename(fn, **options): fn = basename(fn) for modname, name, _, filenames, _ in FORMATTERS.values(): for filename in filenames: - if _fn_matches(fn, filename): + if fnmatch(fn, filename): if name not in _formatter_cache: _load_formatters(modname) return _formatter_cache[name](**options) for cls in find_plugin_formatters(): for filename in cls.filenames: - if _fn_matches(fn, filename): + if fnmatch(fn, filename): return cls(**options) raise ClassNotFound("no formatter found for file name %r" % fn) diff --git a/src/pip/_vendor/pygments/formatters/_mapping.py b/src/pip/_vendor/pygments/formatters/_mapping.py index db1a8d17abf..6e34f960784 
100644 --- a/src/pip/_vendor/pygments/formatters/_mapping.py +++ b/src/pip/_vendor/pygments/formatters/_mapping.py @@ -1,16 +1,5 @@ -""" - pygments.formatters._mapping - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - Formatter mapping definitions. This file is generated by itself. Every time - you change something on a builtin formatter definition, run this script from - the formatters folder to update it. - - Do not alter the FORMATTERS dictionary by hand. - - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" +# Automatically generated by scripts/gen_mapfiles.py. +# DO NOT EDIT BY HAND; run `make mapfiles` instead. FORMATTERS = { 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'), @@ -30,55 +19,5 @@ 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'), 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. 
Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), - 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.') + 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.'), } - -if __name__ == '__main__': # pragma: no cover - import sys - import os - - # lookup formatters - found_formatters = [] - imports = [] - sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..')) - from pip._vendor.pygments.util import docstring_headline - - for root, dirs, files in os.walk('.'): - for filename in files: - if filename.endswith('.py') and not filename.startswith('_'): - module_name = 'pygments.formatters%s.%s' % ( - root[1:].replace('/', '.'), filename[:-3]) - print(module_name) - module = __import__(module_name, None, None, ['']) - for formatter_name in module.__all__: - formatter = getattr(module, formatter_name) - found_formatters.append( - '%r: %r' % (formatter_name, - (module_name, - formatter.name, - tuple(formatter.aliases), - tuple(formatter.filenames), - docstring_headline(formatter)))) - # sort them to make the diff minimal - found_formatters.sort() - - # extract useful sourcecode from this file - with open(__file__) as fp: - content = fp.read() - # replace crnl to nl for Windows. - # - # Note that, originally, contributors should keep nl of master - # repository, for example by using some kind of automatic - # management EOL, like `EolExtension - # `. - content = content.replace("\r\n", "\n") - header = content[:content.find('FORMATTERS = {')] - footer = content[content.find("if __name__ == '__main__':"):] - - # write new file - with open(__file__, 'w') as fp: - fp.write(header) - fp.write('FORMATTERS = {\n %s\n}\n\n' % ',\n '.join(found_formatters)) - fp.write(footer) - - print ('=== %d formatters processed.' 
% len(found_formatters)) diff --git a/src/pip/_vendor/pygments/formatters/img.py b/src/pip/_vendor/pygments/formatters/img.py index 2cc0b2b5bd7..0f36a32ba33 100644 --- a/src/pip/_vendor/pygments/formatters/img.py +++ b/src/pip/_vendor/pygments/formatters/img.py @@ -206,13 +206,17 @@ def get_char_size(self): """ Get the character size. """ - return self.fonts['NORMAL'].getsize('M') + return self.get_text_size('M') def get_text_size(self, text): """ - Get the text size(width, height). + Get the text size (width, height). """ - return self.fonts['NORMAL'].getsize(text) + font = self.fonts['NORMAL'] + if hasattr(font, 'getbbox'): # Pillow >= 9.2.0 + return font.getbbox(text)[2:4] + else: + return font.getsize(text) def get_font(self, bold, oblique): """ @@ -520,7 +524,7 @@ def _create_drawables(self, tokensource): text_fg = self._get_text_color(style), text_bg = self._get_text_bg_color(style), ) - temp_width, temp_hight = self.fonts.get_text_size(temp) + temp_width, _ = self.fonts.get_text_size(temp) linelength += temp_width maxlinelength = max(maxlinelength, linelength) charno += len(temp) diff --git a/src/pip/_vendor/pygments/lexers/__init__.py b/src/pip/_vendor/pygments/lexers/__init__.py index 3f404e4f747..ed69f24ed35 100644 --- a/src/pip/_vendor/pygments/lexers/__init__.py +++ b/src/pip/_vendor/pygments/lexers/__init__.py @@ -11,7 +11,7 @@ import re import sys import types -import fnmatch +from fnmatch import fnmatch from os.path import basename from pip._vendor.pygments.lexers._mapping import LEXERS @@ -28,16 +28,6 @@ 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT) _lexer_cache = {} -_pattern_cache = {} - - -def _fn_matches(fn, glob): - """Return whether the supplied file name fn matches pattern filename.""" - if glob not in _pattern_cache: - pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) - return pattern.match(fn) - return _pattern_cache[glob].match(fn) - def _load_lexers(module_name): """Load a lexer (and all others in 
the module too).""" @@ -169,13 +159,13 @@ def find_lexer_class_for_filename(_fn, code=None): fn = basename(_fn) for modname, name, _, filenames, _ in LEXERS.values(): for filename in filenames: - if _fn_matches(fn, filename): + if fnmatch(fn, filename): if name not in _lexer_cache: _load_lexers(modname) matches.append((_lexer_cache[name], filename)) for cls in find_plugin_lexers(): for filename in cls.filenames: - if _fn_matches(fn, filename): + if fnmatch(fn, filename): matches.append((cls, filename)) if isinstance(code, bytes): @@ -262,11 +252,11 @@ def guess_lexer_for_filename(_fn, _text, **options): matching_lexers = set() for lexer in _iter_lexerclasses(): for filename in lexer.filenames: - if _fn_matches(fn, filename): + if fnmatch(fn, filename): matching_lexers.add(lexer) primary[lexer] = True for filename in lexer.alias_filenames: - if _fn_matches(fn, filename): + if fnmatch(fn, filename): matching_lexers.add(lexer) primary[lexer] = False if not matching_lexers: diff --git a/src/pip/_vendor/pygments/lexers/_mapping.py b/src/pip/_vendor/pygments/lexers/_mapping.py index 44dbfe67717..40dcaa3c778 100644 --- a/src/pip/_vendor/pygments/lexers/_mapping.py +++ b/src/pip/_vendor/pygments/lexers/_mapping.py @@ -1,16 +1,5 @@ -""" - pygments.lexers._mapping - ~~~~~~~~~~~~~~~~~~~~~~~~ - - Lexer mapping definitions. This file is generated by itself. Every time - you change something on a builtin lexer definition, run this script from - the lexers folder to update it. - - Do not alter the LEXERS dictionary by hand. - - :copyright: Copyright 2006-2014, 2016 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" +# Automatically generated by scripts/gen_mapfiles.py. +# DO NOT EDIT BY HAND; run `make mapfiles` instead. 
LEXERS = { 'ABAPLexer': ('pip._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), @@ -103,6 +92,7 @@ 'ColdfusionCFCLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), 'ColdfusionHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), 'ColdfusionLexer': ('pip._vendor.pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), + 'Comal80Lexer': ('pip._vendor.pygments.lexers.comal', 'COMAL-80', ('comal', 'comal80'), ('*.cml', '*.comal'), ()), 'CommonLispLexer': ('pip._vendor.pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)), 'ComponentPascalLexer': ('pip._vendor.pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)), 'CoqLexer': ('pip._vendor.pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), @@ -229,6 +219,7 @@ 'IrcLogsLexer': ('pip._vendor.pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), 'IsabelleLexer': ('pip._vendor.pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)), 'JLexer': ('pip._vendor.pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)), + 'JMESPathLexer': ('pip._vendor.pygments.lexers.jmespath', 'JMESPath', ('jmespath', 'jp'), ('*.jp',), ()), 'JSLTLexer': ('pip._vendor.pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)), 'JagsLexer': ('pip._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), 'JasminLexer': ('pip._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), @@ -462,6 +453,7 @@ 'SourcesListLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()), 'SparqlLexer': ('pip._vendor.pygments.lexers.rdf', 
'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), 'SpiceLexer': ('pip._vendor.pygments.lexers.spice', 'Spice', ('spice', 'spicelang'), ('*.spice',), ('text/x-spice',)), + 'SqlJinjaLexer': ('pip._vendor.pygments.lexers.templates', 'SQL+Jinja', ('sql+jinja',), ('*.sql', '*.sql.j2', '*.sql.jinja2'), ()), 'SqlLexer': ('pip._vendor.pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), 'SqliteConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)), 'SquidConfLexer': ('pip._vendor.pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), @@ -516,7 +508,7 @@ 'VGLLexer': ('pip._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), 'ValaLexer': ('pip._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), 'VbNetAspxLexer': ('pip._vendor.pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), - 'VbNetLexer': ('pip._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')), + 'VbNetLexer': ('pip._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet', 'lobas', 'oobas', 'sobas'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')), 'VelocityHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)), 'VelocityLexer': ('pip._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()), 'VelocityXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), @@ -547,50 +539,3 @@ 'ZigLexer': ('pip._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)), 'apdlexer': ('pip._vendor.pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), 
()), } - -if __name__ == '__main__': # pragma: no cover - import sys - import os - - # lookup lexers - found_lexers = [] - sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..')) - for root, dirs, files in os.walk('.'): - for filename in files: - if filename.endswith('.py') and not filename.startswith('_'): - module_name = 'pygments.lexers%s.%s' % ( - root[1:].replace('/', '.'), filename[:-3]) - print(module_name) - module = __import__(module_name, None, None, ['']) - for lexer_name in module.__all__: - lexer = getattr(module, lexer_name) - found_lexers.append( - '%r: %r' % (lexer_name, - (module_name, - lexer.name, - tuple(lexer.aliases), - tuple(lexer.filenames), - tuple(lexer.mimetypes)))) - # sort them to make the diff minimal - found_lexers.sort() - - # extract useful sourcecode from this file - with open(__file__) as fp: - content = fp.read() - # replace crnl to nl for Windows. - # - # Note that, originally, contributors should keep nl of master - # repository, for example by using some kind of automatic - # management EOL, like `EolExtension - # `. - content = content.replace("\r\n", "\n") - header = content[:content.find('LEXERS = {')] - footer = content[content.find("if __name__ == '__main__':"):] - - # write new file - with open(__file__, 'w') as fp: - fp.write(header) - fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers)) - fp.write(footer) - - print ('=== %d lexers processed.' 
% len(found_lexers)) diff --git a/src/pip/_vendor/pygments/lexers/python.py b/src/pip/_vendor/pygments/lexers/python.py index 6bc7a78b6a4..c24e3c86ef2 100644 --- a/src/pip/_vendor/pygments/lexers/python.py +++ b/src/pip/_vendor/pygments/lexers/python.py @@ -142,7 +142,7 @@ def fstring_rules(ttype): combined('fstringescape', 'dqf')), ("([fF])(')", bygroups(String.Affix, String.Single), combined('fstringescape', 'sqf')), - # raw strings + # raw bytes and strings ('(?i)(rb|br|r)(""")', bygroups(String.Affix, String.Double), 'tdqs'), ("(?i)(rb|br|r)(''')", @@ -152,14 +152,24 @@ def fstring_rules(ttype): ("(?i)(rb|br|r)(')", bygroups(String.Affix, String.Single), 'sqs'), # non-raw strings - ('([uUbB]?)(""")', bygroups(String.Affix, String.Double), + ('([uU]?)(""")', bygroups(String.Affix, String.Double), combined('stringescape', 'tdqs')), - ("([uUbB]?)(''')", bygroups(String.Affix, String.Single), + ("([uU]?)(''')", bygroups(String.Affix, String.Single), combined('stringescape', 'tsqs')), - ('([uUbB]?)(")', bygroups(String.Affix, String.Double), + ('([uU]?)(")', bygroups(String.Affix, String.Double), combined('stringescape', 'dqs')), - ("([uUbB]?)(')", bygroups(String.Affix, String.Single), + ("([uU]?)(')", bygroups(String.Affix, String.Single), combined('stringescape', 'sqs')), + # non-raw bytes + ('([bB])(""")', bygroups(String.Affix, String.Double), + combined('bytesescape', 'tdqs')), + ("([bB])(''')", bygroups(String.Affix, String.Single), + combined('bytesescape', 'tsqs')), + ('([bB])(")', bygroups(String.Affix, String.Double), + combined('bytesescape', 'dqs')), + ("([bB])(')", bygroups(String.Affix, String.Single), + combined('bytesescape', 'sqs')), + (r'[^\S\n]+', Text), include('numbers'), (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator), @@ -343,9 +353,12 @@ def fstring_rules(ttype): include('rfstringescape'), include('stringescape'), ], + 'bytesescape': [ + (r'\\([\\abfnrtv"\']|\n|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape) + ], 'stringescape': [ - 
(r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|' - r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape) + (r'\\(N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8})', String.Escape), + include('bytesescape') ], 'fstrings-single': fstring_rules(String.Single), 'fstrings-double': fstring_rules(String.Double), diff --git a/src/pip/_vendor/pygments/plugin.py b/src/pip/_vendor/pygments/plugin.py index a0431bf720e..3590bee8d29 100644 --- a/src/pip/_vendor/pygments/plugin.py +++ b/src/pip/_vendor/pygments/plugin.py @@ -2,9 +2,12 @@ pygments.plugin ~~~~~~~~~~~~~~~ - Pygments setuptools plugin interface. The methods defined - here also work if setuptools isn't installed but they just - return nothing. + Pygments plugin interface. By default, this tries to use + ``importlib.metadata``, which is in the Python standard + library since Python 3.8, or its ``importlib_metadata`` + backport for earlier versions of Python. It falls back on + ``pkg_resources`` if not found. Finally, if ``pkg_resources`` + is not found either, no plugins are loaded at all. lexer plugins:: @@ -34,6 +37,7 @@ :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ + LEXER_ENTRY_POINT = 'pygments.lexers' FORMATTER_ENTRY_POINT = 'pygments.formatters' STYLE_ENTRY_POINT = 'pygments.styles' @@ -42,11 +46,26 @@ def iter_entry_points(group_name): try: - from pip._vendor import pkg_resources - except (ImportError, OSError): - return [] - - return pkg_resources.iter_entry_points(group_name) + from importlib.metadata import entry_points + except ImportError: + try: + from importlib_metadata import entry_points + except ImportError: + try: + from pip._vendor.pkg_resources import iter_entry_points + except (ImportError, OSError): + return [] + else: + return iter_entry_points(group_name) + groups = entry_points() + if hasattr(groups, 'select'): + # New interface in Python 3.10 and newer versions of the + # importlib_metadata backport. 
+ return groups.select(group=group_name) + else: + # Older interface, deprecated in Python 3.10 and recent + # importlib_metadata, but we need it in Python 3.8 and 3.9. + return groups.get(group_name, []) def find_plugin_lexers(): diff --git a/src/pip/_vendor/pygments/styles/__init__.py b/src/pip/_vendor/pygments/styles/__init__.py index 951ca1794db..44cc0efb086 100644 --- a/src/pip/_vendor/pygments/styles/__init__.py +++ b/src/pip/_vendor/pygments/styles/__init__.py @@ -48,6 +48,7 @@ 'solarized-dark': 'solarized::SolarizedDarkStyle', 'solarized-light': 'solarized::SolarizedLightStyle', 'sas': 'sas::SasStyle', + 'staroffice' : 'staroffice::StarofficeStyle', 'stata': 'stata_light::StataLightStyle', 'stata-light': 'stata_light::StataLightStyle', 'stata-dark': 'stata_dark::StataDarkStyle', @@ -58,6 +59,9 @@ 'dracula': 'dracula::DraculaStyle', 'one-dark': 'onedark::OneDarkStyle', 'lilypond' : 'lilypond::LilyPondStyle', + 'nord': 'nord::NordStyle', + 'nord-darker': 'nord::NordDarkerStyle', + 'github-dark': 'gh_dark::GhDarkStyle' } diff --git a/src/pip/_vendor/pygments/token.py b/src/pip/_vendor/pygments/token.py index 8aee88a8350..e3e565ad591 100644 --- a/src/pip/_vendor/pygments/token.py +++ b/src/pip/_vendor/pygments/token.py @@ -189,6 +189,7 @@ def string_to_tokentype(s): Operator.Word: 'ow', Punctuation: 'p', + Punctuation.Marker: 'pm', Comment: 'c', Comment.Hashbang: 'ch', diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 733e665d2a7..d192f643103 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -13,7 +13,7 @@ requests==2.28.1 idna==3.4 urllib3==1.26.12 rich==12.5.1 - pygments==2.12.0 + pygments==2.13.0 typing_extensions==4.3.0 resolvelib==0.8.1 setuptools==44.0.0 From 4ab07c7e19149c77e9dbe2a03ee5267879d0ee03 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 10 Oct 2022 02:10:46 +0100 Subject: [PATCH 130/730] Upgrade typing_extensions to 4.4.0 --- news/typing_extensions.vendor.rst | 1 + 
src/pip/_vendor/typing_extensions.LICENSE | 20 +-- src/pip/_vendor/typing_extensions.py | 156 ++++++++++++++++++++-- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 160 insertions(+), 19 deletions(-) create mode 100644 news/typing_extensions.vendor.rst diff --git a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst new file mode 100644 index 00000000000..60371890721 --- /dev/null +++ b/news/typing_extensions.vendor.rst @@ -0,0 +1 @@ +Upgrade typing_extensions to 4.4.0 diff --git a/src/pip/_vendor/typing_extensions.LICENSE b/src/pip/_vendor/typing_extensions.LICENSE index 583f9f6e617..1df6b3b8de0 100644 --- a/src/pip/_vendor/typing_extensions.LICENSE +++ b/src/pip/_vendor/typing_extensions.LICENSE @@ -13,12 +13,11 @@ software. In May 2000, Guido and the Python core development team moved to BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations (now Zope -Corporation, see http://www.zope.com). In 2001, the Python Software -Foundation (PSF, see http://www.python.org/psf/) was formed, a -non-profit organization created specifically to own Python-related -Intellectual Property. Zope Corporation is a sponsoring member of -the PSF. +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. All Python releases are Open Source (see http://www.opensource.org for the Open Source Definition). 
Historically, most, but not all, Python @@ -74,8 +73,9 @@ analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are -retained in Python alone or in any derivative version prepared by Licensee. +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make @@ -180,9 +180,9 @@ version prepared by Licensee. Alternately, in lieu of CNRI's License Agreement, Licensee may substitute the following text (omitting the quotes): "Python 1.6.1 is made available subject to the terms and conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following +Python 1.6.1 may be located on the internet using the following unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet +Agreement may also be obtained from a proxy server on the internet using the following URL: http://hdl.handle.net/1895.22/1013". 3. In the event Licensee prepares a derivative work that is based on diff --git a/src/pip/_vendor/typing_extensions.py b/src/pip/_vendor/typing_extensions.py index 4fd8247683e..34199c2a984 100644 --- a/src/pip/_vendor/typing_extensions.py +++ b/src/pip/_vendor/typing_extensions.py @@ -8,9 +8,9 @@ import typing -# Please keep __all__ alphabetized within each category. __all__ = [ # Super-special typing primitives. 
+ 'Any', 'ClassVar', 'Concatenate', 'Final', @@ -20,6 +20,7 @@ 'ParamSpecKwargs', 'Self', 'Type', + 'TypeVar', 'TypeVarTuple', 'Unpack', @@ -60,6 +61,7 @@ 'Literal', 'NewType', 'overload', + 'override', 'Protocol', 'reveal_type', 'runtime', @@ -149,6 +151,37 @@ def _collect_type_vars(types, typevar_types=None): T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. + +if sys.version_info >= (3, 11): + from typing import Any +else: + + class _AnyMeta(type): + def __instancecheck__(self, obj): + if self is Any: + raise TypeError("typing_extensions.Any cannot be used with isinstance()") + return super().__instancecheck__(obj) + + def __repr__(self): + if self is Any: + return "typing_extensions.Any" + return super().__repr__() + + class Any(metaclass=_AnyMeta): + """Special type indicating an unconstrained type. + - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + checks. + """ + def __new__(cls, *args, **kwargs): + if cls is Any: + raise TypeError("Any cannot be instantiated") + return super().__new__(cls, *args, **kwargs) + + ClassVar = typing.ClassVar # On older versions of typing there is an internal class named "Final". @@ -431,7 +464,7 @@ def _no_init(self, *args, **kwargs): if type(self)._is_protocol: raise TypeError('Protocols cannot be instantiated') - class _ProtocolMeta(abc.ABCMeta): + class _ProtocolMeta(abc.ABCMeta): # noqa: B024 # This metaclass is a bit unfortunate and exists only because of the lack # of __instancehook__. 
def __instancecheck__(cls, instance): @@ -1115,6 +1148,44 @@ def __repr__(self): above.""") +class _DefaultMixin: + """Mixin for TypeVarLike defaults.""" + + __slots__ = () + + def __init__(self, default): + if isinstance(default, (tuple, list)): + self.__default__ = tuple((typing._type_check(d, "Default must be a type") + for d in default)) + elif default: + self.__default__ = typing._type_check(default, "Default must be a type") + else: + self.__default__ = None + + +# Add default and infer_variance parameters from PEP 696 and 695 +class TypeVar(typing.TypeVar, _DefaultMixin, _root=True): + """Type variable.""" + + __module__ = 'typing' + + def __init__(self, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=None, infer_variance=False): + super().__init__(name, *constraints, bound=bound, covariant=covariant, + contravariant=contravariant) + _DefaultMixin.__init__(self, default) + self.__infer_variance__ = infer_variance + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + # Python 3.10+ has PEP 612 if hasattr(typing, 'ParamSpecArgs'): ParamSpecArgs = typing.ParamSpecArgs @@ -1179,12 +1250,32 @@ def __eq__(self, other): # 3.10+ if hasattr(typing, 'ParamSpec'): - ParamSpec = typing.ParamSpec + + # Add default Parameter - PEP 696 + class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True): + """Parameter specification variable.""" + + __module__ = 'typing' + + def __init__(self, name, *, bound=None, covariant=False, contravariant=False, + default=None): + super().__init__(name, bound=bound, covariant=covariant, + contravariant=contravariant) + _DefaultMixin.__init__(self, default) + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + 
self.__module__ = def_mod + # 3.7-3.9 else: # Inherits from list as a workaround for Callable checks in Python < 3.9.2. - class ParamSpec(list): + class ParamSpec(list, _DefaultMixin): """Parameter specification variable. Usage:: @@ -1242,7 +1333,8 @@ def args(self): def kwargs(self): return ParamSpecKwargs(self) - def __init__(self, name, *, bound=None, covariant=False, contravariant=False): + def __init__(self, name, *, bound=None, covariant=False, contravariant=False, + default=None): super().__init__([self]) self.__name__ = name self.__covariant__ = bool(covariant) @@ -1251,6 +1343,7 @@ def __init__(self, name, *, bound=None, covariant=False, contravariant=False): self.__bound__ = typing._type_check(bound, 'Bound must be a type.') else: self.__bound__ = None + _DefaultMixin.__init__(self, default) # for pickling: try: @@ -1752,9 +1845,25 @@ def _is_unpack(obj): if hasattr(typing, "TypeVarTuple"): # 3.11+ - TypeVarTuple = typing.TypeVarTuple + + # Add default Parameter - PEP 696 + class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True): + """Type variable tuple.""" + + def __init__(self, name, *, default=None): + super().__init__(name) + _DefaultMixin.__init__(self, default) + + # for pickling: + try: + def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): + def_mod = None + if def_mod != 'typing_extensions': + self.__module__ = def_mod + else: - class TypeVarTuple: + class TypeVarTuple(_DefaultMixin): """Type variable tuple. 
Usage:: @@ -1804,8 +1913,9 @@ def get_shape(self) -> Tuple[*Ts]: def __iter__(self): yield self.__unpacked__ - def __init__(self, name): + def __init__(self, name, *, default=None): self.__name__ = name + _DefaultMixin.__init__(self, default) # for pickling: try: @@ -1968,6 +2078,36 @@ def decorator(cls_or_fn): return decorator +if hasattr(typing, "override"): + override = typing.override +else: + _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) + + def override(__arg: _F) -> _F: + """Indicate that a method is intended to override a method in a base class. + + Usage: + + class Base: + def method(self) -> None: ... + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + + When this decorator is applied to a method, the type checker will + validate that it overrides a method with the same name on a base class. + This helps prevent bugs that may occur when a base class is changed + without an equivalent change to a child class. + + See PEP 698 for details. + + """ + return __arg + + # We have to do some monkey patching to deal with the dual nature of # Unpack/TypeVarTuple: # - We want Unpack to be a kind of TypeVar so it gets accepted in diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index d192f643103..b36575c988b 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -14,7 +14,7 @@ requests==2.28.1 urllib3==1.26.12 rich==12.5.1 pygments==2.13.0 - typing_extensions==4.3.0 + typing_extensions==4.4.0 resolvelib==0.8.1 setuptools==44.0.0 six==1.16.0 From 2a0552ac61ee26df04e08e21943a1e36aa880db1 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 10 Oct 2022 02:03:20 +0100 Subject: [PATCH 131/730] Replace complex certifi patch with a more targetted requests patch This should have the same final functionality, with a cleaner patch to requests instead of an exception-based complex patch to certifi. 
--- src/pip/_vendor/certifi/core.py | 18 +----------- src/pip/_vendor/requests/certs.py | 9 +++++- tools/vendoring/patches/certifi.patch | 40 ++++++-------------------- tools/vendoring/patches/requests.patch | 20 +++++++++++++ 4 files changed, 38 insertions(+), 49 deletions(-) diff --git a/src/pip/_vendor/certifi/core.py b/src/pip/_vendor/certifi/core.py index f34045b5270..60ad982c6c1 100644 --- a/src/pip/_vendor/certifi/core.py +++ b/src/pip/_vendor/certifi/core.py @@ -8,21 +8,7 @@ import types from typing import Union - -class _PipPatchedCertificate(Exception): - pass - - try: - # Return a certificate file on disk for a standalone pip zipapp running in - # an isolated build environment to use. Passing --cert to the standalone - # pip does not work since requests calls where() unconditionally on import. - _PIP_STANDALONE_CERT = os.environ.get("_PIP_STANDALONE_CERT") - if _PIP_STANDALONE_CERT: - def where(): - return _PIP_STANDALONE_CERT - raise _PipPatchedCertificate() - from importlib.resources import path as get_path, read_text _CACERT_CTX = None @@ -52,8 +38,6 @@ def where() -> str: return _CACERT_PATH -except _PipPatchedCertificate: - pass except ImportError: Package = Union[types.ModuleType, str] @@ -81,4 +65,4 @@ def where() -> str: def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") + return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") diff --git a/src/pip/_vendor/requests/certs.py b/src/pip/_vendor/requests/certs.py index 2743144b994..38696a1fb34 100644 --- a/src/pip/_vendor/requests/certs.py +++ b/src/pip/_vendor/requests/certs.py @@ -11,7 +11,14 @@ environment, you can change the definition of where() to return a separately packaged CA bundle. 
""" -from pip._vendor.certifi import where + +import os + +if "_PIP_STANDALONE_CERT" not in os.environ: + from pip._vendor.certifi import where +else: + def where(): + return os.environ["_PIP_STANDALONE_CERT"] if __name__ == "__main__": print(where()) diff --git a/tools/vendoring/patches/certifi.patch b/tools/vendoring/patches/certifi.patch index 31554505c05..cc241a27684 100644 --- a/tools/vendoring/patches/certifi.patch +++ b/tools/vendoring/patches/certifi.patch @@ -1,41 +1,19 @@ diff --git a/src/pip/_vendor/certifi/core.py b/src/pip/_vendor/certifi/core.py -index 497d938..f34045b 100644 +index 497d938d0..60ad982c6 100644 --- a/src/pip/_vendor/certifi/core.py +++ b/src/pip/_vendor/certifi/core.py -@@ -8,7 +8,21 @@ import os - import types - from typing import Union - -+ -+class _PipPatchedCertificate(Exception): -+ pass -+ -+ - try: -+ # Return a certificate file on disk for a standalone pip zipapp running in -+ # an isolated build environment to use. Passing --cert to the standalone -+ # pip does not work since requests calls where() unconditionally on import. -+ _PIP_STANDALONE_CERT = os.environ.get("_PIP_STANDALONE_CERT") -+ if _PIP_STANDALONE_CERT: -+ def where(): -+ return _PIP_STANDALONE_CERT -+ raise _PipPatchedCertificate() -+ - from importlib.resources import path as get_path, read_text - - _CACERT_CTX = None -@@ -33,11 +47,13 @@ try: +@@ -33,7 +33,7 @@ def where() -> str: # We also have to hold onto the actual context manager, because # it will do the cleanup whenever it gets garbage collected, so # we will also store that at the global level as well. 
- _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") _CACERT_PATH = str(_CACERT_CTX.__enter__()) - + return _CACERT_PATH - -+except _PipPatchedCertificate: -+ pass - - except ImportError: - Package = Union[types.ModuleType, str] +@@ -65,4 +65,4 @@ def where() -> str: + + + def contents() -> str: +- return read_text("certifi", "cacert.pem", encoding="ascii") ++ return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") diff --git a/tools/vendoring/patches/requests.patch b/tools/vendoring/patches/requests.patch index f9c722dfe7e..596b729c0b9 100644 --- a/tools/vendoring/patches/requests.patch +++ b/tools/vendoring/patches/requests.patch @@ -123,3 +123,23 @@ index 8fbcd656..c5e9c19e 100644 try: import chardet +diff --git a/src/pip/_vendor/requests/certs.py b/src/pip/_vendor/requests/certs.py +index 2743144b9..38696a1fb 100644 +--- a/src/pip/_vendor/requests/certs.py ++++ b/src/pip/_vendor/requests/certs.py +@@ -11,7 +11,14 @@ + environment, you can change the definition of where() to return a separately + packaged CA bundle. 
+ """ +-from certifi import where ++ ++import os ++ ++if "_PIP_STANDALONE_CERT" not in os.environ: ++ from certifi import where ++else: ++ def where(): ++ return os.environ["_PIP_STANDALONE_CERT"] + + if __name__ == "__main__": + print(where()) From 1b7e5ef34f926f33fa7932239229220dd65eb7a6 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 10 Oct 2022 02:08:04 +0100 Subject: [PATCH 132/730] Upgrade certifi to 2022.9.24 --- news/certifi.vendor.rst | 1 + src/pip/_vendor/certifi/LICENSE | 2 +- src/pip/_vendor/certifi/__init__.py | 2 +- src/pip/_vendor/certifi/cacert.pem | 101 ++++++++++++++++---------- src/pip/_vendor/certifi/core.py | 58 ++++++++++++--- src/pip/_vendor/vendor.txt | 2 +- tools/vendoring/patches/certifi.patch | 41 ++++++++--- 7 files changed, 147 insertions(+), 60 deletions(-) create mode 100644 news/certifi.vendor.rst diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst new file mode 100644 index 00000000000..7477fcc2a24 --- /dev/null +++ b/news/certifi.vendor.rst @@ -0,0 +1 @@ +Upgrade certifi to 2022.9.24 diff --git a/src/pip/_vendor/certifi/LICENSE b/src/pip/_vendor/certifi/LICENSE index c2fda9a2642..0a64774eabe 100644 --- a/src/pip/_vendor/certifi/LICENSE +++ b/src/pip/_vendor/certifi/LICENSE @@ -6,7 +6,7 @@ Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# This is a bundle of X.509 certificates of public Certificate Authorities (CA). These were automatically extracted from Mozilla's root certificates file (certdata.txt). This file can be found in the mozilla source tree: -http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt It contains the certificates in PEM format and therefore can be directly used with curl / libcurl / php_curl, or with an Apache+mod_ssl webserver for SSL client authentication. 
diff --git a/src/pip/_vendor/certifi/__init__.py b/src/pip/_vendor/certifi/__init__.py index bdeb06beedf..af4bcc1510f 100644 --- a/src/pip/_vendor/certifi/__init__.py +++ b/src/pip/_vendor/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2022.06.15" +__version__ = "2022.09.24" diff --git a/src/pip/_vendor/certifi/cacert.pem b/src/pip/_vendor/certifi/cacert.pem index ee9be4cb3bb..40051551137 100644 --- a/src/pip/_vendor/certifi/cacert.pem +++ b/src/pip/_vendor/certifi/cacert.pem @@ -1323,45 +1323,6 @@ t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 -----END CERTIFICATE----- -# Issuer: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes -# Subject: CN=EC-ACC O=Agencia Catalana de Certificacio (NIF Q-0801176-I) OU=Serveis Publics de Certificacio/Vegeu https://www.catcert.net/verarrel (c)03/Jerarquia Entitats de Certificacio Catalanes -# Label: "EC-ACC" -# Serial: -23701579247955709139626555126524820479 -# MD5 Fingerprint: eb:f5:9d:29:0d:61:f9:42:1f:7c:c2:ba:6d:e3:15:09 -# SHA1 Fingerprint: 28:90:3a:63:5b:52:80:fa:e6:77:4c:0b:6d:a7:d6:ba:a6:4a:f2:e8 -# SHA256 Fingerprint: 88:49:7f:01:60:2f:31:54:24:6a:e2:8c:4d:5a:ef:10:f1:d8:7e:bb:76:62:6f:4a:e0:b7:f9:5b:a7:96:87:99 ------BEGIN CERTIFICATE----- -MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB -8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy -dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1 -YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3 -dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh -IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD -LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG -EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g 
-KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD -ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu -bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg -ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R -85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm -4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV -HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd -QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t -lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB -o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4 -opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo -dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW -ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN -AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y -/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k -SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy -Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS -Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl -nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI= ------END CERTIFICATE----- - # Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 # Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 # Label: "Actalis Authentication Root CA" @@ -4683,3 +4644,65 @@ ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6 7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx vmjkI6TZraE3 -----END CERTIFICATE----- + +# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. 
+# Label: "Security Communication RootCA3" +# Serial: 16247922307909811815 +# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 +# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a +# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 +-----BEGIN CERTIFICATE----- +MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV +BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw +JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 +MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc +U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg +Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC +CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r +CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA +lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG +TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 +9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 +8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 +g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we +GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst ++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M +0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ +T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw +HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP +BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS +YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA +FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd +9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI +UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ +OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke 
+gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf +iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV +nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD +2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// +1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad +TdJ0MN1kURXbg4NR16/9M51NZg== +-----END CERTIFICATE----- + +# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. +# Label: "Security Communication ECC RootCA1" +# Serial: 15446673492073852651 +# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 +# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 +# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 +-----BEGIN CERTIFICATE----- +MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT +AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD +VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx +NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT +HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 +IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi +AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl +dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK +ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E +BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu +9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O +be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= +-----END CERTIFICATE----- diff --git a/src/pip/_vendor/certifi/core.py b/src/pip/_vendor/certifi/core.py index 60ad982c6c1..c3e546604c8 100644 --- a/src/pip/_vendor/certifi/core.py +++ b/src/pip/_vendor/certifi/core.py @@ -4,12 +4,12 @@ This module returns the installation location 
of cacert.pem or its contents. """ -import os -import types -from typing import Union +import sys -try: - from importlib.resources import path as get_path, read_text + +if sys.version_info >= (3, 11): + + from importlib.resources import as_file, files _CACERT_CTX = None _CACERT_PATH = None @@ -33,13 +33,54 @@ def where() -> str: # We also have to hold onto the actual context manager, because # it will do the cleanup whenever it gets garbage collected, so # we will also store that at the global level as well. + _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: + return files("pip._vendor.certifi").joinpath("cacert.pem").read_text(encoding="ascii") + +elif sys.version_info >= (3, 7): + + from importlib.resources import path as get_path, read_text + + _CACERT_CTX = None + _CACERT_PATH = None + + def where() -> str: + # This is slightly terrible, but we want to delay extracting the + # file in cases where we're inside of a zipimport situation until + # someone actually calls where(), but we don't want to re-extract + # the file on every call of where(), so we'll do it once then store + # it in a global variable. + global _CACERT_CTX + global _CACERT_PATH + if _CACERT_PATH is None: + # This is slightly janky, the importlib.resources API wants you + # to manage the cleanup of this file, so it doesn't actually + # return a path, it returns a context manager that will give + # you the path when you enter it and will do any cleanup when + # you leave it. In the common case of not needing a temporary + # file, it will just return the file system location and the + # __exit__() is a no-op. + # + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. 
_CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") _CACERT_PATH = str(_CACERT_CTX.__enter__()) return _CACERT_PATH + def contents() -> str: + return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") + +else: + import os + import types + from typing import Union -except ImportError: Package = Union[types.ModuleType, str] Resource = Union[str, "os.PathLike"] @@ -63,6 +104,5 @@ def where() -> str: return os.path.join(f, "cacert.pem") - -def contents() -> str: - return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") + def contents() -> str: + return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 17fa0cd0f1b..d14f1661cb4 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -8,7 +8,7 @@ pep517==0.13.0 platformdirs==2.5.2 pyparsing==3.0.9 requests==2.28.1 - certifi==2022.06.15 + certifi==2022.9.24 chardet==5.0.0 idna==3.4 urllib3==1.26.10 diff --git a/tools/vendoring/patches/certifi.patch b/tools/vendoring/patches/certifi.patch index cc241a27684..4f03c62fbde 100644 --- a/tools/vendoring/patches/certifi.patch +++ b/tools/vendoring/patches/certifi.patch @@ -1,19 +1,42 @@ diff --git a/src/pip/_vendor/certifi/core.py b/src/pip/_vendor/certifi/core.py -index 497d938d0..60ad982c6 100644 +index de028981b..c3e546604 100644 --- a/src/pip/_vendor/certifi/core.py +++ b/src/pip/_vendor/certifi/core.py -@@ -33,7 +33,7 @@ def where() -> str: +@@ -33,13 +33,13 @@ def where() -> str: + # We also have to hold onto the actual context manager, because + # it will do the cleanup whenever it gets garbage collected, so + # we will also store that at the global level as well. 
+- _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) ++ _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem")) + _CACERT_PATH = str(_CACERT_CTX.__enter__()) + + return _CACERT_PATH + + def contents() -> str: +- return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") ++ return files("pip._vendor.certifi").joinpath("cacert.pem").read_text(encoding="ascii") + + elif sys.version_info >= (3, 7): + +@@ -68,13 +68,13 @@ def where() -> str: # We also have to hold onto the actual context manager, because # it will do the cleanup whenever it gets garbage collected, so # we will also store that at the global level as well. - _CACERT_CTX = get_path("certifi", "cacert.pem") + _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem") _CACERT_PATH = str(_CACERT_CTX.__enter__()) - + return _CACERT_PATH -@@ -65,4 +65,4 @@ def where() -> str: - - - def contents() -> str: -- return read_text("certifi", "cacert.pem", encoding="ascii") -+ return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") + + def contents() -> str: +- return read_text("certifi", "cacert.pem", encoding="ascii") ++ return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") + + else: + import os +@@ -105,4 +105,4 @@ def where() -> str: + return os.path.join(f, "cacert.pem") + + def contents() -> str: +- return read_text("certifi", "cacert.pem", encoding="ascii") ++ return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii") From 4e48bbc31cf34b1b4ccd100a787d1204ddb8866b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 14 Oct 2022 11:14:23 +0100 Subject: [PATCH 133/730] Move check-manifest to a CI check This makes local runs of `nox -s lint` quicker while still providing the relevant protections for checking the MANIFEST file. 
--- .github/workflows/ci.yml | 1 + .pre-commit-config.yaml | 6 ------ 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d3c64ee40a3..362f712b1ef 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,6 +70,7 @@ jobs: - run: pip install nox - run: nox -s prepare-release -- 99.9 - run: nox -s build-release -- 99.9 + - run: pipx run check-manifest vendoring: name: vendoring diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 366d859dba5..098f3bfe7b0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -73,12 +73,6 @@ repos: exclude: ^news/(.gitignore|.*\.(process|removal|feature|bugfix|vendor|doc|trivial).rst) files: ^news/ -- repo: https://github.com/mgedmin/check-manifest - rev: '0.48' - hooks: - - id: check-manifest - args: [--no-build-isolation] - ci: autofix_prs: false autoupdate_commit_msg: 'pre-commit autoupdate' From 65c23fa99d19af8ebd375e7129213794dce4b4b2 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 15 Oct 2022 10:25:56 +0100 Subject: [PATCH 134/730] Unnormalise the certifi version --- src/pip/_vendor/vendor.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index d14f1661cb4..3abc6cb1190 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -8,7 +8,7 @@ pep517==0.13.0 platformdirs==2.5.2 pyparsing==3.0.9 requests==2.28.1 - certifi==2022.9.24 + certifi==2022.09.24 chardet==5.0.0 idna==3.4 urllib3==1.26.10 From 25638287f8b8bd571a10c4f5ae1b7f4eae454dcc Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 15 Oct 2022 11:59:13 +0100 Subject: [PATCH 135/730] Update AUTHORS.txt --- AUTHORS.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/AUTHORS.txt b/AUTHORS.txt index 3db1a3c7362..007454f8b20 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -137,6 +137,7 @@ Christopher Hunt Christopher Snyder cjc7373 Clark Boylan +Claudio 
Jolowicz Clay McClure Cody Cody Soyland @@ -234,6 +235,8 @@ Erik Rose Erwin Janssen Eugene Vereshchagin everdimension +Federico +Felipe Peter Felix Yan fiber-space Filip Kokosiński @@ -275,6 +278,7 @@ Hari Charan Harsh Vardhan harupy Harutaka Kawamura +hauntsaninja Henrich Hartzer Henry Schreiner Herbert Pfennig @@ -299,6 +303,7 @@ Ionel Maries Cristian Ivan Pozdeev Jacob Kim Jacob Walls +Jaime Sanz jakirkham Jakub Stasiak Jakub Vysoky @@ -393,6 +398,7 @@ Luo Jiebin luojiebin luz.paz László Kiss Kollár +M00nL1ght Marc Abramowitz Marc Tamlyn Marcus Smith @@ -545,6 +551,7 @@ Reece Dunham Remi Rampin Rene Dudfield Riccardo Magliocchetti +Riccardo Schirone Richard Jones Richard Si Ricky Ng-Adam From c8ae2800186a0b499d5717a64b267460a6d51f6a Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 15 Oct 2022 11:59:14 +0100 Subject: [PATCH 136/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 168abe86186..adc0eb10d00 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "22.3" +__version__ = "23.0.dev0" def main(args: Optional[List[str]] = None) -> int: From 0a76da3a94130fad58b086e331c3d3e1b02a89eb Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 15 Oct 2022 11:59:14 +0100 Subject: [PATCH 137/730] Bump for release --- NEWS.rst | 56 +++++++++++++++++++ news/10716.feature.rst | 1 - news/11111.feature.rst | 1 - news/11250.feature.rst | 1 - news/11254.trivial.rst | 0 news/11276.bugfix.rst | 2 - news/11309.bugfix.rst | 1 - news/11320.feature.rst | 2 - news/11352.bugfix.rst | 2 - news/11357.doc.rst | 1 - news/11358.removal.rst | 2 - news/11418.trivial.rst | 1 - news/11452.removal.rst | 2 - news/11454.removal.rst | 1 - news/11459.feature.rst | 1 - news/11491.bugfix.rst | 1 - news/11493.removal.rst | 1 - news/5444.feature.rst | 1 - ...4E-E089-4CDB-857A-868BA1F7435D.trivial.rst | 0 
...86-1229-45ef-8a8a-dee90602ccdd.trivial.rst | 0 news/6264.bugfix.rst | 1 - news/8559.removal.rst | 2 - news/9789.removal.rst | 1 - ...88-9311-4DAB-BD89-6FBC3F296275.trivial.rst | 0 ...3f-7001-4ff0-a100-c36496ff8758.trivial.rst | 0 news/certifi.vendor.rst | 1 - news/distlib.vendor.rst | 1 - news/idna.vendor.rst | 1 - news/pep517.vendor.rst | 1 - news/pygments.vendor.rst | 1 - news/tenacity.vendor.rst | 1 - news/typing_extensions.vendor.rst | 1 - news/urllib3.vendor.rst | 1 - src/pip/__init__.py | 2 +- 34 files changed, 57 insertions(+), 34 deletions(-) delete mode 100644 news/10716.feature.rst delete mode 100644 news/11111.feature.rst delete mode 100644 news/11250.feature.rst delete mode 100644 news/11254.trivial.rst delete mode 100644 news/11276.bugfix.rst delete mode 100644 news/11309.bugfix.rst delete mode 100644 news/11320.feature.rst delete mode 100644 news/11352.bugfix.rst delete mode 100644 news/11357.doc.rst delete mode 100644 news/11358.removal.rst delete mode 100644 news/11418.trivial.rst delete mode 100644 news/11452.removal.rst delete mode 100644 news/11454.removal.rst delete mode 100644 news/11459.feature.rst delete mode 100644 news/11491.bugfix.rst delete mode 100644 news/11493.removal.rst delete mode 100644 news/5444.feature.rst delete mode 100644 news/5580954E-E089-4CDB-857A-868BA1F7435D.trivial.rst delete mode 100644 news/5ba58886-1229-45ef-8a8a-dee90602ccdd.trivial.rst delete mode 100644 news/6264.bugfix.rst delete mode 100644 news/8559.removal.rst delete mode 100644 news/9789.removal.rst delete mode 100644 news/A12E8588-9311-4DAB-BD89-6FBC3F296275.trivial.rst delete mode 100644 news/c57bc03f-7001-4ff0-a100-c36496ff8758.trivial.rst delete mode 100644 news/certifi.vendor.rst delete mode 100644 news/distlib.vendor.rst delete mode 100644 news/idna.vendor.rst delete mode 100644 news/pep517.vendor.rst delete mode 100644 news/pygments.vendor.rst delete mode 100644 news/tenacity.vendor.rst delete mode 100644 news/typing_extensions.vendor.rst delete mode 
100644 news/urllib3.vendor.rst diff --git a/NEWS.rst b/NEWS.rst index 6485bc159b7..ec16a384eb2 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,62 @@ .. towncrier release notes start +22.3 (2022-10-15) +================= + +Deprecations and Removals +------------------------- + +- Deprecate ``--install-options`` which forces pip to use the deprecated ``install`` + command of ``setuptools``. (`#11358 `_) +- Deprecate installation with 'setup.py install' when no-binary is enabled for + source distributions without 'pyproject.toml'. (`#11452 `_) +- Deprecate ```--no-binary`` disabling the wheel cache. (`#11454 `_) +- Remove ``--use-feature=2020-resolver`` opt-in flag. This was supposed to be removed in 21.0, but missed during that release cycle. (`#11493 `_) +- Deprecate installation with 'setup.py install' when the 'wheel' package is absent for + source distributions without 'pyproject.toml'. (`#8559 `_) +- Remove the ability to use ``pip list --outdated`` in combination with ``--format=freeze``. (`#9789 `_) + +Features +-------- + +- Use ``shell=True`` for opening the editor with ``pip config edit``. (`#10716 `_) +- Use the ``data-dist-info-metadata`` attribute from :pep:`658` to resolve distribution metadata without downloading the dist yet. (`#11111 `_) +- Add an option to run the test suite with pip built as a zipapp. (`#11250 `_) +- Add a ``--python`` option to allow pip to manage Python environments other + than the one pip is installed in. (`#11320 `_) +- Document the new (experimental) zipapp distribution of pip. (`#11459 `_) +- Use the much faster 'bzr co --lightweight' to obtain a copy of a Bazaar tree. (`#5444 `_) + +Bug Fixes +--------- + +- Fix ``--no-index`` when ``--index-url`` or ``--extra-index-url`` is specified + inside a requirements file. (`#11276 `_) +- Ensure that the candidate ``pip`` executable exists, when checking for a new version of pip. 
(`#11309 `_) +- Ignore distributions with invalid ``Name`` in metadata instead of crashing, when + using the ``importlib.metadata`` backend. (`#11352 `_) +- Raise RequirementsFileParseError when parsing malformed requirements options that can't be sucessfully parsed by shlex. (`#11491 `_) +- Fix build environment isolation on some system Pythons. (`#6264 `_) + +Vendored Libraries +------------------ + +- Upgrade certifi to 2022.9.24 +- Upgrade distlib to 0.3.6 +- Upgrade idna to 3.4 +- Upgrade pep517 to 0.13.0 +- Upgrade pygments to 2.13.0 +- Upgrade tenacity to 8.1.0 +- Upgrade typing_extensions to 4.4.0 +- Upgrade urllib3 to 1.26.12 + +Improved Documentation +---------------------- + +- Mention that --quiet must be used when writing the installation report to stdout. (`#11357 `_) + + 22.2.2 (2022-08-03) =================== diff --git a/news/10716.feature.rst b/news/10716.feature.rst deleted file mode 100644 index ef09e1b8f58..00000000000 --- a/news/10716.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Use ``shell=True`` for opening the editor with ``pip config edit``. diff --git a/news/11111.feature.rst b/news/11111.feature.rst deleted file mode 100644 index 39cb4b35c12..00000000000 --- a/news/11111.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Use the ``data-dist-info-metadata`` attribute from :pep:`658` to resolve distribution metadata without downloading the dist yet. diff --git a/news/11250.feature.rst b/news/11250.feature.rst deleted file mode 100644 index a80c54699c8..00000000000 --- a/news/11250.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Add an option to run the test suite with pip built as a zipapp. 
diff --git a/news/11254.trivial.rst b/news/11254.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/11276.bugfix.rst b/news/11276.bugfix.rst deleted file mode 100644 index af8f518bef4..00000000000 --- a/news/11276.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix ``--no-index`` when ``--index-url`` or ``--extra-index-url`` is specified -inside a requirements file. diff --git a/news/11309.bugfix.rst b/news/11309.bugfix.rst deleted file mode 100644 index 9ee54057da4..00000000000 --- a/news/11309.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Ensure that the candidate ``pip`` executable exists, when checking for a new version of pip. diff --git a/news/11320.feature.rst b/news/11320.feature.rst deleted file mode 100644 index 843eac7c9f4..00000000000 --- a/news/11320.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add a ``--python`` option to allow pip to manage Python environments other -than the one pip is installed in. diff --git a/news/11352.bugfix.rst b/news/11352.bugfix.rst deleted file mode 100644 index 78016c912ef..00000000000 --- a/news/11352.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Ignore distributions with invalid ``Name`` in metadata instead of crashing, when -using the ``importlib.metadata`` backend. diff --git a/news/11357.doc.rst b/news/11357.doc.rst deleted file mode 100644 index 887928a086e..00000000000 --- a/news/11357.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Mention that --quiet must be used when writing the installation report to stdout. diff --git a/news/11358.removal.rst b/news/11358.removal.rst deleted file mode 100644 index 9767949b48b..00000000000 --- a/news/11358.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate ``--install-options`` which forces pip to use the deprecated ``install`` -command of ``setuptools``. 
diff --git a/news/11418.trivial.rst b/news/11418.trivial.rst deleted file mode 100644 index df32a0d0bc3..00000000000 --- a/news/11418.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Patch non-exploitable ReDoS vulnerability in wheel_file regex diff --git a/news/11452.removal.rst b/news/11452.removal.rst deleted file mode 100644 index ac29324abc8..00000000000 --- a/news/11452.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate installation with 'setup.py install' when no-binary is enabled for -source distributions without 'pyproject.toml'. diff --git a/news/11454.removal.rst b/news/11454.removal.rst deleted file mode 100644 index 14c4dc73ac7..00000000000 --- a/news/11454.removal.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecate ```--no-binary`` disabling the wheel cache. diff --git a/news/11459.feature.rst b/news/11459.feature.rst deleted file mode 100644 index a4a11c09353..00000000000 --- a/news/11459.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Document the new (experimental) zipapp distribution of pip. diff --git a/news/11491.bugfix.rst b/news/11491.bugfix.rst deleted file mode 100644 index a8f53927fa1..00000000000 --- a/news/11491.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Raise RequirementsFileParseError when parsing malformed requirements options that can't be sucessfully parsed by shlex. diff --git a/news/11493.removal.rst b/news/11493.removal.rst deleted file mode 100644 index de559e899bb..00000000000 --- a/news/11493.removal.rst +++ /dev/null @@ -1 +0,0 @@ -Remove ``--use-feature=2020-resolver`` opt-in flag. This was supposed to be removed in 21.0, but missed during that release cycle. diff --git a/news/5444.feature.rst b/news/5444.feature.rst deleted file mode 100644 index 19780e694ec..00000000000 --- a/news/5444.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Use the much faster 'bzr co --lightweight' to obtain a copy of a Bazaar tree. 
diff --git a/news/5580954E-E089-4CDB-857A-868BA1F7435D.trivial.rst b/news/5580954E-E089-4CDB-857A-868BA1F7435D.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/5ba58886-1229-45ef-8a8a-dee90602ccdd.trivial.rst b/news/5ba58886-1229-45ef-8a8a-dee90602ccdd.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/6264.bugfix.rst b/news/6264.bugfix.rst deleted file mode 100644 index 66554a473f5..00000000000 --- a/news/6264.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix build environment isolation on some system Pythons. diff --git a/news/8559.removal.rst b/news/8559.removal.rst deleted file mode 100644 index aa9f814120d..00000000000 --- a/news/8559.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate installation with 'setup.py install' when the 'wheel' package is absent for -source distributions without 'pyproject.toml'. diff --git a/news/9789.removal.rst b/news/9789.removal.rst deleted file mode 100644 index 90ded923507..00000000000 --- a/news/9789.removal.rst +++ /dev/null @@ -1 +0,0 @@ -Remove the ability to use ``pip list --outdated`` in combination with ``--format=freeze``. 
diff --git a/news/A12E8588-9311-4DAB-BD89-6FBC3F296275.trivial.rst b/news/A12E8588-9311-4DAB-BD89-6FBC3F296275.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/c57bc03f-7001-4ff0-a100-c36496ff8758.trivial.rst b/news/c57bc03f-7001-4ff0-a100-c36496ff8758.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst deleted file mode 100644 index 7477fcc2a24..00000000000 --- a/news/certifi.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade certifi to 2022.9.24 diff --git a/news/distlib.vendor.rst b/news/distlib.vendor.rst deleted file mode 100644 index 5fa036110fb..00000000000 --- a/news/distlib.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade distlib to 0.3.6 diff --git a/news/idna.vendor.rst b/news/idna.vendor.rst deleted file mode 100644 index 087598cbf5c..00000000000 --- a/news/idna.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade idna to 3.4 diff --git a/news/pep517.vendor.rst b/news/pep517.vendor.rst deleted file mode 100644 index e18c1d87bb0..00000000000 --- a/news/pep517.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade pep517 to 0.13.0 diff --git a/news/pygments.vendor.rst b/news/pygments.vendor.rst deleted file mode 100644 index 0e5f7c580c6..00000000000 --- a/news/pygments.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade pygments to 2.13.0 diff --git a/news/tenacity.vendor.rst b/news/tenacity.vendor.rst deleted file mode 100644 index 45921e5bff1..00000000000 --- a/news/tenacity.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade tenacity to 8.1.0 diff --git a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst deleted file mode 100644 index 60371890721..00000000000 --- a/news/typing_extensions.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade typing_extensions to 4.4.0 diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst deleted file mode 100644 index 5942d465180..00000000000 --- a/news/urllib3.vendor.rst +++ /dev/null @@ -1 +0,0 @@ 
-Upgrade urllib3 to 1.26.12 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index a40148f008f..168abe86186 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "22.3.dev0" +__version__ = "22.3" def main(args: Optional[List[str]] = None) -> int: From fe7948a4a2a6763ec7d4b62bdc88fbf525e7b767 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= Date: Mon, 17 Oct 2022 19:17:25 +0200 Subject: [PATCH 138/730] Avoid distutils imports in tests The tests still don't run without distutils because they require virtualenv < 20 (and virtualenv 16 uses distutils), but at least they don't import distutils directly now. Fixes https://github.com/pypa/pip/issues/11521 --- ...57-c1fe-4d64-88a4-f775a2f6995d.trivial.rst | 0 tests/functional/test_install.py | 43 ++++++++++++++++--- tests/functional/test_install_wheel.py | 6 ++- 3 files changed, 41 insertions(+), 8 deletions(-) create mode 100644 news/7d576457-c1fe-4d64-88a4-f775a2f6995d.trivial.rst diff --git a/news/7d576457-c1fe-4d64-88a4-f775a2f6995d.trivial.rst b/news/7d576457-c1fe-4d64-88a4-f775a2f6995d.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index f4e0a53bec4..876f2e12a7c 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1,8 +1,8 @@ -import distutils import os import re import ssl import sys +import sysconfig import textwrap from os.path import curdir, join, pardir from pathlib import Path @@ -1145,6 +1145,39 @@ def main(): pass assert "--no-warn-script-location" not in result.stderr, str(result) +def _change_root(new_root: str, pathname: str) -> str: + """ + Adapted from distutils. + + Return 'pathname' with 'new_root' prepended. If 'pathname' is + relative, this is equivalent to "os.path.join(new_root,pathname)". 
+ Otherwise, it requires making 'pathname' relative and then joining the + two, which is tricky on DOS/Windows and Mac OS. + """ + try: + from distutils.util import change_root + except ImportError: + pass + else: + return change_root(new_root, pathname) + + if os.name == "posix": + if not os.path.isabs(pathname): + return os.path.join(new_root, pathname) + else: + return os.path.join(new_root, pathname[1:]) + + elif os.name == "nt": + drive, path = os.path.splitdrive(pathname) + if path[0] == "\\": + path = path[1:] + return os.path.join(new_root, path) + + else: + # distutils raise DistutilsPlatformError here + raise RuntimeError(f"nothing known about platform '{os.name}'") + + @pytest.mark.usefixtures("with_wheel") def test_install_package_with_root(script: PipTestEnvironment, data: TestData) -> None: """ @@ -1163,10 +1196,8 @@ def test_install_package_with_root(script: PipTestEnvironment, data: TestData) - normal_install_path = os.fspath( script.base_path / script.site_packages / "simple-1.0.dist-info" ) - # use distutils to change the root exactly how the --root option does it - from distutils.util import change_root - root_path = change_root(os.path.join(script.scratch, "root"), normal_install_path) + root_path = _change_root(os.path.join(script.scratch, "root"), normal_install_path) result.did_create(root_path) # Should show find-links location in output @@ -1195,7 +1226,7 @@ def test_install_package_with_prefix( rel_prefix_path = script.scratch / "prefix" install_path = join( - distutils.sysconfig.get_python_lib(prefix=rel_prefix_path), + sysconfig.get_path("purelib", vars={"base": rel_prefix_path}), # we still test for egg-info because no-binary implies setup.py install f"simple-1.0-py{pyversion}.egg-info", ) @@ -1217,7 +1248,7 @@ def _test_install_editable_with_prefix( "prefix", "lib", f"python{pyversion}", "site-packages" ) else: - site_packages = distutils.sysconfig.get_python_lib(prefix="prefix") + site_packages = sysconfig.get_path("purelib", 
vars={"base": "prefix"}) # make sure target path is in PYTHONPATH pythonpath = script.scratch_path / site_packages diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py index 18985342908..e988e741962 100644 --- a/tests/functional/test_install_wheel.py +++ b/tests/functional/test_install_wheel.py @@ -1,9 +1,9 @@ import base64 import csv -import distutils import hashlib import os import shutil +import sysconfig from pathlib import Path from typing import Any @@ -284,7 +284,9 @@ def test_install_wheel_with_prefix( "--find-links", tmpdir, ) - lib = distutils.sysconfig.get_python_lib(prefix=os.path.join("scratch", "prefix")) + lib = sysconfig.get_path( + "purelib", vars={"base": os.path.join("scratch", "prefix")} + ) result.did_create(lib) From 2b82b386601f1ded7a4f1c64ffc1a1083124590a Mon Sep 17 00:00:00 2001 From: Michael Mintz Date: Tue, 18 Oct 2022 20:44:00 -0400 Subject: [PATCH 139/730] Fix grammar --- src/pip/_internal/self_outdated_check.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/self_outdated_check.py b/src/pip/_internal/self_outdated_check.py index 9e2149c5247..f87c9a5d2c0 100644 --- a/src/pip/_internal/self_outdated_check.py +++ b/src/pip/_internal/self_outdated_check.py @@ -133,7 +133,7 @@ def __rich__(self) -> Group: return Group( Text(), Text.from_markup( - f"{notice} A new release of pip available: " + f"{notice} A new release of pip is available: " f"[red]{self.old}[reset] -> [green]{self.new}[reset]" ), Text.from_markup( From 0e48cae74e4921b4a09115e890435c464540b91b Mon Sep 17 00:00:00 2001 From: Michael Mintz Date: Wed, 19 Oct 2022 00:02:05 -0400 Subject: [PATCH 140/730] Add a news fragment for the grammar fix --- news/news/11529.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/news/11529.bugfix.rst diff --git a/news/news/11529.bugfix.rst b/news/news/11529.bugfix.rst new file mode 100644 index 00000000000..d05e404602e --- /dev/null +++ 
b/news/news/11529.bugfix.rst @@ -0,0 +1 @@ +Fix grammar by changing "A new release of pip available:" to "A new release of pip is available:" in the notice used for indicating that. From 22d401bfb7814b0ef023c21c90be20e0d6bad993 Mon Sep 17 00:00:00 2001 From: Michael Mintz Date: Wed, 19 Oct 2022 00:07:31 -0400 Subject: [PATCH 141/730] Update the news fragment for the grammar fix --- news/{news => }/11529.bugfix.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{news => }/11529.bugfix.rst (100%) diff --git a/news/news/11529.bugfix.rst b/news/11529.bugfix.rst similarity index 100% rename from news/news/11529.bugfix.rst rename to news/11529.bugfix.rst From 470caee06d0cc50742e0187309e8d6c628679ead Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Wed, 19 Oct 2022 08:16:04 +0300 Subject: [PATCH 142/730] Replace bzr with breezy Bzr is disabled https://github.com/Homebrew/homebrew-core/blob/9890144632fb15e68ba22db71c8d355865827f13/Formula/bazaar.rb#L19 --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 362f712b1ef..7687b1b8cd4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -120,7 +120,7 @@ jobs: - name: Install MacOS dependencies if: matrix.os == 'MacOS' - run: brew install bzr + run: brew install breezy - run: pip install nox 'virtualenv<20' 'setuptools != 60.6.0' From fdc262f06936fb406af2af74ad6b0946ac1f4bd8 Mon Sep 17 00:00:00 2001 From: Felipe Peter Date: Wed, 19 Oct 2022 14:23:06 +0800 Subject: [PATCH 143/730] Fix error message and improve help text --- news/3297dfd6-b078-4452-97a1-7d2c1ab41ca1.trivial.rst | 0 src/pip/_internal/commands/list.py | 9 +++++++-- tests/functional/test_list.py | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) create mode 100644 news/3297dfd6-b078-4452-97a1-7d2c1ab41ca1.trivial.rst diff --git a/news/3297dfd6-b078-4452-97a1-7d2c1ab41ca1.trivial.rst 
b/news/3297dfd6-b078-4452-97a1-7d2c1ab41ca1.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index 8e1426dbb6c..c4df1008a76 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -103,7 +103,11 @@ def add_options(self) -> None: dest="list_format", default="columns", choices=("columns", "freeze", "json"), - help="Select the output format among: columns (default), freeze, or json", + help=( + "Select the output format among: columns (default), freeze, or json. " + "The 'freeze' format cannot be used together with the --outdated " + "option." + ), ) self.cmd_opts.add_option( @@ -157,7 +161,8 @@ def run(self, options: Values, args: List[str]) -> int: if options.outdated and options.list_format == "freeze": raise CommandError( - "List format 'freeze' can not be used with the --outdated option." + "List format 'freeze' cannot be used together with the --outdated " + "option." ) cmdoptions.check_list_path_option(options) diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index c7fdec2f2fc..cd94960b3f8 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -588,7 +588,7 @@ def test_outdated_formats(script: PipTestEnvironment, data: TestData) -> None: expect_error=True, ) assert ( - "List format 'freeze' can not be used with the --outdated option." + "List format 'freeze' cannot be used together with the --outdated option." in result.stderr ) From 1e236f4c8491d50d91038050bc393b2266364c06 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 28 Oct 2022 00:12:03 +0800 Subject: [PATCH 144/730] Ignore failing git submodule test for now Git 2.38.1 patched CVE-2022-39253 by disabling automated fetch against a file: repository. This breaks git submodule, which is used by a pip test.
Information on how projects relying on automated fetch should configure git correctly after this change is lacking, so the test is disabled for now until someone can come up with a better solution. --- news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst | 5 +++++ tests/functional/test_install_vcs_git.py | 5 +++++ 2 files changed, 10 insertions(+) create mode 100644 news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst diff --git a/news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst b/news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst new file mode 100644 index 00000000000..720c98885e6 --- /dev/null +++ b/news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst @@ -0,0 +1,5 @@ +Git 2.38.1 patched CVE-2022-39253 by disabling automated fetch against a +``file:`` repository. This breaks git submodule, which is used by a pip test. +Information on how projects relying on automated fetch should configure git +correctly after this change is lacking, so the test is disabled for now until +someone can come up with a better solution. diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index 2171d3162b3..cb72ec0b230 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -544,6 +544,11 @@ def test_reinstalling_works_with_editable_non_master_branch( # TODO(pnasrat) fix all helpers to do right things with paths on windows. @pytest.mark.skipif("sys.platform == 'win32'") +@pytest.mark.xfail( + condition=True, + reason="Git submodule against file: is not working; waiting for a good solution", + run=True, +) def test_check_submodule_addition(script: PipTestEnvironment) -> None: """ Submodules are pulled in on install and updated on upgrade.
From f96ba08dbc8b970d9a740717c9730017eedf81f2 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 22 Jul 2022 13:22:17 +0800 Subject: [PATCH 145/730] Bump virtualenv to 20+ in tests --- .github/workflows/ci.yml | 6 +++--- tests/requirements.txt | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7687b1b8cd4..9f5b34737a7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -122,7 +122,7 @@ jobs: if: matrix.os == 'MacOS' run: brew install breezy - - run: pip install nox 'virtualenv<20' 'setuptools != 60.6.0' + - run: pip install nox 'virtualenv>=20' 'setuptools!=60.6.0' # Main check - name: Run unit tests @@ -179,7 +179,7 @@ jobs: $acl.AddAccessRule($rule) Set-Acl "R:\Temp" $acl - - run: pip install nox 'virtualenv<20' + - run: pip install nox 'virtualenv>=20' env: TEMP: "R:\\Temp" @@ -261,7 +261,7 @@ jobs: - name: Install Ubuntu dependencies run: sudo apt-get install bzr - - run: pip install nox 'virtualenv<20' + - run: pip install nox 'virtualenv>=20' - name: Run unit tests run: >- diff --git a/tests/requirements.txt b/tests/requirements.txt index 9ce6d62078a..f4f27e94f71 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -7,7 +7,7 @@ pytest-rerunfailures pytest-xdist scripttest setuptools -virtualenv < 20.0 +virtualenv >= 20.0 werkzeug wheel tomli-w From 5ded5474ac9b323496506e6391e8d8c2c888d7f1 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 22 Jul 2022 13:22:39 +0800 Subject: [PATCH 146/730] Name virtualenv<20 as "legacy" Well they are. At least not "regular" anymore. 
--- src/pip/_internal/utils/virtualenv.py | 12 ++++++------ tests/unit/test_utils_virtualenv.py | 10 +++++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/utils/virtualenv.py b/src/pip/_internal/utils/virtualenv.py index c926db4c332..882e36f5c1d 100644 --- a/src/pip/_internal/utils/virtualenv.py +++ b/src/pip/_internal/utils/virtualenv.py @@ -19,7 +19,7 @@ def _running_under_venv() -> bool: return sys.prefix != getattr(sys, "base_prefix", sys.prefix) -def _running_under_regular_virtualenv() -> bool: +def _running_under_legacy_virtualenv() -> bool: """Checks if sys.real_prefix is set. This handles virtual environments created with pypa's virtualenv. @@ -29,8 +29,8 @@ def _running_under_regular_virtualenv() -> bool: def running_under_virtualenv() -> bool: - """Return True if we're running inside a virtualenv, False otherwise.""" - return _running_under_venv() or _running_under_regular_virtualenv() + """True if we're running inside a virtual environment, False otherwise.""" + return _running_under_venv() or _running_under_legacy_virtualenv() def _get_pyvenv_cfg_lines() -> Optional[List[str]]: @@ -77,7 +77,7 @@ def _no_global_under_venv() -> bool: return False -def _no_global_under_regular_virtualenv() -> bool: +def _no_global_under_legacy_virtualenv() -> bool: """Check if "no-global-site-packages.txt" exists beside site.py This mirrors logic in pypa/virtualenv for determining whether system @@ -98,7 +98,7 @@ def virtualenv_no_global() -> bool: if _running_under_venv(): return _no_global_under_venv() - if _running_under_regular_virtualenv(): - return _no_global_under_regular_virtualenv() + if _running_under_legacy_virtualenv(): + return _no_global_under_legacy_virtualenv() return False diff --git a/tests/unit/test_utils_virtualenv.py b/tests/unit/test_utils_virtualenv.py index 38d5383ce04..94461c6d89e 100644 --- a/tests/unit/test_utils_virtualenv.py +++ b/tests/unit/test_utils_virtualenv.py @@ -63,7 +63,7 @@ def 
test_virtualenv_no_global_with_regular_virtualenv( monkeypatch.setattr(site, "__file__", os.fspath(tmpdir / "site.py")) monkeypatch.setattr( virtualenv, - "_running_under_regular_virtualenv", + "_running_under_legacy_virtualenv", lambda: under_virtualenv, ) if no_global_file: @@ -73,7 +73,7 @@ def test_virtualenv_no_global_with_regular_virtualenv( @pytest.mark.parametrize( - "pyvenv_cfg_lines, under_venv, expected, expect_warning", + "pyvenv_cfg_lines, under_venv, expect_no_global, expect_warning", [ (None, False, False, False), (None, True, True, True), # this has a warning. @@ -104,15 +104,15 @@ def test_virtualenv_no_global_with_pep_405_virtual_environment( caplog: pytest.LogCaptureFixture, pyvenv_cfg_lines: Optional[List[str]], under_venv: bool, - expected: bool, + expect_no_global: bool, expect_warning: bool, ) -> None: - monkeypatch.setattr(virtualenv, "_running_under_regular_virtualenv", lambda: False) + monkeypatch.setattr(virtualenv, "_running_under_legacy_virtualenv", lambda: False) monkeypatch.setattr(virtualenv, "_get_pyvenv_cfg_lines", lambda: pyvenv_cfg_lines) monkeypatch.setattr(virtualenv, "_running_under_venv", lambda: under_venv) with caplog.at_level(logging.WARNING): - assert virtualenv.virtualenv_no_global() == expected + assert virtualenv.virtualenv_no_global() == expect_no_global if expect_warning: assert caplog.records From 1d05ba8ffdd21e24de2a01487a1318712cf66953 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 22 Jul 2022 14:11:35 +0800 Subject: [PATCH 147/730] Rewrite virtualenv tool in tests for 20+ support Co-Authored-By: Lumir Balhar --- tests/lib/venv.py | 141 +++++++++++++++------------------------------- 1 file changed, 46 insertions(+), 95 deletions(-) diff --git a/tests/lib/venv.py b/tests/lib/venv.py index ab6644bc9ab..eb54d834d2a 100644 --- a/tests/lib/venv.py +++ b/tests/lib/venv.py @@ -1,8 +1,7 @@ import compileall import os import shutil -import subprocess -import sys +import sysconfig import textwrap import venv as 
_venv from pathlib import Path @@ -47,15 +46,16 @@ def __init__( self._create() def _update_paths(self) -> None: - home, lib, inc, bin = _virtualenv.path_locations(self.location) - self.bin = Path(bin) - self.site = Path(lib) / "site-packages" - # Workaround for https://github.com/pypa/virtualenv/issues/306 - if hasattr(sys, "pypy_version_info"): - version_dir = str(sys.version_info.major) - self.lib = Path(home, "lib-python", version_dir) - else: - self.lib = Path(lib) + bases = { + "installed_base": self.location, + "installed_platbase": self.location, + "base": self.location, + "platbase": self.location, + } + paths = sysconfig.get_paths(vars=bases) + self.bin = Path(paths["scripts"]) + self.site = Path(paths["purelib"]) + self.lib = Path(paths["stdlib"]) def __repr__(self) -> str: return f"" @@ -64,10 +64,6 @@ def _create(self, clear: bool = False) -> None: if clear: shutil.rmtree(self.location) if self._template: - # On Windows, calling `_virtualenv.path_locations(target)` - # will have created the `target` directory... - if sys.platform == "win32" and self.location.exists(): - self.location.rmdir() # Clone virtual environment from template. shutil.copytree(self._template.location, self.location, symlinks=True) self._sitecustomize = self._template.sitecustomize @@ -75,18 +71,14 @@ def _create(self, clear: bool = False) -> None: else: # Create a new virtual environment. 
if self._venv_type == "virtualenv": - subprocess.check_call( + _virtualenv.cli_run( [ - sys.executable, - "-m", - "virtualenv", "--no-pip", "--no-wheel", "--no-setuptools", - str(self.location), - ] + os.fspath(self.location), + ], ) - self._fix_virtualenv_site_module() elif self._venv_type == "venv": builder = _venv.EnvBuilder() context = builder.ensure_directories(self.location) @@ -96,71 +88,30 @@ def _create(self, clear: bool = False) -> None: self.sitecustomize = self._sitecustomize self.user_site_packages = self._user_site_packages - def _fix_virtualenv_site_module(self) -> None: - # Patch `site.py` so user site work as expected. - site_py = self.lib / "site.py" - with open(site_py) as fp: - site_contents = fp.read() - for pattern, replace in ( - ( - # Ensure enabling user site does not result in adding - # the real site-packages' directory to `sys.path`. - ("\ndef virtual_addsitepackages(known_paths):\n"), - ( - "\ndef virtual_addsitepackages(known_paths):\n" - " return known_paths\n" - ), - ), - ( - # Fix sites ordering: user site must be added before system. - ( - "\n paths_in_sys = addsitepackages(paths_in_sys)" - "\n paths_in_sys = addusersitepackages(paths_in_sys)\n" - ), - ( - "\n paths_in_sys = addusersitepackages(paths_in_sys)" - "\n paths_in_sys = addsitepackages(paths_in_sys)\n" - ), - ), - ): - assert pattern in site_contents - site_contents = site_contents.replace(pattern, replace) - with open(site_py, "w") as fp: - fp.write(site_contents) - # Make sure bytecode is up-to-date too. - assert compileall.compile_file(str(site_py), quiet=1, force=True) - def _customize_site(self) -> None: - contents = "" - if self._venv_type == "venv": - # Enable user site (before system). - contents += textwrap.dedent( - """ - import os, site, sys - - if not os.environ.get('PYTHONNOUSERSITE', False): - - site.ENABLE_USER_SITE = True - - # First, drop system-sites related paths. 
- original_sys_path = sys.path[:] - known_paths = set() - for path in site.getsitepackages(): - site.addsitedir(path, known_paths=known_paths) - system_paths = sys.path[len(original_sys_path):] - for path in system_paths: - if path in original_sys_path: - original_sys_path.remove(path) - sys.path = original_sys_path - - # Second, add user-site. - site.addsitedir(site.getusersitepackages()) - - # Third, add back system-sites related paths. - for path in site.getsitepackages(): - site.addsitedir(path) - """ - ).strip() + # Enable user site (before system). + contents = textwrap.dedent( + """ + import os, site, sys + if not os.environ.get('PYTHONNOUSERSITE', False): + site.ENABLE_USER_SITE = True + # First, drop system-sites related paths. + original_sys_path = sys.path[:] + known_paths = set() + for path in site.getsitepackages(): + site.addsitedir(path, known_paths=known_paths) + system_paths = sys.path[len(original_sys_path):] + for path in system_paths: + if path in original_sys_path: + original_sys_path.remove(path) + sys.path = original_sys_path + # Second, add user-site. + site.addsitedir(site.getusersitepackages()) + # Third, add back system-sites related paths. 
+ for path in site.getsitepackages(): + site.addsitedir(path) + """ + ).strip() if self._sitecustomize is not None: contents += "\n" + self._sitecustomize sitecustomize = self.site / "sitecustomize.py" @@ -191,12 +142,12 @@ def user_site_packages(self) -> bool: @user_site_packages.setter def user_site_packages(self, value: bool) -> None: - self._user_site_packages = value - if self._venv_type == "virtualenv": - marker = self.lib / "no-global-site-packages.txt" - if self._user_site_packages: - marker.unlink() - else: - marker.touch() - elif self._venv_type == "venv": - self._customize_site() + self._customize_site() + pyvenv_cfg = self.location.joinpath("pyvenv.cfg") + modified_lines = [] + for line in pyvenv_cfg.read_text().splitlines(): + k, v = line.split("=", 1) + if k.strip() == "include-system-site-packages": + line = f"include-system-site-packages = {str(bool(value)).lower()}" + modified_lines.append(line) + pyvenv_cfg.write_text("\n".join(modified_lines)) From 4d533cc269ae501ba4e29735f0d5f69b533ecafb Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 22 Jul 2022 18:04:28 +0800 Subject: [PATCH 148/730] Don't enable global site when enabling user site --- tests/lib/venv.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/tests/lib/venv.py b/tests/lib/venv.py index eb54d834d2a..9177651d125 100644 --- a/tests/lib/venv.py +++ b/tests/lib/venv.py @@ -91,10 +91,10 @@ def _create(self, clear: bool = False) -> None: def _customize_site(self) -> None: # Enable user site (before system). contents = textwrap.dedent( - """ + f""" import os, site, sys if not os.environ.get('PYTHONNOUSERSITE', False): - site.ENABLE_USER_SITE = True + site.ENABLE_USER_SITE = {self._user_site_packages} # First, drop system-sites related paths. original_sys_path = sys.path[:] known_paths = set() @@ -106,7 +106,8 @@ def _customize_site(self) -> None: original_sys_path.remove(path) sys.path = original_sys_path # Second, add user-site. 
- site.addsitedir(site.getusersitepackages()) + if {self._user_site_packages}: + site.addsitedir(site.getusersitepackages()) # Third, add back system-sites related paths. for path in site.getsitepackages(): site.addsitedir(path) @@ -142,12 +143,5 @@ def user_site_packages(self) -> bool: @user_site_packages.setter def user_site_packages(self, value: bool) -> None: + self._user_site_packages = value self._customize_site() - pyvenv_cfg = self.location.joinpath("pyvenv.cfg") - modified_lines = [] - for line in pyvenv_cfg.read_text().splitlines(): - k, v = line.split("=", 1) - if k.strip() == "include-system-site-packages": - line = f"include-system-site-packages = {str(bool(value)).lower()}" - modified_lines.append(line) - pyvenv_cfg.write_text("\n".join(modified_lines)) From 4f20a7e3e76674c3361d8bde175fff82ed301d35 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 27 Oct 2022 09:15:33 +0800 Subject: [PATCH 149/730] Set include-system-site-packages for user site --- tests/lib/venv.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/tests/lib/venv.py b/tests/lib/venv.py index 9177651d125..55b81a7d4b5 100644 --- a/tests/lib/venv.py +++ b/tests/lib/venv.py @@ -5,7 +5,7 @@ import textwrap import venv as _venv from pathlib import Path -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING, Dict, Optional, Union import virtualenv as _virtualenv @@ -120,6 +120,21 @@ def _customize_site(self) -> None: # Make sure bytecode is up-to-date too. assert compileall.compile_file(str(sitecustomize), quiet=1, force=True) + def _rewrite_pyvenv_cfg(self, replacements: Dict[str, str]) -> None: + pyvenv_cfg = self.location.joinpath("pyvenv.cfg") + lines = pyvenv_cfg.read_text(encoding="utf-8").splitlines() + + def maybe_replace_line(line: str) -> str: + key = line.split("=", 1)[0].strip() + try: + value = replacements[key] + except KeyError: # No need to replace. 
+ return line + return f"{key} = {value}" + + lines = [maybe_replace_line(line) for line in lines] + pyvenv_cfg.write_text("\n".join(lines), encoding="utf-8") + def clear(self) -> None: self._create(clear=True) @@ -144,4 +159,7 @@ def user_site_packages(self) -> bool: @user_site_packages.setter def user_site_packages(self, value: bool) -> None: self._user_site_packages = value + self._rewrite_pyvenv_cfg( + {"include-system-site-packages": str(bool(value)).lower()} + ) self._customize_site() From b850d539f9890f5283fffce1a625149ae2a1e5c3 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 27 Oct 2022 09:29:50 +0800 Subject: [PATCH 150/730] Keep using old virtualenv for Python < 3.10 pip uses distutils (instead of sysconfig) for Python < 3.10, which has awkward path issues when faking a user site. --- tests/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/requirements.txt b/tests/requirements.txt index f4f27e94f71..84b7c14d4b4 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -7,7 +7,8 @@ pytest-rerunfailures pytest-xdist scripttest setuptools -virtualenv >= 20.0 +virtualenv < 20.0 ; python_version < '3.10' +virtualenv >= 20.0 ; python_version >= '3.10' werkzeug wheel tomli-w From 4bf1b67964aad6f5db2ec5888833c30685b6d4dd Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 27 Oct 2022 10:02:23 +0800 Subject: [PATCH 151/730] Build fake wheels for --user tests The old INITools tests rely on setup.py, which relies on distutils and generates a ton of issues. Build fake wheels directly to avoid dealing with them. 
--- tests/functional/test_install_user.py | 135 +++++++++++++++++--------- 1 file changed, 91 insertions(+), 44 deletions(-) diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py index d0bdbc3a547..e34b35431dc 100644 --- a/tests/functional/test_install_user.py +++ b/tests/functional/test_install_user.py @@ -1,6 +1,7 @@ """ tests specific to "pip install --user" """ +import os import textwrap from os.path import curdir, isdir, isfile from pathlib import Path @@ -8,7 +9,12 @@ import pytest from tests.lib import pyversion # noqa: F401 -from tests.lib import PipTestEnvironment, TestData, need_svn +from tests.lib import ( + PipTestEnvironment, + TestData, + create_basic_wheel_for_package, + need_svn, +) from tests.lib.local_repos import local_checkout from tests.lib.venv import VirtualEnvironment @@ -142,7 +148,6 @@ def test_install_user_conflict_in_usersite( result2.did_create(egg_info_folder) assert not isfile(initools_v3_file), initools_v3_file - @pytest.mark.network @pytest.mark.incompatible_with_test_venv def test_install_user_conflict_in_globalsite( self, virtualenv: VirtualEnvironment, script: PipTestEnvironment @@ -151,30 +156,41 @@ def test_install_user_conflict_in_globalsite( Test user install with conflict in global site ignores site and installs to usersite """ - _patch_dist_in_site_packages(virtualenv) + create_basic_wheel_for_package(script, "initools", "0.1") + create_basic_wheel_for_package(script, "initools", "0.2") - script.pip("install", "INITools==0.2", "--no-binary=:all:") + _patch_dist_in_site_packages(virtualenv) - result2 = script.pip("install", "--user", "INITools==0.1", "--no-binary=:all:") + script.pip( + "install", + "--no-index", + "--find-links", + script.scratch_path, + "initools==0.2", + ) + result2 = script.pip( + "install", + "--no-index", + "--find-links", + script.scratch_path, + "--user", + "initools==0.1", + ) # usersite has 0.1 - # we still test for egg-info because no-binary implies setup.py 
install - egg_info_folder = script.user_site / f"INITools-0.1-py{pyversion}.egg-info" + dist_info_folder = script.user_site / "initools-0.1.dist-info" initools_folder = script.user_site / "initools" - result2.did_create(egg_info_folder) + result2.did_create(dist_info_folder) result2.did_create(initools_folder) # site still has 0.2 (can't look in result1; have to check) - egg_info_folder = ( - script.base_path - / script.site_packages - / f"INITools-0.2-py{pyversion}.egg-info" + dist_info_folder = ( + script.base_path / script.site_packages / "initools-0.2.dist-info" ) initools_folder = script.base_path / script.site_packages / "initools" - assert isdir(egg_info_folder) + assert isdir(dist_info_folder) assert isdir(initools_folder) - @pytest.mark.network @pytest.mark.incompatible_with_test_venv def test_upgrade_user_conflict_in_globalsite( self, virtualenv: VirtualEnvironment, script: PipTestEnvironment @@ -183,31 +199,42 @@ def test_upgrade_user_conflict_in_globalsite( Test user install/upgrade with conflict in global site ignores site and installs to usersite """ + create_basic_wheel_for_package(script, "initools", "0.2") + create_basic_wheel_for_package(script, "initools", "0.3.1") + _patch_dist_in_site_packages(virtualenv) - script.pip("install", "INITools==0.2", "--no-binary=:all:") + script.pip( + "install", + "--no-index", + "--find-links", + script.scratch_path, + "initools==0.2", + ) result2 = script.pip( - "install", "--user", "--upgrade", "INITools", "--no-binary=:all:" + "install", + "--no-index", + "--find-links", + script.scratch_path, + "--user", + "--upgrade", + "initools", ) # usersite has 0.3.1 - # we still test for egg-info because no-binary implies setup.py install - egg_info_folder = script.user_site / f"INITools-0.3.1-py{pyversion}.egg-info" + dist_info_folder = script.user_site / "initools-0.3.1.dist-info" initools_folder = script.user_site / "initools" - result2.did_create(egg_info_folder) + result2.did_create(dist_info_folder) 
result2.did_create(initools_folder) # site still has 0.2 (can't look in result1; have to check) - egg_info_folder = ( - script.base_path - / script.site_packages - / f"INITools-0.2-py{pyversion}.egg-info" + dist_info_folder = ( + script.base_path / script.site_packages / "initools-0.2.dist-info" ) initools_folder = script.base_path / script.site_packages / "initools" - assert isdir(egg_info_folder), result2.stdout + assert isdir(dist_info_folder), result2.stdout assert isdir(initools_folder) - @pytest.mark.network @pytest.mark.incompatible_with_test_venv def test_install_user_conflict_in_globalsite_and_usersite( self, virtualenv: VirtualEnvironment, script: PipTestEnvironment @@ -216,34 +243,54 @@ def test_install_user_conflict_in_globalsite_and_usersite( Test user install with conflict in globalsite and usersite ignores global site and updates usersite. """ - _patch_dist_in_site_packages(virtualenv) + initools_v3_file_name = os.path.join("initools", "configparser.py") + create_basic_wheel_for_package(script, "initools", "0.1") + create_basic_wheel_for_package(script, "initools", "0.2") + create_basic_wheel_for_package( + script, + "initools", + "0.3", + extra_files={initools_v3_file_name: "# Hi!"}, + ) - script.pip("install", "INITools==0.2", "--no-binary=:all:") - script.pip("install", "--user", "INITools==0.3", "--no-binary=:all:") + _patch_dist_in_site_packages(virtualenv) - result3 = script.pip("install", "--user", "INITools==0.1", "--no-binary=:all:") + script.pip( + "install", + "--no-index", + "--find-links", + script.scratch_path, + "initools==0.2", + ) + script.pip( + "install", + "--no-index", + "--find-links", + script.scratch_path, + "--user", + "initools==0.3", + ) + result3 = script.pip( + "install", + "--no-index", + "--find-links", + script.scratch_path, + "--user", + "initools==0.1", + ) # usersite has 0.1 - # we still test for egg-info because no-binary implies setup.py install - egg_info_folder = script.user_site / 
f"INITools-0.1-py{pyversion}.egg-info" - initools_v3_file = ( - # file only in 0.3 - script.base_path - / script.user_site - / "initools" - / "configparser.py" - ) - result3.did_create(egg_info_folder) + dist_info_folder = script.user_site / "initools-0.1.dist-info" + result3.did_create(dist_info_folder) + initools_v3_file = script.base_path / script.user_site / initools_v3_file_name assert not isfile(initools_v3_file), initools_v3_file # site still has 0.2 (can't just look in result1; have to check) - egg_info_folder = ( - script.base_path - / script.site_packages - / f"INITools-0.2-py{pyversion}.egg-info" + dist_info_folder = ( + script.base_path / script.site_packages / "initools-0.2.dist-info" ) initools_folder = script.base_path / script.site_packages / "initools" - assert isdir(egg_info_folder) + assert isdir(dist_info_folder) assert isdir(initools_folder) @pytest.mark.network From 4ab48650633b61e2c4f730584553604758724fb2 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 27 Oct 2022 10:36:46 +0800 Subject: [PATCH 152/730] One more rewrite to avoid distutils --- tests/functional/test_uninstall_user.py | 44 ++++++++++++++++++------- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/tests/functional/test_uninstall_user.py b/tests/functional/test_uninstall_user.py index 6d48fe1627a..1ef65dd1671 100644 --- a/tests/functional/test_uninstall_user.py +++ b/tests/functional/test_uninstall_user.py @@ -6,9 +6,9 @@ import pytest from tests.functional.test_install_user import _patch_dist_in_site_packages -from tests.lib import pyversion # noqa: F401 from tests.lib import PipTestEnvironment, TestData, assert_all_changes from tests.lib.venv import VirtualEnvironment +from tests.lib.wheel import make_wheel @pytest.mark.incompatible_with_test_venv @@ -28,14 +28,39 @@ def test_uninstall_from_usersite_with_dist_in_global_site( """ Test uninstall from usersite (with same dist in global site) """ + entry_points_txt = "[console_scripts]\nscript = pkg:func" + 
make_wheel( + "pkg", + "0.1", + extra_metadata_files={"entry_points.txt": entry_points_txt}, + ).save_to_dir(script.scratch_path) + make_wheel( + "pkg", + "0.1.1", + extra_metadata_files={"entry_points.txt": entry_points_txt}, + ).save_to_dir(script.scratch_path) + _patch_dist_in_site_packages(virtualenv) - script.pip_install_local("pip-test-package==0.1", "--no-binary=:all:") + script.pip( + "install", + "--no-index", + "--find-links", + script.scratch_path, + "--no-warn-script-location", + "pkg==0.1", + ) - result2 = script.pip_install_local( - "--user", "pip-test-package==0.1.1", "--no-binary=:all:" + result2 = script.pip( + "install", + "--no-index", + "--find-links", + script.scratch_path, + "--no-warn-script-location", + "--user", + "pkg==0.1.1", ) - result3 = script.pip("uninstall", "-vy", "pip-test-package") + result3 = script.pip("uninstall", "-vy", "pkg") # uninstall console is mentioning user scripts, but not global scripts assert normcase(script.user_bin_path) in result3.stdout, str(result3) @@ -45,13 +70,8 @@ def test_uninstall_from_usersite_with_dist_in_global_site( assert_all_changes(result2, result3, [script.venv / "build", "cache"]) # site still has 0.2 (can't look in result1; have to check) - # keep checking for egg-info because no-binary implies setup.py install - egg_info_folder = ( - script.base_path - / script.site_packages - / f"pip_test_package-0.1-py{pyversion}.egg-info" - ) - assert isdir(egg_info_folder) + dist_info_folder = script.base_path / script.site_packages / "pkg-0.1.dist-info" + assert isdir(dist_info_folder) def test_uninstall_editable_from_usersite( self, script: PipTestEnvironment, data: TestData From 83c85e94b70aa5b3211ef3cc592d8cd8619beb15 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 27 Oct 2022 11:02:51 +0800 Subject: [PATCH 153/730] Fix legacy virtualenv setup in tests --- .github/workflows/ci.yml | 6 +- tests/lib/venv.py | 154 ++++++++++++++++++++++++++++++--------- 2 files changed, 121 insertions(+), 39 
deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9f5b34737a7..c9cfb7b8e98 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -122,7 +122,7 @@ jobs: if: matrix.os == 'MacOS' run: brew install breezy - - run: pip install nox 'virtualenv>=20' 'setuptools!=60.6.0' + - run: pip install nox # Main check - name: Run unit tests @@ -179,7 +179,7 @@ jobs: $acl.AddAccessRule($rule) Set-Acl "R:\Temp" $acl - - run: pip install nox 'virtualenv>=20' + - run: pip install nox env: TEMP: "R:\\Temp" @@ -261,7 +261,7 @@ jobs: - name: Install Ubuntu dependencies run: sudo apt-get install bzr - - run: pip install nox 'virtualenv>=20' + - run: pip install nox - name: Run unit tests run: >- diff --git a/tests/lib/venv.py b/tests/lib/venv.py index 55b81a7d4b5..42ec5082ad7 100644 --- a/tests/lib/venv.py +++ b/tests/lib/venv.py @@ -1,6 +1,8 @@ import compileall import os import shutil +import subprocess +import sys import sysconfig import textwrap import venv as _venv @@ -18,6 +20,9 @@ VirtualEnvironmentType = str +LEGACY_VIRTUALENV = int(_virtualenv.__version__.split(".", 1)[0]) < 20 + + class VirtualEnvironment: """ An abstraction around virtual environments, currently it only uses @@ -39,13 +44,28 @@ def __init__( self._venv_type = venv_type else: self._venv_type = "virtualenv" + assert self._venv_type in ("virtualenv", "venv") self._user_site_packages = False self._template = template self._sitecustomize: Optional[str] = None self._update_paths() self._create() + def __update_paths_legacy(self) -> None: + home, lib, inc, bin = _virtualenv.path_locations(self.location) + self.bin = Path(bin) + self.site = Path(lib) / "site-packages" + # Workaround for https://github.com/pypa/virtualenv/issues/306 + if hasattr(sys, "pypy_version_info"): + version_dir = str(sys.version_info.major) + self.lib = Path(home, "lib-python", version_dir) + else: + self.lib = Path(lib) + def _update_paths(self) -> None: + if LEGACY_VIRTUALENV: + 
self.__update_paths_legacy() + return bases = { "installed_base": self.location, "installed_platbase": self.location, @@ -64,6 +84,10 @@ def _create(self, clear: bool = False) -> None: if clear: shutil.rmtree(self.location) if self._template: + # On Windows, calling `_virtualenv.path_locations(target)` + # will have created the `target` directory... + if LEGACY_VIRTUALENV and sys.platform == "win32" and self.location.exists(): + self.location.rmdir() # Clone virtual environment from template. shutil.copytree(self._template.location, self.location, symlinks=True) self._sitecustomize = self._template.sitecustomize @@ -71,14 +95,28 @@ def _create(self, clear: bool = False) -> None: else: # Create a new virtual environment. if self._venv_type == "virtualenv": - _virtualenv.cli_run( - [ - "--no-pip", - "--no-wheel", - "--no-setuptools", - os.fspath(self.location), - ], - ) + if LEGACY_VIRTUALENV: + subprocess.check_call( + [ + sys.executable, + "-m", + "virtualenv", + "--no-pip", + "--no-wheel", + "--no-setuptools", + os.fspath(self.location), + ] + ) + self._fix_legacy_virtualenv_site_module() + else: + _virtualenv.cli_run( + [ + "--no-pip", + "--no-wheel", + "--no-setuptools", + os.fspath(self.location), + ], + ) elif self._venv_type == "venv": builder = _venv.EnvBuilder() context = builder.ensure_directories(self.location) @@ -88,31 +126,68 @@ def _create(self, clear: bool = False) -> None: self.sitecustomize = self._sitecustomize self.user_site_packages = self._user_site_packages + def _fix_legacy_virtualenv_site_module(self) -> None: + # Patch `site.py` so user site work as expected. + site_py = self.lib / "site.py" + with open(site_py) as fp: + site_contents = fp.read() + for pattern, replace in ( + ( + # Ensure enabling user site does not result in adding + # the real site-packages' directory to `sys.path`. 
+ ("\ndef virtual_addsitepackages(known_paths):\n"), + ( + "\ndef virtual_addsitepackages(known_paths):\n" + " return known_paths\n" + ), + ), + ( + # Fix sites ordering: user site must be added before system. + ( + "\n paths_in_sys = addsitepackages(paths_in_sys)" + "\n paths_in_sys = addusersitepackages(paths_in_sys)\n" + ), + ( + "\n paths_in_sys = addusersitepackages(paths_in_sys)" + "\n paths_in_sys = addsitepackages(paths_in_sys)\n" + ), + ), + ): + assert pattern in site_contents + site_contents = site_contents.replace(pattern, replace) + with open(site_py, "w") as fp: + fp.write(site_contents) + # Make sure bytecode is up-to-date too. + assert compileall.compile_file(str(site_py), quiet=1, force=True) + def _customize_site(self) -> None: - # Enable user site (before system). - contents = textwrap.dedent( - f""" - import os, site, sys - if not os.environ.get('PYTHONNOUSERSITE', False): - site.ENABLE_USER_SITE = {self._user_site_packages} - # First, drop system-sites related paths. - original_sys_path = sys.path[:] - known_paths = set() - for path in site.getsitepackages(): - site.addsitedir(path, known_paths=known_paths) - system_paths = sys.path[len(original_sys_path):] - for path in system_paths: - if path in original_sys_path: - original_sys_path.remove(path) - sys.path = original_sys_path - # Second, add user-site. - if {self._user_site_packages}: - site.addsitedir(site.getusersitepackages()) - # Third, add back system-sites related paths. - for path in site.getsitepackages(): - site.addsitedir(path) - """ - ).strip() + if not LEGACY_VIRTUALENV or self._venv_type == "venv": + # Enable user site (before system). + contents = textwrap.dedent( + f""" + import os, site, sys + if not os.environ.get('PYTHONNOUSERSITE', False): + site.ENABLE_USER_SITE = {self._user_site_packages} + # First, drop system-sites related paths. 
+ original_sys_path = sys.path[:] + known_paths = set() + for path in site.getsitepackages(): + site.addsitedir(path, known_paths=known_paths) + system_paths = sys.path[len(original_sys_path):] + for path in system_paths: + if path in original_sys_path: + original_sys_path.remove(path) + sys.path = original_sys_path + # Second, add user-site. + if {self._user_site_packages}: + site.addsitedir(site.getusersitepackages()) + # Third, add back system-sites related paths. + for path in site.getsitepackages(): + site.addsitedir(path) + """ + ).strip() + else: + contents = "" if self._sitecustomize is not None: contents += "\n" + self._sitecustomize sitecustomize = self.site / "sitecustomize.py" @@ -159,7 +234,14 @@ def user_site_packages(self) -> bool: @user_site_packages.setter def user_site_packages(self, value: bool) -> None: self._user_site_packages = value - self._rewrite_pyvenv_cfg( - {"include-system-site-packages": str(bool(value)).lower()} - ) - self._customize_site() + if not LEGACY_VIRTUALENV or self._venv_type == "venv": + self._rewrite_pyvenv_cfg( + {"include-system-site-packages": str(bool(value)).lower()} + ) + self._customize_site() + else: + marker = self.lib / "no-global-site-packages.txt" + if self._user_site_packages: + marker.unlink() + else: + marker.touch() From 50e194f1070733d0af66904001a89e4d603387b4 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 27 Oct 2022 21:34:17 +0800 Subject: [PATCH 154/730] Selectively enable user site The modern virtual environment structure does not allow us to enable "fake user site" while disabling the global site, so we need to do more fine-grained configuration to correctly set up test environments for each test case. With this done, we can also properly support the stdlib venv ad the test environment backend, since it basically works identically with modern virtualenv. The incompatible_with_test_venv is thus removed. 
--- setup.cfg | 1 - tests/conftest.py | 12 ++- tests/functional/test_build_env.py | 2 +- tests/functional/test_freeze.py | 4 +- tests/functional/test_install.py | 4 +- tests/functional/test_install_reqs.py | 3 +- tests/functional/test_install_user.py | 9 +-- tests/functional/test_install_wheel.py | 3 +- tests/functional/test_list.py | 6 +- tests/functional/test_new_resolver_user.py | 17 ++--- tests/functional/test_uninstall_user.py | 2 +- tests/lib/venv.py | 85 ++++++++++++---------- 12 files changed, 70 insertions(+), 78 deletions(-) diff --git a/setup.cfg b/setup.cfg index dae2f21b10d..1502abfc86a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -63,7 +63,6 @@ xfail_strict = True markers = network: tests that need network incompatible_with_sysconfig - incompatible_with_test_venv incompatible_with_venv no_auto_tempdir_manager unit: unit tests diff --git a/tests/conftest.py b/tests/conftest.py index 44aa56026b6..46975b29beb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -108,10 +108,6 @@ def pytest_collection_modifyitems(config: Config, items: List[pytest.Function]) if item.get_closest_marker("network") is not None: item.add_marker(pytest.mark.flaky(reruns=3, reruns_delay=2)) - if item.get_closest_marker("incompatible_with_test_venv") and config.getoption( - "--use-venv" - ): - item.add_marker(pytest.mark.skip("Incompatible with test venv")) if ( item.get_closest_marker("incompatible_with_venv") and sys.prefix != sys.base_prefix @@ -474,9 +470,6 @@ def virtualenv_template( ): (venv.bin / exe).unlink() - # Enable user site packages. - venv.user_site_packages = True - # Rename original virtualenv directory to make sure # it's not reused by mistake from one of the copies. 
venv_template = tmpdir / "venv_template" @@ -742,3 +735,8 @@ def mock_server() -> Iterator[MockServer]: @pytest.fixture def proxy(request: pytest.FixtureRequest) -> str: return request.config.getoption("proxy") + + +@pytest.fixture +def enable_user_site(virtualenv: VirtualEnvironment) -> None: + virtualenv.user_site_packages = True diff --git a/tests/functional/test_build_env.py b/tests/functional/test_build_env.py index 869e8ad921d..93a6b930f66 100644 --- a/tests/functional/test_build_env.py +++ b/tests/functional/test_build_env.py @@ -204,7 +204,7 @@ def test_build_env_overlay_prefix_has_priority(script: PipTestEnvironment) -> No assert result.stdout.strip() == "2.0", str(result) -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_build_env_isolation(script: PipTestEnvironment) -> None: # Create dummy `pkg` wheel. diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index 535581121ff..49b362d7e96 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -862,7 +862,7 @@ def test_freeze_with_requirement_option_package_repeated_multi_file( @pytest.mark.network -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_freeze_user( script: PipTestEnvironment, virtualenv: VirtualEnvironment, data: TestData ) -> None: @@ -900,7 +900,7 @@ def test_freeze_path(tmpdir: Path, script: PipTestEnvironment, data: TestData) - @pytest.mark.network -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_freeze_path_exclude_user( tmpdir: Path, script: PipTestEnvironment, data: TestData ) -> None: diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 876f2e12a7c..f611372685f 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -171,7 +171,7 @@ def test_pep518_allows_missing_requires( assert result.files_created 
-@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_pep518_with_user_pip( script: PipTestEnvironment, pip_src: Path, data: TestData, common_wheels: Path ) -> None: @@ -2106,7 +2106,7 @@ def test_target_install_ignores_distutils_config_install_prefix( result.did_not_create(relative_script_base) -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_user_config_accepted(script: PipTestEnvironment) -> None: # user set in the config file is parsed as 0/1 instead of True/False. # Check that this doesn't cause a problem. diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 19c526aab09..14e1056ae7a 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -305,8 +305,7 @@ def test_install_local_with_subdirectory(script: PipTestEnvironment) -> None: result.assert_installed("version_subpkg.py", editable=False) -@pytest.mark.incompatible_with_test_venv -@pytest.mark.usefixtures("with_wheel") +@pytest.mark.usefixtures("enable_user_site", "with_wheel") def test_wheel_user_with_prefix_in_pydistutils_cfg( script: PipTestEnvironment, data: TestData ) -> None: diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py index e34b35431dc..c960d0de4f9 100644 --- a/tests/functional/test_install_user.py +++ b/tests/functional/test_install_user.py @@ -35,9 +35,9 @@ def dist_in_site_packages(dist): ) +@pytest.mark.usefixtures("enable_user_site") class Tests_UserSite: @pytest.mark.network - @pytest.mark.incompatible_with_test_venv def test_reset_env_system_site_packages_usersite( self, script: PipTestEnvironment ) -> None: @@ -57,7 +57,6 @@ def test_reset_env_system_site_packages_usersite( @pytest.mark.xfail @pytest.mark.network @need_svn - @pytest.mark.incompatible_with_test_venv def test_install_subversion_usersite_editable_with_distribute( self, script: PipTestEnvironment, tmpdir: Path 
) -> None: @@ -77,7 +76,6 @@ def test_install_subversion_usersite_editable_with_distribute( ) result.assert_installed("INITools", use_user_site=True) - @pytest.mark.incompatible_with_test_venv @pytest.mark.usefixtures("with_wheel") def test_install_from_current_directory_into_usersite( self, script: PipTestEnvironment, data: TestData @@ -123,7 +121,6 @@ def test_install_user_venv_nositepkgs_fails( ) @pytest.mark.network - @pytest.mark.incompatible_with_test_venv def test_install_user_conflict_in_usersite( self, script: PipTestEnvironment ) -> None: @@ -148,7 +145,6 @@ def test_install_user_conflict_in_usersite( result2.did_create(egg_info_folder) assert not isfile(initools_v3_file), initools_v3_file - @pytest.mark.incompatible_with_test_venv def test_install_user_conflict_in_globalsite( self, virtualenv: VirtualEnvironment, script: PipTestEnvironment ) -> None: @@ -191,7 +187,6 @@ def test_install_user_conflict_in_globalsite( assert isdir(dist_info_folder) assert isdir(initools_folder) - @pytest.mark.incompatible_with_test_venv def test_upgrade_user_conflict_in_globalsite( self, virtualenv: VirtualEnvironment, script: PipTestEnvironment ) -> None: @@ -235,7 +230,6 @@ def test_upgrade_user_conflict_in_globalsite( assert isdir(dist_info_folder), result2.stdout assert isdir(initools_folder) - @pytest.mark.incompatible_with_test_venv def test_install_user_conflict_in_globalsite_and_usersite( self, virtualenv: VirtualEnvironment, script: PipTestEnvironment ) -> None: @@ -294,7 +288,6 @@ def test_install_user_conflict_in_globalsite_and_usersite( assert isdir(initools_folder) @pytest.mark.network - @pytest.mark.incompatible_with_test_venv def test_install_user_in_global_virtualenv_with_conflict_fails( self, script: PipTestEnvironment ) -> None: diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py index e988e741962..49c2d1d6d7c 100644 --- a/tests/functional/test_install_wheel.py +++ b/tests/functional/test_install_wheel.py @@ -406,8 
+406,7 @@ def test_wheel_record_lines_have_updated_hash_for_scripts( ] -@pytest.mark.incompatible_with_test_venv -@pytest.mark.usefixtures("with_wheel") +@pytest.mark.usefixtures("enable_user_site", "with_wheel") def test_install_user_wheel( script: PipTestEnvironment, shared_data: TestData, tmpdir: Path ) -> None: diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index c7fdec2f2fc..d05fe9dcea5 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -129,7 +129,7 @@ def test_multiple_exclude_and_normalization( @pytest.mark.network -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_user_flag(script: PipTestEnvironment, data: TestData) -> None: """ Test the behavior of --user flag in the list command @@ -144,7 +144,7 @@ def test_user_flag(script: PipTestEnvironment, data: TestData) -> None: @pytest.mark.network -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_user_columns_flag(script: PipTestEnvironment, data: TestData) -> None: """ Test the behavior of --user --format=columns flags in the list command @@ -656,7 +656,7 @@ def test_list_path(tmpdir: Path, script: PipTestEnvironment, data: TestData) -> assert {"name": "simple", "version": "2.0"} in json_result -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_list_path_exclude_user( tmpdir: Path, script: PipTestEnvironment, data: TestData ) -> None: diff --git a/tests/functional/test_new_resolver_user.py b/tests/functional/test_new_resolver_user.py index 2f9fb65ba5a..4578c311468 100644 --- a/tests/functional/test_new_resolver_user.py +++ b/tests/functional/test_new_resolver_user.py @@ -7,7 +7,7 @@ from tests.lib.venv import VirtualEnvironment -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_new_resolver_install_user(script: PipTestEnvironment) -> None: 
create_basic_wheel_for_package(script, "base", "0.1.0") result = script.pip( @@ -22,7 +22,7 @@ def test_new_resolver_install_user(script: PipTestEnvironment) -> None: result.did_create(script.user_site / "base") -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_new_resolver_install_user_satisfied_by_global_site( script: PipTestEnvironment, ) -> None: @@ -53,7 +53,7 @@ def test_new_resolver_install_user_satisfied_by_global_site( result.did_not_create(script.user_site / "base") -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_new_resolver_install_user_conflict_in_user_site( script: PipTestEnvironment, ) -> None: @@ -91,7 +91,7 @@ def test_new_resolver_install_user_conflict_in_user_site( result.did_not_create(base_2_dist_info) -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") def test_new_resolver_install_user_in_virtualenv_with_conflict_fails( script: PipTestEnvironment, ) -> None: @@ -141,8 +141,7 @@ def dist_in_site_packages(dist): ) -@pytest.mark.incompatible_with_test_venv -@pytest.mark.usefixtures("patch_dist_in_site_packages") +@pytest.mark.usefixtures("enable_user_site", "patch_dist_in_site_packages") def test_new_resolver_install_user_reinstall_global_site( script: PipTestEnvironment, ) -> None: @@ -177,8 +176,7 @@ def test_new_resolver_install_user_reinstall_global_site( assert "base" in site_packages_content -@pytest.mark.incompatible_with_test_venv -@pytest.mark.usefixtures("patch_dist_in_site_packages") +@pytest.mark.usefixtures("enable_user_site", "patch_dist_in_site_packages") def test_new_resolver_install_user_conflict_in_global_site( script: PipTestEnvironment, ) -> None: @@ -215,8 +213,7 @@ def test_new_resolver_install_user_conflict_in_global_site( assert "base-1.0.0.dist-info" in site_packages_content -@pytest.mark.incompatible_with_test_venv -@pytest.mark.usefixtures("patch_dist_in_site_packages") 
+@pytest.mark.usefixtures("enable_user_site", "patch_dist_in_site_packages") def test_new_resolver_install_user_conflict_in_global_and_user_sites( script: PipTestEnvironment, ) -> None: diff --git a/tests/functional/test_uninstall_user.py b/tests/functional/test_uninstall_user.py index 1ef65dd1671..0bf2e6d4180 100644 --- a/tests/functional/test_uninstall_user.py +++ b/tests/functional/test_uninstall_user.py @@ -11,7 +11,7 @@ from tests.lib.wheel import make_wheel -@pytest.mark.incompatible_with_test_venv +@pytest.mark.usefixtures("enable_user_site") class Tests_UninstallUserSite: @pytest.mark.network def test_uninstall_from_usersite(self, script: PipTestEnvironment) -> None: diff --git a/tests/lib/venv.py b/tests/lib/venv.py index 42ec5082ad7..e65a3291230 100644 --- a/tests/lib/venv.py +++ b/tests/lib/venv.py @@ -20,9 +20,6 @@ VirtualEnvironmentType = str -LEGACY_VIRTUALENV = int(_virtualenv.__version__.split(".", 1)[0]) < 20 - - class VirtualEnvironment: """ An abstraction around virtual environments, currently it only uses @@ -34,7 +31,7 @@ def __init__( location: Path, template: Optional["VirtualEnvironment"] = None, venv_type: Optional[VirtualEnvironmentType] = None, - ): + ) -> None: self.location = location assert template is None or venv_type is None self._venv_type: VirtualEnvironmentType @@ -44,13 +41,18 @@ def __init__( self._venv_type = venv_type else: self._venv_type = "virtualenv" - assert self._venv_type in ("virtualenv", "venv") self._user_site_packages = False self._template = template self._sitecustomize: Optional[str] = None self._update_paths() self._create() + @property + def _legacy_virtualenv(self) -> bool: + if self._venv_type != "virtualenv": + return False + return int(_virtualenv.__version__.split(".", 1)[0]) < 20 + def __update_paths_legacy(self) -> None: home, lib, inc, bin = _virtualenv.path_locations(self.location) self.bin = Path(bin) @@ -63,7 +65,7 @@ def __update_paths_legacy(self) -> None: self.lib = Path(lib) def 
_update_paths(self) -> None: - if LEGACY_VIRTUALENV: + if self._legacy_virtualenv: self.__update_paths_legacy() return bases = { @@ -86,7 +88,11 @@ def _create(self, clear: bool = False) -> None: if self._template: # On Windows, calling `_virtualenv.path_locations(target)` # will have created the `target` directory... - if LEGACY_VIRTUALENV and sys.platform == "win32" and self.location.exists(): + if ( + self._legacy_virtualenv + and sys.platform == "win32" + and self.location.exists() + ): self.location.rmdir() # Clone virtual environment from template. shutil.copytree(self._template.location, self.location, symlinks=True) @@ -94,35 +100,36 @@ def _create(self, clear: bool = False) -> None: self._user_site_packages = self._template.user_site_packages else: # Create a new virtual environment. - if self._venv_type == "virtualenv": - if LEGACY_VIRTUALENV: - subprocess.check_call( - [ - sys.executable, - "-m", - "virtualenv", - "--no-pip", - "--no-wheel", - "--no-setuptools", - os.fspath(self.location), - ] - ) - self._fix_legacy_virtualenv_site_module() - else: - _virtualenv.cli_run( - [ - "--no-pip", - "--no-wheel", - "--no-setuptools", - os.fspath(self.location), - ], - ) + if self._legacy_virtualenv: + subprocess.check_call( + [ + sys.executable, + "-m", + "virtualenv", + "--no-pip", + "--no-wheel", + "--no-setuptools", + os.fspath(self.location), + ] + ) + self._fix_legacy_virtualenv_site_module() + elif self._venv_type == "virtualenv": + _virtualenv.cli_run( + [ + "--no-pip", + "--no-wheel", + "--no-setuptools", + os.fspath(self.location), + ], + ) elif self._venv_type == "venv": builder = _venv.EnvBuilder() context = builder.ensure_directories(self.location) builder.create_configuration(context) builder.setup_python(context) self.site.mkdir(parents=True, exist_ok=True) + else: + raise RuntimeError(f"Unsupported venv type {self._venv_type!r}") self.sitecustomize = self._sitecustomize self.user_site_packages = self._user_site_packages @@ -161,7 +168,9 @@ def 
_fix_legacy_virtualenv_site_module(self) -> None: assert compileall.compile_file(str(site_py), quiet=1, force=True) def _customize_site(self) -> None: - if not LEGACY_VIRTUALENV or self._venv_type == "venv": + if self._legacy_virtualenv: + contents = "" + else: # Enable user site (before system). contents = textwrap.dedent( f""" @@ -186,8 +195,6 @@ def _customize_site(self) -> None: site.addsitedir(path) """ ).strip() - else: - contents = "" if self._sitecustomize is not None: contents += "\n" + self._sitecustomize sitecustomize = self.site / "sitecustomize.py" @@ -234,14 +241,14 @@ def user_site_packages(self) -> bool: @user_site_packages.setter def user_site_packages(self, value: bool) -> None: self._user_site_packages = value - if not LEGACY_VIRTUALENV or self._venv_type == "venv": - self._rewrite_pyvenv_cfg( - {"include-system-site-packages": str(bool(value)).lower()} - ) - self._customize_site() - else: + if self._legacy_virtualenv: marker = self.lib / "no-global-site-packages.txt" if self._user_site_packages: marker.unlink() else: marker.touch() + else: + self._rewrite_pyvenv_cfg( + {"include-system-site-packages": str(bool(value)).lower()} + ) + self._customize_site() From 2c195f9c2ccf3a040cad185f5cb4f63795501f8e Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 26 Oct 2022 16:24:19 +0800 Subject: [PATCH 155/730] Fix multi-digit version in entry point replacement Previously, the special case to generate 'pip' and 'easy_install' entry points with the correct Python version (e.g. 'pip3.9' on Python 3.9) only accounted for single-digit version segments, and did not work correctly on Python 3.10 and up. This was missed when Python 3.10 was released because we (accidentally) generated wheels that did not need any such replacements, but was exposed in CPython 3.11.0 since it bundled pip 22.3 generated against Python 3.10. 
--- news/11547.bugfix.rst | 3 ++ src/pip/_internal/operations/install/wheel.py | 4 +-- tests/unit/test_wheel.py | 35 ++++++++++++++++++- 3 files changed, 39 insertions(+), 3 deletions(-) create mode 100644 news/11547.bugfix.rst diff --git a/news/11547.bugfix.rst b/news/11547.bugfix.rst new file mode 100644 index 00000000000..05aa2d3176a --- /dev/null +++ b/news/11547.bugfix.rst @@ -0,0 +1,3 @@ +Fix entry point generation of ``pip.X``, ``pipX.Y``, and ``easy_install-X.Y`` +to correctly account for multi-digit Python version segments (e.g. the "11" +part 3.11). diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py index 1650d59a374..c79941398a2 100644 --- a/src/pip/_internal/operations/install/wheel.py +++ b/src/pip/_internal/operations/install/wheel.py @@ -325,7 +325,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]: scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") # Delete any other versioned pip entry points - pip_ep = [k for k in console if re.match(r"pip(\d(\.\d)?)?$", k)] + pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)] for k in pip_ep: del console[k] easy_install_script = console.pop("easy_install", None) @@ -340,7 +340,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]: ) # Delete any other versioned easy_install entry points easy_install_ep = [ - k for k in console if re.match(r"easy_install(-\d\.\d)?$", k) + k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k) ] for k in easy_install_ep: del console[k] diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py index 6aec64702d2..d93d458d146 100644 --- a/tests/unit/test_wheel.py +++ b/tests/unit/test_wheel.py @@ -3,6 +3,7 @@ import logging import os import pathlib +import sys import textwrap from email import message_from_string from pathlib import Path @@ -22,7 +23,11 @@ from pip._internal.models.scheme import Scheme from 
pip._internal.operations.build.wheel_legacy import get_legacy_build_wheel_path from pip._internal.operations.install import wheel -from pip._internal.operations.install.wheel import InstalledCSVRow, RecordPath +from pip._internal.operations.install.wheel import ( + InstalledCSVRow, + RecordPath, + get_console_script_specs, +) from pip._internal.utils.compat import WINDOWS from pip._internal.utils.misc import hash_file from pip._internal.utils.unpacking import unpack_file @@ -681,3 +686,31 @@ def test_rehash(self, tmpdir: Path) -> None: h, length = wheel.rehash(os.fspath(self.test_file)) assert length == str(self.test_file_len) assert h == self.test_file_hash_encoded + + +def test_get_console_script_specs_replaces_python_version( + monkeypatch: pytest.MonkeyPatch, +) -> None: + # Fake Python version. + monkeypatch.setattr(sys, "version_info", (10, 11)) + + entry_points = { + "pip": "real_pip", + "pip99": "whatever", + "pip99.88": "whatever", + "easy_install": "real_easy_install", + "easy_install-99.88": "whatever", + # The followings shouldn't be replaced. 
+ "not_pip_or_easy_install-99": "whatever", + "not_pip_or_easy_install-99.88": "whatever", + } + specs = get_console_script_specs(entry_points) + assert specs == [ + "pip = real_pip", + "pip10 = real_pip", + "pip10.11 = real_pip", + "easy_install = real_easy_install", + "easy_install-10.11 = real_easy_install", + "not_pip_or_easy_install-99 = whatever", + "not_pip_or_easy_install-99.88 = whatever", + ] From 8f34d86a1e787caeba0805669a73810cd6d8b448 Mon Sep 17 00:00:00 2001 From: Holly Stotelmyer Date: Thu, 27 Oct 2022 16:28:21 -0500 Subject: [PATCH 156/730] Add section clarifying pip version support (#11556) Co-authored-by: Paul Moore --- docs/html/development/release-process.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/html/development/release-process.rst b/docs/html/development/release-process.rst index 188d3e87bec..acff1204e7b 100644 --- a/docs/html/development/release-process.rst +++ b/docs/html/development/release-process.rst @@ -65,6 +65,13 @@ their merits. ``pip._internal.utils.deprecation.deprecated``. The function is not a part of pip's public API. +Supported Versions +================== + +The latest version of the pip is the only supported version, previous +versions should be considered unsupported. Users are encouraged to make +regular updates to their version of pip in order to remain supported. + .. 
_`Python 2 Support`: Python 2 Support From d6e333fb636424d7dca15f4e8aa61cdaab9cdd31 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sat, 29 Oct 2022 00:37:04 +0800 Subject: [PATCH 157/730] Add 3.11 to CI (#11550) --- .github/workflows/ci.yml | 82 +++++++--------------- .github/workflows/news-file.yml | 2 +- noxfile.py | 2 +- tests/functional/test_install_user.py | 33 +++++---- tests/functional/test_new_resolver_user.py | 33 --------- 5 files changed, 50 insertions(+), 102 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c9cfb7b8e98..7b48d944329 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,8 +26,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.x" - run: pip install nox - run: nox -s docs @@ -60,8 +62,10 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.x" - name: Set up git credentials run: | git config --global user.email "pypa-dev@googlegroups.com" @@ -82,8 +86,10 @@ jobs: github.event_name != 'pull_request' steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.x" - run: pip install nox - run: nox -s vendoring @@ -103,14 +109,15 @@ jobs: matrix: os: [Ubuntu, MacOS] python: - - 3.7 - - 3.8 - - 3.9 + - "3.7" + - "3.8" + - "3.9" - "3.10" + - "3.11" steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} @@ -151,16 +158,17 @@ jobs: matrix: os: [Windows] python: - - 3.7 + - "3.7" # Commented out, since Windows tests are expensively slow. 
- # - 3.8 - # - 3.9 - - "3.10" + # - "3.8" + # - "3.9" + # - "3.10" + - "3.11" group: [1, 2] steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} @@ -221,8 +229,8 @@ jobs: github.event_name != 'pull_request' steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 with: python-version: "3.10" @@ -240,41 +248,6 @@ jobs: --durations=5 --use-zipapp - # TODO: Remove this when we add Python 3.11 to CI. - tests-importlib-metadata: - name: tests for importlib.metadata backend - runs-on: ubuntu-latest - env: - _PIP_USE_IMPORTLIB_METADATA: 'true' - - needs: [packaging, determine-changes] - if: >- - needs.determine-changes.outputs.tests == 'true' || - github.event_name != 'pull_request' - - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 - with: - python-version: '3.10' - - - name: Install Ubuntu dependencies - run: sudo apt-get install bzr - - - run: pip install nox - - - name: Run unit tests - run: >- - nox -s test-3.10 -- - -m unit - --verbose --numprocesses auto --showlocals - - name: Run integration tests - run: >- - nox -s test-3.10 -- - -m integration - --verbose --numprocesses auto --showlocals - --durations=5 - check: # This job does nothing and is only used for the branch protection if: always() @@ -285,7 +258,6 @@ jobs: - tests-unix - tests-windows - tests-zipapp - - tests-importlib-metadata - vendoring runs-on: ubuntu-latest diff --git a/.github/workflows/news-file.yml b/.github/workflows/news-file.yml index da7119a5573..371e12fd755 100644 --- a/.github/workflows/news-file.yml +++ b/.github/workflows/news-file.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: # `towncrier check` runs `git diff --name-only origin/main...`, which # needs a non-shallow clone. 
diff --git a/noxfile.py b/noxfile.py index 7692bf4b507..1345c417d69 100644 --- a/noxfile.py +++ b/noxfile.py @@ -66,7 +66,7 @@ def should_update_common_wheels() -> bool: # ----------------------------------------------------------------------------- # Development Commands # ----------------------------------------------------------------------------- -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "pypy3"]) +@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "pypy3"]) def test(session: nox.Session) -> None: # Get the common wheels. if should_update_common_wheels(): diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py index c960d0de4f9..3207f0a45bf 100644 --- a/tests/functional/test_install_user.py +++ b/tests/functional/test_install_user.py @@ -287,7 +287,6 @@ def test_install_user_conflict_in_globalsite_and_usersite( assert isdir(dist_info_folder) assert isdir(initools_folder) - @pytest.mark.network def test_install_user_in_global_virtualenv_with_conflict_fails( self, script: PipTestEnvironment ) -> None: @@ -295,27 +294,37 @@ def test_install_user_in_global_virtualenv_with_conflict_fails( Test user install in --system-site-packages virtualenv with conflict in site fails. 
""" + create_basic_wheel_for_package(script, "pkg", "0.1") + create_basic_wheel_for_package(script, "pkg", "0.2") - script.pip("install", "INITools==0.2") + script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + "pkg==0.2", + ) result2 = script.pip( "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, "--user", - "INITools==0.1", + "pkg==0.1", expect_error=True, ) resultp = script.run( "python", "-c", - "import pkg_resources; print(pkg_resources.get_distribution" - "('initools').location)", + "from pip._internal.metadata import get_default_environment; " + "print(get_default_environment().get_distribution('pkg').location)", ) dist_location = resultp.stdout.strip() + assert ( - "Will not install to the user site because it will lack sys.path " - "precedence to {name} in {location}".format( - name="INITools", - location=dist_location, - ) - in result2.stderr - ) + f"Will not install to the user site because it will lack sys.path " + f"precedence to pkg in {dist_location}" + ) in result2.stderr diff --git a/tests/functional/test_new_resolver_user.py b/tests/functional/test_new_resolver_user.py index 4578c311468..4cd06311348 100644 --- a/tests/functional/test_new_resolver_user.py +++ b/tests/functional/test_new_resolver_user.py @@ -91,39 +91,6 @@ def test_new_resolver_install_user_conflict_in_user_site( result.did_not_create(base_2_dist_info) -@pytest.mark.usefixtures("enable_user_site") -def test_new_resolver_install_user_in_virtualenv_with_conflict_fails( - script: PipTestEnvironment, -) -> None: - create_basic_wheel_for_package(script, "base", "1.0.0") - create_basic_wheel_for_package(script, "base", "2.0.0") - - script.pip( - "install", - "--no-cache-dir", - "--no-index", - "--find-links", - script.scratch_path, - "base==2.0.0", - ) - result = script.pip( - "install", - "--no-cache-dir", - "--no-index", - "--find-links", - script.scratch_path, - "--user", - "base==1.0.0", - 
expect_error=True, - ) - - error_message = ( - "Will not install to the user site because it will lack sys.path " - "precedence to base in {}" - ).format(os.path.normcase(script.site_packages_path)) - assert error_message in result.stderr - - @pytest.fixture() def patch_dist_in_site_packages(virtualenv: VirtualEnvironment) -> None: # Since the tests are run from a virtualenv, and to avoid the "Will not From bfaebd96b985fc39113fa489d580910b78261291 Mon Sep 17 00:00:00 2001 From: Oliver Freund Date: Fri, 28 Oct 2022 22:59:12 -0500 Subject: [PATCH 158/730] changed the description of --install_options, since the option is now deprecated --- news/10265.bugfix.rst | 1 + src/pip/_internal/cli/cmdoptions.py | 7 ++----- 2 files changed, 3 insertions(+), 5 deletions(-) create mode 100644 news/10265.bugfix.rst diff --git a/news/10265.bugfix.rst b/news/10265.bugfix.rst new file mode 100644 index 00000000000..477eb9753fb --- /dev/null +++ b/news/10265.bugfix.rst @@ -0,0 +1 @@ +Fixed the description of the option "--install-options" in the documentation diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index b4e2560dea2..d62954cca7d 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -825,11 +825,8 @@ def _handle_config_settings( dest="install_options", action="append", metavar="options", - help="Extra arguments to be supplied to the setup.py install " - 'command (use like --install-option="--install-scripts=/usr/local/' - 'bin"). Use multiple --install-option options to pass multiple ' - "options to setup.py install. If you are using an option with a " - "directory path, be sure to use absolute path.", + help="This option is deprecated. Using this option with location-changing options may cause unexpected behavior. 
" + "Use pip-level options like --user, --prefix, --root, and --target", ) build_options: Callable[..., Option] = partial( From 1304e3e8d4c9ba2acfa06a37ecbfbd2eab2b0554 Mon Sep 17 00:00:00 2001 From: Oliver Freund Date: Sat, 29 Oct 2022 17:21:51 -0500 Subject: [PATCH 159/730] Fixed lint errors (line too long) --- src/pip/_internal/cli/cmdoptions.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index d62954cca7d..661c489c73e 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -825,8 +825,9 @@ def _handle_config_settings( dest="install_options", action="append", metavar="options", - help="This option is deprecated. Using this option with location-changing options may cause unexpected behavior. " - "Use pip-level options like --user, --prefix, --root, and --target", + help="This option is deprecated. Using this option with location-changing " + "options may cause unexpected behavior. " + "Use pip-level options like --user, --prefix, --root, and --target.", ) build_options: Callable[..., Option] = partial( From 30b4cff198b1dbf004f5ac069d50965c897803dc Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 3 Nov 2022 15:20:47 +0800 Subject: [PATCH 160/730] Fix typos Co-authored-by: Ed Morley <501702+edmorley@users.noreply.github.com> --- news/11547.bugfix.rst | 2 +- tests/unit/test_wheel.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/news/11547.bugfix.rst b/news/11547.bugfix.rst index 05aa2d3176a..29d566a23ff 100644 --- a/news/11547.bugfix.rst +++ b/news/11547.bugfix.rst @@ -1,3 +1,3 @@ Fix entry point generation of ``pip.X``, ``pipX.Y``, and ``easy_install-X.Y`` to correctly account for multi-digit Python version segments (e.g. the "11" -part 3.11). +part of 3.11). 
diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py index d93d458d146..c5a8f3be4f3 100644 --- a/tests/unit/test_wheel.py +++ b/tests/unit/test_wheel.py @@ -700,7 +700,7 @@ def test_get_console_script_specs_replaces_python_version( "pip99.88": "whatever", "easy_install": "real_easy_install", "easy_install-99.88": "whatever", - # The followings shouldn't be replaced. + # The following shouldn't be replaced. "not_pip_or_easy_install-99": "whatever", "not_pip_or_easy_install-99.88": "whatever", } From 22fd64ac0b68782acb308e2484b553a2ecadff78 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Thu, 3 Nov 2022 11:50:58 +0000 Subject: [PATCH 161/730] Merge pull request #11547 from uranusjr/entry-point-python-version-replacement-multi-digit Fix multi-digit version in entry point replacement --- news/11547.bugfix.rst | 3 ++ src/pip/_internal/operations/install/wheel.py | 4 +-- tests/unit/test_wheel.py | 35 ++++++++++++++++++- 3 files changed, 39 insertions(+), 3 deletions(-) create mode 100644 news/11547.bugfix.rst diff --git a/news/11547.bugfix.rst b/news/11547.bugfix.rst new file mode 100644 index 00000000000..29d566a23ff --- /dev/null +++ b/news/11547.bugfix.rst @@ -0,0 +1,3 @@ +Fix entry point generation of ``pip.X``, ``pipX.Y``, and ``easy_install-X.Y`` +to correctly account for multi-digit Python version segments (e.g. the "11" +part of 3.11). 
diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py index 1650d59a374..c79941398a2 100644 --- a/src/pip/_internal/operations/install/wheel.py +++ b/src/pip/_internal/operations/install/wheel.py @@ -325,7 +325,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]: scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}") # Delete any other versioned pip entry points - pip_ep = [k for k in console if re.match(r"pip(\d(\.\d)?)?$", k)] + pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)] for k in pip_ep: del console[k] easy_install_script = console.pop("easy_install", None) @@ -340,7 +340,7 @@ def get_console_script_specs(console: Dict[str, str]) -> List[str]: ) # Delete any other versioned easy_install entry points easy_install_ep = [ - k for k in console if re.match(r"easy_install(-\d\.\d)?$", k) + k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k) ] for k in easy_install_ep: del console[k] diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py index 6aec64702d2..c5a8f3be4f3 100644 --- a/tests/unit/test_wheel.py +++ b/tests/unit/test_wheel.py @@ -3,6 +3,7 @@ import logging import os import pathlib +import sys import textwrap from email import message_from_string from pathlib import Path @@ -22,7 +23,11 @@ from pip._internal.models.scheme import Scheme from pip._internal.operations.build.wheel_legacy import get_legacy_build_wheel_path from pip._internal.operations.install import wheel -from pip._internal.operations.install.wheel import InstalledCSVRow, RecordPath +from pip._internal.operations.install.wheel import ( + InstalledCSVRow, + RecordPath, + get_console_script_specs, +) from pip._internal.utils.compat import WINDOWS from pip._internal.utils.misc import hash_file from pip._internal.utils.unpacking import unpack_file @@ -681,3 +686,31 @@ def test_rehash(self, tmpdir: Path) -> None: h, length = 
wheel.rehash(os.fspath(self.test_file)) assert length == str(self.test_file_len) assert h == self.test_file_hash_encoded + + +def test_get_console_script_specs_replaces_python_version( + monkeypatch: pytest.MonkeyPatch, +) -> None: + # Fake Python version. + monkeypatch.setattr(sys, "version_info", (10, 11)) + + entry_points = { + "pip": "real_pip", + "pip99": "whatever", + "pip99.88": "whatever", + "easy_install": "real_easy_install", + "easy_install-99.88": "whatever", + # The following shouldn't be replaced. + "not_pip_or_easy_install-99": "whatever", + "not_pip_or_easy_install-99.88": "whatever", + } + specs = get_console_script_specs(entry_points) + assert specs == [ + "pip = real_pip", + "pip10 = real_pip", + "pip10.11 = real_pip", + "easy_install = real_easy_install", + "easy_install-10.11 = real_easy_install", + "not_pip_or_easy_install-99 = whatever", + "not_pip_or_easy_install-99.88 = whatever", + ] From 1463081f10de6bfad81afe0d68272e7c3bedbadf Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 5 Nov 2022 15:25:43 +0000 Subject: [PATCH 162/730] Bump for release --- NEWS.rst | 11 +++++++++++ news/11547.bugfix.rst | 3 --- src/pip/__init__.py | 2 +- 3 files changed, 12 insertions(+), 4 deletions(-) delete mode 100644 news/11547.bugfix.rst diff --git a/NEWS.rst b/NEWS.rst index ec16a384eb2..ff89e5cdf54 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,17 @@ .. towncrier release notes start +22.3.1 (2022-11-05) +=================== + +Bug Fixes +--------- + +- Fix entry point generation of ``pip.X``, ``pipX.Y``, and ``easy_install-X.Y`` + to correctly account for multi-digit Python version segments (e.g. the "11" + part of 3.11). 
(`#11547 `_) + + 22.3 (2022-10-15) ================= diff --git a/news/11547.bugfix.rst b/news/11547.bugfix.rst deleted file mode 100644 index 29d566a23ff..00000000000 --- a/news/11547.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix entry point generation of ``pip.X``, ``pipX.Y``, and ``easy_install-X.Y`` -to correctly account for multi-digit Python version segments (e.g. the "11" -part of 3.11). diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 168abe86186..5563b5d55c7 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "22.3" +__version__ = "22.3.1" def main(args: Optional[List[str]] = None) -> int: From 9aabb33b04b83f7c0ab289744017ab713d350508 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 5 Nov 2022 15:25:43 +0000 Subject: [PATCH 163/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 5563b5d55c7..adc0eb10d00 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "22.3.1" +__version__ = "23.0.dev0" def main(args: Optional[List[str]] = None) -> int: From ce3fef7d98baaa29938efb8a380563f80eb65370 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 5 Nov 2022 15:41:33 +0000 Subject: [PATCH 164/730] Delete news fragments from release 22.3.1 --- news/7d576457-c1fe-4d64-88a4-f775a2f6995d.trivial.rst | 0 news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst | 5 ----- 2 files changed, 5 deletions(-) delete mode 100644 news/7d576457-c1fe-4d64-88a4-f775a2f6995d.trivial.rst delete mode 100644 news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst diff --git a/news/7d576457-c1fe-4d64-88a4-f775a2f6995d.trivial.rst b/news/7d576457-c1fe-4d64-88a4-f775a2f6995d.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst 
b/news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst deleted file mode 100644 index 720c98885e6..00000000000 --- a/news/B5BABEE8-4FFA-4D62-87AB-FE7450141ECF.trivial.rst +++ /dev/null @@ -1,5 +0,0 @@ -Git 2.38.1 patched CVE-2022-39253 by disaling automated fetch against a -``file:`` repository. This breaks git submodule, which is used by a pip test. -Information on how projects relying on automated fetch should configure git -correctly after this change is lacking, so the test is disabled for now until -someone can come up with a better solution. From b87ddb95e8748fbc41927f592887526fca857b89 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 19:32:36 +0000 Subject: [PATCH 165/730] Add an interface to allow calling system `keyring` --- src/pip/_internal/network/auth.py | 64 +++++++++++++++++++++++++++++-- 1 file changed, 60 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index ca42798bd95..5107c7c1317 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -4,8 +4,10 @@ providing credentials in the context of network requests. 
""" +import shutil +import subprocess import urllib.parse -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Dict, List, NamedTuple, Optional, Tuple from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth from pip._vendor.requests.models import Request, Response @@ -23,11 +25,61 @@ logger = getLogger(__name__) -Credentials = Tuple[str, str, str] + +class Credentials(NamedTuple): + service_name: str + username: str + password: str + + +class KeyRingCredential(NamedTuple): + username: str + password: str + + +class KeyRingCli: + """Mirror the parts of keyring's API which pip uses + + Instead of calling the keyring package installed alongside pip + we call keyring on the command line which will enable pip to + use which ever installation of keyring is available first in + PATH. + """ + + @staticmethod + def _quote(string: Optional[str]) -> str: + return f"'{string}'" + + def get_credential( + self, service_name: str, username: Optional[str] + ) -> Optional[KeyRingCredential]: + cmd = ["keyring", "get", self._quote(service_name), self._quote(username)] + res = subprocess.run(cmd) + if res.returncode: + return None + return KeyRingCredential(username=username, password=res.stdout) + + def set_password(self, service_name: str, username: str, password: str) -> None: + cmd = [ + "echo", + self._quote(password), + "|", + "keyring", + "set", + self._quote(service_name), + self._quote(username), + ] + res = subprocess.run(cmd) + if res.returncode: + raise RuntimeError(res.stderr) + return None + try: import keyring except ImportError: + if shutil.which("keyring") is not None: + keyring = KeyRingCli() keyring = None # type: ignore[assignment] except Exception as exc: logger.warning( @@ -276,7 +328,11 @@ def handle_401(self, resp: Response, **kwargs: Any) -> Response: # Prompt to save the password to keyring if save and self._should_save_password_to_keyring(): - self._credentials_to_save = (parsed.netloc, username, password) + 
self._credentials_to_save = Credentials( + service_name=parsed.netloc, + username=username, + password=password, + ) # Consume content and release the original connection to allow our new # request to reuse the same one. @@ -318,6 +374,6 @@ def save_credentials(self, resp: Response, **kwargs: Any) -> None: if creds and resp.status_code < 400: try: logger.info("Saving credentials to keyring") - keyring.set_password(*creds) + keyring.set_password(creds.service_name, creds.username, creds.password) except Exception: logger.exception("Failed to save credentials") From edc588c48f4c87c6d5ee1398698a08ebb3316a7b Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 19:40:59 +0000 Subject: [PATCH 166/730] Add news --- news/11589.feature.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 news/11589.feature.rst diff --git a/news/11589.feature.rst b/news/11589.feature.rst new file mode 100644 index 00000000000..d01a564b631 --- /dev/null +++ b/news/11589.feature.rst @@ -0,0 +1,2 @@ +Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring`` +installed using ``pipx`` to be used by ``pip``. From 4cbae5b1a016924fb62e0b2c7b812430fc62edf0 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 20:28:59 +0000 Subject: [PATCH 167/730] Improve cli interface --- src/pip/_internal/network/auth.py | 35 ++++++++++++------------------- 1 file changed, 13 insertions(+), 22 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 5107c7c1317..0164a46170b 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -33,7 +33,7 @@ class Credentials(NamedTuple): class KeyRingCredential(NamedTuple): - username: str + username: Optional[str] password: str @@ -46,30 +46,21 @@ class KeyRingCli: PATH. 
""" - @staticmethod - def _quote(string: Optional[str]) -> str: - return f"'{string}'" - + @classmethod def get_credential( - self, service_name: str, username: Optional[str] + cls, service_name: str, username: Optional[str] ) -> Optional[KeyRingCredential]: - cmd = ["keyring", "get", self._quote(service_name), self._quote(username)] - res = subprocess.run(cmd) + cmd = ["keyring", "get", service_name, str(username)] + res = subprocess.run(cmd, capture_output=True) if res.returncode: return None - return KeyRingCredential(username=username, password=res.stdout) - - def set_password(self, service_name: str, username: str, password: str) -> None: - cmd = [ - "echo", - self._quote(password), - "|", - "keyring", - "set", - self._quote(service_name), - self._quote(username), - ] - res = subprocess.run(cmd) + password = res.stdout.decode().strip("\n") + return KeyRingCredential(username=username, password=password) + + @classmethod + def set_password(cls, service_name: str, username: str, password: str) -> None: + cmd = ["keyring", "set", service_name, username] + res = subprocess.run(cmd, input=password.encode() + b"\n", capture_output=True) if res.returncode: raise RuntimeError(res.stderr) return None @@ -79,7 +70,7 @@ def set_password(self, service_name: str, username: str, password: str) -> None: import keyring except ImportError: if shutil.which("keyring") is not None: - keyring = KeyRingCli() + keyring = KeyRingCli # type: ignore[assignment] keyring = None # type: ignore[assignment] except Exception as exc: logger.warning( From efa7f2bf7d8672af0de11cd1104e933164e3eabb Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 20:38:10 +0000 Subject: [PATCH 168/730] Raise better exception --- src/pip/_internal/network/auth.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 0164a46170b..ed8c54cf03e 100644 --- 
a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -61,8 +61,7 @@ def get_credential( def set_password(cls, service_name: str, username: str, password: str) -> None: cmd = ["keyring", "set", service_name, username] res = subprocess.run(cmd, input=password.encode() + b"\n", capture_output=True) - if res.returncode: - raise RuntimeError(res.stderr) + res.check_returncode() return None From 7e9310245dbc1c36a7babf32a8df51ced4e3f947 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 20:39:06 +0000 Subject: [PATCH 169/730] Don't capture output --- src/pip/_internal/network/auth.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index ed8c54cf03e..358ef11b232 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -60,7 +60,8 @@ def get_credential( @classmethod def set_password(cls, service_name: str, username: str, password: str) -> None: cmd = ["keyring", "set", service_name, username] - res = subprocess.run(cmd, input=password.encode() + b"\n", capture_output=True) + input_ = password.encode() + b"\n" + res = subprocess.run(cmd, input=input_) res.check_returncode() return None From 6ec0af5258d56618dd7cc9ced077d9d12bfb8687 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 22:31:37 +0000 Subject: [PATCH 170/730] Handle IO encoding --- src/pip/_internal/network/auth.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 358ef11b232..bae2ce6f563 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -51,17 +51,19 @@ def get_credential( cls, service_name: str, username: Optional[str] ) -> Optional[KeyRingCredential]: cmd = ["keyring", "get", service_name, str(username)] - 
res = subprocess.run(cmd, capture_output=True) + res = subprocess.run( + cmd, capture_output=True, env=dict(PYTHONIOENCODING="utf-8") + ) if res.returncode: return None - password = res.stdout.decode().strip("\n") + password = res.stdout.decode("utf-8").strip("\n") return KeyRingCredential(username=username, password=password) @classmethod def set_password(cls, service_name: str, username: str, password: str) -> None: cmd = ["keyring", "set", service_name, username] - input_ = password.encode() + b"\n" - res = subprocess.run(cmd, input=input_) + input_ = password.encode("utf-8") + b"\n" + res = subprocess.run(cmd, input=input_, env=dict(PYTHONIOENCODING="utf-8")) res.check_returncode() return None From f5c96b14a0c9e9c274f6555a50f93fd2e5e35a35 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 22:43:13 +0000 Subject: [PATCH 171/730] Switch to defining `get_password` --- src/pip/_internal/network/auth.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index bae2ce6f563..0993fc112a4 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -32,11 +32,6 @@ class Credentials(NamedTuple): password: str -class KeyRingCredential(NamedTuple): - username: Optional[str] - password: str - - class KeyRingCli: """Mirror the parts of keyring's API which pip uses @@ -47,17 +42,14 @@ class KeyRingCli: """ @classmethod - def get_credential( - cls, service_name: str, username: Optional[str] - ) -> Optional[KeyRingCredential]: - cmd = ["keyring", "get", service_name, str(username)] + def get_password(cls, service_name: str, username: str) -> Optional[str]: + cmd = ["keyring", "get", service_name, username] res = subprocess.run( cmd, capture_output=True, env=dict(PYTHONIOENCODING="utf-8") ) if res.returncode: return None - password = res.stdout.decode("utf-8").strip("\n") - return 
KeyRingCredential(username=username, password=password) + return res.stdout.decode("utf-8").strip("\n") @classmethod def set_password(cls, service_name: str, username: str, password: str) -> None: From 43abcf01b155a86276727a0a5900f54dc9e74ae6 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 22:51:24 +0000 Subject: [PATCH 172/730] Set `keyring` correctly --- src/pip/_internal/network/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 0993fc112a4..6f011e1aa65 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -63,9 +63,9 @@ def set_password(cls, service_name: str, username: str, password: str) -> None: try: import keyring except ImportError: + keyring = None # type: ignore[assignment] if shutil.which("keyring") is not None: keyring = KeyRingCli # type: ignore[assignment] - keyring = None # type: ignore[assignment] except Exception as exc: logger.warning( "Keyring is skipped due to an exception: %s", From 5137ce26b6580ab4186519549c1ec6e07859b109 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 23:13:21 +0000 Subject: [PATCH 173/730] Use full `keyring` path --- src/pip/_internal/network/auth.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 6f011e1aa65..e13a4450209 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -41,9 +41,11 @@ class KeyRingCli: PATH. 
""" - @classmethod - def get_password(cls, service_name: str, username: str) -> Optional[str]: - cmd = ["keyring", "get", service_name, username] + def __init__(self, keyring: str) -> None: + self.keyring = keyring + + def get_password(self, service_name: str, username: str) -> Optional[str]: + cmd = [self.keyring, "get", service_name, username] res = subprocess.run( cmd, capture_output=True, env=dict(PYTHONIOENCODING="utf-8") ) @@ -51,9 +53,8 @@ def get_password(cls, service_name: str, username: str) -> Optional[str]: return None return res.stdout.decode("utf-8").strip("\n") - @classmethod - def set_password(cls, service_name: str, username: str, password: str) -> None: - cmd = ["keyring", "set", service_name, username] + def set_password(self, service_name: str, username: str, password: str) -> None: + cmd = [self.keyring, "set", service_name, username] input_ = password.encode("utf-8") + b"\n" res = subprocess.run(cmd, input=input_, env=dict(PYTHONIOENCODING="utf-8")) res.check_returncode() @@ -64,8 +65,9 @@ def set_password(cls, service_name: str, username: str, password: str) -> None: import keyring except ImportError: keyring = None # type: ignore[assignment] - if shutil.which("keyring") is not None: - keyring = KeyRingCli # type: ignore[assignment] + keyring_path = shutil.which("keyring") + if keyring_path is not None: + keyring = KeyRingCli(keyring_path) # type: ignore[assignment] except Exception as exc: logger.warning( "Keyring is skipped due to an exception: %s", From 4fc2008d04cb3600c7eb9d603aa6a34f4cf4ddba Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Wed, 9 Nov 2022 23:32:38 +0000 Subject: [PATCH 174/730] Prevent `keyring` from ever reading from `stdin` It shouldn't need to ever so no reason to allow it and have to jiggle around the `--no-input` option in `pip`. 
--- src/pip/_internal/network/auth.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index e13a4450209..bb223c6a231 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -47,7 +47,10 @@ def __init__(self, keyring: str) -> None: def get_password(self, service_name: str, username: str) -> Optional[str]: cmd = [self.keyring, "get", service_name, username] res = subprocess.run( - cmd, capture_output=True, env=dict(PYTHONIOENCODING="utf-8") + cmd, + stdin=subprocess.DEVNULL, + capture_output=True, + env=dict(PYTHONIOENCODING="utf-8"), ) if res.returncode: return None From 888c3b6c543a59559fd25cf8c95f6db1c2365819 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 10:31:38 +0000 Subject: [PATCH 175/730] Abstract provider interface to `keyring` --- src/pip/_internal/network/auth.py | 181 +++++++++++++++++++++--------- 1 file changed, 126 insertions(+), 55 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index bb223c6a231..72e300bd138 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -4,10 +4,12 @@ providing credentials in the context of network requests. 
""" +import os import shutil import subprocess import urllib.parse -from typing import Any, Dict, List, NamedTuple, Optional, Tuple +from abc import ABC, abstractmethod +from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Type from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth from pip._vendor.requests.models import Request, Response @@ -27,13 +29,71 @@ class Credentials(NamedTuple): - service_name: str + url: str username: str password: str -class KeyRingCli: - """Mirror the parts of keyring's API which pip uses +class KeyRingBaseProvider(ABC): + """Keyring base provider interface""" + + @classmethod + @abstractmethod + def is_available(cls) -> bool: + ... + + @classmethod + @abstractmethod + def get_auth_info(cls, url: str, username: Optional[str]) -> Optional[AuthInfo]: + ... + + @classmethod + @abstractmethod + def save_auth_info(cls, url: str, username: str, password: str) -> None: + ... + + +class KeyRingPythonProvider(KeyRingBaseProvider): + """Keyring interface which uses locally imported `keyring`""" + + try: + import keyring + except ImportError: + keyring = None # type: ignore[assignment] + + @classmethod + def is_available(cls) -> bool: + return cls.keyring is not None + + @classmethod + def get_auth_info(cls, url: str, username: Optional[str]) -> Optional[AuthInfo]: + if cls.is_available is False: + return None + + # Support keyring's get_credential interface which supports getting + # credentials without a username. This is only available for + # keyring>=15.2.0. 
+ if hasattr(cls.keyring, "get_credential"): + logger.debug("Getting credentials from keyring for %s", url) + cred = cls.keyring.get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username is not None: + logger.debug("Getting password from keyring for %s", url) + password = cls.keyring.get_password(url, username) + if password: + return username, password + return None + + @classmethod + def save_auth_info(cls, url: str, username: str, password: str) -> None: + cls.keyring.set_password(url, username, password) + + +class KeyRingCliProvider(KeyRingBaseProvider): + """Provider which uses `keyring` cli Instead of calling the keyring package installed alongside pip we call keyring on the command line which will enable pip to @@ -41,75 +101,85 @@ class KeyRingCli: PATH. """ - def __init__(self, keyring: str) -> None: - self.keyring = keyring + keyring = shutil.which("keyring") + + @classmethod + def is_available(cls) -> bool: + return cls.keyring is not None - def get_password(self, service_name: str, username: str) -> Optional[str]: - cmd = [self.keyring, "get", service_name, username] + @classmethod + def get_auth_info(cls, url: str, username: Optional[str]) -> Optional[AuthInfo]: + if cls.is_available is False: + return None + + # This is the default implementation of keyring.get_credential + # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139 + if username is not None: + password = cls._get_password(url, username) + if password is not None: + return username, password + return None + + @classmethod + def save_auth_info(cls, url: str, username: str, password: str) -> None: + if not cls.is_available: + raise RuntimeError("keyring is not available") + return cls._set_password(url, username, password) + + @classmethod + def _get_password(cls, service_name: str, username: str) -> Optional[str]: + """Mirror the implemenation of keyring.get_password using 
cli""" + if cls.keyring is None: + return None + + cmd = [cls.keyring, "get", service_name, username] + env = os.environ + env["PYTHONIOENCODING"] = "utf-8" res = subprocess.run( cmd, stdin=subprocess.DEVNULL, capture_output=True, - env=dict(PYTHONIOENCODING="utf-8"), + env=env, ) if res.returncode: return None return res.stdout.decode("utf-8").strip("\n") - def set_password(self, service_name: str, username: str, password: str) -> None: - cmd = [self.keyring, "set", service_name, username] + @classmethod + def _set_password(cls, service_name: str, username: str, password: str) -> None: + """Mirror the implemenation of keyring.set_password using cli""" + if cls.keyring is None: + return None + + cmd = [cls.keyring, "set", service_name, username] input_ = password.encode("utf-8") + b"\n" - res = subprocess.run(cmd, input=input_, env=dict(PYTHONIOENCODING="utf-8")) + env = os.environ + env["PYTHONIOENCODING"] = "utf-8" + res = subprocess.run(cmd, input=input_, env=env) res.check_returncode() return None -try: - import keyring -except ImportError: - keyring = None # type: ignore[assignment] - keyring_path = shutil.which("keyring") - if keyring_path is not None: - keyring = KeyRingCli(keyring_path) # type: ignore[assignment] -except Exception as exc: - logger.warning( - "Keyring is skipped due to an exception: %s", - str(exc), - ) - keyring = None # type: ignore[assignment] +def get_keyring_provider() -> Optional[Type[KeyRingBaseProvider]]: + if KeyRingPythonProvider.is_available(): + return KeyRingPythonProvider + if KeyRingCliProvider.is_available(): + return KeyRingCliProvider + return None def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]: """Return the tuple auth for a given url from keyring.""" - global keyring - if not url or not keyring: + # Do nothing if no url was provided + if not url: return None - try: - try: - get_credential = keyring.get_credential - except AttributeError: - pass - else: - logger.debug("Getting 
credentials from keyring for %s", url) - cred = get_credential(url, username) - if cred is not None: - return cred.username, cred.password - return None - - if username: - logger.debug("Getting password from keyring for %s", url) - password = keyring.get_password(url, username) - if password: - return username, password + keyring = get_keyring_provider() + # Do nothin if keyring is not available + if keyring is None: + return None - except Exception as exc: - logger.warning( - "Keyring is skipped due to an exception: %s", - str(exc), - ) - keyring = None # type: ignore[assignment] - return None + return keyring.get_auth_info(url, username) class MultiDomainBasicAuth(AuthBase): @@ -283,7 +353,7 @@ def _prompt_for_password( # Factored out to allow for easy patching in tests def _should_save_password_to_keyring(self) -> bool: - if not keyring: + if get_keyring_provider() is None: return False return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" @@ -319,7 +389,7 @@ def handle_401(self, resp: Response, **kwargs: Any) -> Response: # Prompt to save the password to keyring if save and self._should_save_password_to_keyring(): self._credentials_to_save = Credentials( - service_name=parsed.netloc, + url=parsed.netloc, username=username, password=password, ) @@ -355,15 +425,16 @@ def warn_on_401(self, resp: Response, **kwargs: Any) -> None: def save_credentials(self, resp: Response, **kwargs: Any) -> None: """Response callback to save credentials on success.""" + keyring = get_keyring_provider() assert keyring is not None, "should never reach here without keyring" if not keyring: - return + return None creds = self._credentials_to_save self._credentials_to_save = None if creds and resp.status_code < 400: try: logger.info("Saving credentials to keyring") - keyring.set_password(creds.service_name, creds.username, creds.password) + keyring.save_auth_info(creds.url, creds.username, creds.password) except Exception: logger.exception("Failed to save credentials") 
From 996d4fad95c2df44181dfc7eb2f811014ce82572 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 10:36:17 +0000 Subject: [PATCH 176/730] Take copy of `os.environ` rather than editing --- src/pip/_internal/network/auth.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 72e300bd138..249b3a465de 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -133,7 +133,7 @@ def _get_password(cls, service_name: str, username: str) -> Optional[str]: return None cmd = [cls.keyring, "get", service_name, username] - env = os.environ + env = os.environ.copy() env["PYTHONIOENCODING"] = "utf-8" res = subprocess.run( cmd, @@ -153,7 +153,7 @@ def _set_password(cls, service_name: str, username: str, password: str) -> None: cmd = [cls.keyring, "set", service_name, username] input_ = password.encode("utf-8") + b"\n" - env = os.environ + env = os.environ.copy() env["PYTHONIOENCODING"] = "utf-8" res = subprocess.run(cmd, input=input_, env=env) res.check_returncode() From 4f8a6137a1ec625910e40e3ea768e38d36ec923e Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 10:44:25 +0000 Subject: [PATCH 177/730] Import `keyring` lazily --- src/pip/_internal/network/auth.py | 95 +++++++++++++++---------------- 1 file changed, 47 insertions(+), 48 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 249b3a465de..bda61534705 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -4,12 +4,13 @@ providing credentials in the context of network requests. 
""" +import functools import os import shutil import subprocess import urllib.parse from abc import ABC, abstractmethod -from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Type +from typing import Any, Dict, List, NamedTuple, Optional, Tuple from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth from pip._vendor.requests.models import Request, Response @@ -37,59 +38,56 @@ class Credentials(NamedTuple): class KeyRingBaseProvider(ABC): """Keyring base provider interface""" - @classmethod @abstractmethod - def is_available(cls) -> bool: + def is_available(self) -> bool: ... - @classmethod @abstractmethod - def get_auth_info(cls, url: str, username: Optional[str]) -> Optional[AuthInfo]: + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: ... - @classmethod @abstractmethod - def save_auth_info(cls, url: str, username: str, password: str) -> None: + def save_auth_info(self, url: str, username: str, password: str) -> None: ... class KeyRingPythonProvider(KeyRingBaseProvider): """Keyring interface which uses locally imported `keyring`""" - try: - import keyring - except ImportError: - keyring = None # type: ignore[assignment] + def __init__(self) -> None: + try: + import keyring + except ImportError: + keyring = None # type: ignore[assignment] - @classmethod - def is_available(cls) -> bool: - return cls.keyring is not None + self.keyring = keyring - @classmethod - def get_auth_info(cls, url: str, username: Optional[str]) -> Optional[AuthInfo]: - if cls.is_available is False: + def is_available(self) -> bool: + return self.keyring is not None + + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: + if self.is_available is False: return None # Support keyring's get_credential interface which supports getting # credentials without a username. This is only available for # keyring>=15.2.0. 
- if hasattr(cls.keyring, "get_credential"): + if hasattr(self.keyring, "get_credential"): logger.debug("Getting credentials from keyring for %s", url) - cred = cls.keyring.get_credential(url, username) + cred = self.keyring.get_credential(url, username) if cred is not None: return cred.username, cred.password return None if username is not None: logger.debug("Getting password from keyring for %s", url) - password = cls.keyring.get_password(url, username) + password = self.keyring.get_password(url, username) if password: return username, password return None - @classmethod - def save_auth_info(cls, url: str, username: str, password: str) -> None: - cls.keyring.set_password(url, username, password) + def save_auth_info(self, url: str, username: str, password: str) -> None: + self.keyring.set_password(url, username, password) class KeyRingCliProvider(KeyRingBaseProvider): @@ -101,38 +99,35 @@ class KeyRingCliProvider(KeyRingBaseProvider): PATH. """ - keyring = shutil.which("keyring") + def __init__(self) -> None: + self.keyring = shutil.which("keyring") - @classmethod - def is_available(cls) -> bool: - return cls.keyring is not None + def is_available(self) -> bool: + return self.keyring is not None - @classmethod - def get_auth_info(cls, url: str, username: Optional[str]) -> Optional[AuthInfo]: - if cls.is_available is False: + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: + if self.is_available is False: return None # This is the default implementation of keyring.get_credential # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139 if username is not None: - password = cls._get_password(url, username) + password = self._get_password(url, username) if password is not None: return username, password return None - @classmethod - def save_auth_info(cls, url: str, username: str, password: str) -> None: - if not cls.is_available: + def save_auth_info(self, url: str, username: str, 
password: str) -> None: + if not self.is_available: raise RuntimeError("keyring is not available") - return cls._set_password(url, username, password) + return self._set_password(url, username, password) - @classmethod - def _get_password(cls, service_name: str, username: str) -> Optional[str]: + def _get_password(self, service_name: str, username: str) -> Optional[str]: """Mirror the implemenation of keyring.get_password using cli""" - if cls.keyring is None: + if self.keyring is None: return None - cmd = [cls.keyring, "get", service_name, username] + cmd = [self.keyring, "get", service_name, username] env = os.environ.copy() env["PYTHONIOENCODING"] = "utf-8" res = subprocess.run( @@ -145,13 +140,12 @@ def _get_password(cls, service_name: str, username: str) -> Optional[str]: return None return res.stdout.decode("utf-8").strip("\n") - @classmethod - def _set_password(cls, service_name: str, username: str, password: str) -> None: + def _set_password(self, service_name: str, username: str, password: str) -> None: """Mirror the implemenation of keyring.set_password using cli""" - if cls.keyring is None: + if self.keyring is None: return None - cmd = [cls.keyring, "set", service_name, username] + cmd = [self.keyring, "set", service_name, username] input_ = password.encode("utf-8") + b"\n" env = os.environ.copy() env["PYTHONIOENCODING"] = "utf-8" @@ -160,11 +154,16 @@ def _set_password(cls, service_name: str, username: str, password: str) -> None: return None -def get_keyring_provider() -> Optional[Type[KeyRingBaseProvider]]: - if KeyRingPythonProvider.is_available(): - return KeyRingPythonProvider - if KeyRingCliProvider.is_available(): - return KeyRingCliProvider +@functools.lru_cache(maxsize=1) +def get_keyring_provider() -> Optional[KeyRingBaseProvider]: + python_keyring = KeyRingPythonProvider() + if python_keyring.is_available(): + return python_keyring + + cli_keyring = KeyRingCliProvider() + if cli_keyring.is_available(): + return cli_keyring + return None From 
3a15e010916f3fd40a37b4458ad9a35696241f0b Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 19:15:07 +0000 Subject: [PATCH 178/730] Get the tests passing again --- src/pip/_internal/network/auth.py | 17 ++++++++++++++--- tests/unit/test_network_auth.py | 25 +++++++++++++++++-------- 2 files changed, 31 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index bda61534705..99fd9977c75 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -28,6 +28,8 @@ logger = getLogger(__name__) +KEYRING_DISABLED = False + class Credentials(NamedTuple): url: str @@ -174,11 +176,20 @@ def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[Au return None keyring = get_keyring_provider() - # Do nothin if keyring is not available - if keyring is None: + # Do nothing if keyring is not available + global KEYRING_DISABLED + if keyring is None or KEYRING_DISABLED: return None - return keyring.get_auth_info(url, username) + try: + return keyring.get_auth_info(url, username) + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + KEYRING_DISABLED = True + return None class MultiDomainBasicAuth(AuthBase): diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index 5c0e5746281..03d39c452a2 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -1,5 +1,6 @@ import functools -from typing import Any, List, Optional, Tuple +import sys +from typing import Any, Iterable, List, Optional, Tuple import pytest @@ -8,6 +9,14 @@ from tests.lib.requests_mocks import MockConnection, MockRequest, MockResponse +@pytest.fixture(scope="function", autouse=True) +def reset_keyring() -> Iterable[None]: + yield None + # Reset the state of the module between tests + pip._internal.network.auth.KEYRING_DISABLED = False + 
pip._internal.network.auth.get_keyring_provider.cache_clear() + + @pytest.mark.parametrize( ["input_url", "url", "username", "password"], [ @@ -138,7 +147,7 @@ def test_keyring_get_password( expect: Tuple[Optional[str], Optional[str]], ) -> None: keyring = KeyringModuleV1() - monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) + monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] auth = MultiDomainBasicAuth(index_urls=["http://example.com/path2"]) actual = auth._get_new_credentials(url, allow_netrc=False, allow_keyring=True) @@ -147,7 +156,7 @@ def test_keyring_get_password( def test_keyring_get_password_after_prompt(monkeypatch: pytest.MonkeyPatch) -> None: keyring = KeyringModuleV1() - monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) + monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] auth = MultiDomainBasicAuth() def ask_input(prompt: str) -> str: @@ -163,7 +172,7 @@ def test_keyring_get_password_after_prompt_when_none( monkeypatch: pytest.MonkeyPatch, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) + monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] auth = MultiDomainBasicAuth() def ask_input(prompt: str) -> str: @@ -184,7 +193,7 @@ def test_keyring_get_password_username_in_index( monkeypatch: pytest.MonkeyPatch, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) + monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] auth = MultiDomainBasicAuth(index_urls=["http://user@example.com/path2"]) get = functools.partial( auth._get_new_credentials, allow_netrc=False, allow_keyring=True @@ -217,7 +226,7 @@ def test_keyring_set_password( expect_save: bool, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setattr("pip._internal.network.auth.keyring", keyring) + monkeypatch.setitem(sys.modules, "keyring", keyring) # type: 
ignore[misc] auth = MultiDomainBasicAuth(prompting=True) monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None)) monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds) @@ -293,7 +302,7 @@ def get_credential(self, system: str, username: str) -> Optional[Credential]: def test_keyring_get_credential( monkeypatch: pytest.MonkeyPatch, url: str, expect: str ) -> None: - monkeypatch.setattr(pip._internal.network.auth, "keyring", KeyringModuleV2()) + monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2()) # type: ignore[misc] auth = MultiDomainBasicAuth(index_urls=["http://example.com/path2"]) assert ( @@ -314,7 +323,7 @@ def get_credential(self, system: str, username: str) -> None: def test_broken_keyring_disables_keyring(monkeypatch: pytest.MonkeyPatch) -> None: keyring_broken = KeyringModuleBroken() - monkeypatch.setattr(pip._internal.network.auth, "keyring", keyring_broken) + monkeypatch.setitem(sys.modules, "keyring", keyring_broken) # type: ignore[misc] auth = MultiDomainBasicAuth(index_urls=["http://example.com/"]) From 8d9ea8b62f91fb98ee13b7a370274deffdbe4956 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 20:01:52 +0000 Subject: [PATCH 179/730] Add tests for new code paths --- tests/unit/test_network_auth.py | 142 +++++++++++++++++++++++++++++++- 1 file changed, 141 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index 03d39c452a2..56c17d11f02 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -1,6 +1,6 @@ import functools import sys -from typing import Any, Iterable, List, Optional, Tuple +from typing import Any, Dict, Iterable, List, Optional, Tuple import pytest @@ -334,3 +334,143 @@ def test_broken_keyring_disables_keyring(monkeypatch: pytest.MonkeyPatch) -> Non url, allow_netrc=False, allow_keyring=True ) == (None, None) assert keyring_broken._call_count == 1 
+ + +class KeyringSubprocessResult(KeyringModuleV1): + """Represents the subprocess call to keyring""" + + returncode = 0 # Default to zero retcode + + def __call__( + self, + cmd: List[str], + *, + env: Dict[str, str], + stdin: Optional[Any] = None, + capture_output: Optional[bool] = None, + input: Optional[bytes] = None, + ) -> Any: + if cmd[1] == "get": + assert stdin == -3 # subprocess.DEVNULL + assert capture_output is True + assert env["PYTHONIOENCODING"] == "utf-8" + + password = self.get_password(*cmd[2:]) + if password is None: + # Expect non-zero returncode if no password present + self.returncode = 1 + else: + # Passwords are returned encoded with a newline appended + self.stdout = password.encode("utf-8") + b"\n" + + if cmd[1] == "set": + assert stdin is None + assert capture_output is None + assert env["PYTHONIOENCODING"] == "utf-8" + assert input is not None + + # Input from stdin is encoded + self.set_password(cmd[2], cmd[3], input.decode("utf-8").strip("\n")) + + return self + + def check_returncode(self) -> None: + if self.returncode: + raise Exception() + + +@pytest.mark.parametrize( + "url, expect", + ( + ("http://example.com/path1", (None, None)), + # path1 URLs will be resolved by netloc + ("http://user@example.com/path1", ("user", "user!netloc")), + ("http://user2@example.com/path1", ("user2", "user2!netloc")), + # path2 URLs will be resolved by index URL + ("http://example.com/path2/path3", (None, None)), + ("http://foo@example.com/path2/path3", ("foo", "foo!url")), + ), +) +def test_keyring_cli_get_password( + monkeypatch: pytest.MonkeyPatch, + url: str, + expect: Tuple[Optional[str], Optional[str]], +) -> None: + monkeypatch.setattr(pip._internal.network.auth.shutil, "which", lambda x: "keyring") + monkeypatch.setattr( + pip._internal.network.auth.subprocess, "run", KeyringSubprocessResult() + ) + auth = MultiDomainBasicAuth(index_urls=["http://example.com/path2"]) + + actual = auth._get_new_credentials(url, allow_netrc=False, 
allow_keyring=True) + assert actual == expect + + +@pytest.mark.parametrize( + "response_status, creds, expect_save", + ( + (403, ("user", "pass", True), False), + ( + 200, + ("user", "pass", True), + True, + ), + ( + 200, + ("user", "pass", False), + False, + ), + ), +) +def test_keyring_cli_set_password( + monkeypatch: pytest.MonkeyPatch, + response_status: int, + creds: Tuple[str, str, bool], + expect_save: bool, +) -> None: + monkeypatch.setattr(pip._internal.network.auth.shutil, "which", lambda x: "keyring") + keyring = KeyringSubprocessResult() + monkeypatch.setattr(pip._internal.network.auth.subprocess, "run", keyring) + auth = MultiDomainBasicAuth(prompting=True) + monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None)) + monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds) + if creds[2]: + # when _prompt_for_password indicates to save, we should save + def should_save_password_to_keyring(*a: Any) -> bool: + return True + + else: + # when _prompt_for_password indicates not to save, we should + # never call this function + def should_save_password_to_keyring(*a: Any) -> bool: + assert False, "_should_save_password_to_keyring should not be called" + + monkeypatch.setattr( + auth, "_should_save_password_to_keyring", should_save_password_to_keyring + ) + + req = MockRequest("https://example.com") + resp = MockResponse(b"") + resp.url = req.url + connection = MockConnection() + + def _send(sent_req: MockRequest, **kwargs: Any) -> MockResponse: + assert sent_req is req + assert "Authorization" in sent_req.headers + r = MockResponse(b"") + r.status_code = response_status + return r + + # https://github.com/python/mypy/issues/2427 + connection._send = _send # type: ignore[assignment] + + resp.request = req + resp.status_code = 401 + resp.connection = connection + + auth.handle_401(resp) + + if expect_save: + assert keyring.saved_passwords == [("example.com", creds[0], creds[1])] + else: + assert keyring.saved_passwords == [] 
From c04222fe47698b49ba33618c91c3bdb6d419cff0 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 21:25:26 +0000 Subject: [PATCH 180/730] Simplify provider interface --- src/pip/_internal/network/auth.py | 77 +++++++++++++------------------ tests/unit/test_network_auth.py | 1 - 2 files changed, 31 insertions(+), 47 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 99fd9977c75..3e2e54da227 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -4,12 +4,12 @@ providing credentials in the context of network requests. """ -import functools import os import shutil import subprocess import urllib.parse from abc import ABC, abstractmethod +from types import ModuleType from typing import Any, Dict, List, NamedTuple, Optional, Tuple from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth @@ -40,10 +40,6 @@ class Credentials(NamedTuple): class KeyRingBaseProvider(ABC): """Keyring base provider interface""" - @abstractmethod - def is_available(self) -> bool: - ... - @abstractmethod def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: ... @@ -53,24 +49,23 @@ def save_auth_info(self, url: str, username: str, password: str) -> None: ... 
-class KeyRingPythonProvider(KeyRingBaseProvider): - """Keyring interface which uses locally imported `keyring`""" +class KeyRingNullProvider(KeyRingBaseProvider): + """Keyring null provider""" - def __init__(self) -> None: - try: - import keyring - except ImportError: - keyring = None # type: ignore[assignment] + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: + return None - self.keyring = keyring + def save_auth_info(self, url: str, username: str, password: str) -> None: + return None - def is_available(self) -> bool: - return self.keyring is not None - def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: - if self.is_available is False: - return None +class KeyRingPythonProvider(KeyRingBaseProvider): + """Keyring interface which uses locally imported `keyring`""" + def __init__(self, module: ModuleType) -> None: + self.keyring = module + + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: # Support keyring's get_credential interface which supports getting # credentials without a username. This is only available for # keyring>=15.2.0. @@ -101,16 +96,10 @@ class KeyRingCliProvider(KeyRingBaseProvider): PATH. 
""" - def __init__(self) -> None: - self.keyring = shutil.which("keyring") - - def is_available(self) -> bool: - return self.keyring is not None + def __init__(self, cmd: str) -> None: + self.keyring = cmd def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: - if self.is_available is False: - return None - # This is the default implementation of keyring.get_credential # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139 if username is not None: @@ -120,8 +109,6 @@ def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo] return None def save_auth_info(self, url: str, username: str, password: str) -> None: - if not self.is_available: - raise RuntimeError("keyring is not available") return self._set_password(url, username, password) def _get_password(self, service_name: str, username: str) -> Optional[str]: @@ -156,17 +143,19 @@ def _set_password(self, service_name: str, username: str, password: str) -> None return None -@functools.lru_cache(maxsize=1) -def get_keyring_provider() -> Optional[KeyRingBaseProvider]: - python_keyring = KeyRingPythonProvider() - if python_keyring.is_available(): - return python_keyring - - cli_keyring = KeyRingCliProvider() - if cli_keyring.is_available(): - return cli_keyring +def get_keyring_provider() -> KeyRingBaseProvider: + # keyring has previously failed and been disabled + if not KEYRING_DISABLED: + try: + import keyring - return None + return KeyRingPythonProvider(keyring) + except ImportError: + pass + cli = shutil.which("keyring") + if cli: + return KeyRingCliProvider(cli) + return KeyRingNullProvider() def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]: @@ -176,11 +165,6 @@ def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[Au return None keyring = get_keyring_provider() - # Do nothing if keyring is not available - global KEYRING_DISABLED - if keyring is 
None or KEYRING_DISABLED: - return None - try: return keyring.get_auth_info(url, username) except Exception as exc: @@ -188,6 +172,7 @@ def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[Au "Keyring is skipped due to an exception: %s", str(exc), ) + global KEYRING_DISABLED KEYRING_DISABLED = True return None @@ -436,9 +421,9 @@ def warn_on_401(self, resp: Response, **kwargs: Any) -> None: def save_credentials(self, resp: Response, **kwargs: Any) -> None: """Response callback to save credentials on success.""" keyring = get_keyring_provider() - assert keyring is not None, "should never reach here without keyring" - if not keyring: - return None + assert not isinstance( + keyring, KeyRingNullProvider + ), "should never reach here without keyring" creds = self._credentials_to_save self._credentials_to_save = None diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index 56c17d11f02..625a20a48f5 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -14,7 +14,6 @@ def reset_keyring() -> Iterable[None]: yield None # Reset the state of the module between tests pip._internal.network.auth.KEYRING_DISABLED = False - pip._internal.network.auth.get_keyring_provider.cache_clear() @pytest.mark.parametrize( From e6e42de0e6086a8814ebfc24d5abda35ad3fba29 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 21:34:50 +0000 Subject: [PATCH 181/730] Move `keyring` import --- src/pip/_internal/network/auth.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 3e2e54da227..b84a9eb1f9d 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -9,7 +9,6 @@ import subprocess import urllib.parse from abc import ABC, abstractmethod -from types import ModuleType from typing import Any, Dict, List, NamedTuple, Optional, 
Tuple from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth @@ -62,8 +61,10 @@ def save_auth_info(self, url: str, username: str, password: str) -> None: class KeyRingPythonProvider(KeyRingBaseProvider): """Keyring interface which uses locally imported `keyring`""" - def __init__(self, module: ModuleType) -> None: - self.keyring = module + def __init__(self) -> None: + import keyring + + self.keyring = keyring def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: # Support keyring's get_credential interface which supports getting @@ -147,9 +148,7 @@ def get_keyring_provider() -> KeyRingBaseProvider: # keyring has previously failed and been disabled if not KEYRING_DISABLED: try: - import keyring - - return KeyRingPythonProvider(keyring) + return KeyRingPythonProvider() except ImportError: pass cli = shutil.which("keyring") From 14a3d9388eb9afb46637567348cae6d71f06c3a2 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 21:42:47 +0000 Subject: [PATCH 182/730] Don't silently fallback to cli --- src/pip/_internal/network/auth.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index b84a9eb1f9d..8ea9040b1b0 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -147,13 +147,26 @@ def _set_password(self, service_name: str, username: str, password: str) -> None def get_keyring_provider() -> KeyRingBaseProvider: # keyring has previously failed and been disabled if not KEYRING_DISABLED: + # Default to trying to use Python provider try: return KeyRingPythonProvider() except ImportError: pass + except Exception as exc: + # In the event of an unexpected exception + # we shouldn't fallback silently to the + # CliProvider + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + return KeyRingNullProvider() + + # Fallback to Cli Provider if 
`keyring` isn't installed cli = shutil.which("keyring") if cli: return KeyRingCliProvider(cli) + return KeyRingNullProvider() From 623ac5d77dec4c9e4e8d99582bad913c1b0f0b6f Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Thu, 10 Nov 2022 21:48:50 +0000 Subject: [PATCH 183/730] Do fallback but issue a warning --- src/pip/_internal/network/auth.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 8ea9040b1b0..241ddc53a9c 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -154,13 +154,12 @@ def get_keyring_provider() -> KeyRingBaseProvider: pass except Exception as exc: # In the event of an unexpected exception - # we shouldn't fallback silently to the - # CliProvider + # we should warn the user logger.warning( - "Keyring is skipped due to an exception: %s", + "Installed copy of keyring fails with exception %s, " + "trying to find a keyring executable as a fallback", str(exc), ) - return KeyRingNullProvider() # Fallback to Cli Provider if `keyring` isn't installed cli = shutil.which("keyring") From afb0d13220293651a1a3b8b8f51759c18ffcc2f4 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 11 Nov 2022 07:43:08 +0800 Subject: [PATCH 184/730] Fix docstring to match reality We've moved to pathlib a while ago, the docstring still references tests.lib.path, which no longer exists. --- tests/functional/test_install_vcs_git.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index cb72ec0b230..60b7715a9ca 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -79,7 +79,7 @@ def _make_version_pkg_url( Return a "git+file://" URL to the version_pkg test package. 
Args: - path: a tests.lib.path.Path object pointing to a Git repository + path: a pathlib.Path object pointing to a Git repository containing the version_pkg package. rev: an optional revision to install like a branch name, tag, or SHA. """ @@ -101,7 +101,7 @@ def _install_version_pkg_only( the version). Args: - path: a tests.lib.path.Path object pointing to a Git repository + path: a pathlib.Path object pointing to a Git repository containing the package. rev: an optional revision to install like a branch name or tag. """ @@ -122,7 +122,7 @@ def _install_version_pkg( installed. Args: - path: a tests.lib.path.Path object pointing to a Git repository + path: a pathlib.Path object pointing to a Git repository containing the package. rev: an optional revision to install like a branch name or tag. """ From 89a4a35a3381cc324aa818392c19631654cae495 Mon Sep 17 00:00:00 2001 From: Judah Rand <17158624+judahrand@users.noreply.github.com> Date: Fri, 11 Nov 2022 21:44:35 +0000 Subject: [PATCH 185/730] Add documentation on functionality in #11589 --- docs/html/topics/authentication.md | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/docs/html/topics/authentication.md b/docs/html/topics/authentication.md index 981aab5abd7..f5b553160df 100644 --- a/docs/html/topics/authentication.md +++ b/docs/html/topics/authentication.md @@ -68,8 +68,21 @@ man pages][netrc-docs]. pip supports loading credentials stored in your keyring using the {pypi}`keyring` library. +pip will first try to use `keyring` in the same environment as itself and +fallback to using any `keyring` installation which is available on `PATH`. 
+ +Therefore, either of the following setups will work: + +```bash +$ pip install keyring # install keyring from PyPI into same environment as pip +$ echo "your-password" | keyring set pypi.company.com your-username +$ pip install your-package --index-url https://pypi.company.com/ +``` + +or + ```bash -$ pip install keyring # install keyring from PyPI +$ pipx install keyring # install keyring from PyPI into standalone environment $ echo "your-password" | keyring set pypi.company.com your-username $ pip install your-package --index-url https://pypi.company.com/ ``` @@ -79,5 +92,4 @@ pip. This can create a bootstrapping issue if you need the credentials stored in the keyring to download and install keyring. It is, thus, expected that users that wish to use pip's keyring support have -some mechanism for downloading and installing {pypi}`keyring` in their Python -environment. +some mechanism for downloading and installing {pypi}`keyring`. From 8cda1c34eb865c1be5b72e8295f7ca7adfdeb113 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 12 Nov 2022 10:40:03 +0000 Subject: [PATCH 186/730] Upgrade colorama to 0.4.6 --- news/colorama.vendor.rst | 1 + src/pip/_vendor/colorama/__init__.py | 5 +- src/pip/_vendor/colorama/ansitowin32.py | 17 +- src/pip/_vendor/colorama/initialise.py | 51 ++- src/pip/_vendor/colorama/tests/__init__.py | 1 + src/pip/_vendor/colorama/tests/ansi_test.py | 76 +++++ .../colorama/tests/ansitowin32_test.py | 294 ++++++++++++++++++ .../_vendor/colorama/tests/initialise_test.py | 189 +++++++++++ src/pip/_vendor/colorama/tests/isatty_test.py | 57 ++++ src/pip/_vendor/colorama/tests/utils.py | 49 +++ .../_vendor/colorama/tests/winterm_test.py | 131 ++++++++ src/pip/_vendor/colorama/win32.py | 28 ++ src/pip/_vendor/colorama/winterm.py | 28 +- src/pip/_vendor/vendor.txt | 2 +- 14 files changed, 917 insertions(+), 12 deletions(-) create mode 100644 news/colorama.vendor.rst create mode 100644 src/pip/_vendor/colorama/tests/__init__.py create mode 100644 
src/pip/_vendor/colorama/tests/ansi_test.py create mode 100644 src/pip/_vendor/colorama/tests/ansitowin32_test.py create mode 100644 src/pip/_vendor/colorama/tests/initialise_test.py create mode 100644 src/pip/_vendor/colorama/tests/isatty_test.py create mode 100644 src/pip/_vendor/colorama/tests/utils.py create mode 100644 src/pip/_vendor/colorama/tests/winterm_test.py diff --git a/news/colorama.vendor.rst b/news/colorama.vendor.rst new file mode 100644 index 00000000000..bf206c456d9 --- /dev/null +++ b/news/colorama.vendor.rst @@ -0,0 +1 @@ +Upgrade colorama to 0.4.6 diff --git a/src/pip/_vendor/colorama/__init__.py b/src/pip/_vendor/colorama/__init__.py index 9138a8cc8f0..383101cdb38 100644 --- a/src/pip/_vendor/colorama/__init__.py +++ b/src/pip/_vendor/colorama/__init__.py @@ -1,6 +1,7 @@ # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from .initialise import init, deinit, reinit, colorama_text +from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console from .ansi import Fore, Back, Style, Cursor from .ansitowin32 import AnsiToWin32 -__version__ = '0.4.5' +__version__ = '0.4.6' + diff --git a/src/pip/_vendor/colorama/ansitowin32.py b/src/pip/_vendor/colorama/ansitowin32.py index 3db248baac4..abf209e60c7 100644 --- a/src/pip/_vendor/colorama/ansitowin32.py +++ b/src/pip/_vendor/colorama/ansitowin32.py @@ -4,7 +4,7 @@ import os from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL -from .winterm import WinTerm, WinColor, WinStyle +from .winterm import enable_vt_processing, WinTerm, WinColor, WinStyle from .win32 import windll, winapi_test @@ -94,15 +94,22 @@ def __init__(self, wrapped, convert=None, strip=None, autoreset=False): # (e.g. Cygwin Terminal). In this case it's up to the terminal # to support the ANSI codes. 
conversion_supported = on_windows and winapi_test() + try: + fd = wrapped.fileno() + except Exception: + fd = -1 + system_has_native_ansi = not on_windows or enable_vt_processing(fd) + have_tty = not self.stream.closed and self.stream.isatty() + need_conversion = conversion_supported and not system_has_native_ansi # should we strip ANSI sequences from our output? if strip is None: - strip = conversion_supported or (not self.stream.closed and not self.stream.isatty()) + strip = need_conversion or not have_tty self.strip = strip # should we should convert ANSI sequences into win32 calls? if convert is None: - convert = conversion_supported and not self.stream.closed and self.stream.isatty() + convert = need_conversion and have_tty self.convert = convert # dict of ansi codes to win32 functions and parameters @@ -264,3 +271,7 @@ def convert_osc(self, text): if params[0] in '02': winterm.set_title(params[1]) return text + + + def flush(self): + self.wrapped.flush() diff --git a/src/pip/_vendor/colorama/initialise.py b/src/pip/_vendor/colorama/initialise.py index 430d0668727..d5fd4b71fed 100644 --- a/src/pip/_vendor/colorama/initialise.py +++ b/src/pip/_vendor/colorama/initialise.py @@ -6,13 +6,27 @@ from .ansitowin32 import AnsiToWin32 -orig_stdout = None -orig_stderr = None +def _wipe_internal_state_for_tests(): + global orig_stdout, orig_stderr + orig_stdout = None + orig_stderr = None + + global wrapped_stdout, wrapped_stderr + wrapped_stdout = None + wrapped_stderr = None -wrapped_stdout = None -wrapped_stderr = None + global atexit_done + atexit_done = False + + global fixed_windows_console + fixed_windows_console = False -atexit_done = False + try: + # no-op if it wasn't registered + atexit.unregister(reset_all) + except AttributeError: + # python 2: no atexit.unregister. Oh well, we did our best. 
+ pass def reset_all(): @@ -55,6 +69,29 @@ def deinit(): sys.stderr = orig_stderr +def just_fix_windows_console(): + global fixed_windows_console + + if sys.platform != "win32": + return + if fixed_windows_console: + return + if wrapped_stdout is not None or wrapped_stderr is not None: + # Someone already ran init() and it did stuff, so we won't second-guess them + return + + # On newer versions of Windows, AnsiToWin32.__init__ will implicitly enable the + # native ANSI support in the console as a side-effect. We only need to actually + # replace sys.stdout/stderr if we're in the old-style conversion mode. + new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) + if new_stdout.convert: + sys.stdout = new_stdout + new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) + if new_stderr.convert: + sys.stderr = new_stderr + + fixed_windows_console = True + @contextlib.contextmanager def colorama_text(*args, **kwargs): init(*args, **kwargs) @@ -78,3 +115,7 @@ def wrap_stream(stream, convert, strip, autoreset, wrap): if wrapper.should_wrap(): stream = wrapper.stream return stream + + +# Use this for initial setup as well, to reduce code duplication +_wipe_internal_state_for_tests() diff --git a/src/pip/_vendor/colorama/tests/__init__.py b/src/pip/_vendor/colorama/tests/__init__.py new file mode 100644 index 00000000000..8c5661e93a2 --- /dev/null +++ b/src/pip/_vendor/colorama/tests/__init__.py @@ -0,0 +1 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. diff --git a/src/pip/_vendor/colorama/tests/ansi_test.py b/src/pip/_vendor/colorama/tests/ansi_test.py new file mode 100644 index 00000000000..0a20c80f882 --- /dev/null +++ b/src/pip/_vendor/colorama/tests/ansi_test.py @@ -0,0 +1,76 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
+import sys +from unittest import TestCase, main + +from ..ansi import Back, Fore, Style +from ..ansitowin32 import AnsiToWin32 + +stdout_orig = sys.stdout +stderr_orig = sys.stderr + + +class AnsiTest(TestCase): + + def setUp(self): + # sanity check: stdout should be a file or StringIO object. + # It will only be AnsiToWin32 if init() has previously wrapped it + self.assertNotEqual(type(sys.stdout), AnsiToWin32) + self.assertNotEqual(type(sys.stderr), AnsiToWin32) + + def tearDown(self): + sys.stdout = stdout_orig + sys.stderr = stderr_orig + + + def testForeAttributes(self): + self.assertEqual(Fore.BLACK, '\033[30m') + self.assertEqual(Fore.RED, '\033[31m') + self.assertEqual(Fore.GREEN, '\033[32m') + self.assertEqual(Fore.YELLOW, '\033[33m') + self.assertEqual(Fore.BLUE, '\033[34m') + self.assertEqual(Fore.MAGENTA, '\033[35m') + self.assertEqual(Fore.CYAN, '\033[36m') + self.assertEqual(Fore.WHITE, '\033[37m') + self.assertEqual(Fore.RESET, '\033[39m') + + # Check the light, extended versions. + self.assertEqual(Fore.LIGHTBLACK_EX, '\033[90m') + self.assertEqual(Fore.LIGHTRED_EX, '\033[91m') + self.assertEqual(Fore.LIGHTGREEN_EX, '\033[92m') + self.assertEqual(Fore.LIGHTYELLOW_EX, '\033[93m') + self.assertEqual(Fore.LIGHTBLUE_EX, '\033[94m') + self.assertEqual(Fore.LIGHTMAGENTA_EX, '\033[95m') + self.assertEqual(Fore.LIGHTCYAN_EX, '\033[96m') + self.assertEqual(Fore.LIGHTWHITE_EX, '\033[97m') + + + def testBackAttributes(self): + self.assertEqual(Back.BLACK, '\033[40m') + self.assertEqual(Back.RED, '\033[41m') + self.assertEqual(Back.GREEN, '\033[42m') + self.assertEqual(Back.YELLOW, '\033[43m') + self.assertEqual(Back.BLUE, '\033[44m') + self.assertEqual(Back.MAGENTA, '\033[45m') + self.assertEqual(Back.CYAN, '\033[46m') + self.assertEqual(Back.WHITE, '\033[47m') + self.assertEqual(Back.RESET, '\033[49m') + + # Check the light, extended versions. 
+ self.assertEqual(Back.LIGHTBLACK_EX, '\033[100m') + self.assertEqual(Back.LIGHTRED_EX, '\033[101m') + self.assertEqual(Back.LIGHTGREEN_EX, '\033[102m') + self.assertEqual(Back.LIGHTYELLOW_EX, '\033[103m') + self.assertEqual(Back.LIGHTBLUE_EX, '\033[104m') + self.assertEqual(Back.LIGHTMAGENTA_EX, '\033[105m') + self.assertEqual(Back.LIGHTCYAN_EX, '\033[106m') + self.assertEqual(Back.LIGHTWHITE_EX, '\033[107m') + + + def testStyleAttributes(self): + self.assertEqual(Style.DIM, '\033[2m') + self.assertEqual(Style.NORMAL, '\033[22m') + self.assertEqual(Style.BRIGHT, '\033[1m') + + +if __name__ == '__main__': + main() diff --git a/src/pip/_vendor/colorama/tests/ansitowin32_test.py b/src/pip/_vendor/colorama/tests/ansitowin32_test.py new file mode 100644 index 00000000000..91ca551f97b --- /dev/null +++ b/src/pip/_vendor/colorama/tests/ansitowin32_test.py @@ -0,0 +1,294 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from io import StringIO, TextIOWrapper +from unittest import TestCase, main +try: + from contextlib import ExitStack +except ImportError: + # python 2 + from contextlib2 import ExitStack + +try: + from unittest.mock import MagicMock, Mock, patch +except ImportError: + from mock import MagicMock, Mock, patch + +from ..ansitowin32 import AnsiToWin32, StreamWrapper +from ..win32 import ENABLE_VIRTUAL_TERMINAL_PROCESSING +from .utils import osname + + +class StreamWrapperTest(TestCase): + + def testIsAProxy(self): + mockStream = Mock() + wrapper = StreamWrapper(mockStream, None) + self.assertTrue( wrapper.random_attr is mockStream.random_attr ) + + def testDelegatesWrite(self): + mockStream = Mock() + mockConverter = Mock() + wrapper = StreamWrapper(mockStream, mockConverter) + wrapper.write('hello') + self.assertTrue(mockConverter.write.call_args, (('hello',), {})) + + def testDelegatesContext(self): + mockConverter = Mock() + s = StringIO() + with StreamWrapper(s, mockConverter) as fp: + fp.write(u'hello') + 
self.assertTrue(s.closed) + + def testProxyNoContextManager(self): + mockStream = MagicMock() + mockStream.__enter__.side_effect = AttributeError() + mockConverter = Mock() + with self.assertRaises(AttributeError) as excinfo: + with StreamWrapper(mockStream, mockConverter) as wrapper: + wrapper.write('hello') + + def test_closed_shouldnt_raise_on_closed_stream(self): + stream = StringIO() + stream.close() + wrapper = StreamWrapper(stream, None) + self.assertEqual(wrapper.closed, True) + + def test_closed_shouldnt_raise_on_detached_stream(self): + stream = TextIOWrapper(StringIO()) + stream.detach() + wrapper = StreamWrapper(stream, None) + self.assertEqual(wrapper.closed, True) + +class AnsiToWin32Test(TestCase): + + def testInit(self): + mockStdout = Mock() + auto = Mock() + stream = AnsiToWin32(mockStdout, autoreset=auto) + self.assertEqual(stream.wrapped, mockStdout) + self.assertEqual(stream.autoreset, auto) + + @patch('colorama.ansitowin32.winterm', None) + @patch('colorama.ansitowin32.winapi_test', lambda *_: True) + def testStripIsTrueOnWindows(self): + with osname('nt'): + mockStdout = Mock() + stream = AnsiToWin32(mockStdout) + self.assertTrue(stream.strip) + + def testStripIsFalseOffWindows(self): + with osname('posix'): + mockStdout = Mock(closed=False) + stream = AnsiToWin32(mockStdout) + self.assertFalse(stream.strip) + + def testWriteStripsAnsi(self): + mockStdout = Mock() + stream = AnsiToWin32(mockStdout) + stream.wrapped = Mock() + stream.write_and_convert = Mock() + stream.strip = True + + stream.write('abc') + + self.assertFalse(stream.wrapped.write.called) + self.assertEqual(stream.write_and_convert.call_args, (('abc',), {})) + + def testWriteDoesNotStripAnsi(self): + mockStdout = Mock() + stream = AnsiToWin32(mockStdout) + stream.wrapped = Mock() + stream.write_and_convert = Mock() + stream.strip = False + stream.convert = False + + stream.write('abc') + + self.assertFalse(stream.write_and_convert.called) + 
self.assertEqual(stream.wrapped.write.call_args, (('abc',), {})) + + def assert_autoresets(self, convert, autoreset=True): + stream = AnsiToWin32(Mock()) + stream.convert = convert + stream.reset_all = Mock() + stream.autoreset = autoreset + stream.winterm = Mock() + + stream.write('abc') + + self.assertEqual(stream.reset_all.called, autoreset) + + def testWriteAutoresets(self): + self.assert_autoresets(convert=True) + self.assert_autoresets(convert=False) + self.assert_autoresets(convert=True, autoreset=False) + self.assert_autoresets(convert=False, autoreset=False) + + def testWriteAndConvertWritesPlainText(self): + stream = AnsiToWin32(Mock()) + stream.write_and_convert( 'abc' ) + self.assertEqual( stream.wrapped.write.call_args, (('abc',), {}) ) + + def testWriteAndConvertStripsAllValidAnsi(self): + stream = AnsiToWin32(Mock()) + stream.call_win32 = Mock() + data = [ + 'abc\033[mdef', + 'abc\033[0mdef', + 'abc\033[2mdef', + 'abc\033[02mdef', + 'abc\033[002mdef', + 'abc\033[40mdef', + 'abc\033[040mdef', + 'abc\033[0;1mdef', + 'abc\033[40;50mdef', + 'abc\033[50;30;40mdef', + 'abc\033[Adef', + 'abc\033[0Gdef', + 'abc\033[1;20;128Hdef', + ] + for datum in data: + stream.wrapped.write.reset_mock() + stream.write_and_convert( datum ) + self.assertEqual( + [args[0] for args in stream.wrapped.write.call_args_list], + [ ('abc',), ('def',) ] + ) + + def testWriteAndConvertSkipsEmptySnippets(self): + stream = AnsiToWin32(Mock()) + stream.call_win32 = Mock() + stream.write_and_convert( '\033[40m\033[41m' ) + self.assertFalse( stream.wrapped.write.called ) + + def testWriteAndConvertCallsWin32WithParamsAndCommand(self): + stream = AnsiToWin32(Mock()) + stream.convert = True + stream.call_win32 = Mock() + stream.extract_params = Mock(return_value='params') + data = { + 'abc\033[adef': ('a', 'params'), + 'abc\033[;;bdef': ('b', 'params'), + 'abc\033[0cdef': ('c', 'params'), + 'abc\033[;;0;;Gdef': ('G', 'params'), + 'abc\033[1;20;128Hdef': ('H', 'params'), + } + for datum, 
expected in data.items(): + stream.call_win32.reset_mock() + stream.write_and_convert( datum ) + self.assertEqual( stream.call_win32.call_args[0], expected ) + + def test_reset_all_shouldnt_raise_on_closed_orig_stdout(self): + stream = StringIO() + converter = AnsiToWin32(stream) + stream.close() + + converter.reset_all() + + def test_wrap_shouldnt_raise_on_closed_orig_stdout(self): + stream = StringIO() + stream.close() + with \ + patch("colorama.ansitowin32.os.name", "nt"), \ + patch("colorama.ansitowin32.winapi_test", lambda: True): + converter = AnsiToWin32(stream) + self.assertTrue(converter.strip) + self.assertFalse(converter.convert) + + def test_wrap_shouldnt_raise_on_missing_closed_attr(self): + with \ + patch("colorama.ansitowin32.os.name", "nt"), \ + patch("colorama.ansitowin32.winapi_test", lambda: True): + converter = AnsiToWin32(object()) + self.assertTrue(converter.strip) + self.assertFalse(converter.convert) + + def testExtractParams(self): + stream = AnsiToWin32(Mock()) + data = { + '': (0,), + ';;': (0,), + '2': (2,), + ';;002;;': (2,), + '0;1': (0, 1), + ';;003;;456;;': (3, 456), + '11;22;33;44;55': (11, 22, 33, 44, 55), + } + for datum, expected in data.items(): + self.assertEqual(stream.extract_params('m', datum), expected) + + def testCallWin32UsesLookup(self): + listener = Mock() + stream = AnsiToWin32(listener) + stream.win32_calls = { + 1: (lambda *_, **__: listener(11),), + 2: (lambda *_, **__: listener(22),), + 3: (lambda *_, **__: listener(33),), + } + stream.call_win32('m', (3, 1, 99, 2)) + self.assertEqual( + [a[0][0] for a in listener.call_args_list], + [33, 11, 22] ) + + def test_osc_codes(self): + mockStdout = Mock() + stream = AnsiToWin32(mockStdout, convert=True) + with patch('colorama.ansitowin32.winterm') as winterm: + data = [ + '\033]0\x07', # missing arguments + '\033]0;foo\x08', # wrong OSC command + '\033]0;colorama_test_title\x07', # should work + '\033]1;colorama_test_title\x07', # wrong set command + 
'\033]2;colorama_test_title\x07', # should work + '\033]' + ';' * 64 + '\x08', # see issue #247 + ] + for code in data: + stream.write(code) + self.assertEqual(winterm.set_title.call_count, 2) + + def test_native_windows_ansi(self): + with ExitStack() as stack: + def p(a, b): + stack.enter_context(patch(a, b, create=True)) + # Pretend to be on Windows + p("colorama.ansitowin32.os.name", "nt") + p("colorama.ansitowin32.winapi_test", lambda: True) + p("colorama.win32.winapi_test", lambda: True) + p("colorama.winterm.win32.windll", "non-None") + p("colorama.winterm.get_osfhandle", lambda _: 1234) + + # Pretend that our mock stream has native ANSI support + p( + "colorama.winterm.win32.GetConsoleMode", + lambda _: ENABLE_VIRTUAL_TERMINAL_PROCESSING, + ) + SetConsoleMode = Mock() + p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) + + stdout = Mock() + stdout.closed = False + stdout.isatty.return_value = True + stdout.fileno.return_value = 1 + + # Our fake console says it has native vt support, so AnsiToWin32 should + # enable that support and do nothing else. + stream = AnsiToWin32(stdout) + SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) + self.assertFalse(stream.strip) + self.assertFalse(stream.convert) + self.assertFalse(stream.should_wrap()) + + # Now let's pretend we're on an old Windows console, that doesn't have + # native ANSI support. 
+ p("colorama.winterm.win32.GetConsoleMode", lambda _: 0) + SetConsoleMode = Mock() + p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) + + stream = AnsiToWin32(stdout) + SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) + self.assertTrue(stream.strip) + self.assertTrue(stream.convert) + self.assertTrue(stream.should_wrap()) + + +if __name__ == '__main__': + main() diff --git a/src/pip/_vendor/colorama/tests/initialise_test.py b/src/pip/_vendor/colorama/tests/initialise_test.py new file mode 100644 index 00000000000..89f9b07511c --- /dev/null +++ b/src/pip/_vendor/colorama/tests/initialise_test.py @@ -0,0 +1,189 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import sys +from unittest import TestCase, main, skipUnless + +try: + from unittest.mock import patch, Mock +except ImportError: + from mock import patch, Mock + +from ..ansitowin32 import StreamWrapper +from ..initialise import init, just_fix_windows_console, _wipe_internal_state_for_tests +from .utils import osname, replace_by + +orig_stdout = sys.stdout +orig_stderr = sys.stderr + + +class InitTest(TestCase): + + @skipUnless(sys.stdout.isatty(), "sys.stdout is not a tty") + def setUp(self): + # sanity check + self.assertNotWrapped() + + def tearDown(self): + _wipe_internal_state_for_tests() + sys.stdout = orig_stdout + sys.stderr = orig_stderr + + def assertWrapped(self): + self.assertIsNot(sys.stdout, orig_stdout, 'stdout should be wrapped') + self.assertIsNot(sys.stderr, orig_stderr, 'stderr should be wrapped') + self.assertTrue(isinstance(sys.stdout, StreamWrapper), + 'bad stdout wrapper') + self.assertTrue(isinstance(sys.stderr, StreamWrapper), + 'bad stderr wrapper') + + def assertNotWrapped(self): + self.assertIs(sys.stdout, orig_stdout, 'stdout should not be wrapped') + self.assertIs(sys.stderr, orig_stderr, 'stderr should not be wrapped') + + @patch('colorama.initialise.reset_all') + @patch('colorama.ansitowin32.winapi_test', lambda 
*_: True) + @patch('colorama.ansitowin32.enable_vt_processing', lambda *_: False) + def testInitWrapsOnWindows(self, _): + with osname("nt"): + init() + self.assertWrapped() + + @patch('colorama.initialise.reset_all') + @patch('colorama.ansitowin32.winapi_test', lambda *_: False) + def testInitDoesntWrapOnEmulatedWindows(self, _): + with osname("nt"): + init() + self.assertNotWrapped() + + def testInitDoesntWrapOnNonWindows(self): + with osname("posix"): + init() + self.assertNotWrapped() + + def testInitDoesntWrapIfNone(self): + with replace_by(None): + init() + # We can't use assertNotWrapped here because replace_by(None) + # changes stdout/stderr already. + self.assertIsNone(sys.stdout) + self.assertIsNone(sys.stderr) + + def testInitAutoresetOnWrapsOnAllPlatforms(self): + with osname("posix"): + init(autoreset=True) + self.assertWrapped() + + def testInitWrapOffDoesntWrapOnWindows(self): + with osname("nt"): + init(wrap=False) + self.assertNotWrapped() + + def testInitWrapOffIncompatibleWithAutoresetOn(self): + self.assertRaises(ValueError, lambda: init(autoreset=True, wrap=False)) + + @patch('colorama.win32.SetConsoleTextAttribute') + @patch('colorama.initialise.AnsiToWin32') + def testAutoResetPassedOn(self, mockATW32, _): + with osname("nt"): + init(autoreset=True) + self.assertEqual(len(mockATW32.call_args_list), 2) + self.assertEqual(mockATW32.call_args_list[1][1]['autoreset'], True) + self.assertEqual(mockATW32.call_args_list[0][1]['autoreset'], True) + + @patch('colorama.initialise.AnsiToWin32') + def testAutoResetChangeable(self, mockATW32): + with osname("nt"): + init() + + init(autoreset=True) + self.assertEqual(len(mockATW32.call_args_list), 4) + self.assertEqual(mockATW32.call_args_list[2][1]['autoreset'], True) + self.assertEqual(mockATW32.call_args_list[3][1]['autoreset'], True) + + init() + self.assertEqual(len(mockATW32.call_args_list), 6) + self.assertEqual( + mockATW32.call_args_list[4][1]['autoreset'], False) + self.assertEqual( + 
mockATW32.call_args_list[5][1]['autoreset'], False) + + + @patch('colorama.initialise.atexit.register') + def testAtexitRegisteredOnlyOnce(self, mockRegister): + init() + self.assertTrue(mockRegister.called) + mockRegister.reset_mock() + init() + self.assertFalse(mockRegister.called) + + +class JustFixWindowsConsoleTest(TestCase): + def _reset(self): + _wipe_internal_state_for_tests() + sys.stdout = orig_stdout + sys.stderr = orig_stderr + + def tearDown(self): + self._reset() + + @patch("colorama.ansitowin32.winapi_test", lambda: True) + def testJustFixWindowsConsole(self): + if sys.platform != "win32": + # just_fix_windows_console should be a no-op + just_fix_windows_console() + self.assertIs(sys.stdout, orig_stdout) + self.assertIs(sys.stderr, orig_stderr) + else: + def fake_std(): + # Emulate stdout=not a tty, stderr=tty + # to check that we handle both cases correctly + stdout = Mock() + stdout.closed = False + stdout.isatty.return_value = False + stdout.fileno.return_value = 1 + sys.stdout = stdout + + stderr = Mock() + stderr.closed = False + stderr.isatty.return_value = True + stderr.fileno.return_value = 2 + sys.stderr = stderr + + for native_ansi in [False, True]: + with patch( + 'colorama.ansitowin32.enable_vt_processing', + lambda *_: native_ansi + ): + self._reset() + fake_std() + + # Regular single-call test + prev_stdout = sys.stdout + prev_stderr = sys.stderr + just_fix_windows_console() + self.assertIs(sys.stdout, prev_stdout) + if native_ansi: + self.assertIs(sys.stderr, prev_stderr) + else: + self.assertIsNot(sys.stderr, prev_stderr) + + # second call without resetting is always a no-op + prev_stdout = sys.stdout + prev_stderr = sys.stderr + just_fix_windows_console() + self.assertIs(sys.stdout, prev_stdout) + self.assertIs(sys.stderr, prev_stderr) + + self._reset() + fake_std() + + # If init() runs first, just_fix_windows_console should be a no-op + init() + prev_stdout = sys.stdout + prev_stderr = sys.stderr + just_fix_windows_console() + 
self.assertIs(prev_stdout, sys.stdout) + self.assertIs(prev_stderr, sys.stderr) + + +if __name__ == '__main__': + main() diff --git a/src/pip/_vendor/colorama/tests/isatty_test.py b/src/pip/_vendor/colorama/tests/isatty_test.py new file mode 100644 index 00000000000..0f84e4befe5 --- /dev/null +++ b/src/pip/_vendor/colorama/tests/isatty_test.py @@ -0,0 +1,57 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import sys +from unittest import TestCase, main + +from ..ansitowin32 import StreamWrapper, AnsiToWin32 +from .utils import pycharm, replace_by, replace_original_by, StreamTTY, StreamNonTTY + + +def is_a_tty(stream): + return StreamWrapper(stream, None).isatty() + +class IsattyTest(TestCase): + + def test_TTY(self): + tty = StreamTTY() + self.assertTrue(is_a_tty(tty)) + with pycharm(): + self.assertTrue(is_a_tty(tty)) + + def test_nonTTY(self): + non_tty = StreamNonTTY() + self.assertFalse(is_a_tty(non_tty)) + with pycharm(): + self.assertFalse(is_a_tty(non_tty)) + + def test_withPycharm(self): + with pycharm(): + self.assertTrue(is_a_tty(sys.stderr)) + self.assertTrue(is_a_tty(sys.stdout)) + + def test_withPycharmTTYOverride(self): + tty = StreamTTY() + with pycharm(), replace_by(tty): + self.assertTrue(is_a_tty(tty)) + + def test_withPycharmNonTTYOverride(self): + non_tty = StreamNonTTY() + with pycharm(), replace_by(non_tty): + self.assertFalse(is_a_tty(non_tty)) + + def test_withPycharmNoneOverride(self): + with pycharm(): + with replace_by(None), replace_original_by(None): + self.assertFalse(is_a_tty(None)) + self.assertFalse(is_a_tty(StreamNonTTY())) + self.assertTrue(is_a_tty(StreamTTY())) + + def test_withPycharmStreamWrapped(self): + with pycharm(): + self.assertTrue(AnsiToWin32(StreamTTY()).stream.isatty()) + self.assertFalse(AnsiToWin32(StreamNonTTY()).stream.isatty()) + self.assertTrue(AnsiToWin32(sys.stdout).stream.isatty()) + self.assertTrue(AnsiToWin32(sys.stderr).stream.isatty()) + + +if __name__ == '__main__': + 
main() diff --git a/src/pip/_vendor/colorama/tests/utils.py b/src/pip/_vendor/colorama/tests/utils.py new file mode 100644 index 00000000000..472fafb4403 --- /dev/null +++ b/src/pip/_vendor/colorama/tests/utils.py @@ -0,0 +1,49 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from contextlib import contextmanager +from io import StringIO +import sys +import os + + +class StreamTTY(StringIO): + def isatty(self): + return True + +class StreamNonTTY(StringIO): + def isatty(self): + return False + +@contextmanager +def osname(name): + orig = os.name + os.name = name + yield + os.name = orig + +@contextmanager +def replace_by(stream): + orig_stdout = sys.stdout + orig_stderr = sys.stderr + sys.stdout = stream + sys.stderr = stream + yield + sys.stdout = orig_stdout + sys.stderr = orig_stderr + +@contextmanager +def replace_original_by(stream): + orig_stdout = sys.__stdout__ + orig_stderr = sys.__stderr__ + sys.__stdout__ = stream + sys.__stderr__ = stream + yield + sys.__stdout__ = orig_stdout + sys.__stderr__ = orig_stderr + +@contextmanager +def pycharm(): + os.environ["PYCHARM_HOSTED"] = "1" + non_tty = StreamNonTTY() + with replace_by(non_tty), replace_original_by(non_tty): + yield + del os.environ["PYCHARM_HOSTED"] diff --git a/src/pip/_vendor/colorama/tests/winterm_test.py b/src/pip/_vendor/colorama/tests/winterm_test.py new file mode 100644 index 00000000000..d0955f9e608 --- /dev/null +++ b/src/pip/_vendor/colorama/tests/winterm_test.py @@ -0,0 +1,131 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
+import sys +from unittest import TestCase, main, skipUnless + +try: + from unittest.mock import Mock, patch +except ImportError: + from mock import Mock, patch + +from ..winterm import WinColor, WinStyle, WinTerm + + +class WinTermTest(TestCase): + + @patch('colorama.winterm.win32') + def testInit(self, mockWin32): + mockAttr = Mock() + mockAttr.wAttributes = 7 + 6 * 16 + 8 + mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr + term = WinTerm() + self.assertEqual(term._fore, 7) + self.assertEqual(term._back, 6) + self.assertEqual(term._style, 8) + + @skipUnless(sys.platform.startswith("win"), "requires Windows") + def testGetAttrs(self): + term = WinTerm() + + term._fore = 0 + term._back = 0 + term._style = 0 + self.assertEqual(term.get_attrs(), 0) + + term._fore = WinColor.YELLOW + self.assertEqual(term.get_attrs(), WinColor.YELLOW) + + term._back = WinColor.MAGENTA + self.assertEqual( + term.get_attrs(), + WinColor.YELLOW + WinColor.MAGENTA * 16) + + term._style = WinStyle.BRIGHT + self.assertEqual( + term.get_attrs(), + WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT) + + @patch('colorama.winterm.win32') + def testResetAll(self, mockWin32): + mockAttr = Mock() + mockAttr.wAttributes = 1 + 2 * 16 + 8 + mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr + term = WinTerm() + + term.set_console = Mock() + term._fore = -1 + term._back = -1 + term._style = -1 + + term.reset_all() + + self.assertEqual(term._fore, 1) + self.assertEqual(term._back, 2) + self.assertEqual(term._style, 8) + self.assertEqual(term.set_console.called, True) + + @skipUnless(sys.platform.startswith("win"), "requires Windows") + def testFore(self): + term = WinTerm() + term.set_console = Mock() + term._fore = 0 + + term.fore(5) + + self.assertEqual(term._fore, 5) + self.assertEqual(term.set_console.called, True) + + @skipUnless(sys.platform.startswith("win"), "requires Windows") + def testBack(self): + term = WinTerm() + term.set_console = Mock() + term._back = 0 + 
+ term.back(5) + + self.assertEqual(term._back, 5) + self.assertEqual(term.set_console.called, True) + + @skipUnless(sys.platform.startswith("win"), "requires Windows") + def testStyle(self): + term = WinTerm() + term.set_console = Mock() + term._style = 0 + + term.style(22) + + self.assertEqual(term._style, 22) + self.assertEqual(term.set_console.called, True) + + @patch('colorama.winterm.win32') + def testSetConsole(self, mockWin32): + mockAttr = Mock() + mockAttr.wAttributes = 0 + mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr + term = WinTerm() + term.windll = Mock() + + term.set_console() + + self.assertEqual( + mockWin32.SetConsoleTextAttribute.call_args, + ((mockWin32.STDOUT, term.get_attrs()), {}) + ) + + @patch('colorama.winterm.win32') + def testSetConsoleOnStderr(self, mockWin32): + mockAttr = Mock() + mockAttr.wAttributes = 0 + mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr + term = WinTerm() + term.windll = Mock() + + term.set_console(on_stderr=True) + + self.assertEqual( + mockWin32.SetConsoleTextAttribute.call_args, + ((mockWin32.STDERR, term.get_attrs()), {}) + ) + + +if __name__ == '__main__': + main() diff --git a/src/pip/_vendor/colorama/win32.py b/src/pip/_vendor/colorama/win32.py index c2d83603367..841b0e270a3 100644 --- a/src/pip/_vendor/colorama/win32.py +++ b/src/pip/_vendor/colorama/win32.py @@ -4,6 +4,8 @@ STDOUT = -11 STDERR = -12 +ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004 + try: import ctypes from ctypes import LibraryLoader @@ -89,6 +91,20 @@ def __str__(self): ] _SetConsoleTitleW.restype = wintypes.BOOL + _GetConsoleMode = windll.kernel32.GetConsoleMode + _GetConsoleMode.argtypes = [ + wintypes.HANDLE, + POINTER(wintypes.DWORD) + ] + _GetConsoleMode.restype = wintypes.BOOL + + _SetConsoleMode = windll.kernel32.SetConsoleMode + _SetConsoleMode.argtypes = [ + wintypes.HANDLE, + wintypes.DWORD + ] + _SetConsoleMode.restype = wintypes.BOOL + def _winapi_test(handle): csbi = CONSOLE_SCREEN_BUFFER_INFO() success 
= _GetConsoleScreenBufferInfo( @@ -150,3 +166,15 @@ def FillConsoleOutputAttribute(stream_id, attr, length, start): def SetConsoleTitle(title): return _SetConsoleTitleW(title) + + def GetConsoleMode(handle): + mode = wintypes.DWORD() + success = _GetConsoleMode(handle, byref(mode)) + if not success: + raise ctypes.WinError() + return mode.value + + def SetConsoleMode(handle, mode): + success = _SetConsoleMode(handle, mode) + if not success: + raise ctypes.WinError() diff --git a/src/pip/_vendor/colorama/winterm.py b/src/pip/_vendor/colorama/winterm.py index 0fdb4ec4e91..aad867e8c80 100644 --- a/src/pip/_vendor/colorama/winterm.py +++ b/src/pip/_vendor/colorama/winterm.py @@ -1,6 +1,12 @@ # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from . import win32 +try: + from msvcrt import get_osfhandle +except ImportError: + def get_osfhandle(_): + raise OSError("This isn't windows!") + +from . import win32 # from wincon.h class WinColor(object): @@ -167,3 +173,23 @@ def erase_line(self, mode=0, on_stderr=False): def set_title(self, title): win32.SetConsoleTitle(title) + + +def enable_vt_processing(fd): + if win32.windll is None or not win32.winapi_test(): + return False + + try: + handle = get_osfhandle(fd) + mode = win32.GetConsoleMode(handle) + win32.SetConsoleMode( + handle, + mode | win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING, + ) + + mode = win32.GetConsoleMode(handle) + if mode & win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING: + return True + # Can get TypeError in testsuite where 'fd' is a Mock() + except (OSError, TypeError): + return False diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 9e9d4c11f9e..aa96c0205ab 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,5 +1,5 @@ CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for this. 
-colorama==0.4.5 +colorama==0.4.6 distlib==0.3.6 distro==1.7.0 msgpack==1.0.4 From 6bc9ae84c66794fb6e3b893899191a6c5794b15a Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 12 Nov 2022 10:40:21 +0000 Subject: [PATCH 187/730] Upgrade distro to 1.8.0 --- news/distro.vendor.rst | 1 + src/pip/_vendor/distro/distro.py | 101 +++++++++++++++++++------------ src/pip/_vendor/vendor.txt | 2 +- 3 files changed, 65 insertions(+), 39 deletions(-) create mode 100644 news/distro.vendor.rst diff --git a/news/distro.vendor.rst b/news/distro.vendor.rst new file mode 100644 index 00000000000..a11f652b922 --- /dev/null +++ b/news/distro.vendor.rst @@ -0,0 +1 @@ +Upgrade distro to 1.8.0 diff --git a/src/pip/_vendor/distro/distro.py b/src/pip/_vendor/distro/distro.py index 49066ae8364..89e18680472 100644 --- a/src/pip/_vendor/distro/distro.py +++ b/src/pip/_vendor/distro/distro.py @@ -55,7 +55,7 @@ # Python 3.7 TypedDict = dict -__version__ = "1.7.0" +__version__ = "1.8.0" class VersionDict(TypedDict): @@ -122,6 +122,26 @@ class InfoDict(TypedDict): # Pattern for base file name of distro release file _DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") +# Base file names to be looked up for if _UNIXCONFDIR is not readable. 
+_DISTRO_RELEASE_BASENAMES = [ + "SuSE-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "rocky-release", + "sl-release", + "slackware-version", +] + # Base file names to be ignored when searching for distro release file _DISTRO_RELEASE_IGNORE_BASENAMES = ( "debian_version", @@ -200,6 +220,7 @@ def id() -> str: "opensuse" openSUSE "amzn" Amazon Linux "arch" Arch Linux + "buildroot" Buildroot "cloudlinux" CloudLinux OS "exherbo" Exherbo Linux "gentoo" GenToo Linux @@ -221,6 +242,7 @@ def id() -> str: "midnightbsd" MidnightBSD "rocky" Rocky Linux "aix" AIX + "guix" Guix System ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -876,6 +898,9 @@ def version(self, pretty: bool = False, best: bool = False) -> str: if self.uname_attr("id").startswith("aix"): # On AIX platforms, prefer oslevel command output. versions.insert(0, self.oslevel_info()) + elif self.id() == "debian" or "debian" in self.like().split(): + # On Debian-like, add debian_version file content to candidates list. + versions.append(self._debian_version) version = "" if best: # This algorithm uses the last version in priority order that has @@ -1186,6 +1211,16 @@ def _oslevel_info(self) -> str: return "" return self._to_str(stdout).strip() + @cached_property + def _debian_version(self) -> str: + try: + with open( + os.path.join(self.etc_dir, "debian_version"), encoding="ascii" + ) as fp: + return fp.readline().rstrip() + except FileNotFoundError: + return "" + @staticmethod def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: if not lines: @@ -1228,14 +1263,14 @@ def _distro_release_info(self) -> Dict[str, str]: # file), because we want to use what was specified as best as # possible. 
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if "name" in distro_info and "cloudlinux" in distro_info["name"].lower(): - distro_info["id"] = "cloudlinux" - elif match: - distro_info["id"] = match.group(1) - return distro_info else: try: - basenames = os.listdir(self.etc_dir) + basenames = [ + basename + for basename in os.listdir(self.etc_dir) + if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES + and os.path.isfile(os.path.join(self.etc_dir, basename)) + ] # We sort for repeatability in cases where there are multiple # distro specific files; e.g. CentOS, Oracle, Enterprise all # containing `redhat-release` on top of their own. @@ -1245,39 +1280,29 @@ def _distro_release_info(self) -> Dict[str, str]: # sure about the *-release files. Check common entries of # /etc for information. If they turn out to not be there the # error is handled in `_parse_distro_release_file()`. - basenames = [ - "SuSE-release", - "arch-release", - "base-release", - "centos-release", - "fedora-release", - "gentoo-release", - "mageia-release", - "mandrake-release", - "mandriva-release", - "mandrivalinux-release", - "manjaro-release", - "oracle-release", - "redhat-release", - "rocky-release", - "sl-release", - "slackware-version", - ] + basenames = _DISTRO_RELEASE_BASENAMES for basename in basenames: - if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: - continue match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if match: - filepath = os.path.join(self.etc_dir, basename) - distro_info = self._parse_distro_release_file(filepath) - if "name" in distro_info: - # The name is always present if the pattern matches - self.distro_release_file = filepath - distro_info["id"] = match.group(1) - if "cloudlinux" in distro_info["name"].lower(): - distro_info["id"] = "cloudlinux" - return distro_info - return {} + if match is None: + continue + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + # The name is always present if the 
pattern matches. + if "name" not in distro_info: + continue + self.distro_release_file = filepath + break + else: # the loop didn't "break": no candidate. + return {} + + if match is not None: + distro_info["id"] = match.group(1) + + # CloudLinux < 7: manually enrich info with proper id. + if "cloudlinux" in distro_info.get("name", "").lower(): + distro_info["id"] = "cloudlinux" + + return distro_info def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: """ diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index aa96c0205ab..c86e2b348a7 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,7 +1,7 @@ CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for this. colorama==0.4.6 distlib==0.3.6 -distro==1.7.0 +distro==1.8.0 msgpack==1.0.4 packaging==21.3 pep517==0.13.0 From bbe83b044a908257cf5555a018629f3b2b7546fc Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 12 Nov 2022 10:40:39 +0000 Subject: [PATCH 188/730] Upgrade platformdirs to 2.5.3 --- news/platformdirs.vendor.rst | 1 + src/pip/_vendor/platformdirs/LICENSE | 21 +++++++++++++++++++++ src/pip/_vendor/platformdirs/LICENSE.txt | 22 ---------------------- src/pip/_vendor/platformdirs/version.py | 4 ++-- src/pip/_vendor/platformdirs/windows.py | 4 +++- src/pip/_vendor/vendor.txt | 2 +- 6 files changed, 28 insertions(+), 26 deletions(-) create mode 100644 news/platformdirs.vendor.rst create mode 100644 src/pip/_vendor/platformdirs/LICENSE delete mode 100644 src/pip/_vendor/platformdirs/LICENSE.txt diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst new file mode 100644 index 00000000000..04ee05723b1 --- /dev/null +++ b/news/platformdirs.vendor.rst @@ -0,0 +1 @@ +Upgrade platformdirs to 2.5.3 diff --git a/src/pip/_vendor/platformdirs/LICENSE b/src/pip/_vendor/platformdirs/LICENSE new file mode 100644 index 00000000000..f35fed9191b --- /dev/null +++ b/src/pip/_vendor/platformdirs/LICENSE @@ -0,0 +1,21 @@ +MIT 
License + +Copyright (c) 2010-202x The platformdirs developers + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/src/pip/_vendor/platformdirs/LICENSE.txt b/src/pip/_vendor/platformdirs/LICENSE.txt deleted file mode 100644 index f0bbd69f0c8..00000000000 --- a/src/pip/_vendor/platformdirs/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -# This is the MIT license - -Copyright (c) 2010 ActiveState Software Inc. - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/pip/_vendor/platformdirs/version.py b/src/pip/_vendor/platformdirs/version.py index 4552c02aff9..6361dbf9c07 100644 --- a/src/pip/_vendor/platformdirs/version.py +++ b/src/pip/_vendor/platformdirs/version.py @@ -1,4 +1,4 @@ """Version information""" -__version__ = "2.5.2" -__version_info__ = (2, 5, 2) +__version__ = "2.5.3" +__version_info__ = (2, 5, 3) diff --git a/src/pip/_vendor/platformdirs/windows.py b/src/pip/_vendor/platformdirs/windows.py index ef972bdf29c..d5c27b34140 100644 --- a/src/pip/_vendor/platformdirs/windows.py +++ b/src/pip/_vendor/platformdirs/windows.py @@ -2,6 +2,7 @@ import ctypes import os +import sys from functools import lru_cache from typing import Callable @@ -132,7 +133,8 @@ def get_win_folder_from_registry(csidl_name: str) -> str: }.get(csidl_name) if shell_folder_name is None: raise ValueError(f"Unknown CSIDL name: {csidl_name}") - + if sys.platform != "win32": # only needed for mypy type checker to know that this code runs only on Windows + raise NotImplementedError import winreg key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders") diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index c86e2b348a7..a34277b8c54 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -5,7 +5,7 @@ distro==1.8.0 msgpack==1.0.4 packaging==21.3 pep517==0.13.0 -platformdirs==2.5.2 +platformdirs==2.5.3 pyparsing==3.0.9 requests==2.28.1 certifi==2022.09.24 From 
31ebba23d713b85d3bb098af4a81afa0e9b05654 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 12 Nov 2022 11:46:20 +0000 Subject: [PATCH 189/730] Update release docs to clarify some points --- docs/html/development/release-process.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/docs/html/development/release-process.rst b/docs/html/development/release-process.rst index acff1204e7b..a9ae8465ef3 100644 --- a/docs/html/development/release-process.rst +++ b/docs/html/development/release-process.rst @@ -31,6 +31,15 @@ to need extra work before being released, the release manager always has the option to back out the partial change prior to a release. The PR can then be reworked and resubmitted for the next release. +Vendoring updates will be picked up fron the ``main`` branch, as for any other +update. Ideally, vendoring updates should be merged between releases, just like +any other change. If there are outstanding updates to vendored packages, the +release manager *may* at their discretion choose to do a vendoring update +before the release. However this is *not* a requirement and in particular, +updates to vendored packages that fix issues in pip should be merged +proactively, to ensure that they will be present in the next release. + + .. _`Deprecation Policy`: Deprecation Policy @@ -166,6 +175,11 @@ Sometimes we need to release a bugfix release of the form ``YY.N.Z+1``. In order to create one of these the changes should already be merged into the ``main`` branch. +Note that this process is only needed when there are changes on the main branch +that you do *not* want to include in the bugfix release. For a bugfix release +that will include everything that is on the ``main`` branch, the above process +for creating a new release can be used, simply changing the version number. + #. Create a new ``release/YY.N.Z+1`` branch off of the ``YY.N`` tag using the command ``git checkout -b release/YY.N.Z+1 YY.N``. #. 
Cherry pick the fixed commits off of the ``main`` branch, fixing any From f7589d6ec0024f310b9643805201495ffb44b0e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= Date: Wed, 16 Nov 2022 13:28:17 +0100 Subject: [PATCH 190/730] Don't maintain 2 copies of change_root The change_root function was added twice: In ae324d17033253626725330c6635c0089927fbf5 and in 36a9b365234a0e6608e9304f560d68df7bf85038 --- ...1b-9024-4448-9ae1-6e4a5a5952f0.trivial.rst | 0 tests/functional/test_install.py | 38 ++----------------- 2 files changed, 4 insertions(+), 34 deletions(-) create mode 100644 news/c1da841b-9024-4448-9ae1-6e4a5a5952f0.trivial.rst diff --git a/news/c1da841b-9024-4448-9ae1-6e4a5a5952f0.trivial.rst b/news/c1da841b-9024-4448-9ae1-6e4a5a5952f0.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index f611372685f..6131a4054d6 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1145,39 +1145,6 @@ def main(): pass assert "--no-warn-script-location" not in result.stderr, str(result) -def _change_root(new_root: str, pathname: str) -> str: - """ - Adapted from distutils. - - Return 'pathname' with 'new_root' prepended. If 'pathname' is - relative, this is equivalent to "os.path.join(new_root,pathname)". - Otherwise, it requires making 'pathname' relative and then joining the - two, which is tricky on DOS/Windows and Mac OS. 
- """ - try: - from distutils.util import change_root - except ImportError: - pass - else: - return change_root(new_root, pathname) - - if os.name == "posix": - if not os.path.isabs(pathname): - return os.path.join(new_root, pathname) - else: - return os.path.join(new_root, pathname[1:]) - - elif os.name == "nt": - drive, path = os.path.splitdrive(pathname) - if path[0] == "\\": - path = path[1:] - return os.path.join(new_root, path) - - else: - # distutils raise DistutilsPlatformError here - raise RuntimeError(f"nothing known about platform '{os.name}'") - - @pytest.mark.usefixtures("with_wheel") def test_install_package_with_root(script: PipTestEnvironment, data: TestData) -> None: """ @@ -1196,8 +1163,11 @@ def test_install_package_with_root(script: PipTestEnvironment, data: TestData) - normal_install_path = os.fspath( script.base_path / script.site_packages / "simple-1.0.dist-info" ) + # use a function borrowed from distutils + # to change the root exactly how the --root option does it + from pip._internal.locations.base import change_root - root_path = _change_root(os.path.join(script.scratch, "root"), normal_install_path) + root_path = change_root(os.path.join(script.scratch, "root"), normal_install_path) result.did_create(root_path) # Should show find-links location in output From 93ade8586e3eb3792250f1f0eb924c1664bc4df7 Mon Sep 17 00:00:00 2001 From: Phil Elson Date: Wed, 16 Nov 2022 21:17:33 +0100 Subject: [PATCH 191/730] Update docs/html/topics/configuration.md Co-authored-by: Tzu-ping Chung --- docs/html/topics/configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/topics/configuration.md b/docs/html/topics/configuration.md index c544d0c700c..ddeee0826a4 100644 --- a/docs/html/topics/configuration.md +++ b/docs/html/topics/configuration.md @@ -26,7 +26,7 @@ pip has 4 "levels" of configuration files: - `global`: system-wide configuration file, shared across all users. 
- `user`: per-user configuration file, shared across all environments. -- `base` : per-base environment configuration file, shared across all virtualenvs with the same base. (added in pip `v23`) +- `base` : per-base environment configuration file, shared across all virtualenvs with the same base. (available since pip 23.0) - `site`: per-environment configuration file; i.e. per-virtualenv. ### Location From 19e802250e93ee5c9b35ef72a70d0ae4e32155c4 Mon Sep 17 00:00:00 2001 From: Daniele Nicolodi Date: Tue, 15 Nov 2022 09:46:55 +0100 Subject: [PATCH 192/730] Use the "venv" scheme if available to obtain prefixed lib paths get_prefixed_libs() computes the Python path for libraries in a pip isolation environment. Python 3.11 introduced the "venv" path scheme to be used in these cases. Use it if available. This solves a bug on Homebrew's Python 3.10 and later where the default paths scheme when Python is invoked outside a virtual environment is "osx_framework_library" and does not relative to the "{base}" or "{platbase}" variables. Fixes #11539. --- news/11598.bugfix.rst | 1 + src/pip/_internal/locations/_sysconfig.py | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 news/11598.bugfix.rst diff --git a/news/11598.bugfix.rst b/news/11598.bugfix.rst new file mode 100644 index 00000000000..031ff9ddca0 --- /dev/null +++ b/news/11598.bugfix.rst @@ -0,0 +1 @@ +Use the "venv" scheme if available to obtain prefixed lib paths. 
diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 0bbc9283db7..5c870c783b3 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -214,5 +214,9 @@ def get_platlib() -> str: def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]: - paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix}) + vars = {"base": prefix, "platbase": prefix} + if "venv" in sysconfig.get_scheme_names(): + paths = sysconfig.get_paths(vars=vars, scheme="venv") + else: + paths = sysconfig.get_paths(vars=vars) return (paths["purelib"], paths["platlib"]) From f8beb61f1c7022c667936cd8e6998ec987f38daf Mon Sep 17 00:00:00 2001 From: Daniele Nicolodi Date: Wed, 16 Nov 2022 22:38:02 +0100 Subject: [PATCH 193/730] Rename get_prefixed_libs() to get_isolated_environment_lib_paths() Since this function is only used for creating isolated environments, rename it to better describe what it does. This avoids needing to think about why the implementation uses the "venv" paths scheme even when pip is not running in a virtual environment. 
--- src/pip/_internal/build_env.py | 8 ++++++-- src/pip/_internal/locations/__init__.py | 8 ++++---- src/pip/_internal/locations/_distutils.py | 2 +- src/pip/_internal/locations/_sysconfig.py | 2 +- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index cc2b38bab79..e67b868e8f4 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -18,7 +18,11 @@ from pip import __file__ as pip_location from pip._internal.cli.spinners import open_spinner -from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib +from pip._internal.locations import ( + get_isolated_environment_lib_paths, + get_platlib, + get_purelib, +) from pip._internal.metadata import get_default_environment, get_environment from pip._internal.utils.subprocess import call_subprocess from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds @@ -37,7 +41,7 @@ def __init__(self, path: str) -> None: "nt" if os.name == "nt" else "posix_prefix", vars={"base": path, "platbase": path}, )["scripts"] - self.lib_dirs = get_prefixed_libs(path) + self.lib_dirs = get_isolated_environment_lib_paths(path) def get_runnable_pip() -> str: diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 60afe0a73b8..516bd607839 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -27,7 +27,7 @@ "get_bin_user", "get_major_minor_version", "get_platlib", - "get_prefixed_libs", + "get_isolated_environment_lib_paths", "get_purelib", "get_scheme", "get_src_prefix", @@ -482,13 +482,13 @@ def _looks_like_apple_library(path: str) -> bool: return path == f"/Library/Python/{get_major_minor_version()}/site-packages" -def get_prefixed_libs(prefix: str) -> List[str]: +def get_isolated_environment_lib_paths(prefix: str) -> List[str]: """Return the lib locations under ``prefix``.""" - new_pure, new_plat = 
_sysconfig.get_prefixed_libs(prefix) + new_pure, new_plat = _sysconfig.get_isolated_environment_lib_paths(prefix) if _USE_SYSCONFIG: return _deduplicated(new_pure, new_plat) - old_pure, old_plat = _distutils.get_prefixed_libs(prefix) + old_pure, old_plat = _distutils.get_isolated_environment_lib_paths(prefix) old_lib_paths = _deduplicated(old_pure, old_plat) # Apple's Python (shipped with Xcode and Command Line Tools) hard-code diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index c7712f016f5..a6fbcd2f09d 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -173,7 +173,7 @@ def get_platlib() -> str: return get_python_lib(plat_specific=True) -def get_prefixed_libs(prefix: str) -> Tuple[str, str]: +def get_isolated_environment_lib_paths(prefix: str) -> Tuple[str, str]: return ( get_python_lib(plat_specific=False, prefix=prefix), get_python_lib(plat_specific=True, prefix=prefix), diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 5c870c783b3..69821572081 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -213,7 +213,7 @@ def get_platlib() -> str: return sysconfig.get_paths()["platlib"] -def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]: +def get_isolated_environment_lib_paths(prefix: str) -> typing.Tuple[str, str]: vars = {"base": prefix, "platbase": prefix} if "venv" in sysconfig.get_scheme_names(): paths = sysconfig.get_paths(vars=vars, scheme="venv") From ba0e3ac6f4f0f396c02ae248c599205b98d184aa Mon Sep 17 00:00:00 2001 From: Rishi <29522253+inuik@users.noreply.github.com> Date: Wed, 16 Nov 2022 16:55:47 -0500 Subject: [PATCH 194/730] Update the macos tag in `pip download` docs (#11603) --- docs/html/cli/pip_download.rst | 6 +++--- news/d4da20f5-0ed2-480c-baa9-2490e4abdff6.trivial.rst | 0 2 files changed, 3 insertions(+), 3 deletions(-) create 
mode 100644 news/d4da20f5-0ed2-480c-baa9-2490e4abdff6.trivial.rst diff --git a/docs/html/cli/pip_download.rst b/docs/html/cli/pip_download.rst index f1fe1769ee7..d247c51ccfb 100644 --- a/docs/html/cli/pip_download.rst +++ b/docs/html/cli/pip_download.rst @@ -81,7 +81,7 @@ Examples #. Download a package and all of its dependencies with OSX specific interpreter constraints. This forces OSX 10.10 or lower compatibility. Since OSX deps are forward compatible, - this will also match ``macosx-10_9_x86_64``, ``macosx-10_8_x86_64``, ``macosx-10_8_intel``, + this will also match ``macosx_10_9_x86_64``, ``macosx_10_8_x86_64``, ``macosx_10_8_intel``, etc. It will also match deps with platform ``any``. Also force the interpreter version to ``27`` (or more generic, i.e. ``2``) and implementation to ``cp`` (or more generic, i.e. ``py``). @@ -92,7 +92,7 @@ Examples python -m pip download \ --only-binary=:all: \ - --platform macosx-10_10_x86_64 \ + --platform macosx_10_10_x86_64 \ --python-version 27 \ --implementation cp \ SomePackage @@ -103,7 +103,7 @@ Examples py -m pip download ^ --only-binary=:all: ^ - --platform macosx-10_10_x86_64 ^ + --platform macosx_10_10_x86_64 ^ --python-version 27 ^ --implementation cp ^ SomePackage diff --git a/news/d4da20f5-0ed2-480c-baa9-2490e4abdff6.trivial.rst b/news/d4da20f5-0ed2-480c-baa9-2490e4abdff6.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From 81d6053ee3ce5c4a8b0572cb80c542ab9f7e461e Mon Sep 17 00:00:00 2001 From: Phil Elson Date: Thu, 17 Nov 2022 17:25:30 +0100 Subject: [PATCH 195/730] Use the VIRTUAL_ENV environment variable in the configuration documentation This follows the discussion in https://github.com/pypa/pip/pull/11487/files#r988625394, that despite the VIRTUAL_ENV environment variable not being the technically correct value, it is more readily understood by readers than ``sys.prefix``. 
--- docs/html/topics/configuration.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/html/topics/configuration.md b/docs/html/topics/configuration.md index ddeee0826a4..521bc9af4b9 100644 --- a/docs/html/topics/configuration.md +++ b/docs/html/topics/configuration.md @@ -52,7 +52,7 @@ Base : {file}`\{sys.base_prefix\}/pip.conf` Site -: {file}`\{sys.prefix\}/pip.conf` +: {file}`$VIRTUAL_ENV/pip.conf` ``` ```{tab} MacOS @@ -71,7 +71,7 @@ Base : {file}`\{sys.base_prefix\}/pip.conf` Site -: {file}`\{sys.prefix\}/pip.conf` +: {file}`$VIRTUAL_ENV/pip.conf` ``` ```{tab} Windows @@ -92,7 +92,7 @@ Base : {file}`\{sys.base_prefix\}\\pip.ini` Site -: {file}`\{sys.prefix\}\\pip.ini` +: {file}`%VIRTUAL_ENV%\\pip.ini` ``` ### `PIP_CONFIG_FILE` From 1273c7a9694686e438e886b6382b7d41f430a25c Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Sun, 20 Nov 2022 13:23:48 +0100 Subject: [PATCH 196/730] Fix typos found by codespell --- NEWS.rst | 2 +- docs/html/development/release-process.rst | 2 +- docs/html/reference/installation-report.md | 2 +- src/pip/_internal/index/collector.py | 2 +- src/pip/_internal/network/auth.py | 4 ++-- src/pip/_internal/vcs/bazaar.py | 2 +- src/pip/_internal/vcs/subversion.py | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/NEWS.rst b/NEWS.rst index ff89e5cdf54..1473db7125c 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -55,7 +55,7 @@ Bug Fixes - Ensure that the candidate ``pip`` executable exists, when checking for a new version of pip. (`#11309 `_) - Ignore distributions with invalid ``Name`` in metadata instead of crashing, when using the ``importlib.metadata`` backend. (`#11352 `_) -- Raise RequirementsFileParseError when parsing malformed requirements options that can't be sucessfully parsed by shlex. (`#11491 `_) +- Raise RequirementsFileParseError when parsing malformed requirements options that can't be successfully parsed by shlex. 
(`#11491 `_) - Fix build environment isolation on some system Pythons. (`#6264 `_) Vendored Libraries diff --git a/docs/html/development/release-process.rst b/docs/html/development/release-process.rst index a9ae8465ef3..b71e2820bd2 100644 --- a/docs/html/development/release-process.rst +++ b/docs/html/development/release-process.rst @@ -31,7 +31,7 @@ to need extra work before being released, the release manager always has the option to back out the partial change prior to a release. The PR can then be reworked and resubmitted for the next release. -Vendoring updates will be picked up fron the ``main`` branch, as for any other +Vendoring updates will be picked up from the ``main`` branch, as for any other update. Ideally, vendoring updates should be merged between releases, just like any other change. If there are outstanding updates to vendored packages, the release manager *may* at their discretion choose to do a vendoring update diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md index fff37242d04..de67528cfd7 100644 --- a/docs/html/reference/installation-report.md +++ b/docs/html/reference/installation-report.md @@ -71,7 +71,7 @@ package with the following properties: ``` - `requested`: `true` if the requirement was explicitly provided by the user, either - directely via a command line argument or indirectly via a requirements file. `false` + directly via a command line argument or indirectly via a requirements file. `false` if the requirement was installed as a dependency of another requirement. - `requested_extras`: extras requested by the user. 
This field is only present when the diff --git a/src/pip/_internal/index/collector.py b/src/pip/_internal/index/collector.py index 0120610c758..b3e293ea3a5 100644 --- a/src/pip/_internal/index/collector.py +++ b/src/pip/_internal/index/collector.py @@ -354,7 +354,7 @@ def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexCon if not url.endswith("/"): url += "/" # TODO: In the future, it would be nice if pip supported PEP 691 - # style respones in the file:// URLs, however there's no + # style responses in the file:// URLs, however there's no # standard file extension for application/vnd.pypi.simple.v1+json # so we'll need to come up with something on our own. url = urllib.parse.urljoin(url, "index.html") diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 241ddc53a9c..68b5a5f45be 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -113,7 +113,7 @@ def save_auth_info(self, url: str, username: str, password: str) -> None: return self._set_password(url, username, password) def _get_password(self, service_name: str, username: str) -> Optional[str]: - """Mirror the implemenation of keyring.get_password using cli""" + """Mirror the implementation of keyring.get_password using cli""" if self.keyring is None: return None @@ -131,7 +131,7 @@ def _get_password(self, service_name: str, username: str) -> Optional[str]: return res.stdout.decode("utf-8").strip("\n") def _set_password(self, service_name: str, username: str, password: str) -> None: - """Mirror the implemenation of keyring.set_password using cli""" + """Mirror the implementation of keyring.set_password using cli""" if self.keyring is None: return None diff --git a/src/pip/_internal/vcs/bazaar.py b/src/pip/_internal/vcs/bazaar.py index 06c80e48a39..20a17ed0927 100644 --- a/src/pip/_internal/vcs/bazaar.py +++ b/src/pip/_internal/vcs/bazaar.py @@ -72,7 +72,7 @@ def update(self, dest: str, url: HiddenText, rev_options: 
RevOptions) -> None: @classmethod def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: - # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// re-add it url, rev, user_pass = super().get_url_rev_and_auth(url) if url.startswith("ssh://"): url = "bzr+" + url diff --git a/src/pip/_internal/vcs/subversion.py b/src/pip/_internal/vcs/subversion.py index 2cd6f0ae9d2..16d93a67b7b 100644 --- a/src/pip/_internal/vcs/subversion.py +++ b/src/pip/_internal/vcs/subversion.py @@ -87,7 +87,7 @@ def get_netloc_and_auth( @classmethod def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]: - # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it + # hotfix the URL scheme after removing svn+ from svn+ssh:// re-add it url, rev, user_pass = super().get_url_rev_and_auth(url) if url.startswith("ssh://"): url = "svn+" + url From 98aa8b5298340dd57b1e02d24943410491a261b9 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 22 Nov 2022 15:00:04 -0500 Subject: [PATCH 197/730] Restrict `#egg=` fragments to valid PEP 508 names This should help reduce user confusion about what can go in a URI's egg fragment. Fixes #11567. 
Signed-off-by: William Woodruff --- src/pip/_internal/exceptions.py | 8 ++++++++ src/pip/_internal/models/link.py | 16 +++++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 2ab1f591f12..ac4057733e1 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -658,3 +658,11 @@ def __str__(self) -> str: assert self.error is not None message_part = f".\n{self.error}\n" return f"Configuration file {self.reason}{message_part}" + + +class InvalidEggFragment(InstallationError): + """A link's `#egg=` fragment doesn't look like a valid PEP 508 project + name.""" + + def __init__(self, fragment: str) -> None: + super().__init__(f"egg fragment is not a bare project name: {fragment}") diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index c792d128bcf..27001b2bbc6 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -18,6 +18,7 @@ Union, ) +from pip._internal.exceptions import InvalidEggFragment from pip._internal.utils.filetypes import WHEEL_EXTENSION from pip._internal.utils.hashes import Hashes from pip._internal.utils.misc import ( @@ -358,12 +359,25 @@ def url_without_fragment(self) -> str: _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") + # Per PEP 508. + _project_name_re = re.compile( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE + ) + @property def egg_fragment(self) -> Optional[str]: match = self._egg_fragment_re.search(self._url) if not match: return None - return match.group(1) + + # The egg fragment must look like a project name, and only + # a project name. In particular, it can't contain version constraints + # or anything else like that. 
+ project_name = match.group(1) + if not self._project_name_re.match(project_name): + raise InvalidEggFragment(project_name) + + return project_name _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)") From 4af0984cc31bb62f207d3b1ce220d58b38010af4 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 22 Nov 2022 15:50:45 -0500 Subject: [PATCH 198/730] models/link: reuse pyparsing + requirements combinators for egg fragment This should now be consistent with existing tests (without establishing that those tests are actually well-specified). Signed-off-by: William Woodruff --- src/pip/_internal/models/link.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 27001b2bbc6..3df2d0b5f4c 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -18,6 +18,10 @@ Union, ) +from pip._vendor.packaging.requirements import EXTRAS, NAME +from pip._vendor.pyparsing import Optional as Maybe +from pip._vendor.pyparsing import ParseException, stringEnd, stringStart + from pip._internal.exceptions import InvalidEggFragment from pip._internal.utils.filetypes import WHEEL_EXTENSION from pip._internal.utils.hashes import Hashes @@ -359,10 +363,7 @@ def url_without_fragment(self) -> str: _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") - # Per PEP 508. - _project_name_re = re.compile( - r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE - ) + _fragment_parser = stringStart + NAME + Maybe(EXTRAS) + stringEnd @property def egg_fragment(self) -> Optional[str]: @@ -370,11 +371,12 @@ def egg_fragment(self) -> Optional[str]: if not match: return None - # The egg fragment must look like a project name, and only - # a project name. In particular, it can't contain version constraints - # or anything else like that. + # An egg fragment looks like a PEP 508 project name, along with + # an optional extras specifier. 
Anything else is invalid. project_name = match.group(1) - if not self._project_name_re.match(project_name): + try: + self._fragment_parser.parseString(project_name) + except ParseException: raise InvalidEggFragment(project_name) return project_name From e26712616e774f5a8d4a732be1fcd281042d414a Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 22 Nov 2022 17:04:23 -0500 Subject: [PATCH 199/730] topics/vcs-support: clarify the egg fragment's syntax This doesn't actually address the semantics of extras in the egg fragment. Signed-off-by: William Woodruff --- docs/html/topics/vcs-support.md | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/html/topics/vcs-support.md b/docs/html/topics/vcs-support.md index 70bb5beb9dc..af1ebffcff3 100644 --- a/docs/html/topics/vcs-support.md +++ b/docs/html/topics/vcs-support.md @@ -139,9 +139,16 @@ option. pip looks at 2 fragments for VCS URLs: - `egg`: For specifying the "project name" for use in pip's dependency - resolution logic. eg: `egg=project_name` + resolution logic. e.g.: `egg=project_name` + + The `egg` fragment may additionally contain an extras specifier, e.g.: + `egg=project_name[dev,test]`. + + Both the project name and extras specifier must appear in the form + defined by [PEP 508](https://peps.python.org/pep-0508/). + - `subdirectory`: For specifying the path to the Python package, when it is not - in the root of the VCS directory. eg: `pkg_dir` + in the root of the VCS directory. e.g.: `pkg_dir` ````{admonition} Example If your repository layout is: From 3c337a98cc35127c6c47c014e8bd6b737695cebe Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 22 Nov 2022 17:26:40 -0500 Subject: [PATCH 200/730] models/link: make egg fragment evaluation eager This should prevent us from accepting malformed egg fragments that are shadowed by other parts of the requirement specifier. 
Signed-off-by: William Woodruff --- src/pip/_internal/models/link.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 3df2d0b5f4c..fd421dc4208 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -171,6 +171,7 @@ class Link(KeyBasedCompareMixin): "dist_info_metadata", "link_hash", "cache_link_parsing", + "egg_fragment", ] def __init__( @@ -234,6 +235,7 @@ def __init__( super().__init__(key=url, defining_class=Link) self.cache_link_parsing = cache_link_parsing + self.egg_fragment = self._egg_fragment() @classmethod def from_json( @@ -365,8 +367,7 @@ def url_without_fragment(self) -> str: _fragment_parser = stringStart + NAME + Maybe(EXTRAS) + stringEnd - @property - def egg_fragment(self) -> Optional[str]: + def _egg_fragment(self) -> Optional[str]: match = self._egg_fragment_re.search(self._url) if not match: return None From 227fbee124dd2b099b0cdcfbbca661c1928865c2 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Tue, 22 Nov 2022 17:40:55 -0500 Subject: [PATCH 201/730] tests: add more Link tests This exercises our expectation that egg fragments don't include version specifiers and are evaluated eagerly. 
Signed-off-by: William Woodruff --- tests/unit/test_link.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/unit/test_link.py b/tests/unit/test_link.py index 99ed0aba76e..f5c9e84ef3b 100644 --- a/tests/unit/test_link.py +++ b/tests/unit/test_link.py @@ -2,6 +2,7 @@ import pytest +from pip._internal.exceptions import InvalidEggFragment from pip._internal.models.link import Link, links_equivalent from pip._internal.utils.hashes import Hashes @@ -80,6 +81,35 @@ def test_fragments(self) -> None: assert "eggname" == Link(url).egg_fragment assert "subdir" == Link(url).subdirectory_fragment + # Extras are supported and preserved in the egg fragment, + # even the empty extras specifier (since PEP 508 allows it). + url = "git+https://example.com/package#egg=eggname[extra]" + assert "eggname[extra]" == Link(url).egg_fragment + assert None is Link(url).subdirectory_fragment + url = "git+https://example.com/package#egg=eggname[extra1,extra2]" + assert "eggname[extra1,extra2]" == Link(url).egg_fragment + assert None is Link(url).subdirectory_fragment + url = "git+https://example.com/package#egg=eggname[]" + assert "eggname[]" == Link(url).egg_fragment + assert None is Link(url).subdirectory_fragment + + @pytest.mark.parametrize( + "fragment", + [ + # Package names in egg fragments must be in PEP 508 form. + "~invalid~package~name~", + # Version specifiers are not valid in egg fragments. + "eggname==1.2.3", + "eggname>=1.2.3", + # The extras specifier must be in PEP 508 form. 
+ "eggname[!]", + ], + ) + def test_invalid_egg_fragments(self, fragment: str) -> None: + url = f"git+https://example.com/package#egg={fragment}" + with pytest.raises(InvalidEggFragment): + Link(url) + @pytest.mark.parametrize( "yanked_reason, expected", [ From 3ec4181e5ec9d65cbfe6893175b345a4218cb5cc Mon Sep 17 00:00:00 2001 From: Stephen Rosen Date: Fri, 25 Nov 2022 06:10:37 +0000 Subject: [PATCH 202/730] Update flake8 additional_dependencies via 'upadup' --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 098f3bfe7b0..20a85438c5a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,8 +26,8 @@ repos: hooks: - id: flake8 additional_dependencies: [ - 'flake8-bugbear==22.3.23', - 'flake8-logging-format==0.6.0', + 'flake8-bugbear==22.10.27', + 'flake8-logging-format==0.9.0', 'flake8-implicit-str-concat==0.3.0', ] exclude: tests/data From 3b9abbc9872381ffe2c79f0fc0fe5f5b6b06d61a Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 2 Dec 2022 21:12:47 -0800 Subject: [PATCH 203/730] topics/vcs-support: re-qualify egg fragment behavior Signed-off-by: William Woodruff --- docs/html/topics/vcs-support.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/docs/html/topics/vcs-support.md b/docs/html/topics/vcs-support.md index af1ebffcff3..d108f4d825d 100644 --- a/docs/html/topics/vcs-support.md +++ b/docs/html/topics/vcs-support.md @@ -141,11 +141,9 @@ pip looks at 2 fragments for VCS URLs: - `egg`: For specifying the "project name" for use in pip's dependency resolution logic. e.g.: `egg=project_name` - The `egg` fragment may additionally contain an extras specifier, e.g.: - `egg=project_name[dev,test]`. - - Both the project name and extras specifier must appear in the form - defined by [PEP 508](https://peps.python.org/pep-0508/). 
+ The `egg` fragment **should** be a bare + [PEP 508](https://peps.python.org/pep-0508/) project name. Anything else + is not guaranteed to work. - `subdirectory`: For specifying the path to the Python package, when it is not in the root of the VCS directory. e.g.: `pkg_dir` From 464d16618e2929cd6e274ee98348ebcf395d98f9 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 2 Dec 2022 21:33:22 -0800 Subject: [PATCH 204/730] src, tests: use deprecation instead of hard error for egg fragments This turns invalid egg fragments into a soft error, with a scheduled deprecation period of two releases. Signed-off-by: William Woodruff --- src/pip/_internal/exceptions.py | 8 -------- src/pip/_internal/models/link.py | 22 ++++++++++++---------- tests/unit/test_link.py | 4 ++-- 3 files changed, 14 insertions(+), 20 deletions(-) diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index ac4057733e1..2ab1f591f12 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -658,11 +658,3 @@ def __str__(self) -> str: assert self.error is not None message_part = f".\n{self.error}\n" return f"Configuration file {self.reason}{message_part}" - - -class InvalidEggFragment(InstallationError): - """A link's `#egg=` fragment doesn't look like a valid PEP 508 project - name.""" - - def __init__(self, fragment: str) -> None: - super().__init__(f"egg fragment is not a bare project name: {fragment}") diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index fd421dc4208..c7c4b0e9b25 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -18,11 +18,7 @@ Union, ) -from pip._vendor.packaging.requirements import EXTRAS, NAME -from pip._vendor.pyparsing import Optional as Maybe -from pip._vendor.pyparsing import ParseException, stringEnd, stringStart - -from pip._internal.exceptions import InvalidEggFragment +from pip._internal.utils.deprecation import deprecated from 
pip._internal.utils.filetypes import WHEEL_EXTENSION from pip._internal.utils.hashes import Hashes from pip._internal.utils.misc import ( @@ -365,7 +361,10 @@ def url_without_fragment(self) -> str: _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)") - _fragment_parser = stringStart + NAME + Maybe(EXTRAS) + stringEnd + # Per PEP 508. + _project_name_re = re.compile( + r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE + ) def _egg_fragment(self) -> Optional[str]: match = self._egg_fragment_re.search(self._url) @@ -375,10 +374,13 @@ def _egg_fragment(self) -> Optional[str]: # An egg fragment looks like a PEP 508 project name, along with # an optional extras specifier. Anything else is invalid. project_name = match.group(1) - try: - self._fragment_parser.parseString(project_name) - except ParseException: - raise InvalidEggFragment(project_name) + if not self._project_name_re.match(project_name): + deprecated( + reason=f"{self} contains an egg fragment with a non-PEP 508 name", + replacement="to use the req @ url syntax, and remove the egg fragment", + gone_in="25.0", + issue=11617, + ) return project_name diff --git a/tests/unit/test_link.py b/tests/unit/test_link.py index f5c9e84ef3b..0db48194612 100644 --- a/tests/unit/test_link.py +++ b/tests/unit/test_link.py @@ -2,7 +2,6 @@ import pytest -from pip._internal.exceptions import InvalidEggFragment from pip._internal.models.link import Link, links_equivalent from pip._internal.utils.hashes import Hashes @@ -93,6 +92,7 @@ def test_fragments(self) -> None: assert "eggname[]" == Link(url).egg_fragment assert None is Link(url).subdirectory_fragment + @pytest.mark.xfail(reason="Behavior change scheduled for 25.0", strict=True) @pytest.mark.parametrize( "fragment", [ @@ -107,7 +107,7 @@ def test_fragments(self) -> None: ) def test_invalid_egg_fragments(self, fragment: str) -> None: url = f"git+https://example.com/package#egg={fragment}" - with pytest.raises(InvalidEggFragment): + with pytest.raises(Exception): 
Link(url) @pytest.mark.parametrize( From d9502ff5013d124f7af7d487cae2332b4a462c91 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 2 Dec 2022 21:38:51 -0800 Subject: [PATCH 205/730] tests: fix comment Signed-off-by: William Woodruff --- tests/unit/test_link.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_link.py b/tests/unit/test_link.py index 0db48194612..df4957d5974 100644 --- a/tests/unit/test_link.py +++ b/tests/unit/test_link.py @@ -81,7 +81,8 @@ def test_fragments(self) -> None: assert "subdir" == Link(url).subdirectory_fragment # Extras are supported and preserved in the egg fragment, - # even the empty extras specifier (since PEP 508 allows it). + # even the empty extras specifier. + # This behavior is deprecated and will change in pip 25. url = "git+https://example.com/package#egg=eggname[extra]" assert "eggname[extra]" == Link(url).egg_fragment assert None is Link(url).subdirectory_fragment From 63097482bb8a64cf69fdbd230082999815088343 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Fri, 2 Dec 2022 21:41:12 -0800 Subject: [PATCH 206/730] news: add entry Signed-off-by: William Woodruff --- news/11617.bugfix.rst | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 news/11617.bugfix.rst diff --git a/news/11617.bugfix.rst b/news/11617.bugfix.rst new file mode 100644 index 00000000000..02346e49c42 --- /dev/null +++ b/news/11617.bugfix.rst @@ -0,0 +1,3 @@ +Deprecated a historical ambiguity in how ``egg`` fragments in URL-style +requirements are formatted and handled. ``egg`` fragments that do not look +like PEP 508 names now produce a deprecation warning. 
From 76cff147236d76c3c9560d59311ac8ebf0ed29f3 Mon Sep 17 00:00:00 2001 From: William Woodruff Date: Sat, 3 Dec 2022 11:28:11 -0800 Subject: [PATCH 207/730] news: recategorize entry Signed-off-by: William Woodruff --- news/{10265.bugfix.rst => 10265.removal.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{10265.bugfix.rst => 10265.removal.rst} (100%) diff --git a/news/10265.bugfix.rst b/news/10265.removal.rst similarity index 100% rename from news/10265.bugfix.rst rename to news/10265.removal.rst From 227fe23b0ea9e01d1476595f40ef53dbbfc15715 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 5 Dec 2022 08:05:26 +0300 Subject: [PATCH 208/730] Add per-requirement --config-settings --- news/11325.feature.rst | 1 + src/pip/_internal/cli/req_command.py | 3 +++ src/pip/_internal/req/req_file.py | 1 + src/pip/_internal/req/req_install.py | 2 ++ tests/functional/test_config_settings.py | 25 ++++++++++++++++++++++++ tests/unit/test_req_file.py | 12 ++++++++++-- 6 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 news/11325.feature.rst diff --git a/news/11325.feature.rst b/news/11325.feature.rst new file mode 100644 index 00000000000..10683d4cd6d --- /dev/null +++ b/news/11325.feature.rst @@ -0,0 +1 @@ +Support a per-requirement ``--config-settings`` option. 
diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 1044809f040..5a3ff0007f1 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -438,6 +438,9 @@ def get_requirements( isolated=options.isolated_mode, use_pep517=options.use_pep517, user_supplied=True, + config_settings=parsed_req.options.get("config_settings") + if parsed_req.options + else None, ) requirements.append(req_to_add) diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index 11ec699acc5..082e83028f5 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -72,6 +72,7 @@ cmdoptions.install_options, cmdoptions.global_options, cmdoptions.hash, + cmdoptions.config_settings, ] # the 'dest' string values diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 5f29261c252..5d91c59e746 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -150,6 +150,8 @@ def __init__( self.global_options = global_options if global_options else [] self.hash_options = hash_options if hash_options else {} self.config_settings = config_settings + if isinstance(comes_from, InstallRequirement) and comes_from.config_settings: + self.config_settings = comes_from.config_settings # Set to True after successful preparation of this requirement self.prepared = False # User supplied requirement are explicitly requested for installation diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index b1e15c01031..6758324b17e 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -112,6 +112,20 @@ def test_backend_sees_config(script: PipTestEnvironment) -> None: assert json.loads(output) == {"FOO": "Hello"} +def test_backend_sees_config_reqs(script: PipTestEnvironment) -> None: + name, version, project_dir = 
make_project(script.scratch_path) + script.scratch_path.joinpath("reqs.txt").write_text( + f"{project_dir} --config-settings FOO=Hello" + ) + script.pip("wheel", "-r", "reqs.txt") + wheel_file_name = f"{name}-{version}-py3-none-any.whl" + wheel_file_path = script.cwd / wheel_file_name + with open(wheel_file_path, "rb") as f: + with ZipFile(f) as z: + output = z.read("config.json") + assert json.loads(output) == {"FOO": "Hello"} + + def test_install_sees_config(script: PipTestEnvironment) -> None: _, _, project_dir = make_project(script.scratch_path) script.pip( @@ -125,6 +139,17 @@ def test_install_sees_config(script: PipTestEnvironment) -> None: assert json.load(f) == {"FOO": "Hello"} +def test_install_sees_config_reqs(script: PipTestEnvironment) -> None: + _, _, project_dir = make_project(script.scratch_path) + script.scratch_path.joinpath("reqs.txt").write_text( + f"{project_dir} --config-settings FOO=Hello" + ) + script.pip("install", "-r", "reqs.txt") + config = script.site_packages_path / "config.json" + with open(config, "rb") as f: + assert json.load(f) == {"FOO": "Hello"} + + def test_install_editable_sees_config(script: PipTestEnvironment) -> None: _, _, project_dir = make_project(script.scratch_path) script.pip( diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 228d0aaa49c..23c04b7df28 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -74,7 +74,13 @@ def parse_reqfile( options=options, constraint=constraint, ): - yield install_req_from_parsed_requirement(parsed_req, isolated=isolated) + yield install_req_from_parsed_requirement( + parsed_req, + isolated=isolated, + config_settings=parsed_req.options.get("config_settings") + if parsed_req.options + else None, + ) def test_read_file_url(tmp_path: Path, session: PipSession) -> None: @@ -346,12 +352,14 @@ def test_nested_constraints_file( def test_options_on_a_requirement_line(self, line_processor: LineProcessor) -> None: line = ( "SomeProject 
--install-option=yo1 --install-option yo2 " - '--global-option="yo3" --global-option "yo4"' + '--global-option="yo3" --global-option "yo4" ' + '--config-settings="yo3=yo4" --config-settings "yo1=yo2"' ) filename = "filename" req = line_processor(line, filename, 1)[0] assert req.global_options == ["yo3", "yo4"] assert req.install_options == ["yo1", "yo2"] + assert req.config_settings == {"yo3": "yo4", "yo1": "yo2"} def test_hash_options(self, line_processor: LineProcessor) -> None: """Test the --hash option: mostly its value storage. From 6d1f25b222c3211f1b207b557d93cdb0044ecf43 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 13 Dec 2022 14:32:03 +0800 Subject: [PATCH 209/730] Skip tests using custom SSL on 3.7 + Linux This combination does not work well on CI, and since 3.7 support is on its last days anyway, let's just ignore those tests. --- ...704B9286-9572-4871-9B35-C6A8FA66AE75.trivial.rst | 0 tests/functional/test_install.py | 4 ++++ tests/functional/test_install_config.py | 13 +++++++++++++ 3 files changed, 17 insertions(+) create mode 100644 news/704B9286-9572-4871-9B35-C6A8FA66AE75.trivial.rst diff --git a/news/704B9286-9572-4871-9B35-C6A8FA66AE75.trivial.rst b/news/704B9286-9572-4871-9B35-C6A8FA66AE75.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 6131a4054d6..7b07226c90e 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2204,6 +2204,10 @@ def test_error_all_yanked_files_and_no_pin( ), str(result) +@pytest.mark.skipif( + sys.platform == "linux" and sys.version_info < (3, 8), + reason="Custom SSL certification not running well in CI", +) @pytest.mark.parametrize( "install_args", [ diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 66043fa1f08..99e59b97b18 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ 
-1,5 +1,6 @@ import os import ssl +import sys import tempfile import textwrap @@ -270,6 +271,10 @@ def test_install_no_binary_via_config_disables_cached_wheels( assert "Running setup.py install for upper" in str(res), str(res) +@pytest.mark.skipif( + sys.platform == "linux" and sys.version_info < (3, 8), + reason="Custom SSL certification not running well in CI", +) def test_prompt_for_authentication( script: PipTestEnvironment, data: TestData, cert_factory: CertFactory ) -> None: @@ -310,6 +315,10 @@ def test_prompt_for_authentication( assert f"User for {server.host}:{server.port}" in result.stdout, str(result) +@pytest.mark.skipif( + sys.platform == "linux" and sys.version_info < (3, 8), + reason="Custom SSL certification not running well in CI", +) def test_do_not_prompt_for_authentication( script: PipTestEnvironment, data: TestData, cert_factory: CertFactory ) -> None: @@ -352,6 +361,10 @@ def test_do_not_prompt_for_authentication( assert "ERROR: HTTP error 401" in result.stderr +@pytest.mark.skipif( + sys.platform == "linux" and sys.version_info < (3, 8), + reason="Custom SSL certification not running well in CI", +) @pytest.mark.parametrize("auth_needed", (True, False)) def test_prompt_for_keyring_if_needed( script: PipTestEnvironment, From d3062a1603cf3fb472a5a9a116a22479131b2076 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Thu, 15 Dec 2022 02:24:34 +0300 Subject: [PATCH 210/730] Add more tests --- tests/functional/test_config_settings.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index 6758324b17e..cc7ec420341 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -3,7 +3,7 @@ from typing import Tuple from zipfile import ZipFile -from tests.lib import PipTestEnvironment +from tests.lib import PipTestEnvironment, create_basic_sdist_for_package 
PYPROJECT_TOML = """\ [build-system] @@ -162,3 +162,21 @@ def test_install_editable_sees_config(script: PipTestEnvironment) -> None: config = script.site_packages_path / "config.json" with open(config, "rb") as f: assert json.load(f) == {"FOO": "Hello"} + + +def test_install_config_reqs(script: PipTestEnvironment) -> None: + _, _, project_dir = make_project(script.scratch_path) + a_sdist = create_basic_sdist_for_package( + script, + "foo", + "1.0", + {"pyproject.toml": PYPROJECT_TOML, "backend/dummy_backend.py": BACKEND_SRC}, + ) + script.scratch_path.joinpath("reqs.txt").write_text( + "foo --config-settings FOO=Hello" + ) + script.pip("install", "--no-index", "-f", str(a_sdist.parent), "-r", "reqs.txt") + script.assert_installed(foo="1.0") + config = script.site_packages_path / "config.json" + with open(config, "rb") as f: + assert json.load(f) == {"FOO": "Hello"} From 92730f6f632ec95fd44edf47747690c083916874 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Thu, 15 Dec 2022 03:15:10 +0300 Subject: [PATCH 211/730] Add more tests --- tests/functional/test_install_reqs.py | 71 ++++++++++++++++++++++++--- 1 file changed, 64 insertions(+), 7 deletions(-) diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 14e1056ae7a..ccef7e6a716 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -2,7 +2,7 @@ import os import textwrap from pathlib import Path -from typing import Any, Callable +from typing import TYPE_CHECKING, Any, Dict, Optional import pytest @@ -18,6 +18,11 @@ ) from tests.lib.local_repos import local_checkout +if TYPE_CHECKING: + from typing import Protocol +else: + Protocol = object + class ArgRecordingSdist: def __init__(self, sdist_path: Path, args_path: Path) -> None: @@ -28,10 +33,17 @@ def args(self) -> Any: return json.loads(self._args_path.read_text()) +class ArgRecordingSdistMaker(Protocol): + def __call__( + self, name: str, 
extra_files: Optional[Dict[str, str]] = None + ) -> ArgRecordingSdist: + ... + + @pytest.fixture() def arg_recording_sdist_maker( script: PipTestEnvironment, -) -> Callable[[str], ArgRecordingSdist]: +) -> ArgRecordingSdistMaker: arg_writing_setup_py = textwrap.dedent( """ import io @@ -52,9 +64,13 @@ def arg_recording_sdist_maker( output_dir.mkdir(parents=True) script.environ["OUTPUT_DIR"] = str(output_dir) - def _arg_recording_sdist_maker(name: str) -> ArgRecordingSdist: - extra_files = {"setup.py": arg_writing_setup_py.format(name=name)} - sdist_path = create_basic_sdist_for_package(script, name, "0.1.0", extra_files) + def _arg_recording_sdist_maker( + name: str, extra_files: Optional[Dict[str, str]] = None + ) -> ArgRecordingSdist: + _extra_files = {"setup.py": arg_writing_setup_py.format(name=name)} + if extra_files is not None: + _extra_files.update(extra_files) + sdist_path = create_basic_sdist_for_package(script, name, "0.1.0", _extra_files) args_path = output_dir / f"{name}.json" return ArgRecordingSdist(sdist_path, args_path) @@ -334,7 +350,7 @@ def test_wheel_user_with_prefix_in_pydistutils_cfg( def test_install_option_in_requirements_file_overrides_cli( script: PipTestEnvironment, - arg_recording_sdist_maker: Callable[[str], ArgRecordingSdist], + arg_recording_sdist_maker: ArgRecordingSdistMaker, ) -> None: simple_sdist = arg_recording_sdist_maker("simple") @@ -763,7 +779,7 @@ def test_install_unsupported_wheel_file( def test_install_options_local_to_package( script: PipTestEnvironment, - arg_recording_sdist_maker: Callable[[str], ArgRecordingSdist], + arg_recording_sdist_maker: ArgRecordingSdistMaker, ) -> None: """Make sure --install-options does not leak across packages. 
@@ -817,3 +833,44 @@ def test_location_related_install_option_fails(script: PipTestEnvironment) -> No expect_error=True, ) assert "['--home'] from simple" in result.stderr + + +@pytest.mark.network +def test_config_settings_local_to_package( + script: PipTestEnvironment, arg_recording_sdist_maker: ArgRecordingSdistMaker +) -> None: + pyproject_toml = textwrap.dedent( + """ + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + """ + ) + simple1_sdist = arg_recording_sdist_maker( + "simple1", {"pyproject.toml": pyproject_toml} + ) + simple2_sdist = arg_recording_sdist_maker( + "simple2", {"pyproject.toml": pyproject_toml} + ) + + reqs_file = script.scratch_path.joinpath("reqs.txt") + reqs_file.write_text( + textwrap.dedent( + """ + simple1 --config-settings "--build-option=--verbose" + simple2 + """ + ) + ) + script.pip( + "install", + "-f", + str(simple1_sdist.sdist_path.parent), + "-r", + reqs_file, + ) + + simple1_args = simple1_sdist.args() + assert "--verbose" in simple1_args + simple2_args = simple2_sdist.args() + assert "--verbose" not in simple2_args From be21f0dd10589aa29c178d834e77d018ba791789 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Thu, 15 Dec 2022 04:21:38 +0300 Subject: [PATCH 212/730] Add more tests --- tests/functional/test_install_reqs.py | 49 ++++++++++++++++----------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index ccef7e6a716..7051c4aa724 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -2,7 +2,7 @@ import os import textwrap from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any import pytest @@ -34,9 +34,7 @@ def args(self) -> Any: class ArgRecordingSdistMaker(Protocol): - def __call__( - self, name: str, extra_files: Optional[Dict[str, str]] = None - ) -> 
ArgRecordingSdist: + def __call__(self, name: str, **kwargs: Any) -> ArgRecordingSdist: ... @@ -44,20 +42,16 @@ def __call__( def arg_recording_sdist_maker( script: PipTestEnvironment, ) -> ArgRecordingSdistMaker: - arg_writing_setup_py = textwrap.dedent( + arg_writing_setup_py_prelude = textwrap.dedent( """ import io import json import os import sys - from setuptools import setup - args_path = os.path.join(os.environ["OUTPUT_DIR"], "{name}.json") with open(args_path, 'w') as f: json.dump(sys.argv, f) - - setup(name={name!r}, version="0.1.0") """ ) output_dir = script.scratch_path.joinpath("args_recording_sdist_maker_output") @@ -65,12 +59,16 @@ def arg_recording_sdist_maker( script.environ["OUTPUT_DIR"] = str(output_dir) def _arg_recording_sdist_maker( - name: str, extra_files: Optional[Dict[str, str]] = None + name: str, + **kwargs: Any, ) -> ArgRecordingSdist: - _extra_files = {"setup.py": arg_writing_setup_py.format(name=name)} - if extra_files is not None: - _extra_files.update(extra_files) - sdist_path = create_basic_sdist_for_package(script, name, "0.1.0", _extra_files) + sdist_path = create_basic_sdist_for_package( + script, + name, + "0.1.0", + setup_py_prelude=arg_writing_setup_py_prelude.format(name=name), + **kwargs, + ) args_path = output_dir / f"{name}.json" return ArgRecordingSdist(sdist_path, args_path) @@ -835,9 +833,10 @@ def test_location_related_install_option_fails(script: PipTestEnvironment) -> No assert "['--home'] from simple" in result.stderr -@pytest.mark.network def test_config_settings_local_to_package( - script: PipTestEnvironment, arg_recording_sdist_maker: ArgRecordingSdistMaker + script: PipTestEnvironment, + common_wheels: Path, + arg_recording_sdist_maker: ArgRecordingSdistMaker, ) -> None: pyproject_toml = textwrap.dedent( """ @@ -847,10 +846,16 @@ def test_config_settings_local_to_package( """ ) simple1_sdist = arg_recording_sdist_maker( - "simple1", {"pyproject.toml": pyproject_toml} + "simple1", + extra_files={"pyproject.toml": 
pyproject_toml}, + depends=["bar"], + ) + bar_sdist = arg_recording_sdist_maker( + "bar", extra_files={"pyproject.toml": pyproject_toml} ) simple2_sdist = arg_recording_sdist_maker( - "simple2", {"pyproject.toml": pyproject_toml} + "simple2", + extra_files={"pyproject.toml": pyproject_toml}, ) reqs_file = script.scratch_path.joinpath("reqs.txt") @@ -862,15 +867,21 @@ def test_config_settings_local_to_package( """ ) ) + script.pip( "install", + "--no-index", "-f", - str(simple1_sdist.sdist_path.parent), + script.scratch_path, + "-f", + common_wheels, "-r", reqs_file, ) simple1_args = simple1_sdist.args() assert "--verbose" in simple1_args + bar_args = bar_sdist.args() + assert "--verbose" in bar_args simple2_args = simple2_sdist.args() assert "--verbose" not in simple2_args From 833c23c1f5d6ef742bd17711dbbbc9789bad92f2 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Fri, 16 Dec 2022 13:00:38 +0300 Subject: [PATCH 213/730] Mention requirements files --- news/11325.feature.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/11325.feature.rst b/news/11325.feature.rst index 10683d4cd6d..282310816b6 100644 --- a/news/11325.feature.rst +++ b/news/11325.feature.rst @@ -1 +1 @@ -Support a per-requirement ``--config-settings`` option. +Support a per-requirement ``--config-settings`` option in requirements files. 
From e59ff2fc8ca1d58fceb7a82cb67694a98bfa4982 Mon Sep 17 00:00:00 2001 From: doron zarhi Date: Tue, 6 Dec 2022 14:20:18 +0200 Subject: [PATCH 214/730] show: add editable location if package is editable (#11638) --- news/11638.bugfix.rst | 1 + src/pip/_internal/commands/show.py | 6 ++++++ tests/functional/test_show.py | 9 +++++---- 3 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 news/11638.bugfix.rst diff --git a/news/11638.bugfix.rst b/news/11638.bugfix.rst new file mode 100644 index 00000000000..04ef930bc1b --- /dev/null +++ b/news/11638.bugfix.rst @@ -0,0 +1 @@ +Make ``pip show`` show the editable location if package is editable diff --git a/src/pip/_internal/commands/show.py b/src/pip/_internal/commands/show.py index 212167c9d1e..3f10701f6b2 100644 --- a/src/pip/_internal/commands/show.py +++ b/src/pip/_internal/commands/show.py @@ -53,6 +53,7 @@ class _PackageInfo(NamedTuple): name: str version: str location: str + editable_project_location: Optional[str] requires: List[str] required_by: List[str] installer: str @@ -120,6 +121,7 @@ def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]: name=dist.raw_name, version=str(dist.version), location=dist.location or "", + editable_project_location=dist.editable_project_location, requires=requires, required_by=required_by, installer=dist.installer, @@ -158,6 +160,10 @@ def print_results( write_output("Author-email: %s", dist.author_email) write_output("License: %s", dist.license) write_output("Location: %s", dist.location) + if dist.editable_project_location is not None: + write_output( + "Editable project location: %s", dist.editable_project_location + ) write_output("Requires: %s", ", ".join(dist.requires)) write_output("Required-by: %s", ", ".join(dist.required_by)) diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py index 2fc8ca24270..a7e9022a5c4 100644 --- a/tests/functional/test_show.py +++ b/tests/functional/test_show.py @@ -17,7 +17,7 @@ def 
test_basic_show(script: PipTestEnvironment) -> None: """ result = script.pip("show", "pip") lines = result.stdout.splitlines() - assert len(lines) == 10 + assert len(lines) == 11 assert "Name: pip" in lines assert f"Version: {__version__}" in lines assert any(line.startswith("Location: ") for line in lines) @@ -33,7 +33,7 @@ def test_show_with_files_not_found(script: PipTestEnvironment, data: TestData) - script.pip("install", "-e", editable) result = script.pip("show", "-f", "SetupPyUTF8") lines = result.stdout.splitlines() - assert len(lines) == 12 + assert len(lines) == 13 assert "Name: SetupPyUTF8" in lines assert "Version: 0.0.0" in lines assert any(line.startswith("Location: ") for line in lines) @@ -128,7 +128,7 @@ def test_report_mixed_not_found(script: PipTestEnvironment) -> None: result = script.pip("show", "Abcd3", "A-B-C", "pip", allow_stderr_warning=True) assert "WARNING: Package(s) not found: A-B-C, Abcd3" in result.stderr lines = result.stdout.splitlines() - assert len(lines) == 10 + assert len(lines) == 11 assert "Name: pip" in lines @@ -213,6 +213,7 @@ def test_all_fields(script: PipTestEnvironment) -> None: "Author-email", "License", "Location", + "Editable project location", "Requires", "Required-by", } @@ -226,7 +227,7 @@ def test_pip_show_is_short(script: PipTestEnvironment) -> None: """ result = script.pip("show", "pip") lines = result.stdout.splitlines() - assert len(lines) <= 10 + assert len(lines) <= 11 def test_pip_show_divider(script: PipTestEnvironment, data: TestData) -> None: From 9c8e74bad972fffefc5d54817362840cba986214 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 19 Dec 2022 18:59:51 +0300 Subject: [PATCH 215/730] Test deps with config-settings --- tests/functional/test_install_reqs.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 7051c4aa724..22978765adb 100644 --- 
a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -845,6 +845,15 @@ def test_config_settings_local_to_package( build-backend = "setuptools.build_meta" """ ) + simple0_sdist = arg_recording_sdist_maker( + "simple0", + extra_files={"pyproject.toml": pyproject_toml}, + depends=["foo"], + ) + foo_sdist = arg_recording_sdist_maker( + "foo", + extra_files={"pyproject.toml": pyproject_toml}, + ) simple1_sdist = arg_recording_sdist_maker( "simple1", extra_files={"pyproject.toml": pyproject_toml}, @@ -862,6 +871,8 @@ def test_config_settings_local_to_package( reqs_file.write_text( textwrap.dedent( """ + simple0 --config-settings "--build-option=--verbose" + foo --config-settings "--build-option=--quiet" simple1 --config-settings "--build-option=--verbose" simple2 """ @@ -879,6 +890,10 @@ def test_config_settings_local_to_package( reqs_file, ) + simple0_args = simple0_sdist.args() + assert "--verbose" in simple0_args + foo_args = foo_sdist.args() + assert "--quiet" in foo_args simple1_args = simple1_sdist.args() assert "--verbose" in simple1_args bar_args = bar_sdist.args() From dd70d4a3ac2641f8fb7de2482eb324ec6acbdd2c Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Tue, 20 Dec 2022 12:56:50 +0000 Subject: [PATCH 216/730] Remove duplicate news entry for #11547 #11547 was backported to pip 22.3.1, so has already been released and already has a news entry: https://pip.pypa.io/en/latest/news/#v22-3-1 As such, one shouldn't be included for it, for the next pip release: https://pip.pypa.io/en/latest/news/#not-yet-released-2022-12-19 --- news/11547.bugfix.rst | 3 --- 1 file changed, 3 deletions(-) delete mode 100644 news/11547.bugfix.rst diff --git a/news/11547.bugfix.rst b/news/11547.bugfix.rst deleted file mode 100644 index 29d566a23ff..00000000000 --- a/news/11547.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix entry point generation of ``pip.X``, ``pipX.Y``, and ``easy_install-X.Y`` -to correctly 
account for multi-digit Python version segments (e.g. the "11" -part of 3.11). From 6177722d4330d903c08f27374a846807bcb8889d Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Wed, 21 Dec 2022 23:17:24 +0300 Subject: [PATCH 217/730] Document edge cases --- src/pip/_internal/req/req_install.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 5d91c59e746..26fd4494609 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -151,6 +151,13 @@ def __init__( self.hash_options = hash_options if hash_options else {} self.config_settings = config_settings if isinstance(comes_from, InstallRequirement) and comes_from.config_settings: + # 1. If a user-requested package has config settings, those are always used. + # 2. If a user-requested package does not have user-specified config + # settings, but is another package’s transitive dependency, it would + # inherit the dependant’s config settings. + # 3. A transitive cannot have user-specified config settings. + # 4. If a transitive dependency’s dependant has config settings, + # the config settings are inherited. 
self.config_settings = comes_from.config_settings # Set to True after successful preparation of this requirement self.prepared = False From 8dabf3316a47deb539bd2267d89144bae08e58fc Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Wed, 21 Dec 2022 23:23:20 +0300 Subject: [PATCH 218/730] Test all edge cases --- tests/functional/test_install_reqs.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 22978765adb..8f548ed840f 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -860,7 +860,12 @@ def test_config_settings_local_to_package( depends=["bar"], ) bar_sdist = arg_recording_sdist_maker( - "bar", extra_files={"pyproject.toml": pyproject_toml} + "bar", + extra_files={"pyproject.toml": pyproject_toml}, + depends=["simple3"], + ) + simple3_sdist = arg_recording_sdist_maker( + "simple3", extra_files={"pyproject.toml": pyproject_toml} ) simple2_sdist = arg_recording_sdist_maker( "simple2", @@ -898,5 +903,7 @@ def test_config_settings_local_to_package( assert "--verbose" in simple1_args bar_args = bar_sdist.args() assert "--verbose" in bar_args + simple3_args = simple3_sdist.args() + assert "--verbose" in simple3_args simple2_args = simple2_sdist.args() assert "--verbose" not in simple2_args From b7ad4ee389e8b50d64530ca5747ebb580fab7f5d Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 26 Dec 2022 14:59:18 +0000 Subject: [PATCH 219/730] Update `rich` to 12.6.0 --- news/rich.vendor.rst | 1 + src/pip/_vendor/rich/__init__.py | 7 ++- src/pip/_vendor/rich/__main__.py | 8 --- src/pip/_vendor/rich/_null_file.py | 83 ++++++++++++++++++++++++++++ src/pip/_vendor/rich/ansi.py | 2 +- src/pip/_vendor/rich/box.py | 2 +- src/pip/_vendor/rich/color.py | 5 +- src/pip/_vendor/rich/console.py | 78 +++++++++++++++++++------- src/pip/_vendor/rich/filesize.py | 2 +- src/pip/_vendor/rich/json.py | 4 +- 
src/pip/_vendor/rich/layout.py | 4 +- src/pip/_vendor/rich/logging.py | 23 +++++--- src/pip/_vendor/rich/panel.py | 67 ++++++++++++++++++++-- src/pip/_vendor/rich/pretty.py | 59 +++++++++++++------- src/pip/_vendor/rich/progress.py | 12 ++-- src/pip/_vendor/rich/progress_bar.py | 2 +- src/pip/_vendor/rich/repr.py | 9 +-- src/pip/_vendor/rich/scope.py | 2 +- src/pip/_vendor/rich/style.py | 6 +- src/pip/_vendor/rich/syntax.py | 15 ++++- src/pip/_vendor/rich/table.py | 6 ++ src/pip/_vendor/rich/text.py | 27 ++++++++- src/pip/_vendor/rich/traceback.py | 12 ++-- src/pip/_vendor/vendor.txt | 2 +- 24 files changed, 342 insertions(+), 96 deletions(-) create mode 100644 news/rich.vendor.rst create mode 100644 src/pip/_vendor/rich/_null_file.py diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst new file mode 100644 index 00000000000..56a2e9c5112 --- /dev/null +++ b/news/rich.vendor.rst @@ -0,0 +1 @@ +Upgrade rich to 12.6.0 diff --git a/src/pip/_vendor/rich/__init__.py b/src/pip/_vendor/rich/__init__.py index d35875dbb81..73f58d77408 100644 --- a/src/pip/_vendor/rich/__init__.py +++ b/src/pip/_vendor/rich/__init__.py @@ -5,7 +5,7 @@ from ._extension import load_ipython_extension # noqa: F401 -__all__ = ["get_console", "reconfigure", "print", "inspect"] +__all__ = ["get_console", "reconfigure", "print", "inspect", "print_json"] if TYPE_CHECKING: from .console import Console @@ -40,7 +40,8 @@ def reconfigure(*args: Any, **kwargs: Any) -> None: """Reconfigures the global console by replacing it with another. Args: - console (Console): Replacement console instance. + *args (Any): Positional arguments for the replacement :class:`~rich.console.Console`. + **kwargs (Any): Keyword arguments for the replacement :class:`~rich.console.Console`. 
""" from pip._vendor.rich.console import Console @@ -80,7 +81,7 @@ def print_json( indent: Union[None, int, str] = 2, highlight: bool = True, skip_keys: bool = False, - ensure_ascii: bool = True, + ensure_ascii: bool = False, check_circular: bool = True, allow_nan: bool = True, default: Optional[Callable[[Any], Any]] = None, diff --git a/src/pip/_vendor/rich/__main__.py b/src/pip/_vendor/rich/__main__.py index 54e6d5e8ab2..270629fd806 100644 --- a/src/pip/_vendor/rich/__main__.py +++ b/src/pip/_vendor/rich/__main__.py @@ -227,10 +227,6 @@ def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: c = Console(record=True) c.print(test_card) - # c.save_svg( - # path="/Users/darrenburns/Library/Application Support/JetBrains/PyCharm2021.3/scratches/svg_export.svg", - # title="Rich can export to SVG", - # ) print(f"rendered in {pre_cache_taken}ms (cold cache)") print(f"rendered in {taken}ms (warm cache)") @@ -247,10 +243,6 @@ def iter_last(values: Iterable[T]) -> Iterable[Tuple[bool, T]]: "Textualize", "[u blue link=https://github.com/textualize]https://github.com/textualize", ) - sponsor_message.add_row( - "Buy devs a :coffee:", - "[u blue link=https://ko-fi.com/textualize]https://ko-fi.com/textualize", - ) sponsor_message.add_row( "Twitter", "[u blue link=https://twitter.com/willmcgugan]https://twitter.com/willmcgugan", diff --git a/src/pip/_vendor/rich/_null_file.py b/src/pip/_vendor/rich/_null_file.py new file mode 100644 index 00000000000..49038bfcbe5 --- /dev/null +++ b/src/pip/_vendor/rich/_null_file.py @@ -0,0 +1,83 @@ +from types import TracebackType +from typing import IO, Iterable, Iterator, List, Optional, Type + + +class NullFile(IO[str]): + + # TODO: "mode", "name" and "closed" are only required for Python 3.6. 
+ + @property + def mode(self) -> str: + return "" + + @property + def name(self) -> str: + return "NullFile" + + def closed(self) -> bool: + return False + + def close(self) -> None: + pass + + def isatty(self) -> bool: + return False + + def read(self, __n: int = 1) -> str: + return "" + + def readable(self) -> bool: + return False + + def readline(self, __limit: int = 1) -> str: + return "" + + def readlines(self, __hint: int = 1) -> List[str]: + return [] + + def seek(self, __offset: int, __whence: int = 1) -> int: + return 0 + + def seekable(self) -> bool: + return False + + def tell(self) -> int: + return 0 + + def truncate(self, __size: Optional[int] = 1) -> int: + return 0 + + def writable(self) -> bool: + return False + + def writelines(self, __lines: Iterable[str]) -> None: + pass + + def __next__(self) -> str: + return "" + + def __iter__(self) -> Iterator[str]: + return iter([""]) + + def __enter__(self) -> IO[str]: + pass + + def __exit__( + self, + __t: Optional[Type[BaseException]], + __value: Optional[BaseException], + __traceback: Optional[TracebackType], + ) -> None: + pass + + def write(self, text: str) -> int: + return 0 + + def flush(self) -> None: + pass + + def fileno(self) -> int: + return -1 + + +NULL_FILE = NullFile() diff --git a/src/pip/_vendor/rich/ansi.py b/src/pip/_vendor/rich/ansi.py index d4c32cef1ee..92ef5194117 100644 --- a/src/pip/_vendor/rich/ansi.py +++ b/src/pip/_vendor/rich/ansi.py @@ -120,7 +120,7 @@ def __init__(self) -> None: self.style = Style.null() def decode(self, terminal_text: str) -> Iterable[Text]: - """Decode ANSI codes in an interable of lines. + """Decode ANSI codes in an iterable of lines. Args: lines (Iterable[str]): An iterable of lines of terminal output. 
diff --git a/src/pip/_vendor/rich/box.py b/src/pip/_vendor/rich/box.py index d0b07cf57e0..97d2a944457 100644 --- a/src/pip/_vendor/rich/box.py +++ b/src/pip/_vendor/rich/box.py @@ -514,4 +514,4 @@ def get_bottom(self, widths: Iterable[int]) -> str: columns.add_renderable(table) console.print(columns) - # console.save_html("box.html", inline_styles=True) + # console.save_svg("box.svg") diff --git a/src/pip/_vendor/rich/color.py b/src/pip/_vendor/rich/color.py index 6bca2da922c..ef2e895d7cb 100644 --- a/src/pip/_vendor/rich/color.py +++ b/src/pip/_vendor/rich/color.py @@ -29,6 +29,9 @@ class ColorSystem(IntEnum): def __repr__(self) -> str: return f"ColorSystem.{self.name}" + def __str__(self) -> str: + return repr(self) + class ColorType(IntEnum): """Type of color stored in Color class.""" @@ -310,7 +313,7 @@ class Color(NamedTuple): """A triplet of color components, if an RGB color.""" def __rich__(self) -> "Text": - """Dispays the actual color if Rich printed.""" + """Displays the actual color if Rich printed.""" from .style import Style from .text import Text diff --git a/src/pip/_vendor/rich/console.py b/src/pip/_vendor/rich/console.py index 93a10b0b500..f805f2dea7d 100644 --- a/src/pip/_vendor/rich/console.py +++ b/src/pip/_vendor/rich/console.py @@ -34,6 +34,8 @@ cast, ) +from pip._vendor.rich._null_file import NULL_FILE + if sys.version_info >= (3, 8): from typing import Literal, Protocol, runtime_checkable else: @@ -104,7 +106,11 @@ class NoChange: _STD_STREAMS_OUTPUT = (_STDOUT_FILENO, _STDERR_FILENO) -_TERM_COLORS = {"256color": ColorSystem.EIGHT_BIT, "16color": ColorSystem.STANDARD} +_TERM_COLORS = { + "kitty": ColorSystem.EIGHT_BIT, + "256color": ColorSystem.EIGHT_BIT, + "16color": ColorSystem.STANDARD, +} class ConsoleDimensions(NamedTuple): @@ -516,7 +522,11 @@ def _is_jupyter() -> bool: # pragma: no cover return False ipython = get_ipython() # type: ignore[name-defined] shell = ipython.__class__.__name__ - if "google.colab" in str(ipython.__class__) or 
shell == "ZMQInteractiveShell": + if ( + "google.colab" in str(ipython.__class__) + or os.getenv("DATABRICKS_RUNTIME_VERSION") + or shell == "ZMQInteractiveShell" + ): return True # Jupyter notebook or qtconsole elif shell == "TerminalInteractiveShell": return False # Terminal running IPython @@ -697,7 +707,16 @@ def __init__( self._height = height self._color_system: Optional[ColorSystem] - self._force_terminal = force_terminal + + self._force_terminal = None + if force_terminal is not None: + self._force_terminal = force_terminal + else: + # If FORCE_COLOR env var has any value at all, we force terminal. + force_color = self._environ.get("FORCE_COLOR") + if force_color is not None: + self._force_terminal = True + self._file = file self.quiet = quiet self.stderr = stderr @@ -746,6 +765,8 @@ def file(self) -> IO[str]: """Get the file object to write to.""" file = self._file or (sys.stderr if self.stderr else sys.stdout) file = getattr(file, "rich_proxied_file", file) + if file is None: + file = NULL_FILE return file @file.setter @@ -1701,7 +1722,7 @@ def print_json( indent: Union[None, int, str] = 2, highlight: bool = True, skip_keys: bool = False, - ensure_ascii: bool = True, + ensure_ascii: bool = False, check_circular: bool = True, allow_nan: bool = True, default: Optional[Callable[[Any], Any]] = None, @@ -1996,9 +2017,11 @@ def _check_buffer(self) -> None: from pip._vendor.rich._win32_console import LegacyWindowsTerm from pip._vendor.rich._windows_renderer import legacy_windows_render - legacy_windows_render( - self._buffer[:], LegacyWindowsTerm(self.file) - ) + buffer = self._buffer[:] + if self.no_color and self._color_system: + buffer = list(Segment.remove_color(buffer)) + + legacy_windows_render(buffer, LegacyWindowsTerm(self.file)) else: # Either a non-std stream on legacy Windows, or modern Windows. 
text = self._render_buffer(self._buffer[:]) @@ -2238,18 +2261,24 @@ def export_svg( theme: Optional[TerminalTheme] = None, clear: bool = True, code_format: str = CONSOLE_SVG_FORMAT, + font_aspect_ratio: float = 0.61, + unique_id: Optional[str] = None, ) -> str: """ Generate an SVG from the console contents (requires record=True in Console constructor). Args: - path (str): The path to write the SVG to. - title (str): The title of the tab in the output image + title (str, optional): The title of the tab in the output image theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal clear (bool, optional): Clear record buffer after exporting. Defaults to ``True`` - code_format (str): Format string used to generate the SVG. Rich will inject a number of variables + code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables into the string in order to form the final SVG output. The default template used and the variables injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable. + font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format`` + string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font). + If you aren't specifying a different font inside ``code_format``, you probably don't need this. + unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node + ids). If not set, this defaults to a computed value based on the recorded content. 
""" from pip._vendor.rich.cells import cell_len @@ -2293,7 +2322,7 @@ def get_svg_style(style: Style) -> str: width = self.width char_height = 20 - char_width = char_height * 0.61 + char_width = char_height * font_aspect_ratio line_height = char_height * 1.22 margin_top = 1 @@ -2345,14 +2374,16 @@ def stringify(value: object) -> str: if clear: self._record_buffer.clear() - unique_id = "terminal-" + str( - zlib.adler32( - ("".join(segment.text for segment in segments)).encode( - "utf-8", "ignore" + if unique_id is None: + unique_id = "terminal-" + str( + zlib.adler32( + ("".join(repr(segment) for segment in segments)).encode( + "utf-8", + "ignore", + ) + + title.encode("utf-8", "ignore") ) - + title.encode("utf-8", "ignore") ) - ) y = 0 for y, line in enumerate(Segment.split_and_crop_lines(segments, length=width)): x = 0 @@ -2482,23 +2513,32 @@ def save_svg( theme: Optional[TerminalTheme] = None, clear: bool = True, code_format: str = CONSOLE_SVG_FORMAT, + font_aspect_ratio: float = 0.61, + unique_id: Optional[str] = None, ) -> None: """Generate an SVG file from the console contents (requires record=True in Console constructor). Args: path (str): The path to write the SVG to. - title (str): The title of the tab in the output image + title (str, optional): The title of the tab in the output image theme (TerminalTheme, optional): The ``TerminalTheme`` object to use to style the terminal clear (bool, optional): Clear record buffer after exporting. Defaults to ``True`` - code_format (str): Format string used to generate the SVG. Rich will inject a number of variables + code_format (str, optional): Format string used to generate the SVG. Rich will inject a number of variables into the string in order to form the final SVG output. The default template used and the variables injected by Rich can be found by inspecting the ``console.CONSOLE_SVG_FORMAT`` variable. 
+ font_aspect_ratio (float, optional): The width to height ratio of the font used in the ``code_format`` + string. Defaults to 0.61, which is the width to height ratio of Fira Code (the default font). + If you aren't specifying a different font inside ``code_format``, you probably don't need this. + unique_id (str, optional): unique id that is used as the prefix for various elements (CSS styles, node + ids). If not set, this defaults to a computed value based on the recorded content. """ svg = self.export_svg( title=title, theme=theme, clear=clear, code_format=code_format, + font_aspect_ratio=font_aspect_ratio, + unique_id=unique_id, ) with open(path, "wt", encoding="utf-8") as write_file: write_file.write(svg) diff --git a/src/pip/_vendor/rich/filesize.py b/src/pip/_vendor/rich/filesize.py index 61be47510f0..99f118e2010 100644 --- a/src/pip/_vendor/rich/filesize.py +++ b/src/pip/_vendor/rich/filesize.py @@ -2,7 +2,7 @@ """Functions for reporting filesizes. Borrowed from https://github.com/PyFilesystem/pyfilesystem2 The functions declared in this module should cover the different -usecases needed to generate a string representation of a file size +use cases needed to generate a string representation of a file size using several different units. Since there are many standards regarding file size units, three different functions have been implemented. 
diff --git a/src/pip/_vendor/rich/json.py b/src/pip/_vendor/rich/json.py index 23583871e8f..21b642ab8e5 100644 --- a/src/pip/_vendor/rich/json.py +++ b/src/pip/_vendor/rich/json.py @@ -27,7 +27,7 @@ def __init__( indent: Union[None, int, str] = 2, highlight: bool = True, skip_keys: bool = False, - ensure_ascii: bool = True, + ensure_ascii: bool = False, check_circular: bool = True, allow_nan: bool = True, default: Optional[Callable[[Any], Any]] = None, @@ -56,7 +56,7 @@ def from_data( indent: Union[None, int, str] = 2, highlight: bool = True, skip_keys: bool = False, - ensure_ascii: bool = True, + ensure_ascii: bool = False, check_circular: bool = True, allow_nan: bool = True, default: Optional[Callable[[Any], Any]] = None, diff --git a/src/pip/_vendor/rich/layout.py b/src/pip/_vendor/rich/layout.py index 1d704652eef..849356ea9a0 100644 --- a/src/pip/_vendor/rich/layout.py +++ b/src/pip/_vendor/rich/layout.py @@ -20,8 +20,8 @@ from .highlighter import ReprHighlighter from .panel import Panel from .pretty import Pretty -from .repr import rich_repr, Result from .region import Region +from .repr import Result, rich_repr from .segment import Segment from .style import StyleType @@ -162,7 +162,6 @@ def __init__( minimum_size: int = 1, ratio: int = 1, visible: bool = True, - height: Optional[int] = None, ) -> None: self._renderable = renderable or _Placeholder(self) self.size = size @@ -170,7 +169,6 @@ def __init__( self.ratio = ratio self.name = name self.visible = visible - self.height = height self.splitter: Splitter = self.splitters["column"]() self._children: List[Layout] = [] self._render_map: RenderMap = {} diff --git a/src/pip/_vendor/rich/logging.py b/src/pip/_vendor/rich/logging.py index 58188fd8a84..91368dda78a 100644 --- a/src/pip/_vendor/rich/logging.py +++ b/src/pip/_vendor/rich/logging.py @@ -3,10 +3,12 @@ from logging import Handler, LogRecord from pathlib import Path from types import ModuleType -from typing import ClassVar, List, Optional, Iterable, 
Type, Union +from typing import ClassVar, Iterable, List, Optional, Type, Union + +from pip._vendor.rich._null_file import NullFile from . import get_console -from ._log_render import LogRender, FormatTimeCallable +from ._log_render import FormatTimeCallable, LogRender from .console import Console, ConsoleRenderable from .highlighter import Highlighter, ReprHighlighter from .text import Text @@ -158,16 +160,23 @@ def emit(self, record: LogRecord) -> None: log_renderable = self.render( record=record, traceback=traceback, message_renderable=message_renderable ) - try: - self.console.print(log_renderable) - except Exception: + if isinstance(self.console.file, NullFile): + # Handles pythonw, where stdout/stderr are null, and we return NullFile + # instance from Console.file. In this case, we still want to make a log record + # even though we won't be writing anything to a file. self.handleError(record) + else: + try: + self.console.print(log_renderable) + except Exception: + self.handleError(record) def render_message(self, record: LogRecord, message: str) -> "ConsoleRenderable": """Render message text in to Text. - record (LogRecord): logging Record. - message (str): String containing log message. + Args: + record (LogRecord): logging Record. + message (str): String containing log message. Returns: ConsoleRenderable: Renderable to display log message. 
diff --git a/src/pip/_vendor/rich/panel.py b/src/pip/_vendor/rich/panel.py index fc2807c3136..d522d80b518 100644 --- a/src/pip/_vendor/rich/panel.py +++ b/src/pip/_vendor/rich/panel.py @@ -2,11 +2,12 @@ from .align import AlignMethod from .box import ROUNDED, Box +from .cells import cell_len from .jupyter import JupyterMixin from .measure import Measurement, measure_renderables from .padding import Padding, PaddingDimensions from .segment import Segment -from .style import StyleType +from .style import Style, StyleType from .text import Text, TextType if TYPE_CHECKING: @@ -149,9 +150,53 @@ def __rich_console__( safe_box: bool = console.safe_box if self.safe_box is None else self.safe_box box = self.box.substitute(options, safe=safe_box) + def align_text( + text: Text, width: int, align: str, character: str, style: Style + ) -> Text: + """Gets new aligned text. + + Args: + text (Text): Title or subtitle text. + width (int): Desired width. + align (str): Alignment. + character (str): Character for alignment. 
+ style (Style): Border style + + Returns: + Text: New text instance + """ + text = text.copy() + text.truncate(width) + excess_space = width - cell_len(text.plain) + if excess_space: + if align == "left": + return Text.assemble( + text, + (character * excess_space, style), + no_wrap=True, + end="", + ) + elif align == "center": + left = excess_space // 2 + return Text.assemble( + (character * left, style), + text, + (character * (excess_space - left), style), + no_wrap=True, + end="", + ) + else: + return Text.assemble( + (character * excess_space, style), + text, + no_wrap=True, + end="", + ) + return text + title_text = self._title if title_text is not None: - title_text.style = border_style + title_text.stylize_before(border_style) child_width = ( width - 2 @@ -180,7 +225,13 @@ def __rich_console__( if title_text is None or width <= 4: yield Segment(box.get_top([width - 2]), border_style) else: - title_text.align(self.title_align, width - 4, character=box.top) + title_text = align_text( + title_text, + width - 4, + self.title_align, + box.top, + border_style, + ) yield Segment(box.top_left + box.top, border_style) yield from console.render(title_text, child_options.update_width(width - 4)) yield Segment(box.top + box.top_right, border_style) @@ -194,12 +245,18 @@ def __rich_console__( subtitle_text = self._subtitle if subtitle_text is not None: - subtitle_text.style = border_style + subtitle_text.stylize_before(border_style) if subtitle_text is None or width <= 4: yield Segment(box.get_bottom([width - 2]), border_style) else: - subtitle_text.align(self.subtitle_align, width - 4, character=box.bottom) + subtitle_text = align_text( + subtitle_text, + width - 4, + self.subtitle_align, + box.bottom, + border_style, + ) yield Segment(box.bottom_left + box.bottom, border_style) yield from console.render( subtitle_text, child_options.update_width(width - 4) diff --git a/src/pip/_vendor/rich/pretty.py b/src/pip/_vendor/rich/pretty.py index 4a5ddaaf7a1..847b558c9c4 
100644 --- a/src/pip/_vendor/rich/pretty.py +++ b/src/pip/_vendor/rich/pretty.py @@ -120,6 +120,7 @@ def _ipy_display_hook( indent_guides: bool = False, max_length: Optional[int] = None, max_string: Optional[int] = None, + max_depth: Optional[int] = None, expand_all: bool = False, ) -> None: # needed here to prevent circular import: @@ -177,6 +178,7 @@ def _ipy_display_hook( indent_guides=indent_guides, max_length=max_length, max_string=max_string, + max_depth=max_depth, expand_all=expand_all, margin=12, ), @@ -202,6 +204,7 @@ def install( indent_guides: bool = False, max_length: Optional[int] = None, max_string: Optional[int] = None, + max_depth: Optional[int] = None, expand_all: bool = False, ) -> None: """Install automatic pretty printing in the Python REPL. @@ -214,6 +217,7 @@ def install( max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to None. max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to None. + max_depth (int, optional): Maximum depth of nested data structures, or None for no maximum. Defaults to None. expand_all (bool, optional): Expand all containers. Defaults to False. max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100. 
""" @@ -236,6 +240,7 @@ def display_hook(value: Any) -> None: indent_guides=indent_guides, max_length=max_length, max_string=max_string, + max_depth=max_depth, expand_all=expand_all, ), crop=crop, @@ -258,6 +263,7 @@ def __call__(self, value: Any) -> Any: indent_guides=indent_guides, max_length=max_length, max_string=max_string, + max_depth=max_depth, expand_all=expand_all, ) else: @@ -333,7 +339,7 @@ def __rich_console__( max_depth=self.max_depth, expand_all=self.expand_all, ) - pretty_text = Text( + pretty_text = Text.from_ansi( pretty_str, justify=self.justify or options.justify, overflow=self.overflow or options.overflow, @@ -630,6 +636,11 @@ def to_repr(obj: Any) -> str: def _traverse(obj: Any, root: bool = False, depth: int = 0) -> Node: """Walk the object depth first.""" + obj_id = id(obj) + if obj_id in visited_ids: + # Recursion detected + return Node(value_repr="...") + obj_type = type(obj) py_version = (sys.version_info.major, sys.version_info.minor) children: List[Node] @@ -667,6 +678,7 @@ def iter_rich_args(rich_args: Any) -> Iterable[Union[Any, Tuple[str, Any]]]: pass if rich_repr_result is not None: + push_visited(obj_id) angular = getattr(obj.__rich_repr__, "angular", False) args = list(iter_rich_args(rich_repr_result)) class_name = obj.__class__.__name__ @@ -676,7 +688,10 @@ def iter_rich_args(rich_args: Any) -> Iterable[Union[Any, Tuple[str, Any]]]: append = children.append if reached_max_depth: - node = Node(value_repr=f"...") + if angular: + node = Node(value_repr=f"<{class_name}...>") + else: + node = Node(value_repr=f"{class_name}(...)") else: if angular: node = Node( @@ -711,14 +726,16 @@ def iter_rich_args(rich_args: Any) -> Iterable[Union[Any, Tuple[str, Any]]]: children=[], last=root, ) + pop_visited(obj_id) elif _is_attr_object(obj) and not fake_attributes: + push_visited(obj_id) children = [] append = children.append attr_fields = _get_attr_fields(obj) if attr_fields: if reached_max_depth: - node = Node(value_repr=f"...") + node = 
Node(value_repr=f"{obj.__class__.__name__}(...)") else: node = Node( open_brace=f"{obj.__class__.__name__}(", @@ -758,23 +775,18 @@ def iter_attrs() -> Iterable[ node = Node( value_repr=f"{obj.__class__.__name__}()", children=[], last=root ) - + pop_visited(obj_id) elif ( is_dataclass(obj) and not _safe_isinstance(obj, type) and not fake_attributes and (_is_dataclass_repr(obj) or py_version == (3, 6)) ): - obj_id = id(obj) - if obj_id in visited_ids: - # Recursion detected - return Node(value_repr="...") push_visited(obj_id) - children = [] append = children.append if reached_max_depth: - node = Node(value_repr=f"...") + node = Node(value_repr=f"{obj.__class__.__name__}(...)") else: node = Node( open_brace=f"{obj.__class__.__name__}(", @@ -792,42 +804,43 @@ def iter_attrs() -> Iterable[ child_node.key_separator = "=" append(child_node) - pop_visited(obj_id) + pop_visited(obj_id) elif _is_namedtuple(obj) and _has_default_namedtuple_repr(obj): + push_visited(obj_id) + class_name = obj.__class__.__name__ if reached_max_depth: - node = Node(value_repr="...") + # If we've reached the max depth, we still show the class name, but not its contents + node = Node( + value_repr=f"{class_name}(...)", + ) else: children = [] - class_name = obj.__class__.__name__ + append = children.append node = Node( open_brace=f"{class_name}(", close_brace=")", children=children, empty=f"{class_name}()", ) - append = children.append for last, (key, value) in loop_last(obj._asdict().items()): child_node = _traverse(value, depth=depth + 1) child_node.key_repr = key child_node.last = last child_node.key_separator = "=" append(child_node) + pop_visited(obj_id) elif _safe_isinstance(obj, _CONTAINERS): for container_type in _CONTAINERS: if _safe_isinstance(obj, container_type): obj_type = container_type break - obj_id = id(obj) - if obj_id in visited_ids: - # Recursion detected - return Node(value_repr="...") push_visited(obj_id) open_brace, close_brace, empty = _BRACES[obj_type](obj) if 
reached_max_depth: - node = Node(value_repr=f"...", last=root) + node = Node(value_repr=f"{open_brace}...{close_brace}") elif obj_type.__repr__ != type(obj).__repr__: node = Node(value_repr=to_repr(obj), last=root) elif obj: @@ -1007,4 +1020,10 @@ class StockKeepingUnit(NamedTuple): from pip._vendor.rich import print - print(Pretty(data, indent_guides=True, max_string=20)) + # print(Pretty(data, indent_guides=True, max_string=20)) + + class Thing: + def __repr__(self) -> str: + return "Hello\x1b[38;5;239m World!" + + print(Pretty(Thing())) diff --git a/src/pip/_vendor/rich/progress.py b/src/pip/_vendor/rich/progress.py index 92cfa802302..e7d163c1377 100644 --- a/src/pip/_vendor/rich/progress.py +++ b/src/pip/_vendor/rich/progress.py @@ -129,7 +129,7 @@ def track( refresh_per_second (float): Number of times per second to refresh the progress information. Defaults to 10. style (StyleType, optional): Style for the bar background. Defaults to "bar.back". complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete". - finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done". + finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished". pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse". update_period (float, optional): Minimum time (in seconds) between calls to update(). Defaults to 0.1. disable (bool, optional): Disable display of progress. @@ -216,6 +216,10 @@ def fileno(self) -> int: def isatty(self) -> bool: return self.handle.isatty() + @property + def mode(self) -> str: + return self.handle.mode + @property def name(self) -> str: return self.handle.name @@ -315,7 +319,7 @@ def wrap_file( refresh_per_second (float): Number of times per second to refresh the progress information. Defaults to 10. style (StyleType, optional): Style for the bar background. Defaults to "bar.back". 
complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete". - finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done". + finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished". pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse". disable (bool, optional): Disable display of progress. Returns: @@ -440,7 +444,7 @@ def open( refresh_per_second (float): Number of times per second to refresh the progress information. Defaults to 10. style (StyleType, optional): Style for the bar background. Defaults to "bar.back". complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete". - finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done". + finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished". pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse". disable (bool, optional): Disable display of progress. encoding (str, optional): The encoding to use when reading in text mode. @@ -634,7 +638,7 @@ class BarColumn(ProgressColumn): bar_width (Optional[int], optional): Width of bar or None for full width. Defaults to 40. style (StyleType, optional): Style for the bar background. Defaults to "bar.back". complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete". - finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done". + finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished". pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse". 
""" diff --git a/src/pip/_vendor/rich/progress_bar.py b/src/pip/_vendor/rich/progress_bar.py index 9c3a4f25a2c..67361df2e49 100644 --- a/src/pip/_vendor/rich/progress_bar.py +++ b/src/pip/_vendor/rich/progress_bar.py @@ -25,7 +25,7 @@ class ProgressBar(JupyterMixin): pulse (bool, optional): Enable pulse effect. Defaults to False. Will pulse if a None total was passed. style (StyleType, optional): Style for the bar background. Defaults to "bar.back". complete_style (StyleType, optional): Style for the completed bar. Defaults to "bar.complete". - finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.done". + finished_style (StyleType, optional): Style for a finished bar. Defaults to "bar.finished". pulse_style (StyleType, optional): Style for pulsing bars. Defaults to "bar.pulse". animation_time (Optional[float], optional): Time in seconds to use for animation, or None to use system time. """ diff --git a/src/pip/_vendor/rich/repr.py b/src/pip/_vendor/rich/repr.py index 36966e70f19..72d1a7e30b6 100644 --- a/src/pip/_vendor/rich/repr.py +++ b/src/pip/_vendor/rich/repr.py @@ -1,21 +1,18 @@ -from functools import partial import inspect -import sys - +from functools import partial from typing import ( Any, Callable, Iterable, List, Optional, - overload, - Union, Tuple, Type, TypeVar, + Union, + overload, ) - T = TypeVar("T") diff --git a/src/pip/_vendor/rich/scope.py b/src/pip/_vendor/rich/scope.py index 6822b8ca542..c9d134cc3ce 100644 --- a/src/pip/_vendor/rich/scope.py +++ b/src/pip/_vendor/rich/scope.py @@ -26,7 +26,7 @@ def render_scope( scope (Mapping): A mapping containing variable names and values. title (str, optional): Optional title. Defaults to None. sort_keys (bool, optional): Enable sorting of items. Defaults to True. - indent_guides (bool, optional): Enable indentaton guides. Defaults to False. + indent_guides (bool, optional): Enable indentation guides. Defaults to False. 
max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to None. max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to None. diff --git a/src/pip/_vendor/rich/style.py b/src/pip/_vendor/rich/style.py index b2e8aff71f5..ad388aadb0e 100644 --- a/src/pip/_vendor/rich/style.py +++ b/src/pip/_vendor/rich/style.py @@ -188,8 +188,10 @@ def _make_color(color: Union[Color, str]) -> Color: ) self._link = link - self._link_id = f"{randint(0, 999999)}" if link else "" self._meta = None if meta is None else dumps(meta) + self._link_id = ( + f"{randint(0, 999999)}{hash(self._meta)}" if (link or meta) else "" + ) self._hash: Optional[int] = None self._null = not (self._set_attributes or color or bgcolor or link or meta) @@ -237,8 +239,8 @@ def from_meta(cls, meta: Optional[Dict[str, Any]]) -> "Style": style._set_attributes = 0 style._attributes = 0 style._link = None - style._link_id = "" style._meta = dumps(meta) + style._link_id = f"{randint(0, 999999)}{hash(style._meta)}" style._hash = None style._null = not (meta) return style diff --git a/src/pip/_vendor/rich/syntax.py b/src/pip/_vendor/rich/syntax.py index dace718c1b5..01bdd04398f 100644 --- a/src/pip/_vendor/rich/syntax.py +++ b/src/pip/_vendor/rich/syntax.py @@ -40,6 +40,7 @@ from pip._vendor.rich.padding import Padding, PaddingDimensions from ._loop import loop_first +from .cells import cell_len from .color import Color, blend_rgb from .console import Console, ConsoleOptions, JustifyMethod, RenderResult from .jupyter import JupyterMixin @@ -586,11 +587,21 @@ def _get_number_styles(self, console: Console) -> Tuple[Style, Style, Style]: def __rich_measure__( self, console: "Console", options: "ConsoleOptions" ) -> "Measurement": + _, right, _, left = Padding.unpack(self.padding) + padding = left + right if self.code_width is not None: - width = self.code_width + self._numbers_column_width + right + left + width = 
self.code_width + self._numbers_column_width + padding + 1 return Measurement(self._numbers_column_width, width) - return Measurement(self._numbers_column_width, options.max_width) + lines = self.code.splitlines() + width = ( + self._numbers_column_width + + padding + + (max(cell_len(line) for line in lines) if lines else 0) + ) + if self.line_numbers: + width += 1 + return Measurement(self._numbers_column_width, width) def __rich_console__( self, console: Console, options: ConsoleOptions diff --git a/src/pip/_vendor/rich/table.py b/src/pip/_vendor/rich/table.py index 8fc28ef2f74..17409f2ee8d 100644 --- a/src/pip/_vendor/rich/table.py +++ b/src/pip/_vendor/rich/table.py @@ -462,6 +462,12 @@ def add_cell(column: Column, renderable: "RenderableType") -> None: ) self.rows.append(Row(style=style, end_section=end_section)) + def add_section(self) -> None: + """Add a new section (draw a line after current row).""" + + if self.rows: + self.rows[-1].end_section = True + def __rich_console__( self, console: "Console", options: "ConsoleOptions" ) -> "RenderResult": diff --git a/src/pip/_vendor/rich/text.py b/src/pip/_vendor/rich/text.py index 12037d0cf4f..b14055aa7b4 100644 --- a/src/pip/_vendor/rich/text.py +++ b/src/pip/_vendor/rich/text.py @@ -450,7 +450,6 @@ def stylize( style (Union[str, Style]): Style instance or style definition to apply. start (int): Start offset (negative indexing is supported). Defaults to 0. end (Optional[int], optional): End offset (negative indexing is supported), or None for end of text. Defaults to None. - """ if style: length = len(self) @@ -465,6 +464,32 @@ def stylize( return self._spans.append(Span(start, min(length, end), style)) + def stylize_before( + self, + style: Union[str, Style], + start: int = 0, + end: Optional[int] = None, + ) -> None: + """Apply a style to the text, or a portion of the text. Styles will be applied before other styles already present. 
+ + Args: + style (Union[str, Style]): Style instance or style definition to apply. + start (int): Start offset (negative indexing is supported). Defaults to 0. + end (Optional[int], optional): End offset (negative indexing is supported), or None for end of text. Defaults to None. + """ + if style: + length = len(self) + if start < 0: + start = length + start + if end is None: + end = length + if end < 0: + end = length + end + if start >= length or end <= start: + # Span not in text or not valid + return + self._spans.insert(0, Span(start, min(length, end), style)) + def apply_meta( self, meta: Dict[str, Any], start: int = 0, end: Optional[int] = None ) -> None: diff --git a/src/pip/_vendor/rich/traceback.py b/src/pip/_vendor/rich/traceback.py index e5023c77ab4..1f481298f6f 100644 --- a/src/pip/_vendor/rich/traceback.py +++ b/src/pip/_vendor/rich/traceback.py @@ -337,7 +337,7 @@ def extract( from pip._vendor.rich import _IMPORT_CWD def safe_str(_object: Any) -> str: - """Don't allow exceptions from __str__ to propegate.""" + """Don't allow exceptions from __str__ to propagate.""" try: return str(_object) except Exception: @@ -389,19 +389,17 @@ def safe_str(_object: Any) -> str: del stack.frames[:] cause = getattr(exc_value, "__cause__", None) - if cause and cause.__traceback__: + if cause: exc_type = cause.__class__ exc_value = cause + # __traceback__ can be None, e.g. 
for exceptions raised by the + # 'multiprocessing' module traceback = cause.__traceback__ is_cause = True continue cause = exc_value.__context__ - if ( - cause - and cause.__traceback__ - and not getattr(exc_value, "__suppress_context__", False) - ): + if cause and not getattr(exc_value, "__suppress_context__", False): exc_type = cause.__class__ exc_value = cause traceback = cause.__traceback__ diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index a34277b8c54..850f3f46808 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -12,7 +12,7 @@ requests==2.28.1 chardet==5.0.0 idna==3.4 urllib3==1.26.12 -rich==12.5.1 +rich==12.6.0 pygments==2.13.0 typing_extensions==4.4.0 resolvelib==0.8.1 From 1b33f4b944b46f9d8bd6380201ef1e6c33fd944a Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 26 Dec 2022 15:01:38 +0000 Subject: [PATCH 220/730] Stop forcing color in CI This affects the output presented in tests that rely on checking for how `rich` errors are presented. --- .github/workflows/ci.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7b48d944329..3b35e93b21f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,11 +11,6 @@ on: schedule: - cron: 0 0 * * MON # Run every Monday at 00:00 UTC -env: - # The "FORCE_COLOR" variable, when set to 1, - # tells Nox to colorize itself. 
- FORCE_COLOR: "1" - concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true From 8dbb8b9bbe4976c87e465695850821804aa47a83 Mon Sep 17 00:00:00 2001 From: Jakub Kuczys Date: Tue, 27 Dec 2022 10:36:28 +0100 Subject: [PATCH 221/730] Don't check for `wheel` when only `build-system.requires` is present (#11674) --- news/11673.bugfix.rst | 3 +++ src/pip/_internal/pyproject.py | 7 +++---- 2 files changed, 6 insertions(+), 4 deletions(-) create mode 100644 news/11673.bugfix.rst diff --git a/news/11673.bugfix.rst b/news/11673.bugfix.rst new file mode 100644 index 00000000000..c3d92475c99 --- /dev/null +++ b/news/11673.bugfix.rst @@ -0,0 +1,3 @@ +Stop checking that ``wheel`` is present when ``build-system.requires`` +is provided without ``build-system.build-backend`` as ``setuptools`` +(which we still check for) will inject it anyway. diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py index 1e9119f3e5c..1de9f0fde5d 100644 --- a/src/pip/_internal/pyproject.py +++ b/src/pip/_internal/pyproject.py @@ -159,9 +159,8 @@ def load_pyproject_toml( if backend is None: # If the user didn't specify a backend, we assume they want to use # the setuptools backend. But we can't be sure they have included - # a version of setuptools which supplies the backend, or wheel - # (which is needed by the backend) in their requirements. So we - # make a note to check that those requirements are present once + # a version of setuptools which supplies the backend. So we + # make a note to check that this requirement is present once # we have set up the environment. # This is quite a lot of work to check for a very specific case. But # the problem is, that case is potentially quite common - projects that @@ -170,6 +169,6 @@ def load_pyproject_toml( # tools themselves. The original PEP 518 code had a similar check (but # implemented in a different way). 
backend = "setuptools.build_meta:__legacy__" - check = ["setuptools>=40.8.0", "wheel"] + check = ["setuptools>=40.8.0"] return BuildSystemDetails(requires, backend, check, backend_path) From da478818c1c1d45770487f4346351413d92f61c9 Mon Sep 17 00:00:00 2001 From: Daniele Nicolodi Date: Sun, 27 Nov 2022 16:49:08 +0100 Subject: [PATCH 222/730] Fix isolated environment scripts path on Debian The scripts path was looked up passing explicitly the scheme to be used using "nt" on Windows and "posix_prefix" everywhere else. However, when the isolated build environment is created, packages are installed using the default scheme for the platform. On most platforms this works because normally "nt" and "posix_prefix" are the default schemes. However, Debian customizes sysconfig to use a "posix_local" scheme by default and under this scheme the scripts path does not match the one of the "posix_prefix" scheme. This results in scripts installed as part of the build dependencies not to be found during the build, as reported here https://github.com/mesonbuild/meson-python/issues/109 and here https://bugs.debian.org/1019293. The problem can be solved omitting to specify a scheme when looking up the scripts path. To future proof the path lookup, use the "venv" scheme if available as done in #11598. For uniformity use similar functions as used to lookup the library paths. --- news/11623.bugfix.rst | 1 + src/pip/_internal/build_env.py | 7 ++----- src/pip/_internal/locations/__init__.py | 5 +++++ src/pip/_internal/locations/_sysconfig.py | 16 ++++++++++++---- 4 files changed, 20 insertions(+), 9 deletions(-) create mode 100644 news/11623.bugfix.rst diff --git a/news/11623.bugfix.rst b/news/11623.bugfix.rst new file mode 100644 index 00000000000..45b8fe1928f --- /dev/null +++ b/news/11623.bugfix.rst @@ -0,0 +1 @@ +Fix scripts path in isolated build environment on Debian. 
diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index e67b868e8f4..24bfa870b07 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -8,7 +8,6 @@ import sys import textwrap from collections import OrderedDict -from sysconfig import get_paths from types import TracebackType from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type @@ -19,6 +18,7 @@ from pip import __file__ as pip_location from pip._internal.cli.spinners import open_spinner from pip._internal.locations import ( + get_isolated_environment_bin_path, get_isolated_environment_lib_paths, get_platlib, get_purelib, @@ -37,10 +37,7 @@ class _Prefix: def __init__(self, path: str) -> None: self.path = path self.setup = False - self.bin_dir = get_paths( - "nt" if os.name == "nt" else "posix_prefix", - vars={"base": path, "platbase": path}, - )["scripts"] + self.bin_dir = get_isolated_environment_bin_path(path) self.lib_dirs = get_isolated_environment_lib_paths(path) diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 516bd607839..547bb803ce2 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -28,6 +28,7 @@ "get_major_minor_version", "get_platlib", "get_isolated_environment_lib_paths", + "get_isolated_environment_bin_path", "get_purelib", "get_scheme", "get_src_prefix", @@ -526,3 +527,7 @@ def get_isolated_environment_lib_paths(prefix: str) -> List[str]: _log_context(prefix=prefix) return old_lib_paths + + +def get_isolated_environment_bin_path(prefix: str) -> str: + return _sysconfig.get_isolated_environment_bin_path(prefix) diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 69821572081..dfe2f0bd5a0 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -213,10 +213,18 @@ def get_platlib() -> str: return 
sysconfig.get_paths()["platlib"] -def get_isolated_environment_lib_paths(prefix: str) -> typing.Tuple[str, str]: +def _get_isolated_environment_paths(prefix: str) -> typing.Dict[str, str]: vars = {"base": prefix, "platbase": prefix} if "venv" in sysconfig.get_scheme_names(): - paths = sysconfig.get_paths(vars=vars, scheme="venv") - else: - paths = sysconfig.get_paths(vars=vars) + return sysconfig.get_paths(vars=vars, scheme="venv") + return sysconfig.get_paths(vars=vars) + + +def get_isolated_environment_lib_paths(prefix: str) -> typing.Tuple[str, str]: + paths = _get_isolated_environment_paths(prefix) return (paths["purelib"], paths["platlib"]) + + +def get_isolated_environment_bin_path(prefix: str) -> str: + paths = _get_isolated_environment_paths(prefix) + return paths["scripts"] From 77ef9f0bfd456a5cd1a3a969072a158e781962d3 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 28 Dec 2022 10:41:06 +0800 Subject: [PATCH 223/730] Avoid using built-in names as variable name --- src/pip/_internal/locations/_sysconfig.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index dfe2f0bd5a0..2bf5a1f444a 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -214,10 +214,10 @@ def get_platlib() -> str: def _get_isolated_environment_paths(prefix: str) -> typing.Dict[str, str]: - vars = {"base": prefix, "platbase": prefix} + variables = {"base": prefix, "platbase": prefix} if "venv" in sysconfig.get_scheme_names(): - return sysconfig.get_paths(vars=vars, scheme="venv") - return sysconfig.get_paths(vars=vars) + return sysconfig.get_paths(vars=variables, scheme="venv") + return sysconfig.get_paths(vars=variables) def get_isolated_environment_lib_paths(prefix: str) -> typing.Tuple[str, str]: From 28e5eb99c813ede28f40f53213ae517b6535e38b Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 28 Dec 2022 10:42:04 
+0800 Subject: [PATCH 224/730] Avoid unnecessary indirection Since there's no equivalent of get_isolated_environment_bin_path for the distutils backend, the additional declaration in the sysconfig backend is unnecessary. --- src/pip/_internal/locations/__init__.py | 2 +- src/pip/_internal/locations/_sysconfig.py | 9 ++------- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 547bb803ce2..815f8c22b15 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -530,4 +530,4 @@ def get_isolated_environment_lib_paths(prefix: str) -> List[str]: def get_isolated_environment_bin_path(prefix: str) -> str: - return _sysconfig.get_isolated_environment_bin_path(prefix) + return _sysconfig.get_isolated_environment_paths(prefix)["scripts"] diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 2bf5a1f444a..38e400f94d4 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -213,7 +213,7 @@ def get_platlib() -> str: return sysconfig.get_paths()["platlib"] -def _get_isolated_environment_paths(prefix: str) -> typing.Dict[str, str]: +def get_isolated_environment_paths(prefix: str) -> typing.Dict[str, str]: variables = {"base": prefix, "platbase": prefix} if "venv" in sysconfig.get_scheme_names(): return sysconfig.get_paths(vars=variables, scheme="venv") @@ -221,10 +221,5 @@ def _get_isolated_environment_paths(prefix: str) -> typing.Dict[str, str]: def get_isolated_environment_lib_paths(prefix: str) -> typing.Tuple[str, str]: - paths = _get_isolated_environment_paths(prefix) + paths = get_isolated_environment_paths(prefix) return (paths["purelib"], paths["platlib"]) - - - - def get_isolated_environment_bin_path(prefix: str) -> str: - paths = _get_isolated_environment_paths(prefix) - return paths["scripts"] From
a0c607ffbf481a67e23c33a38e956fed7d2025ff Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 28 Dec 2022 10:44:51 +0800 Subject: [PATCH 225/730] Reorder __all__ The list is *almost* sorted alphabetically so let's make it fully so. --- src/pip/_internal/locations/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 815f8c22b15..496844be142 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -25,10 +25,10 @@ "USER_CACHE_DIR", "get_bin_prefix", "get_bin_user", + "get_isolated_environment_bin_path", + "get_isolated_environment_lib_paths", "get_major_minor_version", "get_platlib", - "get_isolated_environment_lib_paths", - "get_isolated_environment_bin_path", "get_purelib", "get_scheme", "get_src_prefix", From e22003e0e113fc0280df66fb764c1fc2b6dc56e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 28 Dec 2022 12:00:53 +0100 Subject: [PATCH 226/730] Update VCS support documentation Now that PEP 610 (direct_url.json) is implemented, an editable install is not required anymore for pip freeze to work correctly. --- docs/html/topics/vcs-support.md | 4 ---- news/11675.doc.rst | 2 ++ 2 files changed, 2 insertions(+), 4 deletions(-) create mode 100644 news/11675.doc.rst diff --git a/docs/html/topics/vcs-support.md b/docs/html/topics/vcs-support.md index 70bb5beb9dc..7582099eb52 100644 --- a/docs/html/topics/vcs-support.md +++ b/docs/html/topics/vcs-support.md @@ -130,10 +130,6 @@ VCS source will not overwrite it without an `--upgrade` flag. Further, pip looks at the package version, at the target revision to determine what action to take on the VCS requirement (not the commit itself). -The {ref}`pip freeze` subcommand will record the VCS requirement specifier -(referencing a specific commit) only if the install is done with the editable -option. 
- ## URL fragments pip looks at 2 fragments for VCS URLs: diff --git a/news/11675.doc.rst b/news/11675.doc.rst new file mode 100644 index 00000000000..b2b2278faf6 --- /dev/null +++ b/news/11675.doc.rst @@ -0,0 +1,2 @@ +Remove mention that editable installs are necessary for pip freeze to report the VCS +URL. From fa4b2efbabffb96536c10bfb94477912a0f20e7f Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 11 Dec 2022 20:03:33 +0000 Subject: [PATCH 227/730] Replace `pep517` with `pyproject_hooks` The `pep517` package has been superseded by a new package. --- news/pep517.vendor.rst | 1 + news/pyproject-hooks.vendor.rst | 1 + .../_internal/operations/build/metadata.py | 6 +- .../operations/build/metadata_editable.py | 6 +- src/pip/_internal/operations/build/wheel.py | 4 +- .../operations/build/wheel_editable.py | 4 +- src/pip/_internal/req/req_install.py | 8 +- src/pip/_internal/utils/misc.py | 6 +- src/pip/_internal/utils/subprocess.py | 4 +- src/pip/_vendor/pep517.pyi | 1 - src/pip/_vendor/pep517/__init__.py | 6 - src/pip/_vendor/pep517/build.py | 126 ----------- src/pip/_vendor/pep517/check.py | 207 ------------------ src/pip/_vendor/pep517/colorlog.py | 113 ---------- src/pip/_vendor/pep517/dirtools.py | 19 -- src/pip/_vendor/pep517/envbuild.py | 170 -------------- src/pip/_vendor/pep517/in_process/__init__.py | 26 --- src/pip/_vendor/pep517/meta.py | 93 -------- src/pip/_vendor/pyproject_hooks.pyi | 1 + .../{pep517 => pyproject_hooks}/LICENSE | 0 src/pip/_vendor/pyproject_hooks/__init__.py | 23 ++ .../{pep517 => pyproject_hooks}/_compat.py | 0 .../wrappers.py => pyproject_hooks/_impl.py} | 196 +++++++---------- .../pyproject_hooks/_in_process/__init__.py | 18 ++ .../_in_process}/_in_process.py | 8 +- src/pip/_vendor/vendor.txt | 2 +- 26 files changed, 151 insertions(+), 898 deletions(-) create mode 100644 news/pep517.vendor.rst create mode 100644 news/pyproject-hooks.vendor.rst delete mode 100644 src/pip/_vendor/pep517.pyi delete mode 100644 
src/pip/_vendor/pep517/__init__.py delete mode 100644 src/pip/_vendor/pep517/build.py delete mode 100644 src/pip/_vendor/pep517/check.py delete mode 100644 src/pip/_vendor/pep517/colorlog.py delete mode 100644 src/pip/_vendor/pep517/dirtools.py delete mode 100644 src/pip/_vendor/pep517/envbuild.py delete mode 100644 src/pip/_vendor/pep517/in_process/__init__.py delete mode 100644 src/pip/_vendor/pep517/meta.py create mode 100644 src/pip/_vendor/pyproject_hooks.pyi rename src/pip/_vendor/{pep517 => pyproject_hooks}/LICENSE (100%) create mode 100644 src/pip/_vendor/pyproject_hooks/__init__.py rename src/pip/_vendor/{pep517 => pyproject_hooks}/_compat.py (100%) rename src/pip/_vendor/{pep517/wrappers.py => pyproject_hooks/_impl.py} (63%) create mode 100644 src/pip/_vendor/pyproject_hooks/_in_process/__init__.py rename src/pip/_vendor/{pep517/in_process => pyproject_hooks/_in_process}/_in_process.py (96%) diff --git a/news/pep517.vendor.rst b/news/pep517.vendor.rst new file mode 100644 index 00000000000..4b91e560f1f --- /dev/null +++ b/news/pep517.vendor.rst @@ -0,0 +1 @@ +Remove pep517 from vendored packages diff --git a/news/pyproject-hooks.vendor.rst b/news/pyproject-hooks.vendor.rst new file mode 100644 index 00000000000..2598d556477 --- /dev/null +++ b/news/pyproject-hooks.vendor.rst @@ -0,0 +1 @@ +Add pyproject-hooks 1.0.0 diff --git a/src/pip/_internal/operations/build/metadata.py b/src/pip/_internal/operations/build/metadata.py index e2b7b444543..c66ac354deb 100644 --- a/src/pip/_internal/operations/build/metadata.py +++ b/src/pip/_internal/operations/build/metadata.py @@ -3,7 +3,7 @@ import os -from pip._vendor.pep517.wrappers import Pep517HookCaller +from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._internal.build_env import BuildEnvironment from pip._internal.exceptions import ( @@ -15,7 +15,7 @@ def generate_metadata( - build_env: BuildEnvironment, backend: Pep517HookCaller, details: str + build_env: BuildEnvironment, backend: 
BuildBackendHookCaller, details: str ) -> str: """Generate metadata using mechanisms described in PEP 517. @@ -26,7 +26,7 @@ def generate_metadata( metadata_dir = metadata_tmpdir.path with build_env: - # Note that Pep517HookCaller implements a fallback for + # Note that BuildBackendHookCaller implements a fallback for # prepare_metadata_for_build_wheel, so we don't have to # consider the possibility that this hook doesn't exist. runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)") diff --git a/src/pip/_internal/operations/build/metadata_editable.py b/src/pip/_internal/operations/build/metadata_editable.py index 4c3f48b6cdf..27c69f0d1ea 100644 --- a/src/pip/_internal/operations/build/metadata_editable.py +++ b/src/pip/_internal/operations/build/metadata_editable.py @@ -3,7 +3,7 @@ import os -from pip._vendor.pep517.wrappers import Pep517HookCaller +from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._internal.build_env import BuildEnvironment from pip._internal.exceptions import ( @@ -15,7 +15,7 @@ def generate_editable_metadata( - build_env: BuildEnvironment, backend: Pep517HookCaller, details: str + build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str ) -> str: """Generate metadata using mechanisms described in PEP 660. @@ -26,7 +26,7 @@ def generate_editable_metadata( metadata_dir = metadata_tmpdir.path with build_env: - # Note that Pep517HookCaller implements a fallback for + # Note that BuildBackendHookCaller implements a fallback for # prepare_metadata_for_build_wheel/editable, so we don't have to # consider the possibility that this hook doesn't exist. 
runner = runner_with_spinner_message( diff --git a/src/pip/_internal/operations/build/wheel.py b/src/pip/_internal/operations/build/wheel.py index b0d2fc9eadb..064811ad11b 100644 --- a/src/pip/_internal/operations/build/wheel.py +++ b/src/pip/_internal/operations/build/wheel.py @@ -2,7 +2,7 @@ import os from typing import Optional -from pip._vendor.pep517.wrappers import Pep517HookCaller +from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._internal.utils.subprocess import runner_with_spinner_message @@ -11,7 +11,7 @@ def build_wheel_pep517( name: str, - backend: Pep517HookCaller, + backend: BuildBackendHookCaller, metadata_directory: str, tempd: str, ) -> Optional[str]: diff --git a/src/pip/_internal/operations/build/wheel_editable.py b/src/pip/_internal/operations/build/wheel_editable.py index cf7b01aed5a..719d69dd801 100644 --- a/src/pip/_internal/operations/build/wheel_editable.py +++ b/src/pip/_internal/operations/build/wheel_editable.py @@ -2,7 +2,7 @@ import os from typing import Optional -from pip._vendor.pep517.wrappers import HookMissing, Pep517HookCaller +from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing from pip._internal.utils.subprocess import runner_with_spinner_message @@ -11,7 +11,7 @@ def build_wheel_editable( name: str, - backend: Pep517HookCaller, + backend: BuildBackendHookCaller, metadata_directory: str, tempd: str, ) -> Optional[str]: diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 5f29261c252..4543be34c20 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -18,7 +18,7 @@ from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.packaging.version import Version from pip._vendor.packaging.version import parse as parse_version -from pip._vendor.pep517.wrappers import Pep517HookCaller +from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._internal.build_env import 
BuildEnvironment, NoOpBuildEnvironment from pip._internal.exceptions import InstallationError, LegacyInstallFailure @@ -51,7 +51,7 @@ ) from pip._internal.utils.hashes import Hashes from pip._internal.utils.misc import ( - ConfiguredPep517HookCaller, + ConfiguredBuildBackendHookCaller, ask_path_exists, backup_dir, display_path, @@ -173,7 +173,7 @@ def __init__( self.requirements_to_check: List[str] = [] # The PEP 517 backend we should use to build the project - self.pep517_backend: Optional[Pep517HookCaller] = None + self.pep517_backend: Optional[BuildBackendHookCaller] = None # Are we using PEP 517 for this requirement? # After pyproject.toml has been loaded, the only valid values are True @@ -482,7 +482,7 @@ def load_pyproject_toml(self) -> None: requires, backend, check, backend_path = pyproject_toml_data self.requirements_to_check = check self.pyproject_requires = requires - self.pep517_backend = ConfiguredPep517HookCaller( + self.pep517_backend = ConfiguredBuildBackendHookCaller( self, self.unpacked_source_directory, backend, diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index a8f4cb5cf56..a3b04de9f67 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -34,7 +34,7 @@ cast, ) -from pip._vendor.pep517 import Pep517HookCaller +from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed from pip import __version__ @@ -57,7 +57,7 @@ "captured_stdout", "ensure_dir", "remove_auth_from_url", - "ConfiguredPep517HookCaller", + "ConfiguredBuildBackendHookCaller", ] @@ -635,7 +635,7 @@ def partition( return filterfalse(pred, t1), filter(pred, t2) -class ConfiguredPep517HookCaller(Pep517HookCaller): +class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller): def __init__( self, config_holder: Any, diff --git a/src/pip/_internal/utils/subprocess.py b/src/pip/_internal/utils/subprocess.py index cf5bf6be1f6..1e8ff50edfb 100644 --- 
a/src/pip/_internal/utils/subprocess.py +++ b/src/pip/_internal/utils/subprocess.py @@ -239,8 +239,8 @@ def call_subprocess( def runner_with_spinner_message(message: str) -> Callable[..., None]: """Provide a subprocess_runner that shows a spinner message. - Intended for use with for pep517's Pep517HookCaller. Thus, the runner has - an API that matches what's expected by Pep517HookCaller.subprocess_runner. + Intended for use with for BuildBackendHookCaller. Thus, the runner has + an API that matches what's expected by BuildBackendHookCaller.subprocess_runner. """ def runner( diff --git a/src/pip/_vendor/pep517.pyi b/src/pip/_vendor/pep517.pyi deleted file mode 100644 index d1ce810290f..00000000000 --- a/src/pip/_vendor/pep517.pyi +++ /dev/null @@ -1 +0,0 @@ -from pep517 import * \ No newline at end of file diff --git a/src/pip/_vendor/pep517/__init__.py b/src/pip/_vendor/pep517/__init__.py deleted file mode 100644 index 38ea0f5f11f..00000000000 --- a/src/pip/_vendor/pep517/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Wrappers to build Python packages using PEP 517 hooks -""" - -__version__ = '0.13.0' - -from .wrappers import * # noqa: F401, F403 diff --git a/src/pip/_vendor/pep517/build.py b/src/pip/_vendor/pep517/build.py deleted file mode 100644 index b30909c8704..00000000000 --- a/src/pip/_vendor/pep517/build.py +++ /dev/null @@ -1,126 +0,0 @@ -"""Build a project using PEP 517 hooks. -""" -import argparse -import logging -import os -import shutil -import tempfile - -from ._compat import tomllib -from .envbuild import BuildEnvironment -from .wrappers import Pep517HookCaller - -log = logging.getLogger(__name__) - - -def validate_system(system): - """ - Ensure build system has the requisite fields. 
- """ - required = {'requires', 'build-backend'} - if not (required <= set(system)): - message = "Missing required fields: {missing}".format( - missing=required-set(system), - ) - raise ValueError(message) - - -def load_system(source_dir): - """ - Load the build system from a source dir (pyproject.toml). - """ - pyproject = os.path.join(source_dir, 'pyproject.toml') - with open(pyproject, 'rb') as f: - pyproject_data = tomllib.load(f) - return pyproject_data['build-system'] - - -def compat_system(source_dir): - """ - Given a source dir, attempt to get a build system backend - and requirements from pyproject.toml. Fallback to - setuptools but only if the file was not found or a build - system was not indicated. - """ - try: - system = load_system(source_dir) - except (FileNotFoundError, KeyError): - system = {} - system.setdefault( - 'build-backend', - 'setuptools.build_meta:__legacy__', - ) - system.setdefault('requires', ['setuptools', 'wheel']) - return system - - -def _do_build(hooks, env, dist, dest): - get_requires_name = 'get_requires_for_build_{dist}'.format(**locals()) - get_requires = getattr(hooks, get_requires_name) - reqs = get_requires({}) - log.info('Got build requires: %s', reqs) - - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - - with tempfile.TemporaryDirectory() as td: - log.info('Trying to build %s in %s', dist, td) - build_name = 'build_{dist}'.format(**locals()) - build = getattr(hooks, build_name) - filename = build(td, {}) - source = os.path.join(td, filename) - shutil.move(source, os.path.join(dest, os.path.basename(filename))) - - -def build(source_dir, dist, dest=None, system=None): - system = system or load_system(source_dir) - dest = os.path.join(source_dir, dest or 'dist') - os.makedirs(dest, exist_ok=True) - - validate_system(system) - hooks = Pep517HookCaller( - source_dir, system['build-backend'], system.get('backend-path') - ) - - with BuildEnvironment() as env: - env.pip_install(system['requires']) - 
_do_build(hooks, env, dist, dest) - - -parser = argparse.ArgumentParser() -parser.add_argument( - 'source_dir', - help="A directory containing pyproject.toml", -) -parser.add_argument( - '--binary', '-b', - action='store_true', - default=False, -) -parser.add_argument( - '--source', '-s', - action='store_true', - default=False, -) -parser.add_argument( - '--out-dir', '-o', - help="Destination in which to save the builds relative to source dir", -) - - -def main(args): - log.warning('pep517.build is deprecated. ' - 'Consider switching to https://pypi.org/project/build/') - - # determine which dists to build - dists = list(filter(None, ( - 'sdist' if args.source or not args.binary else None, - 'wheel' if args.binary or not args.source else None, - ))) - - for dist in dists: - build(args.source_dir, dist, args.out_dir) - - -if __name__ == '__main__': - main(parser.parse_args()) diff --git a/src/pip/_vendor/pep517/check.py b/src/pip/_vendor/pep517/check.py deleted file mode 100644 index b79f6270b40..00000000000 --- a/src/pip/_vendor/pep517/check.py +++ /dev/null @@ -1,207 +0,0 @@ -"""Check a project and backend by attempting to build using PEP 517 hooks. 
-""" -import argparse -import logging -import os -import shutil -import sys -import tarfile -import zipfile -from os.path import isfile -from os.path import join as pjoin -from subprocess import CalledProcessError -from tempfile import mkdtemp - -from ._compat import tomllib -from .colorlog import enable_colourful_output -from .envbuild import BuildEnvironment -from .wrappers import Pep517HookCaller - -log = logging.getLogger(__name__) - - -def check_build_sdist(hooks, build_sys_requires): - with BuildEnvironment() as env: - try: - env.pip_install(build_sys_requires) - log.info('Installed static build dependencies') - except CalledProcessError: - log.error('Failed to install static build dependencies') - return False - - try: - reqs = hooks.get_requires_for_build_sdist({}) - log.info('Got build requires: %s', reqs) - except Exception: - log.error('Failure in get_requires_for_build_sdist', exc_info=True) - return False - - try: - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - except CalledProcessError: - log.error('Failed to install dynamic build dependencies') - return False - - td = mkdtemp() - log.info('Trying to build sdist in %s', td) - try: - try: - filename = hooks.build_sdist(td, {}) - log.info('build_sdist returned %r', filename) - except Exception: - log.info('Failure in build_sdist', exc_info=True) - return False - - if not filename.endswith('.tar.gz'): - log.error( - "Filename %s doesn't have .tar.gz extension", filename) - return False - - path = pjoin(td, filename) - if isfile(path): - log.info("Output file %s exists", path) - else: - log.error("Output file %s does not exist", path) - return False - - if tarfile.is_tarfile(path): - log.info("Output file is a tar file") - else: - log.error("Output file is not a tar file") - return False - - finally: - shutil.rmtree(td) - - return True - - -def check_build_wheel(hooks, build_sys_requires): - with BuildEnvironment() as env: - try: - env.pip_install(build_sys_requires) - 
log.info('Installed static build dependencies') - except CalledProcessError: - log.error('Failed to install static build dependencies') - return False - - try: - reqs = hooks.get_requires_for_build_wheel({}) - log.info('Got build requires: %s', reqs) - except Exception: - log.error('Failure in get_requires_for_build_sdist', exc_info=True) - return False - - try: - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - except CalledProcessError: - log.error('Failed to install dynamic build dependencies') - return False - - td = mkdtemp() - log.info('Trying to build wheel in %s', td) - try: - try: - filename = hooks.build_wheel(td, {}) - log.info('build_wheel returned %r', filename) - except Exception: - log.info('Failure in build_wheel', exc_info=True) - return False - - if not filename.endswith('.whl'): - log.error("Filename %s doesn't have .whl extension", filename) - return False - - path = pjoin(td, filename) - if isfile(path): - log.info("Output file %s exists", path) - else: - log.error("Output file %s does not exist", path) - return False - - if zipfile.is_zipfile(path): - log.info("Output file is a zip file") - else: - log.error("Output file is not a zip file") - return False - - finally: - shutil.rmtree(td) - - return True - - -def check(source_dir): - pyproject = pjoin(source_dir, 'pyproject.toml') - if isfile(pyproject): - log.info('Found pyproject.toml') - else: - log.error('Missing pyproject.toml') - return False - - try: - with open(pyproject, 'rb') as f: - pyproject_data = tomllib.load(f) - # Ensure the mandatory data can be loaded - buildsys = pyproject_data['build-system'] - requires = buildsys['requires'] - backend = buildsys['build-backend'] - backend_path = buildsys.get('backend-path') - log.info('Loaded pyproject.toml') - except (tomllib.TOMLDecodeError, KeyError): - log.error("Invalid pyproject.toml", exc_info=True) - return False - - hooks = Pep517HookCaller(source_dir, backend, backend_path) - - sdist_ok = 
check_build_sdist(hooks, requires) - wheel_ok = check_build_wheel(hooks, requires) - - if not sdist_ok: - log.warning('Sdist checks failed; scroll up to see') - if not wheel_ok: - log.warning('Wheel checks failed') - - return sdist_ok - - -def main(argv=None): - log.warning('pep517.check is deprecated. ' - 'Consider switching to https://pypi.org/project/build/') - - ap = argparse.ArgumentParser() - ap.add_argument( - 'source_dir', - help="A directory containing pyproject.toml") - args = ap.parse_args(argv) - - enable_colourful_output() - - ok = check(args.source_dir) - - if ok: - print(ansi('Checks passed', 'green')) - else: - print(ansi('Checks failed', 'red')) - sys.exit(1) - - -ansi_codes = { - 'reset': '\x1b[0m', - 'bold': '\x1b[1m', - 'red': '\x1b[31m', - 'green': '\x1b[32m', -} - - -def ansi(s, attr): - if os.name != 'nt' and sys.stdout.isatty(): - return ansi_codes[attr] + str(s) + ansi_codes['reset'] - else: - return str(s) - - -if __name__ == '__main__': - main() diff --git a/src/pip/_vendor/pep517/colorlog.py b/src/pip/_vendor/pep517/colorlog.py deleted file mode 100644 index 66310a79a99..00000000000 --- a/src/pip/_vendor/pep517/colorlog.py +++ /dev/null @@ -1,113 +0,0 @@ -"""Nicer log formatting with colours. - -Code copied from Tornado, Apache licensed. -""" -# Copyright 2012 Facebook -# -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -import logging -import sys - -try: - import curses -except ImportError: - curses = None - - -def _stderr_supports_color(): - color = False - if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): - try: - curses.setupterm() - if curses.tigetnum("colors") > 0: - color = True - except Exception: - pass - return color - - -class LogFormatter(logging.Formatter): - """Log formatter with colour support - """ - DEFAULT_COLORS = { - logging.INFO: 2, # Green - logging.WARNING: 3, # Yellow - logging.ERROR: 1, # Red - logging.CRITICAL: 1, - } - - def __init__(self, color=True, datefmt=None): - r""" - :arg bool color: Enables color support. - :arg string fmt: Log message format. - It will be applied to the attributes dict of log records. The - text between ``%(color)s`` and ``%(end_color)s`` will be colored - depending on the level if color support is on. - :arg dict colors: color mappings from logging level to terminal color - code - :arg string datefmt: Datetime format. - Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``. - .. versionchanged:: 3.2 - Added ``fmt`` and ``datefmt`` arguments. - """ - logging.Formatter.__init__(self, datefmt=datefmt) - self._colors = {} - if color and _stderr_supports_color(): - # The curses module has some str/bytes confusion in - # python3. Until version 3.2.3, most methods return - # bytes, but only accept strings. In addition, we want to - # output these strings with the logging module, which - # works with unicode strings. The explicit calls to - # unicode() below are harmless in python2 but will do the - # right conversion in python 3. 
- fg_color = (curses.tigetstr("setaf") or - curses.tigetstr("setf") or "") - - for levelno, code in self.DEFAULT_COLORS.items(): - self._colors[levelno] = str( - curses.tparm(fg_color, code), "ascii") - self._normal = str(curses.tigetstr("sgr0"), "ascii") - - scr = curses.initscr() - self.termwidth = scr.getmaxyx()[1] - curses.endwin() - else: - self._normal = '' - # Default width is usually 80, but too wide is - # worse than too narrow - self.termwidth = 70 - - def formatMessage(self, record): - mlen = len(record.message) - right_text = '{initial}-{name}'.format(initial=record.levelname[0], - name=record.name) - if mlen + len(right_text) < self.termwidth: - space = ' ' * (self.termwidth - (mlen + len(right_text))) - else: - space = ' ' - - if record.levelno in self._colors: - start_color = self._colors[record.levelno] - end_color = self._normal - else: - start_color = end_color = '' - - return record.message + space + start_color + right_text + end_color - - -def enable_colourful_output(level=logging.INFO): - handler = logging.StreamHandler() - handler.setFormatter(LogFormatter()) - logging.root.addHandler(handler) - logging.root.setLevel(level) diff --git a/src/pip/_vendor/pep517/dirtools.py b/src/pip/_vendor/pep517/dirtools.py deleted file mode 100644 index 3eff4d801ba..00000000000 --- a/src/pip/_vendor/pep517/dirtools.py +++ /dev/null @@ -1,19 +0,0 @@ -import io -import os -import zipfile - - -def dir_to_zipfile(root): - """Construct an in-memory zip file for a directory.""" - buffer = io.BytesIO() - zip_file = zipfile.ZipFile(buffer, 'w') - for root, dirs, files in os.walk(root): - for path in dirs: - fs_path = os.path.join(root, path) - rel_path = os.path.relpath(fs_path, root) - zip_file.writestr(rel_path + '/', '') - for path in files: - fs_path = os.path.join(root, path) - rel_path = os.path.relpath(fs_path, root) - zip_file.write(fs_path, rel_path) - return zip_file diff --git a/src/pip/_vendor/pep517/envbuild.py b/src/pip/_vendor/pep517/envbuild.py 
deleted file mode 100644 index c0415c4d738..00000000000 --- a/src/pip/_vendor/pep517/envbuild.py +++ /dev/null @@ -1,170 +0,0 @@ -"""Build wheels/sdists by installing build deps to a temporary environment. -""" - -import logging -import os -import shutil -import sys -from subprocess import check_call -from sysconfig import get_paths -from tempfile import mkdtemp - -from ._compat import tomllib -from .wrappers import LoggerWrapper, Pep517HookCaller - -log = logging.getLogger(__name__) - - -def _load_pyproject(source_dir): - with open( - os.path.join(source_dir, 'pyproject.toml'), - 'rb', - ) as f: - pyproject_data = tomllib.load(f) - buildsys = pyproject_data['build-system'] - return ( - buildsys['requires'], - buildsys['build-backend'], - buildsys.get('backend-path'), - ) - - -class BuildEnvironment: - """Context manager to install build deps in a simple temporary environment - - Based on code I wrote for pip, which is MIT licensed. - """ - # Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file) - # - # Permission is hereby granted, free of charge, to any person obtaining - # a copy of this software and associated documentation files (the - # "Software"), to deal in the Software without restriction, including - # without limitation the rights to use, copy, modify, merge, publish, - # distribute, sublicense, and/or sell copies of the Software, and to - # permit persons to whom the Software is furnished to do so, subject to - # the following conditions: - # - # The above copyright notice and this permission notice shall be - # included in all copies or substantial portions of the Software. - # - # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF - # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - # NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE - # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION - # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION - # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - - path = None - - def __init__(self, cleanup=True): - self._cleanup = cleanup - - def __enter__(self): - self.path = mkdtemp(prefix='pep517-build-env-') - log.info('Temporary build environment: %s', self.path) - - self.save_path = os.environ.get('PATH', None) - self.save_pythonpath = os.environ.get('PYTHONPATH', None) - - install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix' - install_dirs = get_paths(install_scheme, vars={ - 'base': self.path, - 'platbase': self.path, - }) - - scripts = install_dirs['scripts'] - if self.save_path: - os.environ['PATH'] = scripts + os.pathsep + self.save_path - else: - os.environ['PATH'] = scripts + os.pathsep + os.defpath - - if install_dirs['purelib'] == install_dirs['platlib']: - lib_dirs = install_dirs['purelib'] - else: - lib_dirs = install_dirs['purelib'] + os.pathsep + \ - install_dirs['platlib'] - if self.save_pythonpath: - os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \ - self.save_pythonpath - else: - os.environ['PYTHONPATH'] = lib_dirs - - return self - - def pip_install(self, reqs): - """Install dependencies into this env by calling pip in a subprocess""" - if not reqs: - return - log.info('Calling pip to install %s', reqs) - cmd = [ - sys.executable, '-m', 'pip', 'install', '--ignore-installed', - '--prefix', self.path] + list(reqs) - check_call( - cmd, - stdout=LoggerWrapper(log, logging.INFO), - stderr=LoggerWrapper(log, logging.ERROR), - ) - - def __exit__(self, exc_type, exc_val, exc_tb): - needs_cleanup = ( - self._cleanup and - self.path is not None and - os.path.isdir(self.path) - ) - if needs_cleanup: - shutil.rmtree(self.path) - - if self.save_path is None: - os.environ.pop('PATH', None) - else: - os.environ['PATH'] = 
self.save_path - - if self.save_pythonpath is None: - os.environ.pop('PYTHONPATH', None) - else: - os.environ['PYTHONPATH'] = self.save_pythonpath - - -def build_wheel(source_dir, wheel_dir, config_settings=None): - """Build a wheel from a source directory using PEP 517 hooks. - - :param str source_dir: Source directory containing pyproject.toml - :param str wheel_dir: Target directory to create wheel in - :param dict config_settings: Options to pass to build backend - - This is a blocking function which will run pip in a subprocess to install - build requirements. - """ - if config_settings is None: - config_settings = {} - requires, backend, backend_path = _load_pyproject(source_dir) - hooks = Pep517HookCaller(source_dir, backend, backend_path) - - with BuildEnvironment() as env: - env.pip_install(requires) - reqs = hooks.get_requires_for_build_wheel(config_settings) - env.pip_install(reqs) - return hooks.build_wheel(wheel_dir, config_settings) - - -def build_sdist(source_dir, sdist_dir, config_settings=None): - """Build an sdist from a source directory using PEP 517 hooks. - - :param str source_dir: Source directory containing pyproject.toml - :param str sdist_dir: Target directory to place sdist in - :param dict config_settings: Options to pass to build backend - - This is a blocking function which will run pip in a subprocess to install - build requirements. 
- """ - if config_settings is None: - config_settings = {} - requires, backend, backend_path = _load_pyproject(source_dir) - hooks = Pep517HookCaller(source_dir, backend, backend_path) - - with BuildEnvironment() as env: - env.pip_install(requires) - reqs = hooks.get_requires_for_build_sdist(config_settings) - env.pip_install(reqs) - return hooks.build_sdist(sdist_dir, config_settings) diff --git a/src/pip/_vendor/pep517/in_process/__init__.py b/src/pip/_vendor/pep517/in_process/__init__.py deleted file mode 100644 index 281a356cfe2..00000000000 --- a/src/pip/_vendor/pep517/in_process/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -"""This is a subpackage because the directory is on sys.path for _in_process.py - -The subpackage should stay as empty as possible to avoid shadowing modules that -the backend might import. -""" -from contextlib import contextmanager -from os.path import abspath, dirname -from os.path import join as pjoin - -try: - import importlib.resources as resources - try: - resources.files - except AttributeError: - # Python 3.8 compatibility - def _in_proc_script_path(): - return resources.path(__package__, '_in_process.py') - else: - def _in_proc_script_path(): - return resources.as_file( - resources.files(__package__).joinpath('_in_process.py')) -except ImportError: - # Python 3.6 compatibility - @contextmanager - def _in_proc_script_path(): - yield pjoin(dirname(abspath(__file__)), '_in_process.py') diff --git a/src/pip/_vendor/pep517/meta.py b/src/pip/_vendor/pep517/meta.py deleted file mode 100644 index 4afc3c047a7..00000000000 --- a/src/pip/_vendor/pep517/meta.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Build metadata for a project using PEP 517 hooks. 
-""" -import argparse -import functools -import logging -import os -import shutil -import tempfile - -try: - import importlib.metadata as imp_meta -except ImportError: - import importlib_metadata as imp_meta - -try: - from zipfile import Path -except ImportError: - from zipp import Path - -from .build import compat_system, load_system, validate_system -from .dirtools import dir_to_zipfile -from .envbuild import BuildEnvironment -from .wrappers import Pep517HookCaller, quiet_subprocess_runner - -log = logging.getLogger(__name__) - - -def _prep_meta(hooks, env, dest): - reqs = hooks.get_requires_for_build_wheel({}) - log.info('Got build requires: %s', reqs) - - env.pip_install(reqs) - log.info('Installed dynamic build dependencies') - - with tempfile.TemporaryDirectory() as td: - log.info('Trying to build metadata in %s', td) - filename = hooks.prepare_metadata_for_build_wheel(td, {}) - source = os.path.join(td, filename) - shutil.move(source, os.path.join(dest, os.path.basename(filename))) - - -def build(source_dir='.', dest=None, system=None): - system = system or load_system(source_dir) - dest = os.path.join(source_dir, dest or 'dist') - os.makedirs(dest, exist_ok=True) - validate_system(system) - hooks = Pep517HookCaller( - source_dir, system['build-backend'], system.get('backend-path') - ) - - with hooks.subprocess_runner(quiet_subprocess_runner): - with BuildEnvironment() as env: - env.pip_install(system['requires']) - _prep_meta(hooks, env, dest) - - -def build_as_zip(builder=build): - with tempfile.TemporaryDirectory() as out_dir: - builder(dest=out_dir) - return dir_to_zipfile(out_dir) - - -def load(root): - """ - Given a source directory (root) of a package, - return an importlib.metadata.Distribution object - with metadata build from that package. 
- """ - root = os.path.expanduser(root) - system = compat_system(root) - builder = functools.partial(build, source_dir=root, system=system) - path = Path(build_as_zip(builder)) - return imp_meta.PathDistribution(path) - - -parser = argparse.ArgumentParser() -parser.add_argument( - 'source_dir', - help="A directory containing pyproject.toml", -) -parser.add_argument( - '--out-dir', '-o', - help="Destination in which to save the builds relative to source dir", -) - - -def main(): - args = parser.parse_args() - build(args.source_dir, args.out_dir) - - -if __name__ == '__main__': - main() diff --git a/src/pip/_vendor/pyproject_hooks.pyi b/src/pip/_vendor/pyproject_hooks.pyi new file mode 100644 index 00000000000..e68245481d5 --- /dev/null +++ b/src/pip/_vendor/pyproject_hooks.pyi @@ -0,0 +1 @@ +from pyproject_hooks import * \ No newline at end of file diff --git a/src/pip/_vendor/pep517/LICENSE b/src/pip/_vendor/pyproject_hooks/LICENSE similarity index 100% rename from src/pip/_vendor/pep517/LICENSE rename to src/pip/_vendor/pyproject_hooks/LICENSE diff --git a/src/pip/_vendor/pyproject_hooks/__init__.py b/src/pip/_vendor/pyproject_hooks/__init__.py new file mode 100644 index 00000000000..ddfcf7f72f3 --- /dev/null +++ b/src/pip/_vendor/pyproject_hooks/__init__.py @@ -0,0 +1,23 @@ +"""Wrappers to call pyproject.toml-based build backend hooks. 
+""" + +from ._impl import ( + BackendInvalid, + BackendUnavailable, + BuildBackendHookCaller, + HookMissing, + UnsupportedOperation, + default_subprocess_runner, + quiet_subprocess_runner, +) + +__version__ = '1.0.0' +__all__ = [ + 'BackendUnavailable', + 'BackendInvalid', + 'HookMissing', + 'UnsupportedOperation', + 'default_subprocess_runner', + 'quiet_subprocess_runner', + 'BuildBackendHookCaller', +] diff --git a/src/pip/_vendor/pep517/_compat.py b/src/pip/_vendor/pyproject_hooks/_compat.py similarity index 100% rename from src/pip/_vendor/pep517/_compat.py rename to src/pip/_vendor/pyproject_hooks/_compat.py diff --git a/src/pip/_vendor/pep517/wrappers.py b/src/pip/_vendor/pyproject_hooks/_impl.py similarity index 63% rename from src/pip/_vendor/pep517/wrappers.py rename to src/pip/_vendor/pyproject_hooks/_impl.py index 987a62aaa99..37b0e6531f1 100644 --- a/src/pip/_vendor/pep517/wrappers.py +++ b/src/pip/_vendor/pyproject_hooks/_impl.py @@ -2,23 +2,12 @@ import os import sys import tempfile -import threading from contextlib import contextmanager from os.path import abspath from os.path import join as pjoin from subprocess import STDOUT, check_call, check_output -from .in_process import _in_proc_script_path - -__all__ = [ - 'BackendUnavailable', - 'BackendInvalid', - 'HookMissing', - 'UnsupportedOperation', - 'default_subprocess_runner', - 'quiet_subprocess_runner', - 'Pep517HookCaller', -] +from ._in_process import _in_proc_script_path def write_json(obj, path, **kwargs): @@ -40,13 +29,13 @@ def __init__(self, traceback): class BackendInvalid(Exception): """Will be raised if the backend is invalid.""" def __init__(self, backend_name, backend_path, message): + super().__init__(message) self.backend_name = backend_name self.backend_path = backend_path - self.message = message class HookMissing(Exception): - """Will be raised on missing hooks.""" + """Will be raised on missing hooks (if a fallback can't be used).""" def __init__(self, hook_name): 
super().__init__(hook_name) self.hook_name = hook_name @@ -59,7 +48,10 @@ def __init__(self, traceback): def default_subprocess_runner(cmd, cwd=None, extra_environ=None): - """The default method of calling the wrapper subprocess.""" + """The default method of calling the wrapper subprocess. + + This uses :func:`subprocess.check_call` under the hood. + """ env = os.environ.copy() if extra_environ: env.update(extra_environ) @@ -68,7 +60,10 @@ def default_subprocess_runner(cmd, cwd=None, extra_environ=None): def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None): - """A method of calling the wrapper subprocess while suppressing output.""" + """Call the subprocess while suppressing output. + + This uses :func:`subprocess.check_output` under the hood. + """ env = os.environ.copy() if extra_environ: env.update(extra_environ) @@ -100,26 +95,10 @@ def norm_and_check(source_tree, requested): return abs_requested -class Pep517HookCaller: - """A wrapper around a source directory to be built with a PEP 517 backend. - - :param source_dir: The path to the source directory, containing - pyproject.toml. - :param build_backend: The build backend spec, as per PEP 517, from - pyproject.toml. - :param backend_path: The backend path, as per PEP 517, from pyproject.toml. - :param runner: A callable that invokes the wrapper subprocess. - :param python_executable: The Python executable used to invoke the backend - - The 'runner', if provided, must expect the following: - - - cmd: a list of strings representing the command and arguments to - execute, as would be passed to e.g. 'subprocess.check_call'. - - cwd: a string representing the working directory that must be - used for the subprocess. Corresponds to the provided source_dir. - - extra_environ: a dict mapping environment variable names to values - which must be set for the subprocess execution. +class BuildBackendHookCaller: + """A wrapper to call the build backend hooks for a source directory. 
""" + def __init__( self, source_dir, @@ -128,6 +107,14 @@ def __init__( runner=None, python_executable=None, ): + """ + :param source_dir: The source directory to invoke the build backend for + :param build_backend: The build backend spec + :param backend_path: Additional path entries for the build backend spec + :param runner: The :ref:`subprocess runner ` to use + :param python_executable: + The Python executable used to invoke the build backend + """ if runner is None: runner = default_subprocess_runner @@ -145,8 +132,14 @@ def __init__( @contextmanager def subprocess_runner(self, runner): - """A context manager for temporarily overriding the default subprocess - runner. + """A context manager for temporarily overriding the default + :ref:`subprocess runner `. + + .. code-block:: python + + hook_caller = BuildBackendHookCaller(...) + with hook_caller.subprocess_runner(quiet_subprocess_runner): + ... """ prev = self._subprocess_runner self._subprocess_runner = runner @@ -160,15 +153,15 @@ def _supported_features(self): return self._call_hook('_supported_features', {}) def get_requires_for_build_wheel(self, config_settings=None): - """Identify packages required for building a wheel + """Get additional dependencies required for building a wheel. - Returns a list of dependency specifications, e.g.:: + :returns: A list of :pep:`dependency specifiers <508>`. + :rtype: list[str] - ["wheel >= 0.25", "setuptools"] + .. admonition:: Fallback - This does not include requirements specified in pyproject.toml. - It returns the result of calling the equivalently named hook in a - subprocess. + If the build backend does not defined a hook with this name, an + empty list will be returned. """ return self._call_hook('get_requires_for_build_wheel', { 'config_settings': config_settings @@ -179,12 +172,16 @@ def prepare_metadata_for_build_wheel( _allow_fallback=True): """Prepare a ``*.dist-info`` folder with metadata for this project. - Returns the name of the newly created folder. 
+ :returns: Name of the newly created subfolder within + ``metadata_directory``, containing the metadata. + :rtype: str + + .. admonition:: Fallback - If the build backend defines a hook with this name, it will be called - in a subprocess. If not, the backend will be asked to build a wheel, - and the dist-info extracted from that (unless _allow_fallback is - False). + If the build backend does not define a hook with this name and + ``_allow_fallback`` is truthy, the backend will be asked to build a + wheel via the ``build_wheel`` hook and the dist-info extracted from + that will be returned. """ return self._call_hook('prepare_metadata_for_build_wheel', { 'metadata_directory': abspath(metadata_directory), @@ -197,12 +194,15 @@ def build_wheel( metadata_directory=None): """Build a wheel from this project. - Returns the name of the newly created file. + :returns: + The name of the newly created wheel within ``wheel_directory``. - In general, this will call the 'build_wheel' hook in the backend. - However, if that was previously called by - 'prepare_metadata_for_build_wheel', and the same metadata_directory is - used, the previously built wheel will be copied to wheel_directory. + .. admonition:: Interaction with fallback + + If the ``build_wheel`` hook was called in the fallback for + :meth:`prepare_metadata_for_build_wheel`, the build backend would + not be invoked. Instead, the previously built wheel will be copied + to ``wheel_directory`` and the name of that file will be returned. """ if metadata_directory is not None: metadata_directory = abspath(metadata_directory) @@ -213,15 +213,15 @@ def build_wheel( }) def get_requires_for_build_editable(self, config_settings=None): - """Identify packages required for building an editable wheel + """Get additional dependencies required for building an editable wheel. - Returns a list of dependency specifications, e.g.:: + :returns: A list of :pep:`dependency specifiers <508>`. 
+        :rtype: list[str]
 
-            ["wheel >= 0.25", "setuptools"]
+        .. admonition:: Fallback
 
-        This does not include requirements specified in pyproject.toml.
-        It returns the result of calling the equivalently named hook in a
-        subprocess.
+            If the build backend does not define a hook with this name, an
+            empty list will be returned.
         """
         return self._call_hook('get_requires_for_build_editable', {
             'config_settings': config_settings
@@ -232,12 +232,16 @@ def prepare_metadata_for_build_editable(
             _allow_fallback=True):
         """Prepare a ``*.dist-info`` folder with metadata for this project.
 
-        Returns the name of the newly created folder.
+        :returns: Name of the newly created subfolder within
+            ``metadata_directory``, containing the metadata.
+        :rtype: str
+
+        .. admonition:: Fallback
 
-        If the build backend defines a hook with this name, it will be called
-        in a subprocess. If not, the backend will be asked to build an editable
-        wheel, and the dist-info extracted from that (unless _allow_fallback is
-        False).
+            If the build backend does not define a hook with this name and
+            ``_allow_fallback`` is truthy, the backend will be asked to build a
+            wheel via the ``build_editable`` hook and the dist-info
+            extracted from that will be returned.
         """
         return self._call_hook('prepare_metadata_for_build_editable', {
             'metadata_directory': abspath(metadata_directory),
@@ -250,12 +254,16 @@ def build_editable(
             metadata_directory=None):
         """Build an editable wheel from this project.
 
-        Returns the name of the newly created file.
+        :returns:
+            The name of the newly created wheel within ``wheel_directory``.
 
-        In general, this will call the 'build_editable' hook in the backend.
-        However, if that was previously called by
-        'prepare_metadata_for_build_editable', and the same metadata_directory
-        is used, the previously built wheel will be copied to wheel_directory.
+        .. admonition:: Interaction with fallback
+
+            If the ``build_editable`` hook was called in the fallback for
+            :meth:`prepare_metadata_for_build_editable`, the build backend
+            would not be invoked. Instead, the previously built wheel will be
+            copied to ``wheel_directory`` and the name of that file will be
+            returned.
         """
         if metadata_directory is not None:
             metadata_directory = abspath(metadata_directory)
@@ -266,15 +274,10 @@ def build_editable(
         })
 
     def get_requires_for_build_sdist(self, config_settings=None):
-        """Identify packages required for building a wheel
-
-        Returns a list of dependency specifications, e.g.::
+        """Get additional dependencies required for building an sdist.
 
-            ["setuptools >= 26"]
-
-        This does not include requirements specified in pyproject.toml.
-        It returns the result of calling the equivalently named hook in a
-        subprocess.
+        :returns: A list of :pep:`dependency specifiers <508>`.
+        :rtype: list[str]
         """
         return self._call_hook('get_requires_for_build_sdist', {
            'config_settings': config_settings
@@ -283,9 +286,8 @@ def get_requires_for_build_sdist(self, config_settings=None):
 
     def build_sdist(self, sdist_directory, config_settings=None):
         """Build an sdist from this project.
 
-        Returns the name of the newly created file.
-
-        This calls the 'build_sdist' backend hook in a subprocess.
+        :returns:
+            The name of the newly created sdist within ``sdist_directory``.
         """
         return self._call_hook('build_sdist', {
             'sdist_directory': abspath(sdist_directory),
@@ -326,37 +328,3 @@ def _call_hook(self, hook_name, kwargs):
             if data.get('hook_missing'):
                 raise HookMissing(data.get('missing_hook_name') or hook_name)
         return data['return_val']
-
-
-class LoggerWrapper(threading.Thread):
-    """
-    Read messages from a pipe and redirect them
-    to a logger (see python's logging module).
- """ - - def __init__(self, logger, level): - threading.Thread.__init__(self) - self.daemon = True - - self.logger = logger - self.level = level - - # create the pipe and reader - self.fd_read, self.fd_write = os.pipe() - self.reader = os.fdopen(self.fd_read) - - self.start() - - def fileno(self): - return self.fd_write - - @staticmethod - def remove_newline(msg): - return msg[:-1] if msg.endswith(os.linesep) else msg - - def run(self): - for line in self.reader: - self._write(self.remove_newline(line)) - - def _write(self, message): - self.logger.log(self.level, message) diff --git a/src/pip/_vendor/pyproject_hooks/_in_process/__init__.py b/src/pip/_vendor/pyproject_hooks/_in_process/__init__.py new file mode 100644 index 00000000000..917fa065b3c --- /dev/null +++ b/src/pip/_vendor/pyproject_hooks/_in_process/__init__.py @@ -0,0 +1,18 @@ +"""This is a subpackage because the directory is on sys.path for _in_process.py + +The subpackage should stay as empty as possible to avoid shadowing modules that +the backend might import. 
+""" + +import importlib.resources as resources + +try: + resources.files +except AttributeError: + # Python 3.8 compatibility + def _in_proc_script_path(): + return resources.path(__package__, '_in_process.py') +else: + def _in_proc_script_path(): + return resources.as_file( + resources.files(__package__).joinpath('_in_process.py')) diff --git a/src/pip/_vendor/pep517/in_process/_in_process.py b/src/pip/_vendor/pyproject_hooks/_in_process/_in_process.py similarity index 96% rename from src/pip/_vendor/pep517/in_process/_in_process.py rename to src/pip/_vendor/pyproject_hooks/_in_process/_in_process.py index ae4cf9e9cee..ee511ff20d7 100644 --- a/src/pip/_vendor/pep517/in_process/_in_process.py +++ b/src/pip/_vendor/pyproject_hooks/_in_process/_in_process.py @@ -145,11 +145,13 @@ def prepare_metadata_for_build_wheel( except AttributeError: if not _allow_fallback: raise HookMissing() - whl_basename = backend.build_wheel(metadata_directory, config_settings) - return _get_wheel_metadata_from_wheel(whl_basename, metadata_directory, - config_settings) else: return hook(metadata_directory, config_settings) + # fallback to build_wheel outside the try block to avoid exception chaining + # which can be confusing to users and is not relevant + whl_basename = backend.build_wheel(metadata_directory, config_settings) + return _get_wheel_metadata_from_wheel(whl_basename, metadata_directory, + config_settings) def prepare_metadata_for_build_editable( diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index a34277b8c54..1bd03c9c7e6 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -4,9 +4,9 @@ distlib==0.3.6 distro==1.8.0 msgpack==1.0.4 packaging==21.3 -pep517==0.13.0 platformdirs==2.5.3 pyparsing==3.0.9 +pyproject-hooks==1.0.0 requests==2.28.1 certifi==2022.09.24 chardet==5.0.0 From 4162f28cad4a6742cb15006a5a8f2f253f7f779e Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 28 Dec 2022 20:13:51 +0000 Subject: [PATCH 228/730] Deal 
with dashed names in vendored modules, in `pip debug` --- src/pip/_internal/commands/debug.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index 6fad1fe8943..2a3e7d298f3 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -48,7 +48,7 @@ def create_vendor_txt_map() -> Dict[str, str]: def get_module_from_module_name(module_name: str) -> ModuleType: # Module name can be uppercase in vendor.txt for some reason... - module_name = module_name.lower() + module_name = module_name.lower().replace("-", "_") # PATCH: setuptools is actually only pkg_resources. if module_name == "setuptools": module_name = "pkg_resources" From 668ea3f6f383749ee9a7a4fdd56246851820e1bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 28 Dec 2022 12:13:16 +0100 Subject: [PATCH 229/730] Clarify the role of the egg URL fragment --- docs/html/topics/vcs-support.md | 24 +++++++++++++++--------- news/11676.doc.rst | 2 ++ 2 files changed, 17 insertions(+), 9 deletions(-) create mode 100644 news/11676.doc.rst diff --git a/docs/html/topics/vcs-support.md b/docs/html/topics/vcs-support.md index 4c69c4e0228..faa0633cc7f 100644 --- a/docs/html/topics/vcs-support.md +++ b/docs/html/topics/vcs-support.md @@ -132,17 +132,17 @@ take on the VCS requirement (not the commit itself). ## URL fragments -pip looks at 2 fragments for VCS URLs: +pip looks at the `subdirectory` fragments of VCS URLs for specifying the path to the +Python package, when it is not in the root of the VCS directory. eg: `pkg_dir`. -- `egg`: For specifying the "project name" for use in pip's dependency - resolution logic. e.g.: `egg=project_name` +pip also looks at the `egg` fragment specifying the "project name". In practice the +`egg` fragment is only required to help pip determine the VCS clone location in editable +mode. 
In all other circumstances, the `egg` fragment is not necessary and its use is +discouraged. - The `egg` fragment **should** be a bare - [PEP 508](https://peps.python.org/pep-0508/) project name. Anything else - is not guaranteed to work. - -- `subdirectory`: For specifying the path to the Python package, when it is not - in the root of the VCS directory. e.g.: `pkg_dir` +The `egg` fragment **should** be a bare +[PEP 508](https://peps.python.org/pep-0508/) project name. Anything else +is not guaranteed to work. ````{admonition} Example If your repository layout is: @@ -158,6 +158,12 @@ some_other_file Then, to install from this repository, the syntax would be: +```{pip-cli} +$ pip install "pkg @ vcs+protocol://repo_url/#subdirectory=pkg_dir" +``` + +or: + ```{pip-cli} $ pip install -e "vcs+protocol://repo_url/#egg=pkg&subdirectory=pkg_dir" ``` diff --git a/news/11676.doc.rst b/news/11676.doc.rst new file mode 100644 index 00000000000..d3f9bd88bed --- /dev/null +++ b/news/11676.doc.rst @@ -0,0 +1,2 @@ +Clarify that the egg URL fragment is only necessary for editable VCS installs, and +otherwise not necessary anymore. From fea8ae9f9ffdabb15d81cbca5636f7ab82fa419c Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 25 Nov 2022 13:36:04 +0000 Subject: [PATCH 230/730] Enable managing RTD redirects in-tree This is designed as a script and a data file (in YAML format), and meant to manage the RTD redirects with a version controlled file. This makes it possible for pull requests to this repository to update the redirects for this project's documentation (eg: for better error urls) and for this evolution to be tracked as a part of version control history. 
--- .pre-commit-config.yaml | 1 + .readthedocs-custom-redirects.yml | 15 +++ MANIFEST.in | 1 + tools/update-rtd-redirects.py | 155 ++++++++++++++++++++++++++++++ 4 files changed, 172 insertions(+) create mode 100644 .readthedocs-custom-redirects.yml create mode 100644 tools/update-rtd-redirects.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 20a85438c5a..a49016eed3c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -52,6 +52,7 @@ repos: 'types-setuptools==57.4.14', 'types-freezegun==1.1.9', 'types-six==1.16.15', + 'types-pyyaml==6.0.12.2', ] - repo: https://github.com/pre-commit/pygrep-hooks diff --git a/.readthedocs-custom-redirects.yml b/.readthedocs-custom-redirects.yml new file mode 100644 index 00000000000..46c1f819c88 --- /dev/null +++ b/.readthedocs-custom-redirects.yml @@ -0,0 +1,15 @@ +# This file is read by tools/update-rtd-redirects.py. +# It is related to Read the Docs, but is not a file processed by the platform. + +/dev/news-entry-failure: >- + https://pip.pypa.io/en/stable/development/contributing/#news-entries +/errors/resolution-impossible: >- + https://pip.pypa.io/en/latest/topics/dependency-resolution/#dealing-with-dependency-conflicts +/surveys/backtracking: >- + https://forms.gle/LkZP95S4CfqBAU1N6 +/warnings/backtracking: >- + https://pip.pypa.io/en/stable/topics/dependency-resolution/#possible-ways-to-reduce-backtracking +/warnings/enable-long-paths: >- + https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=cmd#enable-long-paths-in-windows-10-version-1607-and-later +/warnings/venv: >- + https://docs.python.org/3/tutorial/venv.html diff --git a/MANIFEST.in b/MANIFEST.in index ff3825f65c2..e0fba8222af 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -18,6 +18,7 @@ exclude .mailmap exclude .appveyor.yml exclude .readthedocs.yml exclude .pre-commit-config.yaml +exclude .readthedocs-custom-redirects.yml exclude tox.ini exclude noxfile.py diff --git 
a/tools/update-rtd-redirects.py b/tools/update-rtd-redirects.py new file mode 100644 index 00000000000..8515c026cb7 --- /dev/null +++ b/tools/update-rtd-redirects.py @@ -0,0 +1,155 @@ +"""Update the 'exact' redirects on Read the Docs to match an in-tree file's contents. + +Relevant API reference: https://docs.readthedocs.io/en/stable/api/v3.html#redirects +""" +import operator +import os +import sys +from pathlib import Path + +import httpx +import rich +import yaml + +try: + _TOKEN = os.environ["RTD_API_TOKEN"] +except KeyError: + rich.print( + "[bold]error[/]: [red]No API token provided. Please set `RTD_API_TOKEN`.[/]", + file=sys.stderr, + ) + sys.exit(1) + +RTD_API_HEADERS = {"Authorization": f"token {_TOKEN}"} +RTD_API_BASE_URL = "https://readthedocs.org/api/v3/projects/pip/" +REPO_ROOT = Path(__file__).resolve().parent.parent + + +# -------------------------------------------------------------------------------------- +# Helpers +# -------------------------------------------------------------------------------------- +def next_step(msg: str) -> None: + rich.print(f"> [blue]{msg}[/]") + + +def log_response(response: httpx.Response) -> None: + request = response.request + rich.print(f"[bold magenta]{request.method}[/] {request.url} -> {response}") + + +def get_rtd_api() -> httpx.Client: + return httpx.Client( + headers=RTD_API_HEADERS, + base_url=RTD_API_BASE_URL, + event_hooks={"response": [log_response]}, + ) + + +# -------------------------------------------------------------------------------------- +# Actual logic +# -------------------------------------------------------------------------------------- +next_step("Loading local redirects from the yaml file.") + +with open(REPO_ROOT / ".readthedocs-custom-redirects.yml") as f: + local_redirects = yaml.safe_load(f) + +rich.print("Loaded local redirects!") +for src, dst in sorted(local_redirects.items()): + rich.print(f" [yellow]{src}[/] --> {dst}") +rich.print(f"{len(local_redirects)} entries.") + + 
+next_step("Fetch redirects configured on RTD.") + +with get_rtd_api() as rtd_api: + response = rtd_api.get("redirects/") + response.raise_for_status() + + rtd_redirects = response.json() + +for redirect in sorted( + rtd_redirects["results"], key=operator.itemgetter("type", "from_url", "to_url") +): + if redirect["type"] != "exact": + rich.print(f" [magenta]{redirect['type']}[/]") + continue + + pk = redirect["pk"] + src = redirect["from_url"] + dst = redirect["to_url"] + rich.print(f" [yellow]{src}[/] -({pk:^5})-> {dst}") + +rich.print(f"{rtd_redirects['count']} entries.") + + +next_step("Compare and determine modifications.") + +redirects_to_remove: list[int] = [] +redirects_to_add: dict[str, str] = {} + +for redirect in rtd_redirects["results"]: + if redirect["type"] != "exact": + continue + + rtd_src = redirect["from_url"] + rtd_dst = redirect["to_url"] + redirect_id = redirect["pk"] + + if rtd_src not in local_redirects: + redirects_to_remove.append(redirect_id) + continue + + local_dst = local_redirects[rtd_src] + if local_dst != rtd_dst: + redirects_to_remove.append(redirect_id) + redirects_to_add[rtd_src] = local_dst + + del local_redirects[rtd_src] + +for src, dst in sorted(local_redirects.items()): + redirects_to_add[src] = dst + del local_redirects[src] + +assert not local_redirects + +if not redirects_to_remove: + rich.print("Nothing to remove.") +else: + rich.print(f"To remove: ({len(redirects_to_remove)} entries)") + for redirect_id in redirects_to_remove: + rich.print(" ", redirect_id) + +if not redirects_to_add: + rich.print("Nothing to add.") +else: + rich.print(f"To add: ({len(redirects_to_add)} entries)") + for src, dst in redirects_to_add.items(): + rich.print(f" {src} --> {dst}") + + +next_step("Update the RTD redirects.") + +if not (redirects_to_add or redirects_to_remove): + rich.print("[green]Nothing to do![/]") + sys.exit(0) + +exit_code = 0 +with get_rtd_api() as rtd_api: + for redirect_id in redirects_to_remove: + response = 
rtd_api.delete(f"redirects/{redirect_id}/") + response.raise_for_status() + if response.status_code != 204: + rich.print("[red]This might not have been removed correctly.[/]") + exit_code = 1 + + for src, dst in redirects_to_add.items(): + response = rtd_api.post( + "redirects/", + json={"from_url": src, "to_url": dst, "type": "exact"}, + ) + response.raise_for_status() + if response.status_code != 201: + rich.print("[red]This might not have been added correctly.[/]") + exit_code = 1 + +sys.exit(exit_code) From 8328135d934d9136756639606632ab70625242ae Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 25 Nov 2022 13:47:05 +0000 Subject: [PATCH 231/730] Add GitHub action for RTD redirect updates This makes it possible for pip's documentation's redirects to be automatically synchronised with the `main` branch. --- .github/workflows/update-rtd-redirects.yml | 27 ++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 .github/workflows/update-rtd-redirects.yml diff --git a/.github/workflows/update-rtd-redirects.yml b/.github/workflows/update-rtd-redirects.yml new file mode 100644 index 00000000000..5ac9c63130e --- /dev/null +++ b/.github/workflows/update-rtd-redirects.yml @@ -0,0 +1,27 @@ +name: Update documentation redirects + +on: + push: + branches: [main] + schedule: + - cron: 0 0 * * MON # Run every Monday at 00:00 UTC + +env: + FORCE_COLOR: "1" + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} + cancel-in-progress: true + +jobs: + update-rtd-redirects: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: "3.11" + - run: pip install httpx requests pyyaml + - run: python tools/update-rtd-redirects.py + env: + RTD_API_TOKEN: ${{ secrets.RTD_API_TOKEN }} From 5427bc56b554923b2ef7d3e3a9d36184940630e8 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 31 Dec 2022 21:02:45 +0000 Subject: [PATCH 232/730] Fix the 
dependencies as installed for the RTD update script This ensures that it is updated correctly. --- .github/workflows/update-rtd-redirects.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/update-rtd-redirects.yml b/.github/workflows/update-rtd-redirects.yml index 5ac9c63130e..8a300d06fd9 100644 --- a/.github/workflows/update-rtd-redirects.yml +++ b/.github/workflows/update-rtd-redirects.yml @@ -21,7 +21,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.11" - - run: pip install httpx requests pyyaml + - run: pip install httpx pyyaml rich - run: python tools/update-rtd-redirects.py env: RTD_API_TOKEN: ${{ secrets.RTD_API_TOKEN }} From 450b361d21fe8d9d44d7b14762399d74553ef276 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 1 Jan 2023 11:07:34 +0000 Subject: [PATCH 233/730] Add an environment containing the secret token This ensures that we only expose the token to this one job on `main`. --- .github/workflows/update-rtd-redirects.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/update-rtd-redirects.yml b/.github/workflows/update-rtd-redirects.yml index 8a300d06fd9..8259b6c0b6a 100644 --- a/.github/workflows/update-rtd-redirects.yml +++ b/.github/workflows/update-rtd-redirects.yml @@ -16,6 +16,7 @@ concurrency: jobs: update-rtd-redirects: runs-on: ubuntu-latest + environment: RTD Deploys steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 From 66e8d70dc76b3afab1d399452a82007ca1d0d446 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 1 Jan 2023 18:47:30 +0000 Subject: [PATCH 234/730] Update our RTD redirects to use `latest` for dev, `stable` for errors --- .readthedocs-custom-redirects.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.readthedocs-custom-redirects.yml b/.readthedocs-custom-redirects.yml index 46c1f819c88..d0c072fbf96 100644 --- a/.readthedocs-custom-redirects.yml +++ b/.readthedocs-custom-redirects.yml @@ -2,9 +2,9 @@ # It 
is related to Read the Docs, but is not a file processed by the platform. /dev/news-entry-failure: >- - https://pip.pypa.io/en/stable/development/contributing/#news-entries + https://pip.pypa.io/en/latest/development/contributing/#news-entries /errors/resolution-impossible: >- - https://pip.pypa.io/en/latest/topics/dependency-resolution/#dealing-with-dependency-conflicts + https://pip.pypa.io/en/stable/topics/dependency-resolution/#dealing-with-dependency-conflicts /surveys/backtracking: >- https://forms.gle/LkZP95S4CfqBAU1N6 /warnings/backtracking: >- From 69cb3f861717c3d1cdd6b8a88482170d468cdb1f Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 16 Dec 2022 17:23:53 +0800 Subject: [PATCH 235/730] Check EXTERNALLY-MANAGED in install and uninstall This implements the PEP 668 logic to 'pip install' and 'pip uninstall'. Are there any other commands that may need it? This implementation disables the check is any of --prefix, --home, or --target is provided, since those can indicate the command does not actually install into the environment. Note that it is still possible the command is still modifying the environment, but we don't have a way to stop the user *that* determined to break the environment anyway (they can always just use those flags in a virtual environment). Also not sure how best this can be tested. --- news/11381.feature.rst | 3 ++ src/pip/_internal/commands/install.py | 16 +++++++ src/pip/_internal/commands/uninstall.py | 9 +++- src/pip/_internal/utils/misc.py | 56 +++++++++++++++++++++++++ 4 files changed, 83 insertions(+), 1 deletion(-) create mode 100644 news/11381.feature.rst diff --git a/news/11381.feature.rst b/news/11381.feature.rst new file mode 100644 index 00000000000..3df9877b476 --- /dev/null +++ b/news/11381.feature.rst @@ -0,0 +1,3 @@ +Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in PEP 668. 
+This allows a downstream Python distributor to prevent users from using pip to +modify the externally managed environment. diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index e081c27d2d2..b7bd81c4c0b 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -42,6 +42,7 @@ from pip._internal.utils.logging import getLogger from pip._internal.utils.misc import ( ensure_dir, + get_externally_managed_error, get_pip_version, protect_pip_from_modification_on_windows, write_output, @@ -284,6 +285,21 @@ def run(self, options: Values, args: List[str]) -> int: if options.use_user_site and options.target_dir is not None: raise CommandError("Can not combine '--user' and '--target'") + # Check whether the environment we're installing into is externally + # managed, as specified in PEP 668. Specifying --root, --target, or + # --prefix disables the check, since there's no reliable way to locate + # the EXTERNALLY-MANAGED file for those cases. 
+ installing_into_current_environment = ( + not options.dry_run + and options.root_path is None + and options.target_dir is None + and options.prefix_path is None + ) + if installing_into_current_environment: + externally_managed_error = get_externally_managed_error() + if externally_managed_error is not None: + raise InstallationError(externally_managed_error) + upgrade_strategy = "to-satisfy-only" if options.upgrade: upgrade_strategy = options.upgrade_strategy diff --git a/src/pip/_internal/commands/uninstall.py b/src/pip/_internal/commands/uninstall.py index dea8077e7f5..fa96a97681c 100644 --- a/src/pip/_internal/commands/uninstall.py +++ b/src/pip/_internal/commands/uninstall.py @@ -14,7 +14,10 @@ install_req_from_line, install_req_from_parsed_requirement, ) -from pip._internal.utils.misc import protect_pip_from_modification_on_windows +from pip._internal.utils.misc import ( + get_externally_managed_error, + protect_pip_from_modification_on_windows, +) logger = logging.getLogger(__name__) @@ -90,6 +93,10 @@ def run(self, options: Values, args: List[str]) -> int: f'"pip help {self.name}")' ) + externally_managed_error = get_externally_managed_error() + if externally_managed_error is not None: + raise InstallationError(externally_managed_error) + protect_pip_from_modification_on_windows( modifying_pip="pip" in reqs_to_uninstall ) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index a3b04de9f67..5d2000de247 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -1,17 +1,20 @@ # The following comment should be removed at some point in the future. 
# mypy: strict-optional=False +import configparser import contextlib import errno import getpass import hashlib import io +import locale import logging import os import posixpath import shutil import stat import sys +import sysconfig import urllib.parse from io import StringIO from itertools import filterfalse, tee, zip_longest @@ -57,6 +60,7 @@ "captured_stdout", "ensure_dir", "remove_auth_from_url", + "get_externally_managed_error", "ConfiguredBuildBackendHookCaller", ] @@ -581,6 +585,58 @@ def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None: ) +_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\ +The Python environment under {sys.prefix} is managed externally, and may not be +manipulated by the user. Please use specific tooling from the distributor of +the Python installation to interact with this environment instead. +""" + + +def _iter_externally_managed_error_keys() -> Iterator[str]: + lang, _ = locale.getlocale(locale.LC_MESSAGES) + if lang is not None: + yield f"Error-{lang}" + for sep in ("-", "_"): + before, found, _ = lang.partition(sep) + if not found: + continue + yield f"Error-{before}" + yield "Error" + + +def get_externally_managed_error() -> Optional[str]: + """Get an error message from the EXTERNALLY-MANAGED config file. + + This checks whether the current environment pip is running in is externally + managed. If the EXTERNALLY-MANAGED file is found, the vendor-provided error + message is read and returned (if available; a default message is used + otherwise), as specified in `PEP 668`_. + + If the current environment is *not* externally managed, *None* is returned. + + .. 
_`PEP 668`: https://peps.python.org/pep-0668/ + """ + if running_under_virtualenv(): + return None + marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED") + if not os.path.isfile(marker): + return None + try: + parser = configparser.ConfigParser(interpolation=None) + parser.read(marker, encoding="utf-8") + except (OSError, UnicodeDecodeError) as e: + logger.warning("Ignoring %s due to error %s", marker, e) + return _DEFAULT_EXTERNALLY_MANAGED_ERROR + try: + section = parser["externally-managed"] + except KeyError: + return _DEFAULT_EXTERNALLY_MANAGED_ERROR + for key in _iter_externally_managed_error_keys(): + with contextlib.suppress(KeyError): + return section[key] + return _DEFAULT_EXTERNALLY_MANAGED_ERROR + + def is_console_interactive() -> bool: """Is this console interactive?""" return sys.stdin is not None and sys.stdin.isatty() From e27a8198833d2f62022da4393be0b5b2396ad7ff Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 19 Dec 2022 16:36:31 +0800 Subject: [PATCH 236/730] Use ExternallyManagedEnvironment to show error This moves most of the displaying logic into the exception class so it can better leverage DiagnosticPipError and Rich functionalities. 
--- src/pip/_internal/commands/install.py | 6 +-- src/pip/_internal/commands/uninstall.py | 6 +-- src/pip/_internal/exceptions.py | 64 ++++++++++++++++++++++++- src/pip/_internal/utils/misc.py | 62 ++++++------------------ 4 files changed, 82 insertions(+), 56 deletions(-) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index b7bd81c4c0b..311c48a969f 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -41,8 +41,8 @@ from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.logging import getLogger from pip._internal.utils.misc import ( + check_externally_managed, ensure_dir, - get_externally_managed_error, get_pip_version, protect_pip_from_modification_on_windows, write_output, @@ -296,9 +296,7 @@ def run(self, options: Values, args: List[str]) -> int: and options.prefix_path is None ) if installing_into_current_environment: - externally_managed_error = get_externally_managed_error() - if externally_managed_error is not None: - raise InstallationError(externally_managed_error) + check_externally_managed() upgrade_strategy = "to-satisfy-only" if options.upgrade: diff --git a/src/pip/_internal/commands/uninstall.py b/src/pip/_internal/commands/uninstall.py index fa96a97681c..e5a4c8e10d4 100644 --- a/src/pip/_internal/commands/uninstall.py +++ b/src/pip/_internal/commands/uninstall.py @@ -15,7 +15,7 @@ install_req_from_parsed_requirement, ) from pip._internal.utils.misc import ( - get_externally_managed_error, + check_externally_managed, protect_pip_from_modification_on_windows, ) @@ -93,9 +93,7 @@ def run(self, options: Values, args: List[str]) -> int: f'"pip help {self.name}")' ) - externally_managed_error = get_externally_managed_error() - if externally_managed_error is not None: - raise InstallationError(externally_managed_error) + check_externally_managed() protect_pip_from_modification_on_windows( modifying_pip="pip" in reqs_to_uninstall diff 
--git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 2ab1f591f12..0aa861d1440 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -6,9 +6,12 @@ """ import configparser +import contextlib +import locale import re +import sys from itertools import chain, groupby, repeat -from typing import TYPE_CHECKING, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union from pip._vendor.requests.models import Request, Response from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult @@ -658,3 +661,62 @@ def __str__(self) -> str: assert self.error is not None message_part = f".\n{self.error}\n" return f"Configuration file {self.reason}{message_part}" + + +_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\ +The Python environment under {sys.prefix} is managed externally, and may not be +manipulated by the user. Please use specific tooling from the distributor of +the Python installation to interact with this environment instead. +""" + + +class ExternallyManagedEnvironment(DiagnosticPipError): + """The current environment is externally managed. + + This is raised when the current environment is externally managed, as + defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked + and displayed when the error is bubbled up to the user. + + :param error: The error message read from ``EXTERNALLY-MANAGED``. + """ + + def __init__(self, error: Optional[str]) -> None: + if error is None: + context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR) + else: + context = Text(error) + super().__init__( + message="This environment is externally managed", + context=context, + note_stmt=( + "If you believe this is a mistake, please contact your " + "Python installation or OS distribution provider." 
+ ), + hint_stmt=Text("See PEP 668 for the detailed specification."), + ) + + @staticmethod + def _iter_externally_managed_error_keys() -> Iterator[str]: + lang, _ = locale.getlocale(locale.LC_MESSAGES) + if lang is not None: + yield f"Error-{lang}" + for sep in ("-", "_"): + before, found, _ = lang.partition(sep) + if not found: + continue + yield f"Error-{before}" + yield "Error" + + @classmethod + def from_config( + cls, + parser: configparser.ConfigParser, + ) -> "ExternallyManagedEnvironment": + try: + section = parser["externally-managed"] + except KeyError: + return cls(None) + for key in cls._iter_externally_managed_error_keys(): + with contextlib.suppress(KeyError): + return cls(section[key]) + return cls(None) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 5d2000de247..fd9acb119b4 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -7,8 +7,6 @@ import getpass import hashlib import io -import locale -import logging import os import posixpath import shutil @@ -41,8 +39,9 @@ from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed from pip import __version__ -from pip._internal.exceptions import CommandError +from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment from pip._internal.locations import get_major_minor_version +from pip._internal.utils._log import VERBOSE, getLogger from pip._internal.utils.compat import WINDOWS from pip._internal.utils.virtualenv import running_under_virtualenv @@ -60,12 +59,12 @@ "captured_stdout", "ensure_dir", "remove_auth_from_url", - "get_externally_managed_error", + "check_externally_managed", "ConfiguredBuildBackendHookCaller", ] -logger = logging.getLogger(__name__) +logger = getLogger(__name__) T = TypeVar("T") ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] @@ -585,56 +584,25 @@ def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None: ) -_DEFAULT_EXTERNALLY_MANAGED_ERROR = 
f"""\ -The Python environment under {sys.prefix} is managed externally, and may not be -manipulated by the user. Please use specific tooling from the distributor of -the Python installation to interact with this environment instead. -""" +def check_externally_managed() -> None: + """Check whether the current environment is externally managed. - -def _iter_externally_managed_error_keys() -> Iterator[str]: - lang, _ = locale.getlocale(locale.LC_MESSAGES) - if lang is not None: - yield f"Error-{lang}" - for sep in ("-", "_"): - before, found, _ = lang.partition(sep) - if not found: - continue - yield f"Error-{before}" - yield "Error" - - -def get_externally_managed_error() -> Optional[str]: - """Get an error message from the EXTERNALLY-MANAGED config file. - - This checks whether the current environment pip is running in is externally - managed. If the EXTERNALLY-MANAGED file is found, the vendor-provided error - message is read and returned (if available; a default message is used - otherwise), as specified in `PEP 668`_. - - If the current environment is *not* externally managed, *None* is returned. - - .. _`PEP 668`: https://peps.python.org/pep-0668/ + If the ``EXTERNALLY-MANAGED`` config file is found, the current environment + is considered externally managed, and an ExternallyManagedEnvironment is + raised. 
""" if running_under_virtualenv(): return None marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED") if not os.path.isfile(marker): - return None + return + parser = configparser.ConfigParser(interpolation=None) try: - parser = configparser.ConfigParser(interpolation=None) parser.read(marker, encoding="utf-8") - except (OSError, UnicodeDecodeError) as e: - logger.warning("Ignoring %s due to error %s", marker, e) - return _DEFAULT_EXTERNALLY_MANAGED_ERROR - try: - section = parser["externally-managed"] - except KeyError: - return _DEFAULT_EXTERNALLY_MANAGED_ERROR - for key in _iter_externally_managed_error_keys(): - with contextlib.suppress(KeyError): - return section[key] - return _DEFAULT_EXTERNALLY_MANAGED_ERROR + except (OSError, UnicodeDecodeError): + exc_info = logger.isEnabledFor(VERBOSE) + logger.warning("Failed to read %s", marker, exc_info=exc_info) + raise ExternallyManagedEnvironment.from_config(parser) def is_console_interactive() -> bool: From 095fd850fd7a09a3f4d85c21d339357edb67ef13 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 29 Dec 2022 14:58:35 +0800 Subject: [PATCH 237/730] Isolate EXTERNALLY-MANAGED parsing logic This makes the parser easier to test. 
--- src/pip/_internal/exceptions.py | 16 ++++++++++++---- src/pip/_internal/utils/misc.py | 16 ++++------------ 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 0aa861d1440..5fed804c78c 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -8,6 +8,7 @@ import configparser import contextlib import locale +import logging import re import sys from itertools import chain, groupby, repeat @@ -25,6 +26,8 @@ from pip._internal.metadata import BaseDistribution from pip._internal.req.req_install import InstallRequirement +logger = logging.getLogger(__name__) + # # Scaffolding @@ -708,10 +711,15 @@ def _iter_externally_managed_error_keys() -> Iterator[str]: yield "Error" @classmethod - def from_config( - cls, - parser: configparser.ConfigParser, - ) -> "ExternallyManagedEnvironment": + def from_config(cls, config: str) -> "ExternallyManagedEnvironment": + parser = configparser.ConfigParser(interpolation=None) + try: + parser.read(config, encoding="utf-8") + except (OSError, UnicodeDecodeError): + from pip._internal.utils._log import VERBOSE + + exc_info = logger.isEnabledFor(VERBOSE) + logger.warning("Failed to read %s", config, exc_info=exc_info) try: section = parser["externally-managed"] except KeyError: diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index fd9acb119b4..baa1ba7eac2 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -1,12 +1,12 @@ # The following comment should be removed at some point in the future. 
# mypy: strict-optional=False -import configparser import contextlib import errno import getpass import hashlib import io +import logging import os import posixpath import shutil @@ -41,7 +41,6 @@ from pip import __version__ from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment from pip._internal.locations import get_major_minor_version -from pip._internal.utils._log import VERBOSE, getLogger from pip._internal.utils.compat import WINDOWS from pip._internal.utils.virtualenv import running_under_virtualenv @@ -63,8 +62,7 @@ "ConfiguredBuildBackendHookCaller", ] - -logger = getLogger(__name__) +logger = logging.getLogger(__name__) T = TypeVar("T") ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] @@ -592,17 +590,11 @@ def check_externally_managed() -> None: raised. """ if running_under_virtualenv(): - return None + return marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED") if not os.path.isfile(marker): return - parser = configparser.ConfigParser(interpolation=None) - try: - parser.read(marker, encoding="utf-8") - except (OSError, UnicodeDecodeError): - exc_info = logger.isEnabledFor(VERBOSE) - logger.warning("Failed to read %s", marker, exc_info=exc_info) - raise ExternallyManagedEnvironment.from_config(parser) + raise ExternallyManagedEnvironment.from_config(marker) def is_console_interactive() -> bool: From 3d1937f420de3b10e140c144aa11fc23f2425b26 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 29 Dec 2022 16:37:51 +0800 Subject: [PATCH 238/730] Add tests for EXTERNALLY-MANAGED parser --- src/pip/_internal/exceptions.py | 23 +++--- tests/unit/test_exceptions.py | 124 +++++++++++++++++++++++++++++++- 2 files changed, 137 insertions(+), 10 deletions(-) diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 5fed804c78c..5e0559c9ac1 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -9,6 +9,7 @@ import contextlib import locale 
import logging +import pathlib import re import sys from itertools import chain, groupby, repeat @@ -683,6 +684,8 @@ class ExternallyManagedEnvironment(DiagnosticPipError): :param error: The error message read from ``EXTERNALLY-MANAGED``. """ + reference = "externally-managed-environment" + def __init__(self, error: Optional[str]) -> None: if error is None: context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR) @@ -711,20 +714,22 @@ def _iter_externally_managed_error_keys() -> Iterator[str]: yield "Error" @classmethod - def from_config(cls, config: str) -> "ExternallyManagedEnvironment": + def from_config( + cls, + config: Union[pathlib.Path, str], + ) -> "ExternallyManagedEnvironment": parser = configparser.ConfigParser(interpolation=None) try: parser.read(config, encoding="utf-8") - except (OSError, UnicodeDecodeError): + section = parser["externally-managed"] + for key in cls._iter_externally_managed_error_keys(): + with contextlib.suppress(KeyError): + return cls(section[key]) + except KeyError: + pass + except (OSError, UnicodeDecodeError, configparser.ParsingError): from pip._internal.utils._log import VERBOSE exc_info = logger.isEnabledFor(VERBOSE) logger.warning("Failed to read %s", config, exc_info=exc_info) - try: - section = parser["externally-managed"] - except KeyError: - return cls(None) - for key in cls._iter_externally_managed_error_keys(): - with contextlib.suppress(KeyError): - return cls(section[key]) return cls(None) diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py index 8f8224dc817..17cccd591e9 100644 --- a/tests/unit/test_exceptions.py +++ b/tests/unit/test_exceptions.py @@ -1,12 +1,17 @@ """Tests the presentation style of exceptions.""" import io +import locale +import logging +import pathlib +import sys import textwrap +from typing import Optional, Tuple import pytest from pip._vendor import rich -from pip._internal.exceptions import DiagnosticPipError +from pip._internal.exceptions import DiagnosticPipError, 
ExternallyManagedEnvironment class TestDiagnosticPipErrorCreation: @@ -472,3 +477,120 @@ def test_no_hint_no_note_no_context(self) -> None: It broke. :( """ ) + + +class TestExternallyManagedEnvironment: + default_text = ( + f"The Python environment under {sys.prefix} is managed externally, " + f"and may not be\nmanipulated by the user. Please use specific " + f"tooling from the distributor of\nthe Python installation to " + f"interact with this environment instead.\n" + ) + + @pytest.fixture(autouse=True) + def patch_locale(self, monkeypatch: pytest.MonkeyPatch) -> None: + orig_getlocal = locale.getlocale + + def fake_getlocale(category: int) -> Tuple[Optional[str], Optional[str]]: + """Fake getlocale() that always report zh_Hant.""" + result = orig_getlocal(category) + if category == locale.LC_MESSAGES: + return "zh_Hant", result[1] + return result + + monkeypatch.setattr(locale, "getlocale", fake_getlocale) + + @pytest.fixture() + def marker(self, tmp_path: pathlib.Path) -> pathlib.Path: + marker = tmp_path.joinpath("EXTERNALLY-MANAGED") + marker.touch() + return marker + + def test_invalid_config_format( + self, + caplog: pytest.LogCaptureFixture, + marker: pathlib.Path, + ) -> None: + marker.write_text("invalid", encoding="utf8") + + with caplog.at_level(logging.WARNING, "pip._internal.exceptions"): + exc = ExternallyManagedEnvironment.from_config(marker) + assert len(caplog.records) == 1 + assert caplog.records[-1].getMessage() == f"Failed to read {marker}" + + assert str(exc.context) == self.default_text + + @pytest.mark.parametrize( + "config", + [ + pytest.param("", id="empty"), + pytest.param("[foo]\nblah = blah", id="no-section"), + pytest.param("[externally-managed]\nblah = blah", id="no-key"), + ], + ) + def test_config_without_key( + self, + caplog: pytest.LogCaptureFixture, + marker: pathlib.Path, + config: str, + ) -> None: + marker.write_text(config, encoding="utf8") + + with caplog.at_level(logging.WARNING, "pip._internal.exceptions"): + exc = 
ExternallyManagedEnvironment.from_config(marker) + assert not caplog.records + assert str(exc.context) == self.default_text + + @pytest.mark.parametrize( + "config, expected", + [ + pytest.param( + """\ + [externally-managed] + Error = 最後 + Error-en = English + Error-zh = 中文 + Error-zh_Hant = 繁體 + Error-zh_Hans = 简体 + """, + "繁體", + id="full", + ), + pytest.param( + """\ + [externally-managed] + Error = 最後 + Error-en = English + Error-zh = 中文 + Error-zh_Hans = 简体 + """, + "中文", + id="no-variant", + ), + pytest.param( + """\ + [externally-managed] + Error = 最後 + Error-en = English + """, + "最後", + id="fallback", + ), + ], + ) + def test_config_canonical( + self, + caplog: pytest.LogCaptureFixture, + marker: pathlib.Path, + config: str, + expected: str, + ) -> None: + marker.write_text( + textwrap.dedent(config), + encoding="utf8", + ) + + with caplog.at_level(logging.WARNING, "pip._internal.exceptions"): + exc = ExternallyManagedEnvironment.from_config(marker) + assert not caplog.records + assert str(exc.context) == expected From 6750d847a7e3220af700a07f2f96daa4939776c4 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 29 Dec 2022 17:23:51 +0800 Subject: [PATCH 239/730] Add PEP 668 functional tests --- tests/functional/test_pep668.py | 66 +++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 tests/functional/test_pep668.py diff --git a/tests/functional/test_pep668.py b/tests/functional/test_pep668.py new file mode 100644 index 00000000000..b8739ed2ac3 --- /dev/null +++ b/tests/functional/test_pep668.py @@ -0,0 +1,66 @@ +import textwrap +from typing import List + +import pytest + +from tests.lib import PipTestEnvironment, create_basic_wheel_for_package +from tests.lib.venv import VirtualEnvironment + + +@pytest.fixture() +def patch_check_externally_managed(virtualenv: VirtualEnvironment) -> None: + # Since the tests are run from a virtual environment, and we can't + # guarantee access to the actual stdlib location (where 
EXTERNALLY-MANAGED + # needs to go into), we patch the check to always raise a simple message. + virtualenv.sitecustomize = textwrap.dedent( + """\ + from pip._internal.exceptions import ExternallyManagedEnvironment + from pip._internal.utils import misc + + def check_externally_managed(): + raise ExternallyManagedEnvironment("I am externally managed") + + misc.check_externally_managed = check_externally_managed + """ + ) + + +@pytest.mark.parametrize( + "arguments", + [ + pytest.param(["install"], id="install"), + pytest.param(["install", "--user"], id="install-user"), + pytest.param(["uninstall", "-y"], id="uninstall"), + ], +) +@pytest.mark.usefixtures("patch_check_externally_managed") +def test_fails(script: PipTestEnvironment, arguments: List[str]) -> None: + result = script.pip(*arguments, "pip", expect_error=True) + assert "I am externally managed" in result.stderr + + +@pytest.mark.parametrize( + "arguments", + [ + pytest.param(["install", "--root"], id="install-root"), + pytest.param(["install", "--prefix"], id="install-prefix"), + pytest.param(["install", "--target"], id="install-target"), + ], +) +@pytest.mark.usefixtures("patch_check_externally_managed") +def test_allows_if_out_of_environment( + script: PipTestEnvironment, + arguments: List[str], +) -> None: + wheel = create_basic_wheel_for_package(script, "foo", "1.0") + result = script.pip(*arguments, script.scratch_path, wheel.as_uri()) + assert "Successfully installed foo-1.0" in result.stdout + assert "I am externally managed" not in result.stderr + + +@pytest.mark.usefixtures("patch_check_externally_managed") +def test_allows_install_dry_run(script: PipTestEnvironment) -> None: + wheel = create_basic_wheel_for_package(script, "foo", "1.0") + result = script.pip("install", "--dry-run", wheel.as_uri()) + assert "Would install foo-1.0" in result.stdout + assert "I am externally managed" not in result.stderr From 8fe656305049b0bddc372dc2e77770af52f223c4 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung 
Date: Tue, 3 Jan 2023 07:44:22 +0800 Subject: [PATCH 240/730] Fall back to non-localized message on Windows Windows does not implement LC_MESSAGES, and since PEP 668 is mainly designed for Linux distributions, we simply take the easier way out until someone wants an equivalent on Windows. --- src/pip/_internal/exceptions.py | 12 ++++++- tests/unit/test_exceptions.py | 62 +++++++++++++++++++++++++++++++-- 2 files changed, 71 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 5e0559c9ac1..d28713ff79f 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -703,7 +703,17 @@ def __init__(self, error: Optional[str]) -> None: @staticmethod def _iter_externally_managed_error_keys() -> Iterator[str]: - lang, _ = locale.getlocale(locale.LC_MESSAGES) + # LC_MESSAGES is in POSIX, but not the C standard. The most common + # platform that does not implement this category is Windows, where + # using other categories for console message localization is equally + # unreliable, so we fall back to the locale-less vendor message. This + # can always be re-evaluated when a vendor proposes a new alternative. 
+ try: + category = locale.LC_MESSAGES + except AttributeError: + lang: Optional[str] = None + else: + lang, _ = locale.getlocale(category) if lang is not None: yield f"Error-{lang}" for sep in ("-", "_"): diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py index 17cccd591e9..6510b569e5f 100644 --- a/tests/unit/test_exceptions.py +++ b/tests/unit/test_exceptions.py @@ -492,9 +492,9 @@ def patch_locale(self, monkeypatch: pytest.MonkeyPatch) -> None: orig_getlocal = locale.getlocale def fake_getlocale(category: int) -> Tuple[Optional[str], Optional[str]]: - """Fake getlocale() that always report zh_Hant.""" + """Fake getlocale() that always reports zh_Hant for LC_MESSASGES.""" result = orig_getlocal(category) - if category == locale.LC_MESSAGES: + if category == getattr(locale, "LC_MESSAGES", None): return "zh_Hant", result[1] return result @@ -541,6 +541,10 @@ def test_config_without_key( assert not caplog.records assert str(exc.context) == self.default_text + @pytest.mark.skipif( + sys.platform == "win32", + reason="Localization disabled on Windows", + ) @pytest.mark.parametrize( "config, expected", [ @@ -594,3 +598,57 @@ def test_config_canonical( exc = ExternallyManagedEnvironment.from_config(marker) assert not caplog.records assert str(exc.context) == expected + + @pytest.mark.skipif( + sys.platform != "win32", + reason="Non-Windows should implement localization", + ) + @pytest.mark.parametrize( + "config", + [ + pytest.param( + """\ + [externally-managed] + Error = 最後 + Error-en = English + Error-zh = 中文 + Error-zh_Hant = 繁體 + Error-zh_Hans = 简体 + """, + id="full", + ), + pytest.param( + """\ + [externally-managed] + Error = 最後 + Error-en = English + Error-zh = 中文 + Error-zh_Hans = 简体 + """, + id="no-variant", + ), + pytest.param( + """\ + [externally-managed] + Error = 最後 + Error-en = English + """, + id="fallback", + ), + ], + ) + def test_config_canonical_no_localization( + self, + caplog: pytest.LogCaptureFixture, + marker: 
pathlib.Path, + config: str, + ) -> None: + marker.write_text( + textwrap.dedent(config), + encoding="utf8", + ) + + with caplog.at_level(logging.WARNING, "pip._internal.exceptions"): + exc = ExternallyManagedEnvironment.from_config(marker) + assert not caplog.records + assert str(exc.context) == "最後" From 0233bf2757bc72f4ea15dd53b173f4d0d9075125 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 4 Jan 2023 18:18:07 +0800 Subject: [PATCH 241/730] Merge link_hash back into _hashes Commit bad03ef931d9b3ff4f9e75f35f9c41f45839e2a1 introduced the new link_hash attribute that holds the link's hash info, but that attribute does the same thing as _hashes, and some existing usages still populate that old attribute. Since the plural variant covers more use cases (a file can be hashed with multiple algorithms), we restore the old logic that uses _hashes before the commit, and consolidate link_hash back into that attribute. --- src/pip/_internal/models/link.py | 52 +++++++++++++++----------------- tests/unit/test_collector.py | 18 +++++------ 2 files changed, 34 insertions(+), 36 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index c7c4b0e9b25..a1e4d5a08df 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -79,6 +79,9 @@ def split_hash_name_and_value(cls, url: str) -> Optional["LinkHash"]: name, value = match.groups() return cls(name=name, value=value) + def as_dict(self) -> Dict[str, str]: + return {self.name: self.value} + def as_hashes(self) -> Hashes: """Return a Hashes instance which checks only for the current hash.""" return Hashes({self.name: [self.value]}) @@ -165,7 +168,6 @@ class Link(KeyBasedCompareMixin): "requires_python", "yanked_reason", "dist_info_metadata", - "link_hash", "cache_link_parsing", "egg_fragment", ] @@ -177,7 +179,6 @@ def __init__( requires_python: Optional[str] = None, yanked_reason: Optional[str] = None, dist_info_metadata: Optional[str] = None, - 
link_hash: Optional[LinkHash] = None, cache_link_parsing: bool = True, hashes: Optional[Mapping[str, str]] = None, ) -> None: @@ -200,16 +201,11 @@ def __init__( attribute, if present, in a simple repository HTML link. This may be parsed into its own `Link` by `self.metadata_link()`. See PEP 658 for more information and the specification. - :param link_hash: a checksum for the content the link points to. If not - provided, this will be extracted from the link URL, if the URL has - any checksum. :param cache_link_parsing: A flag that is used elsewhere to determine - whether resources retrieved from this link - should be cached. PyPI index urls should - generally have this set to False, for - example. + whether resources retrieved from this link should be cached. PyPI + URLs should generally have this set to False, for example. :param hashes: A mapping of hash names to digests to allow us to - determine the validity of a download. + determine the validity of a download. """ # url can be a UNC windows share @@ -220,13 +216,18 @@ def __init__( # Store the url as a private attribute to prevent accidentally # trying to set a new value. 
self._url = url - self._hashes = hashes if hashes is not None else {} + + link_hash = LinkHash.split_hash_name_and_value(url) + hashes_from_link = {} if link_hash is None else link_hash.as_dict() + if hashes is None: + self._hashes = hashes_from_link + else: + self._hashes = {**hashes, **hashes_from_link} self.comes_from = comes_from self.requires_python = requires_python if requires_python else None self.yanked_reason = yanked_reason self.dist_info_metadata = dist_info_metadata - self.link_hash = link_hash or LinkHash.split_hash_name_and_value(self._url) super().__init__(key=url, defining_class=Link) @@ -401,29 +402,26 @@ def metadata_link(self) -> Optional["Link"]: if self.dist_info_metadata is None: return None metadata_url = f"{self.url_without_fragment}.metadata" - link_hash: Optional[LinkHash] = None # If data-dist-info-metadata="true" is set, then the metadata file exists, # but there is no information about its checksum or anything else. if self.dist_info_metadata != "true": link_hash = LinkHash.split_hash_name_and_value(self.dist_info_metadata) - return Link(metadata_url, link_hash=link_hash) + else: + link_hash = None + if link_hash is None: + return Link(metadata_url) + return Link(metadata_url, hashes=link_hash.as_dict()) - def as_hashes(self) -> Optional[Hashes]: - if self.link_hash is not None: - return self.link_hash.as_hashes() - return None + def as_hashes(self) -> Hashes: + return Hashes({k: [v] for k, v in self._hashes.items()}) @property def hash(self) -> Optional[str]: - if self.link_hash is not None: - return self.link_hash.value - return None + return next(iter(self._hashes.values()), None) @property def hash_name(self) -> Optional[str]: - if self.link_hash is not None: - return self.link_hash.name - return None + return next(iter(self._hashes), None) @property def show_url(self) -> str: @@ -452,15 +450,15 @@ def is_yanked(self) -> bool: @property def has_hash(self) -> bool: - return self.link_hash is not None + return bool(self._hashes) def 
is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: """ Return True if the link has a hash and it is allowed by `hashes`. """ - if self.link_hash is None: + if hashes is None: return False - return self.link_hash.is_hash_allowed(hashes) + return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items()) class _CleanResult(NamedTuple): diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 55676a4fc5c..47307c00e84 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -6,7 +6,7 @@ import uuid from pathlib import Path from textwrap import dedent -from typing import List, Optional, Tuple +from typing import Dict, List, Optional, Tuple from unittest import mock import pytest @@ -538,7 +538,7 @@ def test_parse_links_json() -> None: metadata_link.url == "https://example.com/files/holygrail-1.0-py3-none-any.whl.metadata" ) - assert metadata_link.link_hash == LinkHash("sha512", "aabdd41") + assert metadata_link._hashes == {"sha512": "aabdd41"} @pytest.mark.parametrize( @@ -575,41 +575,41 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) - @pytest.mark.parametrize( - "anchor_html, expected, link_hash", + "anchor_html, expected, hashes", [ # Test not present. ( '', None, - None, + {}, ), # Test with value "true". ( '', "true", - None, + {}, ), # Test with a provided hash value. ( '', # noqa: E501 "sha256=aa113592bbe", - None, + {}, ), # Test with a provided hash value for both the requirement as well as metadata. 
( '', # noqa: E501 "sha256=aa113592bbe", - LinkHash("sha512", "abc132409cb"), + {"sha512": "abc132409cb"}, ), ], ) def test_parse_links__dist_info_metadata( anchor_html: str, expected: Optional[str], - link_hash: Optional[LinkHash], + hashes: Dict[str, str], ) -> None: link = _test_parse_links_data_attribute(anchor_html, "dist_info_metadata", expected) - assert link.link_hash == link_hash + assert link._hashes == hashes def test_parse_links_caches_same_page_by_url() -> None: From 471cb14e88942f43425a487792cebb936df9cafe Mon Sep 17 00:00:00 2001 From: Tommi Enenkel | AnB Date: Sun, 8 Jan 2023 09:31:44 +0100 Subject: [PATCH 242/730] fix grammar error in user notice --- src/pip/_internal/self_outdated_check.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/self_outdated_check.py b/src/pip/_internal/self_outdated_check.py index 9e2149c5247..f87c9a5d2c0 100644 --- a/src/pip/_internal/self_outdated_check.py +++ b/src/pip/_internal/self_outdated_check.py @@ -133,7 +133,7 @@ def __rich__(self) -> Group: return Group( Text(), Text.from_markup( - f"{notice} A new release of pip available: " + f"{notice} A new release of pip is available: " f"[red]{self.old}[reset] -> [green]{self.new}[reset]" ), Text.from_markup( From d1d21bbc40e28f422f653eb6b06231041cc42948 Mon Sep 17 00:00:00 2001 From: Tommi Enenkel | AnB Date: Sun, 8 Jan 2023 09:33:58 +0100 Subject: [PATCH 243/730] add news --- news/of.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/of.trivial.rst diff --git a/news/of.trivial.rst b/news/of.trivial.rst new file mode 100644 index 00000000000..8b137891791 --- /dev/null +++ b/news/of.trivial.rst @@ -0,0 +1 @@ + From 5eda50d23710e44ea08cfeac30b4fa5353354d68 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 9 Jan 2023 10:13:19 +0800 Subject: [PATCH 244/730] Test and news for wheel cache restoration The fix merged in 3b60e36289866dfc5a99d57ec12069c9ff597fe8 also fixed the wheel cache; this cherry-picks the 
test and news from the other pull request to validate and document the fact. --- news/11527.bugfix.rst | 2 ++ tests/unit/test_cache.py | 44 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 news/11527.bugfix.rst diff --git a/news/11527.bugfix.rst b/news/11527.bugfix.rst new file mode 100644 index 00000000000..0185a804ff7 --- /dev/null +++ b/news/11527.bugfix.rst @@ -0,0 +1,2 @@ +Wheel cache behavior is restored to match previous versions, allowing the +cache to find existing entries. diff --git a/tests/unit/test_cache.py b/tests/unit/test_cache.py index f1f0141c708..f27daa266c8 100644 --- a/tests/unit/test_cache.py +++ b/tests/unit/test_cache.py @@ -1,7 +1,7 @@ import os from pathlib import Path -from pip._vendor.packaging.tags import Tag +from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version from pip._internal.cache import WheelCache, _hash_dict from pip._internal.models.format_control import FormatControl @@ -52,6 +52,48 @@ def test_cache_hash() -> None: assert h == "f83b32dfa27a426dec08c21bf006065dd003d0aac78e7fc493d9014d" +def test_link_to_cache(tmpdir: Path) -> None: + """ + Test that Link.from_json() produces Links with consistent cache + locations + """ + wc = WheelCache(os.fspath(tmpdir), FormatControl()) + # Define our expectations for stable cache path. + i_name = interpreter_name() + i_version = interpreter_version() + key_parts = { + "url": "https://files.pythonhosted.org/packages/a6/91/" + "86a6eac449ddfae239e93ffc1918cf33fd9bab35c04d1e963b311e347a73/" + "netifaces-0.11.0.tar.gz", + "sha256": "043a79146eb2907edf439899f262b3dfe41717d34124298ed281139a8b93ca32", + "interpreter_name": i_name, + "interpreter_version": i_version, + } + expected_hash = _hash_dict(key_parts) + parts = [ + expected_hash[:2], + expected_hash[2:4], + expected_hash[4:6], + expected_hash[6:], + ] + pathed_hash = os.path.join(*parts) + # Check working from a Link produces the same result. 
+ file_data = { + "filename": "netifaces-0.11.0.tar.gz", + "hashes": { + "sha256": key_parts["sha256"], + }, + "requires-python": "", + "url": key_parts["url"], + "yanked": False, + } + page_url = "https://pypi.org/simple/netifaces/" + link = Link.from_json(file_data=file_data, page_url=page_url) + assert link + path = wc.get_path_for_link(link) + assert pathed_hash in path + + def test_get_cache_entry(tmpdir: Path) -> None: wc = WheelCache(os.fspath(tmpdir), FormatControl()) persi_link = Link("https://g.c/o/r/persi") From 24feb508d669e271fff0ec9fd9bc4735f05d349b Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 9 Jan 2023 10:43:29 +0800 Subject: [PATCH 245/730] Remove unneeded news file --- news/of.trivial.rst | 1 - 1 file changed, 1 deletion(-) delete mode 100644 news/of.trivial.rst diff --git a/news/of.trivial.rst b/news/of.trivial.rst deleted file mode 100644 index 8b137891791..00000000000 --- a/news/of.trivial.rst +++ /dev/null @@ -1 +0,0 @@ - From dc8ac7b896ea5228f17fc89414e0afc1bbc3d6d1 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 9 Jan 2023 17:27:40 +0800 Subject: [PATCH 246/730] Enable strict optional checking in PackageFinder Most should be pretty straightforward. In one place I need to use a custom TypeGuard, available in typing_extensions, which we're vendoring for Rich. --- src/pip/_internal/index/package_finder.py | 52 ++++++++++++----------- src/pip/_internal/self_outdated_check.py | 9 ++-- 2 files changed, 34 insertions(+), 27 deletions(-) diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py index 9bf247f0246..b6f8d57e854 100644 --- a/src/pip/_internal/index/package_finder.py +++ b/src/pip/_internal/index/package_finder.py @@ -1,14 +1,11 @@ """Routines related to PyPI, indexes""" -# The following comment should be removed at some point in the future. 
-# mypy: strict-optional=False - import enum import functools import itertools import logging import re -from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union +from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union from pip._vendor.packaging import specifiers from pip._vendor.packaging.tags import Tag @@ -39,6 +36,9 @@ from pip._internal.utils.packaging import check_requires_python from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS +if TYPE_CHECKING: + from pip._vendor.typing_extensions import TypeGuard + __all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"] @@ -251,7 +251,7 @@ def evaluate_link(self, link: Link) -> Tuple[LinkType, str]: def filter_unallowed_hashes( candidates: List[InstallationCandidate], - hashes: Hashes, + hashes: Optional[Hashes], project_name: str, ) -> List[InstallationCandidate]: """ @@ -540,6 +540,7 @@ def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey: binary_preference = 1 if wheel.build_tag is not None: match = re.match(r"^(\d+)(.*)$", wheel.build_tag) + assert match is not None, "guaranteed by filename validation" build_tag_groups = match.groups() build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) else: # sdist @@ -942,43 +943,46 @@ def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str: "No matching distribution found for {}".format(req) ) - best_installed = False - if installed_version and ( - best_candidate is None or best_candidate.version <= installed_version - ): - best_installed = True + def _should_install_candidate( + candidate: Optional[InstallationCandidate], + ) -> "TypeGuard[InstallationCandidate]": + if installed_version is None: + return True + if best_candidate is None: + return False + return best_candidate.version > installed_version if not upgrade and installed_version is not None: - if best_installed: + if _should_install_candidate(best_candidate): logger.debug( - "Existing installed 
version (%s) is most up-to-date and " - "satisfies requirement", + "Existing installed version (%s) satisfies requirement " + "(most up-to-date version is %s)", installed_version, + best_candidate.version, ) else: logger.debug( - "Existing installed version (%s) satisfies requirement " - "(most up-to-date version is %s)", + "Existing installed version (%s) is most up-to-date and " + "satisfies requirement", installed_version, - best_candidate.version, ) return None - if best_installed: - # We have an existing version, and its the best version + if _should_install_candidate(best_candidate): logger.debug( - "Installed version (%s) is most up-to-date (past versions: %s)", - installed_version, + "Using version %s (newest of versions: %s)", + best_candidate.version, _format_versions(best_candidate_result.iter_applicable()), ) - raise BestVersionAlreadyInstalled + return best_candidate + # We have an existing version, and its the best version logger.debug( - "Using version %s (newest of versions: %s)", - best_candidate.version, + "Installed version (%s) is most up-to-date (past versions: %s)", + installed_version, _format_versions(best_candidate_result.iter_applicable()), ) - return best_candidate + raise BestVersionAlreadyInstalled def _find_name_version_sep(fragment: str, canonical_name: str) -> int: diff --git a/src/pip/_internal/self_outdated_check.py b/src/pip/_internal/self_outdated_check.py index f87c9a5d2c0..41cc42c5677 100644 --- a/src/pip/_internal/self_outdated_check.py +++ b/src/pip/_internal/self_outdated_check.py @@ -155,7 +155,7 @@ def was_installed_by_pip(pkg: str) -> bool: def _get_current_remote_pip_version( session: PipSession, options: optparse.Values -) -> str: +) -> Optional[str]: # Lets use PackageFinder to see what the latest pip version is link_collector = LinkCollector.create( session, @@ -176,7 +176,7 @@ def _get_current_remote_pip_version( ) best_candidate = finder.find_best_candidate("pip").best_candidate if best_candidate is None: - return + 
return None return str(best_candidate.version) @@ -186,11 +186,14 @@ def _self_version_check_logic( state: SelfCheckState, current_time: datetime.datetime, local_version: DistributionVersion, - get_remote_version: Callable[[], str], + get_remote_version: Callable[[], Optional[str]], ) -> Optional[UpgradePrompt]: remote_version_str = state.get(current_time) if remote_version_str is None: remote_version_str = get_remote_version() + if remote_version_str is None: + logger.debug("No remote pip version found") + return None state.set(remote_version_str, current_time) remote_version = parse_version(remote_version_str) From 07a360dfe8fcad8c34d7bb70c77362cc3ec8a374 Mon Sep 17 00:00:00 2001 From: Joonatan Partanen Date: Mon, 9 Jan 2023 14:42:52 +0200 Subject: [PATCH 247/730] Mention build-backend in regular/editable install differences (#11691) Co-authored-by: Paul Moore Co-authored-by: Pradyun Gedam --- docs/html/topics/local-project-installs.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/topics/local-project-installs.md b/docs/html/topics/local-project-installs.md index 151035b00dd..bd70dbe2f78 100644 --- a/docs/html/topics/local-project-installs.md +++ b/docs/html/topics/local-project-installs.md @@ -34,7 +34,7 @@ Editable installs allow you to install your project without copying any files. I With an editable install, you only need to perform a re-installation if you change the project metadata (eg: version, what scripts need to be generated etc). You will still need to run build commands when you need to perform a compilation for non-Python code in the project (eg: C extensions). ```{caution} -It is possible to see behaviour differences between regular installs vs editable installs. In case you distribute the project as a "distribution package", users will see the behaviour of regular installs -- thus, it is important to ensure that regular installs work correctly. 
+It is possible to see behaviour differences between regular installs vs editable installs. These differences depend on the build-backend, and you should check the build-backend documentation for the details. In case you distribute the project as a "distribution package", users will see the behaviour of regular installs -- thus, it is important to ensure that regular installs work correctly. ``` ```{note} From bc96473b9a5083f8f3db67ce4a60df528687d1a9 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 9 Jan 2023 16:15:37 +0000 Subject: [PATCH 248/730] Ignore optional (`extra`) dependencies in `pip check` This preserves the current behaviour and avoids failures due to optional dependencies in the graph. --- src/pip/_internal/operations/check.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/operations/check.py b/src/pip/_internal/operations/check.py index fb3ac8b9c9e..e3bce69b204 100644 --- a/src/pip/_internal/operations/check.py +++ b/src/pip/_internal/operations/check.py @@ -75,7 +75,7 @@ def check_package_set( if name not in package_set: missed = True if req.marker is not None: - missed = req.marker.evaluate() + missed = req.marker.evaluate({"extra": ""}) if missed: missing_deps.add((name, req)) continue From 254bdf87895a9a19166811a477c3a77d8c7039ba Mon Sep 17 00:00:00 2001 From: Daniil Konovalenko Date: Sat, 7 Jan 2023 20:18:00 +0100 Subject: [PATCH 249/730] fallback to a placeholder in case dist.location is None --- src/pip/_internal/req/req_install.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 4543be34c20..dfef45fc53c 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -1,5 +1,4 @@ # The following comment should be removed at some point in the future. 
-# mypy: strict-optional=False import functools import logging @@ -195,7 +194,11 @@ def __str__(self) -> str: else: s = "" if self.satisfied_by is not None: - s += " in {}".format(display_path(self.satisfied_by.location)) + if self.satisfied_by.location is not None: + location = display_path(self.satisfied_by.location) + else: + location = 'memory' + s += f" in {location}" if self.comes_from: if isinstance(self.comes_from, str): comes_from: Optional[str] = self.comes_from From 8374d818ac12b35e41d79ded476b4b8050a876f1 Mon Sep 17 00:00:00 2001 From: Daniil Konovalenko Date: Sat, 7 Jan 2023 21:16:32 +0100 Subject: [PATCH 250/730] add test --- tests/functional/test_install.py | 45 ++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 7b07226c90e..e39b0b7e9f2 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2351,3 +2351,48 @@ def test_install_8559_wheel_package_present( allow_stderr_warning=False, ) assert DEPRECATION_MSG_PREFIX not in result.stderr + + +@pytest.mark.skipif( + sys.version_info < (3, 11), + reason="3.11 required to find distributions via importlib metadata" +) +def test_install_existing_memory_distribution(script: PipTestEnvironment): + sitecustomize_text = textwrap.dedent( + """ + import sys + from importlib.metadata import Distribution, DistributionFinder + + + EXAMPLE_METADATA = '''Metadata-Version: 2.1 + Name: example + Version: 1.0.0 + + ''' + + class ExampleDistribution(Distribution): + def locate_file(self, path): + return path + + def read_text(self, filename): + if filename == 'METADATA': + return EXAMPLE_METADATA + + + class CustomFinder(DistributionFinder): + def find_distributions(self, context=None): + return [ExampleDistribution()] + + + sys.meta_path.append(CustomFinder()) + """ + ) + with open(script.site_packages_path / 'sitecustomize.py', 'w') as sitecustomize_file: + 
sitecustomize_file.write(sitecustomize_text) + + result = script.pip( + "install", + "example" + ) + + assert "Requirement already satisfied: example in " in result.stdout From 53064079ed127974c11930db2ce12d48f9c6c901 Mon Sep 17 00:00:00 2001 From: Daniil Konovalenko Date: Sat, 7 Jan 2023 21:16:40 +0100 Subject: [PATCH 251/730] revert mypy comment --- src/pip/_internal/req/req_install.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index dfef45fc53c..a50bb869b8d 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -1,4 +1,5 @@ # The following comment should be removed at some point in the future. +# mypy: strict-optional=False import functools import logging From ed7dbe9843a318f37a289eb4c1ca3b6dd2a477a6 Mon Sep 17 00:00:00 2001 From: Daniil Konovalenko Date: Sat, 7 Jan 2023 21:18:12 +0100 Subject: [PATCH 252/730] fix formatting --- src/pip/_internal/req/req_install.py | 2 +- tests/functional/test_install.py | 27 ++++++++++++--------------- 2 files changed, 13 insertions(+), 16 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index a50bb869b8d..bb38ec09da4 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -198,7 +198,7 @@ def __str__(self) -> str: if self.satisfied_by.location is not None: location = display_path(self.satisfied_by.location) else: - location = 'memory' + location = "" s += f" in {location}" if self.comes_from: if isinstance(self.comes_from, str): diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index e39b0b7e9f2..79da3d709bf 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2355,44 +2355,41 @@ def test_install_8559_wheel_package_present( @pytest.mark.skipif( sys.version_info < (3, 11), - reason="3.11 required to find distributions via importlib metadata" + 
reason="3.11 required to find distributions via importlib metadata", ) def test_install_existing_memory_distribution(script: PipTestEnvironment): sitecustomize_text = textwrap.dedent( """ import sys from importlib.metadata import Distribution, DistributionFinder - - + + EXAMPLE_METADATA = '''Metadata-Version: 2.1 Name: example Version: 1.0.0 - + ''' class ExampleDistribution(Distribution): def locate_file(self, path): return path - + def read_text(self, filename): if filename == 'METADATA': return EXAMPLE_METADATA - - + + class CustomFinder(DistributionFinder): def find_distributions(self, context=None): return [ExampleDistribution()] - - + + sys.meta_path.append(CustomFinder()) """ ) - with open(script.site_packages_path / 'sitecustomize.py', 'w') as sitecustomize_file: - sitecustomize_file.write(sitecustomize_text) + with open(script.site_packages_path / "sitecustomize.py", "w") as sitecustomize: + sitecustomize.write(sitecustomize_text) - result = script.pip( - "install", - "example" - ) + result = script.pip("install", "example") assert "Requirement already satisfied: example in " in result.stdout From 15b2cc993e06fbaef5f8d6e891f78ff6a131182d Mon Sep 17 00:00:00 2001 From: Daniil Konovalenko Date: Sat, 7 Jan 2023 21:19:23 +0100 Subject: [PATCH 253/730] fix mypy --- tests/functional/test_install.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 79da3d709bf..3fd9329bc6e 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2357,7 +2357,7 @@ def test_install_8559_wheel_package_present( sys.version_info < (3, 11), reason="3.11 required to find distributions via importlib metadata", ) -def test_install_existing_memory_distribution(script: PipTestEnvironment): +def test_install_existing_memory_distribution(script: PipTestEnvironment) -> None: sitecustomize_text = textwrap.dedent( """ import sys From 5540331160b47e2ae4b735b0c38dc6adb00cbced 
Mon Sep 17 00:00:00 2001 From: Daniil Konovalenko Date: Sat, 7 Jan 2023 21:25:44 +0100 Subject: [PATCH 254/730] add a news entry --- news/11704.bugfix.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 news/11704.bugfix.rst diff --git a/news/11704.bugfix.rst b/news/11704.bugfix.rst new file mode 100644 index 00000000000..0e7902a2590 --- /dev/null +++ b/news/11704.bugfix.rst @@ -0,0 +1,2 @@ +Fix an issue when an already existing in-memory distribution would cause +exceptions in ``pip install`` From 898b0fced46a24061424e90f8c5f28871db523eb Mon Sep 17 00:00:00 2001 From: Jason Curtis <3341011+jason-curtis@users.noreply.github.com> Date: Mon, 16 Jan 2023 14:01:00 -0800 Subject: [PATCH 255/730] minor installation.md docs typo --- docs/html/installation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/installation.md b/docs/html/installation.md index 7db0d47ab23..036a91397a5 100644 --- a/docs/html/installation.md +++ b/docs/html/installation.md @@ -90,7 +90,7 @@ distro community, cloud provider support channels, etc). ## Upgrading `pip` -Upgrading your `pip` by running: +Upgrade your `pip` by running: ```{pip-cli} $ pip install --upgrade pip From a2519ebf496f234d2736f8c1d1ed04ccd7c57f29 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 17 Jan 2023 10:05:12 +0800 Subject: [PATCH 256/730] Enable strict optionals in egg_link code Newer typeshed correctly annotates sysconfig.get_path() to return str so this just works now. 
--- src/pip/_internal/locations/base.py | 2 +- src/pip/_internal/utils/egg_link.py | 7 ++----- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/locations/base.py b/src/pip/_internal/locations/base.py index 3f7de0061f1..3f9f896e632 100644 --- a/src/pip/_internal/locations/base.py +++ b/src/pip/_internal/locations/base.py @@ -13,7 +13,7 @@ USER_CACHE_DIR = appdirs.user_cache_dir("pip") # FIXME doesn't account for venv linked to global site-packages -site_packages: typing.Optional[str] = sysconfig.get_path("purelib") +site_packages: str = sysconfig.get_path("purelib") def get_major_minor_version() -> str: diff --git a/src/pip/_internal/utils/egg_link.py b/src/pip/_internal/utils/egg_link.py index 9e0da8d2d29..eb57ed1519f 100644 --- a/src/pip/_internal/utils/egg_link.py +++ b/src/pip/_internal/utils/egg_link.py @@ -1,10 +1,7 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - import os import re import sys -from typing import Optional +from typing import List, Optional from pip._internal.locations import site_packages, user_site from pip._internal.utils.virtualenv import ( @@ -57,7 +54,7 @@ def egg_link_path_from_location(raw_name: str) -> Optional[str]: This method will just return the first one found. 
""" - sites = [] + sites: List[str] = [] if running_under_virtualenv(): sites.append(site_packages) if not virtualenv_no_global() and user_site: From 5e5480b3bcb4a890fff123e658c35502e87e5cb9 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 17 Jan 2023 09:57:32 +0800 Subject: [PATCH 257/730] Only exclude --dry-run when used with --report --- src/pip/_internal/commands/install.py | 5 +++-- tests/functional/test_pep668.py | 19 +++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 311c48a969f..accceeaec39 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -288,9 +288,10 @@ def run(self, options: Values, args: List[str]) -> int: # Check whether the environment we're installing into is externally # managed, as specified in PEP 668. Specifying --root, --target, or # --prefix disables the check, since there's no reliable way to locate - # the EXTERNALLY-MANAGED file for those cases. + # the EXTERNALLY-MANAGED file for those cases. An exception is also + # made specifically for "--dry-run --report" for convenience. 
installing_into_current_environment = ( - not options.dry_run + not (options.dry_run and options.json_report_file) and options.root_path is None and options.target_dir is None and options.prefix_path is None diff --git a/tests/functional/test_pep668.py b/tests/functional/test_pep668.py index b8739ed2ac3..1fed85e708e 100644 --- a/tests/functional/test_pep668.py +++ b/tests/functional/test_pep668.py @@ -1,3 +1,5 @@ +import json +import pathlib import textwrap from typing import List @@ -30,6 +32,7 @@ def check_externally_managed(): [ pytest.param(["install"], id="install"), pytest.param(["install", "--user"], id="install-user"), + pytest.param(["install", "--dry-run"], id="install-dry-run"), pytest.param(["uninstall", "-y"], id="uninstall"), ], ) @@ -59,8 +62,20 @@ def test_allows_if_out_of_environment( @pytest.mark.usefixtures("patch_check_externally_managed") -def test_allows_install_dry_run(script: PipTestEnvironment) -> None: +def test_allows_install_dry_run( + script: PipTestEnvironment, + tmp_path: pathlib.Path, +) -> None: + output = tmp_path.joinpath("out.json") wheel = create_basic_wheel_for_package(script, "foo", "1.0") - result = script.pip("install", "--dry-run", wheel.as_uri()) + result = script.pip( + "install", + "--dry-run", + f"--report={output.as_posix()}", + wheel.as_uri(), + expect_stderr=True, + ) assert "Would install foo-1.0" in result.stdout assert "I am externally managed" not in result.stderr + with output.open(encoding="utf8") as f: + assert isinstance(json.load(f), dict) From 950522e456714f5ba8bddae695041afecabff558 Mon Sep 17 00:00:00 2001 From: Chris Pryer Date: Wed, 18 Jan 2023 20:49:40 -0500 Subject: [PATCH 258/730] Fix docs --- docs/html/development/architecture/overview.rst | 16 ++++++++-------- docs/html/user_guide.rst | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/html/development/architecture/overview.rst b/docs/html/development/architecture/overview.rst index f9bcfb8731e..63445754a76 100644 --- 
a/docs/html/development/architecture/overview.rst +++ b/docs/html/development/architecture/overview.rst @@ -24,9 +24,9 @@ Things pip does: backwards compatibility reasons. But thing with setuptools: has a ``setup.py`` file that it invokes to …… get info? -2. Decides where to install stuff. Once the package is built, resulting - artifact is then installed into system in appropriate place. :pep:`517` - defines interface between build backend & installer. +2. Decides where to install stuff. Once the package is built, the resulting + artifact is then installed to the system in its appropriate place. :pep:`517` + defines the interface between the build backend & installer. Broad overview of flow ====================== @@ -111,24 +111,24 @@ The package index gives pip a list of files for that package (via the existing P pip chooses from the list a single file to download. -It may go back and choose another file to download +It may go back and choose another file to download. When pip looks at the package index, the place where it looks has -basically a link. The link’s text is the name of the file +basically a link. The link’s text is the name of the file. This is the `PyPI Simple API`_ (PyPI has several APIs, some are being deprecated). pip looks at Simple API, documented initially at :pep:`503` -- packaging.python.org has PyPA specifications with more details for -Simple Repository API +Simple Repository API. -For this package name -- this is the list of files available +For this package name -- this is the list of files available. Looks there for: * The list of filenames * Other info -Once it has those, selects one file, downloads it +Once it has those, it selects one file and downloads it. (Question: If I want to ``pip install flask``, I think the whole list of filenames cannot….should not be …. ? I want only the Flask …. 
Why am I getting the diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index b90b778b8a2..966b200f4f5 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -800,7 +800,7 @@ As noted previously, pip is a command line program. While it is implemented in Python, and so is available from your Python code via ``import pip``, you must not use pip's internal APIs in this way. There are a number of reasons for this: -#. The pip code assumes that is in sole control of the global state of the +#. The pip code assumes that it is in sole control of the global state of the program. pip manages things like the logging system configuration, or the values of the standard IO streams, without considering the possibility that user code From b99a6fe012eb0950437e3980bb110e4d1065254e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Muha=20Ajjan=E2=80=AE?= Date: Sat, 21 Jan 2023 11:24:26 +0100 Subject: [PATCH 259/730] Rename Unix to Linux this keeps the tabs under "Default paths" and "Avoiding caching" in sync to reproduce the issue this commit solves, go to this page: https://pip.pypa.io/en/stable/topics/caching/ then change the tabs under one of the sections, notice how changing to Unix / Linux breaks sync due to PipCLIDirective using the name [Linux](https://github.com/pypa/pip/blob/95a58e7ba58e769af882502f7436559f61ecca8a/docs/pip_sphinxext.py#L231) for Unix based OS's --- docs/html/topics/caching.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/topics/caching.md b/docs/html/topics/caching.md index d1a3f5ea19f..954cebe402d 100644 --- a/docs/html/topics/caching.md +++ b/docs/html/topics/caching.md @@ -67,7 +67,7 @@ You can use `pip cache dir` to get the cache directory that pip is currently con ### Default paths -````{tab} Unix +````{tab} Linux ``` ~/.cache/pip ``` From 44c1ccaf4013ff0312f8ce0f371161cbcefa7558 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=A0=D0=BE=D0=BC=D0=B0=D0=BD=20=D0=94=D0=BE=D0=BD=D1=87?= 
=?UTF-8?q?=D0=B5=D0=BD=D0=BA=D0=BE?= Date: Fri, 27 Jan 2023 20:48:47 +0300 Subject: [PATCH 260/730] Replace deprecated setup/teardown usage As explained in , `setup`/`teardown` are a part of nose compatibility, which is deprecated. You're supposed to use `setup_method` and `teardown_method` instead. --- news/4e5ddb5b-e740-4ef8-a78a-8d62a144fdde.trivial.rst | 0 tests/lib/configuration_helpers.py | 2 +- tests/lib/options_helpers.py | 4 ++-- tests/unit/test_locations.py | 4 ++-- tests/unit/test_req.py | 8 ++++---- tests/unit/test_utils.py | 4 ++-- tests/unit/test_utils_unpacking.py | 4 ++-- 7 files changed, 13 insertions(+), 13 deletions(-) create mode 100644 news/4e5ddb5b-e740-4ef8-a78a-8d62a144fdde.trivial.rst diff --git a/news/4e5ddb5b-e740-4ef8-a78a-8d62a144fdde.trivial.rst b/news/4e5ddb5b-e740-4ef8-a78a-8d62a144fdde.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/lib/configuration_helpers.py b/tests/lib/configuration_helpers.py index 67f75e8e7a0..ec824ffd3b8 100644 --- a/tests/lib/configuration_helpers.py +++ b/tests/lib/configuration_helpers.py @@ -17,7 +17,7 @@ class ConfigurationMixin: - def setup(self) -> None: + def setup_method(self) -> None: self.configuration = pip._internal.configuration.Configuration( isolated=False, ) diff --git a/tests/lib/options_helpers.py b/tests/lib/options_helpers.py index 31f65003545..4444fa3e97b 100644 --- a/tests/lib/options_helpers.py +++ b/tests/lib/options_helpers.py @@ -22,12 +22,12 @@ def main( # type: ignore[override] class AddFakeCommandMixin: - def setup(self) -> None: + def setup_method(self) -> None: commands_dict["fake"] = CommandInfo( "tests.lib.options_helpers", "FakeCommand", "fake summary", ) - def teardown(self) -> None: + def teardown_method(self) -> None: commands_dict.pop("fake") diff --git a/tests/unit/test_locations.py b/tests/unit/test_locations.py index 77567665376..bd233b22aab 100644 --- a/tests/unit/test_locations.py +++ b/tests/unit/test_locations.py @@ -28,13 +28,13 
@@ def _get_scheme_dict(*args: Any, **kwargs: Any) -> Dict[str, str]: class TestLocations: - def setup(self) -> None: + def setup_method(self) -> None: self.tempdir = tempfile.mkdtemp() self.st_uid = 9999 self.username = "example" self.patch() - def teardown(self) -> None: + def teardown_method(self) -> None: self.revert_patch() shutil.rmtree(self.tempdir, ignore_errors=True) diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index bd828916593..c46883dc2d4 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -71,10 +71,10 @@ def get_processed_req_from_line( class TestRequirementSet: """RequirementSet tests""" - def setup(self) -> None: + def setup_method(self) -> None: self.tempdir = tempfile.mkdtemp() - def teardown(self) -> None: + def teardown_method(self) -> None: shutil.rmtree(self.tempdir, ignore_errors=True) @contextlib.contextmanager @@ -507,10 +507,10 @@ def test_download_info_vcs(self) -> None: class TestInstallRequirement: - def setup(self) -> None: + def setup_method(self) -> None: self.tempdir = tempfile.mkdtemp() - def teardown(self) -> None: + def teardown_method(self) -> None: shutil.rmtree(self.tempdir, ignore_errors=True) def test_url_with_query(self) -> None: diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 0c9b0766448..1daaecbf490 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -52,7 +52,7 @@ class Tests_EgglinkPath: "util.egg_link_path_from_location() tests" - def setup(self) -> None: + def setup_method(self) -> None: project = "foo" @@ -81,7 +81,7 @@ def setup(self) -> None: self.old_isfile = path.isfile self.mock_isfile = path.isfile = Mock() - def teardown(self) -> None: + def teardown_method(self) -> None: from pip._internal.utils import egg_link as utils utils.site_packages = self.old_site_packages diff --git a/tests/unit/test_utils_unpacking.py b/tests/unit/test_utils_unpacking.py index 382142ac177..1f0b59dbd6b 100644 --- a/tests/unit/test_utils_unpacking.py +++ 
b/tests/unit/test_utils_unpacking.py @@ -37,12 +37,12 @@ class TestUnpackArchives: """ - def setup(self) -> None: + def setup_method(self) -> None: self.tempdir = tempfile.mkdtemp() self.old_mask = os.umask(0o022) self.symlink_expected_mode = None - def teardown(self) -> None: + def teardown_method(self) -> None: os.umask(self.old_mask) shutil.rmtree(self.tempdir, ignore_errors=True) From 38681f3d6669754c7e919f0eb051b12931bfb0f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Fri, 30 Dec 2022 14:11:45 +0100 Subject: [PATCH 261/730] Allow multiple hashes in direct_url.json This influences the recorded direct_url.json metadata, and therefore the pip inspect output, as well as the pip install --report format. --- news/11312.feature.rst | 2 ++ src/pip/_internal/models/direct_url.py | 15 +++++++++++++-- tests/unit/test_direct_url.py | 4 ++++ tests/unit/test_direct_url_helpers.py | 4 ++++ 4 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 news/11312.feature.rst diff --git a/news/11312.feature.rst b/news/11312.feature.rst new file mode 100644 index 00000000000..493dde83059 --- /dev/null +++ b/news/11312.feature.rst @@ -0,0 +1,2 @@ +Change the hashes in the installation report to be a mapping. Emit the +``archive_info.hashes`` dictionary in ``direct_url.json``. diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py index e75feda9ca9..09b540f916c 100644 --- a/src/pip/_internal/models/direct_url.py +++ b/src/pip/_internal/models/direct_url.py @@ -103,17 +103,28 @@ class ArchiveInfo: def __init__( self, hash: Optional[str] = None, + hashes: Optional[Dict[str, str]] = None, ) -> None: + if hash is not None: + # Auto-populate the hashes key to upgrade to the new format automatically. + # We don't back-populate the legacy hash key. 
+ hash_name, hash_value = hash.split("=", 1) + if hashes is None: + hashes = {hash_name: hash_value} + elif hash_name not in hash: + hashes = hashes.copy() + hashes[hash_name] = hash_value self.hash = hash + self.hashes = hashes @classmethod def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]: if d is None: return None - return cls(hash=_get(d, str, "hash")) + return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes")) def _to_dict(self) -> Dict[str, Any]: - return _filter_none(hash=self.hash) + return _filter_none(hash=self.hash, hashes=self.hashes) class DirInfo: diff --git a/tests/unit/test_direct_url.py b/tests/unit/test_direct_url.py index c81e5129253..e1708ae9381 100644 --- a/tests/unit/test_direct_url.py +++ b/tests/unit/test_direct_url.py @@ -39,6 +39,10 @@ def test_archive_info() -> None: assert ( direct_url.info.hash == direct_url_dict["archive_info"]["hash"] # type: ignore ) + # test we add the hashes key automatically + direct_url_dict["archive_info"]["hashes"] = { # type: ignore + "sha1": "1b8c5bc61a86f377fea47b4276c8c8a5842d2220" + } assert direct_url.to_dict() == direct_url_dict diff --git a/tests/unit/test_direct_url_helpers.py b/tests/unit/test_direct_url_helpers.py index 3ab253462b6..692ee299c02 100644 --- a/tests/unit/test_direct_url_helpers.py +++ b/tests/unit/test_direct_url_helpers.py @@ -146,6 +146,10 @@ def test_from_link_archive() -> None: ) assert isinstance(direct_url.info, ArchiveInfo) assert direct_url.info.hash == "sha1=1b8c5bc61a86f377fea47b4276c8c8a5842d2220" + # Test the hashes key has been automatically populated. 
+ assert direct_url.info.hashes == { + "sha1": "1b8c5bc61a86f377fea47b4276c8c8a5842d2220" + } def test_from_link_dir(tmpdir: Path) -> None: From 99dddc1616c98c0a3c0adc79431677cc3cb1ce38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 28 Jan 2023 13:48:14 +0100 Subject: [PATCH 262/730] pip inspect format declared stable --- docs/html/reference/inspect-report.md | 7 ++----- src/pip/_internal/commands/inspect.py | 7 +------ tests/functional/test_inspect.py | 2 +- 3 files changed, 4 insertions(+), 12 deletions(-) diff --git a/docs/html/reference/inspect-report.md b/docs/html/reference/inspect-report.md index 50d83365ccf..e2c7301fdab 100644 --- a/docs/html/reference/inspect-report.md +++ b/docs/html/reference/inspect-report.md @@ -10,10 +10,7 @@ environment, including installed distributions. The report is a JSON object with the following properties: -- `version`: the string `0`, denoting that the inspect command is an experimental - feature. This value will change to `1`, when the feature is deemed stable after - gathering user feedback (likely in pip 22.3 or 23.0). Backward incompatible changes - may be introduced in version `1` without notice. After that, it will change only if +- `version`: the string `1`. It will change only if and when backward incompatible changes are introduced, such as removing mandatory fields or changing the semantics or data type of existing fields. 
The introduction of backward incompatible changes will follow the usual pip processes such as the @@ -72,7 +69,7 @@ this (metadata abriged for brevity): ```json { - "version": "0", + "version": "1", "pip_version": "22.2.dev0", "installed": [ { diff --git a/src/pip/_internal/commands/inspect.py b/src/pip/_internal/commands/inspect.py index a4e3599306e..27c8fa3d5b6 100644 --- a/src/pip/_internal/commands/inspect.py +++ b/src/pip/_internal/commands/inspect.py @@ -46,11 +46,6 @@ def add_options(self) -> None: self.parser.insert_option_group(0, self.cmd_opts) def run(self, options: Values, args: List[str]) -> int: - logger.warning( - "pip inspect is currently an experimental command. " - "The output format may change in a future release without prior warning." - ) - cmdoptions.check_list_path_option(options) dists = get_environment(options.path).iter_installed_distributions( local_only=options.local, @@ -58,7 +53,7 @@ def run(self, options: Values, args: List[str]) -> int: skip=set(stdlib_pkgs), ) output = { - "version": "0", + "version": "1", "pip_version": __version__, "installed": [self._dist_to_dict(dist) for dist in dists], "environment": default_environment(), diff --git a/tests/functional/test_inspect.py b/tests/functional/test_inspect.py index 464bdbaa11e..18abf1a46f6 100644 --- a/tests/functional/test_inspect.py +++ b/tests/functional/test_inspect.py @@ -28,7 +28,7 @@ def test_inspect_basic(simple_script: PipTestEnvironment) -> None: """ Test default behavior of inspect command. 
""" - result = simple_script.pip("inspect", allow_stderr_warning=True) + result = simple_script.pip("inspect") report = json.loads(result.stdout) installed = report["installed"] assert len(installed) == 4 From 126e6f67a55220cc28596d25326e4fdaccbd120b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 28 Jan 2023 13:48:33 +0100 Subject: [PATCH 263/730] Install report format declared stable --- docs/html/reference/installation-report.md | 7 ++----- src/pip/_internal/commands/install.py | 6 ------ src/pip/_internal/models/installation_report.py | 2 +- tests/functional/test_install_report.py | 11 ----------- 4 files changed, 3 insertions(+), 23 deletions(-) diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md index de67528cfd7..e0f2413186b 100644 --- a/docs/html/reference/installation-report.md +++ b/docs/html/reference/installation-report.md @@ -23,10 +23,7 @@ When considering use cases, please bear in mind that The report is a JSON object with the following properties: -- `version`: the string `0`, denoting that the installation report is an experimental - feature. This value will change to `1`, when the feature is deemed stable after - gathering user feedback (likely in pip 22.3 or 23.0). Backward incompatible changes - may be introduced in version `1` without notice. After that, it will change only if +- `version`: the string `1`. It will change only if and when backward incompatible changes are introduced, such as removing mandatory fields or changing the semantics or data type of existing fields. 
The introduction of backward incompatible changes will follow the usual pip processes such as the @@ -92,7 +89,7 @@ will produce an output similar to this (metadata abriged for brevity): ```json { - "version": "0", + "version": "1", "pip_version": "22.2", "install": [ { diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index accceeaec39..cecaac2bc5b 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -417,12 +417,6 @@ def run(self, options: Values, args: List[str]) -> int: ) if options.json_report_file: - logger.warning( - "--report is currently an experimental option. " - "The output format may change in a future release " - "without prior warning." - ) - report = InstallationReport(requirement_set.requirements_to_install) if options.json_report_file == "-": print_json(data=report.to_dict()) diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py index 965f0952371..b54afb109b4 100644 --- a/src/pip/_internal/models/installation_report.py +++ b/src/pip/_internal/models/installation_report.py @@ -38,7 +38,7 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: def to_dict(self) -> Dict[str, Any]: return { - "version": "0", + "version": "1", "pip_version": __version__, "install": [ self._install_req_to_dict(ireq) for ireq in self._install_requirements diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index b61fd89c69f..70f71e22335 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -26,7 +26,6 @@ def test_install_report_basic( str(shared_data.root / "packages/"), "--report", str(report_path), - allow_stderr_warning=True, ) report = json.loads(report_path.read_text()) assert "install" in report @@ -59,7 +58,6 @@ def test_install_report_dep( str(shared_data.root / "packages/"), "--report", str(report_path), - 
allow_stderr_warning=True, ) report = json.loads(report_path.read_text()) assert len(report["install"]) == 2 @@ -78,7 +76,6 @@ def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> Non "Paste[openid]==1.7.5.1", "--report", str(report_path), - allow_stderr_warning=True, ) report = json.loads(report_path.read_text()) assert len(report["install"]) == 2 @@ -114,7 +111,6 @@ def test_install_report_vcs_and_wheel_cache( str(cache_dir), "--report", str(report_path), - allow_stderr_warning=True, ) report = json.loads(report_path.read_text()) assert len(report["install"]) == 1 @@ -142,7 +138,6 @@ def test_install_report_vcs_and_wheel_cache( str(cache_dir), "--report", str(report_path), - allow_stderr_warning=True, ) assert "Using cached pip_test_package" in result.stdout report = json.loads(report_path.read_text()) @@ -176,7 +171,6 @@ def test_install_report_vcs_editable( "#egg=pip-test-package", "--report", str(report_path), - allow_stderr_warning=True, ) report = json.loads(report_path.read_text()) assert len(report["install"]) == 1 @@ -203,11 +197,6 @@ def test_install_report_to_stdout( str(shared_data.root / "packages/"), "--report", "-", - allow_stderr_warning=True, - ) - assert result.stderr == ( - "WARNING: --report is currently an experimental option. 
" - "The output format may change in a future release without prior warning.\n" ) report = json.loads(result.stdout) assert "install" in report From 46ec9368fb406b0d51e1d39dcd9fac244f49b009 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 28 Jan 2023 14:13:08 +0100 Subject: [PATCH 264/730] Add news --- news/11757.feature.rst | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 news/11757.feature.rst diff --git a/news/11757.feature.rst b/news/11757.feature.rst new file mode 100644 index 00000000000..594fb627156 --- /dev/null +++ b/news/11757.feature.rst @@ -0,0 +1,2 @@ +The inspect and installation report formats are now declared stabled, and their version +has been bumped from ``0`` to ``1``. From 85e128b2d91db1ec90df2be0503e1d061c3e2de7 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 28 Jan 2023 20:41:07 +0000 Subject: [PATCH 265/730] Upgrade platformdirs to 2.6.2 --- news/platformdirs.vendor.rst | 2 +- src/pip/_vendor/platformdirs/__init__.py | 12 +++++++----- src/pip/_vendor/platformdirs/unix.py | 4 ++-- src/pip/_vendor/platformdirs/version.py | 8 ++++---- src/pip/_vendor/vendor.txt | 2 +- 5 files changed, 15 insertions(+), 13 deletions(-) diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst index 04ee05723b1..5c15bfbd9d5 100644 --- a/news/platformdirs.vendor.rst +++ b/news/platformdirs.vendor.rst @@ -1 +1 @@ -Upgrade platformdirs to 2.5.3 +Upgrade platformdirs to 2.6.2 diff --git a/src/pip/_vendor/platformdirs/__init__.py b/src/pip/_vendor/platformdirs/__init__.py index 9d513dcf177..82d907163c7 100644 --- a/src/pip/_vendor/platformdirs/__init__.py +++ b/src/pip/_vendor/platformdirs/__init__.py @@ -7,13 +7,15 @@ import os import sys from pathlib import Path -from typing import TYPE_CHECKING -if TYPE_CHECKING: - from pip._vendor.typing_extensions import Literal # pragma: no cover +if sys.version_info >= (3, 8): # pragma: no cover (py38+) + from typing import Literal +else: # pragma: no cover (py38+) + 
from pip._vendor.typing_extensions import Literal from .api import PlatformDirsABC -from .version import __version__, __version_info__ +from .version import __version__ +from .version import __version_tuple__ as __version_info__ def _set_platform_dir_class() -> type[PlatformDirsABC]: @@ -26,7 +28,7 @@ def _set_platform_dir_class() -> type[PlatformDirsABC]: if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system": - if os.getenv("SHELL") is not None: + if os.getenv("SHELL") or os.getenv("PREFIX"): return Result from pip._vendor.platformdirs.android import _android_folder diff --git a/src/pip/_vendor/platformdirs/unix.py b/src/pip/_vendor/platformdirs/unix.py index 2fbd4d4f367..9aca5a03054 100644 --- a/src/pip/_vendor/platformdirs/unix.py +++ b/src/pip/_vendor/platformdirs/unix.py @@ -107,9 +107,9 @@ def user_state_dir(self) -> str: @property def user_log_dir(self) -> str: """ - :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``log`` in it + :return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it """ - path = self.user_cache_dir + path = self.user_state_dir if self.opinion: path = os.path.join(path, "log") return path diff --git a/src/pip/_vendor/platformdirs/version.py b/src/pip/_vendor/platformdirs/version.py index 6361dbf9c07..9f6eb98e8f0 100644 --- a/src/pip/_vendor/platformdirs/version.py +++ b/src/pip/_vendor/platformdirs/version.py @@ -1,4 +1,4 @@ -"""Version information""" - -__version__ = "2.5.3" -__version_info__ = (2, 5, 3) +# file generated by setuptools_scm +# don't change, don't track in version control +__version__ = version = '2.6.2' +__version_tuple__ = version_tuple = (2, 6, 2) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 26afe72d198..282f627a6a5 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -4,7 +4,7 @@ distlib==0.3.6 distro==1.8.0 msgpack==1.0.4 packaging==21.3 
-platformdirs==2.5.3 +platformdirs==2.6.2 pyparsing==3.0.9 pyproject-hooks==1.0.0 requests==2.28.1 From fb17ee1e914e6601ae96399443e3dc4a552f9d0c Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 28 Jan 2023 20:41:19 +0000 Subject: [PATCH 266/730] Upgrade requests to 2.28.2 --- news/requests.vendor.rst | 1 + src/pip/_vendor/requests/__init__.py | 4 ++-- src/pip/_vendor/requests/__version__.py | 6 +++--- src/pip/_vendor/requests/models.py | 2 +- src/pip/_vendor/vendor.txt | 2 +- 5 files changed, 8 insertions(+), 7 deletions(-) create mode 100644 news/requests.vendor.rst diff --git a/news/requests.vendor.rst b/news/requests.vendor.rst new file mode 100644 index 00000000000..9f91985c70c --- /dev/null +++ b/news/requests.vendor.rst @@ -0,0 +1 @@ +Upgrade requests to 2.28.2 diff --git a/src/pip/_vendor/requests/__init__.py b/src/pip/_vendor/requests/__init__.py index 9e97059d1db..a4776248038 100644 --- a/src/pip/_vendor/requests/__init__.py +++ b/src/pip/_vendor/requests/__init__.py @@ -77,8 +77,8 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver elif charset_normalizer_version: major, minor, patch = charset_normalizer_version.split(".")[:3] major, minor, patch = int(major), int(minor), int(patch) - # charset_normalizer >= 2.0.0 < 3.0.0 - assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0) + # charset_normalizer >= 2.0.0 < 4.0.0 + assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) else: raise Exception("You need either charset_normalizer or chardet installed") diff --git a/src/pip/_vendor/requests/__version__.py b/src/pip/_vendor/requests/__version__.py index e725ada6550..69be3dec741 100644 --- a/src/pip/_vendor/requests/__version__.py +++ b/src/pip/_vendor/requests/__version__.py @@ -5,10 +5,10 @@ __title__ = "requests" __description__ = "Python HTTP for Humans." 
__url__ = "https://requests.readthedocs.io" -__version__ = "2.28.1" -__build__ = 0x022801 +__version__ = "2.28.2" +__build__ = 0x022802 __author__ = "Kenneth Reitz" __author_email__ = "me@kennethreitz.org" __license__ = "Apache 2.0" -__copyright__ = "Copyright 2022 Kenneth Reitz" +__copyright__ = "Copyright Kenneth Reitz" __cake__ = "\u2728 \U0001f370 \u2728" diff --git a/src/pip/_vendor/requests/models.py b/src/pip/_vendor/requests/models.py index b45e8103258..76e6f199c00 100644 --- a/src/pip/_vendor/requests/models.py +++ b/src/pip/_vendor/requests/models.py @@ -438,7 +438,7 @@ def prepare_url(self, url, params): if not scheme: raise MissingSchema( f"Invalid URL {url!r}: No scheme supplied. " - f"Perhaps you meant http://{url}?" + f"Perhaps you meant https://{url}?" ) if not host: diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 282f627a6a5..f466ed0712d 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -7,7 +7,7 @@ packaging==21.3 platformdirs==2.6.2 pyparsing==3.0.9 pyproject-hooks==1.0.0 -requests==2.28.1 +requests==2.28.2 certifi==2022.09.24 chardet==5.0.0 idna==3.4 From 1c110bede610c211f284b09058cbd72b1dd9ed2c Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 28 Jan 2023 20:41:31 +0000 Subject: [PATCH 267/730] Upgrade certifi to 2022.12.7 --- news/certifi.vendor.rst | 1 + src/pip/_vendor/certifi/__init__.py | 2 +- src/pip/_vendor/certifi/cacert.pem | 181 ---------------------------- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 3 insertions(+), 183 deletions(-) create mode 100644 news/certifi.vendor.rst diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst new file mode 100644 index 00000000000..f02ba9f4187 --- /dev/null +++ b/news/certifi.vendor.rst @@ -0,0 +1 @@ +Upgrade certifi to 2022.12.7 diff --git a/src/pip/_vendor/certifi/__init__.py b/src/pip/_vendor/certifi/__init__.py index af4bcc1510f..a3546f12555 100644 --- a/src/pip/_vendor/certifi/__init__.py +++ 
b/src/pip/_vendor/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2022.09.24" +__version__ = "2022.12.07" diff --git a/src/pip/_vendor/certifi/cacert.pem b/src/pip/_vendor/certifi/cacert.pem index 40051551137..df9e4e3c755 100644 --- a/src/pip/_vendor/certifi/cacert.pem +++ b/src/pip/_vendor/certifi/cacert.pem @@ -636,37 +636,6 @@ BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB ZQ== -----END CERTIFICATE----- -# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. -# Label: "Network Solutions Certificate Authority" -# Serial: 116697915152937497490437556386812487904 -# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e -# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce -# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c ------BEGIN CERTIFICATE----- -MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi -MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu -MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp -dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV -UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO -ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz -c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP -OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl -mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF -BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 -qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw -gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB -BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu 
-bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp -dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 -6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ -h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH -/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv -wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN -pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey ------END CERTIFICATE----- - # Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited # Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited # Label: "COMODO ECC Certification Authority" @@ -2204,46 +2173,6 @@ KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg xwy8p2Fp8fc74SrL+SvzZpA3 -----END CERTIFICATE----- -# Issuer: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Subject: CN=Staat der Nederlanden EV Root CA O=Staat der Nederlanden -# Label: "Staat der Nederlanden EV Root CA" -# Serial: 10000013 -# MD5 Fingerprint: fc:06:af:7b:e8:1a:f1:9a:b4:e8:d2:70:1f:c0:f5:ba -# SHA1 Fingerprint: 76:e2:7e:c1:4f:db:82:c1:c0:a6:75:b5:05:be:3d:29:b4:ed:db:bb -# SHA256 Fingerprint: 4d:24:91:41:4c:fe:95:67:46:ec:4c:ef:a6:cf:6f:72:e2:8a:13:29:43:2f:9d:8a:90:7a:c4:cb:5d:ad:c1:5a ------BEGIN CERTIFICATE----- -MIIFcDCCA1igAwIBAgIEAJiWjTANBgkqhkiG9w0BAQsFADBYMQswCQYDVQQGEwJO -TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSkwJwYDVQQDDCBTdGFh -dCBkZXIgTmVkZXJsYW5kZW4gRVYgUm9vdCBDQTAeFw0xMDEyMDgxMTE5MjlaFw0y -MjEyMDgxMTEwMjhaMFgxCzAJBgNVBAYTAk5MMR4wHAYDVQQKDBVTdGFhdCBkZXIg -TmVkZXJsYW5kZW4xKTAnBgNVBAMMIFN0YWF0IGRlciBOZWRlcmxhbmRlbiBFViBS -b290IENBMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA48d+ifkkSzrS -M4M1LGns3Amk41GoJSt5uAg94JG6hIXGhaTK5skuU6TJJB79VWZxXSzFYGgEt9nC -UiY4iKTWO0Cmws0/zZiTs1QUWJZV1VD+hq2kY39ch/aO5ieSZxeSAgMs3NZmdO3d -Z//BYY1jTw+bbRcwJu+r0h8QoPnFfxZpgQNH7R5ojXKhTbImxrpsX23Wr9GxE46p -rfNeaXUmGD5BKyF/7otdBwadQ8QpCiv8Kj6GyzyDOvnJDdrFmeK8eEEzduG/L13l 
-pJhQDBXd4Pqcfzho0LKmeqfRMb1+ilgnQ7O6M5HTp5gVXJrm0w912fxBmJc+qiXb -j5IusHsMX/FjqTf5m3VpTCgmJdrV8hJwRVXj33NeN/UhbJCONVrJ0yPr08C+eKxC -KFhmpUZtcALXEPlLVPxdhkqHz3/KRawRWrUgUY0viEeXOcDPusBCAUCZSCELa6fS -/ZbV0b5GnUngC6agIk440ME8MLxwjyx1zNDFjFE7PZQIZCZhfbnDZY8UnCHQqv0X -cgOPvZuM5l5Tnrmd74K74bzickFbIZTTRTeU0d8JOV3nI6qaHcptqAqGhYqCvkIH -1vI4gnPah1vlPNOePqc7nvQDs/nxfRN0Av+7oeX6AHkcpmZBiFxgV6YuCcS6/ZrP -px9Aw7vMWgpVSzs4dlG4Y4uElBbmVvMCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB -/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFP6rAJCYniT8qcwaivsnuL8wbqg7 -MA0GCSqGSIb3DQEBCwUAA4ICAQDPdyxuVr5Os7aEAJSrR8kN0nbHhp8dB9O2tLsI -eK9p0gtJ3jPFrK3CiAJ9Brc1AsFgyb/E6JTe1NOpEyVa/m6irn0F3H3zbPB+po3u -2dfOWBfoqSmuc0iH55vKbimhZF8ZE/euBhD/UcabTVUlT5OZEAFTdfETzsemQUHS -v4ilf0X8rLiltTMMgsT7B/Zq5SWEXwbKwYY5EdtYzXc7LMJMD16a4/CrPmEbUCTC -wPTxGfARKbalGAKb12NMcIxHowNDXLldRqANb/9Zjr7dn3LDWyvfjFvO5QxGbJKy -CqNMVEIYFRIYvdr8unRu/8G2oGTYqV9Vrp9canaW2HNnh/tNf1zuacpzEPuKqf2e -vTY4SUmH9A4U8OmHuD+nT3pajnnUk+S7aFKErGzp85hwVXIy+TSrK0m1zSBi5Dp6 -Z2Orltxtrpfs/J92VoguZs9btsmksNcFuuEnL5O7Jiqik7Ab846+HUCjuTaPPoIa -Gl6I6lD4WeKDRikL40Rc4ZW2aZCaFG+XroHPaO+Zmr615+F/+PoTRxZMzG0IQOeL -eG9QgkRQP2YGiqtDhFZKDyAthg710tvSeopLzaXoTvFeJiUBWSOgftL2fiFX1ye8 -FVdMpEbB4IMeDExNH08GGeL5qPQ6gqGyeUN51q1veieQA6TqJIc/2b3Z6fJfUEkc -7uzXLg== ------END CERTIFICATE----- - # Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust # Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust # Label: "IdenTrust Commercial Root CA 1" @@ -2851,116 +2780,6 @@ T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== -----END CERTIFICATE----- -# Issuer: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-1 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-1" -# Serial: 15752444095811006489 -# MD5 Fingerprint: 6e:85:f1:dc:1a:00:d3:22:d5:b2:b2:ac:6b:37:05:45 -# SHA1 Fingerprint: ff:bd:cd:e7:82:c8:43:5e:3c:6f:26:86:5c:ca:a8:3a:45:5b:c3:0a -# SHA256 Fingerprint: d4:0e:9c:86:cd:8f:e4:68:c1:77:69:59:f4:9e:a7:74:fa:54:86:84:b6:c4:06:f3:90:92:61:f4:dc:e2:57:5c ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIJANqb7HHzA7AZMA0GCSqGSIb3DQEBCwUAMIGkMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRydXN0Q29y -IFJvb3RDZXJ0IENBLTEwHhcNMTYwMjA0MTIzMjE2WhcNMjkxMjMxMTcyMzE2WjCB -pDELMAkGA1UEBhMCUEExDzANBgNVBAgMBlBhbmFtYTEUMBIGA1UEBwwLUGFuYW1h -IENpdHkxJDAiBgNVBAoMG1RydXN0Q29yIFN5c3RlbXMgUy4gZGUgUi5MLjEnMCUG -A1UECwweVHJ1c3RDb3IgQ2VydGlmaWNhdGUgQXV0aG9yaXR5MR8wHQYDVQQDDBZU -cnVzdENvciBSb290Q2VydCBDQS0xMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB -CgKCAQEAv463leLCJhJrMxnHQFgKq1mqjQCj/IDHUHuO1CAmujIS2CNUSSUQIpid -RtLByZ5OGy4sDjjzGiVoHKZaBeYei0i/mJZ0PmnK6bV4pQa81QBeCQryJ3pS/C3V -seq0iWEk8xoT26nPUu0MJLq5nux+AHT6k61sKZKuUbS701e/s/OojZz0JEsq1pme -9J7+wH5COucLlVPat2gOkEz7cD+PSiyU8ybdY2mplNgQTsVHCJCZGxdNuWxu72CV -EY4hgLW9oHPY0LJ3xEXqWib7ZnZ2+AYfYW0PVcWDtxBWcgYHpfOxGgMFZA6dWorW -hnAbJN7+KIor0Gqw/Hqi3LJ5DotlDwIDAQABo2MwYTAdBgNVHQ4EFgQU7mtJPHo/ -DeOxCbeKyKsZn3MzUOcwHwYDVR0jBBgwFoAU7mtJPHo/DeOxCbeKyKsZn3MzUOcw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQAD -ggEBACUY1JGPE+6PHh0RU9otRCkZoB5rMZ5NDp6tPVxBb5UrJKF5mDo4Nvu7Zp5I -/5CQ7z3UuJu0h3U/IJvOcs+hVcFNZKIZBqEHMwwLKeXx6quj7LUKdJDHfXLy11yf -ke+Ri7fc7Waiz45mO7yfOgLgJ90WmMCV1Aqk5IGadZQ1nJBfiDcGrVmVCrDRZ9MZ -yonnMlo2HD6CqFqTvsbQZJG2z9m2GM/bftJlo6bEjhcxwft+dtvTheNYsnd6djts -L1Ac59v2Z3kf9YKVmgenFK+P3CghZwnS1k1aHBkcjndcw5QkPTJrS37UeJSDvjdN -zl/HHk484IkzlQsPpTLWPFp5LBk= ------END CERTIFICATE----- - -# Issuer: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. 
OU=TrustCor Certificate Authority -# Subject: CN=TrustCor RootCert CA-2 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Label: "TrustCor RootCert CA-2" -# Serial: 2711694510199101698 -# MD5 Fingerprint: a2:e1:f8:18:0b:ba:45:d5:c7:41:2a:bb:37:52:45:64 -# SHA1 Fingerprint: b8:be:6d:cb:56:f1:55:b9:63:d4:12:ca:4e:06:34:c7:94:b2:1c:c0 -# SHA256 Fingerprint: 07:53:e9:40:37:8c:1b:d5:e3:83:6e:39:5d:ae:a5:cb:83:9e:50:46:f1:bd:0e:ae:19:51:cf:10:fe:c7:c9:65 ------BEGIN CERTIFICATE----- -MIIGLzCCBBegAwIBAgIIJaHfyjPLWQIwDQYJKoZIhvcNAQELBQAwgaQxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEfMB0GA1UEAwwWVHJ1c3RDb3Ig -Um9vdENlcnQgQ0EtMjAeFw0xNjAyMDQxMjMyMjNaFw0zNDEyMzExNzI2MzlaMIGk -MQswCQYDVQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEg -Q2l0eTEkMCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYD -VQQLDB5UcnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxHzAdBgNVBAMMFlRy -dXN0Q29yIFJvb3RDZXJ0IENBLTIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCnIG7CKqJiJJWQdsg4foDSq8GbZQWU9MEKENUCrO2fk8eHyLAnK0IMPQo+ -QVqedd2NyuCb7GgypGmSaIwLgQ5WoD4a3SwlFIIvl9NkRvRUqdw6VC0xK5mC8tkq -1+9xALgxpL56JAfDQiDyitSSBBtlVkxs1Pu2YVpHI7TYabS3OtB0PAx1oYxOdqHp -2yqlO/rOsP9+aij9JxzIsekp8VduZLTQwRVtDr4uDkbIXvRR/u8OYzo7cbrPb1nK -DOObXUm4TOJXsZiKQlecdu/vvdFoqNL0Cbt3Nb4lggjEFixEIFapRBF37120Hape -az6LMvYHL1cEksr1/p3C6eizjkxLAjHZ5DxIgif3GIJ2SDpxsROhOdUuxTTCHWKF -3wP+TfSvPd9cW436cOGlfifHhi5qjxLGhF5DUVCcGZt45vz27Ud+ez1m7xMTiF88 -oWP7+ayHNZ/zgp6kPwqcMWmLmaSISo5uZk3vFsQPeSghYA2FFn3XVDjxklb9tTNM -g9zXEJ9L/cb4Qr26fHMC4P99zVvh1Kxhe1fVSntb1IVYJ12/+CtgrKAmrhQhJ8Z3 -mjOAPF5GP/fDsaOGM8boXg25NSyqRsGFAnWAoOsk+xWq5Gd/bnc/9ASKL3x74xdh -8N0JqSDIvgmk0H5Ew7IwSjiqqewYmgeCK9u4nBit2uBGF6zPXQIDAQABo2MwYTAd -BgNVHQ4EFgQU2f4hQG6UnrybPZx9mCAZ5YwwYrIwHwYDVR0jBBgwFoAU2f4hQG6U -nrybPZx9mCAZ5YwwYrIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYw 
-DQYJKoZIhvcNAQELBQADggIBAJ5Fngw7tu/hOsh80QA9z+LqBrWyOrsGS2h60COX -dKcs8AjYeVrXWoSK2BKaG9l9XE1wxaX5q+WjiYndAfrs3fnpkpfbsEZC89NiqpX+ -MWcUaViQCqoL7jcjx1BRtPV+nuN79+TMQjItSQzL/0kMmx40/W5ulop5A7Zv2wnL -/V9lFDfhOPXzYRZY5LVtDQsEGz9QLX+zx3oaFoBg+Iof6Rsqxvm6ARppv9JYx1RX -CI/hOWB3S6xZhBqI8d3LT3jX5+EzLfzuQfogsL7L9ziUwOHQhQ+77Sxzq+3+knYa -ZH9bDTMJBzN7Bj8RpFxwPIXAz+OQqIN3+tvmxYxoZxBnpVIt8MSZj3+/0WvitUfW -2dCFmU2Umw9Lje4AWkcdEQOsQRivh7dvDDqPys/cA8GiCcjl/YBeyGBCARsaU1q7 -N6a3vLqE6R5sGtRk2tRD/pOLS/IseRYQ1JMLiI+h2IYURpFHmygk71dSTlxCnKr3 -Sewn6EAes6aJInKc9Q0ztFijMDvd1GpUk74aTfOTlPf8hAs/hCBcNANExdqtvArB -As8e5ZTZ845b2EzwnexhF7sUMlQMAimTHpKG9n/v55IFDlndmQguLvqcAFLTxWYp -5KeXRKQOKIETNcX2b2TmQcTVL8w0RSXPQQCWPUouwpaYT05KnJe32x+SMsj/D1Fu -1uwJ ------END CERTIFICATE----- - -# Issuer: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Subject: CN=TrustCor ECA-1 O=TrustCor Systems S. de R.L. OU=TrustCor Certificate Authority -# Label: "TrustCor ECA-1" -# Serial: 9548242946988625984 -# MD5 Fingerprint: 27:92:23:1d:0a:f5:40:7c:e9:e6:6b:9d:d8:f5:e7:6c -# SHA1 Fingerprint: 58:d1:df:95:95:67:6b:63:c0:f0:5b:1c:17:4d:8b:84:0b:c8:78:bd -# SHA256 Fingerprint: 5a:88:5d:b1:9c:01:d9:12:c5:75:93:88:93:8c:af:bb:df:03:1a:b2:d4:8e:91:ee:15:58:9b:42:97:1d:03:9c ------BEGIN CERTIFICATE----- -MIIEIDCCAwigAwIBAgIJAISCLF8cYtBAMA0GCSqGSIb3DQEBCwUAMIGcMQswCQYD -VQQGEwJQQTEPMA0GA1UECAwGUGFuYW1hMRQwEgYDVQQHDAtQYW5hbWEgQ2l0eTEk -MCIGA1UECgwbVHJ1c3RDb3IgU3lzdGVtcyBTLiBkZSBSLkwuMScwJQYDVQQLDB5U -cnVzdENvciBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxFzAVBgNVBAMMDlRydXN0Q29y -IEVDQS0xMB4XDTE2MDIwNDEyMzIzM1oXDTI5MTIzMTE3MjgwN1owgZwxCzAJBgNV -BAYTAlBBMQ8wDQYDVQQIDAZQYW5hbWExFDASBgNVBAcMC1BhbmFtYSBDaXR5MSQw -IgYDVQQKDBtUcnVzdENvciBTeXN0ZW1zIFMuIGRlIFIuTC4xJzAlBgNVBAsMHlRy -dXN0Q29yIENlcnRpZmljYXRlIEF1dGhvcml0eTEXMBUGA1UEAwwOVHJ1c3RDb3Ig -RUNBLTEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPj+ARtZ+odnbb -3w9U73NjKYKtR8aja+3+XzP4Q1HpGjORMRegdMTUpwHmspI+ap3tDvl0mEDTPwOA 
-BoJA6LHip1GnHYMma6ve+heRK9jGrB6xnhkB1Zem6g23xFUfJ3zSCNV2HykVh0A5 -3ThFEXXQmqc04L/NyFIduUd+Dbi7xgz2c1cWWn5DkR9VOsZtRASqnKmcp0yJF4Ou -owReUoCLHhIlERnXDH19MURB6tuvsBzvgdAsxZohmz3tQjtQJvLsznFhBmIhVE5/ -wZ0+fyCMgMsq2JdiyIMzkX2woloPV+g7zPIlstR8L+xNxqE6FXrntl019fZISjZF -ZtS6mFjBAgMBAAGjYzBhMB0GA1UdDgQWBBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAf -BgNVHSMEGDAWgBREnkj1zG1I1KBLf/5ZJC+Dl5mahjAPBgNVHRMBAf8EBTADAQH/ -MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAQEABT41XBVwm8nHc2Fv -civUwo/yQ10CzsSUuZQRg2dd4mdsdXa/uwyqNsatR5Nj3B5+1t4u/ukZMjgDfxT2 -AHMsWbEhBuH7rBiVDKP/mZb3Kyeb1STMHd3BOuCYRLDE5D53sXOpZCz2HAF8P11F -hcCF5yWPldwX8zyfGm6wyuMdKulMY/okYWLW2n62HGz1Ah3UKt1VkOsqEUc8Ll50 -soIipX1TH0XsJ5F95yIW6MBoNtjG8U+ARDL54dHRHareqKucBK+tIA5kmE2la8BI -WJZpTdwHjFGTot+fDz2LYLSCjaoITmJF4PkL0uDgPFveXHEnJcLmA4GLEFPjx1Wi -tJ/X5g== ------END CERTIFICATE----- - # Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation # Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation # Label: "SSL.com Root Certification Authority RSA" diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index f466ed0712d..cd42578f4d3 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -8,7 +8,7 @@ platformdirs==2.6.2 pyparsing==3.0.9 pyproject-hooks==1.0.0 requests==2.28.2 - certifi==2022.09.24 + certifi==2022.12.7 chardet==5.0.0 idna==3.4 urllib3==1.26.12 From be20a75c108b5db5ca0dc097e6f46a3ebccfd48a Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 28 Jan 2023 20:41:43 +0000 Subject: [PATCH 268/730] Upgrade chardet to 5.1.0 --- news/chardet.vendor.rst | 1 + src/pip/_vendor/chardet.pyi | 1 - src/pip/_vendor/chardet/__init__.py | 36 +++- src/pip/_vendor/chardet/big5prober.py | 6 +- src/pip/_vendor/chardet/chardistribution.py | 54 +++--- src/pip/_vendor/chardet/charsetgroupprober.py | 31 ++-- src/pip/_vendor/chardet/charsetprober.py | 35 ++-- src/pip/_vendor/chardet/cli/chardetect.py | 42 ++++- src/pip/_vendor/chardet/codingstatemachine.py | 16 +- 
.../_vendor/chardet/codingstatemachinedict.py | 19 ++ src/pip/_vendor/chardet/cp949prober.py | 6 +- src/pip/_vendor/chardet/enums.py | 9 +- src/pip/_vendor/chardet/escprober.py | 26 +-- src/pip/_vendor/chardet/escsm.py | 9 +- src/pip/_vendor/chardet/eucjpprober.py | 19 +- src/pip/_vendor/chardet/euckrprober.py | 6 +- src/pip/_vendor/chardet/euctwprober.py | 6 +- src/pip/_vendor/chardet/gb2312prober.py | 6 +- src/pip/_vendor/chardet/hebrewprober.py | 56 +++--- src/pip/_vendor/chardet/johabprober.py | 6 +- src/pip/_vendor/chardet/jpcntx.py | 31 ++-- src/pip/_vendor/chardet/latin1prober.py | 18 +- src/pip/_vendor/chardet/macromanprober.py | 162 ++++++++++++++++++ src/pip/_vendor/chardet/mbcharsetprober.py | 32 ++-- src/pip/_vendor/chardet/mbcsgroupprober.py | 3 +- src/pip/_vendor/chardet/mbcssm.py | 23 +-- src/pip/_vendor/chardet/metadata/languages.py | 37 ++-- src/pip/_vendor/chardet/py.typed | 0 src/pip/_vendor/chardet/resultdict.py | 16 ++ src/pip/_vendor/chardet/sbcharsetprober.py | 52 +++--- src/pip/_vendor/chardet/sbcsgroupprober.py | 2 +- src/pip/_vendor/chardet/sjisprober.py | 19 +- src/pip/_vendor/chardet/universaldetector.py | 68 ++++++-- src/pip/_vendor/chardet/utf1632prober.py | 32 ++-- src/pip/_vendor/chardet/utf8prober.py | 16 +- src/pip/_vendor/chardet/version.py | 4 +- src/pip/_vendor/vendor.txt | 2 +- 37 files changed, 620 insertions(+), 287 deletions(-) create mode 100644 news/chardet.vendor.rst delete mode 100644 src/pip/_vendor/chardet.pyi create mode 100644 src/pip/_vendor/chardet/codingstatemachinedict.py create mode 100644 src/pip/_vendor/chardet/macromanprober.py create mode 100644 src/pip/_vendor/chardet/py.typed create mode 100644 src/pip/_vendor/chardet/resultdict.py diff --git a/news/chardet.vendor.rst b/news/chardet.vendor.rst new file mode 100644 index 00000000000..5aceb6c5e6f --- /dev/null +++ b/news/chardet.vendor.rst @@ -0,0 +1 @@ +Upgrade chardet to 5.1.0 diff --git a/src/pip/_vendor/chardet.pyi b/src/pip/_vendor/chardet.pyi deleted 
file mode 100644 index 29e87e33157..00000000000 --- a/src/pip/_vendor/chardet.pyi +++ /dev/null @@ -1 +0,0 @@ -from chardet import * \ No newline at end of file diff --git a/src/pip/_vendor/chardet/__init__.py b/src/pip/_vendor/chardet/__init__.py index e91ad61822c..fe581623d89 100644 --- a/src/pip/_vendor/chardet/__init__.py +++ b/src/pip/_vendor/chardet/__init__.py @@ -15,19 +15,29 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import List, Union + +from .charsetgroupprober import CharSetGroupProber +from .charsetprober import CharSetProber from .enums import InputState +from .resultdict import ResultDict from .universaldetector import UniversalDetector from .version import VERSION, __version__ __all__ = ["UniversalDetector", "detect", "detect_all", "__version__", "VERSION"] -def detect(byte_str): +def detect( + byte_str: Union[bytes, bytearray], should_rename_legacy: bool = False +) -> ResultDict: """ Detect the encoding of the given byte string. :param byte_str: The byte sequence to examine. :type byte_str: ``bytes`` or ``bytearray`` + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? + :type should_rename_legacy: ``bool`` """ if not isinstance(byte_str, bytearray): if not isinstance(byte_str, bytes): @@ -35,12 +45,16 @@ def detect(byte_str): f"Expected object of type bytes or bytearray, got: {type(byte_str)}" ) byte_str = bytearray(byte_str) - detector = UniversalDetector() + detector = UniversalDetector(should_rename_legacy=should_rename_legacy) detector.feed(byte_str) return detector.close() -def detect_all(byte_str, ignore_threshold=False): +def detect_all( + byte_str: Union[bytes, bytearray], + ignore_threshold: bool = False, + should_rename_legacy: bool = False, +) -> List[ResultDict]: """ Detect all the possible encodings of the given byte string. 
@@ -50,6 +64,9 @@ def detect_all(byte_str, ignore_threshold=False): ``UniversalDetector.MINIMUM_THRESHOLD`` in results. :type ignore_threshold: ``bool`` + :param should_rename_legacy: Should we rename legacy encodings + to their more modern equivalents? + :type should_rename_legacy: ``bool`` """ if not isinstance(byte_str, bytearray): if not isinstance(byte_str, bytes): @@ -58,15 +75,15 @@ def detect_all(byte_str, ignore_threshold=False): ) byte_str = bytearray(byte_str) - detector = UniversalDetector() + detector = UniversalDetector(should_rename_legacy=should_rename_legacy) detector.feed(byte_str) detector.close() if detector.input_state == InputState.HIGH_BYTE: - results = [] - probers = [] + results: List[ResultDict] = [] + probers: List[CharSetProber] = [] for prober in detector.charset_probers: - if hasattr(prober, "probers"): + if isinstance(prober, CharSetGroupProber): probers.extend(p for p in prober.probers) else: probers.append(prober) @@ -80,6 +97,11 @@ def detect_all(byte_str, ignore_threshold=False): charset_name = detector.ISO_WIN_MAP.get( lower_charset_name, charset_name ) + # Rename legacy encodings with superset encodings if asked + if should_rename_legacy: + charset_name = detector.LEGACY_MAP.get( + charset_name.lower(), charset_name + ) results.append( { "encoding": charset_name, diff --git a/src/pip/_vendor/chardet/big5prober.py b/src/pip/_vendor/chardet/big5prober.py index e4dfa7aa02a..ef09c60e327 100644 --- a/src/pip/_vendor/chardet/big5prober.py +++ b/src/pip/_vendor/chardet/big5prober.py @@ -32,16 +32,16 @@ class Big5Prober(MultiByteCharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) self.distribution_analyzer = Big5DistributionAnalysis() self.reset() @property - def charset_name(self): + def charset_name(self) -> str: return "Big5" @property - def language(self): + def language(self) -> str: return "Chinese" diff --git 
a/src/pip/_vendor/chardet/chardistribution.py b/src/pip/_vendor/chardet/chardistribution.py index 27b4a293911..176cb996408 100644 --- a/src/pip/_vendor/chardet/chardistribution.py +++ b/src/pip/_vendor/chardet/chardistribution.py @@ -25,6 +25,8 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import Tuple, Union + from .big5freq import ( BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE, @@ -59,22 +61,22 @@ class CharDistributionAnalysis: SURE_NO = 0.01 MINIMUM_DATA_THRESHOLD = 3 - def __init__(self): + def __init__(self) -> None: # Mapping table to get frequency order from char order (get from # GetOrder()) - self._char_to_freq_order = tuple() - self._table_size = None # Size of above table + self._char_to_freq_order: Tuple[int, ...] = tuple() + self._table_size = 0 # Size of above table # This is a constant value which varies from language to language, # used in calculating confidence. See # http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html # for further detail. 
- self.typical_distribution_ratio = None - self._done = None - self._total_chars = None - self._freq_chars = None + self.typical_distribution_ratio = 0.0 + self._done = False + self._total_chars = 0 + self._freq_chars = 0 self.reset() - def reset(self): + def reset(self) -> None: """reset analyser, clear any state""" # If this flag is set to True, detection is done and conclusion has # been made @@ -83,7 +85,7 @@ def reset(self): # The number of characters whose frequency order is less than 512 self._freq_chars = 0 - def feed(self, char, char_len): + def feed(self, char: Union[bytes, bytearray], char_len: int) -> None: """feed a character with known length""" if char_len == 2: # we only care about 2-bytes character in our distribution analysis @@ -97,7 +99,7 @@ def feed(self, char, char_len): if 512 > self._char_to_freq_order[order]: self._freq_chars += 1 - def get_confidence(self): + def get_confidence(self) -> float: """return confidence based on existing data""" # if we didn't receive any character in our consideration range, # return negative answer @@ -114,12 +116,12 @@ def get_confidence(self): # normalize confidence (we don't want to be 100% sure) return self.SURE_YES - def got_enough_data(self): + def got_enough_data(self) -> bool: # It is not necessary to receive all data to draw conclusion. # For charset detection, certain amount of data is enough return self._total_chars > self.ENOUGH_DATA_THRESHOLD - def get_order(self, _): + def get_order(self, _: Union[bytes, bytearray]) -> int: # We do not handle characters based on the original encoding string, # but convert this encoding string to a number, here called order. 
# This allows multiple encodings of a language to share one frequency @@ -128,13 +130,13 @@ def get_order(self, _): class EUCTWDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): + def __init__(self) -> None: super().__init__() self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER self._table_size = EUCTW_TABLE_SIZE self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: # for euc-TW encoding, we are interested # first byte range: 0xc4 -- 0xfe # second byte range: 0xa1 -- 0xfe @@ -146,13 +148,13 @@ def get_order(self, byte_str): class EUCKRDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): + def __init__(self) -> None: super().__init__() self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER self._table_size = EUCKR_TABLE_SIZE self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: # for euc-KR encoding, we are interested # first byte range: 0xb0 -- 0xfe # second byte range: 0xa1 -- 0xfe @@ -164,13 +166,13 @@ def get_order(self, byte_str): class JOHABDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): + def __init__(self) -> None: super().__init__() self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER self._table_size = EUCKR_TABLE_SIZE self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: first_char = byte_str[0] if 0x88 <= first_char < 0xD4: code = first_char * 256 + byte_str[1] @@ -179,13 +181,13 @@ def get_order(self, byte_str): class GB2312DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): + def __init__(self) -> None: super().__init__() self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER self._table_size = GB2312_TABLE_SIZE self.typical_distribution_ratio = 
GB2312_TYPICAL_DISTRIBUTION_RATIO - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: # for GB2312 encoding, we are interested # first byte range: 0xb0 -- 0xfe # second byte range: 0xa1 -- 0xfe @@ -197,13 +199,13 @@ def get_order(self, byte_str): class Big5DistributionAnalysis(CharDistributionAnalysis): - def __init__(self): + def __init__(self) -> None: super().__init__() self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER self._table_size = BIG5_TABLE_SIZE self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: # for big5 encoding, we are interested # first byte range: 0xa4 -- 0xfe # second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe @@ -217,13 +219,13 @@ def get_order(self, byte_str): class SJISDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): + def __init__(self) -> None: super().__init__() self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER self._table_size = JIS_TABLE_SIZE self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: # for sjis encoding, we are interested # first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe # second byte range: 0x40 -- 0x7e, 0x81 -- oxfe @@ -242,13 +244,13 @@ def get_order(self, byte_str): class EUCJPDistributionAnalysis(CharDistributionAnalysis): - def __init__(self): + def __init__(self) -> None: super().__init__() self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER self._table_size = JIS_TABLE_SIZE self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> int: # for euc-JP encoding, we are interested # first byte range: 0xa0 -- 0xfe # second byte range: 0xa1 -- 0xfe diff --git a/src/pip/_vendor/chardet/charsetgroupprober.py 
b/src/pip/_vendor/chardet/charsetgroupprober.py index 778ff332bbd..6def56b4a75 100644 --- a/src/pip/_vendor/chardet/charsetgroupprober.py +++ b/src/pip/_vendor/chardet/charsetgroupprober.py @@ -25,29 +25,30 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import List, Optional, Union + from .charsetprober import CharSetProber -from .enums import ProbingState +from .enums import LanguageFilter, ProbingState class CharSetGroupProber(CharSetProber): - def __init__(self, lang_filter=None): + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: super().__init__(lang_filter=lang_filter) self._active_num = 0 - self.probers = [] - self._best_guess_prober = None + self.probers: List[CharSetProber] = [] + self._best_guess_prober: Optional[CharSetProber] = None - def reset(self): + def reset(self) -> None: super().reset() self._active_num = 0 for prober in self.probers: - if prober: - prober.reset() - prober.active = True - self._active_num += 1 + prober.reset() + prober.active = True + self._active_num += 1 self._best_guess_prober = None @property - def charset_name(self): + def charset_name(self) -> Optional[str]: if not self._best_guess_prober: self.get_confidence() if not self._best_guess_prober: @@ -55,17 +56,15 @@ def charset_name(self): return self._best_guess_prober.charset_name @property - def language(self): + def language(self) -> Optional[str]: if not self._best_guess_prober: self.get_confidence() if not self._best_guess_prober: return None return self._best_guess_prober.language - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: for prober in self.probers: - if not prober: - continue if not prober.active: continue state = prober.feed(byte_str) @@ -83,7 +82,7 @@ def feed(self, byte_str): return self.state return self.state - def get_confidence(self): + def get_confidence(self) -> float: state = self.state if state == ProbingState.FOUND_IT: 
return 0.99 @@ -92,8 +91,6 @@ def get_confidence(self): best_conf = 0.0 self._best_guess_prober = None for prober in self.probers: - if not prober: - continue if not prober.active: self.logger.debug("%s not active", prober.charset_name) continue diff --git a/src/pip/_vendor/chardet/charsetprober.py b/src/pip/_vendor/chardet/charsetprober.py index 9f1afd999c1..a103ca11356 100644 --- a/src/pip/_vendor/chardet/charsetprober.py +++ b/src/pip/_vendor/chardet/charsetprober.py @@ -28,8 +28,9 @@ import logging import re +from typing import Optional, Union -from .enums import ProbingState +from .enums import LanguageFilter, ProbingState INTERNATIONAL_WORDS_PATTERN = re.compile( b"[a-zA-Z]*[\x80-\xFF]+[a-zA-Z]*[^a-zA-Z\x80-\xFF]?" @@ -40,35 +41,40 @@ class CharSetProber: SHORTCUT_THRESHOLD = 0.95 - def __init__(self, lang_filter=None): - self._state = None + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: + self._state = ProbingState.DETECTING + self.active = True self.lang_filter = lang_filter self.logger = logging.getLogger(__name__) - def reset(self): + def reset(self) -> None: self._state = ProbingState.DETECTING @property - def charset_name(self): + def charset_name(self) -> Optional[str]: return None - def feed(self, byte_str): + @property + def language(self) -> Optional[str]: + raise NotImplementedError + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: raise NotImplementedError @property - def state(self): + def state(self) -> ProbingState: return self._state - def get_confidence(self): + def get_confidence(self) -> float: return 0.0 @staticmethod - def filter_high_byte_only(buf): + def filter_high_byte_only(buf: Union[bytes, bytearray]) -> bytes: buf = re.sub(b"([\x00-\x7F])+", b" ", buf) return buf @staticmethod - def filter_international_words(buf): + def filter_international_words(buf: Union[bytes, bytearray]) -> bytearray: """ We define three types of bytes: alphabet: english alphabets [a-zA-Z] @@ -102,7 +108,7 
@@ def filter_international_words(buf): return filtered @staticmethod - def remove_xml_tags(buf): + def remove_xml_tags(buf: Union[bytes, bytearray]) -> bytes: """ Returns a copy of ``buf`` that retains only the sequences of English alphabet and high byte characters that are not between <> characters. @@ -117,10 +123,13 @@ def remove_xml_tags(buf): for curr, buf_char in enumerate(buf): # Check if we're coming out of or entering an XML tag - if buf_char == b">": + + # https://github.com/python/typeshed/issues/8182 + if buf_char == b">": # type: ignore[comparison-overlap] prev = curr + 1 in_tag = False - elif buf_char == b"<": + # https://github.com/python/typeshed/issues/8182 + elif buf_char == b"<": # type: ignore[comparison-overlap] if curr > prev and not in_tag: # Keep everything after last non-extended-ASCII, # non-alphabetic character diff --git a/src/pip/_vendor/chardet/cli/chardetect.py b/src/pip/_vendor/chardet/cli/chardetect.py index 7926fa37e38..43f6e144f67 100644 --- a/src/pip/_vendor/chardet/cli/chardetect.py +++ b/src/pip/_vendor/chardet/cli/chardetect.py @@ -15,12 +15,18 @@ import argparse import sys +from typing import Iterable, List, Optional from .. import __version__ from ..universaldetector import UniversalDetector -def description_of(lines, name="stdin"): +def description_of( + lines: Iterable[bytes], + name: str = "stdin", + minimal: bool = False, + should_rename_legacy: bool = False, +) -> Optional[str]: """ Return a string describing the probable encoding of a file or list of strings. @@ -29,8 +35,11 @@ def description_of(lines, name="stdin"): :type lines: Iterable of bytes :param name: Name of file or collection of lines :type name: str + :param should_rename_legacy: Should we rename legacy encodings to + their more modern equivalents? 
+ :type should_rename_legacy: ``bool`` """ - u = UniversalDetector() + u = UniversalDetector(should_rename_legacy=should_rename_legacy) for line in lines: line = bytearray(line) u.feed(line) @@ -39,12 +48,14 @@ def description_of(lines, name="stdin"): break u.close() result = u.result + if minimal: + return result["encoding"] if result["encoding"]: return f'{name}: {result["encoding"]} with confidence {result["confidence"]}' return f"{name}: no result" -def main(argv=None): +def main(argv: Optional[List[str]] = None) -> None: """ Handles command line arguments and gets things started. @@ -54,17 +65,28 @@ def main(argv=None): """ # Get command line arguments parser = argparse.ArgumentParser( - description="Takes one or more file paths and reports their detected \ - encodings" + description=( + "Takes one or more file paths and reports their detected encodings" + ) ) parser.add_argument( "input", - help="File whose encoding we would like to determine. \ - (default: stdin)", + help="File whose encoding we would like to determine. 
(default: stdin)", type=argparse.FileType("rb"), nargs="*", default=[sys.stdin.buffer], ) + parser.add_argument( + "--minimal", + help="Print only the encoding to standard output", + action="store_true", + ) + parser.add_argument( + "-l", + "--legacy", + help="Rename legacy encodings to more modern ones.", + action="store_true", + ) parser.add_argument( "--version", action="version", version=f"%(prog)s {__version__}" ) @@ -79,7 +101,11 @@ def main(argv=None): "--help\n", file=sys.stderr, ) - print(description_of(f, f.name)) + print( + description_of( + f, f.name, minimal=args.minimal, should_rename_legacy=args.legacy + ) + ) if __name__ == "__main__": diff --git a/src/pip/_vendor/chardet/codingstatemachine.py b/src/pip/_vendor/chardet/codingstatemachine.py index d3e3e825d6d..8ed4a8773b8 100644 --- a/src/pip/_vendor/chardet/codingstatemachine.py +++ b/src/pip/_vendor/chardet/codingstatemachine.py @@ -27,6 +27,7 @@ import logging +from .codingstatemachinedict import CodingStateMachineDict from .enums import MachineState @@ -53,18 +54,19 @@ class CodingStateMachine: encoding from consideration from here on. 
""" - def __init__(self, sm): + def __init__(self, sm: CodingStateMachineDict) -> None: self._model = sm self._curr_byte_pos = 0 self._curr_char_len = 0 - self._curr_state = None + self._curr_state = MachineState.START + self.active = True self.logger = logging.getLogger(__name__) self.reset() - def reset(self): + def reset(self) -> None: self._curr_state = MachineState.START - def next_state(self, c): + def next_state(self, c: int) -> int: # for each byte we get its class # if it is first byte, we also get byte length byte_class = self._model["class_table"][c] @@ -77,12 +79,12 @@ def next_state(self, c): self._curr_byte_pos += 1 return self._curr_state - def get_current_charlen(self): + def get_current_charlen(self) -> int: return self._curr_char_len - def get_coding_state_machine(self): + def get_coding_state_machine(self) -> str: return self._model["name"] @property - def language(self): + def language(self) -> str: return self._model["language"] diff --git a/src/pip/_vendor/chardet/codingstatemachinedict.py b/src/pip/_vendor/chardet/codingstatemachinedict.py new file mode 100644 index 00000000000..7a3c4c7e3fe --- /dev/null +++ b/src/pip/_vendor/chardet/codingstatemachinedict.py @@ -0,0 +1,19 @@ +from typing import TYPE_CHECKING, Tuple + +if TYPE_CHECKING: + # TypedDict was introduced in Python 3.8. + # + # TODO: Remove the else block and TYPE_CHECKING check when dropping support + # for Python 3.7. + from typing import TypedDict + + class CodingStateMachineDict(TypedDict, total=False): + class_table: Tuple[int, ...] + class_factor: int + state_table: Tuple[int, ...] + char_len_table: Tuple[int, ...] 
+ name: str + language: str # Optional key + +else: + CodingStateMachineDict = dict diff --git a/src/pip/_vendor/chardet/cp949prober.py b/src/pip/_vendor/chardet/cp949prober.py index 28a1f3dbb57..fa7307ed898 100644 --- a/src/pip/_vendor/chardet/cp949prober.py +++ b/src/pip/_vendor/chardet/cp949prober.py @@ -32,7 +32,7 @@ class CP949Prober(MultiByteCharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(CP949_SM_MODEL) # NOTE: CP949 is a superset of EUC-KR, so the distribution should be @@ -41,9 +41,9 @@ def __init__(self): self.reset() @property - def charset_name(self): + def charset_name(self) -> str: return "CP949" @property - def language(self): + def language(self) -> str: return "Korean" diff --git a/src/pip/_vendor/chardet/enums.py b/src/pip/_vendor/chardet/enums.py index 32a77e76c61..5e3e1982336 100644 --- a/src/pip/_vendor/chardet/enums.py +++ b/src/pip/_vendor/chardet/enums.py @@ -4,6 +4,8 @@ :author: Dan Blanchard (dan.blanchard@gmail.com) """ +from enum import Enum, Flag + class InputState: """ @@ -15,12 +17,13 @@ class InputState: HIGH_BYTE = 2 -class LanguageFilter: +class LanguageFilter(Flag): """ This enum represents the different language filters we can apply to a ``UniversalDetector``. """ + NONE = 0x00 CHINESE_SIMPLIFIED = 0x01 CHINESE_TRADITIONAL = 0x02 JAPANESE = 0x04 @@ -31,7 +34,7 @@ class LanguageFilter: CJK = CHINESE | JAPANESE | KOREAN -class ProbingState: +class ProbingState(Enum): """ This enum represents the different states a prober can be in. 
""" @@ -62,7 +65,7 @@ class SequenceLikelihood: POSITIVE = 3 @classmethod - def get_num_categories(cls): + def get_num_categories(cls) -> int: """:returns: The number of likelihood categories in the enum.""" return 4 diff --git a/src/pip/_vendor/chardet/escprober.py b/src/pip/_vendor/chardet/escprober.py index d9926115dad..fd713830d36 100644 --- a/src/pip/_vendor/chardet/escprober.py +++ b/src/pip/_vendor/chardet/escprober.py @@ -25,6 +25,8 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import Optional, Union + from .charsetprober import CharSetProber from .codingstatemachine import CodingStateMachine from .enums import LanguageFilter, MachineState, ProbingState @@ -43,7 +45,7 @@ class EscCharSetProber(CharSetProber): identify these encodings. """ - def __init__(self, lang_filter=None): + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: super().__init__(lang_filter=lang_filter) self.coding_sm = [] if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED: @@ -53,17 +55,15 @@ def __init__(self, lang_filter=None): self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL)) if self.lang_filter & LanguageFilter.KOREAN: self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL)) - self.active_sm_count = None - self._detected_charset = None - self._detected_language = None - self._state = None + self.active_sm_count = 0 + self._detected_charset: Optional[str] = None + self._detected_language: Optional[str] = None + self._state = ProbingState.DETECTING self.reset() - def reset(self): + def reset(self) -> None: super().reset() for coding_sm in self.coding_sm: - if not coding_sm: - continue coding_sm.active = True coding_sm.reset() self.active_sm_count = len(self.coding_sm) @@ -71,20 +71,20 @@ def reset(self): self._detected_language = None @property - def charset_name(self): + def charset_name(self) -> Optional[str]: return self._detected_charset @property - def language(self): + def 
language(self) -> Optional[str]: return self._detected_language - def get_confidence(self): + def get_confidence(self) -> float: return 0.99 if self._detected_charset else 0.00 - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: for c in byte_str: for coding_sm in self.coding_sm: - if not coding_sm or not coding_sm.active: + if not coding_sm.active: continue coding_state = coding_sm.next_state(c) if coding_state == MachineState.ERROR: diff --git a/src/pip/_vendor/chardet/escsm.py b/src/pip/_vendor/chardet/escsm.py index 3aa0f4d962d..11d4adf771f 100644 --- a/src/pip/_vendor/chardet/escsm.py +++ b/src/pip/_vendor/chardet/escsm.py @@ -25,6 +25,7 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from .codingstatemachinedict import CodingStateMachineDict from .enums import MachineState # fmt: off @@ -75,7 +76,7 @@ HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) -HZ_SM_MODEL = { +HZ_SM_MODEL: CodingStateMachineDict = { "class_table": HZ_CLS, "class_factor": 6, "state_table": HZ_ST, @@ -134,7 +135,7 @@ ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0) -ISO2022CN_SM_MODEL = { +ISO2022CN_SM_MODEL: CodingStateMachineDict = { "class_table": ISO2022CN_CLS, "class_factor": 9, "state_table": ISO2022CN_ST, @@ -194,7 +195,7 @@ ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0) -ISO2022JP_SM_MODEL = { +ISO2022JP_SM_MODEL: CodingStateMachineDict = { "class_table": ISO2022JP_CLS, "class_factor": 10, "state_table": ISO2022JP_ST, @@ -250,7 +251,7 @@ ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0) -ISO2022KR_SM_MODEL = { +ISO2022KR_SM_MODEL: CodingStateMachineDict = { "class_table": ISO2022KR_CLS, "class_factor": 6, "state_table": ISO2022KR_ST, diff --git a/src/pip/_vendor/chardet/eucjpprober.py b/src/pip/_vendor/chardet/eucjpprober.py index abf2e66e283..39487f4098d 100644 --- a/src/pip/_vendor/chardet/eucjpprober.py +++ b/src/pip/_vendor/chardet/eucjpprober.py @@ -25,6 +25,8 @@ # 02110-1301 USA 
######################### END LICENSE BLOCK ######################### +from typing import Union + from .chardistribution import EUCJPDistributionAnalysis from .codingstatemachine import CodingStateMachine from .enums import MachineState, ProbingState @@ -34,26 +36,29 @@ class EUCJPProber(MultiByteCharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL) self.distribution_analyzer = EUCJPDistributionAnalysis() self.context_analyzer = EUCJPContextAnalysis() self.reset() - def reset(self): + def reset(self) -> None: super().reset() self.context_analyzer.reset() @property - def charset_name(self): + def charset_name(self) -> str: return "EUC-JP" @property - def language(self): + def language(self) -> str: return "Japanese" - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + assert self.coding_sm is not None + assert self.distribution_analyzer is not None + for i, byte in enumerate(byte_str): # PY3K: byte_str is a byte array, so byte is an int, not a byte coding_state = self.coding_sm.next_state(byte) @@ -89,7 +94,9 @@ def feed(self, byte_str): return self.state - def get_confidence(self): + def get_confidence(self) -> float: + assert self.distribution_analyzer is not None + context_conf = self.context_analyzer.get_confidence() distrib_conf = self.distribution_analyzer.get_confidence() return max(context_conf, distrib_conf) diff --git a/src/pip/_vendor/chardet/euckrprober.py b/src/pip/_vendor/chardet/euckrprober.py index 154a6d2162b..1fc5de0462c 100644 --- a/src/pip/_vendor/chardet/euckrprober.py +++ b/src/pip/_vendor/chardet/euckrprober.py @@ -32,16 +32,16 @@ class EUCKRProber(MultiByteCharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) self.distribution_analyzer = EUCKRDistributionAnalysis() self.reset() @property - def charset_name(self): + def 
charset_name(self) -> str: return "EUC-KR" @property - def language(self): + def language(self) -> str: return "Korean" diff --git a/src/pip/_vendor/chardet/euctwprober.py b/src/pip/_vendor/chardet/euctwprober.py index ca10a23ca43..a37ab189958 100644 --- a/src/pip/_vendor/chardet/euctwprober.py +++ b/src/pip/_vendor/chardet/euctwprober.py @@ -32,16 +32,16 @@ class EUCTWProber(MultiByteCharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) self.distribution_analyzer = EUCTWDistributionAnalysis() self.reset() @property - def charset_name(self): + def charset_name(self) -> str: return "EUC-TW" @property - def language(self): + def language(self) -> str: return "Taiwan" diff --git a/src/pip/_vendor/chardet/gb2312prober.py b/src/pip/_vendor/chardet/gb2312prober.py index 251c042955e..d423e7311e2 100644 --- a/src/pip/_vendor/chardet/gb2312prober.py +++ b/src/pip/_vendor/chardet/gb2312prober.py @@ -32,16 +32,16 @@ class GB2312Prober(MultiByteCharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) self.distribution_analyzer = GB2312DistributionAnalysis() self.reset() @property - def charset_name(self): + def charset_name(self) -> str: return "GB2312" @property - def language(self): + def language(self) -> str: return "Chinese" diff --git a/src/pip/_vendor/chardet/hebrewprober.py b/src/pip/_vendor/chardet/hebrewprober.py index 3ca634bf373..785d0057bcc 100644 --- a/src/pip/_vendor/chardet/hebrewprober.py +++ b/src/pip/_vendor/chardet/hebrewprober.py @@ -25,8 +25,11 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import Optional, Union + from .charsetprober import CharSetProber from .enums import ProbingState +from .sbcharsetprober import SingleByteCharSetProber # This prober doesn't actually recognize a language or a charset. 
# It is a helper prober for the use of the Hebrew model probers @@ -127,6 +130,7 @@ class HebrewProber(CharSetProber): + SPACE = 0x20 # windows-1255 / ISO-8859-8 code points of interest FINAL_KAF = 0xEA NORMAL_KAF = 0xEB @@ -152,31 +156,35 @@ class HebrewProber(CharSetProber): VISUAL_HEBREW_NAME = "ISO-8859-8" LOGICAL_HEBREW_NAME = "windows-1255" - def __init__(self): + def __init__(self) -> None: super().__init__() - self._final_char_logical_score = None - self._final_char_visual_score = None - self._prev = None - self._before_prev = None - self._logical_prober = None - self._visual_prober = None + self._final_char_logical_score = 0 + self._final_char_visual_score = 0 + self._prev = self.SPACE + self._before_prev = self.SPACE + self._logical_prober: Optional[SingleByteCharSetProber] = None + self._visual_prober: Optional[SingleByteCharSetProber] = None self.reset() - def reset(self): + def reset(self) -> None: self._final_char_logical_score = 0 self._final_char_visual_score = 0 # The two last characters seen in the previous buffer, # mPrev and mBeforePrev are initialized to space in order to simulate # a word delimiter at the beginning of the data - self._prev = " " - self._before_prev = " " + self._prev = self.SPACE + self._before_prev = self.SPACE # These probers are owned by the group prober. - def set_model_probers(self, logical_prober, visual_prober): + def set_model_probers( + self, + logical_prober: SingleByteCharSetProber, + visual_prober: SingleByteCharSetProber, + ) -> None: self._logical_prober = logical_prober self._visual_prober = visual_prober - def is_final(self, c): + def is_final(self, c: int) -> bool: return c in [ self.FINAL_KAF, self.FINAL_MEM, @@ -185,7 +193,7 @@ def is_final(self, c): self.FINAL_TSADI, ] - def is_non_final(self, c): + def is_non_final(self, c: int) -> bool: # The normal Tsadi is not a good Non-Final letter due to words like # 'lechotet' (to chat) containing an apostrophe after the tsadi. 
This # apostrophe is converted to a space in FilterWithoutEnglishLetters @@ -198,7 +206,7 @@ def is_non_final(self, c): # since these words are quite rare. return c in [self.NORMAL_KAF, self.NORMAL_MEM, self.NORMAL_NUN, self.NORMAL_PE] - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: # Final letter analysis for logical-visual decision. # Look for evidence that the received buffer is either logical Hebrew # or visual Hebrew. @@ -232,9 +240,9 @@ def feed(self, byte_str): byte_str = self.filter_high_byte_only(byte_str) for cur in byte_str: - if cur == " ": + if cur == self.SPACE: # We stand on a space - a word just ended - if self._before_prev != " ": + if self._before_prev != self.SPACE: # next-to-last char was not a space so self._prev is not a # 1 letter word if self.is_final(self._prev): @@ -247,9 +255,9 @@ def feed(self, byte_str): else: # Not standing on a space if ( - (self._before_prev == " ") + (self._before_prev == self.SPACE) and (self.is_final(self._prev)) - and (cur != " ") + and (cur != self.SPACE) ): # case (3) [-2:space][-1:final letter][cur:not space] self._final_char_visual_score += 1 @@ -261,7 +269,10 @@ def feed(self, byte_str): return ProbingState.DETECTING @property - def charset_name(self): + def charset_name(self) -> str: + assert self._logical_prober is not None + assert self._visual_prober is not None + # Make the decision: is it Logical or Visual? # If the final letter score distance is dominant enough, rely on it. finalsub = self._final_char_logical_score - self._final_char_visual_score @@ -289,11 +300,14 @@ def charset_name(self): return self.LOGICAL_HEBREW_NAME @property - def language(self): + def language(self) -> str: return "Hebrew" @property - def state(self): + def state(self) -> ProbingState: + assert self._logical_prober is not None + assert self._visual_prober is not None + # Remain active as long as any of the model probers are active. 
if (self._logical_prober.state == ProbingState.NOT_ME) and ( self._visual_prober.state == ProbingState.NOT_ME diff --git a/src/pip/_vendor/chardet/johabprober.py b/src/pip/_vendor/chardet/johabprober.py index 6f359d193f7..d7364ba61ec 100644 --- a/src/pip/_vendor/chardet/johabprober.py +++ b/src/pip/_vendor/chardet/johabprober.py @@ -32,16 +32,16 @@ class JOHABProber(MultiByteCharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(JOHAB_SM_MODEL) self.distribution_analyzer = JOHABDistributionAnalysis() self.reset() @property - def charset_name(self): + def charset_name(self) -> str: return "Johab" @property - def language(self): + def language(self) -> str: return "Korean" diff --git a/src/pip/_vendor/chardet/jpcntx.py b/src/pip/_vendor/chardet/jpcntx.py index 7a8e5be0623..2f53bdda09e 100644 --- a/src/pip/_vendor/chardet/jpcntx.py +++ b/src/pip/_vendor/chardet/jpcntx.py @@ -25,6 +25,7 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import List, Tuple, Union # This is hiragana 2-char sequence table, the number in each cell represents its frequency category # fmt: off @@ -123,15 +124,15 @@ class JapaneseContextAnalysis: MAX_REL_THRESHOLD = 1000 MINIMUM_DATA_THRESHOLD = 4 - def __init__(self): - self._total_rel = None - self._rel_sample = None - self._need_to_skip_char_num = None - self._last_char_order = None - self._done = None + def __init__(self) -> None: + self._total_rel = 0 + self._rel_sample: List[int] = [] + self._need_to_skip_char_num = 0 + self._last_char_order = -1 + self._done = False self.reset() - def reset(self): + def reset(self) -> None: self._total_rel = 0 # total sequence received # category counters, each integer counts sequence in its category self._rel_sample = [0] * self.NUM_OF_CATEGORY @@ -143,7 +144,7 @@ def reset(self): # been made self._done = False - def feed(self, byte_str, num_bytes): + def feed(self, byte_str: 
Union[bytes, bytearray], num_bytes: int) -> None: if self._done: return @@ -172,29 +173,29 @@ def feed(self, byte_str, num_bytes): ] += 1 self._last_char_order = order - def got_enough_data(self): + def got_enough_data(self) -> bool: return self._total_rel > self.ENOUGH_REL_THRESHOLD - def get_confidence(self): + def get_confidence(self) -> float: # This is just one way to calculate confidence. It works well for me. if self._total_rel > self.MINIMUM_DATA_THRESHOLD: return (self._total_rel - self._rel_sample[0]) / self._total_rel return self.DONT_KNOW - def get_order(self, _): + def get_order(self, _: Union[bytes, bytearray]) -> Tuple[int, int]: return -1, 1 class SJISContextAnalysis(JapaneseContextAnalysis): - def __init__(self): + def __init__(self) -> None: super().__init__() self._charset_name = "SHIFT_JIS" @property - def charset_name(self): + def charset_name(self) -> str: return self._charset_name - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> Tuple[int, int]: if not byte_str: return -1, 1 # find out current char's byte length @@ -216,7 +217,7 @@ def get_order(self, byte_str): class EUCJPContextAnalysis(JapaneseContextAnalysis): - def get_order(self, byte_str): + def get_order(self, byte_str: Union[bytes, bytearray]) -> Tuple[int, int]: if not byte_str: return -1, 1 # find out current char's byte length diff --git a/src/pip/_vendor/chardet/latin1prober.py b/src/pip/_vendor/chardet/latin1prober.py index 241f14ab914..59a01d91b87 100644 --- a/src/pip/_vendor/chardet/latin1prober.py +++ b/src/pip/_vendor/chardet/latin1prober.py @@ -26,6 +26,8 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import List, Union + from .charsetprober import CharSetProber from .enums import ProbingState @@ -96,26 +98,26 @@ class Latin1Prober(CharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() - self._last_char_class = None - self._freq_counter = None 
+ self._last_char_class = OTH + self._freq_counter: List[int] = [] self.reset() - def reset(self): + def reset(self) -> None: self._last_char_class = OTH self._freq_counter = [0] * FREQ_CAT_NUM super().reset() @property - def charset_name(self): + def charset_name(self) -> str: return "ISO-8859-1" @property - def language(self): + def language(self) -> str: return "" - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: byte_str = self.remove_xml_tags(byte_str) for c in byte_str: char_class = Latin1_CharToClass[c] @@ -128,7 +130,7 @@ def feed(self, byte_str): return self.state - def get_confidence(self): + def get_confidence(self) -> float: if self.state == ProbingState.NOT_ME: return 0.01 diff --git a/src/pip/_vendor/chardet/macromanprober.py b/src/pip/_vendor/chardet/macromanprober.py new file mode 100644 index 00000000000..1425d10ecaa --- /dev/null +++ b/src/pip/_vendor/chardet/macromanprober.py @@ -0,0 +1,162 @@ +######################## BEGIN LICENSE BLOCK ######################## +# This code was modified from latin1prober.py by Rob Speer . +# The Original Code is Mozilla Universal charset detector code. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 2001 +# the Initial Developer. All Rights Reserved. +# +# Contributor(s): +# Rob Speer - adapt to MacRoman encoding +# Mark Pilgrim - port to Python +# Shy Shalom - original C code +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA +# 02110-1301 USA +######################### END LICENSE BLOCK ######################### + +from typing import List, Union + +from .charsetprober import CharSetProber +from .enums import ProbingState + +FREQ_CAT_NUM = 4 + +UDF = 0 # undefined +OTH = 1 # other +ASC = 2 # ascii capital letter +ASS = 3 # ascii small letter +ACV = 4 # accent capital vowel +ACO = 5 # accent capital other +ASV = 6 # accent small vowel +ASO = 7 # accent small other +ODD = 8 # character that is unlikely to appear +CLASS_NUM = 9 # total classes + +# The change from Latin1 is that we explicitly look for extended characters +# that are infrequently-occurring symbols, and consider them to always be +# improbable. This should let MacRoman get out of the way of more likely +# encodings in most situations. 
+ +# fmt: off +MacRoman_CharToClass = ( + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37 + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F + OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47 + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F + ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57 + ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F + OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67 + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F + ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77 + ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F + ACV, ACV, ACO, ACV, ACO, ACV, ACV, ASV, # 80 - 87 + ASV, ASV, ASV, ASV, ASV, ASO, ASV, ASV, # 88 - 8F + ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASV, # 90 - 97 + ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # 98 - 9F + OTH, OTH, OTH, OTH, OTH, OTH, OTH, ASO, # A0 - A7 + OTH, OTH, ODD, ODD, OTH, OTH, ACV, ACV, # A8 - AF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7 + OTH, OTH, OTH, OTH, OTH, OTH, ASV, ASV, # B8 - BF + OTH, OTH, ODD, OTH, ODD, OTH, OTH, OTH, # C0 - C7 + OTH, OTH, OTH, ACV, ACV, ACV, ACV, ASV, # C8 - CF + OTH, OTH, OTH, OTH, OTH, OTH, OTH, ODD, # D0 - D7 + ASV, ACV, ODD, OTH, OTH, OTH, OTH, OTH, # D8 - DF + OTH, OTH, OTH, OTH, OTH, ACV, ACV, ACV, # E0 - E7 + ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # E8 - EF + ODD, ACV, ACV, ACV, ACV, ASV, ODD, ODD, # F0 - F7 + ODD, ODD, ODD, ODD, ODD, ODD, ODD, ODD, # F8 - FF +) + +# 0 : illegal +# 1 : very unlikely +# 2 : normal +# 3 : very likely +MacRomanClassModel = ( +# UDF OTH ASC ASS ACV ACO ASV ASO ODD + 0, 0, 0, 0, 0, 0, 0, 0, 0, # UDF + 0, 3, 3, 3, 3, 3, 3, 3, 1, # OTH + 0, 3, 3, 3, 3, 3, 3, 3, 1, # ASC + 0, 3, 3, 3, 1, 1, 3, 3, 1, # ASS + 0, 3, 3, 3, 1, 
2, 1, 2, 1, # ACV + 0, 3, 3, 3, 3, 3, 3, 3, 1, # ACO + 0, 3, 1, 3, 1, 1, 1, 3, 1, # ASV + 0, 3, 1, 3, 1, 1, 3, 3, 1, # ASO + 0, 1, 1, 1, 1, 1, 1, 1, 1, # ODD +) +# fmt: on + + +class MacRomanProber(CharSetProber): + def __init__(self) -> None: + super().__init__() + self._last_char_class = OTH + self._freq_counter: List[int] = [] + self.reset() + + def reset(self) -> None: + self._last_char_class = OTH + self._freq_counter = [0] * FREQ_CAT_NUM + + # express the prior that MacRoman is a somewhat rare encoding; + # this can be done by starting out in a slightly improbable state + # that must be overcome + self._freq_counter[2] = 10 + + super().reset() + + @property + def charset_name(self) -> str: + return "MacRoman" + + @property + def language(self) -> str: + return "" + + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + byte_str = self.remove_xml_tags(byte_str) + for c in byte_str: + char_class = MacRoman_CharToClass[c] + freq = MacRomanClassModel[(self._last_char_class * CLASS_NUM) + char_class] + if freq == 0: + self._state = ProbingState.NOT_ME + break + self._freq_counter[freq] += 1 + self._last_char_class = char_class + + return self.state + + def get_confidence(self) -> float: + if self.state == ProbingState.NOT_ME: + return 0.01 + + total = sum(self._freq_counter) + confidence = ( + 0.0 + if total < 0.01 + else (self._freq_counter[3] - self._freq_counter[1] * 20.0) / total + ) + confidence = max(confidence, 0.0) + # lower the confidence of MacRoman so that other more accurate + # detector can take priority. 
+ confidence *= 0.73 + return confidence diff --git a/src/pip/_vendor/chardet/mbcharsetprober.py b/src/pip/_vendor/chardet/mbcharsetprober.py index bf96ad5d490..666307e8fe0 100644 --- a/src/pip/_vendor/chardet/mbcharsetprober.py +++ b/src/pip/_vendor/chardet/mbcharsetprober.py @@ -27,8 +27,12 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import Optional, Union + +from .chardistribution import CharDistributionAnalysis from .charsetprober import CharSetProber -from .enums import MachineState, ProbingState +from .codingstatemachine import CodingStateMachine +from .enums import LanguageFilter, MachineState, ProbingState class MultiByteCharSetProber(CharSetProber): @@ -36,29 +40,24 @@ class MultiByteCharSetProber(CharSetProber): MultiByteCharSetProber """ - def __init__(self, lang_filter=None): + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: super().__init__(lang_filter=lang_filter) - self.distribution_analyzer = None - self.coding_sm = None - self._last_char = [0, 0] + self.distribution_analyzer: Optional[CharDistributionAnalysis] = None + self.coding_sm: Optional[CodingStateMachine] = None + self._last_char = bytearray(b"\0\0") - def reset(self): + def reset(self) -> None: super().reset() if self.coding_sm: self.coding_sm.reset() if self.distribution_analyzer: self.distribution_analyzer.reset() - self._last_char = [0, 0] - - @property - def charset_name(self): - raise NotImplementedError + self._last_char = bytearray(b"\0\0") - @property - def language(self): - raise NotImplementedError + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + assert self.coding_sm is not None + assert self.distribution_analyzer is not None - def feed(self, byte_str): for i, byte in enumerate(byte_str): coding_state = self.coding_sm.next_state(byte) if coding_state == MachineState.ERROR: @@ -91,5 +90,6 @@ def feed(self, byte_str): return self.state - def get_confidence(self): + def 
get_confidence(self) -> float: + assert self.distribution_analyzer is not None return self.distribution_analyzer.get_confidence() diff --git a/src/pip/_vendor/chardet/mbcsgroupprober.py b/src/pip/_vendor/chardet/mbcsgroupprober.py index 94488360c4b..6cb9cc7b3bc 100644 --- a/src/pip/_vendor/chardet/mbcsgroupprober.py +++ b/src/pip/_vendor/chardet/mbcsgroupprober.py @@ -30,6 +30,7 @@ from .big5prober import Big5Prober from .charsetgroupprober import CharSetGroupProber from .cp949prober import CP949Prober +from .enums import LanguageFilter from .eucjpprober import EUCJPProber from .euckrprober import EUCKRProber from .euctwprober import EUCTWProber @@ -40,7 +41,7 @@ class MBCSGroupProber(CharSetGroupProber): - def __init__(self, lang_filter=None): + def __init__(self, lang_filter: LanguageFilter = LanguageFilter.NONE) -> None: super().__init__(lang_filter=lang_filter) self.probers = [ UTF8Prober(), diff --git a/src/pip/_vendor/chardet/mbcssm.py b/src/pip/_vendor/chardet/mbcssm.py index d3b9c4b75a2..7bbe97e6665 100644 --- a/src/pip/_vendor/chardet/mbcssm.py +++ b/src/pip/_vendor/chardet/mbcssm.py @@ -25,6 +25,7 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from .codingstatemachinedict import CodingStateMachineDict from .enums import MachineState # BIG5 @@ -74,7 +75,7 @@ BIG5_CHAR_LEN_TABLE = (0, 1, 1, 2, 0) -BIG5_SM_MODEL = { +BIG5_SM_MODEL: CodingStateMachineDict = { "class_table": BIG5_CLS, "class_factor": 5, "state_table": BIG5_ST, @@ -117,7 +118,7 @@ CP949_CHAR_LEN_TABLE = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2) -CP949_SM_MODEL = { +CP949_SM_MODEL: CodingStateMachineDict = { "class_table": CP949_CLS, "class_factor": 10, "state_table": CP949_ST, @@ -173,7 +174,7 @@ EUCJP_CHAR_LEN_TABLE = (2, 2, 2, 3, 1, 0) -EUCJP_SM_MODEL = { +EUCJP_SM_MODEL: CodingStateMachineDict = { "class_table": EUCJP_CLS, "class_factor": 6, "state_table": EUCJP_ST, @@ -226,7 +227,7 @@ EUCKR_CHAR_LEN_TABLE = (0, 1, 2, 0) -EUCKR_SM_MODEL = { +EUCKR_SM_MODEL: 
CodingStateMachineDict = { "class_table": EUCKR_CLS, "class_factor": 4, "state_table": EUCKR_ST, @@ -283,7 +284,7 @@ JOHAB_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 0, 0, 2, 2, 2) -JOHAB_SM_MODEL = { +JOHAB_SM_MODEL: CodingStateMachineDict = { "class_table": JOHAB_CLS, "class_factor": 10, "state_table": JOHAB_ST, @@ -340,7 +341,7 @@ EUCTW_CHAR_LEN_TABLE = (0, 0, 1, 2, 2, 2, 3) -EUCTW_SM_MODEL = { +EUCTW_SM_MODEL: CodingStateMachineDict = { "class_table": EUCTW_CLS, "class_factor": 7, "state_table": EUCTW_ST, @@ -402,7 +403,7 @@ # 2 here. GB2312_CHAR_LEN_TABLE = (0, 1, 1, 1, 1, 1, 2) -GB2312_SM_MODEL = { +GB2312_SM_MODEL: CodingStateMachineDict = { "class_table": GB2312_CLS, "class_factor": 7, "state_table": GB2312_ST, @@ -458,7 +459,7 @@ SJIS_CHAR_LEN_TABLE = (0, 1, 1, 2, 0, 0) -SJIS_SM_MODEL = { +SJIS_SM_MODEL: CodingStateMachineDict = { "class_table": SJIS_CLS, "class_factor": 6, "state_table": SJIS_ST, @@ -516,7 +517,7 @@ UCS2BE_CHAR_LEN_TABLE = (2, 2, 2, 0, 2, 2) -UCS2BE_SM_MODEL = { +UCS2BE_SM_MODEL: CodingStateMachineDict = { "class_table": UCS2BE_CLS, "class_factor": 6, "state_table": UCS2BE_ST, @@ -574,7 +575,7 @@ UCS2LE_CHAR_LEN_TABLE = (2, 2, 2, 2, 2, 2) -UCS2LE_SM_MODEL = { +UCS2LE_SM_MODEL: CodingStateMachineDict = { "class_table": UCS2LE_CLS, "class_factor": 6, "state_table": UCS2LE_ST, @@ -651,7 +652,7 @@ UTF8_CHAR_LEN_TABLE = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6) -UTF8_SM_MODEL = { +UTF8_SM_MODEL: CodingStateMachineDict = { "class_table": UTF8_CLS, "class_factor": 16, "state_table": UTF8_ST, diff --git a/src/pip/_vendor/chardet/metadata/languages.py b/src/pip/_vendor/chardet/metadata/languages.py index 1d37884c31e..eb40c5f0c85 100644 --- a/src/pip/_vendor/chardet/metadata/languages.py +++ b/src/pip/_vendor/chardet/metadata/languages.py @@ -6,6 +6,7 @@ """ from string import ascii_letters +from typing import List, Optional # TODO: Add Ukrainian (KOI8-U) @@ -33,13 +34,13 @@ class Language: def __init__( self, - name=None, - iso_code=None, - 
use_ascii=True, - charsets=None, - alphabet=None, - wiki_start_pages=None, - ): + name: Optional[str] = None, + iso_code: Optional[str] = None, + use_ascii: bool = True, + charsets: Optional[List[str]] = None, + alphabet: Optional[str] = None, + wiki_start_pages: Optional[List[str]] = None, + ) -> None: super().__init__() self.name = name self.iso_code = iso_code @@ -55,7 +56,7 @@ def __init__( self.alphabet = "".join(sorted(set(alphabet))) if alphabet else None self.wiki_start_pages = wiki_start_pages - def __repr__(self): + def __repr__(self) -> str: param_str = ", ".join( f"{k}={v!r}" for k, v in self.__dict__.items() if not k.startswith("_") ) @@ -103,7 +104,7 @@ def __repr__(self): name="Danish", iso_code="da", use_ascii=True, - charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252"], + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], alphabet="æøåÆØÅ", wiki_start_pages=["Forside"], ), @@ -111,8 +112,8 @@ def __repr__(self): name="German", iso_code="de", use_ascii=True, - charsets=["ISO-8859-1", "WINDOWS-1252"], - alphabet="äöüßÄÖÜ", + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], + alphabet="äöüßẞÄÖÜ", wiki_start_pages=["Wikipedia:Hauptseite"], ), "Greek": Language( @@ -127,7 +128,7 @@ def __repr__(self): name="English", iso_code="en", use_ascii=True, - charsets=["ISO-8859-1", "WINDOWS-1252"], + charsets=["ISO-8859-1", "WINDOWS-1252", "MacRoman"], wiki_start_pages=["Main_Page"], ), "Esperanto": Language( @@ -143,7 +144,7 @@ def __repr__(self): name="Spanish", iso_code="es", use_ascii=True, - charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252"], + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], alphabet="ñáéíóúüÑÁÉÍÓÚÜ", wiki_start_pages=["Wikipedia:Portada"], ), @@ -161,7 +162,7 @@ def __repr__(self): name="Finnish", iso_code="fi", use_ascii=True, - charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252"], + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], alphabet="ÅÄÖŠŽåäöšž", 
wiki_start_pages=["Wikipedia:Etusivu"], ), @@ -169,7 +170,7 @@ def __repr__(self): name="French", iso_code="fr", use_ascii=True, - charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252"], + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], alphabet="œàâçèéîïùûêŒÀÂÇÈÉÎÏÙÛÊ", wiki_start_pages=["Wikipédia:Accueil_principal", "Bœuf (animal)"], ), @@ -203,7 +204,7 @@ def __repr__(self): name="Italian", iso_code="it", use_ascii=True, - charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252"], + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], alphabet="ÀÈÉÌÒÓÙàèéìòóù", wiki_start_pages=["Pagina_principale"], ), @@ -237,7 +238,7 @@ def __repr__(self): name="Dutch", iso_code="nl", use_ascii=True, - charsets=["ISO-8859-1", "WINDOWS-1252"], + charsets=["ISO-8859-1", "WINDOWS-1252", "MacRoman"], wiki_start_pages=["Hoofdpagina"], ), "Polish": Language( @@ -253,7 +254,7 @@ def __repr__(self): name="Portuguese", iso_code="pt", use_ascii=True, - charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252"], + charsets=["ISO-8859-1", "ISO-8859-15", "WINDOWS-1252", "MacRoman"], alphabet="ÁÂÃÀÇÉÊÍÓÔÕÚáâãàçéêíóôõú", wiki_start_pages=["Wikipédia:Página_principal"], ), diff --git a/src/pip/_vendor/chardet/py.typed b/src/pip/_vendor/chardet/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_vendor/chardet/resultdict.py b/src/pip/_vendor/chardet/resultdict.py new file mode 100644 index 00000000000..7d36e64c467 --- /dev/null +++ b/src/pip/_vendor/chardet/resultdict.py @@ -0,0 +1,16 @@ +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + # TypedDict was introduced in Python 3.8. + # + # TODO: Remove the else block and TYPE_CHECKING check when dropping support + # for Python 3.7. 
+ from typing import TypedDict + + class ResultDict(TypedDict): + encoding: Optional[str] + confidence: float + language: Optional[str] + +else: + ResultDict = dict diff --git a/src/pip/_vendor/chardet/sbcharsetprober.py b/src/pip/_vendor/chardet/sbcharsetprober.py index 31d70e154a9..0ffbcdd2c3e 100644 --- a/src/pip/_vendor/chardet/sbcharsetprober.py +++ b/src/pip/_vendor/chardet/sbcharsetprober.py @@ -26,23 +26,20 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### -from collections import namedtuple +from typing import Dict, List, NamedTuple, Optional, Union from .charsetprober import CharSetProber from .enums import CharacterCategory, ProbingState, SequenceLikelihood -SingleByteCharSetModel = namedtuple( - "SingleByteCharSetModel", - [ - "charset_name", - "language", - "char_to_order_map", - "language_model", - "typical_positive_ratio", - "keep_ascii_letters", - "alphabet", - ], -) + +class SingleByteCharSetModel(NamedTuple): + charset_name: str + language: str + char_to_order_map: Dict[int, int] + language_model: Dict[int, Dict[int, int]] + typical_positive_ratio: float + keep_ascii_letters: bool + alphabet: str class SingleByteCharSetProber(CharSetProber): @@ -51,22 +48,27 @@ class SingleByteCharSetProber(CharSetProber): POSITIVE_SHORTCUT_THRESHOLD = 0.95 NEGATIVE_SHORTCUT_THRESHOLD = 0.05 - def __init__(self, model, is_reversed=False, name_prober=None): + def __init__( + self, + model: SingleByteCharSetModel, + is_reversed: bool = False, + name_prober: Optional[CharSetProber] = None, + ) -> None: super().__init__() self._model = model # TRUE if we need to reverse every pair in the model lookup self._reversed = is_reversed # Optional auxiliary prober for name decision self._name_prober = name_prober - self._last_order = None - self._seq_counters = None - self._total_seqs = None - self._total_char = None - self._control_char = None - self._freq_char = None + self._last_order = 255 + self._seq_counters: List[int] = [] + 
self._total_seqs = 0 + self._total_char = 0 + self._control_char = 0 + self._freq_char = 0 self.reset() - def reset(self): + def reset(self) -> None: super().reset() # char order of last character self._last_order = 255 @@ -78,18 +80,18 @@ def reset(self): self._freq_char = 0 @property - def charset_name(self): + def charset_name(self) -> Optional[str]: if self._name_prober: return self._name_prober.charset_name return self._model.charset_name @property - def language(self): + def language(self) -> Optional[str]: if self._name_prober: return self._name_prober.language return self._model.language - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: # TODO: Make filter_international_words keep things in self.alphabet if not self._model.keep_ascii_letters: byte_str = self.filter_international_words(byte_str) @@ -139,7 +141,7 @@ def feed(self, byte_str): return self.state - def get_confidence(self): + def get_confidence(self) -> float: r = 0.01 if self._total_seqs > 0: r = ( diff --git a/src/pip/_vendor/chardet/sbcsgroupprober.py b/src/pip/_vendor/chardet/sbcsgroupprober.py index cad001cb10e..890ae8465c5 100644 --- a/src/pip/_vendor/chardet/sbcsgroupprober.py +++ b/src/pip/_vendor/chardet/sbcsgroupprober.py @@ -48,7 +48,7 @@ class SBCSGroupProber(CharSetGroupProber): - def __init__(self): + def __init__(self) -> None: super().__init__() hebrew_prober = HebrewProber() logical_hebrew_prober = SingleByteCharSetProber( diff --git a/src/pip/_vendor/chardet/sjisprober.py b/src/pip/_vendor/chardet/sjisprober.py index 3bcbdb71d16..91df077961b 100644 --- a/src/pip/_vendor/chardet/sjisprober.py +++ b/src/pip/_vendor/chardet/sjisprober.py @@ -25,6 +25,8 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import Union + from .chardistribution import SJISDistributionAnalysis from .codingstatemachine import CodingStateMachine from .enums import MachineState, ProbingState @@ -34,26 +36,29 
@@ class SJISProber(MultiByteCharSetProber): - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(SJIS_SM_MODEL) self.distribution_analyzer = SJISDistributionAnalysis() self.context_analyzer = SJISContextAnalysis() self.reset() - def reset(self): + def reset(self) -> None: super().reset() self.context_analyzer.reset() @property - def charset_name(self): + def charset_name(self) -> str: return self.context_analyzer.charset_name @property - def language(self): + def language(self) -> str: return "Japanese" - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: + assert self.coding_sm is not None + assert self.distribution_analyzer is not None + for i, byte in enumerate(byte_str): coding_state = self.coding_sm.next_state(byte) if coding_state == MachineState.ERROR: @@ -92,7 +97,9 @@ def feed(self, byte_str): return self.state - def get_confidence(self): + def get_confidence(self) -> float: + assert self.distribution_analyzer is not None + context_conf = self.context_analyzer.get_confidence() distrib_conf = self.distribution_analyzer.get_confidence() return max(context_conf, distrib_conf) diff --git a/src/pip/_vendor/chardet/universaldetector.py b/src/pip/_vendor/chardet/universaldetector.py index 22fcf8290c1..30c441dc28e 100644 --- a/src/pip/_vendor/chardet/universaldetector.py +++ b/src/pip/_vendor/chardet/universaldetector.py @@ -39,12 +39,16 @@ class a user of ``chardet`` should use. 
import codecs import logging import re +from typing import List, Optional, Union from .charsetgroupprober import CharSetGroupProber +from .charsetprober import CharSetProber from .enums import InputState, LanguageFilter, ProbingState from .escprober import EscCharSetProber from .latin1prober import Latin1Prober +from .macromanprober import MacRomanProber from .mbcsgroupprober import MBCSGroupProber +from .resultdict import ResultDict from .sbcsgroupprober import SBCSGroupProber from .utf1632prober import UTF1632Prober @@ -80,34 +84,55 @@ class UniversalDetector: "iso-8859-9": "Windows-1254", "iso-8859-13": "Windows-1257", } + # Based on https://encoding.spec.whatwg.org/#names-and-labels + # but altered to match Python names for encodings and remove mappings + # that break tests. + LEGACY_MAP = { + "ascii": "Windows-1252", + "iso-8859-1": "Windows-1252", + "tis-620": "ISO-8859-11", + "iso-8859-9": "Windows-1254", + "gb2312": "GB18030", + "euc-kr": "CP949", + "utf-16le": "UTF-16", + } - def __init__(self, lang_filter=LanguageFilter.ALL): - self._esc_charset_prober = None - self._utf1632_prober = None - self._charset_probers = [] - self.result = None - self.done = None - self._got_data = None - self._input_state = None - self._last_char = None + def __init__( + self, + lang_filter: LanguageFilter = LanguageFilter.ALL, + should_rename_legacy: bool = False, + ) -> None: + self._esc_charset_prober: Optional[EscCharSetProber] = None + self._utf1632_prober: Optional[UTF1632Prober] = None + self._charset_probers: List[CharSetProber] = [] + self.result: ResultDict = { + "encoding": None, + "confidence": 0.0, + "language": None, + } + self.done = False + self._got_data = False + self._input_state = InputState.PURE_ASCII + self._last_char = b"" self.lang_filter = lang_filter self.logger = logging.getLogger(__name__) - self._has_win_bytes = None + self._has_win_bytes = False + self.should_rename_legacy = should_rename_legacy self.reset() @property - def input_state(self): + def 
input_state(self) -> int: return self._input_state @property - def has_win_bytes(self): + def has_win_bytes(self) -> bool: return self._has_win_bytes @property - def charset_probers(self): + def charset_probers(self) -> List[CharSetProber]: return self._charset_probers - def reset(self): + def reset(self) -> None: """ Reset the UniversalDetector and all of its probers back to their initial states. This is called by ``__init__``, so you only need to @@ -126,7 +151,7 @@ def reset(self): for prober in self._charset_probers: prober.reset() - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> None: """ Takes a chunk of a document and feeds it through all of the relevant charset probers. @@ -166,6 +191,7 @@ def feed(self, byte_str): elif byte_str.startswith(b"\xFE\xFF\x00\x00"): # FE FF 00 00 UCS-4, unusual octet order BOM (3412) self.result = { + # TODO: This encoding is not supported by Python. Should remove? "encoding": "X-ISO-10646-UCS-4-3412", "confidence": 1.0, "language": "", @@ -173,6 +199,7 @@ def feed(self, byte_str): elif byte_str.startswith(b"\x00\x00\xFF\xFE"): # 00 00 FF FE UCS-4, unusual octet order BOM (2143) self.result = { + # TODO: This encoding is not supported by Python. Should remove? "encoding": "X-ISO-10646-UCS-4-2143", "confidence": 1.0, "language": "", @@ -242,6 +269,7 @@ def feed(self, byte_str): if self.lang_filter & LanguageFilter.NON_CJK: self._charset_probers.append(SBCSGroupProber()) self._charset_probers.append(Latin1Prober()) + self._charset_probers.append(MacRomanProber()) for prober in self._charset_probers: if prober.feed(byte_str) == ProbingState.FOUND_IT: self.result = { @@ -254,7 +282,7 @@ def feed(self, byte_str): if self.WIN_BYTE_DETECTOR.search(byte_str): self._has_win_bytes = True - def close(self): + def close(self) -> ResultDict: """ Stop analyzing the current document and come up with a final prediction. 
@@ -288,7 +316,8 @@ def close(self): max_prober = prober if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): charset_name = max_prober.charset_name - lower_charset_name = max_prober.charset_name.lower() + assert charset_name is not None + lower_charset_name = charset_name.lower() confidence = max_prober.get_confidence() # Use Windows encoding name instead of ISO-8859 if we saw any # extra Windows-specific bytes @@ -297,6 +326,11 @@ def close(self): charset_name = self.ISO_WIN_MAP.get( lower_charset_name, charset_name ) + # Rename legacy encodings with superset encodings if asked + if self.should_rename_legacy: + charset_name = self.LEGACY_MAP.get( + (charset_name or "").lower(), charset_name + ) self.result = { "encoding": charset_name, "confidence": confidence, diff --git a/src/pip/_vendor/chardet/utf1632prober.py b/src/pip/_vendor/chardet/utf1632prober.py index 9fd1580b837..6bdec63d686 100644 --- a/src/pip/_vendor/chardet/utf1632prober.py +++ b/src/pip/_vendor/chardet/utf1632prober.py @@ -18,6 +18,8 @@ # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import List, Union + from .charsetprober import CharSetProber from .enums import ProbingState @@ -36,7 +38,7 @@ class UTF1632Prober(CharSetProber): # a fixed constant ratio of expected zeros or non-zeros in modulo-position. 
EXPECTED_RATIO = 0.94 - def __init__(self): + def __init__(self) -> None: super().__init__() self.position = 0 self.zeros_at_mod = [0] * 4 @@ -51,7 +53,7 @@ def __init__(self): self.first_half_surrogate_pair_detected_16le = False self.reset() - def reset(self): + def reset(self) -> None: super().reset() self.position = 0 self.zeros_at_mod = [0] * 4 @@ -66,7 +68,7 @@ def reset(self): self.quad = [0, 0, 0, 0] @property - def charset_name(self): + def charset_name(self) -> str: if self.is_likely_utf32be(): return "utf-32be" if self.is_likely_utf32le(): @@ -79,16 +81,16 @@ def charset_name(self): return "utf-16" @property - def language(self): + def language(self) -> str: return "" - def approx_32bit_chars(self): + def approx_32bit_chars(self) -> float: return max(1.0, self.position / 4.0) - def approx_16bit_chars(self): + def approx_16bit_chars(self) -> float: return max(1.0, self.position / 2.0) - def is_likely_utf32be(self): + def is_likely_utf32be(self) -> bool: approx_chars = self.approx_32bit_chars() return approx_chars >= self.MIN_CHARS_FOR_DETECTION and ( self.zeros_at_mod[0] / approx_chars > self.EXPECTED_RATIO @@ -98,7 +100,7 @@ def is_likely_utf32be(self): and not self.invalid_utf32be ) - def is_likely_utf32le(self): + def is_likely_utf32le(self) -> bool: approx_chars = self.approx_32bit_chars() return approx_chars >= self.MIN_CHARS_FOR_DETECTION and ( self.nonzeros_at_mod[0] / approx_chars > self.EXPECTED_RATIO @@ -108,7 +110,7 @@ def is_likely_utf32le(self): and not self.invalid_utf32le ) - def is_likely_utf16be(self): + def is_likely_utf16be(self) -> bool: approx_chars = self.approx_16bit_chars() return approx_chars >= self.MIN_CHARS_FOR_DETECTION and ( (self.nonzeros_at_mod[1] + self.nonzeros_at_mod[3]) / approx_chars @@ -118,7 +120,7 @@ def is_likely_utf16be(self): and not self.invalid_utf16be ) - def is_likely_utf16le(self): + def is_likely_utf16le(self) -> bool: approx_chars = self.approx_16bit_chars() return approx_chars >= 
self.MIN_CHARS_FOR_DETECTION and ( (self.nonzeros_at_mod[0] + self.nonzeros_at_mod[2]) / approx_chars @@ -128,7 +130,7 @@ def is_likely_utf16le(self): and not self.invalid_utf16le ) - def validate_utf32_characters(self, quad): + def validate_utf32_characters(self, quad: List[int]) -> None: """ Validate if the quad of bytes is valid UTF-32. @@ -150,7 +152,7 @@ def validate_utf32_characters(self, quad): ): self.invalid_utf32le = True - def validate_utf16_characters(self, pair): + def validate_utf16_characters(self, pair: List[int]) -> None: """ Validate if the pair of bytes is valid UTF-16. @@ -182,7 +184,7 @@ def validate_utf16_characters(self, pair): else: self.invalid_utf16le = True - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: for c in byte_str: mod4 = self.position % 4 self.quad[mod4] = c @@ -198,7 +200,7 @@ def feed(self, byte_str): return self.state @property - def state(self): + def state(self) -> ProbingState: if self._state in {ProbingState.NOT_ME, ProbingState.FOUND_IT}: # terminal, decided states return self._state @@ -210,7 +212,7 @@ def state(self): self._state = ProbingState.NOT_ME return self._state - def get_confidence(self): + def get_confidence(self) -> float: return ( 0.85 if ( diff --git a/src/pip/_vendor/chardet/utf8prober.py b/src/pip/_vendor/chardet/utf8prober.py index 3aae09e8630..d96354d97c2 100644 --- a/src/pip/_vendor/chardet/utf8prober.py +++ b/src/pip/_vendor/chardet/utf8prober.py @@ -25,6 +25,8 @@ # 02110-1301 USA ######################### END LICENSE BLOCK ######################### +from typing import Union + from .charsetprober import CharSetProber from .codingstatemachine import CodingStateMachine from .enums import MachineState, ProbingState @@ -34,26 +36,26 @@ class UTF8Prober(CharSetProber): ONE_CHAR_PROB = 0.5 - def __init__(self): + def __init__(self) -> None: super().__init__() self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) - self._num_mb_chars = None + self._num_mb_chars = 
0 self.reset() - def reset(self): + def reset(self) -> None: super().reset() self.coding_sm.reset() self._num_mb_chars = 0 @property - def charset_name(self): + def charset_name(self) -> str: return "utf-8" @property - def language(self): + def language(self) -> str: return "" - def feed(self, byte_str): + def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: for c in byte_str: coding_state = self.coding_sm.next_state(c) if coding_state == MachineState.ERROR: @@ -72,7 +74,7 @@ def feed(self, byte_str): return self.state - def get_confidence(self): + def get_confidence(self) -> float: unlike = 0.99 if self._num_mb_chars < 6: unlike *= self.ONE_CHAR_PROB**self._num_mb_chars diff --git a/src/pip/_vendor/chardet/version.py b/src/pip/_vendor/chardet/version.py index a08a06b9a87..c5e9d85cd75 100644 --- a/src/pip/_vendor/chardet/version.py +++ b/src/pip/_vendor/chardet/version.py @@ -1,9 +1,9 @@ """ This module exists only to simplify retrieving the version number of chardet -from within setup.py and from chardet subpackages. +from within setuptools and from chardet subpackages. 
:author: Dan Blanchard (dan.blanchard@gmail.com) """ -__version__ = "5.0.0" +__version__ = "5.1.0" VERSION = __version__.split(".") diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index cd42578f4d3..31cda4a8095 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -9,7 +9,7 @@ pyparsing==3.0.9 pyproject-hooks==1.0.0 requests==2.28.2 certifi==2022.12.7 - chardet==5.0.0 + chardet==5.1.0 idna==3.4 urllib3==1.26.12 rich==12.6.0 From 17b73457a18bb990b4955b1d5d55fc054acbea38 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 28 Jan 2023 20:41:54 +0000 Subject: [PATCH 269/730] Upgrade urllib3 to 1.26.14 --- news/urllib3.vendor.rst | 1 + src/pip/_vendor/urllib3/_version.py | 2 +- src/pip/_vendor/urllib3/connectionpool.py | 2 +- src/pip/_vendor/urllib3/contrib/appengine.py | 2 +- src/pip/_vendor/urllib3/contrib/ntlmpool.py | 4 ++-- src/pip/_vendor/urllib3/contrib/pyopenssl.py | 7 +++---- src/pip/_vendor/urllib3/response.py | 13 +++++++++++++ src/pip/_vendor/urllib3/util/retry.py | 2 +- src/pip/_vendor/urllib3/util/url.py | 2 +- src/pip/_vendor/vendor.txt | 2 +- 10 files changed, 25 insertions(+), 12 deletions(-) create mode 100644 news/urllib3.vendor.rst diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst new file mode 100644 index 00000000000..c9d10554e0e --- /dev/null +++ b/news/urllib3.vendor.rst @@ -0,0 +1 @@ +Upgrade urllib3 to 1.26.14 diff --git a/src/pip/_vendor/urllib3/_version.py b/src/pip/_vendor/urllib3/_version.py index 6fbc84b30f2..7c031661ba8 100644 --- a/src/pip/_vendor/urllib3/_version.py +++ b/src/pip/_vendor/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.12" +__version__ = "1.26.14" diff --git a/src/pip/_vendor/urllib3/connectionpool.py b/src/pip/_vendor/urllib3/connectionpool.py index 96339e90af1..70873927924 100644 --- a/src/pip/_vendor/urllib3/connectionpool.py +++ b/src/pip/_vendor/urllib3/connectionpool.py @@ -862,7 +862,7 @@ def 
_is_ssl_error_message_from_http_proxy(ssl_error): ) # Check if we should retry the HTTP response. - has_retry_after = bool(response.getheader("Retry-After")) + has_retry_after = bool(response.headers.get("Retry-After")) if retries.is_retry(method, response.status, has_retry_after): try: retries = retries.increment(method, url, response=response, _pool=self) diff --git a/src/pip/_vendor/urllib3/contrib/appengine.py b/src/pip/_vendor/urllib3/contrib/appengine.py index 668538695f9..1717ee22cdf 100644 --- a/src/pip/_vendor/urllib3/contrib/appengine.py +++ b/src/pip/_vendor/urllib3/contrib/appengine.py @@ -224,7 +224,7 @@ def urlopen( ) # Check if we should retry the HTTP response. - has_retry_after = bool(http_response.getheader("Retry-After")) + has_retry_after = bool(http_response.headers.get("Retry-After")) if retries.is_retry(method, http_response.status, has_retry_after): retries = retries.increment(method, url, response=http_response, _pool=self) log.debug("Retry: %s", url) diff --git a/src/pip/_vendor/urllib3/contrib/ntlmpool.py b/src/pip/_vendor/urllib3/contrib/ntlmpool.py index 41a8fd174cb..471665754e9 100644 --- a/src/pip/_vendor/urllib3/contrib/ntlmpool.py +++ b/src/pip/_vendor/urllib3/contrib/ntlmpool.py @@ -69,7 +69,7 @@ def _new_conn(self): log.debug("Request headers: %s", headers) conn.request("GET", self.authurl, None, headers) res = conn.getresponse() - reshdr = dict(res.getheaders()) + reshdr = dict(res.headers) log.debug("Response status: %s %s", res.status, res.reason) log.debug("Response headers: %s", reshdr) log.debug("Response data: %s [...]", res.read(100)) @@ -101,7 +101,7 @@ def _new_conn(self): conn.request("GET", self.authurl, None, headers) res = conn.getresponse() log.debug("Response status: %s %s", res.status, res.reason) - log.debug("Response headers: %s", dict(res.getheaders())) + log.debug("Response headers: %s", dict(res.headers)) log.debug("Response data: %s [...]", res.read()[:100]) if res.status != 200: if res.status == 401: diff 
--git a/src/pip/_vendor/urllib3/contrib/pyopenssl.py b/src/pip/_vendor/urllib3/contrib/pyopenssl.py index 528764a0334..19e4aa97cc1 100644 --- a/src/pip/_vendor/urllib3/contrib/pyopenssl.py +++ b/src/pip/_vendor/urllib3/contrib/pyopenssl.py @@ -47,10 +47,10 @@ """ from __future__ import absolute_import +import OpenSSL.crypto import OpenSSL.SSL from cryptography import x509 from cryptography.hazmat.backends.openssl import backend as openssl_backend -from cryptography.hazmat.backends.openssl.x509 import _Certificate try: from cryptography.x509 import UnsupportedExtension @@ -228,9 +228,8 @@ def get_subj_alt_name(peer_cert): if hasattr(peer_cert, "to_cryptography"): cert = peer_cert.to_cryptography() else: - # This is technically using private APIs, but should work across all - # relevant versions before PyOpenSSL got a proper API for this. - cert = _Certificate(openssl_backend, peer_cert._x509) + der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert) + cert = x509.load_der_x509_certificate(der, openssl_backend) # We want to find the SAN extension. Ask Cryptography to locate it (it's # faster than looping in Python) diff --git a/src/pip/_vendor/urllib3/response.py b/src/pip/_vendor/urllib3/response.py index 4969b70e3ef..8909f8454e9 100644 --- a/src/pip/_vendor/urllib3/response.py +++ b/src/pip/_vendor/urllib3/response.py @@ -3,6 +3,7 @@ import io import logging import sys +import warnings import zlib from contextlib import contextmanager from socket import error as SocketError @@ -657,9 +658,21 @@ def from_httplib(ResponseCls, r, **response_kw): # Backwards-compatibility methods for http.client.HTTPResponse def getheaders(self): + warnings.warn( + "HTTPResponse.getheaders() is deprecated and will be removed " + "in urllib3 v2.1.0. 
Instead access HTTPResponse.headers directly.", + category=DeprecationWarning, + stacklevel=2, + ) return self.headers def getheader(self, name, default=None): + warnings.warn( + "HTTPResponse.getheader() is deprecated and will be removed " + "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).", + category=DeprecationWarning, + stacklevel=2, + ) return self.headers.get(name, default) # Backwards compatibility for http.cookiejar diff --git a/src/pip/_vendor/urllib3/util/retry.py b/src/pip/_vendor/urllib3/util/retry.py index 3398323fd7c..2490d5e5b63 100644 --- a/src/pip/_vendor/urllib3/util/retry.py +++ b/src/pip/_vendor/urllib3/util/retry.py @@ -394,7 +394,7 @@ def parse_retry_after(self, retry_after): def get_retry_after(self, response): """Get the value of Retry-After in seconds.""" - retry_after = response.getheader("Retry-After") + retry_after = response.headers.get("Retry-After") if retry_after is None: return None diff --git a/src/pip/_vendor/urllib3/util/url.py b/src/pip/_vendor/urllib3/util/url.py index 86bd8b48ab0..d6d0bbcea66 100644 --- a/src/pip/_vendor/urllib3/util/url.py +++ b/src/pip/_vendor/urllib3/util/url.py @@ -63,7 +63,7 @@ BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$") ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$") -_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % ( +_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % ( REG_NAME_PAT, IPV4_PAT, IPV6_ADDRZ_PAT, diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 31cda4a8095..67452d89fcf 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -11,7 +11,7 @@ requests==2.28.2 certifi==2022.12.7 chardet==5.1.0 idna==3.4 - urllib3==1.26.12 + urllib3==1.26.14 rich==12.6.0 pygments==2.13.0 typing_extensions==4.4.0 From 17e20c746e6b855f6bb4a28bbbfff66706762240 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 28 Jan 2023 22:23:16 +0000 Subject: [PATCH 270/730] Correctly handle keyring auth 
subprocess newlines on Windows The line endings on Windows are not required to be `\n`. --- src/pip/_internal/network/auth.py | 4 ++-- tests/unit/test_network_auth.py | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 68b5a5f45be..c1621326826 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -128,7 +128,7 @@ def _get_password(self, service_name: str, username: str) -> Optional[str]: ) if res.returncode: return None - return res.stdout.decode("utf-8").strip("\n") + return res.stdout.decode("utf-8").strip(os.linesep) def _set_password(self, service_name: str, username: str, password: str) -> None: """Mirror the implementation of keyring.set_password using cli""" @@ -136,7 +136,7 @@ def _set_password(self, service_name: str, username: str, password: str) -> None return None cmd = [self.keyring, "set", service_name, username] - input_ = password.encode("utf-8") + b"\n" + input_ = (password + os.linesep).encode("utf-8") env = os.environ.copy() env["PYTHONIOENCODING"] = "utf-8" res = subprocess.run(cmd, input=input_, env=env) diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index 625a20a48f5..5e9e325a15f 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -1,4 +1,5 @@ import functools +import os import sys from typing import Any, Dict, Iterable, List, Optional, Tuple @@ -360,7 +361,7 @@ def __call__( self.returncode = 1 else: # Passwords are returned encoded with a newline appended - self.stdout = password.encode("utf-8") + b"\n" + self.stdout = (password + os.linesep).encode("utf-8") if cmd[1] == "set": assert stdin is None @@ -369,7 +370,7 @@ def __call__( assert input is not None # Input from stdin is encoded - self.set_password(cmd[2], cmd[3], input.decode("utf-8").strip("\n")) + self.set_password(cmd[2], cmd[3], input.decode("utf-8").strip(os.linesep)) return self From 
acd7ef1f9aa74efbec786e3929faca1201b4b422 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 28 Jan 2023 22:35:25 +0000 Subject: [PATCH 271/730] Perform relaxed version matching in `pip debug` test This ensures that we're not trying to compare versions as equal strings. --- tests/functional/test_debug.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_debug.py b/tests/functional/test_debug.py index 41374f8cb88..77cd732f9f1 100644 --- a/tests/functional/test_debug.py +++ b/tests/functional/test_debug.py @@ -1,6 +1,8 @@ +import re from typing import List import pytest +from pip._vendor.packaging.version import Version from pip._internal.commands.debug import create_vendor_txt_map from pip._internal.utils import compatibility_tags @@ -45,7 +47,9 @@ def test_debug__library_versions(script: PipTestEnvironment) -> None: vendored_versions = create_vendor_txt_map() for name, value in vendored_versions.items(): - assert f"{name}=={value}" in result.stdout + match = re.search(rf"{name}==(\S+)", result.stdout) + assert match is not None, f"Could not find {name} in output" + assert Version(match.group(1)) == Version(value) @pytest.mark.parametrize( From 169511e68eb64efff5705305f72b0c53d7bff580 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 29 Jan 2023 12:25:34 +0100 Subject: [PATCH 272/730] Update direct URL hashes examples --- docs/html/reference/installation-report.md | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md index e0f2413186b..fc9729785d2 100644 --- a/docs/html/reference/installation-report.md +++ b/docs/html/reference/installation-report.md @@ -61,7 +61,7 @@ package with the following properties: `--find-links`. 
```{note} - For source archives, `download_info.archive_info.hash` may + For source archives, `download_info.archive_info.hashes` may be absent when the requirement was installed from the wheel cache and the cache entry was populated by an older pip version that did not record the origin URL of the downloaded artifact. @@ -96,7 +96,9 @@ will produce an output similar to this (metadata abriged for brevity): "download_info": { "url": "https://files.pythonhosted.org/packages/a4/0c/fbaa7319dcb5eecd3484686eb5a5c5702a6445adb566f01aee6de3369bc4/pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", "archive_info": { - "hash": "sha256=18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310" + "hashes": { + "sha256": "18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310" + } } }, "is_direct": false, @@ -141,7 +143,9 @@ will produce an output similar to this (metadata abriged for brevity): "download_info": { "url": "https://files.pythonhosted.org/packages/6c/10/a7d0fa5baea8fe7b50f448ab742f26f52b80bfca85ac2be9d35cdd9a3246/pyparsing-3.0.9-py3-none-any.whl", "archive_info": { - "hash": "sha256=5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" + "hashes": { + "sha256": "5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" + } } }, "is_direct": false, @@ -160,7 +164,9 @@ will produce an output similar to this (metadata abriged for brevity): "download_info": { "url": "https://files.pythonhosted.org/packages/75/e1/932e06004039dd670c9d5e1df0cd606bf46e29a28e65d5bb28e894ea29c9/typing_extensions-4.2.0-py3-none-any.whl", "archive_info": { - "hash": "sha256=6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708" + "hashes": { + "sha256": "6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708" + } } }, "is_direct": false, From ec7eb6f179866151f148c7695fc773e66b8c3adc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 29 Jan 2023 12:42:46 +0100 Subject: [PATCH 273/730] Add 
version history to inspect and install report docs --- docs/html/reference/inspect-report.md | 4 ++++ docs/html/reference/installation-report.md | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/docs/html/reference/inspect-report.md b/docs/html/reference/inspect-report.md index e2c7301fdab..8f4811730f9 100644 --- a/docs/html/reference/inspect-report.md +++ b/docs/html/reference/inspect-report.md @@ -3,6 +3,10 @@ ```{versionadded} 22.2 ``` +```{versionchanged} 23.0 +``version`` has been bumped to ``1`` and the format declared stable. +``` + The `pip inspect` command produces a detailed JSON report of the Python environment, including installed distributions. diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md index fc9729785d2..6ed5f659cff 100644 --- a/docs/html/reference/installation-report.md +++ b/docs/html/reference/installation-report.md @@ -3,6 +3,10 @@ ```{versionadded} 22.2 ``` +```{versionchanged} 23.0 +``version`` has been bumped to ``1`` and the format declared stable. +``` + The `--report` option of the pip install command produces a detailed JSON report of what it did install (or what it would have installed, if used with the `--dry-run` option). From 33cd541cc49b112753841c7937427bf0fe18e197 Mon Sep 17 00:00:00 2001 From: Dos Moonen Date: Thu, 15 Dec 2022 12:19:15 +0100 Subject: [PATCH 274/730] Make it possible to request a keyring provider: `auto`, `disabled`, `import` or `subprocess` Refactored `_get_index_url()` to get integration tests for the subprocess backend working. Keyring support via the 'subprocess' provider can only retrieve a password, not a username-password combo. The username therefor MUST come from the URL. If the URL obtained from the index does not contain a username then the username from a matching index is used. `_get_index_url()` does that matching. 
The problem this refactoring solves is that the URL where a wheel or sdist can be downloaded from does not always start with the index url. Azure DevOps Artifacts Feeds are an example since it replaces the friendly name of the Feed with the GUID of the Feed. Causing `url.startswith(prefix)` to evaluate as `False`. The new behaviour is to return the index which matches the netloc and has the longest common prefix of the `path` property of the value returned by `urllib.parse.urlsplit()`. The behaviour for resolving ties is unspecified. --- .pre-commit-config.yaml | 2 +- docs/html/topics/authentication.md | 113 +++++++++++-- news/8719.feature.rst | 1 + src/pip/_internal/cli/cmdoptions.py | 14 ++ src/pip/_internal/cli/req_command.py | 1 + src/pip/_internal/network/auth.py | 212 +++++++++++++++++------- tests/conftest.py | 11 +- tests/functional/test_install_config.py | 128 ++++++++++++-- tests/unit/test_network_auth.py | 99 ++++++++--- 9 files changed, 481 insertions(+), 100 deletions(-) create mode 100644 news/8719.feature.rst diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a49016eed3c..0fb3a836be6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,7 +47,7 @@ repos: additional_dependencies: [ 'keyring==23.0.1', 'nox==2021.6.12', - 'pytest==7.1.1', + 'pytest', 'types-docutils==0.18.3', 'types-setuptools==57.4.14', 'types-freezegun==1.1.9', diff --git a/docs/html/topics/authentication.md b/docs/html/topics/authentication.md index f5b553160df..54c3cf9dd3a 100644 --- a/docs/html/topics/authentication.md +++ b/docs/html/topics/authentication.md @@ -66,25 +66,118 @@ man pages][netrc-docs]. ## Keyring Support pip supports loading credentials stored in your keyring using the -{pypi}`keyring` library. +{pypi}`keyring` library which can be enabled py passing `--keyring-provider` +with a value of `auto`, `disabled`, `import` or `subprocess`. The default value +is `auto`. 
`auto` will respect `--no-input` and not query keyring at all if that
+option is used. The `auto` provider will use the `import` provider if the
+`keyring` module can be imported. If that is not the case it will use the
+`subprocess` provider if a `keyring` executable can be found. Otherwise, the
+`disabled` provider will be used.
+
+### Configuring Pip
+Passing this as a command line argument will work, but is not how the majority
+of this feature's users will use it. They instead will want to overwrite the
+default of `disabled` in the global, user or site configuration file:
+```bash
+$ pip config set --global global.keyring-provider subprocess
+
+# A different user on the same system which has PYTHONPATH configured and is
+# wanting to use keyring installed that way could then run
+$ pip config set --user global.keyring-provider import
-pip will first try to use `keyring` in the same environment as itself and
-fallback to using any `keyring` installation which is available on `PATH`.
+# For a specific virtual environment you might want to disable it again
+# because you will only be using PyPI and the private repo (and mirror)
+# requires 2FA with a keycard and a pincode
+$ pip config set --site global.index https://pypi.org/simple
+$ pip config set --site global.keyring-provider disabled
+
+# configuring it via environment variable is also possible
+$ export PIP_KEYRING_PROVIDER=disabled
+```
-Therefore, either of the following setups will work:
+### Installing and using the keyring python module
+
+Setting it to `import` tries to communicate with `keyring` by importing it
+and using its Python api.
```bash
-$ pip install keyring  # install keyring from PyPI into same environment as pip
+# install keyring from PyPI
+$ pip install keyring --index-url https://pypi.org/simple
 $ echo "your-password" | keyring set pypi.company.com your-username
-$ pip install your-package --index-url https://pypi.company.com/
+$ pip install your-package --keyring-provider import --index-url https://pypi.company.com/
```
-or
+### Installing and using the keyring cli application
+
+Setting it to `subprocess` will look for a `keyring` executable on the PATH
+if one can be found that is different from the `keyring` installation `import`
+would be using.
+
+The cli requires a username, therefore you MUST put a username in the url.
+See the example below or the basic HTTP authentication section at the top of
+this page.
```bash
-$ pipx install keyring  # install keyring from PyPI into standalone environment
-$ echo "your-password" | keyring set pypi.company.com your-username
-$ pip install your-package --index-url https://pypi.company.com/
+# install keyring from PyPI using pipx, which we assume is installed properly
+# you can also create a venv somewhere and add it to the PATH yourself instead
+$ pipx install keyring --index-url https://pypi.org/simple
+
+# install the keyring backend for Azure DevOps for example
+# VssSessionToken is the username you MUST use for this backend
+$ pipx inject keyring artifacts-keyring --index-url https://pypi.org/simple
+
+# or the one for Google Artifact Registry
+$ pipx inject keyring keyrings.google-artifactregistry-auth --index-url https://pypi.org/simple
+$ gcloud auth login
+
+$ pip install your-package --keyring-provider subprocess --index-url https://username@pypi.example.com/
+```
+
+### Here be dragons
+
+The `auto` provider is conservative and does not query keyring at all when
+`--no-input` is used because the keyring might require user interaction such as
+prompting the user on the console.
Third party tools frequently call Pip for +you and do indeed pass `--no-input` as they are well-behaved and don't have +much information to work with. (Keyring does have an api to request a backend +that does not require user input.) You have more information about your system, +however! + +You can force keyring usage by requesting a keyring provider other than `auto` +(or `disabled`). Leaving `import` and `subprocess`. You do this by passing +`--keyring-provider import` or one of the following methods: + +```bash +# via config file, possibly with --user, --global or --site +$ pip config set global.keyring-provider subprocess +# or via environment variable +$ export PIP_KEYRING_PROVIDER=import +``` + +```{warning} +Be careful when doing this since it could cause tools such as Pipx and Pipenv +to appear to hang. They show their own progress indicator while hiding output +from the subprocess in which they run Pip. You won't know whether the keyring +backend is waiting the user input or not in such situations. +``` + +Pip is conservative and does not query keyring at all when `--no-input` is used +because the keyring might require user interaction such as prompting the user +on the console. You can force keyring usage by passing `--force-keyring` or one +of the following: + +```bash +# possibly with --user, --global or --site +$ pip config set global.force-keyring true +# or +$ export PIP_FORCE_KEYRING=1 +``` + +```{warning} +Be careful when doing this since it could cause tools such as Pipx and Pipenv +to appear to hang. They show their own progress indicator while hiding output +from the subprocess in which they run Pip. You won't know whether the keyring +backend is waiting the user input or not in such situations. 
``` Note that `keyring` (the Python package) needs to be installed separately from diff --git a/news/8719.feature.rst b/news/8719.feature.rst new file mode 100644 index 00000000000..3f3caf2db89 --- /dev/null +++ b/news/8719.feature.rst @@ -0,0 +1 @@ +Add ``--keyring-provider`` flag. See the Authentication page in the documentation for more info. diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 661c489c73e..c1c7f41845a 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -244,6 +244,19 @@ class PipOption(Option): help="Disable prompting for input.", ) +keyring_provider: Callable[..., Option] = partial( + Option, + "--keyring-provider", + dest="keyring_provider", + choices=["auto", "disabled", "import", "subprocess"], + default="disabled", + help=( + "Enable the credential lookup via the keyring library if user input is allowed." + " Specify which mechanism to use [disabled, import, subprocess]." + " (default: disabled)" + ), +) + proxy: Callable[..., Option] = partial( Option, "--proxy", @@ -1019,6 +1032,7 @@ def check_list_path_option(options: Values) -> None: quiet, log, no_input, + keyring_provider, proxy, retries, timeout, diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 1044809f040..048b9c99e41 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -151,6 +151,7 @@ def _build_session( # Determine if we can prompt the user for authentication or not session.auth.prompting = not options.no_input + session.auth.keyring_provider = options.keyring_provider return session diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index c1621326826..3e23dcbbedf 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -3,12 +3,16 @@ Contains interface (MultiDomainBasicAuth) and associated glue code for providing credentials in the context of 
network requests. """ - import os import shutil import subprocess +import sysconfig +import typing import urllib.parse from abc import ABC, abstractmethod +from functools import lru_cache +from os.path import commonprefix +from pathlib import Path from typing import Any, Dict, List, NamedTuple, Optional, Tuple from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth @@ -123,7 +127,7 @@ def _get_password(self, service_name: str, username: str) -> Optional[str]: res = subprocess.run( cmd, stdin=subprocess.DEVNULL, - capture_output=True, + stdout=subprocess.PIPE, env=env, ) if res.returncode: @@ -144,56 +148,75 @@ def _set_password(self, service_name: str, username: str, password: str) -> None return None -def get_keyring_provider() -> KeyRingBaseProvider: +@lru_cache(maxsize=None) +def get_keyring_provider(provider: str) -> KeyRingBaseProvider: + logger.verbose("Keyring provider requested: %s", provider) + # keyring has previously failed and been disabled - if not KEYRING_DISABLED: - # Default to trying to use Python provider + if KEYRING_DISABLED: + provider = "disabled" + if provider in ["import", "auto"]: try: - return KeyRingPythonProvider() + impl = KeyRingPythonProvider() + logger.verbose("Keyring provider set: import") + return impl except ImportError: pass except Exception as exc: # In the event of an unexpected exception # we should warn the user - logger.warning( - "Installed copy of keyring fails with exception %s, " - "trying to find a keyring executable as a fallback", - str(exc), - ) - - # Fallback to Cli Provider if `keyring` isn't installed + msg = "Installed copy of keyring fails with exception %s" + if provider == "auto": + msg = msg + ", trying to find a keyring executable as a fallback" + logger.warning(msg, str(exc)) + if provider in ["subprocess", "auto"]: cli = shutil.which("keyring") + if cli and cli.startswith(sysconfig.get_path("scripts")): + # all code within this function is stolen from shutil.which implementation + 
@typing.no_type_check + def PATH_as_shutil_which_determines_it() -> str: + path = os.environ.get("PATH", None) + if path is None: + try: + path = os.confstr("CS_PATH") + except (AttributeError, ValueError): + # os.confstr() or CS_PATH is not available + path = os.defpath + # bpo-35755: Don't use os.defpath if the PATH environment variable is + # set to an empty string + + return path + + scripts = Path(sysconfig.get_path("scripts")).resolve() + + paths = [] + for path in PATH_as_shutil_which_determines_it().split(os.pathsep): + p = Path(path) + if p.exists() and not p.resolve().samefile(scripts): + paths.append(path) + + path = os.pathsep.join(paths) + + cli = shutil.which("keyring", path=path) + if cli: + logger.verbose("Keyring provider set: subprocess with executable %s", cli) return KeyRingCliProvider(cli) + logger.verbose("Keyring provider set: disabled") return KeyRingNullProvider() -def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]: - """Return the tuple auth for a given url from keyring.""" - # Do nothing if no url was provided - if not url: - return None - - keyring = get_keyring_provider() - try: - return keyring.get_auth_info(url, username) - except Exception as exc: - logger.warning( - "Keyring is skipped due to an exception: %s", - str(exc), - ) - global KEYRING_DISABLED - KEYRING_DISABLED = True - return None - - class MultiDomainBasicAuth(AuthBase): def __init__( - self, prompting: bool = True, index_urls: Optional[List[str]] = None + self, + prompting: bool = True, + index_urls: Optional[List[str]] = None, + keyring_provider: str = "auto", ) -> None: self.prompting = prompting self.index_urls = index_urls + self.keyring_provider = keyring_provider # type: ignore[assignment] self.passwords: Dict[str, AuthInfo] = {} # When the user is prompted to enter credentials and keyring is # available, we will offer to save them. If the user accepts, @@ -202,6 +225,47 @@ def __init__( # ``save_credentials`` to save these. 
self._credentials_to_save: Optional[Credentials] = None + @property + def keyring_provider(self) -> KeyRingBaseProvider: + return get_keyring_provider(self._keyring_provider) + + @keyring_provider.setter + def keyring_provider(self, provider: str) -> None: + # The free function get_keyring_provider has been decorated with + # functools.cache. If an exception occurs in get_keyring_auth that + # cache will be cleared and keyring disabled, take that into account + # if you want to remove this indirection. + self._keyring_provider = provider + + @property + def use_keyring(self) -> bool: + # We won't use keyring when --no-input is passed unless + # a specific provider is requested because it might require + # user interaction + return self.prompting or self._keyring_provider not in ["auto", "disabled"] + + def _get_keyring_auth( + self, + url: Optional[str], + username: Optional[str], + ) -> Optional[AuthInfo]: + """Return the tuple auth for a given url from keyring.""" + # Do nothing if no url was provided + if not url: + return None + + try: + return self.keyring_provider.get_auth_info(url, username) + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", + str(exc), + ) + global KEYRING_DISABLED + KEYRING_DISABLED = True + get_keyring_provider.cache_clear() + return None + def _get_index_url(self, url: str) -> Optional[str]: """Return the original index URL matching the requested URL. 
@@ -218,15 +282,42 @@ def _get_index_url(self, url: str) -> Optional[str]: if not url or not self.index_urls: return None - for u in self.index_urls: - prefix = remove_auth_from_url(u).rstrip("/") + "/" - if url.startswith(prefix): - return u - return None + url = remove_auth_from_url(url).rstrip("/") + "/" + parsed_url = urllib.parse.urlsplit(url) + + candidates = [] + + for index in self.index_urls: + index = index.rstrip("/") + "/" + parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index)) + if parsed_url == parsed_index: + return index + + if parsed_url.netloc != parsed_index.netloc: + continue + + candidate = urllib.parse.urlsplit(index) + candidates.append(candidate) + + if not candidates: + return None + + candidates.sort( + reverse=True, + key=lambda candidate: commonprefix( + [ + parsed_url.path, + candidate.path, + ] + ).rfind("/"), + ) + + return urllib.parse.urlunsplit(candidates[0]) def _get_new_credentials( self, original_url: str, + *, allow_netrc: bool = True, allow_keyring: bool = False, ) -> AuthInfo: @@ -270,8 +361,8 @@ def _get_new_credentials( # The index url is more specific than the netloc, so try it first # fmt: off kr_auth = ( - get_keyring_auth(index_url, username) or - get_keyring_auth(netloc, username) + self._get_keyring_auth(index_url, username) or + self._get_keyring_auth(netloc, username) ) # fmt: on if kr_auth: @@ -348,18 +439,23 @@ def __call__(self, req: Request) -> Request: def _prompt_for_password( self, netloc: str ) -> Tuple[Optional[str], Optional[str], bool]: - username = ask_input(f"User for {netloc}: ") + username = ask_input(f"User for {netloc}: ") if self.prompting else None if not username: return None, None, False - auth = get_keyring_auth(netloc, username) - if auth and auth[0] is not None and auth[1] is not None: - return auth[0], auth[1], False + if self.use_keyring: + auth = self._get_keyring_auth(netloc, username) + if auth and auth[0] is not None and auth[1] is not None: + return auth[0], auth[1], False 
password = ask_password("Password: ") return username, password, True # Factored out to allow for easy patching in tests def _should_save_password_to_keyring(self) -> bool: - if get_keyring_provider() is None: + if ( + not self.prompting + or not self.use_keyring + or isinstance(self.keyring_provider, KeyRingNullProvider) + ): return False return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" @@ -369,19 +465,22 @@ def handle_401(self, resp: Response, **kwargs: Any) -> Response: if resp.status_code != 401: return resp + username, password = None, None + + # Query the keyring for credentials: + if self.use_keyring: + username, password = self._get_new_credentials( + resp.url, + allow_netrc=False, + allow_keyring=True, + ) + # We are not able to prompt the user so simply return the response - if not self.prompting: + if not self.prompting and not username and not password: return resp parsed = urllib.parse.urlparse(resp.url) - # Query the keyring for credentials: - username, password = self._get_new_credentials( - resp.url, - allow_netrc=False, - allow_keyring=True, - ) - # Prompt the user for a new username and password save = False if not username and not password: @@ -431,9 +530,8 @@ def warn_on_401(self, resp: Response, **kwargs: Any) -> None: def save_credentials(self, resp: Response, **kwargs: Any) -> None: """Response callback to save credentials on success.""" - keyring = get_keyring_provider() assert not isinstance( - keyring, KeyRingNullProvider + self.keyring_provider, KeyRingNullProvider ), "should never reach here without keyring" creds = self._credentials_to_save @@ -441,6 +539,8 @@ def save_credentials(self, resp: Response, **kwargs: Any) -> None: if creds and resp.status_code < 400: try: logger.info("Saving credentials to keyring") - keyring.save_auth_info(creds.url, creds.username, creds.password) + self.keyring_provider.save_auth_info( + creds.url, creds.username, creds.password + ) except Exception: logger.exception("Failed to save 
credentials") diff --git a/tests/conftest.py b/tests/conftest.py index 46975b29beb..106e7321456 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,7 @@ from pathlib import Path from typing import ( TYPE_CHECKING, + AnyStr, Callable, Dict, Iterable, @@ -507,7 +508,10 @@ def with_wheel(virtualenv: VirtualEnvironment, wheel_install: Path) -> None: class ScriptFactory(Protocol): def __call__( - self, tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None + self, + tmpdir: Path, + virtualenv: Optional[VirtualEnvironment] = None, + environ: Optional[Dict[AnyStr, AnyStr]] = None, ) -> PipTestEnvironment: ... @@ -521,7 +525,11 @@ def script_factory( def factory( tmpdir: Path, virtualenv: Optional[VirtualEnvironment] = None, + environ: Optional[Dict[AnyStr, AnyStr]] = None, ) -> PipTestEnvironment: + kwargs = {} + if environ: + kwargs["environ"] = environ if virtualenv is None: virtualenv = virtualenv_factory(tmpdir.joinpath("venv")) return PipTestEnvironment( @@ -541,6 +549,7 @@ def factory( pip_expect_warning=deprecated_python, # Tell the Test Environment if we want to run pip via a zipapp zipapp=zipapp, + **kwargs, ) return factory diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 99e59b97b18..43a9f8bd305 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -3,10 +3,12 @@ import sys import tempfile import textwrap +from pathlib import Path +from typing import Callable, List import pytest -from tests.conftest import CertFactory, MockServer +from tests.conftest import CertFactory, MockServer, ScriptFactory from tests.lib import PipTestEnvironment, TestData from tests.lib.server import ( authorization_response, @@ -361,20 +363,96 @@ def test_do_not_prompt_for_authentication( assert "ERROR: HTTP error 401" in result.stderr +@pytest.fixture(params=(True, False), ids=("interactive", "noninteractive")) +def interactive(request: pytest.FixtureRequest) -> 
bool: + return request.param + + +@pytest.fixture(params=(True, False), ids=("auth_needed", "auth_not_needed")) +def auth_needed(request: pytest.FixtureRequest) -> bool: + return request.param + + +@pytest.fixture(params=("disabled", "import", "subprocess", "auto")) +def keyring_provider(request: pytest.FixtureRequest) -> str: + return request.param + + +@pytest.fixture(params=("disabled", "import", "subprocess")) +def keyring_provider_implementation(request: pytest.FixtureRequest) -> str: + return request.param + + +@pytest.fixture() +def flags( + request: pytest.FixtureRequest, + interactive: bool, + auth_needed: bool, + keyring_provider: str, + keyring_provider_implementation: str, +) -> List[str]: + if ( + keyring_provider != "auto" + and keyring_provider_implementation != keyring_provider + ): + pytest.skip() + + flags = ["--keyring-provider", keyring_provider] + if not interactive: + flags.append("--no-input") + if auth_needed: + if keyring_provider_implementation == "disabled" or ( + not interactive and keyring_provider == "auto" + ): + request.applymarker(pytest.mark.xfail()) + return flags + + @pytest.mark.skipif( sys.platform == "linux" and sys.version_info < (3, 8), reason="Custom SSL certification not running well in CI", ) -@pytest.mark.parametrize("auth_needed", (True, False)) def test_prompt_for_keyring_if_needed( - script: PipTestEnvironment, data: TestData, cert_factory: CertFactory, auth_needed: bool, + flags: List[str], + keyring_provider: str, + keyring_provider_implementation: str, + tmpdir: Path, + script_factory: ScriptFactory, + virtualenv_factory: Callable[[Path], VirtualEnvironment], ) -> None: - """Test behaviour while installing from a index url + """Test behaviour while installing from an index url requiring authentication and keyring is possible. 
""" + environ = os.environ.copy() + workspace = tmpdir.joinpath("workspace") + + if keyring_provider_implementation == "subprocess": + keyring_virtualenv = virtualenv_factory(workspace.joinpath("keyring")) + keyring_script = script_factory( + workspace.joinpath("keyring"), keyring_virtualenv + ) + keyring_script.pip( + "install", + "keyring", + ) + + environ["PATH"] = str(keyring_script.bin_path) + os.pathsep + environ["PATH"] + + virtualenv = virtualenv_factory(workspace.joinpath("venv")) + script = script_factory(workspace.joinpath("venv"), virtualenv, environ=environ) + + if keyring_provider != "auto" or keyring_provider_implementation != "subprocess": + script.pip( + "install", + "keyring", + ) + + if keyring_provider_implementation != "subprocess": + keyring_script = script + cert_path = cert_factory() ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) ctx.load_cert_chain(cert_path, cert_path) @@ -394,22 +472,40 @@ def test_prompt_for_keyring_if_needed( response(data.packages / "simple-3.0.tar.gz"), ] - url = f"https://{server.host}:{server.port}/simple" + url = f"https://USERNAME@{server.host}:{server.port}/simple" keyring_content = textwrap.dedent( """\ import os import sys - from collections import namedtuple + import keyring + from keyring.backend import KeyringBackend + from keyring.credentials import SimpleCredential + + class TestBackend(KeyringBackend): + priority = 1 + + def get_credential(self, url, username): + sys.stderr.write("get_credential was called" + os.linesep) + return SimpleCredential(username="USERNAME", password="PASSWORD") - Cred = namedtuple("Cred", ["username", "password"]) + def get_password(self, url, username): + sys.stderr.write("get_password was called" + os.linesep) + return "PASSWORD" - def get_credential(url, username): - sys.stderr.write("get_credential was called" + os.linesep) - return Cred("USERNAME", "PASSWORD") + def set_password(self, url, username): + pass """ ) - keyring_path = script.site_packages_path / "keyring.py" + 
keyring_path = keyring_script.site_packages_path / "keyring_test.py" + keyring_path.write_text(keyring_content) + + keyring_content = ( + "import keyring_test;" + " import keyring;" + " keyring.set_keyring(keyring_test.TestBackend())" + os.linesep + ) + keyring_path = keyring_path.with_suffix(".pth") keyring_path.write_text(keyring_content) with server_running(server): @@ -421,10 +517,16 @@ def get_credential(url, username): cert_path, "--client-cert", cert_path, + *flags, "simple", ) + function_name = ( + "get_credential" + if keyring_provider_implementation == "import" + else "get_password" + ) if auth_needed: - assert "get_credential was called" in result.stderr + assert function_name + " was called" in result.stderr else: - assert "get_credential was called" not in result.stderr + assert function_name + " was called" not in result.stderr diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index 5e9e325a15f..dfd65ca3f52 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -1,5 +1,6 @@ import functools import os +import subprocess import sys from typing import Any, Dict, Iterable, List, Optional, Tuple @@ -15,6 +16,7 @@ def reset_keyring() -> Iterable[None]: yield None # Reset the state of the module between tests pip._internal.network.auth.KEYRING_DISABLED = False + pip._internal.network.auth.get_keyring_provider.cache_clear() @pytest.mark.parametrize( @@ -100,7 +102,12 @@ def test_get_credentials_uses_cached_credentials_only_username() -> None: def test_get_index_url_credentials() -> None: - auth = MultiDomainBasicAuth(index_urls=["http://foo:bar@example.com/path"]) + auth = MultiDomainBasicAuth( + index_urls=[ + "http://example.com/", + "http://foo:bar@example.com/path", + ] + ) get = functools.partial( auth._get_new_credentials, allow_netrc=False, allow_keyring=False ) @@ -110,6 +117,45 @@ def test_get_index_url_credentials() -> None: assert get("http://example.com/path3/path2") == (None, None) +def 
test_prioritize_longest_path_prefix_match_organization() -> None: + auth = MultiDomainBasicAuth( + index_urls=[ + "http://foo:bar@example.com/org-name-alpha/repo-alias/simple", + "http://bar:foo@example.com/org-name-beta/repo-alias/simple", + ] + ) + get = functools.partial( + auth._get_new_credentials, allow_netrc=False, allow_keyring=False + ) + + # Inspired by Azure DevOps URL structure, GitLab should look similar + assert get("http://example.com/org-name-alpha/repo-guid/dowbload/") == ( + "foo", + "bar", + ) + assert get("http://example.com/org-name-beta/repo-guid/dowbload/") == ("bar", "foo") + + +def test_prioritize_longest_path_prefix_match_project() -> None: + auth = MultiDomainBasicAuth( + index_urls=[ + "http://foo:bar@example.com/org-alpha/project-name-alpha/repo-alias/simple", + "http://bar:foo@example.com/org-alpha/project-name-beta/repo-alias/simple", + ] + ) + get = functools.partial( + auth._get_new_credentials, allow_netrc=False, allow_keyring=False + ) + + # Inspired by Azure DevOps URL structure, GitLab should look similar + assert get( + "http://example.com/org-alpha/project-name-alpha/repo-guid/dowbload/" + ) == ("foo", "bar") + assert get( + "http://example.com/org-alpha/project-name-beta/repo-guid/dowbload/" + ) == ("bar", "foo") + + class KeyringModuleV1: """Represents the supported API of keyring before get_credential was added. 
@@ -121,7 +167,7 @@ def __init__(self) -> None: def get_password(self, system: str, username: str) -> Optional[str]: if system == "example.com" and username: return username + "!netloc" - if system == "http://example.com/path2" and username: + if system == "http://example.com/path2/" and username: return username + "!url" return None @@ -134,8 +180,8 @@ def set_password(self, system: str, username: str, password: str) -> None: ( ("http://example.com/path1", (None, None)), # path1 URLs will be resolved by netloc - ("http://user@example.com/path1", ("user", "user!netloc")), - ("http://user2@example.com/path1", ("user2", "user2!netloc")), + ("http://user@example.com/path3", ("user", "user!netloc")), + ("http://user2@example.com/path3", ("user2", "user2!netloc")), # path2 URLs will be resolved by index URL ("http://example.com/path2/path3", (None, None)), ("http://foo@example.com/path2/path3", ("foo", "foo!url")), @@ -148,7 +194,10 @@ def test_keyring_get_password( ) -> None: keyring = KeyringModuleV1() monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] - auth = MultiDomainBasicAuth(index_urls=["http://example.com/path2"]) + auth = MultiDomainBasicAuth( + index_urls=["http://example.com/path2", "http://example.com/path3"], + keyring_provider="import", + ) actual = auth._get_new_credentials(url, allow_netrc=False, allow_keyring=True) assert actual == expect @@ -157,7 +206,7 @@ def test_keyring_get_password( def test_keyring_get_password_after_prompt(monkeypatch: pytest.MonkeyPatch) -> None: keyring = KeyringModuleV1() monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] - auth = MultiDomainBasicAuth() + auth = MultiDomainBasicAuth(keyring_provider="import") def ask_input(prompt: str) -> str: assert prompt == "User for example.com: " @@ -173,7 +222,7 @@ def test_keyring_get_password_after_prompt_when_none( ) -> None: keyring = KeyringModuleV1() monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] - auth = 
MultiDomainBasicAuth() + auth = MultiDomainBasicAuth(keyring_provider="import") def ask_input(prompt: str) -> str: assert prompt == "User for unknown.com: " @@ -194,7 +243,10 @@ def test_keyring_get_password_username_in_index( ) -> None: keyring = KeyringModuleV1() monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] - auth = MultiDomainBasicAuth(index_urls=["http://user@example.com/path2"]) + auth = MultiDomainBasicAuth( + index_urls=["http://user@example.com/path2", "http://example.com/path4"], + keyring_provider="import", + ) get = functools.partial( auth._get_new_credentials, allow_netrc=False, allow_keyring=True ) @@ -227,7 +279,7 @@ def test_keyring_set_password( ) -> None: keyring = KeyringModuleV1() monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] - auth = MultiDomainBasicAuth(prompting=True) + auth = MultiDomainBasicAuth(prompting=True, keyring_provider="import") monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None)) monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds) if creds[2]: @@ -284,7 +336,7 @@ def get_password(self, system: str, username: str) -> None: assert False, "get_password should not ever be called" def get_credential(self, system: str, username: str) -> Optional[Credential]: - if system == "http://example.com/path2": + if system == "http://example.com/path2/": return self.Credential("username", "url") if system == "example.com": return self.Credential("username", "netloc") @@ -303,7 +355,10 @@ def test_keyring_get_credential( monkeypatch: pytest.MonkeyPatch, url: str, expect: str ) -> None: monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2()) # type: ignore[misc] - auth = MultiDomainBasicAuth(index_urls=["http://example.com/path2"]) + auth = MultiDomainBasicAuth( + index_urls=["http://example.com/path1", "http://example.com/path2"], + keyring_provider="import", + ) assert ( auth._get_new_credentials(url, allow_netrc=False, allow_keyring=True) == 
expect @@ -325,7 +380,9 @@ def test_broken_keyring_disables_keyring(monkeypatch: pytest.MonkeyPatch) -> Non keyring_broken = KeyringModuleBroken() monkeypatch.setitem(sys.modules, "keyring", keyring_broken) # type: ignore[misc] - auth = MultiDomainBasicAuth(index_urls=["http://example.com/"]) + auth = MultiDomainBasicAuth( + index_urls=["http://example.com/"], keyring_provider="import" + ) assert keyring_broken._call_count == 0 for i in range(5): @@ -347,12 +404,12 @@ def __call__( *, env: Dict[str, str], stdin: Optional[Any] = None, - capture_output: Optional[bool] = None, + stdout: Optional[Any] = None, input: Optional[bytes] = None, ) -> Any: if cmd[1] == "get": assert stdin == -3 # subprocess.DEVNULL - assert capture_output is True + assert stdout == subprocess.PIPE assert env["PYTHONIOENCODING"] == "utf-8" password = self.get_password(*cmd[2:]) @@ -361,11 +418,12 @@ def __call__( self.returncode = 1 else: # Passwords are returned encoded with a newline appended + self.returncode = 0 self.stdout = (password + os.linesep).encode("utf-8") if cmd[1] == "set": assert stdin is None - assert capture_output is None + assert stdout is None assert env["PYTHONIOENCODING"] == "utf-8" assert input is not None @@ -384,8 +442,8 @@ def check_returncode(self) -> None: ( ("http://example.com/path1", (None, None)), # path1 URLs will be resolved by netloc - ("http://user@example.com/path1", ("user", "user!netloc")), - ("http://user2@example.com/path1", ("user2", "user2!netloc")), + ("http://user@example.com/path3", ("user", "user!netloc")), + ("http://user2@example.com/path3", ("user2", "user2!netloc")), # path2 URLs will be resolved by index URL ("http://example.com/path2/path3", (None, None)), ("http://foo@example.com/path2/path3", ("foo", "foo!url")), @@ -400,7 +458,10 @@ def test_keyring_cli_get_password( monkeypatch.setattr( pip._internal.network.auth.subprocess, "run", KeyringSubprocessResult() ) - auth = MultiDomainBasicAuth(index_urls=["http://example.com/path2"]) + auth 
= MultiDomainBasicAuth( + index_urls=["http://example.com/path2", "http://example.com/path3"], + keyring_provider="subprocess", + ) actual = auth._get_new_credentials(url, allow_netrc=False, allow_keyring=True) assert actual == expect @@ -431,7 +492,7 @@ def test_keyring_cli_set_password( monkeypatch.setattr(pip._internal.network.auth.shutil, "which", lambda x: "keyring") keyring = KeyringSubprocessResult() monkeypatch.setattr(pip._internal.network.auth.subprocess, "run", keyring) - auth = MultiDomainBasicAuth(prompting=True) + auth = MultiDomainBasicAuth(prompting=True, keyring_provider="subprocess") monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None)) monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds) if creds[2]: From 0a9ff9de2323bce6814ea4cf8a20687584bf7720 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 13 Jan 2023 15:46:42 +0800 Subject: [PATCH 275/730] Revise documentation on keyring --- docs/html/topics/authentication.md | 60 +++++++++++++++--------------- 1 file changed, 31 insertions(+), 29 deletions(-) diff --git a/docs/html/topics/authentication.md b/docs/html/topics/authentication.md index 54c3cf9dd3a..9cb5f688c88 100644 --- a/docs/html/topics/authentication.md +++ b/docs/html/topics/authentication.md @@ -66,18 +66,21 @@ man pages][netrc-docs]. ## Keyring Support pip supports loading credentials stored in your keyring using the -{pypi}`keyring` library which can be enabled py passing `--keyring-provider` -with a value of `auto`, `disabled`, `import` or `subprocess`. The default value -is `auto`. `auto` will respect `--no-input` and not query keyring at all if that -option is used. The `auto` provider will use the `import` provider if the -`keyring` module can be imported. If that is not the case it will use the -`subprocess` provider if a `keyring` executable can be found. Otherwise, the -`disabled` provider will be used. 
- -### Configuring Pip -Passing this as a command line argument will work, but is not how the majority -of this feature's users will use it. They instead will want to overwrite the -default of `disabled` in the global, user of site configuration file: +{pypi}`keyring` library, which can be enabled by passing `--keyring-provider` +with a value of `auto`, `disabled`, `import`, or `subprocess`. The default +value `auto` respects `--no-input` and does not query keyring at all if the option +is used; otherwise it tries the `import`, `subprocess`, and `disabled` +providers (in this order) and uses the first one that works. + +### Configuring pip's keyring usage + +Since the keyring configuration is likely system-wide, a more common way to +configure its usage would be to use a configuration instead: + +```{seealso} +{doc}`./configuration` describes how pip configuration works. +``` + ```bash $ pip config set --global global.keyring-provider subprocess @@ -95,10 +98,10 @@ $ pip config set --site global.keyring-provider disabled $ export PIP_KEYRING_PROVIDER=disabled ``` -### Installing and using the keyring python module +### Using keyring's Python module -Setting it to `import` tries to communicate with `keyring` by importing it -and using its Python api. +Setting `keyring-provider` to `import` makes pip communicate with `keyring` via +its Python interface. ```bash # install keyring from PyPI $ echo "your-password" | keyring set pypi.company.com your-username $ pip install your-package --keyring-provider import --index-url https://pypi.company.com/ ``` -### Installing and using the keyring cli application +### Using keyring as a command line application -Setting it to `subprocess` will look for a `keyring` executable on the PATH -if one can be found that is different from the `keyring` installation `import` -would be using. +Setting `keyring-provider` to `subprocess` makes pip look for and use the +`keyring` command found on `PATH`.
-The cli requires a username, therefore you MUST put a username in the url. -See the example below or the basic HTTP authentication section at the top of -this page. +For this use case, a username *must* be included in the URL, since it is +required by `keyring`'s command line interface. See the example below or the +basic HTTP authentication section at the top of this page. ```bash -# install keyring from PyPI using pipx, which we assume if installed properly +# Install keyring from PyPI using pipx, which we assume if installed properly # you can also create a venv somewhere and add it to the PATH yourself instead $ pipx install keyring --index-url https://pypi.org/simple -# install the keyring backend for Azure DevOps for example -# VssSessionToken is the username you MUST use for this backend +# For Azure DevOps, also install its keyring backend. $ pipx inject keyring artifacts-keyring --index-url https://pypi.org/simple -# or the one for Google Artifact Registry +# For Google Artifact Registry, also install and initialize its keyring backend. $ pipx inject keyring keyrings.google-artifactregistry-auth --index-url https://pypi.org/simple $ gcloud auth login +# Note that a username is required in the index URL. $ pip install your-package --keyring-provider subprocess --index-url https://username@pypi.example.com/ ``` @@ -155,13 +157,13 @@ $ export PIP_KEYRING_PROVIDER=import ``` ```{warning} -Be careful when doing this since it could cause tools such as Pipx and Pipenv +Be careful when doing this since it could cause tools such as pipx and Pipenv to appear to hang. They show their own progress indicator while hiding output from the subprocess in which they run Pip. You won't know whether the keyring backend is waiting the user input or not in such situations. 
``` -Pip is conservative and does not query keyring at all when `--no-input` is used +pip is conservative and does not query keyring at all when `--no-input` is used because the keyring might require user interaction such as prompting the user on the console. You can force keyring usage by passing `--force-keyring` or one of the following: @@ -174,7 +176,7 @@ $ export PIP_FORCE_KEYRING=1 ``` ```{warning} -Be careful when doing this since it could cause tools such as Pipx and Pipenv +Be careful when doing this since it could cause tools such as pipx and Pipenv to appear to hang. They show their own progress indicator while hiding output from the subprocess in which they run Pip. You won't know whether the keyring backend is waiting the user input or not in such situations. From d325245052f388523c6e65b7f43382017b92956e Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 13 Jan 2023 15:50:59 +0800 Subject: [PATCH 276/730] Better subprocess handling --- src/pip/_internal/network/auth.py | 11 ++++++----- tests/unit/test_network_auth.py | 3 +++ 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 3e23dcbbedf..5213e1cc9bc 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -138,13 +138,14 @@ def _set_password(self, service_name: str, username: str, password: str) -> None """Mirror the implementation of keyring.set_password using cli""" if self.keyring is None: return None - - cmd = [self.keyring, "set", service_name, username] - input_ = (password + os.linesep).encode("utf-8") env = os.environ.copy() env["PYTHONIOENCODING"] = "utf-8" - res = subprocess.run(cmd, input=input_, env=env) - res.check_returncode() + subprocess.run( + [self.keyring, "set", service_name, username], + input=f"{password}{os.linesep}".encode("utf-8"), + env=env, + check=True, + ) return None diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py 
index dfd65ca3f52..5dde6da57c5 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -406,11 +406,13 @@ def __call__( stdin: Optional[Any] = None, stdout: Optional[Any] = None, input: Optional[bytes] = None, + check: Optional[bool] = None ) -> Any: if cmd[1] == "get": assert stdin == -3 # subprocess.DEVNULL assert stdout == subprocess.PIPE assert env["PYTHONIOENCODING"] == "utf-8" + assert check is None password = self.get_password(*cmd[2:]) if password is None: @@ -426,6 +428,7 @@ def __call__( assert stdout is None assert env["PYTHONIOENCODING"] == "utf-8" assert input is not None + assert check # Input from stdin is encoded self.set_password(cmd[2], cmd[3], input.decode("utf-8").strip(os.linesep)) From 7b75fda0c6acb312bb6dbe9a201d6814465a66f8 Mon Sep 17 00:00:00 2001 From: Dos Moonen Date: Fri, 13 Jan 2023 09:04:23 +0100 Subject: [PATCH 277/730] Typo in documentation: s/if/is --- docs/html/topics/authentication.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/topics/authentication.md b/docs/html/topics/authentication.md index 9cb5f688c88..966ac3e7a0d 100644 --- a/docs/html/topics/authentication.md +++ b/docs/html/topics/authentication.md @@ -120,7 +120,7 @@ required by `keyring`'s command line interface. See the example below or the basic HTTP authentication section at the top of this page. 
```bash -# Install keyring from PyPI using pipx, which we assume if installed properly +# Install keyring from PyPI using pipx, which we assume is installed properly # you can also create a venv somewhere and add it to the PATH yourself instead $ pipx install keyring --index-url https://pypi.org/simple From 16bd6b705418fec2be8db6337853dd3a6b626f6a Mon Sep 17 00:00:00 2001 From: Dos Moonen Date: Fri, 13 Jan 2023 10:04:20 +0100 Subject: [PATCH 278/730] Applied review suggestions --- src/pip/_internal/network/auth.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index 5213e1cc9bc..9bdbf12f40c 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -3,6 +3,7 @@ Contains interface (MultiDomainBasicAuth) and associated glue code for providing credentials in the context of network requests. """ +import logging import os import shutil import subprocess @@ -169,7 +170,7 @@ def get_keyring_provider(provider: str) -> KeyRingBaseProvider: msg = "Installed copy of keyring fails with exception %s" if provider == "auto": msg = msg + ", trying to find a keyring executable as a fallback" - logger.warning(msg, str(exc)) + logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG)) if provider in ["subprocess", "auto"]: cli = shutil.which("keyring") if cli and cli.startswith(sysconfig.get_path("scripts")): @@ -188,13 +189,16 @@ def PATH_as_shutil_which_determines_it() -> str: return path - scripts = Path(sysconfig.get_path("scripts")).resolve() + scripts = Path(sysconfig.get_path("scripts")) paths = [] for path in PATH_as_shutil_which_determines_it().split(os.pathsep): p = Path(path) - if p.exists() and not p.resolve().samefile(scripts): - paths.append(path) + try: + if not p.samefile(scripts): + paths.append(path) + except FileNotFoundError: + pass path = os.pathsep.join(paths) From b653b129c56b29ad565886c1f423de89639d20f3 Mon Sep 17 
00:00:00 2001 From: Tzu-ping Chung Date: Mon, 30 Jan 2023 17:06:13 +0800 Subject: [PATCH 279/730] Bump pre-commit isort to 5.12.0 This release contains a critical fix for newer Poetry versions that crashes due to "invalid" pyproject.toml syntax. --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a49016eed3c..0b997282163 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,7 +33,7 @@ repos: exclude: tests/data - repo: https://github.com/PyCQA/isort - rev: 5.10.1 + rev: 5.12.0 hooks: - id: isort files: \.py$ From 60ce5c0943c303e48f0aed8bce650f725dcd222d Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 29 Jan 2023 15:42:47 +0000 Subject: [PATCH 280/730] Fix the kind of news fragment --- news/{10265.removal.rst => 10265.doc.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{10265.removal.rst => 10265.doc.rst} (100%) diff --git a/news/10265.removal.rst b/news/10265.doc.rst similarity index 100% rename from news/10265.removal.rst rename to news/10265.doc.rst From aa94ccadb45d6ee44defea8a82bd5b647ccba799 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 30 Jan 2023 15:13:07 +0000 Subject: [PATCH 281/730] Update AUTHORS.txt --- AUTHORS.txt | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/AUTHORS.txt b/AUTHORS.txt index 007454f8b20..0f0fb3caf98 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -128,6 +128,7 @@ Chris Hunt Chris Jerdonek Chris McDonough Chris Pawley +Chris Pryer Chris Wolfe Christian Clauss Christian Heimes @@ -169,7 +170,9 @@ Daniel Jost Daniel Katz Daniel Shaulov Daniele Esposti +Daniele Nicolodi Daniele Procida +Daniil Konovalenko Danny Hermes Danny McClanahan Darren Kavanagh @@ -200,6 +203,7 @@ Diego Caraballo Diego Ramirez DiegoCaraballo Dimitri Merejkowsky +Dimitri Papadopoulos Dirk Stolle Dmitry Gladkov Dmitry Volodin @@ -207,6 +211,7 @@ Domen Kožar Dominic Davis-Foster Donald Stufft 
Dongweiming +doron zarhi Douglas Thor DrFeathers Dustin Ingram @@ -282,6 +287,7 @@ hauntsaninja Henrich Hartzer Henry Schreiner Herbert Pfennig +Holly Stotelmyer Hsiaoming Yang Hugo Lopes Tavares Hugo van Kemenade @@ -305,6 +311,7 @@ Jacob Kim Jacob Walls Jaime Sanz jakirkham +Jakub Kuczys Jakub Stasiak Jakub Vysoky Jakub Wilk @@ -317,6 +324,7 @@ Jan Pokorný Jannis Leidel Jarek Potiuk jarondl +Jason Curtis Jason R. Coombs Jay Graves Jean-Christophe Fillion-Robin @@ -342,6 +350,7 @@ Jon Dufresne Jon Parise Jonas Nockert Jonathan Herbert +Joonatan Partanen Joost Molenaar Jorge Niedbalski Joseph Bylund @@ -350,6 +359,7 @@ Josh Bronson Josh Hansen Josh Schneier Juanjo Bazán +Judah Rand Julian Berman Julian Gethmann Julien Demoor @@ -455,6 +465,7 @@ Miro Hrončok Monica Baluna montefra Monty Taylor +Muha Ajjan‮ Nadav Wexler Nahuel Ambrosini Nate Coraor @@ -484,6 +495,7 @@ nvdv OBITORASU Ofek Lev ofrinevo +Oliver Freund Oliver Jeeves Oliver Mannion Oliver Tonnhofer @@ -555,6 +567,7 @@ Riccardo Schirone Richard Jones Richard Si Ricky Ng-Adam +Rishi RobberPhex Robert Collins Robert McGibbon @@ -605,6 +618,7 @@ Stavros Korokithakis Stefan Scherfke Stefano Rivera Stephan Erb +Stephen Rosen stepshal Steve (Gadget) Barnes Steve Barnes @@ -644,6 +658,7 @@ Tom V Tomas Hrnciar Tomas Orsava Tomer Chachamu +Tommi Enenkel | AnB Tomáš Hrnčiar Tony Beswick Tony Narlock @@ -672,6 +687,7 @@ Wil Tan Wilfred Hughes William ML Leslie William T Olson +William Woodruff Wilson Mo wim glenn Winson Luk From d21af1c986589cda1f722f5ca5b4748bfe026705 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 30 Jan 2023 15:13:08 +0000 Subject: [PATCH 282/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 2aead080d0f..ce90d06bfd4 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.0" +__version__ = "23.1.dev0" def 
main(args: Optional[List[str]] = None) -> int: From 368c7b4c557e673b05b0f8cffc967d3e333eee19 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 30 Jan 2023 15:13:08 +0000 Subject: [PATCH 283/730] Bump for release --- NEWS.rst | 57 +++++++++++++++++++ news/10265.doc.rst | 1 - news/11312.feature.rst | 2 - news/11381.feature.rst | 3 - news/11527.bugfix.rst | 2 - news/11589.feature.rst | 2 - news/11598.bugfix.rst | 1 - news/11617.bugfix.rst | 3 - news/11623.bugfix.rst | 1 - news/11638.bugfix.rst | 1 - news/11673.bugfix.rst | 3 - news/11675.doc.rst | 2 - news/11676.doc.rst | 2 - news/11704.bugfix.rst | 2 - news/11757.feature.rst | 2 - ...5b-e740-4ef8-a78a-8d62a144fdde.trivial.rst | 0 ...86-9572-4871-9B35-C6A8FA66AE75.trivial.rst | 0 ...1b-9024-4448-9ae1-6e4a5a5952f0.trivial.rst | 0 news/certifi.vendor.rst | 1 - news/chardet.vendor.rst | 1 - news/colorama.vendor.rst | 1 - ...f5-0ed2-480c-baa9-2490e4abdff6.trivial.rst | 0 news/distro.vendor.rst | 1 - news/pep517.vendor.rst | 1 - news/platformdirs.vendor.rst | 1 - news/pyproject-hooks.vendor.rst | 1 - news/requests.vendor.rst | 1 - news/rich.vendor.rst | 1 - news/urllib3.vendor.rst | 1 - src/pip/__init__.py | 2 +- 30 files changed, 58 insertions(+), 38 deletions(-) delete mode 100644 news/10265.doc.rst delete mode 100644 news/11312.feature.rst delete mode 100644 news/11381.feature.rst delete mode 100644 news/11527.bugfix.rst delete mode 100644 news/11589.feature.rst delete mode 100644 news/11598.bugfix.rst delete mode 100644 news/11617.bugfix.rst delete mode 100644 news/11623.bugfix.rst delete mode 100644 news/11638.bugfix.rst delete mode 100644 news/11673.bugfix.rst delete mode 100644 news/11675.doc.rst delete mode 100644 news/11676.doc.rst delete mode 100644 news/11704.bugfix.rst delete mode 100644 news/11757.feature.rst delete mode 100644 news/4e5ddb5b-e740-4ef8-a78a-8d62a144fdde.trivial.rst delete mode 100644 news/704B9286-9572-4871-9B35-C6A8FA66AE75.trivial.rst delete mode 100644 
news/c1da841b-9024-4448-9ae1-6e4a5a5952f0.trivial.rst delete mode 100644 news/certifi.vendor.rst delete mode 100644 news/chardet.vendor.rst delete mode 100644 news/colorama.vendor.rst delete mode 100644 news/d4da20f5-0ed2-480c-baa9-2490e4abdff6.trivial.rst delete mode 100644 news/distro.vendor.rst delete mode 100644 news/pep517.vendor.rst delete mode 100644 news/platformdirs.vendor.rst delete mode 100644 news/pyproject-hooks.vendor.rst delete mode 100644 news/requests.vendor.rst delete mode 100644 news/rich.vendor.rst delete mode 100644 news/urllib3.vendor.rst diff --git a/NEWS.rst b/NEWS.rst index 1473db7125c..2e24fab37ad 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,63 @@ .. towncrier release notes start +23.0 (2023-01-30) +================= + +Features +-------- + +- Change the hashes in the installation report to be a mapping. Emit the + ``archive_info.hashes`` dictionary in ``direct_url.json``. (`#11312 `_) +- Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in PEP 668. + This allows a downstream Python distributor to prevent users from using pip to + modify the externally managed environment. (`#11381 `_) +- Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring`` + installed using ``pipx`` to be used by ``pip``. (`#11589 `_) +- The inspect and installation report formats are now declared stabled, and their version + has been bumped from ``0`` to ``1``. (`#11757 `_) + +Bug Fixes +--------- + +- Wheel cache behavior is restored to match previous versions, allowing the + cache to find existing entries. (`#11527 `_) +- Use the "venv" scheme if available to obtain prefixed lib paths. (`#11598 `_) +- Deprecated a historical ambiguity in how ``egg`` fragments in URL-style + requirements are formatted and handled. ``egg`` fragments that do not look + like PEP 508 names now produce a deprecation warning. (`#11617 `_) +- Fix scripts path in isolated build environment on Debian. 
(`#11623 `_) +- Make ``pip show`` show the editable location if package is editable (`#11638 `_) +- Stop checking that ``wheel`` is present when ``build-system.requires`` + is provided without ``build-system.build-backend`` as ``setuptools`` + (which we still check for) will inject it anyway. (`#11673 `_) +- Fix an issue when an already existing in-memory distribution would cause + exceptions in ``pip install`` (`#11704 `_) + +Vendored Libraries +------------------ + +- Upgrade certifi to 2022.12.7 +- Upgrade chardet to 5.1.0 +- Upgrade colorama to 0.4.6 +- Upgrade distro to 1.8.0 +- Remove pep517 from vendored packages +- Upgrade platformdirs to 2.6.2 +- Add pyproject-hooks 1.0.0 +- Upgrade requests to 2.28.2 +- Upgrade rich to 12.6.0 +- Upgrade urllib3 to 1.26.14 + +Improved Documentation +---------------------- + +- Fixed the description of the option "--install-options" in the documentation (`#10265 `_) +- Remove mention that editable installs are necessary for pip freeze to report the VCS + URL. (`#11675 `_) +- Clarify that the egg URL fragment is only necessary for editable VCS installs, and + otherwise not necessary anymore. (`#11676 `_) + + 22.3.1 (2022-11-05) =================== diff --git a/news/10265.doc.rst b/news/10265.doc.rst deleted file mode 100644 index 477eb9753fb..00000000000 --- a/news/10265.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed the description of the option "--install-options" in the documentation diff --git a/news/11312.feature.rst b/news/11312.feature.rst deleted file mode 100644 index 493dde83059..00000000000 --- a/news/11312.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Change the hashes in the installation report to be a mapping. Emit the -``archive_info.hashes`` dictionary in ``direct_url.json``. 
diff --git a/news/11381.feature.rst b/news/11381.feature.rst deleted file mode 100644 index 3df9877b476..00000000000 --- a/news/11381.feature.rst +++ /dev/null @@ -1,3 +0,0 @@ -Implement logic to read the ``EXTERNALLY-MANAGED`` file as specified in PEP 668. -This allows a downstream Python distributor to prevent users from using pip to -modify the externally managed environment. diff --git a/news/11527.bugfix.rst b/news/11527.bugfix.rst deleted file mode 100644 index 0185a804ff7..00000000000 --- a/news/11527.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Wheel cache behavior is restored to match previous versions, allowing the -cache to find existing entries. diff --git a/news/11589.feature.rst b/news/11589.feature.rst deleted file mode 100644 index d01a564b631..00000000000 --- a/news/11589.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring`` -installed using ``pipx`` to be used by ``pip``. diff --git a/news/11598.bugfix.rst b/news/11598.bugfix.rst deleted file mode 100644 index 031ff9ddca0..00000000000 --- a/news/11598.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Use the "venv" scheme if available to obtain prefixed lib paths. diff --git a/news/11617.bugfix.rst b/news/11617.bugfix.rst deleted file mode 100644 index 02346e49c42..00000000000 --- a/news/11617.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Deprecated a historical ambiguity in how ``egg`` fragments in URL-style -requirements are formatted and handled. ``egg`` fragments that do not look -like PEP 508 names now produce a deprecation warning. diff --git a/news/11623.bugfix.rst b/news/11623.bugfix.rst deleted file mode 100644 index 45b8fe1928f..00000000000 --- a/news/11623.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix scripts path in isolated build environment on Debian. 
diff --git a/news/11638.bugfix.rst b/news/11638.bugfix.rst deleted file mode 100644 index 04ef930bc1b..00000000000 --- a/news/11638.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Make ``pip show`` show the editable location if package is editable diff --git a/news/11673.bugfix.rst b/news/11673.bugfix.rst deleted file mode 100644 index c3d92475c99..00000000000 --- a/news/11673.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Stop checking that ``wheel`` is present when ``build-system.requires`` -is provided without ``build-system.build-backend`` as ``setuptools`` -(which we still check for) will inject it anyway. diff --git a/news/11675.doc.rst b/news/11675.doc.rst deleted file mode 100644 index b2b2278faf6..00000000000 --- a/news/11675.doc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Remove mention that editable installs are necessary for pip freeze to report the VCS -URL. diff --git a/news/11676.doc.rst b/news/11676.doc.rst deleted file mode 100644 index d3f9bd88bed..00000000000 --- a/news/11676.doc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Clarify that the egg URL fragment is only necessary for editable VCS installs, and -otherwise not necessary anymore. diff --git a/news/11704.bugfix.rst b/news/11704.bugfix.rst deleted file mode 100644 index 0e7902a2590..00000000000 --- a/news/11704.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue when an already existing in-memory distribution would cause -exceptions in ``pip install`` diff --git a/news/11757.feature.rst b/news/11757.feature.rst deleted file mode 100644 index 594fb627156..00000000000 --- a/news/11757.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -The inspect and installation report formats are now declared stabled, and their version -has been bumped from ``0`` to ``1``. 
diff --git a/news/4e5ddb5b-e740-4ef8-a78a-8d62a144fdde.trivial.rst b/news/4e5ddb5b-e740-4ef8-a78a-8d62a144fdde.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/704B9286-9572-4871-9B35-C6A8FA66AE75.trivial.rst b/news/704B9286-9572-4871-9B35-C6A8FA66AE75.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/c1da841b-9024-4448-9ae1-6e4a5a5952f0.trivial.rst b/news/c1da841b-9024-4448-9ae1-6e4a5a5952f0.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst deleted file mode 100644 index f02ba9f4187..00000000000 --- a/news/certifi.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade certifi to 2022.12.7 diff --git a/news/chardet.vendor.rst b/news/chardet.vendor.rst deleted file mode 100644 index 5aceb6c5e6f..00000000000 --- a/news/chardet.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade chardet to 5.1.0 diff --git a/news/colorama.vendor.rst b/news/colorama.vendor.rst deleted file mode 100644 index bf206c456d9..00000000000 --- a/news/colorama.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade colorama to 0.4.6 diff --git a/news/d4da20f5-0ed2-480c-baa9-2490e4abdff6.trivial.rst b/news/d4da20f5-0ed2-480c-baa9-2490e4abdff6.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/distro.vendor.rst b/news/distro.vendor.rst deleted file mode 100644 index a11f652b922..00000000000 --- a/news/distro.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade distro to 1.8.0 diff --git a/news/pep517.vendor.rst b/news/pep517.vendor.rst deleted file mode 100644 index 4b91e560f1f..00000000000 --- a/news/pep517.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Remove pep517 from vendored packages diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst deleted file mode 100644 index 5c15bfbd9d5..00000000000 --- a/news/platformdirs.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade platformdirs to 2.6.2 diff --git 
a/news/pyproject-hooks.vendor.rst b/news/pyproject-hooks.vendor.rst deleted file mode 100644 index 2598d556477..00000000000 --- a/news/pyproject-hooks.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Add pyproject-hooks 1.0.0 diff --git a/news/requests.vendor.rst b/news/requests.vendor.rst deleted file mode 100644 index 9f91985c70c..00000000000 --- a/news/requests.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade requests to 2.28.2 diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst deleted file mode 100644 index 56a2e9c5112..00000000000 --- a/news/rich.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade rich to 12.6.0 diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst deleted file mode 100644 index c9d10554e0e..00000000000 --- a/news/urllib3.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade urllib3 to 1.26.14 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index adc0eb10d00..2aead080d0f 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.0.dev0" +__version__ = "23.0" def main(args: Optional[List[str]] = None) -> int: From 9058b900214579a8430e5aaec383d61c9608022a Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Fri, 3 Feb 2023 09:32:53 +0800 Subject: [PATCH 284/730] fix: correct the way to decide if keyring is available --- src/pip/_internal/network/auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index c1621326826..a1c52315b2b 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -359,7 +359,7 @@ def _prompt_for_password( # Factored out to allow for easy patching in tests def _should_save_password_to_keyring(self) -> bool: - if get_keyring_provider() is None: + if isinstance(get_keyring_provider(), KeyRingNullProvider): return False return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" From 706456c5cf463f3ed8f1a949a71bf4379e6baf3a Mon Sep 
17 00:00:00 2001 From: Frost Ming Date: Fri, 3 Feb 2023 09:39:21 +0800 Subject: [PATCH 285/730] add news --- news/11774.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11774.bugfix.rst diff --git a/news/11774.bugfix.rst b/news/11774.bugfix.rst new file mode 100644 index 00000000000..771246b0b54 --- /dev/null +++ b/news/11774.bugfix.rst @@ -0,0 +1 @@ +Correct the way to decide if keyring is available. From 2d0a5c9cd29f72348031b8b517068f98aed14ad7 Mon Sep 17 00:00:00 2001 From: Frost Ming Date: Fri, 3 Feb 2023 15:33:55 +0800 Subject: [PATCH 286/730] use a attribute to tell if the provider is null --- src/pip/_internal/network/auth.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index a1c52315b2b..ac8cbf23bab 100644 --- a/src/pip/_internal/network/auth.py +++ b/src/pip/_internal/network/auth.py @@ -39,6 +39,8 @@ class Credentials(NamedTuple): class KeyRingBaseProvider(ABC): """Keyring base provider interface""" + has_keyring: bool + @abstractmethod def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: ... @@ -51,6 +53,8 @@ def save_auth_info(self, url: str, username: str, password: str) -> None: class KeyRingNullProvider(KeyRingBaseProvider): """Keyring null provider""" + has_keyring = False + def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]: return None @@ -61,6 +65,8 @@ def save_auth_info(self, url: str, username: str, password: str) -> None: class KeyRingPythonProvider(KeyRingBaseProvider): """Keyring interface which uses locally imported `keyring`""" + has_keyring = True + def __init__(self) -> None: import keyring @@ -97,6 +103,8 @@ class KeyRingCliProvider(KeyRingBaseProvider): PATH. 
""" + has_keyring = True + def __init__(self, cmd: str) -> None: self.keyring = cmd @@ -359,7 +367,7 @@ def _prompt_for_password( # Factored out to allow for easy patching in tests def _should_save_password_to_keyring(self) -> bool: - if isinstance(get_keyring_provider(), KeyRingNullProvider): + if not get_keyring_provider().has_keyring: return False return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" @@ -432,9 +440,7 @@ def warn_on_401(self, resp: Response, **kwargs: Any) -> None: def save_credentials(self, resp: Response, **kwargs: Any) -> None: """Response callback to save credentials on success.""" keyring = get_keyring_provider() - assert not isinstance( - keyring, KeyRingNullProvider - ), "should never reach here without keyring" + assert keyring.has_keyring, "should never reach here without keyring" creds = self._credentials_to_save self._credentials_to_save = None From eb7b4ed62e75cac1dfcb39b8e835baf6b1a1bb03 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sat, 4 Feb 2023 21:18:10 +0000 Subject: [PATCH 287/730] Update vendored `pkg_resources` --- news/pkg_resources.vendor.rst | 1 + news/setuptools.vendor.rst | 1 + pyproject.toml | 2 + src/pip/_internal/utils/_jaraco_text.py | 109 +++++++ src/pip/_vendor/pkg_resources/LICENSE | 24 +- src/pip/_vendor/pkg_resources/__init__.py | 314 ++++++++++---------- src/pip/_vendor/pkg_resources/py31compat.py | 23 -- src/pip/_vendor/vendor.txt | 2 +- tools/vendoring/patches/pkg_resources.patch | 27 +- 9 files changed, 303 insertions(+), 200 deletions(-) create mode 100644 news/pkg_resources.vendor.rst create mode 100644 news/setuptools.vendor.rst create mode 100644 src/pip/_internal/utils/_jaraco_text.py delete mode 100644 src/pip/_vendor/pkg_resources/py31compat.py diff --git a/news/pkg_resources.vendor.rst b/news/pkg_resources.vendor.rst new file mode 100644 index 00000000000..a20817dfb24 --- /dev/null +++ b/news/pkg_resources.vendor.rst @@ -0,0 +1 @@ +Patch pkg_resources to remove dependency on 
``jaraco.text``. diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst new file mode 100644 index 00000000000..f86cecbca92 --- /dev/null +++ b/news/setuptools.vendor.rst @@ -0,0 +1 @@ +Update pkg_resources (via setuptools) to 65.6.3 diff --git a/pyproject.toml b/pyproject.toml index a02457eeffd..139c37e18d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,8 @@ drop = [ "easy_install.py", "setuptools", "pkg_resources/_vendor/", + "_distutils_hack", + "distutils-precedence.pth", "pkg_resources/extern/", # trim vendored pygments styles and lexers "pygments/styles/[!_]*.py", diff --git a/src/pip/_internal/utils/_jaraco_text.py b/src/pip/_internal/utils/_jaraco_text.py new file mode 100644 index 00000000000..e06947c051a --- /dev/null +++ b/src/pip/_internal/utils/_jaraco_text.py @@ -0,0 +1,109 @@ +"""Functions brought over from jaraco.text. + +These functions are not supposed to be used within `pip._internal`. These are +helper functions brought over from `jaraco.text` to enable vendoring newer +copies of `pkg_resources` without having to vendor `jaraco.text` and its entire +dependency cone; something that our vendoring setup is not currently capable of +handling. + +License reproduced from original source below: + +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +import functools +import itertools + + +def _nonblank(str): + return str and not str.startswith("#") + + +@functools.singledispatch +def yield_lines(iterable): + r""" + Yield valid lines of a string or iterable. + + >>> list(yield_lines('')) + [] + >>> list(yield_lines(['foo', 'bar'])) + ['foo', 'bar'] + >>> list(yield_lines('foo\nbar')) + ['foo', 'bar'] + >>> list(yield_lines('\nfoo\n#bar\nbaz #comment')) + ['foo', 'baz #comment'] + >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n'])) + ['foo', 'bar', 'baz', 'bing'] + """ + return itertools.chain.from_iterable(map(yield_lines, iterable)) + + +@yield_lines.register(str) +def _(text): + return filter(_nonblank, map(str.strip, text.splitlines())) + + +def drop_comment(line): + """ + Drop comments. + + >>> drop_comment('foo # bar') + 'foo' + + A hash without a space may be in a URL. + + >>> drop_comment('http://example.com/foo#bar') + 'http://example.com/foo#bar' + """ + return line.partition(" #")[0] + + +def join_continuation(lines): + r""" + Join lines continued by a trailing backslash. + + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar', 'baz'])) + ['foobar', 'baz'] + >>> list(join_continuation(['foo \\', 'bar \\', 'baz'])) + ['foobarbaz'] + + Not sure why, but... + The character preceeding the backslash is also elided. + + >>> list(join_continuation(['goo\\', 'dly'])) + ['godly'] + + A terrible idea, but... + If no line is available to continue, suppress the lines. 
+ + >>> list(join_continuation(['foo', 'bar\\', 'baz\\'])) + ['foo'] + """ + lines = iter(lines) + for item in lines: + while item.endswith("\\"): + try: + item = item[:-2].strip() + next(lines) + except StopIteration: + return + yield item diff --git a/src/pip/_vendor/pkg_resources/LICENSE b/src/pip/_vendor/pkg_resources/LICENSE index 6e0693b4b01..353924be0e5 100644 --- a/src/pip/_vendor/pkg_resources/LICENSE +++ b/src/pip/_vendor/pkg_resources/LICENSE @@ -1,19 +1,19 @@ -Copyright (C) 2016 Jason R Coombs +Copyright Jason R. Coombs -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 4cd562cf94c..0ec74f8a6ef 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -1,4 +1,3 @@ -# coding: utf-8 """ Package resource API -------------------- @@ -15,8 +14,6 @@ method. """ -from __future__ import absolute_import - import sys import os import io @@ -41,6 +38,7 @@ import inspect import ntpath import posixpath +import importlib from pkgutil import get_importer try: @@ -54,9 +52,6 @@ except NameError: FileExistsError = OSError -from pip._vendor import six -from pip._vendor.six.moves import urllib, map, filter - # capture these to bypass sandboxing from os import utime try: @@ -76,26 +71,23 @@ except ImportError: importlib_machinery = None -from . 
import py31compat +from pip._internal.utils._jaraco_text import ( + yield_lines, + drop_comment, + join_continuation, +) + from pip._vendor import platformdirs from pip._vendor import packaging __import__('pip._vendor.packaging.version') __import__('pip._vendor.packaging.specifiers') __import__('pip._vendor.packaging.requirements') __import__('pip._vendor.packaging.markers') +__import__('pip._vendor.packaging.utils') - -__metaclass__ = type - - -if (3, 0) < sys.version_info < (3, 5): +if sys.version_info < (3, 5): raise RuntimeError("Python 3.5 or later is required") -if six.PY2: - # Those builtin exceptions are only defined in Python 3 - PermissionError = None - NotADirectoryError = None - # declare some globals that will be defined later to # satisfy the linters. require = None @@ -128,6 +120,11 @@ def parse_version(v): try: return packaging.version.Version(v) except packaging.version.InvalidVersion: + warnings.warn( + f"{v} is an invalid version and will not be supported in " + "a future release", + PkgResourcesDeprecationWarning, + ) return packaging.version.LegacyVersion(v) @@ -178,10 +175,10 @@ def get_supported_platform(): """Return this platform's maximum compatible version. distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by + of macOS that would be required to *use* extensions produced by distutils. But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the + version of macOS that we are *running*. To allow usage of packages that + explicitly require a newer version of macOS, we must also know the current version of the OS. 
If this condition occurs for any other platform with a version in its @@ -191,9 +188,9 @@ def get_supported_platform(): m = macosVersionString.match(plat) if m is not None and sys.platform == "darwin": try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) + plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3)) except ValueError: - # not Mac OS X + # not macOS pass return plat @@ -364,7 +361,7 @@ def get_provider(moduleOrReq): return _find_adapter(_provider_factories, loader)(module) -def _macosx_vers(_cache=[]): +def _macos_vers(_cache=[]): if not _cache: version = platform.mac_ver()[0] # fallback for MacPorts @@ -380,7 +377,7 @@ def _macosx_vers(_cache=[]): return _cache[0] -def _macosx_arch(machine): +def _macos_arch(machine): return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) @@ -388,18 +385,18 @@ def get_build_platform(): """Return this platform's string for platform-specific distributions XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. + needs some hacks for Linux and macOS. """ from sysconfig import get_platform plat = get_platform() if sys.platform == "darwin" and not plat.startswith('macosx-'): try: - version = _macosx_vers() + version = _macos_vers() machine = os.uname()[4].replace(" ", "_") return "macosx-%d.%d-%s" % ( int(version[0]), int(version[1]), - _macosx_arch(machine), + _macos_arch(machine), ) except ValueError: # if someone is running a non-Mac darwin system, this will fall @@ -425,7 +422,7 @@ def compatible_platforms(provided, required): # easy case return True - # Mac OS X special cases + # macOS special cases reqMac = macosVersionString.match(required) if reqMac: provMac = macosVersionString.match(provided) @@ -434,7 +431,7 @@ def compatible_platforms(provided, required): if not provMac: # this is backwards compatibility for packages built before # setuptools 0.6. 
All packages built after this point will - # use the new macosx designation. + # use the new macOS designation. provDarwin = darwinVersionString.match(provided) if provDarwin: dversion = int(provDarwin.group(1)) @@ -442,7 +439,7 @@ def compatible_platforms(provided, required): if dversion == 7 and macosversion >= "10.3" or \ dversion == 8 and macosversion >= "10.4": return True - # egg isn't macosx or legacy darwin + # egg isn't macOS or legacy darwin return False # are they the same major version and machine type? @@ -475,7 +472,7 @@ def run_script(dist_spec, script_name): def get_distribution(dist): """Return a current distribution object for a Requirement or string""" - if isinstance(dist, six.string_types): + if isinstance(dist, str): dist = Requirement.parse(dist) if isinstance(dist, Requirement): dist = get_provider(dist) @@ -558,6 +555,7 @@ def __init__(self, entries=None): self.entries = [] self.entry_keys = {} self.by_key = {} + self.normalized_to_canonical_keys = {} self.callbacks = [] if entries is None: @@ -638,6 +636,14 @@ def find(self, req): is returned. 
""" dist = self.by_key.get(req.key) + + if dist is None: + canonical_key = self.normalized_to_canonical_keys.get(req.key) + + if canonical_key is not None: + req.key = canonical_key + dist = self.by_key.get(canonical_key) + if dist is not None and dist not in req: # XXX add more info raise VersionConflict(dist, req) @@ -706,13 +712,16 @@ def add(self, dist, entry=None, insert=True, replace=False): return self.by_key[dist.key] = dist + normalized_name = packaging.utils.canonicalize_name(dist.key) + self.normalized_to_canonical_keys[normalized_name] = dist.key if dist.key not in keys: keys.append(dist.key) if dist.key not in keys2: keys2.append(dist.key) self._added_new(dist) - def resolve(self, requirements, env=None, installer=None, + # FIXME: 'WorkingSet.resolve' is too complex (11) + def resolve(self, requirements, env=None, installer=None, # noqa: C901 replace_conflicting=False, extras=None): """List all distributions needed to (recursively) meet `requirements` @@ -925,14 +934,15 @@ def _added_new(self, dist): def __getstate__(self): return ( self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] + self.normalized_to_canonical_keys.copy(), self.callbacks[:] ) - def __setstate__(self, e_k_b_c): - entries, keys, by_key, callbacks = e_k_b_c + def __setstate__(self, e_k_b_n_c): + entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c self.entries = entries[:] self.entry_keys = keys.copy() self.by_key = by_key.copy() + self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy() self.callbacks = callbacks[:] @@ -1234,12 +1244,13 @@ def _warn_unsafe_extraction_path(path): mode = os.stat(path).st_mode if mode & stat.S_IWOTH or mode & stat.S_IWGRP: msg = ( - "%s is writable by group/others and vulnerable to attack " - "when " - "used with get_resource_filename. Consider a more secure " + "Extraction path is writable by group/others " + "and vulnerable to attack when " + "used with get_resource_filename ({path}). 
" + "Consider a more secure " "location (set with .set_extraction_path or the " - "PYTHON_EGG_CACHE environment variable)." % path - ) + "PYTHON_EGG_CACHE environment variable)." + ).format(**locals()) warnings.warn(msg, UserWarning) def postprocess(self, tempname, filename): @@ -1377,7 +1388,7 @@ def evaluate_marker(text, extra=None): marker = packaging.markers.Marker(text) return marker.evaluate() except packaging.markers.InvalidMarker as e: - raise SyntaxError(e) + raise SyntaxError(e) from e class NullProvider: @@ -1418,8 +1429,6 @@ def get_metadata(self, name): return "" path = self._get_metadata_path(name) value = self._get(path) - if six.PY2: - return value try: return value.decode('utf-8') except UnicodeDecodeError as exc: @@ -1457,7 +1466,8 @@ def run_script(self, script_name, namespace): script_filename = self._fn(self.egg_info, script) namespace['__file__'] = script_filename if os.path.exists(script_filename): - source = open(script_filename).read() + with open(script_filename) as fid: + source = fid.read() code = compile(source, script_filename, 'exec') exec(code, namespace, namespace) else: @@ -1493,7 +1503,7 @@ def _fn(self, base, resource_name): def _validate_resource_path(path): """ Validate the resource paths according to the docs. 
- https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access + https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access >>> warned = getfixture('recwarn') >>> warnings.simplefilter('always') @@ -1575,26 +1585,35 @@ def _get(self, path): register_loader_type(object, NullProvider) +def _parents(path): + """ + yield all parents of path including path + """ + last = None + while path != last: + yield path + last = path + path, _ = os.path.split(path) + + class EggProvider(NullProvider): """Provider based on a virtual filesystem""" def __init__(self, module): - NullProvider.__init__(self, module) + super().__init__(module) self._setup_prefix() def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path != old: - if _is_egg_path(path): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - break - old = path - path, base = os.path.split(path) + # Assume that metadata may be nested inside a "basket" + # of multiple eggs and use module_path instead of .archive. 
+ eggs = filter(_is_egg_path, _parents(self.module_path)) + egg = next(eggs, None) + egg and self._set_egg(egg) + + def _set_egg(self, path): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 'EGG-INFO') + self.egg_root = path class DefaultProvider(EggProvider): @@ -1701,7 +1720,7 @@ class ZipProvider(EggProvider): _zip_manifests = MemoizedZipManifests() def __init__(self, module): - EggProvider.__init__(self, module) + super().__init__(module) self.zip_pre = self.loader.archive + os.sep def _zipinfo_name(self, fspath): @@ -1752,7 +1771,8 @@ def _get_date_and_size(zip_stat): timestamp = time.mktime(date_time) return timestamp, size - def _extract_resource(self, manager, zip_path): + # FIXME: 'ZipProvider._extract_resource' is too complex (12) + def _extract_resource(self, manager, zip_path): # noqa: C901 if zip_path in self._index(): for name in self._index()[zip_path]: @@ -1900,8 +1920,7 @@ def get_metadata(self, name): return metadata def _warn_on_replacement(self, metadata): - # Python 2.7 compat for: replacement_char = '�' - replacement_char = b'\xef\xbf\xbd'.decode('utf-8') + replacement_char = '�' if replacement_char in metadata: tmpl = "{self.path} could not be properly decoded in UTF-8" msg = tmpl.format(**locals()) @@ -1991,7 +2010,7 @@ def find_eggs_in_zip(importer, path_item, only=False): dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) for dist in dists: yield dist - elif subitem.lower().endswith('.dist-info'): + elif subitem.lower().endswith(('.dist-info', '.egg-info')): subpath = os.path.join(path_item, subitem) submeta = EggMetadata(zipimport.zipimporter(subpath)) submeta.egg_info = subpath @@ -2015,7 +2034,7 @@ def _by_version_descending(names): >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' >>> _by_version_descending(names) - ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] + ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo'] >>> names = 'Setuptools-1.2.3b1.egg', 
'Setuptools-1.2.3.egg' >>> _by_version_descending(names) ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] @@ -2023,13 +2042,22 @@ def _by_version_descending(names): >>> _by_version_descending(names) ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] """ + def try_parse(name): + """ + Attempt to parse as a version or return a null version. + """ + try: + return packaging.version.Version(name) + except Exception: + return packaging.version.Version('0') + def _by_version(name): """ Parse each component of the filename """ name, ext = os.path.splitext(name) parts = itertools.chain(name.split('-'), [ext]) - return [packaging.version.parse(part) for part in parts] + return [try_parse(part) for part in parts] return sorted(names, key=_by_version, reverse=True) @@ -2046,7 +2074,10 @@ def find_on_path(importer, path_item, only=False): ) return - entries = safe_listdir(path_item) + entries = ( + os.path.join(path_item, child) + for child in safe_listdir(path_item) + ) # for performance, before sorting by version, # screen entries for only those that will yield @@ -2067,11 +2098,14 @@ def find_on_path(importer, path_item, only=False): def dist_factory(path_item, entry, only): - """ - Return a dist_factory for a path_item and entry - """ + """Return a dist_factory for the given entry.""" lower = entry.lower() - is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info'))) + is_egg_info = lower.endswith('.egg-info') + is_dist_info = ( + lower.endswith('.dist-info') and + os.path.isdir(os.path.join(path_item, entry)) + ) + is_meta = is_egg_info or is_dist_info return ( distributions_from_metadata if is_meta else @@ -2093,8 +2127,6 @@ class NoDists: """ def __bool__(self): return False - if six.PY2: - __nonzero__ = __bool__ def __call__(self, fullpath): return iter(()) @@ -2111,12 +2143,7 @@ def safe_listdir(path): except OSError as e: # Ignore the directory if does not exist, not a directory or # permission denied - ignorable = ( - e.errno in (errno.ENOTDIR, 
errno.EACCES, errno.ENOENT) - # Python 2 on Windows needs to be handled this way :( - or getattr(e, "winerror", None) == 267 - ) - if not ignorable: + if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT): raise return () @@ -2195,10 +2222,16 @@ def _handle_ns(packageName, path_item): if importer is None: return None - # capture warnings due to #1111 - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - loader = importer.find_module(packageName) + # use find_spec (PEP 451) and fall-back to find_module (PEP 302) + try: + spec = importer.find_spec(packageName) + except AttributeError: + # capture warnings due to #1111 + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + loader = importer.find_module(packageName) + else: + loader = spec.loader if spec else None if loader is None: return None @@ -2214,7 +2247,7 @@ def _handle_ns(packageName, path_item): if subpath is not None: path = module.__path__ path.append(subpath) - loader.load_module(packageName) + importlib.import_module(packageName) _rebuild_mod_path(path, packageName, module) return subpath @@ -2270,8 +2303,8 @@ def declare_namespace(packageName): __import__(parent) try: path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) + except AttributeError as e: + raise TypeError("Not a package:", parent) from e # Track what packages are namespaces, so when new path items are added, # they can be updated @@ -2328,7 +2361,8 @@ def null_ns_handler(importer, path_item, packageName, module): def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) + return os.path.normcase(os.path.realpath(os.path.normpath( + _cygwin_patch(filename)))) def _cygwin_patch(filename): # pragma: nocover @@ -2354,7 +2388,15 @@ def _is_egg_path(path): """ Determine if given path appears to be an egg. 
""" - return path.lower().endswith('.egg') + return _is_zip_egg(path) or _is_unpacked_egg(path) + + +def _is_zip_egg(path): + return ( + path.lower().endswith('.egg') and + os.path.isfile(path) and + zipfile.is_zipfile(path) + ) def _is_unpacked_egg(path): @@ -2362,7 +2404,7 @@ def _is_unpacked_egg(path): Determine if given path appears to be an unpacked egg. """ return ( - _is_egg_path(path) and + path.lower().endswith('.egg') and os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) ) @@ -2375,20 +2417,6 @@ def _set_parent_ns(packageName): setattr(sys.modules[parent], name, sys.modules[packageName]) -def yield_lines(strs): - """Yield non-empty/non-comment lines of a string or sequence""" - if isinstance(strs, six.string_types): - for s in strs.splitlines(): - s = s.strip() - # skip blank lines/comments - if s and not s.startswith('#'): - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - - MODULE = re.compile(r"\w+(\.\w+)*$").match EGG_NAME = re.compile( r""" @@ -2450,7 +2478,7 @@ def resolve(self): try: return functools.reduce(getattr, self.attrs, module) except AttributeError as exc: - raise ImportError(str(exc)) + raise ImportError(str(exc)) from exc def require(self, env=None, installer=None): if self.extras and not self.dist: @@ -2536,15 +2564,6 @@ def parse_map(cls, data, dist=None): return maps -def _remove_md5_fragment(location): - if not location: - return '' - parsed = urllib.parse.urlparse(location) - if parsed[-1].startswith('md5='): - return urllib.parse.urlunparse(parsed[:-1] + ('',)) - return location - - def _version_from_file(lines): """ Given an iterable of lines from a Metadata file, return @@ -2601,7 +2620,7 @@ def hashcmp(self): self.parsed_version, self.precedence, self.key, - _remove_md5_fragment(self.location), + self.location, self.py_version or '', self.platform or '', ) @@ -2679,14 +2698,14 @@ def _warn_legacy_version(self): def version(self): try: return self._version - except AttributeError: + except 
AttributeError as e: version = self._get_version() if version is None: path = self._get_metadata_path_for_display(self.PKG_INFO) msg = ( "Missing 'Version:' header and/or {} file at path: {}" ).format(self.PKG_INFO, path) - raise ValueError(msg, self) + raise ValueError(msg, self) from e return version @@ -2739,10 +2758,10 @@ def requires(self, extras=()): for ext in extras: try: deps.extend(dm[safe_extra(ext)]) - except KeyError: + except KeyError as e: raise UnknownExtra( "%s has no such extra feature %r" % (self, ext) - ) + ) from e return deps def _get_metadata_path_for_display(self, name): @@ -2824,10 +2843,6 @@ def __dir__(self): ) ) - if not hasattr(object, '__dir__'): - # python 2.7 not supported - del __dir__ - @classmethod def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( @@ -2867,7 +2882,8 @@ def get_entry_info(self, group, name): """Return the EntryPoint object for `group`+`name`, or ``None``""" return self.get_entry_map(group).get(name) - def insert_on(self, path, loc=None, replace=False): + # FIXME: 'Distribution.insert_on' is too complex (13) + def insert_on(self, path, loc=None, replace=False): # noqa: C901 """Ensure self.location is on path If replace=False (default): @@ -3037,12 +3053,12 @@ def reqs_for_extra(extra): if not req.marker or req.marker.evaluate({'extra': extra}): yield req - common = frozenset(reqs_for_extra(None)) + common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None))) dm[None].extend(common) for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: s_extra = safe_extra(extra.strip()) - dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) + dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common] return dm @@ -3067,40 +3083,23 @@ def issue_warning(*args, **kw): warnings.warn(stacklevel=level + 1, *args, **kw) -class RequirementParseError(ValueError): - def __str__(self): - return ' '.join(self.args) - - def parse_requirements(strs): - """Yield ``Requirement`` 
objects for each specification in `strs` + """ + Yield ``Requirement`` objects for each specification in `strs`. `strs` must be a string, or a (possibly-nested) iterable thereof. """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) + return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs)))) - for line in lines: - # Drop comments -- a hash without a space may be in a URL. - if ' #' in line: - line = line[:line.find(' #')] - # If there is a line continuation, drop it, and append the next line. - if line.endswith('\\'): - line = line[:-2].strip() - try: - line += next(lines) - except StopIteration: - return - yield Requirement(line) + +class RequirementParseError(packaging.requirements.InvalidRequirement): + "Compatibility wrapper for InvalidRequirement" class Requirement(packaging.requirements.Requirement): def __init__(self, requirement_string): """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - try: - super(Requirement, self).__init__(requirement_string) - except packaging.requirements.InvalidRequirement as e: - raise RequirementParseError(str(e)) + super(Requirement, self).__init__(requirement_string) self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() @@ -3170,7 +3169,7 @@ def _find_adapter(registry, ob): def ensure_directory(path): """Ensure that the parent directory of `path` exists""" dirname = os.path.dirname(path) - py31compat.makedirs(dirname, exist_ok=True) + os.makedirs(dirname, exist_ok=True) def _bypass_ensure_directory(path): @@ -3248,6 +3247,15 @@ def _initialize(g=globals()): ) +class PkgResourcesDeprecationWarning(Warning): + """ + Base class for warning about deprecations in ``pkg_resources`` + + This class is not derived from ``DeprecationWarning``, and as such is + visible by default. 
+ """ + + @_call_aside def _initialize_master_working_set(): """ @@ -3286,11 +3294,3 @@ def _initialize_master_working_set(): # match order list(map(working_set.add_entry, sys.path)) globals().update(locals()) - -class PkgResourcesDeprecationWarning(Warning): - """ - Base class for warning about deprecations in ``pkg_resources`` - - This class is not derived from ``DeprecationWarning``, and as such is - visible by default. - """ diff --git a/src/pip/_vendor/pkg_resources/py31compat.py b/src/pip/_vendor/pkg_resources/py31compat.py deleted file mode 100644 index a2d3007ceb1..00000000000 --- a/src/pip/_vendor/pkg_resources/py31compat.py +++ /dev/null @@ -1,23 +0,0 @@ -import os -import errno -import sys - -from pip._vendor import six - - -def _makedirs_31(path, exist_ok=False): - try: - os.makedirs(path) - except OSError as exc: - if not exist_ok or exc.errno != errno.EEXIST: - raise - - -# rely on compatibility behavior until mode considerations -# and exists_ok considerations are disentangled. 
-# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663 -needs_makedirs = ( - six.PY2 or - (3, 4) <= sys.version_info < (3, 4, 1) -) -makedirs = _makedirs_31 if needs_makedirs else os.makedirs diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 67452d89fcf..e594211e465 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -16,7 +16,7 @@ rich==12.6.0 pygments==2.13.0 typing_extensions==4.4.0 resolvelib==0.8.1 -setuptools==44.0.0 +setuptools==65.6.3 six==1.16.0 tenacity==8.1.0 tomli==2.0.1 diff --git a/tools/vendoring/patches/pkg_resources.patch b/tools/vendoring/patches/pkg_resources.patch index 6556a860867..39bb2eac253 100644 --- a/tools/vendoring/patches/pkg_resources.patch +++ b/tools/vendoring/patches/pkg_resources.patch @@ -1,22 +1,35 @@ diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py -index a457ff27e..4cd562cf9 100644 +index d59226af9..3b9565893 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py -@@ -77,7 +77,7 @@ except ImportError: - importlib_machinery = None +@@ -77,7 +77,7 @@ + join_continuation, + ) - from . 
import py31compat -from pkg_resources.extern import appdirs +from pkg_resources.extern import platformdirs from pkg_resources.extern import packaging __import__('pkg_resources.extern.packaging.version') __import__('pkg_resources.extern.packaging.specifiers') -@@ -1310,7 +1310,7 @@ def get_default_cache(): +@@ -1321,7 +1321,7 @@ def get_default_cache(): """ return ( os.environ.get('PYTHON_EGG_CACHE') - or appdirs.user_cache_dir(appname='Python-Eggs') + or platformdirs.user_cache_dir(appname='Python-Eggs') ) - - + + +diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py +index 3f2476a0c..8d5727d35 100644 +--- a/src/pip/_vendor/pkg_resources/__init__.py ++++ b/src/pip/_vendor/pkg_resources/__init__.py +@@ -71,7 +71,7 @@ + except ImportError: + importlib_machinery = None + +-from pkg_resources.extern.jaraco.text import ( ++from pip._internal.utils._jaraco_text import ( + yield_lines, + drop_comment, + join_continuation, From e5c88951a036fced59c77c0212c9d27150da335d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 5 Feb 2023 00:13:32 +0100 Subject: [PATCH 288/730] Do not crash in presence of misformatted hash field in ``direct_url.json``. --- news/11773.bugfix.rst | 1 + src/pip/_internal/models/direct_url.py | 7 ++++++- tests/unit/test_direct_url.py | 7 +++++++ 3 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 news/11773.bugfix.rst diff --git a/news/11773.bugfix.rst b/news/11773.bugfix.rst new file mode 100644 index 00000000000..077bf061259 --- /dev/null +++ b/news/11773.bugfix.rst @@ -0,0 +1 @@ +Do not crash in presence of misformatted hash field in ``direct_url.json``. 
diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py index 09b540f916c..c3de70a749c 100644 --- a/src/pip/_internal/models/direct_url.py +++ b/src/pip/_internal/models/direct_url.py @@ -108,7 +108,12 @@ def __init__( if hash is not None: # Auto-populate the hashes key to upgrade to the new format automatically. # We don't back-populate the legacy hash key. - hash_name, hash_value = hash.split("=", 1) + try: + hash_name, hash_value = hash.split("=", 1) + except ValueError: + raise DirectUrlValidationError( + f"invalid archive_info.hash format: {hash!r}" + ) if hashes is None: hashes = {hash_name: hash_value} elif hash_name not in hash: diff --git a/tests/unit/test_direct_url.py b/tests/unit/test_direct_url.py index e1708ae9381..3ca982b5017 100644 --- a/tests/unit/test_direct_url.py +++ b/tests/unit/test_direct_url.py @@ -102,6 +102,13 @@ def test_parsing_validation() -> None: match="more than one of archive_info, dir_info, vcs_info", ): DirectUrl.from_dict({"url": "http://...", "dir_info": {}, "archive_info": {}}) + with pytest.raises( + DirectUrlValidationError, + match="invalid archive_info.hash format", + ): + DirectUrl.from_dict( + {"url": "http://...", "archive_info": {"hash": "sha256:aaa"}} + ) def test_redact_url() -> None: From 85da66d06ba2abe8879fe3a905fa411877b35410 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 5 Feb 2023 11:40:15 +0000 Subject: [PATCH 289/730] Ignore mypy errors in logic from jaraco.text --- setup.cfg | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setup.cfg b/setup.cfg index 1502abfc86a..ce6662d898d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -42,6 +42,9 @@ disallow_any_generics = True warn_unused_ignores = True no_implicit_optional = True +[mypy-pip._internal.utils._jaraco_text] +ignore_errors = True + [mypy-pip._vendor.*] ignore_errors = True From 62fb64ac9697e36efce3f72193e4bff0a39dbe14 Mon Sep 17 00:00:00 2001 From: Oliver Mannion <125105+tekumara@users.noreply.github.com> Date: 
Mon, 6 Feb 2023 18:05:31 +1100 Subject: [PATCH 290/730] Ignore PIP_REQUIRE_VIRTUALENV for `pip index` (#11671) Ignore PIP_REQUIRE_VIRTUALENV in `pip index` --- news/11671.feature.rst | 1 + src/pip/_internal/commands/index.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 news/11671.feature.rst diff --git a/news/11671.feature.rst b/news/11671.feature.rst new file mode 100644 index 00000000000..31f81f8da00 --- /dev/null +++ b/news/11671.feature.rst @@ -0,0 +1 @@ +Ignore PIP_REQUIRE_VIRTUALENV for ``pip index`` diff --git a/src/pip/_internal/commands/index.py b/src/pip/_internal/commands/index.py index b4bf0ac06e1..7267effed24 100644 --- a/src/pip/_internal/commands/index.py +++ b/src/pip/_internal/commands/index.py @@ -24,6 +24,7 @@ class IndexCommand(IndexGroupCommand): Inspect information available from package indexes. """ + ignore_require_venv = True usage = """ %prog versions """ From 9abb3c899a3b6e4dad590791f3c8d2421bce66c5 Mon Sep 17 00:00:00 2001 From: Stefano Rivera Date: Sun, 5 Feb 2023 23:06:59 -0800 Subject: [PATCH 291/730] Implement `--break-system-packages` for EXTERNALLY-MANAGED installations (#11780) The PEP 668 expects an override mechanism to ease the transition. This provides an override. --------- Co-authored-by: Pradyun Gedam --- news/11780.feature.rst | 2 ++ src/pip/_internal/cli/cmdoptions.py | 8 ++++++++ src/pip/_internal/commands/install.py | 6 +++++- src/pip/_internal/commands/uninstall.py | 4 +++- src/pip/_internal/exceptions.py | 4 +++- tests/functional/test_pep668.py | 16 ++++++++++++++++ 6 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 news/11780.feature.rst diff --git a/news/11780.feature.rst b/news/11780.feature.rst new file mode 100644 index 00000000000..b765de6c59a --- /dev/null +++ b/news/11780.feature.rst @@ -0,0 +1,2 @@ +Implement ``--break-system-packages`` to permit installing packages into +``EXTERNALLY-MANAGED`` Python installations. 
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 661c489c73e..1f804097e86 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -164,6 +164,14 @@ class PipOption(Option): ), ) +override_externally_managed: Callable[..., Option] = partial( + Option, + "--break-system-packages", + dest="override_externally_managed", + action="store_true", + help="Allow pip to modify an EXTERNALLY-MANAGED Python installation", +) + python: Callable[..., Option] = partial( Option, "--python", diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index cecaac2bc5b..b20aeddf835 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -215,6 +215,7 @@ def add_options(self) -> None: self.cmd_opts.add_option(cmdoptions.use_pep517()) self.cmd_opts.add_option(cmdoptions.no_use_pep517()) self.cmd_opts.add_option(cmdoptions.check_build_deps()) + self.cmd_opts.add_option(cmdoptions.override_externally_managed()) self.cmd_opts.add_option(cmdoptions.config_settings()) self.cmd_opts.add_option(cmdoptions.install_options()) @@ -296,7 +297,10 @@ def run(self, options: Values, args: List[str]) -> int: and options.target_dir is None and options.prefix_path is None ) - if installing_into_current_environment: + if ( + installing_into_current_environment + and not options.override_externally_managed + ): check_externally_managed() upgrade_strategy = "to-satisfy-only" diff --git a/src/pip/_internal/commands/uninstall.py b/src/pip/_internal/commands/uninstall.py index e5a4c8e10d4..f198fc313ff 100644 --- a/src/pip/_internal/commands/uninstall.py +++ b/src/pip/_internal/commands/uninstall.py @@ -58,6 +58,7 @@ def add_options(self) -> None: help="Don't ask for confirmation of uninstall deletions.", ) self.cmd_opts.add_option(cmdoptions.root_user_action()) + self.cmd_opts.add_option(cmdoptions.override_externally_managed()) 
self.parser.insert_option_group(0, self.cmd_opts) def run(self, options: Values, args: List[str]) -> int: @@ -93,7 +94,8 @@ def run(self, options: Values, args: List[str]) -> int: f'"pip help {self.name}")' ) - check_externally_managed() + if not options.override_externally_managed: + check_externally_managed() protect_pip_from_modification_on_windows( modifying_pip="pip" in reqs_to_uninstall diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index d28713ff79f..d4527295da3 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -696,7 +696,9 @@ def __init__(self, error: Optional[str]) -> None: context=context, note_stmt=( "If you believe this is a mistake, please contact your " - "Python installation or OS distribution provider." + "Python installation or OS distribution provider. " + "You can override this, at the risk of breaking your Python " + "installation or OS, by passing --break-system-packages." ), hint_stmt=Text("See PEP 668 for the detailed specification."), ) diff --git a/tests/functional/test_pep668.py b/tests/functional/test_pep668.py index 1fed85e708e..3c1085668fc 100644 --- a/tests/functional/test_pep668.py +++ b/tests/functional/test_pep668.py @@ -42,6 +42,22 @@ def test_fails(script: PipTestEnvironment, arguments: List[str]) -> None: assert "I am externally managed" in result.stderr +@pytest.mark.parametrize( + "arguments", + [ + pytest.param(["install"], id="install"), + pytest.param(["install", "--dry-run"], id="install-dry-run"), + pytest.param(["uninstall", "-y"], id="uninstall"), + ], +) +@pytest.mark.usefixtures("patch_check_externally_managed") +def test_succeeds_when_overridden( + script: PipTestEnvironment, arguments: List[str] +) -> None: + result = script.pip(*arguments, "pip", "--break-system-packages") + assert "I am externally managed" not in result.stderr + + @pytest.mark.parametrize( "arguments", [ From a2b0eb683821ec4f16f953de76aef58378767d05 Mon Sep 17 00:00:00 2001 From: 
Phil Elson Date: Fri, 3 Feb 2023 12:00:14 +0100 Subject: [PATCH 292/730] Document the limitations of the pip install --prefix argument, and cross-reference the --python flag, which can be harder to find due to it being a pip level argument --- src/pip/_internal/commands/install.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index cecaac2bc5b..739de020dda 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -156,7 +156,12 @@ def add_options(self) -> None: default=None, help=( "Installation prefix where lib, bin and other top-level " - "folders are placed" + "folders are placed. Note that the resulting installation may " + "contain scripts and other resources which reference the " + "Python interpreter of pip, and not that of ``--prefix``. " + "See also the ``--python`` option if the intention is to " + "install packages into another (possibly pip-free) " + "environment." ), ) From 7ff4da6e8f835867e01e36166cef4fe8a2be8b4b Mon Sep 17 00:00:00 2001 From: Daniele Nicolodi Date: Mon, 6 Feb 2023 12:27:44 +0100 Subject: [PATCH 293/730] Reconcile computation of isolated build environment paths (#11740) Use the same code to determine isolated environment paths at dependency install time and at environment setup time. We do not care about the exact paths but the paths needs to be consistent at package installation time and environment setup. This should fix issues observed on platforms that customize the installation schemes, such as Debian and Homebrew, where dependency installation and isolated build environment setup resolved to different paths. 
--- news/11740.bugfix.rst | 3 + src/pip/_internal/build_env.py | 18 +++--- src/pip/_internal/locations/__init__.py | 68 +---------------------- src/pip/_internal/locations/_distutils.py | 9 +-- src/pip/_internal/locations/_sysconfig.py | 12 ---- 5 files changed, 14 insertions(+), 96 deletions(-) create mode 100644 news/11740.bugfix.rst diff --git a/news/11740.bugfix.rst b/news/11740.bugfix.rst new file mode 100644 index 00000000000..917beb5354f --- /dev/null +++ b/news/11740.bugfix.rst @@ -0,0 +1,3 @@ +Improve handling of isolated build environments on platforms that +customize the Python's installation schemes, such as Debian and +Homebrew. diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index 24bfa870b07..4f704a3547d 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -9,7 +9,7 @@ import textwrap from collections import OrderedDict from types import TracebackType -from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type +from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union from pip._vendor.certifi import where from pip._vendor.packaging.requirements import Requirement @@ -17,12 +17,7 @@ from pip import __file__ as pip_location from pip._internal.cli.spinners import open_spinner -from pip._internal.locations import ( - get_isolated_environment_bin_path, - get_isolated_environment_lib_paths, - get_platlib, - get_purelib, -) +from pip._internal.locations import get_platlib, get_purelib, get_scheme from pip._internal.metadata import get_default_environment, get_environment from pip._internal.utils.subprocess import call_subprocess from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds @@ -33,12 +28,17 @@ logger = logging.getLogger(__name__) +def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]: + return (a, b) if a != b else (a,) + + class _Prefix: def __init__(self, path: str) -> None: self.path = path self.setup = False - 
self.bin_dir = get_isolated_environment_bin_path(path) - self.lib_dirs = get_isolated_environment_lib_paths(path) + scheme = get_scheme("", prefix=path) + self.bin_dir = scheme.scripts + self.lib_dirs = _dedup(scheme.purelib, scheme.platlib) def get_runnable_pip() -> str: diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 496844be142..d54bc63eba3 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -4,7 +4,7 @@ import pathlib import sys import sysconfig -from typing import Any, Dict, Generator, List, Optional, Tuple +from typing import Any, Dict, Generator, Optional, Tuple from pip._internal.models.scheme import SCHEME_KEYS, Scheme from pip._internal.utils.compat import WINDOWS @@ -25,8 +25,6 @@ "USER_CACHE_DIR", "get_bin_prefix", "get_bin_user", - "get_isolated_environment_bin_path", - "get_isolated_environment_lib_paths", "get_major_minor_version", "get_platlib", "get_purelib", @@ -467,67 +465,3 @@ def get_platlib() -> str: if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): _log_context() return old - - -def _deduplicated(v1: str, v2: str) -> List[str]: - """Deduplicate values from a list.""" - if v1 == v2: - return [v1] - return [v1, v2] - - -def _looks_like_apple_library(path: str) -> bool: - """Apple patches sysconfig to *always* look under */Library/Python*.""" - if sys.platform[:6] != "darwin": - return False - return path == f"/Library/Python/{get_major_minor_version()}/site-packages" - - -def get_isolated_environment_lib_paths(prefix: str) -> List[str]: - """Return the lib locations under ``prefix``.""" - new_pure, new_plat = _sysconfig.get_isolated_environment_lib_paths(prefix) - if _USE_SYSCONFIG: - return _deduplicated(new_pure, new_plat) - - old_pure, old_plat = _distutils.get_isolated_environment_lib_paths(prefix) - old_lib_paths = _deduplicated(old_pure, old_plat) - - # Apple's Python (shipped with Xcode and Command Line Tools) 
hard-code - # platlib and purelib to '/Library/Python/X.Y/site-packages'. This will - # cause serious build isolation bugs when Apple starts shipping 3.10 because - # pip will install build backends to the wrong location. This tells users - # who is at fault so Apple may notice it and fix the issue in time. - if all(_looks_like_apple_library(p) for p in old_lib_paths): - deprecated( - reason=( - "Python distributed by Apple's Command Line Tools incorrectly " - "patches sysconfig to always point to '/Library/Python'. This " - "will cause build isolation to operate incorrectly on Python " - "3.10 or later. Please help report this to Apple so they can " - "fix this. https://developer.apple.com/bug-reporting/" - ), - replacement=None, - gone_in=None, - ) - return old_lib_paths - - warned = [ - _warn_if_mismatch( - pathlib.Path(old_pure), - pathlib.Path(new_pure), - key="prefixed-purelib", - ), - _warn_if_mismatch( - pathlib.Path(old_plat), - pathlib.Path(new_plat), - key="prefixed-platlib", - ), - ] - if any(warned): - _log_context(prefix=prefix) - - return old_lib_paths - - -def get_isolated_environment_bin_path(prefix: str) -> str: - return _sysconfig.get_isolated_environment_paths(prefix)["scripts"] diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index a6fbcd2f09d..92bd93179c5 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -21,7 +21,7 @@ from distutils.command.install import SCHEME_KEYS from distutils.command.install import install as distutils_install_command from distutils.sysconfig import get_python_lib -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Union, cast from pip._internal.models.scheme import Scheme from pip._internal.utils.compat import WINDOWS @@ -171,10 +171,3 @@ def get_purelib() -> str: def get_platlib() -> str: return get_python_lib(plat_specific=True) - - -def 
get_isolated_environment_lib_paths(prefix: str) -> Tuple[str, str]: - return ( - get_python_lib(plat_specific=False, prefix=prefix), - get_python_lib(plat_specific=True, prefix=prefix), - ) diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 38e400f94d4..97aef1f1ac2 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -211,15 +211,3 @@ def get_purelib() -> str: def get_platlib() -> str: return sysconfig.get_paths()["platlib"] - - -def get_isolated_environment_paths(prefix: str) -> typing.Dict[str, str]: - variables = {"base": prefix, "platbase": prefix} - if "venv" in sysconfig.get_scheme_names(): - return sysconfig.get_paths(vars=variables, scheme="venv") - return sysconfig.get_paths(vars=variables) - - -def get_isolated_environment_lib_paths(prefix: str) -> typing.Tuple[str, str]: - paths = get_isolated_environment_paths(prefix) - return (paths["purelib"], paths["platlib"]) From a373982a0cae47bb9f5a311b07f13b445cc97f9e Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 6 Feb 2023 17:26:46 +0000 Subject: [PATCH 294/730] Implement package-selection CLI in `nox -s vendoring` This makes it easier to update/not update certain packages. --- noxfile.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/noxfile.py b/noxfile.py index 1345c417d69..5c4683b7d79 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,7 @@ """Automation using nox. 
""" +import argparse import glob import os import shutil @@ -183,7 +184,13 @@ def lint(session: nox.Session) -> None: def vendoring(session: nox.Session) -> None: session.install("vendoring~=1.2.0") - if "--upgrade" not in session.posargs: + parser = argparse.ArgumentParser(prog="nox -s vendoring") + parser.add_argument("--upgrade-all", action="store_true") + parser.add_argument("--upgrade", action="append", default=[]) + parser.add_argument("--skip", action="append", default=[]) + args = parser.parse_args(session.posargs) + + if not (args.upgrade or args.upgrade_all): session.run("vendoring", "sync", "-v") return @@ -199,7 +206,9 @@ def pinned_requirements(path: Path) -> Iterator[Tuple[str, str]]: vendor_txt = Path("src/pip/_vendor/vendor.txt") for name, old_version in pinned_requirements(vendor_txt): - if name == "setuptools": + if name in args.skip: + continue + if args.upgrade and name not in args.upgrade: continue # update requirements.txt From 6245fedc9d2bc6a5a6b332080ad34ad8f2f87e23 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 6 Feb 2023 17:19:17 +0000 Subject: [PATCH 295/730] Make resolvelib's provider capable of handling empty iterators This is _technically_ possible with the API, and accounting for that enables the resolver to evolve with this information. --- .../resolution/resolvelib/provider.py | 34 ++++++++++++++----- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py index 6300dfc57f0..64e6356f10a 100644 --- a/src/pip/_internal/resolution/resolvelib/provider.py +++ b/src/pip/_internal/resolution/resolvelib/provider.py @@ -124,14 +124,29 @@ def get_preference( # type: ignore * If equal, prefer if any requirement is "pinned", i.e. contains operator ``===`` or ``==``. * If equal, calculate an approximate "depth" and resolve requirements - closer to the user-specified requirements first. 
+ closer to the user-specified requirements first. If the depth cannot + by determined (eg: due to no matching parents), it is considered + infinite. * Order user-specified requirements by the order they are specified. * If equal, prefers "non-free" requirements, i.e. contains at least one operator, such as ``>=`` or ``<``. * If equal, order alphabetically for consistency (helps debuggability). """ - lookups = (r.get_candidate_lookup() for r, _ in information[identifier]) - candidate, ireqs = zip(*lookups) + try: + next(iter(information[identifier])) + except StopIteration: + # There is no information for this identifier, so there's no known + # candidates. + has_information = False + else: + has_information = True + + if has_information: + lookups = (r.get_candidate_lookup() for r, _ in information[identifier]) + candidate, ireqs = zip(*lookups) + else: + candidate, ireqs = None, () + operators = [ specifier.operator for specifier_set in (ireq.specifier for ireq in ireqs if ireq) @@ -146,11 +161,14 @@ def get_preference( # type: ignore requested_order: Union[int, float] = self._user_requested[identifier] except KeyError: requested_order = math.inf - parent_depths = ( - self._known_depths[parent.name] if parent is not None else 0.0 - for _, parent in information[identifier] - ) - inferred_depth = min(d for d in parent_depths) + 1.0 + if has_information: + parent_depths = ( + self._known_depths[parent.name] if parent is not None else 0.0 + for _, parent in information[identifier] + ) + inferred_depth = min(d for d in parent_depths) + 1.0 + else: + inferred_depth = math.inf else: inferred_depth = 1.0 self._known_depths[identifier] = inferred_depth From 8dbc2db274b0724d7b86eb1c2fbac6b0b4a4c4f6 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 6 Feb 2023 17:29:08 +0000 Subject: [PATCH 296/730] Upgrade resolvelib to 0.9.0 --- news/resolvelib.vendor.rst | 1 + src/pip/_vendor/resolvelib/__init__.py | 2 +- .../resolvelib/compat/collections_abc.pyi | 1 + 
src/pip/_vendor/resolvelib/providers.py | 14 +++--- src/pip/_vendor/resolvelib/providers.pyi | 4 +- src/pip/_vendor/resolvelib/reporters.py | 2 +- src/pip/_vendor/resolvelib/reporters.pyi | 2 +- src/pip/_vendor/resolvelib/resolvers.py | 44 ++++++++++++++++++- src/pip/_vendor/resolvelib/resolvers.pyi | 12 +++++ src/pip/_vendor/resolvelib/structs.py | 11 +++-- src/pip/_vendor/resolvelib/structs.pyi | 2 +- src/pip/_vendor/vendor.txt | 2 +- 12 files changed, 78 insertions(+), 19 deletions(-) create mode 100644 news/resolvelib.vendor.rst create mode 100644 src/pip/_vendor/resolvelib/compat/collections_abc.pyi diff --git a/news/resolvelib.vendor.rst b/news/resolvelib.vendor.rst new file mode 100644 index 00000000000..c8b5c928d19 --- /dev/null +++ b/news/resolvelib.vendor.rst @@ -0,0 +1 @@ +Upgrade resolvelib to 0.9.0 diff --git a/src/pip/_vendor/resolvelib/__init__.py b/src/pip/_vendor/resolvelib/__init__.py index ce05fd30274..fa6995e32aa 100644 --- a/src/pip/_vendor/resolvelib/__init__.py +++ b/src/pip/_vendor/resolvelib/__init__.py @@ -11,7 +11,7 @@ "ResolutionTooDeep", ] -__version__ = "0.8.1" +__version__ = "0.9.0" from .providers import AbstractProvider, AbstractResolver diff --git a/src/pip/_vendor/resolvelib/compat/collections_abc.pyi b/src/pip/_vendor/resolvelib/compat/collections_abc.pyi new file mode 100644 index 00000000000..2a088b19a93 --- /dev/null +++ b/src/pip/_vendor/resolvelib/compat/collections_abc.pyi @@ -0,0 +1 @@ +from collections.abc import Mapping, Sequence diff --git a/src/pip/_vendor/resolvelib/providers.py b/src/pip/_vendor/resolvelib/providers.py index 7d0a9c22a46..e99d87ee75f 100644 --- a/src/pip/_vendor/resolvelib/providers.py +++ b/src/pip/_vendor/resolvelib/providers.py @@ -1,5 +1,5 @@ class AbstractProvider(object): - """Delegate class to provide requirement interface for the resolver.""" + """Delegate class to provide the required interface for the resolver.""" def identify(self, requirement_or_candidate): """Given a requirement, return 
an identifier for it. @@ -24,9 +24,9 @@ def get_preference( this group of arguments is. :param identifier: An identifier as returned by ``identify()``. This - identifies the dependency matches of which should be returned. + identifies the dependency matches which should be returned. :param resolutions: Mapping of candidates currently pinned by the - resolver. Each key is an identifier, and the value a candidate. + resolver. Each key is an identifier, and the value is a candidate. The candidate may conflict with requirements from ``information``. :param candidates: Mapping of each dependency's possible candidates. Each value is an iterator of candidates. @@ -39,10 +39,10 @@ def get_preference( * ``requirement`` specifies a requirement contributing to the current list of candidates. - * ``parent`` specifies the candidate that provides (dependend on) the + * ``parent`` specifies the candidate that provides (depended on) the requirement, or ``None`` to indicate a root requirement. - The preference could depend on a various of issues, including (not + The preference could depend on various issues, including (not necessarily in this order): * Is this package pinned in the current resolution result? @@ -61,7 +61,7 @@ def get_preference( raise NotImplementedError def find_matches(self, identifier, requirements, incompatibilities): - """Find all possible candidates that satisfy given constraints. + """Find all possible candidates that satisfy the given constraints. :param identifier: An identifier as returned by ``identify()``. This identifies the dependency matches of which should be returned. @@ -92,7 +92,7 @@ def find_matches(self, identifier, requirements, incompatibilities): def is_satisfied_by(self, requirement, candidate): """Whether the given requirement can be satisfied by a candidate. - The candidate is guarenteed to have been generated from the + The candidate is guaranteed to have been generated from the requirement. 
A boolean should be returned to indicate whether ``candidate`` is a diff --git a/src/pip/_vendor/resolvelib/providers.pyi b/src/pip/_vendor/resolvelib/providers.pyi index 47d6f8abad7..ec054194ee3 100644 --- a/src/pip/_vendor/resolvelib/providers.pyi +++ b/src/pip/_vendor/resolvelib/providers.pyi @@ -1,12 +1,11 @@ from typing import ( Any, - Collection, Generic, Iterable, Iterator, Mapping, - Optional, Protocol, + Sequence, Union, ) @@ -25,6 +24,7 @@ class AbstractProvider(Generic[RT, CT, KT]): resolutions: Mapping[KT, CT], candidates: Mapping[KT, Iterator[CT]], information: Mapping[KT, Iterator[RequirementInformation[RT, CT]]], + backtrack_causes: Sequence[RequirementInformation[RT, CT]], ) -> Preference: ... def find_matches( self, diff --git a/src/pip/_vendor/resolvelib/reporters.py b/src/pip/_vendor/resolvelib/reporters.py index 6695480fff4..688b5e10d86 100644 --- a/src/pip/_vendor/resolvelib/reporters.py +++ b/src/pip/_vendor/resolvelib/reporters.py @@ -36,7 +36,7 @@ def resolving_conflicts(self, causes): :param causes: The information on the collision that caused the backtracking. """ - def backtracking(self, candidate): + def rejecting_candidate(self, criterion, candidate): """Called when rejecting a candidate during backtracking.""" def pinning(self, candidate): diff --git a/src/pip/_vendor/resolvelib/reporters.pyi b/src/pip/_vendor/resolvelib/reporters.pyi index 03d4f09a390..b2ad286ba06 100644 --- a/src/pip/_vendor/resolvelib/reporters.pyi +++ b/src/pip/_vendor/resolvelib/reporters.pyi @@ -6,6 +6,6 @@ class BaseReporter: def ending_round(self, index: int, state: Any) -> Any: ... def ending(self, state: Any) -> Any: ... def adding_requirement(self, requirement: Any, parent: Any) -> Any: ... - def backtracking(self, candidate: Any) -> Any: ... + def rejecting_candidate(self, criterion: Any, candidate: Any) -> Any: ... def resolving_conflicts(self, causes: Any) -> Any: ... def pinning(self, candidate: Any) -> Any: ... 
diff --git a/src/pip/_vendor/resolvelib/resolvers.py b/src/pip/_vendor/resolvelib/resolvers.py index 787681b03e9..49e30c7f5c4 100644 --- a/src/pip/_vendor/resolvelib/resolvers.py +++ b/src/pip/_vendor/resolvelib/resolvers.py @@ -173,6 +173,31 @@ def _add_to_criteria(self, criteria, requirement, parent): raise RequirementsConflicted(criterion) criteria[identifier] = criterion + def _remove_information_from_criteria(self, criteria, parents): + """Remove information from parents of criteria. + + Concretely, removes all values from each criterion's ``information`` + field that have one of ``parents`` as provider of the requirement. + + :param criteria: The criteria to update. + :param parents: Identifiers for which to remove information from all criteria. + """ + if not parents: + return + for key, criterion in criteria.items(): + criteria[key] = Criterion( + criterion.candidates, + [ + information + for information in criterion.information + if ( + information[1] is None + or self._p.identify(information[1]) not in parents + ) + ], + criterion.incompatibilities, + ) + def _get_preference(self, name): return self._p.get_preference( identifier=name, @@ -212,6 +237,7 @@ def _attempt_to_pin_criterion(self, name): try: criteria = self._get_updated_criteria(candidate) except RequirementsConflicted as e: + self._r.rejecting_candidate(e.criterion, candidate) causes.append(e.criterion) continue @@ -281,8 +307,6 @@ def _backtrack(self): # Also mark the newly known incompatibility. incompatibilities_from_broken.append((name, [candidate])) - self._r.backtracking(candidate=candidate) - # Create a new state from the last known-to-work one, and apply # the previously gathered incompatibility information. 
def _patch_criteria(): @@ -368,6 +392,11 @@ def resolve(self, requirements, max_rounds): self._r.ending(state=self.state) return self.state + # keep track of satisfied names to calculate diff after pinning + satisfied_names = set(self.state.criteria.keys()) - set( + unsatisfied_names + ) + # Choose the most preferred unpinned criterion to try. name = min(unsatisfied_names, key=self._get_preference) failure_causes = self._attempt_to_pin_criterion(name) @@ -384,6 +413,17 @@ def resolve(self, requirements, max_rounds): if not success: raise ResolutionImpossible(self.state.backtrack_causes) else: + # discard as information sources any invalidated names + # (unsatisfied names that were previously satisfied) + newly_unsatisfied_names = { + key + for key, criterion in self.state.criteria.items() + if key in satisfied_names + and not self._is_current_pin_satisfying(key, criterion) + } + self._remove_information_from_criteria( + self.state.criteria, newly_unsatisfied_names + ) # Pinning was successful. Push a new state to do another pin. self._push_new_state() diff --git a/src/pip/_vendor/resolvelib/resolvers.pyi b/src/pip/_vendor/resolvelib/resolvers.pyi index 0eb5b2162c1..528a1a259af 100644 --- a/src/pip/_vendor/resolvelib/resolvers.pyi +++ b/src/pip/_vendor/resolvelib/resolvers.pyi @@ -55,6 +55,18 @@ class ResolutionImpossible(ResolutionError, Generic[RT, CT]): class ResolutionTooDeep(ResolutionError): round_count: int +# This should be a NamedTuple, but Python 3.6 has a bug that prevents it. +# https://stackoverflow.com/a/50531189/1376863 +class State(tuple, Generic[RT, CT, KT]): + mapping: Mapping[KT, CT] + criteria: Mapping[KT, Criterion[RT, CT, KT]] + backtrack_causes: Collection[RequirementInformation[RT, CT]] + +class Resolution(Generic[RT, CT, KT]): + def resolve( + self, requirements: Iterable[RT], max_rounds: int + ) -> State[RT, CT, KT]: ... 
+ class Result(Generic[RT, CT, KT]): mapping: Mapping[KT, CT] graph: DirectedGraph[Optional[KT]] diff --git a/src/pip/_vendor/resolvelib/structs.py b/src/pip/_vendor/resolvelib/structs.py index 93d1568bd4d..359a34f6018 100644 --- a/src/pip/_vendor/resolvelib/structs.py +++ b/src/pip/_vendor/resolvelib/structs.py @@ -117,13 +117,14 @@ class _FactoryIterableView(object): def __init__(self, factory): self._factory = factory + self._iterable = None def __repr__(self): - return "{}({})".format(type(self).__name__, list(self._factory())) + return "{}({})".format(type(self).__name__, list(self)) def __bool__(self): try: - next(self._factory()) + next(iter(self)) except StopIteration: return False return True @@ -131,7 +132,11 @@ def __bool__(self): __nonzero__ = __bool__ # XXX: Python 2. def __iter__(self): - return self._factory() + iterable = ( + self._factory() if self._iterable is None else self._iterable + ) + self._iterable, current = itertools.tee(iterable) + return current class _SequenceIterableView(object): diff --git a/src/pip/_vendor/resolvelib/structs.pyi b/src/pip/_vendor/resolvelib/structs.pyi index fae2a2fcefc..0ac59f0f00a 100644 --- a/src/pip/_vendor/resolvelib/structs.pyi +++ b/src/pip/_vendor/resolvelib/structs.pyi @@ -16,7 +16,7 @@ RT = TypeVar("RT") # Requirement. CT = TypeVar("CT") # Candidate. 
_T = TypeVar("_T") -Matches = Union[Iterable[CT], Callable[[], Iterator[CT]]] +Matches = Union[Iterable[CT], Callable[[], Iterable[CT]]] class IteratorMapping(Mapping[KT, _T], metaclass=ABCMeta): pass diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 67452d89fcf..703daf196a6 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -15,7 +15,7 @@ requests==2.28.2 rich==12.6.0 pygments==2.13.0 typing_extensions==4.4.0 -resolvelib==0.8.1 +resolvelib==0.9.0 setuptools==44.0.0 six==1.16.0 tenacity==8.1.0 From 88cccfc142c32b2a957cd90ea1a2535c7102528a Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 6 Feb 2023 17:32:54 +0000 Subject: [PATCH 297/730] Handle `backtracking` -> `rejecting_candidate` change This is a newer method on resolvelib's end for reporting when a candidate is rejected. --- .../resolution/resolvelib/reporter.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/reporter.py b/src/pip/_internal/resolution/resolvelib/reporter.py index 6ced5329b81..a95a8e4cf24 100644 --- a/src/pip/_internal/resolution/resolvelib/reporter.py +++ b/src/pip/_internal/resolution/resolvelib/reporter.py @@ -11,9 +11,9 @@ class PipReporter(BaseReporter): def __init__(self) -> None: - self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int) + self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int) - self._messages_at_backtrack = { + self._messages_at_reject_count = { 1: ( "pip is looking at multiple versions of {package_name} to " "determine which version is compatible with other " @@ -32,14 +32,14 @@ def __init__(self) -> None: ), } - def backtracking(self, candidate: Candidate) -> None: - self.backtracks_by_package[candidate.name] += 1 + def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None: + self.reject_count_by_package[candidate.name] += 1 - count = self.backtracks_by_package[candidate.name] - if count not 
in self._messages_at_backtrack: + count = self.reject_count_by_package[candidate.name] + if count not in self._messages_at_reject_count: return - message = self._messages_at_backtrack[count] + message = self._messages_at_reject_count[count] logger.info("INFO: %s", message.format(package_name=candidate.name)) @@ -61,8 +61,8 @@ def ending(self, state: Any) -> None: def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None: logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent) - def backtracking(self, candidate: Candidate) -> None: - logger.info("Reporter.backtracking(%r)", candidate) + def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None: + logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate) def pinning(self, candidate: Candidate) -> None: logger.info("Reporter.pinning(%r)", candidate) From 4f455ae0b0a6159c25cfcce691d9078bc3c0b454 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 6 Feb 2023 17:45:40 +0000 Subject: [PATCH 298/730] Drop an unused `type: ignore` comment --- src/pip/_internal/resolution/resolvelib/provider.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py index 64e6356f10a..b08cce7f333 100644 --- a/src/pip/_internal/resolution/resolvelib/provider.py +++ b/src/pip/_internal/resolution/resolvelib/provider.py @@ -104,7 +104,7 @@ def __init__( def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str: return requirement_or_candidate.name - def get_preference( # type: ignore + def get_preference( self, identifier: str, resolutions: Mapping[str, Candidate], From 8844795f454e9997639ad5a5b893fc95762d1734 Mon Sep 17 00:00:00 2001 From: Tobias Hermann Date: Tue, 7 Feb 2023 08:03:08 +0100 Subject: [PATCH 299/730] Fix minor typo in features list of version 23.0 (#11785) --- NEWS.rst | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/NEWS.rst b/NEWS.rst index 2e24fab37ad..19f7594775f 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -22,7 +22,7 @@ Features modify the externally managed environment. (`#11381 `_) - Enable the use of ``keyring`` found on ``PATH``. This allows ``keyring`` installed using ``pipx`` to be used by ``pip``. (`#11589 `_) -- The inspect and installation report formats are now declared stabled, and their version +- The inspect and installation report formats are now declared stable, and their version has been bumped from ``0`` to ``1``. (`#11757 `_) Bug Fixes From f12a2ef2a216315e1e65a844f855f15600e13cfd Mon Sep 17 00:00:00 2001 From: Phil Elson Date: Thu, 9 Feb 2023 13:17:07 +0100 Subject: [PATCH 300/730] Include a news item for the documentation change --- news/11775.doc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11775.doc.rst diff --git a/news/11775.doc.rst b/news/11775.doc.rst new file mode 100644 index 00000000000..d4eff89c000 --- /dev/null +++ b/news/11775.doc.rst @@ -0,0 +1 @@ +Cross-reference the --python flag in the docs for the --prefix flag, and mention the --prefix limitations with regards to installed console scripts. From e399d7d4ede3f5fad6a3f58b97dbae532daf5e19 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 9 Feb 2023 20:21:11 +0800 Subject: [PATCH 301/730] Doc formatting --- news/11775.doc.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/news/11775.doc.rst b/news/11775.doc.rst index d4eff89c000..18274b7692a 100644 --- a/news/11775.doc.rst +++ b/news/11775.doc.rst @@ -1 +1,2 @@ -Cross-reference the --python flag in the docs for the --prefix flag, and mention the --prefix limitations with regards to installed console scripts. +Cross-reference the ``--python`` flag from the ``--prefix`` flag, +and mention limitations of ``--prefix`` regarding script installation. 
From 0138bd54c6d346fc2b14e0a9554a1b636fe17001 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 5 Feb 2023 09:50:13 +0000 Subject: [PATCH 302/730] Merge pull request #11779 from sbidoul/fix-direct_url-invalid-hash-sbi Do not crash in presence of misformatted hash field in ``direct_url.json`` --- news/11773.bugfix.rst | 1 + src/pip/_internal/models/direct_url.py | 7 ++++++- tests/unit/test_direct_url.py | 7 +++++++ 3 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 news/11773.bugfix.rst diff --git a/news/11773.bugfix.rst b/news/11773.bugfix.rst new file mode 100644 index 00000000000..077bf061259 --- /dev/null +++ b/news/11773.bugfix.rst @@ -0,0 +1 @@ +Do not crash in presence of misformatted hash field in ``direct_url.json``. diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py index 09b540f916c..c3de70a749c 100644 --- a/src/pip/_internal/models/direct_url.py +++ b/src/pip/_internal/models/direct_url.py @@ -108,7 +108,12 @@ def __init__( if hash is not None: # Auto-populate the hashes key to upgrade to the new format automatically. # We don't back-populate the legacy hash key. 
- hash_name, hash_value = hash.split("=", 1) + try: + hash_name, hash_value = hash.split("=", 1) + except ValueError: + raise DirectUrlValidationError( + f"invalid archive_info.hash format: {hash!r}" + ) if hashes is None: hashes = {hash_name: hash_value} elif hash_name not in hash: diff --git a/tests/unit/test_direct_url.py b/tests/unit/test_direct_url.py index e1708ae9381..3ca982b5017 100644 --- a/tests/unit/test_direct_url.py +++ b/tests/unit/test_direct_url.py @@ -102,6 +102,13 @@ def test_parsing_validation() -> None: match="more than one of archive_info, dir_info, vcs_info", ): DirectUrl.from_dict({"url": "http://...", "dir_info": {}, "archive_info": {}}) + with pytest.raises( + DirectUrlValidationError, + match="invalid archive_info.hash format", + ): + DirectUrl.from_dict( + {"url": "http://...", "archive_info": {"hash": "sha256:aaa"}} + ) def test_redact_url() -> None: From 864fd7764b97ffac8c08946caccc2286bee36ed1 Mon Sep 17 00:00:00 2001 From: Oliver Mannion <125105+tekumara@users.noreply.github.com> Date: Mon, 6 Feb 2023 18:05:31 +1100 Subject: [PATCH 303/730] Ignore PIP_REQUIRE_VIRTUALENV for `pip index` (#11671) Ignore PIP_REQUIRE_VIRTUALENV in `pip index` --- news/11671.feature.rst | 1 + src/pip/_internal/commands/index.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 news/11671.feature.rst diff --git a/news/11671.feature.rst b/news/11671.feature.rst new file mode 100644 index 00000000000..31f81f8da00 --- /dev/null +++ b/news/11671.feature.rst @@ -0,0 +1 @@ +Ignore PIP_REQUIRE_VIRTUALENV for ``pip index`` diff --git a/src/pip/_internal/commands/index.py b/src/pip/_internal/commands/index.py index b4bf0ac06e1..7267effed24 100644 --- a/src/pip/_internal/commands/index.py +++ b/src/pip/_internal/commands/index.py @@ -24,6 +24,7 @@ class IndexCommand(IndexGroupCommand): Inspect information available from package indexes. 
""" + ignore_require_venv = True usage = """ %prog versions """ From e6deb9b87c18cdd27a9ba27cb7e0670ffb81d45e Mon Sep 17 00:00:00 2001 From: Stefano Rivera Date: Sun, 5 Feb 2023 23:06:59 -0800 Subject: [PATCH 304/730] Implement `--break-system-packages` for EXTERNALLY-MANAGED installations (#11780) The PEP 668 expects an override mechanism to ease the transition. This provides an override. --------- Co-authored-by: Pradyun Gedam --- news/11780.feature.rst | 2 ++ src/pip/_internal/cli/cmdoptions.py | 8 ++++++++ src/pip/_internal/commands/install.py | 6 +++++- src/pip/_internal/commands/uninstall.py | 4 +++- src/pip/_internal/exceptions.py | 4 +++- tests/functional/test_pep668.py | 16 ++++++++++++++++ 6 files changed, 37 insertions(+), 3 deletions(-) create mode 100644 news/11780.feature.rst diff --git a/news/11780.feature.rst b/news/11780.feature.rst new file mode 100644 index 00000000000..b765de6c59a --- /dev/null +++ b/news/11780.feature.rst @@ -0,0 +1,2 @@ +Implement ``--break-system-packages`` to permit installing packages into +``EXTERNALLY-MANAGED`` Python installations. 
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 661c489c73e..1f804097e86 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -164,6 +164,14 @@ class PipOption(Option): ), ) +override_externally_managed: Callable[..., Option] = partial( + Option, + "--break-system-packages", + dest="override_externally_managed", + action="store_true", + help="Allow pip to modify an EXTERNALLY-MANAGED Python installation", +) + python: Callable[..., Option] = partial( Option, "--python", diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index cecaac2bc5b..b20aeddf835 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -215,6 +215,7 @@ def add_options(self) -> None: self.cmd_opts.add_option(cmdoptions.use_pep517()) self.cmd_opts.add_option(cmdoptions.no_use_pep517()) self.cmd_opts.add_option(cmdoptions.check_build_deps()) + self.cmd_opts.add_option(cmdoptions.override_externally_managed()) self.cmd_opts.add_option(cmdoptions.config_settings()) self.cmd_opts.add_option(cmdoptions.install_options()) @@ -296,7 +297,10 @@ def run(self, options: Values, args: List[str]) -> int: and options.target_dir is None and options.prefix_path is None ) - if installing_into_current_environment: + if ( + installing_into_current_environment + and not options.override_externally_managed + ): check_externally_managed() upgrade_strategy = "to-satisfy-only" diff --git a/src/pip/_internal/commands/uninstall.py b/src/pip/_internal/commands/uninstall.py index e5a4c8e10d4..f198fc313ff 100644 --- a/src/pip/_internal/commands/uninstall.py +++ b/src/pip/_internal/commands/uninstall.py @@ -58,6 +58,7 @@ def add_options(self) -> None: help="Don't ask for confirmation of uninstall deletions.", ) self.cmd_opts.add_option(cmdoptions.root_user_action()) + self.cmd_opts.add_option(cmdoptions.override_externally_managed()) 
self.parser.insert_option_group(0, self.cmd_opts) def run(self, options: Values, args: List[str]) -> int: @@ -93,7 +94,8 @@ def run(self, options: Values, args: List[str]) -> int: f'"pip help {self.name}")' ) - check_externally_managed() + if not options.override_externally_managed: + check_externally_managed() protect_pip_from_modification_on_windows( modifying_pip="pip" in reqs_to_uninstall diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index d28713ff79f..d4527295da3 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -696,7 +696,9 @@ def __init__(self, error: Optional[str]) -> None: context=context, note_stmt=( "If you believe this is a mistake, please contact your " - "Python installation or OS distribution provider." + "Python installation or OS distribution provider. " + "You can override this, at the risk of breaking your Python " + "installation or OS, by passing --break-system-packages." ), hint_stmt=Text("See PEP 668 for the detailed specification."), ) diff --git a/tests/functional/test_pep668.py b/tests/functional/test_pep668.py index 1fed85e708e..3c1085668fc 100644 --- a/tests/functional/test_pep668.py +++ b/tests/functional/test_pep668.py @@ -42,6 +42,22 @@ def test_fails(script: PipTestEnvironment, arguments: List[str]) -> None: assert "I am externally managed" in result.stderr +@pytest.mark.parametrize( + "arguments", + [ + pytest.param(["install"], id="install"), + pytest.param(["install", "--dry-run"], id="install-dry-run"), + pytest.param(["uninstall", "-y"], id="uninstall"), + ], +) +@pytest.mark.usefixtures("patch_check_externally_managed") +def test_succeeds_when_overridden( + script: PipTestEnvironment, arguments: List[str] +) -> None: + result = script.pip(*arguments, "pip", "--break-system-packages") + assert "I am externally managed" not in result.stderr + + @pytest.mark.parametrize( "arguments", [ From 9a0d9301c24dc5268ce2640096c301ff7190dd8d Mon Sep 17 00:00:00 2001 From: 
Daniele Nicolodi Date: Mon, 6 Feb 2023 12:27:44 +0100 Subject: [PATCH 305/730] Reconcile computation of isolated build environment paths (#11740) Use the same code to determine isolated environment paths at dependency install time and at environment setup time. We do not care about the exact paths but the paths needs to be consistent at package installation time and environment setup. This should fix issues observed on platforms that customize the installation schemes, such as Debian and Homebrew, where dependency installation and isolated build environment setup resolved to different paths. --- news/11740.bugfix.rst | 3 + src/pip/_internal/build_env.py | 18 +++--- src/pip/_internal/locations/__init__.py | 68 +---------------------- src/pip/_internal/locations/_distutils.py | 9 +-- src/pip/_internal/locations/_sysconfig.py | 12 ---- 5 files changed, 14 insertions(+), 96 deletions(-) create mode 100644 news/11740.bugfix.rst diff --git a/news/11740.bugfix.rst b/news/11740.bugfix.rst new file mode 100644 index 00000000000..917beb5354f --- /dev/null +++ b/news/11740.bugfix.rst @@ -0,0 +1,3 @@ +Improve handling of isolated build environments on platforms that +customize the Python's installation schemes, such as Debian and +Homebrew. 
diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py index 24bfa870b07..4f704a3547d 100644 --- a/src/pip/_internal/build_env.py +++ b/src/pip/_internal/build_env.py @@ -9,7 +9,7 @@ import textwrap from collections import OrderedDict from types import TracebackType -from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type +from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union from pip._vendor.certifi import where from pip._vendor.packaging.requirements import Requirement @@ -17,12 +17,7 @@ from pip import __file__ as pip_location from pip._internal.cli.spinners import open_spinner -from pip._internal.locations import ( - get_isolated_environment_bin_path, - get_isolated_environment_lib_paths, - get_platlib, - get_purelib, -) +from pip._internal.locations import get_platlib, get_purelib, get_scheme from pip._internal.metadata import get_default_environment, get_environment from pip._internal.utils.subprocess import call_subprocess from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds @@ -33,12 +28,17 @@ logger = logging.getLogger(__name__) +def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]: + return (a, b) if a != b else (a,) + + class _Prefix: def __init__(self, path: str) -> None: self.path = path self.setup = False - self.bin_dir = get_isolated_environment_bin_path(path) - self.lib_dirs = get_isolated_environment_lib_paths(path) + scheme = get_scheme("", prefix=path) + self.bin_dir = scheme.scripts + self.lib_dirs = _dedup(scheme.purelib, scheme.platlib) def get_runnable_pip() -> str: diff --git a/src/pip/_internal/locations/__init__.py b/src/pip/_internal/locations/__init__.py index 496844be142..d54bc63eba3 100644 --- a/src/pip/_internal/locations/__init__.py +++ b/src/pip/_internal/locations/__init__.py @@ -4,7 +4,7 @@ import pathlib import sys import sysconfig -from typing import Any, Dict, Generator, List, Optional, Tuple +from typing import Any, Dict, 
Generator, Optional, Tuple from pip._internal.models.scheme import SCHEME_KEYS, Scheme from pip._internal.utils.compat import WINDOWS @@ -25,8 +25,6 @@ "USER_CACHE_DIR", "get_bin_prefix", "get_bin_user", - "get_isolated_environment_bin_path", - "get_isolated_environment_lib_paths", "get_major_minor_version", "get_platlib", "get_purelib", @@ -467,67 +465,3 @@ def get_platlib() -> str: if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): _log_context() return old - - -def _deduplicated(v1: str, v2: str) -> List[str]: - """Deduplicate values from a list.""" - if v1 == v2: - return [v1] - return [v1, v2] - - -def _looks_like_apple_library(path: str) -> bool: - """Apple patches sysconfig to *always* look under */Library/Python*.""" - if sys.platform[:6] != "darwin": - return False - return path == f"/Library/Python/{get_major_minor_version()}/site-packages" - - -def get_isolated_environment_lib_paths(prefix: str) -> List[str]: - """Return the lib locations under ``prefix``.""" - new_pure, new_plat = _sysconfig.get_isolated_environment_lib_paths(prefix) - if _USE_SYSCONFIG: - return _deduplicated(new_pure, new_plat) - - old_pure, old_plat = _distutils.get_isolated_environment_lib_paths(prefix) - old_lib_paths = _deduplicated(old_pure, old_plat) - - # Apple's Python (shipped with Xcode and Command Line Tools) hard-code - # platlib and purelib to '/Library/Python/X.Y/site-packages'. This will - # cause serious build isolation bugs when Apple starts shipping 3.10 because - # pip will install build backends to the wrong location. This tells users - # who is at fault so Apple may notice it and fix the issue in time. - if all(_looks_like_apple_library(p) for p in old_lib_paths): - deprecated( - reason=( - "Python distributed by Apple's Command Line Tools incorrectly " - "patches sysconfig to always point to '/Library/Python'. This " - "will cause build isolation to operate incorrectly on Python " - "3.10 or later. 
Please help report this to Apple so they can " - "fix this. https://developer.apple.com/bug-reporting/" - ), - replacement=None, - gone_in=None, - ) - return old_lib_paths - - warned = [ - _warn_if_mismatch( - pathlib.Path(old_pure), - pathlib.Path(new_pure), - key="prefixed-purelib", - ), - _warn_if_mismatch( - pathlib.Path(old_plat), - pathlib.Path(new_plat), - key="prefixed-platlib", - ), - ] - if any(warned): - _log_context(prefix=prefix) - - return old_lib_paths - - -def get_isolated_environment_bin_path(prefix: str) -> str: - return _sysconfig.get_isolated_environment_paths(prefix)["scripts"] diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index a6fbcd2f09d..92bd93179c5 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -21,7 +21,7 @@ from distutils.command.install import SCHEME_KEYS from distutils.command.install import install as distutils_install_command from distutils.sysconfig import get_python_lib -from typing import Dict, List, Optional, Tuple, Union, cast +from typing import Dict, List, Optional, Union, cast from pip._internal.models.scheme import Scheme from pip._internal.utils.compat import WINDOWS @@ -171,10 +171,3 @@ def get_purelib() -> str: def get_platlib() -> str: return get_python_lib(plat_specific=True) - - -def get_isolated_environment_lib_paths(prefix: str) -> Tuple[str, str]: - return ( - get_python_lib(plat_specific=False, prefix=prefix), - get_python_lib(plat_specific=True, prefix=prefix), - ) diff --git a/src/pip/_internal/locations/_sysconfig.py b/src/pip/_internal/locations/_sysconfig.py index 38e400f94d4..97aef1f1ac2 100644 --- a/src/pip/_internal/locations/_sysconfig.py +++ b/src/pip/_internal/locations/_sysconfig.py @@ -211,15 +211,3 @@ def get_purelib() -> str: def get_platlib() -> str: return sysconfig.get_paths()["platlib"] - - -def get_isolated_environment_paths(prefix: str) -> typing.Dict[str, str]: - variables = {"base": 
prefix, "platbase": prefix} - if "venv" in sysconfig.get_scheme_names(): - return sysconfig.get_paths(vars=variables, scheme="venv") - return sysconfig.get_paths(vars=variables) - - -def get_isolated_environment_lib_paths(prefix: str) -> typing.Tuple[str, str]: - paths = get_isolated_environment_paths(prefix) - return (paths["purelib"], paths["platlib"]) From 3817aef07f4c8a0cb1c43bb9a73f1bb624fc263b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 17 Feb 2023 18:15:15 +0000 Subject: [PATCH 306/730] Bump for release --- NEWS.rst | 19 +++++++++++++++++++ news/11671.feature.rst | 1 - news/11740.bugfix.rst | 3 --- news/11773.bugfix.rst | 1 - news/11780.feature.rst | 2 -- src/pip/__init__.py | 2 +- 6 files changed, 20 insertions(+), 8 deletions(-) delete mode 100644 news/11671.feature.rst delete mode 100644 news/11740.bugfix.rst delete mode 100644 news/11773.bugfix.rst delete mode 100644 news/11780.feature.rst diff --git a/NEWS.rst b/NEWS.rst index 2e24fab37ad..3fb80be60f6 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,25 @@ .. towncrier release notes start +23.0.1 (2023-02-17) +=================== + +Features +-------- + +- Ignore PIP_REQUIRE_VIRTUALENV for ``pip index`` (`#11671 `_) +- Implement ``--break-system-packages`` to permit installing packages into + ``EXTERNALLY-MANAGED`` Python installations. (`#11780 `_) + +Bug Fixes +--------- + +- Improve handling of isolated build environments on platforms that + customize the Python's installation schemes, such as Debian and + Homebrew. (`#11740 `_) +- Do not crash in presence of misformatted hash field in ``direct_url.json``. 
(`#11773 `_) + + 23.0 (2023-01-30) ================= diff --git a/news/11671.feature.rst b/news/11671.feature.rst deleted file mode 100644 index 31f81f8da00..00000000000 --- a/news/11671.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Ignore PIP_REQUIRE_VIRTUALENV for ``pip index`` diff --git a/news/11740.bugfix.rst b/news/11740.bugfix.rst deleted file mode 100644 index 917beb5354f..00000000000 --- a/news/11740.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improve handling of isolated build environments on platforms that -customize the Python's installation schemes, such as Debian and -Homebrew. diff --git a/news/11773.bugfix.rst b/news/11773.bugfix.rst deleted file mode 100644 index 077bf061259..00000000000 --- a/news/11773.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Do not crash in presence of misformatted hash field in ``direct_url.json``. diff --git a/news/11780.feature.rst b/news/11780.feature.rst deleted file mode 100644 index b765de6c59a..00000000000 --- a/news/11780.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Implement ``--break-system-packages`` to permit installing packages into -``EXTERNALLY-MANAGED`` Python installations. 
diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 2aead080d0f..42f6c455c6e 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.0" +__version__ = "23.0.1" def main(args: Optional[List[str]] = None) -> int: From 110cd77b3d451b896bceee06eef4f38f33697bc7 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 17 Feb 2023 18:15:15 +0000 Subject: [PATCH 307/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 42f6c455c6e..ce90d06bfd4 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.0.1" +__version__ = "23.1.dev0" def main(args: Optional[List[str]] = None) -> int: From 45e5c418da0e11f5116506c576c776bdf11a9a92 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Fri, 17 Feb 2023 18:17:07 +0000 Subject: [PATCH 308/730] Drop news fragments from 23.0.1 --- news/11671.feature.rst | 1 - news/11740.bugfix.rst | 3 --- news/11773.bugfix.rst | 1 - news/11780.feature.rst | 2 -- 4 files changed, 7 deletions(-) delete mode 100644 news/11671.feature.rst delete mode 100644 news/11740.bugfix.rst delete mode 100644 news/11773.bugfix.rst delete mode 100644 news/11780.feature.rst diff --git a/news/11671.feature.rst b/news/11671.feature.rst deleted file mode 100644 index 31f81f8da00..00000000000 --- a/news/11671.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Ignore PIP_REQUIRE_VIRTUALENV for ``pip index`` diff --git a/news/11740.bugfix.rst b/news/11740.bugfix.rst deleted file mode 100644 index 917beb5354f..00000000000 --- a/news/11740.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improve handling of isolated build environments on platforms that -customize the Python's installation schemes, such as Debian and -Homebrew. 
diff --git a/news/11773.bugfix.rst b/news/11773.bugfix.rst deleted file mode 100644 index 077bf061259..00000000000 --- a/news/11773.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Do not crash in presence of misformatted hash field in ``direct_url.json``. diff --git a/news/11780.feature.rst b/news/11780.feature.rst deleted file mode 100644 index b765de6c59a..00000000000 --- a/news/11780.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Implement ``--break-system-packages`` to permit installing packages into -``EXTERNALLY-MANAGED`` Python installations. From 816cc815ec849a52f302fe276afcb0d78466dbe1 Mon Sep 17 00:00:00 2001 From: Michael Mintz Date: Mon, 20 Feb 2023 10:37:15 -0500 Subject: [PATCH 309/730] Create SECURITY.md to make the policy official --- SECURITY.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000000..4e423805aee --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,3 @@ +# Security and Vulnerability Reporting + +If you find any security issues, please report to [security@python.org](mailto:security@python.org) From 6ce2afb210347a9508e640ccde929cc182286204 Mon Sep 17 00:00:00 2001 From: Michael Mintz Date: Mon, 20 Feb 2023 11:09:59 -0500 Subject: [PATCH 310/730] Add the news fragment for SECURITY.md --- news/11809.doc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11809.doc.rst diff --git a/news/11809.doc.rst b/news/11809.doc.rst new file mode 100644 index 00000000000..68c49ea50d5 --- /dev/null +++ b/news/11809.doc.rst @@ -0,0 +1 @@ +Add SECURITY.md to make the policy official. 
From 1d0d5d4de4b3958d57dc8f4dd2df864a7142d797 Mon Sep 17 00:00:00 2001 From: Michael Mintz Date: Mon, 20 Feb 2023 11:14:10 -0500 Subject: [PATCH 311/730] Add SECURITY.md to MANIFEST.in --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index e0fba8222af..4716f415730 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,6 +2,7 @@ include AUTHORS.txt include LICENSE.txt include NEWS.rst include README.rst +include SECURITY.md include pyproject.toml include src/pip/_vendor/README.rst From c546c99480875cfe4cdeaefa6d16bad9998d0f70 Mon Sep 17 00:00:00 2001 From: Guillaume Seguin Date: Mon, 6 Jun 2022 11:39:20 -0700 Subject: [PATCH 312/730] Display dependency chain on each Collecting line This tremendously helps understand why a package is being fetched and can help investigate and fix dependency resolver backtracking issues when incoherent constraints/package sets are provided or when new versions of a package trigger a completely different backtracking strategy, leading to very hard to debug situations. --- news/11169.feature.rst | 1 + src/pip/_internal/operations/prepare.py | 10 +++++ tests/functional/test_install.py | 58 +++++++++++++++++++++++++ 3 files changed, 69 insertions(+) create mode 100644 news/11169.feature.rst diff --git a/news/11169.feature.rst b/news/11169.feature.rst new file mode 100644 index 00000000000..54cc6637bc6 --- /dev/null +++ b/news/11169.feature.rst @@ -0,0 +1 @@ +Display dependency chain on each Collecting/Processing log line. 
diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 4bf414cb005..343a01bef4b 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -270,6 +270,16 @@ def _log_preparing_link(self, req: InstallRequirement) -> None: message = "Collecting %s" information = str(req.req or req) + # If we used req.req, inject requirement source if available (this + # would already be included if we used req directly) + if req.req and req.comes_from: + if isinstance(req.comes_from, str): + comes_from: Optional[str] = req.comes_from + else: + comes_from = req.comes_from.from_path() + if comes_from: + information += f" (from {comes_from})" + if (message, information) != self._previous_requirement_header: self._previous_requirement_header = (message, information) logger.info(message, information) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 3fd9329bc6e..65ba1528860 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2393,3 +2393,61 @@ def find_distributions(self, context=None): result = script.pip("install", "example") assert "Requirement already satisfied: example in " in result.stdout + + +def test_install_pip_prints_req_chain_local(script: PipTestEnvironment) -> None: + """ + Test installing a local package with a dependency and check that the + dependency chain is reported. 
+ """ + + req_path = script.scratch_path.joinpath("requirements.txt") + req_path.write_text("base==0.1.0") + + create_basic_wheel_for_package( + script, + "base", + "0.1.0", + depends=["dep"], + ) + dep_path = create_basic_wheel_for_package( + script, + "dep", + "0.1.0", + ) + + result = script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + "-r", + req_path, + ) + assert_re_match( + rf"Processing .*{re.escape(os.path.basename(dep_path))} " + rf"\(from base==0.1.0->-r {re.escape(str(req_path))} \(line 1\)\)", + result.stdout, + ) + + +@pytest.mark.network +def test_install_pip_prints_req_chain_pypi(script: PipTestEnvironment) -> None: + """ + Test installing a package with a dependency from PyPI and check that the + dependency chain is reported. + """ + req_path = script.scratch_path.joinpath("requirements.txt") + req_path.write_text("Paste[openid]==1.7.5.1") + + result = script.pip( + "install", + "-r", + req_path, + ) + + assert ( + f"Collecting python-openid " + f"(from Paste[openid]==1.7.5.1->-r {req_path} (line 1))" in result.stdout + ) From 30fd549c77031275b9f2bde2b30a7f614780939c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filipe=20La=C3=ADns?= Date: Tue, 28 Feb 2023 00:33:06 +0000 Subject: [PATCH 313/730] Add -C as a short version of --config-settings (#11786) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Filipe Laíns --- news/short-config-settings-option.feature.rst | 1 + src/pip/_internal/cli/cmdoptions.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 news/short-config-settings-option.feature.rst diff --git a/news/short-config-settings-option.feature.rst b/news/short-config-settings-option.feature.rst new file mode 100644 index 00000000000..0da7f86373e --- /dev/null +++ b/news/short-config-settings-option.feature.rst @@ -0,0 +1 @@ +Add ``-C`` as a short version of the ``--config-settings`` option. 
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 1f804097e86..6a33cf48dc5 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -816,6 +816,7 @@ def _handle_config_settings( config_settings: Callable[..., Option] = partial( Option, + "-C", "--config-settings", dest="config_settings", type=str, From 4b14e7cd7c386839718e88c98b31137448d31e63 Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Sun, 5 Mar 2023 04:05:41 -0500 Subject: [PATCH 314/730] Remove no longer needed setuptools hack (#11837) --- news/11837.bugfix.rst | 1 + src/pip/_internal/resolution/resolvelib/provider.py | 11 ----------- 2 files changed, 1 insertion(+), 11 deletions(-) create mode 100644 news/11837.bugfix.rst diff --git a/news/11837.bugfix.rst b/news/11837.bugfix.rst new file mode 100644 index 00000000000..6d33ed6800c --- /dev/null +++ b/news/11837.bugfix.rst @@ -0,0 +1 @@ +More consistent resolution backtracking by removing legacy hack related to setuptools resolution diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py index b08cce7f333..315fb9c8902 100644 --- a/src/pip/_internal/resolution/resolvelib/provider.py +++ b/src/pip/_internal/resolution/resolvelib/provider.py @@ -179,16 +179,6 @@ def get_preference( # free, so we always do it first to avoid needless work if it fails. requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER - # HACK: Setuptools have a very long and solid backward compatibility - # track record, and extremely few projects would request a narrow, - # non-recent version range of it since that would break a lot things. - # (Most projects specify it only to request for an installer feature, - # which does not work, but that's another topic.) Intentionally - # delaying Setuptools helps reduce branches the resolver has to check. 
- # This serves as a temporary fix for issues like "apache-airflow[all]" - # while we work on "proper" branch pruning techniques. - delay_this = identifier == "setuptools" - # Prefer the causes of backtracking on the assumption that the problem # resolving the dependency tree is related to the failures that caused # the backtracking @@ -196,7 +186,6 @@ def get_preference( return ( not requires_python, - delay_this, not direct, not pinned, not backtrack_cause, From 5296a234c88e52cebe888cf534609477bbbc18f4 Mon Sep 17 00:00:00 2001 From: William Edwards Date: Sun, 5 Mar 2023 12:10:50 +0100 Subject: [PATCH 315/730] Add SSH username to Git over SSH documentation example ``` (testvenv) williams-air:testvenv williamedwards$ pip3 install 'foobar @ git+ssh://example.com/foobar' fatal: '/foobar' does not appear to be a git repository fatal: Could not read from remote repository. (testvenv) williams-air:testvenv williamedwards$ pip3 install 'foobar @ git+ssh://git@example.com/foobar' ... Successfully installed foobar-0.0 ``` --- docs/html/topics/vcs-support.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/topics/vcs-support.md b/docs/html/topics/vcs-support.md index faa0633cc7f..465d5ecb78c 100644 --- a/docs/html/topics/vcs-support.md +++ b/docs/html/topics/vcs-support.md @@ -18,7 +18,7 @@ The supported schemes are `git+file`, `git+https`, `git+ssh`, `git+http`, `git+git` and `git`. 
Here are some of the supported forms: ```none -MyProject @ git+ssh://git.example.com/MyProject +MyProject @ git+ssh://git@git.example.com/MyProject MyProject @ git+file:///home/user/projects/MyProject MyProject @ git+https://git.example.com/MyProject ``` From e0e54e3255f9bf1243db8027b46be16b433ff163 Mon Sep 17 00:00:00 2001 From: William Edwards Date: Sun, 5 Mar 2023 12:17:57 +0100 Subject: [PATCH 316/730] Add NEWS file --- news/11838.doc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11838.doc.rst diff --git a/news/11838.doc.rst b/news/11838.doc.rst new file mode 100644 index 00000000000..9630aa59885 --- /dev/null +++ b/news/11838.doc.rst @@ -0,0 +1 @@ +Add username to Git over SSH example. From 5189a6e0f42a9873ae023354b7cab66faff8cf1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 12 Mar 2023 15:29:01 +0100 Subject: [PATCH 317/730] --no-binary does not imply setup.py install anymore --- news/11451.removal.rst | 2 ++ src/pip/_internal/commands/install.py | 25 ++------------ src/pip/_internal/utils/deprecation.py | 13 ------- src/pip/_internal/wheel_builder.py | 24 ++----------- tests/functional/test_install.py | 11 ++---- tests/functional/test_install_config.py | 6 ++-- tests/unit/test_wheel_builder.py | 45 ++++++------------------- 7 files changed, 23 insertions(+), 103 deletions(-) create mode 100644 news/11451.removal.rst diff --git a/news/11451.removal.rst b/news/11451.removal.rst new file mode 100644 index 00000000000..c0d1100ed92 --- /dev/null +++ b/news/11451.removal.rst @@ -0,0 +1,2 @@ +``--no-binary`` does not imply ``setup.py install`` anymore. Instead a wheel will be +built locally and installed. 
diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index f24ef17f917..d25ec9631bc 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -7,7 +7,6 @@ from optparse import SUPPRESS_HELP, Values from typing import Iterable, List, Optional -from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.rich import print_json from pip._internal.cache import WheelCache @@ -22,7 +21,6 @@ from pip._internal.exceptions import CommandError, InstallationError from pip._internal.locations import get_scheme from pip._internal.metadata import get_environment -from pip._internal.models.format_control import FormatControl from pip._internal.models.installation_report import InstallationReport from pip._internal.operations.build.build_tracker import get_build_tracker from pip._internal.operations.check import ConflictDetails, check_install_conflicts @@ -52,26 +50,11 @@ running_under_virtualenv, virtualenv_no_global, ) -from pip._internal.wheel_builder import ( - BdistWheelAllowedPredicate, - build, - should_build_for_install_command, -) +from pip._internal.wheel_builder import build, should_build_for_install_command logger = getLogger(__name__) -def get_check_bdist_wheel_allowed( - format_control: FormatControl, -) -> BdistWheelAllowedPredicate: - def check_binary_allowed(req: InstallRequirement) -> bool: - canonical_name = canonicalize_name(req.name or "") - allowed_formats = format_control.get_allowed_formats(canonical_name) - return "binary" in allowed_formats - - return check_binary_allowed - - class InstallCommand(RequirementCommand): """ Install packages from: @@ -455,14 +438,10 @@ def run(self, options: Values, args: List[str]) -> int: modifying_pip = pip_req.satisfied_by is None protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) - check_bdist_wheel_allowed = get_check_bdist_wheel_allowed( - finder.format_control - ) - reqs_to_build = [ r for r in 
requirement_set.requirements.values() - if should_build_for_install_command(r, check_bdist_wheel_allowed) + if should_build_for_install_command(r) ] _, build_failures = build( diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index 18e9be9f36e..18d68f3ef94 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -173,16 +173,3 @@ def emit_deprecation(self, name: str) -> None: issue=8559, emit_before_install=True, ) - -LegacyInstallReasonNoBinaryForcesSetuptoolsInstall = LegacyInstallReason( - reason=( - "{name} is being installed using the legacy " - "'setup.py install' method, because the '--no-binary' option was enabled " - "for it and this currently disables local wheel building for projects that " - "don't have a 'pyproject.toml' file." - ), - replacement="to enable the '--use-pep517' option", - gone_in="23.1", - issue=11451, - emit_before_install=True, -) diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index 15b30af58e4..612c91ba317 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -5,7 +5,7 @@ import os.path import re import shutil -from typing import Callable, Iterable, List, Optional, Tuple +from typing import Iterable, List, Optional, Tuple from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version from pip._vendor.packaging.version import InvalidVersion, Version @@ -19,10 +19,7 @@ from pip._internal.operations.build.wheel_editable import build_wheel_editable from pip._internal.operations.build.wheel_legacy import build_wheel_legacy from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.deprecation import ( - LegacyInstallReasonMissingWheelPackage, - LegacyInstallReasonNoBinaryForcesSetuptoolsInstall, -) +from pip._internal.utils.deprecation import LegacyInstallReasonMissingWheelPackage from pip._internal.utils.logging import indent_log from 
pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed from pip._internal.utils.setuptools_build import make_setuptools_clean_args @@ -35,7 +32,6 @@ _egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE) -BdistWheelAllowedPredicate = Callable[[InstallRequirement], bool] BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] @@ -50,7 +46,6 @@ def _contains_egg_info(s: str) -> bool: def _should_build( req: InstallRequirement, need_wheel: bool, - check_bdist_wheel: Optional[BdistWheelAllowedPredicate] = None, ) -> bool: """Return whether an InstallRequirement should be built into a wheel.""" if req.constraint: @@ -81,16 +76,6 @@ def _should_build( if req.use_pep517: return True - assert check_bdist_wheel is not None - if not check_bdist_wheel(req): - # /!\ When we change this to unconditionally return True, we must also remove - # support for `--install-option`. Indeed, `--install-option` implies - # `--no-binary` so we can return False here and run `setup.py install`. - # `--global-option` and `--build-option` can remain until we drop support for - # building with `setup.py bdist_wheel`. 
- req.legacy_install_reason = LegacyInstallReasonNoBinaryForcesSetuptoolsInstall - return False - if not is_wheel_installed(): # we don't build legacy requirements if wheel is not installed req.legacy_install_reason = LegacyInstallReasonMissingWheelPackage @@ -107,11 +92,8 @@ def should_build_for_wheel_command( def should_build_for_install_command( req: InstallRequirement, - check_bdist_wheel_allowed: BdistWheelAllowedPredicate, ) -> bool: - return _should_build( - req, need_wheel=False, check_bdist_wheel=check_bdist_wheel_allowed - ) + return _should_build(req, need_wheel=False) def _should_cache( diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 65ba1528860..94272a63e54 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1665,12 +1665,9 @@ def test_install_no_binary_disables_building_wheels( # Wheels are built for local directories, but not cached across runs assert "Building wheel for requir" in str(res), str(res) # Don't build wheel for upper which was blacklisted - assert "Building wheel for upper" not in str(res), str(res) - # Wheels are built for local directories, but not cached across runs - assert "Running setup.py install for requir" not in str(res), str(res) + assert "Building wheel for upper" in str(res), str(res) # And these two fell back to sdist based installed. assert "Running setup.py install for wheelb" in str(res), str(res) - assert "Running setup.py install for upper" in str(res), str(res) @pytest.mark.network @@ -1720,10 +1717,8 @@ def test_install_no_binary_disables_cached_wheels( expect_stderr=True, ) assert "Successfully installed upper-2.0" in str(res), str(res) - # No wheel building for upper, which was blacklisted - assert "Building wheel for upper" not in str(res), str(res) - # Must have used source, not a cached wheel to install upper. 
- assert "Running setup.py install for upper" in str(res), str(res) + # upper is built and not obtained from cache + assert "Building wheel for upper" in str(res), str(res) def test_install_editable_with_wrong_egg_name( diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 99e59b97b18..cbf52b90b07 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -265,10 +265,8 @@ def test_install_no_binary_via_config_disables_cached_wheels( finally: os.unlink(config_file.name) assert "Successfully installed upper-2.0" in str(res), str(res) - # No wheel building for upper, which was blacklisted - assert "Building wheel for upper" not in str(res), str(res) - # Must have used source, not a cached wheel to install upper. - assert "Running setup.py install for upper" in str(res), str(res) + # upper is built and not obtained from cache + assert "Building wheel for upper" in str(res), str(res) @pytest.mark.skipif( diff --git a/tests/unit/test_wheel_builder.py b/tests/unit/test_wheel_builder.py index 5444056e790..9c322053688 100644 --- a/tests/unit/test_wheel_builder.py +++ b/tests/unit/test_wheel_builder.py @@ -58,63 +58,42 @@ def supports_pyproject_editable(self) -> bool: @pytest.mark.parametrize( - "req, disallow_bdist_wheel, expected", + "req, expected", [ - # When binaries are allowed, we build. - (ReqMock(use_pep517=True), False, True), - (ReqMock(use_pep517=False), False, True), - # When binaries are disallowed, we don't build, unless pep517 is - # enabled. - (ReqMock(use_pep517=True), True, True), - (ReqMock(use_pep517=False), True, False), + # We build, whether pep 517 is enabled or not. + (ReqMock(use_pep517=True), True), + (ReqMock(use_pep517=False), True), # We don't build constraints. - (ReqMock(constraint=True), False, False), + (ReqMock(constraint=True), False), # We don't build reqs that are already wheels. 
- (ReqMock(is_wheel=True), False, False), - (ReqMock(editable=True, use_pep517=False), False, False), + (ReqMock(is_wheel=True), False), + # We build editables if the backend supports PEP 660. + (ReqMock(editable=True, use_pep517=False), False), ( ReqMock(editable=True, use_pep517=True, supports_pyproject_editable=True), - False, True, ), ( ReqMock(editable=True, use_pep517=True, supports_pyproject_editable=False), False, - False, ), - (ReqMock(source_dir=None), False, False), + # We don't build if there is no source dir (whatever that means!). + (ReqMock(source_dir=None), False), # By default (i.e. when binaries are allowed), VCS requirements # should be built in install mode. ( ReqMock(link=Link("git+https://g.c/org/repo"), use_pep517=True), - False, - True, - ), - ( - ReqMock(link=Link("git+https://g.c/org/repo"), use_pep517=False), - False, - True, - ), - # Disallowing binaries, however, should cause them not to be built. - # unless pep517 is enabled. - ( - ReqMock(link=Link("git+https://g.c/org/repo"), use_pep517=True), - True, True, ), ( ReqMock(link=Link("git+https://g.c/org/repo"), use_pep517=False), True, - False, ), ], ) -def test_should_build_for_install_command( - req: ReqMock, disallow_bdist_wheel: bool, expected: bool -) -> None: +def test_should_build_for_install_command(req: ReqMock, expected: bool) -> None: should_build = wheel_builder.should_build_for_install_command( cast(InstallRequirement, req), - check_bdist_wheel_allowed=lambda req: not disallow_bdist_wheel, ) assert should_build is expected @@ -144,7 +123,6 @@ def test_should_build_legacy_wheel_not_installed(is_wheel_installed: mock.Mock) legacy_req = ReqMock(use_pep517=False) should_build = wheel_builder.should_build_for_install_command( cast(InstallRequirement, legacy_req), - check_bdist_wheel_allowed=lambda req: True, ) assert not should_build @@ -155,7 +133,6 @@ def test_should_build_legacy_wheel_installed(is_wheel_installed: mock.Mock) -> N legacy_req = ReqMock(use_pep517=False) 
should_build = wheel_builder.should_build_for_install_command( cast(InstallRequirement, legacy_req), - check_bdist_wheel_allowed=lambda req: True, ) assert should_build From 25a056dae638699809587a424f67756be3764feb Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade Date: Sun, 12 Mar 2023 18:55:38 +0200 Subject: [PATCH 318/730] Replace deprecated BadZipfile with BadZipFile --- src/pip/_internal/network/lazy_wheel.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/network/lazy_wheel.py b/src/pip/_internal/network/lazy_wheel.py index 854a6fa1fdc..82ec50d5106 100644 --- a/src/pip/_internal/network/lazy_wheel.py +++ b/src/pip/_internal/network/lazy_wheel.py @@ -6,7 +6,7 @@ from contextlib import contextmanager from tempfile import NamedTemporaryFile from typing import Any, Dict, Generator, List, Optional, Tuple -from zipfile import BadZipfile, ZipFile +from zipfile import BadZipFile, ZipFile from pip._vendor.packaging.utils import canonicalize_name from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response @@ -160,7 +160,7 @@ def _check_zip(self) -> None: # For read-only ZIP files, ZipFile only needs # methods read, seek, seekable and tell. 
ZipFile(self) # type: ignore - except BadZipfile: + except BadZipFile: pass else: break From 83c800d3b8b367b6ae1fbf92fd4f699612cecfc7 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Sun, 12 Mar 2023 20:25:47 +0100 Subject: [PATCH 319/730] =?UTF-8?q?=F0=9F=94=A7=20Add=20an=20explicit=20Ch?= =?UTF-8?q?ronographer=20config=20(#11848)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/chronographer.yml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 .github/chronographer.yml diff --git a/.github/chronographer.yml b/.github/chronographer.yml new file mode 100644 index 00000000000..b42883f8f4a --- /dev/null +++ b/.github/chronographer.yml @@ -0,0 +1,30 @@ +--- + +action-hints: + # check-title-prefix: chng # default: `{{ branch-protection-check-name }}: ` + external-docs-url: https://pip.pypa.io/how-to-changelog + inline-markdown: > + Check out https://pip.pypa.io/how-to-changelog + +branch-protection-check-name: Timeline protection + +enforce-name: + # suffix: .md + suffix: .rst + +exclude: + bots: + - dependabot-preview + - dependabot + - patchback + humans: + - pyup-bot + +labels: + skip-changelog: skip news + +paths: # relative modified file paths that do or don't need changelog mention + exclude: [] + include: [] + +... From 9a82bdfc527391bb306fc70814977a14e184f946 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Feb 2023 19:25:04 -0700 Subject: [PATCH 320/730] Quote extras to guard shells with glob qualifiers * Shells like zsh have glob qualifiers that will error if an extra is not quoted. While the glob qualifiers can be disabled, adding quotes guards against errors if people are copy-pasting or do not know that they can disable the behavior. * Use single quotes for Linux/Mac and use double quotes for Windows to follow existing style conventions. 
--- docs/html/cli/pip_install.rst | 18 +++++++++--------- news/11842.doc.rst | 2 ++ 2 files changed, 11 insertions(+), 9 deletions(-) create mode 100644 news/11842.doc.rst diff --git a/docs/html/cli/pip_install.rst b/docs/html/cli/pip_install.rst index 7c17c264a30..6e517c0fd04 100644 --- a/docs/html/cli/pip_install.rst +++ b/docs/html/cli/pip_install.rst @@ -386,21 +386,21 @@ Examples .. code-block:: shell - python -m pip install SomePackage[PDF] - python -m pip install "SomePackage[PDF] @ git+https://git.repo/SomePackage@main#subdirectory=subdir_path" - python -m pip install .[PDF] # project in current directory - python -m pip install SomePackage[PDF]==3.0 - python -m pip install SomePackage[PDF,EPUB] # multiple extras + python -m pip install 'SomePackage[PDF]' + python -m pip install 'SomePackage[PDF] @ git+https://git.repo/SomePackage@main#subdirectory=subdir_path' + python -m pip install '.[PDF]' # project in current directory + python -m pip install 'SomePackage[PDF]==3.0' + python -m pip install 'SomePackage[PDF,EPUB]' # multiple extras .. tab:: Windows .. code-block:: shell - py -m pip install SomePackage[PDF] + py -m pip install "SomePackage[PDF]" py -m pip install "SomePackage[PDF] @ git+https://git.repo/SomePackage@main#subdirectory=subdir_path" - py -m pip install .[PDF] # project in current directory - py -m pip install SomePackage[PDF]==3.0 - py -m pip install SomePackage[PDF,EPUB] # multiple extras + py -m pip install ".[PDF]" # project in current directory + py -m pip install "SomePackage[PDF]==3.0" + py -m pip install "SomePackage[PDF,EPUB]" # multiple extras #. Install a particular source archive file. diff --git a/news/11842.doc.rst b/news/11842.doc.rst new file mode 100644 index 00000000000..bd063996f54 --- /dev/null +++ b/news/11842.doc.rst @@ -0,0 +1,2 @@ +Quote extras in the pip install docs to guard shells with default glob +qualifiers, like zsh. 
From ced45695a08b3bfdb240b016e8cfcd72b0a3a56c Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 6 Mar 2023 00:26:29 -0600 Subject: [PATCH 321/730] Quote all install command components that aren't a bare package name * As a means to make things easier for new users, quote everything in a `pip install` command that is part of a package that isn't just a bare package name. * Use single quotes for Linux/Mac and use double quotes for Windows to follow existing style conventions. --- docs/html/cli/pip_install.rst | 48 +++++++++++++++++------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/docs/html/cli/pip_install.rst b/docs/html/cli/pip_install.rst index 6e517c0fd04..951dc2705a3 100644 --- a/docs/html/cli/pip_install.rst +++ b/docs/html/cli/pip_install.rst @@ -11,7 +11,7 @@ Usage .. tab:: Unix/macOS - .. pip-command-usage:: install "python -m pip" + .. pip-command-usage:: install 'python -m pip' .. tab:: Windows @@ -277,7 +277,7 @@ Examples .. code-block:: shell python -m pip install SomePackage # latest version - python -m pip install SomePackage==1.0.4 # specific version + python -m pip install 'SomePackage==1.0.4' # specific version python -m pip install 'SomePackage>=1.0.4' # minimum version .. tab:: Windows @@ -285,8 +285,8 @@ Examples .. code-block:: shell py -m pip install SomePackage # latest version - py -m pip install SomePackage==1.0.4 # specific version - py -m pip install 'SomePackage>=1.0.4' # minimum version + py -m pip install "SomePackage==1.0.4" # specific version + py -m pip install "SomePackage>=1.0.4" # minimum version #. Install a list of requirements specified in a file. See the :ref:`Requirements files `. @@ -349,13 +349,13 @@ Examples .. code-block:: shell - python -m pip install SomeProject@git+https://git.repo/some_pkg.git@1.3.1 + python -m pip install 'SomeProject@git+https://git.repo/some_pkg.git@1.3.1' .. tab:: Windows .. 
code-block:: shell - py -m pip install SomeProject@git+https://git.repo/some_pkg.git@1.3.1 + py -m pip install "SomeProject@git+https://git.repo/some_pkg.git@1.3.1" #. Install a project from VCS in "editable" mode. See the sections on :doc:`../topics/vcs-support` and :ref:`Editable Installs `. @@ -364,20 +364,20 @@ Examples .. code-block:: shell - python -m pip install -e git+https://git.repo/some_pkg.git#egg=SomePackage # from git - python -m pip install -e hg+https://hg.repo/some_pkg.git#egg=SomePackage # from mercurial - python -m pip install -e svn+svn://svn.repo/some_pkg/trunk/#egg=SomePackage # from svn - python -m pip install -e git+https://git.repo/some_pkg.git@feature#egg=SomePackage # from 'feature' branch - python -m pip install -e "git+https://git.repo/some_repo.git#egg=subdir&subdirectory=subdir_path" # install a python package from a repo subdirectory + python -m pip install -e 'git+https://git.repo/some_pkg.git#egg=SomePackage' # from git + python -m pip install -e 'hg+https://hg.repo/some_pkg.git#egg=SomePackage' # from mercurial + python -m pip install -e 'svn+svn://svn.repo/some_pkg/trunk/#egg=SomePackage' # from svn + python -m pip install -e 'git+https://git.repo/some_pkg.git@feature#egg=SomePackage' # from 'feature' branch + python -m pip install -e 'git+https://git.repo/some_repo.git#egg=subdir&subdirectory=subdir_path' # install a python package from a repo subdirectory .. tab:: Windows .. 
code-block:: shell - py -m pip install -e git+https://git.repo/some_pkg.git#egg=SomePackage # from git - py -m pip install -e hg+https://hg.repo/some_pkg.git#egg=SomePackage # from mercurial - py -m pip install -e svn+svn://svn.repo/some_pkg/trunk/#egg=SomePackage # from svn - py -m pip install -e git+https://git.repo/some_pkg.git@feature#egg=SomePackage # from 'feature' branch + py -m pip install -e "git+https://git.repo/some_pkg.git#egg=SomePackage" # from git + py -m pip install -e "hg+https://hg.repo/some_pkg.git#egg=SomePackage" # from mercurial + py -m pip install -e "svn+svn://svn.repo/some_pkg/trunk/#egg=SomePackage" # from svn + py -m pip install -e "git+https://git.repo/some_pkg.git@feature#egg=SomePackage" # from 'feature' branch py -m pip install -e "git+https://git.repo/some_repo.git#egg=subdir&subdirectory=subdir_path" # install a python package from a repo subdirectory #. Install a package with `extras`_. @@ -408,15 +408,15 @@ Examples .. code-block:: shell - python -m pip install ./downloads/SomePackage-1.0.4.tar.gz - python -m pip install http://my.package.repo/SomePackage-1.0.4.zip + python -m pip install './downloads/SomePackage-1.0.4.tar.gz' + python -m pip install 'http://my.package.repo/SomePackage-1.0.4.zip' .. tab:: Windows .. code-block:: shell - py -m pip install ./downloads/SomePackage-1.0.4.tar.gz - py -m pip install http://my.package.repo/SomePackage-1.0.4.zip + py -m pip install "./downloads/SomePackage-1.0.4.tar.gz" + py -m pip install "http://my.package.repo/SomePackage-1.0.4.zip" #. Install a particular source archive file following :pep:`440` direct references. @@ -424,17 +424,17 @@ Examples .. 
code-block:: shell - python -m pip install SomeProject@http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl - python -m pip install "SomeProject @ http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl" - python -m pip install SomeProject@http://my.package.repo/1.2.3.tar.gz + python -m pip install 'SomeProject@http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl' + python -m pip install 'SomeProject @ http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl' + python -m pip install 'SomeProject@http://my.package.repo/1.2.3.tar.gz' .. tab:: Windows .. code-block:: shell - py -m pip install SomeProject@http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl + py -m pip install "SomeProject@http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl" py -m pip install "SomeProject @ http://my.package.repo/SomeProject-1.2.3-py33-none-any.whl" - py -m pip install SomeProject@http://my.package.repo/1.2.3.tar.gz + py -m pip install "SomeProject@http://my.package.repo/1.2.3.tar.gz" #. Install from alternative package repositories. From 1268487cbab1b2ffab881b5961c0476e655b6c88 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filipe=20La=C3=ADns?= Date: Fri, 17 Mar 2023 21:23:19 +0000 Subject: [PATCH 322/730] Support passing the same key multiple times in --config-settings (#11853) --- news/11681.feature.rst | 4 ++++ src/pip/_internal/cli/cmdoptions.py | 8 +++++++- tests/unit/test_pyproject_config.py | 15 ++++++++++++--- 3 files changed, 23 insertions(+), 4 deletions(-) create mode 100644 news/11681.feature.rst diff --git a/news/11681.feature.rst b/news/11681.feature.rst new file mode 100644 index 00000000000..a0d918b19f9 --- /dev/null +++ b/news/11681.feature.rst @@ -0,0 +1,4 @@ +The ``--config-settings``/``-C`` option now supports using the same key multiple +times. 
When the same key is specified multiple times, all values are passed to +the build backend as a list, as opposed to the previous behavior where pip would +only pass the last value is the same key was used multiple times. diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 86fb284329f..6513bec388d 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -824,7 +824,13 @@ def _handle_config_settings( if dest is None: dest = {} setattr(parser.values, option.dest, dest) - dest[key] = val + if key in dest: + if isinstance(dest[key], list): + dest[key].append(val) + else: + dest[key] = [dest[key], val] + else: + dest[key] = val config_settings: Callable[..., Option] = partial( diff --git a/tests/unit/test_pyproject_config.py b/tests/unit/test_pyproject_config.py index 9937f3880aa..c7e46956055 100644 --- a/tests/unit/test_pyproject_config.py +++ b/tests/unit/test_pyproject_config.py @@ -1,3 +1,5 @@ +from typing import Dict, List + import pytest from pip._internal.commands import create_command @@ -36,9 +38,16 @@ def test_set_config_empty_value() -> None: assert options.config_settings == {"x": ""} -def test_replace_config_value() -> None: +@pytest.mark.parametrize( + ("passed", "expected"), + [ + (["x=hello", "x=world"], {"x": ["hello", "world"]}), + (["x=hello", "x=world", "x=other"], {"x": ["hello", "world", "other"]}), + ], +) +def test_multiple_config_values(passed: List[str], expected: Dict[str, str]) -> None: i = create_command("install") options, _ = i.parse_args( - ["xxx", "--config-settings", "x=hello", "--config-settings", "x=world"] + ["xxx", *(f"--config-settings={option}" for option in passed)] ) - assert options.config_settings == {"x": "world"} + assert options.config_settings == expected From e58fac10868df328bf6658c1e9e89237f2ec2a64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Filipe=20La=C3=ADns?= Date: Fri, 17 Mar 2023 21:24:55 +0000 Subject: [PATCH 323/730] Fix typo in the #11681 
news entry MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Filipe Laíns --- news/11681.feature.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/news/11681.feature.rst b/news/11681.feature.rst index a0d918b19f9..00cd05ee18d 100644 --- a/news/11681.feature.rst +++ b/news/11681.feature.rst @@ -1,4 +1,4 @@ The ``--config-settings``/``-C`` option now supports using the same key multiple times. When the same key is specified multiple times, all values are passed to -the build backend as a list, as opposed to the previous behavior where pip would -only pass the last value is the same key was used multiple times. +the build backend as a list, as opposed to the previous behavior, where pip would +only pass the last value if the same key was used multiple times. From afb743c01c095fe5f3fed96e0cd92fbea94542af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 29 Jan 2023 12:28:24 +0100 Subject: [PATCH 324/730] Update direct URL spec links --- docs/html/reference/inspect-report.md | 9 +++++---- docs/html/reference/installation-report.md | 7 ++++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/docs/html/reference/inspect-report.md b/docs/html/reference/inspect-report.md index 8f4811730f9..2bbef6e1757 100644 --- a/docs/html/reference/inspect-report.md +++ b/docs/html/reference/inspect-report.md @@ -50,10 +50,11 @@ the following properties: ``` - `direct_url`: Information about the direct URL that was used for installation, if any, - using the [direct - URL](https://packaging.python.org/en/latest/specifications/direct-url/) data - structure. In most case, this field corresponds to the `direct_url.json` metadata, - except for legacy editable installs, where it is emulated. + using the [direct URL data + structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/). 
+ In most cases, this field corresponds to the + [direct_url.json](https://packaging.python.org/en/latest/specifications/direct-url) + metadata, except for legacy editable installs, where it is emulated. - `requested`: `true` if the `REQUESTED` metadata is present, `false` otherwise. This field is only present for modern `.dist-info` installations. diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md index 6ed5f659cff..983ec48e759 100644 --- a/docs/html/reference/installation-report.md +++ b/docs/html/reference/installation-report.md @@ -58,9 +58,10 @@ package with the following properties: specifier. - `download_info`: Information about the artifact (to be) downloaded for installation, - using the [direct - URL](https://packaging.python.org/en/latest/specifications/direct-url/) data - structure. When `is_direct` is `true`, this field is the same as the `direct_url.json` + using the [direct URL data + structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/). + When `is_direct` is `true`, this field is the same as the + [direct_url.json](https://packaging.python.org/en/latest/specifications/direct-url) + metadata, otherwise it represents the URL of the artifact obtained from the index or `--find-links`.
From a04748db8e45025cef737ef04a8b4c3668c1d401 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 12 Mar 2023 13:45:41 +0100 Subject: [PATCH 325/730] Remove support for the deprecated --install-options --- .../reference/requirements-file-format.md | 14 +-- news/11358.removal.rst | 1 + src/pip/_internal/cli/cmdoptions.py | 11 --- src/pip/_internal/commands/install.py | 48 +---------- .../operations/install/editable_legacy.py | 5 +- .../_internal/operations/install/legacy.py | 2 - src/pip/_internal/req/__init__.py | 2 - src/pip/_internal/req/constructors.py | 3 - src/pip/_internal/req/req_file.py | 1 - src/pip/_internal/req/req_install.py | 43 ++-------- .../resolution/resolvelib/candidates.py | 3 - src/pip/_internal/utils/distutils_args.py | 43 ---------- src/pip/_internal/utils/setuptools_build.py | 8 +- tests/functional/test_install.py | 23 ----- tests/functional/test_install_reqs.py | 85 ------------------- tests/unit/test_command_install.py | 40 +-------- tests/unit/test_req_file.py | 19 +---- tests/unit/test_utils_distutils_args.py | 63 -------------- 18 files changed, 20 insertions(+), 394 deletions(-) create mode 100644 news/11358.removal.rst delete mode 100644 src/pip/_internal/utils/distutils_args.py delete mode 100644 tests/unit/test_utils_distutils_args.py diff --git a/docs/html/reference/requirements-file-format.md b/docs/html/reference/requirements-file-format.md index 75e6d0b1e6b..01047587161 100644 --- a/docs/html/reference/requirements-file-format.md +++ b/docs/html/reference/requirements-file-format.md @@ -109,7 +109,6 @@ and two {ref}`--find-links ` locations: The options which can be applied to individual requirements are: -- {ref}`--install-option ` - {ref}`--global-option ` - {ref}`--config-settings ` - `--hash` (for {ref}`Hash-checking mode`) @@ -161,7 +160,7 @@ This disables the use of wheels (cached or otherwise). 
This could mean that buil This mechanism is only preserved for backwards compatibility and should be considered deprecated. A future release of pip may drop these options. ``` -The `--global-option` and `--install-option` options are used to pass options to `setup.py`. +The `--global-option` option is used to pass options to `setup.py`. ```{attention} These options are highly coupled with how pip invokes setuptools using the {doc}`../reference/build-system/setup-py` build system interface. It is not compatible with newer {doc}`../reference/build-system/pyproject-toml` build system interface. @@ -171,15 +170,10 @@ This is will not work with other build-backends or newer setup.cfg-only projects If you have a declaration like: - FooProject >= 1.2 --global-option="--no-user-cfg" \ - --install-option="--prefix='/usr/local'" \ - --install-option="--no-compile" + FooProject >= 1.2 --global-option="--no-user-cfg" The above translates roughly into running FooProject's `setup.py` script as: - python setup.py --no-user-cfg install --prefix='/usr/local' --no-compile + python setup.py --no-user-cfg install -Note that the only way of giving more than one option to `setup.py` is through multiple `--global-option` and `--install-option` options, as shown in the example above. The value of each option is passed as a single argument to the `setup.py` script. Therefore, a line such as the following is invalid and would result in an installation error. - - # Invalid. Please use '--install-option' twice as shown above. - FooProject >= 1.2 --install-option="--prefix=/usr/local --no-compile" +Note that the only way of giving more than one option to `setup.py` is through multiple `--global-option` options. diff --git a/news/11358.removal.rst b/news/11358.removal.rst new file mode 100644 index 00000000000..23e388a9a39 --- /dev/null +++ b/news/11358.removal.rst @@ -0,0 +1 @@ +Remove support for the deprecated ``--install-options``. 
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 6513bec388d..2bbff2d4dc1 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -847,17 +847,6 @@ def _handle_config_settings( "to pass multiple keys to the backend.", ) -install_options: Callable[..., Option] = partial( - Option, - "--install-option", - dest="install_options", - action="append", - metavar="options", - help="This option is deprecated. Using this option with location-changing " - "options may cause unexpected behavior. " - "Use pip-level options like --user, --prefix, --root, and --target.", -) - build_options: Callable[..., Option] = partial( Option, "--build-option", diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index d25ec9631bc..41d22c72825 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -5,7 +5,7 @@ import shutil import site from optparse import SUPPRESS_HELP, Values -from typing import Iterable, List, Optional +from typing import List, Optional from pip._vendor.rich import print_json @@ -35,7 +35,6 @@ LegacyInstallReasonFailedBdistWheel, deprecated, ) -from pip._internal.utils.distutils_args import parse_distutils_args from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.logging import getLogger from pip._internal.utils.misc import ( @@ -206,7 +205,6 @@ def add_options(self) -> None: self.cmd_opts.add_option(cmdoptions.override_externally_managed()) self.cmd_opts.add_option(cmdoptions.config_settings()) - self.cmd_opts.add_option(cmdoptions.install_options()) self.cmd_opts.add_option(cmdoptions.global_options()) self.cmd_opts.add_option( @@ -297,8 +295,6 @@ def run(self, options: Values, args: List[str]) -> int: cmdoptions.check_dist_restriction(options, check_target=True) - install_options = options.install_options or [] - logger.verbose("Using %s", get_pip_version()) 
options.use_user_site = decide_user_install( options.use_user_site, @@ -378,8 +374,6 @@ def run(self, options: Values, args: List[str]) -> int: for req in reqs: req.permit_editable_wheels = True - reject_location_related_install_options(reqs, options.install_options) - preparer = self.make_requirement_preparer( temp_build_dir=directory, options=options, @@ -490,7 +484,6 @@ def run(self, options: Values, args: List[str]) -> int: installed = install_given_reqs( to_install, - install_options, global_options, root=options.root_path, home=target_temp_dir_path, @@ -761,45 +754,6 @@ def decide_user_install( return True -def reject_location_related_install_options( - requirements: List[InstallRequirement], options: Optional[List[str]] -) -> None: - """If any location-changing --install-option arguments were passed for - requirements or on the command-line, then show a deprecation warning. - """ - - def format_options(option_names: Iterable[str]) -> List[str]: - return ["--{}".format(name.replace("_", "-")) for name in option_names] - - offenders = [] - - for requirement in requirements: - install_options = requirement.install_options - location_options = parse_distutils_args(install_options) - if location_options: - offenders.append( - "{!r} from {}".format( - format_options(location_options.keys()), requirement - ) - ) - - if options: - location_options = parse_distutils_args(options) - if location_options: - offenders.append( - "{!r} from command line".format(format_options(location_options.keys())) - ) - - if not offenders: - return - - raise CommandError( - "Location-changing options found in --install-option: {}." 
- " This is unsupported, use pip-level options like --user," - " --prefix, --root, and --target instead.".format("; ".join(offenders)) - ) - - def create_os_error_message( error: OSError, show_traceback: bool, using_user_site: bool ) -> str: diff --git a/src/pip/_internal/operations/install/editable_legacy.py b/src/pip/_internal/operations/install/editable_legacy.py index bb548cdca75..bebe24e6d3a 100644 --- a/src/pip/_internal/operations/install/editable_legacy.py +++ b/src/pip/_internal/operations/install/editable_legacy.py @@ -1,7 +1,7 @@ """Legacy editable installation process, i.e. `setup.py develop`. """ import logging -from typing import List, Optional, Sequence +from typing import Optional, Sequence from pip._internal.build_env import BuildEnvironment from pip._internal.utils.logging import indent_log @@ -12,7 +12,7 @@ def install_editable( - install_options: List[str], + *, global_options: Sequence[str], prefix: Optional[str], home: Optional[str], @@ -31,7 +31,6 @@ def install_editable( args = make_setuptools_develop_args( setup_py_path, global_options=global_options, - install_options=install_options, no_user_config=isolated, prefix=prefix, home=home, diff --git a/src/pip/_internal/operations/install/legacy.py b/src/pip/_internal/operations/install/legacy.py index 290967dd6d5..0b108d0ca71 100644 --- a/src/pip/_internal/operations/install/legacy.py +++ b/src/pip/_internal/operations/install/legacy.py @@ -55,7 +55,6 @@ def prepend_root(path: str) -> str: def install( - install_options: List[str], global_options: Sequence[str], root: Optional[str], home: Optional[str], @@ -79,7 +78,6 @@ def install( install_args = make_setuptools_install_args( setup_py_path, global_options=global_options, - install_options=install_options, record_filename=record_filename, root=root, prefix=prefix, diff --git a/src/pip/_internal/req/__init__.py b/src/pip/_internal/req/__init__.py index 8d563596668..16de903a44c 100644 --- a/src/pip/_internal/req/__init__.py +++ 
b/src/pip/_internal/req/__init__.py @@ -36,7 +36,6 @@ def _validate_requirements( def install_given_reqs( requirements: List[InstallRequirement], - install_options: List[str], global_options: Sequence[str], root: Optional[str], home: Optional[str], @@ -71,7 +70,6 @@ def install_given_reqs( try: requirement.install( - install_options, global_options, root=root, home=home, diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index dea7c3b0116..854b1b058d8 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -222,7 +222,6 @@ def install_req_from_editable( constraint=constraint, use_pep517=use_pep517, isolated=isolated, - install_options=options.get("install_options", []) if options else [], global_options=options.get("global_options", []) if options else [], hash_options=options.get("hashes", {}) if options else {}, config_settings=config_settings, @@ -399,7 +398,6 @@ def install_req_from_line( markers=parts.markers, use_pep517=use_pep517, isolated=isolated, - install_options=options.get("install_options", []) if options else [], global_options=options.get("global_options", []) if options else [], hash_options=options.get("hashes", {}) if options else {}, config_settings=config_settings, @@ -493,7 +491,6 @@ def install_req_from_link_and_ireq( markers=ireq.markers, use_pep517=ireq.use_pep517, isolated=ireq.isolated, - install_options=ireq.install_options, global_options=ireq.global_options, hash_options=ireq.hash_options, config_settings=ireq.config_settings, diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index 11ec699acc5..f8f07b0cd96 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -69,7 +69,6 @@ # options to be passed to requirements SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [ - cmdoptions.install_options, cmdoptions.global_options, cmdoptions.hash, ] diff --git 
a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index bb38ec09da4..fa5620e1d6e 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -83,7 +83,7 @@ def __init__( markers: Optional[Marker] = None, use_pep517: Optional[bool] = None, isolated: bool = False, - install_options: Optional[List[str]] = None, + *, global_options: Optional[List[str]] = None, hash_options: Optional[Dict[str, List[str]]] = None, config_settings: Optional[Dict[str, str]] = None, @@ -146,7 +146,6 @@ def __init__( # Set to True after successful installation self.install_succeeded: Optional[bool] = None # Supplied options - self.install_options = install_options if install_options else [] self.global_options = global_options if global_options else [] self.hash_options = hash_options if hash_options else {} self.config_settings = config_settings @@ -746,7 +745,6 @@ def archive(self, build_dir: Optional[str]) -> None: def install( self, - install_options: List[str], global_options: Optional[Sequence[str]] = None, root: Optional[str] = None, home: Optional[str] = None, @@ -767,8 +765,7 @@ def install( global_options = global_options if global_options is not None else [] if self.editable and not self.is_wheel: install_editable_legacy( - install_options, - global_options, + global_options=global_options, prefix=prefix, home=home, use_user_site=use_user_site, @@ -808,13 +805,12 @@ def install( # TODO: Why don't we do this for editable installs? - # Extend the list of global and install options passed on to + # Extend the list of global options passed on to # the setup.py call with the ones from the requirements file. # Options specified in requirements file override those # specified on the command line, since the last option given # to setup.py is the one that is used. 
global_options = list(global_options) + self.global_options - install_options = list(install_options) + self.install_options try: if ( @@ -823,7 +819,6 @@ def install( ): self.legacy_install_reason.emit_deprecation(self.name) success = install_legacy( - install_options=install_options, global_options=global_options, root=root, home=home, @@ -893,15 +888,6 @@ def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> return False -def _install_option_ignored( - install_options: List[str], reqs: List[InstallRequirement] -) -> bool: - for req in reqs: - if (install_options or req.install_options) and not req.use_pep517: - return False - return True - - class LegacySetupPyOptionsCheckMode(Enum): INSTALL = 1 WHEEL = 2 @@ -913,34 +899,15 @@ def check_legacy_setup_py_options( reqs: List[InstallRequirement], mode: LegacySetupPyOptionsCheckMode, ) -> None: - has_install_options = _has_option(options, reqs, "install_options") has_build_options = _has_option(options, reqs, "build_options") has_global_options = _has_option(options, reqs, "global_options") - legacy_setup_py_options_present = ( - has_install_options or has_build_options or has_global_options - ) + legacy_setup_py_options_present = has_build_options or has_global_options if not legacy_setup_py_options_present: return options.format_control.disallow_binaries() logger.warning( "Implying --no-binary=:all: due to the presence of " - "--build-option / --global-option / --install-option. " + "--build-option / --global-option. " "Consider using --config-settings for more flexibility.", ) - if mode == LegacySetupPyOptionsCheckMode.INSTALL and has_install_options: - if _install_option_ignored(options.install_options, reqs): - logger.warning( - "Ignoring --install-option when building using PEP 517", - ) - else: - deprecated( - reason=( - "--install-option is deprecated because " - "it forces pip to use the 'setup.py install' " - "command which is itself deprecated." 
- ), - issue=11358, - replacement="to use --config-settings", - gone_in="23.1", - ) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index f5bc343b91b..7f09efc1539 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -66,7 +66,6 @@ def make_install_req_from_link( isolated=template.isolated, constraint=template.constraint, options=dict( - install_options=template.install_options, global_options=template.global_options, hashes=template.hash_options, ), @@ -90,7 +89,6 @@ def make_install_req_from_editable( constraint=template.constraint, permit_editable_wheels=template.permit_editable_wheels, options=dict( - install_options=template.install_options, global_options=template.global_options, hashes=template.hash_options, ), @@ -115,7 +113,6 @@ def _make_install_req_from_dist( isolated=template.isolated, constraint=template.constraint, options=dict( - install_options=template.install_options, global_options=template.global_options, hashes=template.hash_options, ), diff --git a/src/pip/_internal/utils/distutils_args.py b/src/pip/_internal/utils/distutils_args.py deleted file mode 100644 index 2fd1862073f..00000000000 --- a/src/pip/_internal/utils/distutils_args.py +++ /dev/null @@ -1,43 +0,0 @@ -from getopt import GetoptError, getopt -from typing import Dict, List - -_options = [ - "exec-prefix=", - "home=", - "install-base=", - "install-data=", - "install-headers=", - "install-lib=", - "install-platlib=", - "install-purelib=", - "install-scripts=", - "prefix=", - "root=", - "user", -] - - -def parse_distutils_args(args: List[str]) -> Dict[str, str]: - """Parse provided arguments, returning an object that has the matched arguments. - - Any unknown arguments are ignored. 
- """ - result = {} - for arg in args: - try: - parsed_opt, _ = getopt(args=[arg], shortopts="", longopts=_options) - except GetoptError: - # We don't care about any other options, which here may be - # considered unrecognized since our option list is not - # exhaustive. - continue - - if not parsed_opt: - continue - - option = parsed_opt[0] - name_from_parsed = option[0][2:].replace("-", "_") - value_from_parsed = option[1] or "true" - result[name_from_parsed] = value_from_parsed - - return result diff --git a/src/pip/_internal/utils/setuptools_build.py b/src/pip/_internal/utils/setuptools_build.py index 01ef4a4ca59..0662915cb05 100644 --- a/src/pip/_internal/utils/setuptools_build.py +++ b/src/pip/_internal/utils/setuptools_build.py @@ -103,8 +103,8 @@ def make_setuptools_clean_args( def make_setuptools_develop_args( setup_py_path: str, + *, global_options: Sequence[str], - install_options: Sequence[str], no_user_config: bool, prefix: Optional[str], home: Optional[str], @@ -120,8 +120,6 @@ def make_setuptools_develop_args( args += ["develop", "--no-deps"] - args += install_options - if prefix: args += ["--prefix", prefix] if home is not None: @@ -150,8 +148,8 @@ def make_setuptools_egg_info_args( def make_setuptools_install_args( setup_py_path: str, + *, global_options: Sequence[str], - install_options: Sequence[str], record_filename: str, root: Optional[str], prefix: Optional[str], @@ -190,6 +188,4 @@ def make_setuptools_install_args( if header_dir: args += ["--install-headers", header_dir] - args += install_options - return args diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 94272a63e54..5d2f78c25ec 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -857,29 +857,6 @@ def test_install_with_hacked_egg_info( assert "Successfully installed hackedegginfo-0.0.0\n" in result.stdout -@pytest.mark.network -def test_install_using_install_option_and_editable( - script: PipTestEnvironment, tmpdir: 
Path -) -> None: - """ - Test installing a tool using -e and --install-option - """ - folder = "script_folder" - script.scratch_path.joinpath(folder).mkdir() - url = local_checkout("git+https://github.com/pypa/pip-test-package", tmpdir) - result = script.pip( - "install", - "-e", - f"{url}#egg=pip-test-package", - f"--install-option=--script-dir={folder}", - expect_stderr=True, - ) - script_file = ( - script.venv / "src/pip-test-package" / folder / f"pip-test-package{script.exe}" - ) - result.did_create(script_file) - - @pytest.mark.xfail @pytest.mark.network @need_mercurial diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 14e1056ae7a..5490b301cd0 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -332,33 +332,6 @@ def test_wheel_user_with_prefix_in_pydistutils_cfg( assert "installed requiresupper" in result.stdout -def test_install_option_in_requirements_file_overrides_cli( - script: PipTestEnvironment, - arg_recording_sdist_maker: Callable[[str], ArgRecordingSdist], -) -> None: - simple_sdist = arg_recording_sdist_maker("simple") - - reqs_file = script.scratch_path.joinpath("reqs.txt") - reqs_file.write_text("simple --install-option='-O0'") - - result = script.pip( - "install", - "--no-index", - "-f", - str(simple_sdist.sdist_path.parent), - "-r", - str(reqs_file), - "--install-option=-O1", - allow_stderr_warning=True, - ) - simple_args = simple_sdist.args() - assert "install" in simple_args - assert simple_args.index("-O1") < simple_args.index("-O0") - assert "Implying --no-binary=:all:" in result.stderr - assert "Consider using --config-settings" in result.stderr - assert "--install-option is deprecated" in result.stderr - - def test_constraints_not_installed_by_default( script: PipTestEnvironment, data: TestData ) -> None: @@ -759,61 +732,3 @@ def test_install_unsupported_wheel_file( in result.stderr ) assert len(result.files_created) == 0 - - -def 
test_install_options_local_to_package( - script: PipTestEnvironment, - arg_recording_sdist_maker: Callable[[str], ArgRecordingSdist], -) -> None: - """Make sure --install-options does not leak across packages. - - A requirements.txt file can have per-package --install-options; these - should be isolated to just the package instead of leaking to subsequent - packages. This needs to be a functional test because the bug was around - cross-contamination at install time. - """ - - simple1_sdist = arg_recording_sdist_maker("simple1") - simple2_sdist = arg_recording_sdist_maker("simple2") - - reqs_file = script.scratch_path.joinpath("reqs.txt") - reqs_file.write_text( - textwrap.dedent( - """ - simple1 --install-option='-O0' - simple2 - """ - ) - ) - script.pip( - "install", - "--no-index", - "-f", - str(simple1_sdist.sdist_path.parent), - "-r", - reqs_file, - allow_stderr_warning=True, - ) - - simple1_args = simple1_sdist.args() - assert "install" in simple1_args - assert "-O0" in simple1_args - simple2_args = simple2_sdist.args() - assert "install" in simple2_args - assert "-O0" not in simple2_args - - -def test_location_related_install_option_fails(script: PipTestEnvironment) -> None: - simple_sdist = create_basic_sdist_for_package(script, "simple", "0.1.0") - reqs_file = script.scratch_path.joinpath("reqs.txt") - reqs_file.write_text("simple --install-option='--home=/tmp'") - result = script.pip( - "install", - "--no-index", - "-f", - str(simple_sdist.parent), - "-r", - reqs_file, - expect_error=True, - ) - assert "['--home'] from simple" in result.stderr diff --git a/tests/unit/test_command_install.py b/tests/unit/test_command_install.py index 69792dd9839..5e7889fe16b 100644 --- a/tests/unit/test_command_install.py +++ b/tests/unit/test_command_install.py @@ -2,16 +2,9 @@ from unittest import mock import pytest -from pip._vendor.packaging.requirements import Requirement from pip._internal.commands import install -from pip._internal.commands.install import ( - 
create_os_error_message, - decide_user_install, - reject_location_related_install_options, -) -from pip._internal.exceptions import CommandError -from pip._internal.req.req_install import InstallRequirement +from pip._internal.commands.install import create_os_error_message, decide_user_install class TestDecideUserInstall: @@ -48,37 +41,6 @@ def test_most_cases( assert decide_user_install(use_user_site=None) is result -def test_rejection_for_pip_install_options() -> None: - install_options = ["--prefix=/hello"] - with pytest.raises(CommandError) as e: - reject_location_related_install_options([], install_options) - - assert "['--prefix'] from command line" in str(e.value) - - -def test_rejection_for_location_requirement_options() -> None: - bad_named_req_options = ["--home=/wow"] - bad_named_req = InstallRequirement( - Requirement("hello"), "requirements.txt", install_options=bad_named_req_options - ) - - bad_unnamed_req_options = ["--install-lib=/lib"] - bad_unnamed_req = InstallRequirement( - None, "requirements2.txt", install_options=bad_unnamed_req_options - ) - - with pytest.raises(CommandError) as e: - reject_location_related_install_options( - [bad_named_req, bad_unnamed_req], options=[] - ) - - assert ( - "['--install-lib'] from (from requirements2.txt)" - in str(e.value) - ) - assert "['--home'] from hello (from requirements.txt)" in str(e.value) - - @pytest.mark.parametrize( "error, show_traceback, using_user_site, expected", [ diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 228d0aaa49c..30cbcf71c16 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -344,14 +344,10 @@ def test_nested_constraints_file( assert reqs[0].constraint def test_options_on_a_requirement_line(self, line_processor: LineProcessor) -> None: - line = ( - "SomeProject --install-option=yo1 --install-option yo2 " - '--global-option="yo3" --global-option "yo4"' - ) + line = 'SomeProject --global-option="yo3" --global-option "yo4"' 
filename = "filename" req = line_processor(line, filename, 1)[0] assert req.global_options == ["yo3", "yo4"] - assert req.install_options == ["yo1", "yo2"] def test_hash_options(self, line_processor: LineProcessor) -> None: """Test the --hash option: mostly its value storage. @@ -870,14 +866,12 @@ def test_install_requirements_with_options( options: mock.Mock, ) -> None: global_option = "--dry-run" - install_option = "--prefix=/opt" content = """ --only-binary :all: - INITools==2.0 --global-option="{global_option}" \ - --install-option "{install_option}" + INITools==2.0 --global-option="{global_option}" """.format( - global_option=global_option, install_option=install_option + global_option=global_option ) with requirements_file(content, tmpdir) as reqs_file: @@ -897,9 +891,4 @@ def test_install_requirements_with_options( last_call = popen.call_args_list[-1] args = last_call[0][0] - assert ( - 0 - < args.index(global_option) - < args.index("install") - < args.index(install_option) - ) + assert 0 < args.index(global_option) < args.index("install") diff --git a/tests/unit/test_utils_distutils_args.py b/tests/unit/test_utils_distutils_args.py deleted file mode 100644 index 21f31e926f2..00000000000 --- a/tests/unit/test_utils_distutils_args.py +++ /dev/null @@ -1,63 +0,0 @@ -import pytest - -from pip._internal.utils.distutils_args import parse_distutils_args - - -def test_unknown_option_is_ok() -> None: - result = parse_distutils_args(["--foo"]) - assert not result - - -def test_option_is_returned() -> None: - result = parse_distutils_args(["--prefix=hello"]) - assert result["prefix"] == "hello" - - -def test_options_are_clobbered() -> None: - # Matches the current setuptools behavior that the last argument - # wins. 
- result = parse_distutils_args(["--prefix=hello", "--prefix=world"]) - assert result["prefix"] == "world" - - -def test_multiple_options_work() -> None: - result = parse_distutils_args(["--prefix=hello", "--root=world"]) - assert result["prefix"] == "hello" - assert result["root"] == "world" - - -def test_multiple_invocations_do_not_keep_options() -> None: - result = parse_distutils_args(["--prefix=hello1"]) - assert len(result) == 1 - assert result["prefix"] == "hello1" - - result = parse_distutils_args(["--root=world1"]) - assert len(result) == 1 - assert result["root"] == "world1" - - -@pytest.mark.parametrize( - "name,value", - [ - ("exec-prefix", "1"), - ("home", "2"), - ("install-base", "3"), - ("install-data", "4"), - ("install-headers", "5"), - ("install-lib", "6"), - ("install-platlib", "7"), - ("install-purelib", "8"), - ("install-scripts", "9"), - ("prefix", "10"), - ("root", "11"), - ], -) -def test_all_value_options_work(name: str, value: str) -> None: - result = parse_distutils_args([f"--{name}={value}"]) - key_name = name.replace("-", "_") - assert result[key_name] == value - - -def test_user_option_works() -> None: - result = parse_distutils_args(["--user"]) - assert result["user"] From 7047330b0be9594ddf404618a3fd816f637d4021 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 15:26:43 +0100 Subject: [PATCH 326/730] Simplify check_legacy_setup_py_options --- src/pip/_internal/commands/download.py | 9 ++------- src/pip/_internal/commands/install.py | 5 +---- src/pip/_internal/commands/wheel.py | 5 +---- src/pip/_internal/req/req_install.py | 25 +++++++------------------ 4 files changed, 11 insertions(+), 33 deletions(-) diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index 4132e089883..90388d11857 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -8,10 +8,7 @@ from pip._internal.cli.req_command import 
RequirementCommand, with_cleanup from pip._internal.cli.status_codes import SUCCESS from pip._internal.operations.build.build_tracker import get_build_tracker -from pip._internal.req.req_install import ( - LegacySetupPyOptionsCheckMode, - check_legacy_setup_py_options, -) +from pip._internal.req.req_install import check_legacy_setup_py_options from pip._internal.utils.misc import ensure_dir, normalize_path, write_output from pip._internal.utils.temp_dir import TempDirectory @@ -109,9 +106,7 @@ def run(self, options: Values, args: List[str]) -> int: ) reqs = self.get_requirements(args, options, finder, session) - check_legacy_setup_py_options( - options, reqs, LegacySetupPyOptionsCheckMode.DOWNLOAD - ) + check_legacy_setup_py_options(options, reqs) preparer = self.make_requirement_preparer( temp_build_dir=directory, diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 41d22c72825..4783f807fca 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -27,7 +27,6 @@ from pip._internal.req import install_given_reqs from pip._internal.req.req_install import ( InstallRequirement, - LegacySetupPyOptionsCheckMode, check_legacy_setup_py_options, ) from pip._internal.utils.compat import WINDOWS @@ -345,9 +344,7 @@ def run(self, options: Values, args: List[str]) -> int: try: reqs = self.get_requirements(args, options, finder, session) - check_legacy_setup_py_options( - options, reqs, LegacySetupPyOptionsCheckMode.INSTALL - ) + check_legacy_setup_py_options(options, reqs) if "no-binary-enable-wheel-cache" in options.features_enabled: # TODO: remove format_control from WheelCache when the deprecation cycle diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index 1afbd562c65..a8483559c19 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -12,7 +12,6 @@ from pip._internal.operations.build.build_tracker import 
get_build_tracker from pip._internal.req.req_install import ( InstallRequirement, - LegacySetupPyOptionsCheckMode, check_legacy_setup_py_options, ) from pip._internal.utils.deprecation import deprecated @@ -122,9 +121,7 @@ def run(self, options: Values, args: List[str]) -> int: ) reqs = self.get_requirements(args, options, finder, session) - check_legacy_setup_py_options( - options, reqs, LegacySetupPyOptionsCheckMode.WHEEL - ) + check_legacy_setup_py_options(options, reqs) if "no-binary-enable-wheel-cache" in options.features_enabled: # TODO: remove format_control from WheelCache when the deprecation cycle diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index fa5620e1d6e..9807f690f37 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -8,7 +8,6 @@ import sys import uuid import zipfile -from enum import Enum from optparse import Values from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union @@ -888,26 +887,16 @@ def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> return False -class LegacySetupPyOptionsCheckMode(Enum): - INSTALL = 1 - WHEEL = 2 - DOWNLOAD = 3 - - def check_legacy_setup_py_options( options: Values, reqs: List[InstallRequirement], - mode: LegacySetupPyOptionsCheckMode, ) -> None: has_build_options = _has_option(options, reqs, "build_options") has_global_options = _has_option(options, reqs, "global_options") - legacy_setup_py_options_present = has_build_options or has_global_options - if not legacy_setup_py_options_present: - return - - options.format_control.disallow_binaries() - logger.warning( - "Implying --no-binary=:all: due to the presence of " - "--build-option / --global-option. " - "Consider using --config-settings for more flexibility.", - ) + if has_build_options or has_global_options: + logger.warning( + "Implying --no-binary=:all: due to the presence of " + "--build-option / --global-option. 
" + "Consider using --config-settings for more flexibility.", + ) + options.format_control.disallow_binaries() From a878d7f0930d9f1340ab6a69691787ce11977324 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 20 Mar 2023 07:34:21 +0300 Subject: [PATCH 327/730] dont propagate flag --- src/pip/_internal/req/req_install.py | 9 --------- tests/functional/test_install_reqs.py | 4 ++-- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 2b589713c97..9807f690f37 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -148,15 +148,6 @@ def __init__( self.global_options = global_options if global_options else [] self.hash_options = hash_options if hash_options else {} self.config_settings = config_settings - if isinstance(comes_from, InstallRequirement) and comes_from.config_settings: - # 1. If a user-requested package has config settings, those are always used. - # 2. If a user-requested package does not have user-specified config - # settings, but is another package’s transitive dependency, it would - # inherit the dependant’s config settings. - # 3. A transitive cannot have user-specified config settings. - # 4. If a transitive dependency’s dependant has config settings, - # the config settings are inherited. 
- self.config_settings = comes_from.config_settings # Set to True after successful preparation of this requirement self.prepared = False # User supplied requirement are explicitly requested for installation diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index baad3fc79cd..7fae65de517 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -817,8 +817,8 @@ def test_config_settings_local_to_package( simple1_args = simple1_sdist.args() assert "--verbose" in simple1_args bar_args = bar_sdist.args() - assert "--verbose" in bar_args + assert "--verbose" not in bar_args simple3_args = simple3_sdist.args() - assert "--verbose" in simple3_args + assert "--verbose" not in simple3_args simple2_args = simple2_sdist.args() assert "--verbose" not in simple2_args From 8b02c98bbcd4370c8a25b107fe9b643d5b77beae Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 20 Mar 2023 08:07:26 +0300 Subject: [PATCH 328/730] test merging multiple values --- tests/functional/test_config_settings.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index cc7ec420341..c6a84221379 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -173,10 +173,11 @@ def test_install_config_reqs(script: PipTestEnvironment) -> None: {"pyproject.toml": PYPROJECT_TOML, "backend/dummy_backend.py": BACKEND_SRC}, ) script.scratch_path.joinpath("reqs.txt").write_text( - "foo --config-settings FOO=Hello" + 'foo --config-settings "--build-option=--cffi" ' + '--config-settings "--build-option=--avx2"' ) script.pip("install", "--no-index", "-f", str(a_sdist.parent), "-r", "reqs.txt") script.assert_installed(foo="1.0") config = script.site_packages_path / "config.json" with open(config, "rb") as f: - assert json.load(f) == {"FOO": "Hello"} + assert 
json.load(f) == {"--build-option": ["--cffi", "--avx2"]} From 2f6f36c2e56e0519318630db8218c792f9b93533 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 20 Mar 2023 11:48:44 +0300 Subject: [PATCH 329/730] merge cli and reqs config_settings --- src/pip/_internal/cli/req_command.py | 14 +++++++++--- src/pip/_internal/utils/misc.py | 19 ++++++++++++++++- tests/functional/test_config_settings.py | 27 ++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 4b1f3365cc7..3ce4f1eeca2 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -34,6 +34,7 @@ from pip._internal.req.req_install import InstallRequirement from pip._internal.resolution.base import BaseResolver from pip._internal.self_outdated_check import pip_self_version_check +from pip._internal.utils.misc import merge_config_settings from pip._internal.utils.temp_dir import ( TempDirectory, TempDirectoryTypeRegistry, @@ -434,14 +435,21 @@ def get_requirements( for parsed_req in parse_requirements( filename, finder=finder, options=options, session=session ): + config_settings = ( + parsed_req.options.get("config_settings") + if parsed_req.options + else None + ) + if config_settings and options.config_settings: + config_settings = merge_config_settings( + config_settings, options.config_settings + ) req_to_add = install_req_from_parsed_requirement( parsed_req, isolated=options.isolated_mode, use_pep517=options.use_pep517, user_supplied=True, - config_settings=parsed_req.options.get("config_settings") - if parsed_req.options - else None, + config_settings=config_settings, ) requirements.append(req_to_add) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index baa1ba7eac2..63a8c3e1a4b 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -14,6 +14,8 @@ import sys 
import sysconfig import urllib.parse +from collections import defaultdict +from collections.abc import Iterable from io import StringIO from itertools import filterfalse, tee, zip_longest from types import TracebackType @@ -24,7 +26,6 @@ ContextManager, Dict, Generator, - Iterable, Iterator, List, Optional, @@ -32,6 +33,7 @@ Tuple, Type, TypeVar, + Union, cast, ) @@ -60,6 +62,7 @@ "remove_auth_from_url", "check_externally_managed", "ConfiguredBuildBackendHookCaller", + "merge_config_settings", ] logger = logging.getLogger(__name__) @@ -737,3 +740,17 @@ def prepare_metadata_for_build_editable( config_settings=cs, _allow_fallback=_allow_fallback, ) + + +def merge_config_settings( + reqs_settings: Dict[str, Union[str, List[str]]], + cli_settings: Dict[str, Union[str, List[str]]], +) -> Dict[str, Union[str, List[str]]]: + dd = defaultdict(list) + for d in (reqs_settings, cli_settings): + for k, v in d.items(): + if isinstance(v, list): + dd[k].extend(v) + else: + dd[k].append(v) + return dict(dd) diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index c6a84221379..0142fcb220a 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -181,3 +181,30 @@ def test_install_config_reqs(script: PipTestEnvironment) -> None: config = script.site_packages_path / "config.json" with open(config, "rb") as f: assert json.load(f) == {"--build-option": ["--cffi", "--avx2"]} + + +def test_merge_cli_reqs_config_settings(script: PipTestEnvironment) -> None: + _, _, project_dir = make_project(script.scratch_path) + a_sdist = create_basic_sdist_for_package( + script, + "foo", + "1.0", + {"pyproject.toml": PYPROJECT_TOML, "backend/dummy_backend.py": BACKEND_SRC}, + ) + script.scratch_path.joinpath("reqs.txt").write_text( + 'foo --config-settings "FOO=HELLO" --config-settings "FOO=BAR"' + ) + script.pip( + "install", + "--no-index", + "-f", + str(a_sdist.parent), + "-r", + "reqs.txt", + 
"--config-settings", + "FOO=FOOBAR", + ) + script.assert_installed(foo="1.0") + config = script.site_packages_path / "config.json" + with open(config, "rb") as f: + assert json.load(f) == {"FOO": ["HELLO", "BAR", "FOOBAR"]} From d41b266bc4edf11d631114e9c4c2115872a0afd9 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 20 Mar 2023 12:38:06 +0300 Subject: [PATCH 330/730] fix import --- src/pip/_internal/utils/misc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 63a8c3e1a4b..6a2b67b0c85 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -15,7 +15,6 @@ import sysconfig import urllib.parse from collections import defaultdict -from collections.abc import Iterable from io import StringIO from itertools import filterfalse, tee, zip_longest from types import TracebackType @@ -26,6 +25,7 @@ ContextManager, Dict, Generator, + Iterable, Iterator, List, Optional, From 146b00030803b5248852b60ec97e47f84e7194bc Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 20 Mar 2023 16:32:42 +0300 Subject: [PATCH 331/730] fix merge_config_settings --- src/pip/_internal/utils/misc.py | 14 ++++++++------ tests/functional/test_config_settings.py | 5 +++-- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 6a2b67b0c85..600763c6ad0 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -14,7 +14,6 @@ import sys import sysconfig import urllib.parse -from collections import defaultdict from io import StringIO from itertools import filterfalse, tee, zip_longest from types import TracebackType @@ -746,11 +745,14 @@ def merge_config_settings( reqs_settings: Dict[str, Union[str, List[str]]], cli_settings: Dict[str, Union[str, List[str]]], ) -> Dict[str, Union[str, List[str]]]: - dd = 
defaultdict(list) + dd: Dict[str, Union[str, List[str]]] = {} for d in (reqs_settings, cli_settings): for k, v in d.items(): - if isinstance(v, list): - dd[k].extend(v) + if k in dd: + if isinstance(dd[k], list): + dd[k].append(v) + else: + dd[k] = [dd[k], v] else: - dd[k].append(v) - return dict(dd) + dd[k] = v + return dd diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index 0142fcb220a..507509458b0 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -192,7 +192,8 @@ def test_merge_cli_reqs_config_settings(script: PipTestEnvironment) -> None: {"pyproject.toml": PYPROJECT_TOML, "backend/dummy_backend.py": BACKEND_SRC}, ) script.scratch_path.joinpath("reqs.txt").write_text( - 'foo --config-settings "FOO=HELLO" --config-settings "FOO=BAR"' + 'foo --config-settings "FOO=HELLO" --config-settings "FOO=BAR" ' + '--config-settings "BAZ=BAR"' ) script.pip( "install", @@ -207,4 +208,4 @@ def test_merge_cli_reqs_config_settings(script: PipTestEnvironment) -> None: script.assert_installed(foo="1.0") config = script.site_packages_path / "config.json" with open(config, "rb") as f: - assert json.load(f) == {"FOO": ["HELLO", "BAR", "FOOBAR"]} + assert json.load(f) == {"FOO": ["HELLO", "BAR", "FOOBAR"], "BAZ": "BAR"} From e000aab0ddd29d4ef566b1e70321dbb945b62b85 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 20 Mar 2023 17:09:28 +0300 Subject: [PATCH 332/730] apply cli settings for reqs --- src/pip/_internal/cli/req_command.py | 2 ++ tests/functional/test_config_settings.py | 25 ++++++++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 3ce4f1eeca2..dbc04976a4f 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -444,6 +444,8 @@ def get_requirements( config_settings = merge_config_settings( 
config_settings, options.config_settings ) + elif options.config_settings: + config_settings = options.config_settings req_to_add = install_req_from_parsed_requirement( parsed_req, isolated=options.isolated_mode, diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index 507509458b0..49f1390ddf4 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -209,3 +209,28 @@ def test_merge_cli_reqs_config_settings(script: PipTestEnvironment) -> None: config = script.site_packages_path / "config.json" with open(config, "rb") as f: assert json.load(f) == {"FOO": ["HELLO", "BAR", "FOOBAR"], "BAZ": "BAR"} + + +def test_cli_config_settings_reqs(script: PipTestEnvironment) -> None: + _, _, project_dir = make_project(script.scratch_path) + a_sdist = create_basic_sdist_for_package( + script, + "foo", + "1.0", + {"pyproject.toml": PYPROJECT_TOML, "backend/dummy_backend.py": BACKEND_SRC}, + ) + script.scratch_path.joinpath("reqs.txt").write_text("foo") + script.pip( + "install", + "--no-index", + "-f", + str(a_sdist.parent), + "-r", + "reqs.txt", + "--config-settings", + "FOO=BAR", + ) + script.assert_installed(foo="1.0") + config = script.site_packages_path / "config.json" + with open(config, "rb") as f: + assert json.load(f) == {"FOO": "BAR"} From bbab400ba673d865287280823da49991950670d3 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 20 Mar 2023 17:43:38 +0300 Subject: [PATCH 333/730] Prefer reqs config_settings over cli config_settings --- src/pip/_internal/cli/req_command.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index dbc04976a4f..e3576644232 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -435,17 +435,18 @@ def get_requirements( for parsed_req in parse_requirements( filename, 
finder=finder, options=options, session=session ): - config_settings = ( + req_config_settings = ( parsed_req.options.get("config_settings") if parsed_req.options else None ) - if config_settings and options.config_settings: + cli_config_settings = getattr(options, "config_settings", None) + if req_config_settings and cli_config_settings: config_settings = merge_config_settings( - config_settings, options.config_settings + req_config_settings, cli_config_settings ) - elif options.config_settings: - config_settings = options.config_settings + else: + config_settings = req_config_settings or cli_config_settings req_to_add = install_req_from_parsed_requirement( parsed_req, isolated=options.isolated_mode, From 88b1ce5eec419545eda4f1bfea2c59682f575c17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 22 Mar 2023 10:11:06 +0100 Subject: [PATCH 334/730] Add a few back quotes in docs --- docs/html/reference/inspect-report.md | 4 ++-- docs/html/reference/installation-report.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/html/reference/inspect-report.md b/docs/html/reference/inspect-report.md index 2bbef6e1757..1355e5d4274 100644 --- a/docs/html/reference/inspect-report.md +++ b/docs/html/reference/inspect-report.md @@ -23,7 +23,7 @@ The report is a JSON object with the following properties: - `pip_version`: a string with the version of pip used to produce the report. -- `installed`: an array of [InspectReportItem](InspectReportItem) representing the +- `installed`: an array of [`InspectReportItem`](InspectReportItem) representing the distribution packages that are installed. - `environment`: an object describing the environment where the installation report was @@ -53,7 +53,7 @@ the following properties: using the [direct URL data structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/). 
In most case, this field corresponds to the - [direct_url.json](https://packaging.python.org/en/latest/specifications/direct-url) + [`direct_url.json`](https://packaging.python.org/en/latest/specifications/direct-url) metadata, except for legacy editable installs, where it is emulated. - `requested`: `true` if the `REQUESTED` metadata is present, `false` otherwise. This diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md index 983ec48e759..cc2e23b2a20 100644 --- a/docs/html/reference/installation-report.md +++ b/docs/html/reference/installation-report.md @@ -36,7 +36,7 @@ The report is a JSON object with the following properties: - `pip_version`: a string with the version of pip used to produce the report. -- `install`: an array of [InstallationReportItem](InstallationReportItem) representing +- `install`: an array of [`InstallationReportItem`](InstallationReportItem) representing the distribution packages (to be) installed. - `environment`: an object describing the environment where the installation report was @@ -61,7 +61,7 @@ package with the following properties: using the [direct URL data structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/). When `is_direct` is `true`, this field is the same as the - [direct_url.json](https://packaging.python.org/en/latest/specifications/direct-url) + [`direct_url.json`](https://packaging.python.org/en/latest/specifications/direct-url) metadata, otherwise it represents the URL of the artifact obtained from the index or `--find-links`. 
From 9d160aac761720a6b7a23447f569167b66b6cb24 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Sun, 26 Mar 2023 16:08:50 +0300 Subject: [PATCH 335/730] fix merge_config_settings --- src/pip/_internal/utils/misc.py | 19 ++++++++++++++----- tests/functional/test_config_settings.py | 7 ++++++- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 600763c6ad0..66d97859676 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -27,6 +27,7 @@ Iterable, Iterator, List, + Mapping, Optional, TextIO, Tuple, @@ -742,17 +743,25 @@ def prepare_metadata_for_build_editable( def merge_config_settings( - reqs_settings: Dict[str, Union[str, List[str]]], - cli_settings: Dict[str, Union[str, List[str]]], + reqs_settings: Mapping[str, Union[str, List[str]]], + cli_settings: Mapping[str, Union[str, List[str]]], ) -> Dict[str, Union[str, List[str]]]: dd: Dict[str, Union[str, List[str]]] = {} for d in (reqs_settings, cli_settings): for k, v in d.items(): if k in dd: - if isinstance(dd[k], list): - dd[k].append(v) + value = dd[k] + if isinstance(value, list): + if isinstance(v, list): + value.extend(v) + else: + value.append(v) else: - dd[k] = [dd[k], v] + if isinstance(v, str): + value = [value, v] + else: + value = [value, *v] + dd[k] = value else: dd[k] = v return dd diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index 49f1390ddf4..7d4e3c3d464 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -204,11 +204,16 @@ def test_merge_cli_reqs_config_settings(script: PipTestEnvironment) -> None: "reqs.txt", "--config-settings", "FOO=FOOBAR", + "--config-settings", + "FOO=BARFOO", ) script.assert_installed(foo="1.0") config = script.site_packages_path / "config.json" with open(config, "rb") as f: - assert json.load(f) == {"FOO": ["HELLO", "BAR", 
"FOOBAR"], "BAZ": "BAR"} + assert json.load(f) == { + "FOO": ["HELLO", "BAR", "FOOBAR", "BARFOO"], + "BAZ": "BAR", + } def test_cli_config_settings_reqs(script: PipTestEnvironment) -> None: From 973113154bd8cca7fa3ca296f3249e6c3742b801 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Luis=20Cano=20Rodr=C3=ADguez?= Date: Sun, 26 Mar 2023 15:39:12 +0200 Subject: [PATCH 336/730] Present found conflicts when discarding some criterion (#10937) --- news/10937.feature.rst | 1 + src/pip/_internal/resolution/resolvelib/reporter.py | 12 ++++++++++++ 2 files changed, 13 insertions(+) create mode 100644 news/10937.feature.rst diff --git a/news/10937.feature.rst b/news/10937.feature.rst new file mode 100644 index 00000000000..2974c577a10 --- /dev/null +++ b/news/10937.feature.rst @@ -0,0 +1 @@ +Present conflict information during installation after each choice that is rejected (pass ``-vv`` to ``pip install`` to show it) diff --git a/src/pip/_internal/resolution/resolvelib/reporter.py b/src/pip/_internal/resolution/resolvelib/reporter.py index a95a8e4cf24..3c724238a1e 100644 --- a/src/pip/_internal/resolution/resolvelib/reporter.py +++ b/src/pip/_internal/resolution/resolvelib/reporter.py @@ -42,6 +42,18 @@ def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None: message = self._messages_at_reject_count[count] logger.info("INFO: %s", message.format(package_name=candidate.name)) + msg = "Will try a different candidate, due to conflict:" + for req_info in criterion.information: + req, parent = req_info.requirement, req_info.parent + # Inspired by Factory.get_installation_error + msg += "\n " + if parent: + msg += f"{parent.name} {parent.version} depends on " + else: + msg += "The user requested " + msg += req.format_for_error() + logger.debug(msg) + class PipDebuggingReporter(BaseReporter): """A reporter that does an info log for every event it sees.""" From 82b42c80f95e3420348730e444eabad11b3d1556 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 27 Mar 2023 12:33:11 +0200 Subject: [PATCH 337/730] xfail test_pip_wheel_ext_module_with_tmpdir_inside (#11895) --- tests/functional/test_wheel.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py index 1894b37a66e..071b60c70f0 100644 --- a/tests/functional/test_wheel.py +++ b/tests/functional/test_wheel.py @@ -342,6 +342,15 @@ def test_pip_wheel_with_user_set_in_config( sys.platform.startswith("win"), reason="The empty extension module does not work on Win", ) +@pytest.mark.xfail( + condition=sys.platform == "darwin" and sys.version_info < (3, 9), + reason=( + "Unexplained 'no module named platform' in " + "https://github.com/pypa/wheel/blob" + "/c87e6ed82b58b41b258a3e8c852af8bc1817bb00" + "/src/wheel/vendored/packaging/tags.py#L396-L411" + ), +) def test_pip_wheel_ext_module_with_tmpdir_inside( script: PipTestEnvironment, data: TestData, common_wheels: Path ) -> None: From 440487792ca9d440e5cfdc402e77f8c793a2da70 Mon Sep 17 00:00:00 2001 From: Roman Donchenko Date: Mon, 27 Mar 2023 13:34:43 +0300 Subject: [PATCH 338/730] Include `AUTHORS.txt` in pip's wheels (#11882) --- news/11882.bugfix.rst | 1 + setup.cfg | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) create mode 100644 news/11882.bugfix.rst diff --git a/news/11882.bugfix.rst b/news/11882.bugfix.rst new file mode 100644 index 00000000000..5373487b188 --- /dev/null +++ b/news/11882.bugfix.rst @@ -0,0 +1 @@ +Include ``AUTHORS.txt`` in pip's wheels. diff --git a/setup.cfg b/setup.cfg index ce6662d898d..2e35be30dd6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -111,6 +111,3 @@ exclude_lines = pragma: no cover # This excludes typing-specific code, which will be validated by mypy anyway. 
if TYPE_CHECKING - -[metadata] -license_file = LICENSE.txt From ade38264b271e2d02e0a2e00f2626c7e3c4e9ab1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= Date: Mon, 27 Mar 2023 12:54:27 +0200 Subject: [PATCH 339/730] Make sphinx.ext.extlinks captions actual string templates (#11883) --- docs/html/conf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/html/conf.py b/docs/html/conf.py index cc967e0ba3c..aae1364b87a 100644 --- a/docs/html/conf.py +++ b/docs/html/conf.py @@ -74,9 +74,9 @@ # -- Options for extlinks ------------------------------------------------------------- extlinks = { - "issue": ("https://github.com/pypa/pip/issues/%s", "#"), - "pull": ("https://github.com/pypa/pip/pull/%s", "PR #"), - "pypi": ("https://pypi.org/project/%s/", ""), + "issue": ("https://github.com/pypa/pip/issues/%s", "#%s"), + "pull": ("https://github.com/pypa/pip/pull/%s", "PR #%s"), + "pypi": ("https://pypi.org/project/%s/", "%s"), } # -- Options for towncrier_draft extension -------------------------------------------- From 8a1a8d7915cb55ee42fb6f309159df91fbfd795a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 12:10:55 +0100 Subject: [PATCH 340/730] Always use pep 517 when the 'wheel' package is absent --- news/8559.removal.rst | 2 ++ src/pip/_internal/cli/cmdoptions.py | 12 +++++++---- src/pip/_internal/pyproject.py | 9 ++++++-- src/pip/_internal/utils/deprecation.py | 14 ------------ src/pip/_internal/utils/misc.py | 12 ----------- src/pip/_internal/wheel_builder.py | 11 +--------- tests/functional/test_install.py | 30 -------------------------- tests/unit/test_wheel_builder.py | 21 ------------------ 8 files changed, 18 insertions(+), 93 deletions(-) create mode 100644 news/8559.removal.rst diff --git a/news/8559.removal.rst b/news/8559.removal.rst new file mode 100644 index 00000000000..a0953dade6b --- /dev/null +++ b/news/8559.removal.rst @@ -0,0 +1,2 @@ +When the ``wheel`` package is not 
installed, pip now uses the default build backend +instead of ``setup.py install`` for project without ``pyproject.toml``. diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 2bbff2d4dc1..3d78013a9dc 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -783,11 +783,15 @@ def _handle_no_use_pep517( """ raise_option_error(parser, option=option, msg=msg) - # If user doesn't wish to use pep517, we check if setuptools is installed + # If user doesn't wish to use pep517, we check if setuptools and wheel are installed # and raise error if it is not. - if not importlib.util.find_spec("setuptools"): - msg = "It is not possible to use --no-use-pep517 without setuptools installed." - raise_option_error(parser, option=option, msg=msg) + for package in ("setuptools", "wheel"): + if not importlib.util.find_spec(package): + msg = ( + f"It is not possible to use --no-use-pep517 " + f"without {package} installed." + ) + raise_option_error(parser, option=option, msg=msg) # Otherwise, --no-use-pep517 was passed via the command-line. parser.values.use_pep517 = False diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py index 1de9f0fde5d..57fef57077d 100644 --- a/src/pip/_internal/pyproject.py +++ b/src/pip/_internal/pyproject.py @@ -93,12 +93,17 @@ def load_pyproject_toml( # we do so if the project has a pyproject.toml file # or if we cannot import setuptools. - # We fallback to PEP 517 when without setuptools, + # We fallback to PEP 517 when without setuptools or without the wheel package, # so setuptools can be installed as a default build backend. 
# For more info see: # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9 + # https://github.com/pypa/pip/issues/8559 elif use_pep517 is None: - use_pep517 = has_pyproject or not importlib.util.find_spec("setuptools") + use_pep517 = ( + has_pyproject + or not importlib.util.find_spec("setuptools") + or not importlib.util.find_spec("wheel") + ) # At this point, we know whether we're going to use PEP 517. assert use_pep517 is not None diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index 18d68f3ef94..db6daf7183d 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -159,17 +159,3 @@ def emit_deprecation(self, name: str) -> None: issue=8368, emit_after_success=True, ) - - -LegacyInstallReasonMissingWheelPackage = LegacyInstallReason( - reason=( - "{name} is being installed using the legacy " - "'setup.py install' method, because it does not have a " - "'pyproject.toml' and the 'wheel' package " - "is not installed." - ), - replacement="to enable the '--use-pep517' option", - gone_in="23.1", - issue=8559, - emit_before_install=True, -) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index baa1ba7eac2..319e91391ed 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -614,18 +614,6 @@ def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]: return h, length -def is_wheel_installed() -> bool: - """ - Return whether the wheel package is installed. - """ - try: - import wheel # noqa: F401 - except ImportError: - return False - - return True - - def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]: """ Return paired elements. 
diff --git a/src/pip/_internal/wheel_builder.py b/src/pip/_internal/wheel_builder.py index 612c91ba317..60d75dd18ef 100644 --- a/src/pip/_internal/wheel_builder.py +++ b/src/pip/_internal/wheel_builder.py @@ -19,9 +19,8 @@ from pip._internal.operations.build.wheel_editable import build_wheel_editable from pip._internal.operations.build.wheel_legacy import build_wheel_legacy from pip._internal.req.req_install import InstallRequirement -from pip._internal.utils.deprecation import LegacyInstallReasonMissingWheelPackage from pip._internal.utils.logging import indent_log -from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed +from pip._internal.utils.misc import ensure_dir, hash_file from pip._internal.utils.setuptools_build import make_setuptools_clean_args from pip._internal.utils.subprocess import call_subprocess from pip._internal.utils.temp_dir import TempDirectory @@ -73,14 +72,6 @@ def _should_build( # we only build PEP 660 editable requirements return req.supports_pyproject_editable() - if req.use_pep517: - return True - - if not is_wheel_installed(): - # we don't build legacy requirements if wheel is not installed - req.legacy_install_reason = LegacyInstallReasonMissingWheelPackage - return False - return True diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 5d2f78c25ec..a47f782b7ee 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -12,7 +12,6 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.models.index import PyPI, TestPyPI -from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX from pip._internal.utils.misc import rmtree from tests.conftest import CertFactory from tests.lib import ( @@ -2296,35 +2295,6 @@ def test_install_dry_run(script: PipTestEnvironment, data: TestData) -> None: assert "Successfully installed" not in result.stdout -def test_install_8559_missing_wheel_package( - script: PipTestEnvironment, 
shared_data: TestData -) -> None: - result = script.pip( - "install", - "--find-links", - shared_data.find_links, - "simple", - allow_stderr_warning=True, - ) - assert DEPRECATION_MSG_PREFIX in result.stderr - assert "'wheel' package is not installed" in result.stderr - assert "using the legacy 'setup.py install' method" in result.stderr - - -@pytest.mark.usefixtures("with_wheel") -def test_install_8559_wheel_package_present( - script: PipTestEnvironment, shared_data: TestData -) -> None: - result = script.pip( - "install", - "--find-links", - shared_data.find_links, - "simple", - allow_stderr_warning=False, - ) - assert DEPRECATION_MSG_PREFIX not in result.stderr - - @pytest.mark.skipif( sys.version_info < (3, 11), reason="3.11 required to find distributions via importlib metadata", diff --git a/tests/unit/test_wheel_builder.py b/tests/unit/test_wheel_builder.py index 9c322053688..9044f945307 100644 --- a/tests/unit/test_wheel_builder.py +++ b/tests/unit/test_wheel_builder.py @@ -2,7 +2,6 @@ import os from pathlib import Path from typing import Optional, cast -from unittest import mock import pytest @@ -117,26 +116,6 @@ def test_should_build_for_wheel_command(req: ReqMock, expected: bool) -> None: assert should_build is expected -@mock.patch("pip._internal.wheel_builder.is_wheel_installed") -def test_should_build_legacy_wheel_not_installed(is_wheel_installed: mock.Mock) -> None: - is_wheel_installed.return_value = False - legacy_req = ReqMock(use_pep517=False) - should_build = wheel_builder.should_build_for_install_command( - cast(InstallRequirement, legacy_req), - ) - assert not should_build - - -@mock.patch("pip._internal.wheel_builder.is_wheel_installed") -def test_should_build_legacy_wheel_installed(is_wheel_installed: mock.Mock) -> None: - is_wheel_installed.return_value = True - legacy_req = ReqMock(use_pep517=False) - should_build = wheel_builder.should_build_for_install_command( - cast(InstallRequirement, legacy_req), - ) - assert should_build - - 
@pytest.mark.parametrize( "req, expected", [ From 1a0b7f47a0eeff5360ecc751e2e6dde6a2222ffe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 14:24:59 +0100 Subject: [PATCH 341/730] Remove test_installed_files_recorded_in_deterministic_order This test will become useless anyway when we remove setup.py install support. --- tests/functional/test_install.py | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index a47f782b7ee..3932fcfb04f 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1945,28 +1945,6 @@ def test_installing_scripts_on_path_does_not_print_warning( assert "--no-warn-script-location" not in result.stderr -def test_installed_files_recorded_in_deterministic_order( - script: PipTestEnvironment, data: TestData -) -> None: - """ - Ensure that we record the files installed by a package in a deterministic - order, to make installs reproducible. 
- """ - to_install = data.packages.joinpath("FSPkg") - result = script.pip("install", to_install) - fspkg_folder = script.site_packages / "fspkg" - egg_info = f"FSPkg-0.1.dev0-py{pyversion}.egg-info" - installed_files_path = script.site_packages / egg_info / "installed-files.txt" - result.did_create(fspkg_folder) - result.did_create(installed_files_path) - - installed_files_path = result.files_created[installed_files_path].full - installed_files_lines = [ - p for p in Path(installed_files_path).read_text().split("\n") if p - ] - assert installed_files_lines == sorted(installed_files_lines) - - def test_install_conflict_results_in_warning( script: PipTestEnvironment, data: TestData ) -> None: From 23cc3d523b5998b9be9c34305f6fa3503c96f49a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 14:17:39 +0100 Subject: [PATCH 342/730] Always install wheel in test venvs --- tests/conftest.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 106e7321456..23371f54a6d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -429,6 +429,7 @@ def virtualenv_template( tmpdir_factory: pytest.TempPathFactory, pip_src: Path, setuptools_install: Path, + wheel_install: Path, coverage_install: Path, ) -> Iterator[VirtualEnvironment]: @@ -442,8 +443,9 @@ def virtualenv_template( tmpdir = tmpdir_factory.mktemp("virtualenv") venv = VirtualEnvironment(tmpdir.joinpath("venv_orig"), venv_type=venv_type) - # Install setuptools and pip. + # Install setuptools, wheel and pip. 
install_pth_link(venv, "setuptools", setuptools_install) + install_pth_link(venv, "wheel", wheel_install) pip_editable = tmpdir_factory.mktemp("pip") / "pip" shutil.copytree(pip_src, pip_editable, symlinks=True) # noxfile.py is Python 3 only @@ -503,7 +505,7 @@ def virtualenv( @pytest.fixture def with_wheel(virtualenv: VirtualEnvironment, wheel_install: Path) -> None: - install_pth_link(virtualenv, "wheel", wheel_install) + pass class ScriptFactory(Protocol): From b11e8e434368ce30e625052f482f8ab0b7d9ec44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 14:25:37 +0100 Subject: [PATCH 343/730] Test presence of dist-info instead of egg-info --- tests/functional/test_install.py | 2 +- tests/functional/test_install_user.py | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 3932fcfb04f..ac1292ad76e 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1174,7 +1174,7 @@ def test_install_package_with_prefix( install_path = join( sysconfig.get_path("purelib", vars={"base": rel_prefix_path}), # we still test for egg-info because no-binary implies setup.py install - f"simple-1.0-py{pyversion}.egg-info", + "simple-1.0.dist-info", ) result.did_create(install_path) diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py index 3207f0a45bf..bebc7e4200a 100644 --- a/tests/functional/test_install_user.py +++ b/tests/functional/test_install_user.py @@ -133,8 +133,7 @@ def test_install_user_conflict_in_usersite( result2 = script.pip("install", "--user", "INITools==0.1", "--no-binary=:all:") # usersite has 0.1 - # we still test for egg-info because no-binary implies setup.py install - egg_info_folder = script.user_site / f"INITools-0.1-py{pyversion}.egg-info" + dist_info_folder = script.user_site / "INITools-0.1.dist-info" initools_v3_file = ( # file only in 0.3 script.base_path @@ 
-142,7 +141,7 @@ def test_install_user_conflict_in_usersite( / "initools" / "configparser.py" ) - result2.did_create(egg_info_folder) + result2.did_create(dist_info_folder) assert not isfile(initools_v3_file), initools_v3_file def test_install_user_conflict_in_globalsite( From a38865597dcdb4f00187a95bbe98e861b78eceee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 14:41:53 +0100 Subject: [PATCH 344/730] Update test_install_subprocess_output_handling --- tests/functional/test_install.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index ac1292ad76e..1fa702a401d 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1515,12 +1515,18 @@ def test_install_subprocess_output_handling( # If the install fails, then we *should* show the output... but only once, # even if --verbose is given. result = script.pip(*(args + ["--global-option=--fail"]), expect_error=True) - assert 1 == result.stderr.count("I DIE, I DIE") + # This error is emitted 3 times: + # - by setup.py bdist_wheel + # - by setup.py clean + # - by setup.py install which is used as fallback when setup.py bdist_wheel failed + # Before, it failed only once because it attempted only setup.py install. + # TODO update this when we remove the last setup.py install code path. 
+ assert 3 == result.stderr.count("I DIE, I DIE") result = script.pip( *(args + ["--global-option=--fail", "--verbose"]), expect_error=True ) - assert 1 == result.stderr.count("I DIE, I DIE") + assert 3 == result.stderr.count("I DIE, I DIE") def test_install_log(script: PipTestEnvironment, data: TestData, tmpdir: Path) -> None: From 04e1ab071d303157b0fd6fa68508a8ef95c931c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 14:47:21 +0100 Subject: [PATCH 345/730] Update test_install_package_that_emits_unicode Adapt to the removal of the setup.py install code path. --- tests/data/packages/BrokenEmitsUTF8/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/data/packages/BrokenEmitsUTF8/setup.py b/tests/data/packages/BrokenEmitsUTF8/setup.py index a40bc60c18f..eb4ebf2d380 100644 --- a/tests/data/packages/BrokenEmitsUTF8/setup.py +++ b/tests/data/packages/BrokenEmitsUTF8/setup.py @@ -8,7 +8,7 @@ class FakeError(Exception): pass -if sys.argv[1] == "install": +if sys.argv[1] in ("install", "bdist_wheel"): if hasattr(sys.stdout, "buffer"): sys.stdout.buffer.write( "\nThis package prints out UTF-8 stuff like:\n".encode("utf-8") From 7b11b5328bd69f510646f8947424d94f6d10aa4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 14:50:18 +0100 Subject: [PATCH 346/730] Update test_inspect_basic wheel is now installed in our test venv by default. 
--- tests/functional/test_inspect.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_inspect.py b/tests/functional/test_inspect.py index 18abf1a46f6..c9f43134624 100644 --- a/tests/functional/test_inspect.py +++ b/tests/functional/test_inspect.py @@ -31,11 +31,12 @@ def test_inspect_basic(simple_script: PipTestEnvironment) -> None: result = simple_script.pip("inspect") report = json.loads(result.stdout) installed = report["installed"] - assert len(installed) == 4 + assert len(installed) == 5 installed_by_name = {i["metadata"]["name"]: i for i in installed} assert installed_by_name.keys() == { "pip", "setuptools", + "wheel", "coverage", "simplewheel", } From a4c91124ce4b1215215c7164b09afb6d31e137c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 15:05:09 +0100 Subject: [PATCH 347/730] Update entrypoint tests console_script entrypoints declarations are stricter when we don't use setup.py install. --- tests/functional/test_uninstall.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index b0e12f6af59..83af0dd520f 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -219,7 +219,11 @@ def test_uninstall_overlapping_package( @pytest.mark.parametrize( - "console_scripts", ["test_ = distutils_install", "test_:test_ = distutils_install"] + "console_scripts", + [ + "test_ = distutils_install:test", + "test_:test_ = distutils_install:test_test", + ], ) def test_uninstall_entry_point_colon_in_name( script: PipTestEnvironment, console_scripts: str @@ -266,7 +270,7 @@ def test_uninstall_gui_scripts(script: PipTestEnvironment) -> None: version="0.1", entry_points={ "gui_scripts": [ - "test_ = distutils_install", + "test_ = distutils_install:test", ], }, ) @@ -300,6 +304,7 @@ def test_uninstall_console_scripts(script: PipTestEnvironment) -> None: 
os.path.join(script.venv, "build"), "cache", os.path.join("scratch", "discover", "discover.egg-info"), + os.path.join("scratch", "discover", "build"), ], ) @@ -314,7 +319,7 @@ def test_uninstall_console_scripts_uppercase_name(script: PipTestEnvironment) -> version="0.1", entry_points={ "console_scripts": [ - "Test = distutils_install", + "Test = distutils_install:Test", ], }, ) From 053b890e84b2dbdd72f520a7926532e8a5569856 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 15:13:15 +0100 Subject: [PATCH 348/730] Update a few tests for removal of setup.py install We now look for "Building wheel" instead of "running setup.py install" --- tests/functional/test_install_reqs.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 5490b301cd0..a52274fec6b 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -466,7 +466,7 @@ def test_constraints_constrain_to_local( "singlemodule", allow_stderr_warning=True, ) - assert "Running setup.py install for singlemodule" in result.stdout + assert "Building wheel for singlemodule" in result.stdout def test_constrained_to_url_install_same_url( @@ -485,7 +485,7 @@ def test_constrained_to_url_install_same_url( to_install, allow_stderr_warning=True, ) - assert "Running setup.py install for singlemodule" in result.stdout, str(result) + assert "Building wheel for singlemodule" in result.stdout, str(result) @pytest.mark.usefixtures("with_wheel") @@ -617,7 +617,7 @@ def test_install_distribution_full_union( result = script.pip_install_local( to_install, f"{to_install}[bar]", f"{to_install}[baz]" ) - assert "Running setup.py install for LocalExtras" in result.stdout + assert "Building wheel for LocalExtras" in result.stdout result.did_create(script.site_packages / "simple") result.did_create(script.site_packages / "singlemodule.py") From 
b31a308b08f8d0c810cbe3f9b472d2d4436503a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 16:03:13 +0100 Subject: [PATCH 349/730] Fix test_debian_egg_name_workaround Run setup.py install manually since pip does not do it anymore. --- tests/functional/test_install_compat.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/tests/functional/test_install_compat.py b/tests/functional/test_install_compat.py index 4b6b46b02df..ae27ebd536e 100644 --- a/tests/functional/test_install_compat.py +++ b/tests/functional/test_install_compat.py @@ -3,6 +3,7 @@ """ import os +from pathlib import Path import pytest @@ -11,7 +12,11 @@ @pytest.mark.network -def test_debian_egg_name_workaround(script: PipTestEnvironment) -> None: +def test_debian_egg_name_workaround( + script: PipTestEnvironment, + shared_data: TestData, + tmp_path: Path, +) -> None: """ We can uninstall packages installed with the pyversion removed from the egg-info metadata directory name. 
@@ -22,10 +27,17 @@ def test_debian_egg_name_workaround(script: PipTestEnvironment) -> None: https://bitbucket.org/ianb/pip/issue/104/pip-uninstall-on-ubuntu-linux """ - result = script.pip("install", "INITools==0.2") + result = script.run( + "python", + "setup.py", + "install", + "--single-version-externally-managed", + f"--record={tmp_path / 'record'}", + cwd=shared_data.src / "simplewheel-2.0", + ) egg_info = os.path.join( - script.site_packages, f"INITools-0.2-py{pyversion}.egg-info" + script.site_packages, f"simplewheel-2.0-py{pyversion}.egg-info" ) # Debian only removes pyversion for global installs, not inside a venv @@ -35,7 +47,7 @@ def test_debian_egg_name_workaround(script: PipTestEnvironment) -> None: result.did_create(egg_info, message=f"Couldn't find {egg_info}") # The Debian no-pyversion version of the .egg-info - mangled = os.path.join(script.site_packages, "INITools-0.2.egg-info") + mangled = os.path.join(script.site_packages, "simplewheel-2.0.egg-info") result.did_not_create(mangled, message=f"Found unexpected {mangled}") # Simulate a Debian install by copying the .egg-info to their name for it @@ -46,7 +58,7 @@ def test_debian_egg_name_workaround(script: PipTestEnvironment) -> None: assert os.path.isdir(full_mangled) # Try the uninstall and verify that everything is removed. - result2 = script.pip("uninstall", "INITools", "-y") + result2 = script.pip("uninstall", "simplewheel", "-y") assert_all_changes(result, result2, [script.venv / "build", "cache"]) From 3fd8fde14be7be880c88458d1c5f92211c2ec87e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 16:08:21 +0100 Subject: [PATCH 350/730] Remove with_wheel fixture We install wheel by default in our test env. 
--- tests/conftest.py | 5 ---- tests/functional/test_download.py | 2 -- tests/functional/test_freeze.py | 3 --- tests/functional/test_install.py | 28 --------------------- tests/functional/test_install_cleanup.py | 1 - tests/functional/test_install_config.py | 1 - tests/functional/test_install_direct_url.py | 6 ----- tests/functional/test_install_index.py | 7 ------ tests/functional/test_install_report.py | 6 ----- tests/functional/test_install_reqs.py | 6 +---- tests/functional/test_install_requested.py | 7 ------ tests/functional/test_install_upgrade.py | 8 ------ tests/functional/test_install_user.py | 1 - tests/functional/test_install_vcs_git.py | 3 --- tests/functional/test_install_wheel.py | 4 +-- tests/functional/test_list.py | 1 - tests/functional/test_pep660.py | 7 ------ tests/functional/test_wheel.py | 2 -- 18 files changed, 2 insertions(+), 96 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 23371f54a6d..13011f4fd87 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -503,11 +503,6 @@ def virtualenv( yield virtualenv_factory(tmpdir.joinpath("workspace", "venv")) -@pytest.fixture -def with_wheel(virtualenv: VirtualEnvironment, wheel_install: Path) -> None: - pass - - class ScriptFactory(Protocol): def __call__( self, diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index ede2213aa70..31418ca8c2b 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -659,7 +659,6 @@ def make_wheel_with_python_requires( return package_dir / "dist" / file_name -@pytest.mark.usefixtures("with_wheel") def test_download__python_version_used_for_python_requires( script: PipTestEnvironment, data: TestData ) -> None: @@ -700,7 +699,6 @@ def make_args(python_version: str) -> List[str]: script.pip(*args) # no exception -@pytest.mark.usefixtures("with_wheel") def test_download_ignore_requires_python_dont_fail_with_wrong_python( script: PipTestEnvironment, ) -> None: diff --git 
a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index 49b362d7e96..b24b27edcc6 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -104,7 +104,6 @@ def test_exclude_and_normalization(script: PipTestEnvironment, tmpdir: Path) -> assert "Normalizable_Name" not in result.stdout -@pytest.mark.usefixtures("with_wheel") def test_freeze_multiple_exclude_with_all(script: PipTestEnvironment) -> None: result = script.pip("freeze", "--all") assert "pip==" in result.stdout @@ -962,7 +961,6 @@ def test_freeze_path_multiple( _check_output(result.stdout, expected) -@pytest.mark.usefixtures("with_wheel") def test_freeze_direct_url_archive( script: PipTestEnvironment, shared_data: TestData ) -> None: @@ -1005,7 +1003,6 @@ def test_freeze_include_work_dir_pkg(script: PipTestEnvironment) -> None: assert "simple==1.0" in result.stdout -@pytest.mark.usefixtures("with_wheel") def test_freeze_pep610_editable(script: PipTestEnvironment) -> None: """ Test that a package installed with a direct_url.json with editable=true diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 1fa702a401d..12f9c0141df 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -272,7 +272,6 @@ def test_pep518_forkbombs( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_pip_second_command_line_interface_works( script: PipTestEnvironment, pip_src: Path, @@ -317,7 +316,6 @@ def test_install_exit_status_code_when_blank_requirements_file( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_basic_install_from_pypi(script: PipTestEnvironment) -> None: """ Test installing a package from PyPI. 
@@ -376,7 +374,6 @@ def test_basic_install_editable_from_git(script: PipTestEnvironment) -> None: _test_install_editable_from_git(script) -@pytest.mark.usefixtures("with_wheel") def test_install_editable_from_git_autobuild_wheel(script: PipTestEnvironment) -> None: _test_install_editable_from_git(script) @@ -503,7 +500,6 @@ def test_vcs_url_urlquote_normalization( @pytest.mark.parametrize("resolver", ["", "--use-deprecated=legacy-resolver"]) -@pytest.mark.usefixtures("with_wheel") def test_basic_install_from_local_directory( script: PipTestEnvironment, data: TestData, resolver: str ) -> None: @@ -533,7 +529,6 @@ def test_basic_install_from_local_directory( ("embedded_rel_path", True), ], ) -@pytest.mark.usefixtures("with_wheel") def test_basic_install_relative_directory( script: PipTestEnvironment, data: TestData, test_type: str, editable: bool ) -> None: @@ -655,7 +650,6 @@ def test_hashed_install_failure_later_flag( ) -@pytest.mark.usefixtures("with_wheel") def test_install_from_local_directory_with_in_tree_build( script: PipTestEnvironment, data: TestData ) -> None: @@ -800,7 +794,6 @@ def test_upgrade_argparse_shadowed(script: PipTestEnvironment) -> None: assert "Not uninstalling argparse" not in result.stdout -@pytest.mark.usefixtures("with_wheel") def test_install_curdir(script: PipTestEnvironment, data: TestData) -> None: """ Test installing current directory ('.'). @@ -817,7 +810,6 @@ def test_install_curdir(script: PipTestEnvironment, data: TestData) -> None: result.did_create(dist_info_folder) -@pytest.mark.usefixtures("with_wheel") def test_install_pardir(script: PipTestEnvironment, data: TestData) -> None: """ Test installing parent directory ('..'). 
@@ -877,7 +869,6 @@ def test_install_global_option_using_editable( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_package_with_same_name_in_curdir(script: PipTestEnvironment) -> None: """ Test installing a package with the same name of a local folder @@ -896,7 +887,6 @@ def test_install_package_with_same_name_in_curdir(script: PipTestEnvironment) -> ) -@pytest.mark.usefixtures("with_wheel") def test_install_folder_using_dot_slash(script: PipTestEnvironment) -> None: """ Test installing a folder using pip install ./foldername @@ -909,7 +899,6 @@ def test_install_folder_using_dot_slash(script: PipTestEnvironment) -> None: result.did_create(dist_info_folder) -@pytest.mark.usefixtures("with_wheel") def test_install_folder_using_slash_in_the_end(script: PipTestEnvironment) -> None: r""" Test installing a folder using pip install foldername/ or foldername\ @@ -922,7 +911,6 @@ def test_install_folder_using_slash_in_the_end(script: PipTestEnvironment) -> No result.did_create(dist_info_folder) -@pytest.mark.usefixtures("with_wheel") def test_install_folder_using_relative_path(script: PipTestEnvironment) -> None: """ Test installing a folder using pip install folder1/folder2 @@ -937,7 +925,6 @@ def test_install_folder_using_relative_path(script: PipTestEnvironment) -> None: @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_package_which_contains_dev_in_name(script: PipTestEnvironment) -> None: """ Test installing package from PyPI which contains 'dev' in name @@ -949,7 +936,6 @@ def test_install_package_which_contains_dev_in_name(script: PipTestEnvironment) result.did_create(dist_info_folder) -@pytest.mark.usefixtures("with_wheel") def test_install_package_with_target(script: PipTestEnvironment) -> None: """ Test installing a package using pip install --target @@ -1082,7 +1068,6 @@ def test_install_nonlocal_compatible_wheel_path( @pytest.mark.parametrize("opt", ("--target", "--prefix")) 
-@pytest.mark.usefixtures("with_wheel") def test_install_with_target_or_prefix_and_scripts_no_warning( opt: str, script: PipTestEnvironment ) -> None: @@ -1121,7 +1106,6 @@ def main(): pass assert "--no-warn-script-location" not in result.stderr, str(result) -@pytest.mark.usefixtures("with_wheel") def test_install_package_with_root(script: PipTestEnvironment, data: TestData) -> None: """ Test installing a package using pip install --root @@ -1318,7 +1302,6 @@ def test_install_package_with_latin1_setup( script.pip("install", to_install) -@pytest.mark.usefixtures("with_wheel") def test_url_req_case_mismatch_no_index( script: PipTestEnvironment, data: TestData ) -> None: @@ -1342,7 +1325,6 @@ def test_url_req_case_mismatch_no_index( result.did_not_create(dist_info_folder) -@pytest.mark.usefixtures("with_wheel") def test_url_req_case_mismatch_file_index( script: PipTestEnvironment, data: TestData ) -> None: @@ -1372,7 +1354,6 @@ def test_url_req_case_mismatch_file_index( result.did_not_create(dist_info_folder) -@pytest.mark.usefixtures("with_wheel") def test_url_incorrect_case_no_index( script: PipTestEnvironment, data: TestData ) -> None: @@ -1396,7 +1377,6 @@ def test_url_incorrect_case_no_index( result.did_create(dist_info_folder) -@pytest.mark.usefixtures("with_wheel") def test_url_incorrect_case_file_index( script: PipTestEnvironment, data: TestData ) -> None: @@ -1546,7 +1526,6 @@ def test_install_topological_sort(script: PipTestEnvironment, data: TestData) -> assert order1 in res or order2 in res, res -@pytest.mark.usefixtures("with_wheel") def test_install_wheel_broken(script: PipTestEnvironment) -> None: res = script.pip_install_local("wheelbroken", allow_stderr_error=True) assert "ERROR: Failed building wheel for wheelbroken" in res.stderr @@ -1554,7 +1533,6 @@ def test_install_wheel_broken(script: PipTestEnvironment) -> None: assert "Successfully installed wheelbroken-0.1" in str(res), str(res) -@pytest.mark.usefixtures("with_wheel") def 
test_cleanup_after_failed_wheel(script: PipTestEnvironment) -> None: res = script.pip_install_local("wheelbrokenafter", allow_stderr_error=True) assert "ERROR: Failed building wheel for wheelbrokenafter" in res.stderr @@ -1569,7 +1547,6 @@ def test_cleanup_after_failed_wheel(script: PipTestEnvironment) -> None: assert "Running setup.py clean for wheelbrokenafter" in str(res), str(res) -@pytest.mark.usefixtures("with_wheel") def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> None: # We need to use a subprocess to get the right value on Windows. res = script.run( @@ -1622,7 +1599,6 @@ def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> No ] -@pytest.mark.usefixtures("with_wheel") def test_install_no_binary_disables_building_wheels( script: PipTestEnvironment, data: TestData ) -> None: @@ -1653,7 +1629,6 @@ def test_install_no_binary_disables_building_wheels( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_no_binary_builds_pep_517_wheel( script: PipTestEnvironment, data: TestData ) -> None: @@ -1668,7 +1643,6 @@ def test_install_no_binary_builds_pep_517_wheel( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_no_binary_uses_local_backend( script: PipTestEnvironment, data: TestData, tmpdir: Path ) -> None: @@ -1682,7 +1656,6 @@ def test_install_no_binary_uses_local_backend( assert os.path.isfile(marker), "Local PEP 517 backend not used" -@pytest.mark.usefixtures("with_wheel") def test_install_no_binary_disables_cached_wheels( script: PipTestEnvironment, data: TestData ) -> None: @@ -1821,7 +1794,6 @@ def test_install_incompatible_python_requires_editable( assert _get_expected_error_text() in result.stderr, str(result) -@pytest.mark.usefixtures("with_wheel") def test_install_incompatible_python_requires_wheel(script: PipTestEnvironment) -> None: script.scratch_path.joinpath("pkga").mkdir() pkga_path = script.scratch_path / "pkga" diff --git 
a/tests/functional/test_install_cleanup.py b/tests/functional/test_install_cleanup.py index c0ea5a425b9..bc34defc978 100644 --- a/tests/functional/test_install_cleanup.py +++ b/tests/functional/test_install_cleanup.py @@ -31,7 +31,6 @@ def test_no_clean_option_blocks_cleaning_after_install( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_pep517_no_legacy_cleanup(script: PipTestEnvironment, data: TestData) -> None: """Test a PEP 517 failed build does not attempt a legacy cleanup""" to_install = data.packages.joinpath("pep517_wrapper_buildsys") diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 04e489cebd8..563b5604a8e 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -245,7 +245,6 @@ def test_options_from_venv_config( assert msg.lower() in result.stdout.lower(), str(result) -@pytest.mark.usefixtures("with_wheel") def test_install_no_binary_via_config_disables_cached_wheels( script: PipTestEnvironment, data: TestData ) -> None: diff --git a/tests/functional/test_install_direct_url.py b/tests/functional/test_install_direct_url.py index cd2a4aea75f..139ef178e77 100644 --- a/tests/functional/test_install_direct_url.py +++ b/tests/functional/test_install_direct_url.py @@ -5,13 +5,11 @@ from tests.lib.direct_url import get_created_direct_url -@pytest.mark.usefixtures("with_wheel") def test_install_find_links_no_direct_url(script: PipTestEnvironment) -> None: result = script.pip_install_local("simple") assert not get_created_direct_url(result, "simple") -@pytest.mark.usefixtures("with_wheel") def test_install_vcs_editable_no_direct_url(script: PipTestEnvironment) -> None: pkg_path = _create_test_package(script.scratch_path, name="testpkg") args = ["install", "-e", f"git+{pkg_path.as_uri()}#egg=testpkg"] @@ -21,7 +19,6 @@ def test_install_vcs_editable_no_direct_url(script: PipTestEnvironment) -> None: assert not get_created_direct_url(result, 
"testpkg") -@pytest.mark.usefixtures("with_wheel") def test_install_vcs_non_editable_direct_url(script: PipTestEnvironment) -> None: pkg_path = _create_test_package(script.scratch_path, name="testpkg") url = pkg_path.as_uri() @@ -34,7 +31,6 @@ def test_install_vcs_non_editable_direct_url(script: PipTestEnvironment) -> None assert direct_url.info.vcs == "git" -@pytest.mark.usefixtures("with_wheel") def test_install_archive_direct_url(script: PipTestEnvironment, data: TestData) -> None: req = "simple @ " + data.packages.joinpath("simple-2.0.tar.gz").as_uri() assert req.startswith("simple @ file://") @@ -43,7 +39,6 @@ def test_install_archive_direct_url(script: PipTestEnvironment, data: TestData) @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_vcs_constraint_direct_url(script: PipTestEnvironment) -> None: constraints_file = script.scratch_path / "constraints.txt" constraints_file.write_text( @@ -55,7 +50,6 @@ def test_install_vcs_constraint_direct_url(script: PipTestEnvironment) -> None: assert get_created_direct_url(result, "pip_test_package") -@pytest.mark.usefixtures("with_wheel") def test_install_vcs_constraint_direct_file_url(script: PipTestEnvironment) -> None: pkg_path = _create_test_package(script.scratch_path, name="testpkg") url = pkg_path.as_uri() diff --git a/tests/functional/test_install_index.py b/tests/functional/test_install_index.py index c1f0ecbd7c6..b73e28f4794 100644 --- a/tests/functional/test_install_index.py +++ b/tests/functional/test_install_index.py @@ -1,12 +1,9 @@ import shutil import textwrap -import pytest - from tests.lib import PipTestEnvironment, TestData -@pytest.mark.usefixtures("with_wheel") def test_find_links_relative_path(script: PipTestEnvironment, data: TestData) -> None: """Test find-links as a relative path.""" result = script.pip( @@ -23,7 +20,6 @@ def test_find_links_relative_path(script: PipTestEnvironment, data: TestData) -> result.did_create(initools_folder) 
-@pytest.mark.usefixtures("with_wheel") def test_find_links_no_doctype(script: PipTestEnvironment, data: TestData) -> None: shutil.copy(data.packages / "simple-1.0.tar.gz", script.scratch_path) html = script.scratch_path.joinpath("index.html") @@ -39,7 +35,6 @@ def test_find_links_no_doctype(script: PipTestEnvironment, data: TestData) -> No assert not result.stderr -@pytest.mark.usefixtures("with_wheel") def test_find_links_requirements_file_relative_path( script: PipTestEnvironment, data: TestData ) -> None: @@ -67,7 +62,6 @@ def test_find_links_requirements_file_relative_path( result.did_create(initools_folder) -@pytest.mark.usefixtures("with_wheel") def test_install_from_file_index_hash_link( script: PipTestEnvironment, data: TestData ) -> None: @@ -80,7 +74,6 @@ def test_install_from_file_index_hash_link( result.did_create(dist_info_folder) -@pytest.mark.usefixtures("with_wheel") def test_file_index_url_quoting(script: PipTestEnvironment, data: TestData) -> None: """ Test url quoting of file index url with a space diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index 70f71e22335..83f5b5c2ca5 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -12,7 +12,6 @@ def _install_dict(report: Dict[str, Any]) -> Dict[str, Any]: return {canonicalize_name(i["metadata"]["name"]): i for i in report["install"]} -@pytest.mark.usefixtures("with_wheel") def test_install_report_basic( script: PipTestEnvironment, shared_data: TestData, tmp_path: Path ) -> None: @@ -43,7 +42,6 @@ def test_install_report_basic( ) -@pytest.mark.usefixtures("with_wheel") def test_install_report_dep( script: PipTestEnvironment, shared_data: TestData, tmp_path: Path ) -> None: @@ -66,7 +64,6 @@ def test_install_report_dep( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> None: """Test report for sdist obtained from 
index.""" report_path = tmp_path / "report.json" @@ -96,7 +93,6 @@ def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> Non @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_report_vcs_and_wheel_cache( script: PipTestEnvironment, tmp_path: Path ) -> None: @@ -157,7 +153,6 @@ def test_install_report_vcs_and_wheel_cache( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_report_vcs_editable( script: PipTestEnvironment, tmp_path: Path ) -> None: @@ -183,7 +178,6 @@ def test_install_report_vcs_editable( assert pip_test_package_report["download_info"]["dir_info"]["editable"] is True -@pytest.mark.usefixtures("with_wheel") def test_install_report_to_stdout( script: PipTestEnvironment, shared_data: TestData ) -> None: diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index a52274fec6b..3ad9534810b 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -62,7 +62,6 @@ def _arg_recording_sdist_maker(name: str) -> ArgRecordingSdist: @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_requirements_file(script: PipTestEnvironment) -> None: """ Test installing from a requirements file. @@ -113,7 +112,6 @@ def test_schema_check_in_requirements_file(script: PipTestEnvironment) -> None: ("embedded_rel_path", True), ], ) -@pytest.mark.usefixtures("with_wheel") def test_relative_requirements_file( script: PipTestEnvironment, data: TestData, test_type: str, editable: bool ) -> None: @@ -161,7 +159,6 @@ def test_relative_requirements_file( @pytest.mark.xfail @pytest.mark.network @need_svn -@pytest.mark.usefixtures("with_wheel") def test_multiple_requirements_files(script: PipTestEnvironment, tmpdir: Path) -> None: """ Test installing from multiple nested requirements files. 
@@ -305,7 +302,7 @@ def test_install_local_with_subdirectory(script: PipTestEnvironment) -> None: result.assert_installed("version_subpkg.py", editable=False) -@pytest.mark.usefixtures("enable_user_site", "with_wheel") +@pytest.mark.usefixtures("enable_user_site") def test_wheel_user_with_prefix_in_pydistutils_cfg( script: PipTestEnvironment, data: TestData ) -> None: @@ -488,7 +485,6 @@ def test_constrained_to_url_install_same_url( assert "Building wheel for singlemodule" in result.stdout, str(result) -@pytest.mark.usefixtures("with_wheel") def test_double_install_spurious_hash_mismatch( script: PipTestEnvironment, tmpdir: Path, data: TestData ) -> None: diff --git a/tests/functional/test_install_requested.py b/tests/functional/test_install_requested.py index edc289f43d1..2c5cad9fcc8 100644 --- a/tests/functional/test_install_requested.py +++ b/tests/functional/test_install_requested.py @@ -21,7 +21,6 @@ def _assert_requested_absent( assert requested not in result.files_created -@pytest.mark.usefixtures("with_wheel") def test_install_requested_basic(script: PipTestEnvironment, data: TestData) -> None: result = script.pip( "install", "--no-index", "-f", data.find_links, "require_simple" @@ -31,7 +30,6 @@ def test_install_requested_basic(script: PipTestEnvironment, data: TestData) -> _assert_requested_absent(script, result, "simple", "3.0") -@pytest.mark.usefixtures("with_wheel") def test_install_requested_requirements( script: PipTestEnvironment, data: TestData ) -> None: @@ -48,7 +46,6 @@ def test_install_requested_requirements( _assert_requested_absent(script, result, "simple", "3.0") -@pytest.mark.usefixtures("with_wheel") def test_install_requested_dep_in_requirements( script: PipTestEnvironment, data: TestData ) -> None: @@ -68,7 +65,6 @@ def test_install_requested_dep_in_requirements( _assert_requested_present(script, result, "simple", "2.0") -@pytest.mark.usefixtures("with_wheel") def test_install_requested_reqs_and_constraints( script: PipTestEnvironment, 
data: TestData ) -> None: @@ -89,7 +85,6 @@ def test_install_requested_reqs_and_constraints( _assert_requested_absent(script, result, "simple", "2.0") -@pytest.mark.usefixtures("with_wheel") def test_install_requested_in_reqs_and_constraints( script: PipTestEnvironment, data: TestData ) -> None: @@ -112,7 +107,6 @@ def test_install_requested_in_reqs_and_constraints( _assert_requested_present(script, result, "simple", "2.0") -@pytest.mark.usefixtures("with_wheel") def test_install_requested_from_cli_with_constraint( script: PipTestEnvironment, data: TestData ) -> None: @@ -130,7 +124,6 @@ def test_install_requested_from_cli_with_constraint( _assert_requested_present(script, result, "simple", "2.0") -@pytest.mark.usefixtures("with_wheel") @pytest.mark.network def test_install_requested_from_cli_with_url_constraint( script: PipTestEnvironment, data: TestData diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py index 0da195c051a..fc61d70bc5e 100644 --- a/tests/functional/test_install_upgrade.py +++ b/tests/functional/test_install_upgrade.py @@ -38,7 +38,6 @@ def test_invalid_upgrade_strategy_causes_error(script: PipTestEnvironment) -> No assert "invalid choice" in result.stderr -@pytest.mark.usefixtures("with_wheel") def test_only_if_needed_does_not_upgrade_deps_when_satisfied( script: PipTestEnvironment, resolver_variant: ResolverVariant ) -> None: @@ -66,7 +65,6 @@ def test_only_if_needed_does_not_upgrade_deps_when_satisfied( ), "did not print correct message for not-upgraded requirement" -@pytest.mark.usefixtures("with_wheel") def test_only_if_needed_does_upgrade_deps_when_no_longer_satisfied( script: PipTestEnvironment, ) -> None: @@ -88,7 +86,6 @@ def test_only_if_needed_does_upgrade_deps_when_no_longer_satisfied( assert expected in result.files_deleted, "should have uninstalled simple==1.0" -@pytest.mark.usefixtures("with_wheel") def test_eager_does_upgrade_dependencies_when_currently_satisfied( script: 
PipTestEnvironment, ) -> None: @@ -109,7 +106,6 @@ def test_eager_does_upgrade_dependencies_when_currently_satisfied( ) in result.files_deleted, "should have uninstalled simple==2.0" -@pytest.mark.usefixtures("with_wheel") def test_eager_does_upgrade_dependencies_when_no_longer_satisfied( script: PipTestEnvironment, ) -> None: @@ -135,7 +131,6 @@ def test_eager_does_upgrade_dependencies_when_no_longer_satisfied( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_upgrade_to_specific_version(script: PipTestEnvironment) -> None: """ It does upgrade to specific version requested. @@ -149,7 +144,6 @@ def test_upgrade_to_specific_version(script: PipTestEnvironment) -> None: @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_upgrade_if_requested(script: PipTestEnvironment) -> None: """ And it does upgrade if requested. @@ -312,7 +306,6 @@ def test_uninstall_rollback(script: PipTestEnvironment, data: TestData) -> None: @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_should_not_install_always_from_cache(script: PipTestEnvironment) -> None: """ If there is an old cached package, pip should download the newer version @@ -326,7 +319,6 @@ def test_should_not_install_always_from_cache(script: PipTestEnvironment) -> Non @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_with_ignoreinstalled_requested(script: PipTestEnvironment) -> None: """ Test old conflicting package is completely ignored diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py index bebc7e4200a..9bdadb94203 100644 --- a/tests/functional/test_install_user.py +++ b/tests/functional/test_install_user.py @@ -76,7 +76,6 @@ def test_install_subversion_usersite_editable_with_distribute( ) result.assert_installed("INITools", use_user_site=True) - @pytest.mark.usefixtures("with_wheel") def test_install_from_current_directory_into_usersite( self, script: PipTestEnvironment, data: TestData ) -> None: 
diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index 60b7715a9ca..d7e8c26024f 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -186,7 +186,6 @@ def test_install_editable_from_git_with_https( @pytest.mark.network -@pytest.mark.usefixtures("with_wheel") def test_install_noneditable_git(script: PipTestEnvironment) -> None: """ Test installing from a non-editable git URL with a given tag. @@ -580,7 +579,6 @@ def test_check_submodule_addition(script: PipTestEnvironment) -> None: update_result.did_create(script.venv / "src/version-pkg/testpkg/static/testfile2") -@pytest.mark.usefixtures("with_wheel") def test_install_git_branch_not_cached(script: PipTestEnvironment) -> None: """ Installing git urls with a branch revision does not cause wheel caching. @@ -596,7 +594,6 @@ def test_install_git_branch_not_cached(script: PipTestEnvironment) -> None: assert f"Successfully built {PKG}" in result.stdout, result.stdout -@pytest.mark.usefixtures("with_wheel") def test_install_git_sha_cached(script: PipTestEnvironment) -> None: """ Installing git urls with a sha revision does cause wheel caching. 
diff --git a/tests/functional/test_install_wheel.py b/tests/functional/test_install_wheel.py index 49c2d1d6d7c..4221ae76ae2 100644 --- a/tests/functional/test_install_wheel.py +++ b/tests/functional/test_install_wheel.py @@ -195,7 +195,6 @@ def test_install_from_wheel_with_headers(script: PipTestEnvironment) -> None: assert header_path.read_text() == header_text -@pytest.mark.usefixtures("with_wheel") def test_install_wheel_with_target( script: PipTestEnvironment, shared_data: TestData, tmpdir: Path ) -> None: @@ -216,7 +215,6 @@ def test_install_wheel_with_target( result.did_create(Path("scratch") / "target" / "simpledist") -@pytest.mark.usefixtures("with_wheel") def test_install_wheel_with_target_and_data_files( script: PipTestEnvironment, data: TestData ) -> None: @@ -406,7 +404,7 @@ def test_wheel_record_lines_have_updated_hash_for_scripts( ] -@pytest.mark.usefixtures("enable_user_site", "with_wheel") +@pytest.mark.usefixtures("enable_user_site") def test_install_user_wheel( script: PipTestEnvironment, shared_data: TestData, tmpdir: Path ) -> None: diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index d05fe9dcea5..bd45f82df7f 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -734,7 +734,6 @@ def test_list_include_work_dir_pkg(script: PipTestEnvironment) -> None: assert {"name": "simple", "version": "1.0"} in json_result -@pytest.mark.usefixtures("with_wheel") def test_list_pep610_editable(script: PipTestEnvironment) -> None: """ Test that a package installed with a direct_url.json with editable=true diff --git a/tests/functional/test_pep660.py b/tests/functional/test_pep660.py index 874f7203610..8418b26894c 100644 --- a/tests/functional/test_pep660.py +++ b/tests/functional/test_pep660.py @@ -2,7 +2,6 @@ from pathlib import Path from typing import Any, Dict -import pytest import tomli_w from tests.lib import PipTestEnvironment @@ -94,7 +93,6 @@ def _assert_hook_not_called(project_dir: Path, hook: str) 
-> None: assert f":{hook} called" not in log, f"{hook} should not have been called" -@pytest.mark.usefixtures("with_wheel") def test_install_pep517_basic(tmpdir: Path, script: PipTestEnvironment) -> None: """ Check that the test harness we have in this file is sane. @@ -110,7 +108,6 @@ def test_install_pep517_basic(tmpdir: Path, script: PipTestEnvironment) -> None: _assert_hook_called(project_dir, "build_wheel") -@pytest.mark.usefixtures("with_wheel") def test_install_pep660_basic(tmpdir: Path, script: PipTestEnvironment) -> None: """ Test with backend that supports build_editable. @@ -131,7 +128,6 @@ def test_install_pep660_basic(tmpdir: Path, script: PipTestEnvironment) -> None: ), "a .egg-link file should not have been created" -@pytest.mark.usefixtures("with_wheel") def test_install_no_pep660_setup_py_fallback( tmpdir: Path, script: PipTestEnvironment ) -> None: @@ -156,7 +152,6 @@ def test_install_no_pep660_setup_py_fallback( ), "a .egg-link file should have been created" -@pytest.mark.usefixtures("with_wheel") def test_install_no_pep660_setup_cfg_fallback( tmpdir: Path, script: PipTestEnvironment ) -> None: @@ -182,7 +177,6 @@ def test_install_no_pep660_setup_cfg_fallback( ), ".egg-link file should have been created" -@pytest.mark.usefixtures("with_wheel") def test_wheel_editable_pep660_basic(tmpdir: Path, script: PipTestEnvironment) -> None: """ Test 'pip wheel' of an editable pep 660 project. 
@@ -206,7 +200,6 @@ def test_wheel_editable_pep660_basic(tmpdir: Path, script: PipTestEnvironment) - assert len(os.listdir(str(wheel_dir))) == 1, "a wheel should have been created" -@pytest.mark.usefixtures("with_wheel") def test_download_editable_pep660_basic( tmpdir: Path, script: PipTestEnvironment ) -> None: diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py index 071b60c70f0..1e3e90e410f 100644 --- a/tests/functional/test_wheel.py +++ b/tests/functional/test_wheel.py @@ -10,8 +10,6 @@ from tests.lib import pyversion # noqa: F401 from tests.lib import PipTestEnvironment, TestData -pytestmark = pytest.mark.usefixtures("with_wheel") - def add_files_to_dist_directory(folder: Path) -> None: (folder / "dist").mkdir(parents=True) From 8f52335ae58d694b459807d83f08613a9ac8eb51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 19 Mar 2023 16:47:03 +0100 Subject: [PATCH 351/730] xfail test with colon in console entry point name This was supported by setup.py install but not by our wheel installation logic. --- tests/functional/test_uninstall.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index 83af0dd520f..87e7157497c 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -222,7 +222,12 @@ def test_uninstall_overlapping_package( "console_scripts", [ "test_ = distutils_install:test", - "test_:test_ = distutils_install:test_test", + pytest.param( + "test_:test_ = distutils_install:test_test", + marks=pytest.mark.xfail( + reason="colon not supported in wheel entry point name?" 
+ ), + ), ], ) def test_uninstall_entry_point_colon_in_name( From 6adb7af0aa4a80d283f1bc9ea6805d5718e13fff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 12 Mar 2023 17:38:31 +0100 Subject: [PATCH 352/730] Deprecate --build-option and --global-option --- news/11859.removal.rst | 2 ++ src/pip/_internal/req/req_install.py | 7 ++++++- tests/functional/test_install.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 news/11859.removal.rst diff --git a/news/11859.removal.rst b/news/11859.removal.rst new file mode 100644 index 00000000000..b29cedd7557 --- /dev/null +++ b/news/11859.removal.rst @@ -0,0 +1,2 @@ +Deprecate ``--build-option`` and ``--global-option``. Users are invited to switch to +``--config-settings``. diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 9807f690f37..99af605dea0 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -894,9 +894,14 @@ def check_legacy_setup_py_options( has_build_options = _has_option(options, reqs, "build_options") has_global_options = _has_option(options, reqs, "global_options") if has_build_options or has_global_options: + deprecated( + reason="--build-option and --global-option are deprecated.", + issue=11859, + replacement="to use --config-settings", + gone_in="23.3", + ) logger.warning( "Implying --no-binary=:all: due to the presence of " "--build-option / --global-option. 
" - "Consider using --config-settings for more flexibility.", ) options.format_control.disallow_binaries() diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 5d2f78c25ec..f3a12065a18 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -843,7 +843,7 @@ def test_install_global_option(script: PipTestEnvironment) -> None: assert "INITools==0.1\n" in result.stdout assert not result.files_created assert "Implying --no-binary=:all:" in result.stderr - assert "Consider using --config-settings" in result.stderr + assert "A possible replacement is to use --config-settings" in result.stderr def test_install_with_hacked_egg_info( From e4d291c5a7694760f7ef818d631f09add07c8ad5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 27 Mar 2023 13:52:23 +0200 Subject: [PATCH 353/730] Combine setuptools and wheel detection in one step It would be annoying if you see an error about setuptools, install it, and only be greeted by another error telling you to install wheel. So we combine the two into one. --- src/pip/_internal/cli/cmdoptions.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 3d78013a9dc..c27ba1c6a9a 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -785,13 +785,13 @@ def _handle_no_use_pep517( # If user doesn't wish to use pep517, we check if setuptools and wheel are installed # and raise error if it is not. - for package in ("setuptools", "wheel"): - if not importlib.util.find_spec(package): - msg = ( - f"It is not possible to use --no-use-pep517 " - f"without {package} installed." 
- ) - raise_option_error(parser, option=option, msg=msg) + packages = ("setuptools", "wheel") + if not all(importlib.util.find_spec(package) for package in packages): + msg = ( + f"It is not possible to use --no-use-pep517 " + f"without {' and '.join(packages)} installed." + ) + raise_option_error(parser, option=option, msg=msg) # Otherwise, --no-use-pep517 was passed via the command-line. parser.values.use_pep517 = False From 1d9d56ede702f179c12fdd0e199db39f7bd431d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 19 Mar 2023 15:11:03 +0100 Subject: [PATCH 354/730] Refactor handling of per requirement options Move the conversion from options to function arguments up the call chain. --- src/pip/_internal/req/constructors.py | 29 +++++++++++++------ .../resolution/resolvelib/candidates.py | 18 ++++-------- 2 files changed, 26 insertions(+), 21 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 854b1b058d8..de3136fb642 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -11,7 +11,7 @@ import logging import os import re -from typing import Any, Dict, Optional, Set, Tuple, Union +from typing import Dict, List, Optional, Set, Tuple, Union from pip._vendor.packaging.markers import Marker from pip._vendor.packaging.requirements import InvalidRequirement, Requirement @@ -201,9 +201,11 @@ def parse_req_from_editable(editable_req: str) -> RequirementParts: def install_req_from_editable( editable_req: str, comes_from: Optional[Union[InstallRequirement, str]] = None, + *, use_pep517: Optional[bool] = None, isolated: bool = False, - options: Optional[Dict[str, Any]] = None, + global_options: Optional[List[str]] = None, + hash_options: Optional[Dict[str, List[str]]] = None, constraint: bool = False, user_supplied: bool = False, permit_editable_wheels: bool = False, @@ -222,8 +224,8 @@ def install_req_from_editable( constraint=constraint, 
use_pep517=use_pep517, isolated=isolated, - global_options=options.get("global_options", []) if options else [], - hash_options=options.get("hashes", {}) if options else {}, + global_options=global_options, + hash_options=hash_options, config_settings=config_settings, extras=parts.extras, ) @@ -375,9 +377,11 @@ def _parse_req_string(req_as_string: str) -> Requirement: def install_req_from_line( name: str, comes_from: Optional[Union[str, InstallRequirement]] = None, + *, use_pep517: Optional[bool] = None, isolated: bool = False, - options: Optional[Dict[str, Any]] = None, + global_options: Optional[List[str]] = None, + hash_options: Optional[Dict[str, List[str]]] = None, constraint: bool = False, line_source: Optional[str] = None, user_supplied: bool = False, @@ -398,8 +402,8 @@ def install_req_from_line( markers=parts.markers, use_pep517=use_pep517, isolated=isolated, - global_options=options.get("global_options", []) if options else [], - hash_options=options.get("hashes", {}) if options else {}, + global_options=global_options, + hash_options=hash_options, config_settings=config_settings, constraint=constraint, extras=parts.extras, @@ -471,11 +475,18 @@ def install_req_from_parsed_requirement( comes_from=parsed_req.comes_from, use_pep517=use_pep517, isolated=isolated, - options=parsed_req.options, + global_options=( + parsed_req.options.get("global_options", []) + if parsed_req.options + else [] + ), + hash_options=( + parsed_req.options.get("hashes", {}) if parsed_req.options else {} + ), constraint=parsed_req.constraint, line_source=parsed_req.line_source, user_supplied=user_supplied, - config_settings=config_settings, + config_settings=config_settings, # TODO get this from parsed_req.options? 
) return req diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 7f09efc1539..fe83a61231f 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -65,10 +65,8 @@ def make_install_req_from_link( use_pep517=template.use_pep517, isolated=template.isolated, constraint=template.constraint, - options=dict( - global_options=template.global_options, - hashes=template.hash_options, - ), + global_options=template.global_options, + hash_options=template.hash_options, config_settings=template.config_settings, ) ireq.original_link = template.original_link @@ -88,10 +86,8 @@ def make_install_req_from_editable( isolated=template.isolated, constraint=template.constraint, permit_editable_wheels=template.permit_editable_wheels, - options=dict( - global_options=template.global_options, - hashes=template.hash_options, - ), + global_options=template.global_options, + hash_options=template.hash_options, config_settings=template.config_settings, ) @@ -112,10 +108,8 @@ def _make_install_req_from_dist( use_pep517=template.use_pep517, isolated=template.isolated, constraint=template.constraint, - options=dict( - global_options=template.global_options, - hashes=template.hash_options, - ), + global_options=template.global_options, + hash_options=template.hash_options, config_settings=template.config_settings, ) ireq.satisfied_by = dist From efe9d4b762ff4af670bbd9ca4abb780216ba8808 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 19 Mar 2023 15:13:54 +0100 Subject: [PATCH 355/730] Remove unused argument --- src/pip/_internal/req/constructors.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index de3136fb642..8f7dc507c7d 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -456,7 +456,6 @@ def 
install_req_from_parsed_requirement( isolated: bool = False, use_pep517: Optional[bool] = None, user_supplied: bool = False, - config_settings: Optional[Dict[str, str]] = None, ) -> InstallRequirement: if parsed_req.is_editable: req = install_req_from_editable( @@ -466,7 +465,6 @@ def install_req_from_parsed_requirement( constraint=parsed_req.constraint, isolated=isolated, user_supplied=user_supplied, - config_settings=config_settings, ) else: @@ -486,7 +484,6 @@ def install_req_from_parsed_requirement( constraint=parsed_req.constraint, line_source=parsed_req.line_source, user_supplied=user_supplied, - config_settings=config_settings, # TODO get this from parsed_req.options? ) return req From 5ea358122af44d53c81ee683e381a9943619df50 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 19 Mar 2023 15:23:56 +0100 Subject: [PATCH 356/730] Use more kwargs for install_req_from_line For better readability --- src/pip/_internal/cli/req_command.py | 2 +- tests/unit/test_finder.py | 22 +++++++++++----------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 048b9c99e41..bb33403195b 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -411,7 +411,7 @@ def get_requirements( for req in args: req_to_add = install_req_from_line( req, - None, + comes_from=None, isolated=options.isolated_mode, use_pep517=options.use_pep517, user_supplied=True, diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py index 366b7eeb4d1..3404d1498e3 100644 --- a/tests/unit/test_finder.py +++ b/tests/unit/test_finder.py @@ -63,7 +63,7 @@ def test_duplicates_sort_ok(data: TestData) -> None: def test_finder_detects_latest_find_links(data: TestData) -> None: """Test PackageFinder detects latest using find-links""" - req = install_req_from_line("simple", None) + req = install_req_from_line("simple") finder = 
make_test_finder(find_links=[data.find_links]) found = finder.find_requirement(req, False) assert found is not None @@ -72,7 +72,7 @@ def test_finder_detects_latest_find_links(data: TestData) -> None: def test_incorrect_case_file_index(data: TestData) -> None: """Test PackageFinder detects latest using wrong case""" - req = install_req_from_line("dinner", None) + req = install_req_from_line("dinner") finder = make_test_finder(index_urls=[data.find_links3]) found = finder.find_requirement(req, False) assert found is not None @@ -82,7 +82,7 @@ def test_incorrect_case_file_index(data: TestData) -> None: @pytest.mark.network def test_finder_detects_latest_already_satisfied_find_links(data: TestData) -> None: """Test PackageFinder detects latest already satisfied using find-links""" - req = install_req_from_line("simple", None) + req = install_req_from_line("simple") # the latest simple in local pkgs is 3.0 latest_version = "3.0" satisfied_by = Mock( @@ -99,7 +99,7 @@ def test_finder_detects_latest_already_satisfied_find_links(data: TestData) -> N @pytest.mark.network def test_finder_detects_latest_already_satisfied_pypi_links() -> None: """Test PackageFinder detects latest already satisfied using pypi links""" - req = install_req_from_line("initools", None) + req = install_req_from_line("initools") # the latest initools on PyPI is 0.3.1 latest_version = "0.3.1" satisfied_by = Mock( @@ -180,7 +180,7 @@ def test_existing_over_wheel_priority(self, data: TestData) -> None: Test existing install has priority over wheels. 
`test_link_sorting` also covers this at a lower level """ - req = install_req_from_line("priority", None) + req = install_req_from_line("priority") latest_version = "1.0" satisfied_by = Mock( location="/path", @@ -309,7 +309,7 @@ def test_build_tag_is_less_important_than_other_tags(self) -> None: def test_finder_priority_file_over_page(data: TestData) -> None: """Test PackageFinder prefers file links over equivalent page links""" - req = install_req_from_line("gmpy==1.15", None) + req = install_req_from_line("gmpy==1.15") finder = make_test_finder( find_links=[data.find_links], index_urls=["http://pypi.org/simple/"], @@ -328,7 +328,7 @@ def test_finder_priority_file_over_page(data: TestData) -> None: def test_finder_priority_nonegg_over_eggfragments() -> None: """Test PackageFinder prefers non-egg links over "#egg=" links""" - req = install_req_from_line("bar==1.0", None) + req = install_req_from_line("bar==1.0") links = ["http://foo/bar.py#egg=bar-1.0", "http://foo/bar-1.0.tar.gz"] finder = make_test_finder(links) @@ -358,7 +358,7 @@ def test_finder_only_installs_stable_releases(data: TestData) -> None: Test PackageFinder only accepts stable versioned releases by default. """ - req = install_req_from_line("bar", None) + req = install_req_from_line("bar") # using a local index (that has pre & dev releases) finder = make_test_finder(index_urls=[data.index_url("pre")]) @@ -404,7 +404,7 @@ def test_finder_installs_pre_releases(data: TestData) -> None: Test PackageFinder finds pre-releases if asked to. """ - req = install_req_from_line("bar", None) + req = install_req_from_line("bar") # using a local index (that has pre & dev releases) finder = make_test_finder( @@ -436,7 +436,7 @@ def test_finder_installs_dev_releases(data: TestData) -> None: Test PackageFinder finds dev releases if asked to. 
""" - req = install_req_from_line("bar", None) + req = install_req_from_line("bar") # using a local index (that has dev releases) finder = make_test_finder( @@ -452,7 +452,7 @@ def test_finder_installs_pre_releases_with_version_spec() -> None: """ Test PackageFinder only accepts stable versioned releases by default. """ - req = install_req_from_line("bar>=0.0.dev0", None) + req = install_req_from_line("bar>=0.0.dev0") links = ["https://foo/bar-1.0.tar.gz", "https://foo/bar-2.0b1.tar.gz"] finder = make_test_finder(links) From 82f1ff0adbd3e59e9996f2b93d7eac0a4986b76d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 19 Mar 2023 16:23:44 +0100 Subject: [PATCH 357/730] Fix type of config_settings arguments --- src/pip/_internal/req/constructors.py | 6 +++--- src/pip/_internal/req/req_install.py | 2 +- src/pip/_internal/utils/misc.py | 19 +++++++++++-------- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 8f7dc507c7d..37dbd32e7b8 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -209,7 +209,7 @@ def install_req_from_editable( constraint: bool = False, user_supplied: bool = False, permit_editable_wheels: bool = False, - config_settings: Optional[Dict[str, str]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, ) -> InstallRequirement: parts = parse_req_from_editable(editable_req) @@ -385,7 +385,7 @@ def install_req_from_line( constraint: bool = False, line_source: Optional[str] = None, user_supplied: bool = False, - config_settings: Optional[Dict[str, str]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, ) -> InstallRequirement: """Creates an InstallRequirement from a name, which might be a requirement, directory containing 'setup.py', filename, or URL. 
@@ -417,7 +417,7 @@ def install_req_from_req_string( isolated: bool = False, use_pep517: Optional[bool] = None, user_supplied: bool = False, - config_settings: Optional[Dict[str, str]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, ) -> InstallRequirement: try: req = get_requirement(req_string) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 9807f690f37..1966f7e4376 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -85,7 +85,7 @@ def __init__( *, global_options: Optional[List[str]] = None, hash_options: Optional[Dict[str, List[str]]] = None, - config_settings: Optional[Dict[str, str]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, constraint: bool = False, extras: Collection[str] = (), user_supplied: bool = False, diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index baa1ba7eac2..81101b859d0 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -32,6 +32,7 @@ Tuple, Type, TypeVar, + Union, cast, ) @@ -669,7 +670,7 @@ def __init__( def build_wheel( self, wheel_directory: str, - config_settings: Optional[Dict[str, str]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, metadata_directory: Optional[str] = None, ) -> str: cs = self.config_holder.config_settings @@ -678,7 +679,9 @@ def build_wheel( ) def build_sdist( - self, sdist_directory: str, config_settings: Optional[Dict[str, str]] = None + self, + sdist_directory: str, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, ) -> str: cs = self.config_holder.config_settings return super().build_sdist(sdist_directory, config_settings=cs) @@ -686,7 +689,7 @@ def build_sdist( def build_editable( self, wheel_directory: str, - config_settings: Optional[Dict[str, str]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, metadata_directory: 
Optional[str] = None, ) -> str: cs = self.config_holder.config_settings @@ -695,19 +698,19 @@ def build_editable( ) def get_requires_for_build_wheel( - self, config_settings: Optional[Dict[str, str]] = None + self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None ) -> List[str]: cs = self.config_holder.config_settings return super().get_requires_for_build_wheel(config_settings=cs) def get_requires_for_build_sdist( - self, config_settings: Optional[Dict[str, str]] = None + self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None ) -> List[str]: cs = self.config_holder.config_settings return super().get_requires_for_build_sdist(config_settings=cs) def get_requires_for_build_editable( - self, config_settings: Optional[Dict[str, str]] = None + self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None ) -> List[str]: cs = self.config_holder.config_settings return super().get_requires_for_build_editable(config_settings=cs) @@ -715,7 +718,7 @@ def get_requires_for_build_editable( def prepare_metadata_for_build_wheel( self, metadata_directory: str, - config_settings: Optional[Dict[str, str]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, _allow_fallback: bool = True, ) -> str: cs = self.config_holder.config_settings @@ -728,7 +731,7 @@ def prepare_metadata_for_build_wheel( def prepare_metadata_for_build_editable( self, metadata_directory: str, - config_settings: Optional[Dict[str, str]] = None, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, _allow_fallback: bool = True, ) -> str: cs = self.config_holder.config_settings From 32d66d2c352a1fb2a6212c74ad797c1af03216de Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 27 Mar 2023 13:07:50 +0100 Subject: [PATCH 358/730] Upgrade resolvelib to 1.0.1 (#11879) Co-authored-by: Pradyun Gedam --- news/resolvelib.vendor.rst | 2 +- src/pip/_vendor/resolvelib/__init__.py | 2 +- src/pip/_vendor/resolvelib/resolvers.py | 53 
++++++++++++++++++------- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 42 insertions(+), 17 deletions(-) diff --git a/news/resolvelib.vendor.rst b/news/resolvelib.vendor.rst index c8b5c928d19..ad55516edea 100644 --- a/news/resolvelib.vendor.rst +++ b/news/resolvelib.vendor.rst @@ -1 +1 @@ -Upgrade resolvelib to 0.9.0 +Upgrade resolvelib to 1.0.1 diff --git a/src/pip/_vendor/resolvelib/__init__.py b/src/pip/_vendor/resolvelib/__init__.py index fa6995e32aa..d92acc7bedf 100644 --- a/src/pip/_vendor/resolvelib/__init__.py +++ b/src/pip/_vendor/resolvelib/__init__.py @@ -11,7 +11,7 @@ "ResolutionTooDeep", ] -__version__ = "0.9.0" +__version__ = "1.0.1" from .providers import AbstractProvider, AbstractResolver diff --git a/src/pip/_vendor/resolvelib/resolvers.py b/src/pip/_vendor/resolvelib/resolvers.py index 49e30c7f5c4..2c3d0e306f9 100644 --- a/src/pip/_vendor/resolvelib/resolvers.py +++ b/src/pip/_vendor/resolvelib/resolvers.py @@ -1,4 +1,5 @@ import collections +import itertools import operator from .providers import AbstractResolver @@ -191,8 +192,8 @@ def _remove_information_from_criteria(self, criteria, parents): information for information in criterion.information if ( - information[1] is None - or self._p.identify(information[1]) not in parents + information.parent is None + or self._p.identify(information.parent) not in parents ) ], criterion.incompatibilities, @@ -266,8 +267,8 @@ def _attempt_to_pin_criterion(self, name): # end, signal for backtracking. return causes - def _backtrack(self): - """Perform backtracking. + def _backjump(self, causes): + """Perform backjumping. When we enter here, the stack is like this:: @@ -283,22 +284,46 @@ def _backtrack(self): Each iteration of the loop will: - 1. Discard Z. - 2. Discard Y but remember its incompatibility information gathered + 1. Identify Z. The incompatibility is not always caused by the latest + state. 
For example, given three requirements A, B and C, with + dependencies A1, B1 and C1, where A1 and B1 are incompatible: the + last state might be related to C, so we want to discard the + previous state. + 2. Discard Z. + 3. Discard Y but remember its incompatibility information gathered previously, and the failure we're dealing with right now. - 3. Push a new state Y' based on X, and apply the incompatibility + 4. Push a new state Y' based on X, and apply the incompatibility information from Y to Y'. - 4a. If this causes Y' to conflict, we need to backtrack again. Make Y' + 5a. If this causes Y' to conflict, we need to backtrack again. Make Y' the new Z and go back to step 2. - 4b. If the incompatibilities apply cleanly, end backtracking. + 5b. If the incompatibilities apply cleanly, end backtracking. """ + incompatible_reqs = itertools.chain( + (c.parent for c in causes if c.parent is not None), + (c.requirement for c in causes), + ) + incompatible_deps = {self._p.identify(r) for r in incompatible_reqs} while len(self._states) >= 3: # Remove the state that triggered backtracking. del self._states[-1] - # Retrieve the last candidate pin and known incompatibilities. - broken_state = self._states.pop() - name, candidate = broken_state.mapping.popitem() + # Ensure to backtrack to a state that caused the incompatibility + incompatible_state = False + while not incompatible_state: + # Retrieve the last candidate pin and known incompatibilities. 
+ try: + broken_state = self._states.pop() + name, candidate = broken_state.mapping.popitem() + except (IndexError, KeyError): + raise ResolutionImpossible(causes) + current_dependencies = { + self._p.identify(d) + for d in self._p.get_dependencies(candidate) + } + incompatible_state = not current_dependencies.isdisjoint( + incompatible_deps + ) + incompatibilities_from_broken = [ (k, list(v.incompatibilities)) for k, v in broken_state.criteria.items() @@ -403,10 +428,10 @@ def resolve(self, requirements, max_rounds): if failure_causes: causes = [i for c in failure_causes for i in c.information] - # Backtrack if pinning fails. The backtrack process puts us in + # Backjump if pinning fails. The backjump process puts us in # an unpinned state, so we can work on it in the next round. self._r.resolving_conflicts(causes=causes) - success = self._backtrack() + success = self._backjump(causes) self.state.backtrack_causes[:] = causes # Dead ends everywhere. Give up. diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 8f11ce95353..5519479bf52 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -15,7 +15,7 @@ requests==2.28.2 rich==12.6.0 pygments==2.13.0 typing_extensions==4.4.0 -resolvelib==0.9.0 +resolvelib==1.0.1 setuptools==65.6.3 six==1.16.0 tenacity==8.1.0 From bc4e542abd42b510af756bf8b075e967b6aaad16 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 27 Mar 2023 16:07:21 +0300 Subject: [PATCH 359/730] test merge_config_settings --- tests/unit/test_utils_misc.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 tests/unit/test_utils_misc.py diff --git a/tests/unit/test_utils_misc.py b/tests/unit/test_utils_misc.py new file mode 100644 index 00000000000..a00d5461f6d --- /dev/null +++ b/tests/unit/test_utils_misc.py @@ -0,0 +1,25 @@ +from typing import Dict, List, Union + +from pip._internal.utils.misc import merge_config_settings + + +def 
test_merge_config_settings() -> None: + reqs: Dict[str, Union[str, List[str]]] = { + "foo": "bar", + "bar": "foo", + "foobar": ["bar"], + "baz": ["foo"], + } + cli: Dict[str, Union[str, List[str]]] = { + "foo": ["baz"], + "bar": "bar", + "foobar": ["baz"], + "baz": "bar", + } + expected = { + "foo": ["bar", "baz"], + "bar": ["foo", "bar"], + "foobar": ["bar", "baz"], + "baz": ["foo", "bar"], + } + assert merge_config_settings(reqs, cli) == expected From baeb627f3f06e383fb11d6c5ef1c4785db3534ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 22 Mar 2023 10:27:10 +0100 Subject: [PATCH 360/730] docs: clarify installation report note Update a note to reflect the fact that the report format is now stable. --- docs/html/reference/installation-report.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md index cc2e23b2a20..5823205f977 100644 --- a/docs/html/reference/installation-report.md +++ b/docs/html/reference/installation-report.md @@ -17,10 +17,9 @@ When considering use cases, please bear in mind that other use cases), this format is *not* meant to be a lock file format as such; - there is no plan for pip to accept an installation report as input for the `install`, `download` or `wheel` commands; -- the `--report` option and this format is intended to become a supported pip feature - (when the format is stabilized to version 1); -- it is however *not* a PyPA interoperability standard and as such its evolution will be - governed by the pip processes and not the PyPA standardization processes. +- while the `--report` option and this format is a supported pip feature, + it is *not* a PyPA interoperability standard and as such its evolution is governed by + the pip processes and not the PyPA standardization processes. 
``` ## Specification From cdec6b27ea2304671e53536ed04cbe75b6c94c78 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Mon, 27 Mar 2023 17:36:43 +0300 Subject: [PATCH 361/730] fix typing --- src/pip/_internal/cli/req_command.py | 3 ++- src/pip/_internal/req/constructors.py | 3 +++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index ea47935bb4e..42888a1228b 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -10,7 +10,7 @@ import sys from functools import partial from optparse import Values -from typing import TYPE_CHECKING, Any, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions @@ -441,6 +441,7 @@ def get_requirements( else None ) cli_config_settings = getattr(options, "config_settings", None) + config_settings: Optional[Dict[str, Union[str, List[str]]]] if req_config_settings and cli_config_settings: config_settings = merge_config_settings( req_config_settings, cli_config_settings diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 37dbd32e7b8..9dc41c24bc6 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -456,6 +456,7 @@ def install_req_from_parsed_requirement( isolated: bool = False, use_pep517: Optional[bool] = None, user_supplied: bool = False, + config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, ) -> InstallRequirement: if parsed_req.is_editable: req = install_req_from_editable( @@ -465,6 +466,7 @@ def install_req_from_parsed_requirement( constraint=parsed_req.constraint, isolated=isolated, user_supplied=user_supplied, + config_settings=config_settings, ) else: @@ -484,6 +486,7 @@ def install_req_from_parsed_requirement( constraint=parsed_req.constraint, 
line_source=parsed_req.line_source, user_supplied=user_supplied, + config_settings=config_settings, ) return req From 20d270d3e27c3db65c4b5371b82e1022b792adff Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 27 Mar 2023 16:21:46 +0100 Subject: [PATCH 362/730] Tweak the Chronographer configuration (#11906) --- .github/chronographer.yml | 28 ++++------------------------ 1 file changed, 4 insertions(+), 24 deletions(-) diff --git a/.github/chronographer.yml b/.github/chronographer.yml index b42883f8f4a..c78eb4a2b7d 100644 --- a/.github/chronographer.yml +++ b/.github/chronographer.yml @@ -1,30 +1,10 @@ ---- - +branch-protection-check-name: Changelog entry action-hints: - # check-title-prefix: chng # default: `{{ branch-protection-check-name }}: ` - external-docs-url: https://pip.pypa.io/how-to-changelog + check-title-prefix: "Chronographer: " + external-docs-url: https://pip.pypa.io/dev/news-entry-failure inline-markdown: > - Check out https://pip.pypa.io/how-to-changelog - -branch-protection-check-name: Timeline protection - + See https://pip.pypa.io/dev/news-entry-failure for details. enforce-name: - # suffix: .md suffix: .rst - -exclude: - bots: - - dependabot-preview - - dependabot - - patchback - humans: - - pyup-bot - labels: skip-changelog: skip news - -paths: # relative modified file paths that do or don't need changelog mention - exclude: [] - include: [] - -... 
From 5c61b2a52c3bd74250d1ecc6a2a7b13c20e7286f Mon Sep 17 00:00:00 2001 From: Nikhil Ladha Date: Mon, 27 Mar 2023 20:53:02 +0530 Subject: [PATCH 363/730] Configure sphinx-copybutton to strip prompts (#11702) --- docs/html/conf.py | 6 ++++++ news/11702.trivial.rst | 2 ++ 2 files changed, 8 insertions(+) create mode 100644 news/11702.trivial.rst diff --git a/docs/html/conf.py b/docs/html/conf.py index aae1364b87a..683ea7b87d8 100644 --- a/docs/html/conf.py +++ b/docs/html/conf.py @@ -131,3 +131,9 @@ def to_document_name(path: str, base_dir: str) -> str: man_pages = determine_man_pages() + +# -- Options for sphinx_copybutton ---------------------------------------------------- + +copybutton_prompt_text = r"\$ | C\:\> " +copybutton_prompt_is_regexp = True +copybutton_only_copy_prompt_lines = False diff --git a/news/11702.trivial.rst b/news/11702.trivial.rst new file mode 100644 index 00000000000..d27e33d78ce --- /dev/null +++ b/news/11702.trivial.rst @@ -0,0 +1,2 @@ +Strip command line prompts like "$" and "C:>" from the actual command +being copied using the copybutton. 
From 21a0e0eff50747879b2bea848d566ee26d773656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 27 Mar 2023 18:19:47 +0200 Subject: [PATCH 364/730] Rename newfragment --- ...short-config-settings-option.feature.rst => 11786.feature.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{short-config-settings-option.feature.rst => 11786.feature.rst} (100%) diff --git a/news/short-config-settings-option.feature.rst b/news/11786.feature.rst similarity index 100% rename from news/short-config-settings-option.feature.rst rename to news/11786.feature.rst From 4a8693aa44138ea4da6d4088f6e273a3256ff876 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 27 Mar 2023 19:52:23 +0100 Subject: [PATCH 365/730] Build documentation with Sphinx 6 (#11904) --- docs/html/getting-started.md | 2 +- docs/requirements.txt | 2 +- noxfile.py | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/html/getting-started.md b/docs/html/getting-started.md index 0967b0eb99f..2b3f0bc9310 100644 --- a/docs/html/getting-started.md +++ b/docs/html/getting-started.md @@ -98,5 +98,5 @@ Successfully uninstalled sampleproject ## Next Steps It is recommended to learn about what virtual environments are and how to use -them. This is covered in the ["Installing Packages"](pypug:tutorials/installing-packages) +them. This is covered in the {doc}`Installing Packages ` tutorial on packaging.python.org. 
diff --git a/docs/requirements.txt b/docs/requirements.txt index fa3a7390c15..ef72c8fb722 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,4 @@ -sphinx ~= 4.2, != 4.4.0 +sphinx ~= 6.0 towncrier furo myst_parser diff --git a/noxfile.py b/noxfile.py index 5c4683b7d79..565a5039955 100644 --- a/noxfile.py +++ b/noxfile.py @@ -133,6 +133,7 @@ def get_sphinx_build_command(kind: str) -> List[str]: # fmt: off return [ "sphinx-build", + "--keep-going", "-W", "-c", "docs/html", # see note above "-d", "docs/build/doctrees/" + kind, From 78ab4cf071fcbda8af83d6b03be57c27a7008da7 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 27 Mar 2023 20:03:11 +0100 Subject: [PATCH 366/730] Bump all linters other than mypy (#11901) --- .pre-commit-config.yaml | 14 +++++++------- docs/pip_sphinxext.py | 1 - src/pip/_internal/commands/cache.py | 1 - src/pip/_internal/commands/check.py | 1 - src/pip/_internal/commands/download.py | 1 - src/pip/_internal/commands/wheel.py | 1 - src/pip/_internal/index/sources.py | 1 - src/pip/_internal/models/search_scope.py | 1 - src/pip/_internal/network/session.py | 1 - src/pip/_internal/operations/install/legacy.py | 1 - src/pip/_internal/req/constructors.py | 1 - src/pip/_internal/req/req_file.py | 2 -- src/pip/_internal/req/req_install.py | 1 - src/pip/_internal/resolution/resolvelib/factory.py | 1 - .../resolution/resolvelib/requirements.py | 1 - tests/conftest.py | 1 - tests/functional/test_build_env.py | 2 -- tests/functional/test_cache.py | 6 ++++-- tests/functional/test_install.py | 1 - tests/unit/test_base_command.py | 1 - tests/unit/test_collector.py | 1 + tests/unit/test_link.py | 2 +- tests/unit/test_network_cache.py | 1 - tests/unit/test_options.py | 1 - tests/unit/test_req_file.py | 1 - tests/unit/test_utils.py | 1 - tests/unit/test_wheel.py | 1 - 27 files changed, 13 insertions(+), 35 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7a37d18c51d..2fc455b9d64 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ exclude: 'src/pip/_vendor/' repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.4.0 hooks: - id: check-builtin-literals - id: check-added-large-files @@ -17,18 +17,18 @@ repos: exclude: .patch - repo: https://github.com/psf/black - rev: 22.6.0 + rev: 23.1.0 hooks: - id: black - repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 + rev: 6.0.0 hooks: - id: flake8 additional_dependencies: [ - 'flake8-bugbear==22.10.27', - 'flake8-logging-format==0.9.0', - 'flake8-implicit-str-concat==0.3.0', + 'flake8-bugbear', + 'flake8-logging-format', + 'flake8-implicit-str-concat', ] exclude: tests/data @@ -56,7 +56,7 @@ repos: ] - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.9.0 + rev: v1.10.0 hooks: - id: python-no-log-warn - id: python-no-eval diff --git a/docs/pip_sphinxext.py b/docs/pip_sphinxext.py index f398b7d0973..2e559702294 100644 --- a/docs/pip_sphinxext.py +++ b/docs/pip_sphinxext.py @@ -254,7 +254,6 @@ def run(self) -> List[nodes.Node]: lines = [] # Create a tab for each OS for os, variant in os_variants.items(): - # Unpack the values prompt = variant["prompt"] highlighter = variant["highlighter"] diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py index c5f03302d6b..e96d2b4924c 100644 --- a/src/pip/_internal/commands/cache.py +++ b/src/pip/_internal/commands/cache.py @@ -37,7 +37,6 @@ class CacheCommand(Command): """ def add_options(self) -> None: - self.cmd_opts.add_option( "--format", action="store", diff --git a/src/pip/_internal/commands/check.py b/src/pip/_internal/commands/check.py index 3864220b2b4..584df9f55c5 100644 --- a/src/pip/_internal/commands/check.py +++ b/src/pip/_internal/commands/check.py @@ -20,7 +20,6 @@ class CheckCommand(Command): %prog [options]""" def run(self, options: Values, args: List[str]) -> int: - package_set, parsing_probs = create_package_set_from_installed() missing, conflicting = 
check_package_set(package_set) diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index 90388d11857..36e947c8c05 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -76,7 +76,6 @@ def add_options(self) -> None: @with_cleanup def run(self, options: Values, args: List[str]) -> int: - options.ignore_installed = True # editable doesn't really make sense for `pip download`, but the bowels # of the RequirementSet code require that property. diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index a8483559c19..ef3c487ea94 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -43,7 +43,6 @@ class WheelCommand(RequirementCommand): %prog [options] ...""" def add_options(self) -> None: - self.cmd_opts.add_option( "-w", "--wheel-dir", diff --git a/src/pip/_internal/index/sources.py b/src/pip/_internal/index/sources.py index eec3f12f7e3..cd9cb8d40f1 100644 --- a/src/pip/_internal/index/sources.py +++ b/src/pip/_internal/index/sources.py @@ -171,7 +171,6 @@ def build_source( expand_dir: bool, cache_link_parsing: bool, ) -> Tuple[Optional[str], Optional[LinkSource]]: - path: Optional[str] = None url: Optional[str] = None if os.path.exists(location): # Is a local path. 
diff --git a/src/pip/_internal/models/search_scope.py b/src/pip/_internal/models/search_scope.py index a64af73899d..fe61e8116b7 100644 --- a/src/pip/_internal/models/search_scope.py +++ b/src/pip/_internal/models/search_scope.py @@ -79,7 +79,6 @@ def get_formatted_locations(self) -> str: redacted_index_urls = [] if self.index_urls and self.index_urls != [PyPI.simple_url]: for url in self.index_urls: - redacted_index_url = redact_auth_from_url(url) # Parse the URL diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py index e512ac78464..6c40ade1595 100644 --- a/src/pip/_internal/network/session.py +++ b/src/pip/_internal/network/session.py @@ -316,7 +316,6 @@ def cert_verify( class PipSession(requests.Session): - timeout: Optional[int] = None def __init__( diff --git a/src/pip/_internal/operations/install/legacy.py b/src/pip/_internal/operations/install/legacy.py index 0b108d0ca71..38bd542764e 100644 --- a/src/pip/_internal/operations/install/legacy.py +++ b/src/pip/_internal/operations/install/legacy.py @@ -69,7 +69,6 @@ def install( unpacked_source_directory: str, req_description: str, ) -> bool: - header_dir = scheme.headers with TempDirectory(kind="record") as temp_dir: diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 37dbd32e7b8..dc82a7e4f91 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -211,7 +211,6 @@ def install_req_from_editable( permit_editable_wheels: bool = False, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, ) -> InstallRequirement: - parts = parse_req_from_editable(editable_req) return InstallRequirement( diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index f8f07b0cd96..aced95e64c4 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -165,7 +165,6 @@ def handle_requirement_line( line: ParsedLine, options: 
Optional[optparse.Values] = None, ) -> ParsedRequirement: - # preserve for the nested code path line_comes_from = "{} {} (line {})".format( "-c" if line.constraint else "-r", @@ -210,7 +209,6 @@ def handle_option_line( options: Optional[optparse.Values] = None, session: Optional[PipSession] = None, ) -> None: - if options: # percolate options upward if opts.require_hashes: diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index f0b7c5bcb14..c217542b9c7 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -850,7 +850,6 @@ def install( def check_invalid_constraint_type(req: InstallRequirement) -> str: - # Check for unsupported forms problem = "" if not req.name: diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index a4c24b52a1b..0ad4641b1b1 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -632,7 +632,6 @@ def get_installation_error( e: "ResolutionImpossible[Requirement, Candidate]", constraints: Dict[str, Constraint], ) -> InstallationError: - assert e.causes, "Installation error reported with no cause" # If one of the things we can't solve is "we need Python X.Y", diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index f561f1f1e27..06addc0ddce 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -64,7 +64,6 @@ def name(self) -> str: return format_name(self.project_name, self._extras) def format_for_error(self) -> str: - # Convert comma-separated specifiers into "A, B, ..., F and G" # This makes the specifier a bit more "human readable", without # risking a change in meaning. (Hopefully! 
Not all edge cases have diff --git a/tests/conftest.py b/tests/conftest.py index 13011f4fd87..57dd7e68a2b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -432,7 +432,6 @@ def virtualenv_template( wheel_install: Path, coverage_install: Path, ) -> Iterator[VirtualEnvironment]: - venv_type: VirtualEnvironmentType if request.config.getoption("--use-venv"): venv_type = "venv" diff --git a/tests/functional/test_build_env.py b/tests/functional/test_build_env.py index 93a6b930f66..22a71cd3200 100644 --- a/tests/functional/test_build_env.py +++ b/tests/functional/test_build_env.py @@ -106,7 +106,6 @@ def test_build_env_allow_only_one_install(script: PipTestEnvironment) -> None: def test_build_env_requirements_check(script: PipTestEnvironment) -> None: - create_basic_wheel_for_package(script, "foo", "2.0") create_basic_wheel_for_package(script, "bar", "1.0") create_basic_wheel_for_package(script, "bar", "3.0") @@ -206,7 +205,6 @@ def test_build_env_overlay_prefix_has_priority(script: PipTestEnvironment) -> No @pytest.mark.usefixtures("enable_user_site") def test_build_env_isolation(script: PipTestEnvironment) -> None: - # Create dummy `pkg` wheel. 
pkg_whl = create_basic_wheel_for_package(script, "pkg", "1.0") diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py index 7d20f5e3100..788abdd2be5 100644 --- a/tests/functional/test_cache.py +++ b/tests/functional/test_cache.py @@ -107,7 +107,7 @@ def list_matches_wheel(wheel_name: str, result: TestPipResult) -> bool: `- foo-1.2.3-py3-none-any.whl `.""" lines = result.stdout.splitlines() expected = f" - {wheel_name}-py3-none-any.whl " - return any(map(lambda l: l.startswith(expected), lines)) + return any(map(lambda line: line.startswith(expected), lines)) def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool: @@ -121,7 +121,9 @@ def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool: expected = f"{wheel_name}-py3-none-any.whl" return any( map( - lambda l: os.path.basename(l).startswith(expected) and os.path.exists(l), + lambda line: ( + os.path.basename(line).startswith(expected) and os.path.exists(line) + ), lines, ) ) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 97ccfa9111c..16ef572b5b1 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2217,7 +2217,6 @@ def test_install_skip_work_dir_pkg(script: PipTestEnvironment, data: TestData) - def test_install_verify_package_name_normalization( script: PipTestEnvironment, package_name: str ) -> None: - """ Test that install of a package again using a name which normalizes to the original package name, is a no-op diff --git a/tests/unit/test_base_command.py b/tests/unit/test_base_command.py index 71a50fca617..daec5fc6c65 100644 --- a/tests/unit/test_base_command.py +++ b/tests/unit/test_base_command.py @@ -22,7 +22,6 @@ def fixed_time() -> Iterator[None]: class FakeCommand(Command): - _name = "fake" def __init__( diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 47307c00e84..26a2ce4b9a6 100644 --- a/tests/unit/test_collector.py +++ 
b/tests/unit/test_collector.py @@ -1014,6 +1014,7 @@ def test_link_collector_create_find_links_expansion( """ Test "~" expansion in --find-links paths. """ + # This is a mock version of expanduser() that expands "~" to the tmpdir. def expand_path(path: str) -> str: if path.startswith("~/"): diff --git a/tests/unit/test_link.py b/tests/unit/test_link.py index df4957d5974..311be588858 100644 --- a/tests/unit/test_link.py +++ b/tests/unit/test_link.py @@ -108,7 +108,7 @@ def test_fragments(self) -> None: ) def test_invalid_egg_fragments(self, fragment: str) -> None: url = f"git+https://example.com/package#egg={fragment}" - with pytest.raises(Exception): + with pytest.raises(ValueError): Link(url) @pytest.mark.parametrize( diff --git a/tests/unit/test_network_cache.py b/tests/unit/test_network_cache.py index 8764b134320..a5519864f4c 100644 --- a/tests/unit/test_network_cache.py +++ b/tests/unit/test_network_cache.py @@ -24,7 +24,6 @@ class TestSafeFileCache: """ def test_cache_roundtrip(self, cache_tmpdir: Path) -> None: - cache = SafeFileCache(os.fspath(cache_tmpdir)) assert cache.get("test key") is None cache.set("test key", b"a test string") diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py index 39396512a97..9e3a0a5d633 100644 --- a/tests/unit/test_options.py +++ b/tests/unit/test_options.py @@ -454,7 +454,6 @@ def test_config_file_true( class TestGeneralOptions(AddFakeCommandMixin): - # the reason to specifically test general options is due to the # extra processing they receive, and the number of bugs we've had diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 30cbcf71c16..be369f1a051 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -613,7 +613,6 @@ def test_args_long_options(self) -> None: class TestOptionVariants: - # this suite is really just testing optparse, but added it anyway def test_variant1( diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 
1daaecbf490..a67a7c1100c 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -53,7 +53,6 @@ class Tests_EgglinkPath: "util.egg_link_path_from_location() tests" def setup_method(self) -> None: - project = "foo" self.mock_dist = Mock(project_name=project) diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py index c5a8f3be4f3..059f382b24c 100644 --- a/tests/unit/test_wheel.py +++ b/tests/unit/test_wheel.py @@ -518,7 +518,6 @@ def test_invalid_entrypoints_fail( class TestMessageAboutScriptsNotOnPATH: - tilde_warning_msg = ( "NOTE: The current PATH contains path(s) starting with `~`, " "which may not be expanded by all applications." From d89475934c2eefbea75f370726f0865e5235834f Mon Sep 17 00:00:00 2001 From: lorddavidiii Date: Mon, 27 Mar 2023 19:03:46 +0000 Subject: [PATCH 367/730] Norm path before compare (#11719) --- news/11719.bugfix.rst | 1 + src/pip/_internal/operations/install/wheel.py | 8 +++++--- tests/unit/test_wheel.py | 6 ++++++ 3 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 news/11719.bugfix.rst diff --git a/news/11719.bugfix.rst b/news/11719.bugfix.rst new file mode 100644 index 00000000000..c2ae8bc1d5e --- /dev/null +++ b/news/11719.bugfix.rst @@ -0,0 +1 @@ +Normalize paths before checking if installed scripts are on PATH. diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py index c79941398a2..a8cd1330f0f 100644 --- a/src/pip/_internal/operations/install/wheel.py +++ b/src/pip/_internal/operations/install/wheel.py @@ -143,16 +143,18 @@ def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]: # We don't want to warn for directories that are on PATH. not_warn_dirs = [ - os.path.normcase(i).rstrip(os.sep) + os.path.normcase(os.path.normpath(i)).rstrip(os.sep) for i in os.environ.get("PATH", "").split(os.pathsep) ] # If an executable sits with sys.executable, we don't warn for it. 
# This covers the case of venv invocations without activating the venv. - not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + not_warn_dirs.append( + os.path.normcase(os.path.normpath(os.path.dirname(sys.executable))) + ) warn_for: Dict[str, Set[str]] = { parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() - if os.path.normcase(parent_dir) not in not_warn_dirs + if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs } if not warn_for: return None diff --git a/tests/unit/test_wheel.py b/tests/unit/test_wheel.py index 059f382b24c..6d6d1a3dc87 100644 --- a/tests/unit/test_wheel.py +++ b/tests/unit/test_wheel.py @@ -588,6 +588,12 @@ def test_multi_script__single_dir_on_PATH(self) -> None: ) assert retval is None + def test_PATH_check_path_normalization(self) -> None: + retval = self._template( + paths=["/a/./b/../b//c/", "/d/e/bin"], scripts=["/a/b/c/foo"] + ) + assert retval is None + def test_single_script__single_dir_on_PATH(self) -> None: retval = self._template(paths=["/a/b", "/c/d/bin"], scripts=["/a/b/foo"]) assert retval is None From def5ba887e717344ab1cf5f05a3886a725d92418 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 27 Mar 2023 21:04:50 +0200 Subject: [PATCH 368/730] Simplify direct_url.json generation (#11875) --- src/pip/_internal/req/req_install.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index c217542b9c7..940dbe02b73 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -44,10 +44,6 @@ from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path from pip._internal.req.req_uninstall import UninstallPathSet from pip._internal.utils.deprecation import LegacyInstallReason, deprecated -from pip._internal.utils.direct_url_helpers import ( - direct_url_for_editable, - direct_url_from_link, -) from 
pip._internal.utils.hashes import Hashes from pip._internal.utils.misc import ( ConfiguredBuildBackendHookCaller, @@ -779,16 +775,6 @@ def install( if self.is_wheel: assert self.local_file_path - direct_url = None - # TODO this can be refactored to direct_url = self.download_info - if self.editable: - direct_url = direct_url_for_editable(self.unpacked_source_directory) - elif self.original_link: - direct_url = direct_url_from_link( - self.original_link, - self.source_dir, - self.original_link_is_in_wheel_cache, - ) install_wheel( self.name, self.local_file_path, @@ -796,7 +782,7 @@ def install( req_description=str(self.req), pycompile=pycompile, warn_script_location=warn_script_location, - direct_url=direct_url, + direct_url=self.download_info if self.original_link else None, requested=self.user_supplied, ) self.install_succeeded = True From 28239f9bf7c4c03dfbf8d12eb5cc3aa6ca3c0b08 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 27 Mar 2023 20:06:11 +0100 Subject: [PATCH 369/730] Drop auto-labeling of PRs that are out of date (#11907) --- .github/workflows/label-merge-conflicts.yml | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 .github/workflows/label-merge-conflicts.yml diff --git a/.github/workflows/label-merge-conflicts.yml b/.github/workflows/label-merge-conflicts.yml deleted file mode 100644 index 1de897ca1c4..00000000000 --- a/.github/workflows/label-merge-conflicts.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Autolabel merge conflicts - -permissions: - issues: write - pull-requests: write - -on: - push: - branches: [main] - -jobs: - label-merge-conflicts: - if: github.repository_owner == 'pypa' - runs-on: ubuntu-latest - steps: - - uses: pradyunsg/auto-label-merge-conflicts@v3 - with: - CONFLICT_LABEL_NAME: "needs rebase or merge" - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 8f0201f67a7e4e2b60bd7e7aec4d5dbad17a867a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 12:42:39 +0100 
Subject: [PATCH 370/730] Remove no-binary disabling the cache of locally built wheels --- news/11453.removal.rst | 2 ++ src/pip/_internal/cli/cmdoptions.py | 2 +- src/pip/_internal/commands/install.py | 25 ++++--------------------- src/pip/_internal/commands/wheel.py | 22 +++------------------- tests/functional/test_install.py | 4 ++-- 5 files changed, 12 insertions(+), 43 deletions(-) create mode 100644 news/11453.removal.rst diff --git a/news/11453.removal.rst b/news/11453.removal.rst new file mode 100644 index 00000000000..91ebfda0438 --- /dev/null +++ b/news/11453.removal.rst @@ -0,0 +1,2 @@ +``--no-binary`` does not disable the cache of locally built wheels anymore. It only +means "don't download wheels". diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index c27ba1c6a9a..34b5144b007 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -1004,7 +1004,7 @@ def check_list_path_option(options: Values) -> None: choices=[ "fast-deps", "truststore", - "no-binary-enable-wheel-cache", + "no-binary-enable-wheel-cache", # now always on ], help="Enable new functionality, that may be backward incompatible.", ) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 4783f807fca..7bc17be5ae5 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -30,10 +30,7 @@ check_legacy_setup_py_options, ) from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.deprecation import ( - LegacyInstallReasonFailedBdistWheel, - deprecated, -) +from pip._internal.utils.deprecation import LegacyInstallReasonFailedBdistWheel from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.logging import getLogger from pip._internal.utils.misc import ( @@ -347,23 +344,9 @@ def run(self, options: Values, args: List[str]) -> int: check_legacy_setup_py_options(options, reqs) if 
"no-binary-enable-wheel-cache" in options.features_enabled: - # TODO: remove format_control from WheelCache when the deprecation cycle - # is over - wheel_cache = WheelCache(options.cache_dir) - else: - if options.format_control.no_binary: - deprecated( - reason=( - "--no-binary currently disables reading from " - "the cache of locally built wheels. In the future " - "--no-binary will not influence the wheel cache." - ), - replacement="to use the --no-cache-dir option", - feature_flag="no-binary-enable-wheel-cache", - issue=11453, - gone_in="23.1", - ) - wheel_cache = WheelCache(options.cache_dir, options.format_control) + logger.warning("no-binary-enable-wheel-cache is now active by default.") + + wheel_cache = WheelCache(options.cache_dir) # Only when installing is it permitted to use PEP 660. # In other circumstances (pip wheel, pip download) we generate diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index ef3c487ea94..d1e619c63bf 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -14,7 +14,6 @@ InstallRequirement, check_legacy_setup_py_options, ) -from pip._internal.utils.deprecation import deprecated from pip._internal.utils.misc import ensure_dir, normalize_path from pip._internal.utils.temp_dir import TempDirectory from pip._internal.wheel_builder import build, should_build_for_wheel_command @@ -106,7 +105,6 @@ def run(self, options: Values, args: List[str]) -> int: session = self.get_default_session(options) finder = self._build_package_finder(options, session) - wheel_cache = WheelCache(options.cache_dir, options.format_control) options.wheel_dir = normalize_path(options.wheel_dir) ensure_dir(options.wheel_dir) @@ -123,23 +121,9 @@ def run(self, options: Values, args: List[str]) -> int: check_legacy_setup_py_options(options, reqs) if "no-binary-enable-wheel-cache" in options.features_enabled: - # TODO: remove format_control from WheelCache when the deprecation cycle - # is 
over - wheel_cache = WheelCache(options.cache_dir) - else: - if options.format_control.no_binary: - deprecated( - reason=( - "--no-binary currently disables reading from " - "the cache of locally built wheels. In the future " - "--no-binary will not influence the wheel cache." - ), - replacement="to use the --no-cache-dir option", - feature_flag="no-binary-enable-wheel-cache", - issue=11453, - gone_in="23.1", - ) - wheel_cache = WheelCache(options.cache_dir, options.format_control) + logger.warning("no-binary-enable-wheel-cache is now active by default.") + + wheel_cache = WheelCache(options.cache_dir) preparer = self.make_requirement_preparer( temp_build_dir=directory, diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 16ef572b5b1..18446d89989 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1656,7 +1656,7 @@ def test_install_no_binary_uses_local_backend( assert os.path.isfile(marker), "Local PEP 517 backend not used" -def test_install_no_binary_disables_cached_wheels( +def test_install_no_binary_uses_cached_wheels( script: PipTestEnvironment, data: TestData ) -> None: # Seed the cache @@ -1673,7 +1673,7 @@ def test_install_no_binary_disables_cached_wheels( ) assert "Successfully installed upper-2.0" in str(res), str(res) # upper is built and not obtained from cache - assert "Building wheel for upper" in str(res), str(res) + assert "Building wheel for upper" not in str(res), str(res) def test_install_editable_with_wrong_egg_name( From 93e6dd718483f248ff964ac797a4b422498813c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 12:48:42 +0100 Subject: [PATCH 371/730] Remove now unused FormatControl in WheelCache --- src/pip/_internal/cache.py | 41 ++++++++++---------------------------- tests/unit/test_cache.py | 11 +++++----- tests/unit/test_req.py | 5 ++--- 3 files changed, 17 insertions(+), 40 deletions(-) diff --git a/src/pip/_internal/cache.py 
b/src/pip/_internal/cache.py index c53b7f023a1..05f0a9acb24 100644 --- a/src/pip/_internal/cache.py +++ b/src/pip/_internal/cache.py @@ -6,14 +6,13 @@ import logging import os from pathlib import Path -from typing import Any, Dict, List, Optional, Set +from typing import Any, Dict, List, Optional from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version from pip._vendor.packaging.utils import canonicalize_name from pip._internal.exceptions import InvalidWheelFilename from pip._internal.models.direct_url import DirectUrl -from pip._internal.models.format_control import FormatControl from pip._internal.models.link import Link from pip._internal.models.wheel import Wheel from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds @@ -33,25 +32,13 @@ def _hash_dict(d: Dict[str, str]) -> str: class Cache: """An abstract class - provides cache directories for data from links - :param cache_dir: The root of the cache. - :param format_control: An object of FormatControl class to limit - binaries being read from the cache. - :param allowed_formats: which formats of files the cache should store. 
- ('binary' and 'source' are the only allowed values) """ - def __init__( - self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str] - ) -> None: + def __init__(self, cache_dir: str) -> None: super().__init__() assert not cache_dir or os.path.isabs(cache_dir) self.cache_dir = cache_dir or None - self.format_control = format_control - self.allowed_formats = allowed_formats - - _valid_formats = {"source", "binary"} - assert self.allowed_formats.union(_valid_formats) == _valid_formats def _get_cache_path_parts(self, link: Link) -> List[str]: """Get parts of part that must be os.path.joined with cache_dir""" @@ -91,10 +78,6 @@ def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]: if can_not_cache: return [] - formats = self.format_control.get_allowed_formats(canonical_package_name) - if not self.allowed_formats.intersection(formats): - return [] - candidates = [] path = self.get_path_for_link(link) if os.path.isdir(path): @@ -121,8 +104,8 @@ def get( class SimpleWheelCache(Cache): """A cache of wheels for future installs.""" - def __init__(self, cache_dir: str, format_control: FormatControl) -> None: - super().__init__(cache_dir, format_control, {"binary"}) + def __init__(self, cache_dir: str) -> None: + super().__init__(cache_dir) def get_path_for_link(self, link: Link) -> str: """Return a directory to store cached wheels for link @@ -191,13 +174,13 @@ def get( class EphemWheelCache(SimpleWheelCache): """A SimpleWheelCache that creates it's own temporary cache directory""" - def __init__(self, format_control: FormatControl) -> None: + def __init__(self) -> None: self._temp_dir = TempDirectory( kind=tempdir_kinds.EPHEM_WHEEL_CACHE, globally_managed=True, ) - super().__init__(self._temp_dir.path, format_control) + super().__init__(self._temp_dir.path) class CacheEntry: @@ -221,14 +204,10 @@ class WheelCache(Cache): when a certain link is not found in the simple wheel cache first. 
""" - def __init__( - self, cache_dir: str, format_control: Optional[FormatControl] = None - ) -> None: - if format_control is None: - format_control = FormatControl() - super().__init__(cache_dir, format_control, {"binary"}) - self._wheel_cache = SimpleWheelCache(cache_dir, format_control) - self._ephem_cache = EphemWheelCache(format_control) + def __init__(self, cache_dir: str) -> None: + super().__init__(cache_dir) + self._wheel_cache = SimpleWheelCache(cache_dir) + self._ephem_cache = EphemWheelCache() def get_path_for_link(self, link: Link) -> str: return self._wheel_cache.get_path_for_link(link) diff --git a/tests/unit/test_cache.py b/tests/unit/test_cache.py index f27daa266c8..d0fee69c39b 100644 --- a/tests/unit/test_cache.py +++ b/tests/unit/test_cache.py @@ -4,13 +4,12 @@ from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version from pip._internal.cache import WheelCache, _hash_dict -from pip._internal.models.format_control import FormatControl from pip._internal.models.link import Link from pip._internal.utils.misc import ensure_dir def test_falsey_path_none() -> None: - wc = WheelCache("", FormatControl()) + wc = WheelCache("") assert wc.cache_dir is None @@ -18,7 +17,7 @@ def test_subdirectory_fragment() -> None: """ Test the subdirectory URL fragment is part of the cache key. """ - wc = WheelCache("/tmp/.foo/", FormatControl()) + wc = WheelCache("/tmp/.foo/") link1 = Link("git+https://g.c/o/r#subdirectory=d1") link2 = Link("git+https://g.c/o/r#subdirectory=d2") assert wc.get_path_for_link(link1) != wc.get_path_for_link(link2) @@ -29,7 +28,7 @@ def test_wheel_name_filter(tmpdir: Path) -> None: Test the wheel cache filters on wheel name when several wheels for different package are stored under the same cache directory. 
""" - wc = WheelCache(os.fspath(tmpdir), FormatControl()) + wc = WheelCache(os.fspath(tmpdir)) link = Link("https://g.c/package.tar.gz") cache_path = wc.get_path_for_link(link) ensure_dir(cache_path) @@ -57,7 +56,7 @@ def test_link_to_cache(tmpdir: Path) -> None: Test that Link.from_json() produces Links with consistent cache locations """ - wc = WheelCache(os.fspath(tmpdir), FormatControl()) + wc = WheelCache(os.fspath(tmpdir)) # Define our expectations for stable cache path. i_name = interpreter_name() i_version = interpreter_version() @@ -95,7 +94,7 @@ def test_link_to_cache(tmpdir: Path) -> None: def test_get_cache_entry(tmpdir: Path) -> None: - wc = WheelCache(os.fspath(tmpdir), FormatControl()) + wc = WheelCache(os.fspath(tmpdir)) persi_link = Link("https://g.c/o/r/persi") persi_path = wc.get_path_for_link(persi_link) ensure_dir(persi_path) diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index c46883dc2d4..a5286c13a23 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -25,7 +25,6 @@ from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata import select_backend from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo -from pip._internal.models.format_control import FormatControl from pip._internal.models.link import Link from pip._internal.network.session import PipSession from pip._internal.operations.build.build_tracker import get_build_tracker @@ -403,7 +402,7 @@ def test_download_info_archive_legacy_cache( """Test download_info hash is not set for an archive with legacy cache entry.""" url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri() finder = make_test_finder() - wheel_cache = WheelCache(str(tmp_path / "cache"), FormatControl()) + wheel_cache = WheelCache(str(tmp_path / "cache")) cache_entry_dir = wheel_cache.get_path_for_link(Link(url)) Path(cache_entry_dir).mkdir(parents=True) wheel.make_wheel(name="simple", 
version="1.0").save_to_dir(cache_entry_dir) @@ -426,7 +425,7 @@ def test_download_info_archive_cache_with_origin( url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri() hash = "sha256=ad977496000576e1b6c41f6449a9897087ce9da6db4f15b603fe8372af4bf3c6" finder = make_test_finder() - wheel_cache = WheelCache(str(tmp_path / "cache"), FormatControl()) + wheel_cache = WheelCache(str(tmp_path / "cache")) cache_entry_dir = wheel_cache.get_path_for_link(Link(url)) Path(cache_entry_dir).mkdir(parents=True) Path(cache_entry_dir).joinpath("origin.json").write_text( From 2617ccd8a3a46fb38bd9794d81cc8b9b6fd07602 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 29 Mar 2023 08:40:02 +0200 Subject: [PATCH 372/730] Centralize warning about always enabled features --- src/pip/_internal/cli/base_command.py | 9 +++++++++ src/pip/_internal/cli/cmdoptions.py | 9 +++++++-- src/pip/_internal/commands/install.py | 3 --- src/pip/_internal/commands/wheel.py | 3 --- 4 files changed, 16 insertions(+), 8 deletions(-) diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 5bd7e67e649..637fba18cfc 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -122,6 +122,15 @@ def _main(self, args: List[str]) -> int: user_log_file=options.log, ) + always_enabled_features = set(options.features_enabled) & set( + cmdoptions.ALWAYS_ENABLED_FEATURES + ) + if always_enabled_features: + logger.warning( + "The following features are always enabled: %s. ", + ", ".join(sorted(always_enabled_features)), + ) + # TODO: Try to get these passing down from the command? # without resorting to os.environ to hold these. # This also affects isolated builds and it should. 
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 34b5144b007..7f72332db56 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -994,6 +994,11 @@ def check_list_path_option(options: Values) -> None: ) +# Features that are now always on. A warning is printed if they are used. +ALWAYS_ENABLED_FEATURES = [ + "no-binary-enable-wheel-cache", # always on since 23.1 +] + use_new_feature: Callable[..., Option] = partial( Option, "--use-feature", @@ -1004,8 +1009,8 @@ def check_list_path_option(options: Values) -> None: choices=[ "fast-deps", "truststore", - "no-binary-enable-wheel-cache", # now always on - ], + ] + + ALWAYS_ENABLED_FEATURES, help="Enable new functionality, that may be backward incompatible.", ) diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 7bc17be5ae5..e9fc7ee3aa3 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -343,9 +343,6 @@ def run(self, options: Values, args: List[str]) -> int: reqs = self.get_requirements(args, options, finder, session) check_legacy_setup_py_options(options, reqs) - if "no-binary-enable-wheel-cache" in options.features_enabled: - logger.warning("no-binary-enable-wheel-cache is now active by default.") - wheel_cache = WheelCache(options.cache_dir) # Only when installing is it permitted to use PEP 660. 
diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index d1e619c63bf..c6a588ff09b 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -120,9 +120,6 @@ def run(self, options: Values, args: List[str]) -> int: reqs = self.get_requirements(args, options, finder, session) check_legacy_setup_py_options(options, reqs) - if "no-binary-enable-wheel-cache" in options.features_enabled: - logger.warning("no-binary-enable-wheel-cache is now active by default.") - wheel_cache = WheelCache(options.cache_dir) preparer = self.make_requirement_preparer( From 10d9cbc601e5cadc45163452b1bc463d8ad2c1f7 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 29 Mar 2023 21:03:36 +0100 Subject: [PATCH 373/730] Reduce resolver rounds by an order of magnitude (#11908) Co-authored-by: Pradyun Gedam --- news/11908.feature.rst | 1 + src/pip/_internal/resolution/resolvelib/resolver.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 news/11908.feature.rst diff --git a/news/11908.feature.rst b/news/11908.feature.rst new file mode 100644 index 00000000000..2b9ec18d98f --- /dev/null +++ b/news/11908.feature.rst @@ -0,0 +1 @@ +Reduce the number of resolver rounds, since backjumping makes the resolver more efficient in finding solutions. This also makes pathological cases fail quicker. 
diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index a605d6c254f..47bbfecce36 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -88,9 +88,9 @@ def resolve( ) try: - try_to_avoid_resolution_too_deep = 2000000 + limit_how_complex_resolution_can_be = 200000 result = self._result = resolver.resolve( - collected.requirements, max_rounds=try_to_avoid_resolution_too_deep + collected.requirements, max_rounds=limit_how_complex_resolution_can_be ) except ResolutionImpossible as e: From 48986a6d1f4acba7051bf489f11eb667e1c18398 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 18:39:04 +0100 Subject: [PATCH 374/730] Don't fallback to setup.py install when build failed --- news/8368.removal.rst | 2 ++ src/pip/_internal/commands/install.py | 17 ++--------------- src/pip/_internal/utils/deprecation.py | 12 ------------ 3 files changed, 4 insertions(+), 27 deletions(-) create mode 100644 news/8368.removal.rst diff --git a/news/8368.removal.rst b/news/8368.removal.rst new file mode 100644 index 00000000000..44ee33aa78c --- /dev/null +++ b/news/8368.removal.rst @@ -0,0 +1,2 @@ +Remove ``setup.py install`` fallback when building a wheel failed for projects without +``pyproject.toml``. 
diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index e9fc7ee3aa3..3c15ed4158c 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -30,7 +30,6 @@ check_legacy_setup_py_options, ) from pip._internal.utils.compat import WINDOWS -from pip._internal.utils.deprecation import LegacyInstallReasonFailedBdistWheel from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.logging import getLogger from pip._internal.utils.misc import ( @@ -423,26 +422,14 @@ def run(self, options: Values, args: List[str]) -> int: global_options=global_options, ) - # If we're using PEP 517, we cannot do a legacy setup.py install - # so we fail here. - pep517_build_failure_names: List[str] = [ - r.name for r in build_failures if r.use_pep517 # type: ignore - ] - if pep517_build_failure_names: + if build_failures: raise InstallationError( "Could not build wheels for {}, which is required to " "install pyproject.toml-based projects".format( - ", ".join(pep517_build_failure_names) + ", ".join(r.name for r in build_failures) # type: ignore ) ) - # For now, we just warn about failures building legacy - # requirements, as we'll fall through to a setup.py install for - # those. - for r in build_failures: - if not r.use_pep517: - r.legacy_install_reason = LegacyInstallReasonFailedBdistWheel - to_install = resolver.get_installation_order(requirement_set) # Check for conflicts in the package set we're installing. 
diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index db6daf7183d..3cd3edd7216 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -147,15 +147,3 @@ def emit_deprecation(self, name: str) -> None: feature_flag=self._feature_flag, issue=self._issue, ) - - -LegacyInstallReasonFailedBdistWheel = LegacyInstallReason( - reason=( - "{name} was installed using the legacy 'setup.py install' " - "method, because a wheel could not be built for it." - ), - replacement="to fix the wheel build issue reported above", - gone_in="23.1", - issue=8368, - emit_after_success=True, -) From bc3feef9ccc1ffb295703002fad7eaae061633c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 18:45:30 +0100 Subject: [PATCH 375/730] Remove setup.py install legacy --- src/pip/_internal/exceptions.py | 14 --- .../_internal/operations/install/legacy.py | 117 ------------------ src/pip/_internal/req/req_install.py | 81 +++--------- src/pip/_internal/utils/deprecation.py | 29 ----- src/pip/_internal/utils/setuptools_build.py | 45 ------- tests/functional/test_show.py | 3 - 6 files changed, 16 insertions(+), 273 deletions(-) delete mode 100644 src/pip/_internal/operations/install/legacy.py diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index d4527295da3..7d92ba69983 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -361,20 +361,6 @@ def __str__(self) -> str: ) -class LegacyInstallFailure(DiagnosticPipError): - """Error occurred while executing `setup.py install`""" - - reference = "legacy-install-failure" - - def __init__(self, package_details: str) -> None: - super().__init__( - message="Encountered error while trying to install package.", - context=package_details, - hint_stmt="See above for output from the failure.", - note_stmt="This is an issue with the package mentioned above, not pip.", - ) - - class 
InstallationSubprocessError(DiagnosticPipError, InstallationError): """A subprocess call failed.""" diff --git a/src/pip/_internal/operations/install/legacy.py b/src/pip/_internal/operations/install/legacy.py deleted file mode 100644 index 38bd542764e..00000000000 --- a/src/pip/_internal/operations/install/legacy.py +++ /dev/null @@ -1,117 +0,0 @@ -"""Legacy installation process, i.e. `setup.py install`. -""" - -import logging -import os -from typing import List, Optional, Sequence - -from pip._internal.build_env import BuildEnvironment -from pip._internal.exceptions import InstallationError, LegacyInstallFailure -from pip._internal.locations.base import change_root -from pip._internal.models.scheme import Scheme -from pip._internal.utils.misc import ensure_dir -from pip._internal.utils.setuptools_build import make_setuptools_install_args -from pip._internal.utils.subprocess import runner_with_spinner_message -from pip._internal.utils.temp_dir import TempDirectory - -logger = logging.getLogger(__name__) - - -def write_installed_files_from_setuptools_record( - record_lines: List[str], - root: Optional[str], - req_description: str, -) -> None: - def prepend_root(path: str) -> str: - if root is None or not os.path.isabs(path): - return path - else: - return change_root(root, path) - - for line in record_lines: - directory = os.path.dirname(line) - if directory.endswith(".egg-info"): - egg_info_dir = prepend_root(directory) - break - else: - message = ( - "{} did not indicate that it installed an " - ".egg-info directory. Only setup.py projects " - "generating .egg-info directories are supported." 
- ).format(req_description) - raise InstallationError(message) - - new_lines = [] - for line in record_lines: - filename = line.strip() - if os.path.isdir(filename): - filename += os.path.sep - new_lines.append(os.path.relpath(prepend_root(filename), egg_info_dir)) - new_lines.sort() - ensure_dir(egg_info_dir) - inst_files_path = os.path.join(egg_info_dir, "installed-files.txt") - with open(inst_files_path, "w") as f: - f.write("\n".join(new_lines) + "\n") - - -def install( - global_options: Sequence[str], - root: Optional[str], - home: Optional[str], - prefix: Optional[str], - use_user_site: bool, - pycompile: bool, - scheme: Scheme, - setup_py_path: str, - isolated: bool, - req_name: str, - build_env: BuildEnvironment, - unpacked_source_directory: str, - req_description: str, -) -> bool: - header_dir = scheme.headers - - with TempDirectory(kind="record") as temp_dir: - try: - record_filename = os.path.join(temp_dir.path, "install-record.txt") - install_args = make_setuptools_install_args( - setup_py_path, - global_options=global_options, - record_filename=record_filename, - root=root, - prefix=prefix, - header_dir=header_dir, - home=home, - use_user_site=use_user_site, - no_user_config=isolated, - pycompile=pycompile, - ) - - runner = runner_with_spinner_message( - f"Running setup.py install for {req_name}" - ) - with build_env: - runner( - cmd=install_args, - cwd=unpacked_source_directory, - ) - - if not os.path.exists(record_filename): - logger.debug("Record file %s not found", record_filename) - # Signal to the caller that we didn't install the new package - return False - - except Exception as e: - # Signal to the caller that we didn't install the new package - raise LegacyInstallFailure(package_details=req_name) from e - - # At this point, we have successfully installed the requirement. 
- - # We intentionally do not use any encoding to read the file because - # setuptools writes the file using distutils.file_util.write_file, - # which does not specify an encoding. - with open(record_filename) as f: - record_lines = f.read().splitlines() - - write_installed_files_from_setuptools_record(record_lines, root, req_description) - return True diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 940dbe02b73..baa6716381c 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -20,7 +20,7 @@ from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment -from pip._internal.exceptions import InstallationError, LegacyInstallFailure +from pip._internal.exceptions import InstallationError from pip._internal.locations import get_scheme from pip._internal.metadata import ( BaseDistribution, @@ -39,11 +39,10 @@ from pip._internal.operations.install.editable_legacy import ( install_editable as install_editable_legacy, ) -from pip._internal.operations.install.legacy import install as install_legacy from pip._internal.operations.install.wheel import install_wheel from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path from pip._internal.req.req_uninstall import UninstallPathSet -from pip._internal.utils.deprecation import LegacyInstallReason, deprecated +from pip._internal.utils.deprecation import deprecated from pip._internal.utils.hashes import Hashes from pip._internal.utils.misc import ( ConfiguredBuildBackendHookCaller, @@ -93,7 +92,6 @@ def __init__( self.constraint = constraint self.editable = editable self.permit_editable_wheels = permit_editable_wheels - self.legacy_install_reason: Optional[LegacyInstallReason] = None # source_dir is the local directory where the linked requirement is # located, or unpacked. 
In case unpacking is needed, creating and @@ -757,10 +755,9 @@ def install( prefix=prefix, ) - global_options = global_options if global_options is not None else [] if self.editable and not self.is_wheel: install_editable_legacy( - global_options=global_options, + global_options=global_options if global_options is not None else [], prefix=prefix, home=home, use_user_site=use_user_site, @@ -773,66 +770,20 @@ def install( self.install_succeeded = True return - if self.is_wheel: - assert self.local_file_path - install_wheel( - self.name, - self.local_file_path, - scheme=scheme, - req_description=str(self.req), - pycompile=pycompile, - warn_script_location=warn_script_location, - direct_url=self.download_info if self.original_link else None, - requested=self.user_supplied, - ) - self.install_succeeded = True - return - - # TODO: Why don't we do this for editable installs? - - # Extend the list of global options passed on to - # the setup.py call with the ones from the requirements file. - # Options specified in requirements file override those - # specified on the command line, since the last option given - # to setup.py is the one that is used. 
- global_options = list(global_options) + self.global_options - - try: - if ( - self.legacy_install_reason is not None - and self.legacy_install_reason.emit_before_install - ): - self.legacy_install_reason.emit_deprecation(self.name) - success = install_legacy( - global_options=global_options, - root=root, - home=home, - prefix=prefix, - use_user_site=use_user_site, - pycompile=pycompile, - scheme=scheme, - setup_py_path=self.setup_py_path, - isolated=self.isolated, - req_name=self.name, - build_env=self.build_env, - unpacked_source_directory=self.unpacked_source_directory, - req_description=str(self.req), - ) - except LegacyInstallFailure as exc: - self.install_succeeded = False - raise exc - except Exception: - self.install_succeeded = True - raise + assert self.is_wheel + assert self.local_file_path - self.install_succeeded = success - - if ( - success - and self.legacy_install_reason is not None - and self.legacy_install_reason.emit_after_success - ): - self.legacy_install_reason.emit_deprecation(self.name) + install_wheel( + self.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, + warn_script_location=warn_script_location, + direct_url=self.download_info if self.original_link else None, + requested=self.user_supplied, + ) + self.install_succeeded = True def check_invalid_constraint_type(req: InstallRequirement) -> str: diff --git a/src/pip/_internal/utils/deprecation.py b/src/pip/_internal/utils/deprecation.py index 3cd3edd7216..72bd6f25a55 100644 --- a/src/pip/_internal/utils/deprecation.py +++ b/src/pip/_internal/utils/deprecation.py @@ -118,32 +118,3 @@ def deprecated( raise PipDeprecationWarning(message) warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) - - -class LegacyInstallReason: - def __init__( - self, - reason: str, - replacement: Optional[str] = None, - gone_in: Optional[str] = None, - feature_flag: Optional[str] = None, - issue: Optional[int] = None, - emit_after_success: bool 
= False, - emit_before_install: bool = False, - ): - self._reason = reason - self._replacement = replacement - self._gone_in = gone_in - self._feature_flag = feature_flag - self._issue = issue - self.emit_after_success = emit_after_success - self.emit_before_install = emit_before_install - - def emit_deprecation(self, name: str) -> None: - deprecated( - reason=self._reason.format(name=name), - replacement=self._replacement, - gone_in=self._gone_in, - feature_flag=self._feature_flag, - issue=self._issue, - ) diff --git a/src/pip/_internal/utils/setuptools_build.py b/src/pip/_internal/utils/setuptools_build.py index 0662915cb05..96d1b246067 100644 --- a/src/pip/_internal/utils/setuptools_build.py +++ b/src/pip/_internal/utils/setuptools_build.py @@ -144,48 +144,3 @@ def make_setuptools_egg_info_args( args += ["--egg-base", egg_info_dir] return args - - -def make_setuptools_install_args( - setup_py_path: str, - *, - global_options: Sequence[str], - record_filename: str, - root: Optional[str], - prefix: Optional[str], - header_dir: Optional[str], - home: Optional[str], - use_user_site: bool, - no_user_config: bool, - pycompile: bool, -) -> List[str]: - assert not (use_user_site and prefix) - assert not (use_user_site and root) - - args = make_setuptools_shim_args( - setup_py_path, - global_options=global_options, - no_user_config=no_user_config, - unbuffered_output=True, - ) - args += ["install", "--record", record_filename] - args += ["--single-version-externally-managed"] - - if root is not None: - args += ["--root", root] - if prefix is not None: - args += ["--prefix", prefix] - if home is not None: - args += ["--home", home] - if use_user_site: - args += ["--user", "--prefix="] - - if pycompile: - args += ["--compile"] - else: - args += ["--no-compile"] - - if header_dir: - args += ["--install-headers", header_dir] - - return args diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py index a7e9022a5c4..3361eb3ec92 100644 --- 
a/tests/functional/test_show.py +++ b/tests/functional/test_show.py @@ -4,9 +4,6 @@ from pip import __version__ from pip._internal.commands.show import search_packages_info -from pip._internal.operations.install.legacy import ( - write_installed_files_from_setuptools_record, -) from pip._internal.utils.unpacking import untar_file from tests.lib import PipTestEnvironment, TestData, create_test_package_with_setup From 64967c79072e908510e0627d8571ae920337e43f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 19 Mar 2023 10:33:00 +0100 Subject: [PATCH 376/730] Fix test_install_requirements_with_options This test depended on setup.py install, and tbh, I don't see a good reason to attempt an install when testing the requirements file parser. --- tests/unit/test_req_file.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index be369f1a051..4d5bee249b2 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -1,7 +1,6 @@ import collections import logging import os -import subprocess import textwrap from optparse import Values from pathlib import Path @@ -880,14 +879,4 @@ def test_install_requirements_with_options( ) ) - req.source_dir = os.curdir - with mock.patch.object(subprocess, "Popen") as popen: - popen.return_value.stdout.readline.return_value = b"" - try: - req.install([]) - except Exception: - pass - - last_call = popen.call_args_list[-1] - args = last_call[0][0] - assert 0 < args.index(global_option) < args.index("install") + assert req.global_options == [global_option] From 0060bfa7e395d0b1a588e8c63f350356ce12a664 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 18 Mar 2023 18:46:22 +0100 Subject: [PATCH 377/730] Rework test_show_with_files_from_legacy It relied on a function that does not exist anymore. 
--- tests/functional/test_show.py | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/tests/functional/test_show.py b/tests/functional/test_show.py index 3361eb3ec92..b8ec0510a1e 100644 --- a/tests/functional/test_show.py +++ b/tests/functional/test_show.py @@ -1,11 +1,17 @@ import os import pathlib import re +import textwrap from pip import __version__ from pip._internal.commands.show import search_packages_info from pip._internal.utils.unpacking import untar_file -from tests.lib import PipTestEnvironment, TestData, create_test_package_with_setup +from tests.lib import ( + PipTestEnvironment, + TestData, + create_test_package_with_setup, + pyversion, +) def test_basic_show(script: PipTestEnvironment) -> None: @@ -74,10 +80,19 @@ def test_show_with_files_from_legacy( str(setuptools_record), cwd=source_dir, ) - write_installed_files_from_setuptools_record( - setuptools_record.read_text().splitlines(), - root=None, - req_description="simple==1.0", + # Emulate the installed-files.txt generation which previous pip version did + # after running setup.py install (write_installed_files_from_setuptools_record). 
+ egg_info_dir = script.site_packages_path / f"simple-1.0-py{pyversion}.egg-info" + egg_info_dir.joinpath("installed-files.txt").write_text( + textwrap.dedent( + """\ + ../simple/__init__.py + PKG-INFO + SOURCES.txt + dependency_links.txt + top_level.txt + """ + ) ) result = script.pip("show", "--files", "simple") From e5deb4dd8d7989ced863d1995369be7f374cdce4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 27 Mar 2023 17:48:22 +0200 Subject: [PATCH 378/730] Adapt test to the removal of setup.py install code path --- tests/functional/test_install.py | 65 ++++++------------------ tests/functional/test_install_vcs_git.py | 2 +- 2 files changed, 17 insertions(+), 50 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 18446d89989..1e455d88029 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -829,7 +829,10 @@ def test_install_global_option(script: PipTestEnvironment) -> None: (In particular those that disable the actual install action) """ result = script.pip( - "install", "--global-option=--version", "INITools==0.1", expect_stderr=True + "install", + "--global-option=--version", + "INITools==0.1", + expect_error=True, # build is going to fail because of --version ) assert "INITools==0.1\n" in result.stdout assert not result.files_created @@ -1498,15 +1501,12 @@ def test_install_subprocess_output_handling( # This error is emitted 3 times: # - by setup.py bdist_wheel # - by setup.py clean - # - by setup.py install which is used as fallback when setup.py bdist_wheel failed - # Before, it failed only once because it attempted only setup.py install. - # TODO update this when we remove the last setup.py install code path. 
- assert 3 == result.stderr.count("I DIE, I DIE") + assert 2 == result.stderr.count("I DIE, I DIE") result = script.pip( *(args + ["--global-option=--fail", "--verbose"]), expect_error=True ) - assert 3 == result.stderr.count("I DIE, I DIE") + assert 2 == result.stderr.count("I DIE, I DIE") def test_install_log(script: PipTestEnvironment, data: TestData, tmpdir: Path) -> None: @@ -1526,22 +1526,9 @@ def test_install_topological_sort(script: PipTestEnvironment, data: TestData) -> assert order1 in res or order2 in res, res -def test_install_wheel_broken(script: PipTestEnvironment) -> None: - res = script.pip_install_local("wheelbroken", allow_stderr_error=True) - assert "ERROR: Failed building wheel for wheelbroken" in res.stderr - # Fallback to setup.py install (https://github.com/pypa/pip/issues/8368) - assert "Successfully installed wheelbroken-0.1" in str(res), str(res) - - def test_cleanup_after_failed_wheel(script: PipTestEnvironment) -> None: - res = script.pip_install_local("wheelbrokenafter", allow_stderr_error=True) + res = script.pip_install_local("wheelbrokenafter", expect_error=True) assert "ERROR: Failed building wheel for wheelbrokenafter" in res.stderr - # One of the effects of not cleaning up is broken scripts: - script_py = script.bin_path / "script.py" - assert script_py.exists(), script_py - with open(script_py) as f: - shebang = f.readline().strip() - assert shebang != "#!python", shebang # OK, assert that we *said* we were cleaning up: # /!\ if in need to change this, also change test_pep517_no_legacy_cleanup assert "Running setup.py clean for wheelbrokenafter" in str(res), str(res) @@ -1568,38 +1555,26 @@ def test_install_builds_wheels(script: PipTestEnvironment, data: TestData) -> No "-f", data.find_links, to_install, - allow_stderr_error=True, # error building wheelbroken - ) - expected = ( - "Successfully installed requires-wheelbroken-upper-0" - " upper-2.0 wheelbroken-0.1" + expect_error=True, # error building wheelbroken ) - # Must have 
installed it all - assert expected in str(res), str(res) wheels: List[str] = [] for _, _, files in os.walk(wheels_cache): wheels.extend(f for f in files if f.endswith(".whl")) - # and built wheels for upper and wheelbroken + # Built wheel for upper assert "Building wheel for upper" in str(res), str(res) + # Built wheel for wheelbroken, but failed assert "Building wheel for wheelb" in str(res), str(res) + assert "Failed to build wheelbroken" in str(res), str(res) # Wheels are built for local directories, but not cached. assert "Building wheel for requir" in str(res), str(res) - # wheelbroken has to run install # into the cache assert wheels != [], str(res) - # and installed from the wheel - assert "Running setup.py install for upper" not in str(res), str(res) - # Wheels are built for local directories, but not cached. - assert "Running setup.py install for requir" not in str(res), str(res) - # wheelbroken has to run install - assert "Running setup.py install for wheelb" in str(res), str(res) - # We want to make sure pure python wheels do not have an implementation tag assert wheels == [ "Upper-2.0-py{}-none-any.whl".format(sys.version_info[0]), ] -def test_install_no_binary_disables_building_wheels( +def test_install_no_binary_builds_wheels( script: PipTestEnvironment, data: TestData ) -> None: to_install = data.packages.joinpath("requires_wheelbroken_upper") @@ -1610,22 +1585,14 @@ def test_install_no_binary_disables_building_wheels( "-f", data.find_links, to_install, - allow_stderr_error=True, # error building wheelbroken + expect_error=True, # error building wheelbroken ) - expected = ( - "Successfully installed requires-wheelbroken-upper-0" - " upper-2.0 wheelbroken-0.1" - ) - # Must have installed it all - assert expected in str(res), str(res) - # and built wheels for wheelbroken only + # Wheels are built for all requirements assert "Building wheel for wheelb" in str(res), str(res) - # Wheels are built for local directories, but not cached across runs assert 
"Building wheel for requir" in str(res), str(res) - # Don't build wheel for upper which was blacklisted assert "Building wheel for upper" in str(res), str(res) - # And these two fell back to sdist based installed. - assert "Running setup.py install for wheelb" in str(res), str(res) + # Wheelbroken failed to build + assert "Failed to build wheelbroken" in str(res), str(res) @pytest.mark.network diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index d7e8c26024f..971526c5181 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -392,7 +392,7 @@ def test_git_with_non_editable_unpacking( ) result = script.pip( "install", - "--global-option=--version", + "--global-option=--quiet", local_url, allow_stderr_warning=True, ) From 849dcbd6a8b777e2b2b7842fdd12d6831384fc29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 27 Mar 2023 18:09:03 +0200 Subject: [PATCH 379/730] Remove more traces of setup.py install --- docs/html/reference/build-system/setup-py.md | 9 --------- tests/functional/test_install.py | 2 -- tests/functional/test_install_reqs.py | 3 +-- 3 files changed, 1 insertion(+), 13 deletions(-) diff --git a/docs/html/reference/build-system/setup-py.md b/docs/html/reference/build-system/setup-py.md index 53917b8a4c8..0103a3a6a92 100644 --- a/docs/html/reference/build-system/setup-py.md +++ b/docs/html/reference/build-system/setup-py.md @@ -24,8 +24,6 @@ The overall process for building a package is: - Generate the package's metadata. - Generate a wheel for the package. - - If this fails and we're trying to install the package, attempt a direct - installation. The wheel can then be used to perform an installation, if necessary. @@ -58,13 +56,6 @@ If this wheel generation fails, pip runs `setup.py clean` to clean up any build artifacts that may have been generated. After that, pip will attempt a direct installation. 
-### Direct Installation - -When all else fails, pip will invoke `setup.py install` to install a package -using setuptools' mechanisms to perform the installation. This is currently the -last-resort fallback for projects that cannot be built into wheels, and may not -be supported in the future. - ### Editable Installation For installing packages in "editable" mode diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 1e455d88029..72c72f35c5d 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1160,7 +1160,6 @@ def test_install_package_with_prefix( rel_prefix_path = script.scratch / "prefix" install_path = join( sysconfig.get_path("purelib", vars={"base": rel_prefix_path}), - # we still test for egg-info because no-binary implies setup.py install "simple-1.0.dist-info", ) result.did_create(install_path) @@ -1606,7 +1605,6 @@ def test_install_no_binary_builds_pep_517_wheel( assert expected in str(res), str(res) assert "Building wheel for pep517-setup" in str(res), str(res) - assert "Running setup.py install for pep517-set" not in str(res), str(res) @pytest.mark.network diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 3ad9534810b..3ad1909fe7c 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -325,7 +325,6 @@ def test_wheel_user_with_prefix_in_pydistutils_cfg( "install", "--user", "--no-index", "-f", data.find_links, "requiresupper" ) # Check that we are really installing a wheel - assert "Running setup.py install for requiresupper" not in result.stdout assert "installed requiresupper" in result.stdout @@ -647,7 +646,7 @@ def test_install_distribution_union_with_constraints( msg = "Unnamed requirements are not allowed as constraints" assert msg in result.stderr else: - assert "Running setup.py install for LocalExtras" in result.stdout + assert "Building wheel for LocalExtras" in result.stdout 
result.did_create(script.site_packages / "singlemodule.py") From 6ea91c84c274ec6a2f16668b141238c58767800e Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Fri, 31 Mar 2023 11:39:01 +0300 Subject: [PATCH 380/730] Add more test cases --- src/pip/_internal/cli/req_command.py | 12 +++------ src/pip/_internal/utils/misc.py | 8 +++--- tests/unit/test_utils_misc.py | 40 ++++++++++++++-------------- 3 files changed, 29 insertions(+), 31 deletions(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 42888a1228b..7f41961b393 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -10,7 +10,7 @@ import sys from functools import partial from optparse import Values -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, List, Optional, Tuple from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions @@ -441,13 +441,9 @@ def get_requirements( else None ) cli_config_settings = getattr(options, "config_settings", None) - config_settings: Optional[Dict[str, Union[str, List[str]]]] - if req_config_settings and cli_config_settings: - config_settings = merge_config_settings( - req_config_settings, cli_config_settings - ) - else: - config_settings = req_config_settings or cli_config_settings + config_settings = merge_config_settings( + req_config_settings, cli_config_settings + ) req_to_add = install_req_from_parsed_requirement( parsed_req, isolated=options.isolated_mode, diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 4d288cd8859..1f95a5624db 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -27,7 +27,6 @@ Iterable, Iterator, List, - Mapping, Optional, TextIO, Tuple, @@ -733,9 +732,12 @@ def prepare_metadata_for_build_editable( def merge_config_settings( - reqs_settings: Mapping[str, Union[str, List[str]]], - 
cli_settings: Mapping[str, Union[str, List[str]]], + reqs_settings: Optional[Dict[str, Union[str, List[str]]]], + cli_settings: Optional[Dict[str, Union[str, List[str]]]], ) -> Dict[str, Union[str, List[str]]]: + if not reqs_settings or not cli_settings: + return reqs_settings or cli_settings + dd: Dict[str, Union[str, List[str]]] = {} for d in (reqs_settings, cli_settings): for k, v in d.items(): diff --git a/tests/unit/test_utils_misc.py b/tests/unit/test_utils_misc.py index a00d5461f6d..b1f199bfc6b 100644 --- a/tests/unit/test_utils_misc.py +++ b/tests/unit/test_utils_misc.py @@ -1,25 +1,25 @@ -from typing import Dict, List, Union +from typing import Dict, List, Optional, Union + +import pytest from pip._internal.utils.misc import merge_config_settings -def test_merge_config_settings() -> None: - reqs: Dict[str, Union[str, List[str]]] = { - "foo": "bar", - "bar": "foo", - "foobar": ["bar"], - "baz": ["foo"], - } - cli: Dict[str, Union[str, List[str]]] = { - "foo": ["baz"], - "bar": "bar", - "foobar": ["baz"], - "baz": "bar", - } - expected = { - "foo": ["bar", "baz"], - "bar": ["foo", "bar"], - "foobar": ["bar", "baz"], - "baz": ["foo", "bar"], - } +@pytest.mark.parametrize( + "reqs, cli, expected", + [ + ({"foo": "bar"}, {"foo": ["baz"]}, {"foo": ["bar", "baz"]}), + ({"foo": "bar"}, {"foo": "baz"}, {"foo": ["bar", "baz"]}), + ({"foo": ["bar"]}, {"foo": ["baz"]}, {"foo": ["bar", "baz"]}), + ({"foo": ["bar"]}, {"foo": "baz"}, {"foo": ["bar", "baz"]}), + ({"foo": "bar"}, {"foo": ["baz"]}, {"foo": ["bar", "baz"]}), + ({"foo": "bar"}, None, {"foo": "bar"}), + (None, {"foo": ["bar"]}, {"foo": ["bar"]}), + ], +) +def test_merge_config_settings( + reqs: Optional[Dict[str, Union[str, List[str]]]], + cli: Optional[Dict[str, Union[str, List[str]]]], + expected: Dict[str, Union[str, List[str]]], +) -> None: assert merge_config_settings(reqs, cli) == expected From ae23fc2154cc391a6b305b9ecd23552f2044c251 Mon Sep 17 00:00:00 2001 From: q0w 
<43147888+q0w@users.noreply.github.com> Date: Fri, 31 Mar 2023 11:48:11 +0300 Subject: [PATCH 381/730] fix typing --- src/pip/_internal/utils/misc.py | 2 +- tests/unit/test_utils_misc.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 1f95a5624db..04d75d04de1 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -734,7 +734,7 @@ def prepare_metadata_for_build_editable( def merge_config_settings( reqs_settings: Optional[Dict[str, Union[str, List[str]]]], cli_settings: Optional[Dict[str, Union[str, List[str]]]], -) -> Dict[str, Union[str, List[str]]]: +) -> Optional[Dict[str, Union[str, List[str]]]]: if not reqs_settings or not cli_settings: return reqs_settings or cli_settings diff --git a/tests/unit/test_utils_misc.py b/tests/unit/test_utils_misc.py index b1f199bfc6b..fed75617b23 100644 --- a/tests/unit/test_utils_misc.py +++ b/tests/unit/test_utils_misc.py @@ -15,11 +15,12 @@ ({"foo": "bar"}, {"foo": ["baz"]}, {"foo": ["bar", "baz"]}), ({"foo": "bar"}, None, {"foo": "bar"}), (None, {"foo": ["bar"]}, {"foo": ["bar"]}), + (None, None, None), ], ) def test_merge_config_settings( reqs: Optional[Dict[str, Union[str, List[str]]]], cli: Optional[Dict[str, Union[str, List[str]]]], - expected: Dict[str, Union[str, List[str]]], + expected: Optional[Dict[str, Union[str, List[str]]]], ) -> None: assert merge_config_settings(reqs, cli) == expected From e0b89b8ac2dd4f44aaeae01cf22a4c18bff84742 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 11:43:01 +0100 Subject: [PATCH 382/730] Upgrade msgpack to 1.0.5 --- news/msgpack.vendor.rst | 1 + src/pip/_vendor/msgpack/__init__.py | 4 ++-- src/pip/_vendor/msgpack/ext.py | 2 +- src/pip/_vendor/msgpack/fallback.py | 2 +- src/pip/_vendor/vendor.txt | 2 +- 5 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 news/msgpack.vendor.rst diff --git a/news/msgpack.vendor.rst 
b/news/msgpack.vendor.rst new file mode 100644 index 00000000000..9193b7ce52b --- /dev/null +++ b/news/msgpack.vendor.rst @@ -0,0 +1 @@ +Upgrade msgpack to 1.0.5 diff --git a/src/pip/_vendor/msgpack/__init__.py b/src/pip/_vendor/msgpack/__init__.py index 50710218987..1300b866043 100644 --- a/src/pip/_vendor/msgpack/__init__.py +++ b/src/pip/_vendor/msgpack/__init__.py @@ -6,8 +6,8 @@ import sys -version = (1, 0, 4) -__version__ = "1.0.4" +version = (1, 0, 5) +__version__ = "1.0.5" if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2: diff --git a/src/pip/_vendor/msgpack/ext.py b/src/pip/_vendor/msgpack/ext.py index 25544c55564..23e0d6b41ce 100644 --- a/src/pip/_vendor/msgpack/ext.py +++ b/src/pip/_vendor/msgpack/ext.py @@ -56,7 +56,7 @@ def __init__(self, seconds, nanoseconds=0): Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns. """ if not isinstance(seconds, int_types): - raise TypeError("seconds must be an interger") + raise TypeError("seconds must be an integer") if not isinstance(nanoseconds, int_types): raise TypeError("nanoseconds must be an integer") if not (0 <= nanoseconds < 10**9): diff --git a/src/pip/_vendor/msgpack/fallback.py b/src/pip/_vendor/msgpack/fallback.py index f560c7b5509..e8cebc1bef7 100644 --- a/src/pip/_vendor/msgpack/fallback.py +++ b/src/pip/_vendor/msgpack/fallback.py @@ -814,7 +814,7 @@ def _pack( self._pack_raw_header(n) return self._buffer.write(obj) if check(obj, memoryview): - n = len(obj) * obj.itemsize + n = obj.nbytes if n >= 2**32: raise ValueError("Memoryview is too large") self._pack_bin_header(n) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 5519479bf52..d6a7ee598ef 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -2,7 +2,7 @@ CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for t colorama==0.4.6 distlib==0.3.6 distro==1.8.0 -msgpack==1.0.4 +msgpack==1.0.5 packaging==21.3 
platformdirs==2.6.2 pyparsing==3.0.9 From 2b9effb4c7a7ab7bc282be14b32f1911a1977dd6 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 11:43:17 +0100 Subject: [PATCH 383/730] Upgrade platformdirs to 3.2.0 --- news/platformdirs.vendor.rst | 1 + src/pip/_vendor/platformdirs/__init__.py | 223 ++++++++++++++++++++--- src/pip/_vendor/platformdirs/__main__.py | 1 + src/pip/_vendor/platformdirs/android.py | 10 +- src/pip/_vendor/platformdirs/api.py | 25 ++- src/pip/_vendor/platformdirs/macos.py | 20 +- src/pip/_vendor/platformdirs/unix.py | 15 +- src/pip/_vendor/platformdirs/version.py | 4 +- src/pip/_vendor/platformdirs/windows.py | 15 +- src/pip/_vendor/vendor.txt | 2 +- 10 files changed, 277 insertions(+), 39 deletions(-) create mode 100644 news/platformdirs.vendor.rst diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst new file mode 100644 index 00000000000..939253e14fc --- /dev/null +++ b/news/platformdirs.vendor.rst @@ -0,0 +1 @@ +Upgrade platformdirs to 3.2.0 diff --git a/src/pip/_vendor/platformdirs/__init__.py b/src/pip/_vendor/platformdirs/__init__.py index 82d907163c7..c46a145cdc1 100644 --- a/src/pip/_vendor/platformdirs/__init__.py +++ b/src/pip/_vendor/platformdirs/__init__.py @@ -27,7 +27,6 @@ def _set_platform_dir_class() -> type[PlatformDirsABC]: from pip._vendor.platformdirs.unix import Unix as Result if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system": - if os.getenv("SHELL") or os.getenv("PREFIX"): return Result @@ -50,15 +49,23 @@ def user_data_dir( appauthor: str | None | Literal[False] = None, version: str | None = None, roaming: bool = False, + ensure_exists: bool = False, ) -> str: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. - :param roaming: See `roaming `. + :param roaming: See `roaming `. + :param ensure_exists: See `ensure_exists `. 
:returns: data directory tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + roaming=roaming, + ensure_exists=ensure_exists, + ).user_data_dir def site_data_dir( @@ -66,15 +73,23 @@ def site_data_dir( appauthor: str | None | Literal[False] = None, version: str | None = None, multipath: bool = False, + ensure_exists: bool = False, ) -> str: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param multipath: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: data directory shared by users """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + multipath=multipath, + ensure_exists=ensure_exists, + ).site_data_dir def user_config_dir( @@ -82,15 +97,23 @@ def user_config_dir( appauthor: str | None | Literal[False] = None, version: str | None = None, roaming: bool = False, + ensure_exists: bool = False, ) -> str: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. - :param roaming: See `roaming `. + :param roaming: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: config directory tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + roaming=roaming, + ensure_exists=ensure_exists, + ).user_config_dir def site_config_dir( @@ -98,15 +121,23 @@ def site_config_dir( appauthor: str | None | Literal[False] = None, version: str | None = None, multipath: bool = False, + ensure_exists: bool = False, ) -> str: """ :param appname: See `appname `. 
:param appauthor: See `appauthor `. :param version: See `version `. :param multipath: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: config directory shared by the users """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + multipath=multipath, + ensure_exists=ensure_exists, + ).site_config_dir def user_cache_dir( @@ -114,15 +145,47 @@ def user_cache_dir( appauthor: str | None | Literal[False] = None, version: str | None = None, opinion: bool = True, + ensure_exists: bool = False, ) -> str: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param opinion: See `roaming `. + :param ensure_exists: See `ensure_exists `. + :returns: cache directory tied to the user + """ + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).user_cache_dir + + +def site_cache_dir( + appname: str | None = None, + appauthor: str | None | Literal[False] = None, + version: str | None = None, + opinion: bool = True, + ensure_exists: bool = False, +) -> str: + """ + :param appname: See `appname `. + :param appauthor: See `appauthor `. + :param version: See `version `. + :param opinion: See `opinion `. + :param ensure_exists: See `ensure_exists `. :returns: cache directory tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).site_cache_dir def user_state_dir( @@ -130,15 +193,23 @@ def user_state_dir( appauthor: str | None | Literal[False] = None, version: str | None = None, roaming: bool = False, + ensure_exists: bool = False, ) -> str: """ :param appname: See `appname `. 
:param appauthor: See `appauthor `. :param version: See `version `. - :param roaming: See `roaming `. + :param roaming: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: state directory tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + roaming=roaming, + ensure_exists=ensure_exists, + ).user_state_dir def user_log_dir( @@ -146,15 +217,23 @@ def user_log_dir( appauthor: str | None | Literal[False] = None, version: str | None = None, opinion: bool = True, + ensure_exists: bool = False, ) -> str: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param opinion: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: log directory tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).user_log_dir def user_documents_dir() -> str: @@ -169,15 +248,23 @@ def user_runtime_dir( appauthor: str | None | Literal[False] = None, version: str | None = None, opinion: bool = True, + ensure_exists: bool = False, ) -> str: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param opinion: See `opinion `. + :param ensure_exists: See `ensure_exists `. 
:returns: runtime directory tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_dir + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).user_runtime_dir def user_data_path( @@ -185,15 +272,23 @@ def user_data_path( appauthor: str | None | Literal[False] = None, version: str | None = None, roaming: bool = False, + ensure_exists: bool = False, ) -> Path: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. - :param roaming: See `roaming `. + :param roaming: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: data path tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + roaming=roaming, + ensure_exists=ensure_exists, + ).user_data_path def site_data_path( @@ -201,15 +296,23 @@ def site_data_path( appauthor: str | None | Literal[False] = None, version: str | None = None, multipath: bool = False, + ensure_exists: bool = False, ) -> Path: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param multipath: See `multipath `. + :param ensure_exists: See `ensure_exists `. :returns: data path shared by users """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_path + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + multipath=multipath, + ensure_exists=ensure_exists, + ).site_data_path def user_config_path( @@ -217,15 +320,23 @@ def user_config_path( appauthor: str | None | Literal[False] = None, version: str | None = None, roaming: bool = False, + ensure_exists: bool = False, ) -> Path: """ :param appname: See `appname `. 
:param appauthor: See `appauthor `. :param version: See `version `. - :param roaming: See `roaming `. + :param roaming: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: config path tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + roaming=roaming, + ensure_exists=ensure_exists, + ).user_config_path def site_config_path( @@ -233,15 +344,47 @@ def site_config_path( appauthor: str | None | Literal[False] = None, version: str | None = None, multipath: bool = False, + ensure_exists: bool = False, ) -> Path: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param multipath: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: config path shared by the users """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + multipath=multipath, + ensure_exists=ensure_exists, + ).site_config_path + + +def site_cache_path( + appname: str | None = None, + appauthor: str | None | Literal[False] = None, + version: str | None = None, + opinion: bool = True, + ensure_exists: bool = False, +) -> Path: + """ + :param appname: See `appname `. + :param appauthor: See `appauthor `. + :param version: See `version `. + :param opinion: See `opinion `. + :param ensure_exists: See `ensure_exists `. 
+ :returns: cache directory tied to the user + """ + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).site_cache_path def user_cache_path( @@ -249,15 +392,23 @@ def user_cache_path( appauthor: str | None | Literal[False] = None, version: str | None = None, opinion: bool = True, + ensure_exists: bool = False, ) -> Path: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param opinion: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: cache path tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).user_cache_path def user_state_path( @@ -265,15 +416,23 @@ def user_state_path( appauthor: str | None | Literal[False] = None, version: str | None = None, roaming: bool = False, + ensure_exists: bool = False, ) -> Path: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. - :param roaming: See `roaming `. + :param roaming: See `roaming `. + :param ensure_exists: See `ensure_exists `. :returns: state path tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + roaming=roaming, + ensure_exists=ensure_exists, + ).user_state_path def user_log_path( @@ -281,15 +440,23 @@ def user_log_path( appauthor: str | None | Literal[False] = None, version: str | None = None, opinion: bool = True, + ensure_exists: bool = False, ) -> Path: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param opinion: See `roaming `. 
+ :param ensure_exists: See `ensure_exists `. :returns: log path tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).user_log_path def user_documents_path() -> Path: @@ -304,15 +471,23 @@ def user_runtime_path( appauthor: str | None | Literal[False] = None, version: str | None = None, opinion: bool = True, + ensure_exists: bool = False, ) -> Path: """ :param appname: See `appname `. :param appauthor: See `appauthor `. :param version: See `version `. :param opinion: See `opinion `. + :param ensure_exists: See `ensure_exists `. :returns: runtime path tied to the user """ - return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_path + return PlatformDirs( + appname=appname, + appauthor=appauthor, + version=version, + opinion=opinion, + ensure_exists=ensure_exists, + ).user_runtime_path __all__ = [ @@ -330,6 +505,7 @@ def user_runtime_path( "user_runtime_dir", "site_data_dir", "site_config_dir", + "site_cache_dir", "user_data_path", "user_config_path", "user_cache_path", @@ -339,4 +515,5 @@ def user_runtime_path( "user_runtime_path", "site_data_path", "site_config_path", + "site_cache_path", ] diff --git a/src/pip/_vendor/platformdirs/__main__.py b/src/pip/_vendor/platformdirs/__main__.py index 9c54bfb438d..7171f13114e 100644 --- a/src/pip/_vendor/platformdirs/__main__.py +++ b/src/pip/_vendor/platformdirs/__main__.py @@ -12,6 +12,7 @@ "user_runtime_dir", "site_data_dir", "site_config_dir", + "site_cache_dir", ) diff --git a/src/pip/_vendor/platformdirs/android.py b/src/pip/_vendor/platformdirs/android.py index eda80935123..f6de7451b25 100644 --- a/src/pip/_vendor/platformdirs/android.py +++ b/src/pip/_vendor/platformdirs/android.py @@ -12,8 +12,9 @@ class Android(PlatformDirsABC): """ Follows the guidance 
`from here `_. Makes use of the - `appname ` and - `version `. + `appname `, + `version `, + `ensure_exists `. """ @property @@ -43,6 +44,11 @@ def user_cache_dir(self) -> str: """:return: cache directory tied to the user, e.g. e.g. ``/data/user///cache/``""" return self._append_app_name_and_version(cast(str, _android_folder()), "cache") + @property + def site_cache_dir(self) -> str: + """:return: cache directory shared by users, same as `user_cache_dir`""" + return self.user_cache_dir + @property def user_state_dir(self) -> str: """:return: state directory tied to the user, same as `user_data_dir`""" diff --git a/src/pip/_vendor/platformdirs/api.py b/src/pip/_vendor/platformdirs/api.py index 6f6e2c2c69d..f140e8b6db8 100644 --- a/src/pip/_vendor/platformdirs/api.py +++ b/src/pip/_vendor/platformdirs/api.py @@ -22,6 +22,7 @@ def __init__( roaming: bool = False, multipath: bool = False, opinion: bool = True, + ensure_exists: bool = False, ): """ Create a new platform directory. @@ -32,6 +33,7 @@ def __init__( :param roaming: See `roaming`. :param multipath: See `multipath`. :param opinion: See `opinion`. + :param ensure_exists: See `ensure_exists`. """ self.appname = appname #: The name of application. self.appauthor = appauthor @@ -56,6 +58,11 @@ def __init__( returned. By default, the first item would only be returned. """ self.opinion = opinion #: A flag to indicating to use opinionated values. + self.ensure_exists = ensure_exists + """ + Optionally create the directory (and any missing parents) upon access if it does not exist. + By default, no directories are created. 
+ """ def _append_app_name_and_version(self, *base: str) -> str: params = list(base[1:]) @@ -63,7 +70,13 @@ def _append_app_name_and_version(self, *base: str) -> str: params.append(self.appname) if self.version: params.append(self.version) - return os.path.join(base[0], *params) + path = os.path.join(base[0], *params) + self._optionally_create_directory(path) + return path + + def _optionally_create_directory(self, path: str) -> None: + if self.ensure_exists: + Path(path).mkdir(parents=True, exist_ok=True) @property @abstractmethod @@ -90,6 +103,11 @@ def site_config_dir(self) -> str: def user_cache_dir(self) -> str: """:return: cache directory tied to the user""" + @property + @abstractmethod + def site_cache_dir(self) -> str: + """:return: cache directory shared by users""" + @property @abstractmethod def user_state_dir(self) -> str: @@ -135,6 +153,11 @@ def user_cache_path(self) -> Path: """:return: cache path tied to the user""" return Path(self.user_cache_dir) + @property + def site_cache_path(self) -> Path: + """:return: cache path shared by users""" + return Path(self.site_cache_dir) + @property def user_state_path(self) -> Path: """:return: state path tied to the user""" diff --git a/src/pip/_vendor/platformdirs/macos.py b/src/pip/_vendor/platformdirs/macos.py index a01337c7764..ec9751129c1 100644 --- a/src/pip/_vendor/platformdirs/macos.py +++ b/src/pip/_vendor/platformdirs/macos.py @@ -9,14 +9,15 @@ class MacOS(PlatformDirsABC): """ Platform directories for the macOS operating system. Follows the guidance from `Apple documentation `_. - Makes use of the `appname ` and - `version `. + Makes use of the `appname `, + `version `, + `ensure_exists `. """ @property def user_data_dir(self) -> str: """:return: data directory tied to the user, e.g. 
``~/Library/Application Support/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support/")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support")) @property def site_data_dir(self) -> str: @@ -25,19 +26,24 @@ def site_data_dir(self) -> str: @property def user_config_dir(self) -> str: - """:return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Preferences/")) + """:return: config directory tied to the user, same as `user_data_dir`""" + return self.user_data_dir @property def site_config_dir(self) -> str: - """:return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``""" - return self._append_app_name_and_version("/Library/Preferences") + """:return: config directory shared by the users, same as `site_data_dir`""" + return self.site_data_dir @property def user_cache_dir(self) -> str: """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``""" return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches")) + @property + def site_cache_dir(self) -> str: + """:return: cache directory shared by users, e.g. ``/Library/Caches/$appname/$version``""" + return self._append_app_name_and_version("/Library/Caches") + @property def user_state_dir(self) -> str: """:return: state directory tied to the user, same as `user_data_dir`""" diff --git a/src/pip/_vendor/platformdirs/unix.py b/src/pip/_vendor/platformdirs/unix.py index 9aca5a03054..17d355da9f4 100644 --- a/src/pip/_vendor/platformdirs/unix.py +++ b/src/pip/_vendor/platformdirs/unix.py @@ -24,7 +24,8 @@ class Unix(PlatformDirsABC): `appname `, `version `, `multipath `, - `opinion `. + `opinion `, + `ensure_exists `. 
""" @property @@ -93,6 +94,13 @@ def user_cache_dir(self) -> str: path = os.path.expanduser("~/.cache") return self._append_app_name_and_version(path) + @property + def site_cache_dir(self) -> str: + """ + :return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version`` + """ + return self._append_app_name_and_version("/var/tmp") + @property def user_state_dir(self) -> str: """ @@ -148,6 +156,11 @@ def site_config_path(self) -> Path: """:return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``""" return self._first_item_as_path_if_multipath(self.site_config_dir) + @property + def site_cache_path(self) -> Path: + """:return: cache path shared by users. Only return first item, even if ``multipath`` is set to ``True``""" + return self._first_item_as_path_if_multipath(self.site_cache_dir) + def _first_item_as_path_if_multipath(self, directory: str) -> Path: if self.multipath: # If multipath is True, the first path is returned. diff --git a/src/pip/_vendor/platformdirs/version.py b/src/pip/_vendor/platformdirs/version.py index 9f6eb98e8f0..d906a2c99e6 100644 --- a/src/pip/_vendor/platformdirs/version.py +++ b/src/pip/_vendor/platformdirs/version.py @@ -1,4 +1,4 @@ # file generated by setuptools_scm # don't change, don't track in version control -__version__ = version = '2.6.2' -__version_tuple__ = version_tuple = (2, 6, 2) +__version__ = version = '3.2.0' +__version_tuple__ = version_tuple = (3, 2, 0) diff --git a/src/pip/_vendor/platformdirs/windows.py b/src/pip/_vendor/platformdirs/windows.py index d5c27b34140..e7573c3d6ae 100644 --- a/src/pip/_vendor/platformdirs/windows.py +++ b/src/pip/_vendor/platformdirs/windows.py @@ -17,7 +17,9 @@ class Windows(PlatformDirsABC): `appauthor `, `version `, `roaming `, - `opinion `.""" + `opinion `, + `ensure_exists `. 
+ """ @property def user_data_dir(self) -> str: @@ -41,7 +43,9 @@ def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str: params.append(opinion_value) if self.version: params.append(self.version) - return os.path.join(path, *params) + path = os.path.join(path, *params) + self._optionally_create_directory(path) + return path @property def site_data_dir(self) -> str: @@ -68,6 +72,12 @@ def user_cache_dir(self) -> str: path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA")) return self._append_parts(path, opinion_value="Cache") + @property + def site_cache_dir(self) -> str: + """:return: cache directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname\\Cache\\$version``""" + path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA")) + return self._append_parts(path, opinion_value="Cache") + @property def user_state_dir(self) -> str: """:return: state directory tied to the user, same as `user_data_dir`""" @@ -81,6 +91,7 @@ def user_log_dir(self) -> str: path = self.user_data_dir if self.opinion: path = os.path.join(path, "Logs") + self._optionally_create_directory(path) return path @property diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index d6a7ee598ef..537379517fa 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -4,7 +4,7 @@ distlib==0.3.6 distro==1.8.0 msgpack==1.0.5 packaging==21.3 -platformdirs==2.6.2 +platformdirs==3.2.0 pyparsing==3.0.9 pyproject-hooks==1.0.0 requests==2.28.2 From 141523c5ff1f0a1faf327d0e3b7b39d1ff158c49 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 11:43:36 +0100 Subject: [PATCH 384/730] Upgrade urllib3 to 1.26.15 --- news/urllib3.vendor.rst | 1 + src/pip/_vendor/urllib3/_version.py | 2 +- src/pip/_vendor/urllib3/connection.py | 5 +++++ src/pip/_vendor/urllib3/connectionpool.py | 2 +- src/pip/_vendor/urllib3/util/timeout.py | 9 ++++++--- src/pip/_vendor/urllib3/util/url.py | 4 ++-- src/pip/_vendor/vendor.txt | 2 +- 7 files 
changed, 17 insertions(+), 8 deletions(-) create mode 100644 news/urllib3.vendor.rst diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst new file mode 100644 index 00000000000..09e82a8f2ff --- /dev/null +++ b/news/urllib3.vendor.rst @@ -0,0 +1 @@ +Upgrade urllib3 to 1.26.15 diff --git a/src/pip/_vendor/urllib3/_version.py b/src/pip/_vendor/urllib3/_version.py index 7c031661ba8..e12dd0e7853 100644 --- a/src/pip/_vendor/urllib3/_version.py +++ b/src/pip/_vendor/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.14" +__version__ = "1.26.15" diff --git a/src/pip/_vendor/urllib3/connection.py b/src/pip/_vendor/urllib3/connection.py index 10fb36c4e35..54b96b19154 100644 --- a/src/pip/_vendor/urllib3/connection.py +++ b/src/pip/_vendor/urllib3/connection.py @@ -229,6 +229,11 @@ def putheader(self, header, *values): ) def request(self, method, url, body=None, headers=None): + # Update the inner socket's timeout value to send the request. + # This only triggers if the connection is re-used. + if getattr(self, "sock", None) is not None: + self.sock.settimeout(self.timeout) + if headers is None: headers = {} else: diff --git a/src/pip/_vendor/urllib3/connectionpool.py b/src/pip/_vendor/urllib3/connectionpool.py index 70873927924..c23d736b186 100644 --- a/src/pip/_vendor/urllib3/connectionpool.py +++ b/src/pip/_vendor/urllib3/connectionpool.py @@ -379,7 +379,7 @@ def _make_request( timeout_obj = self._get_timeout(timeout) timeout_obj.start_connect() - conn.timeout = timeout_obj.connect_timeout + conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout) # Trigger any extra validation we need to do. 
try: diff --git a/src/pip/_vendor/urllib3/util/timeout.py b/src/pip/_vendor/urllib3/util/timeout.py index ff69593b05b..78e18a62724 100644 --- a/src/pip/_vendor/urllib3/util/timeout.py +++ b/src/pip/_vendor/urllib3/util/timeout.py @@ -2,9 +2,8 @@ import time -# The default socket timeout, used by httplib to indicate that no timeout was -# specified by the user -from socket import _GLOBAL_DEFAULT_TIMEOUT +# The default socket timeout, used by httplib to indicate that no timeout was; specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT, getdefaulttimeout from ..exceptions import TimeoutStateError @@ -116,6 +115,10 @@ def __repr__(self): # __str__ provided for backwards compatibility __str__ = __repr__ + @classmethod + def resolve_default_timeout(cls, timeout): + return getdefaulttimeout() if timeout is cls.DEFAULT_TIMEOUT else timeout + @classmethod def _validate_timeout(cls, value, name): """Check that a timeout attribute is valid. diff --git a/src/pip/_vendor/urllib3/util/url.py b/src/pip/_vendor/urllib3/util/url.py index d6d0bbcea66..a960b2f3c5f 100644 --- a/src/pip/_vendor/urllib3/util/url.py +++ b/src/pip/_vendor/urllib3/util/url.py @@ -50,7 +50,7 @@ "(?:(?:%(hex)s:){0,6}%(hex)s)?::", ] -UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~" IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" @@ -303,7 +303,7 @@ def _normalize_host(host, scheme): def _idna_encode(name): - if name and any([ord(x) > 128 for x in name]): + if name and any(ord(x) >= 128 for x in name): try: from pip._vendor import idna except ImportError: diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 537379517fa..f53657bf296 100644 --- a/src/pip/_vendor/vendor.txt +++ 
b/src/pip/_vendor/vendor.txt @@ -11,7 +11,7 @@ requests==2.28.2 certifi==2022.12.7 chardet==5.1.0 idna==3.4 - urllib3==1.26.14 + urllib3==1.26.15 rich==12.6.0 pygments==2.13.0 typing_extensions==4.4.0 From ffac4e461171e5fbd644c03af896ea1fd7f7df24 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 11:43:52 +0100 Subject: [PATCH 385/730] Upgrade rich to 13.3.3 --- news/rich.vendor.rst | 1 + src/pip/_vendor/rich/_export_format.py | 4 +- src/pip/_vendor/rich/_fileno.py | 24 +++++ src/pip/_vendor/rich/_null_file.py | 14 --- src/pip/_vendor/rich/align.py | 2 +- src/pip/_vendor/rich/ansi.py | 3 + src/pip/_vendor/rich/cells.py | 2 +- src/pip/_vendor/rich/color.py | 16 +-- src/pip/_vendor/rich/console.py | 59 +++++++---- src/pip/_vendor/rich/default_styles.py | 10 +- src/pip/_vendor/rich/file_proxy.py | 5 +- src/pip/_vendor/rich/highlighter.py | 2 +- src/pip/_vendor/rich/json.py | 4 +- src/pip/_vendor/rich/live.py | 2 + src/pip/_vendor/rich/pretty.py | 103 ++++++------------ src/pip/_vendor/rich/progress.py | 21 ++-- src/pip/_vendor/rich/repr.py | 2 +- src/pip/_vendor/rich/rule.py | 6 +- src/pip/_vendor/rich/segment.py | 8 +- src/pip/_vendor/rich/spinner.py | 23 ++-- src/pip/_vendor/rich/style.py | 23 ++++ src/pip/_vendor/rich/syntax.py | 11 +- src/pip/_vendor/rich/text.py | 8 +- src/pip/_vendor/rich/theme.py | 7 +- src/pip/_vendor/rich/traceback.py | 139 +++++++++++++++++++------ src/pip/_vendor/vendor.txt | 2 +- 26 files changed, 304 insertions(+), 197 deletions(-) create mode 100644 news/rich.vendor.rst create mode 100644 src/pip/_vendor/rich/_fileno.py diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst new file mode 100644 index 00000000000..0bedd3bb4e1 --- /dev/null +++ b/news/rich.vendor.rst @@ -0,0 +1 @@ +Upgrade rich to 13.3.3 diff --git a/src/pip/_vendor/rich/_export_format.py b/src/pip/_vendor/rich/_export_format.py index b79c13069b9..094d2dc226d 100644 --- a/src/pip/_vendor/rich/_export_format.py +++ b/src/pip/_vendor/rich/_export_format.py @@ 
-12,9 +12,7 @@ - -

{code}
- +
{code}
""" diff --git a/src/pip/_vendor/rich/_fileno.py b/src/pip/_vendor/rich/_fileno.py new file mode 100644 index 00000000000..b17ee651174 --- /dev/null +++ b/src/pip/_vendor/rich/_fileno.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import IO, Callable + + +def get_fileno(file_like: IO[str]) -> int | None: + """Get fileno() from a file, accounting for poorly implemented file-like objects. + + Args: + file_like (IO): A file-like object. + + Returns: + int | None: The result of fileno if available, or None if operation failed. + """ + fileno: Callable[[], int] | None = getattr(file_like, "fileno", None) + if fileno is not None: + try: + return fileno() + except Exception: + # `fileno` is documented as potentially raising a OSError + # Alas, from the issues, there are so many poorly implemented file-like objects, + # that `fileno()` can raise just about anything. + return None + return None diff --git a/src/pip/_vendor/rich/_null_file.py b/src/pip/_vendor/rich/_null_file.py index 49038bfcbe5..b659673ef3c 100644 --- a/src/pip/_vendor/rich/_null_file.py +++ b/src/pip/_vendor/rich/_null_file.py @@ -3,20 +3,6 @@ class NullFile(IO[str]): - - # TODO: "mode", "name" and "closed" are only required for Python 3.6. 
- - @property - def mode(self) -> str: - return "" - - @property - def name(self) -> str: - return "NullFile" - - def closed(self) -> bool: - return False - def close(self) -> None: pass diff --git a/src/pip/_vendor/rich/align.py b/src/pip/_vendor/rich/align.py index d5abb594732..c310b66e783 100644 --- a/src/pip/_vendor/rich/align.py +++ b/src/pip/_vendor/rich/align.py @@ -303,7 +303,7 @@ def __rich_measure__( ), width=60, style="on dark_blue", - title="Algin", + title="Align", ) console.print( diff --git a/src/pip/_vendor/rich/ansi.py b/src/pip/_vendor/rich/ansi.py index 92ef5194117..66365e65360 100644 --- a/src/pip/_vendor/rich/ansi.py +++ b/src/pip/_vendor/rich/ansi.py @@ -43,6 +43,9 @@ def _ansi_tokenize(ansi_text: str) -> Iterable[_AnsiToken]: if start > position: yield _AnsiToken(ansi_text[position:start]) if sgr: + if sgr == "(": + position = end + 1 + continue if sgr.endswith("m"): yield _AnsiToken("", sgr[1:-1], osc) else: diff --git a/src/pip/_vendor/rich/cells.py b/src/pip/_vendor/rich/cells.py index 139b949f7f2..9354f9e3140 100644 --- a/src/pip/_vendor/rich/cells.py +++ b/src/pip/_vendor/rich/cells.py @@ -60,7 +60,7 @@ def _get_codepoint_cell_size(codepoint: int) -> int: """Get the cell size of a character. Args: - character (str): A single character. + codepoint (int): Codepoint of a character. Returns: int: Number of cells (0, 1 or 2) occupied by that character. 
diff --git a/src/pip/_vendor/rich/color.py b/src/pip/_vendor/rich/color.py index ef2e895d7cb..dfe455937c8 100644 --- a/src/pip/_vendor/rich/color.py +++ b/src/pip/_vendor/rich/color.py @@ -513,15 +513,14 @@ def get_ansi_codes(self, foreground: bool = True) -> Tuple[str, ...]: def downgrade(self, system: ColorSystem) -> "Color": """Downgrade a color system to a system with fewer colors.""" - if self.type in [ColorType.DEFAULT, system]: + if self.type in (ColorType.DEFAULT, system): return self # Convert to 8-bit color from truecolor color if system == ColorSystem.EIGHT_BIT and self.system == ColorSystem.TRUECOLOR: assert self.triplet is not None - red, green, blue = self.triplet.normalized - _h, l, s = rgb_to_hls(red, green, blue) - # If saturation is under 10% assume it is grayscale - if s < 0.1: + _h, l, s = rgb_to_hls(*self.triplet.normalized) + # If saturation is under 15% assume it is grayscale + if s < 0.15: gray = round(l * 25.0) if gray == 0: color_number = 16 @@ -531,8 +530,13 @@ def downgrade(self, system: ColorSystem) -> "Color": color_number = 231 + gray return Color(self.name, ColorType.EIGHT_BIT, number=color_number) + red, green, blue = self.triplet + six_red = red / 95 if red < 95 else 1 + (red - 95) / 40 + six_green = green / 95 if green < 95 else 1 + (green - 95) / 40 + six_blue = blue / 95 if blue < 95 else 1 + (blue - 95) / 40 + color_number = ( - 16 + 36 * round(red * 5.0) + 6 * round(green * 5.0) + round(blue * 5.0) + 16 + 36 * round(six_red) + 6 * round(six_green) + round(six_blue) ) return Color(self.name, ColorType.EIGHT_BIT, number=color_number) diff --git a/src/pip/_vendor/rich/console.py b/src/pip/_vendor/rich/console.py index f805f2dea7d..7c363dfdc5e 100644 --- a/src/pip/_vendor/rich/console.py +++ b/src/pip/_vendor/rich/console.py @@ -1,5 +1,4 @@ import inspect -import io import os import platform import sys @@ -48,6 +47,7 @@ from . 
import errors, themes from ._emoji_replace import _emoji_replace from ._export_format import CONSOLE_HTML_FORMAT, CONSOLE_SVG_FORMAT +from ._fileno import get_fileno from ._log_render import FormatTimeCallable, LogRender from .align import Align, AlignMethod from .color import ColorSystem, blend_rgb @@ -711,11 +711,6 @@ def __init__( self._force_terminal = None if force_terminal is not None: self._force_terminal = force_terminal - else: - # If FORCE_COLOR env var has any value at all, we force terminal. - force_color = self._environ.get("FORCE_COLOR") - if force_color is not None: - self._force_terminal = True self._file = file self.quiet = quiet @@ -758,7 +753,7 @@ def __init__( self._is_alt_screen = False def __repr__(self) -> str: - return f"" + return f"" @property def file(self) -> IO[str]: @@ -949,6 +944,15 @@ def is_terminal(self) -> bool: # Return False for Idle which claims to be a tty but can't handle ansi codes return False + if self.is_jupyter: + # return False for Jupyter, which may have FORCE_COLOR set + return False + + # If FORCE_COLOR env var has any value at all, we assume a terminal. 
+ force_color = self._environ.get("FORCE_COLOR") + if force_color is not None: + self._force_terminal = True + isatty: Optional[Callable[[], bool]] = getattr(self.file, "isatty", None) try: return False if isatty is None else isatty() @@ -1146,7 +1150,7 @@ def status( status: RenderableType, *, spinner: str = "dots", - spinner_style: str = "status.spinner", + spinner_style: StyleType = "status.spinner", speed: float = 1.0, refresh_per_second: float = 12.5, ) -> "Status": @@ -1523,7 +1527,7 @@ def check_text() -> None: if text: sep_text = Text(sep, justify=justify, end=end) append(sep_text.join(text)) - del text[:] + text.clear() for renderable in objects: renderable = rich_cast(renderable) @@ -2006,12 +2010,11 @@ def _check_buffer(self) -> None: if WINDOWS: use_legacy_windows_render = False if self.legacy_windows: - try: + fileno = get_fileno(self.file) + if fileno is not None: use_legacy_windows_render = ( - self.file.fileno() in _STD_STREAMS_OUTPUT + fileno in _STD_STREAMS_OUTPUT ) - except (ValueError, io.UnsupportedOperation): - pass if use_legacy_windows_render: from pip._vendor.rich._win32_console import LegacyWindowsTerm @@ -2026,13 +2029,31 @@ def _check_buffer(self) -> None: # Either a non-std stream on legacy Windows, or modern Windows. 
text = self._render_buffer(self._buffer[:]) # https://bugs.python.org/issue37871 + # https://github.com/python/cpython/issues/82052 + # We need to avoid writing more than 32Kb in a single write, due to the above bug write = self.file.write - for line in text.splitlines(True): - try: - write(line) - except UnicodeEncodeError as error: - error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" - raise + # Worse case scenario, every character is 4 bytes of utf-8 + MAX_WRITE = 32 * 1024 // 4 + try: + if len(text) <= MAX_WRITE: + write(text) + else: + batch: List[str] = [] + batch_append = batch.append + size = 0 + for line in text.splitlines(True): + if size + len(line) > MAX_WRITE and batch: + write("".join(batch)) + batch.clear() + size = 0 + batch_append(line) + size += len(line) + if batch: + write("".join(batch)) + batch.clear() + except UnicodeEncodeError as error: + error.reason = f"{error.reason}\n*** You may need to add PYTHONIOENCODING=utf-8 to your environment ***" + raise else: text = self._render_buffer(self._buffer[:]) try: diff --git a/src/pip/_vendor/rich/default_styles.py b/src/pip/_vendor/rich/default_styles.py index 46e9ea52c54..dca37193abf 100644 --- a/src/pip/_vendor/rich/default_styles.py +++ b/src/pip/_vendor/rich/default_styles.py @@ -138,10 +138,11 @@ "tree.line": Style(), "markdown.paragraph": Style(), "markdown.text": Style(), - "markdown.emph": Style(italic=True), + "markdown.em": Style(italic=True), + "markdown.emph": Style(italic=True), # For commonmark backwards compatibility "markdown.strong": Style(bold=True), - "markdown.code": Style(bgcolor="black", color="bright_white"), - "markdown.code_block": Style(dim=True, color="cyan", bgcolor="black"), + "markdown.code": Style(bold=True, color="cyan", bgcolor="black"), + "markdown.code_block": Style(color="cyan", bgcolor="black"), "markdown.block_quote": Style(color="magenta"), "markdown.list": Style(color="cyan"), "markdown.item": Style(), @@ 
-157,7 +158,8 @@ "markdown.h6": Style(italic=True), "markdown.h7": Style(italic=True, dim=True), "markdown.link": Style(color="bright_blue"), - "markdown.link_url": Style(color="blue"), + "markdown.link_url": Style(color="blue", underline=True), + "markdown.s": Style(strike=True), "iso8601.date": Style(color="blue"), "iso8601.time": Style(color="magenta"), "iso8601.timezone": Style(color="yellow"), diff --git a/src/pip/_vendor/rich/file_proxy.py b/src/pip/_vendor/rich/file_proxy.py index cc69f22f3cc..4b0b0da6c2a 100644 --- a/src/pip/_vendor/rich/file_proxy.py +++ b/src/pip/_vendor/rich/file_proxy.py @@ -34,7 +34,7 @@ def write(self, text: str) -> int: line, new_line, text = text.partition("\n") if new_line: lines.append("".join(buffer) + line) - del buffer[:] + buffer.clear() else: buffer.append(line) break @@ -52,3 +52,6 @@ def flush(self) -> None: if output: self.__console.print(output) del self.__buffer[:] + + def fileno(self) -> int: + return self.__file.fileno() diff --git a/src/pip/_vendor/rich/highlighter.py b/src/pip/_vendor/rich/highlighter.py index 82293dffc49..c2646794a98 100644 --- a/src/pip/_vendor/rich/highlighter.py +++ b/src/pip/_vendor/rich/highlighter.py @@ -82,7 +82,7 @@ class ReprHighlighter(RegexHighlighter): base_style = "repr." 
highlights = [ - r"(?P<)(?P[-\w.:|]*)(?P[\w\W]*?)(?P>)", + r"(?P<)(?P[-\w.:|]*)(?P[\w\W]*)(?P>)", r'(?P[\w_]{1,50})=(?P"?[\w_]+"?)?', r"(?P[][{}()])", _combine_regex( diff --git a/src/pip/_vendor/rich/json.py b/src/pip/_vendor/rich/json.py index 21b642ab8e5..ea94493f21e 100644 --- a/src/pip/_vendor/rich/json.py +++ b/src/pip/_vendor/rich/json.py @@ -1,3 +1,4 @@ +from pathlib import Path from json import loads, dumps from typing import Any, Callable, Optional, Union @@ -131,8 +132,7 @@ def __rich__(self) -> Text: if args.path == "-": json_data = sys.stdin.read() else: - with open(args.path, "rt") as json_file: - json_data = json_file.read() + json_data = Path(args.path).read_text() except Exception as error: error_console.print(f"Unable to read {args.path!r}; {error}") sys.exit(-1) diff --git a/src/pip/_vendor/rich/live.py b/src/pip/_vendor/rich/live.py index e635fe5c97e..3ebbbc4ccbe 100644 --- a/src/pip/_vendor/rich/live.py +++ b/src/pip/_vendor/rich/live.py @@ -210,6 +210,8 @@ def update(self, renderable: RenderableType, *, refresh: bool = False) -> None: renderable (RenderableType): New renderable to use. refresh (bool, optional): Refresh the display. Defaults to False. """ + if isinstance(renderable, str): + renderable = self.console.render_str(renderable) with self._lock: self._renderable = renderable if refresh: diff --git a/src/pip/_vendor/rich/pretty.py b/src/pip/_vendor/rich/pretty.py index 847b558c9c4..2bd9eb0073d 100644 --- a/src/pip/_vendor/rich/pretty.py +++ b/src/pip/_vendor/rich/pretty.py @@ -30,7 +30,7 @@ try: import attr as _attr_module - _has_attrs = True + _has_attrs = hasattr(_attr_module, "ib") except ImportError: # pragma: no cover _has_attrs = False @@ -55,13 +55,6 @@ ) -JUPYTER_CLASSES_TO_NOT_RENDER = { - # Matplotlib "Artists" manage their own rendering in a Jupyter notebook, and we should not try to render them too. - # "Typically, all [Matplotlib] visible elements in a figure are subclasses of Artist." 
- "matplotlib.artist.Artist", -} - - def _is_attr_object(obj: Any) -> bool: """Check if an object was created with attrs module.""" return _has_attrs and _attr_module.has(type(obj)) @@ -122,69 +115,40 @@ def _ipy_display_hook( max_string: Optional[int] = None, max_depth: Optional[int] = None, expand_all: bool = False, -) -> None: +) -> Union[str, None]: # needed here to prevent circular import: - from ._inspect import is_object_one_of_types from .console import ConsoleRenderable # always skip rich generated jupyter renderables or None values if _safe_isinstance(value, JupyterRenderable) or value is None: - return + return None console = console or get_console() - if console.is_jupyter: - # Delegate rendering to IPython if the object (and IPython) supports it - # https://ipython.readthedocs.io/en/stable/config/integrating.html#rich-display - ipython_repr_methods = [ - "_repr_html_", - "_repr_markdown_", - "_repr_json_", - "_repr_latex_", - "_repr_jpeg_", - "_repr_png_", - "_repr_svg_", - "_repr_mimebundle_", - ] - for repr_method in ipython_repr_methods: - method = getattr(value, repr_method, None) - if inspect.ismethod(method): - # Calling the method ourselves isn't ideal. The interface for the `_repr_*_` methods - # specifies that if they return None, then they should not be rendered - # by the notebook. - try: - repr_result = method() - except Exception: - continue # If the method raises, treat it as if it doesn't exist, try any others - if repr_result is not None: - return # Delegate rendering to IPython - - # When in a Jupyter notebook let's avoid the display of some specific classes, - # as they result in the rendering of useless and noisy lines such as `
`. - # What does this do? - # --> if the class has "matplotlib.artist.Artist" in its hierarchy for example, we don't render it. - if is_object_one_of_types(value, JUPYTER_CLASSES_TO_NOT_RENDER): - return - - # certain renderables should start on a new line - if _safe_isinstance(value, ConsoleRenderable): - console.line() - - console.print( - value - if _safe_isinstance(value, RichRenderable) - else Pretty( - value, - overflow=overflow, - indent_guides=indent_guides, - max_length=max_length, - max_string=max_string, - max_depth=max_depth, - expand_all=expand_all, - margin=12, - ), - crop=crop, - new_line_start=True, - ) + + with console.capture() as capture: + # certain renderables should start on a new line + if _safe_isinstance(value, ConsoleRenderable): + console.line() + console.print( + value + if _safe_isinstance(value, RichRenderable) + else Pretty( + value, + overflow=overflow, + indent_guides=indent_guides, + max_length=max_length, + max_string=max_string, + max_depth=max_depth, + expand_all=expand_all, + margin=12, + ), + crop=crop, + new_line_start=True, + end="", + ) + # strip trailing newline, not usually part of a text repr + # I'm not sure if this should be prevented at a lower level + return capture.get().rstrip("\n") def _safe_isinstance( @@ -247,7 +211,7 @@ def display_hook(value: Any) -> None: ) builtins._ = value # type: ignore[attr-defined] - try: # pragma: no cover + if "get_ipython" in globals(): ip = get_ipython() # type: ignore[name-defined] from IPython.core.formatters import BaseFormatter @@ -272,7 +236,7 @@ def __call__(self, value: Any) -> Any: # replace plain text formatter with rich formatter rich_formatter = RichFormatter() ip.display_formatter.formatters["text/plain"] = rich_formatter - except Exception: + else: sys.displayhook = display_hook @@ -371,6 +335,7 @@ def __rich_measure__( indent_size=self.indent_size, max_length=self.max_length, max_string=self.max_string, + max_depth=self.max_depth, expand_all=self.expand_all, ) 
text_width = ( @@ -433,7 +398,7 @@ class Node: is_tuple: bool = False is_namedtuple: bool = False children: Optional[List["Node"]] = None - key_separator = ": " + key_separator: str = ": " separator: str = ", " def iter_tokens(self) -> Iterable[str]: @@ -642,7 +607,6 @@ def _traverse(obj: Any, root: bool = False, depth: int = 0) -> Node: return Node(value_repr="...") obj_type = type(obj) - py_version = (sys.version_info.major, sys.version_info.minor) children: List[Node] reached_max_depth = max_depth is not None and depth >= max_depth @@ -780,7 +744,7 @@ def iter_attrs() -> Iterable[ is_dataclass(obj) and not _safe_isinstance(obj, type) and not fake_attributes - and (_is_dataclass_repr(obj) or py_version == (3, 6)) + and _is_dataclass_repr(obj) ): push_visited(obj_id) children = [] @@ -793,6 +757,7 @@ def iter_attrs() -> Iterable[ close_brace=")", children=children, last=root, + empty=f"{obj.__class__.__name__}()", ) for last, field in loop_last( diff --git a/src/pip/_vendor/rich/progress.py b/src/pip/_vendor/rich/progress.py index e7d163c1377..8b0a315f324 100644 --- a/src/pip/_vendor/rich/progress.py +++ b/src/pip/_vendor/rich/progress.py @@ -4,12 +4,12 @@ import warnings from abc import ABC, abstractmethod from collections import deque -from collections.abc import Sized from dataclasses import dataclass, field from datetime import timedelta from io import RawIOBase, UnsupportedOperation from math import ceil from mmap import mmap +from operator import length_hint from os import PathLike, stat from threading import Event, RLock, Thread from types import TracebackType @@ -151,7 +151,7 @@ def track( pulse_style=pulse_style, ), TaskProgressColumn(show_speed=show_speed), - TimeRemainingColumn(), + TimeRemainingColumn(elapsed_when_finished=True), ) ) progress = Progress( @@ -677,7 +677,7 @@ class TimeElapsedColumn(ProgressColumn): """Renders time elapsed.""" def render(self, task: "Task") -> Text: - """Show time remaining.""" + """Show time elapsed.""" elapsed = 
task.finished_time if task.finished else task.elapsed if elapsed is None: return Text("-:--:--", style="progress.elapsed") @@ -1197,18 +1197,13 @@ def track( Returns: Iterable[ProgressType]: An iterable of values taken from the provided sequence. """ - - task_total: Optional[float] = None if total is None: - if isinstance(sequence, Sized): - task_total = float(len(sequence)) - else: - task_total = total + total = float(length_hint(sequence)) or None if task_id is None: - task_id = self.add_task(description, total=task_total) + task_id = self.add_task(description, total=total) else: - self.update(task_id, total=task_total) + self.update(task_id, total=total) if self.live.auto_refresh: with _TrackThread(self, task_id, update_period) as track_thread: @@ -1342,7 +1337,7 @@ def open( RuntimeWarning, ) buffering = -1 - elif _mode == "rt" or _mode == "r": + elif _mode in ("rt", "r"): if buffering == 0: raise ValueError("can't have unbuffered text I/O") elif buffering == 1: @@ -1363,7 +1358,7 @@ def open( reader = _Reader(handle, self, task_id, close_handle=True) # wrap the reader in a `TextIOWrapper` if text mode - if mode == "r" or mode == "rt": + if mode in ("r", "rt"): return io.TextIOWrapper( reader, encoding=encoding, diff --git a/src/pip/_vendor/rich/repr.py b/src/pip/_vendor/rich/repr.py index 72d1a7e30b6..f284bcafa6a 100644 --- a/src/pip/_vendor/rich/repr.py +++ b/src/pip/_vendor/rich/repr.py @@ -55,7 +55,7 @@ def auto_repr(self: T) -> str: if key is None: append(repr(value)) else: - if len(default) and default[0] == value: + if default and default[0] == value: continue append(f"{key}={value!r}") else: diff --git a/src/pip/_vendor/rich/rule.py b/src/pip/_vendor/rich/rule.py index 0b78f7a4ec4..fd00ce6e4ce 100644 --- a/src/pip/_vendor/rich/rule.py +++ b/src/pip/_vendor/rich/rule.py @@ -51,13 +51,9 @@ def __rich_console__( ) -> RenderResult: width = options.max_width - # Python3.6 doesn't have an isascii method on str - isascii = getattr(str, "isascii", None) or ( - 
lambda s: all(ord(c) < 128 for c in s) - ) characters = ( "-" - if (options.ascii_only and not isascii(self.characters)) + if (options.ascii_only and not self.characters.isascii()) else self.characters ) diff --git a/src/pip/_vendor/rich/segment.py b/src/pip/_vendor/rich/segment.py index 1ea5435adc6..e1257984635 100644 --- a/src/pip/_vendor/rich/segment.py +++ b/src/pip/_vendor/rich/segment.py @@ -119,7 +119,7 @@ def _split_cells(cls, segment: "Segment", cut: int) -> Tuple["Segment", "Segment cell_size = get_character_cell_size - pos = int((cut / cell_length) * len(text)) + pos = int((cut / cell_length) * (len(text) - 1)) before = text[:pos] cell_pos = cell_len(before) @@ -303,7 +303,7 @@ def split_and_crop_lines( if include_new_lines: cropped_line.append(new_line_segment) yield cropped_line - del line[:] + line.clear() else: append(segment) if line: @@ -365,7 +365,7 @@ def get_line_length(cls, line: List["Segment"]) -> int: int: The length of the line. """ _cell_len = cell_len - return sum(_cell_len(segment.text) for segment in line) + return sum(_cell_len(text) for text, style, control in line if not control) @classmethod def get_shape(cls, lines: List[List["Segment"]]) -> Tuple[int, int]: @@ -727,7 +727,7 @@ def __rich_console__( console.print(Syntax(code, "python", line_numbers=True)) console.print() console.print( - "When you call [b]print()[/b], Rich [i]renders[/i] the object in to the the following:\n" + "When you call [b]print()[/b], Rich [i]renders[/i] the object in to the following:\n" ) fragments = list(console.render(text)) console.print(fragments) diff --git a/src/pip/_vendor/rich/spinner.py b/src/pip/_vendor/rich/spinner.py index 0879088e14c..91ea630e10f 100644 --- a/src/pip/_vendor/rich/spinner.py +++ b/src/pip/_vendor/rich/spinner.py @@ -11,6 +11,18 @@ class Spinner: + """A spinner animation. + + Args: + name (str): Name of spinner (run python -m rich.spinner). 
+ text (RenderableType, optional): A renderable to display at the right of the spinner (str or Text typically). Defaults to "". + style (StyleType, optional): Style for spinner animation. Defaults to None. + speed (float, optional): Speed factor for animation. Defaults to 1.0. + + Raises: + KeyError: If name isn't one of the supported spinner animations. + """ + def __init__( self, name: str, @@ -19,17 +31,6 @@ def __init__( style: Optional["StyleType"] = None, speed: float = 1.0, ) -> None: - """A spinner animation. - - Args: - name (str): Name of spinner (run python -m rich.spinner). - text (RenderableType, optional): A renderable to display at the right of the spinner (str or Text typically). Defaults to "". - style (StyleType, optional): Style for spinner animation. Defaults to None. - speed (float, optional): Speed factor for animation. Defaults to 1.0. - - Raises: - KeyError: If name isn't one of the supported spinner animations. - """ try: spinner = SPINNERS[name] except KeyError: diff --git a/src/pip/_vendor/rich/style.py b/src/pip/_vendor/rich/style.py index ad388aadb0e..313c889496d 100644 --- a/src/pip/_vendor/rich/style.py +++ b/src/pip/_vendor/rich/style.py @@ -645,6 +645,29 @@ def copy(self) -> "Style": style._meta = self._meta return style + @lru_cache(maxsize=128) + def clear_meta_and_links(self) -> "Style": + """Get a copy of this style with link and meta information removed. + + Returns: + Style: New style object. + """ + if self._null: + return NULL_STYLE + style: Style = self.__new__(Style) + style._ansi = self._ansi + style._style_definition = self._style_definition + style._color = self._color + style._bgcolor = self._bgcolor + style._attributes = self._attributes + style._set_attributes = self._set_attributes + style._link = None + style._link_id = "" + style._hash = self._hash + style._null = False + style._meta = None + return style + def update_link(self, link: Optional[str] = None) -> "Style": """Get a copy with a different value for link. 
diff --git a/src/pip/_vendor/rich/syntax.py b/src/pip/_vendor/rich/syntax.py index 01bdd04398f..25b226a3a98 100644 --- a/src/pip/_vendor/rich/syntax.py +++ b/src/pip/_vendor/rich/syntax.py @@ -4,6 +4,7 @@ import sys import textwrap from abc import ABC, abstractmethod +from pathlib import Path from typing import ( Any, Dict, @@ -338,8 +339,7 @@ def from_path( Returns: [Syntax]: A Syntax object that may be printed to the console """ - with open(path, "rt", encoding=encoding) as code_file: - code = code_file.read() + code = Path(path).read_text(encoding=encoding) if not lexer: lexer = cls.guess_lexer(path, code=code) @@ -494,7 +494,10 @@ def tokens_to_spans() -> Iterable[Tuple[str, Optional[Style]]]: # Skip over tokens until line start while line_no < _line_start: - _token_type, token = next(tokens) + try: + _token_type, token = next(tokens) + except StopIteration: + break yield (token, None) if token.endswith("\n"): line_no += 1 @@ -671,6 +674,8 @@ def _get_syntax( line_offset = max(0, start_line - 1) lines: Union[List[Text], Lines] = text.split("\n", allow_blank=ends_on_nl) if self.line_range: + if line_offset > len(lines): + return lines = lines[line_offset:end_line] if self.indent_guides and not options.ascii_only: diff --git a/src/pip/_vendor/rich/text.py b/src/pip/_vendor/rich/text.py index b14055aa7b4..998cb87dab7 100644 --- a/src/pip/_vendor/rich/text.py +++ b/src/pip/_vendor/rich/text.py @@ -53,11 +53,7 @@ class Span(NamedTuple): """Style associated with the span.""" def __repr__(self) -> str: - return ( - f"Span({self.start}, {self.end}, {self.style!r})" - if (isinstance(self.style, Style) and self.style._meta) - else f"Span({self.start}, {self.end}, {repr(self.style)})" - ) + return f"Span({self.start}, {self.end}, {self.style!r})" def __bool__(self) -> bool: return self.end > self.start @@ -1204,7 +1200,7 @@ def fit(self, width: int) -> Lines: width (int): Maximum characters in a line. Returns: - Lines: List of lines. + Lines: Lines container. 
""" lines: Lines = Lines() append = lines.append diff --git a/src/pip/_vendor/rich/theme.py b/src/pip/_vendor/rich/theme.py index bfb3c7f8215..471dfb2f927 100644 --- a/src/pip/_vendor/rich/theme.py +++ b/src/pip/_vendor/rich/theme.py @@ -56,17 +56,20 @@ def from_file( return theme @classmethod - def read(cls, path: str, inherit: bool = True) -> "Theme": + def read( + cls, path: str, inherit: bool = True, encoding: Optional[str] = None + ) -> "Theme": """Read a theme from a path. Args: path (str): Path to a config file readable by Python configparser module. inherit (bool, optional): Inherit default styles. Defaults to True. + encoding (str, optional): Encoding of the config file. Defaults to None. Returns: Theme: A new theme instance. """ - with open(path, "rt") as config_file: + with open(path, "rt", encoding=encoding) as config_file: return cls.from_file(config_file, source=path, inherit=inherit) diff --git a/src/pip/_vendor/rich/traceback.py b/src/pip/_vendor/rich/traceback.py index 1f481298f6f..c4ffe1f99e6 100644 --- a/src/pip/_vendor/rich/traceback.py +++ b/src/pip/_vendor/rich/traceback.py @@ -1,12 +1,24 @@ from __future__ import absolute_import +import linecache import os import platform import sys from dataclasses import dataclass, field from traceback import walk_tb from types import ModuleType, TracebackType -from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Type, Union +from typing import ( + Any, + Callable, + Dict, + Iterable, + List, + Optional, + Sequence, + Tuple, + Type, + Union, +) from pip._vendor.pygments.lexers import guess_lexer_for_filename from pip._vendor.pygments.token import Comment, Keyword, Name, Number, Operator, String @@ -41,6 +53,10 @@ def install( theme: Optional[str] = None, word_wrap: bool = False, show_locals: bool = False, + locals_max_length: int = LOCALS_MAX_LENGTH, + locals_max_string: int = LOCALS_MAX_STRING, + locals_hide_dunder: bool = True, + locals_hide_sunder: Optional[bool] = None, 
indent_guides: bool = True, suppress: Iterable[Union[str, ModuleType]] = (), max_frames: int = 100, @@ -58,6 +74,11 @@ def install( a theme appropriate for the platform. word_wrap (bool, optional): Enable word wrapping of long lines. Defaults to False. show_locals (bool, optional): Enable display of local variables. Defaults to False. + locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. + Defaults to 10. + locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80. + locals_hide_dunder (bool, optional): Hide locals prefixed with double underscore. Defaults to True. + locals_hide_sunder (bool, optional): Hide locals prefixed with single underscore. Defaults to False. indent_guides (bool, optional): Enable indent guides in code and locals. Defaults to True. suppress (Sequence[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback. @@ -65,7 +86,13 @@ def install( Callable: The previous exception handler that was replaced. """ - traceback_console = Console(file=sys.stderr) if console is None else console + traceback_console = Console(stderr=True) if console is None else console + + locals_hide_sunder = ( + True + if (traceback_console.is_jupyter and locals_hide_sunder is None) + else locals_hide_sunder + ) def excepthook( type_: Type[BaseException], @@ -82,6 +109,10 @@ def excepthook( theme=theme, word_wrap=word_wrap, show_locals=show_locals, + locals_max_length=locals_max_length, + locals_max_string=locals_max_string, + locals_hide_dunder=locals_hide_dunder, + locals_hide_sunder=bool(locals_hide_sunder), indent_guides=indent_guides, suppress=suppress, max_frames=max_frames, @@ -192,6 +223,8 @@ class Traceback: locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to 10. 
locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80. + locals_hide_dunder (bool, optional): Hide locals prefixed with double underscore. Defaults to True. + locals_hide_sunder (bool, optional): Hide locals prefixed with single underscore. Defaults to False. suppress (Sequence[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback. max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100. @@ -208,14 +241,17 @@ class Traceback: def __init__( self, trace: Optional[Trace] = None, + *, width: Optional[int] = 100, extra_lines: int = 3, theme: Optional[str] = None, word_wrap: bool = False, show_locals: bool = False, - indent_guides: bool = True, locals_max_length: int = LOCALS_MAX_LENGTH, locals_max_string: int = LOCALS_MAX_STRING, + locals_hide_dunder: bool = True, + locals_hide_sunder: bool = False, + indent_guides: bool = True, suppress: Iterable[Union[str, ModuleType]] = (), max_frames: int = 100, ): @@ -237,6 +273,8 @@ def __init__( self.indent_guides = indent_guides self.locals_max_length = locals_max_length self.locals_max_string = locals_max_string + self.locals_hide_dunder = locals_hide_dunder + self.locals_hide_sunder = locals_hide_sunder self.suppress: Sequence[str] = [] for suppress_entity in suppress: @@ -257,14 +295,17 @@ def from_exception( exc_type: Type[Any], exc_value: BaseException, traceback: Optional[TracebackType], + *, width: Optional[int] = 100, extra_lines: int = 3, theme: Optional[str] = None, word_wrap: bool = False, show_locals: bool = False, - indent_guides: bool = True, locals_max_length: int = LOCALS_MAX_LENGTH, locals_max_string: int = LOCALS_MAX_STRING, + locals_hide_dunder: bool = True, + locals_hide_sunder: bool = False, + indent_guides: bool = True, suppress: Iterable[Union[str, ModuleType]] = (), max_frames: int = 100, ) -> "Traceback": @@ -283,6 +324,8 @@ def from_exception( locals_max_length 
(int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to 10. locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. Defaults to 80. + locals_hide_dunder (bool, optional): Hide locals prefixed with double underscore. Defaults to True. + locals_hide_sunder (bool, optional): Hide locals prefixed with single underscore. Defaults to False. suppress (Iterable[Union[str, ModuleType]]): Optional sequence of modules or paths to exclude from traceback. max_frames (int): Maximum number of frames to show in a traceback, 0 for no maximum. Defaults to 100. @@ -290,8 +333,16 @@ def from_exception( Traceback: A Traceback instance that may be printed. """ rich_traceback = cls.extract( - exc_type, exc_value, traceback, show_locals=show_locals + exc_type, + exc_value, + traceback, + show_locals=show_locals, + locals_max_length=locals_max_length, + locals_max_string=locals_max_string, + locals_hide_dunder=locals_hide_dunder, + locals_hide_sunder=locals_hide_sunder, ) + return cls( rich_traceback, width=width, @@ -302,6 +353,8 @@ def from_exception( indent_guides=indent_guides, locals_max_length=locals_max_length, locals_max_string=locals_max_string, + locals_hide_dunder=locals_hide_dunder, + locals_hide_sunder=locals_hide_sunder, suppress=suppress, max_frames=max_frames, ) @@ -312,9 +365,12 @@ def extract( exc_type: Type[BaseException], exc_value: BaseException, traceback: Optional[TracebackType], + *, show_locals: bool = False, locals_max_length: int = LOCALS_MAX_LENGTH, locals_max_string: int = LOCALS_MAX_STRING, + locals_hide_dunder: bool = True, + locals_hide_sunder: bool = False, ) -> Trace: """Extract traceback information. @@ -326,6 +382,8 @@ def extract( locals_max_length (int, optional): Maximum length of containers before abbreviating, or None for no abbreviation. Defaults to 10. locals_max_string (int, optional): Maximum length of string before truncating, or None to disable. 
Defaults to 80. + locals_hide_dunder (bool, optional): Hide locals prefixed with double underscore. Defaults to True. + locals_hide_sunder (bool, optional): Hide locals prefixed with single underscore. Defaults to False. Returns: Trace: A Trace instance which you can use to construct a `Traceback`. @@ -362,6 +420,20 @@ def safe_str(_object: Any) -> str: stacks.append(stack) append = stack.frames.append + def get_locals( + iter_locals: Iterable[Tuple[str, object]] + ) -> Iterable[Tuple[str, object]]: + """Extract locals from an iterator of key pairs.""" + if not (locals_hide_dunder or locals_hide_sunder): + yield from iter_locals + return + for key, value in iter_locals: + if locals_hide_dunder and key.startswith("__"): + continue + if locals_hide_sunder and key.startswith("_"): + continue + yield key, value + for frame_summary, line_no in walk_tb(traceback): filename = frame_summary.f_code.co_filename if filename and not filename.startswith("<"): @@ -369,6 +441,7 @@ def safe_str(_object: Any) -> str: filename = os.path.join(_IMPORT_CWD, filename) if frame_summary.f_locals.get("_rich_traceback_omit", False): continue + frame = Frame( filename=filename or "?", lineno=line_no, @@ -379,7 +452,7 @@ def safe_str(_object: Any) -> str: max_length=locals_max_length, max_string=locals_max_string, ) - for key, value in frame_summary.f_locals.items() + for key, value in get_locals(frame_summary.f_locals.items()) } if show_locals else None, @@ -494,13 +567,14 @@ def _render_syntax_error(self, syntax_error: _SyntaxError) -> RenderResult: highlighter = ReprHighlighter() path_highlighter = PathHighlighter() if syntax_error.filename != "": - text = Text.assemble( - (f" {syntax_error.filename}", "pygments.string"), - (":", "pygments.text"), - (str(syntax_error.lineno), "pygments.number"), - style="pygments.text", - ) - yield path_highlighter(text) + if os.path.exists(syntax_error.filename): + text = Text.assemble( + (f" {syntax_error.filename}", "pygments.string"), + (":", 
"pygments.text"), + (str(syntax_error.lineno), "pygments.number"), + style="pygments.text", + ) + yield path_highlighter(text) syntax_error_text = highlighter(syntax_error.line.rstrip()) syntax_error_text.no_wrap = True offset = min(syntax_error.offset - 1, len(syntax_error_text)) @@ -531,7 +605,6 @@ def _guess_lexer(cls, filename: str, code: str) -> str: def _render_stack(self, stack: Stack) -> RenderResult: path_highlighter = PathHighlighter() theme = self.theme - code_cache: Dict[str, str] = {} def read_code(filename: str) -> str: """Read files, and cache results on filename. @@ -542,14 +615,7 @@ def read_code(filename: str) -> str: Returns: str: Contents of file """ - code = code_cache.get(filename) - if code is None: - with open( - filename, "rt", encoding="utf-8", errors="replace" - ) as code_file: - code = code_file.read() - code_cache[filename] = code - return code + return "".join(linecache.getlines(filename)) def render_locals(frame: Frame) -> Iterable[ConsoleRenderable]: if frame.locals: @@ -588,14 +654,23 @@ def render_locals(frame: Frame) -> Iterable[ConsoleRenderable]: frame_filename = frame.filename suppressed = any(frame_filename.startswith(path) for path in self.suppress) - text = Text.assemble( - path_highlighter(Text(frame.filename, style="pygments.string")), - (":", "pygments.text"), - (str(frame.lineno), "pygments.number"), - " in ", - (frame.name, "pygments.function"), - style="pygments.text", - ) + if os.path.exists(frame.filename): + text = Text.assemble( + path_highlighter(Text(frame.filename, style="pygments.string")), + (":", "pygments.text"), + (str(frame.lineno), "pygments.number"), + " in ", + (frame.name, "pygments.function"), + style="pygments.text", + ) + else: + text = Text.assemble( + "in ", + (frame.name, "pygments.function"), + (":", "pygments.text"), + (str(frame.lineno), "pygments.number"), + style="pygments.text", + ) if not frame.filename.startswith("<") and not first: yield "" yield text @@ -605,6 +680,10 @@ def 
render_locals(frame: Frame) -> Iterable[ConsoleRenderable]: if not suppressed: try: code = read_code(frame.filename) + if not code: + # code may be an empty string if the file doesn't exist, OR + # if the traceback filename is generated dynamically + continue lexer_name = self._guess_lexer(frame.filename, code) syntax = Syntax( code, diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index f53657bf296..45f98aec401 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -12,7 +12,7 @@ requests==2.28.2 chardet==5.1.0 idna==3.4 urllib3==1.26.15 -rich==12.6.0 +rich==13.3.3 pygments==2.13.0 typing_extensions==4.4.0 resolvelib==1.0.1 From 54f6384468b1f8ccc0372734309cd70a3c18196b Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 11:44:08 +0100 Subject: [PATCH 386/730] Upgrade typing_extensions to 4.5.0 --- news/typing_extensions.vendor.rst | 1 + src/pip/_vendor/typing_extensions.py | 119 +++++++++++++++++++++++++-- src/pip/_vendor/vendor.txt | 2 +- 3 files changed, 113 insertions(+), 9 deletions(-) create mode 100644 news/typing_extensions.vendor.rst diff --git a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst new file mode 100644 index 00000000000..e71aeb66309 --- /dev/null +++ b/news/typing_extensions.vendor.rst @@ -0,0 +1 @@ +Upgrade typing_extensions to 4.5.0 diff --git a/src/pip/_vendor/typing_extensions.py b/src/pip/_vendor/typing_extensions.py index 34199c2a984..9cbf5b87b59 100644 --- a/src/pip/_vendor/typing_extensions.py +++ b/src/pip/_vendor/typing_extensions.py @@ -2,10 +2,12 @@ import collections import collections.abc import functools +import inspect import operator import sys import types as _types import typing +import warnings __all__ = [ @@ -51,6 +53,7 @@ 'assert_type', 'clear_overloads', 'dataclass_transform', + 'deprecated', 'get_overloads', 'final', 'get_args', @@ -728,6 +731,8 @@ def _typeddict_new(*args, total=True, **kwargs): _typeddict_new.__text_signature__ = ('($cls, 
_typename, _fields=None,' ' /, *, total=True, **kwargs)') + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + class _TypedDictMeta(type): def __init__(cls, name, bases, ns, total=True): super().__init__(name, bases, ns) @@ -753,8 +758,10 @@ def __new__(cls, name, bases, ns, total=True): annotations = {} own_annotations = ns.get('__annotations__', {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + kwds = {"module": tp_dict.__module__} if _TAKES_MODULE else {} own_annotations = { - n: typing._type_check(tp, msg) for n, tp in own_annotations.items() + n: typing._type_check(tp, msg, **kwds) + for n, tp in own_annotations.items() } required_keys = set() optional_keys = set() @@ -1157,7 +1164,7 @@ def __init__(self, default): if isinstance(default, (tuple, list)): self.__default__ = tuple((typing._type_check(d, "Default must be a type") for d in default)) - elif default: + elif default != _marker: self.__default__ = typing._type_check(default, "Default must be a type") else: self.__default__ = None @@ -1171,7 +1178,7 @@ class TypeVar(typing.TypeVar, _DefaultMixin, _root=True): def __init__(self, name, *constraints, bound=None, covariant=False, contravariant=False, - default=None, infer_variance=False): + default=_marker, infer_variance=False): super().__init__(name, *constraints, bound=bound, covariant=covariant, contravariant=contravariant) _DefaultMixin.__init__(self, default) @@ -1258,7 +1265,7 @@ class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True): __module__ = 'typing' def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - default=None): + default=_marker): super().__init__(name, bound=bound, covariant=covariant, contravariant=contravariant) _DefaultMixin.__init__(self, default) @@ -1334,7 +1341,7 @@ def kwargs(self): return ParamSpecKwargs(self) def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - default=None): + default=_marker): 
super().__init__([self]) self.__name__ = name self.__covariant__ = bool(covariant) @@ -1850,7 +1857,7 @@ def _is_unpack(obj): class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True): """Type variable tuple.""" - def __init__(self, name, *, default=None): + def __init__(self, name, *, default=_marker): super().__init__(name) _DefaultMixin.__init__(self, default) @@ -1913,7 +1920,7 @@ def get_shape(self) -> Tuple[*Ts]: def __iter__(self): yield self.__unpacked__ - def __init__(self, name, *, default=None): + def __init__(self, name, *, default=_marker): self.__name__ = name _DefaultMixin.__init__(self, default) @@ -1993,7 +2000,8 @@ def int_or_str(arg: int | str) -> None: raise AssertionError("Expected code to be unreachable") -if hasattr(typing, 'dataclass_transform'): +if sys.version_info >= (3, 12): + # dataclass_transform exists in 3.11 but lacks the frozen_default parameter dataclass_transform = typing.dataclass_transform else: def dataclass_transform( @@ -2001,6 +2009,7 @@ def dataclass_transform( eq_default: bool = True, order_default: bool = False, kw_only_default: bool = False, + frozen_default: bool = False, field_specifiers: typing.Tuple[ typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], ... @@ -2057,6 +2066,8 @@ class CustomerModel(ModelBase): assumed to be True or False if it is omitted by the caller. - ``kw_only_default`` indicates whether the ``kw_only`` parameter is assumed to be True or False if it is omitted by the caller. + - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. - ``field_specifiers`` specifies a static list of supported classes or functions that describe fields, similar to ``dataclasses.field()``. 
@@ -2071,6 +2082,7 @@ def decorator(cls_or_fn): "eq_default": eq_default, "order_default": order_default, "kw_only_default": kw_only_default, + "frozen_default": frozen_default, "field_specifiers": field_specifiers, "kwargs": kwargs, } @@ -2102,12 +2114,103 @@ def method(self) -> None: This helps prevent bugs that may occur when a base class is changed without an equivalent change to a child class. + There is no runtime checking of these properties. The decorator + sets the ``__override__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + See PEP 698 for details. """ + try: + __arg.__override__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass return __arg +if hasattr(typing, "deprecated"): + deprecated = typing.deprecated +else: + _T = typing.TypeVar("_T") + + def deprecated( + __msg: str, + *, + category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, + stacklevel: int = 1, + ) -> typing.Callable[[_T], _T]: + """Indicate that a class, function or overload is deprecated. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + No runtime warning is issued. The decorator sets the ``__deprecated__`` + attribute on the decorated object to the deprecation message + passed to the decorator. If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. 
+ + """ + def decorator(__arg: _T) -> _T: + if category is None: + __arg.__deprecated__ = __msg + return __arg + elif isinstance(__arg, type): + original_new = __arg.__new__ + has_init = __arg.__init__ is not object.__init__ + + @functools.wraps(original_new) + def __new__(cls, *args, **kwargs): + warnings.warn(__msg, category=category, stacklevel=stacklevel + 1) + # Mirrors a similar check in object.__new__. + if not has_init and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") + if original_new is not object.__new__: + return original_new(cls, *args, **kwargs) + else: + return original_new(cls) + + __arg.__new__ = staticmethod(__new__) + __arg.__deprecated__ = __new__.__deprecated__ = __msg + return __arg + elif callable(__arg): + @functools.wraps(__arg) + def wrapper(*args, **kwargs): + warnings.warn(__msg, category=category, stacklevel=stacklevel + 1) + return __arg(*args, **kwargs) + + __arg.__deprecated__ = wrapper.__deprecated__ = __msg + return wrapper + else: + raise TypeError( + "@deprecated decorator with non-None category must be applied to " + f"a class or callable, not {__arg!r}" + ) + + return decorator + + # We have to do some monkey patching to deal with the dual nature of # Unpack/TypeVarTuple: # - We want Unpack to be a kind of TypeVar so it gets accepted in diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 45f98aec401..3d6ddbabaa2 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -14,7 +14,7 @@ requests==2.28.2 urllib3==1.26.15 rich==13.3.3 pygments==2.13.0 - typing_extensions==4.4.0 + typing_extensions==4.5.0 resolvelib==1.0.1 setuptools==65.6.3 six==1.16.0 From d7e02483264703d6210e8c28937dcf1d9f547796 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 14:34:24 +0100 Subject: [PATCH 387/730] Upgrade setuptools to 67.6.1 --- news/setuptools.vendor.rst | 2 +- src/pip/_vendor/pkg_resources/__init__.py | 599 +++++++++++--------- src/pip/_vendor/vendor.txt | 2 
+- tools/vendoring/patches/pkg_resources.patch | 22 - 4 files changed, 332 insertions(+), 293 deletions(-) diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst index f86cecbca92..9cf3f49e21c 100644 --- a/news/setuptools.vendor.rst +++ b/news/setuptools.vendor.rst @@ -1 +1 @@ -Update pkg_resources (via setuptools) to 65.6.3 +Upgrade setuptools to 67.6.1 diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 0ec74f8a6ef..a85aca10f7c 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -12,6 +12,12 @@ .egg files, and unpacked .egg files. It can also work in a limited way with .zip files and with custom PEP 302 loaders that support the ``get_data()`` method. + +This module is deprecated. Users are directed to +`importlib.resources `_ +and +`importlib.metadata `_ +instead. """ import sys @@ -34,7 +40,6 @@ import errno import tempfile import textwrap -import itertools import inspect import ntpath import posixpath @@ -54,8 +59,10 @@ # capture these to bypass sandboxing from os import utime + try: from os import mkdir, rename, unlink + WRITE_SUPPORT = True except ImportError: # no write support, probably under GAE @@ -66,6 +73,7 @@ try: import importlib.machinery as importlib_machinery + # access attribute to force import under delayed import mechanisms. 
importlib_machinery.__name__ except ImportError: @@ -79,6 +87,7 @@ from pip._vendor import platformdirs from pip._vendor import packaging + __import__('pip._vendor.packaging.version') __import__('pip._vendor.packaging.specifiers') __import__('pip._vendor.packaging.requirements') @@ -109,6 +118,12 @@ _namespace_packages = None +warnings.warn("pkg_resources is deprecated as an API", DeprecationWarning) + + +_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) + + class PEP440Warning(RuntimeWarning): """ Used when there is an issue with a version or specifier not complying with @@ -116,16 +131,7 @@ class PEP440Warning(RuntimeWarning): """ -def parse_version(v): - try: - return packaging.version.Version(v) - except packaging.version.InvalidVersion: - warnings.warn( - f"{v} is an invalid version and will not be supported in " - "a future release", - PkgResourcesDeprecationWarning, - ) - return packaging.version.LegacyVersion(v) +parse_version = packaging.version.Version _state_vars = {} @@ -197,51 +203,87 @@ def get_supported_platform(): __all__ = [ # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'require', + 'run_script', + 'get_provider', + 'get_distribution', + 'load_entry_point', + 'get_entry_map', + 'get_entry_info', 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - + 'resource_string', + 'resource_stream', + 'resource_filename', + 'resource_listdir', + 'resource_exists', + 'resource_isdir', # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'declare_namespace', + 'working_set', + 'add_activation_listener', + 'find_distributions', + 'set_extraction_path', + 'cleanup_resources', 'get_default_cache', - # 
Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - + 'Environment', + 'WorkingSet', + 'ResourceManager', + 'Distribution', + 'Requirement', + 'EntryPoint', # Exceptions - 'ResolutionError', 'VersionConflict', 'DistributionNotFound', - 'UnknownExtra', 'ExtractionError', - + 'ResolutionError', + 'VersionConflict', + 'DistributionNotFound', + 'UnknownExtra', + 'ExtractionError', # Warnings 'PEP440Warning', - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', - + 'parse_requirements', + 'parse_version', + 'safe_name', + 'safe_version', + 'get_platform', + 'compatible_platforms', + 'yield_lines', + 'split_sections', + 'safe_extra', + 'to_filename', + 'invalid_marker', + 'evaluate_marker', # filesystem utilities - 'ensure_directory', 'normalize_path', - + 'ensure_directory', + 'normalize_path', # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - + 'EGG_DIST', + 'BINARY_DIST', + 'SOURCE_DIST', + 'CHECKOUT_DIST', + 'DEVELOP_DIST', # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - + 'IMetadataProvider', + 'IResourceProvider', + 'FileMetadata', + 'PathMetadata', + 'EggMetadata', + 'EmptyProvider', + 'empty_provider', + 'NullProvider', + 'EggProvider', + 'DefaultProvider', + 'ZipProvider', + 'register_finder', + 'register_namespace_handler', + 'register_loader_type', + 'fixup_namespace_packages', + 
'get_importer', # Warnings 'PkgResourcesDeprecationWarning', - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', + 'run_main', + 'AvailableDistributions', ] @@ -300,8 +342,10 @@ def required_by(self): class DistributionNotFound(ResolutionError): """A requested distribution was not found""" - _template = ("The '{self.req}' distribution was not found " - "and is required by {self.requirers_str}") + _template = ( + "The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}" + ) @property def req(self): @@ -395,7 +439,8 @@ def get_build_platform(): version = _macos_vers() machine = os.uname()[4].replace(" ", "_") return "macosx-%d.%d-%s" % ( - int(version[0]), int(version[1]), + int(version[0]), + int(version[1]), _macos_arch(machine), ) except ValueError: @@ -436,15 +481,18 @@ def compatible_platforms(provided, required): if provDarwin: dversion = int(provDarwin.group(1)) macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": + if ( + dversion == 7 + and macosversion >= "10.3" + or dversion == 8 + and macosversion >= "10.4" + ): return True # egg isn't macOS or legacy darwin return False # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): + if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3): return False # is the required OS major update >= the provided one? 
@@ -506,8 +554,8 @@ def get_metadata(name): def get_metadata_lines(name): """Yield named metadata resource as list of non-blank non-comment lines - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" def metadata_isdir(name): """Is the named metadata a directory? (like ``os.path.isdir()``)""" @@ -720,9 +768,14 @@ def add(self, dist, entry=None, insert=True, replace=False): keys2.append(dist.key) self._added_new(dist) - # FIXME: 'WorkingSet.resolve' is too complex (11) - def resolve(self, requirements, env=None, installer=None, # noqa: C901 - replace_conflicting=False, extras=None): + def resolve( + self, + requirements, + env=None, + installer=None, + replace_conflicting=False, + extras=None, + ): """List all distributions needed to (recursively) meet `requirements` `requirements` must be a sequence of ``Requirement`` objects. 
`env`, @@ -771,33 +824,9 @@ def resolve(self, requirements, env=None, installer=None, # noqa: C901 if not req_extras.markers_pass(req, extras): continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match( - req, ws, installer, - replace_conflicting=replace_conflicting - ) - if dist is None: - requirers = required_by.get(req, None) - raise DistributionNotFound(req, requirers) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - dependent_req = required_by[req] - raise VersionConflict(dist, req).with_context(dependent_req) + dist = self._resolve_dist( + req, best, replace_conflicting, env, installer, required_by, to_activate + ) # push the new requirements onto the stack new_requirements = dist.requires(req.extras)[::-1] @@ -813,8 +842,38 @@ def resolve(self, requirements, env=None, installer=None, # noqa: C901 # return list of distros to activate return to_activate - def find_plugins( - self, plugin_env, full_env=None, installer=None, fallback=True): + def _resolve_dist( + self, req, best, replace_conflicting, env, installer, required_by, to_activate + ): + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws 
= WorkingSet([]) + dist = best[req.key] = env.best_match( + req, ws, installer, replace_conflicting=replace_conflicting + ) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + return dist + + def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True): """Find all activatable distributions in `plugin_env` Example usage:: @@ -867,9 +926,7 @@ def find_plugins( list(map(shadow_set.add, self)) for project_name in plugin_projects: - for dist in plugin_env[project_name]: - req = [dist.as_requirement()] try: @@ -933,8 +990,11 @@ def _added_new(self, dist): def __getstate__(self): return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.normalized_to_canonical_keys.copy(), self.callbacks[:] + self.entries[:], + self.entry_keys.copy(), + self.by_key.copy(), + self.normalized_to_canonical_keys.copy(), + self.callbacks[:], ) def __setstate__(self, e_k_b_n_c): @@ -970,8 +1030,8 @@ class Environment: """Searchable snapshot of distributions on a search path""" def __init__( - self, search_path=None, platform=get_supported_platform(), - python=PY_MAJOR): + self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR + ): """Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. 
@@ -1038,16 +1098,14 @@ def __getitem__(self, project_name): return self._distmap.get(distribution_key, []) def add(self, dist): - """Add `dist` if we ``can_add()`` it and it has not already been added - """ + """Add `dist` if we ``can_add()`` it and it has not already been added""" if self.can_add(dist) and dist.has_version(): dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) - def best_match( - self, req, working_set, installer=None, replace_conflicting=False): + def best_match(self, req, working_set, installer=None, replace_conflicting=False): """Find distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a @@ -1134,6 +1192,7 @@ class ExtractionError(RuntimeError): class ResourceManager: """Manage resource extraction and packages""" + extraction_path = None def __init__(self): @@ -1145,9 +1204,7 @@ def resource_exists(self, package_or_requirement, resource_name): def resource_isdir(self, package_or_requirement, resource_name): """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) + return get_provider(package_or_requirement).resource_isdir(resource_name) def resource_filename(self, package_or_requirement, resource_name): """Return a true filesystem path for specified resource""" @@ -1169,9 +1226,7 @@ def resource_string(self, package_or_requirement, resource_name): def resource_listdir(self, package_or_requirement, resource_name): """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) + return get_provider(package_or_requirement).resource_listdir(resource_name) def extraction_error(self): """Give an error message for problems extracting file(s)""" @@ -1179,7 +1234,8 @@ def extraction_error(self): old_exc = sys.exc_info()[1] 
cache_path = self.extraction_path or get_default_cache() - tmpl = textwrap.dedent(""" + tmpl = textwrap.dedent( + """ Can't extract file(s) to egg cache The following error occurred while trying to extract file(s) @@ -1194,7 +1250,8 @@ def extraction_error(self): Perhaps your account does not have write access to this directory? You can change the cache directory by setting the PYTHON_EGG_CACHE environment variable to point to an accessible directory. - """).lstrip() + """ + ).lstrip() err = ExtractionError(tmpl.format(**locals())) err.manager = self err.cache_path = cache_path @@ -1293,9 +1350,7 @@ def set_extraction_path(self, path): ``cleanup_resources()``.) """ if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) + raise ValueError("Can't change extraction path, files already extracted") self.extraction_path = path @@ -1319,9 +1374,8 @@ def get_default_cache(): or a platform-relevant user cache dir for an app named "Python-Eggs". """ - return ( - os.environ.get('PYTHON_EGG_CACHE') - or platformdirs.user_cache_dir(appname='Python-Eggs') + return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir( + appname='Python-Eggs' ) @@ -1345,6 +1399,38 @@ def safe_version(version): return re.sub('[^A-Za-z0-9.]+', '-', version) +def _forgiving_version(version): + """Fallback when ``safe_version`` is not safe enough + >>> parse_version(_forgiving_version('0.23ubuntu1')) + + >>> parse_version(_forgiving_version('0.23-')) + + >>> parse_version(_forgiving_version('0.-_')) + + >>> parse_version(_forgiving_version('42.+?1')) + + >>> parse_version(_forgiving_version('hello world')) + + """ + version = version.replace(' ', '.') + match = _PEP440_FALLBACK.search(version) + if match: + safe = match["safe"] + rest = version[len(safe):] + else: + safe = "0" + rest = version + local = f"sanitized.{_safe_segment(rest)}".strip(".") + return f"{safe}.dev0+{local}" + + +def _safe_segment(segment): + """Convert an arbitrary 
string into a safe segment""" + segment = re.sub('[^A-Za-z0-9.]+', '-', segment) + segment = re.sub('-[^A-Za-z0-9]+', '-', segment) + return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-") + + def safe_extra(extra): """Convert an arbitrary string to a standard 'extra' name @@ -1458,8 +1544,9 @@ def run_script(self, script_name, namespace): script = 'scripts/' + script_name if not self.has_metadata(script): raise ResolutionError( - "Script {script!r} not found in metadata at {self.egg_info!r}" - .format(**locals()), + "Script {script!r} not found in metadata at {self.egg_info!r}".format( + **locals() + ), ) script_text = self.get_metadata(script).replace('\r\n', '\n') script_text = script_text.replace('\r', '\n') @@ -1472,8 +1559,12 @@ def run_script(self, script_name, namespace): exec(code, namespace, namespace) else: from linecache import cache + cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename + len(script_text), + 0, + script_text.split('\n'), + script_filename, ) script_code = compile(script_text, script_filename, 'exec') exec(script_code, namespace, namespace) @@ -1553,9 +1644,9 @@ def _validate_resource_path(path): AttributeError: ... """ invalid = ( - os.path.pardir in path.split(posixpath.sep) or - posixpath.isabs(path) or - ntpath.isabs(path) + os.path.pardir in path.split(posixpath.sep) + or posixpath.isabs(path) + or ntpath.isabs(path) ) if not invalid: return @@ -1637,7 +1728,10 @@ def _get(self, path): @classmethod def _register(cls): - loader_names = 'SourceFileLoader', 'SourcelessFileLoader', + loader_names = ( + 'SourceFileLoader', + 'SourcelessFileLoader', + ) for name in loader_names: loader_cls = getattr(importlib_machinery, name, type(None)) register_loader_type(loader_cls, cls) @@ -1697,6 +1791,7 @@ class MemoizedZipManifests(ZipManifests): """ Memoized zipfile manifests. 
""" + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') def load(self, path): @@ -1730,20 +1825,16 @@ def _zipinfo_name(self, fspath): if fspath == self.loader.archive: return '' if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.zip_pre) - ) + return fspath[len(self.zip_pre) :] + raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre)) def _parts(self, zip_path): # Convert a zipfile subpath into an egg-relative path part list. # pseudo-fs path fspath = self.zip_pre + zip_path if fspath.startswith(self.egg_root + os.sep): - return fspath[len(self.egg_root) + 1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.egg_root) - ) + return fspath[len(self.egg_root) + 1 :].split(os.sep) + raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root)) @property def zipinfo(self): @@ -1773,25 +1864,20 @@ def _get_date_and_size(zip_stat): # FIXME: 'ZipProvider._extract_resource' is too complex (12) def _extract_resource(self, manager, zip_path): # noqa: C901 - if zip_path in self._index(): for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) + last = self._extract_resource(manager, os.path.join(zip_path, name)) # return the extracted directory name return os.path.dirname(last) timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - try: - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) + raise IOError( + '"os.rename" and "os.unlink" are not supported ' 'on this platform' ) + try: + real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path)) if self._is_current(real_path, zip_path): return real_path @@ -2027,70 +2113,21 @@ def find_nothing(importer, path_item, only=False): 
register_finder(object, find_nothing) -def _by_version_descending(names): - """ - Given a list of filenames, return them in descending order - by version number. - - >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' - >>> _by_version_descending(names) - ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] - """ - def try_parse(name): - """ - Attempt to parse as a version or return a null version. - """ - try: - return packaging.version.Version(name) - except Exception: - return packaging.version.Version('0') - - def _by_version(name): - """ - Parse each component of the filename - """ - name, ext = os.path.splitext(name) - parts = itertools.chain(name.split('-'), [ext]) - return [try_parse(part) for part in parts] - - return sorted(names, key=_by_version, reverse=True) - - def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" path_item = _normalize_cached(path_item) if _is_unpacked_egg(path_item): yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item, 'EGG-INFO') - ) + path_item, + metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')), ) return - entries = ( - os.path.join(path_item, child) - for child in safe_listdir(path_item) - ) - - # for performance, before sorting by version, - # screen entries for only those that will yield - # distributions - filtered = ( - entry - for entry in entries - if dist_factory(path_item, entry, only) - ) + entries = (os.path.join(path_item, child) for child in safe_listdir(path_item)) # scan for .egg and .egg-info in directory - path_item_entries = 
_by_version_descending(filtered) - for entry in path_item_entries: + for entry in sorted(entries): fullpath = os.path.join(path_item, entry) factory = dist_factory(path_item, entry, only) for dist in factory(fullpath): @@ -2101,19 +2138,18 @@ def dist_factory(path_item, entry, only): """Return a dist_factory for the given entry.""" lower = entry.lower() is_egg_info = lower.endswith('.egg-info') - is_dist_info = ( - lower.endswith('.dist-info') and - os.path.isdir(os.path.join(path_item, entry)) + is_dist_info = lower.endswith('.dist-info') and os.path.isdir( + os.path.join(path_item, entry) ) is_meta = is_egg_info or is_dist_info return ( distributions_from_metadata - if is_meta else - find_distributions - if not only and _is_egg_path(entry) else - resolve_egg_link - if not only and lower.endswith('.egg-link') else - NoDists() + if is_meta + else find_distributions + if not only and _is_egg_path(entry) + else resolve_egg_link + if not only and lower.endswith('.egg-link') + else NoDists() ) @@ -2125,6 +2161,7 @@ class NoDists: >>> list(NoDists()('anything')) [] """ + def __bool__(self): return False @@ -2159,7 +2196,10 @@ def distributions_from_metadata(path): metadata = FileMetadata(path) entry = os.path.basename(path) yield Distribution.from_location( - root, entry, metadata, precedence=DEVELOP_DIST, + root, + entry, + metadata, + precedence=DEVELOP_DIST, ) @@ -2181,17 +2221,16 @@ def resolve_egg_link(path): """ referenced_paths = non_empty_lines(path) resolved_paths = ( - os.path.join(os.path.dirname(path), ref) - for ref in referenced_paths + os.path.join(os.path.dirname(path), ref) for ref in referenced_paths ) dist_groups = map(find_distributions, resolved_paths) return next(dist_groups, ()) -register_finder(pkgutil.ImpImporter, find_on_path) +if hasattr(pkgutil, 'ImpImporter'): + register_finder(pkgutil.ImpImporter, find_on_path) -if hasattr(importlib_machinery, 'FileFinder'): - register_finder(importlib_machinery.FileFinder, find_on_path) 
+register_finder(importlib_machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) @@ -2289,6 +2328,15 @@ def position_in_sys_path(path): def declare_namespace(packageName): """Declare that package 'packageName' is a namespace package""" + msg = ( + f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n" + "Implementing implicit namespace packages (as specified in PEP 420) " + "is preferred to `pkg_resources.declare_namespace`. " + "See https://setuptools.pypa.io/en/latest/references/" + "keywords.html#keyword-namespace-packages" + ) + warnings.warn(msg, DeprecationWarning, stacklevel=2) + _imp.acquire_lock() try: if packageName in _namespace_packages: @@ -2345,11 +2393,11 @@ def file_ns_handler(importer, path_item, packageName, module): return subpath -register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) -register_namespace_handler(zipimport.zipimporter, file_ns_handler) +if hasattr(pkgutil, 'ImpImporter'): + register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) -if hasattr(importlib_machinery, 'FileFinder'): - register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) +register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): @@ -2361,8 +2409,7 @@ def null_ns_handler(importer, path_item, packageName, module): def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(os.path.normpath( - _cygwin_patch(filename)))) + return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) def _cygwin_patch(filename): # pragma: nocover @@ -2393,9 +2440,9 @@ def _is_egg_path(path): def _is_zip_egg(path): return ( - path.lower().endswith('.egg') and - os.path.isfile(path) and - zipfile.is_zipfile(path) + 
path.lower().endswith('.egg') + and os.path.isfile(path) + and zipfile.is_zipfile(path) ) @@ -2403,9 +2450,8 @@ def _is_unpacked_egg(path): """ Determine if given path appears to be an unpacked egg. """ - return ( - path.lower().endswith('.egg') and - os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) + return path.lower().endswith('.egg') and os.path.isfile( + os.path.join(path, 'EGG-INFO', 'PKG-INFO') ) @@ -2569,8 +2615,10 @@ def _version_from_file(lines): Given an iterable of lines from a Metadata file, return the value of the Version field, if present, or None otherwise. """ + def is_version_line(line): return line.lower().startswith('version:') + version_lines = filter(is_version_line, lines) line = next(iter(version_lines), '') _, _, value = line.partition(':') @@ -2579,12 +2627,19 @@ def is_version_line(line): class Distribution: """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' def __init__( - self, location=None, metadata=None, project_name=None, - version=None, py_version=PY_MAJOR, platform=None, - precedence=EGG_DIST): + self, + location=None, + metadata=None, + project_name=None, + version=None, + py_version=PY_MAJOR, + platform=None, + precedence=EGG_DIST, + ): self.project_name = safe_name(project_name or 'Unknown') if version is not None: self._version = safe_version(version) @@ -2607,8 +2662,13 @@ def from_location(cls, location, basename, metadata=None, **kw): 'name', 'ver', 'pyver', 'plat' ) return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw + location, + metadata, + project_name=project_name, + version=version, + py_version=py_version, + platform=platform, + **kw, )._reload_version() def _reload_version(self): @@ -2617,7 +2677,7 @@ def _reload_version(self): @property def hashcmp(self): return ( - self.parsed_version, + self._forgiving_parsed_version, self.precedence, self.key, self.location, @@ -2664,35 +2724,42 @@ def key(self): 
@property def parsed_version(self): if not hasattr(self, "_parsed_version"): - self._parsed_version = parse_version(self.version) + try: + self._parsed_version = parse_version(self.version) + except packaging.version.InvalidVersion as ex: + info = f"(package: {self.project_name})" + if hasattr(ex, "add_note"): + ex.add_note(info) # PEP 678 + raise + raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None return self._parsed_version - def _warn_legacy_version(self): - LV = packaging.version.LegacyVersion - is_legacy = isinstance(self._parsed_version, LV) - if not is_legacy: - return + @property + def _forgiving_parsed_version(self): + try: + return self.parsed_version + except packaging.version.InvalidVersion as ex: + self._parsed_version = parse_version(_forgiving_version(self.version)) - # While an empty version is technically a legacy version and - # is not a valid PEP 440 version, it's also unlikely to - # actually come from someone and instead it is more likely that - # it comes from setuptools attempting to parse a filename and - # including it in the list. So for that we'll gate this warning - # on if the version is anything at all or not. - if not self.version: - return + notes = "\n".join(getattr(ex, "__notes__", [])) # PEP 678 + msg = f"""!!\n\n + ************************************************************************* + {str(ex)}\n{notes} + + This is a long overdue deprecation. + For the time being, `pkg_resources` will use `{self._parsed_version}` + as a replacement to avoid breaking existing environments, + but no future compatibility is guaranteed. - tmpl = textwrap.dedent(""" - '{project_name} ({version})' is being parsed as a legacy, - non PEP 440, - version. You may find odd behavior and sort order. - In particular it will be sorted as less than 0.0. It - is recommended to migrate to PEP 440 compatible - versions. 
- """).strip().replace('\n', ' ') + If you maintain package {self.project_name} you should implement + the relevant changes to adequate the project to PEP 440 immediately. + ************************************************************************* + \n\n!! + """ + warnings.warn(msg, DeprecationWarning) - warnings.warn(tmpl.format(**vars(self)), PEP440Warning) + return self._parsed_version @property def version(self): @@ -2702,9 +2769,9 @@ def version(self): version = self._get_version() if version is None: path = self._get_metadata_path_for_display(self.PKG_INFO) - msg = ( - "Missing 'Version:' header and/or {} file at path: {}" - ).format(self.PKG_INFO, path) + msg = ("Missing 'Version:' header and/or {} file at path: {}").format( + self.PKG_INFO, path + ) raise ValueError(msg, self) from e return version @@ -2733,8 +2800,7 @@ def _filter_extras(dm): reqs = dm.pop(extra) new_extra, _, marker = extra.partition(':') fails_marker = marker and ( - invalid_marker(marker) - or not evaluate_marker(marker) + invalid_marker(marker) or not evaluate_marker(marker) ) if fails_marker: reqs = [] @@ -2806,8 +2872,9 @@ def activate(self, path=None, replace=False): def egg_name(self): """Return what this distribution's standard .egg filename should be""" filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR + to_filename(self.project_name), + to_filename(self.version), + self.py_version or PY_MAJOR, ) if self.platform: @@ -2837,17 +2904,13 @@ def __getattr__(self, attr): def __dir__(self): return list( set(super(Distribution, self).__dir__()) - | set( - attr for attr in self._provider.__dir__() - if not attr.startswith('_') - ) + | set(attr for attr in self._provider.__dir__() if not attr.startswith('_')) ) @classmethod def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw + _normalize_cached(filename), 
os.path.basename(filename), metadata, **kw ) def as_requirement(self): @@ -2959,14 +3022,18 @@ def check_version_conflict(self): nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) loc = normalize_path(self.location) for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages): + if ( + modname not in sys.modules + or modname in nsp + or modname in _namespace_packages + ): continue if modname in ('pkg_resources', 'setuptools', 'site'): continue fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): + if fn and ( + normalize_path(fn).startswith(loc) or fn.startswith(self.location) + ): continue issue_warning( "Module %s was already imported from %s, but %s is being added" @@ -3018,6 +3085,7 @@ class DistInfoDistribution(Distribution): Wrap an actual or potential sys.path entry w/metadata, .dist-info style. """ + PKG_INFO = 'METADATA' EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") @@ -3103,8 +3171,7 @@ def __init__(self, requirement_string): self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() - self.specs = [ - (spec.operator, spec.version) for spec in self.specifier] + self.specs = [(spec.operator, spec.version) for spec in self.specifier] self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, @@ -3116,10 +3183,7 @@ def __init__(self, requirement_string): self.__hash = hash(self.hashCmp) def __eq__(self, other): - return ( - isinstance(other, Requirement) and - self.hashCmp == other.hashCmp - ) + return isinstance(other, Requirement) and self.hashCmp == other.hashCmp def __ne__(self, other): return not self == other @@ -3144,7 +3208,7 @@ def __repr__(self): @staticmethod def parse(s): - req, = parse_requirements(s) + (req,) = parse_requirements(s) return req @@ -3282,10 +3346,7 @@ def 
_initialize_master_working_set(): # ensure that all distributions added to the working set in the future # (e.g. by calling ``require()``) will get activated as well, # with higher priority (replace=True). - tuple( - dist.activate(replace=False) - for dist in working_set - ) + tuple(dist.activate(replace=False) for dist in working_set) add_activation_listener( lambda dist: dist.activate(replace=True), existing=False, diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 3d6ddbabaa2..70a24f7bd9f 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -16,7 +16,7 @@ rich==13.3.3 pygments==2.13.0 typing_extensions==4.5.0 resolvelib==1.0.1 -setuptools==65.6.3 +setuptools==67.6.1 six==1.16.0 tenacity==8.1.0 tomli==2.0.1 diff --git a/tools/vendoring/patches/pkg_resources.patch b/tools/vendoring/patches/pkg_resources.patch index 39bb2eac253..48ae954311b 100644 --- a/tools/vendoring/patches/pkg_resources.patch +++ b/tools/vendoring/patches/pkg_resources.patch @@ -1,25 +1,3 @@ -diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py -index d59226af9..3b9565893 100644 ---- a/src/pip/_vendor/pkg_resources/__init__.py -+++ b/src/pip/_vendor/pkg_resources/__init__.py -@@ -77,7 +77,7 @@ - join_continuation, - ) - --from pkg_resources.extern import appdirs -+from pkg_resources.extern import platformdirs - from pkg_resources.extern import packaging - __import__('pkg_resources.extern.packaging.version') - __import__('pkg_resources.extern.packaging.specifiers') -@@ -1321,7 +1321,7 @@ def get_default_cache(): - """ - return ( - os.environ.get('PYTHON_EGG_CACHE') -- or appdirs.user_cache_dir(appname='Python-Eggs') -+ or platformdirs.user_cache_dir(appname='Python-Eggs') - ) - - diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 3f2476a0c..8d5727d35 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py From d2852d0ad2261880b25f7b137ae89c8e5bbeae65 
Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 14:43:48 +0100 Subject: [PATCH 388/730] Upgrade tenacity to 8.2.2 --- news/tenacity.vendor.rst | 1 + src/pip/_vendor/tenacity/__init__.py | 219 ++++++++++++++++------- src/pip/_vendor/tenacity/_asyncio.py | 38 ++-- src/pip/_vendor/tenacity/_utils.py | 8 + src/pip/_vendor/tenacity/after.py | 7 +- src/pip/_vendor/tenacity/before.py | 7 +- src/pip/_vendor/tenacity/before_sleep.py | 17 +- src/pip/_vendor/tenacity/retry.py | 38 +++- src/pip/_vendor/tenacity/stop.py | 11 +- src/pip/_vendor/tenacity/tornadoweb.py | 6 +- src/pip/_vendor/tenacity/wait.py | 44 +++-- src/pip/_vendor/vendor.txt | 2 +- tools/vendoring/patches/tenacity.patch | 8 +- 13 files changed, 282 insertions(+), 124 deletions(-) create mode 100644 news/tenacity.vendor.rst diff --git a/news/tenacity.vendor.rst b/news/tenacity.vendor.rst new file mode 100644 index 00000000000..493d38d0195 --- /dev/null +++ b/news/tenacity.vendor.rst @@ -0,0 +1 @@ +Upgrade tenacity to 8.2.2 diff --git a/src/pip/_vendor/tenacity/__init__.py b/src/pip/_vendor/tenacity/__init__.py index ab3be3bf63d..4f1603adeb6 100644 --- a/src/pip/_vendor/tenacity/__init__.py +++ b/src/pip/_vendor/tenacity/__init__.py @@ -16,6 +16,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + import functools import sys import threading @@ -88,51 +89,13 @@ if t.TYPE_CHECKING: import types - from .wait import wait_base - from .stop import stop_base - - -WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable) -_RetValT = t.TypeVar("_RetValT") - - -@t.overload -def retry(fn: WrappedFn) -> WrappedFn: - pass - - -@t.overload -def retry(*dargs: t.Any, **dkw: t.Any) -> t.Callable[[WrappedFn], WrappedFn]: # noqa - pass - - -def retry(*dargs: t.Any, **dkw: t.Any) -> t.Union[WrappedFn, t.Callable[[WrappedFn], WrappedFn]]: # noqa - """Wrap a function with a new `Retrying` object. 
- - :param dargs: positional arguments passed to Retrying object - :param dkw: keyword arguments passed to the Retrying object - """ - # support both @retry and @retry() as valid syntax - if len(dargs) == 1 and callable(dargs[0]): - return retry()(dargs[0]) - else: - - def wrap(f: WrappedFn) -> WrappedFn: - if isinstance(f, retry_base): - warnings.warn( - f"Got retry_base instance ({f.__class__.__name__}) as callable argument, " - f"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)" - ) - if iscoroutinefunction(f): - r: "BaseRetrying" = AsyncRetrying(*dargs, **dkw) - elif tornado and hasattr(tornado.gen, "is_coroutine_function") and tornado.gen.is_coroutine_function(f): - r = TornadoRetrying(*dargs, **dkw) - else: - r = Retrying(*dargs, **dkw) + from .retry import RetryBaseT + from .stop import StopBaseT + from .wait import WaitBaseT - return r.wraps(f) - return wrap +WrappedFnReturnT = t.TypeVar("WrappedFnReturnT") +WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable[..., t.Any]) class TryAgain(Exception): @@ -216,7 +179,7 @@ def __exit__( exc_value: t.Optional[BaseException], traceback: t.Optional["types.TracebackType"], ) -> t.Optional[bool]: - if isinstance(exc_value, BaseException): + if exc_type is not None and exc_value is not None: self.retry_state.set_exception((exc_type, exc_value, traceback)) return True # Swallow exception. 
else: @@ -229,9 +192,9 @@ class BaseRetrying(ABC): def __init__( self, sleep: t.Callable[[t.Union[int, float]], None] = sleep, - stop: "stop_base" = stop_never, - wait: "wait_base" = wait_none(), - retry: retry_base = retry_if_exception_type(), + stop: "StopBaseT" = stop_never, + wait: "WaitBaseT" = wait_none(), + retry: "RetryBaseT" = retry_if_exception_type(), before: t.Callable[["RetryCallState"], None] = before_nothing, after: t.Callable[["RetryCallState"], None] = after_nothing, before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None, @@ -254,8 +217,8 @@ def __init__( def copy( self, sleep: t.Union[t.Callable[[t.Union[int, float]], None], object] = _unset, - stop: t.Union["stop_base", object] = _unset, - wait: t.Union["wait_base", object] = _unset, + stop: t.Union["StopBaseT", object] = _unset, + wait: t.Union["WaitBaseT", object] = _unset, retry: t.Union[retry_base, object] = _unset, before: t.Union[t.Callable[["RetryCallState"], None], object] = _unset, after: t.Union[t.Callable[["RetryCallState"], None], object] = _unset, @@ -312,9 +275,9 @@ def statistics(self) -> t.Dict[str, t.Any]: statistics from each thread). 
""" try: - return self._local.statistics + return self._local.statistics # type: ignore[no-any-return] except AttributeError: - self._local.statistics = {} + self._local.statistics = t.cast(t.Dict[str, t.Any], {}) return self._local.statistics def wraps(self, f: WrappedFn) -> WrappedFn: @@ -330,10 +293,10 @@ def wrapped_f(*args: t.Any, **kw: t.Any) -> t.Any: def retry_with(*args: t.Any, **kwargs: t.Any) -> WrappedFn: return self.copy(*args, **kwargs).wraps(f) - wrapped_f.retry = self - wrapped_f.retry_with = retry_with + wrapped_f.retry = self # type: ignore[attr-defined] + wrapped_f.retry_with = retry_with # type: ignore[attr-defined] - return wrapped_f + return wrapped_f # type: ignore[return-value] def begin(self) -> None: self.statistics.clear() @@ -348,15 +311,15 @@ def iter(self, retry_state: "RetryCallState") -> t.Union[DoAttempt, DoSleep, t.A self.before(retry_state) return DoAttempt() - is_explicit_retry = retry_state.outcome.failed and isinstance(retry_state.outcome.exception(), TryAgain) - if not (is_explicit_retry or self.retry(retry_state=retry_state)): + is_explicit_retry = fut.failed and isinstance(fut.exception(), TryAgain) + if not (is_explicit_retry or self.retry(retry_state)): return fut.result() if self.after is not None: self.after(retry_state) self.statistics["delay_since_first_attempt"] = retry_state.seconds_since_start - if self.stop(retry_state=retry_state): + if self.stop(retry_state): if self.retry_error_callback: return self.retry_error_callback(retry_state) retry_exc = self.retry_error_cls(fut) @@ -365,7 +328,7 @@ def iter(self, retry_state: "RetryCallState") -> t.Union[DoAttempt, DoSleep, t.A raise retry_exc from fut.exception() if self.wait: - sleep = self.wait(retry_state=retry_state) + sleep = self.wait(retry_state) else: sleep = 0.0 retry_state.next_action = RetryAction(sleep) @@ -393,14 +356,24 @@ def __iter__(self) -> t.Generator[AttemptManager, None, None]: break @abstractmethod - def __call__(self, fn: t.Callable[..., 
_RetValT], *args: t.Any, **kwargs: t.Any) -> _RetValT: + def __call__( + self, + fn: t.Callable[..., WrappedFnReturnT], + *args: t.Any, + **kwargs: t.Any, + ) -> WrappedFnReturnT: pass class Retrying(BaseRetrying): """Retrying controller.""" - def __call__(self, fn: t.Callable[..., _RetValT], *args: t.Any, **kwargs: t.Any) -> _RetValT: + def __call__( + self, + fn: t.Callable[..., WrappedFnReturnT], + *args: t.Any, + **kwargs: t.Any, + ) -> WrappedFnReturnT: self.begin() retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs) @@ -410,17 +383,23 @@ def __call__(self, fn: t.Callable[..., _RetValT], *args: t.Any, **kwargs: t.Any) try: result = fn(*args, **kwargs) except BaseException: # noqa: B902 - retry_state.set_exception(sys.exc_info()) + retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type] else: retry_state.set_result(result) elif isinstance(do, DoSleep): retry_state.prepare_for_next_attempt() self.sleep(do) else: - return do + return do # type: ignore[no-any-return] + + +if sys.version_info[1] >= 9: + FutureGenericT = futures.Future[t.Any] +else: + FutureGenericT = futures.Future -class Future(futures.Future): +class Future(FutureGenericT): """Encapsulates a (future or past) attempted call to a target function.""" def __init__(self, attempt_number: int) -> None: @@ -493,13 +472,15 @@ def set_result(self, val: t.Any) -> None: fut.set_result(val) self.outcome, self.outcome_timestamp = fut, ts - def set_exception(self, exc_info: t.Tuple[t.Type[BaseException], BaseException, "types.TracebackType"]) -> None: + def set_exception( + self, exc_info: t.Tuple[t.Type[BaseException], BaseException, "types.TracebackType| None"] + ) -> None: ts = time.monotonic() fut = Future(self.attempt_number) fut.set_exception(exc_info[1]) self.outcome, self.outcome_timestamp = fut, ts - def __repr__(self): + def __repr__(self) -> str: if self.outcome is None: result = "none yet" elif self.outcome.failed: @@ -513,7 +494,115 @@ def __repr__(self): 
return f"<{clsname} {id(self)}: attempt #{self.attempt_number}; slept for {slept}; last result: {result}>" +@t.overload +def retry(func: WrappedFn) -> WrappedFn: + ... + + +@t.overload +def retry( + sleep: t.Callable[[t.Union[int, float]], None] = sleep, + stop: "StopBaseT" = stop_never, + wait: "WaitBaseT" = wait_none(), + retry: "RetryBaseT" = retry_if_exception_type(), + before: t.Callable[["RetryCallState"], None] = before_nothing, + after: t.Callable[["RetryCallState"], None] = after_nothing, + before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None, + reraise: bool = False, + retry_error_cls: t.Type["RetryError"] = RetryError, + retry_error_callback: t.Optional[t.Callable[["RetryCallState"], t.Any]] = None, +) -> t.Callable[[WrappedFn], WrappedFn]: + ... + + +def retry(*dargs: t.Any, **dkw: t.Any) -> t.Any: + """Wrap a function with a new `Retrying` object. + + :param dargs: positional arguments passed to Retrying object + :param dkw: keyword arguments passed to the Retrying object + """ + # support both @retry and @retry() as valid syntax + if len(dargs) == 1 and callable(dargs[0]): + return retry()(dargs[0]) + else: + + def wrap(f: WrappedFn) -> WrappedFn: + if isinstance(f, retry_base): + warnings.warn( + f"Got retry_base instance ({f.__class__.__name__}) as callable argument, " + f"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)" + ) + r: "BaseRetrying" + if iscoroutinefunction(f): + r = AsyncRetrying(*dargs, **dkw) + elif tornado and hasattr(tornado.gen, "is_coroutine_function") and tornado.gen.is_coroutine_function(f): + r = TornadoRetrying(*dargs, **dkw) + else: + r = Retrying(*dargs, **dkw) + + return r.wraps(f) + + return wrap + + from pip._vendor.tenacity._asyncio import AsyncRetrying # noqa:E402,I100 if tornado: from pip._vendor.tenacity.tornadoweb import TornadoRetrying + + +__all__ = [ + "retry_base", + "retry_all", + "retry_always", + "retry_any", + "retry_if_exception", + 
"retry_if_exception_type", + "retry_if_exception_cause_type", + "retry_if_not_exception_type", + "retry_if_not_result", + "retry_if_result", + "retry_never", + "retry_unless_exception_type", + "retry_if_exception_message", + "retry_if_not_exception_message", + "sleep", + "sleep_using_event", + "stop_after_attempt", + "stop_after_delay", + "stop_all", + "stop_any", + "stop_never", + "stop_when_event_set", + "wait_chain", + "wait_combine", + "wait_exponential", + "wait_fixed", + "wait_incrementing", + "wait_none", + "wait_random", + "wait_random_exponential", + "wait_full_jitter", + "wait_exponential_jitter", + "before_log", + "before_nothing", + "after_log", + "after_nothing", + "before_sleep_log", + "before_sleep_nothing", + "retry", + "WrappedFn", + "TryAgain", + "NO_RESULT", + "DoAttempt", + "DoSleep", + "BaseAction", + "RetryAction", + "RetryError", + "AttemptManager", + "BaseRetrying", + "Retrying", + "Future", + "RetryCallState", + "AsyncRetrying", +] diff --git a/src/pip/_vendor/tenacity/_asyncio.py b/src/pip/_vendor/tenacity/_asyncio.py index 0f32b5f6207..2e50cd7b40e 100644 --- a/src/pip/_vendor/tenacity/_asyncio.py +++ b/src/pip/_vendor/tenacity/_asyncio.py @@ -17,7 +17,7 @@ import functools import sys -import typing +import typing as t from asyncio import sleep from pip._vendor.tenacity import AttemptManager @@ -26,21 +26,20 @@ from pip._vendor.tenacity import DoSleep from pip._vendor.tenacity import RetryCallState -WrappedFn = typing.TypeVar("WrappedFn", bound=typing.Callable) -_RetValT = typing.TypeVar("_RetValT") +WrappedFnReturnT = t.TypeVar("WrappedFnReturnT") +WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable[..., t.Awaitable[t.Any]]) class AsyncRetrying(BaseRetrying): - def __init__(self, sleep: typing.Callable[[float], typing.Awaitable] = sleep, **kwargs: typing.Any) -> None: + sleep: t.Callable[[float], t.Awaitable[t.Any]] + + def __init__(self, sleep: t.Callable[[float], t.Awaitable[t.Any]] = sleep, **kwargs: t.Any) -> None: 
super().__init__(**kwargs) self.sleep = sleep - async def __call__( # type: ignore # Change signature from supertype - self, - fn: typing.Callable[..., typing.Awaitable[_RetValT]], - *args: typing.Any, - **kwargs: typing.Any, - ) -> _RetValT: + async def __call__( # type: ignore[override] + self, fn: WrappedFn, *args: t.Any, **kwargs: t.Any + ) -> WrappedFnReturnT: self.begin() retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs) @@ -50,21 +49,24 @@ async def __call__( # type: ignore # Change signature from supertype try: result = await fn(*args, **kwargs) except BaseException: # noqa: B902 - retry_state.set_exception(sys.exc_info()) + retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type] else: retry_state.set_result(result) elif isinstance(do, DoSleep): retry_state.prepare_for_next_attempt() await self.sleep(do) else: - return do + return do # type: ignore[no-any-return] + + def __iter__(self) -> t.Generator[AttemptManager, None, None]: + raise TypeError("AsyncRetrying object is not iterable") def __aiter__(self) -> "AsyncRetrying": self.begin() self._retry_state = RetryCallState(self, fn=None, args=(), kwargs={}) return self - async def __anext__(self) -> typing.Union[AttemptManager, typing.Any]: + async def __anext__(self) -> AttemptManager: while True: do = self.iter(retry_state=self._retry_state) if do is None: @@ -75,18 +77,18 @@ async def __anext__(self) -> typing.Union[AttemptManager, typing.Any]: self._retry_state.prepare_for_next_attempt() await self.sleep(do) else: - return do + raise StopAsyncIteration def wraps(self, fn: WrappedFn) -> WrappedFn: fn = super().wraps(fn) # Ensure wrapper is recognized as a coroutine function. 
@functools.wraps(fn) - async def async_wrapped(*args: typing.Any, **kwargs: typing.Any) -> typing.Any: + async def async_wrapped(*args: t.Any, **kwargs: t.Any) -> t.Any: return await fn(*args, **kwargs) # Preserve attributes - async_wrapped.retry = fn.retry - async_wrapped.retry_with = fn.retry_with + async_wrapped.retry = fn.retry # type: ignore[attr-defined] + async_wrapped.retry_with = fn.retry_with # type: ignore[attr-defined] - return async_wrapped + return async_wrapped # type: ignore[return-value] diff --git a/src/pip/_vendor/tenacity/_utils.py b/src/pip/_vendor/tenacity/_utils.py index d5c4c9de591..f14ff32096e 100644 --- a/src/pip/_vendor/tenacity/_utils.py +++ b/src/pip/_vendor/tenacity/_utils.py @@ -16,6 +16,7 @@ import sys import typing +from datetime import timedelta # sys.maxsize: @@ -66,3 +67,10 @@ def get_callback_name(cb: typing.Callable[..., typing.Any]) -> str: except AttributeError: pass return ".".join(segments) + + +time_unit_type = typing.Union[int, float, timedelta] + + +def to_seconds(time_unit: time_unit_type) -> float: + return float(time_unit.total_seconds() if isinstance(time_unit, timedelta) else time_unit) diff --git a/src/pip/_vendor/tenacity/after.py b/src/pip/_vendor/tenacity/after.py index c056700f9fa..574c9bcea6e 100644 --- a/src/pip/_vendor/tenacity/after.py +++ b/src/pip/_vendor/tenacity/after.py @@ -36,9 +36,14 @@ def after_log( """After call strategy that logs to some logger the finished attempt.""" def log_it(retry_state: "RetryCallState") -> None: + if retry_state.fn is None: + # NOTE(sileht): can't really happen, but we must please mypy + fn_name = "" + else: + fn_name = _utils.get_callback_name(retry_state.fn) logger.log( log_level, - f"Finished call to '{_utils.get_callback_name(retry_state.fn)}' " + f"Finished call to '{fn_name}' " f"after {sec_format % retry_state.seconds_since_start}(s), " f"this was the {_utils.to_ordinal(retry_state.attempt_number)} time calling it.", ) diff --git a/src/pip/_vendor/tenacity/before.py 
b/src/pip/_vendor/tenacity/before.py index a72c2c5f70e..cfd7dc72ee7 100644 --- a/src/pip/_vendor/tenacity/before.py +++ b/src/pip/_vendor/tenacity/before.py @@ -32,9 +32,14 @@ def before_log(logger: "logging.Logger", log_level: int) -> typing.Callable[["Re """Before call strategy that logs to some logger the attempt.""" def log_it(retry_state: "RetryCallState") -> None: + if retry_state.fn is None: + # NOTE(sileht): can't really happen, but we must please mypy + fn_name = "" + else: + fn_name = _utils.get_callback_name(retry_state.fn) logger.log( log_level, - f"Starting call to '{_utils.get_callback_name(retry_state.fn)}', " + f"Starting call to '{fn_name}', " f"this is the {_utils.to_ordinal(retry_state.attempt_number)} time calling it.", ) diff --git a/src/pip/_vendor/tenacity/before_sleep.py b/src/pip/_vendor/tenacity/before_sleep.py index b35564fbad8..8c6167fb3a6 100644 --- a/src/pip/_vendor/tenacity/before_sleep.py +++ b/src/pip/_vendor/tenacity/before_sleep.py @@ -36,6 +36,14 @@ def before_sleep_log( """Before call strategy that logs to some logger the attempt.""" def log_it(retry_state: "RetryCallState") -> None: + local_exc_info: BaseException | bool | None + + if retry_state.outcome is None: + raise RuntimeError("log_it() called before outcome was set") + + if retry_state.next_action is None: + raise RuntimeError("log_it() called before next_action was set") + if retry_state.outcome.failed: ex = retry_state.outcome.exception() verb, value = "raised", f"{ex.__class__.__name__}: {ex}" @@ -48,10 +56,15 @@ def log_it(retry_state: "RetryCallState") -> None: verb, value = "returned", retry_state.outcome.result() local_exc_info = False # exc_info does not apply when no exception + if retry_state.fn is None: + # NOTE(sileht): can't really happen, but we must please mypy + fn_name = "" + else: + fn_name = _utils.get_callback_name(retry_state.fn) + logger.log( log_level, - f"Retrying {_utils.get_callback_name(retry_state.fn)} " - f"in {retry_state.next_action.sleep} 
seconds as it {verb} {value}.", + f"Retrying {fn_name} " f"in {retry_state.next_action.sleep} seconds as it {verb} {value}.", exc_info=local_exc_info, ) diff --git a/src/pip/_vendor/tenacity/retry.py b/src/pip/_vendor/tenacity/retry.py index 9ebeb62d5c9..38988739d64 100644 --- a/src/pip/_vendor/tenacity/retry.py +++ b/src/pip/_vendor/tenacity/retry.py @@ -36,6 +36,9 @@ def __or__(self, other: "retry_base") -> "retry_any": return retry_any(self, other) +RetryBaseT = typing.Union[retry_base, typing.Callable[["RetryCallState"], bool]] + + class _retry_never(retry_base): """Retry strategy that never rejects any result.""" @@ -63,8 +66,14 @@ def __init__(self, predicate: typing.Callable[[BaseException], bool]) -> None: self.predicate = predicate def __call__(self, retry_state: "RetryCallState") -> bool: + if retry_state.outcome is None: + raise RuntimeError("__call__() called before outcome was set") + if retry_state.outcome.failed: - return self.predicate(retry_state.outcome.exception()) + exception = retry_state.outcome.exception() + if exception is None: + raise RuntimeError("outcome failed but the exception is None") + return self.predicate(exception) else: return False @@ -111,10 +120,17 @@ def __init__( super().__init__(lambda e: not isinstance(e, exception_types)) def __call__(self, retry_state: "RetryCallState") -> bool: + if retry_state.outcome is None: + raise RuntimeError("__call__() called before outcome was set") + # always retry if no exception was raised if not retry_state.outcome.failed: return True - return self.predicate(retry_state.outcome.exception()) + + exception = retry_state.outcome.exception() + if exception is None: + raise RuntimeError("outcome failed but the exception is None") + return self.predicate(exception) class retry_if_exception_cause_type(retry_base): @@ -134,6 +150,9 @@ def __init__( self.exception_cause_types = exception_types def __call__(self, retry_state: "RetryCallState") -> bool: + if retry_state.outcome is None: + raise 
RuntimeError("__call__ called before outcome was set") + if retry_state.outcome.failed: exc = retry_state.outcome.exception() while exc is not None: @@ -151,6 +170,9 @@ def __init__(self, predicate: typing.Callable[[typing.Any], bool]) -> None: self.predicate = predicate def __call__(self, retry_state: "RetryCallState") -> bool: + if retry_state.outcome is None: + raise RuntimeError("__call__() called before outcome was set") + if not retry_state.outcome.failed: return self.predicate(retry_state.outcome.result()) else: @@ -164,6 +186,9 @@ def __init__(self, predicate: typing.Callable[[typing.Any], bool]) -> None: self.predicate = predicate def __call__(self, retry_state: "RetryCallState") -> bool: + if retry_state.outcome is None: + raise RuntimeError("__call__() called before outcome was set") + if not retry_state.outcome.failed: return not self.predicate(retry_state.outcome.result()) else: @@ -215,9 +240,16 @@ def __init__( self.predicate = lambda *args_, **kwargs_: not if_predicate(*args_, **kwargs_) def __call__(self, retry_state: "RetryCallState") -> bool: + if retry_state.outcome is None: + raise RuntimeError("__call__() called before outcome was set") + if not retry_state.outcome.failed: return True - return self.predicate(retry_state.outcome.exception()) + + exception = retry_state.outcome.exception() + if exception is None: + raise RuntimeError("outcome failed but the exception is None") + return self.predicate(exception) class retry_any(retry_base): diff --git a/src/pip/_vendor/tenacity/stop.py b/src/pip/_vendor/tenacity/stop.py index faaae9a8ddb..bb23effdf86 100644 --- a/src/pip/_vendor/tenacity/stop.py +++ b/src/pip/_vendor/tenacity/stop.py @@ -16,6 +16,8 @@ import abc import typing +from pip._vendor.tenacity import _utils + if typing.TYPE_CHECKING: import threading @@ -36,6 +38,9 @@ def __or__(self, other: "stop_base") -> "stop_any": return stop_any(self, other) +StopBaseT = typing.Union[stop_base, typing.Callable[["RetryCallState"], bool]] + + class 
stop_any(stop_base): """Stop if any of the stop condition is valid.""" @@ -89,8 +94,10 @@ def __call__(self, retry_state: "RetryCallState") -> bool: class stop_after_delay(stop_base): """Stop when the time from the first attempt >= limit.""" - def __init__(self, max_delay: float) -> None: - self.max_delay = max_delay + def __init__(self, max_delay: _utils.time_unit_type) -> None: + self.max_delay = _utils.to_seconds(max_delay) def __call__(self, retry_state: "RetryCallState") -> bool: + if retry_state.seconds_since_start is None: + raise RuntimeError("__call__() called but seconds_since_start is not set") return retry_state.seconds_since_start >= self.max_delay diff --git a/src/pip/_vendor/tenacity/tornadoweb.py b/src/pip/_vendor/tenacity/tornadoweb.py index 8f7731af0e6..e19c30b1890 100644 --- a/src/pip/_vendor/tenacity/tornadoweb.py +++ b/src/pip/_vendor/tenacity/tornadoweb.py @@ -33,8 +33,8 @@ def __init__(self, sleep: "typing.Callable[[float], Future[None]]" = gen.sleep, super().__init__(**kwargs) self.sleep = sleep - @gen.coroutine - def __call__( # type: ignore # Change signature from supertype + @gen.coroutine # type: ignore[misc] + def __call__( self, fn: "typing.Callable[..., typing.Union[typing.Generator[typing.Any, typing.Any, _RetValT], Future[_RetValT]]]", *args: typing.Any, @@ -49,7 +49,7 @@ def __call__( # type: ignore # Change signature from supertype try: result = yield fn(*args, **kwargs) except BaseException: # noqa: B902 - retry_state.set_exception(sys.exc_info()) + retry_state.set_exception(sys.exc_info()) # type: ignore[arg-type] else: retry_state.set_result(result) elif isinstance(do, DoSleep): diff --git a/src/pip/_vendor/tenacity/wait.py b/src/pip/_vendor/tenacity/wait.py index 8fdfc8f9d4e..f9349c02836 100644 --- a/src/pip/_vendor/tenacity/wait.py +++ b/src/pip/_vendor/tenacity/wait.py @@ -17,19 +17,12 @@ import abc import random import typing -from datetime import timedelta from pip._vendor.tenacity import _utils if typing.TYPE_CHECKING: 
from pip._vendor.tenacity import RetryCallState -wait_unit_type = typing.Union[int, float, timedelta] - - -def to_seconds(wait_unit: wait_unit_type) -> float: - return float(wait_unit.total_seconds() if isinstance(wait_unit, timedelta) else wait_unit) - class wait_base(abc.ABC): """Abstract base class for wait strategies.""" @@ -43,16 +36,19 @@ def __add__(self, other: "wait_base") -> "wait_combine": def __radd__(self, other: "wait_base") -> typing.Union["wait_combine", "wait_base"]: # make it possible to use multiple waits with the built-in sum function - if other == 0: + if other == 0: # type: ignore[comparison-overlap] return self return self.__add__(other) +WaitBaseT = typing.Union[wait_base, typing.Callable[["RetryCallState"], typing.Union[float, int]]] + + class wait_fixed(wait_base): """Wait strategy that waits a fixed amount of time between each retry.""" - def __init__(self, wait: wait_unit_type) -> None: - self.wait_fixed = to_seconds(wait) + def __init__(self, wait: _utils.time_unit_type) -> None: + self.wait_fixed = _utils.to_seconds(wait) def __call__(self, retry_state: "RetryCallState") -> float: return self.wait_fixed @@ -68,9 +64,9 @@ def __init__(self) -> None: class wait_random(wait_base): """Wait strategy that waits a random amount of time between min/max.""" - def __init__(self, min: wait_unit_type = 0, max: wait_unit_type = 1) -> None: # noqa - self.wait_random_min = to_seconds(min) - self.wait_random_max = to_seconds(max) + def __init__(self, min: _utils.time_unit_type = 0, max: _utils.time_unit_type = 1) -> None: # noqa + self.wait_random_min = _utils.to_seconds(min) + self.wait_random_max = _utils.to_seconds(max) def __call__(self, retry_state: "RetryCallState") -> float: return self.wait_random_min + (random.random() * (self.wait_random_max - self.wait_random_min)) @@ -120,13 +116,13 @@ class wait_incrementing(wait_base): def __init__( self, - start: wait_unit_type = 0, - increment: wait_unit_type = 100, - max: wait_unit_type = 
_utils.MAX_WAIT, # noqa + start: _utils.time_unit_type = 0, + increment: _utils.time_unit_type = 100, + max: _utils.time_unit_type = _utils.MAX_WAIT, # noqa ) -> None: - self.start = to_seconds(start) - self.increment = to_seconds(increment) - self.max = to_seconds(max) + self.start = _utils.to_seconds(start) + self.increment = _utils.to_seconds(increment) + self.max = _utils.to_seconds(max) def __call__(self, retry_state: "RetryCallState") -> float: result = self.start + (self.increment * (retry_state.attempt_number - 1)) @@ -149,13 +145,13 @@ class wait_exponential(wait_base): def __init__( self, multiplier: typing.Union[int, float] = 1, - max: wait_unit_type = _utils.MAX_WAIT, # noqa + max: _utils.time_unit_type = _utils.MAX_WAIT, # noqa exp_base: typing.Union[int, float] = 2, - min: wait_unit_type = 0, # noqa + min: _utils.time_unit_type = 0, # noqa ) -> None: self.multiplier = multiplier - self.min = to_seconds(min) - self.max = to_seconds(max) + self.min = _utils.to_seconds(min) + self.max = _utils.to_seconds(max) self.exp_base = exp_base def __call__(self, retry_state: "RetryCallState") -> float: @@ -206,7 +202,7 @@ class wait_exponential_jitter(wait_base): This implements the strategy described here: https://cloud.google.com/storage/docs/retry-strategy - The wait time is min(initial * (2**n + random.uniform(0, jitter)), maximum) + The wait time is min(initial * 2**n + random.uniform(0, jitter), maximum) where n is the retry count. 
""" diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 70a24f7bd9f..cfdf3dc4437 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -18,6 +18,6 @@ rich==13.3.3 resolvelib==1.0.1 setuptools==67.6.1 six==1.16.0 -tenacity==8.1.0 +tenacity==8.2.2 tomli==2.0.1 webencodings==0.5.1 diff --git a/tools/vendoring/patches/tenacity.patch b/tools/vendoring/patches/tenacity.patch index 85b29c60ca1..c87b1c5b2c3 100644 --- a/tools/vendoring/patches/tenacity.patch +++ b/tools/vendoring/patches/tenacity.patch @@ -2,14 +2,14 @@ diff --git a/src/pip/_vendor/tenacity/__init__.py b/src/pip/_vendor/tenacity/__i index 88c28d2d6..086ad46e1 100644 --- a/src/pip/_vendor/tenacity/__init__.py +++ b/src/pip/_vendor/tenacity/__init__.py -@@ -76,10 +76,12 @@ from .after import after_nothing # noqa +@@ -82,10 +82,12 @@ from .after import after_nothing # noqa from .before_sleep import before_sleep_log # noqa from .before_sleep import before_sleep_nothing # noqa -try: -- import tornado # type: ignore +- import tornado -except ImportError: -- tornado = None # type: ignore +- tornado = None +# Replace a conditional import with a hard-coded None so that pip does +# not attempt to use tornado even if it is present in the environment. 
+# If tornado is non-None, tenacity will attempt to execute some code @@ -22,7 +22,7 @@ index 88c28d2d6..086ad46e1 100644 --- a/src/pip/_vendor/tenacity/__init__.py +++ b/src/pip/_vendor/tenacity/__init__.py -@@ -190,7 +190,7 @@ class RetryError(Exception): +@@ -153,7 +153,7 @@ class RetryError(Exception): self.last_attempt = last_attempt super().__init__(last_attempt) From c0ba81850b5ee6ac99bc999370a76928a026b977 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 14:53:28 +0100 Subject: [PATCH 389/730] Upgrade pygments to 2.14.0 --- news/pygments.vendor.rst | 1 + src/pip/_vendor/pygments/__init__.py | 2 +- .../_vendor/pygments/formatters/__init__.py | 1 - src/pip/_vendor/pygments/formatters/html.py | 6 +- src/pip/_vendor/pygments/formatters/irc.py | 35 ++-------- src/pip/_vendor/pygments/lexer.py | 7 +- src/pip/_vendor/pygments/lexers/__init__.py | 1 - src/pip/_vendor/pygments/lexers/_mapping.py | 24 +++++-- src/pip/_vendor/pygments/lexers/python.py | 64 +++++++++---------- src/pip/_vendor/pygments/sphinxext.py | 62 ++++++++++++++++++ src/pip/_vendor/vendor.txt | 2 +- tools/vendoring/patches/pygments.patch | 22 +++++++ 12 files changed, 150 insertions(+), 77 deletions(-) create mode 100644 news/pygments.vendor.rst diff --git a/news/pygments.vendor.rst b/news/pygments.vendor.rst new file mode 100644 index 00000000000..a6c8edafc69 --- /dev/null +++ b/news/pygments.vendor.rst @@ -0,0 +1 @@ +Upgrade pygments to 2.14.0 diff --git a/src/pip/_vendor/pygments/__init__.py b/src/pip/_vendor/pygments/__init__.py index 7185e537694..d9b0a8dea2e 100644 --- a/src/pip/_vendor/pygments/__init__.py +++ b/src/pip/_vendor/pygments/__init__.py @@ -26,7 +26,7 @@ """ from io import StringIO, BytesIO -__version__ = '2.13.0' +__version__ = '2.14.0' __docformat__ = 'restructuredtext' __all__ = ['lex', 'format', 'highlight'] diff --git a/src/pip/_vendor/pygments/formatters/__init__.py b/src/pip/_vendor/pygments/formatters/__init__.py index 43c4c89aacf..7ecf7eee35f 100644 --- 
a/src/pip/_vendor/pygments/formatters/__init__.py +++ b/src/pip/_vendor/pygments/formatters/__init__.py @@ -8,7 +8,6 @@ :license: BSD, see LICENSE for details. """ -import re import sys import types from fnmatch import fnmatch diff --git a/src/pip/_vendor/pygments/formatters/html.py b/src/pip/_vendor/pygments/formatters/html.py index d5cda4c4bc3..f22b200c0e6 100644 --- a/src/pip/_vendor/pygments/formatters/html.py +++ b/src/pip/_vendor/pygments/formatters/html.py @@ -878,10 +878,12 @@ def _format_lines(self, tokensource): # for all but the last line for part in parts[:-1]: if line: - if lspan != cspan: + # Also check for part being non-empty, so we avoid creating + # empty tags + if lspan != cspan and part: line.extend(((lspan and ''), cspan, part, (cspan and ''), lsep)) - else: # both are the same + else: # both are the same, or the current part was empty line.extend((part, (lspan and ''), lsep)) yield 1, ''.join(line) line = [] diff --git a/src/pip/_vendor/pygments/formatters/irc.py b/src/pip/_vendor/pygments/formatters/irc.py index 3f6d52deb4c..53e19b83d1e 100644 --- a/src/pip/_vendor/pygments/formatters/irc.py +++ b/src/pip/_vendor/pygments/formatters/irc.py @@ -128,38 +128,12 @@ def __init__(self, **options): self._lineno = 0 def _write_lineno(self, outfile): - self._lineno += 1 - outfile.write("\n%04d: " % self._lineno) - - def _format_unencoded_with_lineno(self, tokensource, outfile): - self._write_lineno(outfile) - - for ttype, value in tokensource: - if value.endswith("\n"): - self._write_lineno(outfile) - value = value[:-1] - color = self.colorscheme.get(ttype) - while color is None: - ttype = ttype.parent - color = self.colorscheme.get(ttype) - if color: - color = color[self.darkbg] - spl = value.split('\n') - for line in spl[:-1]: - self._write_lineno(outfile) - if line: - outfile.write(ircformat(color, line[:-1])) - if spl[-1]: - outfile.write(ircformat(color, spl[-1])) - else: - outfile.write(value) - - outfile.write("\n") + if self.linenos: + 
self._lineno += 1 + outfile.write("%04d: " % self._lineno) def format_unencoded(self, tokensource, outfile): - if self.linenos: - self._format_unencoded_with_lineno(tokensource, outfile) - return + self._write_lineno(outfile) for ttype, value in tokensource: color = self.colorscheme.get(ttype) @@ -173,6 +147,7 @@ def format_unencoded(self, tokensource, outfile): if line: outfile.write(ircformat(color, line)) outfile.write('\n') + self._write_lineno(outfile) if spl[-1]: outfile.write(ircformat(color, spl[-1])) else: diff --git a/src/pip/_vendor/pygments/lexer.py b/src/pip/_vendor/pygments/lexer.py index ec7f4de32cf..74ab9b9088f 100644 --- a/src/pip/_vendor/pygments/lexer.py +++ b/src/pip/_vendor/pygments/lexer.py @@ -14,15 +14,16 @@ from pip._vendor.pygments.filter import apply_filters, Filter from pip._vendor.pygments.filters import get_filter_by_name -from pip._vendor.pygments.token import Error, Text, Other, _TokenType +from pip._vendor.pygments.token import Error, Text, Other, Whitespace, _TokenType from pip._vendor.pygments.util import get_bool_opt, get_int_opt, get_list_opt, \ make_analysator, Future, guess_decode from pip._vendor.pygments.regexopt import regex_opt __all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer', 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this', - 'default', 'words'] + 'default', 'words', 'line_re'] +line_re = re.compile('.*?\n') _encoding_map = [(b'\xef\xbb\xbf', 'utf-8'), (b'\xff\xfe\0\0', 'utf-32'), @@ -670,7 +671,7 @@ def get_tokens_unprocessed(self, text, stack=('root',)): # at EOL, reset state to "root" statestack = ['root'] statetokens = tokendefs['root'] - yield pos, Text, '\n' + yield pos, Whitespace, '\n' pos += 1 continue yield pos, Error, text[pos] diff --git a/src/pip/_vendor/pygments/lexers/__init__.py b/src/pip/_vendor/pygments/lexers/__init__.py index ed69f24ed35..e75a05791e2 100644 --- a/src/pip/_vendor/pygments/lexers/__init__.py +++ b/src/pip/_vendor/pygments/lexers/__init__.py @@ 
-8,7 +8,6 @@ :license: BSD, see LICENSE for details. """ -import re import sys import types from fnmatch import fnmatch diff --git a/src/pip/_vendor/pygments/lexers/_mapping.py b/src/pip/_vendor/pygments/lexers/_mapping.py index 40dcaa3c778..1eaaf56e9c2 100644 --- a/src/pip/_vendor/pygments/lexers/_mapping.py +++ b/src/pip/_vendor/pygments/lexers/_mapping.py @@ -30,6 +30,7 @@ 'AppleScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), 'ArduinoLexer': ('pip._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), 'ArrowLexer': ('pip._vendor.pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()), + 'ArturoLexer': ('pip._vendor.pygments.lexers.arturo', 'Arturo', ('arturo', 'art'), ('*.art',), ()), 'AscLexer': ('pip._vendor.pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')), 'AspectJLexer': ('pip._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), 'AsymptoteLexer': ('pip._vendor.pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)), @@ -152,13 +153,14 @@ 'EvoqueXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), 'ExeclineLexer': ('pip._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()), 'EzhilLexer': ('pip._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), - 'FSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)), + 'FSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi', '*.fsx'), ('text/x-fsharp',)), 'FStarLexer': ('pip._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', 
'*.fsti'), ('text/x-fstar',)), 'FactorLexer': ('pip._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), 'FancyLexer': ('pip._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), 'FantomLexer': ('pip._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), 'FelixLexer': ('pip._vendor.pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), 'FennelLexer': ('pip._vendor.pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()), + 'FiftLexer': ('pip._vendor.pygments.lexers.fift', 'Fift', ('fift', 'fif'), ('*.fif',), ()), 'FishShellLexer': ('pip._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), 'FlatlineLexer': ('pip._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)), 'FloScriptLexer': ('pip._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()), @@ -167,7 +169,9 @@ 'FortranLexer': ('pip._vendor.pygments.lexers.fortran', 'Fortran', ('fortran', 'f90'), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)), 'FoxProLexer': ('pip._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), 'FreeFemLexer': ('pip._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)), + 'FuncLexer': ('pip._vendor.pygments.lexers.func', 'FunC', ('func', 'fc'), ('*.fc', '*.func'), ()), 'FutharkLexer': ('pip._vendor.pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)), + 'GAPConsoleLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP session', ('gap-console', 'gap-repl'), ('*.tst',), ()), 'GAPLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()), 'GDScriptLexer': ('pip._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), 
('*.gd',), ('text/x-gdscript', 'application/x-gdscript')), 'GLShaderLexer': ('pip._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), @@ -196,7 +200,7 @@ 'HaxeLexer': ('pip._vendor.pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), 'HexdumpLexer': ('pip._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), 'HsailLexer': ('pip._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)), - 'HspecLexer': ('pip._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()), + 'HspecLexer': ('pip._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), ('*Spec.hs',), ()), 'HtmlDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), ('*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2'), ('text/html+django', 'text/html+jinja')), 'HtmlGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), 'HtmlLexer': ('pip._vendor.pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), @@ -236,6 +240,7 @@ 'JsonBareObjectLexer': ('pip._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()), 'JsonLdLexer': ('pip._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), 'JsonLexer': ('pip._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')), + 'JsonnetLexer': ('pip._vendor.pygments.lexers.jsonnet', 'Jsonnet', ('jsonnet',), ('*.jsonnet', '*.libsonnet'), ()), 'JspLexer': ('pip._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), 'JuliaConsoleLexer': ('pip._vendor.pygments.lexers.julia', 'Julia console', 
('jlcon', 'julia-repl'), (), ()), 'JuliaLexer': ('pip._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), @@ -270,8 +275,10 @@ 'LogosLexer': ('pip._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), 'LogtalkLexer': ('pip._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), 'LuaLexer': ('pip._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), - 'MCFunctionLexer': ('pip._vendor.pygments.lexers.mcfunction', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)), + 'MCFunctionLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)), + 'MCSchemaLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCSchema', ('mcschema',), ('*.mcschema',), ('text/mcschema',)), 'MIMELexer': ('pip._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')), + 'MIPSLexer': ('pip._vendor.pygments.lexers.mips', 'MIPS', ('mips',), ('*.mips', '*.MIPS'), ()), 'MOOCodeLexer': ('pip._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), 'MSDOSSessionLexer': ('pip._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()), 'Macaulay2Lexer': ('pip._vendor.pygments.lexers.macaulay2', 'Macaulay2', ('macaulay2',), ('*.m2',), ()), @@ -316,7 +323,7 @@ 'MyghtyXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)), 'NCLLexer': ('pip._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)), 'NSISLexer': ('pip._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), - 'NasmLexer': ('pip._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', 
'*.ASM'), ('text/x-nasm',)), + 'NasmLexer': ('pip._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM', '*.nasm'), ('text/x-nasm',)), 'NasmObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), 'NemerleLexer': ('pip._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), 'NesCLexer': ('pip._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), @@ -350,6 +357,7 @@ 'PegLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)), 'Perl6Lexer': ('pip._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')), 'PerlLexer': ('pip._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')), + 'PhixLexer': ('pip._vendor.pygments.lexers.phix', 'Phix', ('phix',), ('*.exw',), ('text/x-phix',)), 'PhpLexer': ('pip._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), 'PigLexer': ('pip._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), 'PikeLexer': ('pip._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), @@ -357,6 +365,7 @@ 'PlPgsqlLexer': ('pip._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), 'PointlessLexer': ('pip._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()), 'PonyLexer': ('pip._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()), + 'PortugolLexer': ('pip._vendor.pygments.lexers.pascal', 'Portugol', ('portugol',), ('*.alg', '*.portugol'), ()), 'PostScriptLexer': ('pip._vendor.pygments.lexers.graphics', 
'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), 'PostgresConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), 'PostgresLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), @@ -376,7 +385,7 @@ 'Python2Lexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')), 'Python2TracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)), 'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), - 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')), + 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')), 'PythonTracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')), 'PythonUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()), 'QBasicLexer': ('pip._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), @@ -421,7 +430,7 @@ 'SASLexer': ('pip._vendor.pygments.lexers.sas', 'SAS', 
('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), 'SLexer': ('pip._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), 'SMLLexer': ('pip._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), - 'SNBTLexer': ('pip._vendor.pygments.lexers.mcfunction', 'SNBT', ('snbt',), ('*.snbt',), ('text/snbt',)), + 'SNBTLexer': ('pip._vendor.pygments.lexers.minecraft', 'SNBT', ('snbt',), ('*.snbt',), ('text/snbt',)), 'SarlLexer': ('pip._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)), 'SassLexer': ('pip._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), 'SaviLexer': ('pip._vendor.pygments.lexers.savi', 'Savi', ('savi',), ('*.savi',), ()), @@ -485,6 +494,7 @@ 'ThingsDBLexer': ('pip._vendor.pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()), 'ThriftLexer': ('pip._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), 'TiddlyWiki5Lexer': ('pip._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)), + 'TlbLexer': ('pip._vendor.pygments.lexers.tlb', 'Tl-b', ('tlb',), ('*.tlb',), ()), 'TodotxtLexer': ('pip._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), 'TransactSqlLexer': ('pip._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), 'TreetopLexer': ('pip._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), @@ -519,6 +529,8 @@ 'WatLexer': ('pip._vendor.pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()), 'WebIDLLexer': ('pip._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), 
('*.webidl',), ()), 'WhileyLexer': ('pip._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), + 'WoWTocLexer': ('pip._vendor.pygments.lexers.wowtoc', 'World of Warcraft TOC', ('wowtoc',), ('*.toc',), ()), + 'WrenLexer': ('pip._vendor.pygments.lexers.wren', 'Wren', ('wren',), ('*.wren',), ()), 'X10Lexer': ('pip._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)), 'XMLUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'XML+UL4', ('xml+ul4',), ('*.xmlul4',), ()), 'XQueryLexer': ('pip._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), diff --git a/src/pip/_vendor/pygments/lexers/python.py b/src/pip/_vendor/pygments/lexers/python.py index c24e3c86ef2..3341a382685 100644 --- a/src/pip/_vendor/pygments/lexers/python.py +++ b/src/pip/_vendor/pygments/lexers/python.py @@ -12,18 +12,16 @@ import keyword from pip._vendor.pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \ - default, words, combined, do_insertions, this + default, words, combined, do_insertions, this, line_re from pip._vendor.pygments.util import get_bool_opt, shebang_matches from pip._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Generic, Other, Error + Number, Punctuation, Generic, Other, Error, Whitespace from pip._vendor.pygments import unistring as uni __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer', 'Python2Lexer', 'Python2TracebackLexer', 'CythonLexer', 'DgLexer', 'NumPyLexer'] -line_re = re.compile('.*?\n') - class PythonLexer(RegexLexer): """ @@ -42,6 +40,8 @@ class PythonLexer(RegexLexer): filenames = [ '*.py', '*.pyw', + # Type stubs + '*.pyi', # Jython '*.jy', # Sage @@ -100,11 +100,11 @@ def fstring_rules(ttype): tokens = { 'root': [ - (r'\n', Text), + (r'\n', Whitespace), (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")', - 
bygroups(Text, String.Affix, String.Doc)), + bygroups(Whitespace, String.Affix, String.Doc)), (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')", - bygroups(Text, String.Affix, String.Doc)), + bygroups(Whitespace, String.Affix, String.Doc)), (r'\A#!.+$', Comment.Hashbang), (r'#.*$', Comment.Single), (r'\\\n', Text), @@ -169,7 +169,7 @@ def fstring_rules(ttype): combined('bytesescape', 'dqs')), ("([bB])(')", bygroups(String.Affix, String.Single), combined('bytesescape', 'sqs')), - + (r'[^\S\n]+', Text), include('numbers'), (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator), @@ -192,13 +192,13 @@ def fstring_rules(ttype): (r'(=\s*)?' # debug (https://bugs.python.org/issue36817) r'(\![sraf])?' # conversion r':', String.Interpol, '#pop'), - (r'\s+', Text), # allow new lines + (r'\s+', Whitespace), # allow new lines include('expr'), ], 'expr-inside-fstring-inner': [ (r'[{([]', Punctuation, 'expr-inside-fstring-inner'), (r'[])}]', Punctuation, '#pop'), - (r'\s+', Text), # allow new lines + (r'\s+', Whitespace), # allow new lines include('expr'), ], 'expr-keywords': [ @@ -229,7 +229,7 @@ def fstring_rules(ttype): ], 'soft-keywords-inner': [ # optional `_` keyword - (r'(\s+)([^\n_]*)(_\b)', bygroups(Text, using(this), Keyword)), + (r'(\s+)([^\n_]*)(_\b)', bygroups(Whitespace, using(this), Keyword)), default('#pop') ], 'builtins': [ @@ -445,11 +445,11 @@ def innerstring_rules(ttype): tokens = { 'root': [ - (r'\n', Text), + (r'\n', Whitespace), (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")', - bygroups(Text, String.Affix, String.Doc)), + bygroups(Whitespace, String.Affix, String.Doc)), (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')", - bygroups(Text, String.Affix, String.Doc)), + bygroups(Whitespace, String.Affix, String.Doc)), (r'[^\S\n]+', Text), (r'\A#!.+$', Comment.Hashbang), (r'#.*$', Comment.Single), @@ -742,7 +742,7 @@ class PythonTracebackLexer(RegexLexer): tokens = { 'root': [ - (r'\n', Text), + (r'\n', Whitespace), (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), 
(r'^During handling of the above exception, another ' r'exception occurred:\n\n', Generic.Traceback), @@ -753,24 +753,24 @@ class PythonTracebackLexer(RegexLexer): ], 'intb': [ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', - bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)), + bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)), (r'^( File )("[^"]+")(, line )(\d+)(\n)', - bygroups(Text, Name.Builtin, Text, Number, Text)), + bygroups(Text, Name.Builtin, Text, Number, Whitespace)), (r'^( )(.+)(\n)', - bygroups(Text, using(PythonLexer), Text), 'markers'), + bygroups(Whitespace, using(PythonLexer), Whitespace), 'markers'), (r'^([ \t]*)(\.\.\.)(\n)', - bygroups(Text, Comment, Text)), # for doctests... + bygroups(Whitespace, Comment, Whitespace)), # for doctests... (r'^([^:]+)(: )(.+)(\n)', - bygroups(Generic.Error, Text, Name, Text), '#pop'), + bygroups(Generic.Error, Text, Name, Whitespace), '#pop'), (r'^([a-zA-Z_][\w.]*)(:?\n)', - bygroups(Generic.Error, Text), '#pop') + bygroups(Generic.Error, Whitespace), '#pop') ], 'markers': [ # Either `PEP 657 ` # error locations in Python 3.11+, or single-caret markers # for syntax errors before that. (r'^( {4,})([~^]+)(\n)', - bygroups(Text, Punctuation.Marker, Text), + bygroups(Whitespace, Punctuation.Marker, Whitespace), '#pop'), default('#pop'), ], @@ -808,17 +808,17 @@ class Python2TracebackLexer(RegexLexer): ], 'intb': [ (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', - bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)), + bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)), (r'^( File )("[^"]+")(, line )(\d+)(\n)', - bygroups(Text, Name.Builtin, Text, Number, Text)), + bygroups(Text, Name.Builtin, Text, Number, Whitespace)), (r'^( )(.+)(\n)', - bygroups(Text, using(Python2Lexer), Text), 'marker'), + bygroups(Text, using(Python2Lexer), Whitespace), 'marker'), (r'^([ \t]*)(\.\.\.)(\n)', - bygroups(Text, Comment, Text)), # for doctests... 
+ bygroups(Text, Comment, Whitespace)), # for doctests... (r'^([^:]+)(: )(.+)(\n)', - bygroups(Generic.Error, Text, Name, Text), '#pop'), + bygroups(Generic.Error, Text, Name, Whitespace), '#pop'), (r'^([a-zA-Z_]\w*)(:?\n)', - bygroups(Generic.Error, Text), '#pop') + bygroups(Generic.Error, Whitespace), '#pop') ], 'marker': [ # For syntax errors. @@ -843,13 +843,13 @@ class CythonLexer(RegexLexer): tokens = { 'root': [ - (r'\n', Text), - (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)), - (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)), + (r'\n', Whitespace), + (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Whitespace, String.Doc)), + (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Whitespace, String.Doc)), (r'[^\S\n]+', Text), (r'#.*$', Comment), (r'[]{}:(),;[]', Punctuation), - (r'\\\n', Text), + (r'\\\n', Whitespace), (r'\\', Text), (r'(in|is|and|or|not)\b', Operator.Word), (r'(<)([a-zA-Z0-9.?]+)(>)', diff --git a/src/pip/_vendor/pygments/sphinxext.py b/src/pip/_vendor/pygments/sphinxext.py index c41bd49dd45..3537ecdb26f 100644 --- a/src/pip/_vendor/pygments/sphinxext.py +++ b/src/pip/_vendor/pygments/sphinxext.py @@ -74,6 +74,8 @@ def run(self): out = self.document_formatters() elif self.arguments[0] == 'filters': out = self.document_filters() + elif self.arguments[0] == 'lexers_overview': + out = self.document_lexers_overview() else: raise Exception('invalid argument for "pygmentsdoc" directive') node = nodes.compound() @@ -83,6 +85,66 @@ def run(self): self.state.document.settings.record_dependencies.add(fn) return node.children + def document_lexers_overview(self): + """Generate a tabular overview of all lexers. 
+ + The columns are the lexer name, the extensions handled by this lexer + (or "None"), the aliases and a link to the lexer class.""" + from pip._vendor.pygments.lexers._mapping import LEXERS + from pip._vendor.pygments.lexers import find_lexer_class + out = [] + + table = [] + + def format_link(name, url): + if url: + return f'`{name} <{url}>`_' + return name + + for classname, data in sorted(LEXERS.items(), key=lambda x: x[1][1].lower()): + lexer_cls = find_lexer_class(data[1]) + extensions = lexer_cls.filenames + lexer_cls.alias_filenames + + table.append({ + 'name': format_link(data[1], lexer_cls.url), + 'extensions': ', '.join(extensions).replace('*', '\\*').replace('_', '\\') or 'None', + 'aliases': ', '.join(data[2]), + 'class': f'{data[0]}.{classname}' + }) + + column_names = ['name', 'extensions', 'aliases', 'class'] + column_lengths = [max([len(row[column]) for row in table if row[column]]) + for column in column_names] + + def write_row(*columns): + """Format a table row""" + out = [] + for l, c in zip(column_lengths, columns): + if c: + out.append(c.ljust(l)) + else: + out.append(' '*l) + + return ' '.join(out) + + def write_seperator(): + """Write a table separator row""" + sep = ['='*c for c in column_lengths] + return write_row(*sep) + + out.append(write_seperator()) + out.append(write_row('Name', 'Extension(s)', 'Short name(s)', 'Lexer class')) + out.append(write_seperator()) + for row in table: + out.append(write_row( + row['name'], + row['extensions'], + row['aliases'], + f':class:`~{row["class"]}`')) + out.append(write_seperator()) + + return '\n'.join(out) + def document_lexers(self): from pip._vendor.pygments.lexers._mapping import LEXERS out = [] diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index cfdf3dc4437..3974df3f11b 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -13,7 +13,7 @@ requests==2.28.2 idna==3.4 urllib3==1.26.15 rich==13.3.3 - pygments==2.13.0 + pygments==2.14.0 
typing_extensions==4.5.0 resolvelib==1.0.1 setuptools==67.6.1 diff --git a/tools/vendoring/patches/pygments.patch b/tools/vendoring/patches/pygments.patch index 3cabf9d6dcc..035c7dcaea6 100644 --- a/tools/vendoring/patches/pygments.patch +++ b/tools/vendoring/patches/pygments.patch @@ -35,3 +35,25 @@ index c6e2517df..76255b525 100644 + sys.exit(main(sys.argv)) except KeyboardInterrupt: sys.exit(1) +diff --git a/src/pip/_vendor/pygments/sphinxext.py b/src/pip/_vendor/pygments/sphinxext.py +index 3ea2e36e1..23c19504c 100644 +--- a/src/pip/_vendor/pygments/sphinxext.py ++++ b/src/pip/_vendor/pygments/sphinxext.py +@@ -91,7 +91,7 @@ class PygmentsDoc(Directive): + The columns are the lexer name, the extensions handled by this lexer + (or "None"), the aliases and a link to the lexer class.""" + from pygments.lexers._mapping import LEXERS +- import pygments.lexers ++ from pygments.lexers import find_lexer_class + out = [] + + table = [] +@@ -102,7 +102,7 @@ class PygmentsDoc(Directive): + return name + + for classname, data in sorted(LEXERS.items(), key=lambda x: x[1][1].lower()): +- lexer_cls = pygments.lexers.find_lexer_class(data[1]) ++ lexer_cls = find_lexer_class(data[1]) + extensions = lexer_cls.filenames + lexer_cls.alias_filenames + + table.append({ From 5ae08663dbc88d5372880c1c04b7cc7d196adfa6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 2 Apr 2023 18:39:45 +0200 Subject: [PATCH 390/730] Update comment --- src/pip/_internal/pyproject.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py index 57fef57077d..eb8e12b2dec 100644 --- a/src/pip/_internal/pyproject.py +++ b/src/pip/_internal/pyproject.py @@ -91,7 +91,7 @@ def load_pyproject_toml( # If we haven't worked out whether to use PEP 517 yet, # and the user hasn't explicitly stated a preference, # we do so if the project has a pyproject.toml file - # or if we cannot import setuptools. 
+ # or if we cannot import setuptools or wheels. # We fallback to PEP 517 when without setuptools or without the wheel package, # so setuptools can be installed as a default build backend. From 2ff2d0e375e163792e5dec8ef48481074c86e90a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 2 Apr 2023 18:49:34 +0200 Subject: [PATCH 391/730] Deprecate legacy projects ignoring config settings In the future the presence of config settings will auto enable pep517. --- news/11915.removal.rst | 3 +++ src/pip/_internal/req/req_install.py | 9 +++++++++ 2 files changed, 12 insertions(+) create mode 100644 news/11915.removal.rst diff --git a/news/11915.removal.rst b/news/11915.removal.rst new file mode 100644 index 00000000000..e54b5d574c0 --- /dev/null +++ b/news/11915.removal.rst @@ -0,0 +1,3 @@ +Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now print +a deprecation warning. In the future the presence of config settings will automatically +enable the default build backend for legacy projects and pass the setttings to it. 
diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index f0b7c5bcb14..2c628d36e93 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -477,6 +477,15 @@ def load_pyproject_toml(self) -> None: ) if pyproject_toml_data is None: + if self.config_settings: + deprecated( + reason=f"Config settings are ignored for project {self}.", + replacement=( + "to use --use-pep517 or add a " + "pyproject.toml file to the project" + ), + gone_in="23.3", + ) self.use_pep517 = False return From 84d3591dcfced938a5808863ecd35ebb530a821d Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 19:04:14 +0100 Subject: [PATCH 392/730] Add documentation on how to fix a local patch during revendoring (#11918) --- src/pip/_vendor/README.rst | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/pip/_vendor/README.rst b/src/pip/_vendor/README.rst index 077f1abf773..a21314543bd 100644 --- a/src/pip/_vendor/README.rst +++ b/src/pip/_vendor/README.rst @@ -118,6 +118,30 @@ Vendoring is automated via the `vendoring ` ``pip/_vendor/vendor.txt`` and the different patches in ``tools/vendoring/patches``. Launch it via ``vendoring sync . -v`` (requires ``vendoring>=0.2.2``). +Tool configuration is done via ``pyproject.toml``. + + +Managing Local Patches +====================== + +The ``vendoring`` tool automatically applies our local patches, but updating, +the patches sometimes no longer apply cleanly. In that case, the update will +fail. To resolve this, take the following steps: + +1. Revert any incomplete changes in the revendoring branch, to ensure you have + a clean starting point. +2. Run the revendoring of the library with a problem again: ``nox -s vendoring + -- --upgrade ``. +3. This will fail again, but you will have the original source in your working + directory. Review the existing patch against the source, and modify the patch + to reflect the new version of the source. 
If you ``git add`` the changes the + vendoring made, you can modify the source to reflect the patch file and then + generate a new patch with ``git diff``. +4. Now, revert everything *except* the patch file changes. Leave the modified + patch file unstaged but saved in the working tree. +5. Re-run the vendoring. This time, it should pick up the changed patch file + and apply it cleanly. The patch file changes will be committed along with the + revendoring, so the new commit should be ready to test and publish as a PR. Debundling From fb454572b2e455b22a59822b53b203b7232570e6 Mon Sep 17 00:00:00 2001 From: Wu Zhenyu Date: Sun, 28 Aug 2022 17:29:25 +0800 Subject: [PATCH 393/730] Fix `pip completion --zsh` --- news/11416.bugfix.rst | 1 + src/pip/_internal/commands/completion.py | 13 ++++--------- tests/functional/test_completion.py | 16 ++++++---------- 3 files changed, 11 insertions(+), 19 deletions(-) create mode 100644 news/11416.bugfix.rst diff --git a/news/11416.bugfix.rst b/news/11416.bugfix.rst new file mode 100644 index 00000000000..3815b2da864 --- /dev/null +++ b/news/11416.bugfix.rst @@ -0,0 +1 @@ +Fix ``pip completion --zsh``. 
diff --git a/src/pip/_internal/commands/completion.py b/src/pip/_internal/commands/completion.py index deaa30899e6..30233fc7ad2 100644 --- a/src/pip/_internal/commands/completion.py +++ b/src/pip/_internal/commands/completion.py @@ -22,15 +22,10 @@ complete -o default -F _pip_completion {prog} """, "zsh": """ - function _pip_completion {{ - local words cword - read -Ac words - read -cn cword - reply=( $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) - }} - compctl -K _pip_completion {prog} + #compdef -P pip[0-9.]# + compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) """, "fish": """ function __fish_complete_pip diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py index b02cd4fa317..332dd120272 100644 --- a/tests/functional/test_completion.py +++ b/tests/functional/test_completion.py @@ -44,15 +44,10 @@ ( "zsh", """\ -function _pip_completion { - local words cword - read -Ac words - read -cn cword - reply=( $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$(( cword-1 )) \\ - PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) -} -compctl -K _pip_completion pip""", +#compdef -P pip[0-9.]# +compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )""", ), ( "powershell", @@ -392,7 +387,8 @@ def test_completion_path_after_option( ) -@pytest.mark.parametrize("flag", ["--bash", "--zsh", "--fish", "--powershell"]) +# zsh completion script doesn't contain pip3 +@pytest.mark.parametrize("flag", ["--bash", "--fish", "--powershell"]) def test_completion_uses_same_executable_name( autocomplete_script: PipTestEnvironment, flag: str, deprecated_python: bool ) -> None: From fc295156dcd409ea71f8cc33b09b72431c0a5180 Mon Sep 17 00:00:00 2001 From: Collin Anderson Date: Tue, 4 Apr 2023 13:02:23 -0400 Subject: [PATCH 394/730] Avoid parsing dist.version twice in a row dist.version is a 
property that parses the version string each time, so it's slightly faster to only parse it once. --- src/pip/_internal/operations/freeze.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py index 930d4c6005e..35445684514 100644 --- a/src/pip/_internal/operations/freeze.py +++ b/src/pip/_internal/operations/freeze.py @@ -145,9 +145,10 @@ def freeze( def _format_as_name_version(dist: BaseDistribution) -> str: - if isinstance(dist.version, Version): - return f"{dist.raw_name}=={dist.version}" - return f"{dist.raw_name}==={dist.version}" + dist_version = dist.version + if isinstance(dist_version, Version): + return f"{dist.raw_name}=={dist_version}" + return f"{dist.raw_name}==={dist_version}" def _get_editable_info(dist: BaseDistribution) -> _EditableInfo: From 6e5d4678220063f7d0feae82ff9cb970252beb76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Fri, 7 Apr 2023 23:03:03 +0200 Subject: [PATCH 395/730] Various fixes to the link hash parser --- news/11936.bugfix.rst | 1 + src/pip/_internal/models/link.py | 4 ++-- tests/unit/test_collector.py | 12 ++++++++++++ 3 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 news/11936.bugfix.rst diff --git a/news/11936.bugfix.rst b/news/11936.bugfix.rst new file mode 100644 index 00000000000..4ae3ad69a31 --- /dev/null +++ b/news/11936.bugfix.rst @@ -0,0 +1 @@ +Fix and improve the parsing of hashes embedded in URL fragments. diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index a1e4d5a08df..f4a1313ab47 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -61,13 +61,13 @@ class LinkHash: # against Hashes when hash-checking is needed. This is easier to debug than # proactively discarding an invalid hex digest, as we handle incorrect hashes # and malformed hashes in the same place. 
- r"({choices})=(.*)".format( + r"[#&]({choices})=([^&]+)".format( choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES) ), ) def __post_init__(self) -> None: - assert self._hash_re.match(f"{self.name}={self.value}") + assert self._hash_re.match(f"#{self.name}={self.value}") @classmethod @functools.lru_cache(maxsize=None) diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 26a2ce4b9a6..5f949d14d16 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -1051,6 +1051,18 @@ def expand_path(path: str) -> str: "https://pypi.org/pip-18.0.tar.gz#sha256=aa113592bbe", LinkHash("sha256", "aa113592bbe"), ), + ( + "https://pypi.org/pip-18.0.tar.gz#sha256=aa113592bbe&subdirectory=setup", + LinkHash("sha256", "aa113592bbe"), + ), + ( + "https://pypi.org/pip-18.0.tar.gz#subdirectory=setup&sha256=aa113592bbe", + LinkHash("sha256", "aa113592bbe"), + ), + # "xsha256" is not a valid algorithm, so we discard it. + ("https://pypi.org/pip-18.0.tar.gz#xsha256=aa113592bbe", None), + # Discard empty hash. + ("https://pypi.org/pip-18.0.tar.gz#sha256=", None), ( "https://pypi.org/pip-18.0.tar.gz#md5=aa113592bbe", LinkHash("md5", "aa113592bbe"), From 48152bb2e147f11a350b9b7c90057b15b6ba70ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Fri, 7 Apr 2023 23:44:22 +0200 Subject: [PATCH 396/730] pep 658 hashes are not URL fragment hashes --- src/pip/_internal/models/link.py | 34 ++++++++++++++++++++------------ tests/unit/test_collector.py | 18 ++++++++++++++++- 2 files changed, 38 insertions(+), 14 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index f4a1313ab47..765dbce2c42 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -55,7 +55,7 @@ class LinkHash: name: str value: str - _hash_re = re.compile( + _hash_url_fragment_re = re.compile( # NB: we do not validate that the second group (.*) is a valid hex # digest. 
Instead, we simply keep that string in this class, and then check it # against Hashes when hash-checking is needed. This is easier to debug than @@ -67,13 +67,26 @@ class LinkHash: ) def __post_init__(self) -> None: - assert self._hash_re.match(f"#{self.name}={self.value}") + assert self._hash_url_fragment_re.match(f"#{self.name}={self.value}") + + @classmethod + def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]: + """Parse a PEP 658 data-dist-info-metadata hash.""" + if dist_info_metadata == "true": + return None + try: + name, value = dist_info_metadata.split("=", 1) + except ValueError: + return None + if name not in _SUPPORTED_HASHES: + return None + return cls(name=name, value=value) @classmethod @functools.lru_cache(maxsize=None) - def split_hash_name_and_value(cls, url: str) -> Optional["LinkHash"]: + def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]: """Search a string for a checksum algorithm name and encoded output value.""" - match = cls._hash_re.search(url) + match = cls._hash_url_fragment_re.search(url) if match is None: return None name, value = match.groups() @@ -217,7 +230,7 @@ def __init__( # trying to set a new value. self._url = url - link_hash = LinkHash.split_hash_name_and_value(url) + link_hash = LinkHash.find_hash_url_fragment(url) hashes_from_link = {} if link_hash is None else link_hash.as_dict() if hashes is None: self._hashes = hashes_from_link @@ -402,15 +415,10 @@ def metadata_link(self) -> Optional["Link"]: if self.dist_info_metadata is None: return None metadata_url = f"{self.url_without_fragment}.metadata" - # If data-dist-info-metadata="true" is set, then the metadata file exists, - # but there is no information about its checksum or anything else. 
- if self.dist_info_metadata != "true": - link_hash = LinkHash.split_hash_name_and_value(self.dist_info_metadata) - else: - link_hash = None - if link_hash is None: + metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata) + if metadata_link_hash is None: return Link(metadata_url) - return Link(metadata_url, hashes=link_hash.as_dict()) + return Link(metadata_url, hashes=metadata_link_hash.as_dict()) def as_hashes(self) -> Hashes: return Hashes({k: [v] for k, v in self._hashes.items()}) diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 5f949d14d16..bbb631cf8a4 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -1073,4 +1073,20 @@ def expand_path(path: str) -> str: ], ) def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None: - assert LinkHash.split_hash_name_and_value(url) == result + assert LinkHash.find_hash_url_fragment(url) == result + + +@pytest.mark.parametrize( + "dist_info_metadata, result", + [ + ("sha256=aa113592bbe", LinkHash("sha256", "aa113592bbe")), + ("sha500=aa113592bbe", None), + ("true", None), + ("", None), + ("aa113592bbe", None), + ], +) +def test_pep658_hash_parsing( + dist_info_metadata: str, result: Optional[LinkHash] +) -> None: + assert LinkHash.parse_pep658_hash(dist_info_metadata) == result From 540f8fbb6d294032f318d58ebadec162f1ea0228 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Fri, 7 Apr 2023 23:49:38 +0200 Subject: [PATCH 397/730] Keep empty hash, as before --- src/pip/_internal/models/link.py | 2 +- tests/unit/test_collector.py | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 765dbce2c42..9dd3160d592 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -61,7 +61,7 @@ class LinkHash: # against Hashes when hash-checking is needed. 
This is easier to debug than # proactively discarding an invalid hex digest, as we handle incorrect hashes # and malformed hashes in the same place. - r"[#&]({choices})=([^&]+)".format( + r"[#&]({choices})=([^&]*)".format( choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES) ), ) diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index bbb631cf8a4..e855d78e126 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -1061,8 +1061,11 @@ def expand_path(path: str) -> str: ), # "xsha256" is not a valid algorithm, so we discard it. ("https://pypi.org/pip-18.0.tar.gz#xsha256=aa113592bbe", None), - # Discard empty hash. - ("https://pypi.org/pip-18.0.tar.gz#sha256=", None), + # Empty hash. + ( + "https://pypi.org/pip-18.0.tar.gz#sha256=", + LinkHash("sha256", ""), + ), ( "https://pypi.org/pip-18.0.tar.gz#md5=aa113592bbe", LinkHash("md5", "aa113592bbe"), @@ -1080,6 +1083,7 @@ def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None: "dist_info_metadata, result", [ ("sha256=aa113592bbe", LinkHash("sha256", "aa113592bbe")), + ("sha256=", LinkHash("sha256", "")), ("sha500=aa113592bbe", None), ("true", None), ("", None), From d4274db72739a552a9b5241431a5a2bdfb23de20 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 8 Apr 2023 00:08:17 +0200 Subject: [PATCH 398/730] Simplify assertion --- src/pip/_internal/models/link.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 9dd3160d592..1481a68bf97 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -67,7 +67,7 @@ class LinkHash: ) def __post_init__(self) -> None: - assert self._hash_url_fragment_re.match(f"#{self.name}={self.value}") + assert self.name in _SUPPORTED_HASHES @classmethod def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]: From 
89e7208784905d7db6b3bfe75f8be00cc8f65895 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 8 Apr 2023 10:45:07 +0200 Subject: [PATCH 399/730] Improve readability Co-authored-by: Tzu-ping Chung --- src/pip/_internal/models/link.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 1481a68bf97..e741c3283cd 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -74,9 +74,8 @@ def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]: """Parse a PEP 658 data-dist-info-metadata hash.""" if dist_info_metadata == "true": return None - try: - name, value = dist_info_metadata.split("=", 1) - except ValueError: + name, sep, value = dist_info_metadata.partition("=") + if not sep: return None if name not in _SUPPORTED_HASHES: return None From 2f1d4a0218f9daf013292be9656c362cbc1760e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 9 Apr 2023 11:03:49 +0200 Subject: [PATCH 400/730] Warn if --hash is used on a line without requirement in a requirements file --- news/11935.feature.rst | 1 + src/pip/_internal/req/req_file.py | 10 ++++++++++ 2 files changed, 11 insertions(+) create mode 100644 news/11935.feature.rst diff --git a/news/11935.feature.rst b/news/11935.feature.rst new file mode 100644 index 00000000000..b170ca1d8cd --- /dev/null +++ b/news/11935.feature.rst @@ -0,0 +1 @@ +Warn if ``--hash`` is used on a line without requirement in a requirements file. 
diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py index aced95e64c4..8428888d932 100644 --- a/src/pip/_internal/req/req_file.py +++ b/src/pip/_internal/req/req_file.py @@ -2,6 +2,7 @@ Requirements file parsing """ +import logging import optparse import os import re @@ -76,6 +77,8 @@ # the 'dest' string values SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ] +logger = logging.getLogger(__name__) + class ParsedRequirement: def __init__( @@ -209,6 +212,13 @@ def handle_option_line( options: Optional[optparse.Values] = None, session: Optional[PipSession] = None, ) -> None: + if opts.hashes: + logger.warning( + "%s line %s has --hash but no requirement, and will be ignored.", + filename, + lineno, + ) + if options: # percolate options upward if opts.require_hashes: From 0fbca36516ed9839da9e64ea9bd05518ed83b967 Mon Sep 17 00:00:00 2001 From: q0w <43147888+q0w@users.noreply.github.com> Date: Sun, 9 Apr 2023 14:24:37 +0300 Subject: [PATCH 401/730] Remove merge_config_settings --- src/pip/_internal/cli/req_command.py | 14 ++---- src/pip/_internal/utils/misc.py | 29 ----------- tests/functional/test_config_settings.py | 63 ++---------------------- tests/unit/test_utils_misc.py | 26 ---------- 4 files changed, 6 insertions(+), 126 deletions(-) delete mode 100644 tests/unit/test_utils_misc.py diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 7f41961b393..9ef16155c48 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -34,7 +34,6 @@ from pip._internal.req.req_install import InstallRequirement from pip._internal.resolution.base import BaseResolver from pip._internal.self_outdated_check import pip_self_version_check -from pip._internal.utils.misc import merge_config_settings from pip._internal.utils.temp_dir import ( TempDirectory, TempDirectoryTypeRegistry, @@ -435,21 +434,14 @@ def get_requirements( for parsed_req in 
parse_requirements( filename, finder=finder, options=options, session=session ): - req_config_settings = ( - parsed_req.options.get("config_settings") - if parsed_req.options - else None - ) - cli_config_settings = getattr(options, "config_settings", None) - config_settings = merge_config_settings( - req_config_settings, cli_config_settings - ) req_to_add = install_req_from_parsed_requirement( parsed_req, isolated=options.isolated_mode, use_pep517=options.use_pep517, user_supplied=True, - config_settings=config_settings, + config_settings=parsed_req.options.get("config_settings") + if parsed_req.options + else None, ) requirements.append(req_to_add) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 04d75d04de1..bfed8270252 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -61,7 +61,6 @@ "remove_auth_from_url", "check_externally_managed", "ConfiguredBuildBackendHookCaller", - "merge_config_settings", ] logger = logging.getLogger(__name__) @@ -729,31 +728,3 @@ def prepare_metadata_for_build_editable( config_settings=cs, _allow_fallback=_allow_fallback, ) - - -def merge_config_settings( - reqs_settings: Optional[Dict[str, Union[str, List[str]]]], - cli_settings: Optional[Dict[str, Union[str, List[str]]]], -) -> Optional[Dict[str, Union[str, List[str]]]]: - if not reqs_settings or not cli_settings: - return reqs_settings or cli_settings - - dd: Dict[str, Union[str, List[str]]] = {} - for d in (reqs_settings, cli_settings): - for k, v in d.items(): - if k in dd: - value = dd[k] - if isinstance(value, list): - if isinstance(v, list): - value.extend(v) - else: - value.append(v) - else: - if isinstance(v, str): - value = [value, v] - else: - value = [value, *v] - dd[k] = value - else: - dd[k] = v - return dd diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index 7d4e3c3d464..35b506a6b27 100644 --- a/tests/functional/test_config_settings.py +++ 
b/tests/functional/test_config_settings.py @@ -174,68 +174,11 @@ def test_install_config_reqs(script: PipTestEnvironment) -> None: ) script.scratch_path.joinpath("reqs.txt").write_text( 'foo --config-settings "--build-option=--cffi" ' - '--config-settings "--build-option=--avx2"' + '--config-settings "--build-option=--avx2" ' + "--config-settings FOO=BAR" ) script.pip("install", "--no-index", "-f", str(a_sdist.parent), "-r", "reqs.txt") script.assert_installed(foo="1.0") config = script.site_packages_path / "config.json" with open(config, "rb") as f: - assert json.load(f) == {"--build-option": ["--cffi", "--avx2"]} - - -def test_merge_cli_reqs_config_settings(script: PipTestEnvironment) -> None: - _, _, project_dir = make_project(script.scratch_path) - a_sdist = create_basic_sdist_for_package( - script, - "foo", - "1.0", - {"pyproject.toml": PYPROJECT_TOML, "backend/dummy_backend.py": BACKEND_SRC}, - ) - script.scratch_path.joinpath("reqs.txt").write_text( - 'foo --config-settings "FOO=HELLO" --config-settings "FOO=BAR" ' - '--config-settings "BAZ=BAR"' - ) - script.pip( - "install", - "--no-index", - "-f", - str(a_sdist.parent), - "-r", - "reqs.txt", - "--config-settings", - "FOO=FOOBAR", - "--config-settings", - "FOO=BARFOO", - ) - script.assert_installed(foo="1.0") - config = script.site_packages_path / "config.json" - with open(config, "rb") as f: - assert json.load(f) == { - "FOO": ["HELLO", "BAR", "FOOBAR", "BARFOO"], - "BAZ": "BAR", - } - - -def test_cli_config_settings_reqs(script: PipTestEnvironment) -> None: - _, _, project_dir = make_project(script.scratch_path) - a_sdist = create_basic_sdist_for_package( - script, - "foo", - "1.0", - {"pyproject.toml": PYPROJECT_TOML, "backend/dummy_backend.py": BACKEND_SRC}, - ) - script.scratch_path.joinpath("reqs.txt").write_text("foo") - script.pip( - "install", - "--no-index", - "-f", - str(a_sdist.parent), - "-r", - "reqs.txt", - "--config-settings", - "FOO=BAR", - ) - script.assert_installed(foo="1.0") - config = 
script.site_packages_path / "config.json" - with open(config, "rb") as f: - assert json.load(f) == {"FOO": "BAR"} + assert json.load(f) == {"--build-option": ["--cffi", "--avx2"], "FOO": "BAR"} diff --git a/tests/unit/test_utils_misc.py b/tests/unit/test_utils_misc.py deleted file mode 100644 index fed75617b23..00000000000 --- a/tests/unit/test_utils_misc.py +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Dict, List, Optional, Union - -import pytest - -from pip._internal.utils.misc import merge_config_settings - - -@pytest.mark.parametrize( - "reqs, cli, expected", - [ - ({"foo": "bar"}, {"foo": ["baz"]}, {"foo": ["bar", "baz"]}), - ({"foo": "bar"}, {"foo": "baz"}, {"foo": ["bar", "baz"]}), - ({"foo": ["bar"]}, {"foo": ["baz"]}, {"foo": ["bar", "baz"]}), - ({"foo": ["bar"]}, {"foo": "baz"}, {"foo": ["bar", "baz"]}), - ({"foo": "bar"}, {"foo": ["baz"]}, {"foo": ["bar", "baz"]}), - ({"foo": "bar"}, None, {"foo": "bar"}), - (None, {"foo": ["bar"]}, {"foo": ["bar"]}), - (None, None, None), - ], -) -def test_merge_config_settings( - reqs: Optional[Dict[str, Union[str, List[str]]]], - cli: Optional[Dict[str, Union[str, List[str]]]], - expected: Optional[Dict[str, Union[str, List[str]]]], -) -> None: - assert merge_config_settings(reqs, cli) == expected From 7cb863e2fcb1152f3a2160df3c3581a5e4be1ff0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 9 Apr 2023 16:41:04 +0200 Subject: [PATCH 402/730] Stop propagating `config_settings` to dependencies (#11941) --- .../reference/build-system/pyproject-toml.md | 6 + news/11941.feature.rst | 4 + src/pip/_internal/cli/req_command.py | 1 - src/pip/_internal/req/constructors.py | 2 - tests/functional/test_config_settings.py | 149 ++++++++++++++++-- 5 files changed, 144 insertions(+), 18 deletions(-) create mode 100644 news/11941.feature.rst diff --git a/docs/html/reference/build-system/pyproject-toml.md b/docs/html/reference/build-system/pyproject-toml.md index d2ec0323e6a..a42a3b8c484 100644 --- 
a/docs/html/reference/build-system/pyproject-toml.md +++ b/docs/html/reference/build-system/pyproject-toml.md @@ -116,6 +116,12 @@ multiple times, in order to specify multiple settings). The supplied configuration settings are passed to every backend hook call. +Configuration settings provided via `--config-settings` command line options (or the +equivalent environment variables or configuration file entries) are passed to the build +of requirements explicitly provided as pip command line arguments. They are not passed +to the build of dependencies, or to the build of requirements provided in requirement +files. + ## Build output It is the responsibility of the build backend to ensure that the output is diff --git a/news/11941.feature.rst b/news/11941.feature.rst new file mode 100644 index 00000000000..404f2cb2de6 --- /dev/null +++ b/news/11941.feature.rst @@ -0,0 +1,4 @@ +Stop propagating CLI ``--config-settings`` to the build dependencies. They already did +not propagate to requirements provided in requirement files. To pass the same config +settings to several requirements, users should provide the requirements as CLI +arguments. 
diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index bb33403195b..8c326013223 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -344,7 +344,6 @@ def make_resolver( install_req_from_req_string, isolated=options.isolated_mode, use_pep517=use_pep517, - config_settings=getattr(options, "config_settings", None), ) resolver_variant = cls.determine_resolver_variant(options) # The long import name and duplicated invocation is needed to convince diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index dc82a7e4f91..31c2421d761 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -416,7 +416,6 @@ def install_req_from_req_string( isolated: bool = False, use_pep517: Optional[bool] = None, user_supplied: bool = False, - config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, ) -> InstallRequirement: try: req = get_requirement(req_string) @@ -446,7 +445,6 @@ def install_req_from_req_string( isolated=isolated, use_pep517=use_pep517, user_supplied=user_supplied, - config_settings=config_settings, ) diff --git a/tests/functional/test_config_settings.py b/tests/functional/test_config_settings.py index b1e15c01031..91643a3dc34 100644 --- a/tests/functional/test_config_settings.py +++ b/tests/functional/test_config_settings.py @@ -1,8 +1,10 @@ import json +import tarfile from pathlib import Path -from typing import Tuple +from typing import List, Optional, Tuple from zipfile import ZipFile +from pip._internal.utils.urls import path_to_url from tests.lib import PipTestEnvironment PYPROJECT_TOML = """\ @@ -36,9 +38,10 @@ Author: None Author-email: none@example.org License: MIT +{requires_dist} """ -def make_wheel(z, project, version, files): +def make_wheel(z, project, version, requires_dist, files): record = [] def add_file(name, data): data = data.encode("utf-8") @@ -48,7 +51,9 @@ def add_file(name, 
data): record.append((name, f"sha256={hash}", len(data))) distinfo = f"{project}-{version}.dist-info" add_file(f"{distinfo}/WHEEL", WHEEL) - add_file(f"{distinfo}/METADATA", METADATA.format(project=project, version=version)) + add_file(f"{distinfo}/METADATA", METADATA.format( + project=project, version=version, requires_dist=requires_dist + )) for name, data in files: add_file(name, data) record_name = f"{distinfo}/RECORD" @@ -70,14 +75,14 @@ def build_wheel( ): if config_settings is None: config_settings = {} - w = os.path.join(wheel_directory, "foo-1.0-py3-none-any.whl") + w = os.path.join(wheel_directory, "{{name}}-1.0-py3-none-any.whl") with open(w, "wb") as f: with ZipFile(f, "w") as z: make_wheel( - z, "foo", "1.0", - [("config.json", json.dumps(config_settings))] + z, "{{name}}", "1.0", "{{requires_dist}}", + [("{{name}}-config.json", json.dumps(config_settings))] ) - return "foo-1.0-py3-none-any.whl" + return "{{name}}-1.0-py3-none-any.whl" build_editable = build_wheel @@ -85,14 +90,20 @@ def build_wheel( ''' -def make_project(path: Path) -> Tuple[str, str, Path]: - name = "foo" +def make_project( + path: Path, name: str = "foo", dependencies: Optional[List[str]] = None +) -> Tuple[str, str, Path]: version = "1.0" project_dir = path / name backend = project_dir / "backend" backend.mkdir(parents=True) (project_dir / "pyproject.toml").write_text(PYPROJECT_TOML) - (backend / "dummy_backend.py").write_text(BACKEND_SRC) + requires_dist = [f"Requires-Dist: {dep}" for dep in dependencies or []] + (backend / "dummy_backend.py").write_text( + BACKEND_SRC.replace("{{name}}", name).replace( + "{{requires_dist}}", "\n".join(requires_dist) + ) + ) return name, version, project_dir @@ -108,25 +119,133 @@ def test_backend_sees_config(script: PipTestEnvironment) -> None: wheel_file_path = script.cwd / wheel_file_name with open(wheel_file_path, "rb") as f: with ZipFile(f) as z: - output = z.read("config.json") + output = z.read(f"{name}-config.json") + assert 
json.loads(output) == {"FOO": "Hello"} + + +def test_backend_sees_config_via_constraint(script: PipTestEnvironment) -> None: + name, version, project_dir = make_project(script.scratch_path) + constraints_file = script.scratch_path / "constraints.txt" + constraints_file.write_text(f"{name} @ {path_to_url(str(project_dir))}") + script.pip( + "wheel", + "--config-settings", + "FOO=Hello", + "-c", + "constraints.txt", + name, + ) + wheel_file_name = f"{name}-{version}-py3-none-any.whl" + wheel_file_path = script.cwd / wheel_file_name + with open(wheel_file_path, "rb") as f: + with ZipFile(f) as z: + output = z.read(f"{name}-config.json") + assert json.loads(output) == {"FOO": "Hello"} + + +def test_backend_sees_config_via_sdist(script: PipTestEnvironment) -> None: + name, version, project_dir = make_project(script.scratch_path) + dists_dir = script.scratch_path / "dists" + dists_dir.mkdir() + with tarfile.open(dists_dir / f"{name}-{version}.tar.gz", "w:gz") as dist_tar: + dist_tar.add(project_dir, arcname=name) + script.pip( + "wheel", + "--config-settings", + "FOO=Hello", + "-f", + dists_dir, + name, + ) + wheel_file_name = f"{name}-{version}-py3-none-any.whl" + wheel_file_path = script.cwd / wheel_file_name + with open(wheel_file_path, "rb") as f: + with ZipFile(f) as z: + output = z.read(f"{name}-config.json") assert json.loads(output) == {"FOO": "Hello"} +def test_req_file_does_not_see_config(script: PipTestEnvironment) -> None: + """Test that CLI config settings do not propagate to requirement files.""" + name, _, project_dir = make_project(script.scratch_path) + reqs_file = script.scratch_path / "reqs.txt" + reqs_file.write_text(f"{project_dir}") + script.pip( + "install", + "--config-settings", + "FOO=Hello", + "-r", + reqs_file, + ) + config = script.site_packages_path / f"{name}-config.json" + with open(config, "rb") as f: + assert json.load(f) == {} + + +def test_dep_does_not_see_config(script: PipTestEnvironment) -> None: + """Test that CLI config settings 
do not propagate to dependencies.""" + _, _, bar_project_dir = make_project(script.scratch_path, name="bar") + _, _, foo_project_dir = make_project( + script.scratch_path, + name="foo", + dependencies=[f"bar @ {path_to_url(str(bar_project_dir))}"], + ) + script.pip( + "install", + "--config-settings", + "FOO=Hello", + foo_project_dir, + ) + foo_config = script.site_packages_path / "foo-config.json" + with open(foo_config, "rb") as f: + assert json.load(f) == {"FOO": "Hello"} + bar_config = script.site_packages_path / "bar-config.json" + with open(bar_config, "rb") as f: + assert json.load(f) == {} + + +def test_dep_in_req_file_does_not_see_config(script: PipTestEnvironment) -> None: + """Test that CLI config settings do not propagate to dependencies found in + requirement files.""" + _, _, bar_project_dir = make_project(script.scratch_path, name="bar") + _, _, foo_project_dir = make_project( + script.scratch_path, + name="foo", + dependencies=["bar"], + ) + reqs_file = script.scratch_path / "reqs.txt" + reqs_file.write_text(f"bar @ {path_to_url(str(bar_project_dir))}") + script.pip( + "install", + "--config-settings", + "FOO=Hello", + "-r", + reqs_file, + foo_project_dir, + ) + foo_config = script.site_packages_path / "foo-config.json" + with open(foo_config, "rb") as f: + assert json.load(f) == {"FOO": "Hello"} + bar_config = script.site_packages_path / "bar-config.json" + with open(bar_config, "rb") as f: + assert json.load(f) == {} + + def test_install_sees_config(script: PipTestEnvironment) -> None: - _, _, project_dir = make_project(script.scratch_path) + name, _, project_dir = make_project(script.scratch_path) script.pip( "install", "--config-settings", "FOO=Hello", project_dir, ) - config = script.site_packages_path / "config.json" + config = script.site_packages_path / f"{name}-config.json" with open(config, "rb") as f: assert json.load(f) == {"FOO": "Hello"} def test_install_editable_sees_config(script: PipTestEnvironment) -> None: - _, _, project_dir = 
make_project(script.scratch_path) + name, _, project_dir = make_project(script.scratch_path) script.pip( "install", "--config-settings", @@ -134,6 +253,6 @@ def test_install_editable_sees_config(script: PipTestEnvironment) -> None: "--editable", project_dir, ) - config = script.site_packages_path / "config.json" + config = script.site_packages_path / f"{name}-config.json" with open(config, "rb") as f: assert json.load(f) == {"FOO": "Hello"} From ebcc368a517995d341c2b1a5d1dc5fb12437081e Mon Sep 17 00:00:00 2001 From: Dos Moonen Date: Mon, 10 Apr 2023 11:05:34 +0200 Subject: [PATCH 403/730] Alter test_prompt_for_keyring_if_needed to make sure we test the default is `auto`. --- tests/functional/test_install_config.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 563b5604a8e..9f8a8067787 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -370,7 +370,7 @@ def auth_needed(request: pytest.FixtureRequest) -> bool: return request.param -@pytest.fixture(params=("disabled", "import", "subprocess", "auto")) +@pytest.fixture(params=(None, "disabled", "import", "subprocess", "auto")) def keyring_provider(request: pytest.FixtureRequest) -> str: return request.param @@ -389,17 +389,20 @@ def flags( keyring_provider_implementation: str, ) -> List[str]: if ( - keyring_provider != "auto" + keyring_provider not in [None, "auto"] and keyring_provider_implementation != keyring_provider ): pytest.skip() - flags = ["--keyring-provider", keyring_provider] + flags = [] + if keyring_provider is not None: + flags.append("--keyring-provider") + flags.append(keyring_provider) if not interactive: flags.append("--no-input") if auth_needed: if keyring_provider_implementation == "disabled" or ( - not interactive and keyring_provider == "auto" + not interactive and keyring_provider in [None, "auto"] ): 
request.applymarker(pytest.mark.xfail()) return flags @@ -441,7 +444,10 @@ def test_prompt_for_keyring_if_needed( virtualenv = virtualenv_factory(workspace.joinpath("venv")) script = script_factory(workspace.joinpath("venv"), virtualenv, environ=environ) - if keyring_provider != "auto" or keyring_provider_implementation != "subprocess": + if ( + keyring_provider not in [None, "auto"] + or keyring_provider_implementation != "subprocess" + ): script.pip( "install", "keyring", From 9605b97b48d10b56a00c885449bbc285d1bcd039 Mon Sep 17 00:00:00 2001 From: Dos Moonen Date: Mon, 10 Apr 2023 11:06:34 +0200 Subject: [PATCH 404/730] Correct default from `disabled` to `auto`. See #11939. --- news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst | 0 src/pip/_internal/cli/cmdoptions.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst diff --git a/news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst b/news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 7f72332db56..02ba6082793 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -257,7 +257,7 @@ class PipOption(Option): "--keyring-provider", dest="keyring_provider", choices=["auto", "disabled", "import", "subprocess"], - default="disabled", + default="auto", help=( "Enable the credential lookup via the keyring library if user input is allowed." " Specify which mechanism to use [disabled, import, subprocess]." 
From 0ffc54dca3dd0f64eb9498a37908ae756294da7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 8 Apr 2023 17:04:37 +0200 Subject: [PATCH 405/730] Add tests about link hashes validation --- tests/functional/test_install.py | 97 ++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 72c72f35c5d..b749372c13a 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1,3 +1,4 @@ +import hashlib import os import re import ssl @@ -13,6 +14,7 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.models.index import PyPI, TestPyPI from pip._internal.utils.misc import rmtree +from pip._internal.utils.urls import path_to_url from tests.conftest import CertFactory from tests.lib import ( PipTestEnvironment, @@ -616,6 +618,101 @@ def test_hashed_install_failure(script: PipTestEnvironment, tmpdir: Path) -> Non assert len(result.files_created) == 0 +def test_link_hash_pass_require_hashes( + script: PipTestEnvironment, shared_data: TestData +) -> None: + """Test that a good hash in user provided direct URL is + considered valid for --require-hashes.""" + url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz"))) + url = ( + f"{url}#sha256=" + "393043e672415891885c9a2a0929b1af95fb866d6ca016b42d2e6ce53619b653" + ) + script.pip_install_local("--no-deps", "--require-hashes", url) + + +def test_bad_link_hash_install_failure( + script: PipTestEnvironment, shared_data: TestData +) -> None: + """Test that wrong hash in direct URL stop installation.""" + url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz"))) + url = f"{url}#sha256=invalidhash" + result = script.pip_install_local("--no-deps", url, expect_error=True) + assert "THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr + + +def test_link_hash_in_dep_fails_require_hashes( + script: PipTestEnvironment, tmp_path: 
Path, shared_data: TestData +) -> None: + """Test that a good hash in direct URL dependency is not considered + for --require-hashes.""" + # Create a project named pkga that depends on the simple-1.0.tar.gz with a direct + # URL including a hash. + simple_url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz"))) + simple_url_with_hash = ( + f"{simple_url}#sha256=" + "393043e672415891885c9a2a0929b1af95fb866d6ca016b42d2e6ce53619b653" + ) + project_path = tmp_path / "pkga" + project_path.mkdir() + project_path.joinpath("pyproject.toml").write_text( + textwrap.dedent( + f"""\ + [project] + name = "pkga" + version = "1.0" + dependencies = ["simple @ {simple_url_with_hash}"] + """ + ) + ) + # Build a wheel for pkga and compute its hash. + wheelhouse = tmp_path / "wheehouse" + wheelhouse.mkdir() + script.pip("wheel", "--no-deps", "-w", wheelhouse, project_path) + digest = hashlib.sha256( + wheelhouse.joinpath("pkga-1.0-py3-none-any.whl").read_bytes() + ).hexdigest() + # Install pkga from a requirements file with hash, using --require-hashes. + # This should fail because we have not provided a hash for the 'simple' dependency. 
+ with requirements_file(f"pkga==1.0 --hash sha256:{digest}", tmp_path) as reqs_file: + result = script.pip( + "install", + "--no-build-isolation", + "--require-hashes", + "--no-index", + "-f", + wheelhouse, + "-r", + reqs_file, + expect_error=True, + ) + assert "Hashes are required in --require-hashes mode" in result.stderr + + +def test_bad_link_hash_in_dep_install_failure( + script: PipTestEnvironment, tmp_path: Path, shared_data: TestData +) -> None: + """Test that wrong hash in direct URL dependency stops installation.""" + url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz"))) + url = f"{url}#sha256=invalidhash" + project_path = tmp_path / "pkga" + project_path.mkdir() + project_path.joinpath("pyproject.toml").write_text( + textwrap.dedent( + f"""\ + [project] + name = "pkga" + version = "1.0" + dependencies = ["simple @ {url}"] + """ + ) + ) + result = script.pip_install_local( + "--no-build-isolation", project_path, expect_error=True + ) + assert "THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr, result.stderr + + def assert_re_match(pattern: str, text: str) -> None: assert re.search(pattern, text), f"Could not find {pattern!r} in {text!r}" From f5f0302516e4adc5b8541832da803784d44b0a0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 8 Apr 2023 18:57:37 +0200 Subject: [PATCH 406/730] Fix --require-hashes trusting link hashes When a direct URL with hash is provided as a dependency, --require-hash incorrectly considered the link hash as trusted. 
--- news/11938.bugfix.rst | 3 +++ src/pip/_internal/req/req_install.py | 7 ++++++- 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 news/11938.bugfix.rst diff --git a/news/11938.bugfix.rst b/news/11938.bugfix.rst new file mode 100644 index 00000000000..b299f8e4ff5 --- /dev/null +++ b/news/11938.bugfix.rst @@ -0,0 +1,3 @@ +When package A depends on package B provided as a direct URL dependency including a hash +embedded in the link, the ``--require-hashes`` option did not warn when user supplied hashes +were missing for package B. diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index baa6716381c..e2353f0321a 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -287,7 +287,12 @@ def hashes(self, trust_internet: bool = True) -> Hashes: """ good_hashes = self.hash_options.copy() - link = self.link if trust_internet else self.original_link + if trust_internet: + link = self.link + elif self.original_link and self.user_supplied: + link = self.original_link + else: + link = None if link and link.hash: good_hashes.setdefault(link.hash_name, []).append(link.hash) return Hashes(good_hashes) From 453a5a7e0738c9c0453a3a23db4ef74e9e4e41d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 8 Apr 2023 19:16:50 +0200 Subject: [PATCH 407/730] Add test for combination of invalid link hash and good --hash --- tests/functional/test_install.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index b749372c13a..e50779688f1 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -634,13 +634,29 @@ def test_link_hash_pass_require_hashes( def test_bad_link_hash_install_failure( script: PipTestEnvironment, shared_data: TestData ) -> None: - """Test that wrong hash in direct URL stop installation.""" + """Test that wrong hash in 
direct URL stops installation.""" url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz"))) url = f"{url}#sha256=invalidhash" result = script.pip_install_local("--no-deps", url, expect_error=True) assert "THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr +def test_bad_link_hash_good_user_hash_install_success( + script: PipTestEnvironment, shared_data: TestData, tmp_path: Path +) -> None: + """Test that wrong hash in direct URL ignored when good --hash provided. + + This behaviour may be accidental? + """ + url = path_to_url(str(shared_data.packages.joinpath("simple-1.0.tar.gz"))) + url = f"{url}#sha256=invalidhash" + digest = "393043e672415891885c9a2a0929b1af95fb866d6ca016b42d2e6ce53619b653" + with requirements_file( + f"simple @ {url} --hash sha256:{digest}", tmp_path + ) as reqs_file: + script.pip_install_local("--no-deps", "--require-hashes", "-r", reqs_file) + + def test_link_hash_in_dep_fails_require_hashes( script: PipTestEnvironment, tmp_path: Path, shared_data: TestData ) -> None: From d46fd99de880566be6c02c4828f80722f4eda5bd Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Thu, 16 Mar 2023 10:54:11 -0700 Subject: [PATCH 408/730] cache normalize_path in req_uninstall and is_local --- src/pip/_internal/req/req_uninstall.py | 11 ++++++----- src/pip/_internal/utils/misc.py | 15 ++++++++++++++- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 15b67385c86..9b5cbf4a0bf 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -11,7 +11,7 @@ from pip._internal.utils.compat import WINDOWS from pip._internal.utils.egg_link import egg_link_path_from_location from pip._internal.utils.logging import getLogger, indent_log -from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree +from pip._internal.utils.misc import ask, is_local, normalize_path, normalize_path_cached, renames, 
rmtree from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory logger = getLogger(__name__) @@ -312,6 +312,7 @@ def __init__(self, dist: BaseDistribution) -> None: self._pth: Dict[str, UninstallPthEntries] = {} self._dist = dist self._moved_paths = StashedUninstallPathSet() + normalize_path_cached.cache_clear() def _permitted(self, path: str) -> bool: """ @@ -326,7 +327,7 @@ def add(self, path: str) -> None: # we normalize the head to resolve parent directory symlinks, but not # the tail, since we only want to uninstall symlinks, not their targets - path = os.path.join(normalize_path(head), os.path.normcase(tail)) + path = os.path.join(normalize_path_cached(head), os.path.normcase(tail)) if not os.path.exists(path): return @@ -341,7 +342,7 @@ def add(self, path: str) -> None: self.add(cache_from_source(path)) def add_pth(self, pth_file: str, entry: str) -> None: - pth_file = normalize_path(pth_file) + pth_file = normalize_path_cached(pth_file) if self._permitted(pth_file): if pth_file not in self._pth: self._pth[pth_file] = UninstallPthEntries(pth_file) @@ -434,7 +435,7 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": ) return cls(dist) - normalized_dist_location = normalize_path(dist_location) + normalized_dist_location = normalize_path_cached(dist_location) if not dist.local: logger.info( "Not uninstalling %s at %s, outside environment %s", @@ -531,7 +532,7 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": # above, so this only covers the setuptools-style editable. 
with open(develop_egg_link) as fh: link_pointer = os.path.normcase(fh.readline().strip()) - normalized_link_pointer = normalize_path(link_pointer) + normalized_link_pointer = normalize_path_cached(link_pointer) assert os.path.samefile( normalized_link_pointer, normalized_dist_location ), ( diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index bfed8270252..7cd0eddde75 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -3,6 +3,7 @@ import contextlib import errno +import functools import getpass import hashlib import io @@ -294,6 +295,17 @@ def normalize_path(path: str, resolve_symlinks: bool = True) -> str: return os.path.normcase(path) +@functools.lru_cache +def normalize_path_cached(path: str, resolve_symlinks: bool = True) -> str: + """ + Cache the results of normalize_path when called frequently during certain + operations. Separate function because it is probably unsafe to + cache normalize_path in the general case, e.g. symlinks can be changed + while the process is running. 
+ """ + return normalize_path(str, resolve_symlinks) + + def splitext(path: str) -> Tuple[str, str]: """Like os.path.splitext, but take off .tar too""" base, ext = posixpath.splitext(path) @@ -331,7 +343,8 @@ def is_local(path: str) -> bool: """ if not running_under_virtualenv(): return True - return path.startswith(normalize_path(sys.prefix)) + # Safe to call cached because sys.prefix shouldn't change + return path.startswith(normalize_path_cached(sys.prefix)) def write_output(msg: Any, *args: Any) -> None: From 3e23b57db5279050b2ecc9ec3bebc3f088619344 Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Thu, 16 Mar 2023 11:32:39 -0700 Subject: [PATCH 409/730] typo --- src/pip/_internal/utils/misc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 7cd0eddde75..ff72415392e 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -303,7 +303,7 @@ def normalize_path_cached(path: str, resolve_symlinks: bool = True) -> str: cache normalize_path in the general case, e.g. symlinks can be changed while the process is running. 
""" - return normalize_path(str, resolve_symlinks) + return normalize_path(path, resolve_symlinks) def splitext(path: str) -> Tuple[str, str]: From 1bb849682dbc0b16770b1b059fb5bccf8ee96f52 Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Thu, 16 Mar 2023 12:56:00 -0700 Subject: [PATCH 410/730] Add explicit lru_cache maxsize for Python 3.7 --- src/pip/_internal/utils/misc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index ff72415392e..50c6e77ac41 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -295,7 +295,7 @@ def normalize_path(path: str, resolve_symlinks: bool = True) -> str: return os.path.normcase(path) -@functools.lru_cache +@functools.lru_cache(maxsize=128) def normalize_path_cached(path: str, resolve_symlinks: bool = True) -> str: """ Cache the results of normalize_path when called frequently during certain From 58882a1642129df8cbdf8ed8fff75d458967afe3 Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Fri, 17 Mar 2023 13:36:02 -0700 Subject: [PATCH 411/730] Move normalize_path caching to an instance on UninstallPathSet --- src/pip/_internal/req/req_uninstall.py | 16 ++++++++++------ src/pip/_internal/utils/misc.py | 15 +-------------- 2 files changed, 11 insertions(+), 20 deletions(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 9b5cbf4a0bf..9136ae930c6 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -13,6 +13,7 @@ from pip._internal.utils.logging import getLogger, indent_log from pip._internal.utils.misc import ask, is_local, normalize_path, normalize_path_cached, renames, rmtree from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory +from pip._internal.utils.virtualenv import running_under_virtualenv logger = getLogger(__name__) @@ -312,7 +313,7 @@ def __init__(self, dist: BaseDistribution) -> None: 
self._pth: Dict[str, UninstallPthEntries] = {} self._dist = dist self._moved_paths = StashedUninstallPathSet() - normalize_path_cached.cache_clear() + self._normalize_path_cached = functools.lru_cache(maxsize=256)(normalize_path) def _permitted(self, path: str) -> bool: """ @@ -320,14 +321,17 @@ def _permitted(self, path: str) -> bool: remove/modify, False otherwise. """ - return is_local(path) + # aka is_local, but caching normalized sys.prefix + if not running_under_virtualenv(): + return True + return path.startswith(self._normalize_path_cached(sys.prefix)) def add(self, path: str) -> None: head, tail = os.path.split(path) # we normalize the head to resolve parent directory symlinks, but not # the tail, since we only want to uninstall symlinks, not their targets - path = os.path.join(normalize_path_cached(head), os.path.normcase(tail)) + path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail)) if not os.path.exists(path): return @@ -342,7 +346,7 @@ def add(self, path: str) -> None: self.add(cache_from_source(path)) def add_pth(self, pth_file: str, entry: str) -> None: - pth_file = normalize_path_cached(pth_file) + pth_file = self._normalize_path_cached(pth_file) if self._permitted(pth_file): if pth_file not in self._pth: self._pth[pth_file] = UninstallPthEntries(pth_file) @@ -435,7 +439,7 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": ) return cls(dist) - normalized_dist_location = normalize_path_cached(dist_location) + normalized_dist_location = normalize_path(dist_location) if not dist.local: logger.info( "Not uninstalling %s at %s, outside environment %s", @@ -532,7 +536,7 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": # above, so this only covers the setuptools-style editable. 
with open(develop_egg_link) as fh: link_pointer = os.path.normcase(fh.readline().strip()) - normalized_link_pointer = normalize_path_cached(link_pointer) + normalized_link_pointer = paths_to_remove._normalize_path_cached(link_pointer) assert os.path.samefile( normalized_link_pointer, normalized_dist_location ), ( diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 50c6e77ac41..bfed8270252 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -3,7 +3,6 @@ import contextlib import errno -import functools import getpass import hashlib import io @@ -295,17 +294,6 @@ def normalize_path(path: str, resolve_symlinks: bool = True) -> str: return os.path.normcase(path) -@functools.lru_cache(maxsize=128) -def normalize_path_cached(path: str, resolve_symlinks: bool = True) -> str: - """ - Cache the results of normalize_path when called frequently during certain - operations. Separate function because it is probably unsafe to - cache normalize_path in the general case, e.g. symlinks can be changed - while the process is running. 
- """ - return normalize_path(path, resolve_symlinks) - - def splitext(path: str) -> Tuple[str, str]: """Like os.path.splitext, but take off .tar too""" base, ext = posixpath.splitext(path) @@ -343,8 +331,7 @@ def is_local(path: str) -> bool: """ if not running_under_virtualenv(): return True - # Safe to call cached because sys.prefix shouldn't change - return path.startswith(normalize_path_cached(sys.prefix)) + return path.startswith(normalize_path(sys.prefix)) def write_output(msg: Any, *args: Any) -> None: From 82f1d14772b557a705dae7d49192b8c5b92bf05a Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Fri, 17 Mar 2023 14:04:35 -0700 Subject: [PATCH 412/730] Remove reference to utils.misc.normalize_path_cached --- src/pip/_internal/req/req_uninstall.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 9136ae930c6..1e06c43551b 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -11,7 +11,7 @@ from pip._internal.utils.compat import WINDOWS from pip._internal.utils.egg_link import egg_link_path_from_location from pip._internal.utils.logging import getLogger, indent_log -from pip._internal.utils.misc import ask, is_local, normalize_path, normalize_path_cached, renames, rmtree +from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory from pip._internal.utils.virtualenv import running_under_virtualenv From 75db61099d470180cfba3a8f76bc73d26a341719 Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Mon, 20 Mar 2023 10:43:33 -0700 Subject: [PATCH 413/730] Mock _permitted instead of is_local --- tests/unit/test_req_uninstall.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/unit/test_req_uninstall.py b/tests/unit/test_req_uninstall.py index 4d99acfd30d..f990edc2bef 100644 --- 
a/tests/unit/test_req_uninstall.py +++ b/tests/unit/test_req_uninstall.py @@ -21,7 +21,7 @@ # Pretend all files are local, so UninstallPathSet accepts files in the tmpdir, # outside the virtualenv -def mock_is_local(path: str) -> bool: +def mock_permitted(ups: UninstallPathSet, path: str) -> bool: return True @@ -129,7 +129,7 @@ def in_tmpdir(paths: List[str]) -> List[str]: class TestUninstallPathSet: def test_add(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) + monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) # Fix case for windows tests file_extant = os.path.normcase(os.path.join(tmpdir, "foo")) file_nonexistent = os.path.normcase(os.path.join(tmpdir, "nonexistent")) @@ -145,7 +145,7 @@ def test_add(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None: assert ups._paths == {file_extant} def test_add_pth(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) + monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) # Fix case for windows tests tmpdir = os.path.normcase(tmp_path) on_windows = sys.platform == "win32" @@ -175,7 +175,7 @@ def test_add_pth(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: @pytest.mark.skipif("sys.platform == 'win32'") def test_add_symlink(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) + monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) f = os.path.join(tmpdir, "foo") with open(f, "w"): pass @@ -187,7 +187,7 @@ def test_add_symlink(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> Non assert ups._paths == {foo_link} def test_compact_shorter_path(self, monkeypatch: pytest.MonkeyPatch) -> 
None: - monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) + monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) monkeypatch.setattr("os.path.exists", lambda p: True) # This deals with nt/posix path differences short_path = os.path.normcase( @@ -202,7 +202,7 @@ def test_compact_shorter_path(self, monkeypatch: pytest.MonkeyPatch) -> None: def test_detect_symlink_dirs( self, monkeypatch: pytest.MonkeyPatch, tmpdir: Path ) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall, "is_local", mock_is_local) + monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) # construct 2 paths: # tmpdir/dir/file From 8e6604c4c165a40f0ea615278e8cca9542c3fc97 Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Wed, 22 Mar 2023 13:40:04 -0700 Subject: [PATCH 414/730] Create 11889.bugfix.rst --- news/11889.bugfix.rst | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 news/11889.bugfix.rst diff --git a/news/11889.bugfix.rst b/news/11889.bugfix.rst new file mode 100644 index 00000000000..e760fd1fbf4 --- /dev/null +++ b/news/11889.bugfix.rst @@ -0,0 +1,4 @@ +The ``uninstall`` and ``install --force-reinstall`` commands no longer call +``normalize_path()`` repeatedly on the same paths. Instead, these results are +cached for the duration of an uninstall operation, resulting in improved +performance, particularly on Windows. 
From 6a8e403148be555cda5046932eadfed4dde2b035 Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Thu, 23 Mar 2023 14:23:38 -0700 Subject: [PATCH 415/730] Remove is_local import --- src/pip/_internal/req/req_uninstall.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 1e06c43551b..018e011d5c4 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -11,7 +11,7 @@ from pip._internal.utils.compat import WINDOWS from pip._internal.utils.egg_link import egg_link_path_from_location from pip._internal.utils.logging import getLogger, indent_log -from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree +from pip._internal.utils.misc import ask, normalize_path, renames, rmtree from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory from pip._internal.utils.virtualenv import running_under_virtualenv From 14736eac9c47d9ad30a79dfa60b1bc7dce4feb36 Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Thu, 23 Mar 2023 14:26:17 -0700 Subject: [PATCH 416/730] Document _normalize_path_cached --- src/pip/_internal/req/req_uninstall.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 018e011d5c4..34d02ccaf15 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -313,6 +313,9 @@ def __init__(self, dist: BaseDistribution) -> None: self._pth: Dict[str, UninstallPthEntries] = {} self._dist = dist self._moved_paths = StashedUninstallPathSet() + # Create local cache of normalize_path results. Creating an UninstallPathSet + # can result in hundreds/thousands of redundant calls to normalize_path with the same + # args, which hurts performance. 
self._normalize_path_cached = functools.lru_cache(maxsize=256)(normalize_path) def _permitted(self, path: str) -> bool: From a84f7bf0b041ccf6d8ad88ad0dcd1a355f1ec156 Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Thu, 23 Mar 2023 14:38:52 -0700 Subject: [PATCH 417/730] Fix overly long line length --- src/pip/_internal/req/req_uninstall.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 34d02ccaf15..9320a21d272 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -314,8 +314,8 @@ def __init__(self, dist: BaseDistribution) -> None: self._dist = dist self._moved_paths = StashedUninstallPathSet() # Create local cache of normalize_path results. Creating an UninstallPathSet - # can result in hundreds/thousands of redundant calls to normalize_path with the same - # args, which hurts performance. + # can result in hundreds/thousands of redundant calls to normalize_path with + # the same args, which hurts performance. 
self._normalize_path_cached = functools.lru_cache(maxsize=256)(normalize_path) def _permitted(self, path: str) -> bool: From 4221c23a2bbd678cc3e41b4d84bad1a90cedc856 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 23 Mar 2023 21:42:30 +0000 Subject: [PATCH 418/730] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/pip/_internal/req/req_uninstall.py | 4 +++- tests/unit/test_req_uninstall.py | 30 +++++++++++++++++++++----- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 9320a21d272..02df22f06c4 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -539,7 +539,9 @@ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet": # above, so this only covers the setuptools-style editable. with open(develop_egg_link) as fh: link_pointer = os.path.normcase(fh.readline().strip()) - normalized_link_pointer = paths_to_remove._normalize_path_cached(link_pointer) + normalized_link_pointer = paths_to_remove._normalize_path_cached( + link_pointer + ) assert os.path.samefile( normalized_link_pointer, normalized_dist_location ), ( diff --git a/tests/unit/test_req_uninstall.py b/tests/unit/test_req_uninstall.py index f990edc2bef..b4ae97350e0 100644 --- a/tests/unit/test_req_uninstall.py +++ b/tests/unit/test_req_uninstall.py @@ -129,7 +129,11 @@ def in_tmpdir(paths: List[str]) -> List[str]: class TestUninstallPathSet: def test_add(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) + monkeypatch.setattr( + pip._internal.req.req_uninstall.UninstallPathSet, + "_permitted", + mock_permitted, + ) # Fix case for windows tests file_extant = os.path.normcase(os.path.join(tmpdir, "foo")) file_nonexistent = 
os.path.normcase(os.path.join(tmpdir, "nonexistent")) @@ -145,7 +149,11 @@ def test_add(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None: assert ups._paths == {file_extant} def test_add_pth(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) + monkeypatch.setattr( + pip._internal.req.req_uninstall.UninstallPathSet, + "_permitted", + mock_permitted, + ) # Fix case for windows tests tmpdir = os.path.normcase(tmp_path) on_windows = sys.platform == "win32" @@ -175,7 +183,11 @@ def test_add_pth(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: @pytest.mark.skipif("sys.platform == 'win32'") def test_add_symlink(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) + monkeypatch.setattr( + pip._internal.req.req_uninstall.UninstallPathSet, + "_permitted", + mock_permitted, + ) f = os.path.join(tmpdir, "foo") with open(f, "w"): pass @@ -187,7 +199,11 @@ def test_add_symlink(self, tmpdir: Path, monkeypatch: pytest.MonkeyPatch) -> Non assert ups._paths == {foo_link} def test_compact_shorter_path(self, monkeypatch: pytest.MonkeyPatch) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) + monkeypatch.setattr( + pip._internal.req.req_uninstall.UninstallPathSet, + "_permitted", + mock_permitted, + ) monkeypatch.setattr("os.path.exists", lambda p: True) # This deals with nt/posix path differences short_path = os.path.normcase( @@ -202,7 +218,11 @@ def test_compact_shorter_path(self, monkeypatch: pytest.MonkeyPatch) -> None: def test_detect_symlink_dirs( self, monkeypatch: pytest.MonkeyPatch, tmpdir: Path ) -> None: - monkeypatch.setattr(pip._internal.req.req_uninstall.UninstallPathSet, "_permitted", mock_permitted) + monkeypatch.setattr( + 
pip._internal.req.req_uninstall.UninstallPathSet, + "_permitted", + mock_permitted, + ) # construct 2 paths: # tmpdir/dir/file From 5294e34fa116784fb5b7f08a857eededc8594a7c Mon Sep 17 00:00:00 2001 From: Ryan Shepherd Date: Thu, 30 Mar 2023 14:43:37 -0700 Subject: [PATCH 419/730] Retry removing the argument from lru_cache --- src/pip/_internal/req/req_uninstall.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index 02df22f06c4..65c9fc49f42 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -316,7 +316,7 @@ def __init__(self, dist: BaseDistribution) -> None: # Create local cache of normalize_path results. Creating an UninstallPathSet # can result in hundreds/thousands of redundant calls to normalize_path with # the same args, which hurts performance. - self._normalize_path_cached = functools.lru_cache(maxsize=256)(normalize_path) + self._normalize_path_cached = functools.lru_cache()(normalize_path) def _permitted(self, path: str) -> bool: """ From 2f271838e7fb32930a7b21a35b9bbdc87a103f34 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 11 Apr 2023 09:34:00 +0200 Subject: [PATCH 420/730] Report requested_extras for direct URLs --- news/11946.bugfix.rst | 2 + .../resolution/resolvelib/candidates.py | 1 + tests/functional/test_install_report.py | 46 +++++++++++++++++++ 3 files changed, 49 insertions(+) create mode 100644 news/11946.bugfix.rst diff --git a/news/11946.bugfix.rst b/news/11946.bugfix.rst new file mode 100644 index 00000000000..74437dc828f --- /dev/null +++ b/news/11946.bugfix.rst @@ -0,0 +1,2 @@ +Correctly report ``requested_extras`` in the installation report when extras are +specified for a local directory installation. 
diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index fe83a61231f..b1ff2828895 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -71,6 +71,7 @@ def make_install_req_from_link( ) ireq.original_link = template.original_link ireq.link = link + ireq.extras = template.extras return ireq diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index 83f5b5c2ca5..e7fec89856e 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -1,4 +1,5 @@ import json +import textwrap from pathlib import Path from typing import Any, Dict @@ -178,6 +179,51 @@ def test_install_report_vcs_editable( assert pip_test_package_report["download_info"]["dir_info"]["editable"] is True +@pytest.mark.network +def test_install_report_local_path_with_extras( + script: PipTestEnvironment, tmp_path: Path, shared_data: TestData +) -> None: + """Test report remote editable.""" + project_path = tmp_path / "pkga" + project_path.mkdir() + project_path.joinpath("pyproject.toml").write_text( + textwrap.dedent( + """\ + [project] + name = "pkga" + version = "1.0" + + [project.optional-dependencies] + test = ["simple"] + """ + ) + ) + report_path = tmp_path / "report.json" + script.pip( + "install", + "--dry-run", + "--no-build-isolation", + "--no-index", + "--find-links", + str(shared_data.root / "packages/"), + "--report", + str(report_path), + str(project_path) + "[test]", + ) + report = json.loads(report_path.read_text()) + assert len(report["install"]) == 2 + pkga_report = report["install"][0] + assert pkga_report["metadata"]["name"] == "pkga" + assert pkga_report["is_direct"] is True + assert pkga_report["requested"] is True + assert pkga_report["requested_extras"] == ["test"] + simple_report = report["install"][1] + assert simple_report["metadata"]["name"] == "simple" + 
assert simple_report["is_direct"] is False + assert simple_report["requested"] is False + assert "requested_extras" not in simple_report + + def test_install_report_to_stdout( script: PipTestEnvironment, shared_data: TestData ) -> None: From cdd9c95eacd58237f8dd800aa40756f9cf8e9b81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 11 Apr 2023 14:48:26 +0200 Subject: [PATCH 421/730] Add test for download_info hashes --- tests/functional/test_install_report.py | 33 +++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index 83f5b5c2ca5..4560fc9e7df 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -92,6 +92,39 @@ def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> Non assert "requires_dist" in paste_report["metadata"] +@pytest.mark.network +def test_install_report_direct_archive( + script: PipTestEnvironment, tmp_path: Path, shared_data: TestData +) -> None: + """Test report for direct URL archive.""" + report_path = tmp_path / "report.json" + script.pip( + "install", + str(shared_data.root / "packages" / "simplewheel-1.0-py2.py3-none-any.whl"), + "--dry-run", + "--no-index", + "--report", + str(report_path), + ) + report = json.loads(report_path.read_text()) + assert "install" in report + assert len(report["install"]) == 1 + simplewheel_report = _install_dict(report)["simplewheel"] + assert simplewheel_report["metadata"]["name"] == "simplewheel" + assert simplewheel_report["requested"] is True + assert simplewheel_report["is_direct"] is True + url = simplewheel_report["download_info"]["url"] + assert url.startswith("file://") + assert url.endswith("/packages/simplewheel-1.0-py2.py3-none-any.whl") + assert ( + simplewheel_report["download_info"]["archive_info"]["hash"] + == "sha256=e63aa139caee941ec7f33f057a5b987708c2128238357cf905429846a2008718" + ) + assert 
simplewheel_report["download_info"]["archive_info"]["hashes"] == { + "sha256": "e63aa139caee941ec7f33f057a5b987708c2128238357cf905429846a2008718" + } + + @pytest.mark.network def test_install_report_vcs_and_wheel_cache( script: PipTestEnvironment, tmp_path: Path From 5119d4651e7881401315be7d262a19b762d636c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 11 Apr 2023 14:41:36 +0200 Subject: [PATCH 422/730] Improve conversion of direct_url.hash to hashes --- src/pip/_internal/models/direct_url.py | 30 ++++++++++++++++---------- tests/unit/test_direct_url.py | 30 ++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py index c3de70a749c..709ea69d42a 100644 --- a/src/pip/_internal/models/direct_url.py +++ b/src/pip/_internal/models/direct_url.py @@ -105,22 +105,30 @@ def __init__( hash: Optional[str] = None, hashes: Optional[Dict[str, str]] = None, ) -> None: - if hash is not None: + self.hashes = hashes + self.hash = hash + + @property + def hash(self) -> Optional[str]: + return self._hash + + @hash.setter + def hash(self, value: Optional[str]) -> None: + if value is not None: # Auto-populate the hashes key to upgrade to the new format automatically. - # We don't back-populate the legacy hash key. + # We don't back-populate the legacy hash key from hashes. 
try: - hash_name, hash_value = hash.split("=", 1) + hash_name, hash_value = value.split("=", 1) except ValueError: raise DirectUrlValidationError( - f"invalid archive_info.hash format: {hash!r}" + f"invalid archive_info.hash format: {value!r}" ) - if hashes is None: - hashes = {hash_name: hash_value} - elif hash_name not in hash: - hashes = hashes.copy() - hashes[hash_name] = hash_value - self.hash = hash - self.hashes = hashes + if self.hashes is None: + self.hashes = {hash_name: hash_value} + elif hash_name not in self.hashes: + self.hashes = self.hashes.copy() + self.hashes[hash_name] = hash_value + self._hash = value @classmethod def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]: diff --git a/tests/unit/test_direct_url.py b/tests/unit/test_direct_url.py index 3ca982b5017..151e0a30f5b 100644 --- a/tests/unit/test_direct_url.py +++ b/tests/unit/test_direct_url.py @@ -140,3 +140,33 @@ def _redact_archive(url: str) -> str: == "https://${PIP_TOKEN}@g.c/u/p.git" ) assert _redact_git("ssh://git@g.c/u/p.git") == "ssh://git@g.c/u/p.git" + + +def test_hash_to_hashes() -> None: + direct_url = DirectUrl(url="https://e.c/archive.tar.gz", info=ArchiveInfo()) + assert isinstance(direct_url.info, ArchiveInfo) + direct_url.info.hash = "sha256=abcdef" + assert direct_url.info.hashes == {"sha256": "abcdef"} + + +def test_hash_to_hashes_constructor() -> None: + direct_url = DirectUrl( + url="https://e.c/archive.tar.gz", info=ArchiveInfo(hash="sha256=abcdef") + ) + assert isinstance(direct_url.info, ArchiveInfo) + assert direct_url.info.hashes == {"sha256": "abcdef"} + direct_url = DirectUrl( + url="https://e.c/archive.tar.gz", + info=ArchiveInfo(hash="sha256=abcdef", hashes={"sha512": "123456"}), + ) + assert isinstance(direct_url.info, ArchiveInfo) + assert direct_url.info.hashes == {"sha256": "abcdef", "sha512": "123456"} + # In case of conflict between hash and hashes, hashes wins. 
+ direct_url = DirectUrl( + url="https://e.c/archive.tar.gz", + info=ArchiveInfo( + hash="sha256=abcdef", hashes={"sha256": "012345", "sha512": "123456"} + ), + ) + assert isinstance(direct_url.info, ArchiveInfo) + assert direct_url.info.hashes == {"sha256": "012345", "sha512": "123456"} From 4f6d47d8bc7ad7ab4fbbbb374413cc1aef2f06d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 11 Apr 2023 14:41:54 +0200 Subject: [PATCH 423/730] Tweak comments --- src/pip/_internal/models/installation_report.py | 2 +- src/pip/_internal/operations/prepare.py | 5 ++++- src/pip/_internal/resolution/legacy/resolver.py | 2 +- src/pip/_internal/resolution/resolvelib/candidates.py | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py index b54afb109b4..fef3757f222 100644 --- a/src/pip/_internal/models/installation_report.py +++ b/src/pip/_internal/models/installation_report.py @@ -14,7 +14,7 @@ def __init__(self, install_requirements: Sequence[InstallRequirement]): def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: assert ireq.download_info, f"No download_info for {ireq}" res = { - # PEP 610 json for the download URL. download_info.archive_info.hash may + # PEP 610 json for the download URL. download_info.archive_info.hashes may # be absent when the requirement was installed from the wheel cache # and the cache entry was populated by an older pip version that did not # record origin.json. diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 343a01bef4b..dda92d29b0b 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -571,12 +571,15 @@ def _prepare_linked_requirement( # Make sure we have a hash in download_info. If we got it as part of the # URL, it will have been verified and we can rely on it. 
Otherwise we # compute it from the downloaded file. + # FIXME: https://github.com/pypa/pip/issues/11943 if ( isinstance(req.download_info.info, ArchiveInfo) - and not req.download_info.info.hash + and not req.download_info.info.hashes and local_file ): hash = hash_file(local_file.path)[0].hexdigest() + # We populate info.hash for backward compatibility. + # This will automatically populate info.hashes. req.download_info.info.hash = f"sha256={hash}" # For use in later processing, diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py index fb49d41695f..3a561e6db33 100644 --- a/src/pip/_internal/resolution/legacy/resolver.py +++ b/src/pip/_internal/resolution/legacy/resolver.py @@ -436,7 +436,7 @@ def _populate_link(self, req: InstallRequirement) -> None: req.download_info = cache_entry.origin else: # Legacy cache entry that does not have origin.json. - # download_info may miss the archive_info.hash field. + # download_info may miss the archive_info.hashes field. req.download_info = direct_url_from_link( req.link, link_is_in_wheel_cache=cache_entry.persistent ) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index fe83a61231f..4d624994410 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -280,7 +280,7 @@ def __init__( ireq.download_info = cache_entry.origin else: # Legacy cache entry that does not have origin.json. - # download_info may miss the archive_info.hash field. + # download_info may miss the archive_info.hashes field. 
ireq.download_info = direct_url_from_link( source_link, link_is_in_wheel_cache=cache_entry.persistent ) From 41b0189eba14ee4b841bca39a78962a93de157c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 11 Apr 2023 14:57:00 +0200 Subject: [PATCH 424/730] Add news --- news/11948.bugfix.rst | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 news/11948.bugfix.rst diff --git a/news/11948.bugfix.rst b/news/11948.bugfix.rst new file mode 100644 index 00000000000..74af913814b --- /dev/null +++ b/news/11948.bugfix.rst @@ -0,0 +1,3 @@ +When installing an archive from a direct URL or local file, populate +``download_info.info.hashes`` in the installation report, in addition to the legacy +``download_info.info.hash`` key. From 5bfccfd62d6fab1600a655e1f9932335740bf593 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 11 Apr 2023 22:24:12 +0200 Subject: [PATCH 425/730] Report requested_extras for editable requirements --- .../resolution/resolvelib/candidates.py | 4 +- tests/functional/test_install_report.py | 46 +++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index b1ff2828895..39af0d5db9f 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -79,7 +79,7 @@ def make_install_req_from_editable( link: Link, template: InstallRequirement ) -> InstallRequirement: assert template.editable, "template not editable" - return install_req_from_editable( + ireq = install_req_from_editable( link.url, user_supplied=template.user_supplied, comes_from=template.comes_from, @@ -91,6 +91,8 @@ def make_install_req_from_editable( hash_options=template.hash_options, config_settings=template.config_settings, ) + ireq.extras = template.extras + return ireq def _make_install_req_from_dist( diff --git 
a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index e7fec89856e..b8df6936f9b 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -224,6 +224,52 @@ def test_install_report_local_path_with_extras( assert "requested_extras" not in simple_report +@pytest.mark.network +def test_install_report_editable_local_path_with_extras( + script: PipTestEnvironment, tmp_path: Path, shared_data: TestData +) -> None: + """Test report remote editable.""" + project_path = tmp_path / "pkga" + project_path.mkdir() + project_path.joinpath("pyproject.toml").write_text( + textwrap.dedent( + """\ + [project] + name = "pkga" + version = "1.0" + + [project.optional-dependencies] + test = ["simple"] + """ + ) + ) + report_path = tmp_path / "report.json" + script.pip( + "install", + "--dry-run", + "--no-build-isolation", + "--no-index", + "--find-links", + str(shared_data.root / "packages/"), + "--report", + str(report_path), + "--editable", + str(project_path) + "[test]", + ) + report = json.loads(report_path.read_text()) + assert len(report["install"]) == 2 + pkga_report = report["install"][0] + assert pkga_report["metadata"]["name"] == "pkga" + assert pkga_report["is_direct"] is True + assert pkga_report["requested"] is True + assert pkga_report["requested_extras"] == ["test"] + simple_report = report["install"][1] + assert simple_report["metadata"]["name"] == "simple" + assert simple_report["is_direct"] is False + assert simple_report["requested"] is False + assert "requested_extras" not in simple_report + + def test_install_report_to_stdout( script: PipTestEnvironment, shared_data: TestData ) -> None: From 57191793579e893c584485676c08173e35a8bcaf Mon Sep 17 00:00:00 2001 From: Honnix Date: Wed, 12 Apr 2023 22:55:18 +0200 Subject: [PATCH 426/730] Make it clear that constraints file can be a URL --- docs/html/user_guide.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/html/user_guide.rst 
b/docs/html/user_guide.rst index 966b200f4f5..d92197a7671 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -242,6 +242,10 @@ organisation and use that everywhere. If the thing being installed requires "helloworld" to be installed, your fixed version specified in your constraints file will be used. +Constraints file can be served via a URL, e.g. +http://example.com/constraints.txt instead of only a local file, so that your +organization can provide constraints files online from a centraliazed place. + Constraints file support was added in pip 7.1. In :ref:`Resolver changes 2020` we did a fairly comprehensive overhaul, removing several undocumented and unsupported quirks from the previous implementation, From b62076fd5ff27ff1a3c436264379828fd2d59e30 Mon Sep 17 00:00:00 2001 From: Honnix Date: Wed, 12 Apr 2023 23:02:11 +0200 Subject: [PATCH 427/730] Create 11954.doc.rst --- news/11954.doc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11954.doc.rst diff --git a/news/11954.doc.rst b/news/11954.doc.rst new file mode 100644 index 00000000000..043dfaaa667 --- /dev/null +++ b/news/11954.doc.rst @@ -0,0 +1 @@ +Make it clear that constraints file can be a URL From beb97187abb8a4ed3debedcc9fc804618d85a4fd Mon Sep 17 00:00:00 2001 From: Honnix Date: Thu, 13 Apr 2023 09:02:27 +0200 Subject: [PATCH 428/730] Update user_guide.rst --- docs/html/user_guide.rst | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index d92197a7671..7175675baef 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -127,6 +127,10 @@ Logically, a Requirements file is just a list of :ref:`pip install` arguments placed in a file. Note that you should not rely on the items in the file being installed by pip in any particular order. +Requirements files can also be served via a URL, e.g. 
+http://example.com/requirements.txt besides as local files, so that they can +be stored and served in a centralized place. + In practice, there are 4 common uses of Requirements files: 1. Requirements files are used to hold the result from :ref:`pip freeze` for the @@ -242,16 +246,16 @@ organisation and use that everywhere. If the thing being installed requires "helloworld" to be installed, your fixed version specified in your constraints file will be used. -Constraints file can be served via a URL, e.g. -http://example.com/constraints.txt instead of only a local file, so that your -organization can provide constraints files online from a centraliazed place. - Constraints file support was added in pip 7.1. In :ref:`Resolver changes 2020` we did a fairly comprehensive overhaul, removing several undocumented and unsupported quirks from the previous implementation, and stripped constraints files down to being purely a way to specify global (version) limits for packages. +The same as requirements files, constraints files can be served via a URL, e.g. +http://example.com/constraints.txt, so that your organization can store and +serve them in a centralized place. + .. _`Installing from Wheels`: From f9e9a016d7893d2e1098c846abb536ee4bd71468 Mon Sep 17 00:00:00 2001 From: Honnix Date: Thu, 13 Apr 2023 09:05:10 +0200 Subject: [PATCH 429/730] Update user_guide.rst --- docs/html/user_guide.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index 7175675baef..3bb9e88adcd 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -252,8 +252,8 @@ undocumented and unsupported quirks from the previous implementation, and stripped constraints files down to being purely a way to specify global (version) limits for packages. -The same as requirements files, constraints files can be served via a URL, e.g. 
-http://example.com/constraints.txt, so that your organization can store and +The same as requirements files, constraints files can also be served via a URL, +e.g. http://example.com/constraints.txt, so that your organization can store and serve them in a centralized place. .. _`Installing from Wheels`: From 40e56a750606e8221521f85e84560d029c90eb2b Mon Sep 17 00:00:00 2001 From: Honnix Date: Thu, 13 Apr 2023 09:14:50 +0200 Subject: [PATCH 430/730] Update 11954.doc.rst --- news/11954.doc.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/11954.doc.rst b/news/11954.doc.rst index 043dfaaa667..946b4057f8b 100644 --- a/news/11954.doc.rst +++ b/news/11954.doc.rst @@ -1 +1 @@ -Make it clear that constraints file can be a URL +Make it clear that requirements/constraints file can be a URL From d914ffa92700e938ebc5327683605606dfdfacf1 Mon Sep 17 00:00:00 2001 From: Honnix Date: Thu, 13 Apr 2023 09:15:42 +0200 Subject: [PATCH 431/730] Update user_guide.rst --- docs/html/user_guide.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/user_guide.rst b/docs/html/user_guide.rst index 3bb9e88adcd..9a6f2901cd5 100644 --- a/docs/html/user_guide.rst +++ b/docs/html/user_guide.rst @@ -252,7 +252,7 @@ undocumented and unsupported quirks from the previous implementation, and stripped constraints files down to being purely a way to specify global (version) limits for packages. -The same as requirements files, constraints files can also be served via a URL, +Same as requirements files, constraints files can also be served via a URL, e.g. http://example.com/constraints.txt, so that your organization can store and serve them in a centralized place. 
From 030d2d425b0919dc3ca81820e110aabbddb2ef77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Thu, 13 Apr 2023 09:33:59 +0200 Subject: [PATCH 432/730] Add comment --- src/pip/_internal/models/direct_url.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pip/_internal/models/direct_url.py b/src/pip/_internal/models/direct_url.py index 709ea69d42a..e219d73849b 100644 --- a/src/pip/_internal/models/direct_url.py +++ b/src/pip/_internal/models/direct_url.py @@ -105,6 +105,7 @@ def __init__( hash: Optional[str] = None, hashes: Optional[Dict[str, str]] = None, ) -> None: + # set hashes before hash, since the hash setter will further populate hashes self.hashes = hashes self.hash = hash From 8e2205d8495474df088d773b4658aa4a40aefcac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 25 Mar 2023 14:17:48 +0100 Subject: [PATCH 433/730] Add function to check hashes against known digests --- src/pip/_internal/utils/hashes.py | 7 +++++++ tests/unit/test_utils.py | 8 ++++++++ 2 files changed, 15 insertions(+) diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py index 76727306a4c..843cffc6b3d 100644 --- a/src/pip/_internal/utils/hashes.py +++ b/src/pip/_internal/utils/hashes.py @@ -105,6 +105,13 @@ def check_against_path(self, path: str) -> None: with open(path, "rb") as file: return self.check_against_file(file) + def has_one_of(self, hashes: Dict[str, str]) -> bool: + """Return whether any of the given hashes are allowed.""" + for hash_name, hex_digest in hashes.items(): + if self.is_hash_allowed(hash_name, hex_digest): + return True + return False + def __bool__(self) -> bool: """Return whether I know any known-good hashes.""" return bool(self._allowed) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index a67a7c1100c..450081cfd03 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -425,6 +425,14 @@ def test_hash(self) -> None: cache[Hashes({"sha256": 
["ab", "cd"]})] = 42 assert cache[Hashes({"sha256": ["ab", "cd"]})] == 42 + def test_has_one_of(self) -> None: + hashes = Hashes({"sha256": ["abcd", "efgh"], "sha384": ["ijkl"]}) + assert hashes.has_one_of({"sha256": "abcd"}) + assert hashes.has_one_of({"sha256": "efgh"}) + assert not hashes.has_one_of({"sha256": "xyzt"}) + empty_hashes = Hashes() + assert not empty_hashes.has_one_of({"sha256": "xyzt"}) + class TestEncoding: """Tests for pip._internal.utils.encoding""" From 40cd79d6e54d53bccfcad0b855cd87116de896b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 25 Mar 2023 14:18:44 +0100 Subject: [PATCH 434/730] Check hashes of cached built wheels agains origin source archive --- news/5037.feature.rst | 1 + src/pip/_internal/operations/prepare.py | 34 ++++++++++++++++- .../resolution/resolvelib/factory.py | 2 +- tests/functional/test_install.py | 38 +++++++++++++++++++ 4 files changed, 73 insertions(+), 2 deletions(-) create mode 100644 news/5037.feature.rst diff --git a/news/5037.feature.rst b/news/5037.feature.rst new file mode 100644 index 00000000000..b0a25aaeec7 --- /dev/null +++ b/news/5037.feature.rst @@ -0,0 +1 @@ +Support wheel cache when using --require-hashes. diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index dda92d29b0b..6fa6cf5b462 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -536,9 +536,41 @@ def _prepare_linked_requirement( assert req.link link = req.link - self._ensure_link_req_src_dir(req, parallel_builds) hashes = self._get_linked_req_hashes(req) + if ( + hashes + and link.is_wheel + and link.is_file + and req.original_link_is_in_wheel_cache + ): + assert req.download_info is not None + # We need to verify hashes, and we have found the requirement in the cache + # of locally built wheels. 
+ if ( + isinstance(req.download_info.info, ArchiveInfo) + and req.download_info.info.hashes + and hashes.has_one_of(req.download_info.info.hashes) + ): + # At this point we know the requirement was built from a hashable source + # artifact, and we verified that the cache entry's hash of the original + # artifact matches one of the hashes we expect. We don't verify hashes + # against the cached wheel, because the wheel is not the original. + hashes = None + else: + logger.warning( + "The hashes of the source archive found in cache entry " + "don't match, ignoring cached built wheel " + "and re-downloading source." + ) + # For some reason req.original_link is not set here, even though + # req.original_link_is_in_wheel_cache is True. So we get the original + # link from download_info. + req.link = Link(req.download_info.url) # TODO comes_from? + link = req.link + + self._ensure_link_req_src_dir(req, parallel_builds) + if link.is_existing_dir(): local_file = None elif link.url not in self._downloaded: diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 0ad4641b1b1..0331297b85b 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -535,7 +535,7 @@ def get_wheel_cache_entry( hash mismatches. Furthermore, cached wheels at present have nondeterministic contents due to file modification times. 
""" - if self._wheel_cache is None or self.preparer.require_hashes: + if self._wheel_cache is None: return None return self._wheel_cache.get_cache_entry( link=link, diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index e50779688f1..bc974d1a8e4 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -729,6 +729,44 @@ def test_bad_link_hash_in_dep_install_failure( assert "THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr, result.stderr +def test_hashed_install_from_cache( + script: PipTestEnvironment, data: TestData, tmpdir: Path +) -> None: + """ + Test that installing from a cached built wheel works and that the hash is verified + against the hash of the original source archived stored in the cache entry. + """ + with requirements_file( + "simple2==1.0 --hash=sha256:" + "9336af72ca661e6336eb87bc7de3e8844d853e3848c2b9bbd2e8bf01db88c2c7\n", + tmpdir, + ) as reqs_file: + result = script.pip_install_local( + "--use-pep517", "--no-build-isolation", "-r", reqs_file.resolve() + ) + assert "Created wheel for simple2" in result.stdout + script.pip("uninstall", "simple2", "-y") + result = script.pip_install_local( + "--use-pep517", "--no-build-isolation", "-r", reqs_file.resolve() + ) + assert "Using cached simple2" in result.stdout + # now try with an invalid hash + with requirements_file( + "simple2==1.0 --hash=sha256:invalid\n", + tmpdir, + ) as reqs_file: + script.pip("uninstall", "simple2", "-y") + result = script.pip_install_local( + "--use-pep517", + "--no-build-isolation", + "-r", + reqs_file.resolve(), + expect_error=True, + ) + assert "Using cached simple2" in result.stdout + assert "ERROR: THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr + + def assert_re_match(pattern: str, text: str) -> None: assert re.search(pattern, text), f"Could not find {pattern!r} in {text!r}" From 0e2a0dbe4e8ef51fa4e475cbecc9cf0f29d47570 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: 
Sat, 25 Mar 2023 18:34:19 +0100 Subject: [PATCH 435/730] Improve pip wheel wrt hash checking of cached built wheels --- src/pip/_internal/operations/prepare.py | 27 +++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 6fa6cf5b462..44a90b515d7 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -179,7 +179,10 @@ def unpack_url( def _check_download_dir( - link: Link, download_dir: str, hashes: Optional[Hashes] + link: Link, + download_dir: str, + hashes: Optional[Hashes], + warn_on_hash_mismatch: bool = True, ) -> Optional[str]: """Check download_dir for previously downloaded file with correct hash If a correct file is found return its path else None @@ -195,10 +198,11 @@ def _check_download_dir( try: hashes.check_against_path(download_path) except HashMismatch: - logger.warning( - "Previously-downloaded file %s has bad hash. Re-downloading.", - download_path, - ) + if warn_on_hash_mismatch: + logger.warning( + "Previously-downloaded file %s has bad hash. Re-downloading.", + download_path, + ) os.unlink(download_path) return None return download_path @@ -485,7 +489,18 @@ def prepare_linked_requirement( file_path = None if self.download_dir is not None and req.link.is_wheel: hashes = self._get_linked_req_hashes(req) - file_path = _check_download_dir(req.link, self.download_dir, hashes) + file_path = _check_download_dir( + req.link, + self.download_dir, + hashes, + # When a locally built wheel has been found in cache, we don't warn + # about re-downloading when the already downloaded wheel hash does + # not match. This is because the hash must be checked against the + # original link, not the cached link. It that case the already + # downloaded file will be removed and re-fetched from cache (which + # implies a hash check against the cache entry's origin.json). 
+ warn_on_hash_mismatch=not req.original_link_is_in_wheel_cache, + ) if file_path is not None: # The file is already available, so mark it as downloaded From a1af13cd86d57190a0cd0bfd71d61f3002b87cca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 2 Apr 2023 11:26:28 +0200 Subject: [PATCH 436/730] Tweak a condition --- src/pip/_internal/operations/prepare.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 44a90b515d7..a9ad1558d70 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -553,13 +553,10 @@ def _prepare_linked_requirement( hashes = self._get_linked_req_hashes(req) - if ( - hashes - and link.is_wheel - and link.is_file - and req.original_link_is_in_wheel_cache - ): + if hashes and req.original_link_is_in_wheel_cache: assert req.download_info is not None + assert link.is_wheel + assert link.is_file # We need to verify hashes, and we have found the requirement in the cache # of locally built wheels. if ( From f86ba465e39a269167a480e426620c5d5cc8cdfc Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 10 Apr 2023 16:22:41 +0800 Subject: [PATCH 437/730] Fix rst syntax --- news/5037.feature.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/5037.feature.rst b/news/5037.feature.rst index b0a25aaeec7..fe4637b6cf2 100644 --- a/news/5037.feature.rst +++ b/news/5037.feature.rst @@ -1 +1 @@ -Support wheel cache when using --require-hashes. +Support wheel cache when using ``--require-hashes``. 
From ff8c8e38887880ad81ffd7cfc6a8373213c087b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 10 Apr 2023 14:25:40 +0200 Subject: [PATCH 438/730] Cosmetics --- src/pip/_internal/resolution/resolvelib/candidates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index e5e9d1fd75e..109fbdaf62c 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -259,7 +259,7 @@ def __init__( version: Optional[CandidateVersion] = None, ) -> None: source_link = link - cache_entry = factory.get_wheel_cache_entry(link, name) + cache_entry = factory.get_wheel_cache_entry(source_link, name) if cache_entry is not None: logger.debug("Using cached wheel link: %s", cache_entry.link) link = cache_entry.link From a6ef6485be9512f18121298b058797c578f65d45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 10 Apr 2023 14:32:55 +0200 Subject: [PATCH 439/730] Rename original_link_is_in_wheel_cache to is_wheel_from_cache This more accurately reflects that it is not necessarily related to original_link, original_link being the direct URL link, and the wheel cache can also be populated from sdists URL discovered by the finder. 
--- src/pip/_internal/operations/prepare.py | 10 +++++----- src/pip/_internal/req/req_install.py | 5 ++++- src/pip/_internal/resolution/legacy/resolver.py | 2 +- src/pip/_internal/resolution/resolvelib/candidates.py | 2 +- tests/unit/test_req.py | 4 ++-- 5 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index a9ad1558d70..10ed17c1924 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -267,7 +267,7 @@ def __init__( def _log_preparing_link(self, req: InstallRequirement) -> None: """Provide context for the requirement being prepared.""" - if req.link.is_file and not req.original_link_is_in_wheel_cache: + if req.link.is_file and not req.is_wheel_from_cache: message = "Processing %s" information = str(display_path(req.link.file_path)) else: @@ -288,7 +288,7 @@ def _log_preparing_link(self, req: InstallRequirement) -> None: self._previous_requirement_header = (message, information) logger.info(message, information) - if req.original_link_is_in_wheel_cache: + if req.is_wheel_from_cache: with indent_log(): logger.info("Using cached %s", req.link.filename) @@ -499,7 +499,7 @@ def prepare_linked_requirement( # original link, not the cached link. It that case the already # downloaded file will be removed and re-fetched from cache (which # implies a hash check against the cache entry's origin.json). - warn_on_hash_mismatch=not req.original_link_is_in_wheel_cache, + warn_on_hash_mismatch=not req.is_wheel_from_cache, ) if file_path is not None: @@ -553,7 +553,7 @@ def _prepare_linked_requirement( hashes = self._get_linked_req_hashes(req) - if hashes and req.original_link_is_in_wheel_cache: + if hashes and req.is_wheel_from_cache: assert req.download_info is not None assert link.is_wheel assert link.is_file @@ -576,7 +576,7 @@ def _prepare_linked_requirement( "and re-downloading source." 
) # For some reason req.original_link is not set here, even though - # req.original_link_is_in_wheel_cache is True. So we get the original + # req.is_wheel_from_cache is True. So we get the original # link from download_info. req.link = Link(req.download_info.url) # TODO comes_from? link = req.link diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index e2353f0321a..a8f94d4e096 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -108,7 +108,10 @@ def __init__( # PEP 508 URL requirement link = Link(req.url) self.link = self.original_link = link - self.original_link_is_in_wheel_cache = False + + # When is_wheel_from_cache is True, it means that this InstallRequirement + # is a local wheel file in the cache of locally built wheels. + self.is_wheel_from_cache = False # Information about the location of the artifact that was downloaded . This # property is guaranteed to be set in resolver results. diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py index 3a561e6db33..86d135fca1b 100644 --- a/src/pip/_internal/resolution/legacy/resolver.py +++ b/src/pip/_internal/resolution/legacy/resolver.py @@ -431,7 +431,7 @@ def _populate_link(self, req: InstallRequirement) -> None: if cache_entry is not None: logger.debug("Using cached wheel link: %s", cache_entry.link) if req.link is req.original_link and cache_entry.persistent: - req.original_link_is_in_wheel_cache = True + req.is_wheel_from_cache = True if cache_entry.origin is not None: req.download_info = cache_entry.origin else: diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 109fbdaf62c..3429f01e1a7 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -278,7 +278,7 @@ def __init__( if cache_entry is not None: if 
cache_entry.persistent and template.link is template.original_link: - ireq.original_link_is_in_wheel_cache = True + ireq.is_wheel_from_cache = True if cache_entry.origin is not None: ireq.download_info = cache_entry.origin else: diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index a5286c13a23..eb486ba0f43 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -411,7 +411,7 @@ def test_download_info_archive_legacy_cache( reqset = resolver.resolve([ireq], True) assert len(reqset.all_requirements) == 1 req = reqset.all_requirements[0] - assert req.original_link_is_in_wheel_cache + assert req.is_wheel_from_cache assert req.download_info assert req.download_info.url == url assert isinstance(req.download_info.info, ArchiveInfo) @@ -437,7 +437,7 @@ def test_download_info_archive_cache_with_origin( reqset = resolver.resolve([ireq], True) assert len(reqset.all_requirements) == 1 req = reqset.all_requirements[0] - assert req.original_link_is_in_wheel_cache + assert req.is_wheel_from_cache assert req.download_info assert req.download_info.url == url assert isinstance(req.download_info.info, ArchiveInfo) From caafe6e87d4f2998a77b194297e1c204cf6e10c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 10 Apr 2023 14:37:40 +0200 Subject: [PATCH 440/730] Add a couple of asserts --- src/pip/_internal/resolution/resolvelib/candidates.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 3429f01e1a7..7c2aae3b8b4 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -277,6 +277,8 @@ def __init__( ) if cache_entry is not None: + assert ireq.link.is_wheel + assert ireq.link.is_file if cache_entry.persistent and template.link is template.original_link: ireq.is_wheel_from_cache = True if cache_entry.origin is not None: From 
bd746e3136e5e1be2374a079bac66071dd967a8c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 10 Apr 2023 14:46:12 +0200 Subject: [PATCH 441/730] Introduce ireq.cached_wheel_source_link --- src/pip/_internal/operations/prepare.py | 5 +---- src/pip/_internal/req/req_install.py | 3 +++ src/pip/_internal/resolution/legacy/resolver.py | 1 + src/pip/_internal/resolution/resolvelib/candidates.py | 1 + tests/unit/test_req.py | 2 ++ 5 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 10ed17c1924..2273315234d 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -575,10 +575,7 @@ def _prepare_linked_requirement( "don't match, ignoring cached built wheel " "and re-downloading source." ) - # For some reason req.original_link is not set here, even though - # req.is_wheel_from_cache is True. So we get the original - # link from download_info. - req.link = Link(req.download_info.url) # TODO comes_from? + req.link = req.cached_wheel_source_link link = req.link self._ensure_link_req_src_dir(req, parallel_builds) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index a8f94d4e096..50d89b1be37 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -112,6 +112,9 @@ def __init__( # When is_wheel_from_cache is True, it means that this InstallRequirement # is a local wheel file in the cache of locally built wheels. self.is_wheel_from_cache = False + # When is_wheel_from_cache is True, this is the source link corresponding + # to the cache entry, which was used to download and build the cached wheel. + self.cached_wheel_source_link: Optional[Link] = None # Information about the location of the artifact that was downloaded . This # property is guaranteed to be set in resolver results. 
diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py index 86d135fca1b..216b0b81c27 100644 --- a/src/pip/_internal/resolution/legacy/resolver.py +++ b/src/pip/_internal/resolution/legacy/resolver.py @@ -431,6 +431,7 @@ def _populate_link(self, req: InstallRequirement) -> None: if cache_entry is not None: logger.debug("Using cached wheel link: %s", cache_entry.link) if req.link is req.original_link and cache_entry.persistent: + req.cached_wheel_source_link = req.link req.is_wheel_from_cache = True if cache_entry.origin is not None: req.download_info = cache_entry.origin diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 7c2aae3b8b4..2c315b4600c 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -280,6 +280,7 @@ def __init__( assert ireq.link.is_wheel assert ireq.link.is_file if cache_entry.persistent and template.link is template.original_link: + ireq.cached_wheel_source_link = source_link ireq.is_wheel_from_cache = True if cache_entry.origin is not None: ireq.download_info = cache_entry.origin diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index eb486ba0f43..c9742812be4 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -412,6 +412,7 @@ def test_download_info_archive_legacy_cache( assert len(reqset.all_requirements) == 1 req = reqset.all_requirements[0] assert req.is_wheel_from_cache + assert req.cached_wheel_source_link assert req.download_info assert req.download_info.url == url assert isinstance(req.download_info.info, ArchiveInfo) @@ -438,6 +439,7 @@ def test_download_info_archive_cache_with_origin( assert len(reqset.all_requirements) == 1 req = reqset.all_requirements[0] assert req.is_wheel_from_cache + assert req.cached_wheel_source_link assert req.download_info assert req.download_info.url == url assert 
isinstance(req.download_info.info, ArchiveInfo) From 4beca6b4c9c510b19dbb6180e962425b89e8c839 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 10 Apr 2023 15:43:57 +0200 Subject: [PATCH 442/730] Improve test --- tests/functional/test_install.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index bc974d1a8e4..63712827479 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -763,6 +763,10 @@ def test_hashed_install_from_cache( reqs_file.resolve(), expect_error=True, ) + assert ( + "WARNING: The hashes of the source archive found in cache entry " + "don't match, ignoring cached built wheel and re-downloading source." + ) in result.stderr assert "Using cached simple2" in result.stdout assert "ERROR: THESE PACKAGES DO NOT MATCH THE HASHES" in result.stderr From efe2d27451d50b165df78093bf5885da713fbdf8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Fri, 14 Apr 2023 08:12:33 +0200 Subject: [PATCH 443/730] Further refactor is_wheel_from_cache --- src/pip/_internal/req/req_install.py | 14 +++++++++----- src/pip/_internal/resolution/legacy/resolver.py | 1 - .../_internal/resolution/resolvelib/candidates.py | 1 - 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 50d89b1be37..a0ea58bd194 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -109,11 +109,9 @@ def __init__( link = Link(req.url) self.link = self.original_link = link - # When is_wheel_from_cache is True, it means that this InstallRequirement - # is a local wheel file in the cache of locally built wheels. - self.is_wheel_from_cache = False - # When is_wheel_from_cache is True, this is the source link corresponding - # to the cache entry, which was used to download and build the cached wheel. 
+ # When this InstallRequirement is a wheel obtained from the cache of locally + # built wheels, this is the source link corresponding to the cache entry, which + # was used to download and build the cached wheel. self.cached_wheel_source_link: Optional[Link] = None # Information about the location of the artifact that was downloaded . This @@ -443,6 +441,12 @@ def is_wheel(self) -> bool: return False return self.link.is_wheel + @property + def is_wheel_from_cache(self) -> bool: + # When True, it means that this InstallRequirement is a local wheel file in the + # cache of locally built wheels. + return self.cached_wheel_source_link is not None + # Things valid for sdists @property def unpacked_source_directory(self) -> str: diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py index 216b0b81c27..b17b7e4530b 100644 --- a/src/pip/_internal/resolution/legacy/resolver.py +++ b/src/pip/_internal/resolution/legacy/resolver.py @@ -432,7 +432,6 @@ def _populate_link(self, req: InstallRequirement) -> None: logger.debug("Using cached wheel link: %s", cache_entry.link) if req.link is req.original_link and cache_entry.persistent: req.cached_wheel_source_link = req.link - req.is_wheel_from_cache = True if cache_entry.origin is not None: req.download_info = cache_entry.origin else: diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 2c315b4600c..31020e27ad1 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -281,7 +281,6 @@ def __init__( assert ireq.link.is_file if cache_entry.persistent and template.link is template.original_link: ireq.cached_wheel_source_link = source_link - ireq.is_wheel_from_cache = True if cache_entry.origin is not None: ireq.download_info = cache_entry.origin else: From 868338f9f79b58eff34dafb168aed65480d080d5 Mon Sep 17 00:00:00 2001 From: Paul 
Moore Date: Sat, 15 Apr 2023 10:44:38 +0100 Subject: [PATCH 444/730] Update AUTHORS.txt --- AUTHORS.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/AUTHORS.txt b/AUTHORS.txt index 0f0fb3caf98..e9d3c38916f 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -143,6 +143,7 @@ Clay McClure Cody Cody Soyland Colin Watson +Collin Anderson Connor Osborn Cooper Lees Cooper Ry Lees @@ -212,6 +213,7 @@ Dominic Davis-Foster Donald Stufft Dongweiming doron zarhi +Dos Moonen Douglas Thor DrFeathers Dustin Ingram @@ -274,6 +276,7 @@ gpiks Greg Roodt Greg Ward Guilherme Espada +Guillaume Seguin gutsytechster Guy Rozendorn Guy Tuval @@ -288,6 +291,7 @@ Henrich Hartzer Henry Schreiner Herbert Pfennig Holly Stotelmyer +Honnix Hsiaoming Yang Hugo Lopes Tavares Hugo van Kemenade @@ -358,6 +362,7 @@ Joseph Long Josh Bronson Josh Hansen Josh Schneier +Juan Luis Cano Rodríguez Juanjo Bazán Judah Rand Julian Berman @@ -399,6 +404,7 @@ Leon Sasson Lev Givon Lincoln de Sousa Lipis +lorddavidiii Loren Carvalho Lucas Cimon Ludovic Gasc @@ -432,6 +438,7 @@ Matt Maker Matt Robenolt matthew Matthew Einhorn +Matthew Feickert Matthew Gilliard Matthew Iversen Matthew Trumbell @@ -451,6 +458,7 @@ Michael Michael Aquilina Michael E. 
Karpeles Michael Klich +Michael Mintz Michael Williamson michaelpacer Michał Górny @@ -482,6 +490,7 @@ Nick Timkovich Nicolas Bock Nicole Harris Nikhil Benesch +Nikhil Ladha Nikita Chepanov Nikolay Korolev Nipunn Koorapati @@ -539,6 +548,7 @@ Philip Molloy Philippe Ombredanne Pi Delport Pierre-Yves Rofes +Pieter Degroote pip Prabakaran Kumaresshan Prabhjyotsing Surjit Singh Sodhi @@ -546,6 +556,7 @@ Prabhu Marappan Pradyun Gedam Prashant Sharma Pratik Mallya +pre-commit-ci[bot] Preet Thakkar Preston Holmes Przemek Wrzos @@ -576,6 +587,7 @@ robin elisha robinson Roey Berman Rohan Jain Roman Bogorodskiy +Roman Donchenko Romuald Brunet ronaudinho Ronny Pfannschmidt @@ -584,6 +596,7 @@ Ross Brattain Roy Wellington Ⅳ Ruairidh MacLeod Russell Keith-Magee +Ryan Shepherd Ryan Wooden ryneeverett Sachi King @@ -652,6 +665,7 @@ Tim Harder Tim Heap tim smith tinruufu +Tobias Hermann Tom Forbes Tom Freudenheim Tom V @@ -685,6 +699,7 @@ Vladimir Rutsky W. Trevor King Wil Tan Wilfred Hughes +William Edwards William ML Leslie William T Olson William Woodruff From 6424ac4600265490462015c2fc7f9a402dba9ed8 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 15 Apr 2023 10:44:40 +0100 Subject: [PATCH 445/730] Bump for release --- NEWS.rst | 91 +++++++++++++++++++ news/10476.feature.rst | 1 - news/10937.feature.rst | 1 - news/11169.feature.rst | 1 - news/11325.feature.rst | 1 - news/11358.removal.rst | 1 - news/11451.removal.rst | 2 - news/11453.removal.rst | 2 - news/11529.bugfix.rst | 1 - news/11681.feature.rst | 4 - news/11702.trivial.rst | 2 - news/11719.bugfix.rst | 1 - news/11774.bugfix.rst | 1 - news/11775.doc.rst | 2 - news/11786.feature.rst | 1 - news/11809.doc.rst | 1 - news/11837.bugfix.rst | 1 - news/11838.doc.rst | 1 - news/11842.doc.rst | 2 - news/11859.removal.rst | 2 - news/11882.bugfix.rst | 1 - news/11889.bugfix.rst | 4 - news/11908.feature.rst | 1 - news/11915.removal.rst | 3 - news/11935.feature.rst | 1 - news/11936.bugfix.rst | 1 - news/11938.bugfix.rst | 3 - 
news/11941.feature.rst | 4 - news/11946.bugfix.rst | 2 - news/11948.bugfix.rst | 3 - news/11954.doc.rst | 1 - news/5037.feature.rst | 1 - news/8368.removal.rst | 2 - news/8559.removal.rst | 2 - news/8719.feature.rst | 1 - news/9752.feature.rst | 1 - ...76-8620-449D-8E31-799CDBCF3FD6.trivial.rst | 0 news/msgpack.vendor.rst | 1 - news/pkg_resources.vendor.rst | 1 - news/platformdirs.vendor.rst | 1 - news/pygments.vendor.rst | 1 - news/resolvelib.vendor.rst | 1 - news/rich.vendor.rst | 1 - news/setuptools.vendor.rst | 1 - news/tenacity.vendor.rst | 1 - news/typing_extensions.vendor.rst | 1 - news/urllib3.vendor.rst | 1 - src/pip/__init__.py | 2 +- 48 files changed, 92 insertions(+), 70 deletions(-) delete mode 100644 news/10476.feature.rst delete mode 100644 news/10937.feature.rst delete mode 100644 news/11169.feature.rst delete mode 100644 news/11325.feature.rst delete mode 100644 news/11358.removal.rst delete mode 100644 news/11451.removal.rst delete mode 100644 news/11453.removal.rst delete mode 100644 news/11529.bugfix.rst delete mode 100644 news/11681.feature.rst delete mode 100644 news/11702.trivial.rst delete mode 100644 news/11719.bugfix.rst delete mode 100644 news/11774.bugfix.rst delete mode 100644 news/11775.doc.rst delete mode 100644 news/11786.feature.rst delete mode 100644 news/11809.doc.rst delete mode 100644 news/11837.bugfix.rst delete mode 100644 news/11838.doc.rst delete mode 100644 news/11842.doc.rst delete mode 100644 news/11859.removal.rst delete mode 100644 news/11882.bugfix.rst delete mode 100644 news/11889.bugfix.rst delete mode 100644 news/11908.feature.rst delete mode 100644 news/11915.removal.rst delete mode 100644 news/11935.feature.rst delete mode 100644 news/11936.bugfix.rst delete mode 100644 news/11938.bugfix.rst delete mode 100644 news/11941.feature.rst delete mode 100644 news/11946.bugfix.rst delete mode 100644 news/11948.bugfix.rst delete mode 100644 news/11954.doc.rst delete mode 100644 news/5037.feature.rst delete mode 100644 
news/8368.removal.rst delete mode 100644 news/8559.removal.rst delete mode 100644 news/8719.feature.rst delete mode 100644 news/9752.feature.rst delete mode 100644 news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst delete mode 100644 news/msgpack.vendor.rst delete mode 100644 news/pkg_resources.vendor.rst delete mode 100644 news/platformdirs.vendor.rst delete mode 100644 news/pygments.vendor.rst delete mode 100644 news/resolvelib.vendor.rst delete mode 100644 news/rich.vendor.rst delete mode 100644 news/setuptools.vendor.rst delete mode 100644 news/tenacity.vendor.rst delete mode 100644 news/typing_extensions.vendor.rst delete mode 100644 news/urllib3.vendor.rst diff --git a/NEWS.rst b/NEWS.rst index 5169dad1d43..4ac7fed4a6c 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,97 @@ .. towncrier release notes start +23.1 (2023-04-15) +================= + +Deprecations and Removals +------------------------- + +- Remove support for the deprecated ``--install-options``. (`#11358 `_) +- ``--no-binary`` does not imply ``setup.py install`` anymore. Instead a wheel will be + built locally and installed. (`#11451 `_) +- ``--no-binary`` does not disable the cache of locally built wheels anymore. It only + means "don't download wheels". (`#11453 `_) +- Deprecate ``--build-option`` and ``--global-option``. Users are invited to switch to + ``--config-settings``. (`#11859 `_) +- Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now print + a deprecation warning. In the future the presence of config settings will automatically + enable the default build backend for legacy projects and pass the setttings to it. (`#11915 `_) +- Remove ``setup.py install`` fallback when building a wheel failed for projects without + ``pyproject.toml``. (`#8368 `_) +- When the ``wheel`` package is not installed, pip now uses the default build backend + instead of ``setup.py install`` for project without ``pyproject.toml``. 
(`#8559 `_) + +Features +-------- + +- Specify egg-link location in assertion message when it does not match installed location to provide better error message for debugging. (`#10476 `_) +- Present conflict information during installation after each choice that is rejected (pass ``-vv`` to ``pip install`` to show it) (`#10937 `_) +- Display dependency chain on each Collecting/Processing log line. (`#11169 `_) +- Support a per-requirement ``--config-settings`` option in requirements files. (`#11325 `_) +- The ``--config-settings``/``-C`` option now supports using the same key multiple + times. When the same key is specified multiple times, all values are passed to + the build backend as a list, as opposed to the previous behavior, where pip would + only pass the last value if the same key was used multiple times. (`#11681 `_) +- Add ``-C`` as a short version of the ``--config-settings`` option. (`#11786 `_) +- Reduce the number of resolver rounds, since backjumping makes the resolver more efficient in finding solutions. This also makes pathological cases fail quicker. (`#11908 `_) +- Warn if ``--hash`` is used on a line without requirement in a requirements file. (`#11935 `_) +- Stop propagating CLI ``--config-settings`` to the build dependencies. They already did + not propagate to requirements provided in requirement files. To pass the same config + settings to several requirements, users should provide the requirements as CLI + arguments. (`#11941 `_) +- Support wheel cache when using ``--require-hashes``. (`#5037 `_) +- Add ``--keyring-provider`` flag. See the Authentication page in the documentation for more info. (`#8719 `_) +- In the case of virtual environments, configuration files are now also included from the base installation. (`#9752 `_) + +Bug Fixes +--------- + +- Fix grammar by changing "A new release of pip available:" to "A new release of pip is available:" in the notice used for indicating that. 
(`#11529 `_) +- Normalize paths before checking if installed scripts are on PATH. (`#11719 `_) +- Correct the way to decide if keyring is available. (`#11774 `_) +- More consistent resolution backtracking by removing legacy hack related to setuptools resolution (`#11837 `_) +- Include ``AUTHORS.txt`` in pip's wheels. (`#11882 `_) +- The ``uninstall`` and ``install --force-reinstall`` commands no longer call + ``normalize_path()`` repeatedly on the same paths. Instead, these results are + cached for the duration of an uninstall operation, resulting in improved + performance, particularly on Windows. (`#11889 `_) +- Fix and improve the parsing of hashes embedded in URL fragments. (`#11936 `_) +- When package A depends on package B provided as a direct URL dependency including a hash + embedded in the link, the ``--require-hashes`` option did not warn when user supplied hashes + were missing for package B. (`#11938 `_) +- Correctly report ``requested_extras`` in the installation report when extras are + specified for a local directory installation. (`#11946 `_) +- When installing an archive from a direct URL or local file, populate + ``download_info.info.hashes`` in the installation report, in addition to the legacy + ``download_info.info.hash`` key. (`#11948 `_) + +Vendored Libraries +------------------ + +- Upgrade msgpack to 1.0.5 +- Patch pkg_resources to remove dependency on ``jaraco.text``. +- Upgrade platformdirs to 3.2.0 +- Upgrade pygments to 2.14.0 +- Upgrade resolvelib to 1.0.1 +- Upgrade rich to 13.3.3 +- Upgrade setuptools to 67.6.1 +- Upgrade tenacity to 8.2.2 +- Upgrade typing_extensions to 4.5.0 +- Upgrade urllib3 to 1.26.15 + +Improved Documentation +---------------------- + +- Cross-reference the ``--python`` flag from the ``--prefix`` flag, + and mention limitations of ``--prefix`` regarding script installation. (`#11775 `_) +- Add SECURITY.md to make the policy offical. (`#11809 `_) +- Add username to Git over SSH example. 
(`#11838 `_) +- Quote extras in the pip install docs to guard shells with default glob + qualifiers, like zsh. (`#11842 `_) +- Make it clear that requirements/constraints file can be a URL (`#11954 `_) + + 23.0.1 (2023-02-17) =================== diff --git a/news/10476.feature.rst b/news/10476.feature.rst deleted file mode 100644 index 7c2757771a7..00000000000 --- a/news/10476.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Specify egg-link location in assertion message when it does not match installed location to provide better error message for debugging. diff --git a/news/10937.feature.rst b/news/10937.feature.rst deleted file mode 100644 index 2974c577a10..00000000000 --- a/news/10937.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Present conflict information during installation after each choice that is rejected (pass ``-vv`` to ``pip install`` to show it) diff --git a/news/11169.feature.rst b/news/11169.feature.rst deleted file mode 100644 index 54cc6637bc6..00000000000 --- a/news/11169.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Display dependency chain on each Collecting/Processing log line. diff --git a/news/11325.feature.rst b/news/11325.feature.rst deleted file mode 100644 index 282310816b6..00000000000 --- a/news/11325.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Support a per-requirement ``--config-settings`` option in requirements files. diff --git a/news/11358.removal.rst b/news/11358.removal.rst deleted file mode 100644 index 23e388a9a39..00000000000 --- a/news/11358.removal.rst +++ /dev/null @@ -1 +0,0 @@ -Remove support for the deprecated ``--install-options``. diff --git a/news/11451.removal.rst b/news/11451.removal.rst deleted file mode 100644 index c0d1100ed92..00000000000 --- a/news/11451.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -``--no-binary`` does not imply ``setup.py install`` anymore. Instead a wheel will be -built locally and installed. 
diff --git a/news/11453.removal.rst b/news/11453.removal.rst deleted file mode 100644 index 91ebfda0438..00000000000 --- a/news/11453.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -``--no-binary`` does not disable the cache of locally built wheels anymore. It only -means "don't download wheels". diff --git a/news/11529.bugfix.rst b/news/11529.bugfix.rst deleted file mode 100644 index d05e404602e..00000000000 --- a/news/11529.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix grammar by changing "A new release of pip available:" to "A new release of pip is available:" in the notice used for indicating that. diff --git a/news/11681.feature.rst b/news/11681.feature.rst deleted file mode 100644 index 00cd05ee18d..00000000000 --- a/news/11681.feature.rst +++ /dev/null @@ -1,4 +0,0 @@ -The ``--config-settings``/``-C`` option now supports using the same key multiple -times. When the same key is specified multiple times, all values are passed to -the build backend as a list, as opposed to the previous behavior, where pip would -only pass the last value if the same key was used multiple times. diff --git a/news/11702.trivial.rst b/news/11702.trivial.rst deleted file mode 100644 index d27e33d78ce..00000000000 --- a/news/11702.trivial.rst +++ /dev/null @@ -1,2 +0,0 @@ -Strip command line prompts like "$" and "C:>" from the actual command -being copied using the copybutton. diff --git a/news/11719.bugfix.rst b/news/11719.bugfix.rst deleted file mode 100644 index c2ae8bc1d5e..00000000000 --- a/news/11719.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Normalize paths before checking if installed scripts are on PATH. diff --git a/news/11774.bugfix.rst b/news/11774.bugfix.rst deleted file mode 100644 index 771246b0b54..00000000000 --- a/news/11774.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Correct the way to decide if keyring is available. 
diff --git a/news/11775.doc.rst b/news/11775.doc.rst deleted file mode 100644 index 18274b7692a..00000000000 --- a/news/11775.doc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Cross-reference the ``--python`` flag from the ``--prefix`` flag, -and mention limitations of ``--prefix`` regarding script installation. diff --git a/news/11786.feature.rst b/news/11786.feature.rst deleted file mode 100644 index 0da7f86373e..00000000000 --- a/news/11786.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``-C`` as a short version of the ``--config-settings`` option. diff --git a/news/11809.doc.rst b/news/11809.doc.rst deleted file mode 100644 index 68c49ea50d5..00000000000 --- a/news/11809.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Add SECURITY.md to make the policy offical. diff --git a/news/11837.bugfix.rst b/news/11837.bugfix.rst deleted file mode 100644 index 6d33ed6800c..00000000000 --- a/news/11837.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -More consistent resolution backtracking by removing legacy hack related to setuptools resolution diff --git a/news/11838.doc.rst b/news/11838.doc.rst deleted file mode 100644 index 9630aa59885..00000000000 --- a/news/11838.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Add username to Git over SSH example. diff --git a/news/11842.doc.rst b/news/11842.doc.rst deleted file mode 100644 index bd063996f54..00000000000 --- a/news/11842.doc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Quote extras in the pip install docs to guard shells with default glob -qualifiers, like zsh. diff --git a/news/11859.removal.rst b/news/11859.removal.rst deleted file mode 100644 index b29cedd7557..00000000000 --- a/news/11859.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate ``--build-option`` and ``--global-option``. Users are invited to switch to -``--config-settings``. diff --git a/news/11882.bugfix.rst b/news/11882.bugfix.rst deleted file mode 100644 index 5373487b188..00000000000 --- a/news/11882.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Include ``AUTHORS.txt`` in pip's wheels. 
diff --git a/news/11889.bugfix.rst b/news/11889.bugfix.rst deleted file mode 100644 index e760fd1fbf4..00000000000 --- a/news/11889.bugfix.rst +++ /dev/null @@ -1,4 +0,0 @@ -The ``uninstall`` and ``install --force-reinstall`` commands no longer call -``normalize_path()`` repeatedly on the same paths. Instead, these results are -cached for the duration of an uninstall operation, resulting in improved -performance, particularly on Windows. diff --git a/news/11908.feature.rst b/news/11908.feature.rst deleted file mode 100644 index 2b9ec18d98f..00000000000 --- a/news/11908.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Reduce the number of resolver rounds, since backjumping makes the resolver more efficient in finding solutions. This also makes pathological cases fail quicker. diff --git a/news/11915.removal.rst b/news/11915.removal.rst deleted file mode 100644 index e54b5d574c0..00000000000 --- a/news/11915.removal.rst +++ /dev/null @@ -1,3 +0,0 @@ -Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now print -a deprecation warning. In the future the presence of config settings will automatically -enable the default build backend for legacy projects and pass the setttings to it. diff --git a/news/11935.feature.rst b/news/11935.feature.rst deleted file mode 100644 index b170ca1d8cd..00000000000 --- a/news/11935.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Warn if ``--hash`` is used on a line without requirement in a requirements file. diff --git a/news/11936.bugfix.rst b/news/11936.bugfix.rst deleted file mode 100644 index 4ae3ad69a31..00000000000 --- a/news/11936.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix and improve the parsing of hashes embedded in URL fragments. 
diff --git a/news/11938.bugfix.rst b/news/11938.bugfix.rst deleted file mode 100644 index b299f8e4ff5..00000000000 --- a/news/11938.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -When package A depends on package B provided as a direct URL dependency including a hash -embedded in the link, the ``--require-hashes`` option did not warn when user supplied hashes -were missing for package B. diff --git a/news/11941.feature.rst b/news/11941.feature.rst deleted file mode 100644 index 404f2cb2de6..00000000000 --- a/news/11941.feature.rst +++ /dev/null @@ -1,4 +0,0 @@ -Stop propagating CLI ``--config-settings`` to the build dependencies. They already did -not propagate to requirements provided in requirement files. To pass the same config -settings to several requirements, users should provide the requirements as CLI -arguments. diff --git a/news/11946.bugfix.rst b/news/11946.bugfix.rst deleted file mode 100644 index 74437dc828f..00000000000 --- a/news/11946.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Correctly report ``requested_extras`` in the installation report when extras are -specified for a local directory installation. diff --git a/news/11948.bugfix.rst b/news/11948.bugfix.rst deleted file mode 100644 index 74af913814b..00000000000 --- a/news/11948.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -When installing an archive from a direct URL or local file, populate -``download_info.info.hashes`` in the installation report, in addition to the legacy -``download_info.info.hash`` key. diff --git a/news/11954.doc.rst b/news/11954.doc.rst deleted file mode 100644 index 946b4057f8b..00000000000 --- a/news/11954.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Make it clear that requirements/constraints file can be a URL diff --git a/news/5037.feature.rst b/news/5037.feature.rst deleted file mode 100644 index fe4637b6cf2..00000000000 --- a/news/5037.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Support wheel cache when using ``--require-hashes``. 
diff --git a/news/8368.removal.rst b/news/8368.removal.rst deleted file mode 100644 index 44ee33aa78c..00000000000 --- a/news/8368.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -Remove ``setup.py install`` fallback when building a wheel failed for projects without -``pyproject.toml``. diff --git a/news/8559.removal.rst b/news/8559.removal.rst deleted file mode 100644 index a0953dade6b..00000000000 --- a/news/8559.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -When the ``wheel`` package is not installed, pip now uses the default build backend -instead of ``setup.py install`` for project without ``pyproject.toml``. diff --git a/news/8719.feature.rst b/news/8719.feature.rst deleted file mode 100644 index 3f3caf2db89..00000000000 --- a/news/8719.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``--keyring-provider`` flag. See the Authentication page in the documentation for more info. diff --git a/news/9752.feature.rst b/news/9752.feature.rst deleted file mode 100644 index d515267be21..00000000000 --- a/news/9752.feature.rst +++ /dev/null @@ -1 +0,0 @@ -In the case of virtual environments, configuration files are now also included from the base installation. diff --git a/news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst b/news/CD497476-8620-449D-8E31-799CDBCF3FD6.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/msgpack.vendor.rst b/news/msgpack.vendor.rst deleted file mode 100644 index 9193b7ce52b..00000000000 --- a/news/msgpack.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade msgpack to 1.0.5 diff --git a/news/pkg_resources.vendor.rst b/news/pkg_resources.vendor.rst deleted file mode 100644 index a20817dfb24..00000000000 --- a/news/pkg_resources.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Patch pkg_resources to remove dependency on ``jaraco.text``. 
diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst deleted file mode 100644 index 939253e14fc..00000000000 --- a/news/platformdirs.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade platformdirs to 3.2.0 diff --git a/news/pygments.vendor.rst b/news/pygments.vendor.rst deleted file mode 100644 index a6c8edafc69..00000000000 --- a/news/pygments.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade pygments to 2.14.0 diff --git a/news/resolvelib.vendor.rst b/news/resolvelib.vendor.rst deleted file mode 100644 index ad55516edea..00000000000 --- a/news/resolvelib.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade resolvelib to 1.0.1 diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst deleted file mode 100644 index 0bedd3bb4e1..00000000000 --- a/news/rich.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade rich to 13.3.3 diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst deleted file mode 100644 index 9cf3f49e21c..00000000000 --- a/news/setuptools.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade setuptools to 67.6.1 diff --git a/news/tenacity.vendor.rst b/news/tenacity.vendor.rst deleted file mode 100644 index 493d38d0195..00000000000 --- a/news/tenacity.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade tenacity to 8.2.2 diff --git a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst deleted file mode 100644 index e71aeb66309..00000000000 --- a/news/typing_extensions.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade typing_extensions to 4.5.0 diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst deleted file mode 100644 index 09e82a8f2ff..00000000000 --- a/news/urllib3.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade urllib3 to 1.26.15 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index ce90d06bfd4..4aacc650d0f 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.1.dev0" +__version__ = "23.1" def main(args: Optional[List[str]] = None) -> 
int: From 2fd3e408dd286ef1f1016bb085b739bdff8625c8 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 15 Apr 2023 10:44:40 +0100 Subject: [PATCH 446/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 4aacc650d0f..20d8bf56c17 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.1" +__version__ = "23.2.dev0" def main(args: Optional[List[str]] = None) -> int: From 0621e5ad0d750f0fa7d06e5ec44a4bfd1e65e79c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 10 Apr 2023 15:14:43 +0200 Subject: [PATCH 447/730] Add ireq.is_direct property, for readability --- src/pip/_internal/exceptions.py | 2 +- src/pip/_internal/models/installation_report.py | 2 +- src/pip/_internal/operations/prepare.py | 2 +- src/pip/_internal/req/req_install.py | 11 +++++++++-- 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index 7d92ba69983..d95fe44b34a 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -544,7 +544,7 @@ def body(self) -> str: # so the output can be directly copied into the requirements file. package = ( self.req.original_link - if self.req.original_link + if self.req.is_direct # In case someone feeds something downright stupid # to InstallRequirement's constructor. 
else getattr(self.req, "req", None) diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py index fef3757f222..7f001f35ef2 100644 --- a/src/pip/_internal/models/installation_report.py +++ b/src/pip/_internal/models/installation_report.py @@ -22,7 +22,7 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: # is_direct is true if the requirement was a direct URL reference (which # includes editable requirements), and false if the requirement was # downloaded from a PEP 503 index or --find-links. - "is_direct": bool(ireq.original_link), + "is_direct": ireq.is_direct, # requested is true if the requirement was specified by the user (aka # top level requirement), and false if it was installed as a dependency of a # requirement. https://peps.python.org/pep-0376/#requested diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 2273315234d..dd265f255cf 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -352,7 +352,7 @@ def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: # a surprising hash mismatch in the future. # file:/// URLs aren't pinnable, so don't complain about them # not being pinned. - if req.original_link is None and not req.is_pinned: + if not req.is_direct and not req.is_pinned: raise HashUnpinned() # If known-good hashes are missing for this requirement, diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index d01b24a9189..1f479713a94 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -104,6 +104,8 @@ def __init__( if link.is_file: self.source_dir = os.path.normpath(os.path.abspath(link.file_path)) + # original_link is the direct URL that was provided by the user for the + # requirement, either directly or via a constraints file. 
if link is None and req and req.url: # PEP 508 URL requirement link = Link(req.url) @@ -244,6 +246,11 @@ def supports_pyproject_editable(self) -> bool: def specifier(self) -> SpecifierSet: return self.req.specifier + @property + def is_direct(self) -> bool: + """Whether this requirement was specified as a direct URL.""" + return self.original_link is not None + @property def is_pinned(self) -> bool: """Return whether I am pinned to an exact version. @@ -293,7 +300,7 @@ def hashes(self, trust_internet: bool = True) -> Hashes: good_hashes = self.hash_options.copy() if trust_internet: link = self.link - elif self.original_link and self.user_supplied: + elif self.is_direct and self.user_supplied: link = self.original_link else: link = None @@ -804,7 +811,7 @@ def install( req_description=str(self.req), pycompile=pycompile, warn_script_location=warn_script_location, - direct_url=self.download_info if self.original_link else None, + direct_url=self.download_info if self.is_direct else None, requested=self.user_supplied, ) self.install_succeeded = True From b9c1097f14265c725327fb8a49a377837a2b9aea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 17 Apr 2023 16:42:12 +0200 Subject: [PATCH 448/730] Mention `setup.py develop` change in 23.1 changelog (#11973) --- NEWS.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/NEWS.rst b/NEWS.rst index 4ac7fed4a6c..da8c8726a1d 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -22,13 +22,14 @@ Deprecations and Removals means "don't download wheels". (`#11453 `_) - Deprecate ``--build-option`` and ``--global-option``. Users are invited to switch to ``--config-settings``. (`#11859 `_) -- Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now print +- Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now prints a deprecation warning. 
In the future the presence of config settings will automatically enable the default build backend for legacy projects and pass the setttings to it. (`#11915 `_) - Remove ``setup.py install`` fallback when building a wheel failed for projects without ``pyproject.toml``. (`#8368 `_) - When the ``wheel`` package is not installed, pip now uses the default build backend - instead of ``setup.py install`` for project without ``pyproject.toml``. (`#8559 `_) + instead of ``setup.py install`` and ``setup.py develop`` for project without + ``pyproject.toml``. (`#8559 `_) Features -------- From 3b8791f8411e4d8aedf5e759dbbd8bb82eff70fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 17 Apr 2023 18:36:42 +0200 Subject: [PATCH 449/730] Update docs about hash checking and wheel cache (#11967) --- docs/html/topics/secure-installs.md | 12 ++++++------ news/11967.doc.rst | 2 ++ 2 files changed, 8 insertions(+), 6 deletions(-) create mode 100644 news/11967.doc.rst diff --git a/docs/html/topics/secure-installs.md b/docs/html/topics/secure-installs.md index f012842b2ac..bda3c4485b0 100644 --- a/docs/html/topics/secure-installs.md +++ b/docs/html/topics/secure-installs.md @@ -59,13 +59,13 @@ It is possible to use multiple hashes for each package. This is important when a ### Interaction with caching -The {ref}`locally-built wheel cache ` is disabled in hash-checking mode to prevent spurious hash mismatch errors. - -These would otherwise occur while installing sdists that had already been automatically built into cached wheels: those wheels would be selected for installation, but their hashes would not match the sdist ones from the requirements file. - -A further complication is that locally built wheels are nondeterministic: contemporary modification times make their way into the archive, making hashes unpredictable across machines and cache flushes. 
Compilation of C code adds further nondeterminism, as many compilers include random-seeded values in their output. +```{versionchanged} 23.1 +The {ref}`locally-built wheel cache ` is used in hash-checking mode too. +``` -However, wheels fetched from index servers are required to be the same every time. They land in pip's HTTP cache, not its wheel cache, and are used normally in hash-checking mode. The only downside of having the wheel cache disabled is thus extra build time for sdists, and this can be solved by making sure pre-built wheels are available from the index server. +When installing from the cache of locally built wheels in hash-checking mode, pip verifies +the hashes against those of the original source distribution that was used to build the wheel. +These original hashes are obtained from a `origin.json` file stored in each cache entry. ### Using hashes from PyPI (or other index servers) diff --git a/news/11967.doc.rst b/news/11967.doc.rst new file mode 100644 index 00000000000..4d9a23b033f --- /dev/null +++ b/news/11967.doc.rst @@ -0,0 +1,2 @@ +Update documentation to reflect the new behavior of using the cache of locally +built wheels in hash-checking mode. From c8b49cac1efa967065c418f93222419d109b06e5 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 17 Apr 2023 17:46:47 +0100 Subject: [PATCH 450/730] Revert "Upgrade setuptools to 67.6.1" This reverts commit d7e02483264703d6210e8c28937dcf1d9f547796. 
--- news/setuptools.vendor.rst | 1 + src/pip/_vendor/pkg_resources/__init__.py | 599 +++++++++----------- src/pip/_vendor/vendor.txt | 2 +- tools/vendoring/patches/pkg_resources.patch | 22 + 4 files changed, 293 insertions(+), 331 deletions(-) create mode 100644 news/setuptools.vendor.rst diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst new file mode 100644 index 00000000000..2d6480a898b --- /dev/null +++ b/news/setuptools.vendor.rst @@ -0,0 +1 @@ +Revert pkg_resources (via setuptools) back to 65.6.3 diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index a85aca10f7c..0ec74f8a6ef 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -12,12 +12,6 @@ .egg files, and unpacked .egg files. It can also work in a limited way with .zip files and with custom PEP 302 loaders that support the ``get_data()`` method. - -This module is deprecated. Users are directed to -`importlib.resources `_ -and -`importlib.metadata `_ -instead. """ import sys @@ -40,6 +34,7 @@ import errno import tempfile import textwrap +import itertools import inspect import ntpath import posixpath @@ -59,10 +54,8 @@ # capture these to bypass sandboxing from os import utime - try: from os import mkdir, rename, unlink - WRITE_SUPPORT = True except ImportError: # no write support, probably under GAE @@ -73,7 +66,6 @@ try: import importlib.machinery as importlib_machinery - # access attribute to force import under delayed import mechanisms. 
importlib_machinery.__name__ except ImportError: @@ -87,7 +79,6 @@ from pip._vendor import platformdirs from pip._vendor import packaging - __import__('pip._vendor.packaging.version') __import__('pip._vendor.packaging.specifiers') __import__('pip._vendor.packaging.requirements') @@ -118,12 +109,6 @@ _namespace_packages = None -warnings.warn("pkg_resources is deprecated as an API", DeprecationWarning) - - -_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) - - class PEP440Warning(RuntimeWarning): """ Used when there is an issue with a version or specifier not complying with @@ -131,7 +116,16 @@ class PEP440Warning(RuntimeWarning): """ -parse_version = packaging.version.Version +def parse_version(v): + try: + return packaging.version.Version(v) + except packaging.version.InvalidVersion: + warnings.warn( + f"{v} is an invalid version and will not be supported in " + "a future release", + PkgResourcesDeprecationWarning, + ) + return packaging.version.LegacyVersion(v) _state_vars = {} @@ -203,87 +197,51 @@ def get_supported_platform(): __all__ = [ # Basic resource access and distribution/entry point discovery - 'require', - 'run_script', - 'get_provider', - 'get_distribution', - 'load_entry_point', - 'get_entry_map', - 'get_entry_info', + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', - 'resource_string', - 'resource_stream', - 'resource_filename', - 'resource_listdir', - 'resource_exists', - 'resource_isdir', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + # Environmental control - 'declare_namespace', - 'working_set', - 'add_activation_listener', - 'find_distributions', - 'set_extraction_path', - 'cleanup_resources', + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', 'get_default_cache', + # 
Primary implementation classes - 'Environment', - 'WorkingSet', - 'ResourceManager', - 'Distribution', - 'Requirement', - 'EntryPoint', + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + # Exceptions - 'ResolutionError', - 'VersionConflict', - 'DistributionNotFound', - 'UnknownExtra', - 'ExtractionError', + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + # Warnings 'PEP440Warning', + # Parsing functions and string utilities - 'parse_requirements', - 'parse_version', - 'safe_name', - 'safe_version', - 'get_platform', - 'compatible_platforms', - 'yield_lines', - 'split_sections', - 'safe_extra', - 'to_filename', - 'invalid_marker', - 'evaluate_marker', + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', + # filesystem utilities - 'ensure_directory', - 'normalize_path', + 'ensure_directory', 'normalize_path', + # Distribution "precedence" constants - 'EGG_DIST', - 'BINARY_DIST', - 'SOURCE_DIST', - 'CHECKOUT_DIST', - 'DEVELOP_DIST', + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', - 'IResourceProvider', - 'FileMetadata', - 'PathMetadata', - 'EggMetadata', - 'EmptyProvider', - 'empty_provider', - 'NullProvider', - 'EggProvider', - 'DefaultProvider', - 'ZipProvider', - 'register_finder', - 'register_namespace_handler', - 'register_loader_type', - 'fixup_namespace_packages', - 'get_importer', + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 
'get_importer', + # Warnings 'PkgResourcesDeprecationWarning', + # Deprecated/backward compatibility only - 'run_main', - 'AvailableDistributions', + 'run_main', 'AvailableDistributions', ] @@ -342,10 +300,8 @@ def required_by(self): class DistributionNotFound(ResolutionError): """A requested distribution was not found""" - _template = ( - "The '{self.req}' distribution was not found " - "and is required by {self.requirers_str}" - ) + _template = ("The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}") @property def req(self): @@ -439,8 +395,7 @@ def get_build_platform(): version = _macos_vers() machine = os.uname()[4].replace(" ", "_") return "macosx-%d.%d-%s" % ( - int(version[0]), - int(version[1]), + int(version[0]), int(version[1]), _macos_arch(machine), ) except ValueError: @@ -481,18 +436,15 @@ def compatible_platforms(provided, required): if provDarwin: dversion = int(provDarwin.group(1)) macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if ( - dversion == 7 - and macosversion >= "10.3" - or dversion == 8 - and macosversion >= "10.4" - ): + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": return True # egg isn't macOS or legacy darwin return False # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3): + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): return False # is the required OS major update >= the provided one? 
@@ -554,8 +506,8 @@ def get_metadata(name): def get_metadata_lines(name): """Yield named metadata resource as list of non-blank non-comment lines - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" def metadata_isdir(name): """Is the named metadata a directory? (like ``os.path.isdir()``)""" @@ -768,14 +720,9 @@ def add(self, dist, entry=None, insert=True, replace=False): keys2.append(dist.key) self._added_new(dist) - def resolve( - self, - requirements, - env=None, - installer=None, - replace_conflicting=False, - extras=None, - ): + # FIXME: 'WorkingSet.resolve' is too complex (11) + def resolve(self, requirements, env=None, installer=None, # noqa: C901 + replace_conflicting=False, extras=None): """List all distributions needed to (recursively) meet `requirements` `requirements` must be a sequence of ``Requirement`` objects. 
`env`, @@ -824,9 +771,33 @@ def resolve( if not req_extras.markers_pass(req, extras): continue - dist = self._resolve_dist( - req, best, replace_conflicting, env, installer, required_by, to_activate - ) + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws = WorkingSet([]) + dist = best[req.key] = env.best_match( + req, ws, installer, + replace_conflicting=replace_conflicting + ) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) # push the new requirements onto the stack new_requirements = dist.requires(req.extras)[::-1] @@ -842,38 +813,8 @@ def resolve( # return list of distros to activate return to_activate - def _resolve_dist( - self, req, best, replace_conflicting, env, installer, required_by, to_activate - ): - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match( - req, ws, installer, replace_conflicting=replace_conflicting - ) - if dist is None: - requirers = required_by.get(req, None) - raise 
DistributionNotFound(req, requirers) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - dependent_req = required_by[req] - raise VersionConflict(dist, req).with_context(dependent_req) - return dist - - def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True): + def find_plugins( + self, plugin_env, full_env=None, installer=None, fallback=True): """Find all activatable distributions in `plugin_env` Example usage:: @@ -926,7 +867,9 @@ def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True) list(map(shadow_set.add, self)) for project_name in plugin_projects: + for dist in plugin_env[project_name]: + req = [dist.as_requirement()] try: @@ -990,11 +933,8 @@ def _added_new(self, dist): def __getstate__(self): return ( - self.entries[:], - self.entry_keys.copy(), - self.by_key.copy(), - self.normalized_to_canonical_keys.copy(), - self.callbacks[:], + self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.normalized_to_canonical_keys.copy(), self.callbacks[:] ) def __setstate__(self, e_k_b_n_c): @@ -1030,8 +970,8 @@ class Environment: """Searchable snapshot of distributions on a search path""" def __init__( - self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR - ): + self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): """Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. 
@@ -1098,14 +1038,16 @@ def __getitem__(self, project_name): return self._distmap.get(distribution_key, []) def add(self, dist): - """Add `dist` if we ``can_add()`` it and it has not already been added""" + """Add `dist` if we ``can_add()`` it and it has not already been added + """ if self.can_add(dist) and dist.has_version(): dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) - def best_match(self, req, working_set, installer=None, replace_conflicting=False): + def best_match( + self, req, working_set, installer=None, replace_conflicting=False): """Find distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a @@ -1192,7 +1134,6 @@ class ExtractionError(RuntimeError): class ResourceManager: """Manage resource extraction and packages""" - extraction_path = None def __init__(self): @@ -1204,7 +1145,9 @@ def resource_exists(self, package_or_requirement, resource_name): def resource_isdir(self, package_or_requirement, resource_name): """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir(resource_name) + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) def resource_filename(self, package_or_requirement, resource_name): """Return a true filesystem path for specified resource""" @@ -1226,7 +1169,9 @@ def resource_string(self, package_or_requirement, resource_name): def resource_listdir(self, package_or_requirement, resource_name): """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir(resource_name) + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) def extraction_error(self): """Give an error message for problems extracting file(s)""" @@ -1234,8 +1179,7 @@ def extraction_error(self): old_exc = sys.exc_info()[1] 
cache_path = self.extraction_path or get_default_cache() - tmpl = textwrap.dedent( - """ + tmpl = textwrap.dedent(""" Can't extract file(s) to egg cache The following error occurred while trying to extract file(s) @@ -1250,8 +1194,7 @@ def extraction_error(self): Perhaps your account does not have write access to this directory? You can change the cache directory by setting the PYTHON_EGG_CACHE environment variable to point to an accessible directory. - """ - ).lstrip() + """).lstrip() err = ExtractionError(tmpl.format(**locals())) err.manager = self err.cache_path = cache_path @@ -1350,7 +1293,9 @@ def set_extraction_path(self, path): ``cleanup_resources()``.) """ if self.cached_files: - raise ValueError("Can't change extraction path, files already extracted") + raise ValueError( + "Can't change extraction path, files already extracted" + ) self.extraction_path = path @@ -1374,8 +1319,9 @@ def get_default_cache(): or a platform-relevant user cache dir for an app named "Python-Eggs". """ - return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir( - appname='Python-Eggs' + return ( + os.environ.get('PYTHON_EGG_CACHE') + or platformdirs.user_cache_dir(appname='Python-Eggs') ) @@ -1399,38 +1345,6 @@ def safe_version(version): return re.sub('[^A-Za-z0-9.]+', '-', version) -def _forgiving_version(version): - """Fallback when ``safe_version`` is not safe enough - >>> parse_version(_forgiving_version('0.23ubuntu1')) - - >>> parse_version(_forgiving_version('0.23-')) - - >>> parse_version(_forgiving_version('0.-_')) - - >>> parse_version(_forgiving_version('42.+?1')) - - >>> parse_version(_forgiving_version('hello world')) - - """ - version = version.replace(' ', '.') - match = _PEP440_FALLBACK.search(version) - if match: - safe = match["safe"] - rest = version[len(safe):] - else: - safe = "0" - rest = version - local = f"sanitized.{_safe_segment(rest)}".strip(".") - return f"{safe}.dev0+{local}" - - -def _safe_segment(segment): - """Convert an arbitrary 
string into a safe segment""" - segment = re.sub('[^A-Za-z0-9.]+', '-', segment) - segment = re.sub('-[^A-Za-z0-9]+', '-', segment) - return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-") - - def safe_extra(extra): """Convert an arbitrary string to a standard 'extra' name @@ -1544,9 +1458,8 @@ def run_script(self, script_name, namespace): script = 'scripts/' + script_name if not self.has_metadata(script): raise ResolutionError( - "Script {script!r} not found in metadata at {self.egg_info!r}".format( - **locals() - ), + "Script {script!r} not found in metadata at {self.egg_info!r}" + .format(**locals()), ) script_text = self.get_metadata(script).replace('\r\n', '\n') script_text = script_text.replace('\r', '\n') @@ -1559,12 +1472,8 @@ def run_script(self, script_name, namespace): exec(code, namespace, namespace) else: from linecache import cache - cache[script_filename] = ( - len(script_text), - 0, - script_text.split('\n'), - script_filename, + len(script_text), 0, script_text.split('\n'), script_filename ) script_code = compile(script_text, script_filename, 'exec') exec(script_code, namespace, namespace) @@ -1644,9 +1553,9 @@ def _validate_resource_path(path): AttributeError: ... """ invalid = ( - os.path.pardir in path.split(posixpath.sep) - or posixpath.isabs(path) - or ntpath.isabs(path) + os.path.pardir in path.split(posixpath.sep) or + posixpath.isabs(path) or + ntpath.isabs(path) ) if not invalid: return @@ -1728,10 +1637,7 @@ def _get(self, path): @classmethod def _register(cls): - loader_names = ( - 'SourceFileLoader', - 'SourcelessFileLoader', - ) + loader_names = 'SourceFileLoader', 'SourcelessFileLoader', for name in loader_names: loader_cls = getattr(importlib_machinery, name, type(None)) register_loader_type(loader_cls, cls) @@ -1791,7 +1697,6 @@ class MemoizedZipManifests(ZipManifests): """ Memoized zipfile manifests. 
""" - manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') def load(self, path): @@ -1825,16 +1730,20 @@ def _zipinfo_name(self, fspath): if fspath == self.loader.archive: return '' if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre) :] - raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre)) + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.zip_pre) + ) def _parts(self, zip_path): # Convert a zipfile subpath into an egg-relative path part list. # pseudo-fs path fspath = self.zip_pre + zip_path if fspath.startswith(self.egg_root + os.sep): - return fspath[len(self.egg_root) + 1 :].split(os.sep) - raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root)) + return fspath[len(self.egg_root) + 1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.egg_root) + ) @property def zipinfo(self): @@ -1864,20 +1773,25 @@ def _get_date_and_size(zip_stat): # FIXME: 'ZipProvider._extract_resource' is too complex (12) def _extract_resource(self, manager, zip_path): # noqa: C901 + if zip_path in self._index(): for name in self._index()[zip_path]: - last = self._extract_resource(manager, os.path.join(zip_path, name)) + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) # return the extracted directory name return os.path.dirname(last) timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not WRITE_SUPPORT: - raise IOError( - '"os.rename" and "os.unlink" are not supported ' 'on this platform' - ) + raise IOError('"os.rename" and "os.unlink" are not supported ' + 'on this platform') try: - real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path)) + + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) if self._is_current(real_path, zip_path): return real_path @@ -2113,21 +2027,70 @@ def find_nothing(importer, path_item, only=False): 
register_finder(object, find_nothing) +def _by_version_descending(names): + """ + Given a list of filenames, return them in descending order + by version number. + + >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' + >>> _by_version_descending(names) + ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] + """ + def try_parse(name): + """ + Attempt to parse as a version or return a null version. + """ + try: + return packaging.version.Version(name) + except Exception: + return packaging.version.Version('0') + + def _by_version(name): + """ + Parse each component of the filename + """ + name, ext = os.path.splitext(name) + parts = itertools.chain(name.split('-'), [ext]) + return [try_parse(part) for part in parts] + + return sorted(names, key=_by_version, reverse=True) + + def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" path_item = _normalize_cached(path_item) if _is_unpacked_egg(path_item): yield Distribution.from_filename( - path_item, - metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')), + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item, 'EGG-INFO') + ) ) return - entries = (os.path.join(path_item, child) for child in safe_listdir(path_item)) + entries = ( + os.path.join(path_item, child) + for child in safe_listdir(path_item) + ) + + # for performance, before sorting by version, + # screen entries for only those that will yield + # distributions + filtered = ( + entry + for entry in entries + if dist_factory(path_item, entry, only) + ) # scan for .egg and .egg-info in directory - for entry in sorted(entries): + path_item_entries 
= _by_version_descending(filtered) + for entry in path_item_entries: fullpath = os.path.join(path_item, entry) factory = dist_factory(path_item, entry, only) for dist in factory(fullpath): @@ -2138,18 +2101,19 @@ def dist_factory(path_item, entry, only): """Return a dist_factory for the given entry.""" lower = entry.lower() is_egg_info = lower.endswith('.egg-info') - is_dist_info = lower.endswith('.dist-info') and os.path.isdir( - os.path.join(path_item, entry) + is_dist_info = ( + lower.endswith('.dist-info') and + os.path.isdir(os.path.join(path_item, entry)) ) is_meta = is_egg_info or is_dist_info return ( distributions_from_metadata - if is_meta - else find_distributions - if not only and _is_egg_path(entry) - else resolve_egg_link - if not only and lower.endswith('.egg-link') - else NoDists() + if is_meta else + find_distributions + if not only and _is_egg_path(entry) else + resolve_egg_link + if not only and lower.endswith('.egg-link') else + NoDists() ) @@ -2161,7 +2125,6 @@ class NoDists: >>> list(NoDists()('anything')) [] """ - def __bool__(self): return False @@ -2196,10 +2159,7 @@ def distributions_from_metadata(path): metadata = FileMetadata(path) entry = os.path.basename(path) yield Distribution.from_location( - root, - entry, - metadata, - precedence=DEVELOP_DIST, + root, entry, metadata, precedence=DEVELOP_DIST, ) @@ -2221,16 +2181,17 @@ def resolve_egg_link(path): """ referenced_paths = non_empty_lines(path) resolved_paths = ( - os.path.join(os.path.dirname(path), ref) for ref in referenced_paths + os.path.join(os.path.dirname(path), ref) + for ref in referenced_paths ) dist_groups = map(find_distributions, resolved_paths) return next(dist_groups, ()) -if hasattr(pkgutil, 'ImpImporter'): - register_finder(pkgutil.ImpImporter, find_on_path) +register_finder(pkgutil.ImpImporter, find_on_path) -register_finder(importlib_machinery.FileFinder, find_on_path) +if hasattr(importlib_machinery, 'FileFinder'): + register_finder(importlib_machinery.FileFinder, 
find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) @@ -2328,15 +2289,6 @@ def position_in_sys_path(path): def declare_namespace(packageName): """Declare that package 'packageName' is a namespace package""" - msg = ( - f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n" - "Implementing implicit namespace packages (as specified in PEP 420) " - "is preferred to `pkg_resources.declare_namespace`. " - "See https://setuptools.pypa.io/en/latest/references/" - "keywords.html#keyword-namespace-packages" - ) - warnings.warn(msg, DeprecationWarning, stacklevel=2) - _imp.acquire_lock() try: if packageName in _namespace_packages: @@ -2393,11 +2345,11 @@ def file_ns_handler(importer, path_item, packageName, module): return subpath -if hasattr(pkgutil, 'ImpImporter'): - register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) - +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) register_namespace_handler(zipimport.zipimporter, file_ns_handler) -register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) + +if hasattr(importlib_machinery, 'FileFinder'): + register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): @@ -2409,7 +2361,8 @@ def null_ns_handler(importer, path_item, packageName, module): def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) + return os.path.normcase(os.path.realpath(os.path.normpath( + _cygwin_patch(filename)))) def _cygwin_patch(filename): # pragma: nocover @@ -2440,9 +2393,9 @@ def _is_egg_path(path): def _is_zip_egg(path): return ( - path.lower().endswith('.egg') - and os.path.isfile(path) - and zipfile.is_zipfile(path) + path.lower().endswith('.egg') and + os.path.isfile(path) and + zipfile.is_zipfile(path) ) @@ -2450,8 +2403,9 @@ def 
_is_unpacked_egg(path): """ Determine if given path appears to be an unpacked egg. """ - return path.lower().endswith('.egg') and os.path.isfile( - os.path.join(path, 'EGG-INFO', 'PKG-INFO') + return ( + path.lower().endswith('.egg') and + os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) ) @@ -2615,10 +2569,8 @@ def _version_from_file(lines): Given an iterable of lines from a Metadata file, return the value of the Version field, if present, or None otherwise. """ - def is_version_line(line): return line.lower().startswith('version:') - version_lines = filter(is_version_line, lines) line = next(iter(version_lines), '') _, _, value = line.partition(':') @@ -2627,19 +2579,12 @@ def is_version_line(line): class Distribution: """Wrap an actual or potential sys.path entry w/metadata""" - PKG_INFO = 'PKG-INFO' def __init__( - self, - location=None, - metadata=None, - project_name=None, - version=None, - py_version=PY_MAJOR, - platform=None, - precedence=EGG_DIST, - ): + self, location=None, metadata=None, project_name=None, + version=None, py_version=PY_MAJOR, platform=None, + precedence=EGG_DIST): self.project_name = safe_name(project_name or 'Unknown') if version is not None: self._version = safe_version(version) @@ -2662,13 +2607,8 @@ def from_location(cls, location, basename, metadata=None, **kw): 'name', 'ver', 'pyver', 'plat' ) return cls( - location, - metadata, - project_name=project_name, - version=version, - py_version=py_version, - platform=platform, - **kw, + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw )._reload_version() def _reload_version(self): @@ -2677,7 +2617,7 @@ def _reload_version(self): @property def hashcmp(self): return ( - self._forgiving_parsed_version, + self.parsed_version, self.precedence, self.key, self.location, @@ -2724,42 +2664,35 @@ def key(self): @property def parsed_version(self): if not hasattr(self, "_parsed_version"): - try: - self._parsed_version = 
parse_version(self.version) - except packaging.version.InvalidVersion as ex: - info = f"(package: {self.project_name})" - if hasattr(ex, "add_note"): - ex.add_note(info) # PEP 678 - raise - raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None + self._parsed_version = parse_version(self.version) return self._parsed_version - @property - def _forgiving_parsed_version(self): - try: - return self.parsed_version - except packaging.version.InvalidVersion as ex: - self._parsed_version = parse_version(_forgiving_version(self.version)) - - notes = "\n".join(getattr(ex, "__notes__", [])) # PEP 678 - msg = f"""!!\n\n - ************************************************************************* - {str(ex)}\n{notes} + def _warn_legacy_version(self): + LV = packaging.version.LegacyVersion + is_legacy = isinstance(self._parsed_version, LV) + if not is_legacy: + return - This is a long overdue deprecation. - For the time being, `pkg_resources` will use `{self._parsed_version}` - as a replacement to avoid breaking existing environments, - but no future compatibility is guaranteed. + # While an empty version is technically a legacy version and + # is not a valid PEP 440 version, it's also unlikely to + # actually come from someone and instead it is more likely that + # it comes from setuptools attempting to parse a filename and + # including it in the list. So for that we'll gate this warning + # on if the version is anything at all or not. + if not self.version: + return - If you maintain package {self.project_name} you should implement - the relevant changes to adequate the project to PEP 440 immediately. - ************************************************************************* - \n\n!! - """ - warnings.warn(msg, DeprecationWarning) + tmpl = textwrap.dedent(""" + '{project_name} ({version})' is being parsed as a legacy, + non PEP 440, + version. You may find odd behavior and sort order. + In particular it will be sorted as less than 0.0. 
It + is recommended to migrate to PEP 440 compatible + versions. + """).strip().replace('\n', ' ') - return self._parsed_version + warnings.warn(tmpl.format(**vars(self)), PEP440Warning) @property def version(self): @@ -2769,9 +2702,9 @@ def version(self): version = self._get_version() if version is None: path = self._get_metadata_path_for_display(self.PKG_INFO) - msg = ("Missing 'Version:' header and/or {} file at path: {}").format( - self.PKG_INFO, path - ) + msg = ( + "Missing 'Version:' header and/or {} file at path: {}" + ).format(self.PKG_INFO, path) raise ValueError(msg, self) from e return version @@ -2800,7 +2733,8 @@ def _filter_extras(dm): reqs = dm.pop(extra) new_extra, _, marker = extra.partition(':') fails_marker = marker and ( - invalid_marker(marker) or not evaluate_marker(marker) + invalid_marker(marker) + or not evaluate_marker(marker) ) if fails_marker: reqs = [] @@ -2872,9 +2806,8 @@ def activate(self, path=None, replace=False): def egg_name(self): """Return what this distribution's standard .egg filename should be""" filename = "%s-%s-py%s" % ( - to_filename(self.project_name), - to_filename(self.version), - self.py_version or PY_MAJOR, + to_filename(self.project_name), to_filename(self.version), + self.py_version or PY_MAJOR ) if self.platform: @@ -2904,13 +2837,17 @@ def __getattr__(self, attr): def __dir__(self): return list( set(super(Distribution, self).__dir__()) - | set(attr for attr in self._provider.__dir__() if not attr.startswith('_')) + | set( + attr for attr in self._provider.__dir__() + if not attr.startswith('_') + ) ) @classmethod def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, **kw + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw ) def as_requirement(self): @@ -3022,18 +2959,14 @@ def check_version_conflict(self): nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) loc = 
normalize_path(self.location) for modname in self._get_metadata('top_level.txt'): - if ( - modname not in sys.modules - or modname in nsp - or modname in _namespace_packages - ): + if (modname not in sys.modules or modname in nsp + or modname in _namespace_packages): continue if modname in ('pkg_resources', 'setuptools', 'site'): continue fn = getattr(sys.modules[modname], '__file__', None) - if fn and ( - normalize_path(fn).startswith(loc) or fn.startswith(self.location) - ): + if fn and (normalize_path(fn).startswith(loc) or + fn.startswith(self.location)): continue issue_warning( "Module %s was already imported from %s, but %s is being added" @@ -3085,7 +3018,6 @@ class DistInfoDistribution(Distribution): Wrap an actual or potential sys.path entry w/metadata, .dist-info style. """ - PKG_INFO = 'METADATA' EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") @@ -3171,7 +3103,8 @@ def __init__(self, requirement_string): self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() - self.specs = [(spec.operator, spec.version) for spec in self.specifier] + self.specs = [ + (spec.operator, spec.version) for spec in self.specifier] self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, @@ -3183,7 +3116,10 @@ def __init__(self, requirement_string): self.__hash = hash(self.hashCmp) def __eq__(self, other): - return isinstance(other, Requirement) and self.hashCmp == other.hashCmp + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) def __ne__(self, other): return not self == other @@ -3208,7 +3144,7 @@ def __repr__(self): @staticmethod def parse(s): - (req,) = parse_requirements(s) + req, = parse_requirements(s) return req @@ -3346,7 +3282,10 @@ def _initialize_master_working_set(): # ensure that all distributions added to the working set in the future # (e.g. by calling ``require()``) will get activated as well, # with higher priority (replace=True). 
- tuple(dist.activate(replace=False) for dist in working_set) + tuple( + dist.activate(replace=False) + for dist in working_set + ) add_activation_listener( lambda dist: dist.activate(replace=True), existing=False, diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 3974df3f11b..f15208b8bcd 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -16,7 +16,7 @@ rich==13.3.3 pygments==2.14.0 typing_extensions==4.5.0 resolvelib==1.0.1 -setuptools==67.6.1 +setuptools==65.6.3 six==1.16.0 tenacity==8.2.2 tomli==2.0.1 diff --git a/tools/vendoring/patches/pkg_resources.patch b/tools/vendoring/patches/pkg_resources.patch index 48ae954311b..39bb2eac253 100644 --- a/tools/vendoring/patches/pkg_resources.patch +++ b/tools/vendoring/patches/pkg_resources.patch @@ -1,3 +1,25 @@ +diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py +index d59226af9..3b9565893 100644 +--- a/src/pip/_vendor/pkg_resources/__init__.py ++++ b/src/pip/_vendor/pkg_resources/__init__.py +@@ -77,7 +77,7 @@ + join_continuation, + ) + +-from pkg_resources.extern import appdirs ++from pkg_resources.extern import platformdirs + from pkg_resources.extern import packaging + __import__('pkg_resources.extern.packaging.version') + __import__('pkg_resources.extern.packaging.specifiers') +@@ -1321,7 +1321,7 @@ def get_default_cache(): + """ + return ( + os.environ.get('PYTHON_EGG_CACHE') +- or appdirs.user_cache_dir(appname='Python-Eggs') ++ or platformdirs.user_cache_dir(appname='Python-Eggs') + ) + + diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 3f2476a0c..8d5727d35 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py From 7ba5a625bc95599e6f564285af84f39d24c109f8 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 21 Apr 2023 17:05:49 +0100 Subject: [PATCH 451/730] Revert "Merge pull request #11487 from pelson/feature/base-prefix-config" This reverts commit 
56e5fa3c0fd0544e7b5b9b89d9d7854b82d51242, reversing changes made to 2c09e9c760e67fd801b3e756357de3b3c82cd08d. --- docs/html/topics/configuration.md | 21 +++++-------------- src/pip/_internal/configuration.py | 18 +++------------- tests/unit/test_configuration.py | 33 ++---------------------------- tests/unit/test_options.py | 2 +- 4 files changed, 11 insertions(+), 63 deletions(-) diff --git a/docs/html/topics/configuration.md b/docs/html/topics/configuration.md index 521bc9af4b9..e4aafcd2b98 100644 --- a/docs/html/topics/configuration.md +++ b/docs/html/topics/configuration.md @@ -19,14 +19,13 @@ and how they are related to pip's various command line options. ## Configuration Files -Configuration files can change the default values for command line options. -They are written using standard INI style configuration files. +Configuration files can change the default values for command line option. +They are written using a standard INI style configuration files. -pip has 4 "levels" of configuration files: +pip has 3 "levels" of configuration files: -- `global`: system-wide configuration file, shared across all users. -- `user`: per-user configuration file, shared across all environments. -- `base` : per-base environment configuration file, shared across all virtualenvs with the same base. (available since pip 23.0) +- `global`: system-wide configuration file, shared across users. +- `user`: per-user configuration file. - `site`: per-environment configuration file; i.e. per-virtualenv. ### Location @@ -48,9 +47,6 @@ User The legacy "per-user" configuration file is also loaded, if it exists: {file}`$HOME/.pip/pip.conf`. -Base -: {file}`\{sys.base_prefix\}/pip.conf` - Site : {file}`$VIRTUAL_ENV/pip.conf` ``` @@ -67,9 +63,6 @@ User The legacy "per-user" configuration file is also loaded, if it exists: {file}`$HOME/.pip/pip.conf`. 
-Base -: {file}`\{sys.base_prefix\}/pip.conf` - Site : {file}`$VIRTUAL_ENV/pip.conf` ``` @@ -88,9 +81,6 @@ User The legacy "per-user" configuration file is also loaded, if it exists: {file}`%HOME%\\pip\\pip.ini` -Base -: {file}`\{sys.base_prefix\}\\pip.ini` - Site : {file}`%VIRTUAL_ENV%\\pip.ini` ``` @@ -112,7 +102,6 @@ order: - `PIP_CONFIG_FILE`, if given. - Global - User -- Base - Site Each file read overrides any values read from previous files, so if the diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 6cce8bcbcce..8fd46c9b8e0 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -36,20 +36,12 @@ kinds = enum( USER="user", # User Specific GLOBAL="global", # System Wide - BASE="base", # Base environment specific (e.g. for all venvs with the same base) - SITE="site", # Environment Specific (e.g. per venv) + SITE="site", # [Virtual] Environment Specific ENV="env", # from PIP_CONFIG_FILE ENV_VAR="env-var", # from Environment Variables ) -OVERRIDE_ORDER = ( - kinds.GLOBAL, - kinds.USER, - kinds.BASE, - kinds.SITE, - kinds.ENV, - kinds.ENV_VAR, -) -VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.BASE, kinds.SITE +OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR +VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE logger = getLogger(__name__) @@ -78,7 +70,6 @@ def get_configuration_files() -> Dict[Kind, List[str]]: os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip") ] - base_config_file = os.path.join(sys.base_prefix, CONFIG_BASENAME) site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME) legacy_config_file = os.path.join( os.path.expanduser("~"), @@ -87,7 +78,6 @@ def get_configuration_files() -> Dict[Kind, List[str]]: ) new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME) return { - kinds.BASE: [base_config_file], kinds.GLOBAL: global_config_files, kinds.SITE: [site_config_file], kinds.USER: 
[legacy_config_file, new_config_file], @@ -354,8 +344,6 @@ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: # The legacy config file is overridden by the new config file yield kinds.USER, config_files[kinds.USER] - yield kinds.BASE, config_files[kinds.BASE] - # finally virtualenv configuration first trumping others yield kinds.SITE, config_files[kinds.SITE] diff --git a/tests/unit/test_configuration.py b/tests/unit/test_configuration.py index b0d655d8fb6..c6b44d45aad 100644 --- a/tests/unit/test_configuration.py +++ b/tests/unit/test_configuration.py @@ -24,17 +24,11 @@ def test_user_loading(self) -> None: self.configuration.load() assert self.configuration.get_value("test.hello") == "2" - def test_base_loading(self) -> None: - self.patch_configuration(kinds.BASE, {"test.hello": "3"}) - - self.configuration.load() - assert self.configuration.get_value("test.hello") == "3" - def test_site_loading(self) -> None: - self.patch_configuration(kinds.SITE, {"test.hello": "4"}) + self.patch_configuration(kinds.SITE, {"test.hello": "3"}) self.configuration.load() - assert self.configuration.get_value("test.hello") == "4" + assert self.configuration.get_value("test.hello") == "3" def test_environment_config_loading(self, monkeypatch: pytest.MonkeyPatch) -> None: contents = """ @@ -113,15 +107,6 @@ def test_no_such_key_error_message_missing_option(self) -> None: with pytest.raises(ConfigurationError, match=pat): self.configuration.get_value("global.index-url") - def test_overrides_normalization(self) -> None: - # Check that normalized names are used in precedence calculations. - # Reminder: USER has higher precedence than GLOBAL. 
- self.patch_configuration(kinds.USER, {"test.hello-world": "1"}) - self.patch_configuration(kinds.GLOBAL, {"test.hello_world": "0"}) - self.configuration.load() - - assert self.configuration.get_value("test.hello_world") == "1" - class TestConfigurationPrecedence(ConfigurationMixin): # Tests for methods to that determine the order of precedence of @@ -148,13 +133,6 @@ def test_env_overides_global(self) -> None: assert self.configuration.get_value("test.hello") == "0" - def test_site_overides_base(self) -> None: - self.patch_configuration(kinds.BASE, {"test.hello": "2"}) - self.patch_configuration(kinds.SITE, {"test.hello": "1"}) - self.configuration.load() - - assert self.configuration.get_value("test.hello") == "1" - def test_site_overides_user(self) -> None: self.patch_configuration(kinds.USER, {"test.hello": "2"}) self.patch_configuration(kinds.SITE, {"test.hello": "1"}) @@ -169,13 +147,6 @@ def test_site_overides_global(self) -> None: assert self.configuration.get_value("test.hello") == "1" - def test_base_overides_user(self) -> None: - self.patch_configuration(kinds.USER, {"test.hello": "2"}) - self.patch_configuration(kinds.BASE, {"test.hello": "1"}) - self.configuration.load() - - assert self.configuration.get_value("test.hello") == "1" - def test_user_overides_global(self) -> None: self.patch_configuration(kinds.GLOBAL, {"test.hello": "3"}) self.patch_configuration(kinds.USER, {"test.hello": "2"}) diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py index 9e3a0a5d633..43d5fdd3d75 100644 --- a/tests/unit/test_options.py +++ b/tests/unit/test_options.py @@ -587,7 +587,7 @@ def test_venv_config_file_found(self, monkeypatch: pytest.MonkeyPatch) -> None: for _, val in cp.iter_config_files(): files.extend(val) - assert len(files) == 5 + assert len(files) == 4 @pytest.mark.parametrize( "args, expect", From b64e74de42d0a6e4f3a81a06b7fd33eea00b7160 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 21 Apr 2023 17:23:38 +0100 Subject: [PATCH 
452/730] Add a news file --- news/11987.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11987.bugfix.rst diff --git a/news/11987.bugfix.rst b/news/11987.bugfix.rst new file mode 100644 index 00000000000..de03de64088 --- /dev/null +++ b/news/11987.bugfix.rst @@ -0,0 +1 @@ +Revert `#11487 `_, as it causes issues with virtualenvs created by the Windows Store distribution of Python. From ee40d71817df8346af3d96051a298db8f22e52f1 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 22 Apr 2023 10:13:28 +0100 Subject: [PATCH 453/730] Bump for release --- NEWS.rst | 20 ++++++++++++++++++++ news/11967.doc.rst | 2 -- news/11987.bugfix.rst | 1 - news/setuptools.vendor.rst | 1 - src/pip/__init__.py | 2 +- 5 files changed, 21 insertions(+), 5 deletions(-) delete mode 100644 news/11967.doc.rst delete mode 100644 news/11987.bugfix.rst delete mode 100644 news/setuptools.vendor.rst diff --git a/NEWS.rst b/NEWS.rst index da8c8726a1d..a5c27f34a2b 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,26 @@ .. towncrier release notes start +23.1.1 (2023-04-22) +=================== + +Bug Fixes +--------- + +- Revert `#11487 `_, as it causes issues with virtualenvs created by the Windows Store distribution of Python. (`#11987 `_) + +Vendored Libraries +------------------ + +- Revert pkg_resources (via setuptools) back to 65.6.3 + +Improved Documentation +---------------------- + +- Update documentation to reflect the new behavior of using the cache of locally + built wheels in hash-checking mode. (`#11967 `_) + + 23.1 (2023-04-15) ================= diff --git a/news/11967.doc.rst b/news/11967.doc.rst deleted file mode 100644 index 4d9a23b033f..00000000000 --- a/news/11967.doc.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update documentation to reflect the new behavior of using the cache of locally -built wheels in hash-checking mode. 
diff --git a/news/11987.bugfix.rst b/news/11987.bugfix.rst deleted file mode 100644 index de03de64088..00000000000 --- a/news/11987.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Revert `#11487 `_, as it causes issues with virtualenvs created by the Windows Store distribution of Python. diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst deleted file mode 100644 index 2d6480a898b..00000000000 --- a/news/setuptools.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Revert pkg_resources (via setuptools) back to 65.6.3 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 20d8bf56c17..b4e13628c9d 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.2.dev0" +__version__ = "23.1.1" def main(args: Optional[List[str]] = None) -> int: From 5cbf00c49e2a6974d794534d8041cf370218e98d Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 22 Apr 2023 10:13:28 +0100 Subject: [PATCH 454/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index b4e13628c9d..20d8bf56c17 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.1.1" +__version__ = "23.2.dev0" def main(args: Optional[List[str]] = None) -> int: From 8295c9941b6516ccaaba9c91af025cb2e373dba0 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 24 Apr 2023 12:29:05 -0600 Subject: [PATCH 455/730] Deprecate .egg in the imporlib-metadata backend This provides us a path to remove all pkg_resources usages on Python 3.11 or later, and thus avoid the problem that pkg_resources uses Python API deprecated in 3.12. 
--- news/11996.process.rst | 1 + src/pip/_internal/metadata/importlib/_envs.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 news/11996.process.rst diff --git a/news/11996.process.rst b/news/11996.process.rst new file mode 100644 index 00000000000..d585bd39183 --- /dev/null +++ b/news/11996.process.rst @@ -0,0 +1 @@ +Deprecate support for eggs for Python 3.11 or later, when the new ``importlib.metadata`` backend is used to load distribution metadata. This only affects the egg *distribution format* (with the ``.egg`` extension); distributions using the ``.egg-info`` *metadata format* (but are not actually eggs) are not affected. For more information about eggs, see `relevant section in the setuptools documentation `__. diff --git a/src/pip/_internal/metadata/importlib/_envs.py b/src/pip/_internal/metadata/importlib/_envs.py index cbec59e2c6d..3850ddaf412 100644 --- a/src/pip/_internal/metadata/importlib/_envs.py +++ b/src/pip/_internal/metadata/importlib/_envs.py @@ -151,7 +151,7 @@ def _emit_egg_deprecation(location: Optional[str]) -> None: deprecated( reason=f"Loading egg at {location} is deprecated.", replacement="to use pip for package installation.", - gone_in=None, + gone_in="23.3", ) @@ -174,7 +174,7 @@ def _iter_distributions(self) -> Iterator[BaseDistribution]: for location in self._paths: yield from finder.find(location) for dist in finder.find_eggs(location): - # _emit_egg_deprecation(dist.location) # TODO: Enable this. + _emit_egg_deprecation(dist.location) yield dist # This must go last because that's how pkg_resources tie-breaks. 
yield from finder.find_linked(location) From f1a7a6f942b869ae3428dbf55dfc1ca756c78d94 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 2 Apr 2023 14:34:24 +0100 Subject: [PATCH 456/730] Upgrade setuptools to 67.6.1 --- news/setuptools.vendor.rst | 1 + src/pip/_vendor/pkg_resources/__init__.py | 599 +++++++++++--------- src/pip/_vendor/vendor.txt | 2 +- tools/vendoring/patches/pkg_resources.patch | 22 - 4 files changed, 332 insertions(+), 292 deletions(-) create mode 100644 news/setuptools.vendor.rst diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst new file mode 100644 index 00000000000..9cf3f49e21c --- /dev/null +++ b/news/setuptools.vendor.rst @@ -0,0 +1 @@ +Upgrade setuptools to 67.6.1 diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 0ec74f8a6ef..a85aca10f7c 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -12,6 +12,12 @@ .egg files, and unpacked .egg files. It can also work in a limited way with .zip files and with custom PEP 302 loaders that support the ``get_data()`` method. + +This module is deprecated. Users are directed to +`importlib.resources `_ +and +`importlib.metadata `_ +instead. """ import sys @@ -34,7 +40,6 @@ import errno import tempfile import textwrap -import itertools import inspect import ntpath import posixpath @@ -54,8 +59,10 @@ # capture these to bypass sandboxing from os import utime + try: from os import mkdir, rename, unlink + WRITE_SUPPORT = True except ImportError: # no write support, probably under GAE @@ -66,6 +73,7 @@ try: import importlib.machinery as importlib_machinery + # access attribute to force import under delayed import mechanisms. 
importlib_machinery.__name__ except ImportError: @@ -79,6 +87,7 @@ from pip._vendor import platformdirs from pip._vendor import packaging + __import__('pip._vendor.packaging.version') __import__('pip._vendor.packaging.specifiers') __import__('pip._vendor.packaging.requirements') @@ -109,6 +118,12 @@ _namespace_packages = None +warnings.warn("pkg_resources is deprecated as an API", DeprecationWarning) + + +_PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) + + class PEP440Warning(RuntimeWarning): """ Used when there is an issue with a version or specifier not complying with @@ -116,16 +131,7 @@ class PEP440Warning(RuntimeWarning): """ -def parse_version(v): - try: - return packaging.version.Version(v) - except packaging.version.InvalidVersion: - warnings.warn( - f"{v} is an invalid version and will not be supported in " - "a future release", - PkgResourcesDeprecationWarning, - ) - return packaging.version.LegacyVersion(v) +parse_version = packaging.version.Version _state_vars = {} @@ -197,51 +203,87 @@ def get_supported_platform(): __all__ = [ # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'require', + 'run_script', + 'get_provider', + 'get_distribution', + 'load_entry_point', + 'get_entry_map', + 'get_entry_info', 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - + 'resource_string', + 'resource_stream', + 'resource_filename', + 'resource_listdir', + 'resource_exists', + 'resource_isdir', # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'declare_namespace', + 'working_set', + 'add_activation_listener', + 'find_distributions', + 'set_extraction_path', + 'cleanup_resources', 'get_default_cache', - # 
Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - + 'Environment', + 'WorkingSet', + 'ResourceManager', + 'Distribution', + 'Requirement', + 'EntryPoint', # Exceptions - 'ResolutionError', 'VersionConflict', 'DistributionNotFound', - 'UnknownExtra', 'ExtractionError', - + 'ResolutionError', + 'VersionConflict', + 'DistributionNotFound', + 'UnknownExtra', + 'ExtractionError', # Warnings 'PEP440Warning', - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', - + 'parse_requirements', + 'parse_version', + 'safe_name', + 'safe_version', + 'get_platform', + 'compatible_platforms', + 'yield_lines', + 'split_sections', + 'safe_extra', + 'to_filename', + 'invalid_marker', + 'evaluate_marker', # filesystem utilities - 'ensure_directory', 'normalize_path', - + 'ensure_directory', + 'normalize_path', # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - + 'EGG_DIST', + 'BINARY_DIST', + 'SOURCE_DIST', + 'CHECKOUT_DIST', + 'DEVELOP_DIST', # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - + 'IMetadataProvider', + 'IResourceProvider', + 'FileMetadata', + 'PathMetadata', + 'EggMetadata', + 'EmptyProvider', + 'empty_provider', + 'NullProvider', + 'EggProvider', + 'DefaultProvider', + 'ZipProvider', + 'register_finder', + 'register_namespace_handler', + 'register_loader_type', + 'fixup_namespace_packages', + 
'get_importer', # Warnings 'PkgResourcesDeprecationWarning', - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', + 'run_main', + 'AvailableDistributions', ] @@ -300,8 +342,10 @@ def required_by(self): class DistributionNotFound(ResolutionError): """A requested distribution was not found""" - _template = ("The '{self.req}' distribution was not found " - "and is required by {self.requirers_str}") + _template = ( + "The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}" + ) @property def req(self): @@ -395,7 +439,8 @@ def get_build_platform(): version = _macos_vers() machine = os.uname()[4].replace(" ", "_") return "macosx-%d.%d-%s" % ( - int(version[0]), int(version[1]), + int(version[0]), + int(version[1]), _macos_arch(machine), ) except ValueError: @@ -436,15 +481,18 @@ def compatible_platforms(provided, required): if provDarwin: dversion = int(provDarwin.group(1)) macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": + if ( + dversion == 7 + and macosversion >= "10.3" + or dversion == 8 + and macosversion >= "10.4" + ): return True # egg isn't macOS or legacy darwin return False # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): + if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3): return False # is the required OS major update >= the provided one? 
@@ -506,8 +554,8 @@ def get_metadata(name): def get_metadata_lines(name): """Yield named metadata resource as list of non-blank non-comment lines - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" def metadata_isdir(name): """Is the named metadata a directory? (like ``os.path.isdir()``)""" @@ -720,9 +768,14 @@ def add(self, dist, entry=None, insert=True, replace=False): keys2.append(dist.key) self._added_new(dist) - # FIXME: 'WorkingSet.resolve' is too complex (11) - def resolve(self, requirements, env=None, installer=None, # noqa: C901 - replace_conflicting=False, extras=None): + def resolve( + self, + requirements, + env=None, + installer=None, + replace_conflicting=False, + extras=None, + ): """List all distributions needed to (recursively) meet `requirements` `requirements` must be a sequence of ``Requirement`` objects. 
`env`, @@ -771,33 +824,9 @@ def resolve(self, requirements, env=None, installer=None, # noqa: C901 if not req_extras.markers_pass(req, extras): continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match( - req, ws, installer, - replace_conflicting=replace_conflicting - ) - if dist is None: - requirers = required_by.get(req, None) - raise DistributionNotFound(req, requirers) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - dependent_req = required_by[req] - raise VersionConflict(dist, req).with_context(dependent_req) + dist = self._resolve_dist( + req, best, replace_conflicting, env, installer, required_by, to_activate + ) # push the new requirements onto the stack new_requirements = dist.requires(req.extras)[::-1] @@ -813,8 +842,38 @@ def resolve(self, requirements, env=None, installer=None, # noqa: C901 # return list of distros to activate return to_activate - def find_plugins( - self, plugin_env, full_env=None, installer=None, fallback=True): + def _resolve_dist( + self, req, best, replace_conflicting, env, installer, required_by, to_activate + ): + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws 
= WorkingSet([]) + dist = best[req.key] = env.best_match( + req, ws, installer, replace_conflicting=replace_conflicting + ) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + return dist + + def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True): """Find all activatable distributions in `plugin_env` Example usage:: @@ -867,9 +926,7 @@ def find_plugins( list(map(shadow_set.add, self)) for project_name in plugin_projects: - for dist in plugin_env[project_name]: - req = [dist.as_requirement()] try: @@ -933,8 +990,11 @@ def _added_new(self, dist): def __getstate__(self): return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.normalized_to_canonical_keys.copy(), self.callbacks[:] + self.entries[:], + self.entry_keys.copy(), + self.by_key.copy(), + self.normalized_to_canonical_keys.copy(), + self.callbacks[:], ) def __setstate__(self, e_k_b_n_c): @@ -970,8 +1030,8 @@ class Environment: """Searchable snapshot of distributions on a search path""" def __init__( - self, search_path=None, platform=get_supported_platform(), - python=PY_MAJOR): + self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR + ): """Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. 
@@ -1038,16 +1098,14 @@ def __getitem__(self, project_name): return self._distmap.get(distribution_key, []) def add(self, dist): - """Add `dist` if we ``can_add()`` it and it has not already been added - """ + """Add `dist` if we ``can_add()`` it and it has not already been added""" if self.can_add(dist) and dist.has_version(): dists = self._distmap.setdefault(dist.key, []) if dist not in dists: dists.append(dist) dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) - def best_match( - self, req, working_set, installer=None, replace_conflicting=False): + def best_match(self, req, working_set, installer=None, replace_conflicting=False): """Find distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a @@ -1134,6 +1192,7 @@ class ExtractionError(RuntimeError): class ResourceManager: """Manage resource extraction and packages""" + extraction_path = None def __init__(self): @@ -1145,9 +1204,7 @@ def resource_exists(self, package_or_requirement, resource_name): def resource_isdir(self, package_or_requirement, resource_name): """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) + return get_provider(package_or_requirement).resource_isdir(resource_name) def resource_filename(self, package_or_requirement, resource_name): """Return a true filesystem path for specified resource""" @@ -1169,9 +1226,7 @@ def resource_string(self, package_or_requirement, resource_name): def resource_listdir(self, package_or_requirement, resource_name): """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) + return get_provider(package_or_requirement).resource_listdir(resource_name) def extraction_error(self): """Give an error message for problems extracting file(s)""" @@ -1179,7 +1234,8 @@ def extraction_error(self): old_exc = sys.exc_info()[1] 
cache_path = self.extraction_path or get_default_cache() - tmpl = textwrap.dedent(""" + tmpl = textwrap.dedent( + """ Can't extract file(s) to egg cache The following error occurred while trying to extract file(s) @@ -1194,7 +1250,8 @@ def extraction_error(self): Perhaps your account does not have write access to this directory? You can change the cache directory by setting the PYTHON_EGG_CACHE environment variable to point to an accessible directory. - """).lstrip() + """ + ).lstrip() err = ExtractionError(tmpl.format(**locals())) err.manager = self err.cache_path = cache_path @@ -1293,9 +1350,7 @@ def set_extraction_path(self, path): ``cleanup_resources()``.) """ if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) + raise ValueError("Can't change extraction path, files already extracted") self.extraction_path = path @@ -1319,9 +1374,8 @@ def get_default_cache(): or a platform-relevant user cache dir for an app named "Python-Eggs". """ - return ( - os.environ.get('PYTHON_EGG_CACHE') - or platformdirs.user_cache_dir(appname='Python-Eggs') + return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir( + appname='Python-Eggs' ) @@ -1345,6 +1399,38 @@ def safe_version(version): return re.sub('[^A-Za-z0-9.]+', '-', version) +def _forgiving_version(version): + """Fallback when ``safe_version`` is not safe enough + >>> parse_version(_forgiving_version('0.23ubuntu1')) + + >>> parse_version(_forgiving_version('0.23-')) + + >>> parse_version(_forgiving_version('0.-_')) + + >>> parse_version(_forgiving_version('42.+?1')) + + >>> parse_version(_forgiving_version('hello world')) + + """ + version = version.replace(' ', '.') + match = _PEP440_FALLBACK.search(version) + if match: + safe = match["safe"] + rest = version[len(safe):] + else: + safe = "0" + rest = version + local = f"sanitized.{_safe_segment(rest)}".strip(".") + return f"{safe}.dev0+{local}" + + +def _safe_segment(segment): + """Convert an arbitrary 
string into a safe segment""" + segment = re.sub('[^A-Za-z0-9.]+', '-', segment) + segment = re.sub('-[^A-Za-z0-9]+', '-', segment) + return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-") + + def safe_extra(extra): """Convert an arbitrary string to a standard 'extra' name @@ -1458,8 +1544,9 @@ def run_script(self, script_name, namespace): script = 'scripts/' + script_name if not self.has_metadata(script): raise ResolutionError( - "Script {script!r} not found in metadata at {self.egg_info!r}" - .format(**locals()), + "Script {script!r} not found in metadata at {self.egg_info!r}".format( + **locals() + ), ) script_text = self.get_metadata(script).replace('\r\n', '\n') script_text = script_text.replace('\r', '\n') @@ -1472,8 +1559,12 @@ def run_script(self, script_name, namespace): exec(code, namespace, namespace) else: from linecache import cache + cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename + len(script_text), + 0, + script_text.split('\n'), + script_filename, ) script_code = compile(script_text, script_filename, 'exec') exec(script_code, namespace, namespace) @@ -1553,9 +1644,9 @@ def _validate_resource_path(path): AttributeError: ... """ invalid = ( - os.path.pardir in path.split(posixpath.sep) or - posixpath.isabs(path) or - ntpath.isabs(path) + os.path.pardir in path.split(posixpath.sep) + or posixpath.isabs(path) + or ntpath.isabs(path) ) if not invalid: return @@ -1637,7 +1728,10 @@ def _get(self, path): @classmethod def _register(cls): - loader_names = 'SourceFileLoader', 'SourcelessFileLoader', + loader_names = ( + 'SourceFileLoader', + 'SourcelessFileLoader', + ) for name in loader_names: loader_cls = getattr(importlib_machinery, name, type(None)) register_loader_type(loader_cls, cls) @@ -1697,6 +1791,7 @@ class MemoizedZipManifests(ZipManifests): """ Memoized zipfile manifests. 
""" + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') def load(self, path): @@ -1730,20 +1825,16 @@ def _zipinfo_name(self, fspath): if fspath == self.loader.archive: return '' if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.zip_pre) - ) + return fspath[len(self.zip_pre) :] + raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre)) def _parts(self, zip_path): # Convert a zipfile subpath into an egg-relative path part list. # pseudo-fs path fspath = self.zip_pre + zip_path if fspath.startswith(self.egg_root + os.sep): - return fspath[len(self.egg_root) + 1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath, self.egg_root) - ) + return fspath[len(self.egg_root) + 1 :].split(os.sep) + raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root)) @property def zipinfo(self): @@ -1773,25 +1864,20 @@ def _get_date_and_size(zip_stat): # FIXME: 'ZipProvider._extract_resource' is too complex (12) def _extract_resource(self, manager, zip_path): # noqa: C901 - if zip_path in self._index(): for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) + last = self._extract_resource(manager, os.path.join(zip_path, name)) # return the extracted directory name return os.path.dirname(last) timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - try: - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) + raise IOError( + '"os.rename" and "os.unlink" are not supported ' 'on this platform' ) + try: + real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path)) if self._is_current(real_path, zip_path): return real_path @@ -2027,70 +2113,21 @@ def find_nothing(importer, path_item, only=False): 
register_finder(object, find_nothing) -def _by_version_descending(names): - """ - Given a list of filenames, return them in descending order - by version number. - - >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' - >>> _by_version_descending(names) - ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'bar', 'foo'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] - >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' - >>> _by_version_descending(names) - ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] - """ - def try_parse(name): - """ - Attempt to parse as a version or return a null version. - """ - try: - return packaging.version.Version(name) - except Exception: - return packaging.version.Version('0') - - def _by_version(name): - """ - Parse each component of the filename - """ - name, ext = os.path.splitext(name) - parts = itertools.chain(name.split('-'), [ext]) - return [try_parse(part) for part in parts] - - return sorted(names, key=_by_version, reverse=True) - - def find_on_path(importer, path_item, only=False): """Yield distributions accessible on a sys.path directory""" path_item = _normalize_cached(path_item) if _is_unpacked_egg(path_item): yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item, 'EGG-INFO') - ) + path_item, + metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')), ) return - entries = ( - os.path.join(path_item, child) - for child in safe_listdir(path_item) - ) - - # for performance, before sorting by version, - # screen entries for only those that will yield - # distributions - filtered = ( - entry - for entry in entries - if dist_factory(path_item, entry, only) - ) + entries = (os.path.join(path_item, child) for child in safe_listdir(path_item)) # scan for .egg and .egg-info in directory - path_item_entries = 
_by_version_descending(filtered) - for entry in path_item_entries: + for entry in sorted(entries): fullpath = os.path.join(path_item, entry) factory = dist_factory(path_item, entry, only) for dist in factory(fullpath): @@ -2101,19 +2138,18 @@ def dist_factory(path_item, entry, only): """Return a dist_factory for the given entry.""" lower = entry.lower() is_egg_info = lower.endswith('.egg-info') - is_dist_info = ( - lower.endswith('.dist-info') and - os.path.isdir(os.path.join(path_item, entry)) + is_dist_info = lower.endswith('.dist-info') and os.path.isdir( + os.path.join(path_item, entry) ) is_meta = is_egg_info or is_dist_info return ( distributions_from_metadata - if is_meta else - find_distributions - if not only and _is_egg_path(entry) else - resolve_egg_link - if not only and lower.endswith('.egg-link') else - NoDists() + if is_meta + else find_distributions + if not only and _is_egg_path(entry) + else resolve_egg_link + if not only and lower.endswith('.egg-link') + else NoDists() ) @@ -2125,6 +2161,7 @@ class NoDists: >>> list(NoDists()('anything')) [] """ + def __bool__(self): return False @@ -2159,7 +2196,10 @@ def distributions_from_metadata(path): metadata = FileMetadata(path) entry = os.path.basename(path) yield Distribution.from_location( - root, entry, metadata, precedence=DEVELOP_DIST, + root, + entry, + metadata, + precedence=DEVELOP_DIST, ) @@ -2181,17 +2221,16 @@ def resolve_egg_link(path): """ referenced_paths = non_empty_lines(path) resolved_paths = ( - os.path.join(os.path.dirname(path), ref) - for ref in referenced_paths + os.path.join(os.path.dirname(path), ref) for ref in referenced_paths ) dist_groups = map(find_distributions, resolved_paths) return next(dist_groups, ()) -register_finder(pkgutil.ImpImporter, find_on_path) +if hasattr(pkgutil, 'ImpImporter'): + register_finder(pkgutil.ImpImporter, find_on_path) -if hasattr(importlib_machinery, 'FileFinder'): - register_finder(importlib_machinery.FileFinder, find_on_path) 
+register_finder(importlib_machinery.FileFinder, find_on_path) _declare_state('dict', _namespace_handlers={}) _declare_state('dict', _namespace_packages={}) @@ -2289,6 +2328,15 @@ def position_in_sys_path(path): def declare_namespace(packageName): """Declare that package 'packageName' is a namespace package""" + msg = ( + f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n" + "Implementing implicit namespace packages (as specified in PEP 420) " + "is preferred to `pkg_resources.declare_namespace`. " + "See https://setuptools.pypa.io/en/latest/references/" + "keywords.html#keyword-namespace-packages" + ) + warnings.warn(msg, DeprecationWarning, stacklevel=2) + _imp.acquire_lock() try: if packageName in _namespace_packages: @@ -2345,11 +2393,11 @@ def file_ns_handler(importer, path_item, packageName, module): return subpath -register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) -register_namespace_handler(zipimport.zipimporter, file_ns_handler) +if hasattr(pkgutil, 'ImpImporter'): + register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) -if hasattr(importlib_machinery, 'FileFinder'): - register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) +register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) def null_ns_handler(importer, path_item, packageName, module): @@ -2361,8 +2409,7 @@ def null_ns_handler(importer, path_item, packageName, module): def normalize_path(filename): """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(os.path.normpath( - _cygwin_patch(filename)))) + return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) def _cygwin_patch(filename): # pragma: nocover @@ -2393,9 +2440,9 @@ def _is_egg_path(path): def _is_zip_egg(path): return ( - path.lower().endswith('.egg') and - os.path.isfile(path) and - zipfile.is_zipfile(path) + 
path.lower().endswith('.egg') + and os.path.isfile(path) + and zipfile.is_zipfile(path) ) @@ -2403,9 +2450,8 @@ def _is_unpacked_egg(path): """ Determine if given path appears to be an unpacked egg. """ - return ( - path.lower().endswith('.egg') and - os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) + return path.lower().endswith('.egg') and os.path.isfile( + os.path.join(path, 'EGG-INFO', 'PKG-INFO') ) @@ -2569,8 +2615,10 @@ def _version_from_file(lines): Given an iterable of lines from a Metadata file, return the value of the Version field, if present, or None otherwise. """ + def is_version_line(line): return line.lower().startswith('version:') + version_lines = filter(is_version_line, lines) line = next(iter(version_lines), '') _, _, value = line.partition(':') @@ -2579,12 +2627,19 @@ def is_version_line(line): class Distribution: """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' def __init__( - self, location=None, metadata=None, project_name=None, - version=None, py_version=PY_MAJOR, platform=None, - precedence=EGG_DIST): + self, + location=None, + metadata=None, + project_name=None, + version=None, + py_version=PY_MAJOR, + platform=None, + precedence=EGG_DIST, + ): self.project_name = safe_name(project_name or 'Unknown') if version is not None: self._version = safe_version(version) @@ -2607,8 +2662,13 @@ def from_location(cls, location, basename, metadata=None, **kw): 'name', 'ver', 'pyver', 'plat' ) return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw + location, + metadata, + project_name=project_name, + version=version, + py_version=py_version, + platform=platform, + **kw, )._reload_version() def _reload_version(self): @@ -2617,7 +2677,7 @@ def _reload_version(self): @property def hashcmp(self): return ( - self.parsed_version, + self._forgiving_parsed_version, self.precedence, self.key, self.location, @@ -2664,35 +2724,42 @@ def key(self): 
@property def parsed_version(self): if not hasattr(self, "_parsed_version"): - self._parsed_version = parse_version(self.version) + try: + self._parsed_version = parse_version(self.version) + except packaging.version.InvalidVersion as ex: + info = f"(package: {self.project_name})" + if hasattr(ex, "add_note"): + ex.add_note(info) # PEP 678 + raise + raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None return self._parsed_version - def _warn_legacy_version(self): - LV = packaging.version.LegacyVersion - is_legacy = isinstance(self._parsed_version, LV) - if not is_legacy: - return + @property + def _forgiving_parsed_version(self): + try: + return self.parsed_version + except packaging.version.InvalidVersion as ex: + self._parsed_version = parse_version(_forgiving_version(self.version)) - # While an empty version is technically a legacy version and - # is not a valid PEP 440 version, it's also unlikely to - # actually come from someone and instead it is more likely that - # it comes from setuptools attempting to parse a filename and - # including it in the list. So for that we'll gate this warning - # on if the version is anything at all or not. - if not self.version: - return + notes = "\n".join(getattr(ex, "__notes__", [])) # PEP 678 + msg = f"""!!\n\n + ************************************************************************* + {str(ex)}\n{notes} + + This is a long overdue deprecation. + For the time being, `pkg_resources` will use `{self._parsed_version}` + as a replacement to avoid breaking existing environments, + but no future compatibility is guaranteed. - tmpl = textwrap.dedent(""" - '{project_name} ({version})' is being parsed as a legacy, - non PEP 440, - version. You may find odd behavior and sort order. - In particular it will be sorted as less than 0.0. It - is recommended to migrate to PEP 440 compatible - versions. 
- """).strip().replace('\n', ' ') + If you maintain package {self.project_name} you should implement + the relevant changes to adequate the project to PEP 440 immediately. + ************************************************************************* + \n\n!! + """ + warnings.warn(msg, DeprecationWarning) - warnings.warn(tmpl.format(**vars(self)), PEP440Warning) + return self._parsed_version @property def version(self): @@ -2702,9 +2769,9 @@ def version(self): version = self._get_version() if version is None: path = self._get_metadata_path_for_display(self.PKG_INFO) - msg = ( - "Missing 'Version:' header and/or {} file at path: {}" - ).format(self.PKG_INFO, path) + msg = ("Missing 'Version:' header and/or {} file at path: {}").format( + self.PKG_INFO, path + ) raise ValueError(msg, self) from e return version @@ -2733,8 +2800,7 @@ def _filter_extras(dm): reqs = dm.pop(extra) new_extra, _, marker = extra.partition(':') fails_marker = marker and ( - invalid_marker(marker) - or not evaluate_marker(marker) + invalid_marker(marker) or not evaluate_marker(marker) ) if fails_marker: reqs = [] @@ -2806,8 +2872,9 @@ def activate(self, path=None, replace=False): def egg_name(self): """Return what this distribution's standard .egg filename should be""" filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR + to_filename(self.project_name), + to_filename(self.version), + self.py_version or PY_MAJOR, ) if self.platform: @@ -2837,17 +2904,13 @@ def __getattr__(self, attr): def __dir__(self): return list( set(super(Distribution, self).__dir__()) - | set( - attr for attr in self._provider.__dir__() - if not attr.startswith('_') - ) + | set(attr for attr in self._provider.__dir__() if not attr.startswith('_')) ) @classmethod def from_filename(cls, filename, metadata=None, **kw): return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw + _normalize_cached(filename), 
os.path.basename(filename), metadata, **kw ) def as_requirement(self): @@ -2959,14 +3022,18 @@ def check_version_conflict(self): nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) loc = normalize_path(self.location) for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages): + if ( + modname not in sys.modules + or modname in nsp + or modname in _namespace_packages + ): continue if modname in ('pkg_resources', 'setuptools', 'site'): continue fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): + if fn and ( + normalize_path(fn).startswith(loc) or fn.startswith(self.location) + ): continue issue_warning( "Module %s was already imported from %s, but %s is being added" @@ -3018,6 +3085,7 @@ class DistInfoDistribution(Distribution): Wrap an actual or potential sys.path entry w/metadata, .dist-info style. """ + PKG_INFO = 'METADATA' EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") @@ -3103,8 +3171,7 @@ def __init__(self, requirement_string): self.unsafe_name = self.name project_name = safe_name(self.name) self.project_name, self.key = project_name, project_name.lower() - self.specs = [ - (spec.operator, spec.version) for spec in self.specifier] + self.specs = [(spec.operator, spec.version) for spec in self.specifier] self.extras = tuple(map(safe_extra, self.extras)) self.hashCmp = ( self.key, @@ -3116,10 +3183,7 @@ def __init__(self, requirement_string): self.__hash = hash(self.hashCmp) def __eq__(self, other): - return ( - isinstance(other, Requirement) and - self.hashCmp == other.hashCmp - ) + return isinstance(other, Requirement) and self.hashCmp == other.hashCmp def __ne__(self, other): return not self == other @@ -3144,7 +3208,7 @@ def __repr__(self): @staticmethod def parse(s): - req, = parse_requirements(s) + (req,) = parse_requirements(s) return req @@ -3282,10 +3346,7 @@ def 
_initialize_master_working_set(): # ensure that all distributions added to the working set in the future # (e.g. by calling ``require()``) will get activated as well, # with higher priority (replace=True). - tuple( - dist.activate(replace=False) - for dist in working_set - ) + tuple(dist.activate(replace=False) for dist in working_set) add_activation_listener( lambda dist: dist.activate(replace=True), existing=False, diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index f15208b8bcd..3974df3f11b 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -16,7 +16,7 @@ rich==13.3.3 pygments==2.14.0 typing_extensions==4.5.0 resolvelib==1.0.1 -setuptools==65.6.3 +setuptools==67.6.1 six==1.16.0 tenacity==8.2.2 tomli==2.0.1 diff --git a/tools/vendoring/patches/pkg_resources.patch b/tools/vendoring/patches/pkg_resources.patch index 39bb2eac253..48ae954311b 100644 --- a/tools/vendoring/patches/pkg_resources.patch +++ b/tools/vendoring/patches/pkg_resources.patch @@ -1,25 +1,3 @@ -diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py -index d59226af9..3b9565893 100644 ---- a/src/pip/_vendor/pkg_resources/__init__.py -+++ b/src/pip/_vendor/pkg_resources/__init__.py -@@ -77,7 +77,7 @@ - join_continuation, - ) - --from pkg_resources.extern import appdirs -+from pkg_resources.extern import platformdirs - from pkg_resources.extern import packaging - __import__('pkg_resources.extern.packaging.version') - __import__('pkg_resources.extern.packaging.specifiers') -@@ -1321,7 +1321,7 @@ def get_default_cache(): - """ - return ( - os.environ.get('PYTHON_EGG_CACHE') -- or appdirs.user_cache_dir(appname='Python-Eggs') -+ or platformdirs.user_cache_dir(appname='Python-Eggs') - ) - - diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 3f2476a0c..8d5727d35 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py From 4428130fbe44259562280f0601f034bb8c43bef2 
Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 25 Apr 2023 16:15:07 +0100 Subject: [PATCH 457/730] Suppress pkg_resources deprecation warning --- src/pip/_internal/cli/main.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/pip/_internal/cli/main.py b/src/pip/_internal/cli/main.py index 0e31221543a..7e061f5b390 100644 --- a/src/pip/_internal/cli/main.py +++ b/src/pip/_internal/cli/main.py @@ -4,6 +4,7 @@ import logging import os import sys +import warnings from typing import List, Optional from pip._internal.cli.autocompletion import autocomplete @@ -46,6 +47,14 @@ def main(args: Optional[List[str]] = None) -> int: if args is None: args = sys.argv[1:] + # Suppress the pkg_resources deprecation warning + # Note - we use a module of .*pkg_resources to cover + # the normal case (pip._vendor.pkg_resources) and the + # devendored case (a bare pkg_resources) + warnings.filterwarnings( + action="ignore", category=DeprecationWarning, module=".*pkg_resources" + ) + # Configure our deprecation warnings to be sent through loggers deprecation.install_warning_logger() From cbc92fd6dd7e0bae4ec81b87521b5c27b0601be5 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 25 Apr 2023 16:17:13 +0100 Subject: [PATCH 458/730] Upgrade setuptools to 67.7.2 --- news/setuptools.vendor.rst | 2 +- src/pip/_vendor/pkg_resources/__init__.py | 3 +++ src/pip/_vendor/vendor.txt | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst index 9cf3f49e21c..569df4b6432 100644 --- a/news/setuptools.vendor.rst +++ b/news/setuptools.vendor.rst @@ -1 +1 @@ -Upgrade setuptools to 67.6.1 +Upgrade setuptools to 67.7.2 diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index a85aca10f7c..1bf26a94226 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -3046,6 +3046,9 @@ def has_version(self): except ValueError: 
issue_warning("Unbuilt egg for " + repr(self)) return False + except SystemError: + # TODO: remove this except clause when python/cpython#103632 is fixed. + return False return True def clone(self, **kw): diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 3974df3f11b..61063459d6d 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -16,7 +16,7 @@ rich==13.3.3 pygments==2.14.0 typing_extensions==4.5.0 resolvelib==1.0.1 -setuptools==67.6.1 +setuptools==67.7.2 six==1.16.0 tenacity==8.2.2 tomli==2.0.1 From 3fe7e54fceac7a03bcb88ce26cfd0937acfe5e40 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 26 Apr 2023 10:18:28 +0100 Subject: [PATCH 459/730] Bump for release --- NEWS.rst | 9 +++++++++ news/setuptools.vendor.rst | 1 - src/pip/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) delete mode 100644 news/setuptools.vendor.rst diff --git a/NEWS.rst b/NEWS.rst index a5c27f34a2b..b0ae642634d 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,15 @@ .. 
towncrier release notes start +23.1.2 (2023-04-26) +=================== + +Vendored Libraries +------------------ + +- Upgrade setuptools to 67.7.2 + + 23.1.1 (2023-04-22) =================== diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst deleted file mode 100644 index 569df4b6432..00000000000 --- a/news/setuptools.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade setuptools to 67.7.2 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 20d8bf56c17..4ad3b2acb9a 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.2.dev0" +__version__ = "23.1.2" def main(args: Optional[List[str]] = None) -> int: From cd918f27f9d1b1349ec9b87dc4750417361956f5 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 26 Apr 2023 10:18:28 +0100 Subject: [PATCH 460/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 4ad3b2acb9a..20d8bf56c17 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.1.2" +__version__ = "23.2.dev0" def main(args: Optional[List[str]] = None) -> int: From 28f77d7618d0acbae3242c50fc1e475cbafebd1d Mon Sep 17 00:00:00 2001 From: "Awit (Ah-Wit) Ghirmai" <107564270+awitghirmai@users.noreply.github.com> Date: Wed, 26 Apr 2023 12:52:14 -0600 Subject: [PATCH 461/730] Include nox install instructions in Getting Started dev doc (#12001) --- docs/html/development/getting-started.rst | 24 ++++++++++++++++------- news/no-issue.trivial.rst | 2 ++ 2 files changed, 19 insertions(+), 7 deletions(-) create mode 100644 news/no-issue.trivial.rst diff --git a/docs/html/development/getting-started.rst b/docs/html/development/getting-started.rst index 730f5ece08f..e248259f08d 100644 --- a/docs/html/development/getting-started.rst +++ b/docs/html/development/getting-started.rst @@ -27,23 +27,35 @@ 
Development Environment pip is a command line application written in Python. For developing pip, you should `install Python`_ on your computer. -For developing pip, you need to install :pypi:`nox`. Often, you can run -``python -m pip install nox`` to install and use it. +For developing pip, you need to install :pypi:`nox`. The full development setup would then be: +.. tab:: Unix/macOS + + .. code-block:: shell + + python -m venv .venv + source .venv/bin/activate + python -m pip install nox + +.. tab:: Windows + + .. code-block:: shell + + py -m venv .venv + .venv\Scripts\activate + py -m pip install nox Running pip From Source Tree ============================ To run the pip executable from your source tree during development, install pip locally using editable installation (inside a virtualenv). -You can then invoke your local source tree pip normally. +You can then invoke your local source tree pip normally (be sure virtualenv is active). .. tab:: Unix/macOS .. code-block:: shell - python -m venv .venv - source .venv/bin/activate python -m pip install -e . python -m pip --version @@ -51,8 +63,6 @@ You can then invoke your local source tree pip normally. .. code-block:: shell - py -m venv .venv - .venv\Scripts\activate py -m pip install -e . py -m pip --version diff --git a/news/no-issue.trivial.rst b/news/no-issue.trivial.rst new file mode 100644 index 00000000000..6440f668716 --- /dev/null +++ b/news/no-issue.trivial.rst @@ -0,0 +1,2 @@ +Added seperate instructions for installing ``nox`` in the ``docs/development/getting-started.rst`` doc. and slight update +to the below ``Running pip From Source Tree`` section. From 21857784d6d7a9139711cf77f77da925fe9189ee Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 26 Apr 2023 12:53:24 -0600 Subject: [PATCH 462/730] Implement PEP 685 extra normalization in resolver All extras from user input or dependant package metadata are properly normalized for comparison and resolution. 
This ensures requests for extras from a dependant can always correctly find the normalized extra in the dependency, even if the requested extra name is not normalized. Note that this still relies on the declaration of extra names in the dependency's package metadata to be properly normalized when the package is built, since correct comparison between an extra name's normalized and non-normalized forms requires change to the metadata parsing logic, which is only available in packaging 22.0 and up, which pip does not use at the moment. --- news/11649.bugfix.rst | 5 ++++ .../_internal/resolution/resolvelib/base.py | 2 +- .../resolution/resolvelib/candidates.py | 2 +- .../resolution/resolvelib/factory.py | 20 +++++++++------- .../resolution/resolvelib/requirements.py | 2 +- tests/functional/test_install_extras.py | 24 ++++++++++++++++++- 6 files changed, 42 insertions(+), 13 deletions(-) create mode 100644 news/11649.bugfix.rst diff --git a/news/11649.bugfix.rst b/news/11649.bugfix.rst new file mode 100644 index 00000000000..65511711f59 --- /dev/null +++ b/news/11649.bugfix.rst @@ -0,0 +1,5 @@ +Normalize extras according to :pep:`685` from package metadata in the resolver +for comparison. This ensures extras are correctly compared and merged as long +as the package providing the extra(s) is built with values normalized according +to the standard. Note, however, that this *does not* solve cases where the +package itself contains unnormalized extra values in the metadata. 
diff --git a/src/pip/_internal/resolution/resolvelib/base.py b/src/pip/_internal/resolution/resolvelib/base.py index b206692a0a9..0275385db71 100644 --- a/src/pip/_internal/resolution/resolvelib/base.py +++ b/src/pip/_internal/resolution/resolvelib/base.py @@ -12,7 +12,7 @@ CandidateVersion = Union[LegacyVersion, Version] -def format_name(project: str, extras: FrozenSet[str]) -> str: +def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str: if not extras: return project canonical_extras = sorted(canonicalize_name(e) for e in extras) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 31020e27ad1..48ef9a16daa 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -423,7 +423,7 @@ class ExtrasCandidate(Candidate): def __init__( self, base: BaseCandidate, - extras: FrozenSet[str], + extras: FrozenSet[NormalizedName], ) -> None: self.base = base self.extras = extras diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 0331297b85b..6d1ec31631e 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -112,7 +112,7 @@ def __init__( self._editable_candidate_cache: Cache[EditableCandidate] = {} self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {} self._extras_candidate_cache: Dict[ - Tuple[int, FrozenSet[str]], ExtrasCandidate + Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate ] = {} if not ignore_installed: @@ -138,7 +138,9 @@ def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: raise UnsupportedWheel(msg) def _make_extras_candidate( - self, base: BaseCandidate, extras: FrozenSet[str] + self, + base: BaseCandidate, + extras: FrozenSet[NormalizedName], ) -> ExtrasCandidate: cache_key = (id(base), extras) try: @@ -151,7 
+153,7 @@ def _make_extras_candidate( def _make_candidate_from_dist( self, dist: BaseDistribution, - extras: FrozenSet[str], + extras: FrozenSet[NormalizedName], template: InstallRequirement, ) -> Candidate: try: @@ -166,7 +168,7 @@ def _make_candidate_from_dist( def _make_candidate_from_link( self, link: Link, - extras: FrozenSet[str], + extras: FrozenSet[NormalizedName], template: InstallRequirement, name: Optional[NormalizedName], version: Optional[CandidateVersion], @@ -244,12 +246,12 @@ def _iter_found_candidates( assert template.req, "Candidates found on index must be PEP 508" name = canonicalize_name(template.req.name) - extras: FrozenSet[str] = frozenset() + extras: FrozenSet[NormalizedName] = frozenset() for ireq in ireqs: assert ireq.req, "Candidates found on index must be PEP 508" specifier &= ireq.req.specifier hashes &= ireq.hashes(trust_internet=False) - extras |= frozenset(ireq.extras) + extras |= frozenset(canonicalize_name(e) for e in ireq.extras) def _get_installed_candidate() -> Optional[Candidate]: """Get the candidate for the currently-installed version.""" @@ -325,7 +327,7 @@ def is_pinned(specifier: SpecifierSet) -> bool: def _iter_explicit_candidates_from_base( self, base_requirements: Iterable[Requirement], - extras: FrozenSet[str], + extras: FrozenSet[NormalizedName], ) -> Iterator[Candidate]: """Produce explicit candidates from the base given an extra-ed package. 
@@ -392,7 +394,7 @@ def find_candidates( explicit_candidates.update( self._iter_explicit_candidates_from_base( requirements.get(parsed_requirement.name, ()), - frozenset(parsed_requirement.extras), + frozenset(canonicalize_name(e) for e in parsed_requirement.extras), ), ) @@ -452,7 +454,7 @@ def _make_requirement_from_install_req( self._fail_if_link_is_unsupported_wheel(ireq.link) cand = self._make_candidate_from_link( ireq.link, - extras=frozenset(ireq.extras), + extras=frozenset(canonicalize_name(e) for e in ireq.extras), template=ireq, name=canonicalize_name(ireq.name) if ireq.name else None, version=None, diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index 06addc0ddce..7d244c6937a 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -43,7 +43,7 @@ class SpecifierRequirement(Requirement): def __init__(self, ireq: InstallRequirement) -> None: assert ireq.link is None, "This is a link, not a specifier" self._ireq = ireq - self._extras = frozenset(ireq.extras) + self._extras = frozenset(canonicalize_name(e) for e in ireq.extras) def __str__(self) -> str: return str(self._ireq.req) diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index c6cef00fa9c..6f2a6bf435f 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -4,7 +4,12 @@ import pytest -from tests.lib import PipTestEnvironment, ResolverVariant, TestData +from tests.lib import ( + PipTestEnvironment, + ResolverVariant, + TestData, + create_basic_wheel_for_package, +) @pytest.mark.network @@ -223,3 +228,20 @@ def test_install_extra_merging( if not fails_on_legacy or resolver_variant == "2020-resolver": expected = f"Successfully installed pkga-0.1 simple-{simple_version}" assert expected in result.stdout + + +def test_install_extras(script: PipTestEnvironment) -> 
None: + create_basic_wheel_for_package(script, "a", "1", depends=["b", "dep[x-y]"]) + create_basic_wheel_for_package(script, "b", "1", depends=["dep[x_y]"]) + create_basic_wheel_for_package(script, "dep", "1", extras={"x-y": ["meh"]}) + create_basic_wheel_for_package(script, "meh", "1") + + script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + "a", + ) + script.assert_installed(a="1", b="1", dep="1", meh="1") From 5b1fa83ffcd2ceea484d26783be979403d6563f2 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 8 May 2023 14:17:46 +0530 Subject: [PATCH 463/730] Add Python 3.12 classifier (#12019) --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 2179d34d2bf..d73c77b7346 100644 --- a/setup.py +++ b/setup.py @@ -42,6 +42,7 @@ def get_version(rel_path: str) -> str: "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ], From f25f8fffbbd16fdb13a4f8977946afe9a3248453 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 8 May 2023 14:24:14 +0530 Subject: [PATCH 464/730] Add Read the Docs build configuration (#12020) --- .readthedocs.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 7d62011a6e3..b6453d8f0b3 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,10 +1,14 @@ version: 2 +build: + os: ubuntu-22.04 + tools: + python: "3.11" + sphinx: builder: htmldir configuration: docs/html/conf.py python: - version: 3.8 install: - requirements: docs/requirements.txt From c3160c5423e778b0dc334a677ae865befd222021 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 9 May 2023 15:39:58 +0800 Subject: [PATCH 465/730] Avoid importing things from conftest It is generally discouraged 
to import from conftest. Things are now moved to tests.lib and imported from there instead. Also did some cleanup to remove the no-longer-needed nullcontext shim. --- tests/conftest.py | 109 +++--------------------- tests/functional/test_completion.py | 3 +- tests/functional/test_download.py | 4 +- tests/functional/test_help.py | 3 +- tests/functional/test_inspect.py | 3 +- tests/functional/test_install.py | 2 +- tests/functional/test_install_config.py | 4 +- tests/functional/test_list.py | 2 +- tests/lib/__init__.py | 48 +++++++++-- tests/lib/compat.py | 23 +---- tests/lib/server.py | 51 ++++++++++- 11 files changed, 114 insertions(+), 138 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 57dd7e68a2b..5b189443f25 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,23 +1,21 @@ import compileall +import contextlib import fnmatch -import io import os import re import shutil import subprocess import sys -from contextlib import ExitStack, contextmanager from pathlib import Path from typing import ( - TYPE_CHECKING, AnyStr, Callable, + ContextManager, Dict, Iterable, Iterator, List, Optional, - Union, ) from unittest.mock import patch from zipfile import ZipFile @@ -36,25 +34,20 @@ from installer.sources import WheelFile from pip import __file__ as pip_location -from pip._internal.cli.main import main as pip_entry_point from pip._internal.locations import _USE_SYSCONFIG from pip._internal.utils.temp_dir import global_tempdir_manager -from tests.lib import DATA_DIR, SRC_DIR, PipTestEnvironment, TestData -from tests.lib.server import MockServer as _MockServer -from tests.lib.server import make_mock_server, server_running +from tests.lib import ( + DATA_DIR, + SRC_DIR, + CertFactory, + InMemoryPip, + PipTestEnvironment, + ScriptFactory, + TestData, +) +from tests.lib.server import MockServer, make_mock_server from tests.lib.venv import VirtualEnvironment, VirtualEnvironmentType -from .lib.compat import nullcontext - -if TYPE_CHECKING: - from 
typing import Protocol - - from wsgi import WSGIApplication -else: - # TODO: Protocol was introduced in Python 3.8. Remove this branch when - # dropping support for Python 3.7. - Protocol = object - def pytest_addoption(parser: Parser) -> None: parser.addoption( @@ -325,7 +318,7 @@ def scoped_global_tempdir_manager(request: pytest.FixtureRequest) -> Iterator[No temporary directories in the application. """ if "no_auto_tempdir_manager" in request.keywords: - ctx = nullcontext + ctx: Callable[[], ContextManager[None]] = contextlib.nullcontext else: ctx = global_tempdir_manager @@ -502,16 +495,6 @@ def virtualenv( yield virtualenv_factory(tmpdir.joinpath("workspace", "venv")) -class ScriptFactory(Protocol): - def __call__( - self, - tmpdir: Path, - virtualenv: Optional[VirtualEnvironment] = None, - environ: Optional[Dict[AnyStr, AnyStr]] = None, - ) -> PipTestEnvironment: - ... - - @pytest.fixture(scope="session") def script_factory( virtualenv_factory: Callable[[Path], VirtualEnvironment], @@ -631,26 +614,6 @@ def data(tmpdir: Path) -> TestData: return TestData.copy(tmpdir.joinpath("data")) -class InMemoryPipResult: - def __init__(self, returncode: int, stdout: str) -> None: - self.returncode = returncode - self.stdout = stdout - - -class InMemoryPip: - def pip(self, *args: Union[str, Path]) -> InMemoryPipResult: - orig_stdout = sys.stdout - stdout = io.StringIO() - sys.stdout = stdout - try: - returncode = pip_entry_point([os.fspath(a) for a in args]) - except SystemExit as e: - returncode = e.code or 0 - finally: - sys.stdout = orig_stdout - return InMemoryPipResult(returncode, stdout.getvalue()) - - @pytest.fixture def in_memory_pip() -> InMemoryPip: return InMemoryPip() @@ -662,9 +625,6 @@ def deprecated_python() -> bool: return sys.version_info[:2] in [] -CertFactory = Callable[[], str] - - @pytest.fixture(scope="session") def cert_factory(tmpdir_factory: pytest.TempPathFactory) -> CertFactory: # Delay the import requiring cryptography in order to make it 
possible @@ -686,49 +646,6 @@ def factory() -> str: return factory -class MockServer: - def __init__(self, server: _MockServer) -> None: - self._server = server - self._running = False - self.context = ExitStack() - - @property - def port(self) -> int: - return self._server.port - - @property - def host(self) -> str: - return self._server.host - - def set_responses(self, responses: Iterable["WSGIApplication"]) -> None: - assert not self._running, "responses cannot be set on running server" - self._server.mock.side_effect = responses - - def start(self) -> None: - assert not self._running, "running server cannot be started" - self.context.enter_context(server_running(self._server)) - self.context.enter_context(self._set_running()) - - @contextmanager - def _set_running(self) -> Iterator[None]: - self._running = True - try: - yield - finally: - self._running = False - - def stop(self) -> None: - assert self._running, "idle server cannot be stopped" - self.context.close() - - def get_requests(self) -> List[Dict[str, str]]: - """Get environ for each received request.""" - assert not self._running, "cannot get mock from running server" - # Legacy: replace call[0][0] with call.args[0] - # when pip drops support for python3.7 - return [call[0][0] for call in self._server.mock.call_args_list] - - @pytest.fixture def mock_server() -> Iterator[MockServer]: server = make_mock_server() diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py index b02cd4fa317..28381c2097e 100644 --- a/tests/functional/test_completion.py +++ b/tests/functional/test_completion.py @@ -5,8 +5,7 @@ import pytest -from tests.conftest import ScriptFactory -from tests.lib import PipTestEnvironment, TestData, TestPipResult +from tests.lib import PipTestEnvironment, ScriptFactory, TestData, TestPipResult if TYPE_CHECKING: from typing import Protocol diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 31418ca8c2b..8c00dc09edf 100644 --- 
a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -14,15 +14,15 @@ from pip._internal.cli.status_codes import ERROR from pip._internal.utils.urls import path_to_url -from tests.conftest import MockServer, ScriptFactory from tests.lib import ( PipTestEnvironment, + ScriptFactory, TestData, TestPipResult, create_basic_sdist_for_package, create_really_basic_wheel, ) -from tests.lib.server import file_response +from tests.lib.server import MockServer, file_response def fake_wheel(data: TestData, wheel_path: str) -> None: diff --git a/tests/functional/test_help.py b/tests/functional/test_help.py index dba41af5f79..69419b8d9a3 100644 --- a/tests/functional/test_help.py +++ b/tests/functional/test_help.py @@ -5,8 +5,7 @@ from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.commands import commands_dict, create_command from pip._internal.exceptions import CommandError -from tests.conftest import InMemoryPip -from tests.lib import PipTestEnvironment +from tests.lib import InMemoryPip, PipTestEnvironment def test_run_method_should_return_success_when_finds_command_name() -> None: diff --git a/tests/functional/test_inspect.py b/tests/functional/test_inspect.py index c9f43134624..f6690fb1fb1 100644 --- a/tests/functional/test_inspect.py +++ b/tests/functional/test_inspect.py @@ -2,8 +2,7 @@ import pytest -from tests.conftest import ScriptFactory -from tests.lib import PipTestEnvironment, TestData +from tests.lib import PipTestEnvironment, ScriptFactory, TestData @pytest.fixture(scope="session") diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 63712827479..c29880e611a 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -15,8 +15,8 @@ from pip._internal.models.index import PyPI, TestPyPI from pip._internal.utils.misc import rmtree from pip._internal.utils.urls import path_to_url -from tests.conftest import CertFactory from tests.lib import ( + 
CertFactory, PipTestEnvironment, ResolverVariant, TestData, diff --git a/tests/functional/test_install_config.py b/tests/functional/test_install_config.py index 9f8a8067787..ecaf2f705a2 100644 --- a/tests/functional/test_install_config.py +++ b/tests/functional/test_install_config.py @@ -8,9 +8,9 @@ import pytest -from tests.conftest import CertFactory, MockServer, ScriptFactory -from tests.lib import PipTestEnvironment, TestData +from tests.lib import CertFactory, PipTestEnvironment, ScriptFactory, TestData from tests.lib.server import ( + MockServer, authorization_response, file_response, make_mock_server, diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index bd45f82df7f..a960f3c4e71 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -5,9 +5,9 @@ import pytest from pip._internal.models.direct_url import DirectUrl, DirInfo -from tests.conftest import ScriptFactory from tests.lib import ( PipTestEnvironment, + ScriptFactory, TestData, _create_test_package, create_test_package_with_setup, diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 7410072f50e..cd0b83d1292 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -10,11 +10,12 @@ from base64 import urlsafe_b64encode from contextlib import contextmanager from hashlib import sha256 -from io import BytesIO +from io import BytesIO, StringIO from textwrap import dedent from typing import ( TYPE_CHECKING, Any, + AnyStr, Callable, Dict, Iterable, @@ -32,6 +33,7 @@ from pip._vendor.packaging.utils import canonicalize_name from scripttest import FoundDir, FoundFile, ProcResult, TestFileEnvironment +from pip._internal.cli.main import main as pip_entry_point from pip._internal.index.collector import LinkCollector from pip._internal.index.package_finder import PackageFinder from pip._internal.locations import get_major_minor_version @@ -43,12 +45,12 @@ from tests.lib.wheel import make_wheel if TYPE_CHECKING: - # Literal was introduced in 
Python 3.8. - from typing import Literal + from typing import Literal, Protocol ResolverVariant = Literal["resolvelib", "legacy"] -else: - ResolverVariant = str +else: # TODO: Remove this branch when dropping support for Python 3.7. + Protocol = object # Protocol was introduced in Python 3.8. + ResolverVariant = str # Literal was introduced in Python 3.8. DATA_DIR = pathlib.Path(__file__).parent.parent.joinpath("data").resolve() SRC_DIR = pathlib.Path(__file__).resolve().parent.parent.parent @@ -1336,3 +1338,39 @@ def need_svn(fn: _Test) -> _Test: def need_mercurial(fn: _Test) -> _Test: return pytest.mark.mercurial(need_executable("Mercurial", ("hg", "version"))(fn)) + + +class InMemoryPipResult: + def __init__(self, returncode: int, stdout: str) -> None: + self.returncode = returncode + self.stdout = stdout + + +class InMemoryPip: + def pip(self, *args: Union[str, pathlib.Path]) -> InMemoryPipResult: + orig_stdout = sys.stdout + stdout = StringIO() + sys.stdout = stdout + try: + returncode = pip_entry_point([os.fspath(a) for a in args]) + except SystemExit as e: + if isinstance(e.code, int): + returncode = e.code + else: + returncode = int(bool(e.code)) + finally: + sys.stdout = orig_stdout + return InMemoryPipResult(returncode, stdout.getvalue()) + + +class ScriptFactory(Protocol): + def __call__( + self, + tmpdir: pathlib.Path, + virtualenv: Optional[VirtualEnvironment] = None, + environ: Optional[Dict[AnyStr, AnyStr]] = None, + ) -> PipTestEnvironment: + ... + + +CertFactory = Callable[[], str] diff --git a/tests/lib/compat.py b/tests/lib/compat.py index 4d44cbddbbc..866ac7a7734 100644 --- a/tests/lib/compat.py +++ b/tests/lib/compat.py @@ -2,32 +2,13 @@ import contextlib import signal -from typing import Iterable, Iterator - - -@contextlib.contextmanager -def nullcontext() -> Iterator[None]: - """ - Context manager that does no additional processing. 
- - Used as a stand-in for a normal context manager, when a particular block of - code is only sometimes used with a normal context manager: - - cm = optional_cm if condition else nullcontext() - with cm: - # Perform operation, using optional_cm if condition is True - - TODO: Replace with contextlib.nullcontext after dropping Python 3.6 - support. - """ - yield - +from typing import Callable, ContextManager, Iterable, Iterator # Applies on Windows. if not hasattr(signal, "pthread_sigmask"): # We're not relying on this behavior anywhere currently, it's just best # practice. - blocked_signals = nullcontext + blocked_signals: Callable[[], ContextManager[None]] = contextlib.nullcontext else: @contextlib.contextmanager diff --git a/tests/lib/server.py b/tests/lib/server.py index 4cc18452cb5..1048a173d40 100644 --- a/tests/lib/server.py +++ b/tests/lib/server.py @@ -2,9 +2,9 @@ import ssl import threading from base64 import b64encode -from contextlib import contextmanager +from contextlib import ExitStack, contextmanager from textwrap import dedent -from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Iterator +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Iterator, List from unittest.mock import Mock from werkzeug.serving import BaseWSGIServer, WSGIRequestHandler @@ -18,7 +18,7 @@ Body = Iterable[bytes] -class MockServer(BaseWSGIServer): +class _MockServer(BaseWSGIServer): mock: Mock = Mock() @@ -64,7 +64,7 @@ def adapter(environ: "WSGIEnvironment", start_response: "StartResponse") -> Body return adapter -def make_mock_server(**kwargs: Any) -> MockServer: +def make_mock_server(**kwargs: Any) -> _MockServer: """Creates a mock HTTP(S) server listening on a random port on localhost. 
The `mock` property of the returned server provides and records all WSGI @@ -189,3 +189,46 @@ def responder(environ: "WSGIEnvironment", start_response: "StartResponse") -> Bo return [path.read_bytes()] return responder + + +class MockServer: + def __init__(self, server: _MockServer) -> None: + self._server = server + self._running = False + self.context = ExitStack() + + @property + def port(self) -> int: + return self._server.port + + @property + def host(self) -> str: + return self._server.host + + def set_responses(self, responses: Iterable["WSGIApplication"]) -> None: + assert not self._running, "responses cannot be set on running server" + self._server.mock.side_effect = responses + + def start(self) -> None: + assert not self._running, "running server cannot be started" + self.context.enter_context(server_running(self._server)) + self.context.enter_context(self._set_running()) + + @contextmanager + def _set_running(self) -> Iterator[None]: + self._running = True + try: + yield + finally: + self._running = False + + def stop(self) -> None: + assert self._running, "idle server cannot be stopped" + self.context.close() + + def get_requests(self) -> List[Dict[str, str]]: + """Get environ for each received request.""" + assert not self._running, "cannot get mock from running server" + # Legacy: replace call[0][0] with call.args[0] + # when pip drops support for python3.7 + return [call[0][0] for call in self._server.mock.call_args_list] From b9066d4b00a2fa2cc6529ecb0b5920465e0fb812 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 11 May 2023 13:00:47 +0800 Subject: [PATCH 466/730] Add test cases for normalized weird extra --- tests/functional/test_install_extras.py | 33 ++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index 6f2a6bf435f..21da9d50e1b 100644 --- a/tests/functional/test_install_extras.py +++ 
b/tests/functional/test_install_extras.py @@ -155,25 +155,50 @@ def test_install_fails_if_extra_at_end( assert "Extras after version" in result.stderr -def test_install_special_extra(script: PipTestEnvironment) -> None: +@pytest.mark.parametrize( + "specified_extra, requested_extra", + [ + ("Hop_hOp-hoP", "Hop_hOp-hoP"), + pytest.param( + "Hop_hOp-hoP", + "hop-hop-hop", + marks=pytest.mark.xfail( + reason=( + "matching a normalized extra request against an" + "unnormalized extra in metadata requires PEP 685 support " + "in packaging (see pypa/pip#11445)." + ), + ), + ), + ("hop-hop-hop", "Hop_hOp-hoP"), + ], +) +def test_install_special_extra( + script: PipTestEnvironment, + specified_extra: str, + requested_extra: str, +) -> None: # Check that uppercase letters and '-' are dealt with # make a dummy project pkga_path = script.scratch_path / "pkga" pkga_path.mkdir() pkga_path.joinpath("setup.py").write_text( textwrap.dedent( - """ + f""" from setuptools import setup setup(name='pkga', version='0.1', - extras_require={'Hop_hOp-hoP': ['missing_pkg']}, + extras_require={{'{specified_extra}': ['missing_pkg']}}, ) """ ) ) result = script.pip( - "install", "--no-index", f"{pkga_path}[Hop_hOp-hoP]", expect_error=True + "install", + "--no-index", + f"{pkga_path}[{requested_extra}]", + expect_error=True, ) assert ( "Could not find a version that satisfies the requirement missing_pkg" From d64190c5fbbf38cf40215ef7122f1b8c6847afc9 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 11 May 2023 14:32:41 +0800 Subject: [PATCH 467/730] Try to find dependencies from unnormalized extras When an unnormalized extra is requested, try to look up dependencies with both its raw and normalized forms, to maintain compatibility when an extra is both specified and requested in a non-standard form. 
--- .../resolution/resolvelib/candidates.py | 62 ++++++++++++++----- .../resolution/resolvelib/factory.py | 18 +++--- 2 files changed, 57 insertions(+), 23 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 48ef9a16daa..b737bffc9c9 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -423,10 +423,17 @@ class ExtrasCandidate(Candidate): def __init__( self, base: BaseCandidate, - extras: FrozenSet[NormalizedName], + extras: FrozenSet[str], ) -> None: self.base = base - self.extras = extras + self.extras = frozenset(canonicalize_name(e) for e in extras) + # If any extras are requested in their non-normalized forms, keep track + # of their raw values. This is needed when we look up dependencies + # since PEP 685 has not been implemented for marker-matching, and using + # the non-normalized extra for lookup ensures the user can select a + # non-normalized extra in a package with its non-normalized form. + # TODO: Remove this when packaging is upgraded to support PEP 685. + self._unnormalized_extras = extras.difference(self.extras) def __str__(self) -> str: name, rest = str(self.base).split(" ", 1) @@ -477,6 +484,44 @@ def is_editable(self) -> bool: def source_link(self) -> Optional[Link]: return self.base.source_link + def _warn_invalid_extras( + self, + requested: FrozenSet[str], + provided: FrozenSet[str], + ) -> None: + """Emit warnings for invalid extras being requested. + + This emits a warning for each requested extra that is not in the + candidate's ``Provides-Extra`` list. + """ + invalid_extras_to_warn = requested.difference( + provided, + # If an extra is requested in an unnormalized form, skip warning + # about the normalized form being missing. 
+ (canonicalize_name(e) for e in self._unnormalized_extras), + ) + if not invalid_extras_to_warn: + return + for extra in sorted(invalid_extras_to_warn): + logger.warning( + "%s %s does not provide the extra '%s'", + self.base.name, + self.version, + extra, + ) + + def _calculate_valid_requested_extras(self) -> FrozenSet[str]: + """Get a list of valid extras requested by this candidate. + + The user (or upstream dependant) may have specified extras that the + candidate doesn't support. Any unsupported extras are dropped, and each + cause a warning to be logged here. + """ + requested_extras = self.extras.union(self._unnormalized_extras) + provided_extras = frozenset(self.base.dist.iter_provided_extras()) + self._warn_invalid_extras(requested_extras, provided_extras) + return requested_extras.intersection(provided_extras) + def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: factory = self.base._factory @@ -486,18 +531,7 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen if not with_requires: return - # The user may have specified extras that the candidate doesn't - # support. We ignore any unsupported extras here. 
- valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras()) - invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras()) - for extra in sorted(invalid_extras): - logger.warning( - "%s %s does not provide the extra '%s'", - self.base.name, - self.version, - extra, - ) - + valid_extras = self._calculate_valid_requested_extras() for r in self.base.dist.iter_dependencies(valid_extras): requirement = factory.make_requirement_from_spec( str(r), self.base._ireq, valid_extras diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 6d1ec31631e..ff916236c97 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -140,9 +140,9 @@ def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: def _make_extras_candidate( self, base: BaseCandidate, - extras: FrozenSet[NormalizedName], + extras: FrozenSet[str], ) -> ExtrasCandidate: - cache_key = (id(base), extras) + cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras)) try: candidate = self._extras_candidate_cache[cache_key] except KeyError: @@ -153,7 +153,7 @@ def _make_extras_candidate( def _make_candidate_from_dist( self, dist: BaseDistribution, - extras: FrozenSet[NormalizedName], + extras: FrozenSet[str], template: InstallRequirement, ) -> Candidate: try: @@ -168,7 +168,7 @@ def _make_candidate_from_dist( def _make_candidate_from_link( self, link: Link, - extras: FrozenSet[NormalizedName], + extras: FrozenSet[str], template: InstallRequirement, name: Optional[NormalizedName], version: Optional[CandidateVersion], @@ -246,12 +246,12 @@ def _iter_found_candidates( assert template.req, "Candidates found on index must be PEP 508" name = canonicalize_name(template.req.name) - extras: FrozenSet[NormalizedName] = frozenset() + extras: FrozenSet[str] = frozenset() for ireq in ireqs: assert ireq.req, "Candidates found on index must be PEP 
508" specifier &= ireq.req.specifier hashes &= ireq.hashes(trust_internet=False) - extras |= frozenset(canonicalize_name(e) for e in ireq.extras) + extras |= frozenset(ireq.extras) def _get_installed_candidate() -> Optional[Candidate]: """Get the candidate for the currently-installed version.""" @@ -327,7 +327,7 @@ def is_pinned(specifier: SpecifierSet) -> bool: def _iter_explicit_candidates_from_base( self, base_requirements: Iterable[Requirement], - extras: FrozenSet[NormalizedName], + extras: FrozenSet[str], ) -> Iterator[Candidate]: """Produce explicit candidates from the base given an extra-ed package. @@ -394,7 +394,7 @@ def find_candidates( explicit_candidates.update( self._iter_explicit_candidates_from_base( requirements.get(parsed_requirement.name, ()), - frozenset(canonicalize_name(e) for e in parsed_requirement.extras), + frozenset(parsed_requirement.extras), ), ) @@ -454,7 +454,7 @@ def _make_requirement_from_install_req( self._fail_if_link_is_unsupported_wheel(ireq.link) cand = self._make_candidate_from_link( ireq.link, - extras=frozenset(canonicalize_name(e) for e in ireq.extras), + extras=frozenset(ireq.extras), template=ireq, name=canonicalize_name(ireq.name) if ireq.name else None, version=None, From 4aa6d88ddcccde3e0f189b447f0c8886ceebe008 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 11 May 2023 15:12:30 +0800 Subject: [PATCH 468/730] Remove extra normalization from format_name util Since this function now always take normalized names, additional normalization is no longer needed. 
--- src/pip/_internal/resolution/resolvelib/base.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/base.py b/src/pip/_internal/resolution/resolvelib/base.py index 0275385db71..9c0ef5ca7b9 100644 --- a/src/pip/_internal/resolution/resolvelib/base.py +++ b/src/pip/_internal/resolution/resolvelib/base.py @@ -1,7 +1,7 @@ from typing import FrozenSet, Iterable, Optional, Tuple, Union from pip._vendor.packaging.specifiers import SpecifierSet -from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.utils import NormalizedName from pip._vendor.packaging.version import LegacyVersion, Version from pip._internal.models.link import Link, links_equivalent @@ -15,8 +15,8 @@ def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str: if not extras: return project - canonical_extras = sorted(canonicalize_name(e) for e in extras) - return "{}[{}]".format(project, ",".join(canonical_extras)) + extras_expr = ",".join(sorted(extras)) + return f"{project}[{extras_expr}]" class Constraint: From 07874252f50c3d423d3fdac2cb07ea31952348af Mon Sep 17 00:00:00 2001 From: stonebig Date: Sat, 20 May 2023 13:22:38 +0200 Subject: [PATCH 469/730] make rejection message for count 8 slightly different --- src/pip/_internal/resolution/resolvelib/reporter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/resolution/resolvelib/reporter.py b/src/pip/_internal/resolution/resolvelib/reporter.py index 3c724238a1e..3be4bd17e6f 100644 --- a/src/pip/_internal/resolution/resolvelib/reporter.py +++ b/src/pip/_internal/resolution/resolvelib/reporter.py @@ -22,7 +22,7 @@ def __init__(self) -> None: 8: ( "pip is looking at multiple versions of {package_name} to " "determine which version is compatible with other " - "requirements. This could take a while." + "requirements. This could take quite a while." 
), 13: ( "This is taking longer than usual. You might need to provide " From 1552934c088269437ca183a18a571833e94c8c05 Mon Sep 17 00:00:00 2001 From: stonebig Date: Sat, 20 May 2023 17:12:03 +0200 Subject: [PATCH 470/730] Update reporter.py --- src/pip/_internal/resolution/resolvelib/reporter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/reporter.py b/src/pip/_internal/resolution/resolvelib/reporter.py index 3be4bd17e6f..12adeff7b6e 100644 --- a/src/pip/_internal/resolution/resolvelib/reporter.py +++ b/src/pip/_internal/resolution/resolvelib/reporter.py @@ -20,9 +20,9 @@ def __init__(self) -> None: "requirements. This could take a while." ), 8: ( - "pip is looking at multiple versions of {package_name} to " + "pip is still looking at multiple versions of {package_name} to " "determine which version is compatible with other " - "requirements. This could take quite a while." + "requirements. This could take a while." ), 13: ( "This is taking longer than usual. You might need to provide " From 1d74e7abbdc9a9ef0226ae1c6c410962bbdf501b Mon Sep 17 00:00:00 2001 From: stonebig Date: Sat, 20 May 2023 17:39:36 +0200 Subject: [PATCH 471/730] Create 12040.feature.rst --- news/12040.feature.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12040.feature.rst diff --git a/news/12040.feature.rst b/news/12040.feature.rst new file mode 100644 index 00000000000..beff856f9c7 --- /dev/null +++ b/news/12040.feature.rst @@ -0,0 +1 @@ +make rejection messages slightly different between 1 and 8, so the user can make the difference. 
From e8b4bae05ead0e3929002838f5d4d56a9eb9bbc9 Mon Sep 17 00:00:00 2001 From: Felipe Peter Date: Sun, 21 May 2023 00:29:06 +0800 Subject: [PATCH 472/730] Remove superfluous "together" --- src/pip/_internal/commands/list.py | 6 ++---- tests/functional/test_list.py | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index c4df1008a76..ac10353194f 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -105,8 +105,7 @@ def add_options(self) -> None: choices=("columns", "freeze", "json"), help=( "Select the output format among: columns (default), freeze, or json. " - "The 'freeze' format cannot be used together with the --outdated " - "option." + "The 'freeze' format cannot be used with the --outdated option." ), ) @@ -161,8 +160,7 @@ def run(self, options: Values, args: List[str]) -> int: if options.outdated and options.list_format == "freeze": raise CommandError( - "List format 'freeze' cannot be used together with the --outdated " - "option." + "List format 'freeze' cannot be used with the --outdated option." ) cmdoptions.check_list_path_option(options) diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index d930346d1ff..4f2be8387f2 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -588,7 +588,7 @@ def test_outdated_formats(script: PipTestEnvironment, data: TestData) -> None: expect_error=True, ) assert ( - "List format 'freeze' cannot be used together with the --outdated option." + "List format 'freeze' cannot be used with the --outdated option." 
in result.stderr ) From 743e6a31af0b99411b489cef0cd5371ae5c9ca98 Mon Sep 17 00:00:00 2001 From: Chris Kuehl Date: Sat, 20 May 2023 20:01:16 -0500 Subject: [PATCH 473/730] Add test for metadata mismatch (passes on main already) --- tests/functional/test_download.py | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 31418ca8c2b..3190c4004c6 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -1266,6 +1266,8 @@ class Package: metadata: MetadataKind # This will override any dependencies specified in the actual dist's METADATA. requires_dist: Tuple[str, ...] = () + # This will override the Name specified in the actual dist's METADATA. + metadata_name: str = None def metadata_filename(self) -> str: """This is specified by PEP 658.""" @@ -1296,7 +1298,7 @@ def generate_metadata(self) -> bytes: return dedent( f"""\ Metadata-Version: 2.1 - Name: {self.name} + Name: {self.metadata_name or self.name} Version: {self.version} {self.requires_str()} """ @@ -1452,6 +1454,14 @@ def run_for_generated_index( ), # This will raise an error when pip attempts to fetch the metadata file. Package("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile), + # This has a METADATA file with a mismatched name. 
+ Package( + "simple2", + "3.0", + "simple2-3.0.tar.gz", + MetadataKind.Sha256, + metadata_name="not-simple2", + ), ], "colander": [ # Ensure we can read the dependencies from a metadata file within a wheel @@ -1581,3 +1591,18 @@ def test_metadata_not_found( f"ERROR: 404 Client Error: FileNotFoundError for url:.*{expected_re}" ) assert pattern.search(result.stderr), (pattern, result.stderr) + + +def test_produces_error_for_mismatched_package_name_in_metadata( + download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], +) -> None: + """Verify that the package name from the metadata matches the requested package.""" + result, _ = download_generated_html_index( + _simple_packages, + ["simple2==3.0"], + allow_error=True, + ) + assert result.returncode != 0 + assert ( + "simple2-3.0.tar.gz has inconsistent Name: expected 'simple2', but metadata has 'not-simple2'" + ) in result.stdout From 5f0dd4c60e19f903fdc5c2565967c4b9c7ed9e9d Mon Sep 17 00:00:00 2001 From: Chris Kuehl Date: Sat, 20 May 2023 20:10:53 -0500 Subject: [PATCH 474/730] Add regression test for canonicalized package names for PEP658 metadata --- tests/functional/test_download.py | 45 ++++++++++++++++++++++++++++--- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 3190c4004c6..8da185c066e 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -8,7 +8,7 @@ from hashlib import sha256 from pathlib import Path from textwrap import dedent -from typing import Callable, Dict, List, Tuple +from typing import Callable, Dict, List, Optional, Tuple import pytest @@ -1267,7 +1267,7 @@ class Package: # This will override any dependencies specified in the actual dist's METADATA. requires_dist: Tuple[str, ...] = () # This will override the Name specified in the actual dist's METADATA. 
- metadata_name: str = None + metadata_name: Optional[str] = None def metadata_filename(self) -> str: """This is specified by PEP 658.""" @@ -1501,6 +1501,16 @@ def run_for_generated_index( "priority", "1.0", "priority-1.0-py2.py3-none-any.whl", MetadataKind.NoFile ), ], + "requires-simple-extra": [ + # Metadata name is not canonicalized. + Package( + "requires-simple-extra", + "0.1", + "requires_simple_extra-0.1-py2.py3-none-any.whl", + MetadataKind.Sha256, + metadata_name="Requires_Simple.Extra", + ), + ], } @@ -1604,5 +1614,34 @@ def test_produces_error_for_mismatched_package_name_in_metadata( ) assert result.returncode != 0 assert ( - "simple2-3.0.tar.gz has inconsistent Name: expected 'simple2', but metadata has 'not-simple2'" + "simple2-3.0.tar.gz has inconsistent Name: expected 'simple2', but metadata " + "has 'not-simple2'" ) in result.stdout + + +@pytest.mark.parametrize( + "requirement", + ( + "requires-simple-extra==0.1", + "REQUIRES_SIMPLE-EXTRA==0.1", + "REQUIRES....simple-_-EXTRA==0.1", + ), +) +def test_canonicalizes_package_name_before_verifying_metadata( + download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + requirement: str, +) -> None: + """Verify that the package name from the command line and the package's + METADATA are both canonicalized before comparison. 
+ + Regression test for https://github.com/pypa/pip/issues/12038 + """ + result, download_dir = download_generated_html_index( + _simple_packages, + [requirement], + allow_error=True, + ) + assert result.returncode == 0 + assert os.listdir(download_dir) == [ + "requires_simple_extra-0.1-py2.py3-none-any.whl", + ] From 767bb40755243e1519be262c44abbda81d06868b Mon Sep 17 00:00:00 2001 From: Chris Kuehl Date: Sat, 20 May 2023 20:11:11 -0500 Subject: [PATCH 475/730] Canonicalize package names before comparison for PEP658 metadata --- news/12038.bugfix.rst | 1 + src/pip/_internal/operations/prepare.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 news/12038.bugfix.rst diff --git a/news/12038.bugfix.rst b/news/12038.bugfix.rst new file mode 100644 index 00000000000..184d88dd826 --- /dev/null +++ b/news/12038.bugfix.rst @@ -0,0 +1 @@ +Fix installation of packages with PEP658 metadata using non-canonicalized names diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 2273315234d..6c41d26f54b 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -410,7 +410,7 @@ def _fetch_metadata_using_link_data_attr( # NB: raw_name will fall back to the name from the install requirement if # the Name: field is not present, but it's noted in the raw_name docstring # that that should NEVER happen anyway. 
- if metadata_dist.raw_name != req.req.name: + if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name): raise MetadataInconsistent( req, "Name", req.req.name, metadata_dist.raw_name ) From 3aaf5c32d6d069f1d3faa754dd4b7c7ea880037d Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Mon, 22 May 2023 00:40:20 +0100 Subject: [PATCH 476/730] Add `.git-blame-ignore-revs` for Black reformatting commits (#12045) Co-authored-by: Pradyun Gedam --- .git-blame-ignore-revs | 35 +++++++++++++++++++++++++++++++++++ MANIFEST.in | 1 + 2 files changed, 36 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000000..c7644d0e6e6 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,35 @@ +917b41d6d73535c090fc312668dff353cdaef906 # Blacken docs/html/conf.py +ed383dd8afa8fe0250dcf9b8962927ada0e21c89 # Blacken docs/pip_sphinxext.py +228405e62451abe8a66233573035007df4be575f # Blacken noxfile.py +f477a9f490e978177b71c9dbaa5465c51ea21129 # Blacken setup.py +e59ba23468390217479465019f8d78e724a23550 # Blacken src/pip/__main__.py +d7013db084e9a52242354ee5754dc5d19ccf062e # Blacken src/pip/_internal/build_env.py +30e9ffacae75378fc3e3df48f754dabad037edb9 # Blacken src/pip/_internal/cache.py +8341d56b46776a805286218ac5fb0e7850fd9341 # Blacken src/pip/_internal/cli/autocompletion.py +3d3461ed65208656358b3595e25d8c31c5c89470 # Blacken src/pip/_internal/cli/base_command.py +d489b0f1b104bc936b0fb17e6c33633664ebdc0e # Blacken src/pip/_internal/cli/cmdoptions.py +591fe4841aefe9befa0530f2a54f820c4ecbb392 # Blacken src/pip/_internal/cli/command_context.py +9265b28ef7248ae1847a80384dbeeb8119c3e2f5 # Blacken src/pip/_internal/cli/main.py +847a369364878c38d210c90beed2737bb6fb3a85 # Blacken src/pip/_internal/cli/main_parser.py +ec97119067041ae58b963935ff5f0e5d9fead80c # Blacken src/pip/_internal/cli/parser.py +6e3b8de22fa39fa3073599ecf9db61367f4b3b32 # Blacken 
src/pip/_internal/cli/progress_bars.py +55405227de983c5bd5bf0858ea12dbe537d3e490 # Blacken src/pip/_internal/cli/req_command.py +d5ca5c850cae9a0c64882a8f49d3a318699a7e2e # Blacken src/pip/_internal/cli/spinners.py +9747cb48f8430a7a91b36fe697dd18dbddb319f0 # Blacken src/pip/_internal/commands/__init__.py +1c09fd6f124df08ca36bed68085ad68e89bb1957 # Blacken src/pip/_internal/commands/cache.py +315e93d7eb87cd476afcc4eaf0f01a7b56a5037f # Blacken src/pip/_internal/commands/check.py +8ae3b96ed7d24fd24024ccce4840da0dcf635f26 # Blacken src/pip/_internal/commands/completion.py +42ca4792202f26a293ee48380718743a80bbee37 # Blacken src/pip/_internal/commands/configuration.py +790ad78fcd43d41a5bef9dca34a3c128d05eb02c # Blacken src/pip/_internal/commands/debug.py +a6fcc8f045afe257ce321f4012fc8fcb4be01eb3 # Blacken src/pip/_internal/commands/download.py +920e735dfc60109351fbe2f4c483c2f6ede9e52d # Blacken src/pip/_internal/commands/freeze.py +053004e0fcf0851238b1064fbce13aea87b24e9c # Blacken src/pip/_internal/commands/hash.py +a6b6ae487e52c2242045b64cb8962e0a992cfd76 # Blacken src/pip/_internal/commands/help.py +2495cf95a6c7eb61ccf1f9f0e8b8d736af914e53 # Blacken __main__.py +c7ee560e00b85f7486b452c14ff49e4737996eda # Blacken tools/ +8e2e1964a4f0a060f7299a96a911c9e116b2283d # Blacken src/pip/_internal/commands/ +1bc0eef05679e87f45540ab0a294667cb3c6a88e # Blacken src/pip/_internal/network/ +069b01932a7d64a81c708c6254cc93e1f89e6783 # Blacken src/pip/_internal/req +1897784d59e0d5fcda2dd75fea54ddd8be3d502a # Blacken src/pip/_internal/index +94999255d5ede440c37137d210666fdf64302e75 # Reformat the codebase, with black +585037a80a1177f1fa92e159a7079855782e543e # Cleanup implicit string concatenation diff --git a/MANIFEST.in b/MANIFEST.in index 4716f415730..f896c0258e6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -14,6 +14,7 @@ recursive-include src/pip/_vendor *COPYING* include docs/docutils.conf include docs/requirements.txt +exclude .git-blame-ignore-revs exclude .coveragerc exclude 
.mailmap exclude .appveyor.yml From 7de6455e11f1c2d149fe888654c547ebacf879c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= Date: Mon, 22 May 2023 13:27:06 +0200 Subject: [PATCH 477/730] Remove RIGHT-TO-LEFT OVERRIDE from AUTHORS.txt The names don't seem to be right-to-left anyway. --- AUTHORS.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/AUTHORS.txt b/AUTHORS.txt index e9d3c38916f..f6ece21ce58 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -473,7 +473,7 @@ Miro Hrončok Monica Baluna montefra Monty Taylor -Muha Ajjan‮ +Muha Ajjan Nadav Wexler Nahuel Ambrosini Nate Coraor @@ -725,4 +725,4 @@ Zvezdan Petkovic Łukasz Langa Роман Донченко Семён Марьясин -‮rekcäH nitraM‮ +Martin Häcker From 75f54cae9271179b8cc80435f92336c97e349f9d Mon Sep 17 00:00:00 2001 From: Nate Prewitt Date: Mon, 22 May 2023 17:20:03 -0600 Subject: [PATCH 478/730] Upgrade vendored Requests --- news/requests.vendor.rst | 1 + src/pip/_vendor/requests/__init__.py | 8 +-- src/pip/_vendor/requests/__version__.py | 4 +- src/pip/_vendor/requests/_internal_utils.py | 6 +- src/pip/_vendor/requests/adapters.py | 72 ++++----------------- src/pip/_vendor/requests/api.py | 6 +- src/pip/_vendor/requests/sessions.py | 4 +- src/pip/_vendor/requests/utils.py | 30 +++++---- src/pip/_vendor/vendor.txt | 2 +- 9 files changed, 50 insertions(+), 83 deletions(-) create mode 100644 news/requests.vendor.rst diff --git a/news/requests.vendor.rst b/news/requests.vendor.rst new file mode 100644 index 00000000000..cf10d8cbb85 --- /dev/null +++ b/news/requests.vendor.rst @@ -0,0 +1 @@ +Upgrade Requests to 2.31.0 diff --git a/src/pip/_vendor/requests/__init__.py b/src/pip/_vendor/requests/__init__.py index a4776248038..10ff67ff4d2 100644 --- a/src/pip/_vendor/requests/__init__.py +++ b/src/pip/_vendor/requests/__init__.py @@ -63,10 +63,10 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver # Check urllib3 for compatibility. 
major, minor, patch = urllib3_version # noqa: F811 major, minor, patch = int(major), int(minor), int(patch) - # urllib3 >= 1.21.1, <= 1.26 - assert major == 1 - assert minor >= 21 - assert minor <= 26 + # urllib3 >= 1.21.1 + assert major >= 1 + if major == 1: + assert minor >= 21 # Check charset_normalizer for compatibility. if chardet_version: diff --git a/src/pip/_vendor/requests/__version__.py b/src/pip/_vendor/requests/__version__.py index 69be3dec741..5063c3f8ee7 100644 --- a/src/pip/_vendor/requests/__version__.py +++ b/src/pip/_vendor/requests/__version__.py @@ -5,8 +5,8 @@ __title__ = "requests" __description__ = "Python HTTP for Humans." __url__ = "https://requests.readthedocs.io" -__version__ = "2.28.2" -__build__ = 0x022802 +__version__ = "2.31.0" +__build__ = 0x023100 __author__ = "Kenneth Reitz" __author_email__ = "me@kennethreitz.org" __license__ = "Apache 2.0" diff --git a/src/pip/_vendor/requests/_internal_utils.py b/src/pip/_vendor/requests/_internal_utils.py index 7dc9bc53360..f2cf635e293 100644 --- a/src/pip/_vendor/requests/_internal_utils.py +++ b/src/pip/_vendor/requests/_internal_utils.py @@ -14,9 +14,11 @@ _VALID_HEADER_VALUE_RE_BYTE = re.compile(rb"^\S[^\r\n]*$|^$") _VALID_HEADER_VALUE_RE_STR = re.compile(r"^\S[^\r\n]*$|^$") +_HEADER_VALIDATORS_STR = (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR) +_HEADER_VALIDATORS_BYTE = (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE) HEADER_VALIDATORS = { - bytes: (_VALID_HEADER_NAME_RE_BYTE, _VALID_HEADER_VALUE_RE_BYTE), - str: (_VALID_HEADER_NAME_RE_STR, _VALID_HEADER_VALUE_RE_STR), + bytes: _HEADER_VALIDATORS_BYTE, + str: _HEADER_VALIDATORS_STR, } diff --git a/src/pip/_vendor/requests/adapters.py b/src/pip/_vendor/requests/adapters.py index f68f7d46753..10c176790b6 100644 --- a/src/pip/_vendor/requests/adapters.py +++ b/src/pip/_vendor/requests/adapters.py @@ -22,7 +22,6 @@ from pip._vendor.urllib3.exceptions import ReadTimeoutError, ResponseError from pip._vendor.urllib3.exceptions 
import SSLError as _SSLError from pip._vendor.urllib3.poolmanager import PoolManager, proxy_from_url -from pip._vendor.urllib3.response import HTTPResponse from pip._vendor.urllib3.util import Timeout as TimeoutSauce from pip._vendor.urllib3.util import parse_url from pip._vendor.urllib3.util.retry import Retry @@ -194,7 +193,6 @@ def init_poolmanager( num_pools=connections, maxsize=maxsize, block=block, - strict=True, **pool_kwargs, ) @@ -485,63 +483,19 @@ def send( timeout = TimeoutSauce(connect=timeout, read=timeout) try: - if not chunked: - resp = conn.urlopen( - method=request.method, - url=url, - body=request.body, - headers=request.headers, - redirect=False, - assert_same_host=False, - preload_content=False, - decode_content=False, - retries=self.max_retries, - timeout=timeout, - ) - - # Send the request. - else: - if hasattr(conn, "proxy_pool"): - conn = conn.proxy_pool - - low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) - - try: - skip_host = "Host" in request.headers - low_conn.putrequest( - request.method, - url, - skip_accept_encoding=True, - skip_host=skip_host, - ) - - for header, value in request.headers.items(): - low_conn.putheader(header, value) - - low_conn.endheaders() - - for i in request.body: - low_conn.send(hex(len(i))[2:].encode("utf-8")) - low_conn.send(b"\r\n") - low_conn.send(i) - low_conn.send(b"\r\n") - low_conn.send(b"0\r\n\r\n") - - # Receive the response from the server - r = low_conn.getresponse() - - resp = HTTPResponse.from_httplib( - r, - pool=conn, - connection=low_conn, - preload_content=False, - decode_content=False, - ) - except Exception: - # If we hit any problems here, clean up the connection. - # Then, raise so that we can handle the actual exception. 
- low_conn.close() - raise + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout, + chunked=chunked, + ) except (ProtocolError, OSError) as err: raise ConnectionError(err, request=request) diff --git a/src/pip/_vendor/requests/api.py b/src/pip/_vendor/requests/api.py index 2f71aaed1af..cd0b3eeac3e 100644 --- a/src/pip/_vendor/requests/api.py +++ b/src/pip/_vendor/requests/api.py @@ -106,7 +106,7 @@ def post(url, data=None, json=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response @@ -121,7 +121,7 @@ def put(url, data=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response @@ -136,7 +136,7 @@ def patch(url, data=None, **kwargs): :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. 
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response ` object :rtype: requests.Response diff --git a/src/pip/_vendor/requests/sessions.py b/src/pip/_vendor/requests/sessions.py index 6cb3b4dae39..dbcf2a7b0ee 100644 --- a/src/pip/_vendor/requests/sessions.py +++ b/src/pip/_vendor/requests/sessions.py @@ -324,7 +324,9 @@ def rebuild_proxies(self, prepared_request, proxies): except KeyError: username, password = None, None - if username and password: + # urllib3 handles proxy authorization for us in the standard adapter. + # Avoid appending this to TLS tunneled requests where it may be leaked. + if not scheme.startswith('https') and username and password: headers["Proxy-Authorization"] = _basic_auth_str(username, password) return new_proxies diff --git a/src/pip/_vendor/requests/utils.py b/src/pip/_vendor/requests/utils.py index 33f394d265d..36607eda2ec 100644 --- a/src/pip/_vendor/requests/utils.py +++ b/src/pip/_vendor/requests/utils.py @@ -25,7 +25,12 @@ from .__version__ import __version__ # to_native_string is unused here, but imported here for backwards compatibility -from ._internal_utils import HEADER_VALIDATORS, to_native_string # noqa: F401 +from ._internal_utils import ( # noqa: F401 + _HEADER_VALIDATORS_BYTE, + _HEADER_VALIDATORS_STR, + HEADER_VALIDATORS, + to_native_string, +) from .compat import ( Mapping, basestring, @@ -1031,20 +1036,23 @@ def check_header_validity(header): :param header: tuple, in the format (name, value). 
""" name, value = header + _validate_header_part(header, name, 0) + _validate_header_part(header, value, 1) - for part in header: - if type(part) not in HEADER_VALIDATORS: - raise InvalidHeader( - f"Header part ({part!r}) from {{{name!r}: {value!r}}} must be " - f"of type str or bytes, not {type(part)}" - ) - - _validate_header_part(name, "name", HEADER_VALIDATORS[type(name)][0]) - _validate_header_part(value, "value", HEADER_VALIDATORS[type(value)][1]) +def _validate_header_part(header, header_part, header_validator_index): + if isinstance(header_part, str): + validator = _HEADER_VALIDATORS_STR[header_validator_index] + elif isinstance(header_part, bytes): + validator = _HEADER_VALIDATORS_BYTE[header_validator_index] + else: + raise InvalidHeader( + f"Header part ({header_part!r}) from {header} " + f"must be of type str or bytes, not {type(header_part)}" + ) -def _validate_header_part(header_part, header_kind, validator): if not validator.match(header_part): + header_kind = "name" if header_validator_index == 0 else "value" raise InvalidHeader( f"Invalid leading whitespace, reserved character(s), or return" f"character(s) in header {header_kind}: {header_part!r}" diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 61063459d6d..732e9021725 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -7,7 +7,7 @@ packaging==21.3 platformdirs==3.2.0 pyparsing==3.0.9 pyproject-hooks==1.0.0 -requests==2.28.2 +requests==2.31.0 certifi==2022.12.7 chardet==5.1.0 idna==3.4 From 1ca4529dc02902830be2a668a4e598f22f154071 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 29 May 2023 12:34:21 +0200 Subject: [PATCH 479/730] Ignore invalid origin.json in wheel cache --- news/11985.bugfix.rst | 1 + src/pip/_internal/cache.py | 42 ++++++++++++++++++++++++++++---------- tests/unit/test_req.py | 19 +++++++++++++++++ 3 files changed, 51 insertions(+), 11 deletions(-) create mode 100644 news/11985.bugfix.rst diff --git 
a/news/11985.bugfix.rst b/news/11985.bugfix.rst new file mode 100644 index 00000000000..66c8e878681 --- /dev/null +++ b/news/11985.bugfix.rst @@ -0,0 +1 @@ +Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. diff --git a/src/pip/_internal/cache.py b/src/pip/_internal/cache.py index 05f0a9acb24..8d3a664c7d1 100644 --- a/src/pip/_internal/cache.py +++ b/src/pip/_internal/cache.py @@ -194,7 +194,17 @@ def __init__( self.origin: Optional[DirectUrl] = None origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME if origin_direct_url_path.exists(): - self.origin = DirectUrl.from_json(origin_direct_url_path.read_text()) + try: + self.origin = DirectUrl.from_json( + origin_direct_url_path.read_text(encoding="utf-8") + ) + except Exception as e: + logger.warning( + "Ignoring invalid cache entry origin file %s for %s (%s)", + origin_direct_url_path, + link.filename, + e, + ) class WheelCache(Cache): @@ -257,16 +267,26 @@ def get_cache_entry( @staticmethod def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None: origin_path = Path(cache_dir) / ORIGIN_JSON_NAME - if origin_path.is_file(): - origin = DirectUrl.from_json(origin_path.read_text()) - # TODO: use DirectUrl.equivalent when https://github.com/pypa/pip/pull/10564 - # is merged. - if origin.url != download_info.url: + if origin_path.exists(): + try: + origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8")) + except Exception as e: logger.warning( - "Origin URL %s in cache entry %s does not match download URL %s. " - "This is likely a pip bug or a cache corruption issue.", - origin.url, - cache_dir, - download_info.url, + "Could not read origin file %s in cache entry (%s). " + "Will attempt to overwrite it.", + origin_path, + e, ) + else: + # TODO: use DirectUrl.equivalent when + # https://github.com/pypa/pip/pull/10564 is merged. 
+ if origin.url != download_info.url: + logger.warning( + "Origin URL %s in cache entry %s does not match download URL " + "%s. This is likely a pip bug or a cache corruption issue. " + "Will overwrite it with the new value.", + origin.url, + cache_dir, + download_info.url, + ) origin_path.write_text(download_info.to_json(), encoding="utf-8") diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index c9742812be4..74b9712dc03 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -445,6 +445,25 @@ def test_download_info_archive_cache_with_origin( assert isinstance(req.download_info.info, ArchiveInfo) assert req.download_info.info.hash == hash + def test_download_info_archive_cache_with_invalid_origin( + self, tmp_path: Path, shared_data: TestData, caplog: pytest.LogCaptureFixture + ) -> None: + """Test an invalid origin.json is ignored.""" + url = shared_data.packages.joinpath("simple-1.0.tar.gz").as_uri() + finder = make_test_finder() + wheel_cache = WheelCache(str(tmp_path / "cache")) + cache_entry_dir = wheel_cache.get_path_for_link(Link(url)) + Path(cache_entry_dir).mkdir(parents=True) + Path(cache_entry_dir).joinpath("origin.json").write_text("{") # invalid json + wheel.make_wheel(name="simple", version="1.0").save_to_dir(cache_entry_dir) + with self._basic_resolver(finder, wheel_cache=wheel_cache) as resolver: + ireq = get_processed_req_from_line(f"simple @ {url}") + reqset = resolver.resolve([ireq], True) + assert len(reqset.all_requirements) == 1 + req = reqset.all_requirements[0] + assert req.is_wheel_from_cache + assert "Ignoring invalid cache entry origin file" in caplog.messages[0] + def test_download_info_local_wheel(self, data: TestData) -> None: """Test that download_info is set for requirements from a local wheel.""" finder = make_test_finder() From ab1b312cc5c42efeec21eba71b00a12a706e0162 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 29 May 2023 09:51:08 -0500 Subject: [PATCH 480/730] Update vendored typing 
extensions to 4.6 (#12056) --- news/typing-extensions.vendor.rst | 1 + src/pip/_vendor/typing_extensions.py | 860 +++++++++++++++++++++------ src/pip/_vendor/vendor.txt | 2 +- 3 files changed, 683 insertions(+), 180 deletions(-) create mode 100644 news/typing-extensions.vendor.rst diff --git a/news/typing-extensions.vendor.rst b/news/typing-extensions.vendor.rst new file mode 100644 index 00000000000..6c95fc1b8d7 --- /dev/null +++ b/news/typing-extensions.vendor.rst @@ -0,0 +1 @@ +Updated typing_extensions to 4.6.0 diff --git a/src/pip/_vendor/typing_extensions.py b/src/pip/_vendor/typing_extensions.py index 9cbf5b87b59..ae740bff748 100644 --- a/src/pip/_vendor/typing_extensions.py +++ b/src/pip/_vendor/typing_extensions.py @@ -33,6 +33,7 @@ 'Coroutine', 'AsyncGenerator', 'AsyncContextManager', + 'Buffer', 'ChainMap', # Concrete collection types. @@ -45,7 +46,13 @@ 'TypedDict', # Structural checks, a.k.a. protocols. + 'SupportsAbs', + 'SupportsBytes', + 'SupportsComplex', + 'SupportsFloat', 'SupportsIndex', + 'SupportsInt', + 'SupportsRound', # One-off things. 'Annotated', @@ -58,6 +65,7 @@ 'final', 'get_args', 'get_origin', + 'get_original_bases', 'get_type_hints', 'IntVar', 'is_typeddict', @@ -71,6 +79,7 @@ 'runtime_checkable', 'Text', 'TypeAlias', + 'TypeAliasType', 'TypeGuard', 'TYPE_CHECKING', 'Never', @@ -86,7 +95,13 @@ # The functions below are modified copies of typing internal helpers. # They are needed by _ProtocolMeta and they provide support for PEP 646. 
-_marker = object() + +class _Sentinel: + def __repr__(self): + return "" + + +_marker = _Sentinel() def _check_generic(cls, parameters, elen=_marker): @@ -260,21 +275,70 @@ def IntVar(name): return typing.TypeVar(name) -# 3.8+: -if hasattr(typing, 'Literal'): +# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 +if sys.version_info >= (3, 10, 1): Literal = typing.Literal -# 3.7: else: + def _flatten_literal_params(parameters): + """An internal helper for Literal creation: flatten Literals among parameters""" + params = [] + for p in parameters: + if isinstance(p, _LiteralGenericAlias): + params.extend(p.__args__) + else: + params.append(p) + return tuple(params) + + def _value_and_type_iter(params): + for p in params: + yield p, type(p) + + class _LiteralGenericAlias(typing._GenericAlias, _root=True): + def __eq__(self, other): + if not isinstance(other, _LiteralGenericAlias): + return NotImplemented + these_args_deduped = set(_value_and_type_iter(self.__args__)) + other_args_deduped = set(_value_and_type_iter(other.__args__)) + return these_args_deduped == other_args_deduped + + def __hash__(self): + return hash(frozenset(_value_and_type_iter(self.__args__))) + class _LiteralForm(typing._SpecialForm, _root=True): + def __init__(self, doc: str): + self._name = 'Literal' + self._doc = self.__doc__ = doc def __repr__(self): return 'typing_extensions.' 
+ self._name def __getitem__(self, parameters): - return typing._GenericAlias(self, parameters) + if not isinstance(parameters, tuple): + parameters = (parameters,) - Literal = _LiteralForm('Literal', - doc="""A type that can be used to indicate to type checkers + parameters = _flatten_literal_params(parameters) + + val_type_pairs = list(_value_and_type_iter(parameters)) + try: + deduped_pairs = set(val_type_pairs) + except TypeError: + # unhashable parameters + pass + else: + # similar logic to typing._deduplicate on Python 3.9+ + if len(deduped_pairs) < len(val_type_pairs): + new_parameters = [] + for pair in val_type_pairs: + if pair in deduped_pairs: + new_parameters.append(pair[0]) + deduped_pairs.remove(pair) + assert not deduped_pairs, deduped_pairs + parameters = tuple(new_parameters) + + return _LiteralGenericAlias(self, parameters) + + Literal = _LiteralForm(doc="""\ + A type that can be used to indicate to type checkers that the corresponding value has a value literally equivalent to the provided parameter. 
For example: @@ -288,7 +352,7 @@ def __getitem__(self, parameters): instead of a type.""") -_overload_dummy = typing._overload_dummy # noqa +_overload_dummy = typing._overload_dummy if hasattr(typing, "get_overloads"): # 3.11+ @@ -383,40 +447,54 @@ def clear_overloads(): Counter = typing.Counter ChainMap = typing.ChainMap AsyncGenerator = typing.AsyncGenerator -NewType = typing.NewType Text = typing.Text TYPE_CHECKING = typing.TYPE_CHECKING -_PROTO_WHITELIST = ['Callable', 'Awaitable', - 'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', - 'ContextManager', 'AsyncContextManager'] +_PROTO_ALLOWLIST = { + 'collections.abc': [ + 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', + ], + 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], +} + + +_EXCLUDED_ATTRS = { + "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol", + "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__", + "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__", + "__subclasshook__", "__orig_class__", "__init__", "__new__", + "__protocol_attrs__", "__callable_proto_members_only__", +} + +if sys.version_info < (3, 8): + _EXCLUDED_ATTRS |= { + "_gorg", "__next_in_mro__", "__extra__", "__tree_hash__", "__args__", + "__origin__" + } + +if sys.version_info >= (3, 9): + _EXCLUDED_ATTRS.add("__class_getitem__") + +if sys.version_info >= (3, 12): + _EXCLUDED_ATTRS.add("__type_params__") + +_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS) def _get_protocol_attrs(cls): attrs = set() for base in cls.__mro__[:-1]: # without object - if base.__name__ in ('Protocol', 'Generic'): + if base.__name__ in {'Protocol', 'Generic'}: continue annotations = getattr(base, '__annotations__', {}) - for attr in list(base.__dict__.keys()) + list(annotations.keys()): - if (not attr.startswith('_abc_') and attr not in ( 
- '__abstractmethods__', '__annotations__', '__weakref__', - '_is_protocol', '_is_runtime_protocol', '__dict__', - '__args__', '__slots__', - '__next_in_mro__', '__parameters__', '__origin__', - '__orig_bases__', '__extra__', '__tree_hash__', - '__doc__', '__subclasshook__', '__init__', '__new__', - '__module__', '_MutableMapping__marker', '_gorg')): + for attr in (*base.__dict__, *annotations): + if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): attrs.add(attr) return attrs -def _is_callable_members_only(cls): - return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls)) - - def _maybe_adjust_parameters(cls): """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__. @@ -426,7 +504,7 @@ def _maybe_adjust_parameters(cls): """ tvars = [] if '__orig_bases__' in cls.__dict__: - tvars = typing._collect_type_vars(cls.__orig_bases__) + tvars = _collect_type_vars(cls.__orig_bases__) # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn]. # If found, tvars must be a subset of it. # If not found, tvars is it. @@ -457,33 +535,82 @@ def _maybe_adjust_parameters(cls): cls.__parameters__ = tuple(tvars) -# 3.8+ -if hasattr(typing, 'Protocol'): +def _caller(depth=2): + try: + return sys._getframe(depth).f_globals.get('__name__', '__main__') + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + +# The performance of runtime-checkable protocols is significantly improved on Python 3.12, +# so we backport the 3.12 version of Protocol to Python <=3.11 +if sys.version_info >= (3, 12): Protocol = typing.Protocol -# 3.7 + runtime_checkable = typing.runtime_checkable else: + def _allow_reckless_class_checks(depth=4): + """Allow instance and class checks for special stdlib modules. + The abc and functools modules indiscriminately call isinstance() and + issubclass() on the whole MRO of a user class, which may contain protocols. 
+ """ + return _caller(depth) in {'abc', 'functools', None} def _no_init(self, *args, **kwargs): if type(self)._is_protocol: raise TypeError('Protocols cannot be instantiated') - class _ProtocolMeta(abc.ABCMeta): # noqa: B024 - # This metaclass is a bit unfortunate and exists only because of the lack - # of __instancehook__. + class _ProtocolMeta(abc.ABCMeta): + # This metaclass is somewhat unfortunate, + # but is necessary for several reasons... + def __init__(cls, *args, **kwargs): + super().__init__(*args, **kwargs) + if getattr(cls, "_is_protocol", False): + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + cls.__callable_proto_members_only__ = all( + callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ + ) + + def __subclasscheck__(cls, other): + if ( + getattr(cls, '_is_protocol', False) + and not cls.__callable_proto_members_only__ + and not _allow_reckless_class_checks(depth=3) + ): + raise TypeError( + "Protocols with non-method members don't support issubclass()" + ) + return super().__subclasscheck__(other) + def __instancecheck__(cls, instance): # We need this method for situations where attributes are # assigned in __init__. 
- if ((not getattr(cls, '_is_protocol', False) or - _is_callable_members_only(cls)) and - issubclass(instance.__class__, cls)): + if not getattr(cls, "_is_protocol", False): + # i.e., it's a concrete subclass of a protocol + return super().__instancecheck__(instance) + + if ( + not getattr(cls, '_is_runtime_protocol', False) and + not _allow_reckless_class_checks(depth=2) + ): + raise TypeError("Instance and class checks can only be used with" + " @runtime_checkable protocols") + + if super().__instancecheck__(instance): + return True + + for attr in cls.__protocol_attrs__: + try: + val = inspect.getattr_static(instance, attr) + except AttributeError: + break + if val is None and callable(getattr(cls, attr, None)): + break + else: return True - if cls._is_protocol: - if all(hasattr(instance, attr) and - (not callable(getattr(cls, attr, None)) or - getattr(instance, attr) is not None) - for attr in _get_protocol_attrs(cls)): - return True - return super().__instancecheck__(instance) + + return False class Protocol(metaclass=_ProtocolMeta): # There is quite a lot of overlapping code with typing.Generic. @@ -519,6 +646,7 @@ def meth(self) -> T: """ __slots__ = () _is_protocol = True + _is_runtime_protocol = False def __new__(cls, *args, **kwds): if cls is Protocol: @@ -534,7 +662,7 @@ def __class_getitem__(cls, params): raise TypeError( f"Parameter list to {cls.__qualname__}[...] cannot be empty") msg = "Parameters to generic types must be types." - params = tuple(typing._type_check(p, msg) for p in params) # noqa + params = tuple(typing._type_check(p, msg) for p in params) if cls is Protocol: # Generic can only be subscripted with unique type variables. 
if not all(isinstance(p, typing.TypeVar) for p in params): @@ -570,19 +698,14 @@ def _proto_hook(other): if not cls.__dict__.get('_is_protocol', None): return NotImplemented if not getattr(cls, '_is_runtime_protocol', False): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: + if _allow_reckless_class_checks(): return NotImplemented raise TypeError("Instance and class checks can only be used with" " @runtime protocols") - if not _is_callable_members_only(cls): - if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']: - return NotImplemented - raise TypeError("Protocols with non-method members" - " don't support issubclass()") if not isinstance(other, type): # Same error as for issubclass(1, int) raise TypeError('issubclass() arg 1 must be a class') - for attr in _get_protocol_attrs(cls): + for attr in cls.__protocol_attrs__: for base in other.__mro__: if attr in base.__dict__: if base.__dict__[attr] is None: @@ -607,19 +730,14 @@ def _proto_hook(other): # Check consistency of bases. for base in cls.__bases__: if not (base in (object, typing.Generic) or - base.__module__ == 'collections.abc' and - base.__name__ in _PROTO_WHITELIST or + base.__module__ in _PROTO_ALLOWLIST and + base.__name__ in _PROTO_ALLOWLIST[base.__module__] or isinstance(base, _ProtocolMeta) and base._is_protocol): raise TypeError('Protocols can only inherit from other' f' protocols, got {repr(base)}') - cls.__init__ = _no_init - + if cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init -# 3.8+ -if hasattr(typing, 'runtime_checkable'): - runtime_checkable = typing.runtime_checkable -# 3.7 -else: def runtime_checkable(cls): """Mark a protocol class as a runtime protocol, so that it can be used with isinstance() and issubclass(). Raise TypeError @@ -628,7 +746,10 @@ def runtime_checkable(cls): This allows a simple-minded structural check very similar to the one-offs in collections.abc such as Hashable. 
""" - if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol: + if not ( + (isinstance(cls, _ProtocolMeta) or issubclass(cls, typing.Generic)) + and getattr(cls, "_is_protocol", False) + ): raise TypeError('@runtime_checkable can be only applied to protocol classes,' f' got {cls!r}') cls._is_runtime_protocol = True @@ -639,11 +760,52 @@ def runtime_checkable(cls): runtime = runtime_checkable -# 3.8+ -if hasattr(typing, 'SupportsIndex'): +# Our version of runtime-checkable protocols is faster on Python 3.7-3.11 +if sys.version_info >= (3, 12): + SupportsInt = typing.SupportsInt + SupportsFloat = typing.SupportsFloat + SupportsComplex = typing.SupportsComplex + SupportsBytes = typing.SupportsBytes SupportsIndex = typing.SupportsIndex -# 3.7 + SupportsAbs = typing.SupportsAbs + SupportsRound = typing.SupportsRound else: + @runtime_checkable + class SupportsInt(Protocol): + """An ABC with one abstract method __int__.""" + __slots__ = () + + @abc.abstractmethod + def __int__(self) -> int: + pass + + @runtime_checkable + class SupportsFloat(Protocol): + """An ABC with one abstract method __float__.""" + __slots__ = () + + @abc.abstractmethod + def __float__(self) -> float: + pass + + @runtime_checkable + class SupportsComplex(Protocol): + """An ABC with one abstract method __complex__.""" + __slots__ = () + + @abc.abstractmethod + def __complex__(self) -> complex: + pass + + @runtime_checkable + class SupportsBytes(Protocol): + """An ABC with one abstract method __bytes__.""" + __slots__ = () + + @abc.abstractmethod + def __bytes__(self) -> bytes: + pass + @runtime_checkable class SupportsIndex(Protocol): __slots__ = () @@ -652,8 +814,30 @@ class SupportsIndex(Protocol): def __index__(self) -> int: pass + @runtime_checkable + class SupportsAbs(Protocol[T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. 
+ """ + __slots__ = () -if hasattr(typing, "Required"): + @abc.abstractmethod + def __abs__(self) -> T_co: + pass + + @runtime_checkable + class SupportsRound(Protocol[T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. + """ + __slots__ = () + + @abc.abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +if sys.version_info >= (3, 12): # The standard library TypedDict in Python 3.8 does not store runtime information # about which (if any) keys are optional. See https://bugs.python.org/issue38834 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" @@ -661,15 +845,15 @@ def __index__(self) -> int: # The standard library TypedDict below Python 3.11 does not store runtime # information about optional and required keys when using Required or NotRequired. # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + # Aaaand on 3.12 we add __orig_bases__ to TypedDict + # to enable better runtime introspection. TypedDict = typing.TypedDict _TypedDictMeta = typing._TypedDictMeta is_typeddict = typing.is_typeddict else: def _check_fails(cls, other): try: - if sys._getframe(1).f_globals['__name__'] not in ['abc', - 'functools', - 'typing']: + if _caller() not in {'abc', 'functools', 'typing'}: # Typed dicts are only for static structural subtyping. 
raise TypeError('TypedDict does not support instance and class checks') except (AttributeError, ValueError): @@ -692,7 +876,6 @@ def _typeddict_new(*args, total=True, **kwargs): typename, args = args[0], args[1:] # allow the "_typename" keyword be passed elif '_typename' in kwargs: typename = kwargs.pop('_typename') - import warnings warnings.warn("Passing '_typename' as keyword argument is deprecated", DeprecationWarning, stacklevel=2) else: @@ -707,7 +890,6 @@ def _typeddict_new(*args, total=True, **kwargs): 'were given') elif '_fields' in kwargs and len(kwargs) == 1: fields = kwargs.pop('_fields') - import warnings warnings.warn("Passing '_fields' as keyword argument is deprecated", DeprecationWarning, stacklevel=2) else: @@ -719,12 +901,20 @@ def _typeddict_new(*args, total=True, **kwargs): raise TypeError("TypedDict takes either a dict or keyword arguments," " but not both") + if kwargs: + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated, " + "may be removed in a future version, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + ns = {'__annotations__': dict(fields)} - try: + module = _caller() + if module is not None: # Setting correct module is necessary to make typed dict classes pickleable. - ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - pass + ns['__module__'] = module return _TypedDictMeta(typename, (), ns, total=total) @@ -751,9 +941,14 @@ def __new__(cls, name, bases, ns, total=True): # Instead, monkey-patch __bases__ onto the class after it's been created. 
tp_dict = super().__new__(cls, name, (dict,), ns) - if any(issubclass(base, typing.Generic) for base in bases): + is_generic = any(issubclass(base, typing.Generic) for base in bases) + + if is_generic: tp_dict.__bases__ = (typing.Generic, dict) _maybe_adjust_parameters(tp_dict) + else: + # generic TypedDicts get __orig_bases__ from Generic + tp_dict.__orig_bases__ = bases or (TypedDict,) annotations = {} own_annotations = ns.get('__annotations__', {}) @@ -872,9 +1067,6 @@ def greet(name: str) -> None: if hasattr(typing, "Required"): get_type_hints = typing.get_type_hints else: - import functools - import types - # replaces _strip_annotations() def _strip_extras(t): """Strips Annotated, Required and NotRequired from a given type.""" @@ -887,12 +1079,12 @@ def _strip_extras(t): if stripped_args == t.__args__: return t return t.copy_with(stripped_args) - if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias): + if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): stripped_args = tuple(_strip_extras(a) for a in t.__args__) if stripped_args == t.__args__: return t - return types.GenericAlias(t.__origin__, stripped_args) - if hasattr(types, "UnionType") and isinstance(t, types.UnionType): + return _types.GenericAlias(t.__origin__, stripped_args) + if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): stripped_args = tuple(_strip_extras(a) for a in t.__args__) if stripped_args == t.__args__: return t @@ -1155,42 +1347,62 @@ def __repr__(self): above.""") +def _set_default(type_param, default): + if isinstance(default, (tuple, list)): + type_param.__default__ = tuple((typing._type_check(d, "Default must be a type") + for d in default)) + elif default != _marker: + type_param.__default__ = typing._type_check(default, "Default must be a type") + else: + type_param.__default__ = None + + +def _set_module(typevarlike): + # for pickling: + def_mod = _caller(depth=3) + if def_mod != 'typing_extensions': + 
typevarlike.__module__ = def_mod + + class _DefaultMixin: """Mixin for TypeVarLike defaults.""" __slots__ = () + __init__ = _set_default - def __init__(self, default): - if isinstance(default, (tuple, list)): - self.__default__ = tuple((typing._type_check(d, "Default must be a type") - for d in default)) - elif default != _marker: - self.__default__ = typing._type_check(default, "Default must be a type") - else: - self.__default__ = None + +# Classes using this metaclass must provide a _backported_typevarlike ClassVar +class _TypeVarLikeMeta(type): + def __instancecheck__(cls, __instance: Any) -> bool: + return isinstance(__instance, cls._backported_typevarlike) # Add default and infer_variance parameters from PEP 696 and 695 -class TypeVar(typing.TypeVar, _DefaultMixin, _root=True): +class TypeVar(metaclass=_TypeVarLikeMeta): """Type variable.""" - __module__ = 'typing' + _backported_typevarlike = typing.TypeVar - def __init__(self, name, *constraints, bound=None, - covariant=False, contravariant=False, - default=_marker, infer_variance=False): - super().__init__(name, *constraints, bound=bound, covariant=covariant, - contravariant=contravariant) - _DefaultMixin.__init__(self, default) - self.__infer_variance__ = infer_variance + def __new__(cls, name, *constraints, bound=None, + covariant=False, contravariant=False, + default=_marker, infer_variance=False): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant, + infer_variance=infer_variance) + else: + typevar = typing.TypeVar(name, *constraints, bound=bound, + covariant=covariant, contravariant=contravariant) + if infer_variance and (covariant or contravariant): + raise ValueError("Variance cannot be specified with infer_variance.") + typevar.__infer_variance__ = infer_variance + _set_default(typevar, default) + _set_module(typevar) + return typevar - 
# for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") # Python 3.10+ has PEP 612 @@ -1258,25 +1470,33 @@ def __eq__(self, other): # 3.10+ if hasattr(typing, 'ParamSpec'): - # Add default Parameter - PEP 696 - class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True): - """Parameter specification variable.""" - - __module__ = 'typing' + # Add default parameter - PEP 696 + class ParamSpec(metaclass=_TypeVarLikeMeta): + """Parameter specification.""" + + _backported_typevarlike = typing.ParamSpec + + def __new__(cls, name, *, bound=None, + covariant=False, contravariant=False, + infer_variance=False, default=_marker): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented, can pass infer_variance to typing.TypeVar + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant, + infer_variance=infer_variance) + else: + paramspec = typing.ParamSpec(name, bound=bound, + covariant=covariant, + contravariant=contravariant) + paramspec.__infer_variance__ = infer_variance - def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - default=_marker): - super().__init__(name, bound=bound, covariant=covariant, - contravariant=contravariant) - _DefaultMixin.__init__(self, default) + _set_default(paramspec, default) + _set_module(paramspec) + return paramspec - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") # 3.7-3.9 else: @@ -1341,11 +1561,12 @@ def 
kwargs(self): return ParamSpecKwargs(self) def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - default=_marker): + infer_variance=False, default=_marker): super().__init__([self]) self.__name__ = name self.__covariant__ = bool(covariant) self.__contravariant__ = bool(contravariant) + self.__infer_variance__ = bool(infer_variance) if bound: self.__bound__ = typing._type_check(bound, 'Bound must be a type.') else: @@ -1353,15 +1574,14 @@ def __init__(self, name, *, bound=None, covariant=False, contravariant=False, _DefaultMixin.__init__(self, default) # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None + def_mod = _caller() if def_mod != 'typing_extensions': self.__module__ = def_mod def __repr__(self): - if self.__covariant__: + if self.__infer_variance__: + prefix = '' + elif self.__covariant__: prefix = '+' elif self.__contravariant__: prefix = '-' @@ -1436,7 +1656,7 @@ def _concatenate_getitem(self, parameters): # 3.10+ if hasattr(typing, 'Concatenate'): Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811 # 3.9 elif sys.version_info[:2] >= (3, 9): @_TypeAliasForm @@ -1793,10 +2013,60 @@ class Movie(TypedDict): """) -if hasattr(typing, "Unpack"): # 3.11+ +_UNPACK_DOC = """\ +Type unpack operator. + +The type unpack operator takes the child types from some container type, +such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For +example: + + # For some generic class `Foo`: + Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] + + Ts = TypeVarTuple('Ts') + # Specifies that `Bar` is generic in an arbitrary number of types. + # (Think of `Ts` as a tuple of an arbitrary number of individual + # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the + # `Generic[]`.) 
+ class Bar(Generic[Unpack[Ts]]): ... + Bar[int] # Valid + Bar[int, str] # Also valid + +From Python 3.11, this can also be done using the `*` operator: + + Foo[*tuple[int, str]] + class Bar(Generic[*Ts]): ... + +The operator can also be used along with a `TypedDict` to annotate +`**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + +Note that there is only some runtime checking of this operator. Not +everything the runtime allows may be accepted by static type checkers. + +For more information, see PEP 646 and PEP 692. +""" + + +if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] Unpack = typing.Unpack + + def _is_unpack(obj): + return get_origin(obj) is Unpack + elif sys.version_info[:2] >= (3, 9): class _UnpackSpecialForm(typing._SpecialForm, _root=True): + def __init__(self, getitem): + super().__init__(getitem) + self.__doc__ = _UNPACK_DOC + def __repr__(self): return 'typing_extensions.' + self._name @@ -1805,16 +2075,6 @@ class _UnpackAlias(typing._GenericAlias, _root=True): @_UnpackSpecialForm def Unpack(self, parameters): - """A special typing construct to unpack a variadic type. For example: - - Shape = TypeVarTuple('Shape') - Batch = NewType('Batch', int) - - def add_batch_axis( - x: Array[Unpack[Shape]] - ) -> Array[Batch, Unpack[Shape]]: ... - - """ item = typing._type_check(parameters, f'{self._name} accepts only a single type.') return _UnpackAlias(self, (item,)) @@ -1834,18 +2094,7 @@ def __getitem__(self, parameters): f'{self._name} accepts only a single type.') return _UnpackAlias(self, (item,)) - Unpack = _UnpackForm( - 'Unpack', - doc="""A special typing construct to unpack a variadic type. 
For example: - - Shape = TypeVarTuple('Shape') - Batch = NewType('Batch', int) - - def add_batch_axis( - x: Array[Unpack[Shape]] - ) -> Array[Batch, Unpack[Shape]]: ... - - """) + Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC) def _is_unpack(obj): return isinstance(obj, _UnpackAlias) @@ -1853,21 +2102,20 @@ def _is_unpack(obj): if hasattr(typing, "TypeVarTuple"): # 3.11+ - # Add default Parameter - PEP 696 - class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True): + # Add default parameter - PEP 696 + class TypeVarTuple(metaclass=_TypeVarLikeMeta): """Type variable tuple.""" - def __init__(self, name, *, default=_marker): - super().__init__(name) - _DefaultMixin.__init__(self, default) + _backported_typevarlike = typing.TypeVarTuple - # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None - if def_mod != 'typing_extensions': - self.__module__ = def_mod + def __new__(cls, name, *, default=_marker): + tvt = typing.TypeVarTuple(name) + _set_default(tvt, default) + _set_module(tvt) + return tvt + + def __init_subclass__(self, *args, **kwds): + raise TypeError("Cannot subclass special typing classes") else: class TypeVarTuple(_DefaultMixin): @@ -1925,10 +2173,7 @@ def __init__(self, name, *, default=_marker): _DefaultMixin.__init__(self, default) # for pickling: - try: - def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): - def_mod = None + def_mod = _caller() if def_mod != 'typing_extensions': self.__module__ = def_mod @@ -2163,7 +2408,15 @@ def g(x: str) -> int: ... When this decorator is applied to an object, the type checker will generate a diagnostic on usage of the deprecated object. - No runtime warning is issued. The decorator sets the ``__deprecated__`` + The warning specified by ``category`` will be emitted on use + of deprecated objects. For functions, that happens on calls; + for classes, on instantiation. 
If the ``category`` is ``None``, + no warning is emitted. The ``stacklevel`` determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + + The decorator sets the ``__deprecated__`` attribute on the decorated object to the deprecation message passed to the decorator. If applied to an overload, the decorator must be after the ``@overload`` decorator for the attribute to @@ -2223,18 +2476,13 @@ def wrapper(*args, **kwargs): typing._check_generic = _check_generic -# Backport typing.NamedTuple as it exists in Python 3.11. +# Backport typing.NamedTuple as it exists in Python 3.12. # In 3.11, the ability to define generic `NamedTuple`s was supported. # This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. -if sys.version_info >= (3, 11): +# On 3.12, we added __orig_bases__ to call-based NamedTuples +if sys.version_info >= (3, 12): NamedTuple = typing.NamedTuple else: - def _caller(): - try: - return sys._getframe(2).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): # For platforms without _getframe() - return None - def _make_nmtuple(name, types, module, defaults=()): fields = [n for n, t in types] annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") @@ -2294,7 +2542,9 @@ def NamedTuple(__typename, __fields=None, **kwargs): elif kwargs: raise TypeError("Either list of fields or keywords" " can be provided to NamedTuple, not both") - return _make_nmtuple(__typename, __fields, module=_caller()) + nt = _make_nmtuple(__typename, __fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt NamedTuple.__doc__ = typing.NamedTuple.__doc__ _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) @@ -2310,3 +2560,255 @@ def _namedtuple_mro_entries(bases): return (_NamedTuple,) NamedTuple.__mro_entries__ = _namedtuple_mro_entries + + +if 
hasattr(collections.abc, "Buffer"): + Buffer = collections.abc.Buffer +else: + class Buffer(abc.ABC): + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there is no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + + """ + + # As a courtesy, register the most common stdlib buffer classes. + Buffer.register(memoryview) + Buffer.register(bytearray) + Buffer.register(bytes) + + +# Backport of types.get_original_bases, available on 3.12+ in CPython +if hasattr(_types, "get_original_bases"): + get_original_bases = _types.get_original_bases +else: + def get_original_bases(__cls): + """Return the class's "original" bases prior to modification by `__mro_entries__`. + + Examples:: + + from typing import TypeVar, Generic + from pip._vendor.typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... 
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return __cls.__orig_bases__ + except AttributeError: + try: + return __cls.__bases__ + except AttributeError: + raise TypeError( + f'Expected an instance of type, not {type(__cls).__name__!r}' + ) from None + + +# NewType is a class on Python 3.10+, making it pickleable +# The error message for subclassing instances of NewType was improved on 3.11+ +if sys.version_info >= (3, 11): + NewType = typing.NewType +else: + class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ + + def __call__(self, obj): + return obj + + def __init__(self, name, tp): + self.__qualname__ = name + if '.' in name: + name = name.rpartition('.')[-1] + self.__name__ = name + self.__supertype__ = tp + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + + def __mro_entries__(self, bases): + # We defined __mro_entries__ to get a better error message + # if a user attempts to subclass a NewType instance. bpo-46170 + supercls_name = self.__name__ + + class Dummy: + def __init_subclass__(cls): + subcls_name = cls.__name__ + raise TypeError( + f"Cannot subclass an instance of NewType. 
" + f"Perhaps you were looking for: " + f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" + ) + + return (Dummy,) + + def __repr__(self): + return f'{self.__module__}.{self.__qualname__}' + + def __reduce__(self): + return self.__qualname__ + + if sys.version_info >= (3, 10): + # PEP 604 methods + # It doesn't make sense to have these methods on Python <3.10 + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + +if hasattr(typing, "TypeAliasType"): + TypeAliasType = typing.TypeAliasType +else: + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance(obj, ( + type, + _types.GenericAlias, + _types.UnionType, + TypeAliasType, + )) + + class TypeAliasType: + """Create named, parameterized type aliases. + + This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + + is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + + The name ListOrSet can then be used as an alias for the type it refers to. + + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. + + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: + + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). 
+ + """ + + def __init__(self, name: str, value, *, type_params=()): + if not isinstance(name, str): + raise TypeError("TypeAliasType name must be a string") + self.__value__ = value + self.__type_params__ = type_params + + parameters = [] + for type_param in type_params: + if isinstance(type_param, TypeVarTuple): + parameters.extend(type_param) + else: + parameters.append(type_param) + self.__parameters__ = tuple(parameters) + def_mod = _caller() + if def_mod != 'typing_extensions': + self.__module__ = def_mod + # Setting this attribute closes the TypeAliasType from further modification + self.__name__ = name + + def __setattr__(self, __name: str, __value: object) -> None: + if hasattr(self, "__name__"): + self._raise_attribute_error(__name) + super().__setattr__(__name, __value) + + def __delattr__(self, __name: str) -> Never: + self._raise_attribute_error(__name) + + def _raise_attribute_error(self, name: str) -> Never: + # Match the Python 3.12 error messages exactly + if name == "__name__": + raise AttributeError("readonly attribute") + elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: + raise AttributeError( + f"attribute '{name}' of 'typing.TypeAliasType' objects " + "is not writable" + ) + else: + raise AttributeError( + f"'typing.TypeAliasType' object has no attribute '{name}'" + ) + + def __repr__(self) -> str: + return self.__name__ + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + parameters = [ + typing._type_check( + item, f'Subscripting {self.__name__} requires a type.' + ) + for item in parameters + ] + return typing._GenericAlias(self, tuple(parameters)) + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + "type 'typing_extensions.TypeAliasType' is not an acceptable base type" + ) + + # The presence of this method convinces typing._type_check + # that TypeAliasTypes are types. 
+ def __call__(self): + raise TypeError("Type alias is not callable") + + if sys.version_info >= (3, 10): + def __or__(self, right): + # For forward compatibility with 3.12, reject Unions + # that are not accepted by the built-in Union. + if not _is_unionable(right): + return NotImplemented + return typing.Union[self, right] + + def __ror__(self, left): + if not _is_unionable(left): + return NotImplemented + return typing.Union[left, self] diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 732e9021725..dcf89dc04c5 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -14,7 +14,7 @@ requests==2.31.0 urllib3==1.26.15 rich==13.3.3 pygments==2.14.0 - typing_extensions==4.5.0 + typing_extensions==4.6.0 resolvelib==1.0.1 setuptools==67.7.2 six==1.16.0 From 83f42685b79eff2e4b2154de8fcff694a4638da2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Wed, 31 May 2023 09:13:33 +0200 Subject: [PATCH 481/730] Revert temporary fix for issue 9540 --- src/pip/__main__.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/src/pip/__main__.py b/src/pip/__main__.py index fe34a7b7772..5991326115f 100644 --- a/src/pip/__main__.py +++ b/src/pip/__main__.py @@ -1,6 +1,5 @@ import os import sys -import warnings # Remove '' and current working directory from the first entry # of sys.path, if present to avoid using current directory @@ -20,12 +19,6 @@ sys.path.insert(0, path) if __name__ == "__main__": - # Work around the error reported in #9540, pending a proper fix. - # Note: It is essential the warning filter is set *before* importing - # pip, as the deprecation happens at import time, not runtime. 
- warnings.filterwarnings( - "ignore", category=DeprecationWarning, module=".*packaging\\.version" - ) from pip._internal.cli.main import main as _main sys.exit(_main()) From 8d381eeec23cd1a0901266b189740d1a20f08534 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 2 Jun 2023 11:03:49 +0100 Subject: [PATCH 482/730] Warn if the --python option is not specified before the subcommand name --- news/12067.bugfix.rst | 1 + src/pip/_internal/cli/base_command.py | 12 ++++++++++++ 2 files changed, 13 insertions(+) create mode 100644 news/12067.bugfix.rst diff --git a/news/12067.bugfix.rst b/news/12067.bugfix.rst new file mode 100644 index 00000000000..84f2d235e79 --- /dev/null +++ b/news/12067.bugfix.rst @@ -0,0 +1 @@ +Warn if the ``--python`` option is specified after the subcommand name. diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 637fba18cfc..87e6cf6deaf 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -131,6 +131,18 @@ def _main(self, args: List[str]) -> int: ", ".join(sorted(always_enabled_features)), ) + + # Make sure that the --python argument isn't specified after the + # subcommand. We can tell, because if --python was specified, + # we should only reach this point if we're running in the created + # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment + # variable set. + if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: + logger.warning( + "The --python option is ignored if placed after " + "the pip subcommand name" + ) + # TODO: Try to get these passing down from the command? # without resorting to os.environ to hold these. # This also affects isolated builds and it should. 
From 6aa4d48b23e11f6a837b61f4c3fda1a247758357 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 11:53:43 +0100 Subject: [PATCH 483/730] Fix the test failure --- tests/functional/test_install.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 63712827479..8559d93684b 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1157,7 +1157,7 @@ def test_install_nonlocal_compatible_wheel( "--find-links", data.find_links, "--only-binary=:all:", - "--python", + "--python-version", "3", "--platform", "fakeplat", @@ -1177,7 +1177,7 @@ def test_install_nonlocal_compatible_wheel( "--find-links", data.find_links, "--only-binary=:all:", - "--python", + "--python-version", "3", "--platform", "fakeplat", From 073666e2994896a66eff3a474dac3cf03b2dfdd9 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 11:57:04 +0100 Subject: [PATCH 484/730] Make this an error rather than a warning --- news/12067.bugfix.rst | 2 +- src/pip/_internal/cli/base_command.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/news/12067.bugfix.rst b/news/12067.bugfix.rst index 84f2d235e79..87d76bc2b06 100644 --- a/news/12067.bugfix.rst +++ b/news/12067.bugfix.rst @@ -1 +1 @@ -Warn if the ``--python`` option is specified after the subcommand name. +Fail with an error if the ``--python`` option is specified after the subcommand name. diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 87e6cf6deaf..5130a45053f 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -138,10 +138,10 @@ def _main(self, args: List[str]) -> int: # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment # variable set. 
if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ: - logger.warning( - "The --python option is ignored if placed after " - "the pip subcommand name" + logger.critical( + "The --python option must be placed before the pip subcommand name" ) + sys.exit(ERROR) # TODO: Try to get these passing down from the command? # without resorting to os.environ to hold these. From 16f145d30657093dab2fe66cd3695d248c46db45 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 12:03:22 +0100 Subject: [PATCH 485/730] Add a test for the error --- tests/functional/test_python_option.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/functional/test_python_option.py b/tests/functional/test_python_option.py index 8bf16d7a56b..ca124933e12 100644 --- a/tests/functional/test_python_option.py +++ b/tests/functional/test_python_option.py @@ -39,3 +39,14 @@ def test_python_interpreter( script.pip("--python", env_path, "uninstall", "simplewheel", "--yes") result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == before + +def test_error_python_option_wrong_location( + script: PipTestEnvironment, + tmpdir: Path, + shared_data: TestData, +) -> None: + env_path = os.fspath(tmpdir / "venv") + env = EnvBuilder(with_pip=False) + env.create(env_path) + + script.pip("list", "--python", env_path, "--format=json", expect_error=True) From de8f0b5ed17fc59fab1f20e9cca92c24ca89136d Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 12:11:41 +0100 Subject: [PATCH 486/730] Lint --- src/pip/_internal/cli/base_command.py | 1 - tests/functional/test_python_option.py | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 5130a45053f..6a3b8e6c213 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -131,7 +131,6 @@ def _main(self, args: List[str]) -> int: ", 
".join(sorted(always_enabled_features)), ) - # Make sure that the --python argument isn't specified after the # subcommand. We can tell, because if --python was specified, # we should only reach this point if we're running in the created diff --git a/tests/functional/test_python_option.py b/tests/functional/test_python_option.py index ca124933e12..ecfd819eb7c 100644 --- a/tests/functional/test_python_option.py +++ b/tests/functional/test_python_option.py @@ -40,6 +40,7 @@ def test_python_interpreter( result = script.pip("--python", env_path, "list", "--format=json") assert json.loads(result.stdout) == before + def test_error_python_option_wrong_location( script: PipTestEnvironment, tmpdir: Path, From 80cb6f443fca2c51b19ec4c3853b6261d2f4b6e6 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 14:26:05 +0100 Subject: [PATCH 487/730] Switch to ruff for linting --- .pre-commit-config.yaml | 13 ++++--------- noxfile.py | 2 +- pyproject.toml | 30 ++++++++++++++++++++++++++++++ 3 files changed, 35 insertions(+), 10 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2fc455b9d64..dd2fc623522 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,16 +21,11 @@ repos: hooks: - id: black -- repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 +- repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. 
+ rev: v0.0.270 hooks: - - id: flake8 - additional_dependencies: [ - 'flake8-bugbear', - 'flake8-logging-format', - 'flake8-implicit-str-concat', - ] - exclude: tests/data + - id: ruff - repo: https://github.com/PyCQA/isort rev: 5.12.0 diff --git a/noxfile.py b/noxfile.py index 565a5039955..ee03447d359 100644 --- a/noxfile.py +++ b/noxfile.py @@ -219,7 +219,7 @@ def pinned_requirements(path: Path) -> Iterator[Tuple[str, str]]: new_version = old_version for inner_name, inner_version in pinned_requirements(vendor_txt): if inner_name == name: - # this is a dedicated assignment, to make flake8 happy + # this is a dedicated assignment, to make lint happy new_version = inner_version break else: diff --git a/pyproject.toml b/pyproject.toml index 139c37e18d7..18c990ef42d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,3 +71,33 @@ setuptools = "pkg_resources" CacheControl = "https://raw.githubusercontent.com/ionrock/cachecontrol/v0.12.6/LICENSE.txt" distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt" webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE" + +[tool.ruff] +exclude = [ + "./build", + ".nox", + ".tox", + ".scratch", + "_vendor", + "data", +] +ignore = [ + "B019", + "B020", + "B904", # Ruff enables opinionated warnings by default + "B905", # Ruff enables opinionated warnings by default + "G202", +] +line-length = 88 +select = [ + "B", + "E", + "F", + "W", + "G", + "ISC", +] + +[tool.ruff.per-file-ignores] +"noxfile.py" = ["G"] +"tests/*" = ["B011"] From 9824a426d466680d132aa027add88ec0dda9116f Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 14:32:44 +0100 Subject: [PATCH 488/730] Fix new lint errors --- src/pip/_internal/network/auth.py | 4 +++- tests/lib/__init__.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/network/auth.py b/src/pip/_internal/network/auth.py index c0efa765c85..94a82fa6618 100644 --- a/src/pip/_internal/network/auth.py +++ 
b/src/pip/_internal/network/auth.py @@ -514,7 +514,9 @@ def handle_401(self, resp: Response, **kwargs: Any) -> Response: # Consume content and release the original connection to allow our new # request to reuse the same one. - resp.content + # The result of the assignment isn't used, it's just needed to consume + # the content. + _ = resp.content resp.raw.release_conn() # Add our new username and password to the request diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 7410072f50e..2c6dbafb901 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -684,7 +684,9 @@ def run( # Pass expect_stderr=True to allow any stderr. We do this because # we do our checking of stderr further on in check_stderr(). kw["expect_stderr"] = True - result = super().run(cwd=cwd, *args, **kw) + # Ignore linter check + # B026 Star-arg unpacking after a keyword argument is strongly discouraged + result = super().run(cwd=cwd, *args, **kw) # noqa if expect_error and not allow_error: if result.returncode == 0: From 0af7d6de7adfdc2a0c3b5854cac8bccca3f7e887 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 14:36:34 +0100 Subject: [PATCH 489/730] Tidy up file exclusions --- pyproject.toml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 18c990ef42d..5f5dfa6414e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,10 +73,8 @@ distlib = "https://bitbucket.org/pypa/distlib/raw/master/LICENSE.txt" webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LICENSE" [tool.ruff] -exclude = [ +extend-exclude = [ "./build", - ".nox", - ".tox", ".scratch", "_vendor", "data", From 7c3418b2d03aeae665a0a0f01b584ffba4bc57ea Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 14:50:10 +0100 Subject: [PATCH 490/730] Add explicit ID to noqa comment Co-authored-by: q0w <43147888+q0w@users.noreply.github.com> --- tests/lib/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 2c6dbafb901..7c06feaf38c 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -686,7 +686,7 @@ def run( kw["expect_stderr"] = True # Ignore linter check # B026 Star-arg unpacking after a keyword argument is strongly discouraged - result = super().run(cwd=cwd, *args, **kw) # noqa + result = super().run(cwd=cwd, *args, **kw) # noqa: B026 if expect_error and not allow_error: if result.returncode == 0: From bdef9159bd3734381fe90a907750c8e168170c38 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 5 Jun 2023 15:06:11 +0100 Subject: [PATCH 491/730] Use ruff for import sorting as well --- .pre-commit-config.yaml | 6 ------ pyproject.toml | 7 +++++++ tests/functional/test_install_compat.py | 8 ++++++-- tests/functional/test_install_upgrade.py | 9 +++++++-- tests/functional/test_install_user.py | 2 +- tests/functional/test_install_vcs_git.py | 2 +- tests/functional/test_wheel.py | 7 +++++-- 7 files changed, 27 insertions(+), 14 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index dd2fc623522..b0aef0d60b1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,12 +27,6 @@ repos: hooks: - id: ruff -- repo: https://github.com/PyCQA/isort - rev: 5.12.0 - hooks: - - id: isort - files: \.py$ - - repo: https://github.com/pre-commit/mirrors-mypy rev: v0.961 hooks: diff --git a/pyproject.toml b/pyproject.toml index 5f5dfa6414e..b7c0d154598 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -94,8 +94,15 @@ select = [ "W", "G", "ISC", + "I", ] [tool.ruff.per-file-ignores] "noxfile.py" = ["G"] "tests/*" = ["B011"] + +[tool.ruff.isort] +# We need to explicitly make pip "first party" as it's imported by code in +# the docs and tests directories. 
+known-first-party = ["pip"] +known-third-party = ["pip._vendor"] diff --git a/tests/functional/test_install_compat.py b/tests/functional/test_install_compat.py index ae27ebd536e..8374d487b1f 100644 --- a/tests/functional/test_install_compat.py +++ b/tests/functional/test_install_compat.py @@ -7,8 +7,12 @@ import pytest -from tests.lib import pyversion # noqa: F401 -from tests.lib import PipTestEnvironment, TestData, assert_all_changes +from tests.lib import ( + PipTestEnvironment, + TestData, + assert_all_changes, + pyversion, # noqa: F401 +) @pytest.mark.network diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py index fc61d70bc5e..09c01d7eb18 100644 --- a/tests/functional/test_install_upgrade.py +++ b/tests/functional/test_install_upgrade.py @@ -6,8 +6,13 @@ import pytest -from tests.lib import pyversion # noqa: F401 -from tests.lib import PipTestEnvironment, ResolverVariant, TestData, assert_all_changes +from tests.lib import ( + PipTestEnvironment, + ResolverVariant, + TestData, + assert_all_changes, + pyversion, # noqa: F401 +) from tests.lib.local_repos import local_checkout from tests.lib.wheel import make_wheel diff --git a/tests/functional/test_install_user.py b/tests/functional/test_install_user.py index 9bdadb94203..3cae4a467e9 100644 --- a/tests/functional/test_install_user.py +++ b/tests/functional/test_install_user.py @@ -8,12 +8,12 @@ import pytest -from tests.lib import pyversion # noqa: F401 from tests.lib import ( PipTestEnvironment, TestData, create_basic_wheel_for_package, need_svn, + pyversion, # noqa: F401 ) from tests.lib.local_repos import local_checkout from tests.lib.venv import VirtualEnvironment diff --git a/tests/functional/test_install_vcs_git.py b/tests/functional/test_install_vcs_git.py index 971526c5181..2abc7aa0fd2 100644 --- a/tests/functional/test_install_vcs_git.py +++ b/tests/functional/test_install_vcs_git.py @@ -3,11 +3,11 @@ import pytest -from tests.lib import pyversion # noqa: 
F401 from tests.lib import ( PipTestEnvironment, _change_test_package_version, _create_test_package, + pyversion, # noqa: F401 ) from tests.lib.git_submodule_helpers import ( _change_test_package_submodule, diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py index 1e3e90e410f..cfaef541dcf 100644 --- a/tests/functional/test_wheel.py +++ b/tests/functional/test_wheel.py @@ -7,8 +7,11 @@ import pytest from pip._internal.cli.status_codes import ERROR -from tests.lib import pyversion # noqa: F401 -from tests.lib import PipTestEnvironment, TestData +from tests.lib import ( + PipTestEnvironment, + TestData, + pyversion, # noqa: F401 +) def add_files_to_dist_directory(folder: Path) -> None: From 16df8cdcbf895f707fe6f83bdceed8a09315018b Mon Sep 17 00:00:00 2001 From: Robert Pollak Date: Wed, 7 Jun 2023 16:04:20 +0200 Subject: [PATCH 492/730] NEWS.rst typo 'setttings' should be 'settings'. --- NEWS.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NEWS.rst b/NEWS.rst index b0ae642634d..f24aaaa4094 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -53,7 +53,7 @@ Deprecations and Removals ``--config-settings``. (`#11859 `_) - Using ``--config-settings`` with projects that don't have a ``pyproject.toml`` now prints a deprecation warning. In the future the presence of config settings will automatically - enable the default build backend for legacy projects and pass the setttings to it. (`#11915 `_) + enable the default build backend for legacy projects and pass the settings to it. (`#11915 `_) - Remove ``setup.py install`` fallback when building a wheel failed for projects without ``pyproject.toml``. 
(`#8368 `_) - When the ``wheel`` package is not installed, pip now uses the default build backend From 6c3db098ff0b6e537157eff53b1aba79fa7fa4b0 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 7 Jun 2023 20:58:40 +0100 Subject: [PATCH 493/730] Fix parsing of JSON index dist-info-metadata values --- src/pip/_internal/models/link.py | 97 +++++++++++++++++++++----------- tests/unit/test_collector.py | 37 +++++++----- 2 files changed, 89 insertions(+), 45 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index e741c3283cd..ee3045166bb 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -69,18 +69,6 @@ class LinkHash: def __post_init__(self) -> None: assert self.name in _SUPPORTED_HASHES - @classmethod - def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]: - """Parse a PEP 658 data-dist-info-metadata hash.""" - if dist_info_metadata == "true": - return None - name, sep, value = dist_info_metadata.partition("=") - if not sep: - return None - if name not in _SUPPORTED_HASHES: - return None - return cls(name=name, value=value) - @classmethod @functools.lru_cache(maxsize=None) def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]: @@ -107,6 +95,20 @@ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: return hashes.is_hash_allowed(self.name, hex_digest=self.value) +@dataclass(frozen=True) +class MetadataFile: + """Information about a core metadata file associated with a distribution.""" + + hashes: Optional[dict[str, str]] + + # TODO: Do we care about stripping out unsupported hash methods? + def __init__(self, hashes: Optional[dict[str, str]]): + if hashes: + hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES} + # We need to use this as this is a frozen dataclass + object.__setattr__(self, "hashes", hashes) + + def _clean_url_path_part(part: str) -> str: """ Clean a "part" of a URL path (i.e. after splitting on "@" characters). 
@@ -179,7 +181,7 @@ class Link(KeyBasedCompareMixin): "comes_from", "requires_python", "yanked_reason", - "dist_info_metadata", + "metadata_file_data", "cache_link_parsing", "egg_fragment", ] @@ -190,7 +192,7 @@ def __init__( comes_from: Optional[Union[str, "IndexContent"]] = None, requires_python: Optional[str] = None, yanked_reason: Optional[str] = None, - dist_info_metadata: Optional[str] = None, + metadata_file_data: Optional[MetadataFile] = None, cache_link_parsing: bool = True, hashes: Optional[Mapping[str, str]] = None, ) -> None: @@ -208,11 +210,10 @@ def __init__( a simple repository HTML link. If the file has been yanked but no reason was provided, this should be the empty string. See PEP 592 for more information and the specification. - :param dist_info_metadata: the metadata attached to the file, or None if no such - metadata is provided. This is the value of the "data-dist-info-metadata" - attribute, if present, in a simple repository HTML link. This may be parsed - into its own `Link` by `self.metadata_link()`. See PEP 658 for more - information and the specification. + :param metadata_file_data: the metadata attached to the file, or None if + no such metadata is provided. This argument, if not None, indicates + that a separate metadata file exists, and also optionally supplies + hashes for that file. :param cache_link_parsing: A flag that is used elsewhere to determine whether resources retrieved from this link should be cached. PyPI URLs should generally have this set to False, for example. @@ -220,6 +221,10 @@ def __init__( determine the validity of a download. """ + # The comes_from, requires_python, and metadata_file_data arguments are + # only used by classmethods of this class, and are not used in client + # code directly. 
+ # url can be a UNC windows share if url.startswith("\\\\"): url = path_to_url(url) @@ -239,7 +244,7 @@ def __init__( self.comes_from = comes_from self.requires_python = requires_python if requires_python else None self.yanked_reason = yanked_reason - self.dist_info_metadata = dist_info_metadata + self.metadata_file_data = metadata_file_data super().__init__(key=url, defining_class=Link) @@ -262,9 +267,20 @@ def from_json( url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url)) pyrequire = file_data.get("requires-python") yanked_reason = file_data.get("yanked") - dist_info_metadata = file_data.get("dist-info-metadata") hashes = file_data.get("hashes", {}) + # The dist-info-metadata value may be a boolean, or a dict of hashes. + metadata_info = file_data.get("dist-info-metadata", False) + if isinstance(metadata_info, dict): + # The file exists, and hashes have been supplied + metadata_file_data = MetadataFile(metadata_info) + elif metadata_info: + # The file exists, but there are no hashes + metadata_file_data = MetadataFile(None) + else: + # The file does not exist + metadata_file_data = None + # The Link.yanked_reason expects an empty string instead of a boolean. 
if yanked_reason and not isinstance(yanked_reason, str): yanked_reason = "" @@ -278,7 +294,7 @@ def from_json( requires_python=pyrequire, yanked_reason=yanked_reason, hashes=hashes, - dist_info_metadata=dist_info_metadata, + metadata_file_data=metadata_file_data, ) @classmethod @@ -298,14 +314,35 @@ def from_element( url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href)) pyrequire = anchor_attribs.get("data-requires-python") yanked_reason = anchor_attribs.get("data-yanked") - dist_info_metadata = anchor_attribs.get("data-dist-info-metadata") + + # The dist-info-metadata value may be the string "true", or a string of + # the form "hashname=hashval" + metadata_info = anchor_attribs.get("data-dist-info-metadata") + if metadata_info == "true": + # The file exists, but there are no hashes + metadata_file_data = MetadataFile(None) + elif metadata_info is None: + # The file does not exist + metadata_file_data = None + else: + # The file exists, and hashes have been supplied + hashname, sep, hashval = metadata_info.partition("=") + if sep == "=": + metadata_file_data = MetadataFile({hashname: hashval}) + else: + # Error - data is wrong. Treat as no hashes supplied. + logger.debug( + "Index returned invalid data-dist-info-metadata value: %s", + metadata_info, + ) + metadata_file_data = MetadataFile(None) return cls( url, comes_from=page_url, requires_python=pyrequire, yanked_reason=yanked_reason, - dist_info_metadata=dist_info_metadata, + metadata_file_data=metadata_file_data, ) def __str__(self) -> str: @@ -407,17 +444,13 @@ def subdirectory_fragment(self) -> Optional[str]: return match.group(1) def metadata_link(self) -> Optional["Link"]: - """Implementation of PEP 658 parsing.""" - # Note that Link.from_element() parsing the "data-dist-info-metadata" attribute - # from an HTML anchor tag is typically how the Link.dist_info_metadata attribute - # gets set. 
- if self.dist_info_metadata is None: + """Return a link to the associated core metadata file (if any).""" + if self.metadata_file_data is None: return None metadata_url = f"{self.url_without_fragment}.metadata" - metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata) - if metadata_link_hash is None: + if self.metadata_file_data.hashes is None: return Link(metadata_url) - return Link(metadata_url, hashes=metadata_link_hash.as_dict()) + return Link(metadata_url, hashes=self.metadata_file_data.hashes) def as_hashes(self) -> Hashes: return Hashes({k: [v] for k, v in self._hashes.items()}) diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index e855d78e126..b3c9fcf1f5b 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -30,6 +30,7 @@ from pip._internal.models.link import ( Link, LinkHash, + MetadataFile, _clean_url_path, _ensure_quoted_url, ) @@ -527,7 +528,7 @@ def test_parse_links_json() -> None: requires_python=">=3.7", yanked_reason=None, hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, - dist_info_metadata="sha512=aabdd41", + metadata_file_data=MetadataFile({"sha512": "aabdd41"}), ), ] @@ -603,12 +604,12 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) - ), ], ) -def test_parse_links__dist_info_metadata( +def test_parse_links__metadata_file_data( anchor_html: str, expected: Optional[str], hashes: Dict[str, str], ) -> None: - link = _test_parse_links_data_attribute(anchor_html, "dist_info_metadata", expected) + link = _test_parse_links_data_attribute(anchor_html, "metadata_file_data", expected) assert link._hashes == hashes @@ -1080,17 +1081,27 @@ def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None: @pytest.mark.parametrize( - "dist_info_metadata, result", + "metadata_attrib, expected", [ - ("sha256=aa113592bbe", LinkHash("sha256", "aa113592bbe")), - ("sha256=", LinkHash("sha256", "")), - ("sha500=aa113592bbe", None), - ("true", 
None), - ("", None), - ("aa113592bbe", None), + ("sha256=aa113592bbe", MetadataFile({"sha256": "aa113592bbe"})), + ("sha256=", MetadataFile({"sha256": ""})), + ("sha500=aa113592bbe", MetadataFile({})), + ("true", MetadataFile(None)), + (None, None), + # TODO: Are these correct? + ("", MetadataFile(None)), + ("aa113592bbe", MetadataFile(None)), ], ) -def test_pep658_hash_parsing( - dist_info_metadata: str, result: Optional[LinkHash] +def test_metadata_file_info_parsing_html( + metadata_attrib: str, expected: Optional[MetadataFile] ) -> None: - assert LinkHash.parse_pep658_hash(dist_info_metadata) == result + attribs: Dict[str, Optional[str]] = { + "href": "something", + "data-dist-info-metadata": metadata_attrib, + } + page_url = "dummy_for_comes_from" + base_url = "https://index.url/simple" + link = Link.from_element(attribs, page_url, base_url) + assert link is not None and link.metadata_file_data == expected + # TODO: Do we need to do something for the JSON data? From cc554edab8897749c87495333018754080d06781 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 7 Jun 2023 21:01:10 +0100 Subject: [PATCH 494/730] Add a news file --- news/12042.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12042.bugfix.rst diff --git a/news/12042.bugfix.rst b/news/12042.bugfix.rst new file mode 100644 index 00000000000..34d97743540 --- /dev/null +++ b/news/12042.bugfix.rst @@ -0,0 +1 @@ +Correctly parse ``dist-info-metadata`` values from JSON-format index data. 
From 8f89997d0dad1644b258297e2e3b9cc70d44e51d Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 7 Jun 2023 21:11:34 +0100 Subject: [PATCH 495/730] Fix types to be 3.7-compatible --- src/pip/_internal/models/link.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index ee3045166bb..9630448bcfb 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -99,10 +99,10 @@ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool: class MetadataFile: """Information about a core metadata file associated with a distribution.""" - hashes: Optional[dict[str, str]] + hashes: Optional[Dict[str, str]] # TODO: Do we care about stripping out unsupported hash methods? - def __init__(self, hashes: Optional[dict[str, str]]): + def __init__(self, hashes: Optional[Dict[str, str]]): if hashes: hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES} # We need to use this as this is a frozen dataclass From cfb4923d5d016dc58dc4e4b896992c476f0ddce8 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 7 Jun 2023 21:21:32 +0100 Subject: [PATCH 496/730] Fix bad test data in test_parse_links_json --- tests/unit/test_collector.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index b3c9fcf1f5b..838dd2efb88 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -492,7 +492,7 @@ def test_parse_links_json() -> None: "url": "/files/holygrail-1.0-py3-none-any.whl", "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"}, "requires-python": ">=3.7", - "dist-info-metadata": "sha512=aabdd41", + "dist-info-metadata": {"sha512": "aabdd41"}, }, ], } From 93b274eee79b9c114728f0864a29751ee7698fca Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Wed, 7 Jun 2023 21:44:48 +0100 Subject: [PATCH 497/730] Missed a change to one of the tests --- 
tests/unit/test_collector.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 838dd2efb88..513e4b1347b 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -587,19 +587,19 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) - # Test with value "true". ( '', - "true", + MetadataFile(None), {}, ), # Test with a provided hash value. ( '', # noqa: E501 - "sha256=aa113592bbe", + MetadataFile({"sha256": "aa113592bbe"}), {}, ), # Test with a provided hash value for both the requirement as well as metadata. ( '', # noqa: E501 - "sha256=aa113592bbe", + MetadataFile({"sha256": "aa113592bbe"}), {"sha512": "abc132409cb"}, ), ], From 232cc9dd5284fbc7554bcd291bf14e31413da78a Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Thu, 8 Jun 2023 09:52:51 +0100 Subject: [PATCH 498/730] Parse hash data before passing to MetadataFile --- src/pip/_internal/models/link.py | 24 ++++++++++++++++-------- tests/unit/test_collector.py | 5 ++--- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 9630448bcfb..7b45f3f3ed4 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -101,12 +101,20 @@ class MetadataFile: hashes: Optional[Dict[str, str]] - # TODO: Do we care about stripping out unsupported hash methods? - def __init__(self, hashes: Optional[Dict[str, str]]): - if hashes: - hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES} - # We need to use this as this is a frozen dataclass - object.__setattr__(self, "hashes", hashes) + def __post_init__(self) -> None: + if self.hashes is not None: + assert all(name in _SUPPORTED_HASHES for name in self.hashes) + + +def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]: + # Remove any unsupported hash types from the mapping. 
If this leaves no + # supported hashes, return None + if hashes is None: + return None + hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES} + if len(hashes) > 0: + return hashes + return None def _clean_url_path_part(part: str) -> str: @@ -273,7 +281,7 @@ def from_json( metadata_info = file_data.get("dist-info-metadata", False) if isinstance(metadata_info, dict): # The file exists, and hashes have been supplied - metadata_file_data = MetadataFile(metadata_info) + metadata_file_data = MetadataFile(supported_hashes(metadata_info)) elif metadata_info: # The file exists, but there are no hashes metadata_file_data = MetadataFile(None) @@ -328,7 +336,7 @@ def from_element( # The file exists, and hashes have been supplied hashname, sep, hashval = metadata_info.partition("=") if sep == "=": - metadata_file_data = MetadataFile({hashname: hashval}) + metadata_file_data = MetadataFile(supported_hashes({hashname: hashval})) else: # Error - data is wrong. Treat as no hashes supplied. logger.debug( diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 513e4b1347b..d1e68fab76f 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -1085,10 +1085,10 @@ def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None: [ ("sha256=aa113592bbe", MetadataFile({"sha256": "aa113592bbe"})), ("sha256=", MetadataFile({"sha256": ""})), - ("sha500=aa113592bbe", MetadataFile({})), + ("sha500=aa113592bbe", MetadataFile(None)), ("true", MetadataFile(None)), (None, None), - # TODO: Are these correct? + # Attribute is present but invalid ("", MetadataFile(None)), ("aa113592bbe", MetadataFile(None)), ], @@ -1104,4 +1104,3 @@ def test_metadata_file_info_parsing_html( base_url = "https://index.url/simple" link = Link.from_element(attribs, page_url, base_url) assert link is not None and link.metadata_file_data == expected - # TODO: Do we need to do something for the JSON data? 
From 5168881b438b2851ae4c9459a8c06beee2058639 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Thu, 8 Jun 2023 10:10:15 +0100 Subject: [PATCH 499/730] Implement PEP 714 - rename dist-info-metadata --- src/pip/_internal/models/link.py | 19 +++++++++--- tests/unit/test_collector.py | 53 +++++++++++++++++++++++++++++--- 2 files changed, 63 insertions(+), 9 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 7b45f3f3ed4..3cfc3e8c4fe 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -277,8 +277,13 @@ def from_json( yanked_reason = file_data.get("yanked") hashes = file_data.get("hashes", {}) - # The dist-info-metadata value may be a boolean, or a dict of hashes. - metadata_info = file_data.get("dist-info-metadata", False) + # PEP 714: Indexes must use the name core-metadata, but + # clients should support the old name as a fallback for compatibility. + metadata_info = file_data.get("core-metadata") + if metadata_info is None: + metadata_info = file_data.get("dist-info-metadata") + + # The metadata info value may be a boolean, or a dict of hashes. if isinstance(metadata_info, dict): # The file exists, and hashes have been supplied metadata_file_data = MetadataFile(supported_hashes(metadata_info)) @@ -286,7 +291,7 @@ def from_json( # The file exists, but there are no hashes metadata_file_data = MetadataFile(None) else: - # The file does not exist + # False or not present: the file does not exist metadata_file_data = None # The Link.yanked_reason expects an empty string instead of a boolean. @@ -323,9 +328,13 @@ def from_element( pyrequire = anchor_attribs.get("data-requires-python") yanked_reason = anchor_attribs.get("data-yanked") - # The dist-info-metadata value may be the string "true", or a string of + # PEP 714: Indexes must use the name data-core-metadata, but + # clients should support the old name as a fallback for compatibility. 
+ metadata_info = anchor_attribs.get("data-core-metadata") + if metadata_info is None: + metadata_info = anchor_attribs.get("data-dist-info-metadata") + # The metadata info value may be the string "true", or a string of # the form "hashname=hashval" - metadata_info = anchor_attribs.get("data-dist-info-metadata") if metadata_info == "true": # The file exists, but there are no hashes metadata_file_data = MetadataFile(None) diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index d1e68fab76f..5410a4afc03 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -486,7 +486,15 @@ def test_parse_links_json() -> None: "requires-python": ">=3.7", "dist-info-metadata": False, }, - # Same as above, but parsing dist-info-metadata. + # Same as above, but parsing core-metadata. + { + "filename": "holygrail-1.0-py3-none-any.whl", + "url": "/files/holygrail-1.0-py3-none-any.whl", + "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + "requires-python": ">=3.7", + "core-metadata": {"sha512": "aabdd41"}, + }, + # Ensure fallback to dist-info-metadata works { "filename": "holygrail-1.0-py3-none-any.whl", "url": "/files/holygrail-1.0-py3-none-any.whl", @@ -494,6 +502,15 @@ def test_parse_links_json() -> None: "requires-python": ">=3.7", "dist-info-metadata": {"sha512": "aabdd41"}, }, + # Ensure that core-metadata gets priority. 
+ { + "filename": "holygrail-1.0-py3-none-any.whl", + "url": "/files/holygrail-1.0-py3-none-any.whl", + "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + "requires-python": ">=3.7", + "core-metadata": {"sha512": "aabdd41"}, + "dist-info-metadata": {"sha512": "this_is_wrong"}, + }, ], } ).encode("utf8") @@ -530,6 +547,22 @@ def test_parse_links_json() -> None: hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, metadata_file_data=MetadataFile({"sha512": "aabdd41"}), ), + Link( + "https://example.com/files/holygrail-1.0-py3-none-any.whl", + comes_from=page.url, + requires_python=">=3.7", + yanked_reason=None, + hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + metadata_file_data=MetadataFile({"sha512": "aabdd41"}), + ), + Link( + "https://example.com/files/holygrail-1.0-py3-none-any.whl", + comes_from=page.url, + requires_python=">=3.7", + yanked_reason=None, + hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"}, + metadata_file_data=MetadataFile({"sha512": "aabdd41"}), + ), ] # Ensure the metadata info can be parsed into the correct link. @@ -586,22 +619,34 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) - ), # Test with value "true". ( - '', + '', MetadataFile(None), {}, ), # Test with a provided hash value. ( - '', # noqa: E501 + '', # noqa: E501 MetadataFile({"sha256": "aa113592bbe"}), {}, ), # Test with a provided hash value for both the requirement as well as metadata. ( - '', # noqa: E501 + '', # noqa: E501 MetadataFile({"sha256": "aa113592bbe"}), {"sha512": "abc132409cb"}, ), + # Ensure the fallback to the old name works. + ( + '', # noqa: E501 + MetadataFile({"sha256": "aa113592bbe"}), + {}, + ), + # Ensure that the data-core-metadata name gets priority. 
+ ( + '', # noqa: E501 + MetadataFile({"sha256": "aa113592bbe"}), + {}, + ), ], ) def test_parse_links__metadata_file_data( From 3eb3ddd873dfde2c146fcc5c82b2fa0ba363ac69 Mon Sep 17 00:00:00 2001 From: Maurits van Rees Date: Thu, 8 Jun 2023 17:57:17 +0200 Subject: [PATCH 500/730] Fix slowness on Python 3.11 when updating an existing large environment. --- news/12079.bugfix.rst | 1 + src/pip/_internal/resolution/resolvelib/candidates.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 news/12079.bugfix.rst diff --git a/news/12079.bugfix.rst b/news/12079.bugfix.rst new file mode 100644 index 00000000000..79496798adc --- /dev/null +++ b/news/12079.bugfix.rst @@ -0,0 +1 @@ +Fix slowness on Python 3.11 when updating an existing large environment. diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 31020e27ad1..de04e1d73f2 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -341,6 +341,7 @@ def __init__( self.dist = dist self._ireq = _make_install_req_from_dist(dist, template) self._factory = factory + self._version = None # This is just logging some messages, so we can do it eagerly. # The returned dist would be exactly the same as self.dist because we @@ -376,7 +377,9 @@ def name(self) -> str: @property def version(self) -> CandidateVersion: - return self.dist.version + if self._version is None: + self._version = self.dist.version + return self._version @property def is_editable(self) -> bool: From 1269d0d240f4d22ed1134bb854bf2177a82a8d66 Mon Sep 17 00:00:00 2001 From: Maurits van Rees Date: Mon, 12 Jun 2023 16:31:28 +0200 Subject: [PATCH 501/730] Update news snippet. 
--- news/12079.bugfix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/12079.bugfix.rst b/news/12079.bugfix.rst index 79496798adc..d65c8ee7698 100644 --- a/news/12079.bugfix.rst +++ b/news/12079.bugfix.rst @@ -1 +1 @@ -Fix slowness on Python 3.11 when updating an existing large environment. +Fix slowness when using ``importlib.metadata`` and there is a large overlap between already installed and to-be-installed packages. From 1a80e41504008c8f3b63b5fb59c6b7476fcae3b9 Mon Sep 17 00:00:00 2001 From: Maurits van Rees Date: Mon, 12 Jun 2023 21:43:34 +0200 Subject: [PATCH 502/730] Mention in changelog that 12079 is an issue mostly in Python 3.11. --- news/12079.bugfix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/12079.bugfix.rst b/news/12079.bugfix.rst index d65c8ee7698..b5b7e553bf3 100644 --- a/news/12079.bugfix.rst +++ b/news/12079.bugfix.rst @@ -1 +1 @@ -Fix slowness when using ``importlib.metadata`` and there is a large overlap between already installed and to-be-installed packages. +Fix slowness when using ``importlib.metadata`` and there is a large overlap between already installed and to-be-installed packages. This is the default in Python 3.11, though it can be overridden with the ``_PIP_USE_IMPORTLIB_METADATA`` environment variable. From 6aef9326d93dee61cb69cfa8770ca445891fa990 Mon Sep 17 00:00:00 2001 From: Maurits van Rees Date: Mon, 12 Jun 2023 21:49:12 +0200 Subject: [PATCH 503/730] Update news/12079.bugfix.rst Co-authored-by: Tzu-ping Chung --- news/12079.bugfix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/12079.bugfix.rst b/news/12079.bugfix.rst index b5b7e553bf3..5ee05026808 100644 --- a/news/12079.bugfix.rst +++ b/news/12079.bugfix.rst @@ -1 +1 @@ -Fix slowness when using ``importlib.metadata`` and there is a large overlap between already installed and to-be-installed packages. 
This is the default in Python 3.11, though it can be overridden with the ``_PIP_USE_IMPORTLIB_METADATA`` environment variable. +Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. From 67deaf7576a9aa14f37bd6ebef5bdce038069b60 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Wed, 14 Jun 2023 22:36:15 +0800 Subject: [PATCH 504/730] Add permission check before configuration --- src/pip/_internal/configuration.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 8fd46c9b8e0..35790369a25 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -210,8 +210,16 @@ def save(self) -> None: # Ensure directory exists. ensure_dir(os.path.dirname(fname)) - with open(fname, "w") as f: - parser.write(f) + # Ensure directory's permission(need to be writeable) + if os.access(fname, os.W_OK): + with open(fname, "w") as f: + parser.write(f) + else: + raise ConfigurationError( + "Configuation file not writeable".format( + ": ".join(fname) + ) + ) # # Private routines From 19b41050efdcb2b8e74c16fded3064abb96f95fa Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Wed, 14 Jun 2023 23:00:03 +0800 Subject: [PATCH 505/730] Add permission check before configuration --- news/11920.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11920.bugfix.rst diff --git a/news/11920.bugfix.rst b/news/11920.bugfix.rst new file mode 100644 index 00000000000..f91667c5251 --- /dev/null +++ b/news/11920.bugfix.rst @@ -0,0 +1 @@ +Add permission check before configuration \ No newline at end of file From dafca3f7e3985ce2f2351d168efbd91fa5b2f4b2 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> 
Date: Thu, 15 Jun 2023 19:11:20 +0800 Subject: [PATCH 506/730] Add permission check before configuration --- src/pip/_internal/configuration.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 35790369a25..f8de9239704 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -216,10 +216,8 @@ def save(self) -> None: parser.write(f) else: raise ConfigurationError( - "Configuation file not writeable".format( - ": ".join(fname) + "Configuation file not writeable {}".format(': '.join(fname)) ) - ) # # Private routines From f77661e478d3169e43fd2ba540a0c9778e924a45 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Thu, 15 Jun 2023 19:35:46 +0800 Subject: [PATCH 507/730] Add permission check before configuration --- src/pip/_internal/configuration.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index f8de9239704..ebcfe7df0f8 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -211,10 +211,10 @@ def save(self) -> None: ensure_dir(os.path.dirname(fname)) # Ensure directory's permission(need to be writeable) - if os.access(fname, os.W_OK): + try: with open(fname, "w") as f: parser.write(f) - else: + except: raise ConfigurationError( "Configuation file not writeable {}".format(': '.join(fname)) ) From f74650725b7fa57a5b17f2b839624fa36622ba7e Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Wed, 14 Jun 2023 22:36:15 +0800 Subject: [PATCH 508/730] Add permission check before configuration --- src/pip/_internal/configuration.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 8fd46c9b8e0..35790369a25 100644 --- 
a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -210,8 +210,16 @@ def save(self) -> None: # Ensure directory exists. ensure_dir(os.path.dirname(fname)) - with open(fname, "w") as f: - parser.write(f) + # Ensure directory's permission(need to be writeable) + if os.access(fname, os.W_OK): + with open(fname, "w") as f: + parser.write(f) + else: + raise ConfigurationError( + "Configuation file not writeable".format( + ": ".join(fname) + ) + ) # # Private routines From 920bcd0c631be649317b5a1e00019ad029353d5c Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Wed, 14 Jun 2023 23:00:03 +0800 Subject: [PATCH 509/730] Add permission check before configuration --- news/11920.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11920.bugfix.rst diff --git a/news/11920.bugfix.rst b/news/11920.bugfix.rst new file mode 100644 index 00000000000..f91667c5251 --- /dev/null +++ b/news/11920.bugfix.rst @@ -0,0 +1 @@ +Add permission check before configuration \ No newline at end of file From 2dbda58efc6bfd0b9115626d294ab95b11712e9a Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Thu, 15 Jun 2023 19:11:20 +0800 Subject: [PATCH 510/730] Add permission check before configuration --- src/pip/_internal/configuration.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 35790369a25..f8de9239704 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -216,10 +216,8 @@ def save(self) -> None: parser.write(f) else: raise ConfigurationError( - "Configuation file not writeable".format( - ": ".join(fname) + "Configuation file not writeable {}".format(': '.join(fname)) ) - ) # # Private routines From 17147b8fd36a851da7897da26eef8f68aec364a0 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: 
Thu, 15 Jun 2023 19:35:46 +0800 Subject: [PATCH 511/730] Add permission check before configuration --- src/pip/_internal/configuration.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index f8de9239704..ebcfe7df0f8 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -211,10 +211,10 @@ def save(self) -> None: ensure_dir(os.path.dirname(fname)) # Ensure directory's permission(need to be writeable) - if os.access(fname, os.W_OK): + try: with open(fname, "w") as f: parser.write(f) - else: + except: raise ConfigurationError( "Configuation file not writeable {}".format(': '.join(fname)) ) From 5986dd27c5797245c70daf922f89bddc16ee656e Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Fri, 16 Jun 2023 21:35:49 +0800 Subject: [PATCH 512/730] Add permission check before configuration --- src/pip/_internal/configuration.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index ebcfe7df0f8..f8de9239704 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -211,10 +211,10 @@ def save(self) -> None: ensure_dir(os.path.dirname(fname)) # Ensure directory's permission(need to be writeable) - try: + if os.access(fname, os.W_OK): with open(fname, "w") as f: parser.write(f) - except: + else: raise ConfigurationError( "Configuation file not writeable {}".format(': '.join(fname)) ) From 05e936aecb6427403c6a99dcbcefa63514b98425 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Sat, 17 Jun 2023 12:16:25 +0800 Subject: [PATCH 513/730] Add permission check before configuration7 --- src/pip/_internal/configuration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/configuration.py 
b/src/pip/_internal/configuration.py index f8de9239704..ba34886b6b1 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -345,7 +345,7 @@ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: # per-user configuration next should_load_user_config = not self.isolated and not ( config_file and os.path.exists(config_file) - ) + ) or not os.access(config_file[kinds.SITE], os.W_OK) if should_load_user_config: # The legacy config file is overridden by the new config file yield kinds.USER, config_files[kinds.USER] From 8747268d44250b164957f3a7671bd30ef9f250c7 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Sat, 17 Jun 2023 12:30:36 +0800 Subject: [PATCH 514/730] Add permission check before configuration8 --- src/pip/_internal/configuration.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index ba34886b6b1..0a7d183ae8b 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -345,7 +345,8 @@ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: # per-user configuration next should_load_user_config = not self.isolated and not ( config_file and os.path.exists(config_file) - ) or not os.access(config_file[kinds.SITE], os.W_OK) + ) or not os.access(config_files[kinds.SITE], os.W_OK) + if should_load_user_config: # The legacy config file is overridden by the new config file yield kinds.USER, config_files[kinds.USER] From 81c8a3ffbca1aca2c7bfae1433d0dd52050fc9b3 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Sat, 17 Jun 2023 12:42:47 +0800 Subject: [PATCH 515/730] Add permission check before configuration9 --- src/pip/_internal/configuration.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 
0a7d183ae8b..5cd889d3979 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -342,10 +342,24 @@ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: # at the base we have any global configuration yield kinds.GLOBAL, config_files[kinds.GLOBAL] + site_accessable = int + site_index = 0 + site_all_accessable = bool + + for fname in config_files[kinds.SITE]: + site_index += 1 + if os.access(fname, os.W_OK): + site_accessable += 1 + + if site_accessable < site_index: + site_all_accessable = False + elif site_accessable == site_index: + site_all_accessable = True + # per-user configuration next should_load_user_config = not self.isolated and not ( config_file and os.path.exists(config_file) - ) or not os.access(config_files[kinds.SITE], os.W_OK) + ) or not site_all_accessable == True if should_load_user_config: # The legacy config file is overridden by the new config file From 54be97e05ce6e303dddb95f9d8d0291c35f7189c Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Mon, 15 Aug 2022 16:43:58 -0700 Subject: [PATCH 516/730] Use strict optional checking in glibc Suggested by pradyunsg in #11374 `--no-strict-optional` defeats half the purpose of using mypy. This change is trivial, we already catch AttributeError in the case that mypy is concerned about. --- src/pip/_internal/utils/glibc.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/pip/_internal/utils/glibc.py b/src/pip/_internal/utils/glibc.py index 7bd3c20681d..2f64f5aa33d 100644 --- a/src/pip/_internal/utils/glibc.py +++ b/src/pip/_internal/utils/glibc.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. 
-# mypy: strict-optional=False - import os import sys from typing import Optional, Tuple @@ -21,7 +18,7 @@ def glibc_version_string_confstr() -> Optional[str]: return None try: # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": - _, version = os.confstr("CS_GNU_LIBC_VERSION").split() + _, version = os.confstr("CS_GNU_LIBC_VERSION").split() # type: ignore[union-attr] except (AttributeError, OSError, ValueError): # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... return None From b5a40ed64bf534858f9cd43d0a9fcd86e478c836 Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Tue, 20 Jun 2023 01:45:24 -0700 Subject: [PATCH 517/730] news --- news/5C12428B-09FA-49BC-A886-6F5D8885BC14.trivial.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 news/5C12428B-09FA-49BC-A886-6F5D8885BC14.trivial.rst diff --git a/news/5C12428B-09FA-49BC-A886-6F5D8885BC14.trivial.rst b/news/5C12428B-09FA-49BC-A886-6F5D8885BC14.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From 42117756313b1b59c3a6b4f637795688fc36b19e Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Tue, 20 Jun 2023 01:48:49 -0700 Subject: [PATCH 518/730] remove the error code to silence ruff --- src/pip/_internal/utils/glibc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/utils/glibc.py b/src/pip/_internal/utils/glibc.py index 2f64f5aa33d..d0e1dbc2c47 100644 --- a/src/pip/_internal/utils/glibc.py +++ b/src/pip/_internal/utils/glibc.py @@ -18,7 +18,7 @@ def glibc_version_string_confstr() -> Optional[str]: return None try: # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": - _, version = os.confstr("CS_GNU_LIBC_VERSION").split() # type: ignore[union-attr] + _, version = os.confstr("CS_GNU_LIBC_VERSION").split() # type: ignore except (AttributeError, OSError, ValueError): # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... 
return None From 36014e6f495bd57363e935e466da8f165acd51f6 Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Tue, 20 Jun 2023 01:51:56 -0700 Subject: [PATCH 519/730] don't catch attributeerror --- src/pip/_internal/utils/glibc.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/utils/glibc.py b/src/pip/_internal/utils/glibc.py index d0e1dbc2c47..b9dbfbcbf30 100644 --- a/src/pip/_internal/utils/glibc.py +++ b/src/pip/_internal/utils/glibc.py @@ -17,9 +17,12 @@ def glibc_version_string_confstr() -> Optional[str]: if sys.platform == "win32": return None try: + gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION") + if gnu_libc_version is None: + return None # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": - _, version = os.confstr("CS_GNU_LIBC_VERSION").split() # type: ignore - except (AttributeError, OSError, ValueError): + _, version = gnu_libc_version + except (OSError, ValueError): # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... return None return version From e995f2564495d0bb0cb609c0b48091c3f5708ed8 Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Tue, 20 Jun 2023 01:54:20 -0700 Subject: [PATCH 520/730] nope --- src/pip/_internal/utils/glibc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/utils/glibc.py b/src/pip/_internal/utils/glibc.py index b9dbfbcbf30..b80c1881bd3 100644 --- a/src/pip/_internal/utils/glibc.py +++ b/src/pip/_internal/utils/glibc.py @@ -21,7 +21,7 @@ def glibc_version_string_confstr() -> Optional[str]: if gnu_libc_version is None: return None # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": - _, version = gnu_libc_version + _, version = gnu_libc_version.split() except (OSError, ValueError): # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... 
return None From b5377aeb73ac1fabc45abb07ed92b209b3213e98 Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Tue, 20 Jun 2023 02:04:36 -0700 Subject: [PATCH 521/730] nope --- src/pip/_internal/utils/glibc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/utils/glibc.py b/src/pip/_internal/utils/glibc.py index b80c1881bd3..81342afa447 100644 --- a/src/pip/_internal/utils/glibc.py +++ b/src/pip/_internal/utils/glibc.py @@ -22,7 +22,7 @@ def glibc_version_string_confstr() -> Optional[str]: return None # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": _, version = gnu_libc_version.split() - except (OSError, ValueError): + except (AttributeError, OSError, ValueError): # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... return None return version From 5bebe850ea1db6ff165e4b95bafb1ee44e4a69e8 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 20 Jun 2023 17:13:18 +0200 Subject: [PATCH 522/730] take non-extra requirements into account for extra installs --- src/pip/_internal/resolution/resolvelib/factory.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 0331297b85b..c117a30c81c 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -385,8 +385,8 @@ def find_candidates( if ireq is not None: ireqs.append(ireq) - # If the current identifier contains extras, add explicit candidates - # from entries from extra-less identifier. + # If the current identifier contains extras, add requires and explicit + # candidates from entries from extra-less identifier. 
with contextlib.suppress(InvalidRequirement): parsed_requirement = get_requirement(identifier) explicit_candidates.update( @@ -395,6 +395,10 @@ def find_candidates( frozenset(parsed_requirement.extras), ), ) + for req in requirements.get(parsed_requirement.name, []): + _, ireq = req.get_candidate_lookup() + if ireq is not None: + ireqs.append(ireq) # Add explicit candidates from constraints. We only do this if there are # known ireqs, which represent requirements not already explicit. If From 937d8f0b61dbf41f23db9ba62586a6bf6d45c828 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 21 Jun 2023 17:34:30 +0200 Subject: [PATCH 523/730] partial improvement --- src/pip/_internal/req/constructors.py | 32 +++++++++++- .../resolution/resolvelib/candidates.py | 9 ++-- .../resolution/resolvelib/factory.py | 51 ++++++++++++++----- .../resolution/resolvelib/provider.py | 2 + .../resolution/resolvelib/requirements.py | 28 ++++++++-- .../resolution_resolvelib/test_requirement.py | 22 ++++---- 6 files changed, 109 insertions(+), 35 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index c5ca2d85d51..f04a4cbbdbd 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -15,7 +15,7 @@ from pip._vendor.packaging.markers import Marker from pip._vendor.packaging.requirements import InvalidRequirement, Requirement -from pip._vendor.packaging.specifiers import Specifier +from pip._vendor.packaging.specifiers import Specifier, SpecifierSet from pip._internal.exceptions import InstallationError from pip._internal.models.index import PyPI, TestPyPI @@ -504,3 +504,33 @@ def install_req_from_link_and_ireq( config_settings=ireq.config_settings, user_supplied=ireq.user_supplied, ) + + +def install_req_without( + ireq: InstallRequirement, + *, + without_extras: bool = False, + without_specifier: bool = False, +) -> InstallRequirement: + # TODO: clean up hack + req = Requirement(str(ireq.req)) 
+ if without_extras: + req.extras = {} + if without_specifier: + req.specifier = SpecifierSet(prereleases=req.specifier.prereleases) + return InstallRequirement( + req=req, + comes_from=ireq.comes_from, + editable=ireq.editable, + link=ireq.link, + markers=ireq.markers, + use_pep517=ireq.use_pep517, + isolated=ireq.isolated, + global_options=ireq.global_options, + hash_options=ireq.hash_options, + constraint=ireq.constraint, + extras=ireq.extras if not without_extras else [], + config_settings=ireq.config_settings, + user_supplied=ireq.user_supplied, + permit_editable_wheels=ireq.permit_editable_wheels, + ) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index de04e1d73f2..5bac3d6df1f 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -237,10 +237,11 @@ def _prepare(self) -> BaseDistribution: self._check_metadata_consistency(dist) return dist + # TODO: add Explicit dependency on self to extra reqs can benefit from it? 
def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: requires = self.dist.iter_dependencies() if with_requires else () for r in requires: - yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) yield self._factory.make_requires_python_requirement(self.dist.requires_python) def get_install_requirement(self) -> Optional[InstallRequirement]: @@ -392,7 +393,7 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen if not with_requires: return for r in self.dist.iter_dependencies(): - yield self._factory.make_requirement_from_spec(str(r), self._ireq) + yield from self._factory.make_requirements_from_spec(str(r), self._ireq) def get_install_requirement(self) -> Optional[InstallRequirement]: return None @@ -502,11 +503,9 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen ) for r in self.base.dist.iter_dependencies(valid_extras): - requirement = factory.make_requirement_from_spec( + yield from factory.make_requirements_from_spec( str(r), self.base._ireq, valid_extras ) - if requirement: - yield requirement def get_install_requirement(self) -> Optional[InstallRequirement]: # We don't return anything here, because we always diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index c117a30c81c..4c088209b29 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -441,18 +441,35 @@ def find_candidates( and all(req.is_satisfied_by(c) for req in requirements[identifier]) ) - def _make_requirement_from_install_req( + def _make_requirements_from_install_req( self, ireq: InstallRequirement, requested_extras: Iterable[str] - ) -> Optional[Requirement]: + ) -> list[Requirement]: + # TODO: docstring + """ + Returns requirement objects associated with the given 
InstallRequirement. In + most cases this will be a single object but the following special cases exist: + - the InstallRequirement has markers that do not apply -> result is empty + - the InstallRequirement has both a constraint and extras -> result is split + in two requirement objects: one with the constraint and one with the + extra. This allows centralized constraint handling for the base, + resulting in fewer candidate rejections. + """ + # TODO: implement -> split in base req with constraint and extra req without if not ireq.match_markers(requested_extras): logger.info( "Ignoring %s: markers '%s' don't match your environment", ireq.name, ireq.markers, ) - return None + return [] if not ireq.link: - return SpecifierRequirement(ireq) + if ireq.extras and ireq.req.specifier: + return [ + SpecifierRequirement(ireq, drop_extras=True), + SpecifierRequirement(ireq, drop_specifier=True), + ] + else: + return [SpecifierRequirement(ireq)] self._fail_if_link_is_unsupported_wheel(ireq.link) cand = self._make_candidate_from_link( ireq.link, @@ -470,8 +487,9 @@ def _make_requirement_from_install_req( # ResolutionImpossible eventually. 
if not ireq.name: raise self._build_failures[ireq.link] - return UnsatisfiableRequirement(canonicalize_name(ireq.name)) - return self.make_requirement_from_candidate(cand) + return [UnsatisfiableRequirement(canonicalize_name(ireq.name))] + # TODO: here too + return [self.make_requirement_from_candidate(cand)] def collect_root_requirements( self, root_ireqs: List[InstallRequirement] @@ -492,15 +510,17 @@ def collect_root_requirements( else: collected.constraints[name] = Constraint.from_ireq(ireq) else: - req = self._make_requirement_from_install_req( + reqs = self._make_requirements_from_install_req( ireq, requested_extras=(), ) - if req is None: + if not reqs: continue - if ireq.user_supplied and req.name not in collected.user_requested: - collected.user_requested[req.name] = i - collected.requirements.append(req) + + # TODO: clean up reqs[0]? + if ireq.user_supplied and reqs[0].name not in collected.user_requested: + collected.user_requested[reqs[0].name] = i + collected.requirements.extend(reqs) return collected def make_requirement_from_candidate( @@ -508,14 +528,17 @@ def make_requirement_from_candidate( ) -> ExplicitRequirement: return ExplicitRequirement(candidate) - def make_requirement_from_spec( + def make_requirements_from_spec( self, specifier: str, comes_from: Optional[InstallRequirement], requested_extras: Iterable[str] = (), - ) -> Optional[Requirement]: + ) -> list[Requirement]: + # TODO: docstring + """ + """ ireq = self._make_install_req_from_spec(specifier, comes_from) - return self._make_requirement_from_install_req(ireq, requested_extras) + return self._make_requirements_from_install_req(ireq, requested_extras) def make_requires_python_requirement( self, diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py index 315fb9c8902..121e48d071b 100644 --- a/src/pip/_internal/resolution/resolvelib/provider.py +++ b/src/pip/_internal/resolution/resolvelib/provider.py @@ -184,6 +184,8 @@ def 
get_preference( # the backtracking backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes) + # TODO: finally prefer base over extra for the same package + return ( not requires_python, not direct, diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index 06addc0ddce..fe9ae6ba661 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -2,6 +2,7 @@ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name from pip._internal.req.req_install import InstallRequirement +from pip._internal.req.constructors import install_req_without from .base import Candidate, CandidateLookup, Requirement, format_name @@ -39,14 +40,27 @@ def is_satisfied_by(self, candidate: Candidate) -> bool: return candidate == self.candidate +# TODO: add some comments class SpecifierRequirement(Requirement): - def __init__(self, ireq: InstallRequirement) -> None: + # TODO: document additional options + def __init__( + self, + ireq: InstallRequirement, + *, + drop_extras: bool = False, + drop_specifier: bool = False, + ) -> None: assert ireq.link is None, "This is a link, not a specifier" - self._ireq = ireq - self._extras = frozenset(ireq.extras) + self._drop_extras: bool = drop_extras + self._original_extras = frozenset(ireq.extras) + # TODO: name + self._original_req = ireq.req + self._ireq = install_req_without( + ireq, without_extras=self._drop_extras, without_specifier=drop_specifier + ) def __str__(self) -> str: - return str(self._ireq.req) + return str(self._original_req) def __repr__(self) -> str: return "{class_name}({requirement!r})".format( @@ -59,9 +73,13 @@ def project_name(self) -> NormalizedName: assert self._ireq.req, "Specifier-backed ireq is always PEP 508" return canonicalize_name(self._ireq.req.name) + # TODO: make sure this can still be identified for error reporting purposes @property def 
name(self) -> str: - return format_name(self.project_name, self._extras) + return format_name( + self.project_name, + self._original_extras if not self._drop_extras else frozenset(), + ) def format_for_error(self) -> str: # Convert comma-separated specifiers into "A, B, ..., F and G" diff --git a/tests/unit/resolution_resolvelib/test_requirement.py b/tests/unit/resolution_resolvelib/test_requirement.py index 6864e70ea0a..ce48ab16c49 100644 --- a/tests/unit/resolution_resolvelib/test_requirement.py +++ b/tests/unit/resolution_resolvelib/test_requirement.py @@ -61,9 +61,9 @@ def test_new_resolver_requirement_has_name( ) -> None: """All requirements should have a name""" for spec, name, _ in test_cases: - req = factory.make_requirement_from_spec(spec, comes_from=None) - assert req is not None - assert req.name == name + reqs = factory.make_requirements_from_spec(spec, comes_from=None) + assert len(reqs) == 1 + assert reqs[0].name == name def test_new_resolver_correct_number_of_matches( @@ -71,8 +71,9 @@ def test_new_resolver_correct_number_of_matches( ) -> None: """Requirements should return the correct number of candidates""" for spec, _, match_count in test_cases: - req = factory.make_requirement_from_spec(spec, comes_from=None) - assert req is not None + reqs = factory.make_requirements_from_spec(spec, comes_from=None) + assert len(reqs) == 1 + req = reqs[0] matches = factory.find_candidates( req.name, {req.name: [req]}, @@ -88,8 +89,9 @@ def test_new_resolver_candidates_match_requirement( ) -> None: """Candidates returned from find_candidates should satisfy the requirement""" for spec, _, _ in test_cases: - req = factory.make_requirement_from_spec(spec, comes_from=None) - assert req is not None + reqs = factory.make_requirements_from_spec(spec, comes_from=None) + assert len(reqs) == 1 + req = reqs[0] candidates = factory.find_candidates( req.name, {req.name: [req]}, @@ -104,8 +106,8 @@ def test_new_resolver_candidates_match_requirement( def 
test_new_resolver_full_resolve(factory: Factory, provider: PipProvider) -> None: """A very basic full resolve""" - req = factory.make_requirement_from_spec("simplewheel", comes_from=None) - assert req is not None + reqs = factory.make_requirements_from_spec("simplewheel", comes_from=None) + assert len(reqs) == 1 r: Resolver[Requirement, Candidate, str] = Resolver(provider, BaseReporter()) - result = r.resolve([req]) + result = r.resolve([reqs[0]]) assert set(result.mapping.keys()) == {"simplewheel"} From c57bad63da242068f74e1eb96c97c3bc800b7f88 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Thu, 22 Jun 2023 11:27:48 +0800 Subject: [PATCH 524/730] Add permission check before configuration10 --- src/pip/_internal/configuration.py | 22 ++++------------------ 1 file changed, 4 insertions(+), 18 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 5cd889d3979..a44f13321a0 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -211,12 +211,12 @@ def save(self) -> None: ensure_dir(os.path.dirname(fname)) # Ensure directory's permission(need to be writeable) - if os.access(fname, os.W_OK): + try: with open(fname, "w") as f: parser.write(f) - else: + except IOError as error: raise ConfigurationError( - "Configuation file not writeable {}".format(': '.join(fname)) + "An error occurred while writing to the configuration file: {0}\nError message: {1}".format(fname, error) ) # @@ -342,24 +342,10 @@ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: # at the base we have any global configuration yield kinds.GLOBAL, config_files[kinds.GLOBAL] - site_accessable = int - site_index = 0 - site_all_accessable = bool - - for fname in config_files[kinds.SITE]: - site_index += 1 - if os.access(fname, os.W_OK): - site_accessable += 1 - - if site_accessable < site_index: - site_all_accessable = False - elif site_accessable == site_index: - 
site_all_accessable = True - # per-user configuration next should_load_user_config = not self.isolated and not ( config_file and os.path.exists(config_file) - ) or not site_all_accessable == True + ) if should_load_user_config: # The legacy config file is overridden by the new config file From c4709d2b2251528647e19be0d0f8ea83d1011e24 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Thu, 22 Jun 2023 11:31:43 +0800 Subject: [PATCH 525/730] Add permission check before configuration11 --- src/pip/_internal/configuration.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index a44f13321a0..869842e3742 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -216,7 +216,8 @@ def save(self) -> None: parser.write(f) except IOError as error: raise ConfigurationError( - "An error occurred while writing to the configuration file: {0}\nError message: {1}".format(fname, error) + "An error occurred while writing to the configuration file: {0}\n \ + Error message: {1}".format(fname, error) ) # From 7572dbc09581c139199496adc57fefb9f404a6b2 Mon Sep 17 00:00:00 2001 From: JasonMo1 <111677135+JasonMo1@users.noreply.github.com> Date: Thu, 22 Jun 2023 12:03:58 +0800 Subject: [PATCH 526/730] Add IO check before save configuration1 --- news/11920.bugfix.rst | 2 +- src/pip/_internal/configuration.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/news/11920.bugfix.rst b/news/11920.bugfix.rst index f91667c5251..d8e22ee9bd7 100644 --- a/news/11920.bugfix.rst +++ b/news/11920.bugfix.rst @@ -1 +1 @@ -Add permission check before configuration \ No newline at end of file +Add permission check before configuration diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 869842e3742..46562652faa 100644 --- a/src/pip/_internal/configuration.py +++ 
b/src/pip/_internal/configuration.py @@ -216,8 +216,10 @@ def save(self) -> None: parser.write(f) except IOError as error: raise ConfigurationError( - "An error occurred while writing to the configuration file: {0}\n \ - Error message: {1}".format(fname, error) + "An error occurred while writing to the configuration file: {0}\n \ + Error message: {1}".format( + fname, error + ) ) # @@ -346,7 +348,7 @@ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: # per-user configuration next should_load_user_config = not self.isolated and not ( config_file and os.path.exists(config_file) - ) + ) if should_load_user_config: # The legacy config file is overridden by the new config file From 5f8f40eb1d0610e530d5e035ba8c7f99d9af9df1 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 22 Jun 2023 11:08:33 +0200 Subject: [PATCH 527/730] refinements --- src/pip/_internal/req/constructors.py | 3 +- .../resolution/resolvelib/candidates.py | 15 +++++++- .../resolution/resolvelib/factory.py | 38 +++++++++++-------- .../resolution/resolvelib/requirements.py | 18 ++++----- 4 files changed, 46 insertions(+), 28 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index f04a4cbbdbd..908876c4cde 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -520,7 +520,8 @@ def install_req_without( req.specifier = SpecifierSet(prereleases=req.specifier.prereleases) return InstallRequirement( req=req, - comes_from=ireq.comes_from, + # TODO: document this!!!! 
+ comes_from=ireq, editable=ireq.editable, link=ireq.link, markers=ireq.markers, diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 5bac3d6df1f..23883484139 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -237,7 +237,6 @@ def _prepare(self) -> BaseDistribution: self._check_metadata_consistency(dist) return dist - # TODO: add Explicit dependency on self to extra reqs can benefit from it? def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: requires = self.dist.iter_dependencies() if with_requires else () for r in requires: @@ -428,9 +427,19 @@ def __init__( self, base: BaseCandidate, extras: FrozenSet[str], + ireq: Optional[InstallRequirement] = None, ) -> None: + """ + :param ireq: the InstallRequirement that led to this candidate, if it + differs from the base's InstallRequirement. This will often be the + case in the sense that this candidate's requirement has the extras + while the base's does not. Unlike the InstallRequirement backed + candidates, this requirement is used solely for reporting purposes, + it does not do any leg work. 
+ """ self.base = base self.extras = extras + self._ireq = ireq def __str__(self) -> str: name, rest = str(self.base).split(" ", 1) @@ -504,7 +513,9 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen for r in self.base.dist.iter_dependencies(valid_extras): yield from factory.make_requirements_from_spec( - str(r), self.base._ireq, valid_extras + str(r), + self._ireq if self._ireq is not None else self.base._ireq, + valid_extras, ) def get_install_requirement(self) -> Optional[InstallRequirement]: diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 4c088209b29..45b8133878b 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -138,13 +138,16 @@ def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: raise UnsupportedWheel(msg) def _make_extras_candidate( - self, base: BaseCandidate, extras: FrozenSet[str] + self, + base: BaseCandidate, + extras: FrozenSet[str], + ireq: Optional[InstallRequirement] = None, ) -> ExtrasCandidate: cache_key = (id(base), extras) try: candidate = self._extras_candidate_cache[cache_key] except KeyError: - candidate = ExtrasCandidate(base, extras) + candidate = ExtrasCandidate(base, extras, ireq=ireq) self._extras_candidate_cache[cache_key] = candidate return candidate @@ -161,7 +164,7 @@ def _make_candidate_from_dist( self._installed_candidate_cache[dist.canonical_name] = base if not extras: return base - return self._make_extras_candidate(base, extras) + return self._make_extras_candidate(base, extras, ireq=template) def _make_candidate_from_link( self, @@ -223,7 +226,7 @@ def _make_candidate_from_link( if not extras: return base - return self._make_extras_candidate(base, extras) + return self._make_extras_candidate(base, extras, ireq=template) def _iter_found_candidates( self, @@ -389,16 +392,17 @@ def find_candidates( # candidates from entries from 
extra-less identifier. with contextlib.suppress(InvalidRequirement): parsed_requirement = get_requirement(identifier) - explicit_candidates.update( - self._iter_explicit_candidates_from_base( - requirements.get(parsed_requirement.name, ()), - frozenset(parsed_requirement.extras), - ), - ) - for req in requirements.get(parsed_requirement.name, []): - _, ireq = req.get_candidate_lookup() - if ireq is not None: - ireqs.append(ireq) + if parsed_requirement.name != identifier: + explicit_candidates.update( + self._iter_explicit_candidates_from_base( + requirements.get(parsed_requirement.name, ()), + frozenset(parsed_requirement.extras), + ), + ) + for req in requirements.get(parsed_requirement.name, []): + _, ireq = req.get_candidate_lookup() + if ireq is not None: + ireqs.append(ireq) # Add explicit candidates from constraints. We only do this if there are # known ireqs, which represent requirements not already explicit. If @@ -444,7 +448,6 @@ def find_candidates( def _make_requirements_from_install_req( self, ireq: InstallRequirement, requested_extras: Iterable[str] ) -> list[Requirement]: - # TODO: docstring """ Returns requirement objects associated with the given InstallRequirement. In most cases this will be a single object but the following special cases exist: @@ -454,7 +457,6 @@ def _make_requirements_from_install_req( extra. This allows centralized constraint handling for the base, resulting in fewer candidate rejections. """ - # TODO: implement -> split in base req with constraint and extra req without if not ireq.match_markers(requested_extras): logger.info( "Ignoring %s: markers '%s' don't match your environment", @@ -466,6 +468,10 @@ def _make_requirements_from_install_req( if ireq.extras and ireq.req.specifier: return [ SpecifierRequirement(ireq, drop_extras=True), + # TODO: put this all the way at the back to have even fewer candidates? 
+ # TODO: probably best to keep specifier as it makes the report + # slightly more readable -> should also update SpecReq constructor + # and req.constructors.install_req_without SpecifierRequirement(ireq, drop_specifier=True), ] else: diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index fe9ae6ba661..180158128af 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -40,7 +40,6 @@ def is_satisfied_by(self, candidate: Candidate) -> bool: return candidate == self.candidate -# TODO: add some comments class SpecifierRequirement(Requirement): # TODO: document additional options def __init__( @@ -52,15 +51,17 @@ def __init__( ) -> None: assert ireq.link is None, "This is a link, not a specifier" self._drop_extras: bool = drop_extras - self._original_extras = frozenset(ireq.extras) - # TODO: name - self._original_req = ireq.req - self._ireq = install_req_without( - ireq, without_extras=self._drop_extras, without_specifier=drop_specifier + self._extras = frozenset(ireq.extras if not drop_extras else ()) + self._ireq = ( + ireq + if not drop_extras and not drop_specifier + else install_req_without( + ireq, without_extras=self._drop_extras, without_specifier=drop_specifier + ) ) def __str__(self) -> str: - return str(self._original_req) + return str(self._ireq) def __repr__(self) -> str: return "{class_name}({requirement!r})".format( @@ -73,12 +74,11 @@ def project_name(self) -> NormalizedName: assert self._ireq.req, "Specifier-backed ireq is always PEP 508" return canonicalize_name(self._ireq.req.name) - # TODO: make sure this can still be identified for error reporting purposes @property def name(self) -> str: return format_name( self.project_name, - self._original_extras if not self._drop_extras else frozenset(), + self._extras, ) def format_for_error(self) -> str: From d09431feb5049ec5e7a9b4ecb5d338a38a14ffc4 Mon 
Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 22 Jun 2023 14:42:05 +0200 Subject: [PATCH 528/730] fixes --- src/pip/_internal/req/constructors.py | 20 +++++++++++++++++-- .../resolution/resolvelib/resolver.py | 15 +++++++++++++- tests/functional/test_install.py | 6 +++--- 3 files changed, 35 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 908876c4cde..9bf1c98440b 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -8,10 +8,11 @@ InstallRequirement. """ +import copy import logging import os import re -from typing import Dict, List, Optional, Set, Tuple, Union +from typing import Collection, Dict, List, Optional, Set, Tuple, Union from pip._vendor.packaging.markers import Marker from pip._vendor.packaging.requirements import InvalidRequirement, Requirement @@ -512,7 +513,6 @@ def install_req_without( without_extras: bool = False, without_specifier: bool = False, ) -> InstallRequirement: - # TODO: clean up hack req = Requirement(str(ireq.req)) if without_extras: req.extras = {} @@ -535,3 +535,19 @@ def install_req_without( user_supplied=ireq.user_supplied, permit_editable_wheels=ireq.permit_editable_wheels, ) + + +def install_req_extend_extras( + ireq: InstallRequirement, + extras: Collection[str], +) -> InstallRequirement: + """ + Returns a copy of an installation requirement with some additional extras. + Makes a shallow copy of the ireq object. 
+ """ + result = copy.copy(ireq) + req = Requirement(str(ireq.req)) + req.extras.update(extras) + result.req = req + result.extras = {*ireq.extras, *extras} + return result diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index 47bbfecce36..c5de0e822c9 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -1,3 +1,4 @@ +import contextlib import functools import logging import os @@ -11,6 +12,7 @@ from pip._internal.cache import WheelCache from pip._internal.index.package_finder import PackageFinder from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import install_req_extend_extras from pip._internal.req.req_install import InstallRequirement from pip._internal.req.req_set import RequirementSet from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider @@ -19,6 +21,7 @@ PipDebuggingReporter, PipReporter, ) +from pip._internal.utils.packaging import get_requirement from .base import Candidate, Requirement from .factory import Factory @@ -101,9 +104,19 @@ def resolve( raise error from e req_set = RequirementSet(check_supported_wheels=check_supported_wheels) - for candidate in result.mapping.values(): + # sort to ensure base candidates come before candidates with extras + for candidate in sorted(result.mapping.values(), key=lambda c: c.name): ireq = candidate.get_install_requirement() if ireq is None: + if candidate.name != candidate.project_name: + # extend existing req's extras + with contextlib.suppress(KeyError): + req = req_set.get_requirement(candidate.project_name) + req_set.add_named_requirement( + install_req_extend_extras( + req, get_requirement(candidate.name).extras + ) + ) continue # Check if there is already an installation under the same name, diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 
8559d93684b..f5ac31a8e8c 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2465,6 +2465,6 @@ def test_install_pip_prints_req_chain_pypi(script: PipTestEnvironment) -> None: ) assert ( - f"Collecting python-openid " - f"(from Paste[openid]==1.7.5.1->-r {req_path} (line 1))" in result.stdout - ) + "Collecting python-openid " + f"(from Paste[openid]->Paste[openid]==1.7.5.1->-r {req_path} (line 1))" + ) in result.stdout From 49027d7de3c9441b612c65ac68ec39e893a8385f Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 22 Jun 2023 14:59:43 +0200 Subject: [PATCH 529/730] cleanup --- src/pip/_internal/req/constructors.py | 20 +++++------ .../resolution/resolvelib/factory.py | 34 ++++++++++++------- .../resolution/resolvelib/requirements.py | 18 ++++------ 3 files changed, 37 insertions(+), 35 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 9bf1c98440b..8b1438afe1e 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -507,20 +507,16 @@ def install_req_from_link_and_ireq( ) -def install_req_without( - ireq: InstallRequirement, - *, - without_extras: bool = False, - without_specifier: bool = False, -) -> InstallRequirement: +def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement: + """ + Creates a new InstallationRequirement using the given template but without + any extras. Sets the original requirement as the new one's parent + (comes_from). + """ req = Requirement(str(ireq.req)) - if without_extras: - req.extras = {} - if without_specifier: - req.specifier = SpecifierSet(prereleases=req.specifier.prereleases) + req.extras = {} return InstallRequirement( req=req, - # TODO: document this!!!! 
comes_from=ireq, editable=ireq.editable, link=ireq.link, @@ -530,7 +526,7 @@ def install_req_without( global_options=ireq.global_options, hash_options=ireq.hash_options, constraint=ireq.constraint, - extras=ireq.extras if not without_extras else [], + extras=[], config_settings=ireq.config_settings, user_supplied=ireq.user_supplied, permit_editable_wheels=ireq.permit_editable_wheels, diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 45b8133878b..0c2c6ab793d 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -468,18 +468,18 @@ def _make_requirements_from_install_req( if ireq.extras and ireq.req.specifier: return [ SpecifierRequirement(ireq, drop_extras=True), - # TODO: put this all the way at the back to have even fewer candidates? - # TODO: probably best to keep specifier as it makes the report - # slightly more readable -> should also update SpecReq constructor - # and req.constructors.install_req_without - SpecifierRequirement(ireq, drop_specifier=True), + # TODO: put this all the way at the back to have even fewer + # candidates? 
+ SpecifierRequirement(ireq), ] else: return [SpecifierRequirement(ireq)] self._fail_if_link_is_unsupported_wheel(ireq.link) cand = self._make_candidate_from_link( ireq.link, - extras=frozenset(ireq.extras), + # make just the base candidate so the corresponding requirement can be split + # in case of extras (see docstring) + extras=frozenset(), template=ireq, name=canonicalize_name(ireq.name) if ireq.name else None, version=None, @@ -494,8 +494,12 @@ def _make_requirements_from_install_req( if not ireq.name: raise self._build_failures[ireq.link] return [UnsatisfiableRequirement(canonicalize_name(ireq.name))] - # TODO: here too - return [self.make_requirement_from_candidate(cand)] + return [ + self.make_requirement_from_candidate(cand), + self.make_requirement_from_candidate( + self._make_extras_candidate(cand, frozenset(ireq.extras), ireq) + ), + ] def collect_root_requirements( self, root_ireqs: List[InstallRequirement] @@ -523,9 +527,9 @@ def collect_root_requirements( if not reqs: continue - # TODO: clean up reqs[0]? - if ireq.user_supplied and reqs[0].name not in collected.user_requested: - collected.user_requested[reqs[0].name] = i + template = reqs[0] + if ireq.user_supplied and template.name not in collected.user_requested: + collected.user_requested[template.name] = i collected.requirements.extend(reqs) return collected @@ -540,8 +544,14 @@ def make_requirements_from_spec( comes_from: Optional[InstallRequirement], requested_extras: Iterable[str] = (), ) -> list[Requirement]: - # TODO: docstring """ + Returns requirement objects associated with the given specifier. In most cases + this will be a single object but the following special cases exist: + - the specifier has markers that do not apply -> result is empty + - the specifier has both a constraint and extras -> result is split + in two requirement objects: one with the constraint and one with the + extra. 
This allows centralized constraint handling for the base, + resulting in fewer candidate rejections. """ ireq = self._make_install_req_from_spec(specifier, comes_from) return self._make_requirements_from_install_req(ireq, requested_extras) diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index 180158128af..31a515da9ac 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -2,7 +2,7 @@ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name from pip._internal.req.req_install import InstallRequirement -from pip._internal.req.constructors import install_req_without +from pip._internal.req.constructors import install_req_drop_extras from .base import Candidate, CandidateLookup, Requirement, format_name @@ -41,24 +41,20 @@ def is_satisfied_by(self, candidate: Candidate) -> bool: class SpecifierRequirement(Requirement): - # TODO: document additional options def __init__( self, ireq: InstallRequirement, *, drop_extras: bool = False, - drop_specifier: bool = False, ) -> None: + """ + :param drop_extras: Ignore any extras that are part of the install requirement, + making this a requirement on the base only. 
+ """ assert ireq.link is None, "This is a link, not a specifier" self._drop_extras: bool = drop_extras - self._extras = frozenset(ireq.extras if not drop_extras else ()) - self._ireq = ( - ireq - if not drop_extras and not drop_specifier - else install_req_without( - ireq, without_extras=self._drop_extras, without_specifier=drop_specifier - ) - ) + self._ireq = ireq if not drop_extras else install_req_drop_extras(ireq) + self._extras = frozenset(self._ireq.extras) def __str__(self) -> str: return str(self._ireq) From cb0f97f70e8b7fbc89e63ed1d2fb5b2dd233fafb Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 22 Jun 2023 15:56:23 +0200 Subject: [PATCH 530/730] reverted troublesome changes --- src/pip/_internal/resolution/resolvelib/factory.py | 11 ++--------- tests/functional/test_install.py | 6 +++--- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 0c2c6ab793d..847cbee8dc8 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -477,9 +477,7 @@ def _make_requirements_from_install_req( self._fail_if_link_is_unsupported_wheel(ireq.link) cand = self._make_candidate_from_link( ireq.link, - # make just the base candidate so the corresponding requirement can be split - # in case of extras (see docstring) - extras=frozenset(), + extras=frozenset(ireq.extras), template=ireq, name=canonicalize_name(ireq.name) if ireq.name else None, version=None, @@ -494,12 +492,7 @@ def _make_requirements_from_install_req( if not ireq.name: raise self._build_failures[ireq.link] return [UnsatisfiableRequirement(canonicalize_name(ireq.name))] - return [ - self.make_requirement_from_candidate(cand), - self.make_requirement_from_candidate( - self._make_extras_candidate(cand, frozenset(ireq.extras), ireq) - ), - ] + return [self.make_requirement_from_candidate(cand)] def collect_root_requirements( 
self, root_ireqs: List[InstallRequirement] diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index f5ac31a8e8c..8559d93684b 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2465,6 +2465,6 @@ def test_install_pip_prints_req_chain_pypi(script: PipTestEnvironment) -> None: ) assert ( - "Collecting python-openid " - f"(from Paste[openid]->Paste[openid]==1.7.5.1->-r {req_path} (line 1))" - ) in result.stdout + f"Collecting python-openid " + f"(from Paste[openid]==1.7.5.1->-r {req_path} (line 1))" in result.stdout + ) From 3160293193d947eecb16c2d69d754b04d98b2bab Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 22 Jun 2023 16:10:09 +0200 Subject: [PATCH 531/730] improvement --- src/pip/_internal/resolution/resolvelib/factory.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 847cbee8dc8..03820edde6a 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -468,8 +468,6 @@ def _make_requirements_from_install_req( if ireq.extras and ireq.req.specifier: return [ SpecifierRequirement(ireq, drop_extras=True), - # TODO: put this all the way at the back to have even fewer - # candidates? SpecifierRequirement(ireq), ] else: @@ -524,6 +522,15 @@ def collect_root_requirements( if ireq.user_supplied and template.name not in collected.user_requested: collected.user_requested[template.name] = i collected.requirements.extend(reqs) + # Put requirements with extras at the end of the root requires. 
This does not + # affect resolvelib's picking preference but it does affect its initial criteria + # population: by putting extras at the end we enable the candidate finder to + # present resolvelib with a smaller set of candidates to resolvelib, already + # taking into account any non-transient constraints on the associated base. This + # means resolvelib will have fewer candidates to visit and reject. + # Python's list sort is stable, meaning relative order is kept for objects with + # the same key. + collected.requirements.sort(key=lambda r: r.name != r.project_name) return collected def make_requirement_from_candidate( From 1038f15496f037181a18e5d67d8a0f33e5cb7cc9 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 22 Jun 2023 16:24:26 +0200 Subject: [PATCH 532/730] stray todo --- src/pip/_internal/resolution/resolvelib/provider.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/provider.py b/src/pip/_internal/resolution/resolvelib/provider.py index 121e48d071b..315fb9c8902 100644 --- a/src/pip/_internal/resolution/resolvelib/provider.py +++ b/src/pip/_internal/resolution/resolvelib/provider.py @@ -184,8 +184,6 @@ def get_preference( # the backtracking backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes) - # TODO: finally prefer base over extra for the same package - return ( not requires_python, not direct, From 9fa64244522a237725e95e36eaedcef4ac25e87c Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 25 Jun 2023 12:06:02 +0100 Subject: [PATCH 533/730] Remove the no-response workflow (#12102) --- .github/workflows/no-response.yml | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 .github/workflows/no-response.yml diff --git a/.github/workflows/no-response.yml b/.github/workflows/no-response.yml deleted file mode 100644 index 939290b93e5..00000000000 --- a/.github/workflows/no-response.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: No Response - -# Both 
`issue_comment` and `scheduled` event types are required for this Action -# to work properly. -on: - issue_comment: - types: [created] - schedule: - # Schedule for five minutes after the hour, every hour - - cron: '5 * * * *' - -jobs: - noResponse: - runs-on: ubuntu-latest - steps: - - uses: lee-dohm/no-response@v0.5.0 - with: - token: ${{ github.token }} - responseRequiredLabel: "S: awaiting response" From 108c055f727e91b366b3f12854d1dccc39195d2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 27 Jun 2023 15:10:38 +0200 Subject: [PATCH 534/730] Revert "xfail test_pip_wheel_ext_module_with_tmpdir_inside" This reverts commit fab519dfd936def55ed019a0da18d0d77509fb95. --- tests/functional/test_wheel.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py index cfaef541dcf..c0e27949256 100644 --- a/tests/functional/test_wheel.py +++ b/tests/functional/test_wheel.py @@ -343,15 +343,6 @@ def test_pip_wheel_with_user_set_in_config( sys.platform.startswith("win"), reason="The empty extension module does not work on Win", ) -@pytest.mark.xfail( - condition=sys.platform == "darwin" and sys.version_info < (3, 9), - reason=( - "Unexplained 'no module named platform' in " - "https://github.com/pypa/wheel/blob" - "/c87e6ed82b58b41b258a3e8c852af8bc1817bb00" - "/src/wheel/vendored/packaging/tags.py#L396-L411" - ), -) def test_pip_wheel_ext_module_with_tmpdir_inside( script: PipTestEnvironment, data: TestData, common_wheels: Path ) -> None: From c1ead0aa37d5f3526820fcbec1c89011c5063236 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 23 May 2022 11:14:16 -0400 Subject: [PATCH 535/730] Switch to new cache format and new cache location. 
--- news/2984.bugfix | 1 + src/pip/_internal/cli/req_command.py | 2 +- src/pip/_internal/commands/cache.py | 21 ++++++++++++------ src/pip/_internal/network/cache.py | 32 +++++++++++++++++++++------- tests/functional/test_cache.py | 4 ++-- 5 files changed, 43 insertions(+), 17 deletions(-) create mode 100644 news/2984.bugfix diff --git a/news/2984.bugfix b/news/2984.bugfix new file mode 100644 index 00000000000..d75974349ed --- /dev/null +++ b/news/2984.bugfix @@ -0,0 +1 @@ +pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). \ No newline at end of file diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index c2f4e38bed8..c9c2019591e 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -123,7 +123,7 @@ def _build_session( ssl_context = None session = PipSession( - cache=os.path.join(cache_dir, "http") if cache_dir else None, + cache=os.path.join(cache_dir, "http-v2") if cache_dir else None, retries=retries if retries is not None else options.retries, trusted_hosts=options.trusted_hosts, index_urls=self._get_index_urls(options), diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py index e96d2b4924c..a11e151f3c8 100644 --- a/src/pip/_internal/commands/cache.py +++ b/src/pip/_internal/commands/cache.py @@ -93,17 +93,21 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: num_http_files = len(self._find_http_files(options)) num_packages = len(self._find_wheels(options, "*")) - http_cache_location = self._cache_dir(options, "http") + http_cache_location = self._cache_dir(options, "http-v2") + old_http_cache_location = self._cache_dir(options, "http") wheels_cache_location = self._cache_dir(options, "wheels") http_cache_size = filesystem.format_directory_size(http_cache_location) + old_http_cache_size = 
filesystem.format_directory_size(old_http_cache_location) wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) message = ( textwrap.dedent( """ - Package index page cache location: {http_cache_location} - Package index page cache size: {http_cache_size} - Number of HTTP files: {num_http_files} + Package index page cache location (new): {http_cache_location} + Package index page cache location (old): {old_http_cache_location} + Package index page cache size (new): {http_cache_size} + Package index page cache size (old): {old_http_cache_size} + Number of HTTP files (old+new cache): {num_http_files} Locally built wheels location: {wheels_cache_location} Locally built wheels size: {wheels_cache_size} Number of locally built wheels: {package_count} @@ -111,7 +115,9 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: ) .format( http_cache_location=http_cache_location, + old_http_cache_location=old_http_cache_location, http_cache_size=http_cache_size, + old_http_cache_size=old_http_cache_size, num_http_files=num_http_files, wheels_cache_location=wheels_cache_location, package_count=num_packages, @@ -195,8 +201,11 @@ def _cache_dir(self, options: Values, subdir: str) -> str: return os.path.join(options.cache_dir, subdir) def _find_http_files(self, options: Values) -> List[str]: - http_dir = self._cache_dir(options, "http") - return filesystem.find_files(http_dir, "*") + old_http_dir = self._cache_dir(options, "http") + new_http_dir = self._cache_dir(options, "http-v2") + return filesystem.find_files(old_http_dir, "*") + filesystem.find_files( + new_http_dir, "*" + ) def _find_wheels(self, options: Values, pattern: str) -> List[str]: wheel_dir = self._cache_dir(options, "wheels") diff --git a/src/pip/_internal/network/cache.py b/src/pip/_internal/network/cache.py index a81a2398519..b85be2e487b 100644 --- a/src/pip/_internal/network/cache.py +++ b/src/pip/_internal/network/cache.py @@ -3,10 +3,10 @@ import os from contextlib import 
contextmanager -from typing import Generator, Optional +from typing import BinaryIO, Generator, Optional -from pip._vendor.cachecontrol.cache import BaseCache -from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache +from pip._vendor.cachecontrol.caches import SeparateBodyFileCache from pip._vendor.requests.models import Response from pip._internal.utils.filesystem import adjacent_tmp_file, replace @@ -28,7 +28,7 @@ def suppressed_cache_errors() -> Generator[None, None, None]: pass -class SafeFileCache(BaseCache): +class SafeFileCache(SeparateBodyBaseCache): """ A file based cache which is safe to use even when the target directory may not be accessible or writable. @@ -43,7 +43,7 @@ def _get_cache_path(self, name: str) -> str: # From cachecontrol.caches.file_cache.FileCache._fn, brought into our # class for backwards-compatibility and to avoid using a non-public # method. - hashed = FileCache.encode(name) + hashed = SeparateBodyFileCache.encode(name) parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) @@ -53,17 +53,33 @@ def get(self, key: str) -> Optional[bytes]: with open(path, "rb") as f: return f.read() - def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None: - path = self._get_cache_path(key) + def _write(self, path: str, data: bytes) -> None: with suppressed_cache_errors(): ensure_dir(os.path.dirname(path)) with adjacent_tmp_file(path) as f: - f.write(value) + f.write(data) replace(f.name, path) + def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None: + path = self._get_cache_path(key) + self._write(path, value) + def delete(self, key: str) -> None: path = self._get_cache_path(key) with suppressed_cache_errors(): os.remove(path) + os.remove(path + ".body") + + def get_body(self, key: str) -> Optional[BinaryIO]: + path = self._get_cache_path(key) + ".body" + with suppressed_cache_errors(): + return open(path, "rb") + + def 
set_body(self, key: str, body: Optional[bytes]) -> None: + if body is None: + # Workaround for https://github.com/ionrock/cachecontrol/issues/276 + return + path = self._get_cache_path(key) + ".body" + self._write(path, body) diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py index 788abdd2be5..5eea6a96e99 100644 --- a/tests/functional/test_cache.py +++ b/tests/functional/test_cache.py @@ -20,7 +20,7 @@ def cache_dir(script: PipTestEnvironment) -> str: @pytest.fixture def http_cache_dir(cache_dir: str) -> str: - return os.path.normcase(os.path.join(cache_dir, "http")) + return os.path.normcase(os.path.join(cache_dir, "http-v2")) @pytest.fixture @@ -211,7 +211,7 @@ def test_cache_info( ) -> None: result = script.pip("cache", "info") - assert f"Package index page cache location: {http_cache_dir}" in result.stdout + assert f"Package index page cache location (new): {http_cache_dir}" in result.stdout assert f"Locally built wheels location: {wheel_cache_dir}" in result.stdout num_wheels = len(wheel_cache_files) assert f"Number of locally built wheels: {num_wheels}" in result.stdout From fa87c9eb23dd25ad5cb03fe480a3fc4b92deb7a6 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 23 May 2022 13:06:38 -0400 Subject: [PATCH 536/730] Testing for body methods of network cache. 
--- src/pip/_internal/network/cache.py | 1 + tests/unit/test_network_cache.py | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/src/pip/_internal/network/cache.py b/src/pip/_internal/network/cache.py index b85be2e487b..11c76bf0f93 100644 --- a/src/pip/_internal/network/cache.py +++ b/src/pip/_internal/network/cache.py @@ -70,6 +70,7 @@ def delete(self, key: str) -> None: path = self._get_cache_path(key) with suppressed_cache_errors(): os.remove(path) + with suppressed_cache_errors(): os.remove(path + ".body") def get_body(self, key: str) -> Optional[BinaryIO]: diff --git a/tests/unit/test_network_cache.py b/tests/unit/test_network_cache.py index a5519864f4c..88597e4c186 100644 --- a/tests/unit/test_network_cache.py +++ b/tests/unit/test_network_cache.py @@ -31,6 +31,14 @@ def test_cache_roundtrip(self, cache_tmpdir: Path) -> None: cache.delete("test key") assert cache.get("test key") is None + def test_cache_roundtrip_body(self, cache_tmpdir: Path) -> None: + cache = SafeFileCache(os.fspath(cache_tmpdir)) + assert cache.get_body("test key") is None + cache.set_body("test key", b"a test string") + assert cache.get_body("test key").read() == b"a test string" + cache.delete("test key") + assert cache.get_body("test key") is None + @pytest.mark.skipif("sys.platform == 'win32'") def test_safe_get_no_perms( self, cache_tmpdir: Path, monkeypatch: pytest.MonkeyPatch From fde34fdf8416a9692c07a899d2668f3f6ccf9df7 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 24 May 2022 12:06:15 -0400 Subject: [PATCH 537/730] Temporary workaround for https://github.com/ionrock/cachecontrol/issues/276 until it's fixed upstream. 
--- src/pip/_vendor/cachecontrol/controller.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/pip/_vendor/cachecontrol/controller.py b/src/pip/_vendor/cachecontrol/controller.py index 7f23529f115..14ba629768c 100644 --- a/src/pip/_vendor/cachecontrol/controller.py +++ b/src/pip/_vendor/cachecontrol/controller.py @@ -407,7 +407,17 @@ def update_cached_response(self, request, response): """ cache_url = self.cache_url(request.url) - cached_response = self.serializer.loads(request, self.cache.get(cache_url)) + # NOTE: This is a hot-patch for + # https://github.com/ionrock/cachecontrol/issues/276 until it's fixed + # upstream. + if isinstance(self.cache, SeparateBodyBaseCache): + body_file = self.cache.get_body(cache_url) + else: + body_file = None + + cached_response = self.serializer.loads( + request, self.cache.get(cache_url), body_file + ) if not cached_response: # we didn't have a cached response From 5b7c999581e1b892a8048f6bd1275e8501614911 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 24 May 2022 12:10:05 -0400 Subject: [PATCH 538/730] Whitespace fix. --- news/2984.bugfix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/2984.bugfix b/news/2984.bugfix index d75974349ed..cce561815c9 100644 --- a/news/2984.bugfix +++ b/news/2984.bugfix @@ -1 +1 @@ -pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). \ No newline at end of file +pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). From 7a609bfdd5a23d404124a0ace5e3598966fe2466 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 24 May 2022 12:11:34 -0400 Subject: [PATCH 539/730] Mypy fix. 
--- tests/unit/test_network_cache.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_network_cache.py b/tests/unit/test_network_cache.py index 88597e4c186..d62d1ab696c 100644 --- a/tests/unit/test_network_cache.py +++ b/tests/unit/test_network_cache.py @@ -35,7 +35,9 @@ def test_cache_roundtrip_body(self, cache_tmpdir: Path) -> None: cache = SafeFileCache(os.fspath(cache_tmpdir)) assert cache.get_body("test key") is None cache.set_body("test key", b"a test string") - assert cache.get_body("test key").read() == b"a test string" + body = cache.get_body("test key") + assert body is not None + assert body.read() == b"a test string" cache.delete("test key") assert cache.get_body("test key") is None From 3dbba12132b55a937c095c1c5537baf8652533ad Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 24 May 2022 12:12:29 -0400 Subject: [PATCH 540/730] Correct name. --- news/{2984.bugfix => 2984.bugfix.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{2984.bugfix => 2984.bugfix.rst} (100%) diff --git a/news/2984.bugfix b/news/2984.bugfix.rst similarity index 100% rename from news/2984.bugfix rename to news/2984.bugfix.rst From bff05e5622b1dcf66c1556fb421441086b93456c Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 24 May 2022 14:50:29 -0400 Subject: [PATCH 541/730] Switch to proposed upstream fix. 
--- src/pip/_internal/network/cache.py | 3 -- src/pip/_vendor/cachecontrol/controller.py | 57 +++++++++++----------- 2 files changed, 28 insertions(+), 32 deletions(-) diff --git a/src/pip/_internal/network/cache.py b/src/pip/_internal/network/cache.py index 11c76bf0f93..f52e9974fc2 100644 --- a/src/pip/_internal/network/cache.py +++ b/src/pip/_internal/network/cache.py @@ -79,8 +79,5 @@ def get_body(self, key: str) -> Optional[BinaryIO]: return open(path, "rb") def set_body(self, key: str, body: Optional[bytes]) -> None: - if body is None: - # Workaround for https://github.com/ionrock/cachecontrol/issues/276 - return path = self._get_cache_path(key) + ".body" self._write(path, body) diff --git a/src/pip/_vendor/cachecontrol/controller.py b/src/pip/_vendor/cachecontrol/controller.py index 14ba629768c..7af0e002da0 100644 --- a/src/pip/_vendor/cachecontrol/controller.py +++ b/src/pip/_vendor/cachecontrol/controller.py @@ -122,6 +122,26 @@ def parse_cache_control(self, headers): return retval + def _load_from_cache(self, request): + """ + Load a cached response, or return None if it's not available. 
+ """ + cache_url = request.url + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug("No cache entry available") + return None + + if isinstance(self.cache, SeparateBodyBaseCache): + body_file = self.cache.get_body(cache_url) + else: + body_file = None + + result = self.serializer.loads(request, cache_data, body_file) + if result is None: + logger.warning("Cache entry deserialization failed, entry ignored") + return result + def cached_request(self, request): """ Return a cached response if it exists in the cache, otherwise @@ -140,21 +160,9 @@ def cached_request(self, request): logger.debug('Request header has "max_age" as 0, cache bypassed') return False - # Request allows serving from the cache, let's see if we find something - cache_data = self.cache.get(cache_url) - if cache_data is None: - logger.debug("No cache entry available") - return False - - if isinstance(self.cache, SeparateBodyBaseCache): - body_file = self.cache.get_body(cache_url) - else: - body_file = None - - # Check whether it can be deserialized - resp = self.serializer.loads(request, cache_data, body_file) + # Check whether we can load the response from the cache: + resp = self._load_from_cache(request) if not resp: - logger.warning("Cache entry deserialization failed, entry ignored") return False # If we have a cached permanent redirect, return it immediately. 
We @@ -240,8 +248,7 @@ def cached_request(self, request): return False def conditional_headers(self, request): - cache_url = self.cache_url(request.url) - resp = self.serializer.loads(request, self.cache.get(cache_url)) + resp = self._load_from_cache(request) new_headers = {} if resp: @@ -267,7 +274,10 @@ def _cache_set(self, cache_url, request, response, body=None, expires_time=None) self.serializer.dumps(request, response, b""), expires=expires_time, ) - self.cache.set_body(cache_url, body) + # body is None can happen when, for example, we're only updating + # headers, as is the case in update_cached_response(). + if body is not None: + self.cache.set_body(cache_url, body) else: self.cache.set( cache_url, @@ -406,18 +416,7 @@ def update_cached_response(self, request, response): gotten a 304 as the response. """ cache_url = self.cache_url(request.url) - - # NOTE: This is a hot-patch for - # https://github.com/ionrock/cachecontrol/issues/276 until it's fixed - # upstream. - if isinstance(self.cache, SeparateBodyBaseCache): - body_file = self.cache.get_body(cache_url) - else: - body_file = None - - cached_response = self.serializer.loads( - request, self.cache.get(cache_url), body_file - ) + cached_response = self._load_from_cache(request) if not cached_response: # we didn't have a cached response From 46f9154daecffa52966f6e917f0819cfabb112ad Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 24 May 2022 15:11:16 -0400 Subject: [PATCH 542/730] Make sure the file gets closed. 
--- tests/unit/test_network_cache.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_network_cache.py b/tests/unit/test_network_cache.py index d62d1ab696c..aa849f3b03a 100644 --- a/tests/unit/test_network_cache.py +++ b/tests/unit/test_network_cache.py @@ -37,7 +37,8 @@ def test_cache_roundtrip_body(self, cache_tmpdir: Path) -> None: cache.set_body("test key", b"a test string") body = cache.get_body("test key") assert body is not None - assert body.read() == b"a test string" + with body: + assert body.read() == b"a test string" cache.delete("test key") assert cache.get_body("test key") is None From bada6316dfcb16d50f214b88f8d2424f0e9d990b Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 24 May 2022 15:13:46 -0400 Subject: [PATCH 543/730] More accurate type. --- src/pip/_internal/network/cache.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/network/cache.py b/src/pip/_internal/network/cache.py index f52e9974fc2..d6b8ccdcf36 100644 --- a/src/pip/_internal/network/cache.py +++ b/src/pip/_internal/network/cache.py @@ -78,6 +78,6 @@ def get_body(self, key: str) -> Optional[BinaryIO]: with suppressed_cache_errors(): return open(path, "rb") - def set_body(self, key: str, body: Optional[bytes]) -> None: + def set_body(self, key: str, body: bytes) -> None: path = self._get_cache_path(key) + ".body" self._write(path, body) From ca08c16b9e81ce21021831fb1bdfe3a76387fd25 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 2 Jun 2023 13:56:56 -0400 Subject: [PATCH 544/730] Vendor latest version of CacheControl. 
--- src/pip/_vendor/cachecontrol.pyi | 1 - src/pip/_vendor/cachecontrol/__init__.py | 18 ++- src/pip/_vendor/cachecontrol/_cmd.py | 24 ++- src/pip/_vendor/cachecontrol/adapter.py | 83 +++++++---- src/pip/_vendor/cachecontrol/cache.py | 31 ++-- .../_vendor/cachecontrol/caches/__init__.py | 5 +- .../_vendor/cachecontrol/caches/file_cache.py | 78 +++++----- .../cachecontrol/caches/redis_cache.py | 29 ++-- src/pip/_vendor/cachecontrol/compat.py | 32 ---- src/pip/_vendor/cachecontrol/controller.py | 116 ++++++++++----- src/pip/_vendor/cachecontrol/filewrapper.py | 27 ++-- src/pip/_vendor/cachecontrol/heuristics.py | 54 ++++--- src/pip/_vendor/cachecontrol/py.typed | 0 src/pip/_vendor/cachecontrol/serialize.py | 139 ++++++++++++------ src/pip/_vendor/cachecontrol/wrapper.py | 33 +++-- src/pip/_vendor/vendor.txt | 2 +- 16 files changed, 407 insertions(+), 265 deletions(-) delete mode 100644 src/pip/_vendor/cachecontrol.pyi delete mode 100644 src/pip/_vendor/cachecontrol/compat.py create mode 100644 src/pip/_vendor/cachecontrol/py.typed diff --git a/src/pip/_vendor/cachecontrol.pyi b/src/pip/_vendor/cachecontrol.pyi deleted file mode 100644 index 636a66bacaf..00000000000 --- a/src/pip/_vendor/cachecontrol.pyi +++ /dev/null @@ -1 +0,0 @@ -from cachecontrol import * \ No newline at end of file diff --git a/src/pip/_vendor/cachecontrol/__init__.py b/src/pip/_vendor/cachecontrol/__init__.py index f631ae6df47..3701cdd6be8 100644 --- a/src/pip/_vendor/cachecontrol/__init__.py +++ b/src/pip/_vendor/cachecontrol/__init__.py @@ -8,11 +8,21 @@ """ __author__ = "Eric Larson" __email__ = "eric@ionrock.org" -__version__ = "0.12.11" +__version__ = "0.13.0" -from .wrapper import CacheControl -from .adapter import CacheControlAdapter -from .controller import CacheController +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.controller import CacheController +from pip._vendor.cachecontrol.wrapper import CacheControl + +__all__ = [ + 
"__author__", + "__email__", + "__version__", + "CacheControlAdapter", + "CacheController", + "CacheControl", +] import logging + logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/src/pip/_vendor/cachecontrol/_cmd.py b/src/pip/_vendor/cachecontrol/_cmd.py index 4266b5ee92a..ab4dac3dde1 100644 --- a/src/pip/_vendor/cachecontrol/_cmd.py +++ b/src/pip/_vendor/cachecontrol/_cmd.py @@ -3,6 +3,8 @@ # SPDX-License-Identifier: Apache-2.0 import logging +from argparse import ArgumentParser +from typing import TYPE_CHECKING from pip._vendor import requests @@ -10,16 +12,19 @@ from pip._vendor.cachecontrol.cache import DictCache from pip._vendor.cachecontrol.controller import logger -from argparse import ArgumentParser +if TYPE_CHECKING: + from argparse import Namespace + from pip._vendor.cachecontrol.controller import CacheController -def setup_logging(): + +def setup_logging() -> None: logger.setLevel(logging.DEBUG) handler = logging.StreamHandler() logger.addHandler(handler) -def get_session(): +def get_session() -> requests.Session: adapter = CacheControlAdapter( DictCache(), cache_etags=True, serializer=None, heuristic=None ) @@ -27,17 +32,17 @@ def get_session(): sess.mount("http://", adapter) sess.mount("https://", adapter) - sess.cache_controller = adapter.controller + sess.cache_controller = adapter.controller # type: ignore[attr-defined] return sess -def get_args(): +def get_args() -> "Namespace": parser = ArgumentParser() parser.add_argument("url", help="The URL to try and cache") return parser.parse_args() -def main(args=None): +def main() -> None: args = get_args() sess = get_session() @@ -48,10 +53,13 @@ def main(args=None): setup_logging() # try setting the cache - sess.cache_controller.cache_response(resp.request, resp.raw) + cache_controller: "CacheController" = ( + sess.cache_controller # type: ignore[attr-defined] + ) + cache_controller.cache_response(resp.request, resp.raw) # Now try to get it - if 
sess.cache_controller.cached_request(resp.request): + if cache_controller.cached_request(resp.request): print("Cached!") else: print("Not cached :(") diff --git a/src/pip/_vendor/cachecontrol/adapter.py b/src/pip/_vendor/cachecontrol/adapter.py index 94c75e1a05b..83c08e003fe 100644 --- a/src/pip/_vendor/cachecontrol/adapter.py +++ b/src/pip/_vendor/cachecontrol/adapter.py @@ -2,15 +2,24 @@ # # SPDX-License-Identifier: Apache-2.0 -import types import functools +import types import zlib +from typing import TYPE_CHECKING, Any, Collection, Mapping, Optional, Tuple, Type, Union from pip._vendor.requests.adapters import HTTPAdapter -from .controller import CacheController, PERMANENT_REDIRECT_STATUSES -from .cache import DictCache -from .filewrapper import CallbackFileWrapper +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import PERMANENT_REDIRECT_STATUSES, CacheController +from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper + +if TYPE_CHECKING: + from pip._vendor.requests import PreparedRequest, Response + from pip._vendor.urllib3 import HTTPResponse + + from pip._vendor.cachecontrol.cache import BaseCache + from pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pip._vendor.cachecontrol.serialize import Serializer class CacheControlAdapter(HTTPAdapter): @@ -18,15 +27,15 @@ class CacheControlAdapter(HTTPAdapter): def __init__( self, - cache=None, - cache_etags=True, - controller_class=None, - serializer=None, - heuristic=None, - cacheable_methods=None, - *args, - **kw - ): + cache: Optional["BaseCache"] = None, + cache_etags: bool = True, + controller_class: Optional[Type[CacheController]] = None, + serializer: Optional["Serializer"] = None, + heuristic: Optional["BaseHeuristic"] = None, + cacheable_methods: Optional[Collection[str]] = None, + *args: Any, + **kw: Any, + ) -> None: super(CacheControlAdapter, self).__init__(*args, **kw) self.cache = DictCache() if cache is None else cache 
self.heuristic = heuristic @@ -37,7 +46,18 @@ def __init__( self.cache, cache_etags=cache_etags, serializer=serializer ) - def send(self, request, cacheable_methods=None, **kw): + def send( + self, + request: "PreparedRequest", + stream: bool = False, + timeout: Union[None, float, Tuple[float, float], Tuple[float, None]] = None, + verify: Union[bool, str] = True, + cert: Union[ + None, bytes, str, Tuple[Union[bytes, str], Union[bytes, str]] + ] = None, + proxies: Optional[Mapping[str, str]] = None, + cacheable_methods: Optional[Collection[str]] = None, + ) -> "Response": """ Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. @@ -54,13 +74,19 @@ def send(self, request, cacheable_methods=None, **kw): # check for etags and add headers if appropriate request.headers.update(self.controller.conditional_headers(request)) - resp = super(CacheControlAdapter, self).send(request, **kw) + resp = super(CacheControlAdapter, self).send( + request, stream, timeout, verify, cert, proxies + ) return resp def build_response( - self, request, response, from_cache=False, cacheable_methods=None - ): + self, + request: "PreparedRequest", + response: "HTTPResponse", + from_cache: bool = False, + cacheable_methods: Optional[Collection[str]] = None, + ) -> "Response": """ Build a response by making a request or using the cache. @@ -102,36 +128,39 @@ def build_response( else: # Wrap the response file with a wrapper that will cache the # response when the stream has been consumed. 
- response._fp = CallbackFileWrapper( - response._fp, + response._fp = CallbackFileWrapper( # type: ignore[attr-defined] + response._fp, # type: ignore[attr-defined] functools.partial( self.controller.cache_response, request, response ), ) if response.chunked: - super_update_chunk_length = response._update_chunk_length + super_update_chunk_length = response._update_chunk_length # type: ignore[attr-defined] - def _update_chunk_length(self): + def _update_chunk_length(self: "HTTPResponse") -> None: super_update_chunk_length() if self.chunk_left == 0: - self._fp._close() + self._fp._close() # type: ignore[attr-defined] - response._update_chunk_length = types.MethodType( + response._update_chunk_length = types.MethodType( # type: ignore[attr-defined] _update_chunk_length, response ) - resp = super(CacheControlAdapter, self).build_response(request, response) + resp: "Response" = super( # type: ignore[no-untyped-call] + CacheControlAdapter, self + ).build_response(request, response) # See if we should invalidate the cache. if request.method in self.invalidating_methods and resp.ok: + assert request.url is not None cache_url = self.controller.cache_url(request.url) self.cache.delete(cache_url) # Give the request a from_cache attr to let people use it - resp.from_cache = from_cache + resp.from_cache = from_cache # type: ignore[attr-defined] return resp - def close(self): + def close(self) -> None: self.cache.close() - super(CacheControlAdapter, self).close() + super(CacheControlAdapter, self).close() # type: ignore[no-untyped-call] diff --git a/src/pip/_vendor/cachecontrol/cache.py b/src/pip/_vendor/cachecontrol/cache.py index 2a965f595ff..61031d23441 100644 --- a/src/pip/_vendor/cachecontrol/cache.py +++ b/src/pip/_vendor/cachecontrol/cache.py @@ -7,37 +7,43 @@ safe in-memory dictionary. 
""" from threading import Lock +from typing import IO, TYPE_CHECKING, MutableMapping, Optional, Union +if TYPE_CHECKING: + from datetime import datetime -class BaseCache(object): - def get(self, key): +class BaseCache(object): + def get(self, key: str) -> Optional[bytes]: raise NotImplementedError() - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: Optional[Union[int, "datetime"]] = None + ) -> None: raise NotImplementedError() - def delete(self, key): + def delete(self, key: str) -> None: raise NotImplementedError() - def close(self): + def close(self) -> None: pass class DictCache(BaseCache): - - def __init__(self, init_dict=None): + def __init__(self, init_dict: Optional[MutableMapping[str, bytes]] = None) -> None: self.lock = Lock() self.data = init_dict or {} - def get(self, key): + def get(self, key: str) -> Optional[bytes]: return self.data.get(key, None) - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: Optional[Union[int, "datetime"]] = None + ) -> None: with self.lock: self.data.update({key: value}) - def delete(self, key): + def delete(self, key: str) -> None: with self.lock: if key in self.data: self.data.pop(key) @@ -55,10 +61,11 @@ class SeparateBodyBaseCache(BaseCache): Similarly, the body should be loaded separately via ``get_body()``. """ - def set_body(self, key, body): + + def set_body(self, key: str, body: bytes) -> None: raise NotImplementedError() - def get_body(self, key): + def get_body(self, key: str) -> Optional["IO[bytes]"]: """ Return the body as file-like object. 
""" diff --git a/src/pip/_vendor/cachecontrol/caches/__init__.py b/src/pip/_vendor/cachecontrol/caches/__init__.py index 37827291fb5..24ff469ff98 100644 --- a/src/pip/_vendor/cachecontrol/caches/__init__.py +++ b/src/pip/_vendor/cachecontrol/caches/__init__.py @@ -2,8 +2,7 @@ # # SPDX-License-Identifier: Apache-2.0 -from .file_cache import FileCache, SeparateBodyFileCache -from .redis_cache import RedisCache - +from pip._vendor.cachecontrol.caches.file_cache import FileCache, SeparateBodyFileCache +from pip._vendor.cachecontrol.caches.redis_cache import RedisCache __all__ = ["FileCache", "SeparateBodyFileCache", "RedisCache"] diff --git a/src/pip/_vendor/cachecontrol/caches/file_cache.py b/src/pip/_vendor/cachecontrol/caches/file_cache.py index f1ddb2ebdf9..0437c4e8a13 100644 --- a/src/pip/_vendor/cachecontrol/caches/file_cache.py +++ b/src/pip/_vendor/cachecontrol/caches/file_cache.py @@ -5,18 +5,18 @@ import hashlib import os from textwrap import dedent +from typing import IO, TYPE_CHECKING, Optional, Type, Union -from ..cache import BaseCache, SeparateBodyBaseCache -from ..controller import CacheController +from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache +from pip._vendor.cachecontrol.controller import CacheController -try: - FileNotFoundError -except NameError: - # py2.X - FileNotFoundError = (IOError, OSError) +if TYPE_CHECKING: + from datetime import datetime + from filelock import BaseFileLock -def _secure_open_write(filename, fmode): + +def _secure_open_write(filename: str, fmode: int) -> "IO[bytes]": # We only want to write to this file, so open it in write only mode flags = os.O_WRONLY @@ -62,37 +62,27 @@ class _FileCacheMixin: def __init__( self, - directory, - forever=False, - filemode=0o0600, - dirmode=0o0700, - use_dir_lock=None, - lock_class=None, - ): - - if use_dir_lock is not None and lock_class is not None: - raise ValueError("Cannot use use_dir_lock and lock_class together") - + directory: str, + forever: bool = 
False, + filemode: int = 0o0600, + dirmode: int = 0o0700, + lock_class: Optional[Type["BaseFileLock"]] = None, + ) -> None: try: - from lockfile import LockFile - from lockfile.mkdirlockfile import MkdirLockFile + if lock_class is None: + from filelock import FileLock + + lock_class = FileLock except ImportError: notice = dedent( """ NOTE: In order to use the FileCache you must have - lockfile installed. You can install it via pip: - pip install lockfile + filelock installed. You can install it via pip: + pip install filelock """ ) raise ImportError(notice) - else: - if use_dir_lock: - lock_class = MkdirLockFile - - elif lock_class is None: - lock_class = LockFile - self.directory = directory self.forever = forever self.filemode = filemode @@ -100,17 +90,17 @@ def __init__( self.lock_class = lock_class @staticmethod - def encode(x): + def encode(x: str) -> str: return hashlib.sha224(x.encode()).hexdigest() - def _fn(self, name): + def _fn(self, name: str) -> str: # NOTE: This method should not change as some may depend on it. # See: https://github.com/ionrock/cachecontrol/issues/63 hashed = self.encode(name) parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) - def get(self, key): + def get(self, key: str) -> Optional[bytes]: name = self._fn(key) try: with open(name, "rb") as fh: @@ -119,11 +109,13 @@ def get(self, key): except FileNotFoundError: return None - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: Optional[Union[int, "datetime"]] = None + ) -> None: name = self._fn(key) self._write(name, value) - def _write(self, path, data: bytes): + def _write(self, path: str, data: bytes) -> None: """ Safely write the data to the given path. 
""" @@ -133,12 +125,12 @@ def _write(self, path, data: bytes): except (IOError, OSError): pass - with self.lock_class(path) as lock: + with self.lock_class(path + ".lock"): # Write our actual file - with _secure_open_write(lock.path, self.filemode) as fh: + with _secure_open_write(path, self.filemode) as fh: fh.write(data) - def _delete(self, key, suffix): + def _delete(self, key: str, suffix: str) -> None: name = self._fn(key) + suffix if not self.forever: try: @@ -153,7 +145,7 @@ class FileCache(_FileCacheMixin, BaseCache): downloads. """ - def delete(self, key): + def delete(self, key: str) -> None: self._delete(key, "") @@ -163,23 +155,23 @@ class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache): peak memory usage. """ - def get_body(self, key): + def get_body(self, key: str) -> Optional["IO[bytes]"]: name = self._fn(key) + ".body" try: return open(name, "rb") except FileNotFoundError: return None - def set_body(self, key, body): + def set_body(self, key: str, body: bytes) -> None: name = self._fn(key) + ".body" self._write(name, body) - def delete(self, key): + def delete(self, key: str) -> None: self._delete(key, "") self._delete(key, ".body") -def url_to_file_path(url, filecache): +def url_to_file_path(url: str, filecache: FileCache) -> str: """Return the file cache path based on the URL. This does not ensure the file exists! 
diff --git a/src/pip/_vendor/cachecontrol/caches/redis_cache.py b/src/pip/_vendor/cachecontrol/caches/redis_cache.py index 2cba4b07080..f7ae45d3828 100644 --- a/src/pip/_vendor/cachecontrol/caches/redis_cache.py +++ b/src/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -4,36 +4,45 @@ from __future__ import division -from datetime import datetime +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Optional, Union + from pip._vendor.cachecontrol.cache import BaseCache +if TYPE_CHECKING: + from redis import Redis -class RedisCache(BaseCache): - def __init__(self, conn): +class RedisCache(BaseCache): + def __init__(self, conn: "Redis[bytes]") -> None: self.conn = conn - def get(self, key): + def get(self, key: str) -> Optional[bytes]: return self.conn.get(key) - def set(self, key, value, expires=None): + def set( + self, key: str, value: bytes, expires: Optional[Union[int, datetime]] = None + ) -> None: if not expires: self.conn.set(key, value) elif isinstance(expires, datetime): - expires = expires - datetime.utcnow() - self.conn.setex(key, int(expires.total_seconds()), value) + now_utc = datetime.now(timezone.utc) + if expires.tzinfo is None: + now_utc = now_utc.replace(tzinfo=None) + delta = expires - now_utc + self.conn.setex(key, int(delta.total_seconds()), value) else: self.conn.setex(key, expires, value) - def delete(self, key): + def delete(self, key: str) -> None: self.conn.delete(key) - def clear(self): + def clear(self) -> None: """Helper for clearing all the keys in a database. 
Use with caution!""" for key in self.conn.keys(): self.conn.delete(key) - def close(self): + def close(self) -> None: """Redis uses connection pooling, no need to close the connection.""" pass diff --git a/src/pip/_vendor/cachecontrol/compat.py b/src/pip/_vendor/cachecontrol/compat.py deleted file mode 100644 index ccec9379dba..00000000000 --- a/src/pip/_vendor/cachecontrol/compat.py +++ /dev/null @@ -1,32 +0,0 @@ -# SPDX-FileCopyrightText: 2015 Eric Larson -# -# SPDX-License-Identifier: Apache-2.0 - -try: - from urllib.parse import urljoin -except ImportError: - from urlparse import urljoin - - -try: - import cPickle as pickle -except ImportError: - import pickle - -# Handle the case where the requests module has been patched to not have -# urllib3 bundled as part of its source. -try: - from pip._vendor.requests.packages.urllib3.response import HTTPResponse -except ImportError: - from pip._vendor.urllib3.response import HTTPResponse - -try: - from pip._vendor.requests.packages.urllib3.util import is_fp_closed -except ImportError: - from pip._vendor.urllib3.util import is_fp_closed - -# Replicate some six behaviour -try: - text_type = unicode -except NameError: - text_type = str diff --git a/src/pip/_vendor/cachecontrol/controller.py b/src/pip/_vendor/cachecontrol/controller.py index 7af0e002da0..3365d962130 100644 --- a/src/pip/_vendor/cachecontrol/controller.py +++ b/src/pip/_vendor/cachecontrol/controller.py @@ -5,17 +5,25 @@ """ The httplib2 algorithms ported for use with requests. 
""" +import calendar import logging import re -import calendar import time from email.utils import parsedate_tz +from typing import TYPE_CHECKING, Collection, Dict, Mapping, Optional, Tuple, Union from pip._vendor.requests.structures import CaseInsensitiveDict -from .cache import DictCache, SeparateBodyBaseCache -from .serialize import Serializer +from pip._vendor.cachecontrol.cache import DictCache, SeparateBodyBaseCache +from pip._vendor.cachecontrol.serialize import Serializer + +if TYPE_CHECKING: + from typing import Literal + from pip._vendor.requests import PreparedRequest + from pip._vendor.urllib3 import HTTPResponse + + from pip._vendor.cachecontrol.cache import BaseCache logger = logging.getLogger(__name__) @@ -24,12 +32,14 @@ PERMANENT_REDIRECT_STATUSES = (301, 308) -def parse_uri(uri): +def parse_uri(uri: str) -> Tuple[str, str, str, str, str]: """Parses a URI using the regex given in Appendix B of RFC 3986. (scheme, authority, path, query, fragment) = parse_uri(uri) """ - groups = URI.match(uri).groups() + match = URI.match(uri) + assert match is not None + groups = match.groups() return (groups[1], groups[3], groups[4], groups[6], groups[8]) @@ -37,7 +47,11 @@ class CacheController(object): """An interface to see if request should cached or not.""" def __init__( - self, cache=None, cache_etags=True, serializer=None, status_codes=None + self, + cache: Optional["BaseCache"] = None, + cache_etags: bool = True, + serializer: Optional[Serializer] = None, + status_codes: Optional[Collection[int]] = None, ): self.cache = DictCache() if cache is None else cache self.cache_etags = cache_etags @@ -45,7 +59,7 @@ def __init__( self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308) @classmethod - def _urlnorm(cls, uri): + def _urlnorm(cls, uri: str) -> str: """Normalize the URL to create a safe key for the cache""" (scheme, authority, path, query, fragment) = parse_uri(uri) if not scheme or not authority: @@ -65,10 +79,12 @@ def _urlnorm(cls, 
uri): return defrag_uri @classmethod - def cache_url(cls, uri): + def cache_url(cls, uri: str) -> str: return cls._urlnorm(uri) - def parse_cache_control(self, headers): + def parse_cache_control( + self, headers: Mapping[str, str] + ) -> Dict[str, Optional[int]]: known_directives = { # https://tools.ietf.org/html/rfc7234#section-5.2 "max-age": (int, True), @@ -87,7 +103,7 @@ def parse_cache_control(self, headers): cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) - retval = {} + retval: Dict[str, Optional[int]] = {} for cc_directive in cc_headers.split(","): if not cc_directive.strip(): @@ -122,11 +138,12 @@ def parse_cache_control(self, headers): return retval - def _load_from_cache(self, request): + def _load_from_cache(self, request: "PreparedRequest") -> Optional["HTTPResponse"]: """ Load a cached response, or return None if it's not available. """ cache_url = request.url + assert cache_url is not None cache_data = self.cache.get(cache_url) if cache_data is None: logger.debug("No cache entry available") @@ -142,11 +159,14 @@ def _load_from_cache(self, request): logger.warning("Cache entry deserialization failed, entry ignored") return result - def cached_request(self, request): + def cached_request( + self, request: "PreparedRequest" + ) -> Union["HTTPResponse", "Literal[False]"]: """ Return a cached response if it exists in the cache, otherwise return False. 
""" + assert request.url is not None cache_url = self.cache_url(request.url) logger.debug('Looking up "%s" in the cache', cache_url) cc = self.parse_cache_control(request.headers) @@ -182,7 +202,7 @@ def cached_request(self, request): logger.debug(msg) return resp - headers = CaseInsensitiveDict(resp.headers) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) if not headers or "date" not in headers: if "etag" not in headers: # Without date or etag, the cached response can never be used @@ -193,7 +213,9 @@ def cached_request(self, request): return False now = time.time() - date = calendar.timegm(parsedate_tz(headers["date"])) + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) current_age = max(0, now - date) logger.debug("Current age based on date: %i", current_age) @@ -207,28 +229,30 @@ def cached_request(self, request): freshness_lifetime = 0 # Check the max-age pragma in the cache control header - if "max-age" in resp_cc: - freshness_lifetime = resp_cc["max-age"] + max_age = resp_cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime) # If there isn't a max-age, check for an expires header elif "expires" in headers: expires = parsedate_tz(headers["expires"]) if expires is not None: - expire_time = calendar.timegm(expires) - date + expire_time = calendar.timegm(expires[:6]) - date freshness_lifetime = max(0, expire_time) logger.debug("Freshness lifetime from expires: %i", freshness_lifetime) # Determine if we are setting freshness limit in the # request. Note, this overrides what was in the response. 
- if "max-age" in cc: - freshness_lifetime = cc["max-age"] + max_age = cc.get("max-age") + if max_age is not None: + freshness_lifetime = max_age logger.debug( "Freshness lifetime from request max-age: %i", freshness_lifetime ) - if "min-fresh" in cc: - min_fresh = cc["min-fresh"] + min_fresh = cc.get("min-fresh") + if min_fresh is not None: # adjust our current age by our min fresh current_age += min_fresh logger.debug("Adjusted current age from min-fresh: %i", current_age) @@ -247,12 +271,12 @@ def cached_request(self, request): # return the original handler return False - def conditional_headers(self, request): + def conditional_headers(self, request: "PreparedRequest") -> Dict[str, str]: resp = self._load_from_cache(request) new_headers = {} if resp: - headers = CaseInsensitiveDict(resp.headers) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict(resp.headers) if "etag" in headers: new_headers["If-None-Match"] = headers["ETag"] @@ -262,7 +286,14 @@ def conditional_headers(self, request): return new_headers - def _cache_set(self, cache_url, request, response, body=None, expires_time=None): + def _cache_set( + self, + cache_url: str, + request: "PreparedRequest", + response: "HTTPResponse", + body: Optional[bytes] = None, + expires_time: Optional[int] = None, + ) -> None: """ Store the data in the cache. """ @@ -285,7 +316,13 @@ def _cache_set(self, cache_url, request, response, body=None, expires_time=None) expires=expires_time, ) - def cache_response(self, request, response, body=None, status_codes=None): + def cache_response( + self, + request: "PreparedRequest", + response: "HTTPResponse", + body: Optional[bytes] = None, + status_codes: Optional[Collection[int]] = None, + ) -> None: """ Algorithm for caching requests. 
@@ -300,10 +337,14 @@ def cache_response(self, request, response, body=None, status_codes=None): ) return - response_headers = CaseInsensitiveDict(response.headers) + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) if "date" in response_headers: - date = calendar.timegm(parsedate_tz(response_headers["date"])) + time_tuple = parsedate_tz(response_headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) else: date = 0 @@ -322,6 +363,7 @@ def cache_response(self, request, response, body=None, status_codes=None): cc_req = self.parse_cache_control(request.headers) cc = self.parse_cache_control(response_headers) + assert request.url is not None cache_url = self.cache_url(request.url) logger.debug('Updating cache with response from "%s"', cache_url) @@ -354,7 +396,7 @@ def cache_response(self, request, response, body=None, status_codes=None): if response_headers.get("expires"): expires = parsedate_tz(response_headers["expires"]) if expires is not None: - expires_time = calendar.timegm(expires) - date + expires_time = calendar.timegm(expires[:6]) - date expires_time = max(expires_time, 14 * 86400) @@ -372,11 +414,14 @@ def cache_response(self, request, response, body=None, status_codes=None): # is no date header then we can't do anything about expiring # the cache. 
elif "date" in response_headers: - date = calendar.timegm(parsedate_tz(response_headers["date"])) + time_tuple = parsedate_tz(response_headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) # cache when there is a max-age > 0 - if "max-age" in cc and cc["max-age"] > 0: + max_age = cc.get("max-age") + if max_age is not None and max_age > 0: logger.debug("Caching b/c date exists and max-age > 0") - expires_time = cc["max-age"] + expires_time = max_age self._cache_set( cache_url, request, @@ -391,7 +436,7 @@ def cache_response(self, request, response, body=None, status_codes=None): if response_headers["expires"]: expires = parsedate_tz(response_headers["expires"]) if expires is not None: - expires_time = calendar.timegm(expires) - date + expires_time = calendar.timegm(expires[:6]) - date else: expires_time = None @@ -408,13 +453,16 @@ def cache_response(self, request, response, body=None, status_codes=None): expires_time, ) - def update_cached_response(self, request, response): + def update_cached_response( + self, request: "PreparedRequest", response: "HTTPResponse" + ) -> "HTTPResponse": """On a 304 we will get a new set of headers that we want to update our cached value with, assuming we have one. This should only ever be called when we've sent an ETag and gotten a 304 as the response. 
""" + assert request.url is not None cache_url = self.cache_url(request.url) cached_response = self._load_from_cache(request) @@ -434,7 +482,7 @@ def update_cached_response(self, request, response): cached_response.headers.update( dict( (k, v) - for k, v in response.headers.items() + for k, v in response.headers.items() # type: ignore[no-untyped-call] if k.lower() not in excluded_headers ) ) diff --git a/src/pip/_vendor/cachecontrol/filewrapper.py b/src/pip/_vendor/cachecontrol/filewrapper.py index f5ed5f6f6ec..472ba600161 100644 --- a/src/pip/_vendor/cachecontrol/filewrapper.py +++ b/src/pip/_vendor/cachecontrol/filewrapper.py @@ -2,8 +2,12 @@ # # SPDX-License-Identifier: Apache-2.0 -from tempfile import NamedTemporaryFile import mmap +from tempfile import NamedTemporaryFile +from typing import TYPE_CHECKING, Any, Callable, Optional + +if TYPE_CHECKING: + from http.client import HTTPResponse class CallbackFileWrapper(object): @@ -25,12 +29,14 @@ class CallbackFileWrapper(object): performance impact. """ - def __init__(self, fp, callback): + def __init__( + self, fp: "HTTPResponse", callback: Optional[Callable[[bytes], None]] + ) -> None: self.__buf = NamedTemporaryFile("rb+", delete=True) self.__fp = fp self.__callback = callback - def __getattr__(self, name): + def __getattr__(self, name: str) -> Any: # The vaguaries of garbage collection means that self.__fp is # not always set. By using __getattribute__ and the private # name[0] allows looking up the attribute value and raising an @@ -42,7 +48,7 @@ def __getattr__(self, name): fp = self.__getattribute__("_CallbackFileWrapper__fp") return getattr(fp, name) - def __is_fp_closed(self): + def __is_fp_closed(self) -> bool: try: return self.__fp.fp is None @@ -50,7 +56,8 @@ def __is_fp_closed(self): pass try: - return self.__fp.closed + closed: bool = self.__fp.closed + return closed except AttributeError: pass @@ -59,7 +66,7 @@ def __is_fp_closed(self): # TODO: Add some logging here... 
return False - def _close(self): + def _close(self) -> None: if self.__callback: if self.__buf.tell() == 0: # Empty file: @@ -86,8 +93,8 @@ def _close(self): # Important when caching big files. self.__buf.close() - def read(self, amt=None): - data = self.__fp.read(amt) + def read(self, amt: Optional[int] = None) -> bytes: + data: bytes = self.__fp.read(amt) if data: # We may be dealing with b'', a sign that things are over: # it's passed e.g. after we've already closed self.__buf. @@ -97,8 +104,8 @@ def read(self, amt=None): return data - def _safe_read(self, amt): - data = self.__fp._safe_read(amt) + def _safe_read(self, amt: int) -> bytes: + data: bytes = self.__fp._safe_read(amt) # type: ignore[attr-defined] if amt == 2 and data == b"\r\n": # urllib executes this read to toss the CRLF at the end # of the chunk. diff --git a/src/pip/_vendor/cachecontrol/heuristics.py b/src/pip/_vendor/cachecontrol/heuristics.py index ebe4a96f589..1e88ada68f2 100644 --- a/src/pip/_vendor/cachecontrol/heuristics.py +++ b/src/pip/_vendor/cachecontrol/heuristics.py @@ -4,26 +4,27 @@ import calendar import time - +from datetime import datetime, timedelta, timezone from email.utils import formatdate, parsedate, parsedate_tz +from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional -from datetime import datetime, timedelta +if TYPE_CHECKING: + from pip._vendor.urllib3 import HTTPResponse TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" -def expire_after(delta, date=None): - date = date or datetime.utcnow() +def expire_after(delta: timedelta, date: Optional[datetime] = None) -> datetime: + date = date or datetime.now(timezone.utc) return date + delta -def datetime_to_header(dt): +def datetime_to_header(dt: datetime) -> str: return formatdate(calendar.timegm(dt.timetuple())) class BaseHeuristic(object): - - def warning(self, response): + def warning(self, response: "HTTPResponse") -> Optional[str]: """ Return a valid 1xx warning header value describing the cache adjustments. 
@@ -34,7 +35,7 @@ def warning(self, response): """ return '110 - "Response is Stale"' - def update_headers(self, response): + def update_headers(self, response: "HTTPResponse") -> Dict[str, str]: """Update the response headers with any new headers. NOTE: This SHOULD always include some Warning header to @@ -43,7 +44,7 @@ def update_headers(self, response): """ return {} - def apply(self, response): + def apply(self, response: "HTTPResponse") -> "HTTPResponse": updated_headers = self.update_headers(response) if updated_headers: @@ -61,12 +62,12 @@ class OneDayCache(BaseHeuristic): future. """ - def update_headers(self, response): + def update_headers(self, response: "HTTPResponse") -> Dict[str, str]: headers = {} if "expires" not in response.headers: date = parsedate(response.headers["date"]) - expires = expire_after(timedelta(days=1), date=datetime(*date[:6])) + expires = expire_after(timedelta(days=1), date=datetime(*date[:6], tzinfo=timezone.utc)) # type: ignore[misc] headers["expires"] = datetime_to_header(expires) headers["cache-control"] = "public" return headers @@ -77,14 +78,14 @@ class ExpiresAfter(BaseHeuristic): Cache **all** requests for a defined time period. """ - def __init__(self, **kw): + def __init__(self, **kw: Any) -> None: self.delta = timedelta(**kw) - def update_headers(self, response): + def update_headers(self, response: "HTTPResponse") -> Dict[str, str]: expires = expire_after(self.delta) return {"expires": datetime_to_header(expires), "cache-control": "public"} - def warning(self, response): + def warning(self, response: "HTTPResponse") -> Optional[str]: tmpl = "110 - Automatically cached for %s. Response might be stale" return tmpl % self.delta @@ -101,12 +102,23 @@ class LastModified(BaseHeuristic): http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 Unlike mozilla we limit this to 24-hr. 
""" + cacheable_by_default_statuses = { - 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + 200, + 203, + 204, + 206, + 300, + 301, + 404, + 405, + 410, + 414, + 501, } - def update_headers(self, resp): - headers = resp.headers + def update_headers(self, resp: "HTTPResponse") -> Dict[str, str]: + headers: Mapping[str, str] = resp.headers if "expires" in headers: return {} @@ -120,9 +132,11 @@ def update_headers(self, resp): if "date" not in headers or "last-modified" not in headers: return {} - date = calendar.timegm(parsedate_tz(headers["date"])) + time_tuple = parsedate_tz(headers["date"]) + assert time_tuple is not None + date = calendar.timegm(time_tuple[:6]) last_modified = parsedate(headers["last-modified"]) - if date is None or last_modified is None: + if last_modified is None: return {} now = time.time() @@ -135,5 +149,5 @@ def update_headers(self, resp): expires = date + freshness_lifetime return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} - def warning(self, resp): + def warning(self, resp: "HTTPResponse") -> Optional[str]: return None diff --git a/src/pip/_vendor/cachecontrol/py.typed b/src/pip/_vendor/cachecontrol/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_vendor/cachecontrol/serialize.py b/src/pip/_vendor/cachecontrol/serialize.py index 7fe1a3e33a3..f21eaea6f39 100644 --- a/src/pip/_vendor/cachecontrol/serialize.py +++ b/src/pip/_vendor/cachecontrol/serialize.py @@ -5,19 +5,23 @@ import base64 import io import json +import pickle import zlib +from typing import IO, TYPE_CHECKING, Any, Mapping, Optional from pip._vendor import msgpack from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.urllib3 import HTTPResponse -from .compat import HTTPResponse, pickle, text_type +if TYPE_CHECKING: + from pip._vendor.requests import PreparedRequest, Request -def _b64_decode_bytes(b): +def _b64_decode_bytes(b: str) -> bytes: return base64.b64decode(b.encode("ascii")) -def 
_b64_decode_str(s): +def _b64_decode_str(s: str) -> str: return _b64_decode_bytes(s).decode("utf8") @@ -25,54 +29,57 @@ def _b64_decode_str(s): class Serializer(object): - def dumps(self, request, response, body=None): - response_headers = CaseInsensitiveDict(response.headers) + def dumps( + self, + request: "PreparedRequest", + response: HTTPResponse, + body: Optional[bytes] = None, + ) -> bytes: + response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + response.headers + ) if body is None: # When a body isn't passed in, we'll read the response. We # also update the response with a new file handler to be # sure it acts as though it was never read. body = response.read(decode_content=False) - response._fp = io.BytesIO(body) - - # NOTE: This is all a bit weird, but it's really important that on - # Python 2.x these objects are unicode and not str, even when - # they contain only ascii. The problem here is that msgpack - # understands the difference between unicode and bytes and we - # have it set to differentiate between them, however Python 2 - # doesn't know the difference. Forcing these to unicode will be - # enough to have msgpack know the difference. 
+ response._fp = io.BytesIO(body) # type: ignore[attr-defined] + response.length_remaining = len(body) + data = { - u"response": { - u"body": body, # Empty bytestring if body is stored separately - u"headers": dict( - (text_type(k), text_type(v)) for k, v in response.headers.items() - ), - u"status": response.status, - u"version": response.version, - u"reason": text_type(response.reason), - u"strict": response.strict, - u"decode_content": response.decode_content, + "response": { + "body": body, # Empty bytestring if body is stored separately + "headers": dict((str(k), str(v)) for k, v in response.headers.items()), # type: ignore[no-untyped-call] + "status": response.status, + "version": response.version, + "reason": str(response.reason), + "decode_content": response.decode_content, } } # Construct our vary headers - data[u"vary"] = {} - if u"vary" in response_headers: - varied_headers = response_headers[u"vary"].split(",") + data["vary"] = {} + if "vary" in response_headers: + varied_headers = response_headers["vary"].split(",") for header in varied_headers: - header = text_type(header).strip() + header = str(header).strip() header_value = request.headers.get(header, None) if header_value is not None: - header_value = text_type(header_value) - data[u"vary"][header] = header_value + header_value = str(header_value) + data["vary"][header] = header_value return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) - def loads(self, request, data, body_file=None): + def loads( + self, + request: "PreparedRequest", + data: bytes, + body_file: Optional["IO[bytes]"] = None, + ) -> Optional[HTTPResponse]: # Short circuit if we've been given an empty set of data if not data: - return + return None # Determine what version of the serializer the data was serialized # with @@ -88,18 +95,23 @@ def loads(self, request, data, body_file=None): ver = b"cc=0" # Get the version number out of the cc=N - ver = ver.split(b"=", 1)[-1].decode("ascii") + verstr = ver.split(b"=", 
1)[-1].decode("ascii") # Dispatch to the actual load method for the given version try: - return getattr(self, "_loads_v{}".format(ver))(request, data, body_file) + return getattr(self, "_loads_v{}".format(verstr))(request, data, body_file) # type: ignore[no-any-return] except AttributeError: # This is a version we don't have a loads function for, so we'll # just treat it as a miss and return None - return - - def prepare_response(self, request, cached, body_file=None): + return None + + def prepare_response( + self, + request: "Request", + cached: Mapping[str, Any], + body_file: Optional["IO[bytes]"] = None, + ) -> Optional[HTTPResponse]: """Verify our vary headers match and construct a real urllib3 HTTPResponse object. """ @@ -108,23 +120,26 @@ def prepare_response(self, request, cached, body_file=None): # This case is also handled in the controller code when creating # a cache entry, but is left here for backwards compatibility. if "*" in cached.get("vary", {}): - return + return None # Ensure that the Vary headers for the cached response match our # request for header, value in cached.get("vary", {}).items(): if request.headers.get(header, None) != value: - return + return None body_raw = cached["response"].pop("body") - headers = CaseInsensitiveDict(data=cached["response"]["headers"]) + headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( + data=cached["response"]["headers"] + ) if headers.get("transfer-encoding", "") == "chunked": headers.pop("transfer-encoding") cached["response"]["headers"] = headers try: + body: "IO[bytes]" if body_file is None: body = io.BytesIO(body_raw) else: @@ -138,28 +153,46 @@ def prepare_response(self, request, cached, body_file=None): # TypeError: 'str' does not support the buffer interface body = io.BytesIO(body_raw.encode("utf8")) + # Discard any `strict` parameter serialized by older version of cachecontrol. 
+ cached["response"].pop("strict", None) + return HTTPResponse(body=body, preload_content=False, **cached["response"]) - def _loads_v0(self, request, data, body_file=None): + def _loads_v0( + self, + request: "Request", + data: bytes, + body_file: Optional["IO[bytes]"] = None, + ) -> None: # The original legacy cache data. This doesn't contain enough # information to construct everything we need, so we'll treat this as # a miss. return - def _loads_v1(self, request, data, body_file=None): + def _loads_v1( + self, + request: "Request", + data: bytes, + body_file: Optional["IO[bytes]"] = None, + ) -> Optional[HTTPResponse]: try: cached = pickle.loads(data) except ValueError: - return + return None return self.prepare_response(request, cached, body_file) - def _loads_v2(self, request, data, body_file=None): + def _loads_v2( + self, + request: "Request", + data: bytes, + body_file: Optional["IO[bytes]"] = None, + ) -> Optional[HTTPResponse]: assert body_file is None try: cached = json.loads(zlib.decompress(data).decode("utf8")) except (ValueError, zlib.error): - return + return None # We need to decode the items that we've base64 encoded cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) @@ -175,16 +208,26 @@ def _loads_v2(self, request, data, body_file=None): return self.prepare_response(request, cached, body_file) - def _loads_v3(self, request, data, body_file): + def _loads_v3( + self, + request: "Request", + data: bytes, + body_file: Optional["IO[bytes]"] = None, + ) -> None: # Due to Python 2 encoding issues, it's impossible to know for sure # exactly how to load v3 entries, thus we'll treat these as a miss so # that they get rewritten out as v4 entries. 
return - def _loads_v4(self, request, data, body_file=None): + def _loads_v4( + self, + request: "Request", + data: bytes, + body_file: Optional["IO[bytes]"] = None, + ) -> Optional[HTTPResponse]: try: cached = msgpack.loads(data, raw=False) except ValueError: - return + return None return self.prepare_response(request, cached, body_file) diff --git a/src/pip/_vendor/cachecontrol/wrapper.py b/src/pip/_vendor/cachecontrol/wrapper.py index b6ee7f20398..293e69fe7d4 100644 --- a/src/pip/_vendor/cachecontrol/wrapper.py +++ b/src/pip/_vendor/cachecontrol/wrapper.py @@ -2,21 +2,30 @@ # # SPDX-License-Identifier: Apache-2.0 -from .adapter import CacheControlAdapter -from .cache import DictCache +from typing import TYPE_CHECKING, Collection, Optional, Type +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache -def CacheControl( - sess, - cache=None, - cache_etags=True, - serializer=None, - heuristic=None, - controller_class=None, - adapter_class=None, - cacheable_methods=None, -): +if TYPE_CHECKING: + from pip._vendor import requests + + from pip._vendor.cachecontrol.cache import BaseCache + from pip._vendor.cachecontrol.controller import CacheController + from pip._vendor.cachecontrol.heuristics import BaseHeuristic + from pip._vendor.cachecontrol.serialize import Serializer + +def CacheControl( + sess: "requests.Session", + cache: Optional["BaseCache"] = None, + cache_etags: bool = True, + serializer: Optional["Serializer"] = None, + heuristic: Optional["BaseHeuristic"] = None, + controller_class: Optional[Type["CacheController"]] = None, + adapter_class: Optional[Type[CacheControlAdapter]] = None, + cacheable_methods: Optional[Collection[str]] = None, +) -> "requests.Session": cache = DictCache() if cache is None else cache adapter_class = adapter_class or CacheControlAdapter adapter = adapter_class( diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index dcf89dc04c5..d0f4c71cccc 100644 
--- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,4 +1,4 @@ -CacheControl==0.12.11 # Make sure to update the license in pyproject.toml for this. +CacheControl==0.13.0 # Make sure to update the license in pyproject.toml for this. colorama==0.4.6 distlib==0.3.6 distro==1.8.0 From 9fb93c478ef7d5e1423cc66467bb63c686864828 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Fri, 2 Jun 2023 14:00:15 -0400 Subject: [PATCH 545/730] mypy fix. --- src/pip/_internal/network/cache.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/network/cache.py b/src/pip/_internal/network/cache.py index d6b8ccdcf36..a4d13620532 100644 --- a/src/pip/_internal/network/cache.py +++ b/src/pip/_internal/network/cache.py @@ -3,7 +3,8 @@ import os from contextlib import contextmanager -from typing import BinaryIO, Generator, Optional +from datetime import datetime +from typing import BinaryIO, Generator, Optional, Union from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache from pip._vendor.cachecontrol.caches import SeparateBodyFileCache @@ -62,7 +63,9 @@ def _write(self, path: str, data: bytes) -> None: replace(f.name, path) - def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None: + def set( + self, key: str, value: bytes, expires: Union[int, datetime, None] = None + ) -> None: path = self._get_cache_path(key) self._write(path, value) From 28590a0a0809b3bb8999b4d08aa93bd9ffb3458d Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Jun 2023 09:29:35 -0400 Subject: [PATCH 546/730] Improve documentation of caching and the cache subcommand. 
--- docs/html/topics/caching.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/docs/html/topics/caching.md b/docs/html/topics/caching.md index 954cebe402d..19bd064a74c 100644 --- a/docs/html/topics/caching.md +++ b/docs/html/topics/caching.md @@ -27,6 +27,12 @@ While this cache attempts to minimize network activity, it does not prevent network access altogether. If you want a local install solution that circumvents accessing PyPI, see {ref}`Installing from local packages`. +In versions prior to 23.2, this cache was stored in a directory called `http` in +the main cache directory (see below for its location). In 23.2 and later, a new +cache format is used, stored in a directory called `http-v2`. If you have +completely switched to newer versions of `pip`, you may wish to delete the old +directory. + (wheel-caching)= ### Locally built wheels @@ -124,11 +130,11 @@ The {ref}`pip cache` command can be used to manage pip's cache. ### Removing a single package -`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache. +`pip cache remove setuptools` removes all wheel files related to setuptools from pip's cache. HTTP cache files are not removed at this time. ### Removing the cache -`pip cache purge` will clear all wheel files from pip's cache. +`pip cache purge` will clear all files from pip's wheel and HTTP caches. ### Listing cached files From dcd2d5e344f27149789f05edb9da45994eac2473 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Mon, 12 Jun 2023 09:30:30 -0400 Subject: [PATCH 547/730] Update CacheControl to 0.13.1. 
--- src/pip/_vendor/cachecontrol/__init__.py | 2 +- src/pip/_vendor/cachecontrol/_cmd.py | 5 +- src/pip/_vendor/cachecontrol/adapter.py | 51 ++++---- src/pip/_vendor/cachecontrol/cache.py | 18 +-- .../_vendor/cachecontrol/caches/file_cache.py | 17 +-- .../cachecontrol/caches/redis_cache.py | 10 +- src/pip/_vendor/cachecontrol/controller.py | 58 +++++---- src/pip/_vendor/cachecontrol/filewrapper.py | 9 +- src/pip/_vendor/cachecontrol/heuristics.py | 23 ++-- src/pip/_vendor/cachecontrol/serialize.py | 111 +++++++----------- src/pip/_vendor/cachecontrol/wrapper.py | 19 +-- src/pip/_vendor/vendor.txt | 2 +- 12 files changed, 149 insertions(+), 176 deletions(-) diff --git a/src/pip/_vendor/cachecontrol/__init__.py b/src/pip/_vendor/cachecontrol/__init__.py index 3701cdd6be8..4d20bc9b12a 100644 --- a/src/pip/_vendor/cachecontrol/__init__.py +++ b/src/pip/_vendor/cachecontrol/__init__.py @@ -8,7 +8,7 @@ """ __author__ = "Eric Larson" __email__ = "eric@ionrock.org" -__version__ = "0.13.0" +__version__ = "0.13.1" from pip._vendor.cachecontrol.adapter import CacheControlAdapter from pip._vendor.cachecontrol.controller import CacheController diff --git a/src/pip/_vendor/cachecontrol/_cmd.py b/src/pip/_vendor/cachecontrol/_cmd.py index ab4dac3dde1..2c84208a5d8 100644 --- a/src/pip/_vendor/cachecontrol/_cmd.py +++ b/src/pip/_vendor/cachecontrol/_cmd.py @@ -1,6 +1,7 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import logging from argparse import ArgumentParser @@ -36,7 +37,7 @@ def get_session() -> requests.Session: return sess -def get_args() -> "Namespace": +def get_args() -> Namespace: parser = ArgumentParser() parser.add_argument("url", help="The URL to try and cache") return parser.parse_args() @@ -53,7 +54,7 @@ def main() -> None: setup_logging() # try setting the cache - cache_controller: "CacheController" = ( + cache_controller: CacheController = ( sess.cache_controller # type: 
ignore[attr-defined] ) cache_controller.cache_response(resp.request, resp.raw) diff --git a/src/pip/_vendor/cachecontrol/adapter.py b/src/pip/_vendor/cachecontrol/adapter.py index 83c08e003fe..3e83e308dba 100644 --- a/src/pip/_vendor/cachecontrol/adapter.py +++ b/src/pip/_vendor/cachecontrol/adapter.py @@ -1,11 +1,12 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import functools import types import zlib -from typing import TYPE_CHECKING, Any, Collection, Mapping, Optional, Tuple, Type, Union +from typing import TYPE_CHECKING, Any, Collection, Mapping from pip._vendor.requests.adapters import HTTPAdapter @@ -27,16 +28,16 @@ class CacheControlAdapter(HTTPAdapter): def __init__( self, - cache: Optional["BaseCache"] = None, + cache: BaseCache | None = None, cache_etags: bool = True, - controller_class: Optional[Type[CacheController]] = None, - serializer: Optional["Serializer"] = None, - heuristic: Optional["BaseHeuristic"] = None, - cacheable_methods: Optional[Collection[str]] = None, + controller_class: type[CacheController] | None = None, + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + cacheable_methods: Collection[str] | None = None, *args: Any, **kw: Any, ) -> None: - super(CacheControlAdapter, self).__init__(*args, **kw) + super().__init__(*args, **kw) self.cache = DictCache() if cache is None else cache self.heuristic = heuristic self.cacheable_methods = cacheable_methods or ("GET",) @@ -48,16 +49,14 @@ def __init__( def send( self, - request: "PreparedRequest", + request: PreparedRequest, stream: bool = False, - timeout: Union[None, float, Tuple[float, float], Tuple[float, None]] = None, - verify: Union[bool, str] = True, - cert: Union[ - None, bytes, str, Tuple[Union[bytes, str], Union[bytes, str]] - ] = None, - proxies: Optional[Mapping[str, str]] = None, - cacheable_methods: Optional[Collection[str]] = None, - ) -> "Response": + timeout: None | 
float | tuple[float, float] | tuple[float, None] = None, + verify: bool | str = True, + cert: (None | bytes | str | tuple[bytes | str, bytes | str]) = None, + proxies: Mapping[str, str] | None = None, + cacheable_methods: Collection[str] | None = None, + ) -> Response: """ Send a request. Use the request information to see if it exists in the cache and cache the response if we need to and can. @@ -74,19 +73,17 @@ def send( # check for etags and add headers if appropriate request.headers.update(self.controller.conditional_headers(request)) - resp = super(CacheControlAdapter, self).send( - request, stream, timeout, verify, cert, proxies - ) + resp = super().send(request, stream, timeout, verify, cert, proxies) return resp def build_response( self, - request: "PreparedRequest", - response: "HTTPResponse", + request: PreparedRequest, + response: HTTPResponse, from_cache: bool = False, - cacheable_methods: Optional[Collection[str]] = None, - ) -> "Response": + cacheable_methods: Collection[str] | None = None, + ) -> Response: """ Build a response by making a request or using the cache. @@ -137,7 +134,7 @@ def build_response( if response.chunked: super_update_chunk_length = response._update_chunk_length # type: ignore[attr-defined] - def _update_chunk_length(self: "HTTPResponse") -> None: + def _update_chunk_length(self: HTTPResponse) -> None: super_update_chunk_length() if self.chunk_left == 0: self._fp._close() # type: ignore[attr-defined] @@ -146,9 +143,7 @@ def _update_chunk_length(self: "HTTPResponse") -> None: _update_chunk_length, response ) - resp: "Response" = super( # type: ignore[no-untyped-call] - CacheControlAdapter, self - ).build_response(request, response) + resp: Response = super().build_response(request, response) # type: ignore[no-untyped-call] # See if we should invalidate the cache. 
if request.method in self.invalidating_methods and resp.ok: @@ -163,4 +158,4 @@ def _update_chunk_length(self: "HTTPResponse") -> None: def close(self) -> None: self.cache.close() - super(CacheControlAdapter, self).close() # type: ignore[no-untyped-call] + super().close() # type: ignore[no-untyped-call] diff --git a/src/pip/_vendor/cachecontrol/cache.py b/src/pip/_vendor/cachecontrol/cache.py index 61031d23441..3293b0057c7 100644 --- a/src/pip/_vendor/cachecontrol/cache.py +++ b/src/pip/_vendor/cachecontrol/cache.py @@ -6,19 +6,21 @@ The cache object API for implementing caches. The default is a thread safe in-memory dictionary. """ +from __future__ import annotations + from threading import Lock -from typing import IO, TYPE_CHECKING, MutableMapping, Optional, Union +from typing import IO, TYPE_CHECKING, MutableMapping if TYPE_CHECKING: from datetime import datetime -class BaseCache(object): - def get(self, key: str) -> Optional[bytes]: +class BaseCache: + def get(self, key: str) -> bytes | None: raise NotImplementedError() def set( - self, key: str, value: bytes, expires: Optional[Union[int, "datetime"]] = None + self, key: str, value: bytes, expires: int | datetime | None = None ) -> None: raise NotImplementedError() @@ -30,15 +32,15 @@ def close(self) -> None: class DictCache(BaseCache): - def __init__(self, init_dict: Optional[MutableMapping[str, bytes]] = None) -> None: + def __init__(self, init_dict: MutableMapping[str, bytes] | None = None) -> None: self.lock = Lock() self.data = init_dict or {} - def get(self, key: str) -> Optional[bytes]: + def get(self, key: str) -> bytes | None: return self.data.get(key, None) def set( - self, key: str, value: bytes, expires: Optional[Union[int, "datetime"]] = None + self, key: str, value: bytes, expires: int | datetime | None = None ) -> None: with self.lock: self.data.update({key: value}) @@ -65,7 +67,7 @@ class SeparateBodyBaseCache(BaseCache): def set_body(self, key: str, body: bytes) -> None: raise 
NotImplementedError() - def get_body(self, key: str) -> Optional["IO[bytes]"]: + def get_body(self, key: str) -> IO[bytes] | None: """ Return the body as file-like object. """ diff --git a/src/pip/_vendor/cachecontrol/caches/file_cache.py b/src/pip/_vendor/cachecontrol/caches/file_cache.py index 0437c4e8a13..1fd28013084 100644 --- a/src/pip/_vendor/cachecontrol/caches/file_cache.py +++ b/src/pip/_vendor/cachecontrol/caches/file_cache.py @@ -1,11 +1,12 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import hashlib import os from textwrap import dedent -from typing import IO, TYPE_CHECKING, Optional, Type, Union +from typing import IO, TYPE_CHECKING from pip._vendor.cachecontrol.cache import BaseCache, SeparateBodyBaseCache from pip._vendor.cachecontrol.controller import CacheController @@ -16,7 +17,7 @@ from filelock import BaseFileLock -def _secure_open_write(filename: str, fmode: int) -> "IO[bytes]": +def _secure_open_write(filename: str, fmode: int) -> IO[bytes]: # We only want to write to this file, so open it in write only mode flags = os.O_WRONLY @@ -39,7 +40,7 @@ def _secure_open_write(filename: str, fmode: int) -> "IO[bytes]": # there try: os.remove(filename) - except (IOError, OSError): + except OSError: # The file must not exist already, so we can just skip ahead to opening pass @@ -66,7 +67,7 @@ def __init__( forever: bool = False, filemode: int = 0o0600, dirmode: int = 0o0700, - lock_class: Optional[Type["BaseFileLock"]] = None, + lock_class: type[BaseFileLock] | None = None, ) -> None: try: if lock_class is None: @@ -100,7 +101,7 @@ def _fn(self, name: str) -> str: parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) - def get(self, key: str) -> Optional[bytes]: + def get(self, key: str) -> bytes | None: name = self._fn(key) try: with open(name, "rb") as fh: @@ -110,7 +111,7 @@ def get(self, key: str) -> Optional[bytes]: return None def set( - self, key: 
str, value: bytes, expires: Optional[Union[int, "datetime"]] = None + self, key: str, value: bytes, expires: int | datetime | None = None ) -> None: name = self._fn(key) self._write(name, value) @@ -122,7 +123,7 @@ def _write(self, path: str, data: bytes) -> None: # Make sure the directory exists try: os.makedirs(os.path.dirname(path), self.dirmode) - except (IOError, OSError): + except OSError: pass with self.lock_class(path + ".lock"): @@ -155,7 +156,7 @@ class SeparateBodyFileCache(_FileCacheMixin, SeparateBodyBaseCache): peak memory usage. """ - def get_body(self, key: str) -> Optional["IO[bytes]"]: + def get_body(self, key: str) -> IO[bytes] | None: name = self._fn(key) + ".body" try: return open(name, "rb") diff --git a/src/pip/_vendor/cachecontrol/caches/redis_cache.py b/src/pip/_vendor/cachecontrol/caches/redis_cache.py index f7ae45d3828..f4f68c47bf6 100644 --- a/src/pip/_vendor/cachecontrol/caches/redis_cache.py +++ b/src/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -1,11 +1,11 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -from __future__ import division from datetime import datetime, timezone -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING from pip._vendor.cachecontrol.cache import BaseCache @@ -14,14 +14,14 @@ class RedisCache(BaseCache): - def __init__(self, conn: "Redis[bytes]") -> None: + def __init__(self, conn: Redis[bytes]) -> None: self.conn = conn - def get(self, key: str) -> Optional[bytes]: + def get(self, key: str) -> bytes | None: return self.conn.get(key) def set( - self, key: str, value: bytes, expires: Optional[Union[int, datetime]] = None + self, key: str, value: bytes, expires: int | datetime | None = None ) -> None: if not expires: self.conn.set(key, value) diff --git a/src/pip/_vendor/cachecontrol/controller.py b/src/pip/_vendor/cachecontrol/controller.py index 3365d962130..586b9f97b80 100644 --- 
a/src/pip/_vendor/cachecontrol/controller.py +++ b/src/pip/_vendor/cachecontrol/controller.py @@ -5,12 +5,14 @@ """ The httplib2 algorithms ported for use with requests. """ +from __future__ import annotations + import calendar import logging import re import time from email.utils import parsedate_tz -from typing import TYPE_CHECKING, Collection, Dict, Mapping, Optional, Tuple, Union +from typing import TYPE_CHECKING, Collection, Mapping from pip._vendor.requests.structures import CaseInsensitiveDict @@ -32,7 +34,7 @@ PERMANENT_REDIRECT_STATUSES = (301, 308) -def parse_uri(uri: str) -> Tuple[str, str, str, str, str]: +def parse_uri(uri: str) -> tuple[str, str, str, str, str]: """Parses a URI using the regex given in Appendix B of RFC 3986. (scheme, authority, path, query, fragment) = parse_uri(uri) @@ -43,15 +45,15 @@ def parse_uri(uri: str) -> Tuple[str, str, str, str, str]: return (groups[1], groups[3], groups[4], groups[6], groups[8]) -class CacheController(object): +class CacheController: """An interface to see if request should cached or not.""" def __init__( self, - cache: Optional["BaseCache"] = None, + cache: BaseCache | None = None, cache_etags: bool = True, - serializer: Optional[Serializer] = None, - status_codes: Optional[Collection[int]] = None, + serializer: Serializer | None = None, + status_codes: Collection[int] | None = None, ): self.cache = DictCache() if cache is None else cache self.cache_etags = cache_etags @@ -82,9 +84,7 @@ def _urlnorm(cls, uri: str) -> str: def cache_url(cls, uri: str) -> str: return cls._urlnorm(uri) - def parse_cache_control( - self, headers: Mapping[str, str] - ) -> Dict[str, Optional[int]]: + def parse_cache_control(self, headers: Mapping[str, str]) -> dict[str, int | None]: known_directives = { # https://tools.ietf.org/html/rfc7234#section-5.2 "max-age": (int, True), @@ -103,7 +103,7 @@ def parse_cache_control( cc_headers = headers.get("cache-control", headers.get("Cache-Control", "")) - retval: Dict[str, 
Optional[int]] = {} + retval: dict[str, int | None] = {} for cc_directive in cc_headers.split(","): if not cc_directive.strip(): @@ -138,7 +138,7 @@ def parse_cache_control( return retval - def _load_from_cache(self, request: "PreparedRequest") -> Optional["HTTPResponse"]: + def _load_from_cache(self, request: PreparedRequest) -> HTTPResponse | None: """ Load a cached response, or return None if it's not available. """ @@ -159,9 +159,7 @@ def _load_from_cache(self, request: "PreparedRequest") -> Optional["HTTPResponse logger.warning("Cache entry deserialization failed, entry ignored") return result - def cached_request( - self, request: "PreparedRequest" - ) -> Union["HTTPResponse", "Literal[False]"]: + def cached_request(self, request: PreparedRequest) -> HTTPResponse | Literal[False]: """ Return a cached response if it exists in the cache, otherwise return False. @@ -271,7 +269,7 @@ def cached_request( # return the original handler return False - def conditional_headers(self, request: "PreparedRequest") -> Dict[str, str]: + def conditional_headers(self, request: PreparedRequest) -> dict[str, str]: resp = self._load_from_cache(request) new_headers = {} @@ -289,10 +287,10 @@ def conditional_headers(self, request: "PreparedRequest") -> Dict[str, str]: def _cache_set( self, cache_url: str, - request: "PreparedRequest", - response: "HTTPResponse", - body: Optional[bytes] = None, - expires_time: Optional[int] = None, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + expires_time: int | None = None, ) -> None: """ Store the data in the cache. @@ -318,10 +316,10 @@ def _cache_set( def cache_response( self, - request: "PreparedRequest", - response: "HTTPResponse", - body: Optional[bytes] = None, - status_codes: Optional[Collection[int]] = None, + request: PreparedRequest, + response: HTTPResponse, + body: bytes | None = None, + status_codes: Collection[int] | None = None, ) -> None: """ Algorithm for caching requests. 
@@ -400,7 +398,7 @@ def cache_response( expires_time = max(expires_time, 14 * 86400) - logger.debug("etag object cached for {0} seconds".format(expires_time)) + logger.debug(f"etag object cached for {expires_time} seconds") logger.debug("Caching due to etag") self._cache_set(cache_url, request, response, body, expires_time) @@ -441,7 +439,7 @@ def cache_response( expires_time = None logger.debug( - "Caching b/c of expires header. expires in {0} seconds".format( + "Caching b/c of expires header. expires in {} seconds".format( expires_time ) ) @@ -454,8 +452,8 @@ def cache_response( ) def update_cached_response( - self, request: "PreparedRequest", response: "HTTPResponse" - ) -> "HTTPResponse": + self, request: PreparedRequest, response: HTTPResponse + ) -> HTTPResponse: """On a 304 we will get a new set of headers that we want to update our cached value with, assuming we have one. @@ -480,11 +478,11 @@ def update_cached_response( excluded_headers = ["content-length"] cached_response.headers.update( - dict( - (k, v) + { + k: v for k, v in response.headers.items() # type: ignore[no-untyped-call] if k.lower() not in excluded_headers - ) + } ) # we want a 200 b/c we have content via the cache diff --git a/src/pip/_vendor/cachecontrol/filewrapper.py b/src/pip/_vendor/cachecontrol/filewrapper.py index 472ba600161..25143902a26 100644 --- a/src/pip/_vendor/cachecontrol/filewrapper.py +++ b/src/pip/_vendor/cachecontrol/filewrapper.py @@ -1,16 +1,17 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import mmap from tempfile import NamedTemporaryFile -from typing import TYPE_CHECKING, Any, Callable, Optional +from typing import TYPE_CHECKING, Any, Callable if TYPE_CHECKING: from http.client import HTTPResponse -class CallbackFileWrapper(object): +class CallbackFileWrapper: """ Small wrapper around a fp object which will tee everything read into a buffer, and when that file is closed it will execute a 
callback with the @@ -30,7 +31,7 @@ class CallbackFileWrapper(object): """ def __init__( - self, fp: "HTTPResponse", callback: Optional[Callable[[bytes], None]] + self, fp: HTTPResponse, callback: Callable[[bytes], None] | None ) -> None: self.__buf = NamedTemporaryFile("rb+", delete=True) self.__fp = fp @@ -93,7 +94,7 @@ def _close(self) -> None: # Important when caching big files. self.__buf.close() - def read(self, amt: Optional[int] = None) -> bytes: + def read(self, amt: int | None = None) -> bytes: data: bytes = self.__fp.read(amt) if data: # We may be dealing with b'', a sign that things are over: diff --git a/src/pip/_vendor/cachecontrol/heuristics.py b/src/pip/_vendor/cachecontrol/heuristics.py index 1e88ada68f2..b9d72ca4ac5 100644 --- a/src/pip/_vendor/cachecontrol/heuristics.py +++ b/src/pip/_vendor/cachecontrol/heuristics.py @@ -1,12 +1,13 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations import calendar import time from datetime import datetime, timedelta, timezone from email.utils import formatdate, parsedate, parsedate_tz -from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional +from typing import TYPE_CHECKING, Any, Mapping if TYPE_CHECKING: from pip._vendor.urllib3 import HTTPResponse @@ -14,7 +15,7 @@ TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" -def expire_after(delta: timedelta, date: Optional[datetime] = None) -> datetime: +def expire_after(delta: timedelta, date: datetime | None = None) -> datetime: date = date or datetime.now(timezone.utc) return date + delta @@ -23,8 +24,8 @@ def datetime_to_header(dt: datetime) -> str: return formatdate(calendar.timegm(dt.timetuple())) -class BaseHeuristic(object): - def warning(self, response: "HTTPResponse") -> Optional[str]: +class BaseHeuristic: + def warning(self, response: HTTPResponse) -> str | None: """ Return a valid 1xx warning header value describing the cache adjustments. 
@@ -35,7 +36,7 @@ def warning(self, response: "HTTPResponse") -> Optional[str]: """ return '110 - "Response is Stale"' - def update_headers(self, response: "HTTPResponse") -> Dict[str, str]: + def update_headers(self, response: HTTPResponse) -> dict[str, str]: """Update the response headers with any new headers. NOTE: This SHOULD always include some Warning header to @@ -44,7 +45,7 @@ def update_headers(self, response: "HTTPResponse") -> Dict[str, str]: """ return {} - def apply(self, response: "HTTPResponse") -> "HTTPResponse": + def apply(self, response: HTTPResponse) -> HTTPResponse: updated_headers = self.update_headers(response) if updated_headers: @@ -62,7 +63,7 @@ class OneDayCache(BaseHeuristic): future. """ - def update_headers(self, response: "HTTPResponse") -> Dict[str, str]: + def update_headers(self, response: HTTPResponse) -> dict[str, str]: headers = {} if "expires" not in response.headers: @@ -81,11 +82,11 @@ class ExpiresAfter(BaseHeuristic): def __init__(self, **kw: Any) -> None: self.delta = timedelta(**kw) - def update_headers(self, response: "HTTPResponse") -> Dict[str, str]: + def update_headers(self, response: HTTPResponse) -> dict[str, str]: expires = expire_after(self.delta) return {"expires": datetime_to_header(expires), "cache-control": "public"} - def warning(self, response: "HTTPResponse") -> Optional[str]: + def warning(self, response: HTTPResponse) -> str | None: tmpl = "110 - Automatically cached for %s. 
Response might be stale" return tmpl % self.delta @@ -117,7 +118,7 @@ class LastModified(BaseHeuristic): 501, } - def update_headers(self, resp: "HTTPResponse") -> Dict[str, str]: + def update_headers(self, resp: HTTPResponse) -> dict[str, str]: headers: Mapping[str, str] = resp.headers if "expires" in headers: @@ -149,5 +150,5 @@ def update_headers(self, resp: "HTTPResponse") -> Dict[str, str]: expires = date + freshness_lifetime return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} - def warning(self, resp: "HTTPResponse") -> Optional[str]: + def warning(self, resp: HTTPResponse) -> str | None: return None diff --git a/src/pip/_vendor/cachecontrol/serialize.py b/src/pip/_vendor/cachecontrol/serialize.py index f21eaea6f39..f9e967c3c34 100644 --- a/src/pip/_vendor/cachecontrol/serialize.py +++ b/src/pip/_vendor/cachecontrol/serialize.py @@ -1,39 +1,27 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -import base64 import io -import json -import pickle -import zlib -from typing import IO, TYPE_CHECKING, Any, Mapping, Optional +from typing import IO, TYPE_CHECKING, Any, Mapping, cast from pip._vendor import msgpack from pip._vendor.requests.structures import CaseInsensitiveDict from pip._vendor.urllib3 import HTTPResponse if TYPE_CHECKING: - from pip._vendor.requests import PreparedRequest, Request + from pip._vendor.requests import PreparedRequest -def _b64_decode_bytes(b: str) -> bytes: - return base64.b64decode(b.encode("ascii")) +class Serializer: + serde_version = "4" - -def _b64_decode_str(s: str) -> str: - return _b64_decode_bytes(s).decode("utf8") - - -_default_body_read = object() - - -class Serializer(object): def dumps( self, - request: "PreparedRequest", + request: PreparedRequest, response: HTTPResponse, - body: Optional[bytes] = None, + body: bytes | None = None, ) -> bytes: response_headers: CaseInsensitiveDict[str] = CaseInsensitiveDict( response.headers @@ -50,7 
+38,7 @@ def dumps( data = { "response": { "body": body, # Empty bytestring if body is stored separately - "headers": dict((str(k), str(v)) for k, v in response.headers.items()), # type: ignore[no-untyped-call] + "headers": {str(k): str(v) for k, v in response.headers.items()}, # type: ignore[no-untyped-call] "status": response.status, "version": response.version, "reason": str(response.reason), @@ -69,14 +57,17 @@ def dumps( header_value = str(header_value) data["vary"][header] = header_value - return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) + return b",".join([f"cc={self.serde_version}".encode(), self.serialize(data)]) + + def serialize(self, data: dict[str, Any]) -> bytes: + return cast(bytes, msgpack.dumps(data, use_bin_type=True)) def loads( self, - request: "PreparedRequest", + request: PreparedRequest, data: bytes, - body_file: Optional["IO[bytes]"] = None, - ) -> Optional[HTTPResponse]: + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: # Short circuit if we've been given an empty set of data if not data: return None @@ -99,7 +90,7 @@ def loads( # Dispatch to the actual load method for the given version try: - return getattr(self, "_loads_v{}".format(verstr))(request, data, body_file) # type: ignore[no-any-return] + return getattr(self, f"_loads_v{verstr}")(request, data, body_file) # type: ignore[no-any-return] except AttributeError: # This is a version we don't have a loads function for, so we'll @@ -108,10 +99,10 @@ def loads( def prepare_response( self, - request: "Request", + request: PreparedRequest, cached: Mapping[str, Any], - body_file: Optional["IO[bytes]"] = None, - ) -> Optional[HTTPResponse]: + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: """Verify our vary headers match and construct a real urllib3 HTTPResponse object. 
""" @@ -139,7 +130,7 @@ def prepare_response( cached["response"]["headers"] = headers try: - body: "IO[bytes]" + body: IO[bytes] if body_file is None: body = io.BytesIO(body_raw) else: @@ -160,71 +151,53 @@ def prepare_response( def _loads_v0( self, - request: "Request", + request: PreparedRequest, data: bytes, - body_file: Optional["IO[bytes]"] = None, + body_file: IO[bytes] | None = None, ) -> None: # The original legacy cache data. This doesn't contain enough # information to construct everything we need, so we'll treat this as # a miss. - return + return None def _loads_v1( self, - request: "Request", + request: PreparedRequest, data: bytes, - body_file: Optional["IO[bytes]"] = None, - ) -> Optional[HTTPResponse]: - try: - cached = pickle.loads(data) - except ValueError: - return None - - return self.prepare_response(request, cached, body_file) + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # The "v1" pickled cache format. This is no longer supported + # for security reasons, so we treat it as a miss. 
+ return None def _loads_v2( self, - request: "Request", + request: PreparedRequest, data: bytes, - body_file: Optional["IO[bytes]"] = None, - ) -> Optional[HTTPResponse]: - assert body_file is None - try: - cached = json.loads(zlib.decompress(data).decode("utf8")) - except (ValueError, zlib.error): - return None - - # We need to decode the items that we've base64 encoded - cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) - cached["response"]["headers"] = dict( - (_b64_decode_str(k), _b64_decode_str(v)) - for k, v in cached["response"]["headers"].items() - ) - cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) - cached["vary"] = dict( - (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) - for k, v in cached["vary"].items() - ) - - return self.prepare_response(request, cached, body_file) + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: + # The "v2" compressed base64 cache format. + # This has been removed due to age and poor size/performance + # characteristics, so we treat it as a miss. + return None def _loads_v3( self, - request: "Request", + request: PreparedRequest, data: bytes, - body_file: Optional["IO[bytes]"] = None, + body_file: IO[bytes] | None = None, ) -> None: # Due to Python 2 encoding issues, it's impossible to know for sure # exactly how to load v3 entries, thus we'll treat these as a miss so # that they get rewritten out as v4 entries. 
- return + return None def _loads_v4( self, - request: "Request", + request: PreparedRequest, data: bytes, - body_file: Optional["IO[bytes]"] = None, - ) -> Optional[HTTPResponse]: + body_file: IO[bytes] | None = None, + ) -> HTTPResponse | None: try: cached = msgpack.loads(data, raw=False) except ValueError: diff --git a/src/pip/_vendor/cachecontrol/wrapper.py b/src/pip/_vendor/cachecontrol/wrapper.py index 293e69fe7d4..f618bc363f1 100644 --- a/src/pip/_vendor/cachecontrol/wrapper.py +++ b/src/pip/_vendor/cachecontrol/wrapper.py @@ -1,8 +1,9 @@ # SPDX-FileCopyrightText: 2015 Eric Larson # # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations -from typing import TYPE_CHECKING, Collection, Optional, Type +from typing import TYPE_CHECKING, Collection from pip._vendor.cachecontrol.adapter import CacheControlAdapter from pip._vendor.cachecontrol.cache import DictCache @@ -17,15 +18,15 @@ def CacheControl( - sess: "requests.Session", - cache: Optional["BaseCache"] = None, + sess: requests.Session, + cache: BaseCache | None = None, cache_etags: bool = True, - serializer: Optional["Serializer"] = None, - heuristic: Optional["BaseHeuristic"] = None, - controller_class: Optional[Type["CacheController"]] = None, - adapter_class: Optional[Type[CacheControlAdapter]] = None, - cacheable_methods: Optional[Collection[str]] = None, -) -> "requests.Session": + serializer: Serializer | None = None, + heuristic: BaseHeuristic | None = None, + controller_class: type[CacheController] | None = None, + adapter_class: type[CacheControlAdapter] | None = None, + cacheable_methods: Collection[str] | None = None, +) -> requests.Session: cache = DictCache() if cache is None else cache adapter_class = adapter_class or CacheControlAdapter adapter = adapter_class( diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index d0f4c71cccc..c6809dfd6c3 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -1,4 +1,4 @@ -CacheControl==0.13.0 # 
Make sure to update the license in pyproject.toml for this. +CacheControl==0.13.1 # Make sure to update the license in pyproject.toml for this. colorama==0.4.6 distlib==0.3.6 distro==1.8.0 From a0976d8832f52c2f14472f7b20b1cf1776a63ac8 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 27 Jun 2023 14:47:09 +0100 Subject: [PATCH 548/730] Fix lint issues --- tests/unit/test_network_auth.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index 5dde6da57c5..e3cb772bb05 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -193,7 +193,7 @@ def test_keyring_get_password( expect: Tuple[Optional[str], Optional[str]], ) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth( index_urls=["http://example.com/path2", "http://example.com/path3"], keyring_provider="import", @@ -205,7 +205,7 @@ def test_keyring_get_password( def test_keyring_get_password_after_prompt(monkeypatch: pytest.MonkeyPatch) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth(keyring_provider="import") def ask_input(prompt: str) -> str: @@ -221,7 +221,7 @@ def test_keyring_get_password_after_prompt_when_none( monkeypatch: pytest.MonkeyPatch, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth(keyring_provider="import") def ask_input(prompt: str) -> str: @@ -242,7 +242,7 @@ def test_keyring_get_password_username_in_index( monkeypatch: pytest.MonkeyPatch, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: 
ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth( index_urls=["http://user@example.com/path2", "http://example.com/path4"], keyring_provider="import", @@ -278,7 +278,7 @@ def test_keyring_set_password( expect_save: bool, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth(prompting=True, keyring_provider="import") monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None)) monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds) @@ -354,7 +354,7 @@ def get_credential(self, system: str, username: str) -> Optional[Credential]: def test_keyring_get_credential( monkeypatch: pytest.MonkeyPatch, url: str, expect: str ) -> None: - monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2()) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2()) auth = MultiDomainBasicAuth( index_urls=["http://example.com/path1", "http://example.com/path2"], keyring_provider="import", @@ -378,7 +378,7 @@ def get_credential(self, system: str, username: str) -> None: def test_broken_keyring_disables_keyring(monkeypatch: pytest.MonkeyPatch) -> None: keyring_broken = KeyringModuleBroken() - monkeypatch.setitem(sys.modules, "keyring", keyring_broken) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring_broken) auth = MultiDomainBasicAuth( index_urls=["http://example.com/"], keyring_provider="import" From c7daa07f6a65c73173f623c1be34ed2956628715 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 27 Jun 2023 14:47:39 +0100 Subject: [PATCH 549/730] Reword the check for no hashes --- src/pip/_internal/models/link.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index 3cfc3e8c4fe..4453519ad02 100644 --- 
a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -112,9 +112,9 @@ def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str if hashes is None: return None hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES} - if len(hashes) > 0: - return hashes - return None + if not hashes: + return None + return hashes def _clean_url_path_part(part: str) -> str: From fab8cf7479f573a9284ae4c9a85f776c951c6656 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Tue, 27 Jun 2023 15:49:40 +0200 Subject: [PATCH 550/730] Remove Unused "type: ignore" comments --- tests/unit/test_network_auth.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index 5dde6da57c5..e3cb772bb05 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -193,7 +193,7 @@ def test_keyring_get_password( expect: Tuple[Optional[str], Optional[str]], ) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth( index_urls=["http://example.com/path2", "http://example.com/path3"], keyring_provider="import", @@ -205,7 +205,7 @@ def test_keyring_get_password( def test_keyring_get_password_after_prompt(monkeypatch: pytest.MonkeyPatch) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth(keyring_provider="import") def ask_input(prompt: str) -> str: @@ -221,7 +221,7 @@ def test_keyring_get_password_after_prompt_when_none( monkeypatch: pytest.MonkeyPatch, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = 
MultiDomainBasicAuth(keyring_provider="import") def ask_input(prompt: str) -> str: @@ -242,7 +242,7 @@ def test_keyring_get_password_username_in_index( monkeypatch: pytest.MonkeyPatch, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth( index_urls=["http://user@example.com/path2", "http://example.com/path4"], keyring_provider="import", @@ -278,7 +278,7 @@ def test_keyring_set_password( expect_save: bool, ) -> None: keyring = KeyringModuleV1() - monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring) auth = MultiDomainBasicAuth(prompting=True, keyring_provider="import") monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None)) monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds) @@ -354,7 +354,7 @@ def get_credential(self, system: str, username: str) -> Optional[Credential]: def test_keyring_get_credential( monkeypatch: pytest.MonkeyPatch, url: str, expect: str ) -> None: - monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2()) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2()) auth = MultiDomainBasicAuth( index_urls=["http://example.com/path1", "http://example.com/path2"], keyring_provider="import", @@ -378,7 +378,7 @@ def get_credential(self, system: str, username: str) -> None: def test_broken_keyring_disables_keyring(monkeypatch: pytest.MonkeyPatch) -> None: keyring_broken = KeyringModuleBroken() - monkeypatch.setitem(sys.modules, "keyring", keyring_broken) # type: ignore[misc] + monkeypatch.setitem(sys.modules, "keyring", keyring_broken) auth = MultiDomainBasicAuth( index_urls=["http://example.com/"], keyring_provider="import" From 782cff7e0121d5160acddbfae2ef41e98492ffe5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 10 Apr 2023 
15:59:59 +0200 Subject: [PATCH 551/730] Warn when legacy versions and specifiers are resolved Also warn in pip check. ... --- news/12063.removal.rst | 2 ++ src/pip/_internal/commands/check.py | 2 ++ src/pip/_internal/commands/download.py | 1 + src/pip/_internal/commands/install.py | 3 ++ src/pip/_internal/commands/wheel.py | 1 + src/pip/_internal/operations/check.py | 38 ++++++++++++++++++++++++++ src/pip/_internal/req/req_set.py | 37 +++++++++++++++++++++++++ 7 files changed, 84 insertions(+) create mode 100644 news/12063.removal.rst diff --git a/news/12063.removal.rst b/news/12063.removal.rst new file mode 100644 index 00000000000..037b0c6089a --- /dev/null +++ b/news/12063.removal.rst @@ -0,0 +1,2 @@ +Deprecate legacy version and version specifiers that don't conform to `PEP 440 +`_ diff --git a/src/pip/_internal/commands/check.py b/src/pip/_internal/commands/check.py index 584df9f55c5..5efd0a34160 100644 --- a/src/pip/_internal/commands/check.py +++ b/src/pip/_internal/commands/check.py @@ -7,6 +7,7 @@ from pip._internal.operations.check import ( check_package_set, create_package_set_from_installed, + warn_legacy_versions_and_specifiers, ) from pip._internal.utils.misc import write_output @@ -21,6 +22,7 @@ class CheckCommand(Command): def run(self, options: Values, args: List[str]) -> int: package_set, parsing_probs = create_package_set_from_installed() + warn_legacy_versions_and_specifiers(package_set) missing, conflicting = check_package_set(package_set) for project_name in missing: diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index 36e947c8c05..63bd53a50c8 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -130,6 +130,7 @@ def run(self, options: Values, args: List[str]) -> int: self.trace_basic_info(finder) requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + requirement_set.warn_legacy_versions_and_specifiers() downloaded: List[str] = [] for 
req in requirement_set.requirements.values(): diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 3c15ed4158c..f6a300804f4 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -387,6 +387,9 @@ def run(self, options: Values, args: List[str]) -> int: json.dump(report.to_dict(), f, indent=2, ensure_ascii=False) if options.dry_run: + # In non dry-run mode, the legacy versions and specifiers check + # will be done as part of conflict detection. + requirement_set.warn_legacy_versions_and_specifiers() would_install_items = sorted( (r.metadata["name"], r.metadata["version"]) for r in requirement_set.requirements_to_install diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index c6a588ff09b..e6735bd8da7 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -145,6 +145,7 @@ def run(self, options: Values, args: List[str]) -> int: self.trace_basic_info(finder) requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + requirement_set.warn_legacy_versions_and_specifiers() reqs_to_build: List[InstallRequirement] = [] for req in requirement_set.requirements.values(): diff --git a/src/pip/_internal/operations/check.py b/src/pip/_internal/operations/check.py index e3bce69b204..2610459228f 100644 --- a/src/pip/_internal/operations/check.py +++ b/src/pip/_internal/operations/check.py @@ -5,12 +5,15 @@ from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.specifiers import LegacySpecifier from pip._vendor.packaging.utils import NormalizedName, canonicalize_name +from pip._vendor.packaging.version import LegacyVersion from pip._internal.distributions import make_distribution_for_install_requirement from pip._internal.metadata import get_default_environment from pip._internal.metadata.base import 
DistributionVersion from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.deprecation import deprecated logger = logging.getLogger(__name__) @@ -57,6 +60,8 @@ def check_package_set( package name and returns a boolean. """ + warn_legacy_versions_and_specifiers(package_set) + missing = {} conflicting = {} @@ -147,3 +152,36 @@ def _create_whitelist( break return packages_affected + + +def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: + for project_name, package_details in package_set.items(): + if isinstance(package_details.version, LegacyVersion): + deprecated( + reason=( + f"{project_name} {package_details.version} " + f"has a non-standard version number." + ), + replacement=( + f"to upgrade to a newer version of {project_name} " + f"or contact the author to suggest that they " + f"release a version with a conforming version number" + ), + issue=12063, + gone_in="23.3", + ) + for dep in package_details.dependencies: + if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): + deprecated( + reason=( + f"{project_name} {package_details.version} " + f"has a non-standard dependency specifier {dep}." 
+ ), + replacement=( + f"to upgrade to a newer version of {project_name} " + f"or contact the author to suggest that they " + f"release a version with a conforming dependency specifiers" + ), + issue=12063, + gone_in="23.3", + ) diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py index ec7a6e07a25..cff67601737 100644 --- a/src/pip/_internal/req/req_set.py +++ b/src/pip/_internal/req/req_set.py @@ -2,9 +2,12 @@ from collections import OrderedDict from typing import Dict, List +from pip._vendor.packaging.specifiers import LegacySpecifier from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import LegacyVersion from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils.deprecation import deprecated logger = logging.getLogger(__name__) @@ -80,3 +83,37 @@ def requirements_to_install(self) -> List[InstallRequirement]: for install_req in self.all_requirements if not install_req.constraint and not install_req.satisfied_by ] + + def warn_legacy_versions_and_specifiers(self) -> None: + for req in self.requirements_to_install: + version = req.get_dist().version + if isinstance(version, LegacyVersion): + deprecated( + reason=( + f"pip has selected the non standard version {version} " + f"of {req}. In the future this version will be " + f"ignored as it isn't standard compliant." + ), + replacement=( + "set or update constraints to select another version " + "or contact the package author to fix the version number" + ), + issue=12063, + gone_in="23.3", + ) + for dep in req.get_dist().iter_dependencies(): + if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): + deprecated( + reason=( + f"pip has selected {req} {version} which has non " + f"standard dependency specifier {dep}. " + f"In the future this version of {req} will be " + f"ignored as it isn't standard compliant." 
+ ), + replacement=( + "set or update constraints to select another version " + "or contact the package author to fix the version number" + ), + issue=12063, + gone_in="23.3", + ) From 6507734aac0f3c0972aef5097b8d0b4defb791f8 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 29 Jun 2023 16:51:11 +0800 Subject: [PATCH 552/730] Fix string formatting --- src/pip/_internal/configuration.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 46562652faa..35189d0d2c8 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -216,10 +216,8 @@ def save(self) -> None: parser.write(f) except IOError as error: raise ConfigurationError( - "An error occurred while writing to the configuration file: {0}\n \ - Error message: {1}".format( - fname, error - ) + f"An error occurred while writing to the configuration file " + f"{fname}: {error}" ) # From 41f138e43a6c54804b7c7fe3d4a8477508ef4a97 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 29 Jun 2023 16:51:34 +0800 Subject: [PATCH 553/730] Minimize changeset --- src/pip/_internal/configuration.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 35189d0d2c8..0ed33ac9c75 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -347,7 +347,6 @@ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]: should_load_user_config = not self.isolated and not ( config_file and os.path.exists(config_file) ) - if should_load_user_config: # The legacy config file is overridden by the new config file yield kinds.USER, config_files[kinds.USER] From 256af8f6912799ebf66b2ebc6707aae2e0487fe1 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Thu, 29 Jun 2023 17:28:16 +0800 Subject: [PATCH 554/730] Catch OSError instead of IOError --- src/pip/_internal/configuration.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/configuration.py b/src/pip/_internal/configuration.py index 0ed33ac9c75..96f824955bf 100644 --- a/src/pip/_internal/configuration.py +++ b/src/pip/_internal/configuration.py @@ -214,7 +214,7 @@ def save(self) -> None: try: with open(fname, "w") as f: parser.write(f) - except IOError as error: + except OSError as error: raise ConfigurationError( f"An error occurred while writing to the configuration file " f"{fname}: {error}" From 5d0c2773b8bf0cc569c82e6d3697fe7b89c71192 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Fri, 30 Jun 2023 11:38:12 +0100 Subject: [PATCH 555/730] Stop using a RAM disk for the Windows tests --- .github/workflows/ci.yml | 27 ++++++++------------------- 1 file changed, 8 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3b35e93b21f..1361980565d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -167,24 +167,13 @@ jobs: with: python-version: ${{ matrix.python }} - # We use a RAMDisk on Windows, since filesystem IO is a big slowdown - # for our tests. - - name: Create a RAMDisk - run: ./tools/ci/New-RAMDisk.ps1 -Drive R -Size 1GB - - - name: Setup RAMDisk permissions - run: | - mkdir R:\Temp - $acl = Get-Acl "R:\Temp" - $rule = New-Object System.Security.AccessControl.FileSystemAccessRule( - "Everyone", "FullControl", "ContainerInherit,ObjectInherit", "None", "Allow" - ) - $acl.AddAccessRule($rule) - Set-Acl "R:\Temp" $acl - + # We use C:\Temp (which is already available on the worker) + # as a temporary directory for all of the tests because the + # default value (under the user dir) is more deeply nested + # and causes tests to fail with "path too long" errors. 
- run: pip install nox env: - TEMP: "R:\\Temp" + TEMP: "C:\\Temp" # Main check - name: Run unit tests @@ -194,7 +183,7 @@ jobs: -m unit --verbose --numprocesses auto --showlocals env: - TEMP: "R:\\Temp" + TEMP: "C:\\Temp" - name: Run integration tests (group 1) if: matrix.group == 1 @@ -203,7 +192,7 @@ jobs: -m integration -k "not test_install" --verbose --numprocesses auto --showlocals env: - TEMP: "R:\\Temp" + TEMP: "C:\\Temp" - name: Run integration tests (group 2) if: matrix.group == 2 @@ -212,7 +201,7 @@ jobs: -m integration -k "test_install" --verbose --numprocesses auto --showlocals env: - TEMP: "R:\\Temp" + TEMP: "C:\\Temp" tests-zipapp: name: tests / zipapp From 45468f06d429080a9042909b76cfc25fce9bee5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Mon, 29 May 2023 13:57:52 +0200 Subject: [PATCH 556/730] Pass revisions options explicitly to mercurial commands --- news/12119.bugfix.rst | 3 +++ src/pip/_internal/vcs/mercurial.py | 2 +- tests/unit/test_vcs.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 news/12119.bugfix.rst diff --git a/news/12119.bugfix.rst b/news/12119.bugfix.rst new file mode 100644 index 00000000000..da8d8b04dcd --- /dev/null +++ b/news/12119.bugfix.rst @@ -0,0 +1,3 @@ +Pass the ``-r`` flag to mercurial to be explicit that a revision is passed and protect +against ``hg`` options injection as part of VCS URLs. Users that do not have control on +VCS URLs passed to pip are advised to upgrade. 
diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py index 2a005e0aff2..4595960b5bf 100644 --- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -31,7 +31,7 @@ class Mercurial(VersionControl): @staticmethod def get_base_rev_args(rev: str) -> List[str]: - return [rev] + return ["-r", rev] def fetch_new( self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py index 566c88cf02b..38daaa0f21d 100644 --- a/tests/unit/test_vcs.py +++ b/tests/unit/test_vcs.py @@ -66,7 +66,7 @@ def test_rev_options_repr() -> None: # First check VCS-specific RevOptions behavior. (Bazaar, [], ["-r", "123"], {}), (Git, ["HEAD"], ["123"], {}), - (Mercurial, [], ["123"], {}), + (Mercurial, [], ["-r", "123"], {}), (Subversion, [], ["-r", "123"], {}), # Test extra_args. For this, test using a single VersionControl class. ( From b99e082b003788f2e8abbad47d461f495faad892 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 1 Jul 2023 14:42:10 +0100 Subject: [PATCH 557/730] Record download of completed partial requirements --- src/pip/_internal/operations/prepare.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 130b9737742..8d7151353f0 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -471,6 +471,7 @@ def _complete_partial_requirements( logger.debug("Downloading link %s to %s", link, filepath) req = links_to_fully_download[link] req.local_file_path = filepath + self._downloaded[req.link.url] = filepath # This step is necessary to ensure all lazy wheels are processed # successfully by the 'download', 'wheel', and 'install' commands. 
From cb25bf3731d46697586fc72a24ba1f8e57311377 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 1 Jul 2023 14:51:10 +0100 Subject: [PATCH 558/730] Add a news file --- news/11847.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11847.bugfix.rst diff --git a/news/11847.bugfix.rst b/news/11847.bugfix.rst new file mode 100644 index 00000000000..1cad477eaa2 --- /dev/null +++ b/news/11847.bugfix.rst @@ -0,0 +1 @@ +Prevent downloading files twice when PEP 658 metadata is present From 647ba8d07e7832ea69d93f9a686d8f276e669a14 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 3 Jul 2023 10:35:01 +0100 Subject: [PATCH 559/730] Limit the double download fix to wheels --- src/pip/_internal/operations/prepare.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 8d7151353f0..5d9bedc031a 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -471,7 +471,19 @@ def _complete_partial_requirements( logger.debug("Downloading link %s to %s", link, filepath) req = links_to_fully_download[link] req.local_file_path = filepath - self._downloaded[req.link.url] = filepath + # TODO: This needs fixing for sdists + # This is an emergency fix for #11847, which reports that + # distributions get downloaded twice when metadata is loaded + # from a PEP 658 standalone metadata file. Setting _downloaded + # fixes this for wheels, but breaks the sdist case (tests + # test_download_metadata). As PyPI is currently not serving + # metadata for wheels, this is not an immediate issue. + # Fixing the problem properly looks like it will require a + # complete refactoring of the `prepare_linked_requirements_more` + # logic, and I haven't a clue where to start on that, so for now + # I have fixed the issue *just* for wheels. 
+ if req.is_wheel: + self._downloaded[req.link.url] = filepath # This step is necessary to ensure all lazy wheels are processed # successfully by the 'download', 'wheel', and 'install' commands. From 8e80a3ad9a5b80de72efad6cbad3bebf2328642b Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Mon, 3 Jul 2023 10:45:01 +0100 Subject: [PATCH 560/730] Fix typo --- src/pip/_internal/operations/prepare.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 5d9bedc031a..49d86268a3b 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -476,7 +476,7 @@ def _complete_partial_requirements( # distributions get downloaded twice when metadata is loaded # from a PEP 658 standalone metadata file. Setting _downloaded # fixes this for wheels, but breaks the sdist case (tests - # test_download_metadata). As PyPI is currently not serving + # test_download_metadata). As PyPI is currently only serving # metadata for wheels, this is not an immediate issue. 
# Fixing the problem properly looks like it will require a # complete refactoring of the `prepare_linked_requirements_more` From 82c6a8c9b69abd8f085836f5c362942c9b27bb00 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 4 Jul 2023 15:35:47 +0800 Subject: [PATCH 561/730] Add 3.12 to noxfile --- noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index ee03447d359..e64686dcabe 100644 --- a/noxfile.py +++ b/noxfile.py @@ -67,7 +67,7 @@ def should_update_common_wheels() -> bool: # ----------------------------------------------------------------------------- # Development Commands # ----------------------------------------------------------------------------- -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "pypy3"]) +@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy3"]) def test(session: nox.Session) -> None: # Get the common wheels. if should_update_common_wheels(): From c05884a7144012c5bea1ef9c64d568d5649f6cd1 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 4 Jul 2023 15:49:48 +0800 Subject: [PATCH 562/730] Ensure setuptools and wheel are installed when needed --- noxfile.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/noxfile.py b/noxfile.py index e64686dcabe..041d9039974 100644 --- a/noxfile.py +++ b/noxfile.py @@ -89,6 +89,7 @@ def test(session: nox.Session) -> None: shutil.rmtree(sdist_dir, ignore_errors=True) # fmt: off + session.install("setuptools") session.run( "python", "setup.py", "sdist", "--formats=zip", "--dist-dir", sdist_dir, silent=True, @@ -351,6 +352,7 @@ def build_dists(session: nox.Session) -> List[str]: ) session.log("# Build distributions") + session.install("setuptools", "wheel") session.run("python", "setup.py", "sdist", "bdist_wheel", silent=True) produced_dists = glob.glob("dist/*") From 4abb8ac07b4f532a592555bb6b7196c066f572c6 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 4 Jul 2023 15:53:01 +0800 Subject: [PATCH 563/730] Fix 
Pytest --use-venv init --- tests/lib/venv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lib/venv.py b/tests/lib/venv.py index e65a3291230..12aa739db23 100644 --- a/tests/lib/venv.py +++ b/tests/lib/venv.py @@ -124,7 +124,7 @@ def _create(self, clear: bool = False) -> None: ) elif self._venv_type == "venv": builder = _venv.EnvBuilder() - context = builder.ensure_directories(self.location) + context = builder.ensure_directories(os.fspath(self.location)) builder.create_configuration(context) builder.setup_python(context) self.site.mkdir(parents=True, exist_ok=True) From 56f8b38e1e791bbfc18ec9aae68c113c84d3187b Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 4 Jul 2023 17:01:08 +0800 Subject: [PATCH 564/730] Fix type handling --- tests/conftest.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 57dd7e68a2b..a183cadf2e9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -49,7 +49,7 @@ if TYPE_CHECKING: from typing import Protocol - from wsgi import WSGIApplication + from _typeshed.wsgi import WSGIApplication else: # TODO: Protocol was introduced in Python 3.8. Remove this branch when # dropping support for Python 3.7. 
@@ -645,7 +645,12 @@ def pip(self, *args: Union[str, Path]) -> InMemoryPipResult: try: returncode = pip_entry_point([os.fspath(a) for a in args]) except SystemExit as e: - returncode = e.code or 0 + if isinstance(e.code, int): + returncode = e.code + elif e.code: + returncode = 1 + else: + returncode = 0 finally: sys.stdout = orig_stdout return InMemoryPipResult(returncode, stdout.getvalue()) From 4a014f953da027cb314a995ee16f0e28fb821915 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 4 Jul 2023 17:03:11 +0800 Subject: [PATCH 565/730] Replace distutils with sysconfig on Python 3.12+ --- tests/functional/test_build_env.py | 41 +++++++++++++++++++++--------- 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/tests/functional/test_build_env.py b/tests/functional/test_build_env.py index 22a71cd3200..20cd181be07 100644 --- a/tests/functional/test_build_env.py +++ b/tests/functional/test_build_env.py @@ -1,4 +1,5 @@ import os +import sys from textwrap import dedent from typing import Optional @@ -203,6 +204,31 @@ def test_build_env_overlay_prefix_has_priority(script: PipTestEnvironment) -> No assert result.stdout.strip() == "2.0", str(result) +if sys.version_info < (3, 12): + BUILD_ENV_ERROR_DEBUG_CODE = r""" + from distutils.sysconfig import get_python_lib + print( + f'imported `pkg` from `{pkg.__file__}`', + file=sys.stderr) + print('system sites:\n ' + '\n '.join(sorted({ + get_python_lib(plat_specific=0), + get_python_lib(plat_specific=1), + })), file=sys.stderr) + """ +else: + BUILD_ENV_ERROR_DEBUG_CODE = r""" + from sysconfig import get_paths + paths = get_paths() + print( + f'imported `pkg` from `{pkg.__file__}`', + file=sys.stderr) + print('system sites:\n ' + '\n '.join(sorted({ + paths['platlib'], + paths['purelib'], + })), file=sys.stderr) + """ + + @pytest.mark.usefixtures("enable_user_site") def test_build_env_isolation(script: PipTestEnvironment) -> None: # Create dummy `pkg` wheel. 
@@ -231,8 +257,7 @@ def test_build_env_isolation(script: PipTestEnvironment) -> None: run_with_build_env( script, "", - r""" - from distutils.sysconfig import get_python_lib + f""" import sys try: @@ -240,17 +265,9 @@ def test_build_env_isolation(script: PipTestEnvironment) -> None: except ImportError: pass else: - print( - f'imported `pkg` from `{pkg.__file__}`', - file=sys.stderr) - print('system sites:\n ' + '\n '.join(sorted({ - get_python_lib(plat_specific=0), - get_python_lib(plat_specific=1), - })), file=sys.stderr) - print('sys.path:\n ' + '\n '.join(sys.path), file=sys.stderr) + {BUILD_ENV_ERROR_DEBUG_CODE} + print('sys.path:\\n ' + '\\n '.join(sys.path), file=sys.stderr) sys.exit(1) - """ - f""" # second check: direct check of exclusion of system site packages import os From 60d3c0447ef6ea659f6f4666758e50c0ba99fb69 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 4 Jul 2023 17:04:09 +0800 Subject: [PATCH 566/730] Skip tests that do not make sense on Python 3.12+ --- tests/functional/test_install.py | 20 +++++++++++++++----- tests/functional/test_uninstall.py | 20 ++++++++++++++++++++ 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 8559d93684b..eabddfe58fa 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -848,14 +848,18 @@ def test_editable_install__local_dir_no_setup_py( ) +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network -def test_editable_install__local_dir_no_setup_py_with_pyproject( +def test_editable_install_legacy__local_dir_no_setup_py_with_pyproject( script: PipTestEnvironment, ) -> None: """ - Test installing in editable mode from a local directory with no setup.py - but that does have pyproject.toml with a build backend that does not support - the build_editable hook. 
+ Test installing in legacy editable mode from a local directory with no + setup.py but that does have pyproject.toml with a build backend that does + not support the build_editable hook. """ local_dir = script.scratch_path.joinpath("temp") local_dir.mkdir() @@ -1383,8 +1387,14 @@ def test_install_editable_with_prefix_setup_py(script: PipTestEnvironment) -> No _test_install_editable_with_prefix(script, {"setup.py": setup_py}) +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network -def test_install_editable_with_prefix_setup_cfg(script: PipTestEnvironment) -> None: +def test_install_editable_legacy_with_prefix_setup_cfg( + script: PipTestEnvironment, +) -> None: setup_cfg = """[metadata] name = pkga version = 0.1 diff --git a/tests/functional/test_uninstall.py b/tests/functional/test_uninstall.py index 87e7157497c..be7fe4c3341 100644 --- a/tests/functional/test_uninstall.py +++ b/tests/functional/test_uninstall.py @@ -37,6 +37,10 @@ def test_basic_uninstall(script: PipTestEnvironment) -> None: assert_all_changes(result, result2, [script.venv / "build", "cache"]) +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="distutils is no longer available in Python 3.12+", +) def test_basic_uninstall_distutils(script: PipTestEnvironment) -> None: """ Test basic install and uninstall. 
@@ -68,6 +72,10 @@ def test_basic_uninstall_distutils(script: PipTestEnvironment) -> None: ) in result.stderr +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network def test_basic_uninstall_with_scripts(script: PipTestEnvironment) -> None: """ @@ -101,6 +109,10 @@ def test_uninstall_invalid_parameter( assert expected_message in result.stderr +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network def test_uninstall_easy_install_after_import(script: PipTestEnvironment) -> None: """ @@ -126,6 +138,10 @@ def test_uninstall_easy_install_after_import(script: PipTestEnvironment) -> None ) +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network def test_uninstall_trailing_newline(script: PipTestEnvironment) -> None: """ @@ -337,6 +353,10 @@ def test_uninstall_console_scripts_uppercase_name(script: PipTestEnvironment) -> assert not script_name.exists() +@pytest.mark.skipif( + sys.version_info >= (3, 12), + reason="Setuptools<64 does not support Python 3.12+", +) @pytest.mark.network def test_uninstall_easy_installed_console_scripts(script: PipTestEnvironment) -> None: """ From 37640292a17e40fc980988f7cc01b4bb544fe477 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 4 Jul 2023 17:13:39 +0800 Subject: [PATCH 567/730] Add CPython 3.12 to documentation --- docs/html/installation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/html/installation.md b/docs/html/installation.md index 036a91397a5..c61fceaed2b 100644 --- a/docs/html/installation.md +++ b/docs/html/installation.md @@ -103,7 +103,7 @@ $ pip install --upgrade pip The current version of pip works on: - Windows, Linux and MacOS. -- CPython 3.7, 3.8, 3.9, 3.10 and latest PyPy3. +- CPython 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, and latest PyPy3.
pip is tested to work on the latest patch version of the Python interpreter, for each of the minor versions listed above. Previous patch versions are From e29dc1cb0a775b761d71ec659cc49f99bd6c23fb Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 4 Jul 2023 17:06:04 +0800 Subject: [PATCH 568/730] Add Python 3.12 to CI --- .github/workflows/ci.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1361980565d..c9a2ff6659a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -91,7 +91,7 @@ jobs: - run: git diff --exit-code tests-unix: - name: tests / ${{ matrix.python }} / ${{ matrix.os }} + name: tests / ${{ matrix.python.key || matrix.python }} / ${{ matrix.os }} runs-on: ${{ matrix.os }}-latest needs: [packaging, determine-changes] @@ -109,12 +109,14 @@ jobs: - "3.9" - "3.10" - "3.11" + - key: "3.12" + full: "3.12.0-beta.3" steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: - python-version: ${{ matrix.python }} + python-version: ${{ matrix.python.full || matrix.python }} - name: Install Ubuntu dependencies if: matrix.os == 'Ubuntu' @@ -129,12 +131,12 @@ jobs: # Main check - name: Run unit tests run: >- - nox -s test-${{ matrix.python }} -- + nox -s test-${{ matrix.python.key || matrix.python }} -- -m unit --verbose --numprocesses auto --showlocals - name: Run integration tests run: >- - nox -s test-${{ matrix.python }} -- + nox -s test-${{ matrix.python.key || matrix.python }} -- -m integration --verbose --numprocesses auto --showlocals --durations=5 From 5dc65eabb75f89d4f4749b6c764042c227f6870a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=A0=D0=BE=D0=BC=D0=B0=D0=BD=20=D0=94=D0=BE=D0=BD=D1=87?= =?UTF-8?q?=D0=B5=D0=BD=D0=BA=D0=BE?= Date: Mon, 15 May 2023 22:42:51 +0400 Subject: [PATCH 569/730] Don't exclude setuptools, distribute & wheel from freeze output on Python 3.12+ Due to the advent of build isolation, it is no longer necessary 
to install setuptools and wheel in an environment just to install other packages. Moreover, on Python 3.12 both ensurepip [1] and virtualenv [2] are to stop installing setuptools & wheel by default. This means that when those packages are present in a Python 3.12+ environment, it is reasonable to assume that they are runtime dependencies of the user's project, and therefore should be included in freeze output. distribute is just obsolete. [1] https://github.com/python/cpython/issues/95299 [2] https://github.com/pypa/virtualenv/pull/2558 --- news/4256.removal.rst | 3 +++ src/pip/_internal/commands/freeze.py | 5 ++++- tests/functional/test_freeze.py | 20 +++++++++++++++++++- 3 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 news/4256.removal.rst diff --git a/news/4256.removal.rst b/news/4256.removal.rst new file mode 100644 index 00000000000..5440f532add --- /dev/null +++ b/news/4256.removal.rst @@ -0,0 +1,3 @@ +``freeze`` no longer excludes the ``setuptools``, ``distribute`` and ``wheel`` +packages from the output by default when running on Python 3.12 or later. +Use ``--exclude`` if you wish to exclude any of these packages. 
diff --git a/src/pip/_internal/commands/freeze.py b/src/pip/_internal/commands/freeze.py index 5fa6d39b2c7..87f281d76fb 100644 --- a/src/pip/_internal/commands/freeze.py +++ b/src/pip/_internal/commands/freeze.py @@ -8,7 +8,10 @@ from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs -DEV_PKGS = {"pip", "setuptools", "distribute", "wheel"} +DEV_PKGS = {"pip"} + +if sys.version_info < (3, 12): + DEV_PKGS |= {"setuptools", "distribute", "wheel"} class FreezeCommand(Command): diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index b24b27edcc6..81a660ab6f4 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -88,11 +88,29 @@ def test_basic_freeze(script: PipTestEnvironment) -> None: def test_freeze_with_pip(script: PipTestEnvironment) -> None: - """Test pip shows itself""" + """Test that pip shows itself only when --all is used""" + result = script.pip("freeze") + assert "pip==" not in result.stdout result = script.pip("freeze", "--all") assert "pip==" in result.stdout +def test_freeze_with_setuptools(script: PipTestEnvironment) -> None: + """ + Test that pip shows setuptools only when --all is used + or Python version is >=3.12 + """ + + result = script.pip("freeze") + if sys.version_info >= (3, 12): + assert "setuptools==" in result.stdout + else: + assert "setuptools==" not in result.stdout + + result = script.pip("freeze", "--all") + assert "setuptools==" in result.stdout + + def test_exclude_and_normalization(script: PipTestEnvironment, tmpdir: Path) -> None: req_path = wheel.make_wheel(name="Normalizable_Name", version="1.0").save_to_dir( tmpdir From 393ccfbc31eccdf7f053ee4d62b055e515ef3183 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=A0=D0=BE=D0=BC=D0=B0=D0=BD=20=D0=94=D0=BE=D0=BD=D1=87?= =?UTF-8?q?=D0=B5=D0=BD=D0=BA=D0=BE?= Date: Mon, 29 May 2023 02:23:01 +0400 Subject: [PATCH 570/730] test_freeze_with_setuptools: use mocks This makes it possible to test 
both branches on any Python version. --- src/pip/_internal/commands/freeze.py | 20 +++++++++++----- tests/functional/test_freeze.py | 34 ++++++++++++++++++++++------ 2 files changed, 41 insertions(+), 13 deletions(-) diff --git a/src/pip/_internal/commands/freeze.py b/src/pip/_internal/commands/freeze.py index 87f281d76fb..fd9d88a8b01 100644 --- a/src/pip/_internal/commands/freeze.py +++ b/src/pip/_internal/commands/freeze.py @@ -1,6 +1,6 @@ import sys from optparse import Values -from typing import List +from typing import AbstractSet, List from pip._internal.cli import cmdoptions from pip._internal.cli.base_command import Command @@ -8,10 +8,18 @@ from pip._internal.operations.freeze import freeze from pip._internal.utils.compat import stdlib_pkgs -DEV_PKGS = {"pip"} -if sys.version_info < (3, 12): - DEV_PKGS |= {"setuptools", "distribute", "wheel"} +def _should_suppress_build_backends() -> bool: + return sys.version_info < (3, 12) + + +def _dev_pkgs() -> AbstractSet[str]: + pkgs = {"pip"} + + if _should_suppress_build_backends(): + pkgs |= {"setuptools", "distribute", "wheel"} + + return pkgs class FreezeCommand(Command): @@ -64,7 +72,7 @@ def add_options(self) -> None: action="store_true", help=( "Do not skip these packages in the output:" - " {}".format(", ".join(DEV_PKGS)) + " {}".format(", ".join(_dev_pkgs())) ), ) self.cmd_opts.add_option( @@ -80,7 +88,7 @@ def add_options(self) -> None: def run(self, options: Values, args: List[str]) -> int: skip = set(stdlib_pkgs) if not options.freeze_all: - skip.update(DEV_PKGS) + skip.update(_dev_pkgs()) if options.excludes: skip.update(options.excludes) diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index 81a660ab6f4..d6122308a69 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -98,18 +98,38 @@ def test_freeze_with_pip(script: PipTestEnvironment) -> None: def test_freeze_with_setuptools(script: PipTestEnvironment) -> None: """ Test that pip shows 
setuptools only when --all is used - or Python version is >=3.12 + or _should_suppress_build_backends() returns false """ - result = script.pip("freeze") - if sys.version_info >= (3, 12): - assert "setuptools==" in result.stdout - else: - assert "setuptools==" not in result.stdout - result = script.pip("freeze", "--all") assert "setuptools==" in result.stdout + (script.site_packages_path / "mock.pth").write_text("import mock\n") + + (script.site_packages_path / "mock.py").write_text( + textwrap.dedent( + """\ + import pip._internal.commands.freeze as freeze + freeze._should_suppress_build_backends = lambda: False + """ + ) + ) + + result = script.pip("freeze") + assert "setuptools==" in result.stdout + + (script.site_packages_path / "mock.py").write_text( + textwrap.dedent( + """\ + import pip._internal.commands.freeze as freeze + freeze._should_suppress_build_backends = lambda: True + """ + ) + ) + + result = script.pip("freeze") + assert "setuptools==" not in result.stdout + def test_exclude_and_normalization(script: PipTestEnvironment, tmpdir: Path) -> None: req_path = wheel.make_wheel(name="Normalizable_Name", version="1.0").save_to_dir( From 7a69c00720fb8e660ef0d1df174b79e039bdba95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=D0=A0=D0=BE=D0=BC=D0=B0=D0=BD=20=D0=94=D0=BE=D0=BD=D1=87?= =?UTF-8?q?=D0=B5=D0=BD=D0=BA=D0=BE?= Date: Fri, 7 Jul 2023 01:48:03 +0300 Subject: [PATCH 571/730] Make the changelog entry more verbose Co-authored-by: Tzu-ping Chung --- news/4256.removal.rst | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/news/4256.removal.rst b/news/4256.removal.rst index 5440f532add..eb89898501b 100644 --- a/news/4256.removal.rst +++ b/news/4256.removal.rst @@ -1,3 +1,4 @@ -``freeze`` no longer excludes the ``setuptools``, ``distribute`` and ``wheel`` -packages from the output by default when running on Python 3.12 or later. -Use ``--exclude`` if you wish to exclude any of these packages. 
+``freeze`` no longer excludes the ``setuptools``, ``distribute``, and ``wheel`` +from the output when running on Python 3.12 or later, where they are not +included in a virtual environment by default. Use ``--exclude`` if you wish to +exclude any of these packages. From 856c7ec27e8c1dc58a32191c1bcbbc151727a893 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 9 Jul 2023 21:30:08 +0100 Subject: [PATCH 572/730] Upgrade platformdirs to 3.8.1 --- news/platformdirs.vendor.rst | 1 + src/pip/_vendor/platformdirs/__init__.py | 143 +++++++++++++++-------- src/pip/_vendor/platformdirs/__main__.py | 24 ++-- src/pip/_vendor/platformdirs/android.py | 112 +++++++++++++++--- src/pip/_vendor/platformdirs/api.py | 70 ++++++++--- src/pip/_vendor/platformdirs/macos.py | 33 +++++- src/pip/_vendor/platformdirs/unix.py | 97 +++++++++------ src/pip/_vendor/platformdirs/version.py | 4 +- src/pip/_vendor/platformdirs/windows.py | 104 +++++++++++++---- src/pip/_vendor/vendor.txt | 2 +- 10 files changed, 441 insertions(+), 149 deletions(-) create mode 100644 news/platformdirs.vendor.rst diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst new file mode 100644 index 00000000000..f396d84a666 --- /dev/null +++ b/news/platformdirs.vendor.rst @@ -0,0 +1 @@ +Upgrade platformdirs to 3.8.1 diff --git a/src/pip/_vendor/platformdirs/__init__.py b/src/pip/_vendor/platformdirs/__init__.py index c46a145cdc1..5ebf5957b46 100644 --- a/src/pip/_vendor/platformdirs/__init__.py +++ b/src/pip/_vendor/platformdirs/__init__.py @@ -6,17 +6,20 @@ import os import sys -from pathlib import Path - -if sys.version_info >= (3, 8): # pragma: no cover (py38+) - from typing import Literal -else: # pragma: no cover (py38+) - from pip._vendor.typing_extensions import Literal +from typing import TYPE_CHECKING from .api import PlatformDirsABC from .version import __version__ from .version import __version_tuple__ as __version_info__ +if TYPE_CHECKING: + from pathlib import Path + + if sys.version_info 
>= (3, 8): # pragma: no cover (py38+) + from typing import Literal + else: # pragma: no cover (py38+) + from pip._vendor.typing_extensions import Literal + def _set_platform_dir_class() -> type[PlatformDirsABC]: if sys.platform == "win32": @@ -48,8 +51,8 @@ def user_data_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -72,8 +75,8 @@ def site_data_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - multipath: bool = False, - ensure_exists: bool = False, + multipath: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -96,8 +99,8 @@ def user_config_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -120,8 +123,8 @@ def site_config_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - multipath: bool = False, - ensure_exists: bool = False, + multipath: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -144,8 +147,8 @@ def user_cache_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. 
@@ -168,8 +171,8 @@ def site_cache_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -192,8 +195,8 @@ def user_state_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -216,8 +219,8 @@ def user_log_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. 
@@ -237,18 +240,36 @@ def user_log_dir( def user_documents_dir() -> str: - """ - :returns: documents directory tied to the user - """ + """:returns: documents directory tied to the user""" return PlatformDirs().user_documents_dir +def user_downloads_dir() -> str: + """:returns: downloads directory tied to the user""" + return PlatformDirs().user_downloads_dir + + +def user_pictures_dir() -> str: + """:returns: pictures directory tied to the user""" + return PlatformDirs().user_pictures_dir + + +def user_videos_dir() -> str: + """:returns: videos directory tied to the user""" + return PlatformDirs().user_videos_dir + + +def user_music_dir() -> str: + """:returns: music directory tied to the user""" + return PlatformDirs().user_music_dir + + def user_runtime_dir( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> str: """ :param appname: See `appname `. @@ -271,8 +292,8 @@ def user_data_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -295,8 +316,8 @@ def site_data_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - multipath: bool = False, - ensure_exists: bool = False, + multipath: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. 
@@ -319,8 +340,8 @@ def user_config_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -343,8 +364,8 @@ def site_config_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - multipath: bool = False, - ensure_exists: bool = False, + multipath: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -367,8 +388,8 @@ def site_cache_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -391,8 +412,8 @@ def user_cache_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -415,8 +436,8 @@ def user_state_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - ensure_exists: bool = False, + roaming: bool = False, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. 
@@ -439,8 +460,8 @@ def user_log_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. @@ -460,18 +481,36 @@ def user_log_path( def user_documents_path() -> Path: - """ - :returns: documents path tied to the user - """ + """:returns: documents path tied to the user""" return PlatformDirs().user_documents_path +def user_downloads_path() -> Path: + """:returns: downloads path tied to the user""" + return PlatformDirs().user_downloads_path + + +def user_pictures_path() -> Path: + """:returns: pictures path tied to the user""" + return PlatformDirs().user_pictures_path + + +def user_videos_path() -> Path: + """:returns: videos path tied to the user""" + return PlatformDirs().user_videos_path + + +def user_music_path() -> Path: + """:returns: music path tied to the user""" + return PlatformDirs().user_music_path + + def user_runtime_path( appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - opinion: bool = True, - ensure_exists: bool = False, + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 ) -> Path: """ :param appname: See `appname `. 
@@ -502,6 +541,10 @@ def user_runtime_path( "user_state_dir", "user_log_dir", "user_documents_dir", + "user_downloads_dir", + "user_pictures_dir", + "user_videos_dir", + "user_music_dir", "user_runtime_dir", "site_data_dir", "site_config_dir", @@ -512,6 +555,10 @@ def user_runtime_path( "user_state_path", "user_log_path", "user_documents_path", + "user_downloads_path", + "user_pictures_path", + "user_videos_path", + "user_music_path", "user_runtime_path", "site_data_path", "site_config_path", diff --git a/src/pip/_vendor/platformdirs/__main__.py b/src/pip/_vendor/platformdirs/__main__.py index 7171f13114e..6a0d6dd12e3 100644 --- a/src/pip/_vendor/platformdirs/__main__.py +++ b/src/pip/_vendor/platformdirs/__main__.py @@ -1,3 +1,4 @@ +"""Main entry point.""" from __future__ import annotations from pip._vendor.platformdirs import PlatformDirs, __version__ @@ -9,6 +10,10 @@ "user_state_dir", "user_log_dir", "user_documents_dir", + "user_downloads_dir", + "user_pictures_dir", + "user_videos_dir", + "user_music_dir", "user_runtime_dir", "site_data_dir", "site_config_dir", @@ -17,30 +22,31 @@ def main() -> None: + """Run main entry point.""" app_name = "MyApp" app_author = "MyCompany" - print(f"-- platformdirs {__version__} --") + print(f"-- platformdirs {__version__} --") # noqa: T201 - print("-- app dirs (with optional 'version')") + print("-- app dirs (with optional 'version')") # noqa: T201 dirs = PlatformDirs(app_name, app_author, version="1.0") for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") + print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201 - print("\n-- app dirs (without optional 'version')") + print("\n-- app dirs (without optional 'version')") # noqa: T201 dirs = PlatformDirs(app_name, app_author) for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") + print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201 - print("\n-- app dirs (without optional 'appauthor')") + print("\n-- app dirs (without optional 'appauthor')") # noqa: T201 dirs = 
PlatformDirs(app_name) for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") + print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201 - print("\n-- app dirs (with disabled 'appauthor')") + print("\n-- app dirs (with disabled 'appauthor')") # noqa: T201 dirs = PlatformDirs(app_name, appauthor=False) for prop in PROPS: - print(f"{prop}: {getattr(dirs, prop)}") + print(f"{prop}: {getattr(dirs, prop)}") # noqa: T201 if __name__ == "__main__": diff --git a/src/pip/_vendor/platformdirs/android.py b/src/pip/_vendor/platformdirs/android.py index f6de7451b25..76527dda41f 100644 --- a/src/pip/_vendor/platformdirs/android.py +++ b/src/pip/_vendor/platformdirs/android.py @@ -1,3 +1,4 @@ +"""Android.""" from __future__ import annotations import os @@ -30,7 +31,8 @@ def site_data_dir(self) -> str: @property def user_config_dir(self) -> str: """ - :return: config directory tied to the user, e.g. ``/data/user///shared_prefs/`` + :return: config directory tied to the user, e.g. \ + ``/data/user///shared_prefs/`` """ return self._append_app_name_and_version(cast(str, _android_folder()), "shared_prefs") @@ -62,16 +64,34 @@ def user_log_dir(self) -> str: """ path = self.user_cache_dir if self.opinion: - path = os.path.join(path, "log") + path = os.path.join(path, "log") # noqa: PTH118 return path @property def user_documents_dir(self) -> str: - """ - :return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents`` - """ + """:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``""" return _android_documents_folder() + @property + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user e.g. ``/storage/emulated/0/Downloads``""" + return _android_downloads_folder() + + @property + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user e.g. 
``/storage/emulated/0/Pictures``""" + return _android_pictures_folder() + + @property + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user e.g. ``/storage/emulated/0/DCIM/Camera``""" + return _android_videos_folder() + + @property + def user_music_dir(self) -> str: + """:return: music directory tied to the user e.g. ``/storage/emulated/0/Music``""" + return _android_music_folder() + @property def user_runtime_dir(self) -> str: """ @@ -80,20 +100,20 @@ def user_runtime_dir(self) -> str: """ path = self.user_cache_dir if self.opinion: - path = os.path.join(path, "tmp") + path = os.path.join(path, "tmp") # noqa: PTH118 return path @lru_cache(maxsize=1) def _android_folder() -> str | None: - """:return: base folder for the Android OS or None if cannot be found""" + """:return: base folder for the Android OS or None if it cannot be found""" try: # First try to get path to android app via pyjnius from jnius import autoclass - Context = autoclass("android.content.Context") # noqa: N806 - result: str | None = Context.getFilesDir().getParentFile().getAbsolutePath() - except Exception: + context = autoclass("android.content.Context") + result: str | None = context.getFilesDir().getParentFile().getAbsolutePath() + except Exception: # noqa: BLE001 # if fails find an android folder looking path on the sys.path pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files") for path in sys.path: @@ -112,15 +132,79 @@ def _android_documents_folder() -> str: try: from jnius import autoclass - Context = autoclass("android.content.Context") # noqa: N806 - Environment = autoclass("android.os.Environment") # noqa: N806 - documents_dir: str = Context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath() - except Exception: + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + documents_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOCUMENTS).getAbsolutePath() + except Exception: # 
noqa: BLE001 documents_dir = "/storage/emulated/0/Documents" return documents_dir +@lru_cache(maxsize=1) +def _android_downloads_folder() -> str: + """:return: downloads folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + downloads_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DOWNLOADS).getAbsolutePath() + except Exception: # noqa: BLE001 + downloads_dir = "/storage/emulated/0/Downloads" + + return downloads_dir + + +@lru_cache(maxsize=1) +def _android_pictures_folder() -> str: + """:return: pictures folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + pictures_dir: str = context.getExternalFilesDir(environment.DIRECTORY_PICTURES).getAbsolutePath() + except Exception: # noqa: BLE001 + pictures_dir = "/storage/emulated/0/Pictures" + + return pictures_dir + + +@lru_cache(maxsize=1) +def _android_videos_folder() -> str: + """:return: videos folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + videos_dir: str = context.getExternalFilesDir(environment.DIRECTORY_DCIM).getAbsolutePath() + except Exception: # noqa: BLE001 + videos_dir = "/storage/emulated/0/DCIM/Camera" + + return videos_dir + + +@lru_cache(maxsize=1) +def _android_music_folder() -> str: + """:return: music folder for the Android OS""" + # Get directories with pyjnius + try: + from jnius import autoclass + + context = autoclass("android.content.Context") + environment = autoclass("android.os.Environment") + music_dir: str = context.getExternalFilesDir(environment.DIRECTORY_MUSIC).getAbsolutePath() + except Exception: # noqa: BLE001 + music_dir = 
"/storage/emulated/0/Music" + + return music_dir + + __all__ = [ "Android", ] diff --git a/src/pip/_vendor/platformdirs/api.py b/src/pip/_vendor/platformdirs/api.py index f140e8b6db8..d64ebb9d45c 100644 --- a/src/pip/_vendor/platformdirs/api.py +++ b/src/pip/_vendor/platformdirs/api.py @@ -1,29 +1,33 @@ +"""Base API.""" from __future__ import annotations import os -import sys from abc import ABC, abstractmethod from pathlib import Path +from typing import TYPE_CHECKING -if sys.version_info >= (3, 8): # pragma: no branch - from typing import Literal # pragma: no cover +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 8): # pragma: no cover (py38+) + from typing import Literal + else: # pragma: no cover (py38+) + from pip._vendor.typing_extensions import Literal class PlatformDirsABC(ABC): - """ - Abstract base class for platform directories. - """ + """Abstract base class for platform directories.""" - def __init__( + def __init__( # noqa: PLR0913 self, appname: str | None = None, appauthor: str | None | Literal[False] = None, version: str | None = None, - roaming: bool = False, - multipath: bool = False, - opinion: bool = True, - ensure_exists: bool = False, - ): + roaming: bool = False, # noqa: FBT001, FBT002 + multipath: bool = False, # noqa: FBT001, FBT002 + opinion: bool = True, # noqa: FBT001, FBT002 + ensure_exists: bool = False, # noqa: FBT001, FBT002 + ) -> None: """ Create a new platform directory. 
@@ -70,7 +74,7 @@ def _append_app_name_and_version(self, *base: str) -> str: params.append(self.appname) if self.version: params.append(self.version) - path = os.path.join(base[0], *params) + path = os.path.join(base[0], *params) # noqa: PTH118 self._optionally_create_directory(path) return path @@ -123,6 +127,26 @@ def user_log_dir(self) -> str: def user_documents_dir(self) -> str: """:return: documents directory tied to the user""" + @property + @abstractmethod + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user""" + + @property + @abstractmethod + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user""" + + @property + @abstractmethod + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user""" + + @property + @abstractmethod + def user_music_dir(self) -> str: + """:return: music directory tied to the user""" + @property @abstractmethod def user_runtime_dir(self) -> str: @@ -173,6 +197,26 @@ def user_documents_path(self) -> Path: """:return: documents path tied to the user""" return Path(self.user_documents_dir) + @property + def user_downloads_path(self) -> Path: + """:return: downloads path tied to the user""" + return Path(self.user_downloads_dir) + + @property + def user_pictures_path(self) -> Path: + """:return: pictures path tied to the user""" + return Path(self.user_pictures_dir) + + @property + def user_videos_path(self) -> Path: + """:return: videos path tied to the user""" + return Path(self.user_videos_dir) + + @property + def user_music_path(self) -> Path: + """:return: music path tied to the user""" + return Path(self.user_music_dir) + @property def user_runtime_path(self) -> Path: """:return: runtime path tied to the user""" diff --git a/src/pip/_vendor/platformdirs/macos.py b/src/pip/_vendor/platformdirs/macos.py index ec9751129c1..a753e2a3aa2 100644 --- a/src/pip/_vendor/platformdirs/macos.py +++ b/src/pip/_vendor/platformdirs/macos.py @@ -1,6 +1,7 @@ 
+"""macOS.""" from __future__ import annotations -import os +import os.path from .api import PlatformDirsABC @@ -17,7 +18,7 @@ class MacOS(PlatformDirsABC): @property def user_data_dir(self) -> str: """:return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support")) # noqa: PTH111 @property def site_data_dir(self) -> str: @@ -37,7 +38,7 @@ def site_config_dir(self) -> str: @property def user_cache_dir(self) -> str: """:return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches")) # noqa: PTH111 @property def site_cache_dir(self) -> str: @@ -52,17 +53,37 @@ def user_state_dir(self) -> str: @property def user_log_dir(self) -> str: """:return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs")) # noqa: PTH111 @property def user_documents_dir(self) -> str: """:return: documents directory tied to the user, e.g. ``~/Documents``""" - return os.path.expanduser("~/Documents") + return os.path.expanduser("~/Documents") # noqa: PTH111 + + @property + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user, e.g. ``~/Downloads``""" + return os.path.expanduser("~/Downloads") # noqa: PTH111 + + @property + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user, e.g. 
``~/Pictures``""" + return os.path.expanduser("~/Pictures") # noqa: PTH111 + + @property + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user, e.g. ``~/Movies``""" + return os.path.expanduser("~/Movies") # noqa: PTH111 + + @property + def user_music_dir(self) -> str: + """:return: music directory tied to the user, e.g. ``~/Music``""" + return os.path.expanduser("~/Music") # noqa: PTH111 @property def user_runtime_dir(self) -> str: """:return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``""" - return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems")) + return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems")) # noqa: PTH111 __all__ = [ diff --git a/src/pip/_vendor/platformdirs/unix.py b/src/pip/_vendor/platformdirs/unix.py index 17d355da9f4..468b0ab4957 100644 --- a/src/pip/_vendor/platformdirs/unix.py +++ b/src/pip/_vendor/platformdirs/unix.py @@ -1,3 +1,4 @@ +"""Unix.""" from __future__ import annotations import os @@ -7,12 +8,14 @@ from .api import PlatformDirsABC -if sys.platform.startswith("linux"): # pragma: no branch # no op check, only to please the type checker - from os import getuid -else: +if sys.platform == "win32": def getuid() -> int: - raise RuntimeError("should only be used on Linux") + msg = "should only be used on Unix" + raise RuntimeError(msg) + +else: + from os import getuid class Unix(PlatformDirsABC): @@ -36,7 +39,7 @@ def user_data_dir(self) -> str: """ path = os.environ.get("XDG_DATA_HOME", "") if not path.strip(): - path = os.path.expanduser("~/.local/share") + path = os.path.expanduser("~/.local/share") # noqa: PTH111 return self._append_app_name_and_version(path) @property @@ -56,7 +59,7 @@ def _with_multi_path(self, path: str) -> str: path_list = path.split(os.pathsep) if not self.multipath: path_list = path_list[0:1] - path_list = 
[self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list] + path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list] # noqa: PTH111 return os.pathsep.join(path_list) @property @@ -67,7 +70,7 @@ def user_config_dir(self) -> str: """ path = os.environ.get("XDG_CONFIG_HOME", "") if not path.strip(): - path = os.path.expanduser("~/.config") + path = os.path.expanduser("~/.config") # noqa: PTH111 return self._append_app_name_and_version(path) @property @@ -91,15 +94,13 @@ def user_cache_dir(self) -> str: """ path = os.environ.get("XDG_CACHE_HOME", "") if not path.strip(): - path = os.path.expanduser("~/.cache") + path = os.path.expanduser("~/.cache") # noqa: PTH111 return self._append_app_name_and_version(path) @property def site_cache_dir(self) -> str: - """ - :return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version`` - """ - return self._append_app_name_and_version("/var/tmp") + """:return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version``""" + return self._append_app_name_and_version("/var/tmp") # noqa: S108 @property def user_state_dir(self) -> str: @@ -109,41 +110,60 @@ def user_state_dir(self) -> str: """ path = os.environ.get("XDG_STATE_HOME", "") if not path.strip(): - path = os.path.expanduser("~/.local/state") + path = os.path.expanduser("~/.local/state") # noqa: PTH111 return self._append_app_name_and_version(path) @property def user_log_dir(self) -> str: - """ - :return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it - """ + """:return: log directory tied to the user, same as `user_state_dir` if not opinionated else ``log`` in it""" path = self.user_state_dir if self.opinion: - path = os.path.join(path, "log") + path = os.path.join(path, "log") # noqa: PTH118 return path @property def user_documents_dir(self) -> str: - """ - :return: documents directory tied to the user, e.g. 
``~/Documents`` - """ - documents_dir = _get_user_dirs_folder("XDG_DOCUMENTS_DIR") - if documents_dir is None: - documents_dir = os.environ.get("XDG_DOCUMENTS_DIR", "").strip() - if not documents_dir: - documents_dir = os.path.expanduser("~/Documents") + """:return: documents directory tied to the user, e.g. ``~/Documents``""" + return _get_user_media_dir("XDG_DOCUMENTS_DIR", "~/Documents") - return documents_dir + @property + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user, e.g. ``~/Downloads``""" + return _get_user_media_dir("XDG_DOWNLOAD_DIR", "~/Downloads") + + @property + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user, e.g. ``~/Pictures``""" + return _get_user_media_dir("XDG_PICTURES_DIR", "~/Pictures") + + @property + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user, e.g. ``~/Videos``""" + return _get_user_media_dir("XDG_VIDEOS_DIR", "~/Videos") + + @property + def user_music_dir(self) -> str: + """:return: music directory tied to the user, e.g. ``~/Music``""" + return _get_user_media_dir("XDG_MUSIC_DIR", "~/Music") @property def user_runtime_dir(self) -> str: """ :return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or - ``$XDG_RUNTIME_DIR/$appname/$version`` + ``$XDG_RUNTIME_DIR/$appname/$version``. + + For FreeBSD/OpenBSD/NetBSD, it would return ``/var/run/user/$(id -u)/$appname/$version`` if + exists, otherwise ``/tmp/runtime-$(id -u)/$appname/$version``, if``$XDG_RUNTIME_DIR`` + is not set. 
""" path = os.environ.get("XDG_RUNTIME_DIR", "") if not path.strip(): - path = f"/run/user/{getuid()}" + if sys.platform.startswith(("freebsd", "openbsd", "netbsd")): + path = f"/var/run/user/{getuid()}" + if not Path(path).exists(): + path = f"/tmp/runtime-{getuid()}" # noqa: S108 + else: + path = f"/run/user/{getuid()}" return self._append_app_name_and_version(path) @property @@ -168,13 +188,23 @@ def _first_item_as_path_if_multipath(self, directory: str) -> Path: return Path(directory) +def _get_user_media_dir(env_var: str, fallback_tilde_path: str) -> str: + media_dir = _get_user_dirs_folder(env_var) + if media_dir is None: + media_dir = os.environ.get(env_var, "").strip() + if not media_dir: + media_dir = os.path.expanduser(fallback_tilde_path) # noqa: PTH111 + + return media_dir + + def _get_user_dirs_folder(key: str) -> str | None: - """Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/""" - user_dirs_config_path = os.path.join(Unix().user_config_dir, "user-dirs.dirs") - if os.path.exists(user_dirs_config_path): + """Return directory from user-dirs.dirs config file. 
See https://freedesktop.org/wiki/Software/xdg-user-dirs/.""" + user_dirs_config_path = Path(Unix().user_config_dir) / "user-dirs.dirs" + if user_dirs_config_path.exists(): parser = ConfigParser() - with open(user_dirs_config_path) as stream: + with user_dirs_config_path.open() as stream: # Add fake section header, so ConfigParser doesn't complain parser.read_string(f"[top]\n{stream.read()}") @@ -183,8 +213,7 @@ def _get_user_dirs_folder(key: str) -> str | None: path = parser["top"][key].strip('"') # Handle relative home paths - path = path.replace("$HOME", os.path.expanduser("~")) - return path + return path.replace("$HOME", os.path.expanduser("~")) # noqa: PTH111 return None diff --git a/src/pip/_vendor/platformdirs/version.py b/src/pip/_vendor/platformdirs/version.py index d906a2c99e6..dc8c44cf7b2 100644 --- a/src/pip/_vendor/platformdirs/version.py +++ b/src/pip/_vendor/platformdirs/version.py @@ -1,4 +1,4 @@ # file generated by setuptools_scm # don't change, don't track in version control -__version__ = version = '3.2.0' -__version_tuple__ = version_tuple = (3, 2, 0) +__version__ = version = '3.8.1' +__version_tuple__ = version_tuple = (3, 8, 1) diff --git a/src/pip/_vendor/platformdirs/windows.py b/src/pip/_vendor/platformdirs/windows.py index e7573c3d6ae..b52c9c6ea89 100644 --- a/src/pip/_vendor/platformdirs/windows.py +++ b/src/pip/_vendor/platformdirs/windows.py @@ -1,16 +1,21 @@ +"""Windows.""" from __future__ import annotations import ctypes import os import sys from functools import lru_cache -from typing import Callable +from typing import TYPE_CHECKING from .api import PlatformDirsABC +if TYPE_CHECKING: + from collections.abc import Callable + class Windows(PlatformDirsABC): - """`MSDN on where to store app data files + """ + `MSDN on where to store app data files `_. 
Makes use of the `appname `, @@ -43,7 +48,7 @@ def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str: params.append(opinion_value) if self.version: params.append(self.version) - path = os.path.join(path, *params) + path = os.path.join(path, *params) # noqa: PTH118 self._optionally_create_directory(path) return path @@ -85,36 +90,53 @@ def user_state_dir(self) -> str: @property def user_log_dir(self) -> str: - """ - :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it - """ + """:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it""" path = self.user_data_dir if self.opinion: - path = os.path.join(path, "Logs") + path = os.path.join(path, "Logs") # noqa: PTH118 self._optionally_create_directory(path) return path @property def user_documents_dir(self) -> str: - """ - :return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents`` - """ + """:return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``""" return os.path.normpath(get_win_folder("CSIDL_PERSONAL")) + @property + def user_downloads_dir(self) -> str: + """:return: downloads directory tied to the user e.g. ``%USERPROFILE%\\Downloads``""" + return os.path.normpath(get_win_folder("CSIDL_DOWNLOADS")) + + @property + def user_pictures_dir(self) -> str: + """:return: pictures directory tied to the user e.g. ``%USERPROFILE%\\Pictures``""" + return os.path.normpath(get_win_folder("CSIDL_MYPICTURES")) + + @property + def user_videos_dir(self) -> str: + """:return: videos directory tied to the user e.g. ``%USERPROFILE%\\Videos``""" + return os.path.normpath(get_win_folder("CSIDL_MYVIDEO")) + + @property + def user_music_dir(self) -> str: + """:return: music directory tied to the user e.g. 
``%USERPROFILE%\\Music``""" + return os.path.normpath(get_win_folder("CSIDL_MYMUSIC")) + @property def user_runtime_dir(self) -> str: """ :return: runtime directory tied to the user, e.g. ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname`` """ - path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp")) + path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp")) # noqa: PTH118 return self._append_parts(path) def get_win_folder_from_env_vars(csidl_name: str) -> str: """Get folder from environment variables.""" - if csidl_name == "CSIDL_PERSONAL": # does not have an environment name - return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents") + result = get_win_folder_if_csidl_name_not_env_var(csidl_name) + if result is not None: + return result env_var_name = { "CSIDL_APPDATA": "APPDATA", @@ -122,28 +144,54 @@ def get_win_folder_from_env_vars(csidl_name: str) -> str: "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA", }.get(csidl_name) if env_var_name is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") + msg = f"Unknown CSIDL name: {csidl_name}" + raise ValueError(msg) result = os.environ.get(env_var_name) if result is None: - raise ValueError(f"Unset environment variable: {env_var_name}") + msg = f"Unset environment variable: {env_var_name}" + raise ValueError(msg) return result +def get_win_folder_if_csidl_name_not_env_var(csidl_name: str) -> str | None: + """Get folder for a CSIDL name that does not exist as an environment variable.""" + if csidl_name == "CSIDL_PERSONAL": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents") # noqa: PTH118 + + if csidl_name == "CSIDL_DOWNLOADS": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Downloads") # noqa: PTH118 + + if csidl_name == "CSIDL_MYPICTURES": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Pictures") # noqa: PTH118 + + if csidl_name == "CSIDL_MYVIDEO": + return 
os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Videos") # noqa: PTH118 + + if csidl_name == "CSIDL_MYMUSIC": + return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Music") # noqa: PTH118 + return None + + def get_win_folder_from_registry(csidl_name: str) -> str: - """Get folder from the registry. + """ + Get folder from the registry. - This is a fallback technique at best. I'm not sure if using the - registry for this guarantees us the correct answer for all CSIDL_* - names. + This is a fallback technique at best. I'm not sure if using the registry for these guarantees us the correct answer + for all CSIDL_* names. """ shell_folder_name = { "CSIDL_APPDATA": "AppData", "CSIDL_COMMON_APPDATA": "Common AppData", "CSIDL_LOCAL_APPDATA": "Local AppData", "CSIDL_PERSONAL": "Personal", + "CSIDL_DOWNLOADS": "{374DE290-123F-4565-9164-39C4925E467B}", + "CSIDL_MYPICTURES": "My Pictures", + "CSIDL_MYVIDEO": "My Video", + "CSIDL_MYMUSIC": "My Music", }.get(csidl_name) if shell_folder_name is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") + msg = f"Unknown CSIDL name: {csidl_name}" + raise ValueError(msg) if sys.platform != "win32": # only needed for mypy type checker to know that this code runs only on Windows raise NotImplementedError import winreg @@ -155,25 +203,37 @@ def get_win_folder_from_registry(csidl_name: str) -> str: def get_win_folder_via_ctypes(csidl_name: str) -> str: """Get folder with ctypes.""" + # There is no 'CSIDL_DOWNLOADS'. + # Use 'CSIDL_PROFILE' (40) and append the default folder 'Downloads' instead. 
+ # https://learn.microsoft.com/en-us/windows/win32/shell/knownfolderid + csidl_const = { "CSIDL_APPDATA": 26, "CSIDL_COMMON_APPDATA": 35, "CSIDL_LOCAL_APPDATA": 28, "CSIDL_PERSONAL": 5, + "CSIDL_MYPICTURES": 39, + "CSIDL_MYVIDEO": 14, + "CSIDL_MYMUSIC": 13, + "CSIDL_DOWNLOADS": 40, }.get(csidl_name) if csidl_const is None: - raise ValueError(f"Unknown CSIDL name: {csidl_name}") + msg = f"Unknown CSIDL name: {csidl_name}" + raise ValueError(msg) buf = ctypes.create_unicode_buffer(1024) windll = getattr(ctypes, "windll") # noqa: B009 # using getattr to avoid false positive with mypy type checker windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) # Downgrade to short path name if it has highbit chars. - if any(ord(c) > 255 for c in buf): + if any(ord(c) > 255 for c in buf): # noqa: PLR2004 buf2 = ctypes.create_unicode_buffer(1024) if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): buf = buf2 + if csidl_name == "CSIDL_DOWNLOADS": + return os.path.join(buf.value, "Downloads") # noqa: PTH118 + return buf.value diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index dcf89dc04c5..07671fb58af 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -4,7 +4,7 @@ distlib==0.3.6 distro==1.8.0 msgpack==1.0.5 packaging==21.3 -platformdirs==3.2.0 +platformdirs==3.8.1 pyparsing==3.0.9 pyproject-hooks==1.0.0 requests==2.31.0 From 6ee8884ac4a241f665aa712bcbe0e185012bccdb Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 9 Jul 2023 21:30:39 +0100 Subject: [PATCH 573/730] Upgrade pyparsing to 3.1.0 --- news/pyparsing.vendor.rst | 1 + src/pip/_vendor/pyparsing/__init__.py | 75 +- src/pip/_vendor/pyparsing/actions.py | 34 +- src/pip/_vendor/pyparsing/common.py | 58 +- src/pip/_vendor/pyparsing/core.py | 1197 +++++++++++------ src/pip/_vendor/pyparsing/diagram/__init__.py | 32 +- src/pip/_vendor/pyparsing/exceptions.py | 64 +- src/pip/_vendor/pyparsing/helpers.py | 196 +-- src/pip/_vendor/pyparsing/results.py | 128 +- 
src/pip/_vendor/pyparsing/testing.py | 24 +- src/pip/_vendor/pyparsing/unicode.py | 101 +- src/pip/_vendor/pyparsing/util.py | 89 +- src/pip/_vendor/vendor.txt | 2 +- 13 files changed, 1232 insertions(+), 769 deletions(-) create mode 100644 news/pyparsing.vendor.rst diff --git a/news/pyparsing.vendor.rst b/news/pyparsing.vendor.rst new file mode 100644 index 00000000000..9feffb2460f --- /dev/null +++ b/news/pyparsing.vendor.rst @@ -0,0 +1 @@ +Upgrade pyparsing to 3.1.0 diff --git a/src/pip/_vendor/pyparsing/__init__.py b/src/pip/_vendor/pyparsing/__init__.py index 75372500ed9..88bc10ac18a 100644 --- a/src/pip/_vendor/pyparsing/__init__.py +++ b/src/pip/_vendor/pyparsing/__init__.py @@ -56,7 +56,7 @@ :class:`'|'`, :class:`'^'` and :class:`'&'` operators. The :class:`ParseResults` object returned from -:class:`ParserElement.parseString` can be +:class:`ParserElement.parse_string` can be accessed as a nested list, a dictionary, or an object with named attributes. @@ -85,11 +85,11 @@ and :class:`'&'` operators to combine simple expressions into more complex ones - associate names with your parsed results using - :class:`ParserElement.setResultsName` + :class:`ParserElement.set_results_name` - access the parsed data, which is returned as a :class:`ParseResults` object - - find some helpful expression short-cuts like :class:`delimitedList` - and :class:`oneOf` + - find some helpful expression short-cuts like :class:`DelimitedList` + and :class:`one_of` - find more useful common expressions in the :class:`pyparsing_common` namespace class """ @@ -106,30 +106,22 @@ class version_info(NamedTuple): @property def __version__(self): return ( - "{}.{}.{}".format(self.major, self.minor, self.micro) + f"{self.major}.{self.minor}.{self.micro}" + ( - "{}{}{}".format( - "r" if self.releaselevel[0] == "c" else "", - self.releaselevel[0], - self.serial, - ), + f"{'r' if self.releaselevel[0] == 'c' else ''}{self.releaselevel[0]}{self.serial}", "", )[self.releaselevel == "final"] ) def 
__str__(self): - return "{} {} / {}".format(__name__, self.__version__, __version_time__) + return f"{__name__} {self.__version__} / {__version_time__}" def __repr__(self): - return "{}.{}({})".format( - __name__, - type(self).__name__, - ", ".join("{}={!r}".format(*nv) for nv in zip(self._fields, self)), - ) + return f"{__name__}.{type(self).__name__}({', '.join('{}={!r}'.format(*nv) for nv in zip(self._fields, self))})" -__version_info__ = version_info(3, 0, 9, "final", 0) -__version_time__ = "05 May 2022 07:02 UTC" +__version_info__ = version_info(3, 1, 0, "final", 1) +__version_time__ = "18 Jun 2023 14:05 UTC" __version__ = __version_info__.__version__ __versionTime__ = __version_time__ __author__ = "Paul McGuire " @@ -139,9 +131,9 @@ def __repr__(self): from .actions import * from .core import __diag__, __compat__ from .results import * -from .core import * +from .core import * # type: ignore[misc, assignment] from .core import _builtin_exprs as core_builtin_exprs -from .helpers import * +from .helpers import * # type: ignore[misc, assignment] from .helpers import _builtin_exprs as helper_builtin_exprs from .unicode import unicode_set, UnicodeRangeList, pyparsing_unicode as unicode @@ -153,11 +145,11 @@ def __repr__(self): # define backward compat synonyms if "pyparsing_unicode" not in globals(): - pyparsing_unicode = unicode + pyparsing_unicode = unicode # type: ignore[misc] if "pyparsing_common" not in globals(): - pyparsing_common = common + pyparsing_common = common # type: ignore[misc] if "pyparsing_test" not in globals(): - pyparsing_test = testing + pyparsing_test = testing # type: ignore[misc] core_builtin_exprs += common_builtin_exprs + helper_builtin_exprs @@ -174,7 +166,9 @@ def __repr__(self): "CaselessKeyword", "CaselessLiteral", "CharsNotIn", + "CloseMatch", "Combine", + "DelimitedList", "Dict", "Each", "Empty", @@ -227,9 +221,11 @@ def __repr__(self): "alphas8bit", "any_close_tag", "any_open_tag", + "autoname_elements", "c_style_comment", "col", 
"common_html_entity", + "condition_as_parse_action", "counted_array", "cpp_style_comment", "dbl_quoted_string", @@ -241,6 +237,7 @@ def __repr__(self): "html_comment", "identchars", "identbodychars", + "infix_notation", "java_style_comment", "line", "line_end", @@ -255,8 +252,12 @@ def __repr__(self): "null_debug_action", "nums", "one_of", + "original_text_for", "printables", "punc8bit", + "pyparsing_common", + "pyparsing_test", + "pyparsing_unicode", "python_style_comment", "quoted_string", "remove_quotes", @@ -267,28 +268,20 @@ def __repr__(self): "srange", "string_end", "string_start", + "token_map", "trace_parse_action", + "ungroup", + "unicode_set", "unicode_string", "with_attribute", - "indentedBlock", - "original_text_for", - "ungroup", - "infix_notation", - "locatedExpr", "with_class", - "CloseMatch", - "token_map", - "pyparsing_common", - "pyparsing_unicode", - "unicode_set", - "condition_as_parse_action", - "pyparsing_test", # pre-PEP8 compatibility names "__versionTime__", "anyCloseTag", "anyOpenTag", "cStyleComment", "commonHTMLEntity", + "conditionAsParseAction", "countedArray", "cppStyleComment", "dblQuotedString", @@ -296,9 +289,12 @@ def __repr__(self): "delimitedList", "dictOf", "htmlComment", + "indentedBlock", + "infixNotation", "javaStyleComment", "lineEnd", "lineStart", + "locatedExpr", "makeHTMLTags", "makeXMLTags", "matchOnlyAtCol", @@ -308,6 +304,7 @@ def __repr__(self): "nullDebugAction", "oneOf", "opAssoc", + "originalTextFor", "pythonStyleComment", "quotedString", "removeQuotes", @@ -317,15 +314,9 @@ def __repr__(self): "sglQuotedString", "stringEnd", "stringStart", + "tokenMap", "traceParseAction", "unicodeString", "withAttribute", - "indentedBlock", - "originalTextFor", - "infixNotation", - "locatedExpr", "withClass", - "tokenMap", - "conditionAsParseAction", - "autoname_elements", ] diff --git a/src/pip/_vendor/pyparsing/actions.py b/src/pip/_vendor/pyparsing/actions.py index f72c66e7431..ca6e4c6afb4 100644 --- 
a/src/pip/_vendor/pyparsing/actions.py +++ b/src/pip/_vendor/pyparsing/actions.py @@ -1,7 +1,7 @@ # actions.py from .exceptions import ParseException -from .util import col +from .util import col, replaced_by_pep8 class OnlyOnce: @@ -38,7 +38,7 @@ def match_only_at_col(n): def verify_col(strg, locn, toks): if col(locn, strg) != n: - raise ParseException(strg, locn, "matched token not at column {}".format(n)) + raise ParseException(strg, locn, f"matched token not at column {n}") return verify_col @@ -148,15 +148,13 @@ def pa(s, l, tokens): raise ParseException( s, l, - "attribute {!r} has value {!r}, must be {!r}".format( - attrName, tokens[attrName], attrValue - ), + f"attribute {attrName!r} has value {tokens[attrName]!r}, must be {attrValue!r}", ) return pa -with_attribute.ANY_VALUE = object() +with_attribute.ANY_VALUE = object() # type: ignore [attr-defined] def with_class(classname, namespace=""): @@ -195,13 +193,25 @@ def with_class(classname, namespace=""): 1 4 0 1 0 1,3 2,3 1,1 """ - classattr = "{}:class".format(namespace) if namespace else "class" + classattr = f"{namespace}:class" if namespace else "class" return with_attribute(**{classattr: classname}) # pre-PEP8 compatibility symbols -replaceWith = replace_with -removeQuotes = remove_quotes -withAttribute = with_attribute -withClass = with_class -matchOnlyAtCol = match_only_at_col +# fmt: off +@replaced_by_pep8(replace_with) +def replaceWith(): ... + +@replaced_by_pep8(remove_quotes) +def removeQuotes(): ... + +@replaced_by_pep8(with_attribute) +def withAttribute(): ... + +@replaced_by_pep8(with_class) +def withClass(): ... + +@replaced_by_pep8(match_only_at_col) +def matchOnlyAtCol(): ... 
+ +# fmt: on diff --git a/src/pip/_vendor/pyparsing/common.py b/src/pip/_vendor/pyparsing/common.py index 1859fb79cc4..7a666b276df 100644 --- a/src/pip/_vendor/pyparsing/common.py +++ b/src/pip/_vendor/pyparsing/common.py @@ -1,6 +1,6 @@ # common.py from .core import * -from .helpers import delimited_list, any_open_tag, any_close_tag +from .helpers import DelimitedList, any_open_tag, any_close_tag from datetime import datetime @@ -22,17 +22,17 @@ class pyparsing_common: Parse actions: - - :class:`convertToInteger` - - :class:`convertToFloat` - - :class:`convertToDate` - - :class:`convertToDatetime` - - :class:`stripHTMLTags` - - :class:`upcaseTokens` - - :class:`downcaseTokens` + - :class:`convert_to_integer` + - :class:`convert_to_float` + - :class:`convert_to_date` + - :class:`convert_to_datetime` + - :class:`strip_html_tags` + - :class:`upcase_tokens` + - :class:`downcase_tokens` Example:: - pyparsing_common.number.runTests(''' + pyparsing_common.number.run_tests(''' # any int or real number, returned as the appropriate type 100 -100 @@ -42,7 +42,7 @@ class pyparsing_common: 1e-12 ''') - pyparsing_common.fnumber.runTests(''' + pyparsing_common.fnumber.run_tests(''' # any int or real number, returned as float 100 -100 @@ -52,19 +52,19 @@ class pyparsing_common: 1e-12 ''') - pyparsing_common.hex_integer.runTests(''' + pyparsing_common.hex_integer.run_tests(''' # hex numbers 100 FF ''') - pyparsing_common.fraction.runTests(''' + pyparsing_common.fraction.run_tests(''' # fractions 1/2 -3/4 ''') - pyparsing_common.mixed_integer.runTests(''' + pyparsing_common.mixed_integer.run_tests(''' # mixed fractions 1 1/2 @@ -73,8 +73,8 @@ class pyparsing_common: ''') import uuid - pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) - pyparsing_common.uuid.runTests(''' + pyparsing_common.uuid.set_parse_action(token_map(uuid.UUID)) + pyparsing_common.uuid.run_tests(''' # uuid 12345678-1234-5678-1234-567812345678 ''') @@ -260,8 +260,8 @@ def convert_to_date(fmt: str = 
"%Y-%m-%d"): Example:: date_expr = pyparsing_common.iso8601_date.copy() - date_expr.setParseAction(pyparsing_common.convertToDate()) - print(date_expr.parseString("1999-12-31")) + date_expr.set_parse_action(pyparsing_common.convert_to_date()) + print(date_expr.parse_string("1999-12-31")) prints:: @@ -287,8 +287,8 @@ def convert_to_datetime(fmt: str = "%Y-%m-%dT%H:%M:%S.%f"): Example:: dt_expr = pyparsing_common.iso8601_datetime.copy() - dt_expr.setParseAction(pyparsing_common.convertToDatetime()) - print(dt_expr.parseString("1999-12-31T23:59:59.999")) + dt_expr.set_parse_action(pyparsing_common.convert_to_datetime()) + print(dt_expr.parse_string("1999-12-31T23:59:59.999")) prints:: @@ -326,9 +326,9 @@ def strip_html_tags(s: str, l: int, tokens: ParseResults): # strip HTML links from normal text text = 'More info at the pyparsing wiki page' - td, td_end = makeHTMLTags("TD") - table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end - print(table_text.parseString(text).body) + td, td_end = make_html_tags("TD") + table_text = td + SkipTo(td_end).set_parse_action(pyparsing_common.strip_html_tags)("body") + td_end + print(table_text.parse_string(text).body) Prints:: @@ -348,7 +348,7 @@ def strip_html_tags(s: str, l: int, tokens: ParseResults): .streamline() .set_name("commaItem") ) - comma_separated_list = delimited_list( + comma_separated_list = DelimitedList( Opt(quoted_string.copy() | _commasepitem, default="") ).set_name("comma separated list") """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" @@ -363,7 +363,7 @@ def strip_html_tags(s: str, l: int, tokens: ParseResults): url = Regex( # https://mathiasbynens.be/demo/url-regex # https://gist.github.com/dperini/729294 - r"^" + + r"(?P" + # protocol identifier (optional) # short syntax // still required r"(?:(?:(?Phttps?|ftp):)?\/\/)" + @@ -405,18 +405,26 @@ def strip_html_tags(s: str, l: int, tokens: ParseResults): r"(\?(?P[^#]*))?" 
+ # fragment (optional) r"(#(?P\S*))?" + - r"$" + r")" ).set_name("url") + """URL (http/https/ftp scheme)""" # fmt: on # pre-PEP8 compatibility names convertToInteger = convert_to_integer + """Deprecated - use :class:`convert_to_integer`""" convertToFloat = convert_to_float + """Deprecated - use :class:`convert_to_float`""" convertToDate = convert_to_date + """Deprecated - use :class:`convert_to_date`""" convertToDatetime = convert_to_datetime + """Deprecated - use :class:`convert_to_datetime`""" stripHTMLTags = strip_html_tags + """Deprecated - use :class:`strip_html_tags`""" upcaseTokens = upcase_tokens + """Deprecated - use :class:`upcase_tokens`""" downcaseTokens = downcase_tokens + """Deprecated - use :class:`downcase_tokens`""" _builtin_exprs = [ diff --git a/src/pip/_vendor/pyparsing/core.py b/src/pip/_vendor/pyparsing/core.py index 6ff3c766f7d..8d5a856ecd6 100644 --- a/src/pip/_vendor/pyparsing/core.py +++ b/src/pip/_vendor/pyparsing/core.py @@ -1,19 +1,22 @@ # # core.py # + +from collections import deque import os import typing from typing import ( - NamedTuple, - Union, - Callable, Any, + Callable, Generator, - Tuple, List, - TextIO, - Set, + NamedTuple, Sequence, + Set, + TextIO, + Tuple, + Union, + cast, ) from abc import ABC, abstractmethod from enum import Enum @@ -40,6 +43,7 @@ _flatten, LRUMemo as _LRUMemo, UnboundedMemo as _UnboundedMemo, + replaced_by_pep8, ) from .exceptions import * from .actions import * @@ -134,6 +138,7 @@ def enable_all_warnings(cls) -> None: class Diagnostics(Enum): """ Diagnostic configuration (all default to disabled) + - ``warn_multiple_tokens_in_named_alternation`` - flag to enable warnings when a results name is defined on a :class:`MatchFirst` or :class:`Or` expression with one or more :class:`And` subexpressions - ``warn_ungrouped_named_tokens_in_collection`` - flag to enable warnings when a results @@ -228,6 +233,8 @@ def _should_enable_warnings( } _generatorType = types.GeneratorType +ParseImplReturnType = 
Tuple[int, Any] +PostParseReturnType = Union[ParseResults, Sequence[ParseResults]] ParseAction = Union[ Callable[[], Any], Callable[[ParseResults], Any], @@ -256,7 +263,7 @@ def _should_enable_warnings( alphanums = alphas + nums printables = "".join([c for c in string.printable if c not in string.whitespace]) -_trim_arity_call_line: traceback.StackSummary = None +_trim_arity_call_line: traceback.StackSummary = None # type: ignore[assignment] def _trim_arity(func, max_limit=3): @@ -269,11 +276,6 @@ def _trim_arity(func, max_limit=3): limit = 0 found_arity = False - def extract_tb(tb, limit=0): - frames = traceback.extract_tb(tb, limit=limit) - frame_summary = frames[-1] - return [frame_summary[:2]] - # synthesize what would be returned by traceback.extract_stack at the call to # user's parse action 'func', so that we don't incur call penalty at parse time @@ -297,8 +299,10 @@ def wrapper(*args): raise else: tb = te.__traceback__ + frames = traceback.extract_tb(tb, limit=2) + frame_summary = frames[-1] trim_arity_type_error = ( - extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth + [frame_summary[:2]][-1][:2] == pa_call_line_synth ) del tb @@ -320,7 +324,7 @@ def wrapper(*args): def condition_as_parse_action( - fn: ParseCondition, message: str = None, fatal: bool = False + fn: ParseCondition, message: typing.Optional[str] = None, fatal: bool = False ) -> ParseAction: """ Function to convert a simple predicate function that returns ``True`` or ``False`` @@ -353,15 +357,9 @@ def _default_start_debug_action( cache_hit_str = "*" if cache_hit else "" print( ( - "{}Match {} at loc {}({},{})\n {}\n {}^".format( - cache_hit_str, - expr, - loc, - lineno(loc, instring), - col(loc, instring), - line(loc, instring), - " " * (col(loc, instring) - 1), - ) + f"{cache_hit_str}Match {expr} at loc {loc}({lineno(loc, instring)},{col(loc, instring)})\n" + f" {line(loc, instring)}\n" + f" {' ' * (col(loc, instring) - 1)}^" ) ) @@ -375,7 +373,7 @@ def _default_success_debug_action( 
cache_hit: bool = False, ): cache_hit_str = "*" if cache_hit else "" - print("{}Matched {} -> {}".format(cache_hit_str, expr, toks.as_list())) + print(f"{cache_hit_str}Matched {expr} -> {toks.as_list()}") def _default_exception_debug_action( @@ -386,11 +384,7 @@ def _default_exception_debug_action( cache_hit: bool = False, ): cache_hit_str = "*" if cache_hit else "" - print( - "{}Match {} failed, {} raised: {}".format( - cache_hit_str, expr, type(exc).__name__, exc - ) - ) + print(f"{cache_hit_str}Match {expr} failed, {type(exc).__name__} raised: {exc}") def null_debug_action(*args): @@ -402,7 +396,7 @@ class ParserElement(ABC): DEFAULT_WHITE_CHARS: str = " \n\t\r" verbose_stacktrace: bool = False - _literalStringClass: typing.Optional[type] = None + _literalStringClass: type = None # type: ignore[assignment] @staticmethod def set_default_whitespace_chars(chars: str) -> None: @@ -447,6 +441,18 @@ def inline_literals_using(cls: type) -> None: """ ParserElement._literalStringClass = cls + @classmethod + def using_each(cls, seq, **class_kwargs): + """ + Yields a sequence of class(obj, **class_kwargs) for obj in seq. 
+ + Example:: + + LPAR, RPAR, LBRACE, RBRACE, SEMI = Suppress.using_each("(){};") + + """ + yield from (cls(obj, **class_kwargs) for obj in seq) + class DebugActions(NamedTuple): debug_try: typing.Optional[DebugStartAction] debug_match: typing.Optional[DebugSuccessAction] @@ -455,9 +461,9 @@ class DebugActions(NamedTuple): def __init__(self, savelist: bool = False): self.parseAction: List[ParseAction] = list() self.failAction: typing.Optional[ParseFailAction] = None - self.customName = None - self._defaultName = None - self.resultsName = None + self.customName: str = None # type: ignore[assignment] + self._defaultName: typing.Optional[str] = None + self.resultsName: str = None # type: ignore[assignment] self.saveAsList = savelist self.skipWhitespace = True self.whiteChars = set(ParserElement.DEFAULT_WHITE_CHARS) @@ -490,12 +496,29 @@ def suppress_warning(self, warning_type: Diagnostics) -> "ParserElement": base.suppress_warning(Diagnostics.warn_on_parse_using_empty_Forward) # statement would normally raise a warning, but is now suppressed - print(base.parseString("x")) + print(base.parse_string("x")) """ self.suppress_warnings_.append(warning_type) return self + def visit_all(self): + """General-purpose method to yield all expressions and sub-expressions + in a grammar. Typically just for internal use. + """ + to_visit = deque([self]) + seen = set() + while to_visit: + cur = to_visit.popleft() + + # guard against looping forever through recursive grammars + if cur in seen: + continue + seen.add(cur) + + to_visit.extend(cur.recurse()) + yield cur + def copy(self) -> "ParserElement": """ Make a copy of this :class:`ParserElement`. 
Useful for defining @@ -585,11 +608,11 @@ def breaker(instring, loc, doActions=True, callPreParse=True): pdb.set_trace() return _parseMethod(instring, loc, doActions, callPreParse) - breaker._originalParseMethod = _parseMethod - self._parse = breaker + breaker._originalParseMethod = _parseMethod # type: ignore [attr-defined] + self._parse = breaker # type: ignore [assignment] else: if hasattr(self._parse, "_originalParseMethod"): - self._parse = self._parse._originalParseMethod + self._parse = self._parse._originalParseMethod # type: ignore [attr-defined, assignment] return self def set_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": @@ -601,9 +624,9 @@ def set_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": Each parse action ``fn`` is a callable method with 0-3 arguments, called as ``fn(s, loc, toks)`` , ``fn(loc, toks)`` , ``fn(toks)`` , or just ``fn()`` , where: - - s = the original string being parsed (see note below) - - loc = the location of the matching substring - - toks = a list of the matched tokens, packaged as a :class:`ParseResults` object + - ``s`` = the original string being parsed (see note below) + - ``loc`` = the location of the matching substring + - ``toks`` = a list of the matched tokens, packaged as a :class:`ParseResults` object The parsed tokens are passed to the parse action as ParseResults. They can be modified in place using list-style append, extend, and pop operations to update @@ -621,7 +644,7 @@ def set_parse_action(self, *fns: ParseAction, **kwargs) -> "ParserElement": Optional keyword arguments: - - call_during_try = (default= ``False``) indicate if parse action should be run during + - ``call_during_try`` = (default= ``False``) indicate if parse action should be run during lookaheads and alternate testing. For parse actions that have side effects, it is important to only call the parse action once it is determined that it is being called as part of a successful parse. 
For parse actions that perform additional @@ -697,10 +720,10 @@ def add_condition(self, *fns: ParseCondition, **kwargs) -> "ParserElement": Optional keyword arguments: - - message = define a custom message to be used in the raised exception - - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise + - ``message`` = define a custom message to be used in the raised exception + - ``fatal`` = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException - - call_during_try = boolean to indicate if this method should be called during internal tryParse calls, + - ``call_during_try`` = boolean to indicate if this method should be called during internal tryParse calls, default=False Example:: @@ -716,7 +739,9 @@ def add_condition(self, *fns: ParseCondition, **kwargs) -> "ParserElement": for fn in fns: self.parseAction.append( condition_as_parse_action( - fn, message=kwargs.get("message"), fatal=kwargs.get("fatal", False) + fn, + message=str(kwargs.get("message")), + fatal=bool(kwargs.get("fatal", False)), ) ) @@ -731,30 +756,33 @@ def set_fail_action(self, fn: ParseFailAction) -> "ParserElement": Fail acton fn is a callable function that takes the arguments ``fn(s, loc, expr, err)`` where: - - s = string being parsed - - loc = location where expression match was attempted and failed - - expr = the parse expression that failed - - err = the exception thrown + - ``s`` = string being parsed + - ``loc`` = location where expression match was attempted and failed + - ``expr`` = the parse expression that failed + - ``err`` = the exception thrown The function returns no value. 
It may throw :class:`ParseFatalException` if it is desired to stop parsing immediately.""" self.failAction = fn return self - def _skipIgnorables(self, instring, loc): + def _skipIgnorables(self, instring: str, loc: int) -> int: + if not self.ignoreExprs: + return loc exprsFound = True + ignore_expr_fns = [e._parse for e in self.ignoreExprs] while exprsFound: exprsFound = False - for e in self.ignoreExprs: + for ignore_fn in ignore_expr_fns: try: while 1: - loc, dummy = e._parse(instring, loc) + loc, dummy = ignore_fn(instring, loc) exprsFound = True except ParseException: pass return loc - def preParse(self, instring, loc): + def preParse(self, instring: str, loc: int) -> int: if self.ignoreExprs: loc = self._skipIgnorables(instring, loc) @@ -830,7 +858,7 @@ def _parseNoCache( try: for fn in self.parseAction: try: - tokens = fn(instring, tokens_start, ret_tokens) + tokens = fn(instring, tokens_start, ret_tokens) # type: ignore [call-arg, arg-type] except IndexError as parse_action_exc: exc = ParseException("exception raised in parse action") raise exc from parse_action_exc @@ -853,7 +881,7 @@ def _parseNoCache( else: for fn in self.parseAction: try: - tokens = fn(instring, tokens_start, ret_tokens) + tokens = fn(instring, tokens_start, ret_tokens) # type: ignore [call-arg, arg-type] except IndexError as parse_action_exc: exc = ParseException("exception raised in parse action") raise exc from parse_action_exc @@ -875,17 +903,24 @@ def _parseNoCache( return loc, ret_tokens - def try_parse(self, instring: str, loc: int, raise_fatal: bool = False) -> int: + def try_parse( + self, + instring: str, + loc: int, + *, + raise_fatal: bool = False, + do_actions: bool = False, + ) -> int: try: - return self._parse(instring, loc, doActions=False)[0] + return self._parse(instring, loc, doActions=do_actions)[0] except ParseFatalException: if raise_fatal: raise raise ParseException(instring, loc, self.errmsg, self) - def can_parse_next(self, instring: str, loc: int) -> bool: + 
def can_parse_next(self, instring: str, loc: int, do_actions: bool = False) -> bool: try: - self.try_parse(instring, loc) + self.try_parse(instring, loc, do_actions=do_actions) except (ParseException, IndexError): return False else: @@ -897,10 +932,23 @@ def can_parse_next(self, instring: str, loc: int) -> bool: Tuple[int, "Forward", bool], Tuple[int, Union[ParseResults, Exception]] ] = {} + class _CacheType(dict): + """ + class to help type checking + """ + + not_in_cache: bool + + def get(self, *args): + ... + + def set(self, *args): + ... + # argument cache for optimizing repeated calls when backtracking through recursive expressions packrat_cache = ( - {} - ) # this is set later by enabled_packrat(); this is here so that reset_cache() doesn't fail + _CacheType() + ) # set later by enable_packrat(); this is here so that reset_cache() doesn't fail packrat_cache_lock = RLock() packrat_cache_stats = [0, 0] @@ -930,24 +978,25 @@ def _parseCache( ParserElement.packrat_cache_stats[HIT] += 1 if self.debug and self.debugActions.debug_try: try: - self.debugActions.debug_try(instring, loc, self, cache_hit=True) + self.debugActions.debug_try(instring, loc, self, cache_hit=True) # type: ignore [call-arg] except TypeError: pass if isinstance(value, Exception): if self.debug and self.debugActions.debug_fail: try: self.debugActions.debug_fail( - instring, loc, self, value, cache_hit=True + instring, loc, self, value, cache_hit=True # type: ignore [call-arg] ) except TypeError: pass raise value + value = cast(Tuple[int, ParseResults, int], value) loc_, result, endloc = value[0], value[1].copy(), value[2] if self.debug and self.debugActions.debug_match: try: self.debugActions.debug_match( - instring, loc_, endloc, self, result, cache_hit=True + instring, loc_, endloc, self, result, cache_hit=True # type: ignore [call-arg] ) except TypeError: pass @@ -1009,7 +1058,7 @@ def enable_left_recursion( Parameters: - - cache_size_limit - (default=``None``) - memoize at most this many + - 
``cache_size_limit`` - (default=``None``) - memoize at most this many ``Forward`` elements during matching; if ``None`` (the default), memoize all ``Forward`` elements. @@ -1022,9 +1071,9 @@ def enable_left_recursion( elif ParserElement._packratEnabled: raise RuntimeError("Packrat and Bounded Recursion are not compatible") if cache_size_limit is None: - ParserElement.recursion_memos = _UnboundedMemo() + ParserElement.recursion_memos = _UnboundedMemo() # type: ignore[assignment] elif cache_size_limit > 0: - ParserElement.recursion_memos = _LRUMemo(capacity=cache_size_limit) + ParserElement.recursion_memos = _LRUMemo(capacity=cache_size_limit) # type: ignore[assignment] else: raise NotImplementedError("Memo size of %s" % cache_size_limit) ParserElement._left_recursion_enabled = True @@ -1040,7 +1089,7 @@ def enable_packrat(cache_size_limit: int = 128, *, force: bool = False) -> None: Parameters: - - cache_size_limit - (default= ``128``) - if an integer value is provided + - ``cache_size_limit`` - (default= ``128``) - if an integer value is provided will limit the size of the packrat cache; if None is passed, then the cache size will be unbounded; if 0 is passed, the cache will be effectively disabled. @@ -1070,7 +1119,7 @@ def enable_packrat(cache_size_limit: int = 128, *, force: bool = False) -> None: if cache_size_limit is None: ParserElement.packrat_cache = _UnboundedCache() else: - ParserElement.packrat_cache = _FifoCache(cache_size_limit) + ParserElement.packrat_cache = _FifoCache(cache_size_limit) # type: ignore[assignment] ParserElement._parse = ParserElement._parseCache def parse_string( @@ -1088,7 +1137,7 @@ def parse_string( an object with attributes if the given parser includes results names. If the input string is required to match the entire grammar, ``parse_all`` flag must be set to ``True``. This - is also equivalent to ending the grammar with :class:`StringEnd`(). + is also equivalent to ending the grammar with :class:`StringEnd`\\ (). 
To report proper column numbers, ``parse_string`` operates on a copy of the input string where all tabs are converted to spaces (8 spaces per tab, as per the default in ``string.expandtabs``). If the input string @@ -1198,7 +1247,9 @@ def scan_string( try: while loc <= instrlen and matches < maxMatches: try: - preloc = preparseFn(instring, loc) + preloc: int = preparseFn(instring, loc) + nextLoc: int + tokens: ParseResults nextLoc, tokens = parseFn(instring, preloc, callPreParse=False) except ParseException: loc = preloc + 1 @@ -1352,7 +1403,7 @@ def split( def __add__(self, other) -> "ParserElement": """ Implementation of ``+`` operator - returns :class:`And`. Adding strings to a :class:`ParserElement` - converts them to :class:`Literal`s by default. + converts them to :class:`Literal`\\ s by default. Example:: @@ -1364,11 +1415,11 @@ def __add__(self, other) -> "ParserElement": Hello, World! -> ['Hello', ',', 'World', '!'] - ``...`` may be used as a parse expression as a short form of :class:`SkipTo`. + ``...`` may be used as a parse expression as a short form of :class:`SkipTo`:: Literal('start') + ... 
+ Literal('end') - is equivalent to: + is equivalent to:: Literal('start') + SkipTo('end')("_skipped*") + Literal('end') @@ -1382,11 +1433,7 @@ def __add__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return And([self, other]) def __radd__(self, other) -> "ParserElement": @@ -1399,11 +1446,7 @@ def __radd__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other + self def __sub__(self, other) -> "ParserElement": @@ -1413,11 +1456,7 @@ def __sub__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return self + And._ErrorStop() + other def __rsub__(self, other) -> "ParserElement": @@ -1427,11 +1466,7 @@ def __rsub__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other - self def __mul__(self, other) -> "ParserElement": @@ -1440,11 +1475,12 @@ def __mul__(self, other) -> "ParserElement": ``expr + expr + expr``. Expressions may also be multiplied by a 2-integer tuple, similar to ``{min, max}`` multipliers in regular expressions. 
Tuples may also include ``None`` as in: + - ``expr*(n, None)`` or ``expr*(n, )`` is equivalent - to ``expr*n + ZeroOrMore(expr)`` - (read as "at least n instances of ``expr``") + to ``expr*n + ZeroOrMore(expr)`` + (read as "at least n instances of ``expr``") - ``expr*(None, n)`` is equivalent to ``expr*(0, n)`` - (read as "0 to n instances of ``expr``") + (read as "0 to n instances of ``expr``") - ``expr*(None, None)`` is equivalent to ``ZeroOrMore(expr)`` - ``expr*(1, None)`` is equivalent to ``OneOrMore(expr)`` @@ -1477,17 +1513,9 @@ def __mul__(self, other) -> "ParserElement": minElements, optElements = other optElements -= minElements else: - raise TypeError( - "cannot multiply ParserElement and ({}) objects".format( - ",".join(type(item).__name__ for item in other) - ) - ) + return NotImplemented else: - raise TypeError( - "cannot multiply ParserElement and {} objects".format( - type(other).__name__ - ) - ) + return NotImplemented if minElements < 0: raise ValueError("cannot multiply ParserElement by negative value") @@ -1531,13 +1559,12 @@ def __or__(self, other) -> "ParserElement": return _PendingSkip(self, must_skip=True) if isinstance(other, str_type): + # `expr | ""` is equivalent to `Opt(expr)` + if other == "": + return Opt(self) other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return MatchFirst([self, other]) def __ror__(self, other) -> "ParserElement": @@ -1547,11 +1574,7 @@ def __ror__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other | self def __xor__(self, other) -> "ParserElement": @@ -1561,11 +1584,7 @@ def __xor__(self, other) -> 
"ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return Or([self, other]) def __rxor__(self, other) -> "ParserElement": @@ -1575,11 +1594,7 @@ def __rxor__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other ^ self def __and__(self, other) -> "ParserElement": @@ -1589,11 +1604,7 @@ def __and__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return Each([self, other]) def __rand__(self, other) -> "ParserElement": @@ -1603,11 +1614,7 @@ def __rand__(self, other) -> "ParserElement": if isinstance(other, str_type): other = self._literalStringClass(other) if not isinstance(other, ParserElement): - raise TypeError( - "Cannot combine element of type {} with ParserElement".format( - type(other).__name__ - ) - ) + return NotImplemented return other & self def __invert__(self) -> "ParserElement": @@ -1636,38 +1643,58 @@ def __getitem__(self, key): ``None`` may be used in place of ``...``. - Note that ``expr[..., n]`` and ``expr[m, n]``do not raise an exception - if more than ``n`` ``expr``s exist in the input stream. If this behavior is + Note that ``expr[..., n]`` and ``expr[m, n]`` do not raise an exception + if more than ``n`` ``expr``\\ s exist in the input stream. If this behavior is desired, then write ``expr[..., n] + ~expr``. 
+ + For repetition with a stop_on expression, use slice notation: + + - ``expr[...: end_expr]`` and ``expr[0, ...: end_expr]`` are equivalent to ``ZeroOrMore(expr, stop_on=end_expr)`` + - ``expr[1, ...: end_expr]`` is equivalent to ``OneOrMore(expr, stop_on=end_expr)`` + """ + stop_on_defined = False + stop_on = NoMatch() + if isinstance(key, slice): + key, stop_on = key.start, key.stop + if key is None: + key = ... + stop_on_defined = True + elif isinstance(key, tuple) and isinstance(key[-1], slice): + key, stop_on = (key[0], key[1].start), key[1].stop + stop_on_defined = True + # convert single arg keys to tuples + if isinstance(key, str_type): + key = (key,) try: - if isinstance(key, str_type): - key = (key,) iter(key) except TypeError: key = (key, key) if len(key) > 2: raise TypeError( - "only 1 or 2 index arguments supported ({}{})".format( - key[:5], "... [{}]".format(len(key)) if len(key) > 5 else "" - ) + f"only 1 or 2 index arguments supported ({key[:5]}{f'... [{len(key)}]' if len(key) > 5 else ''})" ) # clip to 2 elements ret = self * tuple(key[:2]) + ret = typing.cast(_MultipleMatch, ret) + + if stop_on_defined: + ret.stopOn(stop_on) + return ret - def __call__(self, name: str = None) -> "ParserElement": + def __call__(self, name: typing.Optional[str] = None) -> "ParserElement": """ Shortcut for :class:`set_results_name`, with ``list_all_matches=False``. If ``name`` is given with a trailing ``'*'`` character, then ``list_all_matches`` will be passed as ``True``. - If ``name` is omitted, same as calling :class:`copy`. + If ``name`` is omitted, same as calling :class:`copy`. 
Example:: @@ -1775,17 +1802,18 @@ def set_debug_actions( should have the signature ``fn(input_string: str, location: int, expression: ParserElement, exception: Exception, cache_hit: bool)`` """ self.debugActions = self.DebugActions( - start_action or _default_start_debug_action, - success_action or _default_success_debug_action, - exception_action or _default_exception_debug_action, + start_action or _default_start_debug_action, # type: ignore[truthy-function] + success_action or _default_success_debug_action, # type: ignore[truthy-function] + exception_action or _default_exception_debug_action, # type: ignore[truthy-function] ) self.debug = True return self - def set_debug(self, flag: bool = True) -> "ParserElement": + def set_debug(self, flag: bool = True, recurse: bool = False) -> "ParserElement": """ Enable display of debugging messages while doing pattern matching. Set ``flag`` to ``True`` to enable, ``False`` to disable. + Set ``recurse`` to ``True`` to set the debug flag on this expression and all sub-expressions. Example:: @@ -1819,6 +1847,11 @@ def set_debug(self, flag: bool = True) -> "ParserElement": which makes debugging and exception messages easier to understand - for instance, the default name created for the :class:`Word` expression without calling ``set_name`` is ``"W:(A-Za-z)"``. """ + if recurse: + for expr in self.visit_all(): + expr.set_debug(flag, recurse=False) + return self + if flag: self.set_debug_actions( _default_start_debug_action, @@ -1836,7 +1869,7 @@ def default_name(self) -> str: return self._defaultName @abstractmethod - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: """ Child classes must define this method, which defines how the ``default_name`` is set. """ @@ -1844,7 +1877,9 @@ def _generateDefaultName(self): def set_name(self, name: str) -> "ParserElement": """ Define name for this expression, makes debugging and exception messages clearer. 
+ Example:: + Word(nums).parse_string("ABC") # -> Exception: Expected W:(0-9) (at char 0), (line:1, col:1) Word(nums).set_name("integer").parse_string("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) """ @@ -1870,7 +1905,7 @@ def streamline(self) -> "ParserElement": self._defaultName = None return self - def recurse(self) -> Sequence["ParserElement"]: + def recurse(self) -> List["ParserElement"]: return [] def _checkRecursion(self, parseElementList): @@ -1882,6 +1917,11 @@ def validate(self, validateTrace=None) -> None: """ Check defined expressions for valid structure, check for infinite recursive definitions. """ + warnings.warn( + "ParserElement.validate() is deprecated, and should not be used to check for left recursion", + DeprecationWarning, + stacklevel=2, + ) self._checkRecursion([]) def parse_file( @@ -1899,8 +1939,10 @@ def parse_file( """ parseAll = parseAll or parse_all try: + file_or_filename = typing.cast(TextIO, file_or_filename) file_contents = file_or_filename.read() except AttributeError: + file_or_filename = typing.cast(str, file_or_filename) with open(file_or_filename, "r", encoding=encoding) as f: file_contents = f.read() try: @@ -1932,6 +1974,7 @@ def matches( inline microtests of sub expressions while building up larger parser. 
Parameters: + - ``test_string`` - to test against this expression for a match - ``parse_all`` - (default= ``True``) - flag to pass to :class:`parse_string` when running tests @@ -1955,7 +1998,7 @@ def run_tests( full_dump: bool = True, print_results: bool = True, failure_tests: bool = False, - post_parse: Callable[[str, ParseResults], str] = None, + post_parse: typing.Optional[Callable[[str, ParseResults], str]] = None, file: typing.Optional[TextIO] = None, with_line_numbers: bool = False, *, @@ -1963,7 +2006,7 @@ def run_tests( fullDump: bool = True, printResults: bool = True, failureTests: bool = False, - postParse: Callable[[str, ParseResults], str] = None, + postParse: typing.Optional[Callable[[str, ParseResults], str]] = None, ) -> Tuple[bool, List[Tuple[str, Union[ParseResults, Exception]]]]: """ Execute the parse expression on a series of test strings, showing each @@ -1971,6 +2014,7 @@ def run_tests( run a parse expression against a list of sample strings. Parameters: + - ``tests`` - a list of separate test strings, or a multiline string of test strings - ``parse_all`` - (default= ``True``) - flag to pass to :class:`parse_string` when running tests - ``comment`` - (default= ``'#'``) - expression for indicating embedded comments in the test @@ -2067,22 +2111,27 @@ def run_tests( failureTests = failureTests or failure_tests postParse = postParse or post_parse if isinstance(tests, str_type): + tests = typing.cast(str, tests) line_strip = type(tests).strip tests = [line_strip(test_line) for test_line in tests.rstrip().splitlines()] - if isinstance(comment, str_type): - comment = Literal(comment) + comment_specified = comment is not None + if comment_specified: + if isinstance(comment, str_type): + comment = typing.cast(str, comment) + comment = Literal(comment) + comment = typing.cast(ParserElement, comment) if file is None: file = sys.stdout print_ = file.write result: Union[ParseResults, Exception] - allResults = [] - comments = [] + allResults: 
List[Tuple[str, Union[ParseResults, Exception]]] = [] + comments: List[str] = [] success = True NL = Literal(r"\n").add_parse_action(replace_with("\n")).ignore(quoted_string) BOM = "\ufeff" for t in tests: - if comment is not None and comment.matches(t, False) or comments and not t: + if comment_specified and comment.matches(t, False) or comments and not t: comments.append( pyparsing_test.with_line_numbers(t) if with_line_numbers else t ) @@ -2107,7 +2156,7 @@ def run_tests( success = success and failureTests result = pe except Exception as exc: - out.append("FAIL-EXCEPTION: {}: {}".format(type(exc).__name__, exc)) + out.append(f"FAIL-EXCEPTION: {type(exc).__name__}: {exc}") if ParserElement.verbose_stacktrace: out.extend(traceback.format_tb(exc.__traceback__)) success = success and failureTests @@ -2127,9 +2176,7 @@ def run_tests( except Exception as e: out.append(result.dump(full=fullDump)) out.append( - "{} failed: {}: {}".format( - postParse.__name__, type(e).__name__, e - ) + f"{postParse.__name__} failed: {type(e).__name__}: {e}" ) else: out.append(result.dump(full=fullDump)) @@ -2148,19 +2195,28 @@ def create_diagram( vertical: int = 3, show_results_names: bool = False, show_groups: bool = False, + embed: bool = False, **kwargs, ) -> None: """ Create a railroad diagram for the parser. 
Parameters: - - output_html (str or file-like object) - output target for generated + + - ``output_html`` (str or file-like object) - output target for generated diagram HTML - - vertical (int) - threshold for formatting multiple alternatives vertically + - ``vertical`` (int) - threshold for formatting multiple alternatives vertically instead of horizontally (default=3) - - show_results_names - bool flag whether diagram should show annotations for + - ``show_results_names`` - bool flag whether diagram should show annotations for defined results names - - show_groups - bool flag whether groups should be highlighted with an unlabeled surrounding box + - ``show_groups`` - bool flag whether groups should be highlighted with an unlabeled surrounding box + - ``embed`` - bool flag whether generated HTML should omit , , and tags to embed + the resulting HTML in an enclosing HTML source + - ``head`` - str containing additional HTML to insert into the section of the generated code; + can be used to insert custom CSS styling + - ``body`` - str containing additional HTML to insert at the beginning of the section of the + generated code + Additional diagram-formatting keyword arguments can also be included; see railroad.Diagram class. 
""" @@ -2183,38 +2239,93 @@ def create_diagram( ) if isinstance(output_html, (str, Path)): with open(output_html, "w", encoding="utf-8") as diag_file: - diag_file.write(railroad_to_html(railroad)) + diag_file.write(railroad_to_html(railroad, embed=embed, **kwargs)) else: # we were passed a file-like object, just write to it - output_html.write(railroad_to_html(railroad)) - - setDefaultWhitespaceChars = set_default_whitespace_chars - inlineLiteralsUsing = inline_literals_using - setResultsName = set_results_name - setBreak = set_break - setParseAction = set_parse_action - addParseAction = add_parse_action - addCondition = add_condition - setFailAction = set_fail_action - tryParse = try_parse + output_html.write(railroad_to_html(railroad, embed=embed, **kwargs)) + + # Compatibility synonyms + # fmt: off + @staticmethod + @replaced_by_pep8(inline_literals_using) + def inlineLiteralsUsing(): ... + + @staticmethod + @replaced_by_pep8(set_default_whitespace_chars) + def setDefaultWhitespaceChars(): ... + + @replaced_by_pep8(set_results_name) + def setResultsName(self): ... + + @replaced_by_pep8(set_break) + def setBreak(self): ... + + @replaced_by_pep8(set_parse_action) + def setParseAction(self): ... + + @replaced_by_pep8(add_parse_action) + def addParseAction(self): ... + + @replaced_by_pep8(add_condition) + def addCondition(self): ... + + @replaced_by_pep8(set_fail_action) + def setFailAction(self): ... + + @replaced_by_pep8(try_parse) + def tryParse(self): ... + + @staticmethod + @replaced_by_pep8(enable_left_recursion) + def enableLeftRecursion(): ... + + @staticmethod + @replaced_by_pep8(enable_packrat) + def enablePackrat(): ... + + @replaced_by_pep8(parse_string) + def parseString(self): ... + + @replaced_by_pep8(scan_string) + def scanString(self): ... + + @replaced_by_pep8(transform_string) + def transformString(self): ... + + @replaced_by_pep8(search_string) + def searchString(self): ... 
+ + @replaced_by_pep8(ignore_whitespace) + def ignoreWhitespace(self): ... + + @replaced_by_pep8(leave_whitespace) + def leaveWhitespace(self): ... + + @replaced_by_pep8(set_whitespace_chars) + def setWhitespaceChars(self): ... + + @replaced_by_pep8(parse_with_tabs) + def parseWithTabs(self): ... + + @replaced_by_pep8(set_debug_actions) + def setDebugActions(self): ... + + @replaced_by_pep8(set_debug) + def setDebug(self): ... + + @replaced_by_pep8(set_name) + def setName(self): ... + + @replaced_by_pep8(parse_file) + def parseFile(self): ... + + @replaced_by_pep8(run_tests) + def runTests(self): ... + canParseNext = can_parse_next resetCache = reset_cache - enableLeftRecursion = enable_left_recursion - enablePackrat = enable_packrat - parseString = parse_string - scanString = scan_string - searchString = search_string - transformString = transform_string - setWhitespaceChars = set_whitespace_chars - parseWithTabs = parse_with_tabs - setDebugActions = set_debug_actions - setDebug = set_debug defaultName = default_name - setName = set_name - parseFile = parse_file - runTests = run_tests - ignoreWhitespace = ignore_whitespace - leaveWhitespace = leave_whitespace + # fmt: on class _PendingSkip(ParserElement): @@ -2225,7 +2336,7 @@ def __init__(self, expr: ParserElement, must_skip: bool = False): self.anchor = expr self.must_skip = must_skip - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return str(self.anchor + Empty()).replace("Empty", "...") def __add__(self, other) -> "ParserElement": @@ -2266,21 +2377,10 @@ class Token(ParserElement): def __init__(self): super().__init__(savelist=False) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return type(self).__name__ -class Empty(Token): - """ - An empty token, will always match. - """ - - def __init__(self): - super().__init__() - self.mayReturnEmpty = True - self.mayIndexError = False - - class NoMatch(Token): """ A token that will never match. 
@@ -2312,25 +2412,33 @@ class Literal(Token): use :class:`Keyword` or :class:`CaselessKeyword`. """ + def __new__(cls, match_string: str = "", *, matchString: str = ""): + # Performance tuning: select a subclass with optimized parseImpl + if cls is Literal: + match_string = matchString or match_string + if not match_string: + return super().__new__(Empty) + if len(match_string) == 1: + return super().__new__(_SingleCharLiteral) + + # Default behavior + return super().__new__(cls) + + # Needed to make copy.copy() work correctly if we customize __new__ + def __getnewargs__(self): + return (self.match,) + def __init__(self, match_string: str = "", *, matchString: str = ""): super().__init__() match_string = matchString or match_string self.match = match_string self.matchLen = len(match_string) - try: - self.firstMatchChar = match_string[0] - except IndexError: - raise ValueError("null string passed to Literal; use Empty() instead") + self.firstMatchChar = match_string[:1] self.errmsg = "Expected " + self.name self.mayReturnEmpty = False self.mayIndexError = False - # Performance tuning: modify __class__ to select - # a parseImpl optimized for single-character check - if self.matchLen == 1 and type(self) is Literal: - self.__class__ = _SingleCharLiteral - - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return repr(self.match) def parseImpl(self, instring, loc, doActions=True): @@ -2341,6 +2449,23 @@ def parseImpl(self, instring, loc, doActions=True): raise ParseException(instring, loc, self.errmsg, self) +class Empty(Literal): + """ + An empty token, will always match. 
+ """ + + def __init__(self, match_string="", *, matchString=""): + super().__init__("") + self.mayReturnEmpty = True + self.mayIndexError = False + + def _generateDefaultName(self) -> str: + return "Empty" + + def parseImpl(self, instring, loc, doActions=True): + return loc, [] + + class _SingleCharLiteral(Literal): def parseImpl(self, instring, loc, doActions=True): if instring[loc] == self.firstMatchChar: @@ -2354,8 +2479,8 @@ def parseImpl(self, instring, loc, doActions=True): class Keyword(Token): """ Token to exactly match a specified string as a keyword, that is, - it must be immediately followed by a non-keyword character. Compare - with :class:`Literal`: + it must be immediately preceded and followed by whitespace or + non-keyword characters. Compare with :class:`Literal`: - ``Literal("if")`` will match the leading ``'if'`` in ``'ifAndOnlyIf'``. @@ -2365,7 +2490,7 @@ class Keyword(Token): Accepts two optional constructor arguments in addition to the keyword string: - - ``identChars`` is a string of characters that would be valid + - ``ident_chars`` is a string of characters that would be valid identifier characters, defaulting to all alphanumerics + "_" and "$" - ``caseless`` allows case-insensitive matching, default is ``False``. 
@@ -2400,7 +2525,7 @@ def __init__( self.firstMatchChar = match_string[0] except IndexError: raise ValueError("null string passed to Keyword; use Empty() instead") - self.errmsg = "Expected {} {}".format(type(self).__name__, self.name) + self.errmsg = f"Expected {type(self).__name__} {self.name}" self.mayReturnEmpty = False self.mayIndexError = False self.caseless = caseless @@ -2409,7 +2534,7 @@ def __init__( identChars = identChars.upper() self.identChars = set(identChars) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return repr(self.match) def parseImpl(self, instring, loc, doActions=True): @@ -2559,7 +2684,7 @@ class CloseMatch(Token): def __init__( self, match_string: str, - max_mismatches: int = None, + max_mismatches: typing.Optional[int] = None, *, maxMismatches: int = 1, caseless=False, @@ -2568,15 +2693,13 @@ def __init__( super().__init__() self.match_string = match_string self.maxMismatches = maxMismatches - self.errmsg = "Expected {!r} (with up to {} mismatches)".format( - self.match_string, self.maxMismatches - ) + self.errmsg = f"Expected {self.match_string!r} (with up to {self.maxMismatches} mismatches)" self.caseless = caseless self.mayIndexError = False self.mayReturnEmpty = False - def _generateDefaultName(self): - return "{}:{!r}".format(type(self).__name__, self.match_string) + def _generateDefaultName(self) -> str: + return f"{type(self).__name__}:{self.match_string!r}" def parseImpl(self, instring, loc, doActions=True): start = loc @@ -2612,7 +2735,9 @@ def parseImpl(self, instring, loc, doActions=True): class Word(Token): """Token for matching words composed of allowed character sets. 
+ Parameters: + - ``init_chars`` - string of all characters that should be used to match as a word; "ABC" will match "AAA", "ABAB", "CBAC", etc.; if ``body_chars`` is also specified, then this is the string of @@ -2697,26 +2822,24 @@ def __init__( super().__init__() if not initChars: raise ValueError( - "invalid {}, initChars cannot be empty string".format( - type(self).__name__ - ) + f"invalid {type(self).__name__}, initChars cannot be empty string" ) - initChars = set(initChars) - self.initChars = initChars + initChars_set = set(initChars) if excludeChars: - excludeChars = set(excludeChars) - initChars -= excludeChars + excludeChars_set = set(excludeChars) + initChars_set -= excludeChars_set if bodyChars: - bodyChars = set(bodyChars) - excludeChars - self.initCharsOrig = "".join(sorted(initChars)) + bodyChars = "".join(set(bodyChars) - excludeChars_set) + self.initChars = initChars_set + self.initCharsOrig = "".join(sorted(initChars_set)) if bodyChars: - self.bodyCharsOrig = "".join(sorted(bodyChars)) self.bodyChars = set(bodyChars) + self.bodyCharsOrig = "".join(sorted(bodyChars)) else: - self.bodyCharsOrig = "".join(sorted(initChars)) - self.bodyChars = set(initChars) + self.bodyChars = initChars_set + self.bodyCharsOrig = self.initCharsOrig self.maxSpecified = max > 0 @@ -2725,6 +2848,11 @@ def __init__( "cannot specify a minimum length < 1; use Opt(Word()) if zero-length word is permitted" ) + if self.maxSpecified and min > max: + raise ValueError( + f"invalid args, if min and max both specified min must be <= max (min={min}, max={max})" + ) + self.minLen = min if max > 0: @@ -2733,62 +2861,66 @@ def __init__( self.maxLen = _MAX_INT if exact > 0: + min = max = exact self.maxLen = exact self.minLen = exact self.errmsg = "Expected " + self.name self.mayIndexError = False self.asKeyword = asKeyword + if self.asKeyword: + self.errmsg += " as a keyword" # see if we can make a regex for this Word - if " " not in self.initChars | self.bodyChars and (min == 1 and 
exact == 0): + if " " not in (self.initChars | self.bodyChars): + if len(self.initChars) == 1: + re_leading_fragment = re.escape(self.initCharsOrig) + else: + re_leading_fragment = f"[{_collapse_string_to_ranges(self.initChars)}]" + if self.bodyChars == self.initChars: if max == 0: repeat = "+" elif max == 1: repeat = "" else: - repeat = "{{{},{}}}".format( - self.minLen, "" if self.maxLen == _MAX_INT else self.maxLen - ) - self.reString = "[{}]{}".format( - _collapse_string_to_ranges(self.initChars), - repeat, - ) - elif len(self.initChars) == 1: - if max == 0: - repeat = "*" - else: - repeat = "{{0,{}}}".format(max - 1) - self.reString = "{}[{}]{}".format( - re.escape(self.initCharsOrig), - _collapse_string_to_ranges(self.bodyChars), - repeat, - ) + if self.minLen != self.maxLen: + repeat = f"{{{self.minLen},{'' if self.maxLen == _MAX_INT else self.maxLen}}}" + else: + repeat = f"{{{self.minLen}}}" + self.reString = f"{re_leading_fragment}{repeat}" else: - if max == 0: - repeat = "*" - elif max == 2: + if max == 1: + re_body_fragment = "" repeat = "" else: - repeat = "{{0,{}}}".format(max - 1) - self.reString = "[{}][{}]{}".format( - _collapse_string_to_ranges(self.initChars), - _collapse_string_to_ranges(self.bodyChars), - repeat, + re_body_fragment = f"[{_collapse_string_to_ranges(self.bodyChars)}]" + if max == 0: + repeat = "*" + elif max == 2: + repeat = "?" 
if min <= 1 else "" + else: + if min != max: + repeat = f"{{{min - 1 if min > 0 else 0},{max - 1}}}" + else: + repeat = f"{{{min - 1 if min > 0 else 0}}}" + + self.reString = ( + f"{re_leading_fragment}" f"{re_body_fragment}" f"{repeat}" ) + if self.asKeyword: - self.reString = r"\b" + self.reString + r"\b" + self.reString = rf"\b{self.reString}\b" try: self.re = re.compile(self.reString) except re.error: - self.re = None + self.re = None # type: ignore[assignment] else: self.re_match = self.re.match - self.__class__ = _WordRegex + self.parseImpl = self.parseImpl_regex # type: ignore[assignment] - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: def charsAsStr(s): max_repr_len = 16 s = _collapse_string_to_ranges(s, re_escape=False) @@ -2798,11 +2930,9 @@ def charsAsStr(s): return s if self.initChars != self.bodyChars: - base = "W:({}, {})".format( - charsAsStr(self.initChars), charsAsStr(self.bodyChars) - ) + base = f"W:({charsAsStr(self.initChars)}, {charsAsStr(self.bodyChars)})" else: - base = "W:({})".format(charsAsStr(self.initChars)) + base = f"W:({charsAsStr(self.initChars)})" # add length specification if self.minLen > 1 or self.maxLen != _MAX_INT: @@ -2810,11 +2940,11 @@ def charsAsStr(s): if self.minLen == 1: return base[2:] else: - return base + "{{{}}}".format(self.minLen) + return base + f"{{{self.minLen}}}" elif self.maxLen == _MAX_INT: - return base + "{{{},...}}".format(self.minLen) + return base + f"{{{self.minLen},...}}" else: - return base + "{{{},{}}}".format(self.minLen, self.maxLen) + return base + f"{{{self.minLen},{self.maxLen}}}" return base def parseImpl(self, instring, loc, doActions=True): @@ -2849,9 +2979,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, instring[start:loc] - -class _WordRegex(Word): - def parseImpl(self, instring, loc, doActions=True): + def parseImpl_regex(self, instring, loc, doActions=True): result = self.re_match(instring, loc) if not result: raise ParseException(instring, 
loc, self.errmsg, self) @@ -2860,7 +2988,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, result.group() -class Char(_WordRegex): +class Char(Word): """A short-cut class for defining :class:`Word` ``(characters, exact=1)``, when defining a match of any single character in a string of characters. @@ -2878,13 +3006,8 @@ def __init__( asKeyword = asKeyword or as_keyword excludeChars = excludeChars or exclude_chars super().__init__( - charset, exact=1, asKeyword=asKeyword, excludeChars=excludeChars + charset, exact=1, as_keyword=asKeyword, exclude_chars=excludeChars ) - self.reString = "[{}]".format(_collapse_string_to_ranges(self.initChars)) - if asKeyword: - self.reString = r"\b{}\b".format(self.reString) - self.re = re.compile(self.reString) - self.re_match = self.re.match class Regex(Token): @@ -2954,9 +3077,9 @@ def __init__( self.asGroupList = asGroupList self.asMatch = asMatch if self.asGroupList: - self.parseImpl = self.parseImplAsGroupList + self.parseImpl = self.parseImplAsGroupList # type: ignore [assignment] if self.asMatch: - self.parseImpl = self.parseImplAsMatch + self.parseImpl = self.parseImplAsMatch # type: ignore [assignment] @cached_property def re(self): @@ -2966,9 +3089,7 @@ def re(self): try: return re.compile(self.pattern, self.flags) except re.error: - raise ValueError( - "invalid pattern ({!r}) passed to Regex".format(self.pattern) - ) + raise ValueError(f"invalid pattern ({self.pattern!r}) passed to Regex") @cached_property def re_match(self): @@ -2978,7 +3099,7 @@ def re_match(self): def mayReturnEmpty(self): return self.re_match("") is not None - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "Re:({})".format(repr(self.pattern).replace("\\\\", "\\")) def parseImpl(self, instring, loc, doActions=True): @@ -3024,10 +3145,12 @@ def sub(self, repl: str) -> ParserElement: # prints "

main title

" """ if self.asGroupList: - raise TypeError("cannot use sub() with Regex(asGroupList=True)") + raise TypeError("cannot use sub() with Regex(as_group_list=True)") if self.asMatch and callable(repl): - raise TypeError("cannot use sub() with a callable with Regex(asMatch=True)") + raise TypeError( + "cannot use sub() with a callable with Regex(as_match=True)" + ) if self.asMatch: @@ -3081,7 +3204,7 @@ class QuotedString(Token): [['This is the "quote"']] [['This is the quote with "embedded" quotes']] """ - ws_map = ((r"\t", "\t"), (r"\n", "\n"), (r"\f", "\f"), (r"\r", "\r")) + ws_map = dict(((r"\t", "\t"), (r"\n", "\n"), (r"\f", "\f"), (r"\r", "\r"))) def __init__( self, @@ -3120,57 +3243,54 @@ def __init__( else: endQuoteChar = endQuoteChar.strip() if not endQuoteChar: - raise ValueError("endQuoteChar cannot be the empty string") - - self.quoteChar = quote_char - self.quoteCharLen = len(quote_char) - self.firstQuoteChar = quote_char[0] - self.endQuoteChar = endQuoteChar - self.endQuoteCharLen = len(endQuoteChar) - self.escChar = escChar - self.escQuote = escQuote - self.unquoteResults = unquoteResults - self.convertWhitespaceEscapes = convertWhitespaceEscapes + raise ValueError("end_quote_char cannot be the empty string") + + self.quoteChar: str = quote_char + self.quoteCharLen: int = len(quote_char) + self.firstQuoteChar: str = quote_char[0] + self.endQuoteChar: str = endQuoteChar + self.endQuoteCharLen: int = len(endQuoteChar) + self.escChar: str = escChar or "" + self.escQuote: str = escQuote or "" + self.unquoteResults: bool = unquoteResults + self.convertWhitespaceEscapes: bool = convertWhitespaceEscapes + self.multiline = multiline sep = "" inner_pattern = "" if escQuote: - inner_pattern += r"{}(?:{})".format(sep, re.escape(escQuote)) + inner_pattern += rf"{sep}(?:{re.escape(escQuote)})" sep = "|" if escChar: - inner_pattern += r"{}(?:{}.)".format(sep, re.escape(escChar)) + inner_pattern += rf"{sep}(?:{re.escape(escChar)}.)" sep = "|" - 
self.escCharReplacePattern = re.escape(self.escChar) + "(.)" + self.escCharReplacePattern = re.escape(escChar) + "(.)" if len(self.endQuoteChar) > 1: inner_pattern += ( - "{}(?:".format(sep) + f"{sep}(?:" + "|".join( - "(?:{}(?!{}))".format( - re.escape(self.endQuoteChar[:i]), - re.escape(self.endQuoteChar[i:]), - ) + f"(?:{re.escape(self.endQuoteChar[:i])}(?!{re.escape(self.endQuoteChar[i:])}))" for i in range(len(self.endQuoteChar) - 1, 0, -1) ) + ")" ) sep = "|" + self.flags = re.RegexFlag(0) + if multiline: self.flags = re.MULTILINE | re.DOTALL - inner_pattern += r"{}(?:[^{}{}])".format( - sep, - _escape_regex_range_chars(self.endQuoteChar[0]), - (_escape_regex_range_chars(escChar) if escChar is not None else ""), + inner_pattern += ( + rf"{sep}(?:[^{_escape_regex_range_chars(self.endQuoteChar[0])}" + rf"{(_escape_regex_range_chars(escChar) if escChar is not None else '')}])" ) else: - self.flags = 0 - inner_pattern += r"{}(?:[^{}\n\r{}])".format( - sep, - _escape_regex_range_chars(self.endQuoteChar[0]), - (_escape_regex_range_chars(escChar) if escChar is not None else ""), + inner_pattern += ( + rf"{sep}(?:[^{_escape_regex_range_chars(self.endQuoteChar[0])}\n\r" + rf"{(_escape_regex_range_chars(escChar) if escChar is not None else '')}])" ) self.pattern = "".join( @@ -3183,26 +3303,33 @@ def __init__( ] ) + if self.unquoteResults: + if self.convertWhitespaceEscapes: + self.unquote_scan_re = re.compile( + rf"({'|'.join(re.escape(k) for k in self.ws_map)})|({re.escape(self.escChar)}.)|(\n|.)", + flags=self.flags, + ) + else: + self.unquote_scan_re = re.compile( + rf"({re.escape(self.escChar)}.)|(\n|.)", flags=self.flags + ) + try: self.re = re.compile(self.pattern, self.flags) self.reString = self.pattern self.re_match = self.re.match except re.error: - raise ValueError( - "invalid pattern {!r} passed to Regex".format(self.pattern) - ) + raise ValueError(f"invalid pattern {self.pattern!r} passed to Regex") self.errmsg = "Expected " + self.name self.mayIndexError 
= False self.mayReturnEmpty = True - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: if self.quoteChar == self.endQuoteChar and isinstance(self.quoteChar, str_type): - return "string enclosed in {!r}".format(self.quoteChar) + return f"string enclosed in {self.quoteChar!r}" - return "quoted string, starting with {} ending with {}".format( - self.quoteChar, self.endQuoteChar - ) + return f"quoted string, starting with {self.quoteChar} ending with {self.endQuoteChar}" def parseImpl(self, instring, loc, doActions=True): result = ( @@ -3217,19 +3344,24 @@ def parseImpl(self, instring, loc, doActions=True): ret = result.group() if self.unquoteResults: - # strip off quotes ret = ret[self.quoteCharLen : -self.endQuoteCharLen] if isinstance(ret, str_type): - # replace escaped whitespace - if "\\" in ret and self.convertWhitespaceEscapes: - for wslit, wschar in self.ws_map: - ret = ret.replace(wslit, wschar) - - # replace escaped characters - if self.escChar: - ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) + if self.convertWhitespaceEscapes: + ret = "".join( + self.ws_map[match.group(1)] + if match.group(1) + else match.group(2)[-1] + if match.group(2) + else match.group(3) + for match in self.unquote_scan_re.finditer(ret) + ) + else: + ret = "".join( + match.group(1)[-1] if match.group(1) else match.group(2) + for match in self.unquote_scan_re.finditer(ret) + ) # replace escaped quotes if self.escQuote: @@ -3252,7 +3384,7 @@ class CharsNotIn(Token): # define a comma-separated-value as anything that is not a ',' csv_value = CharsNotIn(',') - print(delimited_list(csv_value).parse_string("dkls,lsdkjf,s12 34,@!#,213")) + print(DelimitedList(csv_value).parse_string("dkls,lsdkjf,s12 34,@!#,213")) prints:: @@ -3294,12 +3426,12 @@ def __init__( self.mayReturnEmpty = self.minLen == 0 self.mayIndexError = False - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: not_chars_str = _collapse_string_to_ranges(self.notChars) if 
len(not_chars_str) > 16: - return "!W:({}...)".format(self.notChars[: 16 - 3]) + return f"!W:({self.notChars[: 16 - 3]}...)" else: - return "!W:({})".format(self.notChars) + return f"!W:({self.notChars})" def parseImpl(self, instring, loc, doActions=True): notchars = self.notCharsSet @@ -3376,7 +3508,7 @@ def __init__(self, ws: str = " \t\r\n", min: int = 1, max: int = 0, exact: int = self.maxLen = exact self.minLen = exact - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "".join(White.whiteStrs[c] for c in self.matchWhite) def parseImpl(self, instring, loc, doActions=True): @@ -3411,7 +3543,7 @@ def __init__(self, colno: int): super().__init__() self.col = colno - def preParse(self, instring, loc): + def preParse(self, instring: str, loc: int) -> int: if col(loc, instring) != self.col: instrlen = len(instring) if self.ignoreExprs: @@ -3446,7 +3578,7 @@ class LineStart(PositionToken): B AAA and definitely not this one ''' - for t in (LineStart() + 'AAA' + restOfLine).search_string(test): + for t in (LineStart() + 'AAA' + rest_of_line).search_string(test): print(t) prints:: @@ -3464,7 +3596,7 @@ def __init__(self): self.skipper = Empty().set_whitespace_chars(self.whiteChars) self.errmsg = "Expected start of line" - def preParse(self, instring, loc): + def preParse(self, instring: str, loc: int) -> int: if loc == 0: return loc else: @@ -3624,7 +3756,7 @@ def __init__(self, exprs: typing.Iterable[ParserElement], savelist: bool = False self.exprs = [exprs] self.callPreparse = False - def recurse(self) -> Sequence[ParserElement]: + def recurse(self) -> List[ParserElement]: return self.exprs[:] def append(self, other) -> ParserElement: @@ -3669,8 +3801,8 @@ def ignore(self, other) -> ParserElement: e.ignore(self.ignoreExprs[-1]) return self - def _generateDefaultName(self): - return "{}:({})".format(self.__class__.__name__, str(self.exprs)) + def _generateDefaultName(self) -> str: + return f"{self.__class__.__name__}:({str(self.exprs)})" 
def streamline(self) -> ParserElement: if self.streamlined: @@ -3714,6 +3846,11 @@ def streamline(self) -> ParserElement: return self def validate(self, validateTrace=None) -> None: + warnings.warn( + "ParserElement.validate() is deprecated, and should not be used to check for left recursion", + DeprecationWarning, + stacklevel=2, + ) tmp = (validateTrace if validateTrace is not None else [])[:] + [self] for e in self.exprs: e.validate(tmp) @@ -3721,6 +3858,7 @@ def validate(self, validateTrace=None) -> None: def copy(self) -> ParserElement: ret = super().copy() + ret = typing.cast(ParseExpression, ret) ret.exprs = [e.copy() for e in self.exprs] return ret @@ -3750,8 +3888,14 @@ def _setResultsName(self, name, listAllMatches=False): return super()._setResultsName(name, listAllMatches) - ignoreWhitespace = ignore_whitespace - leaveWhitespace = leave_whitespace + # Compatibility synonyms + # fmt: off + @replaced_by_pep8(leave_whitespace) + def leaveWhitespace(self): ... + + @replaced_by_pep8(ignore_whitespace) + def ignoreWhitespace(self): ... 
+ # fmt: on class And(ParseExpression): @@ -3777,7 +3921,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.leave_whitespace() - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "-" def __init__( @@ -3789,7 +3933,9 @@ def __init__( for i, expr in enumerate(exprs): if expr is Ellipsis: if i < len(exprs) - 1: - skipto_arg: ParserElement = (Empty() + exprs[i + 1]).exprs[-1] + skipto_arg: ParserElement = typing.cast( + ParseExpression, (Empty() + exprs[i + 1]) + ).exprs[-1] tmp.append(SkipTo(skipto_arg)("_skipped*")) else: raise Exception( @@ -3822,8 +3968,9 @@ def streamline(self) -> ParserElement: and isinstance(e.exprs[-1], _PendingSkip) for e in self.exprs[:-1] ): + deleted_expr_marker = NoMatch() for i, e in enumerate(self.exprs[:-1]): - if e is None: + if e is deleted_expr_marker: continue if ( isinstance(e, ParseExpression) @@ -3831,17 +3978,19 @@ def streamline(self) -> ParserElement: and isinstance(e.exprs[-1], _PendingSkip) ): e.exprs[-1] = e.exprs[-1] + self.exprs[i + 1] - self.exprs[i + 1] = None - self.exprs = [e for e in self.exprs if e is not None] + self.exprs[i + 1] = deleted_expr_marker + self.exprs = [e for e in self.exprs if e is not deleted_expr_marker] super().streamline() # link any IndentedBlocks to the prior expression + prev: ParserElement + cur: ParserElement for prev, cur in zip(self.exprs, self.exprs[1:]): # traverse cur or any first embedded expr of cur looking for an IndentedBlock # (but watch out for recursive grammar) seen = set() - while cur: + while True: if id(cur) in seen: break seen.add(id(cur)) @@ -3853,7 +4002,10 @@ def streamline(self) -> ParserElement: ) break subs = cur.recurse() - cur = next(iter(subs), None) + next_first = next(iter(subs), None) + if next_first is None: + break + cur = typing.cast(ParserElement, next_first) self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) return self @@ -3884,13 +4036,14 @@ def parseImpl(self, instring, loc, 
doActions=True): ) else: loc, exprtokens = e._parse(instring, loc, doActions) - if exprtokens or exprtokens.haskeys(): - resultlist += exprtokens + resultlist += exprtokens return loc, resultlist def __iadd__(self, other): if isinstance(other, str_type): other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + return NotImplemented return self.append(other) # And([self, other]) def _checkRecursion(self, parseElementList): @@ -3900,7 +4053,7 @@ def _checkRecursion(self, parseElementList): if not e.mayReturnEmpty: break - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: inner = " ".join(str(e) for e in self.exprs) # strip off redundant inner {}'s while len(inner) > 1 and inner[0 :: len(inner) - 1] == "{}": @@ -3958,7 +4111,7 @@ def parseImpl(self, instring, loc, doActions=True): loc2 = e.try_parse(instring, loc, raise_fatal=True) except ParseFatalException as pfe: pfe.__traceback__ = None - pfe.parserElement = e + pfe.parser_element = e fatals.append(pfe) maxException = None maxExcLoc = -1 @@ -4016,12 +4169,15 @@ def parseImpl(self, instring, loc, doActions=True): if len(fatals) > 1: fatals.sort(key=lambda e: -e.loc) if fatals[0].loc == fatals[1].loc: - fatals.sort(key=lambda e: (-e.loc, -len(str(e.parserElement)))) + fatals.sort(key=lambda e: (-e.loc, -len(str(e.parser_element)))) max_fatal = fatals[0] raise max_fatal if maxException is not None: - maxException.msg = self.errmsg + # infer from this check that all alternatives failed at the current position + # so emit this collective error message instead of any single error message + if maxExcLoc == loc: + maxException.msg = self.errmsg raise maxException else: raise ParseException( @@ -4031,9 +4187,11 @@ def parseImpl(self, instring, loc, doActions=True): def __ixor__(self, other): if isinstance(other, str_type): other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + return NotImplemented return self.append(other) # Or([self, other]) 
- def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "{" + " ^ ".join(str(e) for e in self.exprs) + "}" def _setResultsName(self, name, listAllMatches=False): @@ -4118,7 +4276,7 @@ def parseImpl(self, instring, loc, doActions=True): ) except ParseFatalException as pfe: pfe.__traceback__ = None - pfe.parserElement = e + pfe.parser_element = e raise except ParseException as err: if err.loc > maxExcLoc: @@ -4132,7 +4290,10 @@ def parseImpl(self, instring, loc, doActions=True): maxExcLoc = len(instring) if maxException is not None: - maxException.msg = self.errmsg + # infer from this check that all alternatives failed at the current position + # so emit this collective error message instead of any individual error message + if maxExcLoc == loc: + maxException.msg = self.errmsg raise maxException else: raise ParseException( @@ -4142,9 +4303,11 @@ def parseImpl(self, instring, loc, doActions=True): def __ior__(self, other): if isinstance(other, str_type): other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + return NotImplemented return self.append(other) # MatchFirst([self, other]) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "{" + " | ".join(str(e) for e in self.exprs) + "}" def _setResultsName(self, name, listAllMatches=False): @@ -4242,6 +4405,13 @@ def __init__(self, exprs: typing.Iterable[ParserElement], savelist: bool = True) self.initExprGroups = True self.saveAsList = True + def __iand__(self, other): + if isinstance(other, str_type): + other = self._literalStringClass(other) + if not isinstance(other, ParserElement): + return NotImplemented + return self.append(other) # Each([self, other]) + def streamline(self) -> ParserElement: super().streamline() if self.exprs: @@ -4296,7 +4466,7 @@ def parseImpl(self, instring, loc, doActions=True): tmpLoc = e.try_parse(instring, tmpLoc, raise_fatal=True) except ParseFatalException as pfe: pfe.__traceback__ = None - 
pfe.parserElement = e + pfe.parser_element = e fatals.append(pfe) failed.append(e) except ParseException: @@ -4315,7 +4485,7 @@ def parseImpl(self, instring, loc, doActions=True): if len(fatals) > 1: fatals.sort(key=lambda e: -e.loc) if fatals[0].loc == fatals[1].loc: - fatals.sort(key=lambda e: (-e.loc, -len(str(e.parserElement)))) + fatals.sort(key=lambda e: (-e.loc, -len(str(e.parser_element)))) max_fatal = fatals[0] raise max_fatal @@ -4324,7 +4494,7 @@ def parseImpl(self, instring, loc, doActions=True): raise ParseException( instring, loc, - "Missing one or more required elements ({})".format(missing), + f"Missing one or more required elements ({missing})", ) # add any unmatched Opts, in case they have default values defined @@ -4337,7 +4507,7 @@ def parseImpl(self, instring, loc, doActions=True): return loc, total_results - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "{" + " & ".join(str(e) for e in self.exprs) + "}" @@ -4349,12 +4519,14 @@ class ParseElementEnhance(ParserElement): def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): super().__init__(savelist) if isinstance(expr, str_type): + expr_str = typing.cast(str, expr) if issubclass(self._literalStringClass, Token): - expr = self._literalStringClass(expr) + expr = self._literalStringClass(expr_str) # type: ignore[call-arg] elif issubclass(type(self), self._literalStringClass): - expr = Literal(expr) + expr = Literal(expr_str) else: - expr = self._literalStringClass(Literal(expr)) + expr = self._literalStringClass(Literal(expr_str)) # type: ignore[assignment, call-arg] + expr = typing.cast(ParserElement, expr) self.expr = expr if expr is not None: self.mayIndexError = expr.mayIndexError @@ -4367,12 +4539,16 @@ def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): self.callPreparse = expr.callPreparse self.ignoreExprs.extend(expr.ignoreExprs) - def recurse(self) -> Sequence[ParserElement]: + def recurse(self) -> 
List[ParserElement]: return [self.expr] if self.expr is not None else [] def parseImpl(self, instring, loc, doActions=True): if self.expr is not None: - return self.expr._parse(instring, loc, doActions, callPreParse=False) + try: + return self.expr._parse(instring, loc, doActions, callPreParse=False) + except ParseBaseException as pbe: + pbe.msg = self.errmsg + raise else: raise ParseException(instring, loc, "No expression defined", self) @@ -4380,8 +4556,8 @@ def leave_whitespace(self, recursive: bool = True) -> ParserElement: super().leave_whitespace(recursive) if recursive: - self.expr = self.expr.copy() if self.expr is not None: + self.expr = self.expr.copy() self.expr.leave_whitespace(recursive) return self @@ -4389,8 +4565,8 @@ def ignore_whitespace(self, recursive: bool = True) -> ParserElement: super().ignore_whitespace(recursive) if recursive: - self.expr = self.expr.copy() if self.expr is not None: + self.expr = self.expr.copy() self.expr.ignore_whitespace(recursive) return self @@ -4420,6 +4596,11 @@ def _checkRecursion(self, parseElementList): self.expr._checkRecursion(subRecCheckList) def validate(self, validateTrace=None) -> None: + warnings.warn( + "ParserElement.validate() is deprecated, and should not be used to check for left recursion", + DeprecationWarning, + stacklevel=2, + ) if validateTrace is None: validateTrace = [] tmp = validateTrace[:] + [self] @@ -4427,11 +4608,17 @@ def validate(self, validateTrace=None) -> None: self.expr.validate(tmp) self._checkRecursion([]) - def _generateDefaultName(self): - return "{}:({})".format(self.__class__.__name__, str(self.expr)) + def _generateDefaultName(self) -> str: + return f"{self.__class__.__name__}:({str(self.expr)})" + + # Compatibility synonyms + # fmt: off + @replaced_by_pep8(leave_whitespace) + def leaveWhitespace(self): ... - ignoreWhitespace = ignore_whitespace - leaveWhitespace = leave_whitespace + @replaced_by_pep8(ignore_whitespace) + def ignoreWhitespace(self): ... 
+ # fmt: on class IndentedBlock(ParseElementEnhance): @@ -4443,13 +4630,13 @@ class IndentedBlock(ParseElementEnhance): class _Indent(Empty): def __init__(self, ref_col: int): super().__init__() - self.errmsg = "expected indent at column {}".format(ref_col) + self.errmsg = f"expected indent at column {ref_col}" self.add_condition(lambda s, l, t: col(l, s) == ref_col) class _IndentGreater(Empty): def __init__(self, ref_col: int): super().__init__() - self.errmsg = "expected indent at column greater than {}".format(ref_col) + self.errmsg = f"expected indent at column greater than {ref_col}" self.add_condition(lambda s, l, t: col(l, s) > ref_col) def __init__( @@ -4469,7 +4656,7 @@ def parseImpl(self, instring, loc, doActions=True): # see if self.expr matches at the current location - if not it will raise an exception # and no further work is necessary - self.expr.try_parse(instring, anchor_loc, doActions) + self.expr.try_parse(instring, anchor_loc, do_actions=doActions) indent_col = col(anchor_loc, instring) peer_detect_expr = self._Indent(indent_col) @@ -4532,7 +4719,7 @@ class AtLineStart(ParseElementEnhance): B AAA and definitely not this one ''' - for t in (AtLineStart('AAA') + restOfLine).search_string(test): + for t in (AtLineStart('AAA') + rest_of_line).search_string(test): print(t) prints:: @@ -4598,9 +4785,9 @@ class PrecededBy(ParseElementEnhance): Parameters: - - expr - expression that must match prior to the current parse + - ``expr`` - expression that must match prior to the current parse location - - retreat - (default= ``None``) - (int) maximum number of characters + - ``retreat`` - (default= ``None``) - (int) maximum number of characters to lookbehind prior to the current parse location If the lookbehind expression is a string, :class:`Literal`, @@ -4627,6 +4814,7 @@ def __init__( self.mayIndexError = False self.exact = False if isinstance(expr, str_type): + expr = typing.cast(str, expr) retreat = len(expr) self.exact = True elif isinstance(expr, 
(Literal, Keyword)): @@ -4746,18 +4934,18 @@ def __init__(self, expr: Union[ParserElement, str]): self.errmsg = "Found unwanted token, " + str(self.expr) def parseImpl(self, instring, loc, doActions=True): - if self.expr.can_parse_next(instring, loc): + if self.expr.can_parse_next(instring, loc, do_actions=doActions): raise ParseException(instring, loc, self.errmsg, self) return loc, [] - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "~{" + str(self.expr) + "}" class _MultipleMatch(ParseElementEnhance): def __init__( self, - expr: ParserElement, + expr: Union[str, ParserElement], stop_on: typing.Optional[Union[ParserElement, str]] = None, *, stopOn: typing.Optional[Union[ParserElement, str]] = None, @@ -4781,7 +4969,7 @@ def parseImpl(self, instring, loc, doActions=True): self_skip_ignorables = self._skipIgnorables check_ender = self.not_ender is not None if check_ender: - try_not_ender = self.not_ender.tryParse + try_not_ender = self.not_ender.try_parse # must be at least one (but first see if we are the stopOn sentinel; # if so, fail) @@ -4798,8 +4986,7 @@ def parseImpl(self, instring, loc, doActions=True): else: preloc = loc loc, tmptokens = self_expr_parse(instring, preloc, doActions) - if tmptokens or tmptokens.haskeys(): - tokens += tmptokens + tokens += tmptokens except (ParseException, IndexError): pass @@ -4837,10 +5024,11 @@ class OneOrMore(_MultipleMatch): Repetition of one or more of the given expression. 
Parameters: - - expr - expression that must match one or more times - - stop_on - (default= ``None``) - expression for a terminating sentinel - (only required if the sentinel would ordinarily match the repetition - expression) + + - ``expr`` - expression that must match one or more times + - ``stop_on`` - (default= ``None``) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) Example:: @@ -4859,7 +5047,7 @@ class OneOrMore(_MultipleMatch): (attr_expr * (1,)).parse_string(text).pprint() """ - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "{" + str(self.expr) + "}..." @@ -4868,6 +5056,7 @@ class ZeroOrMore(_MultipleMatch): Optional repetition of zero or more of the given expression. Parameters: + - ``expr`` - expression that must match zero or more times - ``stop_on`` - expression for a terminating sentinel (only required if the sentinel would ordinarily match the repetition @@ -4878,7 +5067,7 @@ class ZeroOrMore(_MultipleMatch): def __init__( self, - expr: ParserElement, + expr: Union[str, ParserElement], stop_on: typing.Optional[Union[ParserElement, str]] = None, *, stopOn: typing.Optional[Union[ParserElement, str]] = None, @@ -4892,10 +5081,75 @@ def parseImpl(self, instring, loc, doActions=True): except (ParseException, IndexError): return loc, ParseResults([], name=self.resultsName) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: return "[" + str(self.expr) + "]..." +class DelimitedList(ParseElementEnhance): + def __init__( + self, + expr: Union[str, ParserElement], + delim: Union[str, ParserElement] = ",", + combine: bool = False, + min: typing.Optional[int] = None, + max: typing.Optional[int] = None, + *, + allow_trailing_delim: bool = False, + ): + """Helper to define a delimited list of expressions - the delimiter + defaults to ','. 
By default, the list elements and delimiters can + have intervening whitespace, and comments, but this can be + overridden by passing ``combine=True`` in the constructor. If + ``combine`` is set to ``True``, the matching tokens are + returned as a single token string, with the delimiters included; + otherwise, the matching tokens are returned as a list of tokens, + with the delimiters suppressed. + + If ``allow_trailing_delim`` is set to True, then the list may end with + a delimiter. + + Example:: + + DelimitedList(Word(alphas)).parse_string("aa,bb,cc") # -> ['aa', 'bb', 'cc'] + DelimitedList(Word(hexnums), delim=':', combine=True).parse_string("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + if isinstance(expr, str_type): + expr = ParserElement._literalStringClass(expr) + expr = typing.cast(ParserElement, expr) + + if min is not None: + if min < 1: + raise ValueError("min must be greater than 0") + if max is not None: + if min is not None and max < min: + raise ValueError("max must be greater than, or equal to min") + + self.content = expr + self.raw_delim = str(delim) + self.delim = delim + self.combine = combine + if not combine: + self.delim = Suppress(delim) + self.min = min or 1 + self.max = max + self.allow_trailing_delim = allow_trailing_delim + + delim_list_expr = self.content + (self.delim + self.content) * ( + self.min - 1, + None if self.max is None else self.max - 1, + ) + if self.allow_trailing_delim: + delim_list_expr += Opt(self.delim) + + if self.combine: + delim_list_expr = Combine(delim_list_expr) + + super().__init__(delim_list_expr, savelist=True) + + def _generateDefaultName(self) -> str: + return "{0} [{1} {0}]...".format(self.content.streamline(), self.raw_delim) + + class _NullToken: def __bool__(self): return False @@ -4909,6 +5163,7 @@ class Opt(ParseElementEnhance): Optional matching of the given expression. 
Parameters: + - ``expr`` - expression that must match zero or more times - ``default`` (optional) - value to be returned if the optional expression is not found. @@ -4969,7 +5224,7 @@ def parseImpl(self, instring, loc, doActions=True): tokens = [] return loc, tokens - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: inner = str(self.expr) # strip off redundant inner {}'s while len(inner) > 1 and inner[0 :: len(inner) - 1] == "{}": @@ -4986,6 +5241,7 @@ class SkipTo(ParseElementEnhance): expression is found. Parameters: + - ``expr`` - target expression marking the end of the data to be skipped - ``include`` - if ``True``, the target expression is also parsed (the skipped text and target expression are returned as a 2-element @@ -5045,14 +5301,15 @@ def __init__( self, other: Union[ParserElement, str], include: bool = False, - ignore: bool = None, + ignore: typing.Optional[Union[ParserElement, str]] = None, fail_on: typing.Optional[Union[ParserElement, str]] = None, *, - failOn: Union[ParserElement, str] = None, + failOn: typing.Optional[Union[ParserElement, str]] = None, ): super().__init__(other) failOn = failOn or fail_on - self.ignoreExpr = ignore + if ignore is not None: + self.ignore(ignore) self.mayReturnEmpty = True self.mayIndexError = False self.includeMatch = include @@ -5070,9 +5327,7 @@ def parseImpl(self, instring, loc, doActions=True): self_failOn_canParseNext = ( self.failOn.canParseNext if self.failOn is not None else None ) - self_ignoreExpr_tryParse = ( - self.ignoreExpr.tryParse if self.ignoreExpr is not None else None - ) + self_preParse = self.preParse if self.callPreparse else None tmploc = loc while tmploc <= instrlen: @@ -5081,13 +5336,9 @@ def parseImpl(self, instring, loc, doActions=True): if self_failOn_canParseNext(instring, tmploc): break - if self_ignoreExpr_tryParse is not None: - # advance past ignore expressions - while 1: - try: - tmploc = self_ignoreExpr_tryParse(instring, tmploc) - except 
ParseBaseException: - break + if self_preParse is not None: + # skip grammar-ignored expressions + tmploc = self_preParse(instring, tmploc) try: self_expr_parse(instring, tmploc, doActions=False, callPreParse=False) @@ -5145,15 +5396,20 @@ class Forward(ParseElementEnhance): def __init__(self, other: typing.Optional[Union[ParserElement, str]] = None): self.caller_frame = traceback.extract_stack(limit=2)[0] - super().__init__(other, savelist=False) + super().__init__(other, savelist=False) # type: ignore[arg-type] self.lshift_line = None - def __lshift__(self, other): + def __lshift__(self, other) -> "Forward": if hasattr(self, "caller_frame"): del self.caller_frame if isinstance(other, str_type): other = self._literalStringClass(other) + + if not isinstance(other, ParserElement): + return NotImplemented + self.expr = other + self.streamlined = other.streamlined self.mayIndexError = self.expr.mayIndexError self.mayReturnEmpty = self.expr.mayReturnEmpty self.set_whitespace_chars( @@ -5162,13 +5418,16 @@ def __lshift__(self, other): self.skipWhitespace = self.expr.skipWhitespace self.saveAsList = self.expr.saveAsList self.ignoreExprs.extend(self.expr.ignoreExprs) - self.lshift_line = traceback.extract_stack(limit=2)[-2] + self.lshift_line = traceback.extract_stack(limit=2)[-2] # type: ignore[assignment] return self - def __ilshift__(self, other): + def __ilshift__(self, other) -> "Forward": + if not isinstance(other, ParserElement): + return NotImplemented + return self << other - def __or__(self, other): + def __or__(self, other) -> "ParserElement": caller_line = traceback.extract_stack(limit=2)[-2] if ( __diag__.warn_on_match_first_with_lshift_operator @@ -5205,12 +5464,12 @@ def parseImpl(self, instring, loc, doActions=True): not in self.suppress_warnings_ ): # walk stack until parse_string, scan_string, search_string, or transform_string is found - parse_fns = [ + parse_fns = ( "parse_string", "scan_string", "search_string", "transform_string", - ] + ) tb = 
traceback.extract_stack(limit=200) for i, frm in enumerate(reversed(tb), start=1): if frm.name in parse_fns: @@ -5308,6 +5567,11 @@ def streamline(self) -> ParserElement: return self def validate(self, validateTrace=None) -> None: + warnings.warn( + "ParserElement.validate() is deprecated, and should not be used to check for left recursion", + DeprecationWarning, + stacklevel=2, + ) if validateTrace is None: validateTrace = [] @@ -5317,7 +5581,7 @@ def validate(self, validateTrace=None) -> None: self.expr.validate(tmp) self._checkRecursion([]) - def _generateDefaultName(self): + def _generateDefaultName(self) -> str: # Avoid infinite recursion by setting a temporary _defaultName self._defaultName = ": ..." @@ -5356,8 +5620,14 @@ def _setResultsName(self, name, list_all_matches=False): return super()._setResultsName(name, list_all_matches) - ignoreWhitespace = ignore_whitespace - leaveWhitespace = leave_whitespace + # Compatibility synonyms + # fmt: off + @replaced_by_pep8(leave_whitespace) + def leaveWhitespace(self): ... + + @replaced_by_pep8(ignore_whitespace) + def ignoreWhitespace(self): ... + # fmt: on class TokenConverter(ParseElementEnhance): @@ -5439,11 +5709,11 @@ class Group(TokenConverter): ident = Word(alphas) num = Word(nums) term = ident | num - func = ident + Opt(delimited_list(term)) + func = ident + Opt(DelimitedList(term)) print(func.parse_string("fn a, b, 100")) # -> ['fn', 'a', 'b', '100'] - func = ident + Group(Opt(delimited_list(term))) + func = ident + Group(Opt(DelimitedList(term))) print(func.parse_string("fn a, b, 100")) # -> ['fn', ['a', 'b', '100']] """ @@ -5579,7 +5849,7 @@ class Suppress(TokenConverter): ['a', 'b', 'c', 'd'] ['START', 'relevant text ', 'END'] - (See also :class:`delimited_list`.) + (See also :class:`DelimitedList`.) 
""" def __init__(self, expr: Union[ParserElement, str], savelist: bool = False): @@ -5638,15 +5908,13 @@ def z(*paArgs): s, l, t = paArgs[-3:] if len(paArgs) > 3: thisFunc = paArgs[0].__class__.__name__ + "." + thisFunc - sys.stderr.write( - ">>entering {}(line: {!r}, {}, {!r})\n".format(thisFunc, line(l, s), l, t) - ) + sys.stderr.write(f">>entering {thisFunc}(line: {line(l, s)!r}, {l}, {t!r})\n") try: ret = f(*paArgs) except Exception as exc: - sys.stderr.write("< str: ) try: return "".join(_expanded(part) for part in _reBracketExpr.parse_string(s).body) - except Exception: + except Exception as e: return "" @@ -5769,7 +6037,11 @@ def autoname_elements() -> None: Utility to simplify mass-naming of parser elements, for generating railroad diagram with named subdiagrams. """ - for name, var in sys._getframe().f_back.f_locals.items(): + calling_frame = sys._getframe().f_back + if calling_frame is None: + return + calling_frame = typing.cast(types.FrameType, calling_frame) + for name, var in calling_frame.f_locals.items(): if isinstance(var, ParserElement) and not var.customName: var.set_name(name) @@ -5783,9 +6055,28 @@ def autoname_elements() -> None: ).set_name("string enclosed in single quotes") quoted_string = Combine( - Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"' - | Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'" -).set_name("quotedString using single or double quotes") + (Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').set_name( + "double quoted string" + ) + | (Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").set_name( + "single quoted string" + ) +).set_name("quoted string using single or double quotes") + +python_quoted_string = Combine( + (Regex(r'"""(?:[^"\\]|""(?!")|"(?!"")|\\.)*', flags=re.MULTILINE) + '"""').set_name( + "multiline double quoted string" + ) + ^ ( + Regex(r"'''(?:[^'\\]|''(?!')|'(?!'')|\\.)*", flags=re.MULTILINE) + "'''" + ).set_name("multiline 
single quoted string") + ^ (Regex(r'"(?:[^"\n\r\\]|(?:\\")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*') + '"').set_name( + "double quoted string" + ) + ^ (Regex(r"'(?:[^'\n\r\\]|(?:\\')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*") + "'").set_name( + "single quoted string" + ) +).set_name("Python quoted string") unicode_string = Combine("u" + quoted_string.copy()).set_name("unicode string literal") @@ -5800,9 +6091,7 @@ def autoname_elements() -> None: ] # backward compatibility names -tokenMap = token_map -conditionAsParseAction = condition_as_parse_action -nullDebugAction = null_debug_action +# fmt: off sglQuotedString = sgl_quoted_string dblQuotedString = dbl_quoted_string quotedString = quoted_string @@ -5811,4 +6100,16 @@ def autoname_elements() -> None: lineEnd = line_end stringStart = string_start stringEnd = string_end -traceParseAction = trace_parse_action + +@replaced_by_pep8(null_debug_action) +def nullDebugAction(): ... + +@replaced_by_pep8(trace_parse_action) +def traceParseAction(): ... + +@replaced_by_pep8(condition_as_parse_action) +def conditionAsParseAction(): ... + +@replaced_by_pep8(token_map) +def tokenMap(): ... +# fmt: on diff --git a/src/pip/_vendor/pyparsing/diagram/__init__.py b/src/pip/_vendor/pyparsing/diagram/__init__.py index 1506d66bf4e..83f9018ee93 100644 --- a/src/pip/_vendor/pyparsing/diagram/__init__.py +++ b/src/pip/_vendor/pyparsing/diagram/__init__.py @@ -1,3 +1,4 @@ +# mypy: ignore-errors import railroad from pip._vendor import pyparsing import typing @@ -17,11 +18,13 @@ jinja2_template_source = """\ +{% if not embed %} +{% endif %} {% if not head %} - +{% endif %} {{ body | safe }} {% for diagram in diagrams %}
@@ -41,8 +46,10 @@
{% endfor %} +{% if not embed %} +{% endif %} """ template = Template(jinja2_template_source) @@ -127,7 +134,7 @@ def __call__(self) -> T: return self.func(*args, **kwargs) -def railroad_to_html(diagrams: List[NamedDiagram], **kwargs) -> str: +def railroad_to_html(diagrams: List[NamedDiagram], embed=False, **kwargs) -> str: """ Given a list of NamedDiagram, produce a single HTML string that visualises those diagrams :params kwargs: kwargs to be passed in to the template @@ -137,13 +144,17 @@ def railroad_to_html(diagrams: List[NamedDiagram], **kwargs) -> str: if diagram.diagram is None: continue io = StringIO() - diagram.diagram.writeSvg(io.write) + try: + css = kwargs.get('css') + diagram.diagram.writeStandalone(io.write, css=css) + except AttributeError: + diagram.diagram.writeSvg(io.write) title = diagram.name if diagram.index == 0: title += " (root)" data.append({"title": title, "text": "", "svg": io.getvalue()}) - return template.render(diagrams=data, **kwargs) + return template.render(diagrams=data, embed=embed, **kwargs) def resolve_partial(partial: "EditablePartial[T]") -> T: @@ -398,7 +409,6 @@ def _inner( show_results_names: bool = False, show_groups: bool = False, ) -> typing.Optional[EditablePartial]: - ret = fn( element, parent, @@ -555,9 +565,11 @@ def _to_diagram_element( else: ret = EditablePartial.from_call(railroad.Group, label="", item="") elif isinstance(element, pyparsing.TokenConverter): - ret = EditablePartial.from_call( - AnnotatedItem, label=type(element).__name__.lower(), item="" - ) + label = type(element).__name__.lower() + if label == "tokenconverter": + ret = EditablePartial.from_call(railroad.Sequence, items=[]) + else: + ret = EditablePartial.from_call(AnnotatedItem, label=label, item="") elif isinstance(element, pyparsing.Opt): ret = EditablePartial.from_call(railroad.Optional, item="") elif isinstance(element, pyparsing.OneOrMore): @@ -571,10 +583,12 @@ def _to_diagram_element( elif isinstance(element, pyparsing.Empty) and not 
element.customName: # Skip unnamed "Empty" elements ret = None - elif len(exprs) > 1: + elif isinstance(element, pyparsing.ParseElementEnhance): ret = EditablePartial.from_call(railroad.Sequence, items=[]) elif len(exprs) > 0 and not element_results_name: ret = EditablePartial.from_call(railroad.Group, item="", label=name) + elif len(exprs) > 0: + ret = EditablePartial.from_call(railroad.Sequence, items=[]) else: terminal = EditablePartial.from_call(railroad.Terminal, element.defaultName) ret = terminal diff --git a/src/pip/_vendor/pyparsing/exceptions.py b/src/pip/_vendor/pyparsing/exceptions.py index a38447bb05b..12219f124ae 100644 --- a/src/pip/_vendor/pyparsing/exceptions.py +++ b/src/pip/_vendor/pyparsing/exceptions.py @@ -4,7 +4,13 @@ import sys import typing -from .util import col, line, lineno, _collapse_string_to_ranges +from .util import ( + col, + line, + lineno, + _collapse_string_to_ranges, + replaced_by_pep8, +) from .unicode import pyparsing_unicode as ppu @@ -19,6 +25,20 @@ class ExceptionWordUnicode(ppu.Latin1, ppu.LatinA, ppu.LatinB, ppu.Greek, ppu.Cy class ParseBaseException(Exception): """base exception class for all parsing runtime exceptions""" + loc: int + msg: str + pstr: str + parser_element: typing.Any # "ParserElement" + args: typing.Tuple[str, int, typing.Optional[str]] + + __slots__ = ( + "loc", + "msg", + "pstr", + "parser_element", + "args", + ) + # Performance tuning: we construct a *lot* of these, so keep this # constructor as small and fast as possible def __init__( @@ -35,7 +55,7 @@ def __init__( else: self.msg = msg self.pstr = pstr - self.parser_element = self.parserElement = elem + self.parser_element = elem self.args = (pstr, loc, msg) @staticmethod @@ -64,7 +84,7 @@ def explain_exception(exc, depth=16): if isinstance(exc, ParseBaseException): ret.append(exc.line) ret.append(" " * (exc.column - 1) + "^") - ret.append("{}: {}".format(type(exc).__name__, exc)) + ret.append(f"{type(exc).__name__}: {exc}") if depth > 0: callers = 
inspect.getinnerframes(exc.__traceback__, context=depth) @@ -74,7 +94,9 @@ def explain_exception(exc, depth=16): f_self = frm.f_locals.get("self", None) if isinstance(f_self, ParserElement): - if frm.f_code.co_name not in ("parseImpl", "_parseNoCache"): + if not frm.f_code.co_name.startswith( + ("parseImpl", "_parseNoCache") + ): continue if id(f_self) in seen: continue @@ -82,21 +104,19 @@ def explain_exception(exc, depth=16): self_type = type(f_self) ret.append( - "{}.{} - {}".format( - self_type.__module__, self_type.__name__, f_self - ) + f"{self_type.__module__}.{self_type.__name__} - {f_self}" ) elif f_self is not None: self_type = type(f_self) - ret.append("{}.{}".format(self_type.__module__, self_type.__name__)) + ret.append(f"{self_type.__module__}.{self_type.__name__}") else: code = frm.f_code if code.co_name in ("wrapper", ""): continue - ret.append("{}".format(code.co_name)) + ret.append(code.co_name) depth -= 1 if not depth: @@ -110,7 +130,7 @@ def _from_exception(cls, pe): internal factory method to simplify creating one type of ParseException from another - avoids having __init__ signature conflicts among subclasses """ - return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) + return cls(pe.pstr, pe.loc, pe.msg, pe.parser_element) @property def line(self) -> str: @@ -140,6 +160,15 @@ def column(self) -> int: """ return col(self.loc, self.pstr) + # pre-PEP8 compatibility + @property + def parserElement(self): + return self.parser_element + + @parserElement.setter + def parserElement(self, elem): + self.parser_element = elem + def __str__(self) -> str: if self.pstr: if self.loc >= len(self.pstr): @@ -154,14 +183,14 @@ def __str__(self) -> str: foundstr = (", found %r" % found).replace(r"\\", "\\") else: foundstr = "" - return "{}{} (at char {}), (line:{}, col:{})".format( - self.msg, foundstr, self.loc, self.lineno, self.column - ) + return f"{self.msg}{foundstr} (at char {self.loc}), (line:{self.lineno}, col:{self.column})" def __repr__(self): return 
str(self) - def mark_input_line(self, marker_string: str = None, *, markerString=">!<") -> str: + def mark_input_line( + self, marker_string: typing.Optional[str] = None, *, markerString: str = ">!<" + ) -> str: """ Extracts the exception line from the input string, and marks the location of the exception with a special symbol. @@ -214,7 +243,10 @@ def explain(self, depth=16) -> str: """ return self.explain_exception(self, depth) - markInputline = mark_input_line + # fmt: off + @replaced_by_pep8(mark_input_line) + def markInputline(self): ... + # fmt: on class ParseException(ParseBaseException): @@ -264,4 +296,4 @@ def __init__(self, parseElementList): self.parseElementTrace = parseElementList def __str__(self) -> str: - return "RecursiveGrammarException: {}".format(self.parseElementTrace) + return f"RecursiveGrammarException: {self.parseElementTrace}" diff --git a/src/pip/_vendor/pyparsing/helpers.py b/src/pip/_vendor/pyparsing/helpers.py index 9588b3b7801..018f0d6ac86 100644 --- a/src/pip/_vendor/pyparsing/helpers.py +++ b/src/pip/_vendor/pyparsing/helpers.py @@ -1,73 +1,22 @@ # helpers.py import html.entities import re +import sys import typing from . import __diag__ from .core import * -from .util import _bslash, _flatten, _escape_regex_range_chars +from .util import ( + _bslash, + _flatten, + _escape_regex_range_chars, + replaced_by_pep8, +) # # global helpers # -def delimited_list( - expr: Union[str, ParserElement], - delim: Union[str, ParserElement] = ",", - combine: bool = False, - min: typing.Optional[int] = None, - max: typing.Optional[int] = None, - *, - allow_trailing_delim: bool = False, -) -> ParserElement: - """Helper to define a delimited list of expressions - the delimiter - defaults to ','. By default, the list elements and delimiters can - have intervening whitespace, and comments, but this can be - overridden by passing ``combine=True`` in the constructor. 
If - ``combine`` is set to ``True``, the matching tokens are - returned as a single token string, with the delimiters included; - otherwise, the matching tokens are returned as a list of tokens, - with the delimiters suppressed. - - If ``allow_trailing_delim`` is set to True, then the list may end with - a delimiter. - - Example:: - - delimited_list(Word(alphas)).parse_string("aa,bb,cc") # -> ['aa', 'bb', 'cc'] - delimited_list(Word(hexnums), delim=':', combine=True).parse_string("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] - """ - if isinstance(expr, str_type): - expr = ParserElement._literalStringClass(expr) - - dlName = "{expr} [{delim} {expr}]...{end}".format( - expr=str(expr.copy().streamline()), - delim=str(delim), - end=" [{}]".format(str(delim)) if allow_trailing_delim else "", - ) - - if not combine: - delim = Suppress(delim) - - if min is not None: - if min < 1: - raise ValueError("min must be greater than 0") - min -= 1 - if max is not None: - if min is not None and max <= min: - raise ValueError("max must be greater than, or equal to min") - max -= 1 - delimited_list_expr = expr + (delim + expr)[min, max] - - if allow_trailing_delim: - delimited_list_expr += Opt(delim) - - if combine: - return Combine(delimited_list_expr).set_name(dlName) - else: - return delimited_list_expr.set_name(dlName) - - def counted_array( expr: ParserElement, int_expr: typing.Optional[ParserElement] = None, @@ -187,7 +136,7 @@ def must_match_these_tokens(s, l, t): theseTokens = _flatten(t.as_list()) if theseTokens != matchTokens: raise ParseException( - s, l, "Expected {}, found{}".format(matchTokens, theseTokens) + s, l, f"Expected {matchTokens}, found{theseTokens}" ) rep.set_parse_action(must_match_these_tokens, callDuringTry=True) @@ -218,7 +167,7 @@ def one_of( - ``caseless`` - treat all literals as caseless - (default= ``False``) - ``use_regex`` - as an optimization, will generate a :class:`Regex` object; otherwise, will generate - a :class:`MatchFirst` object (if 
``caseless=True`` or ``asKeyword=True``, or if + a :class:`MatchFirst` object (if ``caseless=True`` or ``as_keyword=True``, or if creating a :class:`Regex` raises an exception) - (default= ``True``) - ``as_keyword`` - enforce :class:`Keyword`-style matching on the generated expressions - (default= ``False``) @@ -262,6 +211,7 @@ def one_of( symbols: List[str] = [] if isinstance(strs, str_type): + strs = typing.cast(str, strs) symbols = strs.split() elif isinstance(strs, Iterable): symbols = list(strs) @@ -293,15 +243,13 @@ def one_of( try: if all(len(sym) == 1 for sym in symbols): # symbols are just single characters, create range regex pattern - patt = "[{}]".format( - "".join(_escape_regex_range_chars(sym) for sym in symbols) - ) + patt = f"[{''.join(_escape_regex_range_chars(sym) for sym in symbols)}]" else: patt = "|".join(re.escape(sym) for sym in symbols) # wrap with \b word break markers if defining as keywords if asKeyword: - patt = r"\b(?:{})\b".format(patt) + patt = rf"\b(?:{patt})\b" ret = Regex(patt, flags=re_flags).set_name(" | ".join(symbols)) @@ -371,7 +319,7 @@ def original_text_for( expression. Useful to restore the parsed fields of an HTML start tag into the raw tag text itself, or to revert separate tokens with intervening whitespace back to the original matching input text. By - default, returns astring containing the original parsed text. + default, returns a string containing the original parsed text. If the optional ``as_string`` argument is passed as ``False``, then the return value is @@ -390,7 +338,7 @@ def original_text_for( src = "this is test bold text normal text " for tag in ("b", "i"): opener, closer = make_html_tags(tag) - patt = original_text_for(opener + SkipTo(closer) + closer) + patt = original_text_for(opener + ... 
+ closer) print(patt.search_string(src)[0]) prints:: @@ -426,7 +374,7 @@ def ungroup(expr: ParserElement) -> ParserElement: def locatedExpr(expr: ParserElement) -> ParserElement: """ - (DEPRECATED - future code should use the Located class) + (DEPRECATED - future code should use the :class:`Located` class) Helper to decorate a returned token with its starting and ending locations in the input string. @@ -437,12 +385,12 @@ def locatedExpr(expr: ParserElement) -> ParserElement: - ``value`` - the actual parsed results Be careful if the input text contains ```` characters, you - may want to call :class:`ParserElement.parseWithTabs` + may want to call :class:`ParserElement.parse_with_tabs` Example:: wd = Word(alphas) - for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + for match in locatedExpr(wd).search_string("ljsdf123lksdjjf123lkkjj1222"): print(match) prints:: @@ -471,6 +419,7 @@ def nested_expr( closing delimiters (``"("`` and ``")"`` are the default). Parameters: + - ``opener`` - opening character for a nested list (default= ``"("``); can also be a pyparsing expression - ``closer`` - closing character for a nested list @@ -507,7 +456,7 @@ def nested_expr( c_function = (decl_data_type("type") + ident("name") - + LPAR + Opt(delimited_list(arg), [])("args") + RPAR + + LPAR + Opt(DelimitedList(arg), [])("args") + RPAR + code_body("body")) c_function.ignore(c_style_comment) @@ -539,6 +488,8 @@ def nested_expr( raise ValueError("opening and closing strings cannot be the same") if content is None: if isinstance(opener, str_type) and isinstance(closer, str_type): + opener = typing.cast(str, opener) + closer = typing.cast(str, closer) if len(opener) == 1 and len(closer) == 1: if ignoreExpr is not None: content = Combine( @@ -695,12 +646,15 @@ def make_xml_tags( ) -def replace_html_entity(t): +def replace_html_entity(s, l, t): """Helper parser action to replace common HTML entities with their special characters""" return _htmlEntityMap.get(t.entity) 
class OpAssoc(Enum): + """Enumeration of operator associativity + - used in constructing InfixNotationOperatorSpec for :class:`infix_notation`""" + LEFT = 1 RIGHT = 2 @@ -742,6 +696,7 @@ def infix_notation( improve your parser performance. Parameters: + - ``base_expr`` - expression representing the most basic operand to be used in the expression - ``op_list`` - list of tuples, one for each operator precedence level @@ -764,11 +719,11 @@ def infix_notation( ``set_parse_action(*fn)`` (:class:`ParserElement.set_parse_action`) - ``lpar`` - expression for matching left-parentheses; if passed as a - str, then will be parsed as Suppress(lpar). If lpar is passed as + str, then will be parsed as ``Suppress(lpar)``. If lpar is passed as an expression (such as ``Literal('(')``), then it will be kept in the parsed results, and grouped with them. (default= ``Suppress('(')``) - ``rpar`` - expression for matching right-parentheses; if passed as a - str, then will be parsed as Suppress(rpar). If rpar is passed as + str, then will be parsed as ``Suppress(rpar)``. If rpar is passed as an expression (such as ``Literal(')')``), then it will be kept in the parsed results, and grouped with them. 
(default= ``Suppress(')')``) @@ -800,9 +755,13 @@ def infix_notation( (5+3)*6 [[[5, '+', 3], '*', 6]] + (5+x)*y + [[[5, '+', 'x'], '*', 'y']] + -2--11 [[['-', 2], '-', ['-', 11]]] """ + # captive version of FollowedBy that does not do parse actions or capture results names class _FB(FollowedBy): def parseImpl(self, instring, loc, doActions=True): @@ -823,19 +782,25 @@ def parseImpl(self, instring, loc, doActions=True): else: lastExpr = base_expr | (lpar + ret + rpar) + arity: int + rightLeftAssoc: opAssoc + pa: typing.Optional[ParseAction] + opExpr1: ParserElement + opExpr2: ParserElement for i, operDef in enumerate(op_list): - opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4] + opExpr, arity, rightLeftAssoc, pa = (operDef + (None,))[:4] # type: ignore[assignment] if isinstance(opExpr, str_type): opExpr = ParserElement._literalStringClass(opExpr) + opExpr = typing.cast(ParserElement, opExpr) if arity == 3: if not isinstance(opExpr, (tuple, list)) or len(opExpr) != 2: raise ValueError( "if numterms=3, opExpr must be a tuple or list of two expressions" ) opExpr1, opExpr2 = opExpr - term_name = "{}{} term".format(opExpr1, opExpr2) + term_name = f"{opExpr1}{opExpr2} term" else: - term_name = "{} term".format(opExpr) + term_name = f"{opExpr} term" if not 1 <= arity <= 3: raise ValueError("operator must be unary (1), binary (2), or ternary (3)") @@ -843,7 +808,8 @@ def parseImpl(self, instring, loc, doActions=True): if rightLeftAssoc not in (OpAssoc.LEFT, OpAssoc.RIGHT): raise ValueError("operator must indicate right or left associativity") - thisExpr: Forward = Forward().set_name(term_name) + thisExpr: ParserElement = Forward().set_name(term_name) + thisExpr = typing.cast(Forward, thisExpr) if rightLeftAssoc is OpAssoc.LEFT: if arity == 1: matchExpr = _FB(lastExpr + opExpr) + Group(lastExpr + opExpr[1, ...]) @@ -890,7 +856,7 @@ def parseImpl(self, instring, loc, doActions=True): def indentedBlock(blockStatementExpr, indentStack, indent=True, backup_stacks=[]): 
""" - (DEPRECATED - use IndentedBlock class instead) + (DEPRECATED - use :class:`IndentedBlock` class instead) Helper method for defining space-delimited indentation blocks, such as those used to define block statements in Python source code. @@ -1063,22 +1029,28 @@ def checkUnindent(s, l, t): ] +# compatibility function, superseded by DelimitedList class +def delimited_list( + expr: Union[str, ParserElement], + delim: Union[str, ParserElement] = ",", + combine: bool = False, + min: typing.Optional[int] = None, + max: typing.Optional[int] = None, + *, + allow_trailing_delim: bool = False, +) -> ParserElement: + """(DEPRECATED - use :class:`DelimitedList` class)""" + return DelimitedList( + expr, delim, combine, min, max, allow_trailing_delim=allow_trailing_delim + ) + + # pre-PEP8 compatible names -delimitedList = delimited_list -countedArray = counted_array -matchPreviousLiteral = match_previous_literal -matchPreviousExpr = match_previous_expr -oneOf = one_of -dictOf = dict_of -originalTextFor = original_text_for -nestedExpr = nested_expr -makeHTMLTags = make_html_tags -makeXMLTags = make_xml_tags -anyOpenTag, anyCloseTag = any_open_tag, any_close_tag -commonHTMLEntity = common_html_entity -replaceHTMLEntity = replace_html_entity +# fmt: off opAssoc = OpAssoc -infixNotation = infix_notation +anyOpenTag = any_open_tag +anyCloseTag = any_close_tag +commonHTMLEntity = common_html_entity cStyleComment = c_style_comment htmlComment = html_comment restOfLine = rest_of_line @@ -1086,3 +1058,43 @@ def checkUnindent(s, l, t): cppStyleComment = cpp_style_comment javaStyleComment = java_style_comment pythonStyleComment = python_style_comment + +@replaced_by_pep8(DelimitedList) +def delimitedList(): ... + +@replaced_by_pep8(DelimitedList) +def delimited_list(): ... + +@replaced_by_pep8(counted_array) +def countedArray(): ... + +@replaced_by_pep8(match_previous_literal) +def matchPreviousLiteral(): ... + +@replaced_by_pep8(match_previous_expr) +def matchPreviousExpr(): ... 
+ +@replaced_by_pep8(one_of) +def oneOf(): ... + +@replaced_by_pep8(dict_of) +def dictOf(): ... + +@replaced_by_pep8(original_text_for) +def originalTextFor(): ... + +@replaced_by_pep8(nested_expr) +def nestedExpr(): ... + +@replaced_by_pep8(make_html_tags) +def makeHTMLTags(): ... + +@replaced_by_pep8(make_xml_tags) +def makeXMLTags(): ... + +@replaced_by_pep8(replace_html_entity) +def replaceHTMLEntity(): ... + +@replaced_by_pep8(infix_notation) +def infixNotation(): ... +# fmt: on diff --git a/src/pip/_vendor/pyparsing/results.py b/src/pip/_vendor/pyparsing/results.py index 00c9421d3b0..0313049763b 100644 --- a/src/pip/_vendor/pyparsing/results.py +++ b/src/pip/_vendor/pyparsing/results.py @@ -1,18 +1,25 @@ # results.py -from collections.abc import MutableMapping, Mapping, MutableSequence, Iterator +from collections.abc import ( + MutableMapping, + Mapping, + MutableSequence, + Iterator, + Sequence, + Container, +) import pprint -from weakref import ref as wkref -from typing import Tuple, Any +from typing import Tuple, Any, Dict, Set, List str_type: Tuple[type, ...] = (str, bytes) _generator_type = type((_ for _ in ())) class _ParseResultsWithOffset: + tup: Tuple["ParseResults", int] __slots__ = ["tup"] - def __init__(self, p1, p2): - self.tup = (p1, p2) + def __init__(self, p1: "ParseResults", p2: int): + self.tup: Tuple[ParseResults, int] = (p1, p2) def __getitem__(self, i): return self.tup[i] @@ -47,7 +54,7 @@ class ParseResults: result = date_str.parse_string("1999/12/31") def test(s, fn=repr): - print("{} -> {}".format(s, fn(eval(s)))) + print(f"{s} -> {fn(eval(s))}") test("list(result)") test("result[0]") test("result['month']") @@ -70,27 +77,33 @@ def test(s, fn=repr): - year: '1999' """ - _null_values: Tuple[Any, ...] = (None, [], "", ()) + _null_values: Tuple[Any, ...] 
= (None, [], ()) - __slots__ = [ + _name: str + _parent: "ParseResults" + _all_names: Set[str] + _modal: bool + _toklist: List[Any] + _tokdict: Dict[str, Any] + + __slots__ = ( "_name", "_parent", "_all_names", "_modal", "_toklist", "_tokdict", - "__weakref__", - ] + ) class List(list): """ Simple wrapper class to distinguish parsed list results that should be preserved - as actual Python lists, instead of being converted to :class:`ParseResults`: + as actual Python lists, instead of being converted to :class:`ParseResults`:: LBRACK, RBRACK = map(pp.Suppress, "[]") element = pp.Forward() item = ppc.integer - element_list = LBRACK + pp.delimited_list(element) + RBRACK + element_list = LBRACK + pp.DelimitedList(element) + RBRACK # add parse actions to convert from ParseResults to actual Python collection types def as_python_list(t): @@ -107,7 +120,7 @@ def as_python_list(t): (2,3,4) ''', post_parse=lambda s, r: (r[0], type(r[0]))) - prints: + prints:: 100 (100, ) @@ -127,8 +140,7 @@ def __new__(cls, contained=None): if not isinstance(contained, list): raise TypeError( - "{} may only be constructed with a list," - " not {}".format(cls.__name__, type(contained).__name__) + f"{cls.__name__} may only be constructed with a list, not {type(contained).__name__}" ) return list.__new__(cls) @@ -159,6 +171,7 @@ def __new__(cls, toklist=None, name=None, **kwargs): def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): + self._tokdict: Dict[str, _ParseResultsWithOffset] self._modal = modal if name is not None and name != "": if isinstance(name, int): @@ -210,7 +223,7 @@ def __setitem__(self, k, v, isinstance=isinstance): ] sub = v if isinstance(sub, ParseResults): - sub._parent = wkref(self) + sub._parent = self def __delitem__(self, i): if isinstance(i, (int, slice)): @@ -263,7 +276,7 @@ def haskeys(self) -> bool: """ Since ``keys()`` returns an iterator, this method is helpful in bypassing code that looks for the existence of any defined 
results names.""" - return bool(self._tokdict) + return not not self._tokdict def pop(self, *args, **kwargs): """ @@ -311,9 +324,7 @@ def remove_LABEL(tokens): if k == "default": args = (args[0], v) else: - raise TypeError( - "pop() got an unexpected keyword argument {!r}".format(k) - ) + raise TypeError(f"pop() got an unexpected keyword argument {k!r}") if isinstance(args[0], int) or len(args) == 1 or args[0] in self: index = args[0] ret = self[index] @@ -423,12 +434,15 @@ def __getattr__(self, name): raise AttributeError(name) return "" - def __add__(self, other) -> "ParseResults": + def __add__(self, other: "ParseResults") -> "ParseResults": ret = self.copy() ret += other return ret - def __iadd__(self, other) -> "ParseResults": + def __iadd__(self, other: "ParseResults") -> "ParseResults": + if not other: + return self + if other._tokdict: offset = len(self._toklist) addoffset = lambda a: offset if a < 0 else a + offset @@ -441,7 +455,7 @@ def __iadd__(self, other) -> "ParseResults": for k, v in otherdictitems: self[k] = v if isinstance(v[0], ParseResults): - v[0]._parent = wkref(self) + v[0]._parent = self self._toklist += other._toklist self._all_names |= other._all_names @@ -456,7 +470,7 @@ def __radd__(self, other) -> "ParseResults": return other + self def __repr__(self) -> str: - return "{}({!r}, {})".format(type(self).__name__, self._toklist, self.as_dict()) + return f"{type(self).__name__}({self._toklist!r}, {self.as_dict()})" def __str__(self) -> str: return ( @@ -532,7 +546,10 @@ def to_item(obj): def copy(self) -> "ParseResults": """ - Returns a new copy of a :class:`ParseResults` object. + Returns a new shallow copy of a :class:`ParseResults` object. `ParseResults` + items contained within the source are shared with the copy. Use + :class:`ParseResults.deepcopy()` to create a copy with its own separate + content values. 
""" ret = ParseResults(self._toklist) ret._tokdict = self._tokdict.copy() @@ -541,6 +558,27 @@ def copy(self) -> "ParseResults": ret._name = self._name return ret + def deepcopy(self) -> "ParseResults": + """ + Returns a new deep copy of a :class:`ParseResults` object. + """ + ret = self.copy() + # replace values with copies if they are of known mutable types + for i, obj in enumerate(self._toklist): + if isinstance(obj, ParseResults): + self._toklist[i] = obj.deepcopy() + elif isinstance(obj, (str, bytes)): + pass + elif isinstance(obj, MutableMapping): + self._toklist[i] = dest = type(obj)() + for k, v in obj.items(): + dest[k] = v.deepcopy() if isinstance(v, ParseResults) else v + elif isinstance(obj, Container): + self._toklist[i] = type(obj)( + v.deepcopy() if isinstance(v, ParseResults) else v for v in obj + ) + return ret + def get_name(self): r""" Returns the results name for this token expression. Useful when several @@ -569,20 +607,17 @@ def get_name(self): if self._name: return self._name elif self._parent: - par = self._parent() - - def find_in_parent(sub): - return next( - ( - k - for k, vlist in par._tokdict.items() - for v, loc in vlist - if sub is v - ), - None, - ) - - return find_in_parent(self) if par else None + par: "ParseResults" = self._parent + parent_tokdict_items = par._tokdict.items() + return next( + ( + k + for k, vlist in parent_tokdict_items + for v, loc in vlist + if v is self + ), + None, + ) elif ( len(self) == 1 and len(self._tokdict) == 1 @@ -623,7 +658,7 @@ def dump(self, indent="", full=True, include_list=True, _depth=0) -> str: for k, v in items: if out: out.append(NL) - out.append("{}{}- {}: ".format(indent, (" " * _depth), k)) + out.append(f"{indent}{(' ' * _depth)}- {k}: ") if isinstance(v, ParseResults): if v: out.append( @@ -685,7 +720,7 @@ def pprint(self, *args, **kwargs): num = Word(nums) func = Forward() term = ident | num | Group('(' + func + ')') - func <<= ident + Group(Optional(delimited_list(term))) + func <<= 
ident + Group(Optional(DelimitedList(term))) result = func.parse_string("fna a,b,(fnb c,d,200),100") result.pprint(width=40) @@ -705,7 +740,7 @@ def __getstate__(self): self._toklist, ( self._tokdict.copy(), - self._parent is not None and self._parent() or None, + None, self._all_names, self._name, ), @@ -714,10 +749,7 @@ def __getstate__(self): def __setstate__(self, state): self._toklist, (self._tokdict, par, inAccumNames, self._name) = state self._all_names = set(inAccumNames) - if par is not None: - self._parent = wkref(par) - else: - self._parent = None + self._parent = None def __getnewargs__(self): return self._toklist, self._name @@ -738,6 +770,7 @@ def is_iterable(obj): iter(obj) except Exception: return False + # str's are iterable, but in pyparsing, we don't want to iterate over them else: return not isinstance(obj, str_type) @@ -752,8 +785,11 @@ def is_iterable(obj): return ret asList = as_list + """Deprecated - use :class:`as_list`""" asDict = as_dict + """Deprecated - use :class:`as_dict`""" getName = get_name + """Deprecated - use :class:`get_name`""" MutableMapping.register(ParseResults) diff --git a/src/pip/_vendor/pyparsing/testing.py b/src/pip/_vendor/pyparsing/testing.py index 84a0ef17078..6a254c1c5e2 100644 --- a/src/pip/_vendor/pyparsing/testing.py +++ b/src/pip/_vendor/pyparsing/testing.py @@ -222,7 +222,7 @@ def assertRunTestResults( ) else: # warning here maybe? 
- print("no validation for {!r}".format(test_string)) + print(f"no validation for {test_string!r}") # do this last, in case some specific test results can be reported instead self.assertTrue( @@ -265,15 +265,18 @@ def with_line_numbers( if expand_tabs: s = s.expandtabs() if mark_control is not None: + mark_control = typing.cast(str, mark_control) if mark_control == "unicode": - tbl = str.maketrans( - {c: u for c, u in zip(range(0, 33), range(0x2400, 0x2433))} - | {127: 0x2421} - ) + transtable_map = { + c: u for c, u in zip(range(0, 33), range(0x2400, 0x2433)) + } + transtable_map[127] = 0x2421 + tbl = str.maketrans(transtable_map) eol_mark = "" else: + ord_mark_control = ord(mark_control) tbl = str.maketrans( - {c: mark_control for c in list(range(0, 32)) + [127]} + {c: ord_mark_control for c in list(range(0, 32)) + [127]} ) s = s.translate(tbl) if mark_spaces is not None and mark_spaces != " ": @@ -303,7 +306,7 @@ def with_line_numbers( header0 = ( lead + "".join( - "{}{}".format(" " * 99, (i + 1) % 100) + f"{' ' * 99}{(i + 1) % 100}" for i in range(max(max_line_len // 100, 1)) ) + "\n" @@ -313,10 +316,7 @@ def with_line_numbers( header1 = ( header0 + lead - + "".join( - " {}".format((i + 1) % 10) - for i in range(-(-max_line_len // 10)) - ) + + "".join(f" {(i + 1) % 10}" for i in range(-(-max_line_len // 10))) + "\n" ) header2 = lead + "1234567890" * (-(-max_line_len // 10)) + "\n" @@ -324,7 +324,7 @@ def with_line_numbers( header1 + header2 + "\n".join( - "{:{}d}:{}{}".format(i, lineno_width, line, eol_mark) + f"{i:{lineno_width}d}:{line}{eol_mark}" for i, line in enumerate(s_lines, start=start_line) ) + "\n" diff --git a/src/pip/_vendor/pyparsing/unicode.py b/src/pip/_vendor/pyparsing/unicode.py index 06526203911..ec0b3a4fe60 100644 --- a/src/pip/_vendor/pyparsing/unicode.py +++ b/src/pip/_vendor/pyparsing/unicode.py @@ -64,27 +64,27 @@ def _chars_for_ranges(cls): @_lazyclassproperty def printables(cls): - "all non-whitespace characters in this range" + """all 
non-whitespace characters in this range""" return "".join(filterfalse(str.isspace, cls._chars_for_ranges)) @_lazyclassproperty def alphas(cls): - "all alphabetic characters in this range" + """all alphabetic characters in this range""" return "".join(filter(str.isalpha, cls._chars_for_ranges)) @_lazyclassproperty def nums(cls): - "all numeric digit characters in this range" + """all numeric digit characters in this range""" return "".join(filter(str.isdigit, cls._chars_for_ranges)) @_lazyclassproperty def alphanums(cls): - "all alphanumeric characters in this range" + """all alphanumeric characters in this range""" return cls.alphas + cls.nums @_lazyclassproperty def identchars(cls): - "all characters in this range that are valid identifier characters, plus underscore '_'" + """all characters in this range that are valid identifier characters, plus underscore '_'""" return "".join( sorted( set( @@ -100,13 +100,13 @@ def identchars(cls): def identbodychars(cls): """ all characters in this range that are valid identifier body characters, - plus the digits 0-9 + plus the digits 0-9, and · (Unicode MIDDLE DOT) """ return "".join( sorted( set( cls.identchars - + "0123456789" + + "0123456789·" + "".join( [c for c in cls._chars_for_ranges if ("_" + c).isidentifier()] ) @@ -114,6 +114,16 @@ def identbodychars(cls): ) ) + @_lazyclassproperty + def identifier(cls): + """ + a pyparsing Word expression for an identifier using this range's definitions for + identchars and identbodychars + """ + from pip._vendor.pyparsing import Word + + return Word(cls.identchars, cls.identbodychars) + class pyparsing_unicode(unicode_set): """ @@ -128,32 +138,32 @@ class pyparsing_unicode(unicode_set): ] class BasicMultilingualPlane(unicode_set): - "Unicode set for the Basic Multilingual Plane" + """Unicode set for the Basic Multilingual Plane""" _ranges: UnicodeRangeList = [ (0x0020, 0xFFFF), ] class Latin1(unicode_set): - "Unicode set for Latin-1 Unicode Character Range" + """Unicode set for 
Latin-1 Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x0020, 0x007E), (0x00A0, 0x00FF), ] class LatinA(unicode_set): - "Unicode set for Latin-A Unicode Character Range" + """Unicode set for Latin-A Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x0100, 0x017F), ] class LatinB(unicode_set): - "Unicode set for Latin-B Unicode Character Range" + """Unicode set for Latin-B Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x0180, 0x024F), ] class Greek(unicode_set): - "Unicode set for Greek Unicode Character Ranges" + """Unicode set for Greek Unicode Character Ranges""" _ranges: UnicodeRangeList = [ (0x0342, 0x0345), (0x0370, 0x0377), @@ -193,7 +203,7 @@ class Greek(unicode_set): ] class Cyrillic(unicode_set): - "Unicode set for Cyrillic Unicode Character Range" + """Unicode set for Cyrillic Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x0400, 0x052F), (0x1C80, 0x1C88), @@ -206,7 +216,7 @@ class Cyrillic(unicode_set): ] class Chinese(unicode_set): - "Unicode set for Chinese Unicode Character Range" + """Unicode set for Chinese Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x2E80, 0x2E99), (0x2E9B, 0x2EF3), @@ -229,8 +239,7 @@ class Chinese(unicode_set): ] class Japanese(unicode_set): - "Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges" - _ranges: UnicodeRangeList = [] + """Unicode set for Japanese Unicode Character Range, combining Kanji, Hiragana, and Katakana ranges""" class Kanji(unicode_set): "Unicode set for Kanji Unicode Character Range" @@ -240,7 +249,7 @@ class Kanji(unicode_set): ] class Hiragana(unicode_set): - "Unicode set for Hiragana Unicode Character Range" + """Unicode set for Hiragana Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x3041, 0x3096), (0x3099, 0x30A0), @@ -252,7 +261,7 @@ class Hiragana(unicode_set): ] class Katakana(unicode_set): - "Unicode set for Katakana Unicode Character Range" + """Unicode set for Katakana Unicode 
Character Range""" _ranges: UnicodeRangeList = [ (0x3099, 0x309C), (0x30A0, 0x30FF), @@ -265,8 +274,18 @@ class Katakana(unicode_set): (0x1F213,), ] + 漢字 = Kanji + カタカナ = Katakana + ひらがな = Hiragana + + _ranges = ( + Kanji._ranges + + Hiragana._ranges + + Katakana._ranges + ) + class Hangul(unicode_set): - "Unicode set for Hangul (Korean) Unicode Character Range" + """Unicode set for Hangul (Korean) Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x1100, 0x11FF), (0x302E, 0x302F), @@ -288,17 +307,17 @@ class Hangul(unicode_set): Korean = Hangul class CJK(Chinese, Japanese, Hangul): - "Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range" + """Unicode set for combined Chinese, Japanese, and Korean (CJK) Unicode Character Range""" class Thai(unicode_set): - "Unicode set for Thai Unicode Character Range" + """Unicode set for Thai Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x0E01, 0x0E3A), (0x0E3F, 0x0E5B) ] class Arabic(unicode_set): - "Unicode set for Arabic Unicode Character Range" + """Unicode set for Arabic Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x0600, 0x061B), (0x061E, 0x06FF), @@ -306,7 +325,7 @@ class Arabic(unicode_set): ] class Hebrew(unicode_set): - "Unicode set for Hebrew Unicode Character Range" + """Unicode set for Hebrew Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x0591, 0x05C7), (0x05D0, 0x05EA), @@ -320,33 +339,23 @@ class Hebrew(unicode_set): ] class Devanagari(unicode_set): - "Unicode set for Devanagari Unicode Character Range" + """Unicode set for Devanagari Unicode Character Range""" _ranges: UnicodeRangeList = [ (0x0900, 0x097F), (0xA8E0, 0xA8FF) ] - # fmt: on + BMP = BasicMultilingualPlane + # add language identifiers using language Unicode + العربية = Arabic + 中文 = Chinese + кириллица = Cyrillic + Ελληνικά = Greek + עִברִית = Hebrew + 日本語 = Japanese + 한국어 = Korean + ไทย = Thai + देवनागरी = Devanagari -pyparsing_unicode.Japanese._ranges = ( - 
pyparsing_unicode.Japanese.Kanji._ranges - + pyparsing_unicode.Japanese.Hiragana._ranges - + pyparsing_unicode.Japanese.Katakana._ranges -) - -pyparsing_unicode.BMP = pyparsing_unicode.BasicMultilingualPlane - -# add language identifiers using language Unicode -pyparsing_unicode.العربية = pyparsing_unicode.Arabic -pyparsing_unicode.中文 = pyparsing_unicode.Chinese -pyparsing_unicode.кириллица = pyparsing_unicode.Cyrillic -pyparsing_unicode.Ελληνικά = pyparsing_unicode.Greek -pyparsing_unicode.עִברִית = pyparsing_unicode.Hebrew -pyparsing_unicode.日本語 = pyparsing_unicode.Japanese -pyparsing_unicode.Japanese.漢字 = pyparsing_unicode.Japanese.Kanji -pyparsing_unicode.Japanese.カタカナ = pyparsing_unicode.Japanese.Katakana -pyparsing_unicode.Japanese.ひらがな = pyparsing_unicode.Japanese.Hiragana -pyparsing_unicode.한국어 = pyparsing_unicode.Korean -pyparsing_unicode.ไทย = pyparsing_unicode.Thai -pyparsing_unicode.देवनागरी = pyparsing_unicode.Devanagari + # fmt: on diff --git a/src/pip/_vendor/pyparsing/util.py b/src/pip/_vendor/pyparsing/util.py index 34ce092c6d0..d8d3f414cca 100644 --- a/src/pip/_vendor/pyparsing/util.py +++ b/src/pip/_vendor/pyparsing/util.py @@ -1,12 +1,14 @@ # util.py +import inspect import warnings import types import collections import itertools -from functools import lru_cache -from typing import List, Union, Iterable +from functools import lru_cache, wraps +from typing import Callable, List, Union, Iterable, TypeVar, cast _bslash = chr(92) +C = TypeVar("C", bound=Callable) class __config_flags: @@ -20,18 +22,15 @@ class __config_flags: def _set(cls, dname, value): if dname in cls._fixed_names: warnings.warn( - "{}.{} {} is {} and cannot be overridden".format( - cls.__name__, - dname, - cls._type_desc, - str(getattr(cls, dname)).upper(), - ) + f"{cls.__name__}.{dname} {cls._type_desc} is {str(getattr(cls, dname)).upper()}" + f" and cannot be overridden", + stacklevel=3, ) return if dname in cls._all_names: setattr(cls, dname, value) else: - raise 
ValueError("no such {} {!r}".format(cls._type_desc, dname)) + raise ValueError(f"no such {cls._type_desc} {dname!r}") enable = classmethod(lambda cls, name: cls._set(name, True)) disable = classmethod(lambda cls, name: cls._set(name, False)) @@ -45,7 +44,7 @@ def col(loc: int, strg: str) -> int: Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See - :class:`ParserElement.parseString` for more + :class:`ParserElement.parse_string` for more information on parsing strings containing ```` s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. @@ -60,7 +59,7 @@ def lineno(loc: int, strg: str) -> int: The first line is number 1. Note - the default parsing behavior is to expand tabs in the input string - before starting the parsing process. See :class:`ParserElement.parseString` + before starting the parsing process. See :class:`ParserElement.parse_string` for more information on parsing strings containing ```` s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. 
@@ -102,19 +101,24 @@ def clear(_): class _FifoCache: def __init__(self, size): self.not_in_cache = not_in_cache = object() - cache = collections.OrderedDict() + cache = {} + keyring = [object()] * size cache_get = cache.get + cache_pop = cache.pop + keyiter = itertools.cycle(range(size)) def get(_, key): return cache_get(key, not_in_cache) def set_(_, key, value): cache[key] = value - while len(cache) > size: - cache.popitem(last=False) + i = next(keyiter) + cache_pop(keyring[i], None) + keyring[i] = key def clear(_): cache.clear() + keyring[:] = [object()] * size self.size = size self.get = types.MethodType(get, self) @@ -189,9 +193,9 @@ def is_consecutive(c): is_consecutive.value = next(is_consecutive.counter) return is_consecutive.value - is_consecutive.prev = 0 - is_consecutive.counter = itertools.count() - is_consecutive.value = -1 + is_consecutive.prev = 0 # type: ignore [attr-defined] + is_consecutive.counter = itertools.count() # type: ignore [attr-defined] + is_consecutive.value = -1 # type: ignore [attr-defined] def escape_re_range_char(c): return "\\" + c if c in r"\^-][" else c @@ -215,9 +219,7 @@ def no_escape_re_range_char(c): else: sep = "" if ord(last) == ord(first) + 1 else "-" ret.append( - "{}{}{}".format( - escape_re_range_char(first), sep, escape_re_range_char(last) - ) + f"{escape_re_range_char(first)}{sep}{escape_re_range_char(last)}" ) else: ret = [escape_re_range_char(c) for c in s] @@ -233,3 +235,50 @@ def _flatten(ll: list) -> list: else: ret.append(i) return ret + + +def _make_synonym_function(compat_name: str, fn: C) -> C: + # In a future version, uncomment the code in the internal _inner() functions + # to begin emitting DeprecationWarnings. + + # Unwrap staticmethod/classmethod + fn = getattr(fn, "__func__", fn) + + # (Presence of 'self' arg in signature is used by explain_exception() methods, so we take + # some extra steps to add it if present in decorated function.) 
+ if "self" == list(inspect.signature(fn).parameters)[0]: + + @wraps(fn) + def _inner(self, *args, **kwargs): + # warnings.warn( + # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=3 + # ) + return fn(self, *args, **kwargs) + + else: + + @wraps(fn) + def _inner(*args, **kwargs): + # warnings.warn( + # f"Deprecated - use {fn.__name__}", DeprecationWarning, stacklevel=3 + # ) + return fn(*args, **kwargs) + + _inner.__doc__ = f"""Deprecated - use :class:`{fn.__name__}`""" + _inner.__name__ = compat_name + _inner.__annotations__ = fn.__annotations__ + if isinstance(fn, types.FunctionType): + _inner.__kwdefaults__ = fn.__kwdefaults__ + elif isinstance(fn, type) and hasattr(fn, "__init__"): + _inner.__kwdefaults__ = fn.__init__.__kwdefaults__ + else: + _inner.__kwdefaults__ = None + _inner.__qualname__ = fn.__qualname__ + return cast(C, _inner) + + +def replaced_by_pep8(fn: C) -> Callable[[Callable], C]: + """ + Decorator for pre-PEP8 compatibility synonyms, to link them to the new function. 
+ """ + return lambda other: _make_synonym_function(other.__name__, fn) diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 07671fb58af..02307f1a3c3 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -5,7 +5,7 @@ distro==1.8.0 msgpack==1.0.5 packaging==21.3 platformdirs==3.8.1 -pyparsing==3.0.9 +pyparsing==3.1.0 pyproject-hooks==1.0.0 requests==2.31.0 certifi==2022.12.7 From 50af834c1e9abb76b97e0c7284cb1aa6f0e8202b Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 9 Jul 2023 21:31:12 +0100 Subject: [PATCH 574/730] Upgrade certifi to 2023.5.7 --- news/certifi.vendor.rst | 1 + src/pip/_vendor/certifi/__init__.py | 2 +- src/pip/_vendor/certifi/cacert.pem | 62 +++++++++++++++++++++++++++++ src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 65 insertions(+), 2 deletions(-) create mode 100644 news/certifi.vendor.rst diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst new file mode 100644 index 00000000000..68018f44a54 --- /dev/null +++ b/news/certifi.vendor.rst @@ -0,0 +1 @@ +Upgrade certifi to 2023.5.7 diff --git a/src/pip/_vendor/certifi/__init__.py b/src/pip/_vendor/certifi/__init__.py index a3546f12555..705f416d6b0 100644 --- a/src/pip/_vendor/certifi/__init__.py +++ b/src/pip/_vendor/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2022.12.07" +__version__ = "2023.05.07" diff --git a/src/pip/_vendor/certifi/cacert.pem b/src/pip/_vendor/certifi/cacert.pem index df9e4e3c755..5183934bb75 100644 --- a/src/pip/_vendor/certifi/cacert.pem +++ b/src/pip/_vendor/certifi/cacert.pem @@ -4525,3 +4525,65 @@ BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu 9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= -----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE 
AUTHORITY +# Label: "BJCA Global Root CA1" +# Serial: 113562791157148395269083148143378328608 +# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 +# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a +# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae +-----BEGIN CERTIFICATE----- +MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU +MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI +T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz +MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF +SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh +bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z +xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ +spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 +58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR +at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll +5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq +nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK +V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ +pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO +z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn +jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ +WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF +7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 +YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli +awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u ++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 +X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN +SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo 
+P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI ++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz +znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 +eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 +YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy +r/6zcCwupvI= +-----END CERTIFICATE----- + +# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY +# Label: "BJCA Global Root CA2" +# Serial: 58605626836079930195615843123109055211 +# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c +# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 +# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 +-----BEGIN CERTIFICATE----- +MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw +CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ +VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy +MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ +TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS +b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B +IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ ++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK +sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD +AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA +94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B +43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== +-----END CERTIFICATE----- diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 02307f1a3c3..3a2567a8cb5 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -8,7 +8,7 @@ platformdirs==3.8.1 pyparsing==3.1.0 pyproject-hooks==1.0.0 requests==2.31.0 - certifi==2022.12.7 + certifi==2023.5.7 
chardet==5.1.0 idna==3.4 urllib3==1.26.15 From 6c33bbf8fb09bcae2b17ec57a4dfe57efe0ac9fc Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 9 Jul 2023 21:31:45 +0100 Subject: [PATCH 575/730] Upgrade rich to 13.4.2 --- news/rich.vendor.rst | 1 + src/pip/_vendor/rich/console.py | 2 +- src/pip/_vendor/rich/syntax.py | 4 +--- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 4 insertions(+), 5 deletions(-) create mode 100644 news/rich.vendor.rst diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst new file mode 100644 index 00000000000..9f44d8bdfcd --- /dev/null +++ b/news/rich.vendor.rst @@ -0,0 +1 @@ +Upgrade rich to 13.4.2 diff --git a/src/pip/_vendor/rich/console.py b/src/pip/_vendor/rich/console.py index 7c363dfdc5e..e559cbb43c1 100644 --- a/src/pip/_vendor/rich/console.py +++ b/src/pip/_vendor/rich/console.py @@ -952,6 +952,7 @@ def is_terminal(self) -> bool: force_color = self._environ.get("FORCE_COLOR") if force_color is not None: self._force_terminal = True + return True isatty: Optional[Callable[[], bool]] = getattr(self.file, "isatty", None) try: @@ -2000,7 +2001,6 @@ def _check_buffer(self) -> None: self._record_buffer.extend(self._buffer[:]) if self._buffer_index == 0: - if self.is_jupyter: # pragma: no cover from .jupyter import display diff --git a/src/pip/_vendor/rich/syntax.py b/src/pip/_vendor/rich/syntax.py index 25b226a3a98..57033766483 100644 --- a/src/pip/_vendor/rich/syntax.py +++ b/src/pip/_vendor/rich/syntax.py @@ -590,7 +590,6 @@ def _get_number_styles(self, console: Console) -> Tuple[Style, Style, Style]: def __rich_measure__( self, console: "Console", options: "ConsoleOptions" ) -> "Measurement": - _, right, _, left = Padding.unpack(self.padding) padding = left + right if self.code_width is not None: @@ -688,7 +687,7 @@ def _get_syntax( lines = ( Text("\n") .join(lines) - .with_indent_guides(self.tab_size, style=style) + .with_indent_guides(self.tab_size, style=style + Style(italic=False)) .split("\n", allow_blank=True) ) @@ -830,7 
+829,6 @@ def _get_code_index_for_syntax_position( if __name__ == "__main__": # pragma: no cover - import argparse import sys diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 3a2567a8cb5..f3e9d2fd3e5 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -12,7 +12,7 @@ requests==2.31.0 chardet==5.1.0 idna==3.4 urllib3==1.26.15 -rich==13.3.3 +rich==13.4.2 pygments==2.14.0 typing_extensions==4.6.0 resolvelib==1.0.1 From b6d519f775014c1ba84691c448c4f0c43db7b59f Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 9 Jul 2023 21:32:17 +0100 Subject: [PATCH 576/730] Upgrade pygments to 2.15.1 --- news/pygments.vendor.rst | 1 + src/pip/_vendor/pygments/__init__.py | 24 +-- src/pip/_vendor/pygments/__main__.py | 2 +- src/pip/_vendor/pygments/cmdline.py | 4 +- src/pip/_vendor/pygments/console.py | 2 +- src/pip/_vendor/pygments/filter.py | 2 +- src/pip/_vendor/pygments/filters/__init__.py | 2 +- src/pip/_vendor/pygments/formatter.py | 52 +++++-- .../_vendor/pygments/formatters/__init__.py | 54 ++++--- .../_vendor/pygments/formatters/_mapping.py | 4 +- src/pip/_vendor/pygments/formatters/bbcode.py | 2 +- src/pip/_vendor/pygments/formatters/groff.py | 4 +- src/pip/_vendor/pygments/formatters/html.py | 26 ++-- src/pip/_vendor/pygments/formatters/img.py | 2 +- src/pip/_vendor/pygments/formatters/irc.py | 2 +- src/pip/_vendor/pygments/formatters/latex.py | 2 +- src/pip/_vendor/pygments/formatters/other.py | 2 +- .../pygments/formatters/pangomarkup.py | 2 +- src/pip/_vendor/pygments/formatters/rtf.py | 2 +- src/pip/_vendor/pygments/formatters/svg.py | 2 +- .../_vendor/pygments/formatters/terminal.py | 2 +- .../pygments/formatters/terminal256.py | 2 +- src/pip/_vendor/pygments/lexer.py | 106 ++++++++++--- src/pip/_vendor/pygments/lexers/__init__.py | 90 +++++++---- src/pip/_vendor/pygments/lexers/_mapping.py | 10 +- src/pip/_vendor/pygments/lexers/python.py | 146 +++++++++--------- src/pip/_vendor/pygments/modeline.py | 2 +- 
src/pip/_vendor/pygments/plugin.py | 2 +- src/pip/_vendor/pygments/regexopt.py | 2 +- src/pip/_vendor/pygments/scanner.py | 2 +- src/pip/_vendor/pygments/sphinxext.py | 2 +- src/pip/_vendor/pygments/style.py | 2 +- src/pip/_vendor/pygments/styles/__init__.py | 16 +- src/pip/_vendor/pygments/token.py | 2 +- src/pip/_vendor/pygments/unistring.py | 6 +- src/pip/_vendor/pygments/util.py | 30 +++- src/pip/_vendor/vendor.txt | 2 +- 37 files changed, 389 insertions(+), 228 deletions(-) create mode 100644 news/pygments.vendor.rst diff --git a/news/pygments.vendor.rst b/news/pygments.vendor.rst new file mode 100644 index 00000000000..b8aa9e56358 --- /dev/null +++ b/news/pygments.vendor.rst @@ -0,0 +1 @@ +Upgrade pygments to 2.15.1 diff --git a/src/pip/_vendor/pygments/__init__.py b/src/pip/_vendor/pygments/__init__.py index d9b0a8dea2e..39c84aae5d8 100644 --- a/src/pip/_vendor/pygments/__init__.py +++ b/src/pip/_vendor/pygments/__init__.py @@ -21,12 +21,12 @@ .. _Pygments master branch: https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from io import StringIO, BytesIO -__version__ = '2.14.0' +__version__ = '2.15.1' __docformat__ = 'restructuredtext' __all__ = ['lex', 'format', 'highlight'] @@ -34,7 +34,9 @@ def lex(code, lexer): """ - Lex ``code`` with ``lexer`` and return an iterable of tokens. + Lex `code` with the `lexer` (must be a `Lexer` instance) + and return an iterable of tokens. Currently, this only calls + `lexer.get_tokens()`. """ try: return lexer.get_tokens(code) @@ -49,11 +51,12 @@ def lex(code, lexer): def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin """ - Format a tokenlist ``tokens`` with the formatter ``formatter``. + Format ``tokens`` (an iterable of tokens) with the formatter ``formatter`` + (a `Formatter` instance). 
- If ``outfile`` is given and a valid file object (an object - with a ``write`` method), the result will be written to it, otherwise - it is returned as a string. + If ``outfile`` is given and a valid file object (an object with a + ``write`` method), the result will be written to it, otherwise it + is returned as a string. """ try: if not outfile: @@ -73,10 +76,7 @@ def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builti def highlight(code, lexer, formatter, outfile=None): """ - Lex ``code`` with ``lexer`` and format it with the formatter ``formatter``. - - If ``outfile`` is given and a valid file object (an object - with a ``write`` method), the result will be written to it, otherwise - it is returned as a string. + This is the most high-level highlighting function. It combines `lex` and + `format` in one function. """ return format(lex(code, lexer), formatter, outfile) diff --git a/src/pip/_vendor/pygments/__main__.py b/src/pip/_vendor/pygments/__main__.py index 90cafd93426..2f7f8cbad05 100644 --- a/src/pip/_vendor/pygments/__main__.py +++ b/src/pip/_vendor/pygments/__main__.py @@ -4,7 +4,7 @@ Main entry point for ``python -m pygments``. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/cmdline.py b/src/pip/_vendor/pygments/cmdline.py index de73b06b4cf..eec1775ba5f 100644 --- a/src/pip/_vendor/pygments/cmdline.py +++ b/src/pip/_vendor/pygments/cmdline.py @@ -4,7 +4,7 @@ Command line interface. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -185,7 +185,7 @@ def main_inner(parser, argns): return 0 if argns.V: - print('Pygments version %s, (c) 2006-2022 by Georg Brandl, Matthäus ' + print('Pygments version %s, (c) 2006-2023 by Georg Brandl, Matthäus ' 'Chajdas and contributors.' % __version__) return 0 diff --git a/src/pip/_vendor/pygments/console.py b/src/pip/_vendor/pygments/console.py index 2ada68e03b3..deb4937f74f 100644 --- a/src/pip/_vendor/pygments/console.py +++ b/src/pip/_vendor/pygments/console.py @@ -4,7 +4,7 @@ Format colored console output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/filter.py b/src/pip/_vendor/pygments/filter.py index e5c96649382..dafa08d1569 100644 --- a/src/pip/_vendor/pygments/filter.py +++ b/src/pip/_vendor/pygments/filter.py @@ -4,7 +4,7 @@ Module that implements the default filter. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/filters/__init__.py b/src/pip/_vendor/pygments/filters/__init__.py index c302a6c0c53..5aa9ecbb80c 100644 --- a/src/pip/_vendor/pygments/filters/__init__.py +++ b/src/pip/_vendor/pygments/filters/__init__.py @@ -5,7 +5,7 @@ Module containing filter lookup functions and default filters. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatter.py b/src/pip/_vendor/pygments/formatter.py index a2349ef8652..3ca4892fa31 100644 --- a/src/pip/_vendor/pygments/formatter.py +++ b/src/pip/_vendor/pygments/formatter.py @@ -4,7 +4,7 @@ Base formatter class. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. 
+ :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -26,7 +26,21 @@ class Formatter: """ Converts a token stream to text. - Options accepted: + Formatters should have attributes to help selecting them. These + are similar to the corresponding :class:`~pygments.lexer.Lexer` + attributes. + + .. autoattribute:: name + :no-value: + + .. autoattribute:: aliases + :no-value: + + .. autoattribute:: filenames + :no-value: + + You can pass options as keyword arguments to the constructor. + All formatters accept these basic options: ``style`` The style to use, can be a string or a Style subclass @@ -47,15 +61,19 @@ class Formatter: support (default: None). ``outencoding`` Overrides ``encoding`` if given. + """ - #: Name of the formatter + #: Full name for the formatter, in human-readable form. name = None - #: Shortcuts for the formatter + #: A list of short, unique identifiers that can be used to lookup + #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`. aliases = [] - #: fn match rules + #: A list of fnmatch patterns that match filenames for which this + #: formatter can produce output. The patterns in this list should be unique + #: among all formatters. filenames = [] #: If True, this formatter outputs Unicode strings when no encoding @@ -63,6 +81,11 @@ class Formatter: unicodeoutput = True def __init__(self, **options): + """ + As with lexers, this constructor takes arbitrary optional arguments, + and if you override it, you should first process your own options, then + call the base class implementation. + """ self.style = _lookup_style(options.get('style', 'default')) self.full = get_bool_opt(options, 'full', False) self.title = options.get('title', '') @@ -75,18 +98,25 @@ def __init__(self, **options): def get_style_defs(self, arg=''): """ - Return the style definitions for the current style as a string. 
+ This method must return statements or declarations suitable to define + the current style for subsequent highlighted text (e.g. CSS classes + in the `HTMLFormatter`). - ``arg`` is an additional argument whose meaning depends on the - formatter used. Note that ``arg`` can also be a list or tuple - for some formatters like the html formatter. + The optional argument `arg` can be used to modify the generation and + is formatter dependent (it is standardized because it can be given on + the command line). + + This method is called by the ``-S`` :doc:`command-line option `, + the `arg` is then given by the ``-a`` option. """ return '' def format(self, tokensource, outfile): """ - Format ``tokensource``, an iterable of ``(tokentype, tokenstring)`` - tuples and write it into ``outfile``. + This method must format the tokens from the `tokensource` iterable and + write the formatted version to the file object `outfile`. + + Formatter options can control how exactly the tokens are converted. """ if self.encoding: # wrap the outfile in a StreamWriter diff --git a/src/pip/_vendor/pygments/formatters/__init__.py b/src/pip/_vendor/pygments/formatters/__init__.py index 7ecf7eee35f..39db84262d8 100644 --- a/src/pip/_vendor/pygments/formatters/__init__.py +++ b/src/pip/_vendor/pygments/formatters/__init__.py @@ -4,13 +4,14 @@ Pygments formatters. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" +import re import sys import types -from fnmatch import fnmatch +import fnmatch from os.path import basename from pip._vendor.pygments.formatters._mapping import FORMATTERS @@ -21,6 +22,16 @@ 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS) _formatter_cache = {} # classes by name +_pattern_cache = {} + + +def _fn_matches(fn, glob): + """Return whether the supplied file name fn matches pattern filename.""" + if glob not in _pattern_cache: + pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) + return pattern.match(fn) + return _pattern_cache[glob].match(fn) + def _load_formatters(module_name): """Load a formatter (and all others in the module too).""" @@ -57,9 +68,12 @@ def find_formatter_class(alias): def get_formatter_by_name(_alias, **options): - """Lookup and instantiate a formatter by alias. + """ + Return an instance of a :class:`.Formatter` subclass that has `alias` in its + aliases list. The formatter is given the `options` at its instantiation. - Raises ClassNotFound if not found. + Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that + alias is found. """ cls = find_formatter_class(_alias) if cls is None: @@ -67,19 +81,18 @@ def get_formatter_by_name(_alias, **options): return cls(**options) -def load_formatter_from_file(filename, formattername="CustomFormatter", - **options): - """Load a formatter from a file. - - This method expects a file located relative to the current working - directory, which contains a class named CustomFormatter. By default, - it expects the Formatter to be named CustomFormatter; you can specify - your own class name as the second argument to this function. +def load_formatter_from_file(filename, formattername="CustomFormatter", **options): + """ + Return a `Formatter` subclass instance loaded from the provided file, relative + to the current directory. - Users should be very careful with the input, because this method - is equivalent to running eval on the input file. 
+ The file is expected to contain a Formatter class named ``formattername`` + (by default, CustomFormatter). Users should be very careful with the input, because + this method is equivalent to running ``eval()`` on the input file. The formatter is + given the `options` at its instantiation. - Raises ClassNotFound if there are any problems importing the Formatter. + :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading + the formatter. .. versionadded:: 2.2 """ @@ -104,20 +117,23 @@ def load_formatter_from_file(filename, formattername="CustomFormatter", def get_formatter_for_filename(fn, **options): - """Lookup and instantiate a formatter by filename pattern. + """ + Return a :class:`.Formatter` subclass instance that has a filename pattern + matching `fn`. The formatter is given the `options` at its instantiation. - Raises ClassNotFound if not found. + Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename + is found. """ fn = basename(fn) for modname, name, _, filenames, _ in FORMATTERS.values(): for filename in filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): if name not in _formatter_cache: _load_formatters(modname) return _formatter_cache[name](**options) for cls in find_plugin_formatters(): for filename in cls.filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): return cls(**options) raise ClassNotFound("no formatter found for file name %r" % fn) diff --git a/src/pip/_vendor/pygments/formatters/_mapping.py b/src/pip/_vendor/pygments/formatters/_mapping.py index 6e34f960784..72ca84040b6 100644 --- a/src/pip/_vendor/pygments/formatters/_mapping.py +++ b/src/pip/_vendor/pygments/formatters/_mapping.py @@ -1,12 +1,12 @@ # Automatically generated by scripts/gen_mapfiles.py. -# DO NOT EDIT BY HAND; run `make mapfiles` instead. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. 
FORMATTERS = { 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'), 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), 'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'), - 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ```` tags within a ``
`` tag, wrapped in a ``
`` tag. The ``
``'s CSS class can be set by the `cssclass` option."), + 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ```` tags. By default, the content is enclosed in a ``
`` tag, itself wrapped in a ``
`` tag (but see the `nowrap` option). The ``
``'s CSS class can be set by the `cssclass` option."), 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'), 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), diff --git a/src/pip/_vendor/pygments/formatters/bbcode.py b/src/pip/_vendor/pygments/formatters/bbcode.py index 2be2b4e3129..c4db8f4ef21 100644 --- a/src/pip/_vendor/pygments/formatters/bbcode.py +++ b/src/pip/_vendor/pygments/formatters/bbcode.py @@ -4,7 +4,7 @@ BBcode formatter. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/groff.py b/src/pip/_vendor/pygments/formatters/groff.py index f3dcbce9b9f..30a528e668f 100644 --- a/src/pip/_vendor/pygments/formatters/groff.py +++ b/src/pip/_vendor/pygments/formatters/groff.py @@ -4,7 +4,7 @@ Formatter for groff output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -84,7 +84,7 @@ def _define_colors(self, outfile): if ndef['color'] is not None: colors.add(ndef['color']) - for color in colors: + for color in sorted(colors): outfile.write('.defcolor ' + color + ' rgb #' + color + '\n') diff --git a/src/pip/_vendor/pygments/formatters/html.py b/src/pip/_vendor/pygments/formatters/html.py index f22b200c0e6..931d7c3fe29 100644 --- a/src/pip/_vendor/pygments/formatters/html.py +++ b/src/pip/_vendor/pygments/formatters/html.py @@ -4,7 +4,7 @@ Formatter for HTML output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -62,7 +62,7 @@ def _get_ttype_class(ttype): CSSFILE_TEMPLATE = '''\ /* generated by Pygments -Copyright 2006-2022 by the Pygments team. +Copyright 2006-2023 by the Pygments team. Licensed under the BSD license, see LICENSE for details. */ %(styledefs)s @@ -73,7 +73,7 @@ def _get_ttype_class(ttype): "http://www.w3.org/TR/html4/strict.dtd"> @@ -112,9 +112,9 @@ def _get_ttype_class(ttype): class HtmlFormatter(Formatter): r""" - Format tokens as HTML 4 ```` tags within a ``
`` tag, wrapped
-    in a ``
`` tag. The ``
``'s CSS class can be set by the `cssclass` - option. + Format tokens as HTML 4 ```` tags. By default, the content is enclosed + in a ``
`` tag, itself wrapped in a ``
`` tag (but see the `nowrap` option). + The ``
``'s CSS class can be set by the `cssclass` option. If the `linenos` option is set to ``"table"``, the ``
`` is
     additionally wrapped inside a ```` which has one row and two
@@ -140,8 +140,6 @@ class HtmlFormatter(Formatter):
 
     (whitespace added to improve clarity).
 
-    Wrapping can be disabled using the `nowrap` option.
-
     A list of lines can be specified using the `hl_lines` option to make these
     lines highlighted (as of Pygments 0.11).
 
@@ -187,8 +185,8 @@ class HtmlFormatter(Formatter):
     Additional options accepted:
 
     `nowrap`
-        If set to ``True``, don't wrap the tokens at all, not even inside a ``
``
-        tag. This disables most other options (default: ``False``).
+        If set to ``True``, don't add a ``
`` and a ``
`` tag + around the tokens. This disables most other options (default: ``False``). `full` Tells the formatter to output a "full" document, i.e. a complete @@ -635,7 +633,7 @@ def _wrap_full(self, inner, outfile): # write CSS file only if noclobber_cssfile isn't given as an option. try: if not os.path.exists(cssfilename) or not self.noclobber_cssfile: - with open(cssfilename, "w") as cf: + with open(cssfilename, "w", encoding="utf-8") as cf: cf.write(CSSFILE_TEMPLATE % {'styledefs': self.get_style_defs('body')}) except OSError as err: @@ -721,7 +719,7 @@ def _wrap_tablelinenos(self, inner): yield 0, dummyoutfile.getvalue() yield 0, '
' yield 0, '
' - + def _wrap_inlinelinenos(self, inner): # need a list of lines since we need the width of a single number :( @@ -946,9 +944,9 @@ def wrap(self, source): output = source if self.wrapcode: output = self._wrap_code(output) - + output = self._wrap_pre(output) - + return output def format_unencoded(self, tokensource, outfile): diff --git a/src/pip/_vendor/pygments/formatters/img.py b/src/pip/_vendor/pygments/formatters/img.py index 0f36a32ba33..a338c1588fd 100644 --- a/src/pip/_vendor/pygments/formatters/img.py +++ b/src/pip/_vendor/pygments/formatters/img.py @@ -4,7 +4,7 @@ Formatter for Pixmap output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/irc.py b/src/pip/_vendor/pygments/formatters/irc.py index 53e19b83d1e..2144d439e0f 100644 --- a/src/pip/_vendor/pygments/formatters/irc.py +++ b/src/pip/_vendor/pygments/formatters/irc.py @@ -4,7 +4,7 @@ Formatter for IRC output - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/latex.py b/src/pip/_vendor/pygments/formatters/latex.py index 4a7375a5ceb..ca539b40f6a 100644 --- a/src/pip/_vendor/pygments/formatters/latex.py +++ b/src/pip/_vendor/pygments/formatters/latex.py @@ -4,7 +4,7 @@ Formatter for LaTeX fancyvrb output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/pip/_vendor/pygments/formatters/other.py b/src/pip/_vendor/pygments/formatters/other.py index 1e39cd42a8c..990ead48021 100644 --- a/src/pip/_vendor/pygments/formatters/other.py +++ b/src/pip/_vendor/pygments/formatters/other.py @@ -4,7 +4,7 @@ Other formatters: NullFormatter, RawTokenFormatter. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/pangomarkup.py b/src/pip/_vendor/pygments/formatters/pangomarkup.py index bd00866b8b9..6bb325d0788 100644 --- a/src/pip/_vendor/pygments/formatters/pangomarkup.py +++ b/src/pip/_vendor/pygments/formatters/pangomarkup.py @@ -4,7 +4,7 @@ Formatter for Pango markup output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/rtf.py b/src/pip/_vendor/pygments/formatters/rtf.py index 4114d1688c3..125189c6fa5 100644 --- a/src/pip/_vendor/pygments/formatters/rtf.py +++ b/src/pip/_vendor/pygments/formatters/rtf.py @@ -4,7 +4,7 @@ A formatter that generates RTF files. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/svg.py b/src/pip/_vendor/pygments/formatters/svg.py index 075150a4b58..a8727ed8592 100644 --- a/src/pip/_vendor/pygments/formatters/svg.py +++ b/src/pip/_vendor/pygments/formatters/svg.py @@ -4,7 +4,7 @@ Formatter for SVG output. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/pip/_vendor/pygments/formatters/terminal.py b/src/pip/_vendor/pygments/formatters/terminal.py index e0bda16a236..abb8770811f 100644 --- a/src/pip/_vendor/pygments/formatters/terminal.py +++ b/src/pip/_vendor/pygments/formatters/terminal.py @@ -4,7 +4,7 @@ Formatter for terminal output with ANSI sequences. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/formatters/terminal256.py b/src/pip/_vendor/pygments/formatters/terminal256.py index 201b3c32832..0cfe5d1612e 100644 --- a/src/pip/_vendor/pygments/formatters/terminal256.py +++ b/src/pip/_vendor/pygments/formatters/terminal256.py @@ -10,7 +10,7 @@ Formatter version 1. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/lexer.py b/src/pip/_vendor/pygments/lexer.py index 74ab9b9088f..eb2c1b46b69 100644 --- a/src/pip/_vendor/pygments/lexer.py +++ b/src/pip/_vendor/pygments/lexer.py @@ -4,7 +4,7 @@ Base lexer classes. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -50,7 +50,31 @@ class Lexer(metaclass=LexerMeta): """ Lexer for a specific language. - Basic options recognized: + See also :doc:`lexerdevelopment`, a high-level guide to writing + lexers. + + Lexer classes have attributes used for choosing the most appropriate + lexer based on various criteria. + + .. autoattribute:: name + :no-value: + .. autoattribute:: aliases + :no-value: + .. autoattribute:: filenames + :no-value: + .. autoattribute:: alias_filenames + .. autoattribute:: mimetypes + :no-value: + .. 
autoattribute:: priority + + Lexers included in Pygments should have an additional attribute: + + .. autoattribute:: url + :no-value: + + You can pass options to the constructor. The basic options recognized + by all lexers and processed by the base `Lexer` class are: + ``stripnl`` Strip leading and trailing newlines from the input (default: True). ``stripall`` @@ -74,28 +98,55 @@ class Lexer(metaclass=LexerMeta): Overrides the ``encoding`` if given. """ - #: Name of the lexer + #: Full name of the lexer, in human-readable form name = None - #: URL of the language specification/definition - url = None - - #: Shortcuts for the lexer + #: A list of short, unique identifiers that can be used to look + #: up the lexer from a list, e.g., using `get_lexer_by_name()`. aliases = [] - #: File name globs + #: A list of `fnmatch` patterns that match filenames which contain + #: content for this lexer. The patterns in this list should be unique among + #: all lexers. filenames = [] - #: Secondary file name globs + #: A list of `fnmatch` patterns that match filenames which may or may not + #: contain content for this lexer. This list is used by the + #: :func:`.guess_lexer_for_filename()` function, to determine which lexers + #: are then included in guessing the correct one. That means that + #: e.g. every lexer for HTML and a template language should include + #: ``\*.html`` in this list. alias_filenames = [] - #: MIME types + #: A list of MIME types for content that can be lexed with this lexer. mimetypes = [] #: Priority, should multiple lexers match and no content is provided priority = 0 + #: URL of the language specification/definition. Used in the Pygments + #: documentation. + url = None + def __init__(self, **options): + """ + This constructor takes arbitrary options as keyword arguments. + Every subclass must first process its own options and then call + the `Lexer` constructor, since it processes the basic + options like `stripnl`. 
+ + An example looks like this: + + .. sourcecode:: python + + def __init__(self, **options): + self.compress = options.get('compress', '') + Lexer.__init__(self, **options) + + As these options must all be specifiable as strings (due to the + command line usage), there are various utility functions + available to help with that, see `Utilities`_. + """ self.options = options self.stripnl = get_bool_opt(options, 'stripnl', True) self.stripall = get_bool_opt(options, 'stripall', False) @@ -124,10 +175,13 @@ def add_filter(self, filter_, **options): def analyse_text(text): """ - Has to return a float between ``0`` and ``1`` that indicates - if a lexer wants to highlight this text. Used by ``guess_lexer``. - If this method returns ``0`` it won't highlight it in any case, if - it returns ``1`` highlighting with this lexer is guaranteed. + A static method which is called for lexer guessing. + + It should analyse the text and return a float in the range + from ``0.0`` to ``1.0``. If it returns ``0.0``, the lexer + will not be selected as the most probable one, if it returns + ``1.0``, it will be selected immediately. This is used by + `guess_lexer`. The `LexerMeta` metaclass automatically wraps this function so that it works like a static method (no ``self`` or ``cls`` @@ -138,12 +192,17 @@ def analyse_text(text): def get_tokens(self, text, unfiltered=False): """ - Return an iterable of (tokentype, value) pairs generated from - `text`. If `unfiltered` is set to `True`, the filtering mechanism - is bypassed even if filters are defined. + This method is the basic interface of a lexer. It is called by + the `highlight()` function. It must process the text and return an + iterable of ``(tokentype, value)`` pairs from `text`. + + Normally, you don't need to override this method. 
The default + implementation processes the options recognized by all lexers + (`stripnl`, `stripall` and so on), and then yields all tokens + from `get_tokens_unprocessed()`, with the ``index`` dropped. - Also preprocess the text, i.e. expand tabs and strip it if - wanted and applies registered filters. + If `unfiltered` is set to `True`, the filtering mechanism is + bypassed even if filters are defined. """ if not isinstance(text, str): if self.encoding == 'guess': @@ -197,11 +256,12 @@ def streamer(): def get_tokens_unprocessed(self, text): """ - Return an iterable of (index, tokentype, value) pairs where "index" - is the starting position of the token within the input text. + This method should process the text and return an iterable of + ``(index, tokentype, value)`` tuples where ``index`` is the starting + position of the token within the input text. - In subclasses, implement this method as a generator to - maximize effectiveness. + It must be overridden by subclasses. It is recommended to + implement it as a generator to maximize effectiveness. """ raise NotImplementedError diff --git a/src/pip/_vendor/pygments/lexers/__init__.py b/src/pip/_vendor/pygments/lexers/__init__.py index e75a05791e2..d97c3e395ed 100644 --- a/src/pip/_vendor/pygments/lexers/__init__.py +++ b/src/pip/_vendor/pygments/lexers/__init__.py @@ -4,13 +4,14 @@ Pygments lexers. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" +import re import sys import types -from fnmatch import fnmatch +import fnmatch from os.path import basename from pip._vendor.pygments.lexers._mapping import LEXERS @@ -27,6 +28,16 @@ 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT) _lexer_cache = {} +_pattern_cache = {} + + +def _fn_matches(fn, glob): + """Return whether the supplied file name fn matches pattern filename.""" + if glob not in _pattern_cache: + pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) + return pattern.match(fn) + return _pattern_cache[glob].match(fn) + def _load_lexers(module_name): """Load a lexer (and all others in the module too).""" @@ -51,9 +62,9 @@ def get_all_lexers(plugins=True): def find_lexer_class(name): - """Lookup a lexer class by name. - - Return None if not found. + """ + Return the `Lexer` subclass that with the *name* attribute as given by + the *name* argument. """ if name in _lexer_cache: return _lexer_cache[name] @@ -69,10 +80,15 @@ def find_lexer_class(name): def find_lexer_class_by_name(_alias): - """Lookup a lexer class by alias. + """ + Return the `Lexer` subclass that has `alias` in its aliases list, without + instantiating it. Like `get_lexer_by_name`, but does not instantiate the class. + Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is + found. + .. versionadded:: 2.2 """ if not _alias: @@ -91,9 +107,13 @@ def find_lexer_class_by_name(_alias): def get_lexer_by_name(_alias, **options): - """Get a lexer by an alias. + """ + Return an instance of a `Lexer` subclass that has `alias` in its + aliases list. The lexer is given the `options` at its + instantiation. - Raises ClassNotFound if not found. + Will raise :exc:`pygments.util.ClassNotFound` if no lexer with that alias is + found. 
""" if not _alias: raise ClassNotFound('no lexer for alias %r found' % _alias) @@ -158,13 +178,13 @@ def find_lexer_class_for_filename(_fn, code=None): fn = basename(_fn) for modname, name, _, filenames, _ in LEXERS.values(): for filename in filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): if name not in _lexer_cache: _load_lexers(modname) matches.append((_lexer_cache[name], filename)) for cls in find_plugin_lexers(): for filename in cls.filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): matches.append((cls, filename)) if isinstance(code, bytes): @@ -192,10 +212,15 @@ def get_rating(info): def get_lexer_for_filename(_fn, code=None, **options): """Get a lexer for a filename. - If multiple lexers match the filename pattern, use ``analyse_text()`` to - figure out which one is more appropriate. + Return a `Lexer` subclass instance that has a filename pattern + matching `fn`. The lexer is given the `options` at its + instantiation. - Raises ClassNotFound if not found. + Raise :exc:`pygments.util.ClassNotFound` if no lexer for that filename + is found. + + If multiple lexers match the filename pattern, use their ``analyse_text()`` + methods to figure out which one is more appropriate. """ res = find_lexer_class_for_filename(_fn, code) if not res: @@ -204,9 +229,12 @@ def get_lexer_for_filename(_fn, code=None, **options): def get_lexer_for_mimetype(_mime, **options): - """Get a lexer for a mimetype. + """ + Return a `Lexer` subclass instance that has `mime` in its mimetype + list. The lexer is given the `options` at its instantiation. - Raises ClassNotFound if not found. + Will raise :exc:`pygments.util.ClassNotFound` if not lexer for that mimetype + is found. 
""" for modname, name, _, _, mimetypes in LEXERS.values(): if _mime in mimetypes: @@ -232,30 +260,22 @@ def _iter_lexerclasses(plugins=True): def guess_lexer_for_filename(_fn, _text, **options): """ - Lookup all lexers that handle those filenames primary (``filenames``) - or secondary (``alias_filenames``). Then run a text analysis for those - lexers and choose the best result. - - usage:: - - >>> from pygments.lexers import guess_lexer_for_filename - >>> guess_lexer_for_filename('hello.html', '<%= @foo %>') - - >>> guess_lexer_for_filename('hello.html', '

{{ title|e }}

') - - >>> guess_lexer_for_filename('style.css', 'a { color: }') - + As :func:`guess_lexer()`, but only lexers which have a pattern in `filenames` + or `alias_filenames` that matches `filename` are taken into consideration. + + :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can + handle the content. """ fn = basename(_fn) primary = {} matching_lexers = set() for lexer in _iter_lexerclasses(): for filename in lexer.filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): matching_lexers.add(lexer) primary[lexer] = True for filename in lexer.alias_filenames: - if fnmatch(fn, filename): + if _fn_matches(fn, filename): matching_lexers.add(lexer) primary[lexer] = False if not matching_lexers: @@ -282,7 +302,15 @@ def type_sort(t): def guess_lexer(_text, **options): - """Guess a lexer by strong distinctions in the text (eg, shebang).""" + """ + Return a `Lexer` subclass instance that's guessed from the text in + `text`. For that, the :meth:`.analyse_text()` method of every known lexer + class is called with the text as argument, and the lexer which returned the + highest value will be instantiated and returned. + + :exc:`pygments.util.ClassNotFound` is raised if no lexer thinks it can + handle the content. + """ if not isinstance(_text, str): inencoding = options.get('inencoding', options.get('encoding')) diff --git a/src/pip/_vendor/pygments/lexers/_mapping.py b/src/pip/_vendor/pygments/lexers/_mapping.py index 1eaaf56e9c2..de6a0153b77 100644 --- a/src/pip/_vendor/pygments/lexers/_mapping.py +++ b/src/pip/_vendor/pygments/lexers/_mapping.py @@ -1,5 +1,5 @@ # Automatically generated by scripts/gen_mapfiles.py. -# DO NOT EDIT BY HAND; run `make mapfiles` instead. +# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. 
LEXERS = { 'ABAPLexer': ('pip._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), @@ -71,6 +71,7 @@ 'CadlLexer': ('pip._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()), 'CapDLLexer': ('pip._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()), 'CapnProtoLexer': ('pip._vendor.pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()), + 'CarbonLexer': ('pip._vendor.pygments.lexers.carbon', 'Carbon', ('carbon',), ('*.carbon',), ('text/x-carbon',)), 'CbmBasicV2Lexer': ('pip._vendor.pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), 'CddlLexer': ('pip._vendor.pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)), 'CeylonLexer': ('pip._vendor.pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), @@ -121,6 +122,7 @@ 'DarcsPatchLexer': ('pip._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), 'DartLexer': ('pip._vendor.pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), 'Dasm16Lexer': ('pip._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), + 'DaxLexer': ('pip._vendor.pygments.lexers.dax', 'Dax', ('dax',), ('*.dax',), ()), 'DebianControlLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()), 'DelphiLexer': ('pip._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), 'DevicetreeLexer': ('pip._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)), @@ -368,6 +370,7 @@ 'PortugolLexer': ('pip._vendor.pygments.lexers.pascal', 'Portugol', ('portugol',), ('*.alg', '*.portugol'), ()), 'PostScriptLexer': ('pip._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), 
('application/postscript',)), 'PostgresConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), + 'PostgresExplainLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL EXPLAIN dialect', ('postgres-explain',), ('*.explain',), ('text/x-postgresql-explain',)), 'PostgresLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), 'PovrayLexer': ('pip._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), 'PowerShellLexer': ('pip._vendor.pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), @@ -488,7 +491,7 @@ 'TeraTermLexer': ('pip._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)), 'TermcapLexer': ('pip._vendor.pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()), 'TerminfoLexer': ('pip._vendor.pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()), - 'TerraformLexer': ('pip._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')), + 'TerraformLexer': ('pip._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf', 'hcl'), ('*.tf', '*.hcl'), ('application/x-tf', 'application/x-terraform')), 'TexLexer': ('pip._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), 'TextLexer': ('pip._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), 'ThingsDBLexer': ('pip._vendor.pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()), @@ -528,7 +531,9 @@ 'WDiffLexer': ('pip._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), 'WatLexer': 
('pip._vendor.pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()), 'WebIDLLexer': ('pip._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()), + 'WgslLexer': ('pip._vendor.pygments.lexers.wgsl', 'WebGPU Shading Language', ('wgsl',), ('*.wgsl',), ('text/wgsl',)), 'WhileyLexer': ('pip._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)), + 'WikitextLexer': ('pip._vendor.pygments.lexers.markup', 'Wikitext', ('wikitext', 'mediawiki'), (), ('text/x-wiki',)), 'WoWTocLexer': ('pip._vendor.pygments.lexers.wowtoc', 'World of Warcraft TOC', ('wowtoc',), ('*.toc',), ()), 'WrenLexer': ('pip._vendor.pygments.lexers.wren', 'Wren', ('wren',), ('*.wren',), ()), 'X10Lexer': ('pip._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)), @@ -540,6 +545,7 @@ 'XmlPhpLexer': ('pip._vendor.pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)), 'XmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)), 'XorgLexer': ('pip._vendor.pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()), + 'XppLexer': ('pip._vendor.pygments.lexers.dotnet', 'X++', ('xpp', 'x++'), ('*.xpp',), ()), 'XsltLexer': ('pip._vendor.pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')), 'XtendLexer': ('pip._vendor.pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), 'XtlangLexer': ('pip._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()), diff --git a/src/pip/_vendor/pygments/lexers/python.py b/src/pip/_vendor/pygments/lexers/python.py index 3341a382685..e9bf2d33727 100644 --- a/src/pip/_vendor/pygments/lexers/python.py +++ b/src/pip/_vendor/pygments/lexers/python.py @@ -4,15 +4,15 @@ Lexers for Python and related languages. 
- :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re import keyword -from pip._vendor.pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \ - default, words, combined, do_insertions, this, line_re +from pip._vendor.pygments.lexer import DelegatingLexer, Lexer, RegexLexer, include, \ + bygroups, using, default, words, combined, do_insertions, this, line_re from pip._vendor.pygments.util import get_bool_opt, shebang_matches from pip._vendor.pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Generic, Other, Error, Whitespace @@ -234,16 +234,16 @@ def fstring_rules(ttype): ], 'builtins': [ (words(( - '__import__', 'abs', 'all', 'any', 'bin', 'bool', 'bytearray', - 'breakpoint', 'bytes', 'chr', 'classmethod', 'compile', 'complex', - 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'filter', - 'float', 'format', 'frozenset', 'getattr', 'globals', 'hasattr', - 'hash', 'hex', 'id', 'input', 'int', 'isinstance', 'issubclass', - 'iter', 'len', 'list', 'locals', 'map', 'max', 'memoryview', - 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'print', - 'property', 'range', 'repr', 'reversed', 'round', 'set', 'setattr', - 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', - 'type', 'vars', 'zip'), prefix=r'(?>> )(.*\n)', bygroups(Generic.Prompt, Other.Code), 'continuations'), + # This happens, e.g., when tracebacks are embedded in documentation; + # trailing whitespaces are often stripped in such contexts. + (r'(>>>)(\n)', bygroups(Generic.Prompt, Whitespace)), + (r'(\^C)?Traceback \(most recent call last\):\n', Other.Traceback, 'traceback'), + # SyntaxError starts with this + (r' File "[^"]+", line \d+', Other.Traceback, 'traceback'), + (r'.*\n', Generic.Output), + ], + 'continuations': [ + (r'(\.\.\. 
)(.*\n)', bygroups(Generic.Prompt, Other.Code)), + # See above. + (r'(\.\.\.)(\n)', bygroups(Generic.Prompt, Whitespace)), + default('#pop'), + ], + 'traceback': [ + # As soon as we see a traceback, consume everything until the next + # >>> prompt. + (r'(?=>>>( |$))', Text, '#pop'), + (r'(KeyboardInterrupt)(\n)', bygroups(Name.Class, Whitespace)), + (r'.*\n', Other.Traceback), + ], + } -class PythonConsoleLexer(Lexer): +class PythonConsoleLexer(DelegatingLexer): """ For Python console output or doctests, such as: .. sourcecode:: pycon >>> a = 'foo' - >>> print a + >>> print(a) foo >>> 1 / 0 Traceback (most recent call last): @@ -659,70 +694,28 @@ class PythonConsoleLexer(Lexer): .. versionchanged:: 2.5 Now defaults to ``True``. """ + name = 'Python console session' aliases = ['pycon'] mimetypes = ['text/x-python-doctest'] def __init__(self, **options): - self.python3 = get_bool_opt(options, 'python3', True) - Lexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - if self.python3: - pylexer = PythonLexer(**self.options) - tblexer = PythonTracebackLexer(**self.options) + python3 = get_bool_opt(options, 'python3', True) + if python3: + pylexer = PythonLexer + tblexer = PythonTracebackLexer else: - pylexer = Python2Lexer(**self.options) - tblexer = Python2TracebackLexer(**self.options) - - curcode = '' - insertions = [] - curtb = '' - tbindex = 0 - tb = 0 - for match in line_re.finditer(text): - line = match.group() - if line.startswith('>>> ') or line.startswith('... '): - tb = 0 - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:4])])) - curcode += line[4:] - elif line.rstrip() == '...' 
and not tb: - # only a new >>> prompt can end an exception block - # otherwise an ellipsis in place of the traceback frames - # will be mishandled - insertions.append((len(curcode), - [(0, Generic.Prompt, '...')])) - curcode += line[3:] - else: - if curcode: - yield from do_insertions( - insertions, pylexer.get_tokens_unprocessed(curcode)) - curcode = '' - insertions = [] - if (line.startswith('Traceback (most recent call last):') or - re.match(' File "[^"]+", line \\d+\\n$', line)): - tb = 1 - curtb = line - tbindex = match.start() - elif line == 'KeyboardInterrupt\n': - yield match.start(), Name.Class, line - elif tb: - curtb += line - if not (line.startswith(' ') or line.strip() == '...'): - tb = 0 - for i, t, v in tblexer.get_tokens_unprocessed(curtb): - yield tbindex+i, t, v - curtb = '' - else: - yield match.start(), Generic.Output, line - if curcode: - yield from do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)) - if curtb: - for i, t, v in tblexer.get_tokens_unprocessed(curtb): - yield tbindex+i, t, v - + pylexer = Python2Lexer + tblexer = Python2TracebackLexer + # We have two auxiliary lexers. Use DelegatingLexer twice with + # different tokens. TODO: DelegatingLexer should support this + # directly, by accepting a tuplet of auxiliary lexers and a tuple of + # distinguishing tokens. Then we wouldn't need this intermediary + # class. 
+ class _ReplaceInnerCode(DelegatingLexer): + def __init__(self, **options): + super().__init__(pylexer, _PythonConsoleLexerBase, Other.Code, **options) + super().__init__(tblexer, _ReplaceInnerCode, Other.Traceback, **options) class PythonTracebackLexer(RegexLexer): """ @@ -743,7 +736,7 @@ class PythonTracebackLexer(RegexLexer): tokens = { 'root': [ (r'\n', Whitespace), - (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), + (r'^(\^C)?Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), (r'^During handling of the above exception, another ' r'exception occurred:\n\n', Generic.Traceback), (r'^The above exception was the direct cause of the ' @@ -763,7 +756,8 @@ class PythonTracebackLexer(RegexLexer): (r'^([^:]+)(: )(.+)(\n)', bygroups(Generic.Error, Text, Name, Whitespace), '#pop'), (r'^([a-zA-Z_][\w.]*)(:?\n)', - bygroups(Generic.Error, Whitespace), '#pop') + bygroups(Generic.Error, Whitespace), '#pop'), + default('#pop'), ], 'markers': [ # Either `PEP 657 ` diff --git a/src/pip/_vendor/pygments/modeline.py b/src/pip/_vendor/pygments/modeline.py index 43630835ca6..7b6f6a324ba 100644 --- a/src/pip/_vendor/pygments/modeline.py +++ b/src/pip/_vendor/pygments/modeline.py @@ -4,7 +4,7 @@ A simple modeline parser (based on pymodeline). - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/plugin.py b/src/pip/_vendor/pygments/plugin.py index 3590bee8d29..7b722d58db0 100644 --- a/src/pip/_vendor/pygments/plugin.py +++ b/src/pip/_vendor/pygments/plugin.py @@ -34,7 +34,7 @@ yourfilter = yourfilter:YourFilter - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/pip/_vendor/pygments/regexopt.py b/src/pip/_vendor/pygments/regexopt.py index ae0079199b9..45223eccc10 100644 --- a/src/pip/_vendor/pygments/regexopt.py +++ b/src/pip/_vendor/pygments/regexopt.py @@ -5,7 +5,7 @@ An algorithm that generates optimized regexes for matching long lists of literal strings. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/scanner.py b/src/pip/_vendor/pygments/scanner.py index d47ed4828a0..32a2f303296 100644 --- a/src/pip/_vendor/pygments/scanner.py +++ b/src/pip/_vendor/pygments/scanner.py @@ -11,7 +11,7 @@ Have a look at the `DelphiLexer` to get an idea of how to use this scanner. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re diff --git a/src/pip/_vendor/pygments/sphinxext.py b/src/pip/_vendor/pygments/sphinxext.py index 3537ecdb26f..2c7facde830 100644 --- a/src/pip/_vendor/pygments/sphinxext.py +++ b/src/pip/_vendor/pygments/sphinxext.py @@ -5,7 +5,7 @@ Sphinx extension to generate automatic documentation of lexers, formatters and filters. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ diff --git a/src/pip/_vendor/pygments/style.py b/src/pip/_vendor/pygments/style.py index 84abbc20599..edc19627dba 100644 --- a/src/pip/_vendor/pygments/style.py +++ b/src/pip/_vendor/pygments/style.py @@ -4,7 +4,7 @@ Basic style object. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/pip/_vendor/pygments/styles/__init__.py b/src/pip/_vendor/pygments/styles/__init__.py index 44cc0efb086..7401cf5d3a3 100644 --- a/src/pip/_vendor/pygments/styles/__init__.py +++ b/src/pip/_vendor/pygments/styles/__init__.py @@ -4,15 +4,15 @@ Contains built-in styles. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pip._vendor.pygments.plugin import find_plugin_styles from pip._vendor.pygments.util import ClassNotFound - -#: Maps style names to 'submodule::classname'. +#: A dictionary of built-in styles, mapping style names to +#: ``'submodule::classname'`` strings. STYLE_MAP = { 'default': 'default::DefaultStyle', 'emacs': 'emacs::EmacsStyle', @@ -66,6 +66,13 @@ def get_style_by_name(name): + """ + Return a style class by its short name. The names of the builtin styles + are listed in :data:`pygments.styles.STYLE_MAP`. + + Will raise :exc:`pygments.util.ClassNotFound` if no style of that name is + found. + """ if name in STYLE_MAP: mod, cls = STYLE_MAP[name].split('::') builtin = "yes" @@ -90,8 +97,7 @@ def get_style_by_name(name): def get_all_styles(): - """Return a generator for all styles by name, - both builtin and plugin.""" + """Return a generator for all styles by name, both builtin and plugin.""" yield from STYLE_MAP for name, _ in find_plugin_styles(): yield name diff --git a/src/pip/_vendor/pygments/token.py b/src/pip/_vendor/pygments/token.py index e3e565ad591..7395cb6a620 100644 --- a/src/pip/_vendor/pygments/token.py +++ b/src/pip/_vendor/pygments/token.py @@ -4,7 +4,7 @@ Basic token types and the standard tokens. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" diff --git a/src/pip/_vendor/pygments/unistring.py b/src/pip/_vendor/pygments/unistring.py index 2e3c80869d9..39f6baeedfb 100644 --- a/src/pip/_vendor/pygments/unistring.py +++ b/src/pip/_vendor/pygments/unistring.py @@ -7,7 +7,7 @@ Inspired by chartypes_create.py from the MoinMoin project. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -112,7 +112,7 @@ def _handle_runs(char_list): # pragma: no cover categories = {'xid_start': [], 'xid_continue': []} - with open(__file__) as fp: + with open(__file__, encoding='utf-8') as fp: content = fp.read() header = content[:content.find('Cc =')] @@ -136,7 +136,7 @@ def _handle_runs(char_list): # pragma: no cover if ('a' + c).isidentifier(): categories['xid_continue'].append(c) - with open(__file__, 'w') as fp: + with open(__file__, 'w', encoding='utf-8') as fp: fp.write(header) for cat in sorted(categories): diff --git a/src/pip/_vendor/pygments/util.py b/src/pip/_vendor/pygments/util.py index 8032962dc99..941fdb9ec7a 100644 --- a/src/pip/_vendor/pygments/util.py +++ b/src/pip/_vendor/pygments/util.py @@ -4,7 +4,7 @@ Utility functions. - :copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ @@ -32,10 +32,16 @@ class ClassNotFound(ValueError): class OptionError(Exception): - pass - + """ + This exception will be raised by all option processing functions if + the type or value of the argument is not correct. + """ def get_choice_opt(options, optname, allowed, default=None, normcase=False): + """ + If the key `optname` from the dictionary is not in the sequence + `allowed`, raise an error, otherwise return it. 
+ """ string = options.get(optname, default) if normcase: string = string.lower() @@ -46,6 +52,17 @@ def get_choice_opt(options, optname, allowed, default=None, normcase=False): def get_bool_opt(options, optname, default=None): + """ + Intuitively, this is `options.get(optname, default)`, but restricted to + Boolean value. The Booleans can be represented as string, in order to accept + Boolean value from the command line arguments. If the key `optname` is + present in the dictionary `options` and is not associated with a Boolean, + raise an `OptionError`. If it is absent, `default` is returned instead. + + The valid string values for ``True`` are ``1``, ``yes``, ``true`` and + ``on``, the ones for ``False`` are ``0``, ``no``, ``false`` and ``off`` + (matched case-insensitively). + """ string = options.get(optname, default) if isinstance(string, bool): return string @@ -66,6 +83,7 @@ def get_bool_opt(options, optname, default=None): def get_int_opt(options, optname, default=None): + """As :func:`get_bool_opt`, but interpret the value as an integer.""" string = options.get(optname, default) try: return int(string) @@ -78,8 +96,12 @@ def get_int_opt(options, optname, default=None): 'must give an integer value' % ( string, optname)) - def get_list_opt(options, optname, default=None): + """ + If the key `optname` from the dictionary `options` is a string, + split it at whitespace and return it. If it is already a list + or a tuple, it is returned as a list. 
+ """ val = options.get(optname, default) if isinstance(val, str): return val.split() diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index f3e9d2fd3e5..7ea9dd4fc09 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -13,7 +13,7 @@ requests==2.31.0 idna==3.4 urllib3==1.26.15 rich==13.4.2 - pygments==2.14.0 + pygments==2.15.1 typing_extensions==4.6.0 resolvelib==1.0.1 setuptools==67.7.2 From 92494e94e9c86d1184483d884dd52382ce5e542e Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 9 Jul 2023 21:32:49 +0100 Subject: [PATCH 577/730] Upgrade typing_extensions to 4.7.1 --- news/typing_extensions.vendor.rst | 1 + src/pip/_vendor/typing_extensions.LICENSE | 33 +- src/pip/_vendor/typing_extensions.py | 884 ++++++++++++++-------- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 602 insertions(+), 318 deletions(-) create mode 100644 news/typing_extensions.vendor.rst diff --git a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst new file mode 100644 index 00000000000..6a18f7f2ff0 --- /dev/null +++ b/news/typing_extensions.vendor.rst @@ -0,0 +1 @@ +Upgrade typing_extensions to 4.7.1 diff --git a/src/pip/_vendor/typing_extensions.LICENSE b/src/pip/_vendor/typing_extensions.LICENSE index 1df6b3b8de0..f26bcf4d2de 100644 --- a/src/pip/_vendor/typing_extensions.LICENSE +++ b/src/pip/_vendor/typing_extensions.LICENSE @@ -2,12 +2,12 @@ A. HISTORY OF THE SOFTWARE ========================== Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands as a successor of a language called ABC. Guido remains Python's principal author, although it includes many contributions from others. 
In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) in Reston, Virginia where he released several versions of the software. @@ -19,7 +19,7 @@ https://www.python.org/psf/) was formed, a non-profit organization created specifically to own Python-related Intellectual Property. Zope Corporation was a sponsoring member of the PSF. -All Python releases are Open Source (see http://www.opensource.org for +All Python releases are Open Source (see https://opensource.org for the Open Source Definition). Historically, most, but not all, Python releases have also been GPL-compatible; the table below summarizes the various releases. @@ -59,6 +59,17 @@ direction to make these releases possible. B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON =============================================================== +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. 
+ + PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 -------------------------------------------- @@ -73,7 +84,7 @@ analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. @@ -252,3 +263,17 @@ FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
diff --git a/src/pip/_vendor/typing_extensions.py b/src/pip/_vendor/typing_extensions.py index ae740bff748..4f93acffbdc 100644 --- a/src/pip/_vendor/typing_extensions.py +++ b/src/pip/_vendor/typing_extensions.py @@ -9,7 +9,6 @@ import typing import warnings - __all__ = [ # Super-special typing primitives. 'Any', @@ -66,8 +65,10 @@ 'get_args', 'get_origin', 'get_original_bases', + 'get_protocol_members', 'get_type_hints', 'IntVar', + 'is_protocol', 'is_typeddict', 'Literal', 'NewType', @@ -86,6 +87,45 @@ 'NoReturn', 'Required', 'NotRequired', + + # Pure aliases, have always been in typing + 'AbstractSet', + 'AnyStr', + 'BinaryIO', + 'Callable', + 'Collection', + 'Container', + 'Dict', + 'ForwardRef', + 'FrozenSet', + 'Generator', + 'Generic', + 'Hashable', + 'IO', + 'ItemsView', + 'Iterable', + 'Iterator', + 'KeysView', + 'List', + 'Mapping', + 'MappingView', + 'Match', + 'MutableMapping', + 'MutableSequence', + 'MutableSet', + 'Optional', + 'Pattern', + 'Reversible', + 'Sequence', + 'Set', + 'Sized', + 'TextIO', + 'Tuple', + 'Union', + 'ValuesView', + 'cast', + 'no_type_check', + 'no_type_check_decorator', ] # for backward compatibility @@ -202,17 +242,19 @@ def __new__(cls, *args, **kwargs): ClassVar = typing.ClassVar + +class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return 'typing_extensions.' + self._name + + # On older versions of typing there is an internal class named "Final". # 3.8+ if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7): Final = typing.Final # 3.7 else: - class _FinalForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' 
+ self._name - + class _FinalForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') @@ -304,14 +346,11 @@ def __eq__(self, other): def __hash__(self): return hash(frozenset(_value_and_type_iter(self.__args__))) - class _LiteralForm(typing._SpecialForm, _root=True): + class _LiteralForm(_ExtensionsSpecialForm, _root=True): def __init__(self, doc: str): self._name = 'Literal' self._doc = self.__doc__ = doc - def __repr__(self): - return 'typing_extensions.' + self._name - def __getitem__(self, parameters): if not isinstance(parameters, tuple): parameters = (parameters,) @@ -454,9 +493,10 @@ def clear_overloads(): _PROTO_ALLOWLIST = { 'collections.abc': [ 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', + 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', ], 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], + 'typing_extensions': ['Buffer'], } @@ -546,9 +586,8 @@ def _caller(depth=2): # so we backport the 3.12 version of Protocol to Python <=3.11 if sys.version_info >= (3, 12): Protocol = typing.Protocol - runtime_checkable = typing.runtime_checkable else: - def _allow_reckless_class_checks(depth=4): + def _allow_reckless_class_checks(depth=3): """Allow instance and class checks for special stdlib modules. The abc and functools modules indiscriminately call isinstance() and issubclass() on the whole MRO of a user class, which may contain protocols. 
@@ -559,11 +598,41 @@ def _no_init(self, *args, **kwargs): if type(self)._is_protocol: raise TypeError('Protocols cannot be instantiated') - class _ProtocolMeta(abc.ABCMeta): + if sys.version_info >= (3, 8): + # Inheriting from typing._ProtocolMeta isn't actually desirable, + # but is necessary to allow typing.Protocol and typing_extensions.Protocol + # to mix without getting TypeErrors about "metaclass conflict" + _typing_Protocol = typing.Protocol + _ProtocolMetaBase = type(_typing_Protocol) + else: + _typing_Protocol = _marker + _ProtocolMetaBase = abc.ABCMeta + + class _ProtocolMeta(_ProtocolMetaBase): # This metaclass is somewhat unfortunate, # but is necessary for several reasons... + # + # NOTE: DO NOT call super() in any methods in this class + # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 + # and those are slow + def __new__(mcls, name, bases, namespace, **kwargs): + if name == "Protocol" and len(bases) < 2: + pass + elif {Protocol, _typing_Protocol} & set(bases): + for base in bases: + if not ( + base in {object, typing.Generic, Protocol, _typing_Protocol} + or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) + or is_protocol(base) + ): + raise TypeError( + f"Protocols can only inherit from other protocols, " + f"got {base!r}" + ) + return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) + def __init__(cls, *args, **kwargs): - super().__init__(*args, **kwargs) + abc.ABCMeta.__init__(cls, *args, **kwargs) if getattr(cls, "_is_protocol", False): cls.__protocol_attrs__ = _get_protocol_attrs(cls) # PEP 544 prohibits using issubclass() @@ -573,31 +642,46 @@ def __init__(cls, *args, **kwargs): ) def __subclasscheck__(cls, other): + if cls is Protocol: + return type.__subclasscheck__(cls, other) if ( getattr(cls, '_is_protocol', False) - and not cls.__callable_proto_members_only__ - and not _allow_reckless_class_checks(depth=3) + and not _allow_reckless_class_checks() ): - raise TypeError( - "Protocols with 
non-method members don't support issubclass()" - ) - return super().__subclasscheck__(other) + if not isinstance(other, type): + # Same error message as for issubclass(1, int). + raise TypeError('issubclass() arg 1 must be a class') + if ( + not cls.__callable_proto_members_only__ + and cls.__dict__.get("__subclasshook__") is _proto_hook + ): + raise TypeError( + "Protocols with non-method members don't support issubclass()" + ) + if not getattr(cls, '_is_runtime_protocol', False): + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) + return abc.ABCMeta.__subclasscheck__(cls, other) def __instancecheck__(cls, instance): # We need this method for situations where attributes are # assigned in __init__. + if cls is Protocol: + return type.__instancecheck__(cls, instance) if not getattr(cls, "_is_protocol", False): # i.e., it's a concrete subclass of a protocol - return super().__instancecheck__(instance) + return abc.ABCMeta.__instancecheck__(cls, instance) if ( not getattr(cls, '_is_runtime_protocol', False) and - not _allow_reckless_class_checks(depth=2) + not _allow_reckless_class_checks() ): raise TypeError("Instance and class checks can only be used with" " @runtime_checkable protocols") - if super().__instancecheck__(instance): + if abc.ABCMeta.__instancecheck__(cls, instance): return True for attr in cls.__protocol_attrs__: @@ -612,132 +696,164 @@ def __instancecheck__(cls, instance): return False - class Protocol(metaclass=_ProtocolMeta): - # There is quite a lot of overlapping code with typing.Generic. - # Unfortunately it is hard to avoid this while these live in two different - # modules. The duplicated code will be removed when Protocol is moved to typing. - """Base class for protocol classes. Protocol classes are defined as:: - - class Proto(Protocol): - def meth(self) -> int: - ... 
+ def __eq__(cls, other): + # Hack so that typing.Generic.__class_getitem__ + # treats typing_extensions.Protocol + # as equivalent to typing.Protocol on Python 3.8+ + if abc.ABCMeta.__eq__(cls, other) is True: + return True + return ( + cls is Protocol and other is getattr(typing, "Protocol", object()) + ) - Such classes are primarily used with static type checkers that recognize - structural subtyping (static duck-typing), for example:: + # This has to be defined, or the abc-module cache + # complains about classes with this metaclass being unhashable, + # if we define only __eq__! + def __hash__(cls) -> int: + return type.__hash__(cls) + + @classmethod + def _proto_hook(cls, other): + if not cls.__dict__.get('_is_protocol', False): + return NotImplemented + + for attr in cls.__protocol_attrs__: + for base in other.__mro__: + # Check if the members appears in the class dictionary... + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break - class C: - def meth(self) -> int: - return 0 + # ...or in annotations, if it is a sub-protocol. + annotations = getattr(base, '__annotations__', {}) + if ( + isinstance(annotations, collections.abc.Mapping) + and attr in annotations + and is_protocol(other) + ): + break + else: + return NotImplemented + return True - def func(x: Proto) -> int: - return x.meth() + if sys.version_info >= (3, 8): + class Protocol(typing.Generic, metaclass=_ProtocolMeta): + __doc__ = typing.Protocol.__doc__ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False - func(C()) # Passes static type check + def __init_subclass__(cls, *args, **kwargs): + super().__init_subclass__(*args, **kwargs) - See PEP 544 for details. Protocol classes decorated with - @typing_extensions.runtime act as simple-minded runtime protocol that checks - only the presence of given attributes, ignoring their type signatures. + # Determine if this is a protocol or a concrete subclass. 
+ if not cls.__dict__.get('_is_protocol', False): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) - Protocol classes can be generic, they are defined as:: + # Set (or override) the protocol subclass hook. + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook - class GenProto(Protocol[T]): - def meth(self) -> T: - ... - """ - __slots__ = () - _is_protocol = True - _is_runtime_protocol = False + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init - def __new__(cls, *args, **kwds): - if cls is Protocol: - raise TypeError("Type Protocol cannot be instantiated; " - "it can only be used as a base class") - return super().__new__(cls) - - @typing._tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple): - params = (params,) - if not params and cls is not typing.Tuple: - raise TypeError( - f"Parameter list to {cls.__qualname__}[...] cannot be empty") - msg = "Parameters to generic types must be types." - params = tuple(typing._type_check(p, msg) for p in params) - if cls is Protocol: - # Generic can only be subscripted with unique type variables. - if not all(isinstance(p, typing.TypeVar) for p in params): - i = 0 - while isinstance(params[i], typing.TypeVar): - i += 1 - raise TypeError( - "Parameters to Protocol[...] must all be type variables." - f" Parameter {i + 1} is {params[i]}") - if len(set(params)) != len(params): + else: + class Protocol(metaclass=_ProtocolMeta): + # There is quite a lot of overlapping code with typing.Generic. + # Unfortunately it is hard to avoid this on Python <3.8, + # as the typing module on Python 3.7 doesn't let us subclass typing.Generic! + """Base class for protocol classes. Protocol classes are defined as:: + + class Proto(Protocol): + def meth(self) -> int: + ... 
+ + Such classes are primarily used with static type checkers that recognize + structural subtyping (static duck-typing), for example:: + + class C: + def meth(self) -> int: + return 0 + + def func(x: Proto) -> int: + return x.meth() + + func(C()) # Passes static type check + + See PEP 544 for details. Protocol classes decorated with + @typing_extensions.runtime_checkable act + as simple-minded runtime-checkable protocols that check + only the presence of given attributes, ignoring their type signatures. + + Protocol classes can be generic, they are defined as:: + + class GenProto(Protocol[T]): + def meth(self) -> T: + ... + """ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False + + def __new__(cls, *args, **kwds): + if cls is Protocol: + raise TypeError("Type Protocol cannot be instantiated; " + "it can only be used as a base class") + return super().__new__(cls) + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple): + params = (params,) + if not params and cls is not typing.Tuple: raise TypeError( - "Parameters to Protocol[...] must all be unique") - else: - # Subscripting a regular Generic subclass. - _check_generic(cls, params, len(cls.__parameters__)) - return typing._GenericAlias(cls, params) - - def __init_subclass__(cls, *args, **kwargs): - if '__orig_bases__' in cls.__dict__: - error = typing.Generic in cls.__orig_bases__ - else: - error = typing.Generic in cls.__bases__ - if error: - raise TypeError("Cannot inherit from plain Generic") - _maybe_adjust_parameters(cls) + f"Parameter list to {cls.__qualname__}[...] cannot be empty") + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) + if cls is Protocol: + # Generic can only be subscripted with unique type variables. 
+ if not all(isinstance(p, typing.TypeVar) for p in params): + i = 0 + while isinstance(params[i], typing.TypeVar): + i += 1 + raise TypeError( + "Parameters to Protocol[...] must all be type variables." + f" Parameter {i + 1} is {params[i]}") + if len(set(params)) != len(params): + raise TypeError( + "Parameters to Protocol[...] must all be unique") + else: + # Subscripting a regular Generic subclass. + _check_generic(cls, params, len(cls.__parameters__)) + return typing._GenericAlias(cls, params) - # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', None): - cls._is_protocol = any(b is Protocol for b in cls.__bases__) + def __init_subclass__(cls, *args, **kwargs): + if '__orig_bases__' in cls.__dict__: + error = typing.Generic in cls.__orig_bases__ + else: + error = typing.Generic in cls.__bases__ + if error: + raise TypeError("Cannot inherit from plain Generic") + _maybe_adjust_parameters(cls) - # Set (or override) the protocol subclass hook. - def _proto_hook(other): + # Determine if this is a protocol or a concrete subclass. 
if not cls.__dict__.get('_is_protocol', None): - return NotImplemented - if not getattr(cls, '_is_runtime_protocol', False): - if _allow_reckless_class_checks(): - return NotImplemented - raise TypeError("Instance and class checks can only be used with" - " @runtime protocols") - if not isinstance(other, type): - # Same error as for issubclass(1, int) - raise TypeError('issubclass() arg 1 must be a class') - for attr in cls.__protocol_attrs__: - for base in other.__mro__: - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - annotations = getattr(base, '__annotations__', {}) - if (isinstance(annotations, typing.Mapping) and - attr in annotations and - isinstance(other, _ProtocolMeta) and - other._is_protocol): - break - else: - return NotImplemented - return True - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook + cls._is_protocol = any(b is Protocol for b in cls.__bases__) - # We have nothing more to do for non-protocols. - if not cls._is_protocol: - return + # Set (or override) the protocol subclass hook. + if '__subclasshook__' not in cls.__dict__: + cls.__subclasshook__ = _proto_hook - # Check consistency of bases. - for base in cls.__bases__: - if not (base in (object, typing.Generic) or - base.__module__ in _PROTO_ALLOWLIST and - base.__name__ in _PROTO_ALLOWLIST[base.__module__] or - isinstance(base, _ProtocolMeta) and base._is_protocol): - raise TypeError('Protocols can only inherit from other' - f' protocols, got {repr(base)}') - if cls.__init__ is Protocol.__init__: - cls.__init__ = _no_init + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init + +if sys.version_info >= (3, 8): + runtime_checkable = typing.runtime_checkable +else: def runtime_checkable(cls): """Mark a protocol class as a runtime protocol, so that it can be used with isinstance() and issubclass(). 
Raise TypeError @@ -837,7 +953,22 @@ def __round__(self, ndigits: int = 0) -> T_co: pass -if sys.version_info >= (3, 12): +def _ensure_subclassable(mro_entries): + def inner(func): + if sys.implementation.name == "pypy" and sys.version_info < (3, 9): + cls_dict = { + "__call__": staticmethod(func), + "__mro_entries__": staticmethod(mro_entries) + } + t = type(func.__name__, (), cls_dict) + return functools.update_wrapper(t(), func) + else: + func.__mro_entries__ = mro_entries + return func + return inner + + +if sys.version_info >= (3, 13): # The standard library TypedDict in Python 3.8 does not store runtime information # about which (if any) keys are optional. See https://bugs.python.org/issue38834 # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" @@ -847,117 +978,61 @@ def __round__(self, ndigits: int = 0) -> T_co: # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. # Aaaand on 3.12 we add __orig_bases__ to TypedDict # to enable better runtime introspection. + # On 3.13 we deprecate some odd ways of creating TypedDicts. TypedDict = typing.TypedDict _TypedDictMeta = typing._TypedDictMeta is_typeddict = typing.is_typeddict else: - def _check_fails(cls, other): - try: - if _caller() not in {'abc', 'functools', 'typing'}: - # Typed dicts are only for static structural subtyping. 
- raise TypeError('TypedDict does not support instance and class checks') - except (AttributeError, ValueError): - pass - return False - - def _dict_new(*args, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - return dict(*args, **kwargs) - - _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)' - - def _typeddict_new(*args, total=True, **kwargs): - if not args: - raise TypeError('TypedDict.__new__(): not enough arguments') - _, args = args[0], args[1:] # allow the "cls" keyword be passed - if args: - typename, args = args[0], args[1:] # allow the "_typename" keyword be passed - elif '_typename' in kwargs: - typename = kwargs.pop('_typename') - warnings.warn("Passing '_typename' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - raise TypeError("TypedDict.__new__() missing 1 required positional " - "argument: '_typename'") - if args: - try: - fields, = args # allow the "_fields" keyword be passed - except ValueError: - raise TypeError('TypedDict.__new__() takes from 2 to 3 ' - f'positional arguments but {len(args) + 2} ' - 'were given') - elif '_fields' in kwargs and len(kwargs) == 1: - fields = kwargs.pop('_fields') - warnings.warn("Passing '_fields' as keyword argument is deprecated", - DeprecationWarning, stacklevel=2) - else: - fields = None - - if fields is None: - fields = kwargs - elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments," - " but not both") - - if kwargs: - warnings.warn( - "The kwargs-based syntax for TypedDict definitions is deprecated, " - "may be removed in a future version, and may not be " - "understood by third-party type checkers.", - DeprecationWarning, - stacklevel=2, - ) + # 3.10.0 and later + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters - ns = {'__annotations__': dict(fields)} - module = _caller() - if module is 
not None: - # Setting correct module is necessary to make typed dict classes pickleable. - ns['__module__'] = module + if sys.version_info >= (3, 8): + _fake_name = "Protocol" + else: + _fake_name = "_Protocol" - return _TypedDictMeta(typename, (), ns, total=total) + class _TypedDictMeta(type): + def __new__(cls, name, bases, ns, total=True): + """Create new typed dict class object. - _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,' - ' /, *, total=True, **kwargs)') + This method is called when TypedDict is subclassed, + or when TypedDict is instantiated. This way + TypedDict supports all three syntax forms described in its docstring. + Subclasses and instances of TypedDict return actual dictionaries. + """ + for base in bases: + if type(base) is not _TypedDictMeta and base is not typing.Generic: + raise TypeError('cannot inherit from both a TypedDict type ' + 'and a non-TypedDict base class') - _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + if any(issubclass(b, typing.Generic) for b in bases): + generic_base = (typing.Generic,) + else: + generic_base = () - class _TypedDictMeta(type): - def __init__(cls, name, bases, ns, total=True): - super().__init__(name, bases, ns) + # typing.py generally doesn't let you inherit from plain Generic, unless + # the name of the class happens to be "Protocol" (or "_Protocol" on 3.7). + tp_dict = type.__new__(_TypedDictMeta, _fake_name, (*generic_base, dict), ns) + tp_dict.__name__ = name + if tp_dict.__qualname__ == _fake_name: + tp_dict.__qualname__ = name - def __new__(cls, name, bases, ns, total=True): - # Create new typed dict class object. - # This method is called directly when TypedDict is subclassed, - # or via _typeddict_new when TypedDict is instantiated. This way - # TypedDict supports all three syntaxes described in its docstring. - # Subclasses and instances of TypedDict return actual dictionaries - # via _dict_new. 
- ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new - # Don't insert typing.Generic into __bases__ here, - # or Generic.__init_subclass__ will raise TypeError - # in the super().__new__() call. - # Instead, monkey-patch __bases__ onto the class after it's been created. - tp_dict = super().__new__(cls, name, (dict,), ns) - - is_generic = any(issubclass(base, typing.Generic) for base in bases) - - if is_generic: - tp_dict.__bases__ = (typing.Generic, dict) - _maybe_adjust_parameters(tp_dict) - else: - # generic TypedDicts get __orig_bases__ from Generic - tp_dict.__orig_bases__ = bases or (TypedDict,) + if not hasattr(tp_dict, '__orig_bases__'): + tp_dict.__orig_bases__ = bases annotations = {} own_annotations = ns.get('__annotations__', {}) msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" - kwds = {"module": tp_dict.__module__} if _TAKES_MODULE else {} - own_annotations = { - n: typing._type_check(tp, msg, **kwds) - for n, tp in own_annotations.items() - } + if _TAKES_MODULE: + own_annotations = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in own_annotations.items() + } + else: + own_annotations = { + n: typing._type_check(tp, msg) + for n, tp in own_annotations.items() + } required_keys = set() optional_keys = set() @@ -991,17 +1066,25 @@ def __new__(cls, name, bases, ns, total=True): tp_dict.__total__ = total return tp_dict - __instancecheck__ = __subclasscheck__ = _check_fails + __call__ = dict # static method + + def __subclasscheck__(cls, other): + # Typed dicts are only for static structural subtyping. + raise TypeError('TypedDict does not support instance and class checks') + + __instancecheck__ = __subclasscheck__ + + _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) - TypedDict = _TypedDictMeta('TypedDict', (dict,), {}) - TypedDict.__module__ = __name__ - TypedDict.__doc__ = \ - """A simple typed name space. At runtime it is equivalent to a plain dict. 
+ @_ensure_subclassable(lambda bases: (_TypedDict,)) + def TypedDict(__typename, __fields=_marker, *, total=True, **kwargs): + """A simple typed namespace. At runtime it is equivalent to a plain dict. - TypedDict creates a dictionary type that expects all of its - instances to have a certain set of keys, with each key + TypedDict creates a dictionary type such that a type checker will expect all + instances to have a certain set of keys, where each key is associated with a value of a consistent type. This expectation - is not checked at runtime but is only enforced by type checkers. + is not checked at runtime. + Usage:: class Point2D(TypedDict): @@ -1016,14 +1099,66 @@ class Point2D(TypedDict): The type info can be accessed via the Point2D.__annotations__ dict, and the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. - TypedDict supports two additional equivalent forms:: + TypedDict supports an additional equivalent form:: - Point2D = TypedDict('Point2D', x=int, y=int, label=str) Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) - The class syntax is only supported in Python 3.6+, while two other - syntax forms work for Python 2.7 and 3.2+ + By default, all keys must be present in a TypedDict. It is possible + to override this by specifying totality:: + + class Point2D(TypedDict, total=False): + x: int + y: int + + This means that a Point2D TypedDict can have any of the keys omitted. A type + checker is only expected to support a literal False or True as the value of + the total argument. True is the default, and makes all items defined in the + class body be required. + + The Required and NotRequired special forms can also be used to mark + individual keys as being required or not required:: + + class Point2D(TypedDict): + x: int # the "x" key must always be present (Required is the default) + y: NotRequired[int] # the "y" key can be omitted + + See PEP 655 for more details on Required and NotRequired. 
""" + if __fields is _marker or __fields is None: + if __fields is _marker: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + + example = f"`{__typename} = TypedDict({__typename!r}, {{}})`" + deprecation_msg = ( + f"{deprecated_thing} is deprecated and will be disallowed in " + "Python 3.15. To create a TypedDict class with 0 fields " + "using the functional syntax, pass an empty dictionary, e.g. " + ) + example + "." + warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + __fields = kwargs + elif kwargs: + raise TypeError("TypedDict takes either a dict or keyword arguments," + " but not both") + if kwargs: + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated " + "in Python 3.11, will be removed in Python 3.13, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + + ns = {'__annotations__': dict(__fields)} + module = _caller() + if module is not None: + # Setting correct module is necessary to make typed dict classes pickleable. + ns['__module__'] = module + + td = _TypedDictMeta(__typename, (), ns, total=total) + td.__orig_bases__ = (TypedDict,) + return td if hasattr(typing, "_TypedDictMeta"): _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) @@ -1041,7 +1176,10 @@ class Film(TypedDict): is_typeddict(Film) # => True is_typeddict(Union[list, str]) # => False """ - return isinstance(tp, tuple(_TYPEDDICT_TYPES)) + # On 3.8, this would otherwise return True + if hasattr(typing, "TypedDict") and tp is typing.TypedDict: + return False + return isinstance(tp, _TYPEDDICT_TYPES) if hasattr(typing, "assert_type"): @@ -1311,11 +1449,7 @@ def get_args(tp): TypeAlias = typing.TypeAlias # 3.9 elif sys.version_info[:2] >= (3, 9): - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' 
+ self._name - - @_TypeAliasForm + @_ExtensionsSpecialForm def TypeAlias(self, parameters): """Special marker indicating that an assignment should be recognized as a proper type alias definition by type @@ -1330,21 +1464,19 @@ def TypeAlias(self, parameters): raise TypeError(f"{self} is not subscriptable") # 3.7-3.8 else: - class _TypeAliasForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - TypeAlias = _TypeAliasForm('TypeAlias', - doc="""Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. + TypeAlias = _ExtensionsSpecialForm( + 'TypeAlias', + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. - For example:: + For example:: - Predicate: TypeAlias = Callable[..., bool] + Predicate: TypeAlias = Callable[..., bool] - It's invalid when used anywhere except as in the example - above.""") + It's invalid when used anywhere except as in the example + above.""" + ) def _set_default(type_param, default): @@ -1659,7 +1791,7 @@ def _concatenate_getitem(self, parameters): _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811 # 3.9 elif sys.version_info[:2] >= (3, 9): - @_TypeAliasForm + @_ExtensionsSpecialForm def Concatenate(self, parameters): """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a higher order function which adds, removes or transforms parameters of a @@ -1674,10 +1806,7 @@ def Concatenate(self, parameters): return _concatenate_getitem(self, parameters) # 3.7-8 else: - class _ConcatenateForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' 
+ self._name - + class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): return _concatenate_getitem(self, parameters) @@ -1699,11 +1828,7 @@ def __getitem__(self, parameters): TypeGuard = typing.TypeGuard # 3.9 elif sys.version_info[:2] >= (3, 9): - class _TypeGuardForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - @_TypeGuardForm + @_ExtensionsSpecialForm def TypeGuard(self, parameters): """Special typing form used to annotate the return type of a user-defined type guard function. ``TypeGuard`` only accepts a single type argument. @@ -1751,11 +1876,7 @@ def is_str(val: Union[str, float]): return typing._GenericAlias(self, (item,)) # 3.7-3.8 else: - class _TypeGuardForm(typing._SpecialForm, _root=True): - - def __repr__(self): - return 'typing_extensions.' + self._name - + class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type') @@ -1929,10 +2050,6 @@ def int_or_str(arg: int | str) -> None: Required = typing.Required NotRequired = typing.NotRequired elif sys.version_info[:2] >= (3, 9): - class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - @_ExtensionsSpecialForm def Required(self, parameters): """A special typing construct to mark a key of a total=False TypedDict @@ -1971,10 +2088,7 @@ class Movie(TypedDict): return typing._GenericAlias(self, (item,)) else: - class _RequiredForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' 
+ self._name - + class _RequiredForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') @@ -2062,14 +2176,11 @@ def _is_unpack(obj): return get_origin(obj) is Unpack elif sys.version_info[:2] >= (3, 9): - class _UnpackSpecialForm(typing._SpecialForm, _root=True): + class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): def __init__(self, getitem): super().__init__(getitem) self.__doc__ = _UNPACK_DOC - def __repr__(self): - return 'typing_extensions.' + self._name - class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar @@ -2085,10 +2196,7 @@ def _is_unpack(obj): class _UnpackAlias(typing._GenericAlias, _root=True): __class__ = typing.TypeVar - class _UnpackForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - + class _UnpackForm(_ExtensionsSpecialForm, _root=True): def __getitem__(self, parameters): item = typing._type_check(parameters, f'{self._name} accepts only a single type.') @@ -2436,11 +2544,11 @@ def decorator(__arg: _T) -> _T: @functools.wraps(original_new) def __new__(cls, *args, **kwargs): warnings.warn(__msg, category=category, stacklevel=stacklevel + 1) - # Mirrors a similar check in object.__new__. - if not has_init and (args or kwargs): - raise TypeError(f"{cls.__name__}() takes no arguments") if original_new is not object.__new__: return original_new(cls, *args, **kwargs) + # Mirrors a similar check in object.__new__. + elif not has_init and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") else: return original_new(cls) @@ -2480,7 +2588,8 @@ def wrapper(*args, **kwargs): # In 3.11, the ability to define generic `NamedTuple`s was supported. # This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 
# On 3.12, we added __orig_bases__ to call-based NamedTuples -if sys.version_info >= (3, 12): +# On 3.13, we deprecated kwargs-based NamedTuples +if sys.version_info >= (3, 13): NamedTuple = typing.NamedTuple else: def _make_nmtuple(name, types, module, defaults=()): @@ -2524,8 +2633,11 @@ def __new__(cls, typename, bases, ns): ) nm_tpl.__bases__ = bases if typing.Generic in bases: - class_getitem = typing.Generic.__class_getitem__.__func__ - nm_tpl.__class_getitem__ = classmethod(class_getitem) + if hasattr(typing, '_generic_class_getitem'): # 3.12+ + nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) + else: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) # update from user namespace without overriding special namedtuple attributes for key in ns: if key in _prohibited_namedtuple_fields: @@ -2536,30 +2648,87 @@ def __new__(cls, typename, bases, ns): nm_tpl.__init_subclass__() return nm_tpl - def NamedTuple(__typename, __fields=None, **kwargs): - if __fields is None: - __fields = kwargs.items() + _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + @_ensure_subclassable(_namedtuple_mro_entries) + def NamedTuple(__typename, __fields=_marker, **kwargs): + """Typed version of namedtuple. + + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) 
+ An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + if __fields is _marker: + if kwargs: + deprecated_thing = "Creating NamedTuple classes using keyword arguments" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "Use the class-based or functional syntax instead." + ) + else: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + example = f"`{__typename} = NamedTuple({__typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." + elif __fields is None: + if kwargs: + raise TypeError( + "Cannot pass `None` as the 'fields' parameter " + "and also specify fields using keyword arguments" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + example = f"`{__typename} = NamedTuple({__typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + ) + example + "." elif kwargs: raise TypeError("Either list of fields or keywords" " can be provided to NamedTuple, not both") + if __fields is _marker or __fields is None: + warnings.warn( + deprecation_msg.format(name=deprecated_thing, remove="3.15"), + DeprecationWarning, + stacklevel=2, + ) + __fields = kwargs.items() nt = _make_nmtuple(__typename, __fields, module=_caller()) nt.__orig_bases__ = (NamedTuple,) return nt - NamedTuple.__doc__ = typing.NamedTuple.__doc__ - _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) - # On 3.8+, alter the signature so that it matches typing.NamedTuple. 
# The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7, # so just leave the signature as it is on 3.7. if sys.version_info >= (3, 8): - NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)' - - def _namedtuple_mro_entries(bases): - assert NamedTuple in bases - return (_NamedTuple,) - - NamedTuple.__mro_entries__ = _namedtuple_mro_entries + _new_signature = '(typename, fields=None, /, **kwargs)' + if isinstance(NamedTuple, _types.FunctionType): + NamedTuple.__text_signature__ = _new_signature + else: + NamedTuple.__call__.__text_signature__ = _new_signature if hasattr(collections.abc, "Buffer"): @@ -2812,3 +2981,92 @@ def __ror__(self, left): if not _is_unionable(left): return NotImplemented return typing.Union[left, self] + + +if hasattr(typing, "is_protocol"): + is_protocol = typing.is_protocol + get_protocol_members = typing.get_protocol_members +else: + def is_protocol(__tp: type) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + return ( + isinstance(__tp, type) + and getattr(__tp, '_is_protocol', False) + and __tp is not Protocol + and __tp is not getattr(typing, "Protocol", object()) + ) + + def get_protocol_members(__tp: type) -> typing.FrozenSet[str]: + """Return the set of members defined in a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. 
+ """ + if not is_protocol(__tp): + raise TypeError(f'{__tp!r} is not a Protocol') + if hasattr(__tp, '__protocol_attrs__'): + return frozenset(__tp.__protocol_attrs__) + return frozenset(_get_protocol_attrs(__tp)) + + +# Aliases for items that have always been in typing. +# Explicitly assign these (rather than using `from typing import *` at the top), +# so that we get a CI error if one of these is deleted from typing.py +# in a future version of Python +AbstractSet = typing.AbstractSet +AnyStr = typing.AnyStr +BinaryIO = typing.BinaryIO +Callable = typing.Callable +Collection = typing.Collection +Container = typing.Container +Dict = typing.Dict +ForwardRef = typing.ForwardRef +FrozenSet = typing.FrozenSet +Generator = typing.Generator +Generic = typing.Generic +Hashable = typing.Hashable +IO = typing.IO +ItemsView = typing.ItemsView +Iterable = typing.Iterable +Iterator = typing.Iterator +KeysView = typing.KeysView +List = typing.List +Mapping = typing.Mapping +MappingView = typing.MappingView +Match = typing.Match +MutableMapping = typing.MutableMapping +MutableSequence = typing.MutableSequence +MutableSet = typing.MutableSet +Optional = typing.Optional +Pattern = typing.Pattern +Reversible = typing.Reversible +Sequence = typing.Sequence +Set = typing.Set +Sized = typing.Sized +TextIO = typing.TextIO +Tuple = typing.Tuple +Union = typing.Union +ValuesView = typing.ValuesView +cast = typing.cast +no_type_check = typing.no_type_check +no_type_check_decorator = typing.no_type_check_decorator diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 7ea9dd4fc09..ecad3657368 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -14,7 +14,7 @@ requests==2.31.0 urllib3==1.26.15 rich==13.4.2 pygments==2.15.1 - typing_extensions==4.6.0 + typing_extensions==4.7.1 resolvelib==1.0.1 setuptools==67.7.2 six==1.16.0 From 4e2295d1091c4a7ba0a6c0ace3b11f49f13ab21d Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sun, 9 Jul 2023 21:33:22 +0100 
Subject: [PATCH 578/730] Upgrade setuptools to 68.0.0 --- news/setuptools.vendor.rst | 1 + src/pip/_vendor/pkg_resources/LICENSE | 2 -- src/pip/_vendor/pkg_resources/__init__.py | 17 +++++++++-------- src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 11 insertions(+), 11 deletions(-) create mode 100644 news/setuptools.vendor.rst diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst new file mode 100644 index 00000000000..1bbb489934c --- /dev/null +++ b/news/setuptools.vendor.rst @@ -0,0 +1 @@ +Upgrade setuptools to 68.0.0 diff --git a/src/pip/_vendor/pkg_resources/LICENSE b/src/pip/_vendor/pkg_resources/LICENSE index 353924be0e5..1bb5a44356f 100644 --- a/src/pip/_vendor/pkg_resources/LICENSE +++ b/src/pip/_vendor/pkg_resources/LICENSE @@ -1,5 +1,3 @@ -Copyright Jason R. Coombs - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the diff --git a/src/pip/_vendor/pkg_resources/__init__.py b/src/pip/_vendor/pkg_resources/__init__.py index 1bf26a94226..ad2794077b0 100644 --- a/src/pip/_vendor/pkg_resources/__init__.py +++ b/src/pip/_vendor/pkg_resources/__init__.py @@ -13,11 +13,8 @@ .zip files and with custom PEP 302 loaders that support the ``get_data()`` method. -This module is deprecated. Users are directed to -`importlib.resources `_ -and -`importlib.metadata `_ -instead. +This module is deprecated. Users are directed to :mod:`importlib.resources`, +:mod:`importlib.metadata` and :pypi:`packaging` instead. """ import sys @@ -118,7 +115,12 @@ _namespace_packages = None -warnings.warn("pkg_resources is deprecated as an API", DeprecationWarning) +warnings.warn( + "pkg_resources is deprecated as an API. 
" + "See https://setuptools.pypa.io/en/latest/pkg_resources.html", + DeprecationWarning, + stacklevel=2 +) _PEP440_FALLBACK = re.compile(r"^v?(?P(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I) @@ -1659,10 +1661,9 @@ def _validate_resource_path(path): # for compatibility, warn; in future # raise ValueError(msg) - warnings.warn( + issue_warning( msg[:-1] + " and will raise exceptions in a future release.", DeprecationWarning, - stacklevel=4, ) def _get(self, path): diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index ecad3657368..88cc56c9cb4 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -16,7 +16,7 @@ rich==13.4.2 pygments==2.15.1 typing_extensions==4.7.1 resolvelib==1.0.1 -setuptools==67.7.2 +setuptools==68.0.0 six==1.16.0 tenacity==8.2.2 tomli==2.0.1 From c5c85075a4f3fdd4afe9b58bfebcf43a10722826 Mon Sep 17 00:00:00 2001 From: Joe Bylund Date: Sun, 9 Jul 2023 18:56:34 -0400 Subject: [PATCH 579/730] Update vendored urllib3 from 1.26.15 to 1.26.16 --- news/urllib3.vendor.rst | 1 + src/pip/_vendor/urllib3/_version.py | 2 +- src/pip/_vendor/urllib3/connectionpool.py | 38 ++++- .../packages/backports/weakref_finalize.py | 155 ++++++++++++++++++ src/pip/_vendor/urllib3/poolmanager.py | 2 +- src/pip/_vendor/vendor.txt | 2 +- 6 files changed, 189 insertions(+), 11 deletions(-) create mode 100644 news/urllib3.vendor.rst create mode 100644 src/pip/_vendor/urllib3/packages/backports/weakref_finalize.py diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst new file mode 100644 index 00000000000..b446ff9a6ca --- /dev/null +++ b/news/urllib3.vendor.rst @@ -0,0 +1 @@ +Upgrade urllib3 to 1.26.16 diff --git a/src/pip/_vendor/urllib3/_version.py b/src/pip/_vendor/urllib3/_version.py index e12dd0e7853..d69ca314570 100644 --- a/src/pip/_vendor/urllib3/_version.py +++ b/src/pip/_vendor/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.15" +__version__ = "1.26.16" diff --git 
a/src/pip/_vendor/urllib3/connectionpool.py b/src/pip/_vendor/urllib3/connectionpool.py index c23d736b186..96844d93374 100644 --- a/src/pip/_vendor/urllib3/connectionpool.py +++ b/src/pip/_vendor/urllib3/connectionpool.py @@ -50,6 +50,13 @@ from .util.url import _normalize_host as normalize_host from .util.url import get_host, parse_url +try: # Platform-specific: Python 3 + import weakref + + weakref_finalize = weakref.finalize +except AttributeError: # Platform-specific: Python 2 + from .packages.backports.weakref_finalize import weakref_finalize + xrange = six.moves.xrange log = logging.getLogger(__name__) @@ -220,6 +227,16 @@ def __init__( self.conn_kw["proxy"] = self.proxy self.conn_kw["proxy_config"] = self.proxy_config + # Do not pass 'self' as callback to 'finalize'. + # Then the 'finalize' would keep an endless living (leak) to self. + # By just passing a reference to the pool allows the garbage collector + # to free self if nobody else has a reference to it. + pool = self.pool + + # Close all the HTTPConnections in the pool before the + # HTTPConnectionPool object is garbage collected. + weakref_finalize(self, _close_pool_connections, pool) + def _new_conn(self): """ Return a fresh :class:`HTTPConnection`. @@ -489,14 +506,8 @@ def close(self): # Disable access to the pool old_pool, self.pool = self.pool, None - try: - while True: - conn = old_pool.get(block=False) - if conn: - conn.close() - - except queue.Empty: - pass # Done. + # Close all the HTTPConnections in the pool. + _close_pool_connections(old_pool) def is_same_host(self, url): """ @@ -1108,3 +1119,14 @@ def _normalize_host(host, scheme): if host.startswith("[") and host.endswith("]"): host = host[1:-1] return host + + +def _close_pool_connections(pool): + """Drains a queue of connections and closes each one.""" + try: + while True: + conn = pool.get(block=False) + if conn: + conn.close() + except queue.Empty: + pass # Done. 
diff --git a/src/pip/_vendor/urllib3/packages/backports/weakref_finalize.py b/src/pip/_vendor/urllib3/packages/backports/weakref_finalize.py new file mode 100644 index 00000000000..a2f2966e549 --- /dev/null +++ b/src/pip/_vendor/urllib3/packages/backports/weakref_finalize.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +""" +backports.weakref_finalize +~~~~~~~~~~~~~~~~~~ + +Backports the Python 3 ``weakref.finalize`` method. +""" +from __future__ import absolute_import + +import itertools +import sys +from weakref import ref + +__all__ = ["weakref_finalize"] + + +class weakref_finalize(object): + """Class for finalization of weakrefable objects + finalize(obj, func, *args, **kwargs) returns a callable finalizer + object which will be called when obj is garbage collected. The + first time the finalizer is called it evaluates func(*arg, **kwargs) + and returns the result. After this the finalizer is dead, and + calling it just returns None. + When the program exits any remaining finalizers for which the + atexit attribute is true will be run in reverse order of creation. + By default atexit is true. + """ + + # Finalizer objects don't have any state of their own. They are + # just used as keys to lookup _Info objects in the registry. This + # ensures that they cannot be part of a ref-cycle. 
+ + __slots__ = () + _registry = {} + _shutdown = False + _index_iter = itertools.count() + _dirty = False + _registered_with_atexit = False + + class _Info(object): + __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index") + + def __init__(self, obj, func, *args, **kwargs): + if not self._registered_with_atexit: + # We may register the exit function more than once because + # of a thread race, but that is harmless + import atexit + + atexit.register(self._exitfunc) + weakref_finalize._registered_with_atexit = True + info = self._Info() + info.weakref = ref(obj, self) + info.func = func + info.args = args + info.kwargs = kwargs or None + info.atexit = True + info.index = next(self._index_iter) + self._registry[self] = info + weakref_finalize._dirty = True + + def __call__(self, _=None): + """If alive then mark as dead and return func(*args, **kwargs); + otherwise return None""" + info = self._registry.pop(self, None) + if info and not self._shutdown: + return info.func(*info.args, **(info.kwargs or {})) + + def detach(self): + """If alive then mark as dead and return (obj, func, args, kwargs); + otherwise return None""" + info = self._registry.get(self) + obj = info and info.weakref() + if obj is not None and self._registry.pop(self, None): + return (obj, info.func, info.args, info.kwargs or {}) + + def peek(self): + """If alive then return (obj, func, args, kwargs); + otherwise return None""" + info = self._registry.get(self) + obj = info and info.weakref() + if obj is not None: + return (obj, info.func, info.args, info.kwargs or {}) + + @property + def alive(self): + """Whether finalizer is alive""" + return self in self._registry + + @property + def atexit(self): + """Whether finalizer should be called at exit""" + info = self._registry.get(self) + return bool(info) and info.atexit + + @atexit.setter + def atexit(self, value): + info = self._registry.get(self) + if info: + info.atexit = bool(value) + + def __repr__(self): + info = 
self._registry.get(self) + obj = info and info.weakref() + if obj is None: + return "<%s object at %#x; dead>" % (type(self).__name__, id(self)) + else: + return "<%s object at %#x; for %r at %#x>" % ( + type(self).__name__, + id(self), + type(obj).__name__, + id(obj), + ) + + @classmethod + def _select_for_exit(cls): + # Return live finalizers marked for exit, oldest first + L = [(f, i) for (f, i) in cls._registry.items() if i.atexit] + L.sort(key=lambda item: item[1].index) + return [f for (f, i) in L] + + @classmethod + def _exitfunc(cls): + # At shutdown invoke finalizers for which atexit is true. + # This is called once all other non-daemonic threads have been + # joined. + reenable_gc = False + try: + if cls._registry: + import gc + + if gc.isenabled(): + reenable_gc = True + gc.disable() + pending = None + while True: + if pending is None or weakref_finalize._dirty: + pending = cls._select_for_exit() + weakref_finalize._dirty = False + if not pending: + break + f = pending.pop() + try: + # gc is disabled, so (assuming no daemonic + # threads) the following is the only line in + # this function which might trigger creation + # of a new finalizer + f() + except Exception: + sys.excepthook(*sys.exc_info()) + assert f not in cls._registry + finally: + # prevent any more finalizers from executing during shutdown + weakref_finalize._shutdown = True + if reenable_gc: + gc.enable() diff --git a/src/pip/_vendor/urllib3/poolmanager.py b/src/pip/_vendor/urllib3/poolmanager.py index ca4ec341184..14b10daf3a9 100644 --- a/src/pip/_vendor/urllib3/poolmanager.py +++ b/src/pip/_vendor/urllib3/poolmanager.py @@ -171,7 +171,7 @@ class PoolManager(RequestMethods): def __init__(self, num_pools=10, headers=None, **connection_pool_kw): RequestMethods.__init__(self, headers) self.connection_pool_kw = connection_pool_kw - self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) + self.pools = RecentlyUsedContainer(num_pools) # Locally set the pool classes and 
keys so other PoolManagers can # override them. diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 88cc56c9cb4..4ab2915fb80 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -11,7 +11,7 @@ requests==2.31.0 certifi==2023.5.7 chardet==5.1.0 idna==3.4 - urllib3==1.26.15 + urllib3==1.26.16 rich==13.4.2 pygments==2.15.1 typing_extensions==4.7.1 From a40a24d8674cb4d5076b8df36c1530f43d80410d Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Mon, 10 Jul 2023 16:58:37 +0800 Subject: [PATCH 580/730] Use -dev instead of pinning to beta.3 --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c9a2ff6659a..6cb7f1f7e8e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -110,7 +110,7 @@ jobs: - "3.10" - "3.11" - key: "3.12" - full: "3.12.0-beta.3" + full: "3.12.0-dev" steps: - uses: actions/checkout@v3 From 25f4e6eabf8fb8f10ea10e4bd9c542ed30cbba5e Mon Sep 17 00:00:00 2001 From: Greg Roodt Date: Mon, 10 Jul 2023 21:59:42 +1000 Subject: [PATCH 581/730] Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree --- src/pip/_internal/utils/misc.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index bfed8270252..8afd85d9bed 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -127,10 +127,13 @@ def get_prog() -> str: # Tenacity raises RetryError by default, explicitly raise the original exception @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) def rmtree(dir: str, ignore_errors: bool = False) -> None: - shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) + if sys.version_info >= (3, 12): + shutil.rmtree(dir, ignore_errors=ignore_errors, onexc=rmtree_errorhandler) + else: + shutil.rmtree(dir, ignore_errors=ignore_errors, 
onerror=rmtree_errorhandler) -def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: ExcInfo) -> None: +def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: Union[ExcInfo, BaseException]) -> None: """On Windows, the files in .svn are read-only, so when rmtree() tries to remove them, an exception is thrown. We catch that here, remove the read-only attribute, and hopefully continue without problems.""" From 9b47bc0fea396caf0cc6a89a404c039c6a44ac40 Mon Sep 17 00:00:00 2001 From: Greg Roodt Date: Mon, 10 Jul 2023 22:04:46 +1000 Subject: [PATCH 582/730] Make black formatter happy --- src/pip/_internal/utils/misc.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 8afd85d9bed..afcf1709e80 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -133,7 +133,9 @@ def rmtree(dir: str, ignore_errors: bool = False) -> None: shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) -def rmtree_errorhandler(func: Callable[..., Any], path: str, exc_info: Union[ExcInfo, BaseException]) -> None: +def rmtree_errorhandler( + func: Callable[..., Any], path: str, exc_info: Union[ExcInfo, BaseException] +) -> None: """On Windows, the files in .svn are read-only, so when rmtree() tries to remove them, an exception is thrown. 
We catch that here, remove the read-only attribute, and hopefully continue without problems.""" From 41506d7bbf25009f0de06218744082ca4299f666 Mon Sep 17 00:00:00 2001 From: Greg Roodt Date: Tue, 11 Jul 2023 12:11:47 +1000 Subject: [PATCH 583/730] Add news entry --- news/11957.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11957.bugfix.rst diff --git a/news/11957.bugfix.rst b/news/11957.bugfix.rst new file mode 100644 index 00000000000..96d56ef800f --- /dev/null +++ b/news/11957.bugfix.rst @@ -0,0 +1 @@ +Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree \ No newline at end of file From 2c4947d51a002cd0ba4b01ec7682d86f297e6d37 Mon Sep 17 00:00:00 2001 From: Greg Roodt Date: Tue, 11 Jul 2023 12:14:53 +1000 Subject: [PATCH 584/730] Add news entry --- news/11957.bugfix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/11957.bugfix.rst b/news/11957.bugfix.rst index 96d56ef800f..8d72e1733a0 100644 --- a/news/11957.bugfix.rst +++ b/news/11957.bugfix.rst @@ -1 +1 @@ -Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree \ No newline at end of file +Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree From 0cabefbce800b6bde91f869e83dc48bd0ea4aa64 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 12 Jul 2023 02:54:10 +0800 Subject: [PATCH 585/730] Ensure requirements are prepared before get_dist() The legacy resolver seems to prepare requirements differently that can leave them half-done in certain code paths. I'm not quite sure how it's doing things, but at least we can do this to make sure they do things right. 
--- src/pip/_internal/commands/download.py | 5 ++++- src/pip/_internal/commands/wheel.py | 4 +++- src/pip/_internal/resolution/resolvelib/resolver.py | 3 +++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/commands/download.py b/src/pip/_internal/commands/download.py index 63bd53a50c8..54247a78a65 100644 --- a/src/pip/_internal/commands/download.py +++ b/src/pip/_internal/commands/download.py @@ -130,7 +130,6 @@ def run(self, options: Values, args: List[str]) -> int: self.trace_basic_info(finder) requirement_set = resolver.resolve(reqs, check_supported_wheels=True) - requirement_set.warn_legacy_versions_and_specifiers() downloaded: List[str] = [] for req in requirement_set.requirements.values(): @@ -138,6 +137,10 @@ def run(self, options: Values, args: List[str]) -> int: assert req.name is not None preparer.save_linked_requirement(req) downloaded.append(req.name) + + preparer.prepare_linked_requirements_more(requirement_set.requirements.values()) + requirement_set.warn_legacy_versions_and_specifiers() + if downloaded: write_output("Successfully downloaded %s", " ".join(downloaded)) diff --git a/src/pip/_internal/commands/wheel.py b/src/pip/_internal/commands/wheel.py index e6735bd8da7..ed578aa2500 100644 --- a/src/pip/_internal/commands/wheel.py +++ b/src/pip/_internal/commands/wheel.py @@ -145,7 +145,6 @@ def run(self, options: Values, args: List[str]) -> int: self.trace_basic_info(finder) requirement_set = resolver.resolve(reqs, check_supported_wheels=True) - requirement_set.warn_legacy_versions_and_specifiers() reqs_to_build: List[InstallRequirement] = [] for req in requirement_set.requirements.values(): @@ -154,6 +153,9 @@ def run(self, options: Values, args: List[str]) -> int: elif should_build_for_wheel_command(req): reqs_to_build.append(req) + preparer.prepare_linked_requirements_more(requirement_set.requirements.values()) + requirement_set.warn_legacy_versions_and_specifiers() + # build wheels build_successes, build_failures 
= build( reqs_to_build, diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index 47bbfecce36..d5b238608b2 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -159,6 +159,9 @@ def resolve( reqs = req_set.all_requirements self.factory.preparer.prepare_linked_requirements_more(reqs) + for req in reqs: + req.prepared = True + req.needs_more_preparation = False return req_set def get_installation_order( From 8aa17580ed623d926795e0cfb8885b4a4b4e044e Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 13 Jul 2023 14:57:18 +0200 Subject: [PATCH 586/730] dropped unused attribute --- src/pip/_internal/resolution/resolvelib/requirements.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index 31a515da9ac..e23b948ffc2 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -52,7 +52,6 @@ def __init__( making this a requirement on the base only. 
""" assert ireq.link is None, "This is a link, not a specifier" - self._drop_extras: bool = drop_extras self._ireq = ireq if not drop_extras else install_req_drop_extras(ireq) self._extras = frozenset(self._ireq.extras) From faa3289a94c59cdf647ce9d9c9277714c9363a62 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 13 Jul 2023 16:40:56 +0200 Subject: [PATCH 587/730] use regex for requirement update --- src/pip/_internal/req/constructors.py | 28 +++++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 8b1438afe1e..ee38b9a6183 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -58,6 +58,26 @@ def convert_extras(extras: Optional[str]) -> Set[str]: return get_requirement("placeholder" + extras.lower()).extras +def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement: + """ + Returns a new requirement based on the given one, with the supplied extras. If the + given requirement already has extras those are replaced (or dropped if no new extras + are given). + """ + match: re.Match = re.fullmatch(r"([^;\[<>~=]+)(\[[^\]]*\])?(.*)", str(req)) + # ireq.req is a valid requirement so the regex should match + assert match is not None + pre: Optional[str] = match.group(1) + post: Optional[str] = match.group(3) + assert pre is not None and post is not None + extras: str = ( + "[%s]" % ",".join(sorted(new_extras)) + if new_extras + else "" + ) + return Requirement(pre + extras + post) + + def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]: """Parses an editable requirement into: - a requirement name @@ -513,10 +533,8 @@ def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement: any extras. Sets the original requirement as the new one's parent (comes_from). 
""" - req = Requirement(str(ireq.req)) - req.extras = {} return InstallRequirement( - req=req, + req=_set_requirement_extras(ireq.req, set()), comes_from=ireq, editable=ireq.editable, link=ireq.link, @@ -542,8 +560,6 @@ def install_req_extend_extras( Makes a shallow copy of the ireq object. """ result = copy.copy(ireq) - req = Requirement(str(ireq.req)) - req.extras.update(extras) - result.req = req result.extras = {*ireq.extras, *extras} + result.req = _set_requirement_extras(ireq.req, result.extras) return result From 7e8da6176f9da32e44b8a1515e450ca8158a356a Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 13 Jul 2023 17:02:53 +0200 Subject: [PATCH 588/730] clarification --- src/pip/_internal/req/constructors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index ee38b9a6183..f97bded9887 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -65,7 +65,7 @@ def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requireme are given). 
""" match: re.Match = re.fullmatch(r"([^;\[<>~=]+)(\[[^\]]*\])?(.*)", str(req)) - # ireq.req is a valid requirement so the regex should match + # ireq.req is a valid requirement so the regex should always match assert match is not None pre: Optional[str] = match.group(1) post: Optional[str] = match.group(3) From ae23f967efedf33e3da148612657064b3b5a0695 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 15 Jul 2023 10:04:59 +0100 Subject: [PATCH 589/730] Update AUTHORS.txt --- AUTHORS.txt | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/AUTHORS.txt b/AUTHORS.txt index f6ece21ce58..299459fa959 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -71,6 +71,7 @@ atse Atsushi Odagiri Avinash Karhana Avner Cohen +Awit (Ah-Wit) Ghirmai Baptiste Mispelon Barney Gale barneygale @@ -126,6 +127,7 @@ Chih-Hsuan Yen Chris Brinker Chris Hunt Chris Jerdonek +Chris Kuehl Chris McDonough Chris Pawley Chris Pryer @@ -330,6 +332,8 @@ Jarek Potiuk jarondl Jason Curtis Jason R. Coombs +JasonMo +JasonMo1 Jay Graves Jean-Christophe Fillion-Robin Jeff Barber @@ -344,6 +348,7 @@ Jim Fisher Jim Garrison Jiun Bae Jivan Amara +Joe Bylund Joe Michelini John Paton John T. Wodder II @@ -473,7 +478,7 @@ Miro Hrončok Monica Baluna montefra Monty Taylor -Muha Ajjan +Muha Ajjan‮ Nadav Wexler Nahuel Ambrosini Nate Coraor @@ -582,6 +587,7 @@ Rishi RobberPhex Robert Collins Robert McGibbon +Robert Pollak Robert T. 
McGibbon robin elisha robinson Roey Berman @@ -614,6 +620,7 @@ SeongSoo Cho Sergey Vasilyev Seth Michael Larson Seth Woodworth +Shantanu shireenrao Shivansh-007 Shlomi Fish @@ -638,6 +645,7 @@ Steve Barnes Steve Dower Steve Kowalik Steven Myint +Steven Silvester stonebig Stéphane Bidoul Stéphane Bidoul (ACSONE) @@ -707,6 +715,7 @@ Wilson Mo wim glenn Winson Luk Wolfgang Maier +Wu Zhenyu XAMES3 Xavier Fernandez xoviat @@ -725,4 +734,4 @@ Zvezdan Petkovic Łukasz Langa Роман Донченко Семён Марьясин -Martin Häcker +‮rekcäH nitraM‮ From a3c2c43c5309ff219674b1d73a6dbf491a727a5e Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 15 Jul 2023 10:05:07 +0100 Subject: [PATCH 590/730] Bump for release --- NEWS.rst | 54 +++++++++++++++++++ news/11416.bugfix.rst | 1 - news/11847.bugfix.rst | 1 - news/11920.bugfix.rst | 1 - news/11957.bugfix.rst | 1 - news/11985.bugfix.rst | 1 - news/11996.process.rst | 1 - news/12038.bugfix.rst | 1 - news/12040.feature.rst | 1 - news/12042.bugfix.rst | 1 - news/12063.removal.rst | 2 - news/12067.bugfix.rst | 1 - news/12079.bugfix.rst | 1 - news/12119.bugfix.rst | 3 -- ...d6-b078-4452-97a1-7d2c1ab41ca1.trivial.rst | 0 news/4256.removal.rst | 4 -- ...8B-09FA-49BC-A886-6F5D8885BC14.trivial.rst | 0 news/certifi.vendor.rst | 1 - news/no-issue.trivial.rst | 2 - news/platformdirs.vendor.rst | 1 - news/pygments.vendor.rst | 1 - news/pyparsing.vendor.rst | 1 - news/requests.vendor.rst | 1 - news/rich.vendor.rst | 1 - news/setuptools.vendor.rst | 1 - news/typing-extensions.vendor.rst | 1 - news/typing_extensions.vendor.rst | 1 - news/urllib3.vendor.rst | 1 - src/pip/__init__.py | 2 +- 29 files changed, 55 insertions(+), 33 deletions(-) delete mode 100644 news/11416.bugfix.rst delete mode 100644 news/11847.bugfix.rst delete mode 100644 news/11920.bugfix.rst delete mode 100644 news/11957.bugfix.rst delete mode 100644 news/11985.bugfix.rst delete mode 100644 news/11996.process.rst delete mode 100644 news/12038.bugfix.rst delete mode 100644 
news/12040.feature.rst delete mode 100644 news/12042.bugfix.rst delete mode 100644 news/12063.removal.rst delete mode 100644 news/12067.bugfix.rst delete mode 100644 news/12079.bugfix.rst delete mode 100644 news/12119.bugfix.rst delete mode 100644 news/3297dfd6-b078-4452-97a1-7d2c1ab41ca1.trivial.rst delete mode 100644 news/4256.removal.rst delete mode 100644 news/5C12428B-09FA-49BC-A886-6F5D8885BC14.trivial.rst delete mode 100644 news/certifi.vendor.rst delete mode 100644 news/no-issue.trivial.rst delete mode 100644 news/platformdirs.vendor.rst delete mode 100644 news/pygments.vendor.rst delete mode 100644 news/pyparsing.vendor.rst delete mode 100644 news/requests.vendor.rst delete mode 100644 news/rich.vendor.rst delete mode 100644 news/setuptools.vendor.rst delete mode 100644 news/typing-extensions.vendor.rst delete mode 100644 news/typing_extensions.vendor.rst delete mode 100644 news/urllib3.vendor.rst diff --git a/NEWS.rst b/NEWS.rst index f24aaaa4094..6fa4f94ab1a 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,60 @@ .. towncrier release notes start +23.2 (2023-07-15) +================= + +Process +------- + +- Deprecate support for eggs for Python 3.11 or later, when the new ``importlib.metadata`` backend is used to load distribution metadata. This only affects the egg *distribution format* (with the ``.egg`` extension); distributions using the ``.egg-info`` *metadata format* (but are not actually eggs) are not affected. For more information about eggs, see `relevant section in the setuptools documentation `__. + +Deprecations and Removals +------------------------- + +- Deprecate legacy version and version specifiers that don't conform to `PEP 440 + `_ (`#12063 `_) +- ``freeze`` no longer excludes the ``setuptools``, ``distribute``, and ``wheel`` + from the output when running on Python 3.12 or later, where they are not + included in a virtual environment by default. Use ``--exclude`` if you wish to + exclude any of these packages. 
(`#4256 `_) + +Features +-------- + +- make rejection messages slightly different between 1 and 8, so the user can make the difference. (`#12040 `_) + +Bug Fixes +--------- + +- Fix ``pip completion --zsh``. (`#11416 `_) +- Prevent downloading files twice when PEP 658 metadata is present (`#11847 `_) +- Add permission check before configuration (`#11920 `_) +- Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree (`#11957 `_) +- Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. (`#11985 `_) +- Fix installation of packages with PEP658 metadata using non-canonicalized names (`#12038 `_) +- Correctly parse ``dist-info-metadata`` values from JSON-format index data. (`#12042 `_) +- Fail with an error if the ``--python`` option is specified after the subcommand name. (`#12067 `_) +- Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. (`#12079 `_) +- Pass the ``-r`` flag to mercurial to be explicit that a revision is passed and protect + against ``hg`` options injection as part of VCS URLs. Users that do not have control on + VCS URLs passed to pip are advised to upgrade. (`#12119 `_) + +Vendored Libraries +------------------ + +- Upgrade certifi to 2023.5.7 +- Upgrade platformdirs to 3.8.1 +- Upgrade pygments to 2.15.1 +- Upgrade pyparsing to 3.1.0 +- Upgrade Requests to 2.31.0 +- Upgrade rich to 13.4.2 +- Upgrade setuptools to 68.0.0 +- Updated typing_extensions to 4.6.0 +- Upgrade typing_extensions to 4.7.1 +- Upgrade urllib3 to 1.26.16 + + 23.1.2 (2023-04-26) =================== diff --git a/news/11416.bugfix.rst b/news/11416.bugfix.rst deleted file mode 100644 index 3815b2da864..00000000000 --- a/news/11416.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``pip completion --zsh``. 
diff --git a/news/11847.bugfix.rst b/news/11847.bugfix.rst deleted file mode 100644 index 1cad477eaa2..00000000000 --- a/news/11847.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Prevent downloading files twice when PEP 658 metadata is present diff --git a/news/11920.bugfix.rst b/news/11920.bugfix.rst deleted file mode 100644 index d8e22ee9bd7..00000000000 --- a/news/11920.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Add permission check before configuration diff --git a/news/11957.bugfix.rst b/news/11957.bugfix.rst deleted file mode 100644 index 8d72e1733a0..00000000000 --- a/news/11957.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree diff --git a/news/11985.bugfix.rst b/news/11985.bugfix.rst deleted file mode 100644 index 66c8e878681..00000000000 --- a/news/11985.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Ignore invalid or unreadable ``origin.json`` files in the cache of locally built wheels. diff --git a/news/11996.process.rst b/news/11996.process.rst deleted file mode 100644 index d585bd39183..00000000000 --- a/news/11996.process.rst +++ /dev/null @@ -1 +0,0 @@ -Deprecate support for eggs for Python 3.11 or later, when the new ``importlib.metadata`` backend is used to load distribution metadata. This only affects the egg *distribution format* (with the ``.egg`` extension); distributions using the ``.egg-info`` *metadata format* (but are not actually eggs) are not affected. For more information about eggs, see `relevant section in the setuptools documentation `__. 
diff --git a/news/12038.bugfix.rst b/news/12038.bugfix.rst deleted file mode 100644 index 184d88dd826..00000000000 --- a/news/12038.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix installation of packages with PEP658 metadata using non-canonicalized names diff --git a/news/12040.feature.rst b/news/12040.feature.rst deleted file mode 100644 index beff856f9c7..00000000000 --- a/news/12040.feature.rst +++ /dev/null @@ -1 +0,0 @@ -make rejection messages slightly different between 1 and 8, so the user can make the difference. diff --git a/news/12042.bugfix.rst b/news/12042.bugfix.rst deleted file mode 100644 index 34d97743540..00000000000 --- a/news/12042.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Correctly parse ``dist-info-metadata`` values from JSON-format index data. diff --git a/news/12063.removal.rst b/news/12063.removal.rst deleted file mode 100644 index 037b0c6089a..00000000000 --- a/news/12063.removal.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate legacy version and version specifiers that don't conform to `PEP 440 -`_ diff --git a/news/12067.bugfix.rst b/news/12067.bugfix.rst deleted file mode 100644 index 87d76bc2b06..00000000000 --- a/news/12067.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fail with an error if the ``--python`` option is specified after the subcommand name. diff --git a/news/12079.bugfix.rst b/news/12079.bugfix.rst deleted file mode 100644 index 5ee05026808..00000000000 --- a/news/12079.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix slowness when using ``importlib.metadata`` (the default way for pip to read metadata in Python 3.11+) and there is a large overlap between already installed and to-be-installed packages. diff --git a/news/12119.bugfix.rst b/news/12119.bugfix.rst deleted file mode 100644 index da8d8b04dcd..00000000000 --- a/news/12119.bugfix.rst +++ /dev/null @@ -1,3 +0,0 @@ -Pass the ``-r`` flag to mercurial to be explicit that a revision is passed and protect -against ``hg`` options injection as part of VCS URLs. 
Users that do not have control on -VCS URLs passed to pip are advised to upgrade. diff --git a/news/3297dfd6-b078-4452-97a1-7d2c1ab41ca1.trivial.rst b/news/3297dfd6-b078-4452-97a1-7d2c1ab41ca1.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/4256.removal.rst b/news/4256.removal.rst deleted file mode 100644 index eb89898501b..00000000000 --- a/news/4256.removal.rst +++ /dev/null @@ -1,4 +0,0 @@ -``freeze`` no longer excludes the ``setuptools``, ``distribute``, and ``wheel`` -from the output when running on Python 3.12 or later, where they are not -included in a virtual environment by default. Use ``--exclude`` if you wish to -exclude any of these packages. diff --git a/news/5C12428B-09FA-49BC-A886-6F5D8885BC14.trivial.rst b/news/5C12428B-09FA-49BC-A886-6F5D8885BC14.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst deleted file mode 100644 index 68018f44a54..00000000000 --- a/news/certifi.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade certifi to 2023.5.7 diff --git a/news/no-issue.trivial.rst b/news/no-issue.trivial.rst deleted file mode 100644 index 6440f668716..00000000000 --- a/news/no-issue.trivial.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added seperate instructions for installing ``nox`` in the ``docs/development/getting-started.rst`` doc. and slight update -to the below ``Running pip From Source Tree`` section. 
diff --git a/news/platformdirs.vendor.rst b/news/platformdirs.vendor.rst deleted file mode 100644 index f396d84a666..00000000000 --- a/news/platformdirs.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade platformdirs to 3.8.1 diff --git a/news/pygments.vendor.rst b/news/pygments.vendor.rst deleted file mode 100644 index b8aa9e56358..00000000000 --- a/news/pygments.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade pygments to 2.15.1 diff --git a/news/pyparsing.vendor.rst b/news/pyparsing.vendor.rst deleted file mode 100644 index 9feffb2460f..00000000000 --- a/news/pyparsing.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade pyparsing to 3.1.0 diff --git a/news/requests.vendor.rst b/news/requests.vendor.rst deleted file mode 100644 index cf10d8cbb85..00000000000 --- a/news/requests.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade Requests to 2.31.0 diff --git a/news/rich.vendor.rst b/news/rich.vendor.rst deleted file mode 100644 index 9f44d8bdfcd..00000000000 --- a/news/rich.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade rich to 13.4.2 diff --git a/news/setuptools.vendor.rst b/news/setuptools.vendor.rst deleted file mode 100644 index 1bbb489934c..00000000000 --- a/news/setuptools.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade setuptools to 68.0.0 diff --git a/news/typing-extensions.vendor.rst b/news/typing-extensions.vendor.rst deleted file mode 100644 index 6c95fc1b8d7..00000000000 --- a/news/typing-extensions.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Updated typing_extensions to 4.6.0 diff --git a/news/typing_extensions.vendor.rst b/news/typing_extensions.vendor.rst deleted file mode 100644 index 6a18f7f2ff0..00000000000 --- a/news/typing_extensions.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade typing_extensions to 4.7.1 diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst deleted file mode 100644 index b446ff9a6ca..00000000000 --- a/news/urllib3.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade urllib3 to 1.26.16 diff --git a/src/pip/__init__.py 
b/src/pip/__init__.py index 20d8bf56c17..696541ddfb5 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.2.dev0" +__version__ = "23.2" def main(args: Optional[List[str]] = None) -> int: From b6a2670599ded25ffcebc33b5c8b583ccef87f27 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 15 Jul 2023 10:05:07 +0100 Subject: [PATCH 591/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 696541ddfb5..00ce8ad456d 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.2" +__version__ = "23.3.dev0" def main(args: Optional[List[str]] = None) -> int: From 5d0a464afc80cd155bd29678fe3461d354c904d8 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Sat, 15 Jul 2023 19:14:24 +0800 Subject: [PATCH 592/730] Remove unneeded .0 Co-authored-by: Pradyun Gedam --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6cb7f1f7e8e..50ec976afce 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -110,7 +110,7 @@ jobs: - "3.10" - "3.11" - key: "3.12" - full: "3.12.0-dev" + full: "3.12-dev" steps: - uses: actions/checkout@v3 From 26814251c04f459dce8e9502aa42eebdb125ee20 Mon Sep 17 00:00:00 2001 From: Matthew Treinish Date: Sat, 15 Jul 2023 17:50:50 -0400 Subject: [PATCH 593/730] Correct typo in 23.2 Changelog Bug Fixes The first entry in the bug fixes section for the 23.2 release's changelog incorrectly listed the the PR/Issue for "Fix pip completion --zsh" as #11416. However, the PR that fixed this issue was actually #11417. This commit fixes this oversight/off by one error so that the changelog points to the correct PR. 
--- NEWS.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NEWS.rst b/NEWS.rst index 6fa4f94ab1a..b39f561bdf7 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -35,7 +35,7 @@ Features Bug Fixes --------- -- Fix ``pip completion --zsh``. (`#11416 `_) +- Fix ``pip completion --zsh``. (`#11417 `_) - Prevent downloading files twice when PEP 658 metadata is present (`#11847 `_) - Add permission check before configuration (`#11920 `_) - Fix deprecation warnings in Python 3.12 for usage of shutil.rmtree (`#11957 `_) From 593b85f4abd30688648436bb9baca3b8f7b32b51 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Mon, 17 Jul 2023 02:40:25 -0700 Subject: [PATCH 594/730] Use strict optional checking in misc.py (#11382) --- ...98-7F32-4890-97C1-7403A685733D.trivial.rst | 0 src/pip/_internal/network/session.py | 10 ++++++---- src/pip/_internal/utils/misc.py | 20 +++++++++---------- 3 files changed, 16 insertions(+), 14 deletions(-) create mode 100644 news/14514698-7F32-4890-97C1-7403A685733D.trivial.rst diff --git a/news/14514698-7F32-4890-97C1-7403A685733D.trivial.rst b/news/14514698-7F32-4890-97C1-7403A685733D.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_internal/network/session.py b/src/pip/_internal/network/session.py index 6c40ade1595..887dc14e796 100644 --- a/src/pip/_internal/network/session.py +++ b/src/pip/_internal/network/session.py @@ -419,15 +419,17 @@ def add_trusted_host( msg += f" (from {source})" logger.info(msg) - host_port = parse_netloc(host) - if host_port not in self.pip_trusted_origins: - self.pip_trusted_origins.append(host_port) + parsed_host, parsed_port = parse_netloc(host) + if parsed_host is None: + raise ValueError(f"Trusted host URL must include a host part: {host!r}") + if (parsed_host, parsed_port) not in self.pip_trusted_origins: + self.pip_trusted_origins.append((parsed_host, parsed_port)) self.mount( build_url_from_netloc(host, scheme="http") + "/", 
self._trusted_host_adapter ) self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter) - if not host_port[1]: + if not parsed_port: self.mount( build_url_from_netloc(host, scheme="http") + ":", self._trusted_host_adapter, diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index afcf1709e80..bd191c4e14f 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - import contextlib import errno import getpass @@ -344,17 +341,18 @@ def write_output(msg: Any, *args: Any) -> None: class StreamWrapper(StringIO): - orig_stream: TextIO = None + orig_stream: TextIO @classmethod def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper": - cls.orig_stream = orig_stream - return cls() + ret = cls() + ret.orig_stream = orig_stream + return ret # compileall.compile_dir() needs stdout.encoding to print to stdout - # https://github.com/python/mypy/issues/4125 + # type ignore is because TextIOBase.encoding is writeable @property - def encoding(self): # type: ignore + def encoding(self) -> str: # type: ignore return self.orig_stream.encoding @@ -422,7 +420,7 @@ def build_url_from_netloc(netloc: str, scheme: str = "https") -> str: return f"{scheme}://{netloc}" -def parse_netloc(netloc: str) -> Tuple[str, Optional[int]]: +def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]: """ Return the host-port pair from a netloc. """ @@ -510,7 +508,9 @@ def _redact_netloc(netloc: str) -> Tuple[str]: return (redact_netloc(netloc),) -def split_auth_netloc_from_url(url: str) -> Tuple[str, str, Tuple[str, str]]: +def split_auth_netloc_from_url( + url: str, +) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]: """ Parse a url into separate netloc, auth, and url with no auth. 
From 660dafb37f7ea3775230fccd4d483f73b8769560 Mon Sep 17 00:00:00 2001 From: Ales Erjavec Date: Mon, 9 Dec 2019 16:59:36 +0100 Subject: [PATCH 595/730] Ignore errors in temporary directory cleanup pip should not exit with an error when it fails to cleanup temporary files after it has already successfully installed packages. --- src/pip/_internal/utils/misc.py | 40 +++++++++++++++++++++++------ src/pip/_internal/utils/temp_dir.py | 34 ++++++++++++++++++++++-- tests/unit/test_utils.py | 10 +++++--- tests/unit/test_utils_temp_dir.py | 23 +++++++++++++++++ 4 files changed, 94 insertions(+), 13 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index bd191c4e14f..f366aa4053b 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -11,6 +11,7 @@ import sys import sysconfig import urllib.parse +from functools import partial from io import StringIO from itertools import filterfalse, tee, zip_longest from types import TracebackType @@ -123,15 +124,35 @@ def get_prog() -> str: # Retry every half second for up to 3 seconds # Tenacity raises RetryError by default, explicitly raise the original exception @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) -def rmtree(dir: str, ignore_errors: bool = False) -> None: +def rmtree( + dir: str, + ignore_errors: bool = False, + onexc: Optional[Callable[[Any, Any, Any], Any]] = None, +) -> None: + if ignore_errors: + onexc = _onerror_ignore + elif onexc is None: + onexc = _onerror_reraise if sys.version_info >= (3, 12): - shutil.rmtree(dir, ignore_errors=ignore_errors, onexc=rmtree_errorhandler) + shutil.rmtree(dir, onexc=partial(rmtree_errorhandler, onexc=onexc)) else: - shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) + shutil.rmtree(dir, onerror=partial(rmtree_errorhandler, onexc=onexc)) + + +def _onerror_ignore(*_args: Any) -> None: + pass + + +def _onerror_reraise(*_args: Any) -> None: + raise def 
rmtree_errorhandler( - func: Callable[..., Any], path: str, exc_info: Union[ExcInfo, BaseException] + func: Callable[..., Any], + path: str, + exc_info: Union[ExcInfo, BaseException], + *, + onexc: Callable[..., Any] = _onerror_reraise, ) -> None: """On Windows, the files in .svn are read-only, so when rmtree() tries to remove them, an exception is thrown. We catch that here, remove the @@ -146,10 +167,13 @@ def rmtree_errorhandler( # convert to read/write os.chmod(path, stat.S_IWRITE) # use the original function to repeat the operation - func(path) - return - else: - raise + try: + func(path) + return + except OSError: + pass + + onexc(func, path, exc_info) def display_path(path: str) -> str: diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index 8ee8a1cb180..7d3960734cf 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ b/src/pip/_internal/utils/temp_dir.py @@ -3,8 +3,9 @@ import logging import os.path import tempfile +import traceback from contextlib import ExitStack, contextmanager -from typing import Any, Dict, Generator, Optional, TypeVar, Union +from typing import Any, Callable, Dict, Generator, Optional, Tuple, Type, TypeVar, Union from pip._internal.utils.misc import enum, rmtree @@ -106,6 +107,7 @@ def __init__( delete: Union[bool, None, _Default] = _default, kind: str = "temp", globally_managed: bool = False, + ignore_cleanup_errors: bool = True, ): super().__init__() @@ -128,6 +130,7 @@ def __init__( self._deleted = False self.delete = delete self.kind = kind + self.ignore_cleanup_errors = ignore_cleanup_errors if globally_managed: assert _tempdir_manager is not None @@ -170,7 +173,34 @@ def cleanup(self) -> None: self._deleted = True if not os.path.exists(self._path): return - rmtree(self._path) + + def onerror( + func: Callable[[str], Any], + path: str, + exc_info: Tuple[Type[BaseException], BaseException, Any], + ) -> None: + """Log a warning for a `rmtree` error and continue""" + exc_val = 
"\n".join(traceback.format_exception_only(*exc_info[:2])) + exc_val = exc_val.rstrip() # remove trailing new line + if func in (os.unlink, os.remove, os.rmdir): + logging.warning( + "Failed to remove a temporary file '%s' due to %s.\n" + "You can safely remove it manually.", + path, + exc_val, + ) + else: + logging.warning("%s failed with %s.", func.__qualname__, exc_val) + + if self.ignore_cleanup_errors: + try: + # first try with tenacity; retrying to handle ephemeral errors + rmtree(self._path, ignore_errors=False) + except OSError: + # last pass ignore/log all errors + rmtree(self._path, onexc=onerror) + else: + rmtree(self._path) class AdjacentTempDirectory(TempDirectory): diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 450081cfd03..d3b0d32d12f 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -257,9 +257,13 @@ def test_rmtree_errorhandler_reraises_error(tmpdir: Path) -> None: except RuntimeError: # Make sure the handler reraises an exception with pytest.raises(RuntimeError, match="test message"): - # Argument 3 to "rmtree_errorhandler" has incompatible type "None"; expected - # "Tuple[Type[BaseException], BaseException, TracebackType]" - rmtree_errorhandler(mock_func, path, None) # type: ignore[arg-type] + # Argument 3 to "rmtree_errorhandler" has incompatible type + # "Union[Tuple[Type[BaseException], BaseException, TracebackType], + # Tuple[None, None, None]]"; expected "Tuple[Type[BaseException], + # BaseException, TracebackType]" + rmtree_errorhandler( + mock_func, path, sys.exc_info() # type: ignore[arg-type] + ) mock_func.assert_not_called() diff --git a/tests/unit/test_utils_temp_dir.py b/tests/unit/test_utils_temp_dir.py index 4a656d23ace..a6cd0d0e5af 100644 --- a/tests/unit/test_utils_temp_dir.py +++ b/tests/unit/test_utils_temp_dir.py @@ -4,6 +4,7 @@ import tempfile from pathlib import Path from typing import Any, Iterator, Optional, Union +from unittest import mock import pytest @@ -274,3 +275,25 @@ def 
test_tempdir_registry_lazy(should_delete: bool) -> None: registry.set_delete("test-for-lazy", should_delete) assert os.path.exists(path) assert os.path.exists(path) == (not should_delete) + + +def test_tempdir_cleanup_ignore_errors() -> None: + os_unlink = os.unlink + + # mock os.unlink to fail with EACCES for a specific filename to simulate + # how removing a loaded exe/dll behaves. + def unlink(name: str, *args: Any, **kwargs: Any) -> None: + if "bomb" in name: + raise PermissionError(name) + else: + os_unlink(name) + + with mock.patch("os.unlink", unlink): + with TempDirectory(ignore_cleanup_errors=True) as tmp_dir: + path = tmp_dir.path + with open(os.path.join(path, "bomb"), "a"): + pass + + filename = os.path.join(path, "bomb") + assert os.path.isfile(filename) + os.unlink(filename) From 4ff65abdf7a0f22ab0200993342168bf96aff8b9 Mon Sep 17 00:00:00 2001 From: Ales Erjavec Date: Fri, 19 Aug 2022 14:25:59 +0200 Subject: [PATCH 596/730] Fix 'force' remove file without write permissions Preserve existing mode flags, handle case where we even lack permission to change the mode. --- src/pip/_internal/utils/misc.py | 28 +++++++++++++++++++--------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index f366aa4053b..b7b32f0f8cf 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -154,24 +154,34 @@ def rmtree_errorhandler( *, onexc: Callable[..., Any] = _onerror_reraise, ) -> None: - """On Windows, the files in .svn are read-only, so when rmtree() tries to - remove them, an exception is thrown. We catch that here, remove the - read-only attribute, and hopefully continue without problems.""" + """ + `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`). + + * If a file is readonly then it's write flag is set and operation is + retried. + + * `onerror` is the original callback from `rmtree(... 
onerror=onerror)` + that is chained at the end if the "rm -f" still fails. + """ try: - has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + st_mode = os.stat(path).st_mode except OSError: # it's equivalent to os.path.exists return - if has_attr_readonly: + if not st_mode & stat.S_IWRITE: # convert to read/write - os.chmod(path, stat.S_IWRITE) - # use the original function to repeat the operation try: - func(path) - return + os.chmod(path, st_mode | stat.S_IWRITE) except OSError: pass + else: + # use the original function to repeat the operation + try: + func(path) + return + except OSError: + pass onexc(func, path, exc_info) From dddf4f829308536211107c691c83e75c1f9bacf5 Mon Sep 17 00:00:00 2001 From: Ales Erjavec Date: Mon, 22 Aug 2022 12:37:23 +0200 Subject: [PATCH 597/730] Add a news entry --- news/11394.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11394.bugfix.rst diff --git a/news/11394.bugfix.rst b/news/11394.bugfix.rst new file mode 100644 index 00000000000..9f2501db46c --- /dev/null +++ b/news/11394.bugfix.rst @@ -0,0 +1 @@ +Ignore errors in temporary directory cleanup (show a warning instead). From 6a9098ed48f421be549059b5dd3a92325f1bb37b Mon Sep 17 00:00:00 2001 From: Ales Erjavec Date: Wed, 7 Sep 2022 12:07:28 +0200 Subject: [PATCH 598/730] Show a single warning on temp directory cleanup Log individual errors at debug logging level. 
--- src/pip/_internal/utils/temp_dir.py | 29 ++++++++++++++++++++++++----- 1 file changed, 24 insertions(+), 5 deletions(-) diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index 7d3960734cf..ae8591ec83d 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ b/src/pip/_internal/utils/temp_dir.py @@ -5,7 +5,18 @@ import tempfile import traceback from contextlib import ExitStack, contextmanager -from typing import Any, Callable, Dict, Generator, Optional, Tuple, Type, TypeVar, Union +from typing import ( + Any, + Callable, + Dict, + Generator, + List, + Optional, + Tuple, + Type, + TypeVar, + Union, +) from pip._internal.utils.misc import enum, rmtree @@ -174,6 +185,8 @@ def cleanup(self) -> None: if not os.path.exists(self._path): return + errors: List[BaseException] = [] + def onerror( func: Callable[[str], Any], path: str, @@ -183,14 +196,14 @@ def onerror( exc_val = "\n".join(traceback.format_exception_only(*exc_info[:2])) exc_val = exc_val.rstrip() # remove trailing new line if func in (os.unlink, os.remove, os.rmdir): - logging.warning( - "Failed to remove a temporary file '%s' due to %s.\n" - "You can safely remove it manually.", + logger.debug( + "Failed to remove a temporary file '%s' due to %s.\n", path, exc_val, ) else: - logging.warning("%s failed with %s.", func.__qualname__, exc_val) + logger.debug("%s failed with %s.", func.__qualname__, exc_val) + errors.append(exc_info[1]) if self.ignore_cleanup_errors: try: @@ -199,6 +212,12 @@ def onerror( except OSError: # last pass ignore/log all errors rmtree(self._path, onexc=onerror) + if errors: + logger.warning( + "Failed to remove contents of a temporary directory '%s'.\n" + "You can safely remove it manually.", + self._path, + ) else: rmtree(self._path) From 2928750ae60dba841d22b398c779a099d1fdf3ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ale=C5=A1=20Erjavec?= Date: Wed, 7 Sep 2022 14:45:27 +0200 Subject: [PATCH 599/730] Change warning wording Co-authored-by: Tzu-ping 
Chung --- src/pip/_internal/utils/temp_dir.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index ae8591ec83d..99d1ba834ef 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ b/src/pip/_internal/utils/temp_dir.py @@ -214,7 +214,7 @@ def onerror( rmtree(self._path, onexc=onerror) if errors: logger.warning( - "Failed to remove contents of a temporary directory '%s'.\n" + "Failed to remove contents in a temporary directory '%s'.\n" "You can safely remove it manually.", self._path, ) From c12139de9b51da9947d3b36b4f0e2e0c8f467663 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 18 Jul 2023 20:50:10 +0100 Subject: [PATCH 600/730] Disable PEP 658 for the legacy resolver --- news/12156.bugfix.rst | 1 + src/pip/_internal/cli/req_command.py | 3 +++ src/pip/_internal/operations/prepare.py | 9 +++++++++ 3 files changed, 13 insertions(+) create mode 100644 news/12156.bugfix.rst diff --git a/news/12156.bugfix.rst b/news/12156.bugfix.rst new file mode 100644 index 00000000000..da0be566d91 --- /dev/null +++ b/news/12156.bugfix.rst @@ -0,0 +1 @@ +Disable PEP 658 metadata fetching with the legacy resolver. diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index c2f4e38bed8..86070f10c14 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -287,6 +287,7 @@ def make_requirement_preparer( """ temp_build_dir_path = temp_build_dir.path assert temp_build_dir_path is not None + legacy_resolver = False resolver_variant = cls.determine_resolver_variant(options) if resolver_variant == "2020-resolver": @@ -300,6 +301,7 @@ def make_requirement_preparer( "production." 
) else: + legacy_resolver = True lazy_wheel = False if "fast-deps" in options.features_enabled: logger.warning( @@ -320,6 +322,7 @@ def make_requirement_preparer( use_user_site=use_user_site, lazy_wheel=lazy_wheel, verbosity=verbosity, + legacy_resolver=legacy_resolver, ) @classmethod diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 49d86268a3b..cb121bcb252 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -226,6 +226,7 @@ def __init__( use_user_site: bool, lazy_wheel: bool, verbosity: int, + legacy_resolver: bool, ) -> None: super().__init__() @@ -259,6 +260,9 @@ def __init__( # How verbose should underlying tooling be? self.verbosity = verbosity + # Are we using the legacy resolver? + self.legacy_resolver = legacy_resolver + # Memoized downloaded files, as mapping of url: path. self._downloaded: Dict[str, str] = {} @@ -365,6 +369,11 @@ def _fetch_metadata_only( self, req: InstallRequirement, ) -> Optional[BaseDistribution]: + if self.legacy_resolver: + logger.debug( + "Metadata-only fetching is not used in the legacy resolver", + ) + return None if self.require_hashes: logger.debug( "Metadata-only fetching is not used as hash checking is required", From 39aa7ed50e26d77a4a277fa525add44b6f7b3bcd Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Tue, 18 Jul 2023 21:00:11 +0100 Subject: [PATCH 601/730] Fix a direct creation of RequirementPreparer in the tests --- tests/unit/test_req.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index 74b9712dc03..545828f8eea 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -105,6 +105,7 @@ def _basic_resolver( use_user_site=False, lazy_wheel=False, verbosity=0, + legacy_resolver=True, ) yield Resolver( preparer=preparer, From 81a0711192c32126a7b11d6898677274cdbc40b5 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 22 Jul 2023 10:10:42 +0100 Subject: [PATCH 
602/730] Update AUTHORS.txt --- AUTHORS.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.txt b/AUTHORS.txt index 299459fa959..77eb39a427d 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -446,6 +446,7 @@ Matthew Einhorn Matthew Feickert Matthew Gilliard Matthew Iversen +Matthew Treinish Matthew Trumbell Matthew Willson Matthias Bussonnier From 4a79e65cb6aac84505ad92d272a29f0c3c1aedce Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 22 Jul 2023 10:10:43 +0100 Subject: [PATCH 603/730] Bump for release --- NEWS.rst | 9 +++++++++ news/12156.bugfix.rst | 1 - news/14514698-7F32-4890-97C1-7403A685733D.trivial.rst | 0 src/pip/__init__.py | 2 +- 4 files changed, 10 insertions(+), 2 deletions(-) delete mode 100644 news/12156.bugfix.rst delete mode 100644 news/14514698-7F32-4890-97C1-7403A685733D.trivial.rst diff --git a/NEWS.rst b/NEWS.rst index b39f561bdf7..fc3bb6697ad 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,15 @@ .. towncrier release notes start +23.2.1 (2023-07-22) +=================== + +Bug Fixes +--------- + +- Disable PEP 658 metadata fetching with the legacy resolver. (`#12156 `_) + + 23.2 (2023-07-15) ================= diff --git a/news/12156.bugfix.rst b/news/12156.bugfix.rst deleted file mode 100644 index da0be566d91..00000000000 --- a/news/12156.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Disable PEP 658 metadata fetching with the legacy resolver. 
diff --git a/news/14514698-7F32-4890-97C1-7403A685733D.trivial.rst b/news/14514698-7F32-4890-97C1-7403A685733D.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 00ce8ad456d..6633ef7ed7c 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.3.dev0" +__version__ = "23.2.1" def main(args: Optional[List[str]] = None) -> int: From 5b7ca11888a694bb9ce24221d54a0f548e7d4462 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Sat, 22 Jul 2023 10:10:44 +0100 Subject: [PATCH 604/730] Bump for development --- src/pip/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 6633ef7ed7c..00ce8ad456d 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.2.1" +__version__ = "23.3.dev0" def main(args: Optional[List[str]] = None) -> int: From 5e4e44a2544e89d5ec2357b6acb7ede57c6e0f56 Mon Sep 17 00:00:00 2001 From: Illia Volochii Date: Sat, 22 Jul 2023 20:39:04 +0300 Subject: [PATCH 605/730] Drop a fallback to using SecureTransport on macOS --- src/pip/_internal/__init__.py | 1 - .../_internal/utils/inject_securetransport.py | 35 ------------------- tools/news/12175.removal.rst | 1 + 3 files changed, 1 insertion(+), 36 deletions(-) delete mode 100644 src/pip/_internal/utils/inject_securetransport.py create mode 100644 tools/news/12175.removal.rst diff --git a/src/pip/_internal/__init__.py b/src/pip/_internal/__init__.py index 6afb5c627ce..96c6b88c112 100755 --- a/src/pip/_internal/__init__.py +++ b/src/pip/_internal/__init__.py @@ -1,6 +1,5 @@ from typing import List, Optional -import pip._internal.utils.inject_securetransport # noqa from pip._internal.utils import _log # init_logging() must be called before any call to logging.getLogger() diff --git 
a/src/pip/_internal/utils/inject_securetransport.py b/src/pip/_internal/utils/inject_securetransport.py deleted file mode 100644 index 276aa79bb81..00000000000 --- a/src/pip/_internal/utils/inject_securetransport.py +++ /dev/null @@ -1,35 +0,0 @@ -"""A helper module that injects SecureTransport, on import. - -The import should be done as early as possible, to ensure all requests and -sessions (or whatever) are created after injecting SecureTransport. - -Note that we only do the injection on macOS, when the linked OpenSSL is too -old to handle TLSv1.2. -""" - -import sys - - -def inject_securetransport() -> None: - # Only relevant on macOS - if sys.platform != "darwin": - return - - try: - import ssl - except ImportError: - return - - # Checks for OpenSSL 1.0.1 - if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100F: - return - - try: - from pip._vendor.urllib3.contrib import securetransport - except (ImportError, OSError): - return - - securetransport.inject_into_urllib3() - - -inject_securetransport() diff --git a/tools/news/12175.removal.rst b/tools/news/12175.removal.rst new file mode 100644 index 00000000000..10ac142de48 --- /dev/null +++ b/tools/news/12175.removal.rst @@ -0,0 +1 @@ +Drop a fallback to using SecureTransport on macOS as it is not needed anymore. 
\ No newline at end of file From 86704d57e246ae5a84d3545df30d04ed5a908e56 Mon Sep 17 00:00:00 2001 From: Illia Volochii Date: Sat, 22 Jul 2023 20:53:16 +0300 Subject: [PATCH 606/730] Move the news entry to the right directory --- {tools/news => news}/12175.removal.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {tools/news => news}/12175.removal.rst (100%) diff --git a/tools/news/12175.removal.rst b/news/12175.removal.rst similarity index 100% rename from tools/news/12175.removal.rst rename to news/12175.removal.rst From 2a23ce94749de92405edca234618845056da0b51 Mon Sep 17 00:00:00 2001 From: Illia Volochii Date: Sat, 22 Jul 2023 20:57:04 +0300 Subject: [PATCH 607/730] Fix ending of the news entry --- news/12175.removal.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/12175.removal.rst b/news/12175.removal.rst index 10ac142de48..16e5bb02a07 100644 --- a/news/12175.removal.rst +++ b/news/12175.removal.rst @@ -1 +1 @@ -Drop a fallback to using SecureTransport on macOS as it is not needed anymore. \ No newline at end of file +Drop a fallback to using SecureTransport on macOS as it is not needed anymore. From 765732df4fb0be1fe1046bc9c2d0604c34304b53 Mon Sep 17 00:00:00 2001 From: Illia Volochii Date: Mon, 24 Jul 2023 18:51:02 +0300 Subject: [PATCH 608/730] Update the news entry --- news/12175.removal.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/12175.removal.rst b/news/12175.removal.rst index 16e5bb02a07..bf3500f351a 100644 --- a/news/12175.removal.rst +++ b/news/12175.removal.rst @@ -1 +1 @@ -Drop a fallback to using SecureTransport on macOS as it is not needed anymore. +Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions. 
From ff9aeae0d2e39720e40e8fffae942d659495fd84 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 25 Jul 2023 15:36:33 +0200 Subject: [PATCH 609/730] added resolver test case --- tests/functional/test_new_resolver.py | 82 +++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index fc52ab9c8d8..88dd635ae26 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -2272,6 +2272,88 @@ def test_new_resolver_dont_backtrack_on_extra_if_base_constrained( script.assert_installed(pkg="1.0", dep="1.0") +@pytest.mark.parametrize("swap_order", (True, False)) +@pytest.mark.parametrize("two_extras", (True, False)) +def test_new_resolver_dont_backtrack_on_extra_if_base_constrained_in_requirement( + script: PipTestEnvironment, swap_order: bool, two_extras: bool +) -> None: + """ + Verify that a requirement with a constraint on a package (either on the base + or on the base with an extra) causes the resolver to infer the same constraint for + any (other) extras with the same base.
+ + :param swap_order: swap the order the install specifiers appear in + :param two_extras: also add an extra for the constrained specifier + """ + create_basic_wheel_for_package(script, "dep", "1.0") + create_basic_wheel_for_package( + script, "pkg", "1.0", extras={"ext1": ["dep"], "ext2": ["dep"]} + ) + create_basic_wheel_for_package( + script, "pkg", "2.0", extras={"ext1": ["dep"], "ext2": ["dep"]} + ) + + to_install: tuple[str, str] = ( + "pkg[ext1]", "pkg[ext2]==1.0" if two_extras else "pkg==1.0" + ) + + result = script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + *(to_install if not swap_order else reversed(to_install)), + ) + assert "pkg-2.0" not in result.stdout, "Should not try 2.0 due to constraint" + script.assert_installed(pkg="1.0", dep="1.0") + + +@pytest.mark.parametrize("swap_order", (True, False)) +@pytest.mark.parametrize("two_extras", (True, False)) +def test_new_resolver_dont_backtrack_on_conflicting_constraints_on_extras( + script: PipTestEnvironment, swap_order: bool, two_extras: bool +) -> None: + """ + Verify that conflicting constraints on the same package with different + extras cause the resolver to trivially reject the request rather than + trying any candidates. 
+ + :param swap_order: swap the order the install specifiers appear in + :param two_extras: also add an extra for the second specifier + """ + create_basic_wheel_for_package(script, "dep", "1.0") + create_basic_wheel_for_package( + script, "pkg", "1.0", extras={"ext1": ["dep"], "ext2": ["dep"]} + ) + create_basic_wheel_for_package( + script, "pkg", "2.0", extras={"ext1": ["dep"], "ext2": ["dep"]} + ) + + to_install: tuple[str, str] = ( + "pkg[ext1]>1", "pkg[ext2]==1.0" if two_extras else "pkg==1.0" + ) + + result = script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + *(to_install if not swap_order else reversed(to_install)), + expect_error=True, + ) + assert "pkg-2.0" not in result.stdout or "pkg-1.0" not in result.stdout, ( + "Should only try one of 1.0, 2.0 depending on order" + ) + assert "looking at multiple versions" not in result.stdout, ( + "Should not have to look at multiple versions to conclude conflict" + ) + assert "conflict is caused by" in result.stdout, ( + "Resolver should be trivially able to find conflict cause" + ) + + def test_new_resolver_respect_user_requested_if_extra_is_installed( script: PipTestEnvironment, ) -> None: From 3fa373c0789699233726c02c2e72643d66da26e0 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 25 Jul 2023 15:59:20 +0200 Subject: [PATCH 610/730] added test for comes-from reporting --- tests/functional/test_new_resolver.py | 28 +++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index 88dd635ae26..e597669b3a2 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -2429,3 +2429,31 @@ def test_new_resolver_works_when_failing_package_builds_are_disallowed( ) script.assert_installed(pkg2="1.0", pkg1="1.0") + + +@pytest.mark.parametrize("swap_order", (True, False)) +def test_new_resolver_comes_from_with_extra( + script: 
PipTestEnvironment, swap_order: bool +) -> None: + """ + Verify that reporting where a dependency comes from is accurate when it comes + from a package with an extra. + + :param swap_order: swap the order the install specifiers appear in + """ + create_basic_wheel_for_package(script, "dep", "1.0") + create_basic_wheel_for_package(script, "pkg", "1.0", extras={"ext": ["dep"]}) + + to_install: tuple[str, str] = ("pkg", "pkg[ext]") + + result = script.pip( + "install", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + *(to_install if not swap_order else reversed(to_install)), + ) + assert "(from pkg[ext])" in result.stdout + assert "(from pkg)" not in result.stdout + script.assert_installed(pkg="1.0", dep="1.0") From e5690173515ce0dc82bcbc254d9211ca4124031c Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 25 Jul 2023 16:34:23 +0200 Subject: [PATCH 611/730] added test case for report bugfix --- tests/functional/test_install_report.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index 003b29d3821..1c3ffe80b70 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -64,14 +64,26 @@ def test_install_report_dep( assert _install_dict(report)["simple"]["requested"] is False +@pytest.mark.parametrize( + "specifiers", + [ + # result should be the same regardless of the method and order in which + # extras are specified + ("Paste[openid]==1.7.5.1",), + ("Paste==1.7.5.1", "Paste[openid]==1.7.5.1"), + ("Paste[openid]==1.7.5.1", "Paste==1.7.5.1"), + ], +) @pytest.mark.network -def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> None: +def test_install_report_index( + script: PipTestEnvironment, tmp_path: Path, specifiers: tuple[str, ...] 
+) -> None: """Test report for sdist obtained from index.""" report_path = tmp_path / "report.json" script.pip( "install", "--dry-run", - "Paste[openid]==1.7.5.1", + *specifiers, "--report", str(report_path), ) From cc6a2bded22001a6a3996f741b674ab1bab835ff Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 25 Jul 2023 16:38:51 +0200 Subject: [PATCH 612/730] added second report test case --- tests/functional/test_install_report.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index 1c3ffe80b70..f9a2e27c033 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -105,6 +105,26 @@ def test_install_report_index( assert "requires_dist" in paste_report["metadata"] +@pytest.mark.network +def test_install_report_index_multiple_extras( + script: PipTestEnvironment, tmp_path: Path +) -> None: + """Test report for sdist obtained from index, with multiple extras requested.""" + report_path = tmp_path / "report.json" + script.pip( + "install", + "--dry-run", + "Paste[openid]", + "Paste[subprocess]", + "--report", + str(report_path), + ) + report = json.loads(report_path.read_text()) + install_dict = _install_dict(report) + assert "paste" in install_dict + assert install_dict["paste"]["requested_extras"] == ["openid", "subprocess"] + + @pytest.mark.network def test_install_report_direct_archive( script: PipTestEnvironment, tmp_path: Path, shared_data: TestData From 4ae829cb3f40d6a64c86988e2f591c3344123bcd Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 25 Jul 2023 17:14:50 +0200 Subject: [PATCH 613/730] news entries --- news/11924.bugfix.rst | 1 + news/12095.bugfix.rst | 1 + 2 files changed, 2 insertions(+) create mode 100644 news/11924.bugfix.rst create mode 100644 news/12095.bugfix.rst diff --git a/news/11924.bugfix.rst b/news/11924.bugfix.rst new file mode 100644 index 00000000000..30bc60e6bce --- 
/dev/null +++ b/news/11924.bugfix.rst @@ -0,0 +1 @@ +Improve extras resolution for multiple constraints on same base package. diff --git a/news/12095.bugfix.rst b/news/12095.bugfix.rst new file mode 100644 index 00000000000..1f5018326ba --- /dev/null +++ b/news/12095.bugfix.rst @@ -0,0 +1 @@ +Consistently report whether a dependency comes from an extra. From dc01a40d413351085410a39dc6f616c5e1e21002 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 25 Jul 2023 17:19:21 +0200 Subject: [PATCH 614/730] py38 compatibility --- src/pip/_internal/resolution/resolvelib/factory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 03820edde6a..fdb5c4987ae 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -447,7 +447,7 @@ def find_candidates( def _make_requirements_from_install_req( self, ireq: InstallRequirement, requested_extras: Iterable[str] - ) -> list[Requirement]: + ) -> List[Requirement]: """ Returns requirement objects associated with the given InstallRequirement. In most cases this will be a single object but the following special cases exist: @@ -543,7 +543,7 @@ def make_requirements_from_spec( specifier: str, comes_from: Optional[InstallRequirement], requested_extras: Iterable[str] = (), - ) -> list[Requirement]: + ) -> List[Requirement]: """ Returns requirement objects associated with the given specifier. 
In most cases this will be a single object but the following special cases exist: From 292387f20b8c6e0d57e9eec940621ef0932499c8 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 25 Jul 2023 17:25:05 +0200 Subject: [PATCH 615/730] py37 compatibility --- tests/functional/test_install_report.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index f9a2e27c033..d7553ec0352 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -1,7 +1,7 @@ import json import textwrap from pathlib import Path -from typing import Any, Dict +from typing import Any, Dict, Tuple import pytest from packaging.utils import canonicalize_name @@ -76,7 +76,7 @@ def test_install_report_dep( ) @pytest.mark.network def test_install_report_index( - script: PipTestEnvironment, tmp_path: Path, specifiers: tuple[str, ...] + script: PipTestEnvironment, tmp_path: Path, specifiers: Tuple[str, ...] ) -> None: """Test report for sdist obtained from index.""" report_path = tmp_path / "report.json" From 39e1102800af8be86ed385aed7f93f6535262d29 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Tue, 25 Jul 2023 17:33:06 +0200 Subject: [PATCH 616/730] fixed minor type errors --- src/pip/_internal/req/constructors.py | 16 +++++++++++++--- .../_internal/resolution/resolvelib/factory.py | 2 +- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index f97bded9887..3b7243f8256 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -64,7 +64,9 @@ def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requireme given requirement already has extras those are replaced (or dropped if no new extras are given). 
""" - match: re.Match = re.fullmatch(r"([^;\[<>~=]+)(\[[^\]]*\])?(.*)", str(req)) + match: Optional[re.Match[str]] = re.fullmatch( + r"([^;\[<>~=]+)(\[[^\]]*\])?(.*)", str(req) + ) # ireq.req is a valid requirement so the regex should always match assert match is not None pre: Optional[str] = match.group(1) @@ -534,7 +536,11 @@ def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement: (comes_from). """ return InstallRequirement( - req=_set_requirement_extras(ireq.req, set()), + req=( + _set_requirement_extras(ireq.req, set()) + if ireq.req is not None + else None + ), comes_from=ireq, editable=ireq.editable, link=ireq.link, @@ -561,5 +567,9 @@ def install_req_extend_extras( """ result = copy.copy(ireq) result.extras = {*ireq.extras, *extras} - result.req = _set_requirement_extras(ireq.req, result.extras) + result.req = ( + _set_requirement_extras(ireq.req, result.extras) + if ireq.req is not None + else None + ) return result diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index fdb5c4987ae..2812fab57d9 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -465,7 +465,7 @@ def _make_requirements_from_install_req( ) return [] if not ireq.link: - if ireq.extras and ireq.req.specifier: + if ireq.extras and ireq.req is not None and ireq.req.specifier: return [ SpecifierRequirement(ireq, drop_extras=True), SpecifierRequirement(ireq), From e6333bb4d18edc8aec9b38601f81867ad1036807 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 10:32:58 +0200 Subject: [PATCH 617/730] linting --- src/pip/_internal/req/constructors.py | 12 +++------- .../resolution/resolvelib/requirements.py | 2 +- tests/functional/test_new_resolver.py | 24 ++++++++++--------- 3 files changed, 17 insertions(+), 21 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index 
3b7243f8256..b5f176e6b38 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -16,7 +16,7 @@ from pip._vendor.packaging.markers import Marker from pip._vendor.packaging.requirements import InvalidRequirement, Requirement -from pip._vendor.packaging.specifiers import Specifier, SpecifierSet +from pip._vendor.packaging.specifiers import Specifier from pip._internal.exceptions import InstallationError from pip._internal.models.index import PyPI, TestPyPI @@ -72,11 +72,7 @@ def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requireme pre: Optional[str] = match.group(1) post: Optional[str] = match.group(3) assert pre is not None and post is not None - extras: str = ( - "[%s]" % ",".join(sorted(new_extras)) - if new_extras - else "" - ) + extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else "" return Requirement(pre + extras + post) @@ -537,9 +533,7 @@ def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement: """ return InstallRequirement( req=( - _set_requirement_extras(ireq.req, set()) - if ireq.req is not None - else None + _set_requirement_extras(ireq.req, set()) if ireq.req is not None else None ), comes_from=ireq, editable=ireq.editable, diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index e23b948ffc2..ad9892a17a2 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -1,8 +1,8 @@ from pip._vendor.packaging.specifiers import SpecifierSet from pip._vendor.packaging.utils import NormalizedName, canonicalize_name -from pip._internal.req.req_install import InstallRequirement from pip._internal.req.constructors import install_req_drop_extras +from pip._internal.req.req_install import InstallRequirement from .base import Candidate, CandidateLookup, Requirement, format_name diff --git 
a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index e597669b3a2..77dede2fc5a 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -2294,7 +2294,8 @@ def test_new_resolver_dont_backtrack_on_extra_if_base_constrained_in_requirement ) to_install: tuple[str, str] = ( - "pkg[ext1]", "pkg[ext2]==1.0" if two_extras else "pkg==1.0" + "pkg[ext1]", + "pkg[ext2]==1.0" if two_extras else "pkg==1.0", ) result = script.pip( @@ -2331,7 +2332,8 @@ def test_new_resolver_dont_backtrack_on_conflicting_constraints_on_extras( ) to_install: tuple[str, str] = ( - "pkg[ext1]>1", "pkg[ext2]==1.0" if two_extras else "pkg==1.0" + "pkg[ext1]>1", + "pkg[ext2]==1.0" if two_extras else "pkg==1.0", ) result = script.pip( @@ -2343,15 +2345,15 @@ def test_new_resolver_dont_backtrack_on_conflicting_constraints_on_extras( *(to_install if not swap_order else reversed(to_install)), expect_error=True, ) - assert "pkg-2.0" not in result.stdout or "pkg-1.0" not in result.stdout, ( - "Should only try one of 1.0, 2.0 depending on order" - ) - assert "looking at multiple versions" not in result.stdout, ( - "Should not have to look at multiple versions to conclude conflict" - ) - assert "conflict is caused by" in result.stdout, ( - "Resolver should be trivially able to find conflict cause" - ) + assert ( + "pkg-2.0" not in result.stdout or "pkg-1.0" not in result.stdout + ), "Should only try one of 1.0, 2.0 depending on order" + assert ( + "looking at multiple versions" not in result.stdout + ), "Should not have to look at multiple versions to conclude conflict" + assert ( + "conflict is caused by" in result.stdout + ), "Resolver should be trivially able to find conflict cause" def test_new_resolver_respect_user_requested_if_extra_is_installed( From 12073891776472dd3517106da1c26483d64e1557 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 10:33:43 +0200 Subject: [PATCH 618/730] made primary news fragment of 
type feature --- news/{11924.bugfix.rst => 11924.feature.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{11924.bugfix.rst => 11924.feature.rst} (100%) diff --git a/news/11924.bugfix.rst b/news/11924.feature.rst similarity index 100% rename from news/11924.bugfix.rst rename to news/11924.feature.rst From 6663b89a4d465f675b88bce52d4ad7cef9164c6a Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 10:37:00 +0200 Subject: [PATCH 619/730] added final bugfix news entry --- news/11924.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/11924.bugfix.rst diff --git a/news/11924.bugfix.rst b/news/11924.bugfix.rst new file mode 100644 index 00000000000..7a9ee3151a4 --- /dev/null +++ b/news/11924.bugfix.rst @@ -0,0 +1 @@ +Include all requested extras in the install report (``--report``). From 314d7c12549a60c8460b1e2a8dac82fe0cca848a Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 10:52:42 +0200 Subject: [PATCH 620/730] simplified regex --- src/pip/_internal/req/constructors.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index b5f176e6b38..c03ae718e90 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -65,7 +65,10 @@ def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requireme are given). 
""" match: Optional[re.Match[str]] = re.fullmatch( - r"([^;\[<>~=]+)(\[[^\]]*\])?(.*)", str(req) + # see https://peps.python.org/pep-0508/#complete-grammar + r"([\w\t .-]+)(\[[^\]]*\])?(.*)", + str(req), + flags=re.ASCII, ) # ireq.req is a valid requirement so the regex should always match assert match is not None From cc909e87e5ccada46b4eb8a2a90c329614dc9b01 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 11:06:24 +0200 Subject: [PATCH 621/730] reverted unnecessary changes --- src/pip/_internal/resolution/resolvelib/requirements.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index ad9892a17a2..becbd6c4bcc 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -56,7 +56,7 @@ def __init__( self._extras = frozenset(self._ireq.extras) def __str__(self) -> str: - return str(self._ireq) + return str(self._ireq.req) def __repr__(self) -> str: return "{class_name}({requirement!r})".format( @@ -71,10 +71,7 @@ def project_name(self) -> NormalizedName: @property def name(self) -> str: - return format_name( - self.project_name, - self._extras, - ) + return format_name(self.project_name, self._extras) def format_for_error(self) -> str: # Convert comma-separated specifiers into "A, B, ..., F and G" From 6ed231a52be89295e3cecdb4f41eaf63f3152941 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 11:34:37 +0200 Subject: [PATCH 622/730] added unit tests for install req manipulation --- tests/unit/test_req.py | 87 +++++++++++++++++++++++++++++++++++++++++- 1 file changed, 86 insertions(+), 1 deletion(-) diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index 545828f8eea..b4819d8320a 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -6,7 +6,7 @@ import tempfile from functools import partial from 
pathlib import Path -from typing import Iterator, Optional, Tuple, cast +from typing import Iterator, Optional, Set, Tuple, cast from unittest import mock import pytest @@ -33,6 +33,8 @@ from pip._internal.req.constructors import ( _get_url_from_path, _looks_like_path, + install_req_drop_extras, + install_req_extend_extras, install_req_from_editable, install_req_from_line, install_req_from_parsed_requirement, @@ -763,6 +765,89 @@ def test_requirement_file(self) -> None: assert "appears to be a requirements file." in err_msg assert "If that is the case, use the '-r' flag to install" in err_msg + @pytest.mark.parametrize( + "inp, out", + [ + ("pkg", "pkg"), + ("pkg==1.0", "pkg==1.0"), + ("pkg ; python_version<='3.6'", "pkg"), + ("pkg[ext]", "pkg"), + ("pkg [ ext1, ext2 ]", "pkg"), + ("pkg [ ext1, ext2 ] @ https://example.com/", "pkg@ https://example.com/"), + ("pkg [ext] == 1.0; python_version<='3.6'", "pkg==1.0"), + ("pkg-all.allowed_chars0 ~= 2.0", "pkg-all.allowed_chars0~=2.0"), + ("pkg-all.allowed_chars0 [ext] ~= 2.0", "pkg-all.allowed_chars0~=2.0"), + ], + ) + def test_install_req_drop_extras(self, inp: str, out: str) -> None: + """ + Test behavior of install_req_drop_extras + """ + req = install_req_from_line(inp) + without_extras = install_req_drop_extras(req) + assert not without_extras.extras + assert str(without_extras.req) == out + # should always be a copy + assert req is not without_extras + assert req.req is not without_extras.req + # comes_from should point to original + assert without_extras.comes_from is req + # all else should be the same + assert without_extras.link == req.link + assert without_extras.markers == req.markers + assert without_extras.use_pep517 == req.use_pep517 + assert without_extras.isolated == req.isolated + assert without_extras.global_options == req.global_options + assert without_extras.hash_options == req.hash_options + assert without_extras.constraint == req.constraint + assert without_extras.config_settings == 
req.config_settings + assert without_extras.user_supplied == req.user_supplied + assert without_extras.permit_editable_wheels == req.permit_editable_wheels + + @pytest.mark.parametrize( + "inp, extras, out", + [ + ("pkg", {}, "pkg"), + ("pkg==1.0", {}, "pkg==1.0"), + ("pkg[ext]", {}, "pkg[ext]"), + ("pkg", {"ext"}, "pkg[ext]"), + ("pkg==1.0", {"ext"}, "pkg[ext]==1.0"), + ("pkg==1.0", {"ext1", "ext2"}, "pkg[ext1,ext2]==1.0"), + ("pkg; python_version<='3.6'", {"ext"}, "pkg[ext]"), + ("pkg[ext1,ext2]==1.0", {"ext2", "ext3"}, "pkg[ext1,ext2,ext3]==1.0"), + ( + "pkg-all.allowed_chars0 [ ext1 ] @ https://example.com/", + {"ext2"}, + "pkg-all.allowed_chars0[ext1,ext2]@ https://example.com/", + ), + ], + ) + def test_install_req_extend_extras( + self, inp: str, extras: Set[str], out: str + ) -> None: + """ + Test behavior of install_req_extend_extras + """ + req = install_req_from_line(inp) + extended = install_req_extend_extras(req, extras) + assert str(extended.req) == out + assert extended.req is not None + assert set(extended.extras) == set(extended.req.extras) + # should always be a copy + assert req is not extended + assert req.req is not extended.req + # all else should be the same + assert extended.link == req.link + assert extended.markers == req.markers + assert extended.use_pep517 == req.use_pep517 + assert extended.isolated == req.isolated + assert extended.global_options == req.global_options + assert extended.hash_options == req.hash_options + assert extended.constraint == req.constraint + assert extended.config_settings == req.config_settings + assert extended.user_supplied == req.user_supplied + assert extended.permit_editable_wheels == req.permit_editable_wheels + @mock.patch("pip._internal.req.req_install.os.path.abspath") @mock.patch("pip._internal.req.req_install.os.path.exists") From 55e9762873d824608ae52570ef3dcbb65e75f833 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 14:02:50 +0200 Subject: [PATCH 623/730] windows 
compatibility --- tests/lib/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 7c06feaf38c..d424a5e8dae 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -645,7 +645,7 @@ def run( cwd = cwd or self.cwd if sys.platform == "win32": # Partial fix for ScriptTest.run using `shell=True` on Windows. - args = tuple(str(a).replace("^", "^^").replace("&", "^&") for a in args) + args = tuple(str(a).replace("^", "^^").replace("&", "^&").replace(">", "^>") for a in args) if allow_error: kw["expect_error"] = True From 504485c27644f7a8b44cec179bfc0fac181b0d9e Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 14:03:26 +0200 Subject: [PATCH 624/730] lint --- tests/lib/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index d424a5e8dae..b6996f31d91 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -645,7 +645,10 @@ def run( cwd = cwd or self.cwd if sys.platform == "win32": # Partial fix for ScriptTest.run using `shell=True` on Windows. - args = tuple(str(a).replace("^", "^^").replace("&", "^&").replace(">", "^>") for a in args) + args = tuple( + str(a).replace("^", "^^").replace("&", "^&").replace(">", "^>") + for a in args + ) if allow_error: kw["expect_error"] = True From f4a7c0c569caf665c11379cd9629e5a5163867f5 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 14:12:38 +0200 Subject: [PATCH 625/730] cleaned up windows fix --- tests/lib/__init__.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index b6996f31d91..a7f2ade1a1d 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -645,10 +645,7 @@ def run( cwd = cwd or self.cwd if sys.platform == "win32": # Partial fix for ScriptTest.run using `shell=True` on Windows. 
- args = tuple( - str(a).replace("^", "^^").replace("&", "^&").replace(">", "^>") - for a in args - ) + args = tuple(re.sub("([&|()<>^])", r"^\1", str(a)) for a in args) if allow_error: kw["expect_error"] = True From 32e95be2130333e4f543778302fdc4d0c47043ad Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 26 Jul 2023 14:31:12 +0200 Subject: [PATCH 626/730] exclude brackets --- tests/lib/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index a7f2ade1a1d..018152930c2 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -645,7 +645,7 @@ def run( cwd = cwd or self.cwd if sys.platform == "win32": # Partial fix for ScriptTest.run using `shell=True` on Windows. - args = tuple(re.sub("([&|()<>^])", r"^\1", str(a)) for a in args) + args = tuple(re.sub("([&|<>^])", r"^\1", str(a)) for a in args) if allow_error: kw["expect_error"] = True From eeb3d8fdff94080b7978c4d2d7126020cddf9c93 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 28 Jul 2023 01:00:06 -0400 Subject: [PATCH 627/730] synthesize a traceback to get a normal exc_info from an exception - as per https://docs.python.org/3.12/whatsnew/3.12.html#shutil, we must expect only an exception and *not* the full exc_info from the new onexc function (the documentation of this is very misleading and still uses the label "excinfo": https://docs.python.org/3.12/library/shutil.html#shutil.rmtree) --- src/pip/_internal/utils/misc.py | 36 +++++++++++++++++++++++++---- src/pip/_internal/utils/temp_dir.py | 3 ++- 2 files changed, 33 insertions(+), 6 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index b7b32f0f8cf..1e79dbe988b 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -1,5 +1,6 @@ import contextlib import errno +import functools import getpass import hashlib import io @@ -14,7 +15,8 @@ from 
functools import partial from io import StringIO from itertools import filterfalse, tee, zip_longest -from types import TracebackType +from pathlib import Path +from types import FunctionType, TracebackType from typing import ( Any, BinaryIO, @@ -67,6 +69,8 @@ ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] VersionInfo = Tuple[int, int, int] NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]] +OnExc = Callable[[FunctionType, Path, BaseException], Any] +OnErr = Callable[[FunctionType, Path, ExcInfo], Any] def get_pip_version() -> str: @@ -121,22 +125,44 @@ def get_prog() -> str: return "pip" +def bare_exc_to_onexc(exc_val: BaseException) -> ExcInfo: + exc_ty = type(exc_val) + tb = exc_val.__traceback__ + if tb is None: + import inspect + + frame = inspect.currentframe() + assert frame is not None + tb = TracebackType(None, frame, frame.f_lasti, frame.f_lineno) + return (exc_ty, exc_val, tb) + + +def extract_exc_info_arg(f: OnErr) -> OnExc: + def g(fn: FunctionType, p: Path, e: BaseException) -> Any: + info = bare_exc_to_onexc(e) + return f(fn, p, info) + + return functools.update_wrapper(g, f) + + # Retry every half second for up to 3 seconds # Tenacity raises RetryError by default, explicitly raise the original exception @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) def rmtree( dir: str, ignore_errors: bool = False, - onexc: Optional[Callable[[Any, Any, Any], Any]] = None, + onexc: Optional[OnErr] = None, ) -> None: if ignore_errors: onexc = _onerror_ignore - elif onexc is None: + if onexc is None: onexc = _onerror_reraise + handler: OnErr = partial(rmtree_errorhandler, onexc=onexc) if sys.version_info >= (3, 12): - shutil.rmtree(dir, onexc=partial(rmtree_errorhandler, onexc=onexc)) + exc_handler = extract_exc_info_arg(handler) + shutil.rmtree(dir, onexc=exc_handler) else: - shutil.rmtree(dir, onerror=partial(rmtree_errorhandler, onexc=onexc)) + shutil.rmtree(dir, onerror=handler) def _onerror_ignore(*_args: Any) 
-> None: diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index 99d1ba834ef..130be333932 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ b/src/pip/_internal/utils/temp_dir.py @@ -5,6 +5,7 @@ import tempfile import traceback from contextlib import ExitStack, contextmanager +from pathlib import Path from typing import ( Any, Callable, @@ -189,7 +190,7 @@ def cleanup(self) -> None: def onerror( func: Callable[[str], Any], - path: str, + path: Path, exc_info: Tuple[Type[BaseException], BaseException, Any], ) -> None: """Log a warning for a `rmtree` error and continue""" From 4f036be496a40b90fa665432c001e9f9cce3dbee Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 28 Jul 2023 01:19:46 -0400 Subject: [PATCH 628/730] add news entry --- news/12187.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12187.bugfix.rst diff --git a/news/12187.bugfix.rst b/news/12187.bugfix.rst new file mode 100644 index 00000000000..63760ad91e3 --- /dev/null +++ b/news/12187.bugfix.rst @@ -0,0 +1 @@ +Fix improper handling of the new onexc argument of shutil.rmtree() in python 3.12. 
From eabefd40214f96e30eb2b0e607f9a4c7ffbf5e43 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 28 Jul 2023 02:18:36 -0400 Subject: [PATCH 629/730] revert the traceback wrapping --- src/pip/_internal/utils/misc.py | 34 +++++++++------------------------ 1 file changed, 9 insertions(+), 25 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 1e79dbe988b..69540978d1d 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -1,6 +1,5 @@ import contextlib import errno -import functools import getpass import hashlib import io @@ -125,26 +124,6 @@ def get_prog() -> str: return "pip" -def bare_exc_to_onexc(exc_val: BaseException) -> ExcInfo: - exc_ty = type(exc_val) - tb = exc_val.__traceback__ - if tb is None: - import inspect - - frame = inspect.currentframe() - assert frame is not None - tb = TracebackType(None, frame, frame.f_lasti, frame.f_lineno) - return (exc_ty, exc_val, tb) - - -def extract_exc_info_arg(f: OnErr) -> OnExc: - def g(fn: FunctionType, p: Path, e: BaseException) -> Any: - info = bare_exc_to_onexc(e) - return f(fn, p, info) - - return functools.update_wrapper(g, f) - - # Retry every half second for up to 3 seconds # Tenacity raises RetryError by default, explicitly raise the original exception @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5)) @@ -157,10 +136,15 @@ def rmtree( onexc = _onerror_ignore if onexc is None: onexc = _onerror_reraise - handler: OnErr = partial(rmtree_errorhandler, onexc=onexc) + handler: OnErr = partial( + # `[func, path, Union[ExcInfo, BaseException]] -> Any` is equivalent to + # `Union[([func, path, ExcInfo] -> Any), ([func, path, BaseException] -> Any)]`. 
+ cast(Union[OnExc, OnErr], rmtree_errorhandler), + onexc=onexc, + ) if sys.version_info >= (3, 12): - exc_handler = extract_exc_info_arg(handler) - shutil.rmtree(dir, onexc=exc_handler) + # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil. + shutil.rmtree(dir, onexc=handler) else: shutil.rmtree(dir, onerror=handler) @@ -178,7 +162,7 @@ def rmtree_errorhandler( path: str, exc_info: Union[ExcInfo, BaseException], *, - onexc: Callable[..., Any] = _onerror_reraise, + onexc: OnExc = _onerror_reraise, ) -> None: """ `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`). From 454e9768fbc4250db28aa17f3365fea0d131253d Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 28 Jul 2023 02:43:49 -0400 Subject: [PATCH 630/730] incorporate review comments --- src/pip/_internal/utils/misc.py | 8 +++++--- src/pip/_internal/utils/temp_dir.py | 20 ++++++++++---------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 69540978d1d..9a6353fc8d3 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -130,7 +130,7 @@ def get_prog() -> str: def rmtree( dir: str, ignore_errors: bool = False, - onexc: Optional[OnErr] = None, + onexc: Optional[OnExc] = None, ) -> None: if ignore_errors: onexc = _onerror_ignore @@ -158,8 +158,8 @@ def _onerror_reraise(*_args: Any) -> None: def rmtree_errorhandler( - func: Callable[..., Any], - path: str, + func: FunctionType, + path: Path, exc_info: Union[ExcInfo, BaseException], *, onexc: OnExc = _onerror_reraise, @@ -193,6 +193,8 @@ def rmtree_errorhandler( except OSError: pass + if not isinstance(exc_info, BaseException): + _, exc_info, _ = exc_info onexc(func, path, exc_info) diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index 130be333932..38c5f7c7c94 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ 
b/src/pip/_internal/utils/temp_dir.py @@ -6,15 +6,13 @@ import traceback from contextlib import ExitStack, contextmanager from pathlib import Path +from types import FunctionType from typing import ( Any, - Callable, Dict, Generator, List, Optional, - Tuple, - Type, TypeVar, Union, ) @@ -189,22 +187,24 @@ def cleanup(self) -> None: errors: List[BaseException] = [] def onerror( - func: Callable[[str], Any], + func: FunctionType, path: Path, - exc_info: Tuple[Type[BaseException], BaseException, Any], + exc_val: BaseException, ) -> None: """Log a warning for a `rmtree` error and continue""" - exc_val = "\n".join(traceback.format_exception_only(*exc_info[:2])) - exc_val = exc_val.rstrip() # remove trailing new line + formatted_exc = "\n".join( + traceback.format_exception_only(type(exc_val), exc_val) + ) + formatted_exc = formatted_exc.rstrip() # remove trailing new line if func in (os.unlink, os.remove, os.rmdir): logger.debug( "Failed to remove a temporary file '%s' due to %s.\n", path, - exc_val, + formatted_exc, ) else: - logger.debug("%s failed with %s.", func.__qualname__, exc_val) - errors.append(exc_info[1]) + logger.debug("%s failed with %s.", func.__qualname__, formatted_exc) + errors.append(exc_val) if self.ignore_cleanup_errors: try: From 6cc961ee799745a8ccf0256f670364c59d29b8c7 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 28 Jul 2023 14:46:23 +0800 Subject: [PATCH 631/730] Uppercase Python --- news/12187.bugfix.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/news/12187.bugfix.rst b/news/12187.bugfix.rst index 63760ad91e3..b4d106b974f 100644 --- a/news/12187.bugfix.rst +++ b/news/12187.bugfix.rst @@ -1 +1 @@ -Fix improper handling of the new onexc argument of shutil.rmtree() in python 3.12. +Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12. 
From 4babc076631d5ac48a0f9573fff406ab2dd6b6e4 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Fri, 28 Jul 2023 15:32:29 +0800 Subject: [PATCH 632/730] Move metadata-fetching log to VERBOSE level --- news/12155.process.rst | 6 ++++++ src/pip/_internal/operations/prepare.py | 6 +++--- 2 files changed, 9 insertions(+), 3 deletions(-) create mode 100644 news/12155.process.rst diff --git a/news/12155.process.rst b/news/12155.process.rst new file mode 100644 index 00000000000..5f77231c864 --- /dev/null +++ b/news/12155.process.rst @@ -0,0 +1,6 @@ +The metadata-fetching log message is moved to the VERBOSE level and now hidden +by default. The more significant information in this message to most users are +already available in surrounding logs (the package name and version of the +metadata being fetched), while the URL to the exact metadata file is generally +too long and clutters the output. The message can be brought back with +``--verbose``. diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index cb121bcb252..c07b261fd3b 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -4,7 +4,6 @@ # The following comment should be removed at some point in the future. 
# mypy: strict-optional=False -import logging import mimetypes import os import shutil @@ -37,6 +36,7 @@ from pip._internal.network.session import PipSession from pip._internal.operations.build.build_tracker import BuildTracker from pip._internal.req.req_install import InstallRequirement +from pip._internal.utils._log import getLogger from pip._internal.utils.direct_url_helpers import ( direct_url_for_editable, direct_url_from_link, @@ -53,7 +53,7 @@ from pip._internal.utils.unpacking import unpack_file from pip._internal.vcs import vcs -logger = logging.getLogger(__name__) +logger = getLogger(__name__) def _get_prepared_distribution( @@ -394,7 +394,7 @@ def _fetch_metadata_using_link_data_attr( if metadata_link is None: return None assert req.req is not None - logger.info( + logger.verbose( "Obtaining dependency information for %s from %s", req.req, metadata_link, From b47f77d330bb7a8642af5490c0f03642f039974c Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Mon, 31 Jul 2023 20:21:23 -0400 Subject: [PATCH 633/730] add lots of comments on the function of BuildTracker --- src/pip/_internal/distributions/base.py | 12 +++++ src/pip/_internal/distributions/installed.py | 6 +++ src/pip/_internal/distributions/sdist.py | 8 ++- src/pip/_internal/distributions/wheel.py | 6 +++ .../operations/build/build_tracker.py | 49 ++++++++++++------- src/pip/_internal/operations/prepare.py | 10 ++-- 6 files changed, 68 insertions(+), 23 deletions(-) diff --git a/src/pip/_internal/distributions/base.py b/src/pip/_internal/distributions/base.py index 75ce2dc9057..6fb0d7b7772 100644 --- a/src/pip/_internal/distributions/base.py +++ b/src/pip/_internal/distributions/base.py @@ -1,4 +1,5 @@ import abc +from typing import Optional from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata.base import BaseDistribution @@ -19,12 +20,23 @@ class AbstractDistribution(metaclass=abc.ABCMeta): - we must be able to 
create a Distribution object exposing the above metadata. + + - if we need to do work in the build tracker, we must be able to generate a unique + string to identify the requirement in the build tracker. """ def __init__(self, req: InstallRequirement) -> None: super().__init__() self.req = req + @abc.abstractproperty + def build_tracker_id(self) -> Optional[str]: + """A string that uniquely identifies this requirement to the build tracker. + + If None, then this dist has no work to do in the build tracker, and + ``.prepare_distribution_metadata()`` will not be called.""" + raise NotImplementedError() + @abc.abstractmethod def get_metadata_distribution(self) -> BaseDistribution: raise NotImplementedError() diff --git a/src/pip/_internal/distributions/installed.py b/src/pip/_internal/distributions/installed.py index edb38aa1a6c..ab8d53be740 100644 --- a/src/pip/_internal/distributions/installed.py +++ b/src/pip/_internal/distributions/installed.py @@ -1,3 +1,5 @@ +from typing import Optional + from pip._internal.distributions.base import AbstractDistribution from pip._internal.index.package_finder import PackageFinder from pip._internal.metadata import BaseDistribution @@ -10,6 +12,10 @@ class InstalledDistribution(AbstractDistribution): been computed. 
""" + @property + def build_tracker_id(self) -> Optional[str]: + return None + def get_metadata_distribution(self) -> BaseDistribution: assert self.req.satisfied_by is not None, "not actually installed" return self.req.satisfied_by diff --git a/src/pip/_internal/distributions/sdist.py b/src/pip/_internal/distributions/sdist.py index 4c25647930c..15ff42b7b15 100644 --- a/src/pip/_internal/distributions/sdist.py +++ b/src/pip/_internal/distributions/sdist.py @@ -1,5 +1,5 @@ import logging -from typing import Iterable, Set, Tuple +from typing import Iterable, Optional, Set, Tuple from pip._internal.build_env import BuildEnvironment from pip._internal.distributions.base import AbstractDistribution @@ -18,6 +18,12 @@ class SourceDistribution(AbstractDistribution): generated, either using PEP 517 or using the legacy `setup.py egg_info`. """ + @property + def build_tracker_id(self) -> Optional[str]: + """Identify this requirement uniquely by its link.""" + assert self.req.link + return self.req.link.url_without_fragment + def get_metadata_distribution(self) -> BaseDistribution: return self.req.get_dist() diff --git a/src/pip/_internal/distributions/wheel.py b/src/pip/_internal/distributions/wheel.py index 03aac775b53..eb16e25cbcc 100644 --- a/src/pip/_internal/distributions/wheel.py +++ b/src/pip/_internal/distributions/wheel.py @@ -1,3 +1,5 @@ +from typing import Optional + from pip._vendor.packaging.utils import canonicalize_name from pip._internal.distributions.base import AbstractDistribution @@ -15,6 +17,10 @@ class WheelDistribution(AbstractDistribution): This does not need any preparation as wheels can be directly unpacked. 
""" + @property + def build_tracker_id(self) -> Optional[str]: + return None + def get_metadata_distribution(self) -> BaseDistribution: """Loads the metadata from the wheel file into memory and returns a Distribution that uses it, not relying on the wheel file or diff --git a/src/pip/_internal/operations/build/build_tracker.py b/src/pip/_internal/operations/build/build_tracker.py index 6621549b844..ffcdbbc03f2 100644 --- a/src/pip/_internal/operations/build/build_tracker.py +++ b/src/pip/_internal/operations/build/build_tracker.py @@ -51,10 +51,20 @@ def get_build_tracker() -> Generator["BuildTracker", None, None]: yield tracker +class TrackerId(str): + """Uniquely identifying string provided to the build tracker.""" + + class BuildTracker: + """Ensure that an sdist cannot request itself as a setup requirement. + + When an sdist is prepared, it identifies its setup requirements in the + context of ``BuildTracker#track()``. If a requirement shows up recursively, this + raises an exception. This stops fork bombs embedded in malicious packages.""" + def __init__(self, root: str) -> None: self._root = root - self._entries: Set[InstallRequirement] = set() + self._entries: Dict[TrackerId, InstallRequirement] = {} logger.debug("Created build tracker: %s", self._root) def __enter__(self) -> "BuildTracker": @@ -69,16 +79,15 @@ def __exit__( ) -> None: self.cleanup() - def _entry_path(self, link: Link) -> str: - hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() + def _entry_path(self, key: TrackerId) -> str: + hashed = hashlib.sha224(key.encode()).hexdigest() return os.path.join(self._root, hashed) - def add(self, req: InstallRequirement) -> None: + def add(self, req: InstallRequirement, key: TrackerId) -> None: """Add an InstallRequirement to build tracking.""" - assert req.link # Get the file to write information about this requirement. - entry_path = self._entry_path(req.link) + entry_path = self._entry_path(key) # Try reading from the file. 
If it exists and can be read from, a build # is already in progress, so a LookupError is raised. @@ -92,33 +101,37 @@ def add(self, req: InstallRequirement) -> None: raise LookupError(message) # If we're here, req should really not be building already. - assert req not in self._entries + assert key not in self._entries # Start tracking this requirement. with open(entry_path, "w", encoding="utf-8") as fp: fp.write(str(req)) - self._entries.add(req) + self._entries[key] = req logger.debug("Added %s to build tracker %r", req, self._root) - def remove(self, req: InstallRequirement) -> None: + def remove(self, req: InstallRequirement, key: TrackerId) -> None: """Remove an InstallRequirement from build tracking.""" - assert req.link - # Delete the created file and the corresponding entries. - os.unlink(self._entry_path(req.link)) - self._entries.remove(req) + # Delete the created file and the corresponding entry. + os.unlink(self._entry_path(key)) + del self._entries[key] logger.debug("Removed %s from build tracker %r", req, self._root) def cleanup(self) -> None: - for req in set(self._entries): - self.remove(req) + for key, req in list(self._entries.items()): + self.remove(req, key) logger.debug("Removed build tracker: %r", self._root) @contextlib.contextmanager - def track(self, req: InstallRequirement) -> Generator[None, None, None]: - self.add(req) + def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]: + """Ensure that `key` cannot install itself as a setup requirement. 
+ + :raises LookupError: If `key` was already provided in a parent invocation of + the context introduced by this method.""" + tracker_id = TrackerId(key) + self.add(req, tracker_id) yield - self.remove(req) + self.remove(req, tracker_id) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index c07b261fd3b..8402be01bbf 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -65,10 +65,12 @@ def _get_prepared_distribution( ) -> BaseDistribution: """Prepare a distribution for installation.""" abstract_dist = make_distribution_for_install_requirement(req) - with build_tracker.track(req): - abstract_dist.prepare_distribution_metadata( - finder, build_isolation, check_build_deps - ) + tracker_id = abstract_dist.build_tracker_id + if tracker_id is not None: + with build_tracker.track(req, tracker_id): + abstract_dist.prepare_distribution_metadata( + finder, build_isolation, check_build_deps + ) return abstract_dist.get_metadata_distribution() From 023b3d923746ffd4a7cafce471250af7daaa4fed Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Tue, 1 Aug 2023 13:59:42 -0400 Subject: [PATCH 634/730] add news --- news/12194.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12194.trivial.rst diff --git a/news/12194.trivial.rst b/news/12194.trivial.rst new file mode 100644 index 00000000000..dfe5bbf1f06 --- /dev/null +++ b/news/12194.trivial.rst @@ -0,0 +1 @@ +Add lots of comments to the ``BuildTracker``. From e4d2e6e6b250dcb5e5bfafaed85472c8baf54886 Mon Sep 17 00:00:00 2001 From: Jeff Widman Date: Wed, 2 Aug 2023 12:26:27 -0700 Subject: [PATCH 635/730] Remove superfluous callout of new resolver The new resolver has been out for nearly three years now. I don't think we need to highlight it on the Readme anymore. Folks who are truly affected are far more likely to google their errors and then get redirected to it. 
By removing the noise from the Readme, it stops distracting from other stuff. --- README.rst | 5 ----- 1 file changed, 5 deletions(-) diff --git a/README.rst b/README.rst index 7e08f857c4c..c2d5c969397 100644 --- a/README.rst +++ b/README.rst @@ -19,8 +19,6 @@ We release updates regularly, with a new version every 3 months. Find more detai * `Release notes`_ * `Release process`_ -In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right. - **Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3. If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: @@ -49,9 +47,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. .. _Release process: https://pip.pypa.io/en/latest/development/release-process/ .. _GitHub page: https://github.com/pypa/pip .. _Development documentation: https://pip.pypa.io/en/latest/development -.. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html -.. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020 -.. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html .. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support .. _Issue tracking: https://github.com/pypa/pip/issues .. _Discourse channel: https://discuss.python.org/c/packaging From d65ba2f6b6f446058b289b35373b0a6c36720fba Mon Sep 17 00:00:00 2001 From: Jeff Widman Date: Wed, 2 Aug 2023 13:54:12 -0700 Subject: [PATCH 636/730] Replace python2 deprecation with a badge of supported python versions The python world has (mostly) moved on from Python 2. 
Anyone not already aware of the py2->py3 migration is probably new to the ecosystem and started on Python 3. Additionally, it's convenient to see at a glance what versions of Python are supported by the current release. This pulls from PyPI versions, so will not immediately match `main` (we can probably change this to match `main` if preferred). So by doing this it's both more useful going forward, and also lets us drop the explicit notice about dropping Python 2 support. --- README.rst | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index c2d5c969397..6ff117db5d2 100644 --- a/README.rst +++ b/README.rst @@ -3,9 +3,15 @@ pip - The Python Package Installer .. image:: https://img.shields.io/pypi/v/pip.svg :target: https://pypi.org/project/pip/ + :alt: PyPI + +.. image:: https://img.shields.io/pypi/pyversions/pip + :target: https://pypi.org/project/pip + :alt: PyPI - Python Version .. image:: https://readthedocs.org/projects/pip/badge/?version=latest :target: https://pip.pypa.io/en/latest + :alt: Documentation pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. @@ -19,8 +25,6 @@ We release updates regularly, with a new version every 3 months. Find more detai * `Release notes`_ * `Release process`_ -**Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3. - If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms: * `Issue tracking`_ @@ -47,7 +51,6 @@ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. .. _Release process: https://pip.pypa.io/en/latest/development/release-process/ .. _GitHub page: https://github.com/pypa/pip .. _Development documentation: https://pip.pypa.io/en/latest/development -.. 
_Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support .. _Issue tracking: https://github.com/pypa/pip/issues .. _Discourse channel: https://discuss.python.org/c/packaging .. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa From a19ade74a5cebbe73e8ab6e88f4123e0e2e54c06 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Fri, 4 Aug 2023 16:07:27 -0700 Subject: [PATCH 637/730] Use strict optional checking in req_install.py (#11379) * Use strict optional checking in req_install.py Suggested by pradyunsg in #11374 Since half of the API of this class depends on self.req not being None, it seems like we should just prevent users from passing None here. However, I wasn't able to make that change. Rather than sprinkle asserts everywhere, I added "checked" properties. I find this less ad hoc and easier to adapt if e.g. we're able to make self.req never None in the future. There are now some code paths where we have asserts that we didn't before. I relied on other type hints in pip's code base to be accurate. If that is not the case and we actually relied on some function being able to accept None when not typed as such, we may hit these asserts. But hopefully tests would catch such a thing. 
* news * black * inline asserts * code review * fix up merge issue * fix specifier bug --- ...60-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst | 0 src/pip/_internal/req/req_install.py | 33 +++++++++++-------- 2 files changed, 20 insertions(+), 13 deletions(-) create mode 100644 news/85F7E260-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst diff --git a/news/85F7E260-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst b/news/85F7E260-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 1f479713a94..542d6c78f96 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -1,6 +1,3 @@ -# The following comment should be removed at some point in the future. -# mypy: strict-optional=False - import functools import logging import os @@ -244,6 +241,7 @@ def supports_pyproject_editable(self) -> bool: @property def specifier(self) -> SpecifierSet: + assert self.req is not None return self.req.specifier @property @@ -257,7 +255,8 @@ def is_pinned(self) -> bool: For example, some-package==1.2 is pinned; some-package>1.2 is not. 
""" - specifiers = self.specifier + assert self.req is not None + specifiers = self.req.specifier return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: @@ -305,6 +304,7 @@ def hashes(self, trust_internet: bool = True) -> Hashes: else: link = None if link and link.hash: + assert link.hash_name is not None good_hashes.setdefault(link.hash_name, []).append(link.hash) return Hashes(good_hashes) @@ -314,6 +314,7 @@ def from_path(self) -> Optional[str]: return None s = str(self.req) if self.comes_from: + comes_from: Optional[str] if isinstance(self.comes_from, str): comes_from = self.comes_from else: @@ -345,7 +346,7 @@ def ensure_build_location( # When parallel builds are enabled, add a UUID to the build directory # name so multiple builds do not interfere with each other. - dir_name: str = canonicalize_name(self.name) + dir_name: str = canonicalize_name(self.req.name) if parallel_builds: dir_name = f"{dir_name}_{uuid.uuid4().hex}" @@ -388,6 +389,7 @@ def _set_requirement(self) -> None: ) def warn_on_mismatching_name(self) -> None: + assert self.req is not None metadata_name = canonicalize_name(self.metadata["Name"]) if canonicalize_name(self.req.name) == metadata_name: # Everything is fine. @@ -457,6 +459,7 @@ def is_wheel_from_cache(self) -> bool: # Things valid for sdists @property def unpacked_source_directory(self) -> str: + assert self.source_dir, f"No source dir for {self}" return os.path.join( self.source_dir, self.link and self.link.subdirectory_fragment or "" ) @@ -543,7 +546,7 @@ def prepare_metadata(self) -> None: Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. Under legacy processing, call setup.py egg-info. 
""" - assert self.source_dir + assert self.source_dir, f"No source dir for {self}" details = self.name or f"from {self.link}" if self.use_pep517: @@ -592,8 +595,10 @@ def get_dist(self) -> BaseDistribution: if self.metadata_directory: return get_directory_distribution(self.metadata_directory) elif self.local_file_path and self.is_wheel: + assert self.req is not None return get_wheel_distribution( - FilesystemWheel(self.local_file_path), canonicalize_name(self.name) + FilesystemWheel(self.local_file_path), + canonicalize_name(self.req.name), ) raise AssertionError( f"InstallRequirement {self} has no metadata directory and no wheel: " @@ -601,9 +606,9 @@ def get_dist(self) -> BaseDistribution: ) def assert_source_matches_version(self) -> None: - assert self.source_dir + assert self.source_dir, f"No source dir for {self}" version = self.metadata["version"] - if self.req.specifier and version not in self.req.specifier: + if self.req and self.req.specifier and version not in self.req.specifier: logger.warning( "Requested %s, but installing version %s", self, @@ -696,9 +701,10 @@ def _clean_zip_name(name: str, prefix: str) -> str: name = name.replace(os.path.sep, "/") return name + assert self.req is not None path = os.path.join(parentdir, path) name = _clean_zip_name(path, rootdir) - return self.name + "/" + name + return self.req.name + "/" + name def archive(self, build_dir: Optional[str]) -> None: """Saves archive to provided build_dir. 
@@ -777,8 +783,9 @@ def install( use_user_site: bool = False, pycompile: bool = True, ) -> None: + assert self.req is not None scheme = get_scheme( - self.name, + self.req.name, user=use_user_site, home=home, root=root, @@ -792,7 +799,7 @@ def install( prefix=prefix, home=home, use_user_site=use_user_site, - name=self.name, + name=self.req.name, setup_py_path=self.setup_py_path, isolated=self.isolated, build_env=self.build_env, @@ -805,7 +812,7 @@ def install( assert self.local_file_path install_wheel( - self.name, + self.req.name, self.local_file_path, scheme=scheme, req_description=str(self.req), From d311e6e603ea7d4a2ef6c6f465308f523cfe83d2 Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Sun, 6 Aug 2023 09:26:54 -0500 Subject: [PATCH 638/730] Upgrade certifi to 2023.7.22 (#12206) --- news/certifi.vendor.rst | 1 + src/pip/_vendor/certifi/__init__.py | 2 +- src/pip/_vendor/certifi/cacert.pem | 324 ++++++++++++++++------------ src/pip/_vendor/vendor.txt | 2 +- 4 files changed, 188 insertions(+), 141 deletions(-) create mode 100644 news/certifi.vendor.rst diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst new file mode 100644 index 00000000000..aacd17183f1 --- /dev/null +++ b/news/certifi.vendor.rst @@ -0,0 +1 @@ +Upgrade certifi to 2023.7.22 diff --git a/src/pip/_vendor/certifi/__init__.py b/src/pip/_vendor/certifi/__init__.py index 705f416d6b0..8ce89cef706 100644 --- a/src/pip/_vendor/certifi/__init__.py +++ b/src/pip/_vendor/certifi/__init__.py @@ -1,4 +1,4 @@ from .core import contents, where __all__ = ["contents", "where"] -__version__ = "2023.05.07" +__version__ = "2023.07.22" diff --git a/src/pip/_vendor/certifi/cacert.pem b/src/pip/_vendor/certifi/cacert.pem index 5183934bb75..02123695d01 100644 --- a/src/pip/_vendor/certifi/cacert.pem +++ b/src/pip/_vendor/certifi/cacert.pem @@ -791,34 +791,6 @@ uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= -----END 
CERTIFICATE----- -# Issuer: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 1 O=Hongkong Post -# Label: "Hongkong Post Root CA 1" -# Serial: 1000 -# MD5 Fingerprint: a8:0d:6f:39:78:b9:43:6d:77:42:6d:98:5a:cc:23:ca -# SHA1 Fingerprint: d6:da:a8:20:8d:09:d2:15:4d:24:b5:2f:cb:34:6e:b2:58:b2:8a:58 -# SHA256 Fingerprint: f9:e6:7d:33:6c:51:00:2a:c0:54:c6:32:02:2d:66:dd:a2:e7:e3:ff:f1:0a:d0:61:ed:31:d8:bb:b4:10:cf:b2 ------BEGIN CERTIFICATE----- -MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx -FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg -Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG -A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr -b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ -jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn -PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh -ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9 -nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h -q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED -MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC -mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3 -7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB -oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs -EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO -fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi -AmvZWg== ------END CERTIFICATE----- - # Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. # Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
# Label: "SecureSign RootCA11" @@ -1676,50 +1648,6 @@ HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= -----END CERTIFICATE----- -# Issuer: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Subject: CN=E-Tugra Certification Authority O=E-Tu\u011fra EBG Bili\u015fim Teknolojileri ve Hizmetleri A.\u015e. OU=E-Tugra Sertifikasyon Merkezi -# Label: "E-Tugra Certification Authority" -# Serial: 7667447206703254355 -# MD5 Fingerprint: b8:a1:03:63:b0:bd:21:71:70:8a:6f:13:3a:bb:79:49 -# SHA1 Fingerprint: 51:c6:e7:08:49:06:6e:f3:92:d4:5c:a0:0d:6d:a3:62:8f:c3:52:39 -# SHA256 Fingerprint: b0:bf:d5:2b:b0:d7:d9:bd:92:bf:5d:4d:c1:3d:a2:55:c0:2c:54:2f:37:83:65:ea:89:39:11:f5:5e:55:f2:3c ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIIamg+nFGby1MwDQYJKoZIhvcNAQELBQAwgbIxCzAJBgNV -BAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+BgNVBAoMN0UtVHXEn3JhIEVCRyBC -aWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhpem1ldGxlcmkgQS7Fni4xJjAkBgNV -BAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBNZXJrZXppMSgwJgYDVQQDDB9FLVR1 -Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTEzMDMwNTEyMDk0OFoXDTIz -MDMwMzEyMDk0OFowgbIxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHDAZBbmthcmExQDA+ -BgNVBAoMN0UtVHXEn3JhIEVCRyBCaWxpxZ9pbSBUZWtub2xvamlsZXJpIHZlIEhp -em1ldGxlcmkgQS7Fni4xJjAkBgNVBAsMHUUtVHVncmEgU2VydGlmaWthc3lvbiBN -ZXJrZXppMSgwJgYDVQQDDB9FLVR1Z3JhIENlcnRpZmljYXRpb24gQXV0aG9yaXR5 -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA4vU/kwVRHoViVF56C/UY -B4Oufq9899SKa6VjQzm5S/fDxmSJPZQuVIBSOTkHS0vdhQd2h8y/L5VMzH2nPbxH -D5hw+IyFHnSOkm0bQNGZDbt1bsipa5rAhDGvykPL6ys06I+XawGb1Q5KCKpbknSF -Q9OArqGIW66z6l7LFpp3RMih9lRozt6Plyu6W0ACDGQXwLWTzeHxE2bODHnv0ZEo -q1+gElIwcxmOj+GMB6LDu0rw6h8VqO4lzKRG+Bsi77MOQ7osJLjFLFzUHPhdZL3D -k14opz8n8Y4e0ypQBaNV2cvnOVPAmJ6MVGKLJrD3fY185MaeZkJVgkfnsliNZvcH -fC425lAcP9tDJMW/hkd5s3kc91r0E+xs+D/iWR+V7kI+ua2oMoVJl0b+SzGPWsut -dEcf6ZG33ygEIqDUD13ieU/qbIWGvaimzuT6w+Gzrt48Ue7LE3wBf4QOXVGUnhMM 
-ti6lTPk5cDZvlsouDERVxcr6XQKj39ZkjFqzAQqptQpHF//vkUAqjqFGOjGY5RH8 -zLtJVor8udBhmm9lbObDyz51Sf6Pp+KJxWfXnUYTTjF2OySznhFlhqt/7x3U+Lzn -rFpct1pHXFXOVbQicVtbC/DP3KBhZOqp12gKY6fgDT+gr9Oq0n7vUaDmUStVkhUX -U8u3Zg5mTPj5dUyQ5xJwx0UCAwEAAaNjMGEwHQYDVR0OBBYEFC7j27JJ0JxUeVz6 -Jyr+zE7S6E5UMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAULuPbsknQnFR5 -XPonKv7MTtLoTlQwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAF -Nzr0TbdF4kV1JI+2d1LoHNgQk2Xz8lkGpD4eKexd0dCrfOAKkEh47U6YA5n+KGCR -HTAduGN8qOY1tfrTYXbm1gdLymmasoR6d5NFFxWfJNCYExL/u6Au/U5Mh/jOXKqY -GwXgAEZKgoClM4so3O0409/lPun++1ndYYRP0lSWE2ETPo+Aab6TR7U1Q9Jauz1c -77NCR807VRMGsAnb/WP2OogKmW9+4c4bU2pEZiNRCHu8W1Ki/QY3OEBhj0qWuJA3 -+GbHeJAAFS6LrVE1Uweoa2iu+U48BybNCAVwzDk/dr2l02cmAYamU9JgO3xDf1WK -vJUawSg5TB9D0pH0clmKuVb8P7Sd2nCcdlqMQ1DujjByTd//SffGqWfZbawCEeI6 -FiWnWAjLb1NBnEg4R2gz0dfHj9R0IdTDBZB6/86WiLEVKV0jq9BgoRJP3vQXzTLl -yb/IQ639Lo7xr+L0mPoSHyDYwKcMhcWQ9DstliaxLL5Mq+ux0orJ23gTDx4JnW2P -AJ8C2sH6H3p6CcRK5ogql5+Ji/03X186zjhZhkuvcQu02PJwT58yE+Owp1fl2tpD -y4Q08ijE6m30Ku/Ba3ba+367hTzSU8JNvnHhRdH9I2cNE3X7z2VnIp2usAnRCf8d -NL/+I5c30jn6PQ0GC7TbO6Orb1wdtn7os4I07QZcJA== ------END CERTIFICATE----- - # Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center # Label: "T-TeleSec GlobalRoot Class 2" @@ -4397,73 +4325,6 @@ ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR -----END CERTIFICATE----- -# Issuer: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Subject: CN=E-Tugra Global Root CA RSA v3 O=E-Tugra EBG A.S. 
OU=E-Tugra Trust Center -# Label: "E-Tugra Global Root CA RSA v3" -# Serial: 75951268308633135324246244059508261641472512052 -# MD5 Fingerprint: 22:be:10:f6:c2:f8:03:88:73:5f:33:29:47:28:47:a4 -# SHA1 Fingerprint: e9:a8:5d:22:14:52:1c:5b:aa:0a:b4:be:24:6a:23:8a:c9:ba:e2:a9 -# SHA256 Fingerprint: ef:66:b0:b1:0a:3c:db:9f:2e:36:48:c7:6b:d2:af:18:ea:d2:bf:e6:f1:17:65:5e:28:c4:06:0d:a1:a3:f4:c2 ------BEGIN CERTIFICATE----- -MIIF8zCCA9ugAwIBAgIUDU3FzRYilZYIfrgLfxUGNPt5EDQwDQYJKoZIhvcNAQEL -BQAwgYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUt -VHVncmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYw -JAYDVQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIFJTQSB2MzAeFw0yMDAzMTgw -OTA3MTdaFw00NTAzMTIwOTA3MTdaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMG -QW5rYXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1 -Z3JhIFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBD -QSBSU0EgdjMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCiZvCJt3J7 -7gnJY9LTQ91ew6aEOErxjYG7FL1H6EAX8z3DeEVypi6Q3po61CBxyryfHUuXCscx -uj7X/iWpKo429NEvx7epXTPcMHD4QGxLsqYxYdE0PD0xesevxKenhOGXpOhL9hd8 -7jwH7eKKV9y2+/hDJVDqJ4GohryPUkqWOmAalrv9c/SF/YP9f4RtNGx/ardLAQO/ -rWm31zLZ9Vdq6YaCPqVmMbMWPcLzJmAy01IesGykNz709a/r4d+ABs8qQedmCeFL -l+d3vSFtKbZnwy1+7dZ5ZdHPOrbRsV5WYVB6Ws5OUDGAA5hH5+QYfERaxqSzO8bG -wzrwbMOLyKSRBfP12baqBqG3q+Sx6iEUXIOk/P+2UNOMEiaZdnDpwA+mdPy70Bt4 -znKS4iicvObpCdg604nmvi533wEKb5b25Y08TVJ2Glbhc34XrD2tbKNSEhhw5oBO -M/J+JjKsBY04pOZ2PJ8QaQ5tndLBeSBrW88zjdGUdjXnXVXHt6woq0bM5zshtQoK -5EpZ3IE1S0SVEgpnpaH/WwAH0sDM+T/8nzPyAPiMbIedBi3x7+PmBvrFZhNb/FAH -nnGGstpvdDDPk1Po3CLW3iAfYY2jLqN4MpBs3KwytQXk9TwzDdbgh3cXTJ2w2Amo -DVf3RIXwyAS+XF1a4xeOVGNpf0l0ZAWMowIDAQABo2MwYTAPBgNVHRMBAf8EBTAD -AQH/MB8GA1UdIwQYMBaAFLK0ruYt9ybVqnUtdkvAG1Mh0EjvMB0GA1UdDgQWBBSy -tK7mLfcm1ap1LXZLwBtTIdBI7zAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEL -BQADggIBAImocn+M684uGMQQgC0QDP/7FM0E4BQ8Tpr7nym/Ip5XuYJzEmMmtcyQ -6dIqKe6cLcwsmb5FJ+Sxce3kOJUxQfJ9emN438o2Fi+CiJ+8EUdPdk3ILY7r3y18 
-Tjvarvbj2l0Upq7ohUSdBm6O++96SmotKygY/r+QLHUWnw/qln0F7psTpURs+APQ -3SPh/QMSEgj0GDSz4DcLdxEBSL9htLX4GdnLTeqjjO/98Aa1bZL0SmFQhO3sSdPk -vmjmLuMxC1QLGpLWgti2omU8ZgT5Vdps+9u1FGZNlIM7zR6mK7L+d0CGq+ffCsn9 -9t2HVhjYsCxVYJb6CH5SkPVLpi6HfMsg2wY+oF0Dd32iPBMbKaITVaA9FCKvb7jQ -mhty3QUBjYZgv6Rn7rWlDdF/5horYmbDB7rnoEgcOMPpRfunf/ztAmgayncSd6YA -VSgU7NbHEqIbZULpkejLPoeJVF3Zr52XnGnnCv8PWniLYypMfUeUP95L6VPQMPHF -9p5J3zugkaOj/s1YzOrfr28oO6Bpm4/srK4rVJ2bBLFHIK+WEj5jlB0E5y67hscM -moi/dkfv97ALl2bSRM9gUgfh1SxKOidhd8rXj+eHDjD/DLsE4mHDosiXYY60MGo8 -bcIHX0pzLz/5FooBZu+6kcpSV3uu1OYP3Qt6f4ueJiDPO++BcYNZ ------END CERTIFICATE----- - -# Issuer: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Subject: CN=E-Tugra Global Root CA ECC v3 O=E-Tugra EBG A.S. OU=E-Tugra Trust Center -# Label: "E-Tugra Global Root CA ECC v3" -# Serial: 218504919822255052842371958738296604628416471745 -# MD5 Fingerprint: 46:bc:81:bb:f1:b5:1e:f7:4b:96:bc:14:e2:e7:27:64 -# SHA1 Fingerprint: 8a:2f:af:57:53:b1:b0:e6:a1:04:ec:5b:6a:69:71:6d:f6:1c:e2:84 -# SHA256 Fingerprint: 87:3f:46:85:fa:7f:56:36:25:25:2e:6d:36:bc:d7:f1:6f:c2:49:51:f2:64:e4:7e:1b:95:4f:49:08:cd:ca:13 ------BEGIN CERTIFICATE----- -MIICpTCCAiqgAwIBAgIUJkYZdzHhT28oNt45UYbm1JeIIsEwCgYIKoZIzj0EAwMw -gYAxCzAJBgNVBAYTAlRSMQ8wDQYDVQQHEwZBbmthcmExGTAXBgNVBAoTEEUtVHVn -cmEgRUJHIEEuUy4xHTAbBgNVBAsTFEUtVHVncmEgVHJ1c3QgQ2VudGVyMSYwJAYD -VQQDEx1FLVR1Z3JhIEdsb2JhbCBSb290IENBIEVDQyB2MzAeFw0yMDAzMTgwOTQ2 -NThaFw00NTAzMTIwOTQ2NThaMIGAMQswCQYDVQQGEwJUUjEPMA0GA1UEBxMGQW5r -YXJhMRkwFwYDVQQKExBFLVR1Z3JhIEVCRyBBLlMuMR0wGwYDVQQLExRFLVR1Z3Jh -IFRydXN0IENlbnRlcjEmMCQGA1UEAxMdRS1UdWdyYSBHbG9iYWwgUm9vdCBDQSBF -Q0MgdjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASOmCm/xxAeJ9urA8woLNheSBkQ -KczLWYHMjLiSF4mDKpL2w6QdTGLVn9agRtwcvHbB40fQWxPa56WzZkjnIZpKT4YK -fWzqTTKACrJ6CZtpS5iB4i7sAnCWH/31Rs7K3IKjYzBhMA8GA1UdEwEB/wQFMAMB -Af8wHwYDVR0jBBgwFoAU/4Ixcj75xGZsrTie0bBRiKWQzPUwHQYDVR0OBBYEFP+C -MXI++cRmbK04ntGwUYilkMz1MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNp 
-ADBmAjEA5gVYaWHlLcoNy/EZCL3W/VGSGn5jVASQkZo1kTmZ+gepZpO6yGjUij/6 -7W4WAie3AjEA3VoXK3YdZUKWpqxdinlW2Iob35reX8dQj7FbcQwm32pAAOwzkSFx -vmjkI6TZraE3 ------END CERTIFICATE----- - # Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. # Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. # Label: "Security Communication RootCA3" @@ -4587,3 +4448,188 @@ AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA 94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B 43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== -----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root E46" +# Serial: 88989738453351742415770396670917916916 +# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 +# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a +# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 +-----BEGIN CERTIFICATE----- +MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw +CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T +ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN +MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG +A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT +ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA +IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC +WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ +6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B +Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa +qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q +4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== +-----END CERTIFICATE----- + +# Issuer: CN=Sectigo Public 
Server Authentication Root R46 O=Sectigo Limited +# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited +# Label: "Sectigo Public Server Authentication Root R46" +# Serial: 156256931880233212765902055439220583700 +# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 +# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 +# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 +-----BEGIN CERTIFICATE----- +MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf +MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD +Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw +HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY +MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp +YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB +AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa +ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz +SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf +iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X +ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 +IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS +VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE +SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu ++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt +8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L +HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt +zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P +AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c +mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ +YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 +gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA 
+Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB +JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX +DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui +TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 +dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 +LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp +0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY +QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS RSA Root CA 2022" +# Serial: 148535279242832292258835760425842727825 +# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da +# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca +# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed +-----BEGIN CERTIFICATE----- +MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO +MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD +DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX +DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw +b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC +AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP +L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY +t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins +S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 +PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO +L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 +R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w +dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS ++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS 
+d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG +AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f +gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j +BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z +NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt +hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM +QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf +R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ +DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW +P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy +lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq +bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w +AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q +r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji +Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU +98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= +-----END CERTIFICATE----- + +# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation +# Label: "SSL.com TLS ECC Root CA 2022" +# Serial: 26605119622390491762507526719404364228 +# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 +# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 +# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 +-----BEGIN CERTIFICATE----- +MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw +CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT +U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 +MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh +dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG +ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm 
+acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN +SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME +GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW +uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp +15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN +b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA ECC TLS 2021" +# Serial: 81873346711060652204712539181482831616 +# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 +# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd +# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 +-----BEGIN CERTIFICATE----- +MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w +LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w +CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 +MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF +Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI +zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X +tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 +AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 +KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD +aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu +CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo +9H1/IISpQuQo +-----END CERTIFICATE----- + +# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos +# Label: "Atos TrustedRoot Root CA RSA TLS 2021" +# Serial: 111436099570196163832749341232207667876 +# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 
+# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 +# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f +-----BEGIN CERTIFICATE----- +MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM +MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx +MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 +MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD +QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN +BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z +4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv +Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ +kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs +GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln +nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh +3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD +0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy +geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 +ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB +c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI +pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU +dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS +4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs +o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ +qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw +xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM +rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 +AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR +0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY 
+o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 +dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE +oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== +-----END CERTIFICATE----- diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 4ab2915fb80..08e1acb016c 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -8,7 +8,7 @@ platformdirs==3.8.1 pyparsing==3.1.0 pyproject-hooks==1.0.0 requests==2.31.0 - certifi==2023.5.7 + certifi==2023.7.22 chardet==5.1.0 idna==3.4 urllib3==1.26.16 From 901db9cf8dc01477dda0601a3d0bc20eaa16073e Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sun, 6 Aug 2023 09:08:16 -0700 Subject: [PATCH 639/730] Use a set for TargetPython.get_tags for performance (#12204) --- news/12204.feature.rst | 1 + src/pip/_internal/commands/debug.py | 2 +- src/pip/_internal/index/package_finder.py | 4 ++-- src/pip/_internal/models/target_python.py | 18 +++++++++++++++--- .../_internal/resolution/resolvelib/factory.py | 2 +- tests/unit/test_target_python.py | 18 +++++++++--------- 6 files changed, 29 insertions(+), 16 deletions(-) create mode 100644 news/12204.feature.rst diff --git a/news/12204.feature.rst b/news/12204.feature.rst new file mode 100644 index 00000000000..6ffdf5123b1 --- /dev/null +++ b/news/12204.feature.rst @@ -0,0 +1 @@ +Improve use of datastructures to make candidate selection 1.6x faster diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index 2a3e7d298f3..88a4f798d46 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -105,7 +105,7 @@ def show_tags(options: Values) -> None: tag_limit = 10 target_python = make_target_python(options) - tags = target_python.get_tags() + tags = target_python.get_sorted_tags() # Display the target options that were explicitly provided. 
formatted_target = target_python.format_given() diff --git a/src/pip/_internal/index/package_finder.py b/src/pip/_internal/index/package_finder.py index b6f8d57e854..2121ca327e6 100644 --- a/src/pip/_internal/index/package_finder.py +++ b/src/pip/_internal/index/package_finder.py @@ -198,7 +198,7 @@ def evaluate_link(self, link: Link) -> Tuple[LinkType, str]: reason = f"wrong project name (not {self.project_name})" return (LinkType.different_project, reason) - supported_tags = self._target_python.get_tags() + supported_tags = self._target_python.get_unsorted_tags() if not wheel.supported(supported_tags): # Include the wheel's tags in the reason string to # simplify troubleshooting compatibility issues. @@ -414,7 +414,7 @@ def create( if specifier is None: specifier = specifiers.SpecifierSet() - supported_tags = target_python.get_tags() + supported_tags = target_python.get_sorted_tags() return cls( project_name=project_name, diff --git a/src/pip/_internal/models/target_python.py b/src/pip/_internal/models/target_python.py index 744bd7ef58b..67ea5da73a5 100644 --- a/src/pip/_internal/models/target_python.py +++ b/src/pip/_internal/models/target_python.py @@ -1,5 +1,5 @@ import sys -from typing import List, Optional, Tuple +from typing import List, Optional, Set, Tuple from pip._vendor.packaging.tags import Tag @@ -22,6 +22,7 @@ class TargetPython: "py_version", "py_version_info", "_valid_tags", + "_valid_tags_set", ] def __init__( @@ -61,8 +62,9 @@ def __init__( self.py_version = py_version self.py_version_info = py_version_info - # This is used to cache the return value of get_tags(). + # This is used to cache the return value of get_(un)sorted_tags. 
self._valid_tags: Optional[List[Tag]] = None + self._valid_tags_set: Optional[Set[Tag]] = None def format_given(self) -> str: """ @@ -84,7 +86,7 @@ def format_given(self) -> str: f"{key}={value!r}" for key, value in key_values if value is not None ) - def get_tags(self) -> List[Tag]: + def get_sorted_tags(self) -> List[Tag]: """ Return the supported PEP 425 tags to check wheel candidates against. @@ -108,3 +110,13 @@ def get_tags(self) -> List[Tag]: self._valid_tags = tags return self._valid_tags + + def get_unsorted_tags(self) -> Set[Tag]: + """Exactly the same as get_sorted_tags, but returns a set. + + This is important for performance. + """ + if self._valid_tags_set is None: + self._valid_tags_set = set(self.get_sorted_tags()) + + return self._valid_tags_set diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 0331297b85b..ed78580ab97 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -132,7 +132,7 @@ def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None: if not link.is_wheel: return wheel = Wheel(link.filename) - if wheel.supported(self._finder.target_python.get_tags()): + if wheel.supported(self._finder.target_python.get_unsorted_tags()): return msg = f"{link.filename} is not a supported wheel on this platform." raise UnsupportedWheel(msg) diff --git a/tests/unit/test_target_python.py b/tests/unit/test_target_python.py index d3e27e39ae8..b659c61fe08 100644 --- a/tests/unit/test_target_python.py +++ b/tests/unit/test_target_python.py @@ -88,12 +88,12 @@ def test_format_given(self, kwargs: Dict[str, Any], expected: str) -> None: ((3, 7, 3), "37"), # Check a minor version with two digits. ((3, 10, 1), "310"), - # Check that versions=None is passed to get_tags(). + # Check that versions=None is passed to get_sorted_tags(). 
(None, None), ], ) @mock.patch("pip._internal.models.target_python.get_supported") - def test_get_tags( + def test_get_sorted_tags( self, mock_get_supported: mock.Mock, py_version_info: Optional[Tuple[int, ...]], @@ -102,7 +102,7 @@ def test_get_tags( mock_get_supported.return_value = ["tag-1", "tag-2"] target_python = TargetPython(py_version_info=py_version_info) - actual = target_python.get_tags() + actual = target_python.get_sorted_tags() assert actual == ["tag-1", "tag-2"] actual = mock_get_supported.call_args[1]["version"] @@ -111,14 +111,14 @@ def test_get_tags( # Check that the value was cached. assert target_python._valid_tags == ["tag-1", "tag-2"] - def test_get_tags__uses_cached_value(self) -> None: + def test_get_unsorted_tags__uses_cached_value(self) -> None: """ - Test that get_tags() uses the cached value. + Test that get_unsorted_tags() uses the cached value. """ target_python = TargetPython(py_version_info=None) - target_python._valid_tags = [ + target_python._valid_tags_set = { Tag("py2", "none", "any"), Tag("py3", "none", "any"), - ] - actual = target_python.get_tags() - assert actual == [Tag("py2", "none", "any"), Tag("py3", "none", "any")] + } + actual = target_python.get_unsorted_tags() + assert actual == {Tag("py2", "none", "any"), Tag("py3", "none", "any")} From d8cd93f4fa61df6be7786e301e4c17ab654d0107 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Tue, 8 Aug 2023 05:37:38 -0700 Subject: [PATCH 640/730] Fix incorrect use of re function in tests (#12213) --- news/732404DE-8011-4146-8CAD-85D7756D88A6.trivial.rst | 0 tests/lib/__init__.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 news/732404DE-8011-4146-8CAD-85D7756D88A6.trivial.rst diff --git a/news/732404DE-8011-4146-8CAD-85D7756D88A6.trivial.rst b/news/732404DE-8011-4146-8CAD-85D7756D88A6.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 
7c06feaf38c..b827f88ba13 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -1187,7 +1187,7 @@ def create_basic_wheel_for_package( # Fix wheel distribution name by replacing runs of non-alphanumeric # characters with an underscore _ as per PEP 491 - name = re.sub(r"[^\w\d.]+", "_", name, re.UNICODE) + name = re.sub(r"[^\w\d.]+", "_", name) archive_name = f"{name}-{version}-py2.py3-none-any.whl" archive_path = script.scratch_path / archive_name From b2a151500b1e827de3ca950881204e4f4c9df08a Mon Sep 17 00:00:00 2001 From: Joshua Date: Tue, 8 Aug 2023 13:37:57 -0500 Subject: [PATCH 641/730] --dry-run is cool --- src/pip/_internal/cli/cmdoptions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 02ba6082793..64bc59bbd66 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -92,10 +92,10 @@ def check_dist_restriction(options: Values, check_target: bool = False) -> None: ) if check_target: - if dist_restriction_set and not options.target_dir: + if not options.dry_run and dist_restriction_set and not options.target_dir: raise CommandError( "Can not use any platform or abi specific options unless " - "installing via '--target'" + "installing via '--target' or using '--dry-run'" ) From 38126ce5f811f98c2f45fcbfad47f3d5538120fe Mon Sep 17 00:00:00 2001 From: Joshua Date: Tue, 8 Aug 2023 13:42:11 -0500 Subject: [PATCH 642/730] HEAR YE HEAR YE --- news/12215.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12215.bugfix.rst diff --git a/news/12215.bugfix.rst b/news/12215.bugfix.rst new file mode 100644 index 00000000000..f814540ff60 --- /dev/null +++ b/news/12215.bugfix.rst @@ -0,0 +1 @@ +Fix unnecessary error when using ``--dry-run`` and ``--python-version`` without ``--target`` From 864139adb0819053e76a3db4c50e916ef79cfc8c Mon Sep 17 00:00:00 2001 From: Joshua Date: Tue, 8 Aug 2023 14:32:59 -0500 
Subject: [PATCH 643/730] add tests --- tests/functional/test_install.py | 34 ++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index eabddfe58fa..03868324631 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2459,6 +2459,40 @@ def test_install_pip_prints_req_chain_local(script: PipTestEnvironment) -> None: ) +def test_install_dist_restriction_without_target(script: PipTestEnvironment) -> None: + result = script.pip( + "install", "--python-version=3.1", "--only-binary=:all:", expect_error=True + ) + assert ( + "Can not use any platform or abi specific options unless installing " + "via '--target'" in result.stderr + ), str(result) + + +def test_install_dist_restriction_dry_run_doesnt_require_target( + script: PipTestEnvironment, +) -> None: + create_basic_wheel_for_package( + script, + "base", + "0.1.0", + ) + + result = script.pip( + "install", + "--python-version=3.1", + "--only-binary=:all:", + "--dry-run", + "--no-cache-dir", + "--no-index", + "--find-links", + script.scratch_path, + "base", + ) + + assert not result.stderr, str(result) + + @pytest.mark.network def test_install_pip_prints_req_chain_pypi(script: PipTestEnvironment) -> None: """ From 46754f1580d46aaa11147796336cd79ea1682caa Mon Sep 17 00:00:00 2001 From: Joshua Date: Wed, 9 Aug 2023 09:04:26 -0500 Subject: [PATCH 644/730] It's not a bug(fix) it's a FEATURE --- news/12215.bugfix.rst | 1 - news/12215.feature.rst | 1 + 2 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 news/12215.bugfix.rst create mode 100644 news/12215.feature.rst diff --git a/news/12215.bugfix.rst b/news/12215.bugfix.rst deleted file mode 100644 index f814540ff60..00000000000 --- a/news/12215.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix unnecessary error when using ``--dry-run`` and ``--python-version`` without ``--target`` diff --git a/news/12215.feature.rst b/news/12215.feature.rst 
new file mode 100644 index 00000000000..407dc903ed9 --- /dev/null +++ b/news/12215.feature.rst @@ -0,0 +1 @@ +Allow ``pip install --dry-run`` to use platform and ABI overriding options similar to ``--target``. From b1fd3ac3e483b59ce15b1006a0a757f4502864cb Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 11 Aug 2023 04:37:31 -0400 Subject: [PATCH 645/730] Update src/pip/_internal/operations/build/build_tracker.py Co-authored-by: Paul Moore --- src/pip/_internal/operations/build/build_tracker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/operations/build/build_tracker.py b/src/pip/_internal/operations/build/build_tracker.py index ffcdbbc03f2..d4cdcb89d89 100644 --- a/src/pip/_internal/operations/build/build_tracker.py +++ b/src/pip/_internal/operations/build/build_tracker.py @@ -59,7 +59,7 @@ class BuildTracker: """Ensure that an sdist cannot request itself as a setup requirement. When an sdist is prepared, it identifies its setup requirements in the - context of ``BuildTracker#track()``. If a requirement shows up recursively, this + context of ``BuildTracker.track()``. If a requirement shows up recursively, this raises an exception. 
This stops fork bombs embedded in malicious packages.""" def __init__(self, root: str) -> None: From 4a853ea34831e9fa9695cd966905cd5b591e0b01 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Fri, 11 Aug 2023 04:38:07 -0400 Subject: [PATCH 646/730] Update src/pip/_internal/operations/build/build_tracker.py Co-authored-by: Paul Moore --- src/pip/_internal/operations/build/build_tracker.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/operations/build/build_tracker.py b/src/pip/_internal/operations/build/build_tracker.py index d4cdcb89d89..37919322b00 100644 --- a/src/pip/_internal/operations/build/build_tracker.py +++ b/src/pip/_internal/operations/build/build_tracker.py @@ -60,7 +60,9 @@ class BuildTracker: When an sdist is prepared, it identifies its setup requirements in the context of ``BuildTracker.track()``. If a requirement shows up recursively, this - raises an exception. This stops fork bombs embedded in malicious packages.""" + raises an exception. 
+ + This stops fork bombs embedded in malicious packages.""" def __init__(self, root: str) -> None: self._root = root From 8e305f262fbba1e4a1555efdaee982877d235012 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Mon, 31 Jul 2023 04:05:45 -0400 Subject: [PATCH 647/730] add test for the *existing* `install --dry-run` functionality --- tests/functional/test_install.py | 67 +++++++++++++++++++++++++++++--- 1 file changed, 61 insertions(+), 6 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index eabddfe58fa..56efe2a5cfc 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -7,7 +7,7 @@ import textwrap from os.path import curdir, join, pardir from pathlib import Path -from typing import Dict, List, Tuple +from typing import Callable, Dict, Iterable, List, Optional, Tuple import pytest @@ -20,6 +20,7 @@ PipTestEnvironment, ResolverVariant, TestData, + TestPipResult, _create_svn_repo, _create_test_package, create_basic_wheel_for_package, @@ -2371,14 +2372,68 @@ def test_install_logs_pip_version_in_debug( assert_re_match(pattern, result.stdout) -def test_install_dry_run(script: PipTestEnvironment, data: TestData) -> None: - """Test that pip install --dry-run logs what it would install.""" - result = script.pip( - "install", "--dry-run", "--find-links", data.find_links, "simple" - ) +@pytest.fixture +def install_find_links( + script: PipTestEnvironment, + data: TestData, +) -> Callable[[Iterable[str], bool, Optional[Path]], TestPipResult]: + def install( + args: Iterable[str], dry_run: bool, target_dir: Optional[Path] + ) -> TestPipResult: + return script.pip( + "install", + *( + ( + "--target", + str(target_dir), + ) + if target_dir is not None + else () + ), + *(("--dry-run",) if dry_run else ()), + "--no-index", + "--find-links", + data.find_links, + *args, + ) + + return install + + +@pytest.mark.parametrize( + "with_target_dir", + 
(True, False), +) +def test_install_dry_run_nothing_installed( + script: PipTestEnvironment, + tmpdir: Path, + install_find_links: Callable[[Iterable[str], bool, Optional[Path]], TestPipResult], + with_target_dir: bool, +) -> None: + """Test that pip install --dry-run logs what it would install, but doesn't actually + install anything.""" + if with_target_dir: + install_dir = tmpdir / "fake-install" + install_dir.mkdir() + else: + install_dir = None + + result = install_find_links(["simple"], True, install_dir) assert "Would install simple-3.0" in result.stdout assert "Successfully installed" not in result.stdout + script.assert_not_installed("simple") + if with_target_dir: + assert not os.listdir(install_dir) + + # Ensure that the same install command would normally have worked if not for + # --dry-run. + install_find_links(["simple"], False, install_dir) + if with_target_dir: + assert os.listdir(install_dir) + else: + script.assert_installed(simple="3.0") + @pytest.mark.skipif( sys.version_info < (3, 11), From e86da734c7a1bd909ff8c911a276f8f2e6df9348 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Mon, 31 Jul 2023 05:41:42 -0400 Subject: [PATCH 648/730] add test for hash mismatch --- tests/functional/test_fast_deps.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/functional/test_fast_deps.py b/tests/functional/test_fast_deps.py index 0109db825b7..b76b833b938 100644 --- a/tests/functional/test_fast_deps.py +++ b/tests/functional/test_fast_deps.py @@ -2,12 +2,14 @@ import json import os import pathlib +import re from os.path import basename from typing import Iterable from pip._vendor.packaging.utils import canonicalize_name from pytest import mark +from pip._internal.utils.misc import hash_file from tests.lib import PipTestEnvironment, TestData, TestPipResult @@ -101,3 +103,31 @@ def test_hash_mismatch(script: PipTestEnvironment, tmp_path: pathlib.Path) -> No 
expect_error=True, ) assert "DO NOT MATCH THE HASHES" in result.stderr + + +@mark.network +def test_hash_mismatch_existing_download( + script: PipTestEnvironment, tmp_path: pathlib.Path +) -> None: + reqs = tmp_path / "requirements.txt" + reqs.write_text("idna==2.10") + dl_dir = tmp_path / "downloads" + dl_dir.mkdir() + idna_wheel = dl_dir / "idna-2.10-py2.py3-none-any.whl" + idna_wheel.write_text("asdf") + result = script.pip( + "download", + "--use-feature=fast-deps", + "-r", + str(reqs), + "-d", + str(dl_dir), + allow_stderr_warning=True, + ) + assert re.search( + r"WARNING: Previously-downloaded file.*has bad hash", result.stderr + ) + assert ( + hash_file(str(idna_wheel))[0].hexdigest() + == "b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ) From 67ff36b838f543037169828be95f86f903d609c6 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Mon, 31 Jul 2023 04:07:55 -0400 Subject: [PATCH 649/730] move directory metadata test out of req install tests --- tests/unit/metadata/test_metadata.py | 14 ++++++++++++++ tests/unit/test_req.py | 17 ----------------- 2 files changed, 14 insertions(+), 17 deletions(-) diff --git a/tests/unit/metadata/test_metadata.py b/tests/unit/metadata/test_metadata.py index f77178fb9c1..47093fb54d1 100644 --- a/tests/unit/metadata/test_metadata.py +++ b/tests/unit/metadata/test_metadata.py @@ -129,3 +129,17 @@ def test_dist_found_in_zip(tmp_path: Path) -> None: dist = get_environment([location]).get_distribution("pkg") assert dist is not None and dist.location is not None assert Path(dist.location) == Path(location) + + +@pytest.mark.parametrize( + "path", + ( + "/path/to/foo.egg-info".replace("/", os.path.sep), + # Tests issue fixed by https://github.com/pypa/pip/pull/2530 + "/path/to/foo.egg-info/".replace("/", os.path.sep), + ), +) +def test_trailing_slash_directory_metadata(path: str) -> None: + dist = get_directory_distribution(path) + assert dist.raw_name == 
dist.canonical_name == "foo" + assert dist.location == "/path/to".replace("/", os.path.sep) diff --git a/tests/unit/test_req.py b/tests/unit/test_req.py index 545828f8eea..2d1fa269490 100644 --- a/tests/unit/test_req.py +++ b/tests/unit/test_req.py @@ -23,7 +23,6 @@ PreviousBuildDirError, ) from pip._internal.index.package_finder import PackageFinder -from pip._internal.metadata import select_backend from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo from pip._internal.models.link import Link from pip._internal.network.session import PipSession @@ -600,22 +599,6 @@ def test_url_preserved_editable_req(self) -> None: assert req.link is not None assert req.link.url == url - @pytest.mark.parametrize( - "path", - ( - "/path/to/foo.egg-info".replace("/", os.path.sep), - # Tests issue fixed by https://github.com/pypa/pip/pull/2530 - "/path/to/foo.egg-info/".replace("/", os.path.sep), - ), - ) - def test_get_dist(self, path: str) -> None: - req = install_req_from_line("foo") - req.metadata_directory = path - dist = req.get_dist() - assert isinstance(dist, select_backend().Distribution) - assert dist.raw_name == dist.canonical_name == "foo" - assert dist.location == "/path/to".replace("/", os.path.sep) - def test_markers(self) -> None: for line in ( # recommended syntax From 20b54de4dfb3567e088ca058819de43df90364d9 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Tue, 1 Aug 2023 13:34:50 -0400 Subject: [PATCH 650/730] add news --- news/12183.trivial.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12183.trivial.rst diff --git a/news/12183.trivial.rst b/news/12183.trivial.rst new file mode 100644 index 00000000000..c22e854c9a5 --- /dev/null +++ b/news/12183.trivial.rst @@ -0,0 +1 @@ +Add test cases for some behaviors of ``install --dry-run`` and ``--use-feature=fast-deps``. 
From e27af2c3c9a336d6379b24f92cd98cb14c1bd090 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Sat, 12 Aug 2023 10:46:22 -0400 Subject: [PATCH 651/730] add notes on hash mismatch testing --- tests/functional/test_fast_deps.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_fast_deps.py b/tests/functional/test_fast_deps.py index b76b833b938..9e529c0891e 100644 --- a/tests/functional/test_fast_deps.py +++ b/tests/functional/test_fast_deps.py @@ -106,9 +106,12 @@ def test_hash_mismatch(script: PipTestEnvironment, tmp_path: pathlib.Path) -> No @mark.network -def test_hash_mismatch_existing_download( +def test_hash_mismatch_existing_download_for_metadata_only_wheel( script: PipTestEnvironment, tmp_path: pathlib.Path ) -> None: + """Metadata-only wheels from PEP 658 or fast-deps check for hash matching in + a separate code path than when the wheel is downloaded all at once. Make sure we + still check for hash mismatches.""" reqs = tmp_path / "requirements.txt" reqs.write_text("idna==2.10") dl_dir = tmp_path / "downloads" @@ -117,6 +120,7 @@ def test_hash_mismatch_existing_download( idna_wheel.write_text("asdf") result = script.pip( "download", + # Ensure that we have a metadata-only dist for idna. "--use-feature=fast-deps", "-r", str(reqs), @@ -127,6 +131,7 @@ def test_hash_mismatch_existing_download( assert re.search( r"WARNING: Previously-downloaded file.*has bad hash", result.stderr ) + # This is the correct hash for idna==2.10. 
assert ( hash_file(str(idna_wheel))[0].hexdigest() == "b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" From 8704c7a5dbd62367b01f39317b6213578683f4f6 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Sat, 12 Aug 2023 10:51:33 -0400 Subject: [PATCH 652/730] remove unnecessary fixture --- tests/functional/test_install.py | 54 ++++++++++++++++---------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 56efe2a5cfc..5e8a82fb345 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -7,7 +7,7 @@ import textwrap from os.path import curdir, join, pardir from pathlib import Path -from typing import Callable, Dict, Iterable, List, Optional, Tuple +from typing import Dict, Iterable, List, Optional, Tuple import pytest @@ -2372,32 +2372,30 @@ def test_install_logs_pip_version_in_debug( assert_re_match(pattern, result.stdout) -@pytest.fixture def install_find_links( script: PipTestEnvironment, data: TestData, -) -> Callable[[Iterable[str], bool, Optional[Path]], TestPipResult]: - def install( - args: Iterable[str], dry_run: bool, target_dir: Optional[Path] - ) -> TestPipResult: - return script.pip( - "install", - *( - ( - "--target", - str(target_dir), - ) - if target_dir is not None - else () - ), - *(("--dry-run",) if dry_run else ()), - "--no-index", - "--find-links", - data.find_links, - *args, - ) - - return install + args: Iterable[str], + *, + dry_run: bool, + target_dir: Optional[Path], +) -> TestPipResult: + return script.pip( + "install", + *( + ( + "--target", + str(target_dir), + ) + if target_dir is not None + else () + ), + *(("--dry-run",) if dry_run else ()), + "--no-index", + "--find-links", + data.find_links, + *args, + ) @pytest.mark.parametrize( @@ -2406,8 +2404,8 @@ def install( ) def test_install_dry_run_nothing_installed( script: PipTestEnvironment, + data: 
TestData, tmpdir: Path, - install_find_links: Callable[[Iterable[str], bool, Optional[Path]], TestPipResult], with_target_dir: bool, ) -> None: """Test that pip install --dry-run logs what it would install, but doesn't actually @@ -2418,7 +2416,9 @@ def test_install_dry_run_nothing_installed( else: install_dir = None - result = install_find_links(["simple"], True, install_dir) + result = install_find_links( + script, data, ["simple"], dry_run=True, target_dir=install_dir + ) assert "Would install simple-3.0" in result.stdout assert "Successfully installed" not in result.stdout @@ -2428,7 +2428,7 @@ def test_install_dry_run_nothing_installed( # Ensure that the same install command would normally have worked if not for # --dry-run. - install_find_links(["simple"], False, install_dir) + install_find_links(script, data, ["simple"], dry_run=False, target_dir=install_dir) if with_target_dir: assert os.listdir(install_dir) else: From 2e365bdab1a37c296eae264991c2e88d57f73a67 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Tue, 1 Aug 2023 22:11:07 -0400 Subject: [PATCH 653/730] move test_download_metadata mock pypi index utilities to conftest.py --- tests/conftest.py | 242 ++++++++++++++++++++++++ tests/functional/test_download.py | 303 ++++-------------------------- 2 files changed, 282 insertions(+), 263 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index a183cadf2e9..f481e06c8d6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,22 +1,32 @@ import compileall import fnmatch +import http.server import io import os import re import shutil import subprocess import sys +import threading from contextlib import ExitStack, contextmanager +from dataclasses import dataclass +from enum import Enum +from hashlib import sha256 from pathlib import Path +from textwrap import dedent from typing import ( TYPE_CHECKING, + Any, AnyStr, Callable, + ClassVar, Dict, Iterable, Iterator, List, Optional, + Set, + 
Tuple, Union, ) from unittest.mock import patch @@ -750,3 +760,235 @@ def proxy(request: pytest.FixtureRequest) -> str: @pytest.fixture def enable_user_site(virtualenv: VirtualEnvironment) -> None: virtualenv.user_site_packages = True + + +class MetadataKind(Enum): + """All the types of values we might be provided for the data-dist-info-metadata + attribute from PEP 658.""" + + # Valid: will read metadata from the dist instead. + No = "none" + # Valid: will read the .metadata file, but won't check its hash. + Unhashed = "unhashed" + # Valid: will read the .metadata file and check its hash matches. + Sha256 = "sha256" + # Invalid: will error out after checking the hash. + WrongHash = "wrong-hash" + # Invalid: will error out after failing to fetch the .metadata file. + NoFile = "no-file" + + +@dataclass(frozen=True) +class FakePackage: + """Mock package structure used to generate a PyPI repository. + + FakePackage name and version should correspond to sdists (.tar.gz files) in our test + data.""" + + name: str + version: str + filename: str + metadata: MetadataKind + # This will override any dependencies specified in the actual dist's METADATA. + requires_dist: Tuple[str, ...] = () + # This will override the Name specified in the actual dist's METADATA. 
+ metadata_name: Optional[str] = None + + def metadata_filename(self) -> str: + """This is specified by PEP 658.""" + return f"{self.filename}.metadata" + + def generate_additional_tag(self) -> str: + """This gets injected into the tag in the generated PyPI index page for this + package.""" + if self.metadata == MetadataKind.No: + return "" + if self.metadata in [MetadataKind.Unhashed, MetadataKind.NoFile]: + return 'data-dist-info-metadata="true"' + if self.metadata == MetadataKind.WrongHash: + return 'data-dist-info-metadata="sha256=WRONG-HASH"' + assert self.metadata == MetadataKind.Sha256 + checksum = sha256(self.generate_metadata()).hexdigest() + return f'data-dist-info-metadata="sha256={checksum}"' + + def requires_str(self) -> str: + if not self.requires_dist: + return "" + joined = " and ".join(self.requires_dist) + return f"Requires-Dist: {joined}" + + def generate_metadata(self) -> bytes: + """This is written to `self.metadata_filename()` and will override the actual + dist's METADATA, unless `self.metadata == MetadataKind.NoFile`.""" + return dedent( + f"""\ + Metadata-Version: 2.1 + Name: {self.metadata_name or self.name} + Version: {self.version} + {self.requires_str()} + """ + ).encode("utf-8") + + +@pytest.fixture(scope="session") +def fake_packages() -> Dict[str, List[FakePackage]]: + """The package database we generate for testing PEP 658 support.""" + return { + "simple": [ + FakePackage("simple", "1.0", "simple-1.0.tar.gz", MetadataKind.Sha256), + FakePackage("simple", "2.0", "simple-2.0.tar.gz", MetadataKind.No), + # This will raise a hashing error. + FakePackage("simple", "3.0", "simple-3.0.tar.gz", MetadataKind.WrongHash), + ], + "simple2": [ + # Override the dependencies here in order to force pip to download + # simple-1.0.tar.gz as well. + FakePackage( + "simple2", + "1.0", + "simple2-1.0.tar.gz", + MetadataKind.Unhashed, + ("simple==1.0",), + ), + # This will raise an error when pip attempts to fetch the metadata file. 
+ FakePackage("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile), + # This has a METADATA file with a mismatched name. + FakePackage( + "simple2", + "3.0", + "simple2-3.0.tar.gz", + MetadataKind.Sha256, + metadata_name="not-simple2", + ), + ], + "colander": [ + # Ensure we can read the dependencies from a metadata file within a wheel + # *without* PEP 658 metadata. + FakePackage( + "colander", + "0.9.9", + "colander-0.9.9-py2.py3-none-any.whl", + MetadataKind.No, + ), + ], + "compilewheel": [ + # Ensure we can override the dependencies of a wheel file by injecting PEP + # 658 metadata. + FakePackage( + "compilewheel", + "1.0", + "compilewheel-1.0-py2.py3-none-any.whl", + MetadataKind.Unhashed, + ("simple==1.0",), + ), + ], + "has-script": [ + # Ensure we check PEP 658 metadata hashing errors for wheel files. + FakePackage( + "has-script", + "1.0", + "has.script-1.0-py2.py3-none-any.whl", + MetadataKind.WrongHash, + ), + ], + "translationstring": [ + FakePackage( + "translationstring", + "1.1", + "translationstring-1.1.tar.gz", + MetadataKind.No, + ), + ], + "priority": [ + # Ensure we check for a missing metadata file for wheels. + FakePackage( + "priority", + "1.0", + "priority-1.0-py2.py3-none-any.whl", + MetadataKind.NoFile, + ), + ], + "requires-simple-extra": [ + # Metadata name is not canonicalized. + FakePackage( + "requires-simple-extra", + "0.1", + "requires_simple_extra-0.1-py2.py3-none-any.whl", + MetadataKind.Sha256, + metadata_name="Requires_Simple.Extra", + ), + ], + } + + +@pytest.fixture(scope="session") +def html_index_for_packages( + shared_data: TestData, + fake_packages: Dict[str, List[FakePackage]], + tmpdir_factory: pytest.TempPathFactory, +) -> Path: + """Generate a PyPI HTML package index within a local directory pointing to + synthetic test data.""" + html_dir = tmpdir_factory.mktemp("fake_index_html_content") + + # (1) Generate the content for a PyPI index.html. 
+ pkg_links = "\n".join( + f' {pkg}' for pkg in fake_packages.keys() + ) + index_html = f"""\ + + + + + Simple index + + +{pkg_links} + +""" + # (2) Generate the index.html in a new subdirectory of the temp directory. + (html_dir / "index.html").write_text(index_html) + + # (3) Generate subdirectories for individual packages, each with their own + # index.html. + for pkg, links in fake_packages.items(): + pkg_subdir = html_dir / pkg + pkg_subdir.mkdir() + + download_links: List[str] = [] + for package_link in links: + # (3.1) Generate the tag which pip can crawl pointing to this + # specific package version. + download_links.append( + f' {package_link.filename}
' # noqa: E501 + ) + # (3.2) Copy over the corresponding file in `shared_data.packages`. + shutil.copy( + shared_data.packages / package_link.filename, + pkg_subdir / package_link.filename, + ) + # (3.3) Write a metadata file, if applicable. + if package_link.metadata != MetadataKind.NoFile: + with open(pkg_subdir / package_link.metadata_filename(), "wb") as f: + f.write(package_link.generate_metadata()) + + # (3.4) After collating all the download links and copying over the files, + # write an index.html with the generated download links for each + # copied file for this specific package name. + download_links_str = "\n".join(download_links) + pkg_index_content = f"""\ + + + + + Links for {pkg} + + +

Links for {pkg}

+{download_links_str} + +""" + with open(pkg_subdir / "index.html", "w") as f: + f.write(pkg_index_content) + + return html_dir diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index 8da185c066e..bedadc7045b 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -1,14 +1,11 @@ +import http.server import os import re import shutil import textwrap -import uuid -from dataclasses import dataclass -from enum import Enum from hashlib import sha256 from pathlib import Path -from textwrap import dedent -from typing import Callable, Dict, List, Optional, Tuple +from typing import Callable, List, Tuple import pytest @@ -1237,181 +1234,47 @@ def test_download_use_pep517_propagation( assert len(downloads) == 2 -class MetadataKind(Enum): - """All the types of values we might be provided for the data-dist-info-metadata - attribute from PEP 658.""" - - # Valid: will read metadata from the dist instead. - No = "none" - # Valid: will read the .metadata file, but won't check its hash. - Unhashed = "unhashed" - # Valid: will read the .metadata file and check its hash matches. - Sha256 = "sha256" - # Invalid: will error out after checking the hash. - WrongHash = "wrong-hash" - # Invalid: will error out after failing to fetch the .metadata file. - NoFile = "no-file" - - -@dataclass(frozen=True) -class Package: - """Mock package structure used to generate a PyPI repository. - - Package name and version should correspond to sdists (.tar.gz files) in our test - data.""" - - name: str - version: str - filename: str - metadata: MetadataKind - # This will override any dependencies specified in the actual dist's METADATA. - requires_dist: Tuple[str, ...] = () - # This will override the Name specified in the actual dist's METADATA. 
- metadata_name: Optional[str] = None - - def metadata_filename(self) -> str: - """This is specified by PEP 658.""" - return f"{self.filename}.metadata" - - def generate_additional_tag(self) -> str: - """This gets injected into the tag in the generated PyPI index page for this - package.""" - if self.metadata == MetadataKind.No: - return "" - if self.metadata in [MetadataKind.Unhashed, MetadataKind.NoFile]: - return 'data-dist-info-metadata="true"' - if self.metadata == MetadataKind.WrongHash: - return 'data-dist-info-metadata="sha256=WRONG-HASH"' - assert self.metadata == MetadataKind.Sha256 - checksum = sha256(self.generate_metadata()).hexdigest() - return f'data-dist-info-metadata="sha256={checksum}"' - - def requires_str(self) -> str: - if not self.requires_dist: - return "" - joined = " and ".join(self.requires_dist) - return f"Requires-Dist: {joined}" - - def generate_metadata(self) -> bytes: - """This is written to `self.metadata_filename()` and will override the actual - dist's METADATA, unless `self.metadata == MetadataKind.NoFile`.""" - return dedent( - f"""\ - Metadata-Version: 2.1 - Name: {self.metadata_name or self.name} - Version: {self.version} - {self.requires_str()} - """ - ).encode("utf-8") - - -@pytest.fixture(scope="function") -def write_index_html_content(tmpdir: Path) -> Callable[[str], Path]: - """Generate a PyPI package index.html within a temporary local directory.""" - html_dir = tmpdir / "index_html_content" - html_dir.mkdir() - - def generate_index_html_subdir(index_html: str) -> Path: - """Create a new subdirectory after a UUID and write an index.html.""" - new_subdir = html_dir / uuid.uuid4().hex - new_subdir.mkdir() - - with open(new_subdir / "index.html", "w") as f: - f.write(index_html) - - return new_subdir - - return generate_index_html_subdir - - @pytest.fixture(scope="function") -def html_index_for_packages( - shared_data: TestData, - write_index_html_content: Callable[[str], Path], -) -> Callable[..., Path]: - """Generate a 
PyPI HTML package index within a local directory pointing to - blank data.""" +def download_local_html_index( + script: PipTestEnvironment, + html_index_for_packages: Path, + tmpdir: Path, +) -> Callable[..., Tuple[TestPipResult, Path]]: + """Execute `pip download` against a generated PyPI index.""" + download_dir = tmpdir / "download_dir" - def generate_html_index_for_packages(packages: Dict[str, List[Package]]) -> Path: + def run_for_generated_index( + args: List[str], + allow_error: bool = False, + ) -> Tuple[TestPipResult, Path]: """ - Produce a PyPI directory structure pointing to the specified packages. + Produce a PyPI directory structure pointing to the specified packages, then + execute `pip download -i ...` pointing to our generated index. """ - # (1) Generate the content for a PyPI index.html. - pkg_links = "\n".join( - f' {pkg}' for pkg in packages.keys() - ) - index_html = f"""\ - - - - - Simple index - - -{pkg_links} - -""" - # (2) Generate the index.html in a new subdirectory of the temp directory. - index_html_subdir = write_index_html_content(index_html) - - # (3) Generate subdirectories for individual packages, each with their own - # index.html. - for pkg, links in packages.items(): - pkg_subdir = index_html_subdir / pkg - pkg_subdir.mkdir() - - download_links: List[str] = [] - for package_link in links: - # (3.1) Generate the tag which pip can crawl pointing to this - # specific package version. - download_links.append( - f' {package_link.filename}
' # noqa: E501 - ) - # (3.2) Copy over the corresponding file in `shared_data.packages`. - shutil.copy( - shared_data.packages / package_link.filename, - pkg_subdir / package_link.filename, - ) - # (3.3) Write a metadata file, if applicable. - if package_link.metadata != MetadataKind.NoFile: - with open(pkg_subdir / package_link.metadata_filename(), "wb") as f: - f.write(package_link.generate_metadata()) - - # (3.4) After collating all the download links and copying over the files, - # write an index.html with the generated download links for each - # copied file for this specific package name. - download_links_str = "\n".join(download_links) - pkg_index_content = f"""\ - - - - - Links for {pkg} - - -

Links for {pkg}

-{download_links_str} - -""" - with open(pkg_subdir / "index.html", "w") as f: - f.write(pkg_index_content) - - return index_html_subdir - - return generate_html_index_for_packages + pip_args = [ + "download", + "-d", + str(download_dir), + "-i", + path_to_url(str(html_index_for_packages)), + *args, + ] + result = script.pip(*pip_args, allow_error=allow_error) + return (result, download_dir) + + return run_for_generated_index @pytest.fixture(scope="function") -def download_generated_html_index( +def download_server_html_index( script: PipTestEnvironment, - html_index_for_packages: Callable[[Dict[str, List[Package]]], Path], tmpdir: Path, + html_index_with_onetime_server: http.server.ThreadingHTTPServer, ) -> Callable[..., Tuple[TestPipResult, Path]]: """Execute `pip download` against a generated PyPI index.""" download_dir = tmpdir / "download_dir" def run_for_generated_index( - packages: Dict[str, List[Package]], args: List[str], allow_error: bool = False, ) -> Tuple[TestPipResult, Path]: @@ -1419,13 +1282,12 @@ def run_for_generated_index( Produce a PyPI directory structure pointing to the specified packages, then execute `pip download -i ...` pointing to our generated index. """ - index_dir = html_index_for_packages(packages) pip_args = [ "download", "-d", str(download_dir), "-i", - path_to_url(str(index_dir)), + "http://localhost:8000", *args, ] result = script.pip(*pip_args, allow_error=allow_error) @@ -1434,86 +1296,6 @@ def run_for_generated_index( return run_for_generated_index -# The package database we generate for testing PEP 658 support. -_simple_packages: Dict[str, List[Package]] = { - "simple": [ - Package("simple", "1.0", "simple-1.0.tar.gz", MetadataKind.Sha256), - Package("simple", "2.0", "simple-2.0.tar.gz", MetadataKind.No), - # This will raise a hashing error. 
- Package("simple", "3.0", "simple-3.0.tar.gz", MetadataKind.WrongHash), - ], - "simple2": [ - # Override the dependencies here in order to force pip to download - # simple-1.0.tar.gz as well. - Package( - "simple2", - "1.0", - "simple2-1.0.tar.gz", - MetadataKind.Unhashed, - ("simple==1.0",), - ), - # This will raise an error when pip attempts to fetch the metadata file. - Package("simple2", "2.0", "simple2-2.0.tar.gz", MetadataKind.NoFile), - # This has a METADATA file with a mismatched name. - Package( - "simple2", - "3.0", - "simple2-3.0.tar.gz", - MetadataKind.Sha256, - metadata_name="not-simple2", - ), - ], - "colander": [ - # Ensure we can read the dependencies from a metadata file within a wheel - # *without* PEP 658 metadata. - Package( - "colander", "0.9.9", "colander-0.9.9-py2.py3-none-any.whl", MetadataKind.No - ), - ], - "compilewheel": [ - # Ensure we can override the dependencies of a wheel file by injecting PEP - # 658 metadata. - Package( - "compilewheel", - "1.0", - "compilewheel-1.0-py2.py3-none-any.whl", - MetadataKind.Unhashed, - ("simple==1.0",), - ), - ], - "has-script": [ - # Ensure we check PEP 658 metadata hashing errors for wheel files. - Package( - "has-script", - "1.0", - "has.script-1.0-py2.py3-none-any.whl", - MetadataKind.WrongHash, - ), - ], - "translationstring": [ - Package( - "translationstring", "1.1", "translationstring-1.1.tar.gz", MetadataKind.No - ), - ], - "priority": [ - # Ensure we check for a missing metadata file for wheels. - Package( - "priority", "1.0", "priority-1.0-py2.py3-none-any.whl", MetadataKind.NoFile - ), - ], - "requires-simple-extra": [ - # Metadata name is not canonicalized. 
- Package( - "requires-simple-extra", - "0.1", - "requires_simple_extra-0.1-py2.py3-none-any.whl", - MetadataKind.Sha256, - metadata_name="Requires_Simple.Extra", - ), - ], -} - - @pytest.mark.parametrize( "requirement_to_download, expected_outputs", [ @@ -1530,14 +1312,13 @@ def run_for_generated_index( ], ) def test_download_metadata( - download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], requirement_to_download: str, expected_outputs: List[str], ) -> None: """Verify that if a data-dist-info-metadata attribute is present, then it is used instead of the actual dist's METADATA.""" - _, download_dir = download_generated_html_index( - _simple_packages, + _, download_dir = download_local_html_index( [requirement_to_download], ) assert sorted(os.listdir(download_dir)) == expected_outputs @@ -1557,14 +1338,13 @@ def test_download_metadata( ], ) def test_incorrect_metadata_hash( - download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], requirement_to_download: str, real_hash: str, ) -> None: """Verify that if a hash for data-dist-info-metadata is provided, it must match the actual hash of the metadata file.""" - result, _ = download_generated_html_index( - _simple_packages, + result, _ = download_local_html_index( [requirement_to_download], allow_error=True, ) @@ -1583,15 +1363,14 @@ def test_incorrect_metadata_hash( ], ) def test_metadata_not_found( - download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], requirement_to_download: str, expected_url: str, ) -> None: """Verify that if a data-dist-info-metadata attribute is provided, that pip will fetch the .metadata file at the location specified by PEP 658, and error if unavailable.""" - result, _ = download_generated_html_index( - _simple_packages, + 
result, _ = download_local_html_index( [requirement_to_download], allow_error=True, ) @@ -1604,11 +1383,10 @@ def test_metadata_not_found( def test_produces_error_for_mismatched_package_name_in_metadata( - download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], ) -> None: """Verify that the package name from the metadata matches the requested package.""" - result, _ = download_generated_html_index( - _simple_packages, + result, _ = download_local_html_index( ["simple2==3.0"], allow_error=True, ) @@ -1628,7 +1406,7 @@ def test_produces_error_for_mismatched_package_name_in_metadata( ), ) def test_canonicalizes_package_name_before_verifying_metadata( - download_generated_html_index: Callable[..., Tuple[TestPipResult, Path]], + download_local_html_index: Callable[..., Tuple[TestPipResult, Path]], requirement: str, ) -> None: """Verify that the package name from the command line and the package's @@ -1636,8 +1414,7 @@ def test_canonicalizes_package_name_before_verifying_metadata( Regression test for https://github.com/pypa/pip/issues/12038 """ - result, download_dir = download_generated_html_index( - _simple_packages, + result, download_dir = download_local_html_index( [requirement], allow_error=True, ) From 50a2fb4f9fca0427c192608c30f3cf536b4e4ed4 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Wed, 2 Aug 2023 00:02:04 -0400 Subject: [PATCH 654/730] add mock server to test that each dist is downloaded exactly once --- tests/conftest.py | 48 +++++++++++++++++++++++++++++ tests/functional/test_download.py | 51 +++++++++++++++++++++++++++++++ 2 files changed, 99 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index f481e06c8d6..25581af9a45 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -992,3 +992,51 @@ def html_index_for_packages( f.write(pkg_index_content) return html_dir + + +class 
OneTimeDownloadHandler(http.server.SimpleHTTPRequestHandler): + """Serve files from the current directory, but error if a file is downloaded more + than once.""" + + _seen_paths: ClassVar[Set[str]] = set() + + def do_GET(self) -> None: + if self.path in self._seen_paths: + self.send_error( + http.HTTPStatus.NOT_FOUND, + f"File {self.path} not available more than once!", + ) + return + super().do_GET() + if not (self.path.endswith("/") or self.path.endswith(".metadata")): + self._seen_paths.add(self.path) + + +@pytest.fixture(scope="function") +def html_index_with_onetime_server( + html_index_for_packages: Path, +) -> Iterator[http.server.ThreadingHTTPServer]: + """Serve files from a generated pypi index, erroring if a file is downloaded more + than once. + + Provide `-i http://localhost:8000` to pip invocations to point them at this server. + """ + + class InDirectoryServer(http.server.ThreadingHTTPServer): + def finish_request(self, request: Any, client_address: Any) -> None: + self.RequestHandlerClass( + request, client_address, self, directory=str(html_index_for_packages) # type: ignore[call-arg] # noqa: E501 + ) + + class Handler(OneTimeDownloadHandler): + _seen_paths: ClassVar[Set[str]] = set() + + with InDirectoryServer(("", 8000), Handler) as httpd: + server_thread = threading.Thread(target=httpd.serve_forever) + server_thread.start() + + try: + yield httpd + finally: + httpd.shutdown() + server_thread.join() diff --git a/tests/functional/test_download.py b/tests/functional/test_download.py index bedadc7045b..c204f424b67 100644 --- a/tests/functional/test_download.py +++ b/tests/functional/test_download.py @@ -1324,6 +1324,57 @@ def test_download_metadata( assert sorted(os.listdir(download_dir)) == expected_outputs +@pytest.mark.parametrize( + "requirement_to_download, expected_outputs, doubled_path", + [ + ( + "simple2==1.0", + ["simple-1.0.tar.gz", "simple2-1.0.tar.gz"], + "/simple2/simple2-1.0.tar.gz", + ), + ("simple==2.0", ["simple-2.0.tar.gz"], 
"/simple/simple-2.0.tar.gz"), + ( + "colander", + ["colander-0.9.9-py2.py3-none-any.whl", "translationstring-1.1.tar.gz"], + "/colander/colander-0.9.9-py2.py3-none-any.whl", + ), + ( + "compilewheel", + [ + "compilewheel-1.0-py2.py3-none-any.whl", + "simple-1.0.tar.gz", + ], + "/compilewheel/compilewheel-1.0-py2.py3-none-any.whl", + ), + ], +) +def test_download_metadata_server( + download_server_html_index: Callable[..., Tuple[TestPipResult, Path]], + requirement_to_download: str, + expected_outputs: List[str], + doubled_path: str, +) -> None: + """Verify that if a data-dist-info-metadata attribute is present, then it is used + instead of the actual dist's METADATA. + + Additionally, verify that each dist is downloaded exactly once using a mock server. + + This is a regression test for issue https://github.com/pypa/pip/issues/11847. + """ + _, download_dir = download_server_html_index( + [requirement_to_download, "--no-cache-dir"], + ) + assert sorted(os.listdir(download_dir)) == expected_outputs + shutil.rmtree(download_dir) + result, _ = download_server_html_index( + [requirement_to_download, "--no-cache-dir"], + allow_error=True, + ) + assert result.returncode != 0 + expected_msg = f"File {doubled_path} not available more than once!" 
+ assert expected_msg in result.stderr + + @pytest.mark.parametrize( "requirement_to_download, real_hash", [ From 22637722aa7c9da0aaf58be82b6bef16d6efd097 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Tue, 1 Aug 2023 20:45:26 -0400 Subject: [PATCH 655/730] fix #11847 for sdists --- src/pip/_internal/operations/prepare.py | 50 ++++++++++++++++--------- 1 file changed, 33 insertions(+), 17 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 8402be01bbf..81bf48fbb75 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -7,7 +7,7 @@ import mimetypes import os import shutil -from typing import Dict, Iterable, List, Optional +from typing import Dict, Iterable, List, Optional, Set from pip._vendor.packaging.utils import canonicalize_name @@ -474,6 +474,8 @@ def _complete_partial_requirements( assert req.link links_to_fully_download[req.link] = req + reqs_with_newly_unpacked_source_dirs: Set[Link] = set() + batch_download = self._batch_download( links_to_fully_download.keys(), temp_dir, @@ -481,25 +483,35 @@ def _complete_partial_requirements( for link, (filepath, _) in batch_download: logger.debug("Downloading link %s to %s", link, filepath) req = links_to_fully_download[link] + # Record the downloaded file path so wheel reqs can extract a Distribution + # in .get_dist(). req.local_file_path = filepath - # TODO: This needs fixing for sdists - # This is an emergency fix for #11847, which reports that - # distributions get downloaded twice when metadata is loaded - # from a PEP 658 standalone metadata file. Setting _downloaded - # fixes this for wheels, but breaks the sdist case (tests - # test_download_metadata). As PyPI is currently only serving - # metadata for wheels, this is not an immediate issue. 
- # Fixing the problem properly looks like it will require a - # complete refactoring of the `prepare_linked_requirements_more` - # logic, and I haven't a clue where to start on that, so for now - # I have fixed the issue *just* for wheels. - if req.is_wheel: - self._downloaded[req.link.url] = filepath + # Record that the file is downloaded so we don't do it again in + # _prepare_linked_requirement(). + self._downloaded[req.link.url] = filepath + + # If this is an sdist, we need to unpack it and set the .source_dir + # immediately after downloading, as _prepare_linked_requirement() assumes + # the req is either not downloaded at all, or both downloaded and + # unpacked. The downloading and unpacking is is typically done with + # unpack_url(), but we separate the downloading and unpacking steps here in + # order to use the BatchDownloader. + if not req.is_wheel: + hashes = self._get_linked_req_hashes(req) + assert filepath == _check_download_dir(req.link, temp_dir, hashes) + self._ensure_link_req_src_dir(req, parallel_builds) + unpack_file(filepath, req.source_dir) + reqs_with_newly_unpacked_source_dirs.add(req.link) # This step is necessary to ensure all lazy wheels are processed # successfully by the 'download', 'wheel', and 'install' commands. 
for req in partially_downloaded_reqs: - self._prepare_linked_requirement(req, parallel_builds) + self._prepare_linked_requirement( + req, + parallel_builds, + source_dir_exists_already=req.link + in reqs_with_newly_unpacked_source_dirs, + ) def prepare_linked_requirement( self, req: InstallRequirement, parallel_builds: bool = False @@ -570,7 +582,10 @@ def prepare_linked_requirements_more( ) def _prepare_linked_requirement( - self, req: InstallRequirement, parallel_builds: bool + self, + req: InstallRequirement, + parallel_builds: bool, + source_dir_exists_already: bool = False, ) -> BaseDistribution: assert req.link link = req.link @@ -602,7 +617,8 @@ def _prepare_linked_requirement( req.link = req.cached_wheel_source_link link = req.link - self._ensure_link_req_src_dir(req, parallel_builds) + if not source_dir_exists_already: + self._ensure_link_req_src_dir(req, parallel_builds) if link.is_existing_dir(): local_file = None From 957ad95c7d2ca70077a61a3adb551824818f929b Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Tue, 1 Aug 2023 21:18:15 -0400 Subject: [PATCH 656/730] add news entry --- news/12191.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12191.bugfix.rst diff --git a/news/12191.bugfix.rst b/news/12191.bugfix.rst new file mode 100644 index 00000000000..1f384835fef --- /dev/null +++ b/news/12191.bugfix.rst @@ -0,0 +1 @@ +Prevent downloading sdists twice when PEP 658 metadata is present. 
From bfa8a5532d45815d2229ba2e2a920fde6bffc800 Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Thu, 3 Aug 2023 05:50:37 -0400 Subject: [PATCH 657/730] clean up duplicated code --- src/pip/_internal/operations/prepare.py | 54 +++++-------------------- src/pip/_internal/req/req_install.py | 29 ++++++++++++- tests/conftest.py | 52 +++++++++++++----------- 3 files changed, 68 insertions(+), 67 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 81bf48fbb75..1b32d7eec3e 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -7,7 +7,8 @@ import mimetypes import os import shutil -from typing import Dict, Iterable, List, Optional, Set +from pathlib import Path +from typing import Dict, Iterable, List, Optional from pip._vendor.packaging.utils import canonicalize_name @@ -20,7 +21,6 @@ InstallationError, MetadataInconsistent, NetworkConnectionError, - PreviousBuildDirError, VcsHashUnsupported, ) from pip._internal.index.package_finder import PackageFinder @@ -47,7 +47,6 @@ display_path, hash_file, hide_url, - is_installable_dir, ) from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.unpacking import unpack_file @@ -319,21 +318,7 @@ def _ensure_link_req_src_dir( autodelete=True, parallel_builds=parallel_builds, ) - - # If a checkout exists, it's unwise to keep going. version - # inconsistencies are logged later, but do not fail the - # installation. - # FIXME: this won't upgrade when there's an existing - # package unpacked in `req.source_dir` - # TODO: this check is now probably dead code - if is_installable_dir(req.source_dir): - raise PreviousBuildDirError( - "pip can't proceed with requirements '{}' due to a" - "pre-existing build directory ({}). This is likely " - "due to a previous installation that failed . pip is " - "being responsible and not assuming it can delete this. 
" - "Please delete it and try again.".format(req, req.source_dir) - ) + req.ensure_pristine_source_checkout() def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes: # By the time this is called, the requirement's link should have @@ -474,8 +459,6 @@ def _complete_partial_requirements( assert req.link links_to_fully_download[req.link] = req - reqs_with_newly_unpacked_source_dirs: Set[Link] = set() - batch_download = self._batch_download( links_to_fully_download.keys(), temp_dir, @@ -490,28 +473,17 @@ def _complete_partial_requirements( # _prepare_linked_requirement(). self._downloaded[req.link.url] = filepath - # If this is an sdist, we need to unpack it and set the .source_dir - # immediately after downloading, as _prepare_linked_requirement() assumes - # the req is either not downloaded at all, or both downloaded and - # unpacked. The downloading and unpacking is is typically done with - # unpack_url(), but we separate the downloading and unpacking steps here in - # order to use the BatchDownloader. + # If this is an sdist, we need to unpack it after downloading, but the + # .source_dir won't be set up until we are in _prepare_linked_requirement(). + # Add the downloaded archive to the install requirement to unpack after + # preparing the source dir. if not req.is_wheel: - hashes = self._get_linked_req_hashes(req) - assert filepath == _check_download_dir(req.link, temp_dir, hashes) - self._ensure_link_req_src_dir(req, parallel_builds) - unpack_file(filepath, req.source_dir) - reqs_with_newly_unpacked_source_dirs.add(req.link) + req.needs_unpacked_archive(Path(filepath)) # This step is necessary to ensure all lazy wheels are processed # successfully by the 'download', 'wheel', and 'install' commands. 
for req in partially_downloaded_reqs: - self._prepare_linked_requirement( - req, - parallel_builds, - source_dir_exists_already=req.link - in reqs_with_newly_unpacked_source_dirs, - ) + self._prepare_linked_requirement(req, parallel_builds) def prepare_linked_requirement( self, req: InstallRequirement, parallel_builds: bool = False @@ -582,10 +554,7 @@ def prepare_linked_requirements_more( ) def _prepare_linked_requirement( - self, - req: InstallRequirement, - parallel_builds: bool, - source_dir_exists_already: bool = False, + self, req: InstallRequirement, parallel_builds: bool ) -> BaseDistribution: assert req.link link = req.link @@ -617,8 +586,7 @@ def _prepare_linked_requirement( req.link = req.cached_wheel_source_link link = req.link - if not source_dir_exists_already: - self._ensure_link_req_src_dir(req, parallel_builds) + self._ensure_link_req_src_dir(req, parallel_builds) if link.is_existing_dir(): local_file = None diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 542d6c78f96..614c6de9c3d 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -6,6 +6,7 @@ import uuid import zipfile from optparse import Values +from pathlib import Path from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union from pip._vendor.packaging.markers import Marker @@ -17,7 +18,7 @@ from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment -from pip._internal.exceptions import InstallationError +from pip._internal.exceptions import InstallationError, PreviousBuildDirError from pip._internal.locations import get_scheme from pip._internal.metadata import ( BaseDistribution, @@ -47,11 +48,13 @@ backup_dir, display_path, hide_url, + is_installable_dir, redact_auth_from_url, ) from pip._internal.utils.packaging import safe_extra from pip._internal.utils.subprocess import 
runner_with_spinner_message from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds +from pip._internal.utils.unpacking import unpack_file from pip._internal.utils.virtualenv import running_under_virtualenv from pip._internal.vcs import vcs @@ -180,6 +183,9 @@ def __init__( # This requirement needs more preparation before it can be built self.needs_more_preparation = False + # This requirement needs to be unpacked before it can be installed. + self._archive_source: Optional[Path] = None + def __str__(self) -> str: if self.req: s = str(self.req) @@ -645,6 +651,27 @@ def ensure_has_source_dir( parallel_builds=parallel_builds, ) + def needs_unpacked_archive(self, archive_source: Path) -> None: + assert self._archive_source is None + self._archive_source = archive_source + + def ensure_pristine_source_checkout(self) -> None: + """Ensure the source directory has not yet been built in.""" + assert self.source_dir is not None + if self._archive_source is not None: + unpack_file(str(self._archive_source), self.source_dir) + elif is_installable_dir(self.source_dir): + # If a checkout exists, it's unwise to keep going. + # version inconsistencies are logged later, but do not fail + # the installation. + raise PreviousBuildDirError( + "pip can't proceed with requirements '{}' due to a " + "pre-existing build directory ({}). This is likely " + "due to a previous installation that failed . pip is " + "being responsible and not assuming it can delete this. 
" + "Please delete it and try again.".format(self, self.source_dir) + ) + # For editable installations def update_editable(self) -> None: if not self.link: diff --git a/tests/conftest.py b/tests/conftest.py index 25581af9a45..cd9931c66d9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -935,17 +935,21 @@ def html_index_for_packages( pkg_links = "\n".join( f' {pkg}' for pkg in fake_packages.keys() ) - index_html = f"""\ - - - - - Simple index - - -{pkg_links} - -""" + # Output won't be nicely indented because dedent() acts after f-string + # arg insertion. + index_html = dedent( + f"""\ + + + + + Simple index + + + {pkg_links} + + """ + ) # (2) Generate the index.html in a new subdirectory of the temp directory. (html_dir / "index.html").write_text(index_html) @@ -976,18 +980,20 @@ def html_index_for_packages( # write an index.html with the generated download links for each # copied file for this specific package name. download_links_str = "\n".join(download_links) - pkg_index_content = f"""\ - - - - - Links for {pkg} - - -

Links for {pkg}

-{download_links_str} - -""" + pkg_index_content = dedent( + f"""\ + + + + + Links for {pkg} + + +

Links for {pkg}

+ {download_links_str} + + """ + ) with open(pkg_subdir / "index.html", "w") as f: f.write(pkg_index_content) From 39da6e051a30e90b12608ca90e96e554d82fd15f Mon Sep 17 00:00:00 2001 From: Danny McClanahan <1305167+cosmicexplorer@users.noreply.github.com> Date: Mon, 14 Aug 2023 07:55:55 -0400 Subject: [PATCH 658/730] use f-string in exception message --- src/pip/_internal/req/req_install.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 614c6de9c3d..8110114ca14 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -665,11 +665,11 @@ def ensure_pristine_source_checkout(self) -> None: # version inconsistencies are logged later, but do not fail # the installation. raise PreviousBuildDirError( - "pip can't proceed with requirements '{}' due to a " - "pre-existing build directory ({}). This is likely " + f"pip can't proceed with requirements '{self}' due to a " + f"pre-existing build directory ({self.source_dir}). This is likely " "due to a previous installation that failed . pip is " "being responsible and not assuming it can delete this. " - "Please delete it and try again.".format(self, self.source_dir) + "Please delete it and try again." 
) # For editable installations From 361b02bce0a7283bacd1f26bca64e3facd64aecf Mon Sep 17 00:00:00 2001 From: ddelange <14880945+ddelange@users.noreply.github.com> Date: Mon, 14 Aug 2023 19:09:50 +0200 Subject: [PATCH 659/730] Add is_yanked to installation report --- news/12224.feature.rst | 1 + .../_internal/models/installation_report.py | 3 ++ tests/functional/test_install_report.py | 53 +++++++++++++++++++ 3 files changed, 57 insertions(+) create mode 100644 news/12224.feature.rst diff --git a/news/12224.feature.rst b/news/12224.feature.rst new file mode 100644 index 00000000000..5a6977254f5 --- /dev/null +++ b/news/12224.feature.rst @@ -0,0 +1 @@ +Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but still was selected by pip conform PEP 592. diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py index 7f001f35ef2..31c20675144 100644 --- a/src/pip/_internal/models/installation_report.py +++ b/src/pip/_internal/models/installation_report.py @@ -23,6 +23,9 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: # includes editable requirements), and false if the requirement was # downloaded from a PEP 503 index or --find-links. "is_direct": ireq.is_direct, + # is_yanked is true if the requirement was yanked from the index, but + # still was selected by pip conform PEP 592 + "is_yanked": ireq.link.is_yanked if ireq.link else False, # requested is true if the requirement was specified by the user (aka # top level requirement), and false if it was installed as a dependency of a # requirement. 
https://peps.python.org/pep-0376/#requested diff --git a/tests/functional/test_install_report.py b/tests/functional/test_install_report.py index 003b29d3821..a0f8559782a 100644 --- a/tests/functional/test_install_report.py +++ b/tests/functional/test_install_report.py @@ -64,6 +64,59 @@ def test_install_report_dep( assert _install_dict(report)["simple"]["requested"] is False +def test_yanked_version( + script: PipTestEnvironment, data: TestData, tmp_path: Path +) -> None: + """ + Test is_yanked is True when explicitly requesting a yanked package. + Yanked files are always ignored, unless they are the only file that + matches a version specifier that "pins" to an exact version (PEP 592). + """ + report_path = tmp_path / "report.json" + script.pip( + "install", + "simple==3.0", + "--index-url", + data.index_url("yanked"), + "--dry-run", + "--report", + str(report_path), + allow_stderr_warning=True, + ) + report = json.loads(report_path.read_text()) + simple_report = _install_dict(report)["simple"] + assert simple_report["requested"] is True + assert simple_report["is_direct"] is False + assert simple_report["is_yanked"] is True + assert simple_report["metadata"]["version"] == "3.0" + + +def test_skipped_yanked_version( + script: PipTestEnvironment, data: TestData, tmp_path: Path +) -> None: + """ + Test is_yanked is False when not explicitly requesting a yanked package. + Yanked files are always ignored, unless they are the only file that + matches a version specifier that "pins" to an exact version (PEP 592). 
+ """ + report_path = tmp_path / "report.json" + script.pip( + "install", + "simple", + "--index-url", + data.index_url("yanked"), + "--dry-run", + "--report", + str(report_path), + ) + report = json.loads(report_path.read_text()) + simple_report = _install_dict(report)["simple"] + assert simple_report["requested"] is True + assert simple_report["is_direct"] is False + assert simple_report["is_yanked"] is False + assert simple_report["metadata"]["version"] == "2.0" + + @pytest.mark.network def test_install_report_index(script: PipTestEnvironment, tmp_path: Path) -> None: """Test report for sdist obtained from index.""" From 553690b39ecb405fd3fb1504d82161e71b02da40 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 15 Aug 2023 10:49:49 +0800 Subject: [PATCH 660/730] Period --- src/pip/_internal/models/installation_report.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py index 31c20675144..2acc10d1ae8 100644 --- a/src/pip/_internal/models/installation_report.py +++ b/src/pip/_internal/models/installation_report.py @@ -24,7 +24,7 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: # downloaded from a PEP 503 index or --find-links. "is_direct": ireq.is_direct, # is_yanked is true if the requirement was yanked from the index, but - # still was selected by pip conform PEP 592 + # still was selected by pip conform PEP 592. 
"is_yanked": ireq.link.is_yanked if ireq.link else False, # requested is true if the requirement was specified by the user (aka # top level requirement), and false if it was installed as a dependency of a From 3c5e2aed045d68ab49cf2e47bda2826659bc91cc Mon Sep 17 00:00:00 2001 From: ddelange <14880945+ddelange@users.noreply.github.com> Date: Tue, 15 Aug 2023 12:06:56 +0200 Subject: [PATCH 661/730] PR Suggestion Co-authored-by: Paul Moore --- src/pip/_internal/models/installation_report.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py index 2acc10d1ae8..e38e8f1c0a5 100644 --- a/src/pip/_internal/models/installation_report.py +++ b/src/pip/_internal/models/installation_report.py @@ -24,7 +24,7 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: # downloaded from a PEP 503 index or --find-links. "is_direct": ireq.is_direct, # is_yanked is true if the requirement was yanked from the index, but - # still was selected by pip conform PEP 592. + # was still selected by pip to conform to PEP 592. 
"is_yanked": ireq.link.is_yanked if ireq.link else False, # requested is true if the requirement was specified by the user (aka # top level requirement), and false if it was installed as a dependency of a From b4437789a0ed10d8a6c4d76710d512d42a9999ad Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 23 Aug 2023 11:24:51 +0800 Subject: [PATCH 662/730] Fix rtd config --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index b6453d8f0b3..c0d2bba55e9 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -6,7 +6,7 @@ build: python: "3.11" sphinx: - builder: htmldir + builder: dirhtml configuration: docs/html/conf.py python: From 695b9f5ab575cb6043a61d88309a0be038168d0b Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 23 Aug 2023 11:28:18 +0800 Subject: [PATCH 663/730] Upgrade Sphinx to 7.x --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index ef72c8fb722..debfa632b7a 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,4 @@ -sphinx ~= 6.0 +sphinx ~= 7.0 towncrier furo myst_parser From 55205b940d451c517f1e66279a6d5a98dd00d275 Mon Sep 17 00:00:00 2001 From: ddelange <14880945+ddelange@users.noreply.github.com> Date: Fri, 25 Aug 2023 09:43:14 +0200 Subject: [PATCH 664/730] Update installation report docs --- docs/html/reference/installation-report.md | 5 +++++ news/12224.feature.rst | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/html/reference/installation-report.md b/docs/html/reference/installation-report.md index 5823205f977..e0cfcd97e8b 100644 --- a/docs/html/reference/installation-report.md +++ b/docs/html/reference/installation-report.md @@ -56,6 +56,9 @@ package with the following properties: URL reference. `false` if the requirements was provided as a name and version specifier. 
+- `is_yanked`: `true` if the requirement was yanked from the index, but was still + selected by pip conform to [PEP 592](https://peps.python.org/pep-0592/#installers). + - `download_info`: Information about the artifact (to be) downloaded for installation, using the [direct URL data structure](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/). @@ -106,6 +109,7 @@ will produce an output similar to this (metadata abriged for brevity): } }, "is_direct": false, + "is_yanked": false, "requested": true, "metadata": { "name": "pydantic", @@ -133,6 +137,7 @@ will produce an output similar to this (metadata abriged for brevity): } }, "is_direct": true, + "is_yanked": false, "requested": true, "metadata": { "name": "packaging", diff --git a/news/12224.feature.rst b/news/12224.feature.rst index 5a6977254f5..d874265787a 100644 --- a/news/12224.feature.rst +++ b/news/12224.feature.rst @@ -1 +1 @@ -Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but still was selected by pip conform PEP 592. +Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to PEP 592. 
From 510c6acf69cc21f62a10d1c149609890e74bf430 Mon Sep 17 00:00:00 2001 From: ddelange <14880945+ddelange@users.noreply.github.com> Date: Sat, 26 Aug 2023 12:17:40 +0200 Subject: [PATCH 665/730] Filter out yanked links from available versions error message --- news/12225.bugfix.rst | 1 + .../resolution/resolvelib/factory.py | 20 +++++++++++++- tests/functional/test_install.py | 27 +++++++++++++++++++ 3 files changed, 47 insertions(+), 1 deletion(-) create mode 100644 news/12225.bugfix.rst diff --git a/news/12225.bugfix.rst b/news/12225.bugfix.rst new file mode 100644 index 00000000000..e1e0c323dc3 --- /dev/null +++ b/news/12225.bugfix.rst @@ -0,0 +1 @@ +Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message. diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index ed78580ab97..2eb80d4d552 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -603,8 +603,26 @@ def _report_single_requirement_conflict( cands = self._finder.find_all_candidates(req.project_name) skipped_by_requires_python = self._finder.requires_python_skipped_reasons() - versions = [str(v) for v in sorted({c.version for c in cands})] + versions_set: Set[CandidateVersion] = set() + yanked_versions_set: Set[CandidateVersion] = set() + for c in cands: + is_yanked = c.link.is_yanked if c.link else False + if is_yanked: + yanked_versions_set.add(c.version) + else: + versions_set.add(c.version) + + versions = [str(v) for v in sorted(versions_set)] + yanked_versions = [str(v) for v in sorted(yanked_versions_set)] + + if yanked_versions: + # Saying "version X is yanked" isn't entirely accurate. 
+ # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842 + logger.critical( + "Ignored the following yanked versions: %s", + ", ".join(yanked_versions) or "none", + ) if skipped_by_requires_python: logger.critical( "Ignored the following versions that require a different python " diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index 5e8a82fb345..161881419d6 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -2242,6 +2242,33 @@ def test_install_yanked_file_and_print_warning( assert "Successfully installed simple-3.0\n" in result.stdout, str(result) +def test_yanked_version_missing_from_availble_versions_error_message( + script: PipTestEnvironment, data: TestData +) -> None: + """ + Test yanked version is missing from available versions error message. + + Yanked files are always ignored, unless they are the only file that + matches a version specifier that "pins" to an exact version (PEP 592). + """ + result = script.pip( + "install", + "simple==", + "--index-url", + data.index_url("yanked"), + expect_error=True, + ) + # the yanked version (3.0) is filtered out from the output: + expected_warning = ( + "Could not find a version that satisfies the requirement simple== " + "(from versions: 1.0, 2.0)" + ) + assert expected_warning in result.stderr, str(result) + # and mentioned in a separate warning: + expected_warning = "Ignored the following yanked versions: 3.0" + assert expected_warning in result.stderr, str(result) + + def test_error_all_yanked_files_and_no_pin( script: PipTestEnvironment, data: TestData ) -> None: From 69a1e956dae1d6ced4bc6e66883b271f1f7a10e9 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Sat, 26 Aug 2023 10:20:40 +0200 Subject: [PATCH 666/730] Ruff rules C4,C90,PERF --- .pre-commit-config.yaml | 2 +- ...FF-ABE1-48C7-954C-7C3EB229135F.feature.rst | 1 + pyproject.toml | 30 +++++++++++++---- src/pip/_internal/cache.py | 6 ++-- 
src/pip/_internal/cli/autocompletion.py | 5 +-- src/pip/_internal/commands/cache.py | 12 ++----- src/pip/_internal/commands/debug.py | 2 +- src/pip/_internal/commands/list.py | 2 +- src/pip/_internal/locations/_distutils.py | 2 +- .../_internal/models/installation_report.py | 2 +- src/pip/_internal/operations/install/wheel.py | 6 ++-- src/pip/_internal/req/req_uninstall.py | 2 +- tests/functional/test_cache.py | 20 ++++-------- tests/functional/test_help.py | 4 +-- tests/functional/test_list.py | 32 ++++++++----------- tests/lib/__init__.py | 18 +++++------ tests/unit/test_finder.py | 6 ++-- tests/unit/test_logging.py | 14 ++++---- tests/unit/test_req_uninstall.py | 7 ++-- tests/unit/test_self_check_outdated.py | 2 +- tests/unit/test_target_python.py | 16 +++++----- tests/unit/test_vcs.py | 2 +- 22 files changed, 93 insertions(+), 100 deletions(-) create mode 100644 news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.feature.rst diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b0aef0d60b1..1c497c29426 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,7 +23,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.0.270 + rev: v0.0.286 hooks: - id: ruff diff --git a/news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.feature.rst b/news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.feature.rst new file mode 100644 index 00000000000..7f6c1d5612e --- /dev/null +++ b/news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.feature.rst @@ -0,0 +1 @@ +Add ruff rules ASYNC,C4,C90,PERF,PLE,PLR for minor optimizations and to set upper limits on code complexity. 
diff --git a/pyproject.toml b/pyproject.toml index b7c0d154598..c3c21802f7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,9 +74,9 @@ webencodings = "https://github.com/SimonSapin/python-webencodings/raw/master/LIC [tool.ruff] extend-exclude = [ + "_vendor", "./build", ".scratch", - "_vendor", "data", ] ignore = [ @@ -88,21 +88,37 @@ ignore = [ ] line-length = 88 select = [ + "ASYNC", "B", + "C4", + "C90", "E", "F", - "W", "G", - "ISC", "I", + "ISC", + "PERF", + "PLE", + "PLR0", + "W", ] -[tool.ruff.per-file-ignores] -"noxfile.py" = ["G"] -"tests/*" = ["B011"] - [tool.ruff.isort] # We need to explicitly make pip "first party" as it's imported by code in # the docs and tests directories. known-first-party = ["pip"] known-third-party = ["pip._vendor"] + +[tool.ruff.mccabe] +max-complexity = 33 # default is 10 + +[tool.ruff.per-file-ignores] +"noxfile.py" = ["G"] +"src/pip/_internal/*" = ["PERF203"] +"tests/*" = ["B011"] + +[tool.ruff.pylint] +max-args = 15 # default is 5 +max-branches = 28 # default is 12 +max-returns = 13 # default is 6 +max-statements = 134 # default is 50 diff --git a/src/pip/_internal/cache.py b/src/pip/_internal/cache.py index 8d3a664c7d1..f45ac23e95a 100644 --- a/src/pip/_internal/cache.py +++ b/src/pip/_internal/cache.py @@ -78,12 +78,10 @@ def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]: if can_not_cache: return [] - candidates = [] path = self.get_path_for_link(link) if os.path.isdir(path): - for candidate in os.listdir(path): - candidates.append((candidate, path)) - return candidates + return [(candidate, path) for candidate in os.listdir(path)] + return [] def get_path_for_link(self, link: Link) -> str: """Return a directory to store cached items in for link.""" diff --git a/src/pip/_internal/cli/autocompletion.py b/src/pip/_internal/cli/autocompletion.py index 226fe84dc0d..e5950b90696 100644 --- a/src/pip/_internal/cli/autocompletion.py +++ b/src/pip/_internal/cli/autocompletion.py @@ -71,8 +71,9 
@@ def autocomplete() -> None: for opt in subcommand.parser.option_list_all: if opt.help != optparse.SUPPRESS_HELP: - for opt_str in opt._long_opts + opt._short_opts: - options.append((opt_str, opt.nargs)) + options += [ + (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts + ] # filter out previously specified options from available options prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]] diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py index e96d2b4924c..f6430980c36 100644 --- a/src/pip/_internal/commands/cache.py +++ b/src/pip/_internal/commands/cache.py @@ -3,10 +3,10 @@ from optparse import Values from typing import Any, List -import pip._internal.utils.filesystem as filesystem from pip._internal.cli.base_command import Command from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.exceptions import CommandError, PipError +from pip._internal.utils import filesystem from pip._internal.utils.logging import getLogger logger = getLogger(__name__) @@ -151,14 +151,8 @@ def format_for_human(self, files: List[str]) -> None: logger.info("\n".join(sorted(results))) def format_for_abspath(self, files: List[str]) -> None: - if not files: - return - - results = [] - for filename in files: - results.append(filename) - - logger.info("\n".join(sorted(results))) + if files: + logger.info("\n".join(sorted(files))) def remove_cache_items(self, options: Values, args: List[Any]) -> None: if len(args) > 1: diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index 88a4f798d46..564409c6839 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -135,7 +135,7 @@ def show_tags(options: Values) -> None: def ca_bundle_info(config: Configuration) -> str: levels = set() - for key, _ in config.items(): + for key, _ in config.items(): # noqa: PERF102 Configuration has no keys() method. 
levels.add(key.split(".")[0]) if not levels: diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index ac10353194f..2ec456b9548 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -297,7 +297,7 @@ def output_package_listing_columns( # Create and add a separator. if len(data) > 0: - pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes))) + pkg_strings.insert(1, " ".join(("-" * x for x in sizes))) for val in pkg_strings: write_output(val) diff --git a/src/pip/_internal/locations/_distutils.py b/src/pip/_internal/locations/_distutils.py index 92bd93179c5..48689f5fbe4 100644 --- a/src/pip/_internal/locations/_distutils.py +++ b/src/pip/_internal/locations/_distutils.py @@ -89,7 +89,7 @@ def distutils_scheme( # finalize_options(); we only want to override here if the user # has explicitly requested it hence going back to the config if "install_lib" in d.get_option_dict("install"): - scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + scheme.update({"purelib": i.install_lib, "platlib": i.install_lib}) if running_under_virtualenv(): if home: diff --git a/src/pip/_internal/models/installation_report.py b/src/pip/_internal/models/installation_report.py index 7f001f35ef2..da0334bd5d4 100644 --- a/src/pip/_internal/models/installation_report.py +++ b/src/pip/_internal/models/installation_report.py @@ -33,7 +33,7 @@ def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]: } if ireq.user_supplied and ireq.extras: # For top level requirements, the list of requested extras, if any. 
- res["requested_extras"] = list(sorted(ireq.extras)) + res["requested_extras"] = sorted(ireq.extras) return res def to_dict(self) -> Dict[str, Any]: diff --git a/src/pip/_internal/operations/install/wheel.py b/src/pip/_internal/operations/install/wheel.py index a8cd1330f0f..58a7730597b 100644 --- a/src/pip/_internal/operations/install/wheel.py +++ b/src/pip/_internal/operations/install/wheel.py @@ -267,9 +267,9 @@ def get_csv_rows_for_installed( path = _fs_to_record_path(f, lib_dir) digest, length = rehash(f) installed_rows.append((path, digest, length)) - for installed_record_path in installed.values(): - installed_rows.append((installed_record_path, "", "")) - return installed_rows + return installed_rows + [ + (installed_record_path, "", "") for installed_record_path in installed.values() + ] def get_console_script_specs(console: Dict[str, str]) -> List[str]: diff --git a/src/pip/_internal/req/req_uninstall.py b/src/pip/_internal/req/req_uninstall.py index ad5178e76ff..861aa4f2286 100644 --- a/src/pip/_internal/req/req_uninstall.py +++ b/src/pip/_internal/req/req_uninstall.py @@ -274,7 +274,7 @@ def stash(self, path: str) -> str: def commit(self) -> None: """Commits the uninstall by removing stashed files.""" - for _, save_dir in self._save_dirs.items(): + for save_dir in self._save_dirs.values(): save_dir.cleanup() self._moves = [] self._save_dirs = {} diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py index 788abdd2be5..a6640cbbf71 100644 --- a/tests/functional/test_cache.py +++ b/tests/functional/test_cache.py @@ -36,10 +36,7 @@ def http_cache_files(http_cache_dir: str) -> List[str]: return [] filenames = glob(os.path.join(destination, "*")) - files = [] - for filename in filenames: - files.append(os.path.join(destination, filename)) - return files + return [os.path.join(destination, filename) for filename in filenames] @pytest.fixture @@ -50,10 +47,7 @@ def wheel_cache_files(wheel_cache_dir: str) -> List[str]: return [] filenames = 
glob(os.path.join(destination, "*.whl")) - files = [] - for filename in filenames: - files.append(os.path.join(destination, filename)) - return files + return [os.path.join(destination, filename) for filename in filenames] @pytest.fixture @@ -107,7 +101,7 @@ def list_matches_wheel(wheel_name: str, result: TestPipResult) -> bool: `- foo-1.2.3-py3-none-any.whl `.""" lines = result.stdout.splitlines() expected = f" - {wheel_name}-py3-none-any.whl " - return any(map(lambda line: line.startswith(expected), lines)) + return any((line.startswith(expected) for line in lines)) def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool: @@ -120,11 +114,9 @@ def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool: lines = result.stdout.splitlines() expected = f"{wheel_name}-py3-none-any.whl" return any( - map( - lambda line: ( - os.path.basename(line).startswith(expected) and os.path.exists(line) - ), - lines, + ( + (os.path.basename(line).startswith(expected) and os.path.exists(line)) + for line in lines ) ) diff --git a/tests/functional/test_help.py b/tests/functional/test_help.py index dba41af5f79..9627a121531 100644 --- a/tests/functional/test_help.py +++ b/tests/functional/test_help.py @@ -102,8 +102,8 @@ def test_help_commands_equally_functional(in_memory_pip: InMemoryPip) -> None: results = list(map(in_memory_pip.pip, ("help", "--help"))) results.append(in_memory_pip.pip()) - out = map(lambda x: x.stdout, results) - ret = map(lambda x: x.returncode, results) + out = (x.stdout for x in results) + ret = (x.returncode for x in results) msg = '"pip --help" != "pip help" != "pip"' assert len(set(out)) == 1, "output of: " + msg diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index 4f2be8387f2..cf8900a32bd 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -273,25 +273,19 @@ def test_outdated_flag(script: PipTestEnvironment, data: TestData) -> None: "latest_version": "3.0", 
"latest_filetype": "sdist", } in json_output - assert ( - dict( - name="simplewheel", - version="1.0", - latest_version="2.0", - latest_filetype="wheel", - ) - in json_output - ) - assert ( - dict( - name="pip-test-package", - version="0.1", - latest_version="0.1.1", - latest_filetype="sdist", - editable_project_location="", - ) - in json_output - ) + assert { + "name": "simplewheel", + "version": "1.0", + "latest_version": "2.0", + "latest_filetype": "wheel", + } in json_output + assert { + "name": "pip-test-package", + "version": "0.1", + "latest_version": "0.1.1", + "latest_filetype": "sdist", + "editable_project_location": "", + } in json_output assert "simple2" not in {p["name"] for p in json_output} diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index b827f88ba13..3c8ca98f791 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -740,21 +740,19 @@ def easy_install(self, *args: str, **kwargs: Any) -> TestPipResult: def assert_installed(self, **kwargs: str) -> None: ret = self.pip("list", "--format=json") - installed = set( + installed = { (canonicalize_name(val["name"]), val["version"]) for val in json.loads(ret.stdout) - ) - expected = set((canonicalize_name(k), v) for k, v in kwargs.items()) + } + expected = {(canonicalize_name(k), v) for k, v in kwargs.items()} assert expected <= installed, "{!r} not all in {!r}".format(expected, installed) def assert_not_installed(self, *args: str) -> None: ret = self.pip("list", "--format=json") - installed = set( - canonicalize_name(val["name"]) for val in json.loads(ret.stdout) - ) + installed = {canonicalize_name(val["name"]) for val in json.loads(ret.stdout)} # None of the given names should be listed as installed, i.e. their # intersection should be empty. 
- expected = set(canonicalize_name(k) for k in args) + expected = {canonicalize_name(k) for k in args} assert not (expected & installed), "{!r} contained in {!r}".format( expected, installed ) @@ -798,16 +796,16 @@ def prefix_match(path: str, prefix_path: StrPath) -> bool: return path.startswith(prefix) start_keys = { - k for k in start.keys() if not any([prefix_match(k, i) for i in ignore]) + k for k in start.keys() if not any(prefix_match(k, i) for i in ignore) } - end_keys = {k for k in end.keys() if not any([prefix_match(k, i) for i in ignore])} + end_keys = {k for k in end.keys() if not any(prefix_match(k, i) for i in ignore)} deleted = {k: start[k] for k in start_keys.difference(end_keys)} created = {k: end[k] for k in end_keys.difference(start_keys)} updated = {} for k in start_keys.intersection(end_keys): if start[k].size != end[k].size: updated[k] = end[k] - return dict(deleted=deleted, created=created, updated=updated) + return {"deleted": deleted, "created": created, "updated": updated} def assert_all_changes( diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py index 3404d1498e3..393e83d5a95 100644 --- a/tests/unit/test_finder.py +++ b/tests/unit/test_finder.py @@ -234,7 +234,7 @@ def test_link_sorting(self) -> None: ) sort_key = evaluator._sort_key results = sorted(links, key=sort_key, reverse=True) - results2 = sorted(reversed(links), key=sort_key, reverse=True) + results2 = sorted(links, key=sort_key, reverse=True) assert links == results, results assert links == results2, results2 @@ -261,7 +261,7 @@ def test_link_sorting_wheels_with_build_tags(self) -> None: candidate_evaluator = CandidateEvaluator.create("my-project") sort_key = candidate_evaluator._sort_key results = sorted(links, key=sort_key, reverse=True) - results2 = sorted(reversed(links), key=sort_key, reverse=True) + results2 = sorted(links, key=sort_key, reverse=True) assert links == results, results assert links == results2, results2 @@ -301,7 +301,7 @@ def 
test_build_tag_is_less_important_than_other_tags(self) -> None: ) sort_key = evaluator._sort_key results = sorted(links, key=sort_key, reverse=True) - results2 = sorted(reversed(links), key=sort_key, reverse=True) + results2 = sorted(links, key=sort_key, reverse=True) assert links == results, results assert links == results2, results2 diff --git a/tests/unit/test_logging.py b/tests/unit/test_logging.py index 3ba6ed57ca5..9d507d74277 100644 --- a/tests/unit/test_logging.py +++ b/tests/unit/test_logging.py @@ -21,13 +21,13 @@ class TestIndentingFormatter: def make_record(self, msg: str, level_name: str) -> logging.LogRecord: level_number = getattr(logging, level_name) - attrs = dict( - msg=msg, - created=1547704837.040001 + time.timezone, - msecs=40, - levelname=level_name, - levelno=level_number, - ) + attrs = { + "msg": msg, + "created": 1547704837.040001 + time.timezone, + "msecs": 40, + "levelname": level_name, + "levelno": level_number, + } record = logging.makeLogRecord(attrs) return record diff --git a/tests/unit/test_req_uninstall.py b/tests/unit/test_req_uninstall.py index b4ae97350e0..6a846e20272 100644 --- a/tests/unit/test_req_uninstall.py +++ b/tests/unit/test_req_uninstall.py @@ -59,10 +59,9 @@ def iter_declared_entries(self) -> Optional[Iterator[str]]: def test_compressed_listing(tmpdir: Path) -> None: def in_tmpdir(paths: List[str]) -> List[str]: - li = [] - for path in paths: - li.append(str(os.path.join(tmpdir, path.replace("/", os.path.sep)))) - return li + return [ + str(os.path.join(tmpdir, path.replace("/", os.path.sep))) for path in paths + ] sample = in_tmpdir( [ diff --git a/tests/unit/test_self_check_outdated.py b/tests/unit/test_self_check_outdated.py index c025ff30275..6b2333f188c 100644 --- a/tests/unit/test_self_check_outdated.py +++ b/tests/unit/test_self_check_outdated.py @@ -40,7 +40,7 @@ def test_pip_self_version_check_calls_underlying_implementation( ) -> None: # GIVEN mock_session = Mock() - fake_options = 
Values(dict(cache_dir=str(tmpdir))) + fake_options = Values({"cache_dir": str(tmpdir)}) # WHEN self_outdated_check.pip_self_version_check(mock_session, fake_options) diff --git a/tests/unit/test_target_python.py b/tests/unit/test_target_python.py index b659c61fe08..bc171376941 100644 --- a/tests/unit/test_target_python.py +++ b/tests/unit/test_target_python.py @@ -54,18 +54,18 @@ def test_init__py_version_info_none(self) -> None: "kwargs, expected", [ ({}, ""), - (dict(py_version_info=(3, 6)), "version_info='3.6'"), + ({"py_version_info": (3, 6)}, "version_info='3.6'"), ( - dict(platforms=["darwin"], py_version_info=(3, 6)), + {"platforms": ["darwin"], "py_version_info": (3, 6)}, "platforms=['darwin'] version_info='3.6'", ), ( - dict( - platforms=["darwin"], - py_version_info=(3, 6), - abis=["cp36m"], - implementation="cp", - ), + { + "platforms": ["darwin"], + "py_version_info": (3, 6), + "abis": ["cp36m"], + "implementation": "cp", + }, ( "platforms=['darwin'] version_info='3.6' abis=['cp36m'] " "implementation='cp'" diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py index 38daaa0f21d..3ecc69abfcb 100644 --- a/tests/unit/test_vcs.py +++ b/tests/unit/test_vcs.py @@ -73,7 +73,7 @@ def test_rev_options_repr() -> None: Git, ["HEAD", "opt1", "opt2"], ["123", "opt1", "opt2"], - dict(extra_args=["opt1", "opt2"]), + {"extra_args": ["opt1", "opt2"]}, ), ], ) From 0a24a001fbe451aa399063555634f8d971776a21 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Mon, 28 Aug 2023 15:04:54 +0200 Subject: [PATCH 667/730] Fix issues raised in code review --- ...rst => 4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst} | 0 pyproject.toml | 1 + src/pip/_internal/commands/debug.py | 5 +---- src/pip/_internal/commands/list.py | 2 +- tests/functional/test_cache.py | 2 +- tests/lib/__init__.py | 6 ++---- tests/unit/test_finder.py | 6 +++--- 7 files changed, 9 insertions(+), 13 deletions(-) rename news/{4A0C40FF-ABE1-48C7-954C-7C3EB229135F.feature.rst => 
4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst} (100%) diff --git a/news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.feature.rst b/news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst similarity index 100% rename from news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.feature.rst rename to news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst diff --git a/pyproject.toml b/pyproject.toml index c3c21802f7f..7a4fe62463f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -116,6 +116,7 @@ max-complexity = 33 # default is 10 "noxfile.py" = ["G"] "src/pip/_internal/*" = ["PERF203"] "tests/*" = ["B011"] +"tests/unit/test_finder.py" = ["C414"] [tool.ruff.pylint] max-args = 15 # default is 5 diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index 564409c6839..3d6416023f2 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -134,10 +134,7 @@ def show_tags(options: Values) -> None: def ca_bundle_info(config: Configuration) -> str: - levels = set() - for key, _ in config.items(): # noqa: PERF102 Configuration has no keys() method. - levels.add(key.split(".")[0]) - + levels = {key.split(".")[0] for key, _ in config.items()} # noqa: PERF102 if not levels: return "Not specified" diff --git a/src/pip/_internal/commands/list.py b/src/pip/_internal/commands/list.py index 2ec456b9548..e551dda9a96 100644 --- a/src/pip/_internal/commands/list.py +++ b/src/pip/_internal/commands/list.py @@ -297,7 +297,7 @@ def output_package_listing_columns( # Create and add a separator. 
if len(data) > 0: - pkg_strings.insert(1, " ".join(("-" * x for x in sizes))) + pkg_strings.insert(1, " ".join("-" * x for x in sizes)) for val in pkg_strings: write_output(val) diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py index a6640cbbf71..8bee7e4fc51 100644 --- a/tests/functional/test_cache.py +++ b/tests/functional/test_cache.py @@ -101,7 +101,7 @@ def list_matches_wheel(wheel_name: str, result: TestPipResult) -> bool: `- foo-1.2.3-py3-none-any.whl `.""" lines = result.stdout.splitlines() expected = f" - {wheel_name}-py3-none-any.whl " - return any((line.startswith(expected) for line in lines)) + return any(line.startswith(expected) for line in lines) def list_matches_wheel_abspath(wheel_name: str, result: TestPipResult) -> bool: diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 3c8ca98f791..a48423570c4 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -795,10 +795,8 @@ def prefix_match(path: str, prefix_path: StrPath) -> bool: prefix = prefix.rstrip(os.path.sep) + os.path.sep return path.startswith(prefix) - start_keys = { - k for k in start.keys() if not any(prefix_match(k, i) for i in ignore) - } - end_keys = {k for k in end.keys() if not any(prefix_match(k, i) for i in ignore)} + start_keys = {k for k in start if not any(prefix_match(k, i) for i in ignore)} + end_keys = {k for k in end if not any(prefix_match(k, i) for i in ignore)} deleted = {k: start[k] for k in start_keys.difference(end_keys)} created = {k: end[k] for k in end_keys.difference(start_keys)} updated = {} diff --git a/tests/unit/test_finder.py b/tests/unit/test_finder.py index 393e83d5a95..3404d1498e3 100644 --- a/tests/unit/test_finder.py +++ b/tests/unit/test_finder.py @@ -234,7 +234,7 @@ def test_link_sorting(self) -> None: ) sort_key = evaluator._sort_key results = sorted(links, key=sort_key, reverse=True) - results2 = sorted(links, key=sort_key, reverse=True) + results2 = sorted(reversed(links), key=sort_key, reverse=True) 
assert links == results, results assert links == results2, results2 @@ -261,7 +261,7 @@ def test_link_sorting_wheels_with_build_tags(self) -> None: candidate_evaluator = CandidateEvaluator.create("my-project") sort_key = candidate_evaluator._sort_key results = sorted(links, key=sort_key, reverse=True) - results2 = sorted(links, key=sort_key, reverse=True) + results2 = sorted(reversed(links), key=sort_key, reverse=True) assert links == results, results assert links == results2, results2 @@ -301,7 +301,7 @@ def test_build_tag_is_less_important_than_other_tags(self) -> None: ) sort_key = evaluator._sort_key results = sorted(links, key=sort_key, reverse=True) - results2 = sorted(links, key=sort_key, reverse=True) + results2 = sorted(reversed(links), key=sort_key, reverse=True) assert links == results, results assert links == results2, results2 From c127512f13f933a15123f90489e36192060889a5 Mon Sep 17 00:00:00 2001 From: studioj <22102283+studioj@users.noreply.github.com> Date: Thu, 24 Aug 2023 23:23:46 +0200 Subject: [PATCH 668/730] small update for authentication.md --- docs/html/topics/authentication.md | 2 +- news/zhsdgdlsjgksdfj.trivial.rst | 0 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 news/zhsdgdlsjgksdfj.trivial.rst diff --git a/docs/html/topics/authentication.md b/docs/html/topics/authentication.md index 966ac3e7a0d..a2649071762 100644 --- a/docs/html/topics/authentication.md +++ b/docs/html/topics/authentication.md @@ -68,7 +68,7 @@ man pages][netrc-docs]. pip supports loading credentials stored in your keyring using the {pypi}`keyring` library, which can be enabled py passing `--keyring-provider` with a value of `auto`, `disabled`, `import`, or `subprocess`. 
The default -value `auto` respects `--no-input` and not query keyring at all if the option +value `auto` respects `--no-input` and does not query keyring at all if the option is used; otherwise it tries the `import`, `subprocess`, and `disabled` providers (in this order) and uses the first one that works. diff --git a/news/zhsdgdlsjgksdfj.trivial.rst b/news/zhsdgdlsjgksdfj.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From 0c0099b23b1109cd26c66def926ffab4c99f73cd Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Tue, 29 Aug 2023 11:31:42 +0200 Subject: [PATCH 669/730] Ruff misidentifies config as a dict Co-authored-by: Tzu-ping Chung --- src/pip/_internal/commands/debug.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index 3d6416023f2..f76e033df1a 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -134,6 +134,8 @@ def show_tags(options: Values) -> None: def ca_bundle_info(config: Configuration) -> str: + # Ruff misidentifies config as a dict. + # Configuration does not have support the mapping interface. levels = {key.split(".")[0] for key, _ in config.items()} # noqa: PERF102 if not levels: return "Not specified" From f61250303515e04c8bfc5306d91686cc0d661ee9 Mon Sep 17 00:00:00 2001 From: Paul Ganssle <1377457+pganssle@users.noreply.github.com> Date: Thu, 31 Aug 2023 04:28:31 -0400 Subject: [PATCH 670/730] Remove uses of `utcnow` in non-vendored code (#12006) * Remove reference to utcnow This cleans up some of the datetime handling in the self check. Note that this changes the format of the state file, since the datetime now uses ``.isoformat()`` instead of ``.strftime``. Reading an outdated state file will still work on Python 3.11+, but not on earlier versions. 
* Use aware datetime object in x509.CertificateBuilder --- news/12005.bugfix.rst | 1 + src/pip/_internal/self_outdated_check.py | 15 ++++++--------- tests/lib/certs.py | 6 +++--- tests/unit/test_self_check_outdated.py | 11 ++++++++--- 4 files changed, 18 insertions(+), 15 deletions(-) create mode 100644 news/12005.bugfix.rst diff --git a/news/12005.bugfix.rst b/news/12005.bugfix.rst new file mode 100644 index 00000000000..98a3e5112df --- /dev/null +++ b/news/12005.bugfix.rst @@ -0,0 +1 @@ +Removed uses of ``datetime.datetime.utcnow`` from non-vendored code. diff --git a/src/pip/_internal/self_outdated_check.py b/src/pip/_internal/self_outdated_check.py index 41cc42c5677..eefbc498b3f 100644 --- a/src/pip/_internal/self_outdated_check.py +++ b/src/pip/_internal/self_outdated_check.py @@ -28,8 +28,7 @@ from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace from pip._internal.utils.misc import ensure_dir -_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" - +_WEEK = datetime.timedelta(days=7) logger = logging.getLogger(__name__) @@ -73,12 +72,10 @@ def get(self, current_time: datetime.datetime) -> Optional[str]: if "pypi_version" not in self._state: return None - seven_days_in_seconds = 7 * 24 * 60 * 60 - # Determine if we need to refresh the state - last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT) - seconds_since_last_check = (current_time - last_check).total_seconds() - if seconds_since_last_check > seven_days_in_seconds: + last_check = datetime.datetime.fromisoformat(self._state["last_check"]) + time_since_last_check = current_time - last_check + if time_since_last_check > _WEEK: return None return self._state["pypi_version"] @@ -100,7 +97,7 @@ def set(self, pypi_version: str, current_time: datetime.datetime) -> None: # Include the key so it's easy to tell which pip wrote the # file. 
"key": self.key, - "last_check": current_time.strftime(_DATE_FMT), + "last_check": current_time.isoformat(), "pypi_version": pypi_version, } @@ -229,7 +226,7 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non try: upgrade_prompt = _self_version_check_logic( state=SelfCheckState(cache_dir=options.cache_dir), - current_time=datetime.datetime.utcnow(), + current_time=datetime.datetime.now(datetime.timezone.utc), local_version=installed_dist.version, get_remote_version=functools.partial( _get_current_remote_pip_version, session, options diff --git a/tests/lib/certs.py b/tests/lib/certs.py index 54b484ac0e7..9e6542d2d57 100644 --- a/tests/lib/certs.py +++ b/tests/lib/certs.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Tuple from cryptography import x509 @@ -23,8 +23,8 @@ def make_tls_cert(hostname: str) -> Tuple[x509.Certificate, rsa.RSAPrivateKey]: .issuer_name(issuer) .public_key(key.public_key()) .serial_number(x509.random_serial_number()) - .not_valid_before(datetime.utcnow()) - .not_valid_after(datetime.utcnow() + timedelta(days=10)) + .not_valid_before(datetime.now(timezone.utc)) + .not_valid_after(datetime.now(timezone.utc) + timedelta(days=10)) .add_extension( x509.SubjectAlternativeName([x509.DNSName(hostname)]), critical=False, diff --git a/tests/unit/test_self_check_outdated.py b/tests/unit/test_self_check_outdated.py index c025ff30275..011df08ae14 100644 --- a/tests/unit/test_self_check_outdated.py +++ b/tests/unit/test_self_check_outdated.py @@ -49,7 +49,9 @@ def test_pip_self_version_check_calls_underlying_implementation( mocked_state.assert_called_once_with(cache_dir=str(tmpdir)) mocked_function.assert_called_once_with( state=mocked_state(cache_dir=str(tmpdir)), - current_time=datetime.datetime(1970, 1, 2, 11, 0, 0), + current_time=datetime.datetime( + 1970, 1, 2, 11, 0, 0, tzinfo=datetime.timezone.utc + ), local_version=ANY, 
get_remote_version=ANY, ) @@ -167,7 +169,10 @@ def test_writes_expected_statefile(self, tmpdir: Path) -> None: # WHEN state = self_outdated_check.SelfCheckState(cache_dir=str(cache_dir)) - state.set("1.0.0", datetime.datetime(2000, 1, 1, 0, 0, 0)) + state.set( + "1.0.0", + datetime.datetime(2000, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc), + ) # THEN assert state._statefile_path == os.fspath(expected_path) @@ -175,6 +180,6 @@ def test_writes_expected_statefile(self, tmpdir: Path) -> None: contents = expected_path.read_text() assert json.loads(contents) == { "key": sys.prefix, - "last_check": "2000-01-01T00:00:00Z", + "last_check": "2000-01-01T00:00:00+00:00", "pypi_version": "1.0.0", } From 50c49f1d8340980a80f2d5248a66d62d07697358 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Fri, 1 Sep 2023 22:41:00 +0200 Subject: [PATCH 671/730] GitHub Actions: setup-python allow-prereleases for 3.12 (#12252) --- .github/workflows/ci.yml | 6 +++--- news/12252.trivial.rst | 0 2 files changed, 3 insertions(+), 3 deletions(-) create mode 100644 news/12252.trivial.rst diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 50ec976afce..41d3ab9463a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -109,14 +109,14 @@ jobs: - "3.9" - "3.10" - "3.11" - - key: "3.12" - full: "3.12-dev" + - "3.12" steps: - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: - python-version: ${{ matrix.python.full || matrix.python }} + python-version: ${{ matrix.python }} + allow-prereleases: true - name: Install Ubuntu dependencies if: matrix.os == 'Ubuntu' diff --git a/news/12252.trivial.rst b/news/12252.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From a88e73b29870d0682049ee88b4d9f6239e5f238b Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Sat, 2 Sep 2023 12:12:54 +0200 Subject: [PATCH 672/730] Fix typos in dep resolution notes doc --- docs/html/topics/more-dependency-resolution.md | 6 +++--- 1 file changed, 3 
insertions(+), 3 deletions(-) diff --git a/docs/html/topics/more-dependency-resolution.md b/docs/html/topics/more-dependency-resolution.md index 31967a6a920..1c7836e5c0f 100644 --- a/docs/html/topics/more-dependency-resolution.md +++ b/docs/html/topics/more-dependency-resolution.md @@ -8,7 +8,7 @@ and this article is intended to help readers understand what is happening ```{note} This document is a work in progress. The details included are accurate (at the time of writing), but there is additional information, in particular around -pip's interface with resolvelib, which have not yet been included. +pip's interface with resolvelib, which has not yet been included. Contributions to improve this document are welcome. ``` @@ -26,7 +26,7 @@ The practical implication of that is that there will always be some situations where pip cannot determine what to install in a reasonable length of time. We make every effort to ensure that such situations happen rarely, but eliminating them altogether isn't even theoretically possible. We'll discuss what options -yopu have if you hit a problem situation like this a little later. +you have if you hit a problem situation like this a little later. ## Python specific issues @@ -136,7 +136,7 @@ operations: that satisfy them. This is essentially where the finder interacts with the resolver. * `is_satisfied_by` - checks if a candidate satisfies a requirement. This is - basically the implementation of what a requirement meams. + basically the implementation of what a requirement means. * `get_dependencies` - get the dependency metadata for a candidate. This is the implementation of the process of getting and reading package metadata. From 7c5b2f2ca9dbb4bc2ff638fe09a11e332fb1123a Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Tue, 5 Sep 2023 17:31:55 -0500 Subject: [PATCH 673/730] Update security policy (#12254) Provide a link to the CNA/PSRT disclosure process. 
--- SECURITY.md | 11 +++++++++-- news/12254.process.rst | 1 + 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 news/12254.process.rst diff --git a/SECURITY.md b/SECURITY.md index 4e423805aee..e75a1c0de68 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,3 +1,10 @@ -# Security and Vulnerability Reporting +# Security Policy -If you find any security issues, please report to [security@python.org](mailto:security@python.org) +## Reporting a Vulnerability + +Please read the guidelines on reporting security issues [on the +official website](https://www.python.org/dev/security/) for +instructions on how to report a security-related problem to +the Python Security Response Team responsibly. + +To reach the response team, email `security at python dot org`. diff --git a/news/12254.process.rst b/news/12254.process.rst new file mode 100644 index 00000000000..e546902685b --- /dev/null +++ b/news/12254.process.rst @@ -0,0 +1 @@ +Added reference to `vulnerability reporting guidelines `_ to pip's security policy. From af43e139b626ab67e5bccae5fa1645b1dbb68fec Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 6 Sep 2023 15:39:11 +0800 Subject: [PATCH 674/730] Drive by split() limit to improve performance --- src/pip/_internal/commands/debug.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index f76e033df1a..1b1fd3ea5cc 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -136,7 +136,7 @@ def show_tags(options: Values) -> None: def ca_bundle_info(config: Configuration) -> str: # Ruff misidentifies config as a dict. # Configuration does not have support the mapping interface. 
- levels = {key.split(".")[0] for key, _ in config.items()} # noqa: PERF102 + levels = {key.split(".", 1)[0] for key, _ in config.items()} # noqa: PERF102 if not levels: return "Not specified" From 99a00f0a8deee30dafb7448eefdbc1c9b3b6062d Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 6 Sep 2023 11:36:19 +0200 Subject: [PATCH 675/730] pre-commit autoupdate except mypy --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1c497c29426..c8d81deed7a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,13 +17,13 @@ repos: exclude: .patch - repo: https://github.com/psf/black - rev: 23.1.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.0.286 + rev: v0.0.287 hooks: - id: ruff From 2281e91d4e19ece6b279133c4eaf2632fcf204bc Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Wed, 6 Sep 2023 11:44:45 +0200 Subject: [PATCH 676/730] pre-commit autoupdate except mypy --- news/12261.trivial.rst | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 news/12261.trivial.rst diff --git a/news/12261.trivial.rst b/news/12261.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From 21bfe401a96ddecb2a827b9b3cd5ff1b833b151f Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 6 Sep 2023 11:50:10 +0200 Subject: [PATCH 677/730] use more stable sort key --- src/pip/_internal/resolution/resolvelib/resolver.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index 4c53dfb25c1..2e4941da814 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -104,8 +104,9 @@ def resolve( raise error from e req_set = RequirementSet(check_supported_wheels=check_supported_wheels) - # sort 
to ensure base candidates come before candidates with extras - for candidate in sorted(result.mapping.values(), key=lambda c: c.name): + # process candidates with extras last to ensure their base equivalent is already in the req_set if appropriate + # Python's sort is stable so using a binary key function keeps relative order within both subsets + for candidate in sorted(result.mapping.values(), key=lambda c: c.name != c.project_name): ireq = candidate.get_install_requirement() if ireq is None: if candidate.name != candidate.project_name: From 0de374e4df5707955fc6bf9602ee86ea21c8f258 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 6 Sep 2023 13:52:41 +0200 Subject: [PATCH 678/730] review comment: return iterator instead of list --- .../resolution/resolvelib/factory.py | 64 +++++++++---------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 0eb7a1c662e..81f482c8626 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -1,4 +1,5 @@ import contextlib +import itertools import functools import logging from typing import ( @@ -447,7 +448,7 @@ def find_candidates( def _make_requirements_from_install_req( self, ireq: InstallRequirement, requested_extras: Iterable[str] - ) -> List[Requirement]: + ) -> Iterator[Requirement]: """ Returns requirement objects associated with the given InstallRequirement. 
In most cases this will be a single object but the following special cases exist: @@ -463,34 +464,32 @@ def _make_requirements_from_install_req( ireq.name, ireq.markers, ) - return [] - if not ireq.link: + yield from () + elif not ireq.link: if ireq.extras and ireq.req is not None and ireq.req.specifier: - return [ - SpecifierRequirement(ireq, drop_extras=True), - SpecifierRequirement(ireq), - ] + yield SpecifierRequirement(ireq, drop_extras=True), + yield SpecifierRequirement(ireq) + else: + self._fail_if_link_is_unsupported_wheel(ireq.link) + cand = self._make_candidate_from_link( + ireq.link, + extras=frozenset(ireq.extras), + template=ireq, + name=canonicalize_name(ireq.name) if ireq.name else None, + version=None, + ) + if cand is None: + # There's no way we can satisfy a URL requirement if the underlying + # candidate fails to build. An unnamed URL must be user-supplied, so + # we fail eagerly. If the URL is named, an unsatisfiable requirement + # can make the resolver do the right thing, either backtrack (and + # maybe find some other requirement that's buildable) or raise a + # ResolutionImpossible eventually. + if not ireq.name: + raise self._build_failures[ireq.link] + yield UnsatisfiableRequirement(canonicalize_name(ireq.name)) else: - return [SpecifierRequirement(ireq)] - self._fail_if_link_is_unsupported_wheel(ireq.link) - cand = self._make_candidate_from_link( - ireq.link, - extras=frozenset(ireq.extras), - template=ireq, - name=canonicalize_name(ireq.name) if ireq.name else None, - version=None, - ) - if cand is None: - # There's no way we can satisfy a URL requirement if the underlying - # candidate fails to build. An unnamed URL must be user-supplied, so - # we fail eagerly. If the URL is named, an unsatisfiable requirement - # can make the resolver do the right thing, either backtrack (and - # maybe find some other requirement that's buildable) or raise a - # ResolutionImpossible eventually. 
- if not ireq.name: - raise self._build_failures[ireq.link] - return [UnsatisfiableRequirement(canonicalize_name(ireq.name))] - return [self.make_requirement_from_candidate(cand)] + yield self.make_requirement_from_candidate(cand) def collect_root_requirements( self, root_ireqs: List[InstallRequirement] @@ -511,13 +510,14 @@ def collect_root_requirements( else: collected.constraints[name] = Constraint.from_ireq(ireq) else: - reqs = self._make_requirements_from_install_req( - ireq, - requested_extras=(), + reqs = list( + self._make_requirements_from_install_req( + ireq, + requested_extras=(), + ) ) if not reqs: continue - template = reqs[0] if ireq.user_supplied and template.name not in collected.user_requested: collected.user_requested[template.name] = i @@ -543,7 +543,7 @@ def make_requirements_from_spec( specifier: str, comes_from: Optional[InstallRequirement], requested_extras: Iterable[str] = (), - ) -> List[Requirement]: + ) -> Iterator[Requirement]: """ Returns requirement objects associated with the given specifier. 
In most cases this will be a single object but the following special cases exist: From 5a0167902261b97df768cbcf4757b665cd39229e Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 6 Sep 2023 13:54:28 +0200 Subject: [PATCH 679/730] Update src/pip/_internal/req/constructors.py Co-authored-by: Tzu-ping Chung --- src/pip/_internal/req/constructors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index c03ae718e90..a40191954f8 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -76,7 +76,7 @@ def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requireme post: Optional[str] = match.group(3) assert pre is not None and post is not None extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else "" - return Requirement(pre + extras + post) + return Requirement(f"{pre}{extras}{post}") def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]: From 4e73e3e96e99f79d8458517278f67e33796a7fd0 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 6 Sep 2023 14:39:51 +0200 Subject: [PATCH 680/730] review comment: subclass instead of constructor flag --- .../resolution/resolvelib/factory.py | 4 ++-- .../resolution/resolvelib/requirements.py | 24 ++++++++++--------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 81f482c8626..af13a33215b 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -1,5 +1,4 @@ import contextlib -import itertools import functools import logging from typing import ( @@ -63,6 +62,7 @@ ExplicitRequirement, RequiresPythonRequirement, SpecifierRequirement, + SpecifierWithoutExtrasRequirement, UnsatisfiableRequirement, ) @@ -467,7 +467,7 @@ def 
_make_requirements_from_install_req( yield from () elif not ireq.link: if ireq.extras and ireq.req is not None and ireq.req.specifier: - yield SpecifierRequirement(ireq, drop_extras=True), + yield SpecifierWithoutExtrasRequirement(ireq), yield SpecifierRequirement(ireq) else: self._fail_if_link_is_unsupported_wheel(ireq.link) diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index becbd6c4bcc..9c2512823a3 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -41,18 +41,9 @@ def is_satisfied_by(self, candidate: Candidate) -> bool: class SpecifierRequirement(Requirement): - def __init__( - self, - ireq: InstallRequirement, - *, - drop_extras: bool = False, - ) -> None: - """ - :param drop_extras: Ignore any extras that are part of the install requirement, - making this a requirement on the base only. - """ + def __init__(self, ireq: InstallRequirement) -> None: assert ireq.link is None, "This is a link, not a specifier" - self._ireq = ireq if not drop_extras else install_req_drop_extras(ireq) + self._ireq = ireq self._extras = frozenset(self._ireq.extras) def __str__(self) -> str: @@ -102,6 +93,17 @@ def is_satisfied_by(self, candidate: Candidate) -> bool: return spec.contains(candidate.version, prereleases=True) +class SpecifierWithoutExtrasRequirement(SpecifierRequirement): + """ + Requirement backed by an install requirement on a base package. Trims extras from its install requirement if there are any. 
+ """ + + def __init__(self, ireq: InstallRequirement) -> None: + assert ireq.link is None, "This is a link, not a specifier" + self._ireq = install_req_drop_extras(ireq) + self._extras = frozenset(self._ireq.extras) + + class RequiresPythonRequirement(Requirement): """A requirement representing Requires-Python metadata.""" From 50cd318cefcdd2a451b0a70a3bf3f31a3ecc6b99 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 6 Sep 2023 15:06:19 +0200 Subject: [PATCH 681/730] review comment: renamed and moved up ExtrasCandidate._ireq --- src/pip/_internal/resolution/resolvelib/candidates.py | 9 +++++---- src/pip/_internal/resolution/resolvelib/factory.py | 9 +++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 23883484139..d658be37284 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -427,10 +427,11 @@ def __init__( self, base: BaseCandidate, extras: FrozenSet[str], - ireq: Optional[InstallRequirement] = None, + *, + comes_from: Optional[InstallRequirement] = None, ) -> None: """ - :param ireq: the InstallRequirement that led to this candidate, if it + :param ireq: the InstallRequirement that led to this candidate if it differs from the base's InstallRequirement. This will often be the case in the sense that this candidate's requirement has the extras while the base's does not. 
Unlike the InstallRequirement backed @@ -439,7 +440,7 @@ def __init__( """ self.base = base self.extras = extras - self._ireq = ireq + self._comes_from = comes_from if comes_from is not None else self.base._ireq def __str__(self) -> str: name, rest = str(self.base).split(" ", 1) @@ -514,7 +515,7 @@ def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requiremen for r in self.base.dist.iter_dependencies(valid_extras): yield from factory.make_requirements_from_spec( str(r), - self._ireq if self._ireq is not None else self.base._ireq, + self._comes_from, valid_extras, ) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index af13a33215b..8c5a779911f 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -142,13 +142,14 @@ def _make_extras_candidate( self, base: BaseCandidate, extras: FrozenSet[str], - ireq: Optional[InstallRequirement] = None, + *, + comes_from: Optional[InstallRequirement] = None, ) -> ExtrasCandidate: cache_key = (id(base), extras) try: candidate = self._extras_candidate_cache[cache_key] except KeyError: - candidate = ExtrasCandidate(base, extras, ireq=ireq) + candidate = ExtrasCandidate(base, extras, comes_from=comes_from) self._extras_candidate_cache[cache_key] = candidate return candidate @@ -165,7 +166,7 @@ def _make_candidate_from_dist( self._installed_candidate_cache[dist.canonical_name] = base if not extras: return base - return self._make_extras_candidate(base, extras, ireq=template) + return self._make_extras_candidate(base, extras, comes_from=template) def _make_candidate_from_link( self, @@ -227,7 +228,7 @@ def _make_candidate_from_link( if not extras: return base - return self._make_extras_candidate(base, extras, ireq=template) + return self._make_extras_candidate(base, extras, comes_from=template) def _iter_found_candidates( self, From f5602fa0b8a26733cc144b5e1449730fdf620c31 Mon Sep 17 
00:00:00 2001 From: Sander Van Balen Date: Wed, 6 Sep 2023 15:12:17 +0200 Subject: [PATCH 682/730] added message to invariant assertions --- src/pip/_internal/req/constructors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index a40191954f8..f0f043b0021 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -71,10 +71,10 @@ def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requireme flags=re.ASCII, ) # ireq.req is a valid requirement so the regex should always match - assert match is not None + assert match is not None, f"regex match on requirement {req} failed, this should never happen" pre: Optional[str] = match.group(1) post: Optional[str] = match.group(3) - assert pre is not None and post is not None + assert pre is not None and post is not None, f"regex group selection for requirement {req} failed, this should never happen" extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else "" return Requirement(f"{pre}{extras}{post}") From 449522a8286d28d2c88776dca3cc67b3064982d3 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 6 Sep 2023 15:16:22 +0200 Subject: [PATCH 683/730] minor fixes and linting --- src/pip/_internal/req/constructors.py | 8 ++++++-- src/pip/_internal/resolution/resolvelib/factory.py | 2 +- .../_internal/resolution/resolvelib/requirements.py | 3 ++- src/pip/_internal/resolution/resolvelib/resolver.py | 10 +++++++--- tests/unit/resolution_resolvelib/test_requirement.py | 8 ++++---- 5 files changed, 20 insertions(+), 11 deletions(-) diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py index f0f043b0021..b52c9a456bb 100644 --- a/src/pip/_internal/req/constructors.py +++ b/src/pip/_internal/req/constructors.py @@ -71,10 +71,14 @@ def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requireme flags=re.ASCII, ) 
# ireq.req is a valid requirement so the regex should always match - assert match is not None, f"regex match on requirement {req} failed, this should never happen" + assert ( + match is not None + ), f"regex match on requirement {req} failed, this should never happen" pre: Optional[str] = match.group(1) post: Optional[str] = match.group(3) - assert pre is not None and post is not None, f"regex group selection for requirement {req} failed, this should never happen" + assert ( + pre is not None and post is not None + ), f"regex group selection for requirement {req} failed, this should never happen" extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else "" return Requirement(f"{pre}{extras}{post}") diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py index 8c5a779911f..905449f68db 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -468,7 +468,7 @@ def _make_requirements_from_install_req( yield from () elif not ireq.link: if ireq.extras and ireq.req is not None and ireq.req.specifier: - yield SpecifierWithoutExtrasRequirement(ireq), + yield SpecifierWithoutExtrasRequirement(ireq) yield SpecifierRequirement(ireq) else: self._fail_if_link_is_unsupported_wheel(ireq.link) diff --git a/src/pip/_internal/resolution/resolvelib/requirements.py b/src/pip/_internal/resolution/resolvelib/requirements.py index 9c2512823a3..02cdf65f144 100644 --- a/src/pip/_internal/resolution/resolvelib/requirements.py +++ b/src/pip/_internal/resolution/resolvelib/requirements.py @@ -95,7 +95,8 @@ def is_satisfied_by(self, candidate: Candidate) -> bool: class SpecifierWithoutExtrasRequirement(SpecifierRequirement): """ - Requirement backed by an install requirement on a base package. Trims extras from its install requirement if there are any. + Requirement backed by an install requirement on a base package. 
+ Trims extras from its install requirement if there are any. """ def __init__(self, ireq: InstallRequirement) -> None: diff --git a/src/pip/_internal/resolution/resolvelib/resolver.py b/src/pip/_internal/resolution/resolvelib/resolver.py index 2e4941da814..c12beef0b2a 100644 --- a/src/pip/_internal/resolution/resolvelib/resolver.py +++ b/src/pip/_internal/resolution/resolvelib/resolver.py @@ -104,9 +104,13 @@ def resolve( raise error from e req_set = RequirementSet(check_supported_wheels=check_supported_wheels) - # process candidates with extras last to ensure their base equivalent is already in the req_set if appropriate - # Python's sort is stable so using a binary key function keeps relative order within both subsets - for candidate in sorted(result.mapping.values(), key=lambda c: c.name != c.project_name): + # process candidates with extras last to ensure their base equivalent is + # already in the req_set if appropriate. + # Python's sort is stable so using a binary key function keeps relative order + # within both subsets. 
+ for candidate in sorted( + result.mapping.values(), key=lambda c: c.name != c.project_name + ): ireq = candidate.get_install_requirement() if ireq is None: if candidate.name != candidate.project_name: diff --git a/tests/unit/resolution_resolvelib/test_requirement.py b/tests/unit/resolution_resolvelib/test_requirement.py index ce48ab16c49..642136a54fd 100644 --- a/tests/unit/resolution_resolvelib/test_requirement.py +++ b/tests/unit/resolution_resolvelib/test_requirement.py @@ -61,7 +61,7 @@ def test_new_resolver_requirement_has_name( ) -> None: """All requirements should have a name""" for spec, name, _ in test_cases: - reqs = factory.make_requirements_from_spec(spec, comes_from=None) + reqs = list(factory.make_requirements_from_spec(spec, comes_from=None)) assert len(reqs) == 1 assert reqs[0].name == name @@ -71,7 +71,7 @@ def test_new_resolver_correct_number_of_matches( ) -> None: """Requirements should return the correct number of candidates""" for spec, _, match_count in test_cases: - reqs = factory.make_requirements_from_spec(spec, comes_from=None) + reqs = list(factory.make_requirements_from_spec(spec, comes_from=None)) assert len(reqs) == 1 req = reqs[0] matches = factory.find_candidates( @@ -89,7 +89,7 @@ def test_new_resolver_candidates_match_requirement( ) -> None: """Candidates returned from find_candidates should satisfy the requirement""" for spec, _, _ in test_cases: - reqs = factory.make_requirements_from_spec(spec, comes_from=None) + reqs = list(factory.make_requirements_from_spec(spec, comes_from=None)) assert len(reqs) == 1 req = reqs[0] candidates = factory.find_candidates( @@ -106,7 +106,7 @@ def test_new_resolver_candidates_match_requirement( def test_new_resolver_full_resolve(factory: Factory, provider: PipProvider) -> None: """A very basic full resolve""" - reqs = factory.make_requirements_from_spec("simplewheel", comes_from=None) + reqs = list(factory.make_requirements_from_spec("simplewheel", comes_from=None)) assert len(reqs) == 1 r: 
Resolver[Requirement, Candidate, str] = Resolver(provider, BaseReporter()) result = r.resolve([reqs[0]]) From d5e3f0c4b4d6aa4b432cd5480abb234e2e3332fb Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 6 Sep 2023 11:54:00 -0400 Subject: [PATCH 684/730] Use versionchanged syntax --- docs/html/topics/caching.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/html/topics/caching.md b/docs/html/topics/caching.md index 19bd064a74c..8d6c40f112d 100644 --- a/docs/html/topics/caching.md +++ b/docs/html/topics/caching.md @@ -27,11 +27,12 @@ While this cache attempts to minimize network activity, it does not prevent network access altogether. If you want a local install solution that circumvents accessing PyPI, see {ref}`Installing from local packages`. -In versions prior to 23.2, this cache was stored in a directory called `http` in -the main cache directory (see below for its location). In 23.2 and later, a new -cache format is used, stored in a directory called `http-v2`. If you have -completely switched to newer versions of `pip`, you may wish to delete the old -directory. +```{versionchanged} 23.3 +A new cache format is now used, stored in a directory called `http-v2` (see +below for this directory's location). Previously this cache was stored in a +directory called `http` in the main cache directory. If you have completely +switched to newer versions of `pip`, you may wish to delete the old directory. +``` (wheel-caching)= From b273cee6c5b3572390a3fe9316b2e86661934ce9 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Wed, 6 Sep 2023 16:42:38 -0400 Subject: [PATCH 685/730] Combine one entry, explain difference between entries better. 
--- src/pip/_internal/commands/cache.py | 16 ++++++++-------- tests/functional/test_cache.py | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py index 83efabe8785..0b3380da006 100644 --- a/src/pip/_internal/commands/cache.py +++ b/src/pip/_internal/commands/cache.py @@ -96,18 +96,19 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: http_cache_location = self._cache_dir(options, "http-v2") old_http_cache_location = self._cache_dir(options, "http") wheels_cache_location = self._cache_dir(options, "wheels") - http_cache_size = filesystem.format_directory_size(http_cache_location) - old_http_cache_size = filesystem.format_directory_size(old_http_cache_location) + http_cache_size = ( + filesystem.format_directory_size(http_cache_location) + + filesystem.format_directory_size(old_http_cache_location) + ) wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) message = ( textwrap.dedent( """ - Package index page cache location (new): {http_cache_location} - Package index page cache location (old): {old_http_cache_location} - Package index page cache size (new): {http_cache_size} - Package index page cache size (old): {old_http_cache_size} - Number of HTTP files (old+new cache): {num_http_files} + Package index page cache location (pip v23.3+): {http_cache_location} + Package index page cache location (older pips): {old_http_cache_location} + Package index page cache size: {http_cache_size} + Number of HTTP files: {num_http_files} Locally built wheels location: {wheels_cache_location} Locally built wheels size: {wheels_cache_size} Number of locally built wheels: {package_count} @@ -117,7 +118,6 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: http_cache_location=http_cache_location, old_http_cache_location=old_http_cache_location, http_cache_size=http_cache_size, - old_http_cache_size=old_http_cache_size, 
num_http_files=num_http_files, wheels_cache_location=wheels_cache_location, package_count=num_packages, diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py index ddafd7332d6..c5d910d453f 100644 --- a/tests/functional/test_cache.py +++ b/tests/functional/test_cache.py @@ -203,7 +203,7 @@ def test_cache_info( ) -> None: result = script.pip("cache", "info") - assert f"Package index page cache location (new): {http_cache_dir}" in result.stdout + assert f"Package index page cache location (pip v23.3+): {http_cache_dir}" in result.stdout assert f"Locally built wheels location: {wheel_cache_dir}" in result.stdout num_wheels = len(wheel_cache_files) assert f"Number of locally built wheels: {num_wheels}" in result.stdout From 2951666df5042dc6a329e017e9befcf2b54c25d4 Mon Sep 17 00:00:00 2001 From: Sviatoslav Sydorenko Date: Thu, 7 Sep 2023 01:17:57 +0200 Subject: [PATCH 686/730] Exclude PR #9634 reformatting from Git blame --- .git-blame-ignore-revs | 1 + 1 file changed, 1 insertion(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index c7644d0e6e6..f09b08660e7 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -33,3 +33,4 @@ c7ee560e00b85f7486b452c14ff49e4737996eda # Blacken tools/ 1897784d59e0d5fcda2dd75fea54ddd8be3d502a # Blacken src/pip/_internal/index 94999255d5ede440c37137d210666fdf64302e75 # Reformat the codebase, with black 585037a80a1177f1fa92e159a7079855782e543e # Cleanup implicit string concatenation +8a6f6ac19b80a6dc35900a47016c851d9fcd2ee2 # Blacken src/pip/_internal/resolution directory From 952ab6d837fbece1a221a1d0409eb27f2bb8c544 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Thu, 7 Sep 2023 10:31:49 +0200 Subject: [PATCH 687/730] Update src/pip/_internal/resolution/resolvelib/factory.py Co-authored-by: Tzu-ping Chung --- src/pip/_internal/resolution/resolvelib/factory.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pip/_internal/resolution/resolvelib/factory.py 
b/src/pip/_internal/resolution/resolvelib/factory.py index 905449f68db..2b51aab67df 100644 --- a/src/pip/_internal/resolution/resolvelib/factory.py +++ b/src/pip/_internal/resolution/resolvelib/factory.py @@ -465,7 +465,6 @@ def _make_requirements_from_install_req( ireq.name, ireq.markers, ) - yield from () elif not ireq.link: if ireq.extras and ireq.req is not None and ireq.req.specifier: yield SpecifierWithoutExtrasRequirement(ireq) From ab9f6f37f125401f547cd5df84c66d1fb50e4203 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Thu, 7 Sep 2023 12:07:50 -0400 Subject: [PATCH 688/730] Fix formatting, combine numbers not strings! Co-authored-by: Pradyun Gedam --- src/pip/_internal/commands/cache.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py index 0b3380da006..32d1a221d1e 100644 --- a/src/pip/_internal/commands/cache.py +++ b/src/pip/_internal/commands/cache.py @@ -97,8 +97,8 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: old_http_cache_location = self._cache_dir(options, "http") wheels_cache_location = self._cache_dir(options, "wheels") http_cache_size = ( - filesystem.format_directory_size(http_cache_location) + - filesystem.format_directory_size(old_http_cache_location) + filesystem.format_size(filesystem.directory_size(http_cache_location) + + filesystem.directory_size(old_http_cache_location)) ) wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) From fbda0a2ba7e6676f286e515c11b77ded8de996b0 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Fri, 8 Sep 2023 16:32:42 +0200 Subject: [PATCH 689/730] Update tests/unit/resolution_resolvelib/test_requirement.py Co-authored-by: Pradyun Gedam --- tests/unit/resolution_resolvelib/test_requirement.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/resolution_resolvelib/test_requirement.py 
b/tests/unit/resolution_resolvelib/test_requirement.py index 642136a54fd..b8cd13cb566 100644 --- a/tests/unit/resolution_resolvelib/test_requirement.py +++ b/tests/unit/resolution_resolvelib/test_requirement.py @@ -109,5 +109,5 @@ def test_new_resolver_full_resolve(factory: Factory, provider: PipProvider) -> N reqs = list(factory.make_requirements_from_spec("simplewheel", comes_from=None)) assert len(reqs) == 1 r: Resolver[Requirement, Candidate, str] = Resolver(provider, BaseReporter()) - result = r.resolve([reqs[0]]) + result = r.resolve(reqs) assert set(result.mapping.keys()) == {"simplewheel"} From 83ca10ab6012bab3654728b335d3d3b56ac6da06 Mon Sep 17 00:00:00 2001 From: Shahar Epstein <60007259+shahar1@users.noreply.github.com> Date: Sun, 10 Sep 2023 11:25:34 +0300 Subject: [PATCH 690/730] Update search command docs (#12271) --- docs/html/cli/pip_search.rst | 6 ++++++ news/12059.doc.rst | 1 + 2 files changed, 7 insertions(+) create mode 100644 news/12059.doc.rst diff --git a/docs/html/cli/pip_search.rst b/docs/html/cli/pip_search.rst index 9905a1bafac..93ddab3fa78 100644 --- a/docs/html/cli/pip_search.rst +++ b/docs/html/cli/pip_search.rst @@ -21,6 +21,12 @@ Usage Description =========== +.. attention:: + PyPI no longer supports ``pip search`` (or XML-RPC search). Please use https://pypi.org/search (via a browser) + instead. See https://warehouse.pypa.io/api-reference/xml-rpc.html#deprecated-methods for more information. + + However, XML-RPC search (and this command) may still be supported by indexes other than PyPI. + .. 
pip-command-description:: search diff --git a/news/12059.doc.rst b/news/12059.doc.rst new file mode 100644 index 00000000000..bf3a8d3e662 --- /dev/null +++ b/news/12059.doc.rst @@ -0,0 +1 @@ +Document that ``pip search`` support has been removed from PyPI From dc188a87e43d7ce1debfe4ed3557ed4023d32504 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 12 Sep 2023 14:11:48 +0800 Subject: [PATCH 691/730] Skip test failing on new Python/setuptools combo This is a temporary measure until we fix the importlib.metadata backend. --- tests/functional/test_install_extras.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index c6cef00fa9c..db4a811e0fd 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -150,6 +150,10 @@ def test_install_fails_if_extra_at_end( assert "Extras after version" in result.stderr +@pytest.mark.skipif( + "sys.version_info >= (3, 11)", + reason="Setuptools incompatibility with importlib.metadata; see GH-12267", +) def test_install_special_extra(script: PipTestEnvironment) -> None: # Check that uppercase letters and '-' are dealt with # make a dummy project From c94d81a36de643d2a1176430452a06862a77f58d Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Tue, 12 Sep 2023 16:00:40 +0800 Subject: [PATCH 692/730] Setuptools now implements proper normalization --- tests/functional/test_install_extras.py | 12 +----------- tests/requirements-common_wheels.txt | 3 ++- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index 21da9d50e1b..20942939763 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -159,17 +159,7 @@ def test_install_fails_if_extra_at_end( "specified_extra, requested_extra", [ ("Hop_hOp-hoP", "Hop_hOp-hoP"), - pytest.param( - "Hop_hOp-hoP", - "hop-hop-hop", - 
marks=pytest.mark.xfail( - reason=( - "matching a normalized extra request against an" - "unnormalized extra in metadata requires PEP 685 support " - "in packaging (see pypa/pip#11445)." - ), - ), - ), + ("Hop_hOp-hoP", "hop-hop-hop"), ("hop-hop-hop", "Hop_hOp-hoP"), ], ) diff --git a/tests/requirements-common_wheels.txt b/tests/requirements-common_wheels.txt index 6403ed73898..939a111a071 100644 --- a/tests/requirements-common_wheels.txt +++ b/tests/requirements-common_wheels.txt @@ -5,7 +5,8 @@ # 4. Replacing the `setuptools` entry below with a `file:///...` URL # (Adjust artifact directory used based on preference and operating system) -setuptools >= 40.8.0, != 60.6.0 +# Implements new extra normalization. +setuptools >= 68.2 wheel # As required by pytest-cov. coverage >= 4.4 From 9ee4b8ce36ce3f0f57615db017334a43a97d2dea Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Mon, 26 Jun 2023 22:18:08 -0500 Subject: [PATCH 693/730] Vendor truststore --- docs/html/topics/https-certificates.md | 12 +- news/truststore.vendor.rst | 1 + src/pip/_internal/cli/req_command.py | 2 +- src/pip/_vendor/__init__.py | 1 + src/pip/_vendor/truststore/LICENSE | 21 + src/pip/_vendor/truststore/__init__.py | 13 + src/pip/_vendor/truststore/_api.py | 302 ++++++++++ src/pip/_vendor/truststore/_macos.py | 501 +++++++++++++++++ src/pip/_vendor/truststore/_openssl.py | 66 +++ src/pip/_vendor/truststore/_ssl_constants.py | 12 + src/pip/_vendor/truststore/_windows.py | 554 +++++++++++++++++++ src/pip/_vendor/truststore/py.typed | 0 src/pip/_vendor/vendor.txt | 1 + tests/functional/test_truststore.py | 15 - 14 files changed, 1474 insertions(+), 27 deletions(-) create mode 100644 news/truststore.vendor.rst create mode 100644 src/pip/_vendor/truststore/LICENSE create mode 100644 src/pip/_vendor/truststore/__init__.py create mode 100644 src/pip/_vendor/truststore/_api.py create mode 100644 src/pip/_vendor/truststore/_macos.py create mode 100644 src/pip/_vendor/truststore/_openssl.py create 
mode 100644 src/pip/_vendor/truststore/_ssl_constants.py create mode 100644 src/pip/_vendor/truststore/_windows.py create mode 100644 src/pip/_vendor/truststore/py.typed diff --git a/docs/html/topics/https-certificates.md b/docs/html/topics/https-certificates.md index b42c463e6cc..341cfc632de 100644 --- a/docs/html/topics/https-certificates.md +++ b/docs/html/topics/https-certificates.md @@ -28,19 +28,9 @@ It is possible to use the system trust store, instead of the bundled certifi certificates for verifying HTTPS certificates. This approach will typically support corporate proxy certificates without additional configuration. -In order to use system trust stores, you need to: - -- Use Python 3.10 or newer. -- Install the {pypi}`truststore` package, in the Python environment you're - running pip in. - - This is typically done by installing this package using a system package - manager or by using pip in {ref}`Hash-checking mode` for this package and - trusting the network using the `--trusted-host` flag. +In order to use system trust stores, you need to use Python 3.10 or newer. ```{pip-cli} - $ python -m pip install truststore - [...] $ python -m pip install SomePackage --use-feature=truststore [...] 
Successfully installed SomePackage diff --git a/news/truststore.vendor.rst b/news/truststore.vendor.rst new file mode 100644 index 00000000000..ee974728d92 --- /dev/null +++ b/news/truststore.vendor.rst @@ -0,0 +1 @@ +Add truststore 0.7.0 diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 86070f10c14..80b35a80aae 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -58,7 +58,7 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]: return None try: - import truststore + from ..._vendor import truststore except ImportError: raise CommandError( "To use the truststore feature, 'truststore' must be installed into " diff --git a/src/pip/_vendor/__init__.py b/src/pip/_vendor/__init__.py index b22f7abb93b..c1884baf3d1 100644 --- a/src/pip/_vendor/__init__.py +++ b/src/pip/_vendor/__init__.py @@ -117,4 +117,5 @@ def vendored(modulename): vendored("rich.traceback") vendored("tenacity") vendored("tomli") + vendored("truststore") vendored("urllib3") diff --git a/src/pip/_vendor/truststore/LICENSE b/src/pip/_vendor/truststore/LICENSE new file mode 100644 index 00000000000..7ec568c1136 --- /dev/null +++ b/src/pip/_vendor/truststore/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2022 Seth Michael Larson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/src/pip/_vendor/truststore/__init__.py b/src/pip/_vendor/truststore/__init__.py new file mode 100644 index 00000000000..0f3a4d9e1e1 --- /dev/null +++ b/src/pip/_vendor/truststore/__init__.py @@ -0,0 +1,13 @@ +"""Verify certificates using native system trust stores""" + +import sys as _sys + +if _sys.version_info < (3, 10): + raise ImportError("truststore requires Python 3.10 or later") + +from ._api import SSLContext, extract_from_ssl, inject_into_ssl # noqa: E402 + +del _api, _sys # type: ignore[name-defined] # noqa: F821 + +__all__ = ["SSLContext", "inject_into_ssl", "extract_from_ssl"] +__version__ = "0.7.0" diff --git a/src/pip/_vendor/truststore/_api.py b/src/pip/_vendor/truststore/_api.py new file mode 100644 index 00000000000..2647042418f --- /dev/null +++ b/src/pip/_vendor/truststore/_api.py @@ -0,0 +1,302 @@ +import array +import ctypes +import mmap +import os +import pickle +import platform +import socket +import ssl +import typing + +import _ssl # type: ignore[import] + +from ._ssl_constants import _original_SSLContext, _original_super_SSLContext + +if platform.system() == "Windows": + from ._windows import _configure_context, _verify_peercerts_impl +elif platform.system() == "Darwin": + from ._macos import _configure_context, _verify_peercerts_impl +else: + from ._openssl import _configure_context, _verify_peercerts_impl + +# From typeshed/stdlib/ssl.pyi +_StrOrBytesPath: typing.TypeAlias = str | bytes | os.PathLike[str] | os.PathLike[bytes] +_PasswordType: typing.TypeAlias = str 
| bytes | typing.Callable[[], str | bytes] + +# From typeshed/stdlib/_typeshed/__init__.py +_ReadableBuffer: typing.TypeAlias = typing.Union[ + bytes, + memoryview, + bytearray, + "array.array[typing.Any]", + mmap.mmap, + "ctypes._CData", + pickle.PickleBuffer, +] + + +def inject_into_ssl() -> None: + """Injects the :class:`truststore.SSLContext` into the ``ssl`` + module by replacing :class:`ssl.SSLContext`. + """ + setattr(ssl, "SSLContext", SSLContext) + # urllib3 holds on to its own reference of ssl.SSLContext + # so we need to replace that reference too. + try: + import pip._vendor.urllib3.util.ssl_ as urllib3_ssl + + setattr(urllib3_ssl, "SSLContext", SSLContext) + except ImportError: + pass + + +def extract_from_ssl() -> None: + """Restores the :class:`ssl.SSLContext` class to its original state""" + setattr(ssl, "SSLContext", _original_SSLContext) + try: + import pip._vendor.urllib3.util.ssl_ as urllib3_ssl + + urllib3_ssl.SSLContext = _original_SSLContext + except ImportError: + pass + + +class SSLContext(ssl.SSLContext): + """SSLContext API that uses system certificates on all platforms""" + + def __init__(self, protocol: int = None) -> None: # type: ignore[assignment] + self._ctx = _original_SSLContext(protocol) + + class TruststoreSSLObject(ssl.SSLObject): + # This object exists because wrap_bio() doesn't + # immediately do the handshake so we need to do + # certificate verifications after SSLObject.do_handshake() + + def do_handshake(self) -> None: + ret = super().do_handshake() + _verify_peercerts(self, server_hostname=self.server_hostname) + return ret + + self._ctx.sslobject_class = TruststoreSSLObject + + def wrap_socket( + self, + sock: socket.socket, + server_side: bool = False, + do_handshake_on_connect: bool = True, + suppress_ragged_eofs: bool = True, + server_hostname: str | None = None, + session: ssl.SSLSession | None = None, + ) -> ssl.SSLSocket: + # Use a context manager here because the + # inner SSLContext holds on to our state + # but 
also does the actual handshake. + with _configure_context(self._ctx): + ssl_sock = self._ctx.wrap_socket( + sock, + server_side=server_side, + server_hostname=server_hostname, + do_handshake_on_connect=do_handshake_on_connect, + suppress_ragged_eofs=suppress_ragged_eofs, + session=session, + ) + try: + _verify_peercerts(ssl_sock, server_hostname=server_hostname) + except Exception: + ssl_sock.close() + raise + return ssl_sock + + def wrap_bio( + self, + incoming: ssl.MemoryBIO, + outgoing: ssl.MemoryBIO, + server_side: bool = False, + server_hostname: str | None = None, + session: ssl.SSLSession | None = None, + ) -> ssl.SSLObject: + with _configure_context(self._ctx): + ssl_obj = self._ctx.wrap_bio( + incoming, + outgoing, + server_hostname=server_hostname, + server_side=server_side, + session=session, + ) + return ssl_obj + + def load_verify_locations( + self, + cafile: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None, + capath: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None, + cadata: str | _ReadableBuffer | None = None, + ) -> None: + return self._ctx.load_verify_locations( + cafile=cafile, capath=capath, cadata=cadata + ) + + def load_cert_chain( + self, + certfile: _StrOrBytesPath, + keyfile: _StrOrBytesPath | None = None, + password: _PasswordType | None = None, + ) -> None: + return self._ctx.load_cert_chain( + certfile=certfile, keyfile=keyfile, password=password + ) + + def load_default_certs( + self, purpose: ssl.Purpose = ssl.Purpose.SERVER_AUTH + ) -> None: + return self._ctx.load_default_certs(purpose) + + def set_alpn_protocols(self, alpn_protocols: typing.Iterable[str]) -> None: + return self._ctx.set_alpn_protocols(alpn_protocols) + + def set_npn_protocols(self, npn_protocols: typing.Iterable[str]) -> None: + return self._ctx.set_npn_protocols(npn_protocols) + + def set_ciphers(self, __cipherlist: str) -> None: + return self._ctx.set_ciphers(__cipherlist) + + def get_ciphers(self) -> typing.Any: + return 
self._ctx.get_ciphers() + + def session_stats(self) -> dict[str, int]: + return self._ctx.session_stats() + + def cert_store_stats(self) -> dict[str, int]: + raise NotImplementedError() + + @typing.overload + def get_ca_certs( + self, binary_form: typing.Literal[False] = ... + ) -> list[typing.Any]: + ... + + @typing.overload + def get_ca_certs(self, binary_form: typing.Literal[True] = ...) -> list[bytes]: + ... + + @typing.overload + def get_ca_certs(self, binary_form: bool = ...) -> typing.Any: + ... + + def get_ca_certs(self, binary_form: bool = False) -> list[typing.Any] | list[bytes]: + raise NotImplementedError() + + @property + def check_hostname(self) -> bool: + return self._ctx.check_hostname + + @check_hostname.setter + def check_hostname(self, value: bool) -> None: + self._ctx.check_hostname = value + + @property + def hostname_checks_common_name(self) -> bool: + return self._ctx.hostname_checks_common_name + + @hostname_checks_common_name.setter + def hostname_checks_common_name(self, value: bool) -> None: + self._ctx.hostname_checks_common_name = value + + @property + def keylog_filename(self) -> str: + return self._ctx.keylog_filename + + @keylog_filename.setter + def keylog_filename(self, value: str) -> None: + self._ctx.keylog_filename = value + + @property + def maximum_version(self) -> ssl.TLSVersion: + return self._ctx.maximum_version + + @maximum_version.setter + def maximum_version(self, value: ssl.TLSVersion) -> None: + _original_super_SSLContext.maximum_version.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def minimum_version(self) -> ssl.TLSVersion: + return self._ctx.minimum_version + + @minimum_version.setter + def minimum_version(self, value: ssl.TLSVersion) -> None: + _original_super_SSLContext.minimum_version.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def options(self) -> ssl.Options: + return self._ctx.options + + @options.setter + def options(self, value: ssl.Options) -> 
None: + _original_super_SSLContext.options.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def post_handshake_auth(self) -> bool: + return self._ctx.post_handshake_auth + + @post_handshake_auth.setter + def post_handshake_auth(self, value: bool) -> None: + self._ctx.post_handshake_auth = value + + @property + def protocol(self) -> ssl._SSLMethod: + return self._ctx.protocol + + @property + def security_level(self) -> int: # type: ignore[override] + return self._ctx.security_level + + @property + def verify_flags(self) -> ssl.VerifyFlags: + return self._ctx.verify_flags + + @verify_flags.setter + def verify_flags(self, value: ssl.VerifyFlags) -> None: + _original_super_SSLContext.verify_flags.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + @property + def verify_mode(self) -> ssl.VerifyMode: + return self._ctx.verify_mode + + @verify_mode.setter + def verify_mode(self, value: ssl.VerifyMode) -> None: + _original_super_SSLContext.verify_mode.__set__( # type: ignore[attr-defined] + self._ctx, value + ) + + +def _verify_peercerts( + sock_or_sslobj: ssl.SSLSocket | ssl.SSLObject, server_hostname: str | None +) -> None: + """ + Verifies the peer certificates from an SSLSocket or SSLObject + against the certificates in the OS trust store. + """ + sslobj: ssl.SSLObject = sock_or_sslobj # type: ignore[assignment] + try: + while not hasattr(sslobj, "get_unverified_chain"): + sslobj = sslobj._sslobj # type: ignore[attr-defined] + except AttributeError: + pass + + # SSLObject.get_unverified_chain() returns 'None' + # if the peer sends no certificates. This is common + # for the server-side scenario. 
+ unverified_chain: typing.Sequence[_ssl.Certificate] = ( + sslobj.get_unverified_chain() or () # type: ignore[attr-defined] + ) + cert_bytes = [cert.public_bytes(_ssl.ENCODING_DER) for cert in unverified_chain] + _verify_peercerts_impl( + sock_or_sslobj.context, cert_bytes, server_hostname=server_hostname + ) diff --git a/src/pip/_vendor/truststore/_macos.py b/src/pip/_vendor/truststore/_macos.py new file mode 100644 index 00000000000..7dc440bf362 --- /dev/null +++ b/src/pip/_vendor/truststore/_macos.py @@ -0,0 +1,501 @@ +import contextlib +import ctypes +import platform +import ssl +import typing +from ctypes import ( + CDLL, + POINTER, + c_bool, + c_char_p, + c_int32, + c_long, + c_uint32, + c_ulong, + c_void_p, +) +from ctypes.util import find_library + +from ._ssl_constants import _set_ssl_context_verify_mode + +_mac_version = platform.mac_ver()[0] +_mac_version_info = tuple(map(int, _mac_version.split("."))) +if _mac_version_info < (10, 8): + raise ImportError( + f"Only OS X 10.8 and newer are supported, not {_mac_version_info[0]}.{_mac_version_info[1]}" + ) + + +def _load_cdll(name: str, macos10_16_path: str) -> CDLL: + """Loads a CDLL by name, falling back to known path on 10.16+""" + try: + # Big Sur is technically 11 but we use 10.16 due to the Big Sur + # beta being labeled as 10.16. 
+ path: str | None + if _mac_version_info >= (10, 16): + path = macos10_16_path + else: + path = find_library(name) + if not path: + raise OSError # Caught and reraised as 'ImportError' + return CDLL(path, use_errno=True) + except OSError: + raise ImportError(f"The library {name} failed to load") from None + + +Security = _load_cdll( + "Security", "/System/Library/Frameworks/Security.framework/Security" +) +CoreFoundation = _load_cdll( + "CoreFoundation", + "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", +) + +Boolean = c_bool +CFIndex = c_long +CFStringEncoding = c_uint32 +CFData = c_void_p +CFString = c_void_p +CFArray = c_void_p +CFMutableArray = c_void_p +CFError = c_void_p +CFType = c_void_p +CFTypeID = c_ulong +CFTypeRef = POINTER(CFType) +CFAllocatorRef = c_void_p + +OSStatus = c_int32 + +CFErrorRef = POINTER(CFError) +CFDataRef = POINTER(CFData) +CFStringRef = POINTER(CFString) +CFArrayRef = POINTER(CFArray) +CFMutableArrayRef = POINTER(CFMutableArray) +CFArrayCallBacks = c_void_p +CFOptionFlags = c_uint32 + +SecCertificateRef = POINTER(c_void_p) +SecPolicyRef = POINTER(c_void_p) +SecTrustRef = POINTER(c_void_p) +SecTrustResultType = c_uint32 +SecTrustOptionFlags = c_uint32 + +try: + Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef] + Security.SecCertificateCreateWithData.restype = SecCertificateRef + + Security.SecCertificateCopyData.argtypes = [SecCertificateRef] + Security.SecCertificateCopyData.restype = CFDataRef + + Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p] + Security.SecCopyErrorMessageString.restype = CFStringRef + + Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef] + Security.SecTrustSetAnchorCertificates.restype = OSStatus + + Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean] + Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus + + Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)] + 
Security.SecTrustEvaluate.restype = OSStatus + + Security.SecPolicyCreateRevocation.argtypes = [CFOptionFlags] + Security.SecPolicyCreateRevocation.restype = SecPolicyRef + + Security.SecPolicyCreateSSL.argtypes = [Boolean, CFStringRef] + Security.SecPolicyCreateSSL.restype = SecPolicyRef + + Security.SecTrustCreateWithCertificates.argtypes = [ + CFTypeRef, + CFTypeRef, + POINTER(SecTrustRef), + ] + Security.SecTrustCreateWithCertificates.restype = OSStatus + + Security.SecTrustGetTrustResult.argtypes = [ + SecTrustRef, + POINTER(SecTrustResultType), + ] + Security.SecTrustGetTrustResult.restype = OSStatus + + Security.SecTrustRef = SecTrustRef # type: ignore[attr-defined] + Security.SecTrustResultType = SecTrustResultType # type: ignore[attr-defined] + Security.OSStatus = OSStatus # type: ignore[attr-defined] + + kSecRevocationUseAnyAvailableMethod = 3 + kSecRevocationRequirePositiveResponse = 8 + + CoreFoundation.CFRelease.argtypes = [CFTypeRef] + CoreFoundation.CFRelease.restype = None + + CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] + CoreFoundation.CFGetTypeID.restype = CFTypeID + + CoreFoundation.CFStringCreateWithCString.argtypes = [ + CFAllocatorRef, + c_char_p, + CFStringEncoding, + ] + CoreFoundation.CFStringCreateWithCString.restype = CFStringRef + + CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] + CoreFoundation.CFStringGetCStringPtr.restype = c_char_p + + CoreFoundation.CFStringGetCString.argtypes = [ + CFStringRef, + c_char_p, + CFIndex, + CFStringEncoding, + ] + CoreFoundation.CFStringGetCString.restype = c_bool + + CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] + CoreFoundation.CFDataCreate.restype = CFDataRef + + CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] + CoreFoundation.CFDataGetLength.restype = CFIndex + + CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] + CoreFoundation.CFDataGetBytePtr.restype = c_void_p + + CoreFoundation.CFArrayCreate.argtypes = [ + 
CFAllocatorRef, + POINTER(CFTypeRef), + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreate.restype = CFArrayRef + + CoreFoundation.CFArrayCreateMutable.argtypes = [ + CFAllocatorRef, + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef + + CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] + CoreFoundation.CFArrayAppendValue.restype = None + + CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] + CoreFoundation.CFArrayGetCount.restype = CFIndex + + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] + CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p + + CoreFoundation.CFErrorGetCode.argtypes = [CFErrorRef] + CoreFoundation.CFErrorGetCode.restype = CFIndex + + CoreFoundation.CFErrorCopyDescription.argtypes = [CFErrorRef] + CoreFoundation.CFErrorCopyDescription.restype = CFStringRef + + CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( # type: ignore[attr-defined] + CoreFoundation, "kCFAllocatorDefault" + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( # type: ignore[attr-defined] + CoreFoundation, "kCFTypeArrayCallBacks" + ) + + CoreFoundation.CFTypeRef = CFTypeRef # type: ignore[attr-defined] + CoreFoundation.CFArrayRef = CFArrayRef # type: ignore[attr-defined] + CoreFoundation.CFStringRef = CFStringRef # type: ignore[attr-defined] + CoreFoundation.CFErrorRef = CFErrorRef # type: ignore[attr-defined] + +except AttributeError: + raise ImportError("Error initializing ctypes") from None + + +def _handle_osstatus(result: OSStatus, _: typing.Any, args: typing.Any) -> typing.Any: + """ + Raises an error if the OSStatus value is non-zero. + """ + if int(result) == 0: + return args + + # Returns a CFString which we need to transform + # into a UTF-8 Python string. 
+ error_message_cfstring = None + try: + error_message_cfstring = Security.SecCopyErrorMessageString(result, None) + + # First step is convert the CFString into a C string pointer. + # We try the fast no-copy way first. + error_message_cfstring_c_void_p = ctypes.cast( + error_message_cfstring, ctypes.POINTER(ctypes.c_void_p) + ) + message = CoreFoundation.CFStringGetCStringPtr( + error_message_cfstring_c_void_p, CFConst.kCFStringEncodingUTF8 + ) + + # Quoting the Apple dev docs: + # + # "A pointer to a C string or NULL if the internal + # storage of theString does not allow this to be + # returned efficiently." + # + # So we need to get our hands dirty. + if message is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + error_message_cfstring_c_void_p, + buffer, + 1024, + CFConst.kCFStringEncodingUTF8, + ) + if not result: + raise OSError("Error copying C string from CFStringRef") + message = buffer.value + + finally: + if error_message_cfstring is not None: + CoreFoundation.CFRelease(error_message_cfstring) + + # If no message can be found for this status we come + # up with a generic one that forwards the status code. 
+ if message is None or message == "": + message = f"SecureTransport operation returned a non-zero OSStatus: {result}" + + raise ssl.SSLError(message) + + +Security.SecTrustCreateWithCertificates.errcheck = _handle_osstatus # type: ignore[assignment] +Security.SecTrustSetAnchorCertificates.errcheck = _handle_osstatus # type: ignore[assignment] +Security.SecTrustGetTrustResult.errcheck = _handle_osstatus # type: ignore[assignment] + + +class CFConst: + """CoreFoundation constants""" + + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) + + errSecIncompleteCertRevocationCheck = -67635 + errSecHostNameMismatch = -67602 + errSecCertificateExpired = -67818 + errSecNotTrusted = -67843 + + +def _bytes_to_cf_data_ref(value: bytes) -> CFDataRef: # type: ignore[valid-type] + return CoreFoundation.CFDataCreate( # type: ignore[no-any-return] + CoreFoundation.kCFAllocatorDefault, value, len(value) + ) + + +def _bytes_to_cf_string(value: bytes) -> CFString: + """ + Given a Python binary data, create a CFString. + The string must be CFReleased by the caller. + """ + c_str = ctypes.c_char_p(value) + cf_str = CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + c_str, + CFConst.kCFStringEncodingUTF8, + ) + return cf_str # type: ignore[no-any-return] + + +def _cf_string_ref_to_str(cf_string_ref: CFStringRef) -> str | None: # type: ignore[valid-type] + """ + Creates a Unicode string from a CFString object. Used entirely for error + reporting. + Yes, it annoys me quite a lot that this function is this complex. 
+ """ + + string = CoreFoundation.CFStringGetCStringPtr( + cf_string_ref, CFConst.kCFStringEncodingUTF8 + ) + if string is None: + buffer = ctypes.create_string_buffer(1024) + result = CoreFoundation.CFStringGetCString( + cf_string_ref, buffer, 1024, CFConst.kCFStringEncodingUTF8 + ) + if not result: + raise OSError("Error copying C string from CFStringRef") + string = buffer.value + if string is not None: + string = string.decode("utf-8") + return string # type: ignore[no-any-return] + + +def _der_certs_to_cf_cert_array(certs: list[bytes]) -> CFMutableArrayRef: # type: ignore[valid-type] + """Builds a CFArray of SecCertificateRefs from a list of DER-encoded certificates. + Responsibility of the caller to call CoreFoundation.CFRelease on the CFArray. + """ + cf_array = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cf_array: + raise MemoryError("Unable to allocate memory!") + + for cert_data in certs: + cf_data = None + sec_cert_ref = None + try: + cf_data = _bytes_to_cf_data_ref(cert_data) + sec_cert_ref = Security.SecCertificateCreateWithData( + CoreFoundation.kCFAllocatorDefault, cf_data + ) + CoreFoundation.CFArrayAppendValue(cf_array, sec_cert_ref) + finally: + if cf_data: + CoreFoundation.CFRelease(cf_data) + if sec_cert_ref: + CoreFoundation.CFRelease(sec_cert_ref) + + return cf_array # type: ignore[no-any-return] + + +@contextlib.contextmanager +def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]: + check_hostname = ctx.check_hostname + verify_mode = ctx.verify_mode + ctx.check_hostname = False + _set_ssl_context_verify_mode(ctx, ssl.CERT_NONE) + try: + yield + finally: + ctx.check_hostname = check_hostname + _set_ssl_context_verify_mode(ctx, verify_mode) + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + certs = None + policies = None + trust = None + 
cf_error = None + try: + if server_hostname is not None: + cf_str_hostname = None + try: + cf_str_hostname = _bytes_to_cf_string(server_hostname.encode("ascii")) + ssl_policy = Security.SecPolicyCreateSSL(True, cf_str_hostname) + finally: + if cf_str_hostname: + CoreFoundation.CFRelease(cf_str_hostname) + else: + ssl_policy = Security.SecPolicyCreateSSL(True, None) + + policies = ssl_policy + if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN: + # Add explicit policy requiring positive revocation checks + policies = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + CoreFoundation.CFArrayAppendValue(policies, ssl_policy) + CoreFoundation.CFRelease(ssl_policy) + revocation_policy = Security.SecPolicyCreateRevocation( + kSecRevocationUseAnyAvailableMethod + | kSecRevocationRequirePositiveResponse + ) + CoreFoundation.CFArrayAppendValue(policies, revocation_policy) + CoreFoundation.CFRelease(revocation_policy) + elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF: + raise NotImplementedError("VERIFY_CRL_CHECK_LEAF not implemented for macOS") + + certs = None + try: + certs = _der_certs_to_cf_cert_array(cert_chain) + + # Now that we have certificates loaded and a SecPolicy + # we can finally create a SecTrust object! + trust = Security.SecTrustRef() + Security.SecTrustCreateWithCertificates( + certs, policies, ctypes.byref(trust) + ) + + finally: + # The certs are now being held by SecTrust so we can + # release our handles for the array. + if certs: + CoreFoundation.CFRelease(certs) + + # If there are additional trust anchors to load we need to transform + # the list of DER-encoded certificates into a CFArray. Otherwise + # pass 'None' to signal that we only want system / fetched certificates. 
+ ctx_ca_certs_der: list[bytes] | None = ssl_context.get_ca_certs( + binary_form=True + ) + if ctx_ca_certs_der: + ctx_ca_certs = None + try: + ctx_ca_certs = _der_certs_to_cf_cert_array(cert_chain) + Security.SecTrustSetAnchorCertificates(trust, ctx_ca_certs) + finally: + if ctx_ca_certs: + CoreFoundation.CFRelease(ctx_ca_certs) + else: + Security.SecTrustSetAnchorCertificates(trust, None) + + cf_error = CoreFoundation.CFErrorRef() + sec_trust_eval_result = Security.SecTrustEvaluateWithError( + trust, ctypes.byref(cf_error) + ) + # sec_trust_eval_result is a bool (0 or 1) + # where 1 means that the certs are trusted. + if sec_trust_eval_result == 1: + is_trusted = True + elif sec_trust_eval_result == 0: + is_trusted = False + else: + raise ssl.SSLError( + f"Unknown result from Security.SecTrustEvaluateWithError: {sec_trust_eval_result!r}" + ) + + cf_error_code = 0 + if not is_trusted: + cf_error_code = CoreFoundation.CFErrorGetCode(cf_error) + + # If the error is a known failure that we're + # explicitly okay with from SSLContext configuration + # we can set is_trusted accordingly. + if ssl_context.verify_mode != ssl.CERT_REQUIRED and ( + cf_error_code == CFConst.errSecNotTrusted + or cf_error_code == CFConst.errSecCertificateExpired + ): + is_trusted = True + elif ( + not ssl_context.check_hostname + and cf_error_code == CFConst.errSecHostNameMismatch + ): + is_trusted = True + + # If we're still not trusted then we start to + # construct and raise the SSLCertVerificationError. + if not is_trusted: + cf_error_string_ref = None + try: + cf_error_string_ref = CoreFoundation.CFErrorCopyDescription(cf_error) + + # Can this ever return 'None' if there's a CFError? + cf_error_message = ( + _cf_string_ref_to_str(cf_error_string_ref) + or "Certificate verification failed" + ) + + # TODO: Not sure if we need the SecTrustResultType for anything? + # We only care whether or not it's a success or failure for now. 
+ sec_trust_result_type = Security.SecTrustResultType() + Security.SecTrustGetTrustResult( + trust, ctypes.byref(sec_trust_result_type) + ) + + err = ssl.SSLCertVerificationError(cf_error_message) + err.verify_message = cf_error_message + err.verify_code = cf_error_code + raise err + finally: + if cf_error_string_ref: + CoreFoundation.CFRelease(cf_error_string_ref) + + finally: + if policies: + CoreFoundation.CFRelease(policies) + if trust: + CoreFoundation.CFRelease(trust) diff --git a/src/pip/_vendor/truststore/_openssl.py b/src/pip/_vendor/truststore/_openssl.py new file mode 100644 index 00000000000..9951cf75c40 --- /dev/null +++ b/src/pip/_vendor/truststore/_openssl.py @@ -0,0 +1,66 @@ +import contextlib +import os +import re +import ssl +import typing + +# candidates based on https://github.com/tiran/certifi-system-store by Christian Heimes +_CA_FILE_CANDIDATES = [ + # Alpine, Arch, Fedora 34+, OpenWRT, RHEL 9+, BSD + "/etc/ssl/cert.pem", + # Fedora <= 34, RHEL <= 9, CentOS <= 9 + "/etc/pki/tls/cert.pem", + # Debian, Ubuntu (requires ca-certificates) + "/etc/ssl/certs/ca-certificates.crt", + # SUSE + "/etc/ssl/ca-bundle.pem", +] + +_HASHED_CERT_FILENAME_RE = re.compile(r"^[0-9a-fA-F]{8}\.[0-9]$") + + +@contextlib.contextmanager +def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]: + # First, check whether the default locations from OpenSSL + # seem like they will give us a usable set of CA certs. + # ssl.get_default_verify_paths already takes care of: + # - getting cafile from either the SSL_CERT_FILE env var + # or the path configured when OpenSSL was compiled, + # and verifying that that path exists + # - getting capath from either the SSL_CERT_DIR env var + # or the path configured when OpenSSL was compiled, + # and verifying that that path exists + # In addition we'll check whether capath appears to contain certs. 
+ defaults = ssl.get_default_verify_paths() + if defaults.cafile or (defaults.capath and _capath_contains_certs(defaults.capath)): + ctx.set_default_verify_paths() + else: + # cafile from OpenSSL doesn't exist + # and capath from OpenSSL doesn't contain certs. + # Let's search other common locations instead. + for cafile in _CA_FILE_CANDIDATES: + if os.path.isfile(cafile): + ctx.load_verify_locations(cafile=cafile) + break + + yield + + +def _capath_contains_certs(capath: str) -> bool: + """Check whether capath exists and contains certs in the expected format.""" + if not os.path.isdir(capath): + return False + for name in os.listdir(capath): + if _HASHED_CERT_FILENAME_RE.match(name): + return True + return False + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + # This is a no-op because we've enabled SSLContext's built-in + # verification via verify_mode=CERT_REQUIRED, and don't need to repeat it. + pass diff --git a/src/pip/_vendor/truststore/_ssl_constants.py b/src/pip/_vendor/truststore/_ssl_constants.py new file mode 100644 index 00000000000..be60f8301ec --- /dev/null +++ b/src/pip/_vendor/truststore/_ssl_constants.py @@ -0,0 +1,12 @@ +import ssl + +# Hold on to the original class so we can create it consistently +# even if we inject our own SSLContext into the ssl module. 
+_original_SSLContext = ssl.SSLContext +_original_super_SSLContext = super(_original_SSLContext, _original_SSLContext) + + +def _set_ssl_context_verify_mode( + ssl_context: ssl.SSLContext, verify_mode: ssl.VerifyMode +) -> None: + _original_super_SSLContext.verify_mode.__set__(ssl_context, verify_mode) # type: ignore[attr-defined] diff --git a/src/pip/_vendor/truststore/_windows.py b/src/pip/_vendor/truststore/_windows.py new file mode 100644 index 00000000000..3de4960a1b0 --- /dev/null +++ b/src/pip/_vendor/truststore/_windows.py @@ -0,0 +1,554 @@ +import contextlib +import ssl +import typing +from ctypes import WinDLL # type: ignore +from ctypes import WinError # type: ignore +from ctypes import ( + POINTER, + Structure, + c_char_p, + c_ulong, + c_void_p, + c_wchar_p, + cast, + create_unicode_buffer, + pointer, + sizeof, +) +from ctypes.wintypes import ( + BOOL, + DWORD, + HANDLE, + LONG, + LPCSTR, + LPCVOID, + LPCWSTR, + LPFILETIME, + LPSTR, + LPWSTR, +) +from typing import TYPE_CHECKING, Any + +from ._ssl_constants import _set_ssl_context_verify_mode + +HCERTCHAINENGINE = HANDLE +HCERTSTORE = HANDLE +HCRYPTPROV_LEGACY = HANDLE + + +class CERT_CONTEXT(Structure): + _fields_ = ( + ("dwCertEncodingType", DWORD), + ("pbCertEncoded", c_void_p), + ("cbCertEncoded", DWORD), + ("pCertInfo", c_void_p), + ("hCertStore", HCERTSTORE), + ) + + +PCERT_CONTEXT = POINTER(CERT_CONTEXT) +PCCERT_CONTEXT = POINTER(PCERT_CONTEXT) + + +class CERT_ENHKEY_USAGE(Structure): + _fields_ = ( + ("cUsageIdentifier", DWORD), + ("rgpszUsageIdentifier", POINTER(LPSTR)), + ) + + +PCERT_ENHKEY_USAGE = POINTER(CERT_ENHKEY_USAGE) + + +class CERT_USAGE_MATCH(Structure): + _fields_ = ( + ("dwType", DWORD), + ("Usage", CERT_ENHKEY_USAGE), + ) + + +class CERT_CHAIN_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("RequestedUsage", CERT_USAGE_MATCH), + ("RequestedIssuancePolicy", CERT_USAGE_MATCH), + ("dwUrlRetrievalTimeout", DWORD), + ("fCheckRevocationFreshnessTime", BOOL), + 
("dwRevocationFreshnessTime", DWORD), + ("pftCacheResync", LPFILETIME), + ("pStrongSignPara", c_void_p), + ("dwStrongSignFlags", DWORD), + ) + + +if TYPE_CHECKING: + PCERT_CHAIN_PARA = pointer[CERT_CHAIN_PARA] # type: ignore[misc] +else: + PCERT_CHAIN_PARA = POINTER(CERT_CHAIN_PARA) + + +class CERT_TRUST_STATUS(Structure): + _fields_ = ( + ("dwErrorStatus", DWORD), + ("dwInfoStatus", DWORD), + ) + + +class CERT_CHAIN_ELEMENT(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("pCertContext", PCERT_CONTEXT), + ("TrustStatus", CERT_TRUST_STATUS), + ("pRevocationInfo", c_void_p), + ("pIssuanceUsage", PCERT_ENHKEY_USAGE), + ("pApplicationUsage", PCERT_ENHKEY_USAGE), + ("pwszExtendedErrorInfo", LPCWSTR), + ) + + +PCERT_CHAIN_ELEMENT = POINTER(CERT_CHAIN_ELEMENT) + + +class CERT_SIMPLE_CHAIN(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("TrustStatus", CERT_TRUST_STATUS), + ("cElement", DWORD), + ("rgpElement", POINTER(PCERT_CHAIN_ELEMENT)), + ("pTrustListInfo", c_void_p), + ("fHasRevocationFreshnessTime", BOOL), + ("dwRevocationFreshnessTime", DWORD), + ) + + +PCERT_SIMPLE_CHAIN = POINTER(CERT_SIMPLE_CHAIN) + + +class CERT_CHAIN_CONTEXT(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("TrustStatus", CERT_TRUST_STATUS), + ("cChain", DWORD), + ("rgpChain", POINTER(PCERT_SIMPLE_CHAIN)), + ("cLowerQualityChainContext", DWORD), + ("rgpLowerQualityChainContext", c_void_p), + ("fHasRevocationFreshnessTime", BOOL), + ("dwRevocationFreshnessTime", DWORD), + ) + + +PCERT_CHAIN_CONTEXT = POINTER(CERT_CHAIN_CONTEXT) +PCCERT_CHAIN_CONTEXT = POINTER(PCERT_CHAIN_CONTEXT) + + +class SSL_EXTRA_CERT_CHAIN_POLICY_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwAuthType", DWORD), + ("fdwChecks", DWORD), + ("pwszServerName", LPCWSTR), + ) + + +class CERT_CHAIN_POLICY_PARA(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwFlags", DWORD), + ("pvExtraPolicyPara", c_void_p), + ) + + +PCERT_CHAIN_POLICY_PARA = POINTER(CERT_CHAIN_POLICY_PARA) + + +class 
CERT_CHAIN_POLICY_STATUS(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("dwError", DWORD), + ("lChainIndex", LONG), + ("lElementIndex", LONG), + ("pvExtraPolicyStatus", c_void_p), + ) + + +PCERT_CHAIN_POLICY_STATUS = POINTER(CERT_CHAIN_POLICY_STATUS) + + +class CERT_CHAIN_ENGINE_CONFIG(Structure): + _fields_ = ( + ("cbSize", DWORD), + ("hRestrictedRoot", HCERTSTORE), + ("hRestrictedTrust", HCERTSTORE), + ("hRestrictedOther", HCERTSTORE), + ("cAdditionalStore", DWORD), + ("rghAdditionalStore", c_void_p), + ("dwFlags", DWORD), + ("dwUrlRetrievalTimeout", DWORD), + ("MaximumCachedCertificates", DWORD), + ("CycleDetectionModulus", DWORD), + ("hExclusiveRoot", HCERTSTORE), + ("hExclusiveTrustedPeople", HCERTSTORE), + ("dwExclusiveFlags", DWORD), + ) + + +PCERT_CHAIN_ENGINE_CONFIG = POINTER(CERT_CHAIN_ENGINE_CONFIG) +PHCERTCHAINENGINE = POINTER(HCERTCHAINENGINE) + +X509_ASN_ENCODING = 0x00000001 +PKCS_7_ASN_ENCODING = 0x00010000 +CERT_STORE_PROV_MEMORY = b"Memory" +CERT_STORE_ADD_USE_EXISTING = 2 +USAGE_MATCH_TYPE_OR = 1 +OID_PKIX_KP_SERVER_AUTH = c_char_p(b"1.3.6.1.5.5.7.3.1") +CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000 +CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000 +CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS = 0x00000007 +CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG = 0x00000008 +CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG = 0x00000010 +CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG = 0x00000040 +CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG = 0x00000020 +CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG = 0x00000080 +CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS = 0x00000F00 +CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG = 0x00008000 +CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG = 0x00004000 +AUTHTYPE_SERVER = 2 +CERT_CHAIN_POLICY_SSL = 4 +FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000 +FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200 + +# Flags to set for SSLContext.verify_mode=CERT_NONE +CERT_CHAIN_POLICY_VERIFY_MODE_NONE_FLAGS = ( + 
CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS + | CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG + | CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG + | CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG + | CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG + | CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG + | CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS + | CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG + | CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG +) + +wincrypt = WinDLL("crypt32.dll") +kernel32 = WinDLL("kernel32.dll") + + +def _handle_win_error(result: bool, _: Any, args: Any) -> Any: + if not result: + # Note, actually raises OSError after calling GetLastError and FormatMessage + raise WinError() + return args + + +CertCreateCertificateChainEngine = wincrypt.CertCreateCertificateChainEngine +CertCreateCertificateChainEngine.argtypes = ( + PCERT_CHAIN_ENGINE_CONFIG, + PHCERTCHAINENGINE, +) +CertCreateCertificateChainEngine.errcheck = _handle_win_error + +CertOpenStore = wincrypt.CertOpenStore +CertOpenStore.argtypes = (LPCSTR, DWORD, HCRYPTPROV_LEGACY, DWORD, c_void_p) +CertOpenStore.restype = HCERTSTORE +CertOpenStore.errcheck = _handle_win_error + +CertAddEncodedCertificateToStore = wincrypt.CertAddEncodedCertificateToStore +CertAddEncodedCertificateToStore.argtypes = ( + HCERTSTORE, + DWORD, + c_char_p, + DWORD, + DWORD, + PCCERT_CONTEXT, +) +CertAddEncodedCertificateToStore.restype = BOOL + +CertCreateCertificateContext = wincrypt.CertCreateCertificateContext +CertCreateCertificateContext.argtypes = (DWORD, c_char_p, DWORD) +CertCreateCertificateContext.restype = PCERT_CONTEXT +CertCreateCertificateContext.errcheck = _handle_win_error + +CertGetCertificateChain = wincrypt.CertGetCertificateChain +CertGetCertificateChain.argtypes = ( + HCERTCHAINENGINE, + PCERT_CONTEXT, + LPFILETIME, + HCERTSTORE, + PCERT_CHAIN_PARA, + DWORD, + c_void_p, + PCCERT_CHAIN_CONTEXT, +) +CertGetCertificateChain.restype = BOOL +CertGetCertificateChain.errcheck = _handle_win_error + 
+CertVerifyCertificateChainPolicy = wincrypt.CertVerifyCertificateChainPolicy +CertVerifyCertificateChainPolicy.argtypes = ( + c_ulong, + PCERT_CHAIN_CONTEXT, + PCERT_CHAIN_POLICY_PARA, + PCERT_CHAIN_POLICY_STATUS, +) +CertVerifyCertificateChainPolicy.restype = BOOL + +CertCloseStore = wincrypt.CertCloseStore +CertCloseStore.argtypes = (HCERTSTORE, DWORD) +CertCloseStore.restype = BOOL +CertCloseStore.errcheck = _handle_win_error + +CertFreeCertificateChain = wincrypt.CertFreeCertificateChain +CertFreeCertificateChain.argtypes = (PCERT_CHAIN_CONTEXT,) + +CertFreeCertificateContext = wincrypt.CertFreeCertificateContext +CertFreeCertificateContext.argtypes = (PCERT_CONTEXT,) + +CertFreeCertificateChainEngine = wincrypt.CertFreeCertificateChainEngine +CertFreeCertificateChainEngine.argtypes = (HCERTCHAINENGINE,) + +FormatMessageW = kernel32.FormatMessageW +FormatMessageW.argtypes = ( + DWORD, + LPCVOID, + DWORD, + DWORD, + LPWSTR, + DWORD, + c_void_p, +) +FormatMessageW.restype = DWORD + + +def _verify_peercerts_impl( + ssl_context: ssl.SSLContext, + cert_chain: list[bytes], + server_hostname: str | None = None, +) -> None: + """Verify the cert_chain from the server using Windows APIs.""" + pCertContext = None + hIntermediateCertStore = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None) + try: + # Add intermediate certs to an in-memory cert store + for cert_bytes in cert_chain[1:]: + CertAddEncodedCertificateToStore( + hIntermediateCertStore, + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, + cert_bytes, + len(cert_bytes), + CERT_STORE_ADD_USE_EXISTING, + None, + ) + + # Cert context for leaf cert + leaf_cert = cert_chain[0] + pCertContext = CertCreateCertificateContext( + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, leaf_cert, len(leaf_cert) + ) + + # Chain params to match certs for serverAuth extended usage + cert_enhkey_usage = CERT_ENHKEY_USAGE() + cert_enhkey_usage.cUsageIdentifier = 1 + cert_enhkey_usage.rgpszUsageIdentifier = (c_char_p * 1)(OID_PKIX_KP_SERVER_AUTH) 
+ cert_usage_match = CERT_USAGE_MATCH() + cert_usage_match.Usage = cert_enhkey_usage + chain_params = CERT_CHAIN_PARA() + chain_params.RequestedUsage = cert_usage_match + chain_params.cbSize = sizeof(chain_params) + pChainPara = pointer(chain_params) + + if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN: + chain_flags = CERT_CHAIN_REVOCATION_CHECK_CHAIN + elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF: + chain_flags = CERT_CHAIN_REVOCATION_CHECK_END_CERT + else: + chain_flags = 0 + + try: + # First attempt to verify using the default Windows system trust roots + # (default chain engine). + _get_and_verify_cert_chain( + ssl_context, + None, + hIntermediateCertStore, + pCertContext, + pChainPara, + server_hostname, + chain_flags=chain_flags, + ) + except ssl.SSLCertVerificationError: + # If that fails but custom CA certs have been added + # to the SSLContext using load_verify_locations, + # try verifying using a custom chain engine + # that trusts the custom CA certs. + custom_ca_certs: list[bytes] | None = ssl_context.get_ca_certs( + binary_form=True + ) + if custom_ca_certs: + _verify_using_custom_ca_certs( + ssl_context, + custom_ca_certs, + hIntermediateCertStore, + pCertContext, + pChainPara, + server_hostname, + chain_flags=chain_flags, + ) + else: + raise + finally: + CertCloseStore(hIntermediateCertStore, 0) + if pCertContext: + CertFreeCertificateContext(pCertContext) + + +def _get_and_verify_cert_chain( + ssl_context: ssl.SSLContext, + hChainEngine: HCERTCHAINENGINE | None, + hIntermediateCertStore: HCERTSTORE, + pPeerCertContext: c_void_p, + pChainPara: PCERT_CHAIN_PARA, # type: ignore[valid-type] + server_hostname: str | None, + chain_flags: int, +) -> None: + ppChainContext = None + try: + # Get cert chain + ppChainContext = pointer(PCERT_CHAIN_CONTEXT()) + CertGetCertificateChain( + hChainEngine, # chain engine + pPeerCertContext, # leaf cert context + None, # current system time + hIntermediateCertStore, # additional in-memory cert 
store + pChainPara, # chain-building parameters + chain_flags, + None, # reserved + ppChainContext, # the resulting chain context + ) + pChainContext = ppChainContext.contents + + # Verify cert chain + ssl_extra_cert_chain_policy_para = SSL_EXTRA_CERT_CHAIN_POLICY_PARA() + ssl_extra_cert_chain_policy_para.cbSize = sizeof( + ssl_extra_cert_chain_policy_para + ) + ssl_extra_cert_chain_policy_para.dwAuthType = AUTHTYPE_SERVER + ssl_extra_cert_chain_policy_para.fdwChecks = 0 + if server_hostname: + ssl_extra_cert_chain_policy_para.pwszServerName = c_wchar_p(server_hostname) + + chain_policy = CERT_CHAIN_POLICY_PARA() + chain_policy.pvExtraPolicyPara = cast( + pointer(ssl_extra_cert_chain_policy_para), c_void_p + ) + if ssl_context.verify_mode == ssl.CERT_NONE: + chain_policy.dwFlags |= CERT_CHAIN_POLICY_VERIFY_MODE_NONE_FLAGS + if not ssl_context.check_hostname: + chain_policy.dwFlags |= CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG + chain_policy.cbSize = sizeof(chain_policy) + + pPolicyPara = pointer(chain_policy) + policy_status = CERT_CHAIN_POLICY_STATUS() + policy_status.cbSize = sizeof(policy_status) + pPolicyStatus = pointer(policy_status) + CertVerifyCertificateChainPolicy( + CERT_CHAIN_POLICY_SSL, + pChainContext, + pPolicyPara, + pPolicyStatus, + ) + + # Check status + error_code = policy_status.dwError + if error_code: + # Try getting a human readable message for an error code. + error_message_buf = create_unicode_buffer(1024) + error_message_chars = FormatMessageW( + FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + None, + error_code, + 0, + error_message_buf, + sizeof(error_message_buf), + None, + ) + + # See if we received a message for the error, + # otherwise we use a generic error with the + # error code and hope that it's search-able. 
+ if error_message_chars <= 0: + error_message = f"Certificate chain policy error {error_code:#x} [{policy_status.lElementIndex}]" + else: + error_message = error_message_buf.value.strip() + + err = ssl.SSLCertVerificationError(error_message) + err.verify_message = error_message + err.verify_code = error_code + raise err from None + finally: + if ppChainContext: + CertFreeCertificateChain(ppChainContext.contents) + + +def _verify_using_custom_ca_certs( + ssl_context: ssl.SSLContext, + custom_ca_certs: list[bytes], + hIntermediateCertStore: HCERTSTORE, + pPeerCertContext: c_void_p, + pChainPara: PCERT_CHAIN_PARA, # type: ignore[valid-type] + server_hostname: str | None, + chain_flags: int, +) -> None: + hChainEngine = None + hRootCertStore = CertOpenStore(CERT_STORE_PROV_MEMORY, 0, None, 0, None) + try: + # Add custom CA certs to an in-memory cert store + for cert_bytes in custom_ca_certs: + CertAddEncodedCertificateToStore( + hRootCertStore, + X509_ASN_ENCODING | PKCS_7_ASN_ENCODING, + cert_bytes, + len(cert_bytes), + CERT_STORE_ADD_USE_EXISTING, + None, + ) + + # Create a custom cert chain engine which exclusively trusts + # certs from our hRootCertStore + cert_chain_engine_config = CERT_CHAIN_ENGINE_CONFIG() + cert_chain_engine_config.cbSize = sizeof(cert_chain_engine_config) + cert_chain_engine_config.hExclusiveRoot = hRootCertStore + pConfig = pointer(cert_chain_engine_config) + phChainEngine = pointer(HCERTCHAINENGINE()) + CertCreateCertificateChainEngine( + pConfig, + phChainEngine, + ) + hChainEngine = phChainEngine.contents + + # Get and verify a cert chain using the custom chain engine + _get_and_verify_cert_chain( + ssl_context, + hChainEngine, + hIntermediateCertStore, + pPeerCertContext, + pChainPara, + server_hostname, + chain_flags, + ) + finally: + if hChainEngine: + CertFreeCertificateChainEngine(hChainEngine) + CertCloseStore(hRootCertStore, 0) + + +@contextlib.contextmanager +def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]: + 
check_hostname = ctx.check_hostname + verify_mode = ctx.verify_mode + ctx.check_hostname = False + _set_ssl_context_verify_mode(ctx, ssl.CERT_NONE) + try: + yield + finally: + ctx.check_hostname = check_hostname + _set_ssl_context_verify_mode(ctx, verify_mode) diff --git a/src/pip/_vendor/truststore/py.typed b/src/pip/_vendor/truststore/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 08e1acb016c..56cf7551727 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -20,4 +20,5 @@ setuptools==68.0.0 six==1.16.0 tenacity==8.2.2 tomli==2.0.1 +truststore==0.7.0 webencodings==0.5.1 diff --git a/tests/functional/test_truststore.py b/tests/functional/test_truststore.py index 33153d0fbf9..cc90343b52d 100644 --- a/tests/functional/test_truststore.py +++ b/tests/functional/test_truststore.py @@ -27,20 +27,6 @@ def test_truststore_error_on_old_python(pip: PipRunner) -> None: assert "The truststore feature is only available for Python 3.10+" in result.stderr -@pytest.mark.skipif(sys.version_info < (3, 10), reason="3.10+ required for truststore") -def test_truststore_error_without_preinstalled(pip: PipRunner) -> None: - result = pip( - "install", - "--no-index", - "does-not-matter", - expect_error=True, - ) - assert ( - "To use the truststore feature, 'truststore' must be installed into " - "pip's current environment." 
- ) in result.stderr - - @pytest.mark.skipif(sys.version_info < (3, 10), reason="3.10+ required for truststore") @pytest.mark.network @pytest.mark.parametrize( @@ -56,6 +42,5 @@ def test_trustore_can_install( pip: PipRunner, package: str, ) -> None: - script.pip("install", "truststore") result = pip("install", package) assert "Successfully installed" in result.stdout From 9a65b887a44555ba6b1ad19b8b81c834472ce3b8 Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Mon, 26 Jun 2023 22:49:06 -0500 Subject: [PATCH 694/730] Use absolute instead of relative imports for vendored modules Co-authored-by: Tzu-ping Chung --- src/pip/_internal/cli/req_command.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 80b35a80aae..a2395d68c54 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -58,7 +58,7 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]: return None try: - from ..._vendor import truststore + from pip._vendor import truststore except ImportError: raise CommandError( "To use the truststore feature, 'truststore' must be installed into " From 44857c6e82c3219b39d55c61aded270a480b4f5d Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Mon, 26 Jun 2023 22:52:57 -0500 Subject: [PATCH 695/730] Update error message to forward platform-specific error --- src/pip/_internal/cli/req_command.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index a2395d68c54..080739aa979 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -59,10 +59,9 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]: try: from pip._vendor import truststore - except ImportError: + except ImportError as e: raise CommandError( - "To use the truststore feature, 'truststore' must be installed 
into " - "pip's current environment." + f"The truststore feature is unavailable: {e}" ) return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT) From 63f19b5eade3891d98fbc419e2a0b686e11752b4 Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Sat, 19 Aug 2023 11:49:32 -0500 Subject: [PATCH 696/730] Explicitly require Python 3.10+ for vendoring task --- noxfile.py | 6 ++++++ src/pip/_internal/cli/req_command.py | 4 +--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/noxfile.py b/noxfile.py index 041d9039974..a3e7ceab4cc 100644 --- a/noxfile.py +++ b/noxfile.py @@ -184,6 +184,12 @@ def lint(session: nox.Session) -> None: # git reset --hard origin/main @nox.session def vendoring(session: nox.Session) -> None: + # Ensure that the session Python is running 3.10+ + # so that truststore can be installed correctly. + session.run( + "python", "-c", "import sys; sys.exit(1 if sys.version_info < (3, 10) else 0)" + ) + session.install("vendoring~=1.2.0") parser = argparse.ArgumentParser(prog="nox -s vendoring") diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 080739aa979..7a53d510586 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -60,9 +60,7 @@ def _create_truststore_ssl_context() -> Optional["SSLContext"]: try: from pip._vendor import truststore except ImportError as e: - raise CommandError( - f"The truststore feature is unavailable: {e}" - ) + raise CommandError(f"The truststore feature is unavailable: {e}") return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT) From fca773ccde9a95df9ff8c9153e3497fc13571912 Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Tue, 22 Aug 2023 20:43:54 -0500 Subject: [PATCH 697/730] Allow truststore to not import on Python 3.9 and earlier --- src/pip/_internal/commands/debug.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/commands/debug.py 
b/src/pip/_internal/commands/debug.py index 1b1fd3ea5cc..ab8280db48d 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -46,22 +46,29 @@ def create_vendor_txt_map() -> Dict[str, str]: return dict(line.split("==", 1) for line in lines) -def get_module_from_module_name(module_name: str) -> ModuleType: +def get_module_from_module_name(module_name: str) -> Optional[ModuleType]: # Module name can be uppercase in vendor.txt for some reason... module_name = module_name.lower().replace("-", "_") # PATCH: setuptools is actually only pkg_resources. if module_name == "setuptools": module_name = "pkg_resources" - __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) - return getattr(pip._vendor, module_name) + try: + __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0) + return getattr(pip._vendor, module_name) + except ImportError: + # We allow 'truststore' to fail to import due + # to being unavailable on Python 3.9 and earlier. + if module_name == "truststore" and sys.version_info < (3, 10): + return None + raise def get_vendor_version_from_module(module_name: str) -> Optional[str]: module = get_module_from_module_name(module_name) version = getattr(module, "__version__", None) - if not version: + if module and not version: # Try to find version in debundled module info. 
assert module.__file__ is not None env = get_environment([os.path.dirname(module.__file__)]) From bff1e6a67be0545dd7e2ac1cb5189463370c3b55 Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Tue, 12 Sep 2023 15:55:51 -0500 Subject: [PATCH 698/730] Vendor truststore 0.8.0 --- news/truststore.vendor.rst | 2 +- src/pip/_vendor/truststore/__init__.py | 2 +- src/pip/_vendor/truststore/_api.py | 38 ++++++++++---------- src/pip/_vendor/truststore/_ssl_constants.py | 19 ++++++++++ src/pip/_vendor/vendor.txt | 2 +- 5 files changed, 41 insertions(+), 22 deletions(-) diff --git a/news/truststore.vendor.rst b/news/truststore.vendor.rst index ee974728d92..63c71d72d2f 100644 --- a/news/truststore.vendor.rst +++ b/news/truststore.vendor.rst @@ -1 +1 @@ -Add truststore 0.7.0 +Add truststore 0.8.0 diff --git a/src/pip/_vendor/truststore/__init__.py b/src/pip/_vendor/truststore/__init__.py index 0f3a4d9e1e1..59930f455b0 100644 --- a/src/pip/_vendor/truststore/__init__.py +++ b/src/pip/_vendor/truststore/__init__.py @@ -10,4 +10,4 @@ del _api, _sys # type: ignore[name-defined] # noqa: F821 __all__ = ["SSLContext", "inject_into_ssl", "extract_from_ssl"] -__version__ = "0.7.0" +__version__ = "0.8.0" diff --git a/src/pip/_vendor/truststore/_api.py b/src/pip/_vendor/truststore/_api.py index 2647042418f..829aff72672 100644 --- a/src/pip/_vendor/truststore/_api.py +++ b/src/pip/_vendor/truststore/_api.py @@ -1,8 +1,4 @@ -import array -import ctypes -import mmap import os -import pickle import platform import socket import ssl @@ -10,7 +6,12 @@ import _ssl # type: ignore[import] -from ._ssl_constants import _original_SSLContext, _original_super_SSLContext +from ._ssl_constants import ( + _original_SSLContext, + _original_super_SSLContext, + _truststore_SSLContext_dunder_class, + _truststore_SSLContext_super_class, +) if platform.system() == "Windows": from ._windows import _configure_context, _verify_peercerts_impl @@ -19,21 +20,13 @@ else: from ._openssl import _configure_context, 
_verify_peercerts_impl +if typing.TYPE_CHECKING: + from pip._vendor.typing_extensions import Buffer + # From typeshed/stdlib/ssl.pyi _StrOrBytesPath: typing.TypeAlias = str | bytes | os.PathLike[str] | os.PathLike[bytes] _PasswordType: typing.TypeAlias = str | bytes | typing.Callable[[], str | bytes] -# From typeshed/stdlib/_typeshed/__init__.py -_ReadableBuffer: typing.TypeAlias = typing.Union[ - bytes, - memoryview, - bytearray, - "array.array[typing.Any]", - mmap.mmap, - "ctypes._CData", - pickle.PickleBuffer, -] - def inject_into_ssl() -> None: """Injects the :class:`truststore.SSLContext` into the ``ssl`` @@ -61,9 +54,16 @@ def extract_from_ssl() -> None: pass -class SSLContext(ssl.SSLContext): +class SSLContext(_truststore_SSLContext_super_class): # type: ignore[misc] """SSLContext API that uses system certificates on all platforms""" + @property # type: ignore[misc] + def __class__(self) -> type: + # Dirty hack to get around isinstance() checks + # for ssl.SSLContext instances in aiohttp/trustme + # when using non-CPython implementations. 
+ return _truststore_SSLContext_dunder_class or SSLContext + def __init__(self, protocol: int = None) -> None: # type: ignore[assignment] self._ctx = _original_SSLContext(protocol) @@ -129,7 +129,7 @@ def load_verify_locations( self, cafile: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None, capath: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None, - cadata: str | _ReadableBuffer | None = None, + cadata: typing.Union[str, "Buffer", None] = None, ) -> None: return self._ctx.load_verify_locations( cafile=cafile, capath=capath, cadata=cadata @@ -252,7 +252,7 @@ def protocol(self) -> ssl._SSLMethod: return self._ctx.protocol @property - def security_level(self) -> int: # type: ignore[override] + def security_level(self) -> int: return self._ctx.security_level @property diff --git a/src/pip/_vendor/truststore/_ssl_constants.py b/src/pip/_vendor/truststore/_ssl_constants.py index be60f8301ec..b1ee7a3cb13 100644 --- a/src/pip/_vendor/truststore/_ssl_constants.py +++ b/src/pip/_vendor/truststore/_ssl_constants.py @@ -1,10 +1,29 @@ import ssl +import sys +import typing # Hold on to the original class so we can create it consistently # even if we inject our own SSLContext into the ssl module. _original_SSLContext = ssl.SSLContext _original_super_SSLContext = super(_original_SSLContext, _original_SSLContext) +# CPython is known to be good, but non-CPython implementations +# may implement SSLContext differently so to be safe we don't +# subclass the SSLContext. + +# This is returned by truststore.SSLContext.__class__() +_truststore_SSLContext_dunder_class: typing.Optional[type] + +# This value is the superclass of truststore.SSLContext. 
+_truststore_SSLContext_super_class: type + +if sys.implementation.name == "cpython": + _truststore_SSLContext_super_class = _original_SSLContext + _truststore_SSLContext_dunder_class = None +else: + _truststore_SSLContext_super_class = object + _truststore_SSLContext_dunder_class = _original_SSLContext + def _set_ssl_context_verify_mode( ssl_context: ssl.SSLContext, verify_mode: ssl.VerifyMode diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 56cf7551727..ade8512e25a 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -20,5 +20,5 @@ setuptools==68.0.0 six==1.16.0 tenacity==8.2.2 tomli==2.0.1 -truststore==0.7.0 +truststore==0.8.0 webencodings==0.5.1 From 90c4a4230d0dff833e5e087cd85cebde1c134233 Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 13 Sep 2023 12:23:59 +0800 Subject: [PATCH 699/730] Manually build package and revert xfail marker --- tests/functional/test_install_extras.py | 26 ++++++++----------------- tests/requirements-common_wheels.txt | 6 +----- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index 813c95bfa34..8ccbcf1998d 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -163,12 +163,10 @@ def test_install_fails_if_extra_at_end( "Hop_hOp-hoP", "hop-hop-hop", marks=pytest.mark.xfail( - "sys.version_info < (3, 8)", reason=( "matching a normalized extra request against an" "unnormalized extra in metadata requires PEP 685 support " - "in either packaging or the build tool. Setuptools " - "implements this in 68.2, which requires 3.8+" + "in packaging (see pypa/pip#11445)." 
), ), ), @@ -180,26 +178,18 @@ def test_install_special_extra( specified_extra: str, requested_extra: str, ) -> None: - # Check that uppercase letters and '-' are dealt with - # make a dummy project - pkga_path = script.scratch_path / "pkga" - pkga_path.mkdir() - pkga_path.joinpath("setup.py").write_text( - textwrap.dedent( - f""" - from setuptools import setup - setup(name='pkga', - version='0.1', - extras_require={{'{specified_extra}': ['missing_pkg']}}, - ) - """ - ) + """Check extra normalization is implemented according to specification.""" + pkga_path = create_basic_wheel_for_package( + script, + name="pkga", + version="0.1", + extras={specified_extra: ["missing_pkg"]}, ) result = script.pip( "install", "--no-index", - f"{pkga_path}[{requested_extra}]", + f"pkga[{requested_extra}] @ {pkga_path.as_uri()}", expect_error=True, ) assert ( diff --git a/tests/requirements-common_wheels.txt b/tests/requirements-common_wheels.txt index 8963e333757..6403ed73898 100644 --- a/tests/requirements-common_wheels.txt +++ b/tests/requirements-common_wheels.txt @@ -5,11 +5,7 @@ # 4. Replacing the `setuptools` entry below with a `file:///...` URL # (Adjust artifact directory used based on preference and operating system) -# Implements new extra normalization. -setuptools >= 68.2 ; python_version >= '3.8' -setuptools >= 40.8.0, != 60.6.0 ; python_version < '3.8' - +setuptools >= 40.8.0, != 60.6.0 wheel - # As required by pytest-cov. coverage >= 4.4 From 7127fc96f4dfd7ab9b873664b57318c9fc693e3a Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 13 Sep 2023 13:27:11 +0800 Subject: [PATCH 700/730] Prevent eager extra normalization This removes extra normalization when metadata is loaded into the data structures, so we can obtain the raw values later in the process during resolution. The change in match_markers is needed because this is relied on by the legacy resolver. 
Since we removed eager normalization, we need to do that when the extras are used instead to maintain compatibility. --- src/pip/_internal/metadata/importlib/_dists.py | 7 ++----- src/pip/_internal/req/req_install.py | 5 +++-- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/pip/_internal/metadata/importlib/_dists.py b/src/pip/_internal/metadata/importlib/_dists.py index 65c043c87ef..c43ef8d01f9 100644 --- a/src/pip/_internal/metadata/importlib/_dists.py +++ b/src/pip/_internal/metadata/importlib/_dists.py @@ -27,7 +27,6 @@ Wheel, ) from pip._internal.utils.misc import normalize_path -from pip._internal.utils.packaging import safe_extra from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file @@ -208,12 +207,10 @@ def _metadata_impl(self) -> email.message.Message: return cast(email.message.Message, self._dist.metadata) def iter_provided_extras(self) -> Iterable[str]: - return ( - safe_extra(extra) for extra in self.metadata.get_all("Provides-Extra", []) - ) + return (extra for extra in self.metadata.get_all("Provides-Extra", [])) def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: - contexts: Sequence[Dict[str, str]] = [{"extra": safe_extra(e)} for e in extras] + contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras] for req_string in self.metadata.get_all("Requires-Dist", []): req = Requirement(req_string) if not req.marker: diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 8110114ca14..84f337d6e5b 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -128,7 +128,7 @@ def __init__( if extras: self.extras = extras elif req: - self.extras = {safe_extra(extra) for extra in req.extras} + self.extras = req.extras else: self.extras = set() if markers is None and req: @@ -272,7 +272,8 @@ def match_markers(self, extras_requested: Optional[Iterable[str]] 
= None) -> boo extras_requested = ("",) if self.markers is not None: return any( - self.markers.evaluate({"extra": extra}) for extra in extras_requested + self.markers.evaluate({"extra": safe_extra(extra)}) + for extra in extras_requested ) else: return True From 9ba2bb90fb57e4ee5f624ecd39eade207863c21a Mon Sep 17 00:00:00 2001 From: Tzu-ping Chung Date: Wed, 13 Sep 2023 16:35:59 +0800 Subject: [PATCH 701/730] Straighten up extra comps across metadata backends The importlib.metadata and pkg_resources backends unfortunately normalize extras differently, and we don't really want to continue using the latter's logic (being partially lossy while still not compliant to standards), so we add a new abstraction for the purpose. --- src/pip/_internal/metadata/__init__.py | 3 +- src/pip/_internal/metadata/base.py | 32 +++++++++++++------ .../_internal/metadata/importlib/__init__.py | 4 ++- .../_internal/metadata/importlib/_dists.py | 8 ++++- src/pip/_internal/metadata/pkg_resources.py | 10 +++++- src/pip/_internal/req/req_install.py | 6 +++- .../resolution/resolvelib/candidates.py | 23 ++++++++----- 7 files changed, 64 insertions(+), 22 deletions(-) diff --git a/src/pip/_internal/metadata/__init__.py b/src/pip/_internal/metadata/__init__.py index 9f73ca7105f..aa232b6cabd 100644 --- a/src/pip/_internal/metadata/__init__.py +++ b/src/pip/_internal/metadata/__init__.py @@ -9,7 +9,7 @@ from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel if TYPE_CHECKING: - from typing import Protocol + from typing import Literal, Protocol else: Protocol = object @@ -50,6 +50,7 @@ def _should_use_importlib_metadata() -> bool: class Backend(Protocol): + NAME: 'Literal["importlib", "pkg_resources"]' Distribution: Type[BaseDistribution] Environment: Type[BaseEnvironment] diff --git a/src/pip/_internal/metadata/base.py b/src/pip/_internal/metadata/base.py index cafb79fb3dc..92491244108 100644 --- a/src/pip/_internal/metadata/base.py +++ 
b/src/pip/_internal/metadata/base.py @@ -24,7 +24,7 @@ from pip._vendor.packaging.requirements import Requirement from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet -from pip._vendor.packaging.utils import NormalizedName +from pip._vendor.packaging.utils import NormalizedName, canonicalize_name from pip._vendor.packaging.version import LegacyVersion, Version from pip._internal.exceptions import NoneMetadataError @@ -37,7 +37,6 @@ from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here. from pip._internal.utils.egg_link import egg_link_path_from_sys_path from pip._internal.utils.misc import is_local, normalize_path -from pip._internal.utils.packaging import safe_extra from pip._internal.utils.urls import url_to_path from ._json import msg_to_json @@ -460,6 +459,19 @@ def iter_provided_extras(self) -> Iterable[str]: For modern .dist-info distributions, this is the collection of "Provides-Extra:" entries in distribution metadata. + + The return value of this function is not particularly useful other than + display purposes due to backward compatibility issues and the extra + names being poorly normalized prior to PEP 685. If you want to perform + logic operations on extras, use :func:`is_extra_provided` instead. + """ + raise NotImplementedError() + + def is_extra_provided(self, extra: str) -> bool: + """Check whether an extra is provided by this distribution. + + This is needed mostly for compatibility issues with pkg_resources not + following the extra normalization rules defined in PEP 685. 
""" raise NotImplementedError() @@ -537,10 +549,11 @@ def _iter_egg_info_extras(self) -> Iterable[str]: """Get extras from the egg-info directory.""" known_extras = {""} for entry in self._iter_requires_txt_entries(): - if entry.extra in known_extras: + extra = canonicalize_name(entry.extra) + if extra in known_extras: continue - known_extras.add(entry.extra) - yield entry.extra + known_extras.add(extra) + yield extra def _iter_egg_info_dependencies(self) -> Iterable[str]: """Get distribution dependencies from the egg-info directory. @@ -556,10 +569,11 @@ def _iter_egg_info_dependencies(self) -> Iterable[str]: all currently available PEP 517 backends, although not standardized. """ for entry in self._iter_requires_txt_entries(): - if entry.extra and entry.marker: - marker = f'({entry.marker}) and extra == "{safe_extra(entry.extra)}"' - elif entry.extra: - marker = f'extra == "{safe_extra(entry.extra)}"' + extra = canonicalize_name(entry.extra) + if extra and entry.marker: + marker = f'({entry.marker}) and extra == "{extra}"' + elif extra: + marker = f'extra == "{extra}"' elif entry.marker: marker = entry.marker else: diff --git a/src/pip/_internal/metadata/importlib/__init__.py b/src/pip/_internal/metadata/importlib/__init__.py index 5e7af9fe521..a779138db10 100644 --- a/src/pip/_internal/metadata/importlib/__init__.py +++ b/src/pip/_internal/metadata/importlib/__init__.py @@ -1,4 +1,6 @@ from ._dists import Distribution from ._envs import Environment -__all__ = ["Distribution", "Environment"] +__all__ = ["NAME", "Distribution", "Environment"] + +NAME = "importlib" diff --git a/src/pip/_internal/metadata/importlib/_dists.py b/src/pip/_internal/metadata/importlib/_dists.py index c43ef8d01f9..26370facf28 100644 --- a/src/pip/_internal/metadata/importlib/_dists.py +++ b/src/pip/_internal/metadata/importlib/_dists.py @@ -207,7 +207,13 @@ def _metadata_impl(self) -> email.message.Message: return cast(email.message.Message, self._dist.metadata) def 
iter_provided_extras(self) -> Iterable[str]: - return (extra for extra in self.metadata.get_all("Provides-Extra", [])) + return self.metadata.get_all("Provides-Extra", []) + + def is_extra_provided(self, extra: str) -> bool: + return any( + canonicalize_name(provided_extra) == canonicalize_name(extra) + for provided_extra in self.metadata.get_all("Provides-Extra", []) + ) def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras] diff --git a/src/pip/_internal/metadata/pkg_resources.py b/src/pip/_internal/metadata/pkg_resources.py index f330ef12a2c..bb11e5bd8a5 100644 --- a/src/pip/_internal/metadata/pkg_resources.py +++ b/src/pip/_internal/metadata/pkg_resources.py @@ -24,8 +24,12 @@ Wheel, ) +__all__ = ["NAME", "Distribution", "Environment"] + logger = logging.getLogger(__name__) +NAME = "pkg_resources" + class EntryPoint(NamedTuple): name: str @@ -212,12 +216,16 @@ def _metadata_impl(self) -> email.message.Message: def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]: if extras: # pkg_resources raises on invalid extras, so we sanitize. 
- extras = frozenset(extras).intersection(self._dist.extras) + extras = frozenset(pkg_resources.safe_extra(e) for e in extras) + extras = extras.intersection(self._dist.extras) return self._dist.requires(extras) def iter_provided_extras(self) -> Iterable[str]: return self._dist.extras + def is_extra_provided(self, extra: str) -> bool: + return pkg_resources.safe_extra(extra) in self._dist.extras + class Environment(BaseEnvironment): def __init__(self, ws: pkg_resources.WorkingSet) -> None: diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 84f337d6e5b..f8957e5d994 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -272,7 +272,11 @@ def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> boo extras_requested = ("",) if self.markers is not None: return any( - self.markers.evaluate({"extra": safe_extra(extra)}) + self.markers.evaluate({"extra": extra}) + # TODO: Remove these two variants when packaging is upgraded to + # support the marker comparison logic specified in PEP 685. + or self.markers.evaluate({"extra": safe_extra(extra)}) + or self.markers.evaluate({"extra": canonicalize_name(extra)}) for extra in extras_requested ) else: diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index 13204b9f1a8..67737a5092f 100644 --- a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -435,7 +435,8 @@ def __init__( # since PEP 685 has not been implemented for marker-matching, and using # the non-normalized extra for lookup ensures the user can select a # non-normalized extra in a package with its non-normalized form. - # TODO: Remove this when packaging is upgraded to support PEP 685. + # TODO: Remove this attribute when packaging is upgraded to support the + # marker comparison logic specified in PEP 685. 
self._unnormalized_extras = extras.difference(self.extras) def __str__(self) -> str: @@ -490,18 +491,20 @@ def source_link(self) -> Optional[Link]: def _warn_invalid_extras( self, requested: FrozenSet[str], - provided: FrozenSet[str], + valid: FrozenSet[str], ) -> None: """Emit warnings for invalid extras being requested. This emits a warning for each requested extra that is not in the candidate's ``Provides-Extra`` list. """ - invalid_extras_to_warn = requested.difference( - provided, + invalid_extras_to_warn = frozenset( + extra + for extra in requested + if extra not in valid # If an extra is requested in an unnormalized form, skip warning # about the normalized form being missing. - (canonicalize_name(e) for e in self._unnormalized_extras), + and extra in self.extras ) if not invalid_extras_to_warn: return @@ -521,9 +524,13 @@ def _calculate_valid_requested_extras(self) -> FrozenSet[str]: cause a warning to be logged here. """ requested_extras = self.extras.union(self._unnormalized_extras) - provided_extras = frozenset(self.base.dist.iter_provided_extras()) - self._warn_invalid_extras(requested_extras, provided_extras) - return requested_extras.intersection(provided_extras) + valid_extras = frozenset( + extra + for extra in requested_extras + if self.base.dist.is_extra_provided(extra) + ) + self._warn_invalid_extras(requested_extras, valid_extras) + return valid_extras def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]: factory = self.base._factory From ce949466c96086a36aefb8ed1106113fca731fa6 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 13 Sep 2023 15:14:07 +0200 Subject: [PATCH 702/730] fixed argument name in docstring --- src/pip/_internal/resolution/resolvelib/candidates.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pip/_internal/resolution/resolvelib/candidates.py b/src/pip/_internal/resolution/resolvelib/candidates.py index d658be37284..bf89a515da4 100644 --- 
a/src/pip/_internal/resolution/resolvelib/candidates.py +++ b/src/pip/_internal/resolution/resolvelib/candidates.py @@ -431,7 +431,7 @@ def __init__( comes_from: Optional[InstallRequirement] = None, ) -> None: """ - :param ireq: the InstallRequirement that led to this candidate if it + :param comes_from: the InstallRequirement that led to this candidate if it differs from the base's InstallRequirement. This will often be the case in the sense that this candidate's requirement has the extras while the base's does not. Unlike the InstallRequirement backed From 0f543e3c7e05d40e1ecf684cade068fed1c200f9 Mon Sep 17 00:00:00 2001 From: Sander Van Balen Date: Wed, 13 Sep 2023 16:48:16 +0200 Subject: [PATCH 703/730] made assertions more robust --- tests/functional/test_new_resolver.py | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/tests/functional/test_new_resolver.py b/tests/functional/test_new_resolver.py index 77dede2fc5a..b5945edf89b 100644 --- a/tests/functional/test_new_resolver.py +++ b/tests/functional/test_new_resolver.py @@ -6,6 +6,7 @@ import pytest +from tests.conftest import ScriptFactory from tests.lib import ( PipTestEnvironment, create_basic_sdist_for_package, @@ -13,6 +14,7 @@ create_test_package_with_setup, ) from tests.lib.direct_url import get_created_direct_url +from tests.lib.venv import VirtualEnvironment from tests.lib.wheel import make_wheel if TYPE_CHECKING: @@ -2313,7 +2315,11 @@ def test_new_resolver_dont_backtrack_on_extra_if_base_constrained_in_requirement @pytest.mark.parametrize("swap_order", (True, False)) @pytest.mark.parametrize("two_extras", (True, False)) def test_new_resolver_dont_backtrack_on_conflicting_constraints_on_extras( - script: PipTestEnvironment, swap_order: bool, two_extras: bool + tmpdir: pathlib.Path, + virtualenv: VirtualEnvironment, + script_factory: ScriptFactory, + swap_order: bool, + two_extras: bool, ) -> None: """ Verify that conflicting constraints on the same package with 
different @@ -2323,6 +2329,11 @@ def test_new_resolver_dont_backtrack_on_conflicting_constraints_on_extras( :param swap_order: swap the order the install specifiers appear in :param two_extras: also add an extra for the second specifier """ + script: PipTestEnvironment = script_factory( + tmpdir.joinpath("workspace"), + virtualenv, + {**os.environ, "PIP_RESOLVER_DEBUG": "1"}, + ) create_basic_wheel_for_package(script, "dep", "1.0") create_basic_wheel_for_package( script, "pkg", "1.0", extras={"ext1": ["dep"], "ext2": ["dep"]} @@ -2348,9 +2359,13 @@ def test_new_resolver_dont_backtrack_on_conflicting_constraints_on_extras( assert ( "pkg-2.0" not in result.stdout or "pkg-1.0" not in result.stdout ), "Should only try one of 1.0, 2.0 depending on order" + assert "Reporter.starting()" in result.stdout, ( + "This should never fail unless the debug reporting format has changed," + " in which case the other assertions in this test need to be reviewed." + ) assert ( - "looking at multiple versions" not in result.stdout - ), "Should not have to look at multiple versions to conclude conflict" + "Reporter.rejecting_candidate" not in result.stdout + ), "Should be able to conclude conflict before even selecting a candidate" assert ( "conflict is caused by" in result.stdout ), "Resolver should be trivially able to find conflict cause" From 3b4738cf9aba08b9fe6dc9a2ac667bff2a2585a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 17 Sep 2023 20:02:47 +0200 Subject: [PATCH 704/730] Fix git version parsing issue --- news/12280.bugfix.rst | 1 + src/pip/_internal/vcs/git.py | 2 +- tests/unit/test_vcs.py | 15 +++++++++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 news/12280.bugfix.rst diff --git a/news/12280.bugfix.rst b/news/12280.bugfix.rst new file mode 100644 index 00000000000..77de283d398 --- /dev/null +++ b/news/12280.bugfix.rst @@ -0,0 +1 @@ +Fix crash when the git version number contains something else than digits and 
dots. diff --git a/src/pip/_internal/vcs/git.py b/src/pip/_internal/vcs/git.py index 8d1d4993767..8c242cf8956 100644 --- a/src/pip/_internal/vcs/git.py +++ b/src/pip/_internal/vcs/git.py @@ -101,7 +101,7 @@ def get_git_version(self) -> Tuple[int, ...]: if not match: logger.warning("Can't parse git version: %s", version) return () - return tuple(int(c) for c in match.groups()) + return (int(match.group(1)), int(match.group(2))) @classmethod def get_current_branch(cls, location: str) -> Optional[str]: diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py index 3ecc69abfcb..fb6c3ea31ce 100644 --- a/tests/unit/test_vcs.py +++ b/tests/unit/test_vcs.py @@ -598,6 +598,21 @@ def test_get_git_version() -> None: assert git_version >= (1, 0, 0) +@pytest.mark.parametrize( + ("version", "expected"), + [ + ("git version 2.17", (2, 17)), + ("git version 2.18.1", (2, 18)), + ("git version 2.35.GIT", (2, 35)), # gh:12280 + ("oh my git version 2.37.GIT", ()), # invalid version + ("git version 2.GIT", ()), # invalid version + ], +) +def test_get_git_version_parser(version: str, expected: Tuple[int, int]) -> None: + with mock.patch("pip._internal.vcs.git.Git.run_command", return_value=version): + assert Git().get_git_version() == expected + + @pytest.mark.parametrize( "use_interactive,is_atty,expected", [ From 184e4826269da5caa73c8f4114fb00cd1678c075 Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Wed, 20 Sep 2023 18:48:52 -0400 Subject: [PATCH 705/730] Clarify --prefer-binary --- src/pip/_internal/cli/cmdoptions.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 64bc59bbd66..84b40a8fc52 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -670,7 +670,10 @@ def prefer_binary() -> Option: dest="prefer_binary", action="store_true", default=False, - help="Prefer older binary packages over newer source packages.", + help=( + "Prefer binary 
packages over source packages, even if the " + "source packages are newer." + ), ) From 677c3eed9fd6c150d9ea3781442da781f1d65f2e Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Wed, 20 Sep 2023 18:53:33 -0400 Subject: [PATCH 706/730] Add news --- news/12122.doc.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12122.doc.rst diff --git a/news/12122.doc.rst b/news/12122.doc.rst new file mode 100644 index 00000000000..49a3308a25c --- /dev/null +++ b/news/12122.doc.rst @@ -0,0 +1 @@ +Clarify --prefer-binary in CLI and docs From 3d6b0be901d5e8296b3d1303c70cbc38600f9cd6 Mon Sep 17 00:00:00 2001 From: Lukas Geiger Date: Thu, 21 Sep 2023 01:00:34 +0100 Subject: [PATCH 707/730] Remove outdated noqa comments --- news/80291DF4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst | 0 pyproject.toml | 1 + src/pip/_internal/cli/cmdoptions.py | 6 +++--- src/pip/_internal/cli/parser.py | 4 ++-- src/pip/_internal/commands/debug.py | 4 +--- src/pip/_internal/commands/install.py | 2 +- tests/functional/test_install_compat.py | 2 +- tests/functional/test_wheel.py | 2 +- 8 files changed, 10 insertions(+), 11 deletions(-) create mode 100644 news/80291DF4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst diff --git a/news/80291DF4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst b/news/80291DF4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/pyproject.toml b/pyproject.toml index 7a4fe62463f..b720c460297 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,6 +101,7 @@ select = [ "PLE", "PLR0", "W", + "RUF100", ] [tool.ruff.isort] diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py index 84b40a8fc52..8fb16dc4a6a 100644 --- a/src/pip/_internal/cli/cmdoptions.py +++ b/src/pip/_internal/cli/cmdoptions.py @@ -826,7 +826,7 @@ def _handle_config_settings( ) -> None: key, sep, val = value.partition("=") if sep != "=": - parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") # noqa + 
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL") dest = getattr(parser.values, option.dest) if dest is None: dest = {} @@ -921,13 +921,13 @@ def _handle_merge_hash( algo, digest = value.split(":", 1) except ValueError: parser.error( - "Arguments to {} must be a hash name " # noqa + "Arguments to {} must be a hash name " "followed by a value, like --hash=sha256:" "abcde...".format(opt_str) ) if algo not in STRONG_HASHES: parser.error( - "Allowed hash algorithms for {} are {}.".format( # noqa + "Allowed hash algorithms for {} are {}.".format( opt_str, ", ".join(STRONG_HASHES) ) ) diff --git a/src/pip/_internal/cli/parser.py b/src/pip/_internal/cli/parser.py index c762cf2781d..64cf9719730 100644 --- a/src/pip/_internal/cli/parser.py +++ b/src/pip/_internal/cli/parser.py @@ -229,7 +229,7 @@ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: val = strtobool(val) except ValueError: self.error( - "{} is not a valid value for {} option, " # noqa + "{} is not a valid value for {} option, " "please specify a boolean value like yes/no, " "true/false or 1/0 instead.".format(val, key) ) @@ -240,7 +240,7 @@ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]: val = int(val) if not isinstance(val, int) or val < 0: self.error( - "{} is not a valid value for {} option, " # noqa + "{} is not a valid value for {} option, " "please instead specify either a non-negative integer " "or a boolean value like yes/no or false/true " "which is equivalent to 1/0.".format(val, key) diff --git a/src/pip/_internal/commands/debug.py b/src/pip/_internal/commands/debug.py index 1b1fd3ea5cc..a29c625e81a 100644 --- a/src/pip/_internal/commands/debug.py +++ b/src/pip/_internal/commands/debug.py @@ -134,9 +134,7 @@ def show_tags(options: Values) -> None: def ca_bundle_info(config: Configuration) -> str: - # Ruff misidentifies config as a dict. - # Configuration does not have support the mapping interface. 
- levels = {key.split(".", 1)[0] for key, _ in config.items()} # noqa: PERF102 + levels = {key.split(".", 1)[0] for key, _ in config.items()} if not levels: return "Not specified" diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index f6a300804f4..d88cafe5a4b 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -501,7 +501,7 @@ def run(self, options: Values, args: List[str]) -> int: show_traceback, options.use_user_site, ) - logger.error(message, exc_info=show_traceback) # noqa + logger.error(message, exc_info=show_traceback) return ERROR diff --git a/tests/functional/test_install_compat.py b/tests/functional/test_install_compat.py index 8374d487b1f..6c809e75307 100644 --- a/tests/functional/test_install_compat.py +++ b/tests/functional/test_install_compat.py @@ -11,7 +11,7 @@ PipTestEnvironment, TestData, assert_all_changes, - pyversion, # noqa: F401 + pyversion, ) diff --git a/tests/functional/test_wheel.py b/tests/functional/test_wheel.py index c0e27949256..042f5824613 100644 --- a/tests/functional/test_wheel.py +++ b/tests/functional/test_wheel.py @@ -10,7 +10,7 @@ from tests.lib import ( PipTestEnvironment, TestData, - pyversion, # noqa: F401 + pyversion, ) From eddd9ddb66e6a3b74d7d2f3187fd246c955e00b7 Mon Sep 17 00:00:00 2001 From: Shantanu <12621235+hauntsaninja@users.noreply.github.com> Date: Sat, 23 Sep 2023 09:10:13 -0700 Subject: [PATCH 708/730] Enable mypy's strict equality checks (#12209) This makes mypy check more behaviours within the codebase. 
Co-authored-by: Pradyun Gedam --- news/E2B261CA-A0CF-4309-B808-1210C0B54632.trivial.rst | 0 setup.cfg | 11 +++++++---- src/pip/_internal/utils/temp_dir.py | 4 ++-- tests/functional/test_list.py | 3 +-- tests/lib/__init__.py | 4 +++- tests/unit/test_network_auth.py | 2 +- tests/unit/test_options.py | 4 ++-- tests/unit/test_target_python.py | 10 +++++----- 8 files changed, 21 insertions(+), 17 deletions(-) create mode 100644 news/E2B261CA-A0CF-4309-B808-1210C0B54632.trivial.rst diff --git a/news/E2B261CA-A0CF-4309-B808-1210C0B54632.trivial.rst b/news/E2B261CA-A0CF-4309-B808-1210C0B54632.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/setup.cfg b/setup.cfg index 2e35be30dd6..08a1795eb93 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,11 +36,14 @@ per-file-ignores = [mypy] mypy_path = $MYPY_CONFIG_FILE_DIR/src + +strict = True + +no_implicit_reexport = False +allow_subclassing_any = True +allow_untyped_calls = True +warn_return_any = False ignore_missing_imports = True -disallow_untyped_defs = True -disallow_any_generics = True -warn_unused_ignores = True -no_implicit_optional = True [mypy-pip._internal.utils._jaraco_text] ignore_errors = True diff --git a/src/pip/_internal/utils/temp_dir.py b/src/pip/_internal/utils/temp_dir.py index 38c5f7c7c94..4eec5f37f76 100644 --- a/src/pip/_internal/utils/temp_dir.py +++ b/src/pip/_internal/utils/temp_dir.py @@ -6,9 +6,9 @@ import traceback from contextlib import ExitStack, contextmanager from pathlib import Path -from types import FunctionType from typing import ( Any, + Callable, Dict, Generator, List, @@ -187,7 +187,7 @@ def cleanup(self) -> None: errors: List[BaseException] = [] def onerror( - func: FunctionType, + func: Callable[..., Any], path: Path, exc_val: BaseException, ) -> None: diff --git a/tests/functional/test_list.py b/tests/functional/test_list.py index cf8900a32bd..03dce41e740 100644 --- a/tests/functional/test_list.py +++ b/tests/functional/test_list.py @@ -595,8 +595,7 @@ def 
test_outdated_formats(script: PipTestEnvironment, data: TestData) -> None: "--outdated", "--format=json", ) - data = json.loads(result.stdout) - assert data == [ + assert json.loads(result.stdout) == [ { "name": "simple", "version": "1.0", diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index a48423570c4..2e8b239ac91 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -46,7 +46,9 @@ # Literal was introduced in Python 3.8. from typing import Literal - ResolverVariant = Literal["resolvelib", "legacy"] + ResolverVariant = Literal[ + "resolvelib", "legacy", "2020-resolver", "legacy-resolver" + ] else: ResolverVariant = str diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py index e3cb772bb05..5bd85f8cd95 100644 --- a/tests/unit/test_network_auth.py +++ b/tests/unit/test_network_auth.py @@ -352,7 +352,7 @@ def get_credential(self, system: str, username: str) -> Optional[Credential]: ), ) def test_keyring_get_credential( - monkeypatch: pytest.MonkeyPatch, url: str, expect: str + monkeypatch: pytest.MonkeyPatch, url: str, expect: Tuple[str, str] ) -> None: monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2()) auth = MultiDomainBasicAuth( diff --git a/tests/unit/test_options.py b/tests/unit/test_options.py index 43d5fdd3d75..22ff7f721d7 100644 --- a/tests/unit/test_options.py +++ b/tests/unit/test_options.py @@ -2,7 +2,7 @@ from contextlib import contextmanager from optparse import Values from tempfile import NamedTemporaryFile -from typing import Any, Dict, Iterator, List, Tuple, Union, cast +from typing import Any, Dict, Iterator, List, Tuple, Type, Union, cast import pytest @@ -605,7 +605,7 @@ def test_config_file_options( self, monkeypatch: pytest.MonkeyPatch, args: List[str], - expect: Union[None, str, PipError], + expect: Union[None, str, Type[PipError]], ) -> None: cmd = cast(ConfigurationCommand, create_command("config")) # Replace a handler with a no-op to avoid side effects diff --git 
a/tests/unit/test_target_python.py b/tests/unit/test_target_python.py index bc171376941..31df5935ee3 100644 --- a/tests/unit/test_target_python.py +++ b/tests/unit/test_target_python.py @@ -99,17 +99,17 @@ def test_get_sorted_tags( py_version_info: Optional[Tuple[int, ...]], expected_version: Optional[str], ) -> None: - mock_get_supported.return_value = ["tag-1", "tag-2"] + dummy_tags = [Tag("py4", "none", "any"), Tag("py5", "none", "any")] + mock_get_supported.return_value = dummy_tags target_python = TargetPython(py_version_info=py_version_info) actual = target_python.get_sorted_tags() - assert actual == ["tag-1", "tag-2"] + assert actual == dummy_tags - actual = mock_get_supported.call_args[1]["version"] - assert actual == expected_version + assert mock_get_supported.call_args[1]["version"] == expected_version # Check that the value was cached. - assert target_python._valid_tags == ["tag-1", "tag-2"] + assert target_python._valid_tags == dummy_tags def test_get_unsorted_tags__uses_cached_value(self) -> None: """ From 666be3544b3a4f663f77399e5f82af55e72dbaae Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Sat, 23 Sep 2023 13:33:18 -0700 Subject: [PATCH 709/730] Avoid use of 2020-resolver and legacy-resolver --- ...69-21F3-49F6-B938-AB16E326F82C.trivial.rst | 0 src/pip/_internal/cli/req_command.py | 6 +-- src/pip/_internal/commands/install.py | 4 +- tests/conftest.py | 4 +- tests/functional/test_freeze.py | 2 +- tests/functional/test_install.py | 8 ++-- tests/functional/test_install_extras.py | 2 +- tests/functional/test_install_reqs.py | 38 ++++++++----------- tests/functional/test_install_upgrade.py | 2 +- tests/lib/__init__.py | 4 +- tests/unit/test_req_file.py | 4 +- 11 files changed, 32 insertions(+), 42 deletions(-) create mode 100644 news/1F54AB69-21F3-49F6-B938-AB16E326F82C.trivial.rst diff --git a/news/1F54AB69-21F3-49F6-B938-AB16E326F82C.trivial.rst b/news/1F54AB69-21F3-49F6-B938-AB16E326F82C.trivial.rst new file mode 100644 index 
00000000000..e69de29bb2d diff --git a/src/pip/_internal/cli/req_command.py b/src/pip/_internal/cli/req_command.py index 86070f10c14..96d8efaf585 100644 --- a/src/pip/_internal/cli/req_command.py +++ b/src/pip/_internal/cli/req_command.py @@ -268,7 +268,7 @@ def determine_resolver_variant(options: Values) -> str: if "legacy-resolver" in options.deprecated_features_enabled: return "legacy" - return "2020-resolver" + return "resolvelib" @classmethod def make_requirement_preparer( @@ -290,7 +290,7 @@ def make_requirement_preparer( legacy_resolver = False resolver_variant = cls.determine_resolver_variant(options) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": lazy_wheel = "fast-deps" in options.features_enabled if lazy_wheel: logger.warning( @@ -352,7 +352,7 @@ def make_resolver( # The long import name and duplicated invocation is needed to convince # Mypy into correctly typechecking. Otherwise it would complain the # "Resolver" class being redefined. - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": import pip._internal.resolution.resolvelib.resolver return pip._internal.resolution.resolvelib.resolver.Resolver( diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index f6a300804f4..d53bbd059c3 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -595,7 +595,7 @@ def _warn_about_conflicts( "source of the following dependency conflicts." ) else: - assert resolver_variant == "2020-resolver" + assert resolver_variant == "resolvelib" parts.append( "pip's dependency resolver does not currently take into account " "all the packages that are installed. 
This behaviour is the " @@ -628,7 +628,7 @@ def _warn_about_conflicts( requirement=req, dep_name=dep_name, dep_version=dep_version, - you=("you" if resolver_variant == "2020-resolver" else "you'll"), + you=("you" if resolver_variant == "resolvelib" else "you'll"), ) parts.append(message) diff --git a/tests/conftest.py b/tests/conftest.py index cd9931c66d9..07cd468c15d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -76,8 +76,8 @@ def pytest_addoption(parser: Parser) -> None: parser.addoption( "--resolver", action="store", - default="2020-resolver", - choices=["2020-resolver", "legacy"], + default="resolvelib", + choices=["resolvelib", "legacy"], help="use given resolver in tests", ) parser.addoption( diff --git a/tests/functional/test_freeze.py b/tests/functional/test_freeze.py index d6122308a69..9a5937df3de 100644 --- a/tests/functional/test_freeze.py +++ b/tests/functional/test_freeze.py @@ -629,7 +629,7 @@ def test_freeze_nested_vcs( --extra-index-url http://ignore --find-links http://ignore --index-url http://ignore - --use-feature 2020-resolver + --use-feature resolvelib """ ) diff --git a/tests/functional/test_install.py b/tests/functional/test_install.py index bf051294338..485710eaa85 100644 --- a/tests/functional/test_install.py +++ b/tests/functional/test_install.py @@ -1209,9 +1209,9 @@ def test_install_nonlocal_compatible_wheel_path( "--no-index", "--only-binary=:all:", Path(data.packages) / "simplewheel-2.0-py3-fakeabi-fakeplat.whl", - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert result.returncode == ERROR else: assert result.returncode == SUCCESS @@ -1825,14 +1825,14 @@ def test_install_editable_with_wrong_egg_name( "install", "--editable", f"file://{pkga_path}#egg=pkgb", - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) assert ( "Generating 
metadata for package pkgb produced metadata " "for project name pkga. Fix your #egg=pkgb " "fragments." ) in result.stderr - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "has inconsistent" in result.stdout, str(result) else: assert "Successfully installed pkga" in str(result), str(result) diff --git a/tests/functional/test_install_extras.py b/tests/functional/test_install_extras.py index 8ccbcf1998d..1dd67be0a0c 100644 --- a/tests/functional/test_install_extras.py +++ b/tests/functional/test_install_extras.py @@ -242,7 +242,7 @@ def test_install_extra_merging( expect_error=(fails_on_legacy and resolver_variant == "legacy"), ) - if not fails_on_legacy or resolver_variant == "2020-resolver": + if not fails_on_legacy or resolver_variant == "resolvelib": expected = f"Successfully installed pkga-0.1 simple-{simple_version}" assert expected in result.stdout diff --git a/tests/functional/test_install_reqs.py b/tests/functional/test_install_reqs.py index 96cff0dc5da..c21b9ba83de 100644 --- a/tests/functional/test_install_reqs.py +++ b/tests/functional/test_install_reqs.py @@ -392,11 +392,8 @@ def test_constraints_local_editable_install_causes_error( to_install, expect_error=True, ) - if resolver_variant == "legacy-resolver": - assert "Could not satisfy constraints" in result.stderr, str(result) - else: - # Because singlemodule only has 0.0.1 available. - assert "Cannot install singlemodule 0.0.1" in result.stderr, str(result) + # Because singlemodule only has 0.0.1 available. + assert "Cannot install singlemodule 0.0.1" in result.stderr, str(result) @pytest.mark.network @@ -426,11 +423,8 @@ def test_constraints_local_install_causes_error( to_install, expect_error=True, ) - if resolver_variant == "legacy-resolver": - assert "Could not satisfy constraints" in result.stderr, str(result) - else: - # Because singlemodule only has 0.0.1 available. 
- assert "Cannot install singlemodule 0.0.1" in result.stderr, str(result) + # Because singlemodule only has 0.0.1 available. + assert "Cannot install singlemodule 0.0.1" in result.stderr, str(result) def test_constraints_constrain_to_local_editable( @@ -451,9 +445,9 @@ def test_constraints_constrain_to_local_editable( script.scratch_path / "constraints.txt", "singlemodule", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "Editable requirements are not allowed as constraints" in result.stderr else: assert "Running setup.py develop for singlemodule" in result.stdout @@ -551,9 +545,9 @@ def test_install_with_extras_from_constraints( script.scratch_path / "constraints.txt", "LocalExtras", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "Constraints cannot have extras" in result.stderr else: result.did_create(script.site_packages / "simple") @@ -589,9 +583,9 @@ def test_install_with_extras_joined( script.scratch_path / "constraints.txt", "LocalExtras[baz]", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "Constraints cannot have extras" in result.stderr else: result.did_create(script.site_packages / "simple") @@ -610,9 +604,9 @@ def test_install_with_extras_editable_joined( script.scratch_path / "constraints.txt", "LocalExtras[baz]", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert 
"Editable requirements are not allowed as constraints" in result.stderr else: result.did_create(script.site_packages / "simple") @@ -654,9 +648,9 @@ def test_install_distribution_union_with_constraints( script.scratch_path / "constraints.txt", f"{to_install}[baz]", allow_stderr_warning=True, - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": msg = "Unnamed requirements are not allowed as constraints" assert msg in result.stderr else: @@ -674,9 +668,9 @@ def test_install_distribution_union_with_versions( result = script.pip_install_local( f"{to_install_001}[bar]", f"{to_install_002}[baz]", - expect_error=(resolver_variant == "2020-resolver"), + expect_error=(resolver_variant == "resolvelib"), ) - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": assert "Cannot install localextras[bar]" in result.stderr assert ("localextras[bar] 0.0.1 depends on localextras 0.0.1") in result.stdout assert ("localextras[baz] 0.0.2 depends on localextras 0.0.2") in result.stdout diff --git a/tests/functional/test_install_upgrade.py b/tests/functional/test_install_upgrade.py index 09c01d7eb18..6556fcdf599 100644 --- a/tests/functional/test_install_upgrade.py +++ b/tests/functional/test_install_upgrade.py @@ -172,7 +172,7 @@ def test_upgrade_with_newest_already_installed( "install", "--upgrade", "-f", data.find_links, "--no-index", "simple" ) assert not result.files_created, "simple upgraded when it should not have" - if resolver_variant == "2020-resolver": + if resolver_variant == "resolvelib": msg = "Requirement already satisfied" else: msg = "already up-to-date" diff --git a/tests/lib/__init__.py b/tests/lib/__init__.py index 2e8b239ac91..a48423570c4 100644 --- a/tests/lib/__init__.py +++ b/tests/lib/__init__.py @@ -46,9 +46,7 @@ # Literal was introduced in Python 3.8. 
from typing import Literal - ResolverVariant = Literal[ - "resolvelib", "legacy", "2020-resolver", "legacy-resolver" - ] + ResolverVariant = Literal["resolvelib", "legacy"] else: ResolverVariant = str diff --git a/tests/unit/test_req_file.py b/tests/unit/test_req_file.py index 439c41563b7..7a196eb8dd6 100644 --- a/tests/unit/test_req_file.py +++ b/tests/unit/test_req_file.py @@ -471,9 +471,7 @@ def test_use_feature_with_error( ) -> None: """--use-feature triggers error when parsing requirements files.""" with pytest.raises(RequirementsFileParseError): - line_processor( - "--use-feature=2020-resolver", "filename", 1, options=options - ) + line_processor("--use-feature=resolvelib", "filename", 1, options=options) def test_relative_local_find_links( self, From ac19f79049dea400ae1c3ddd7937c1a3ba90f507 Mon Sep 17 00:00:00 2001 From: hauntsaninja Date: Sat, 23 Sep 2023 17:47:57 -0700 Subject: [PATCH 710/730] Follow imports for more vendored dependencies This will allow mypy to notice if you e.g. try to call a colorama function that does not exist. Note we won't report any errors in vendored code due to the ignore_errors config above. It would also be quite easy to let mypy look at pkg_resources code, but this would involve the addition of like three type ignores. --- news/12AE57EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst | 0 setup.cfg | 4 ---- 2 files changed, 4 deletions(-) create mode 100644 news/12AE57EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst diff --git a/news/12AE57EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst b/news/12AE57EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d diff --git a/setup.cfg b/setup.cfg index 08a1795eb93..b87fec7ef73 100644 --- a/setup.cfg +++ b/setup.cfg @@ -54,12 +54,8 @@ ignore_errors = True # These vendored libraries use runtime magic to populate things and don't sit # well with static typing out of the box. 
Eventually we should provide correct # typing information for their public interface and remove these configs. -[mypy-pip._vendor.colorama] -follow_imports = skip [mypy-pip._vendor.pkg_resources] follow_imports = skip -[mypy-pip._vendor.progress.*] -follow_imports = skip [mypy-pip._vendor.requests.*] follow_imports = skip From 64d2dc3253e4a81e437931fb9b30d636556461d1 Mon Sep 17 00:00:00 2001 From: Itamar Turner-Trauring Date: Tue, 26 Sep 2023 10:28:27 -0400 Subject: [PATCH 711/730] Fix lints --- src/pip/_internal/commands/cache.py | 8 ++++---- tests/functional/test_cache.py | 5 ++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/pip/_internal/commands/cache.py b/src/pip/_internal/commands/cache.py index 32d1a221d1e..1f3b5fe142b 100644 --- a/src/pip/_internal/commands/cache.py +++ b/src/pip/_internal/commands/cache.py @@ -96,9 +96,9 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: http_cache_location = self._cache_dir(options, "http-v2") old_http_cache_location = self._cache_dir(options, "http") wheels_cache_location = self._cache_dir(options, "wheels") - http_cache_size = ( - filesystem.format_size(filesystem.directory_size(http_cache_location) + - filesystem.directory_size(old_http_cache_location)) + http_cache_size = filesystem.format_size( + filesystem.directory_size(http_cache_location) + + filesystem.directory_size(old_http_cache_location) ) wheels_cache_size = filesystem.format_directory_size(wheels_cache_location) @@ -112,7 +112,7 @@ def get_cache_info(self, options: Values, args: List[Any]) -> None: Locally built wheels location: {wheels_cache_location} Locally built wheels size: {wheels_cache_size} Number of locally built wheels: {package_count} - """ + """ # noqa: E501 ) .format( http_cache_location=http_cache_location, diff --git a/tests/functional/test_cache.py b/tests/functional/test_cache.py index c5d910d453f..a744dbbb9bc 100644 --- a/tests/functional/test_cache.py +++ b/tests/functional/test_cache.py @@ 
-203,7 +203,10 @@ def test_cache_info( ) -> None: result = script.pip("cache", "info") - assert f"Package index page cache location (pip v23.3+): {http_cache_dir}" in result.stdout + assert ( + f"Package index page cache location (pip v23.3+): {http_cache_dir}" + in result.stdout + ) assert f"Locally built wheels location: {wheel_cache_dir}" in result.stdout num_wheels = len(wheel_cache_files) assert f"Number of locally built wheels: {num_wheels}" in result.stdout From 9692d48822187d3b0107cc4b1333d74cf3374222 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 1 Oct 2023 11:51:37 +0200 Subject: [PATCH 712/730] Drop isort and flake8 settings from setup.cfg Since we use ruff, these are not used anymore. --- setup.cfg | 36 ------------------------------------ 1 file changed, 36 deletions(-) diff --git a/setup.cfg b/setup.cfg index b87fec7ef73..0be3ef08b82 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,39 +1,3 @@ -[isort] -profile = black -skip = - ./build, - .nox, - .tox, - .scratch, - _vendor, - data -known_third_party = - pip._vendor - -[flake8] -max-line-length = 88 -exclude = - ./build, - .nox, - .tox, - .scratch, - _vendor, - data -enable-extensions = G -extend-ignore = - G200, G202, - # black adds spaces around ':' - E203, - # using a cache - B019, - # reassigning variables in a loop - B020, -per-file-ignores = - # G: The plugin logging-format treats every .log and .error as logging. - noxfile.py: G - # B011: Do not call assert False since python -O removes these calls - tests/*: B011 - [mypy] mypy_path = $MYPY_CONFIG_FILE_DIR/src From 3f6e81694f8cce0866d934e1deedbdccdd0deb6b Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 1 Oct 2023 12:22:41 +0100 Subject: [PATCH 713/730] Rework how the logging stack handles rich objects This makes it possible to render content via rich without a magic string and relies on a proper mechanism supported by the logging stack. 
--- src/pip/_internal/cli/base_command.py | 2 +- src/pip/_internal/self_outdated_check.py | 2 +- src/pip/_internal/utils/logging.py | 4 ++-- src/pip/_internal/utils/subprocess.py | 2 +- tests/unit/test_utils_subprocess.py | 4 ++-- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py index 6a3b8e6c213..db9d5cc6624 100644 --- a/src/pip/_internal/cli/base_command.py +++ b/src/pip/_internal/cli/base_command.py @@ -181,7 +181,7 @@ def exc_logging_wrapper(*args: Any) -> int: assert isinstance(status, int) return status except DiagnosticPipError as exc: - logger.error("[present-rich] %s", exc) + logger.error("%s", exc, extra={"rich": True}) logger.debug("Exception information:", exc_info=True) return ERROR diff --git a/src/pip/_internal/self_outdated_check.py b/src/pip/_internal/self_outdated_check.py index eefbc498b3f..cb18edbed8e 100644 --- a/src/pip/_internal/self_outdated_check.py +++ b/src/pip/_internal/self_outdated_check.py @@ -233,7 +233,7 @@ def pip_self_version_check(session: PipSession, options: optparse.Values) -> Non ), ) if upgrade_prompt is not None: - logger.warning("[present-rich] %s", upgrade_prompt) + logger.warning("%s", upgrade_prompt, extra={"rich": True}) except Exception: logger.warning("There was an error checking the latest version of pip.") logger.debug("See below for error", exc_info=True) diff --git a/src/pip/_internal/utils/logging.py b/src/pip/_internal/utils/logging.py index c10e1f4ced6..95982dfb691 100644 --- a/src/pip/_internal/utils/logging.py +++ b/src/pip/_internal/utils/logging.py @@ -155,8 +155,8 @@ def emit(self, record: logging.LogRecord) -> None: # If we are given a diagnostic error to present, present it with indentation. 
assert isinstance(record.args, tuple) - if record.msg == "[present-rich] %s" and len(record.args) == 1: - rich_renderable = record.args[0] + if getattr(record, "rich", False): + (rich_renderable,) = record.args assert isinstance( rich_renderable, (ConsoleRenderable, RichCast, str) ), f"{rich_renderable} is not rich-console-renderable" diff --git a/src/pip/_internal/utils/subprocess.py b/src/pip/_internal/utils/subprocess.py index 1e8ff50edfb..79580b05320 100644 --- a/src/pip/_internal/utils/subprocess.py +++ b/src/pip/_internal/utils/subprocess.py @@ -209,7 +209,7 @@ def call_subprocess( output_lines=all_output if not showing_subprocess else None, ) if log_failed_cmd: - subprocess_logger.error("[present-rich] %s", error) + subprocess_logger.error("%s", error, extra={"rich": True}) subprocess_logger.verbose( "[bold magenta]full command[/]: [blue]%s[/]", escape(format_command_args(cmd)), diff --git a/tests/unit/test_utils_subprocess.py b/tests/unit/test_utils_subprocess.py index a694b717fcb..2dbd5d77e4b 100644 --- a/tests/unit/test_utils_subprocess.py +++ b/tests/unit/test_utils_subprocess.py @@ -260,9 +260,9 @@ def test_info_logging__subprocess_error( expected = ( None, [ - # pytest's caplog overrides th formatter, which means that we + # pytest's caplog overrides the formatter, which means that we # won't see the message formatted through our formatters. 
- ("pip.subprocessor", ERROR, "[present-rich]"), + ("pip.subprocessor", ERROR, "subprocess error exited with 1"), ], ) # The spinner should spin three times in this case since the From ccc4bbcdfd06b1903af3b4cf6bf845be3b3c8b88 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 1 Oct 2023 15:05:20 +0200 Subject: [PATCH 714/730] Postpone some deprecation removals --- src/pip/_internal/req/req_install.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index f8957e5d994..dd8a0db2792 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -514,7 +514,7 @@ def load_pyproject_toml(self) -> None: "to use --use-pep517 or add a " "pyproject.toml file to the project" ), - gone_in="23.3", + gone_in="24.0", ) self.use_pep517 = False return @@ -904,7 +904,7 @@ def check_legacy_setup_py_options( reason="--build-option and --global-option are deprecated.", issue=11859, replacement="to use --config-settings", - gone_in="23.3", + gone_in="24.0", ) logger.warning( "Implying --no-binary=:all: due to the presence of " From 389cb799d0da9a840749fcd14878928467ed49b4 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 1 Oct 2023 14:10:25 +0100 Subject: [PATCH 715/730] Use `-r=...` instead of `-r ...` for hg This ensures that the resulting revision can not be misinterpreted as an option. 
--- src/pip/_internal/vcs/mercurial.py | 2 +- tests/unit/test_vcs.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/vcs/mercurial.py b/src/pip/_internal/vcs/mercurial.py index 4595960b5bf..e440c122169 100644 --- a/src/pip/_internal/vcs/mercurial.py +++ b/src/pip/_internal/vcs/mercurial.py @@ -31,7 +31,7 @@ class Mercurial(VersionControl): @staticmethod def get_base_rev_args(rev: str) -> List[str]: - return ["-r", rev] + return [f"-r={rev}"] def fetch_new( self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int diff --git a/tests/unit/test_vcs.py b/tests/unit/test_vcs.py index fb6c3ea31ce..4a3750f2d36 100644 --- a/tests/unit/test_vcs.py +++ b/tests/unit/test_vcs.py @@ -66,7 +66,7 @@ def test_rev_options_repr() -> None: # First check VCS-specific RevOptions behavior. (Bazaar, [], ["-r", "123"], {}), (Git, ["HEAD"], ["123"], {}), - (Mercurial, [], ["-r", "123"], {}), + (Mercurial, [], ["-r=123"], {}), (Subversion, [], ["-r", "123"], {}), # Test extra_args. For this, test using a single VersionControl class. ( From 408b5248dc8934af50811190cb7df913116031b0 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Sun, 1 Oct 2023 13:49:06 +0100 Subject: [PATCH 716/730] :newspaper: --- news/12306.bugfix.rst | 1 + 1 file changed, 1 insertion(+) create mode 100644 news/12306.bugfix.rst diff --git a/news/12306.bugfix.rst b/news/12306.bugfix.rst new file mode 100644 index 00000000000..eb6eecaaf1b --- /dev/null +++ b/news/12306.bugfix.rst @@ -0,0 +1 @@ +Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial. 
From dcb9dc03698b8c59451b98d236ae44b4959c0343 Mon Sep 17 00:00:00 2001 From: Christian Clauss Date: Tue, 3 Oct 2023 09:01:40 +0200 Subject: [PATCH 717/730] Wrap long lines --- .pre-commit-config.yaml | 3 +-- tests/conftest.py | 5 ++++- tests/unit/test_collector.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c8d81deed7a..2c576d90a5b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,8 +22,7 @@ repos: - id: black - repo: https://github.com/astral-sh/ruff-pre-commit - # Ruff version. - rev: v0.0.287 + rev: v0.0.292 hooks: - id: ruff diff --git a/tests/conftest.py b/tests/conftest.py index 07cd468c15d..62191f99c56 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1031,7 +1031,10 @@ def html_index_with_onetime_server( class InDirectoryServer(http.server.ThreadingHTTPServer): def finish_request(self, request: Any, client_address: Any) -> None: self.RequestHandlerClass( - request, client_address, self, directory=str(html_index_for_packages) # type: ignore[call-arg] # noqa: E501 + request, + client_address, + self, + directory=str(html_index_for_packages), # type: ignore[call-arg] ) class Handler(OneTimeDownloadHandler): diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py index 5410a4afc03..3c8b81de44d 100644 --- a/tests/unit/test_collector.py +++ b/tests/unit/test_collector.py @@ -625,7 +625,7 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) - ), # Test with a provided hash value. 
( - '', # noqa: E501 + '', MetadataFile({"sha256": "aa113592bbe"}), {}, ), From ac962890b513253376f543febbe189a1aca26ef9 Mon Sep 17 00:00:00 2001 From: Kurt McKee Date: Wed, 24 May 2023 20:43:20 -0500 Subject: [PATCH 718/730] Add a dependabot config to update CI actions monthly --- .github/dependabot.yml | 6 ++++++ news/d7179b28-bc23-46aa-9175-834117a42dbd.trivial.rst | 0 2 files changed, 6 insertions(+) create mode 100644 .github/dependabot.yml create mode 100644 news/d7179b28-bc23-46aa-9175-834117a42dbd.trivial.rst diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000000..8ac6b8c4984 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" diff --git a/news/d7179b28-bc23-46aa-9175-834117a42dbd.trivial.rst b/news/d7179b28-bc23-46aa-9175-834117a42dbd.trivial.rst new file mode 100644 index 00000000000..e69de29bb2d From dba399fe6a41615d0a59899f7ac6dfdd26a42731 Mon Sep 17 00:00:00 2001 From: Wu Zhenyu Date: Sat, 22 Jul 2023 15:31:12 +0800 Subject: [PATCH 719/730] Fix #12166 - tests expected results indendation was off - add bugfix news entry --- news/12166.bugfix.rst | 1 + src/pip/_internal/commands/completion.py | 15 ++++++++++++--- tests/functional/test_completion.py | 15 ++++++++++++--- 3 files changed, 25 insertions(+), 6 deletions(-) create mode 100644 news/12166.bugfix.rst diff --git a/news/12166.bugfix.rst b/news/12166.bugfix.rst new file mode 100644 index 00000000000..491597c7f1a --- /dev/null +++ b/news/12166.bugfix.rst @@ -0,0 +1 @@ +Fix completion script for zsh diff --git a/src/pip/_internal/commands/completion.py b/src/pip/_internal/commands/completion.py index 30233fc7ad2..9e89e279883 100644 --- a/src/pip/_internal/commands/completion.py +++ b/src/pip/_internal/commands/completion.py @@ -23,9 +23,18 @@ """, "zsh": """ #compdef -P pip[0-9.]# - compadd $( COMP_WORDS="$words[*]" \\ - 
COMP_CWORD=$((CURRENT-1)) \\ - PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) + __pip() {{ + compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) + }} + if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + __pip "$@" + else + # eval/source/. command, register function for later + compdef __pip -P 'pip[0-9.]#' + fi """, "fish": """ function __fish_complete_pip diff --git a/tests/functional/test_completion.py b/tests/functional/test_completion.py index 2e3f31729d7..2aa861aacb7 100644 --- a/tests/functional/test_completion.py +++ b/tests/functional/test_completion.py @@ -44,9 +44,18 @@ "zsh", """\ #compdef -P pip[0-9.]# -compadd $( COMP_WORDS="$words[*]" \\ - COMP_CWORD=$((CURRENT-1)) \\ - PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )""", +__pip() { + compadd $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$((CURRENT-1)) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ) +} +if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + __pip "$@" +else + # eval/source/. command, register function for later + compdef __pip -P 'pip[0-9.]#' +fi""", ), ( "powershell", From 431cf5af82f43431b75fa495b114c1177ab97f8d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Oct 2023 19:00:48 +0000 Subject: [PATCH 720/730] Bump dessant/lock-threads from 3 to 4 Bumps [dessant/lock-threads](https://github.com/dessant/lock-threads) from 3 to 4. - [Release notes](https://github.com/dessant/lock-threads/releases) - [Changelog](https://github.com/dessant/lock-threads/blob/main/CHANGELOG.md) - [Commits](https://github.com/dessant/lock-threads/compare/v3...v4) --- updated-dependencies: - dependency-name: dessant/lock-threads dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/lock-threads.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lock-threads.yml b/.github/workflows/lock-threads.yml index 990440dd6c8..dc68b683bef 100644 --- a/.github/workflows/lock-threads.yml +++ b/.github/workflows/lock-threads.yml @@ -17,7 +17,7 @@ jobs: if: github.repository_owner == 'pypa' runs-on: ubuntu-latest steps: - - uses: dessant/lock-threads@v3 + - uses: dessant/lock-threads@v4 with: issue-inactive-days: '30' pr-inactive-days: '15' From 0042cc94cc6d9b74307343107b3812fb9c56fda8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Oct 2023 19:00:51 +0000 Subject: [PATCH 721/730] Bump actions/checkout from 3 to 4 Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 12 ++++++------ .github/workflows/news-file.yml | 2 +- .github/workflows/update-rtd-redirects.yml | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 41d3ab9463a..5f7cd942bf4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,7 +21,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: "3.x" @@ -57,7 +57,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: "3.x" @@ -81,7 +81,7 @@ jobs: github.event_name != 'pull_request' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: "3.x" @@ -112,7 +112,7 @@ jobs: - "3.12" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} @@ -164,7 +164,7 @@ jobs: group: [1, 2] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} @@ -215,7 +215,7 @@ jobs: github.event_name != 'pull_request' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/news-file.yml b/.github/workflows/news-file.yml index 371e12fd755..398ad1b7e67 100644 --- a/.github/workflows/news-file.yml +++ b/.github/workflows/news-file.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: # `towncrier check` runs `git diff --name-only origin/main...`, which # needs a non-shallow clone. 
diff --git a/.github/workflows/update-rtd-redirects.yml b/.github/workflows/update-rtd-redirects.yml index 8259b6c0b6a..c333a09a30d 100644 --- a/.github/workflows/update-rtd-redirects.yml +++ b/.github/workflows/update-rtd-redirects.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest environment: RTD Deploys steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: python-version: "3.11" From d9b47d0173b5d531a1c5a10172c73c37f43d8f51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 8 Oct 2023 17:19:09 +0200 Subject: [PATCH 722/730] Update egg deprecation message --- src/pip/_internal/metadata/importlib/_envs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pip/_internal/metadata/importlib/_envs.py b/src/pip/_internal/metadata/importlib/_envs.py index 3850ddaf412..048dc55dcb2 100644 --- a/src/pip/_internal/metadata/importlib/_envs.py +++ b/src/pip/_internal/metadata/importlib/_envs.py @@ -151,7 +151,8 @@ def _emit_egg_deprecation(location: Optional[str]) -> None: deprecated( reason=f"Loading egg at {location} is deprecated.", replacement="to use pip for package installation.", - gone_in="23.3", + gone_in="24.3", + issue=12330, ) From 76a8c0f2652027b4938a13e9abef500c5f43b185 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 8 Oct 2023 18:17:05 +0200 Subject: [PATCH 723/730] Postpone deprecation of legacy versions and specifiers --- src/pip/_internal/operations/check.py | 4 ++-- src/pip/_internal/req/req_set.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/pip/_internal/operations/check.py b/src/pip/_internal/operations/check.py index 2610459228f..1b7fd7ab7fd 100644 --- a/src/pip/_internal/operations/check.py +++ b/src/pip/_internal/operations/check.py @@ -168,7 +168,7 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: f"release a version with a conforming version number" ), issue=12063, - 
gone_in="23.3", + gone_in="24.0", ) for dep in package_details.dependencies: if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): @@ -183,5 +183,5 @@ def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None: f"release a version with a conforming dependency specifiers" ), issue=12063, - gone_in="23.3", + gone_in="24.0", ) diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py index cff67601737..1bf73d595f6 100644 --- a/src/pip/_internal/req/req_set.py +++ b/src/pip/_internal/req/req_set.py @@ -99,7 +99,7 @@ def warn_legacy_versions_and_specifiers(self) -> None: "or contact the package author to fix the version number" ), issue=12063, - gone_in="23.3", + gone_in="24.0", ) for dep in req.get_dist().iter_dependencies(): if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier): @@ -115,5 +115,5 @@ def warn_legacy_versions_and_specifiers(self) -> None: "or contact the package author to fix the version number" ), issue=12063, - gone_in="23.3", + gone_in="24.0", ) From 496b268c1b9ce3466c08eb4819e5460a943d1793 Mon Sep 17 00:00:00 2001 From: Damian Shaw Date: Wed, 11 Oct 2023 11:36:40 -0400 Subject: [PATCH 724/730] Update "Running Tests" documentation (#12334) Co-authored-by: Paul Moore Co-authored-by: Pradyun Gedam --- docs/html/development/getting-started.rst | 14 +++++++++++++- news/12334.doc.rst | 1 + 2 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 news/12334.doc.rst diff --git a/docs/html/development/getting-started.rst b/docs/html/development/getting-started.rst index e248259f08d..34d647fc231 100644 --- a/docs/html/development/getting-started.rst +++ b/docs/html/development/getting-started.rst @@ -73,7 +73,7 @@ pip's tests are written using the :pypi:`pytest` test framework and :mod:`unittest.mock`. :pypi:`nox` is used to automate the setup and execution of pip's tests. 
-It is preferable to run the tests in parallel for better experience during development, +It is preferable to run the tests in parallel for a better experience during development, since the tests can take a long time to finish when run sequentially. To run tests: @@ -104,6 +104,15 @@ can select tests using the various ways that pytest provides: $ # Using keywords $ nox -s test-3.10 -- -k "install and not wheel" +.. note:: + + When running pip's tests with OS distribution Python versions, be aware that some + functional tests may fail due to potential patches introduced by the distribution. + For all tests to pass consider: + + - Installing Python from `python.org`_ or compile from source + - Or, using `pyenv`_ to assist with source compilation + Running pip's entire test suite requires supported version control tools (subversion, bazaar, git, and mercurial) to be installed. If you are missing any of these VCS, those tests should be skipped automatically. You can also @@ -114,6 +123,9 @@ explicitly tell pytest to skip those tests: $ nox -s test-3.10 -- -k "not svn" $ nox -s test-3.10 -- -k "not (svn or git)" +.. _python.org: https://www.python.org/downloads/ +.. _pyenv: https://github.com/pyenv/pyenv + Running Linters =============== diff --git a/news/12334.doc.rst b/news/12334.doc.rst new file mode 100644 index 00000000000..ff3d877e5e8 --- /dev/null +++ b/news/12334.doc.rst @@ -0,0 +1 @@ +Document that using OS-provided Python can cause pip's test suite to report false failures. 
From 2333ef3b53a71fb7acc9e76d6ff90409576b2250 Mon Sep 17 00:00:00 2001 From: Paul Moore Date: Thu, 12 Oct 2023 12:12:06 +0100 Subject: [PATCH 725/730] Upgrade urllib3 to 1.26.17 (#12343) --- news/urllib3.vendor.rst | 1 + src/pip/_vendor/urllib3/_version.py | 2 +- src/pip/_vendor/urllib3/request.py | 21 +++++++++++++++++++++ src/pip/_vendor/urllib3/util/retry.py | 2 +- src/pip/_vendor/vendor.txt | 2 +- 5 files changed, 25 insertions(+), 3 deletions(-) create mode 100644 news/urllib3.vendor.rst diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst new file mode 100644 index 00000000000..37032f67a0e --- /dev/null +++ b/news/urllib3.vendor.rst @@ -0,0 +1 @@ +Upgrade urllib3 to 1.26.17 diff --git a/src/pip/_vendor/urllib3/_version.py b/src/pip/_vendor/urllib3/_version.py index d69ca314570..cad75fb5df8 100644 --- a/src/pip/_vendor/urllib3/_version.py +++ b/src/pip/_vendor/urllib3/_version.py @@ -1,2 +1,2 @@ # This file is protected via CODEOWNERS -__version__ = "1.26.16" +__version__ = "1.26.17" diff --git a/src/pip/_vendor/urllib3/request.py b/src/pip/_vendor/urllib3/request.py index 398386a5b9f..3b4cf999225 100644 --- a/src/pip/_vendor/urllib3/request.py +++ b/src/pip/_vendor/urllib3/request.py @@ -1,6 +1,9 @@ from __future__ import absolute_import +import sys + from .filepost import encode_multipart_formdata +from .packages import six from .packages.six.moves.urllib.parse import urlencode __all__ = ["RequestMethods"] @@ -168,3 +171,21 @@ def request_encode_body( extra_kw.update(urlopen_kw) return self.urlopen(method, url, **extra_kw) + + +if not six.PY2: + + class RequestModule(sys.modules[__name__].__class__): + def __call__(self, *args, **kwargs): + """ + If user tries to call this module directly urllib3 v2.x style raise an error to the user + suggesting they may need urllib3 v2 + """ + raise TypeError( + "'module' object is not callable\n" + "urllib3.request() method is not supported in this release, " + "upgrade to urllib3 v2 to use it\n" + "see 
https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html" + ) + + sys.modules[__name__].__class__ = RequestModule diff --git a/src/pip/_vendor/urllib3/util/retry.py b/src/pip/_vendor/urllib3/util/retry.py index 2490d5e5b63..60ef6c4f3f9 100644 --- a/src/pip/_vendor/urllib3/util/retry.py +++ b/src/pip/_vendor/urllib3/util/retry.py @@ -235,7 +235,7 @@ class Retry(object): RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) #: Default headers to be used for ``remove_headers_on_redirect`` - DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"]) #: Maximum backoff time. DEFAULT_BACKOFF_MAX = 120 diff --git a/src/pip/_vendor/vendor.txt b/src/pip/_vendor/vendor.txt index 43ced2a4b89..8dbe1341377 100644 --- a/src/pip/_vendor/vendor.txt +++ b/src/pip/_vendor/vendor.txt @@ -11,7 +11,7 @@ requests==2.31.0 certifi==2023.7.22 chardet==5.1.0 idna==3.4 - urllib3==1.26.16 + urllib3==1.26.17 rich==13.4.2 pygments==2.15.1 typing_extensions==4.7.1 From d1659b87e46abd0a2dcc74f2160dd52e6190e13b Mon Sep 17 00:00:00 2001 From: Ed Morley <501702+edmorley@users.noreply.github.com> Date: Tue, 10 Oct 2023 21:49:43 +0100 Subject: [PATCH 726/730] Correct issue number for NEWS entry added by #12197 The NEWS entry added in PR #12197 referenced issue #12191, however, the issue it actually fixed was #11847. --- news/{12191.bugfix.rst => 11847.bugfix.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{12191.bugfix.rst => 11847.bugfix.rst} (100%) diff --git a/news/12191.bugfix.rst b/news/11847.bugfix.rst similarity index 100% rename from news/12191.bugfix.rst rename to news/11847.bugfix.rst From 8f0ed32413daa411a728b50cd7776b9c02b010d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sat, 14 Oct 2023 13:50:49 +0200 Subject: [PATCH 727/730] Redact URLs in Collecting... 
logs --- news/12350.bugfix.rst | 1 + src/pip/_internal/operations/prepare.py | 3 ++- src/pip/_internal/req/req_install.py | 3 ++- src/pip/_internal/utils/misc.py | 8 ++++++++ tests/unit/test_utils.py | 26 +++++++++++++++++++++++++ 5 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 news/12350.bugfix.rst diff --git a/news/12350.bugfix.rst b/news/12350.bugfix.rst new file mode 100644 index 00000000000..3fb16b4ed6a --- /dev/null +++ b/news/12350.bugfix.rst @@ -0,0 +1 @@ +Redact password from URLs in some additional places. diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 1b32d7eec3e..488e76358be 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -47,6 +47,7 @@ display_path, hash_file, hide_url, + redact_auth_from_requirement, ) from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.unpacking import unpack_file @@ -277,7 +278,7 @@ def _log_preparing_link(self, req: InstallRequirement) -> None: information = str(display_path(req.link.file_path)) else: message = "Collecting %s" - information = str(req.req or req) + information = redact_auth_from_requirement(req.req) if req.req else str(req) # If we used req.req, inject requirement source if available (this # would already be included if we used req directly) diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index dd8a0db2792..e556be2b40b 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -49,6 +49,7 @@ display_path, hide_url, is_installable_dir, + redact_auth_from_requirement, redact_auth_from_url, ) from pip._internal.utils.packaging import safe_extra @@ -188,7 +189,7 @@ def __init__( def __str__(self) -> str: if self.req: - s = str(self.req) + s = redact_auth_from_requirement(self.req) if self.link: s += " from {}".format(redact_auth_from_url(self.link.url)) elif self.link: diff --git 
a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 9a6353fc8d3..78060e86417 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -35,6 +35,7 @@ cast, ) +from pip._vendor.packaging.requirements import Requirement from pip._vendor.pyproject_hooks import BuildBackendHookCaller from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed @@ -578,6 +579,13 @@ def redact_auth_from_url(url: str) -> str: return _transform_url(url, _redact_netloc)[0] +def redact_auth_from_requirement(req: Requirement) -> str: + """Replace the password in a given requirement url with ****.""" + if not req.url: + return str(req) + return str(req).replace(req.url, redact_auth_from_url(req.url)) + + class HiddenText: def __init__(self, secret: str, redacted: str) -> None: self.secret = secret diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index d3b0d32d12f..1352b766481 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -14,6 +14,7 @@ from unittest.mock import Mock, patch import pytest +from pip._vendor.packaging.requirements import Requirement from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError from pip._internal.utils.deprecation import PipDeprecationWarning, deprecated @@ -37,6 +38,7 @@ normalize_path, normalize_version_info, parse_netloc, + redact_auth_from_requirement, redact_auth_from_url, redact_netloc, remove_auth_from_url, @@ -765,6 +767,30 @@ def test_redact_auth_from_url(auth_url: str, expected_url: str) -> None: assert url == expected_url +@pytest.mark.parametrize( + "req, expected", + [ + ("pkga", "pkga"), + ( + "resolvelib@ " + " git+https://test-user:test-pass@github.com/sarugaku/resolvelib@1.0.1", + "resolvelib@" + " git+https://test-user:****@github.com/sarugaku/resolvelib@1.0.1", + ), + ( + "resolvelib@" + " git+https://test-user:test-pass@github.com/sarugaku/resolvelib@1.0.1" + " ; python_version>='3.6'", + "resolvelib@" + " 
git+https://test-user:****@github.com/sarugaku/resolvelib@1.0.1" + ' ; python_version >= "3.6"', + ), + ], +) +def test_redact_auth_from_requirement(req: str, expected: str) -> None: + assert redact_auth_from_requirement(Requirement(req)) == expected + + class TestHiddenText: def test_basic(self) -> None: """ From 8d0278771c7325b04f02cb073c8ef02827cbeb93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 15 Oct 2023 10:22:52 +0200 Subject: [PATCH 728/730] Reclassify news fragment This is not for the process category, and probably not significant enough for a feature news entry. --- news/{12155.process.rst => 12155.trivial.rst} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename news/{12155.process.rst => 12155.trivial.rst} (100%) diff --git a/news/12155.process.rst b/news/12155.trivial.rst similarity index 100% rename from news/12155.process.rst rename to news/12155.trivial.rst From 3e85558b10722598fb3353126e2f19979f7cf7dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 15 Oct 2023 10:23:01 +0200 Subject: [PATCH 729/730] Update AUTHORS.txt --- AUTHORS.txt | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/AUTHORS.txt b/AUTHORS.txt index 77eb39a427d..49e30f69678 100644 --- a/AUTHORS.txt +++ b/AUTHORS.txt @@ -20,6 +20,7 @@ Albert-Guan albertg Alberto Sottile Aleks Bunin +Ales Erjavec Alethea Flowers Alex Gaynor Alex Grönholm @@ -30,6 +31,7 @@ Alex Stachowiak Alexander Shtyrov Alexandre Conrad Alexey Popravka +Aleš Erjavec Alli Ami Fischman Ananya Maiti @@ -196,9 +198,11 @@ David Runge David Tucker David Wales Davidovich +ddelange Deepak Sharma Deepyaman Datta Denise Yu +dependabot[bot] derwolfe Desetude Devesh Kumar Singh @@ -312,6 +316,7 @@ Ilya Baryshev Inada Naoki Ionel Cristian Mărieș Ionel Maries Cristian +Itamar Turner-Trauring Ivan Pozdeev Jacob Kim Jacob Walls @@ -338,6 +343,7 @@ Jay Graves Jean-Christophe Fillion-Robin Jeff Barber Jeff Dairiki +Jeff Widman Jelmer Vernooij jenix21 
Jeremy Stanley @@ -367,6 +373,7 @@ Joseph Long Josh Bronson Josh Hansen Josh Schneier +Joshua Juan Luis Cano Rodríguez Juanjo Bazán Judah Rand @@ -397,6 +404,7 @@ KOLANICH kpinc Krishna Oza Kumar McMillan +Kurt McKee Kyle Persohn lakshmanaram Laszlo Kiss-Kollar @@ -413,6 +421,7 @@ lorddavidiii Loren Carvalho Lucas Cimon Ludovic Gasc +Lukas Geiger Lukas Juhrich Luke Macken Luo Jiebin @@ -529,6 +538,7 @@ Patrick Jenkins Patrick Lawson patricktokeeffe Patrik Kopkan +Paul Ganssle Paul Kehrer Paul Moore Paul Nasrat @@ -609,6 +619,7 @@ ryneeverett Sachi King Salvatore Rinchiera sandeepkiran-js +Sander Van Balen Savio Jomton schlamar Scott Kitterman @@ -621,6 +632,7 @@ SeongSoo Cho Sergey Vasilyev Seth Michael Larson Seth Woodworth +Shahar Epstein Shantanu shireenrao Shivansh-007 @@ -648,6 +660,7 @@ Steve Kowalik Steven Myint Steven Silvester stonebig +studioj Stéphane Bidoul Stéphane Bidoul (ACSONE) Stéphane Klein From e3dc91dad93a020b3034a87ebe59027f63370fe8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Bidoul?= Date: Sun, 15 Oct 2023 10:23:02 +0200 Subject: [PATCH 730/730] Bump for release --- NEWS.rst | 57 +++++++++++++++++++ news/11394.bugfix.rst | 1 - news/11649.bugfix.rst | 5 -- news/11847.bugfix.rst | 1 - news/11924.bugfix.rst | 1 - news/11924.feature.rst | 1 - news/12005.bugfix.rst | 1 - news/12059.doc.rst | 1 - news/12095.bugfix.rst | 1 - news/12122.doc.rst | 1 - news/12155.trivial.rst | 6 -- news/12166.bugfix.rst | 1 - news/12175.removal.rst | 1 - news/12183.trivial.rst | 1 - news/12187.bugfix.rst | 1 - news/12194.trivial.rst | 1 - news/12204.feature.rst | 1 - news/12215.feature.rst | 1 - news/12224.feature.rst | 1 - news/12225.bugfix.rst | 1 - news/12252.trivial.rst | 0 news/12254.process.rst | 1 - news/12261.trivial.rst | 0 news/12280.bugfix.rst | 1 - news/12306.bugfix.rst | 1 - news/12334.doc.rst | 1 - news/12350.bugfix.rst | 1 - ...EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst | 0 ...69-21F3-49F6-B938-AB16E326F82C.trivial.rst | 0 
news/2984.bugfix.rst | 1 - ...FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst | 1 - ...DE-8011-4146-8CAD-85D7756D88A6.trivial.rst | 0 ...F4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst | 0 ...60-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst | 0 ...CA-A0CF-4309-B808-1210C0B54632.trivial.rst | 0 news/certifi.vendor.rst | 1 - ...28-bc23-46aa-9175-834117a42dbd.trivial.rst | 0 news/truststore.vendor.rst | 1 - news/urllib3.vendor.rst | 1 - news/zhsdgdlsjgksdfj.trivial.rst | 0 src/pip/__init__.py | 2 +- 41 files changed, 58 insertions(+), 39 deletions(-) delete mode 100644 news/11394.bugfix.rst delete mode 100644 news/11649.bugfix.rst delete mode 100644 news/11847.bugfix.rst delete mode 100644 news/11924.bugfix.rst delete mode 100644 news/11924.feature.rst delete mode 100644 news/12005.bugfix.rst delete mode 100644 news/12059.doc.rst delete mode 100644 news/12095.bugfix.rst delete mode 100644 news/12122.doc.rst delete mode 100644 news/12155.trivial.rst delete mode 100644 news/12166.bugfix.rst delete mode 100644 news/12175.removal.rst delete mode 100644 news/12183.trivial.rst delete mode 100644 news/12187.bugfix.rst delete mode 100644 news/12194.trivial.rst delete mode 100644 news/12204.feature.rst delete mode 100644 news/12215.feature.rst delete mode 100644 news/12224.feature.rst delete mode 100644 news/12225.bugfix.rst delete mode 100644 news/12252.trivial.rst delete mode 100644 news/12254.process.rst delete mode 100644 news/12261.trivial.rst delete mode 100644 news/12280.bugfix.rst delete mode 100644 news/12306.bugfix.rst delete mode 100644 news/12334.doc.rst delete mode 100644 news/12350.bugfix.rst delete mode 100644 news/12AE57EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst delete mode 100644 news/1F54AB69-21F3-49F6-B938-AB16E326F82C.trivial.rst delete mode 100644 news/2984.bugfix.rst delete mode 100644 news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst delete mode 100644 news/732404DE-8011-4146-8CAD-85D7756D88A6.trivial.rst delete mode 100644 
news/80291DF4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst delete mode 100644 news/85F7E260-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst delete mode 100644 news/E2B261CA-A0CF-4309-B808-1210C0B54632.trivial.rst delete mode 100644 news/certifi.vendor.rst delete mode 100644 news/d7179b28-bc23-46aa-9175-834117a42dbd.trivial.rst delete mode 100644 news/truststore.vendor.rst delete mode 100644 news/urllib3.vendor.rst delete mode 100644 news/zhsdgdlsjgksdfj.trivial.rst diff --git a/NEWS.rst b/NEWS.rst index fc3bb6697ad..27ac69d793a 100644 --- a/NEWS.rst +++ b/NEWS.rst @@ -9,6 +9,63 @@ .. towncrier release notes start +23.3 (2023-10-15) +================= + +Process +------- + +- Added reference to `vulnerability reporting guidelines `_ to pip's security policy. + +Deprecations and Removals +------------------------- + +- Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions. (`#12175 `_) + +Features +-------- + +- Improve extras resolution for multiple constraints on same base package. (`#11924 `_) +- Improve use of datastructures to make candidate selection 1.6x faster (`#12204 `_) +- Allow ``pip install --dry-run`` to use platform and ABI overriding options similar to ``--target``. (`#12215 `_) +- Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to PEP 592. (`#12224 `_) + +Bug Fixes +--------- + +- Ignore errors in temporary directory cleanup (show a warning instead). (`#11394 `_) +- Normalize extras according to :pep:`685` from package metadata in the resolver + for comparison. This ensures extras are correctly compared and merged as long + as the package providing the extra(s) is built with values normalized according + to the standard. 
Note, however, that this *does not* solve cases where the + package itself contains unnormalized extra values in the metadata. (`#11649 `_) +- Prevent downloading sdists twice when PEP 658 metadata is present. (`#11847 `_) +- Include all requested extras in the install report (``--report``). (`#11924 `_) +- Removed uses of ``datetime.datetime.utcnow`` from non-vendored code. (`#12005 `_) +- Consistently report whether a dependency comes from an extra. (`#12095 `_) +- Fix completion script for zsh (`#12166 `_) +- Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12. (`#12187 `_) +- Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message. (`#12225 `_) +- Fix crash when the git version number contains something else than digits and dots. (`#12280 `_) +- Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial. (`#12306 `_) +- Redact password from URLs in some additional places. (`#12350 `_) +- pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). (`#2984 `_) + +Vendored Libraries +------------------ + +- Upgrade certifi to 2023.7.22 +- Add truststore 0.8.0 +- Upgrade urllib3 to 1.26.17 + +Improved Documentation +---------------------- + +- Document that ``pip search`` support has been removed from PyPI (`#12059 `_) +- Clarify --prefer-binary in CLI and docs (`#12122 `_) +- Document that using OS-provided Python can cause pip's test suite to report false failures. (`#12334 `_) + + 23.2.1 (2023-07-22) =================== diff --git a/news/11394.bugfix.rst b/news/11394.bugfix.rst deleted file mode 100644 index 9f2501db46c..00000000000 --- a/news/11394.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Ignore errors in temporary directory cleanup (show a warning instead). 
diff --git a/news/11649.bugfix.rst b/news/11649.bugfix.rst deleted file mode 100644 index 65511711f59..00000000000 --- a/news/11649.bugfix.rst +++ /dev/null @@ -1,5 +0,0 @@ -Normalize extras according to :pep:`685` from package metadata in the resolver -for comparison. This ensures extras are correctly compared and merged as long -as the package providing the extra(s) is built with values normalized according -to the standard. Note, however, that this *does not* solve cases where the -package itself contains unnormalized extra values in the metadata. diff --git a/news/11847.bugfix.rst b/news/11847.bugfix.rst deleted file mode 100644 index 1f384835fef..00000000000 --- a/news/11847.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Prevent downloading sdists twice when PEP 658 metadata is present. diff --git a/news/11924.bugfix.rst b/news/11924.bugfix.rst deleted file mode 100644 index 7a9ee3151a4..00000000000 --- a/news/11924.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Include all requested extras in the install report (``--report``). diff --git a/news/11924.feature.rst b/news/11924.feature.rst deleted file mode 100644 index 30bc60e6bce..00000000000 --- a/news/11924.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Improve extras resolution for multiple constraints on same base package. diff --git a/news/12005.bugfix.rst b/news/12005.bugfix.rst deleted file mode 100644 index 98a3e5112df..00000000000 --- a/news/12005.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Removed uses of ``datetime.datetime.utcnow`` from non-vendored code. diff --git a/news/12059.doc.rst b/news/12059.doc.rst deleted file mode 100644 index bf3a8d3e662..00000000000 --- a/news/12059.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Document that ``pip search`` support has been removed from PyPI diff --git a/news/12095.bugfix.rst b/news/12095.bugfix.rst deleted file mode 100644 index 1f5018326ba..00000000000 --- a/news/12095.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Consistently report whether a dependency comes from an extra. 
diff --git a/news/12122.doc.rst b/news/12122.doc.rst deleted file mode 100644 index 49a3308a25c..00000000000 --- a/news/12122.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Clarify --prefer-binary in CLI and docs diff --git a/news/12155.trivial.rst b/news/12155.trivial.rst deleted file mode 100644 index 5f77231c864..00000000000 --- a/news/12155.trivial.rst +++ /dev/null @@ -1,6 +0,0 @@ -The metadata-fetching log message is moved to the VERBOSE level and now hidden -by default. The more significant information in this message to most users are -already available in surrounding logs (the package name and version of the -metadata being fetched), while the URL to the exact metadata file is generally -too long and clutters the output. The message can be brought back with -``--verbose``. diff --git a/news/12166.bugfix.rst b/news/12166.bugfix.rst deleted file mode 100644 index 491597c7f1a..00000000000 --- a/news/12166.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix completion script for zsh diff --git a/news/12175.removal.rst b/news/12175.removal.rst deleted file mode 100644 index bf3500f351a..00000000000 --- a/news/12175.removal.rst +++ /dev/null @@ -1 +0,0 @@ -Drop a fallback to using SecureTransport on macOS. It was useful when pip detected OpenSSL older than 1.0.1, but the current pip does not support any Python version supporting such old OpenSSL versions. diff --git a/news/12183.trivial.rst b/news/12183.trivial.rst deleted file mode 100644 index c22e854c9a5..00000000000 --- a/news/12183.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Add test cases for some behaviors of ``install --dry-run`` and ``--use-feature=fast-deps``. diff --git a/news/12187.bugfix.rst b/news/12187.bugfix.rst deleted file mode 100644 index b4d106b974f..00000000000 --- a/news/12187.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix improper handling of the new onexc argument of ``shutil.rmtree()`` in Python 3.12. 
diff --git a/news/12194.trivial.rst b/news/12194.trivial.rst deleted file mode 100644 index dfe5bbf1f06..00000000000 --- a/news/12194.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Add lots of comments to the ``BuildTracker``. diff --git a/news/12204.feature.rst b/news/12204.feature.rst deleted file mode 100644 index 6ffdf5123b1..00000000000 --- a/news/12204.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Improve use of datastructures to make candidate selection 1.6x faster diff --git a/news/12215.feature.rst b/news/12215.feature.rst deleted file mode 100644 index 407dc903ed9..00000000000 --- a/news/12215.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Allow ``pip install --dry-run`` to use platform and ABI overriding options similar to ``--target``. diff --git a/news/12224.feature.rst b/news/12224.feature.rst deleted file mode 100644 index d874265787a..00000000000 --- a/news/12224.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``is_yanked`` boolean entry to the installation report (``--report``) to indicate whether the requirement was yanked from the index, but was still selected by pip conform to PEP 592. diff --git a/news/12225.bugfix.rst b/news/12225.bugfix.rst deleted file mode 100644 index e1e0c323dc3..00000000000 --- a/news/12225.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Filter out yanked links from the available versions error message: "(from versions: 1.0, 2.0, 3.0)" will not contain yanked versions conform PEP 592. The yanked versions (if any) will be mentioned in a separate error message. diff --git a/news/12252.trivial.rst b/news/12252.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/12254.process.rst b/news/12254.process.rst deleted file mode 100644 index e546902685b..00000000000 --- a/news/12254.process.rst +++ /dev/null @@ -1 +0,0 @@ -Added reference to `vulnerability reporting guidelines `_ to pip's security policy. 
diff --git a/news/12261.trivial.rst b/news/12261.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/12280.bugfix.rst b/news/12280.bugfix.rst deleted file mode 100644 index 77de283d398..00000000000 --- a/news/12280.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix crash when the git version number contains something else than digits and dots. diff --git a/news/12306.bugfix.rst b/news/12306.bugfix.rst deleted file mode 100644 index eb6eecaaf1b..00000000000 --- a/news/12306.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Use ``-r=...`` instead of ``-r ...`` to specify references with Mercurial. diff --git a/news/12334.doc.rst b/news/12334.doc.rst deleted file mode 100644 index ff3d877e5e8..00000000000 --- a/news/12334.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Document that using OS-provided Python can cause pip's test suite to report false failures. diff --git a/news/12350.bugfix.rst b/news/12350.bugfix.rst deleted file mode 100644 index 3fb16b4ed6a..00000000000 --- a/news/12350.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Redact password from URLs in some additional places. diff --git a/news/12AE57EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst b/news/12AE57EC-683C-4A8E-BCCB-851FCD0730B4.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/1F54AB69-21F3-49F6-B938-AB16E326F82C.trivial.rst b/news/1F54AB69-21F3-49F6-B938-AB16E326F82C.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/2984.bugfix.rst b/news/2984.bugfix.rst deleted file mode 100644 index cce561815c9..00000000000 --- a/news/2984.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -pip uses less memory when caching large packages. As a result, there is a new on-disk cache format stored in a new directory ($PIP_CACHE_DIR/http-v2). 
diff --git a/news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst b/news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst deleted file mode 100644 index 7f6c1d5612e..00000000000 --- a/news/4A0C40FF-ABE1-48C7-954C-7C3EB229135F.trivial.rst +++ /dev/null @@ -1 +0,0 @@ -Add ruff rules ASYNC,C4,C90,PERF,PLE,PLR for minor optimizations and to set upper limits on code complexity. diff --git a/news/732404DE-8011-4146-8CAD-85D7756D88A6.trivial.rst b/news/732404DE-8011-4146-8CAD-85D7756D88A6.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/80291DF4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst b/news/80291DF4-7B0F-4268-B682-E1FCA1C3ACED.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/85F7E260-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst b/news/85F7E260-68FF-4C1E-A2CB-CF8634829D2D.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/E2B261CA-A0CF-4309-B808-1210C0B54632.trivial.rst b/news/E2B261CA-A0CF-4309-B808-1210C0B54632.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/certifi.vendor.rst b/news/certifi.vendor.rst deleted file mode 100644 index aacd17183f1..00000000000 --- a/news/certifi.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade certifi to 2023.7.22 diff --git a/news/d7179b28-bc23-46aa-9175-834117a42dbd.trivial.rst b/news/d7179b28-bc23-46aa-9175-834117a42dbd.trivial.rst deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/news/truststore.vendor.rst b/news/truststore.vendor.rst deleted file mode 100644 index 63c71d72d2f..00000000000 --- a/news/truststore.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Add truststore 0.8.0 diff --git a/news/urllib3.vendor.rst b/news/urllib3.vendor.rst deleted file mode 100644 index 37032f67a0e..00000000000 --- a/news/urllib3.vendor.rst +++ /dev/null @@ -1 +0,0 @@ -Upgrade urllib3 to 1.26.17 diff --git a/news/zhsdgdlsjgksdfj.trivial.rst b/news/zhsdgdlsjgksdfj.trivial.rst deleted file mode 100644 
index e69de29bb2d..00000000000 diff --git a/src/pip/__init__.py b/src/pip/__init__.py index 00ce8ad456d..62498a779d5 100644 --- a/src/pip/__init__.py +++ b/src/pip/__init__.py @@ -1,6 +1,6 @@ from typing import List, Optional -__version__ = "23.3.dev0" +__version__ = "23.3" def main(args: Optional[List[str]] = None) -> int: