From 9ad32770c7345c34077950f1defd2df834744613 Mon Sep 17 00:00:00 2001 From: Rakhi Sharma Date: Wed, 6 Jul 2016 19:49:20 +0530 Subject: [PATCH 01/63] Bug 734326 - Use Australis button styling for bookmarks toolbar items on windows and linux, r=Gijs, ui-r=shorlander MozReview-Commit-ID: BjbUqluBLwm --HG-- extra : rebase_source : 8fb9d01b93a23536fbddb4ee6e01e06e888158ae --- browser/themes/linux/browser.css | 22 +++++++++++++++++----- browser/themes/windows/browser.css | 16 +++++++++++----- 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/browser/themes/linux/browser.css b/browser/themes/linux/browser.css index 430e92d84704..b7170de3766c 100644 --- a/browser/themes/linux/browser.css +++ b/browser/themes/linux/browser.css @@ -140,12 +140,24 @@ toolbarbutton.bookmark-item:not(.subviewbutton):not(:hover):not(:active):not([op color: inherit; } +toolbarbutton.bookmark-item:not(.subviewbutton) { + -moz-appearance: none; + border: 1px solid transparent; + border-radius: 2px; + transition-property: background-color, border-color; + transition-duration: 150ms; +} + +toolbarbutton.bookmark-item:not(.subviewbutton):hover:not([open]) { + background-color: var(--toolbarbutton-hover-background); + border-color: var(--toolbarbutton-hover-bordercolor); +} + toolbarbutton.bookmark-item:not(.subviewbutton):hover:active, toolbarbutton.bookmark-item[open="true"] { - padding-top: 3px; - padding-bottom: 1px; - padding-inline-start: 4px; - padding-inline-end: 2px; + background: var(--toolbarbutton-active-background); + box-shadow: var(--toolbarbutton-active-boxshadow); + border-color: var(--toolbarbutton-active-bordercolor); } .bookmark-item > .toolbarbutton-icon, @@ -1670,7 +1682,7 @@ toolbarbutton.chevron { list-style-image: url("chrome://global/skin/toolbar/chevron.gif") !important; } -toolbar[brighttext] toolbarbutton.chevron:not(:hover):not([open="true"]) { +toolbar[brighttext] toolbarbutton.chevron { list-style-image: url("chrome://global/skin/toolbar/chevron-inverted.png") !important; } diff --git a/browser/themes/windows/browser.css b/browser/themes/windows/browser.css index bc0d54d44d08..6dd2812f291b 100644 --- a/browser/themes/windows/browser.css +++ b/browser/themes/windows/browser.css @@ -464,14 +464,20 @@ toolbarbutton.bookmark-item:not(.subviewbutton), #personal-bookmarks[cui-areatype="toolbar"]:not([overflowedItem=true]) > #bookmarks-toolbar-placeholder { margin: 0; padding: 2px 3px; + -moz-appearance: none; + border: 1px solid transparent; } -toolbarbutton.bookmark-item:not([disabled="true"]):not(.subviewbutton):hover:active, +toolbarbutton.bookmark-item:not(.subviewbutton):hover:not([disabled="true"]):not([open]) { + border-color: var(--toolbarbutton-hover-bordercolor); + background: var(--toolbarbutton-hover-background); +} + +toolbarbutton.bookmark-item:not(.subviewbutton):hover:active:not([disabled="true"]), toolbarbutton.bookmark-item[open="true"] { - padding-top: 3px; - padding-bottom: 1px; - padding-inline-start: 4px; - padding-inline-end: 2px; + border-color: var(--toolbarbutton-active-bordercolor); + box-shadow: var(--toolbarbutton-active-boxshadow); + background: var(--toolbarbutton-active-background); } .bookmark-item > .toolbarbutton-icon, From 7805abd00a5a42115ff7a50c8d25c16a9aba6bb0 Mon Sep 17 00:00:00 2001 From: Mike Conley Date: Wed, 20 Jul 2016 11:00:25 -0400 Subject: [PATCH 02/63] Bug 1287938 - Make ts_paint use the parent process start time and first paint info in calculations. 
r=jmaher Instead of checking for the paint time of the tspaint_test.html content, we're now measuring the delta between process start and first paint as reported by the parent process's startup info. MozReview-Commit-ID: 868mf2vazwL --HG-- extra : rebase_source : 7ba8062be91ca00ab1bbcd386b4c9213148e41f7 --- .../talos/startup_test/tspaint_test.html | 28 ++++++------------ .../chrome/talos-powers-content.js | 17 +++++++++++ .../components/TalosPowersService.js | 26 ++++++++++++++++ .../content/TalosPowersContent.js | 15 ++++++++++ testing/talos/talos/talos-powers/install.rdf | 2 +- .../talos-powers/talos-powers-signed.xpi | Bin 14934 -> 25707 bytes 6 files changed, 68 insertions(+), 20 deletions(-) diff --git a/testing/talos/talos/startup_test/tspaint_test.html b/testing/talos/talos/startup_test/tspaint_test.html index 60553dcfd486..a5470e841ee4 100755 --- a/testing/talos/talos/startup_test/tspaint_test.html +++ b/testing/talos/talos/startup_test/tspaint_test.html @@ -6,37 +6,28 @@ + - diff --git a/testing/talos/talos/talos-powers/chrome/talos-powers-content.js b/testing/talos/talos/talos-powers/chrome/talos-powers-content.js index 73cfb9bcb83a..f5c565c36560 100644 --- a/testing/talos/talos/talos-powers/chrome/talos-powers-content.js +++ b/testing/talos/talos/talos-powers/chrome/talos-powers-content.js @@ -63,3 +63,20 @@ addEventListener("TalosPowersContentFocus", (e) => { }, content); content.dispatchEvent(new content.CustomEvent("TalosPowersContentFocused", contentEvent)); }, true, true); + +addEventListener("TalosPowersContentGetStartupInfo", (e) => { + sendAsyncMessage("TalosPowersContent:GetStartupInfo"); + addMessageListener("TalosPowersContent:GetStartupInfo:Result", + function onResult(msg) { + removeMessageListener("TalosPowersContent:GetStartupInfo:Result", + onResult); + let event = Cu.cloneInto({ + bubbles: true, + detail: msg.data, + }, content); + + content.dispatchEvent( + new content.CustomEvent("TalosPowersContentGetStartupInfoResult", + event)); + }); +}); diff --git a/testing/talos/talos/talos-powers/components/TalosPowersService.js b/testing/talos/talos/talos-powers/components/TalosPowersService.js index 218faf3ec5df..49889598e189 100644 --- a/testing/talos/talos/talos-powers/components/TalosPowersService.js +++ b/testing/talos/talos/talos-powers/components/TalosPowersService.js @@ -40,6 +40,7 @@ TalosPowersService.prototype = { Services.mm.addMessageListener("Talos:ForceQuit", this); Services.mm.addMessageListener("TalosContentProfiler:Command", this); Services.mm.addMessageListener("TalosPowersContent:ForceCCAndGC", this); + Services.mm.addMessageListener("TalosPowersContent:GetStartupInfo", this); Services.obs.addObserver(this, "xpcom-shutdown", false); }, @@ -63,6 +64,9 @@ TalosPowersService.prototype = { Cu.forceShrinkingGC(); break; } + case "TalosPowersContent:GetStartupInfo": { + this.receiveGetStartupInfo(message); + } } }, @@ -230,6 +234,28 @@ TalosPowersService.prototype = { dump('Force Quit failed: ' + e); } }, + + receiveGetStartupInfo(message) { + let mm = message.target.messageManager; + let startupInfo = Services.startup.getStartupInfo(); + + if (!startupInfo["firstPaint"]) { + // It's possible that we were called early enough that + // the firstPaint measurement hasn't been set yet. In + // that case, we set up an observer for the next time + // a window is painted and re-retrieve the startup info. 
+      let obs = function(subject, topic) {
+        Services.obs.removeObserver(this, topic);
+        startupInfo = Services.startup.getStartupInfo();
+        mm.sendAsyncMessage("TalosPowersContent:GetStartupInfo:Result",
+                            startupInfo);
+      };
+      Services.obs.addObserver(obs, "widget-first-paint", false);
+    } else {
+      mm.sendAsyncMessage("TalosPowersContent:GetStartupInfo:Result",
+                          startupInfo);
+    }
+  },
 };
 
 this.NSGetFactory = XPCOMUtils.generateNSGetFactory([TalosPowersService]);
diff --git a/testing/talos/talos/talos-powers/content/TalosPowersContent.js b/testing/talos/talos/talos-powers/content/TalosPowersContent.js
index a42a208a3799..fbcc0b2dff95 100644
--- a/testing/talos/talos/talos-powers/content/TalosPowersContent.js
+++ b/testing/talos/talos/talos-powers/content/TalosPowersContent.js
@@ -29,5 +29,20 @@ var TalosPowersContent;
       }));
     },
 
+    getStartupInfo() {
+      return new Promise((resolve) => {
+        var event = new CustomEvent("TalosPowersContentGetStartupInfo", {
+          bubbles: true,
+        });
+        document.dispatchEvent(event);
+
+        addEventListener("TalosPowersContentGetStartupInfoResult",
+          function onResult(e) {
+            removeEventListener("TalosPowersContentGetStartupInfoResult",
+                                onResult);
+            resolve(e.detail);
+          });
+      });
+    },
   };
 })();
diff --git a/testing/talos/talos/talos-powers/install.rdf b/testing/talos/talos/talos-powers/install.rdf
index e352e03491d5..0a6285ae025d 100644
--- a/testing/talos/talos/talos-powers/install.rdf
+++ b/testing/talos/talos/talos-powers/install.rdf
@@ -4,7 +4,7 @@
         xmlns:em="http://www.mozilla.org/2004/em-rdf#">
     talos-powers@mozilla.org
-    1.0.8
+    1.0.10
     {ec8030f7-c20a-464f-9b0e-13a3a9e97384}
diff --git a/testing/talos/talos/talos-powers/talos-powers-signed.xpi b/testing/talos/talos/talos-powers/talos-powers-signed.xpi
index 2963458372fe80850c654fd6d30f0d191673ffc0..642a2332eadcd9a243ce4258e919da710295e163 100644
GIT binary patch
delta 20287
[base85-encoded binary data omitted]

delta 9575
[base85-encoded binary data omitted]
zi8;gFk&Wg|ZhhWXr!f}dUFwOwk!Qg)#{>2zc|VdCn-qH9r?v;XHc4R~BJs!qtqS74zL%#>_s;1OXXsD#T&!dN0iP>yU zDH%HW3$hF}3^BPh@sVaw1N7oYGJ?J$+OMi$l4LaC2oK^zixKY?Z(NI*FpaB1^G}4l zl`Z`U#~8NfHwemh4+xsp#4XT(lxql!RY9s*sPiA|=0^+M_4D77uJH;U&zHt99L1h9{kjqq^gh_; zW0zzjH$N5bv|8ZMo+WomFe&$}7ai^XxQiCU;7le5Z{|(5TlHG!HEDV$AW!#MNK|{q z{GowFHIl83maq6NzN?C((t*X(M0b>d9L-;ui- zcU+69Yu8fV4ECPvX%3HK?d$2|S%;NcX-|%#t?_wX6ZvDU(a?U+4KsY7FY@uU3;*7i zYMkxYJBG#rjQzutR%C40aVaO;6F6L(ro||w9-?|y|GZG32-~>4>rVt3$k##5^n{S?eEo&q`BnB-pqIZh>Ni4DEy_yIdcn7?uADNWuCVyNpl5(O)FVoHgTc6l1lbq_SZzAjTvb8{|X#CA(M z4C#(yJMa*SJ4aa(;}LqJyDH5XWRdQ0!r1+u!dHhSpouOiMJEeQ>*@outOM!~&s|k0 zQ=BY9s&WE=FyBmmHCfI$y+4JY#lv-$ASC_8Y?rmtoPxot^zD32l@UbVid)(mQ77HB zaVb*I8?Lu!&F#CS6|as4qY%e?>$o?MF$_LBE0OY|?JHMBOL`j@ctun6KWtX=RY>=7 z=!=_QG-CJGNIZB_&%CgI1CcvTQKytZqj1FIK1r1bgpZx`FT@U36o6!*j(33z<>n#T z9k;^ait36ozK48ThcpQeKZUzTOFIq`dyB%g-yZdTO3kpZ7_EbzJc{__(dVRYW$)r@ zYHQ2l{Kn#MrGWP-L%XRDDSv0fUfE$(oG0gZPk>LOLm>kYOJmU`zIY_)_jbkfyUUw$ zJ$~5d%GG?^A{4XlQf_GIP#{KX!Zc@%F@{NKJg4@mm5DU=E!gmDd*=R2iJxsMKba=B z-w#LTk#SL5MJhG-UTN^x^$ce9VJv2vFo~0R6+C>CsAMB6;G!k-I67~*DzrwZV{dU0 zfqBi?>J;S(n7wM`O*GK$3+{-gZ-d=`H?Sm@)KVr~z1cBOR}S%o!(N01LS%9sb{53^f?^^>exHr(-*A|-p_sD~7tfX}jIh_)3vyYUR{8Z} zJW~r&B*^nCZSFFBpI|;w<(2Z#XtEwcQRYHNWg2ikF%Ze{P1NUwAJnt|o{IL<_(N2Y z`s47=9o&EAn7>!#zl$o*rC%MJn~3|V8gCH%S&_}DNdKkuXHbd4qdxz^-lj>bgbE1) zy@gJQ(IWyT2deYfuwmC*RW=@yC~~gzG+98O=r|+0QO)ov(3Q}qKRJXQEg3Ev7PKlb zeA@Hnf1J2yLPa04A<1K+(fu?yN0b=3@u^7TJ!jILy!xl~oXMzeo79Zp;9f6qwX)`M z?dK!8ZN_BD!-v&kL|xWdZELCJsy7Gh&S4rwAuIO4mfo%5O!wzj?j)8~BJ4|#acVuT z&DPG`wS$K}RLYm?uT%g;V_d5A7@SXh6TdDXWJ#>_4`1WqOUct39Su1XMxE9uVHA`b4OGbl?E3;W$8i+Nf@)-sC?z-x+6EpgiA zftPgw{~G}h0uj^N7xb(P(_{nA;4QadBiO`HD4MP0-i5qIJ^PbRBP@A;=tFg!eZsqp z$<_f_Kd4P+jGI&Y(oz2)xhBem0^#q4+F&=%#&BGAspD964GUAzv<60&2}<*a#`pY> z^ckUbB;0mQn;|Ij$r*!)Co|Ow!)S-1a`j%ovhd#hYcX(g^rk9c^MmP(n%_hSiutlWD&48_i#JbTjMpAtRjIK zrsSxB1Ii(vs5l(Xsp52ej^GE|#p?T~uj|7J2!xV~_%wjF(Fn+v+W-+S^(?M{)Ffug z89hhG#H}=qfTD@KpKfIU=yy(eep}AuXi^d)4PU-Gn=(+xyo7pxeu>qM7%9n!%n|wS z9LmnF@trU+3VWiU7q!KvH*UzLbP_E8?bgS%RHBbsdqh;Xuc()togki2At14#{_&Yd z0-X^D0QxOQ{!_$!js%D&p%4UOMFIV9lIAlq$zMSJscHTO8F>Qv9||Y%4~6tkxW5GD zv;F@c&r0W?&^apTSqK&WTaNydc>l)u7moiM z1Eh_g5NPxdDfOA~|8+6{M*Dv+rjX)!G5=Q+@Ov)*(gyqi1plY6)+rL$&B*?~oIkVH z-y!}%K>o6v7Mg@C<9}%@{_5&~D|pWkf6n0}9q9ioe}8>2|J1XeGw<_I27#X4Y|8MT zx8~mx@$V)5ODg^Y=FbTwGbBKm{Nthg$3uUOi{IJ#>GP*Q?El;aKoD>yJeX0C{U<1` hpU!(dZCHQ%kuG Date: Mon, 25 Jul 2016 17:26:44 +0800 Subject: [PATCH 03/63] Bug 1289052 - Move nsRuleNode methods as static functions to .cpp file. 
r=heycam MozReview-Commit-ID: 7tG2NlvQzGz --HG-- extra : rebase_source : dc44ac14f3cdbb73c1425b078a63394da2f96e81 --- layout/style/nsRuleNode.cpp | 62 ++++++++++++++++++------------------- layout/style/nsRuleNode.h | 21 ------------- 2 files changed, 31 insertions(+), 52 deletions(-) diff --git a/layout/style/nsRuleNode.cpp b/layout/style/nsRuleNode.cpp index c0511881ce1f..9e638098a9f6 100644 --- a/layout/style/nsRuleNode.cpp +++ b/layout/style/nsRuleNode.cpp @@ -4184,13 +4184,12 @@ inline uint32_t ListLength(const T* aList) return len; } - - -already_AddRefed -nsRuleNode::GetShadowData(const nsCSSValueList* aList, - nsStyleContext* aContext, - bool aIsBoxShadow, - RuleNodeCacheConditions& aConditions) +static already_AddRefed +GetShadowData(const nsCSSValueList* aList, + nsStyleContext* aContext, + bool aIsBoxShadow, + nsPresContext* aPresContext, + RuleNodeCacheConditions& aConditions) { uint32_t arrayLength = ListLength(aList); @@ -4213,13 +4212,13 @@ nsRuleNode::GetShadowData(const nsCSSValueList* aList, // OK to pass bad aParentCoord since we're not passing SETCOORD_INHERIT unitOK = SetCoord(arr->Item(0), tempCoord, nsStyleCoord(), SETCOORD_LENGTH | SETCOORD_CALC_LENGTH_ONLY, - aContext, mPresContext, aConditions); + aContext, aPresContext, aConditions); NS_ASSERTION(unitOK, "unexpected unit"); item->mXOffset = tempCoord.GetCoordValue(); unitOK = SetCoord(arr->Item(1), tempCoord, nsStyleCoord(), SETCOORD_LENGTH | SETCOORD_CALC_LENGTH_ONLY, - aContext, mPresContext, aConditions); + aContext, aPresContext, aConditions); NS_ASSERTION(unitOK, "unexpected unit"); item->mYOffset = tempCoord.GetCoordValue(); @@ -4228,7 +4227,7 @@ nsRuleNode::GetShadowData(const nsCSSValueList* aList, unitOK = SetCoord(arr->Item(2), tempCoord, nsStyleCoord(), SETCOORD_LENGTH | SETCOORD_CALC_LENGTH_ONLY | SETCOORD_CALC_CLAMP_NONNEGATIVE, - aContext, mPresContext, aConditions); + aContext, aPresContext, aConditions); NS_ASSERTION(unitOK, "unexpected unit"); item->mRadius = tempCoord.GetCoordValue(); } else { @@ -4239,7 +4238,7 @@ nsRuleNode::GetShadowData(const nsCSSValueList* aList, if (aIsBoxShadow && arr->Item(3).GetUnit() != eCSSUnit_Null) { unitOK = SetCoord(arr->Item(3), tempCoord, nsStyleCoord(), SETCOORD_LENGTH | SETCOORD_CALC_LENGTH_ONLY, - aContext, mPresContext, aConditions); + aContext, aPresContext, aConditions); NS_ASSERTION(unitOK, "unexpected unit"); item->mSpread = tempCoord.GetCoordValue(); } else { @@ -4249,7 +4248,7 @@ nsRuleNode::GetShadowData(const nsCSSValueList* aList, if (arr->Item(4).GetUnit() != eCSSUnit_Null) { item->mHasColor = true; // 2nd argument can be bogus since inherit is not a valid color - unitOK = SetColor(arr->Item(4), 0, mPresContext, aContext, item->mColor, + unitOK = SetColor(arr->Item(4), 0, aPresContext, aContext, item->mColor, aConditions); NS_ASSERTION(unitOK, "unexpected unit"); } @@ -4452,7 +4451,7 @@ nsRuleNode::ComputeTextData(void* aStartStruct, textShadowValue->GetUnit() == eCSSUnit_ListDep) { // List of arrays text->mTextShadow = GetShadowData(textShadowValue->GetListValue(), - aContext, false, conditions); + aContext, false, mPresContext, conditions); } } @@ -9549,11 +9548,11 @@ nsRuleNode::ComputeSVGData(void* aStartStruct, COMPUTE_END_INHERITED(SVG, svg) } -already_AddRefed -nsRuleNode::GetStyleBasicShapeFromCSSValue(const nsCSSValue& aValue, - nsStyleContext* aStyleContext, - nsPresContext* aPresContext, - RuleNodeCacheConditions& aConditions) +static already_AddRefed +GetStyleBasicShapeFromCSSValue(const nsCSSValue& aValue, + nsStyleContext* 
aStyleContext, + nsPresContext* aPresContext, + RuleNodeCacheConditions& aConditions) { RefPtr basicShape; @@ -9698,12 +9697,12 @@ nsRuleNode::GetStyleBasicShapeFromCSSValue(const nsCSSValue& aValue, return basicShape.forget(); } -void -nsRuleNode::SetStyleClipPathToCSSValue(nsStyleClipPath* aStyleClipPath, - const nsCSSValue* aValue, - nsStyleContext* aStyleContext, - nsPresContext* aPresContext, - RuleNodeCacheConditions& aConditions) +static void +SetStyleClipPathToCSSValue(nsStyleClipPath* aStyleClipPath, + const nsCSSValue* aValue, + nsStyleContext* aStyleContext, + nsPresContext* aPresContext, + RuleNodeCacheConditions& aConditions) { MOZ_ASSERT(aValue->GetUnit() == eCSSUnit_Array, "expected a basic shape or reference box"); @@ -9740,12 +9739,12 @@ nsRuleNode::SetStyleClipPathToCSSValue(nsStyleClipPath* aStyleClipPath, } // Returns true if the nsStyleFilter was successfully set using the nsCSSValue. -bool -nsRuleNode::SetStyleFilterToCSSValue(nsStyleFilter* aStyleFilter, - const nsCSSValue& aValue, - nsStyleContext* aStyleContext, - nsPresContext* aPresContext, - RuleNodeCacheConditions& aConditions) +static bool +SetStyleFilterToCSSValue(nsStyleFilter* aStyleFilter, + const nsCSSValue& aValue, + nsStyleContext* aStyleContext, + nsPresContext* aPresContext, + RuleNodeCacheConditions& aConditions) { nsCSSUnit unit = aValue.GetUnit(); if (unit == eCSSUnit_URL) { @@ -9773,6 +9772,7 @@ nsRuleNode::SetStyleFilterToCSSValue(nsStyleFilter* aStyleFilter, filterFunction->Item(1).GetListValue(), aStyleContext, false, + aPresContext, aConditions); aStyleFilter->SetDropShadow(shadowArray); return true; @@ -10161,7 +10161,7 @@ nsRuleNode::ComputeEffectsData(void* aStartStruct, case eCSSUnit_List: case eCSSUnit_ListDep: effects->mBoxShadow = GetShadowData(boxShadowValue->GetListValue(), - aContext, true, conditions); + aContext, true, mPresContext, conditions); break; default: diff --git a/layout/style/nsRuleNode.h b/layout/style/nsRuleNode.h index c58f60392f7a..8c14cb08d3b9 100644 --- a/layout/style/nsRuleNode.h +++ b/layout/style/nsRuleNode.h @@ -783,27 +783,6 @@ protected: inline RuleDetail CheckSpecifiedProperties(const nsStyleStructID aSID, const nsRuleData* aRuleData); - already_AddRefed - GetShadowData(const nsCSSValueList* aList, - nsStyleContext* aContext, - bool aIsBoxShadow, - mozilla::RuleNodeCacheConditions& aConditions); - already_AddRefed - GetStyleBasicShapeFromCSSValue(const nsCSSValue& aValue, - nsStyleContext* aStyleContext, - nsPresContext* aPresContext, - mozilla::RuleNodeCacheConditions& aConditions); - bool SetStyleFilterToCSSValue(nsStyleFilter* aStyleFilter, - const nsCSSValue& aValue, - nsStyleContext* aStyleContext, - nsPresContext* aPresContext, - mozilla::RuleNodeCacheConditions& aConditions); - void SetStyleClipPathToCSSValue(nsStyleClipPath* aStyleClipPath, - const nsCSSValue* aValue, - nsStyleContext* aStyleContext, - nsPresContext* aPresContext, - mozilla::RuleNodeCacheConditions& aConditions); - private: nsRuleNode(nsPresContext* aPresContext, nsRuleNode* aParent, nsIStyleRule* aRule, mozilla::SheetType aLevel, bool aIsImportant); From 8cd914069c4fd8bd556f5655ba7de866af55e618 Mon Sep 17 00:00:00 2001 From: Tom Tromey Date: Thu, 21 Jul 2016 08:39:29 -0600 Subject: [PATCH 04/63] Bug 1264649 - add reject-some-requires eslint rule; r=pbro MozReview-Commit-ID: FVxy2c5Wsgg --HG-- extra : rebase_source : 901f8aee971f9ab48cef7eceffb4cfc8ad567822 --- .../eslint-plugin-mozilla/docs/index.rst | 4 ++ .../docs/reject-some-requires.rst | 12 +++++ 
.../eslint/eslint-plugin-mozilla/lib/index.js | 2 + .../lib/rules/reject-some-requires.js | 48 +++++++++++++++++++ .../eslint/eslint-plugin-mozilla/package.json | 2 +- tools/lint/eslint/manifest.tt | 4 +- tools/lint/eslint/npm-shrinkwrap.json | 14 +++--- tools/lint/mach_commands.py | 2 +- 8 files changed, 77 insertions(+), 11 deletions(-) create mode 100644 tools/lint/eslint/eslint-plugin-mozilla/docs/reject-some-requires.rst create mode 100644 tools/lint/eslint/eslint-plugin-mozilla/lib/rules/reject-some-requires.js diff --git a/tools/lint/eslint/eslint-plugin-mozilla/docs/index.rst b/tools/lint/eslint/eslint-plugin-mozilla/docs/index.rst index 3623c1a7fbdc..55dfee3c03f3 100644 --- a/tools/lint/eslint/eslint-plugin-mozilla/docs/index.rst +++ b/tools/lint/eslint/eslint-plugin-mozilla/docs/index.rst @@ -32,6 +32,9 @@ second argument (meaning they add the exported properties into global scope). "Cu.importGlobalProperties". Use of this function is undesirable in some parts of the tree. +``reject-some-requires`` rejects some calls to ``require``, according +to a regexp passed in as an option. + ``this-top-level-scope`` treats top-level assignments like ``this.mumble = value`` as declaring a global. @@ -79,5 +82,6 @@ Example configuration:: no-aArgs no-cpows-in-tests reject-importGlobalProperties + reject-some-requires this-top-level-scope var-only-at-top-level diff --git a/tools/lint/eslint/eslint-plugin-mozilla/docs/reject-some-requires.rst b/tools/lint/eslint/eslint-plugin-mozilla/docs/reject-some-requires.rst new file mode 100644 index 000000000000..a0845d220d8a --- /dev/null +++ b/tools/lint/eslint/eslint-plugin-mozilla/docs/reject-some-requires.rst @@ -0,0 +1,12 @@ +.. _reject-some-requires: + +==================== +reject-some-requires +==================== + +Rule Details +------------ + +This takes an option, a regular expression. Invocations of +``require`` with a string literal argument are matched against this +regexp; and if it matches, the ``require`` use is flagged. diff --git a/tools/lint/eslint/eslint-plugin-mozilla/lib/index.js b/tools/lint/eslint/eslint-plugin-mozilla/lib/index.js index edf31e09dd23..e1f694c3602e 100644 --- a/tools/lint/eslint/eslint-plugin-mozilla/lib/index.js +++ b/tools/lint/eslint/eslint-plugin-mozilla/lib/index.js @@ -26,6 +26,7 @@ module.exports = { "no-cpows-in-tests": require("../lib/rules/no-cpows-in-tests"), "no-single-arg-cu-import": require("../lib/rules/no-single-arg-cu-import"), "reject-importGlobalProperties": require("../lib/rules/reject-importGlobalProperties"), + "reject-some-requires": require("../lib/rules/reject-some-requires"), "var-only-at-top-level": require("../lib/rules/var-only-at-top-level") }, rulesConfig: { @@ -38,6 +39,7 @@ module.exports = { "no-cpows-in-tests": 0, "no-single-arg-cu-import": 0, "reject-importGlobalProperties": 0, + "reject-some-requires": 0, "var-only-at-top-level": 0 } }; diff --git a/tools/lint/eslint/eslint-plugin-mozilla/lib/rules/reject-some-requires.js b/tools/lint/eslint/eslint-plugin-mozilla/lib/rules/reject-some-requires.js new file mode 100644 index 000000000000..746f98a1f160 --- /dev/null +++ b/tools/lint/eslint/eslint-plugin-mozilla/lib/rules/reject-some-requires.js @@ -0,0 +1,48 @@ +/** + * @fileoverview Reject some uses of require. + * + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ */ + +"use strict"; + +// ----------------------------------------------------------------------------- +// Rule Definition +// ----------------------------------------------------------------------------- + +module.exports = function(context) { + + // --------------------------------------------------------------------------- + // Public + // -------------------------------------------------------------------------- + + if (typeof(context.options[0]) !== "string") { + throw new Error("reject-some-requires expects a regexp"); + } + const RX = new RegExp(context.options[0]); + + const checkPath = function(node, path) { + if (RX.test(path)) { + context.report(node, `require(${path}) is not allowed`); + } + }; + + return { + "CallExpression": function(node) { + if (node.callee.type == "Identifier" && + node.callee.name == "require" && + node.arguments.length == 1 && + node.arguments[0].type == "Literal") { + checkPath(node, node.arguments[0].value); + } else if (node.callee.type == "MemberExpression" && + node.callee.property.type == "Identifier" && + node.callee.property.name == "lazyRequireGetter" && + node.arguments.length >= 3 && + node.arguments[2].type == "Literal") { + checkPath(node, node.arguments[2].value); + } + } + }; +}; diff --git a/tools/lint/eslint/eslint-plugin-mozilla/package.json b/tools/lint/eslint/eslint-plugin-mozilla/package.json index aa1f00c5f621..123bf21c9e0a 100644 --- a/tools/lint/eslint/eslint-plugin-mozilla/package.json +++ b/tools/lint/eslint/eslint-plugin-mozilla/package.json @@ -1,6 +1,6 @@ { "name": "eslint-plugin-mozilla", - "version": "0.1.1", + "version": "0.2.0", "description": "A collection of rules that help enforce JavaScript coding standard in the Mozilla project.", "keywords": [ "eslint", diff --git a/tools/lint/eslint/manifest.tt b/tools/lint/eslint/manifest.tt index ed35d0de2c52..1eabd483c5c2 100644 --- a/tools/lint/eslint/manifest.tt +++ b/tools/lint/eslint/manifest.tt @@ -1,8 +1,8 @@ [ { -"size": 2348527, +"size": 2349680, "visibility": "public", -"digest": "2e6c1f35b178e2ee1055c89f020f6b3b88f310a4b63f2fbb2023016c3890f672f86f8e35f716745135740c59fdccd3ad46d48c7995e7d281aa19d74637caa405", +"digest": "2b02ae6dd4bc735990660f97a831f05e604c28120977e4120cf59619fb02be22cbd42be26ec2bd176f172f4566f3dfb445082e8d9651346662b8fb8fde407b8c", "algorithm": "sha512", "filename": "eslint.tar.gz" } diff --git a/tools/lint/eslint/npm-shrinkwrap.json b/tools/lint/eslint/npm-shrinkwrap.json index f19571d66648..deec183be73c 100644 --- a/tools/lint/eslint/npm-shrinkwrap.json +++ b/tools/lint/eslint/npm-shrinkwrap.json @@ -56,9 +56,9 @@ "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.1.tgz" }, "brace-expansion": { - "version": "1.1.5", + "version": "1.1.6", "from": "brace-expansion@>=1.0.0 <2.0.0", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.5.tgz" + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.6.tgz" }, "caller-path": { "version": "0.1.0", @@ -220,7 +220,7 @@ "resolved": "https://registry.npmjs.org/eslint-plugin-html/-/eslint-plugin-html-1.4.0.tgz" }, "eslint-plugin-mozilla": { - "version": "0.1.1", + "version": "0.2.0", "from": "eslint-plugin-mozilla", "resolved": "file:eslint-plugin-mozilla", "dependencies": { @@ -279,9 +279,9 @@ "resolved": "https://registry.npmjs.org/exit-hook/-/exit-hook-1.1.1.tgz" }, "fast-levenshtein": { - "version": "1.1.3", + "version": "1.1.4", "from": "fast-levenshtein@>=1.1.0 <2.0.0", - "resolved": 
"https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-1.1.3.tgz" + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-1.1.4.tgz" }, "figures": { "version": "1.7.0", @@ -569,9 +569,9 @@ "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-1.0.1.tgz" }, "rimraf": { - "version": "2.5.3", + "version": "2.5.4", "from": "rimraf@>=2.2.8 <3.0.0", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.5.3.tgz" + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.5.4.tgz" }, "run-async": { "version": "0.1.0", diff --git a/tools/lint/mach_commands.py b/tools/lint/mach_commands.py index cfc26ccd59c7..b21960c95b46 100644 --- a/tools/lint/mach_commands.py +++ b/tools/lint/mach_commands.py @@ -32,7 +32,7 @@ here = os.path.abspath(os.path.dirname(__file__)) ESLINT_PACKAGES = [ "eslint@2.9.0", "eslint-plugin-html@1.4.0", - "eslint-plugin-mozilla@0.1.1", + "eslint-plugin-mozilla@0.2.0", "eslint-plugin-react@4.2.3" ] From 46650a73f43dd8a5948bbbf39f47a89a1faf431d Mon Sep 17 00:00:00 2001 From: Tom Tromey Date: Thu, 21 Jul 2016 09:04:47 -0600 Subject: [PATCH 05/63] Bug 1264649 - enable reject-some-requires rule for inspector; r=pbro MozReview-Commit-ID: AX58XXecRcL --HG-- extra : rebase_source : ec04a97c61a8cd5ba04e1f144df3f6799852d08b --- devtools/.eslintrc.mochitests | 7 ++++++- devtools/.eslintrc.xpcshell | 2 ++ devtools/client/inspector/.eslintrc | 12 ++++++++++++ devtools/client/inspector/breadcrumbs.js | 2 ++ devtools/client/inspector/computed/computed.js | 4 ++++ devtools/client/inspector/inspector-commands.js | 2 ++ devtools/client/inspector/inspector-panel.js | 2 ++ devtools/client/inspector/inspector-search.js | 2 ++ devtools/client/inspector/layout/layout.js | 2 ++ devtools/client/inspector/markup/markup.js | 4 ++++ devtools/client/inspector/rules/models/rule.js | 2 ++ .../client/inspector/rules/models/text-property.js | 4 ++++ devtools/client/inspector/rules/rules.js | 4 ++++ devtools/client/inspector/rules/views/rule-editor.js | 4 ++++ .../inspector/rules/views/text-property-editor.js | 2 ++ devtools/client/inspector/shared/utils.js | 2 ++ devtools/client/inspector/toolsidebar.js | 2 ++ 17 files changed, 58 insertions(+), 1 deletion(-) create mode 100644 devtools/client/inspector/.eslintrc diff --git a/devtools/.eslintrc.mochitests b/devtools/.eslintrc.mochitests index bec9930503f4..8a3f9b6ad871 100644 --- a/devtools/.eslintrc.mochitests +++ b/devtools/.eslintrc.mochitests @@ -11,5 +11,10 @@ "synthesizeKeyFromKeyTag": true, "TargetFactory": true, "waitForTick": true, - } + }, + + "rules": { + // Tests can always import anything. + "mozilla/reject-some-requires": 0, + }, } diff --git a/devtools/.eslintrc.xpcshell b/devtools/.eslintrc.xpcshell index ad267ef70bab..fc6594d3ef4e 100644 --- a/devtools/.eslintrc.xpcshell +++ b/devtools/.eslintrc.xpcshell @@ -15,5 +15,7 @@ "block-scoped-var": 0, // Allow run_test to be unused in xpcshell "no-unused-vars": [2, { "varsIgnorePattern": "run_test" }], + // Tests can always import anything. + "mozilla/reject-some-requires": 0, } } diff --git a/devtools/client/inspector/.eslintrc b/devtools/client/inspector/.eslintrc new file mode 100644 index 000000000000..690da943289c --- /dev/null +++ b/devtools/client/inspector/.eslintrc @@ -0,0 +1,12 @@ +{ + // Extend from the devtools eslintrc. + "extends": "../../.eslintrc", + + "rules": { + // The inspector is being migrated to HTML and cleaned of + // chrome-privileged code, so this rule disallows requiring chrome + // code. 
Some files in the inspector disable this rule still. The + // goal is to enable the rule globally on all files. + "mozilla/reject-some-requires": [2, "^(chrome|chrome:.*|resource:.*|devtools/server/.*|.*\\.jsm)$"], + }, +} diff --git a/devtools/client/inspector/breadcrumbs.js b/devtools/client/inspector/breadcrumbs.js index 561a02780455..28b33c71690f 100644 --- a/devtools/client/inspector/breadcrumbs.js +++ b/devtools/client/inspector/breadcrumbs.js @@ -6,7 +6,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const Services = require("Services"); const promise = require("promise"); const FocusManager = Services.focus; diff --git a/devtools/client/inspector/computed/computed.js b/devtools/client/inspector/computed/computed.js index 38aed291e26f..061f4fa01e79 100644 --- a/devtools/client/inspector/computed/computed.js +++ b/devtools/client/inspector/computed/computed.js @@ -8,7 +8,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Cc, Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const ToolDefinitions = require("devtools/client/definitions").Tools; const CssLogic = require("devtools/shared/inspector/css-logic"); @@ -20,7 +22,9 @@ const {OutputParser} = require("devtools/client/shared/output-parser"); const {PrefObserver, PREF_ORIG_SOURCES} = require("devtools/client/styleeditor/utils"); const {createChild} = require("devtools/client/inspector/shared/utils"); const {gDevTools} = require("devtools/client/framework/devtools"); +/* eslint-disable mozilla/reject-some-requires */ const {XPCOMUtils} = require("resource://gre/modules/XPCOMUtils.jsm"); +/* eslint-enable mozilla/reject-some-requires */ const {getCssProperties} = require("devtools/shared/fronts/css-properties"); loader.lazyRequireGetter(this, "overlays", diff --git a/devtools/client/inspector/inspector-commands.js b/devtools/client/inspector/inspector-commands.js index ad0e9235a327..b6f686e97d8b 100644 --- a/devtools/client/inspector/inspector-commands.js +++ b/devtools/client/inspector/inspector-commands.js @@ -6,7 +6,9 @@ const l10n = require("gcli/l10n"); loader.lazyRequireGetter(this, "gDevTools", "devtools/client/framework/devtools", true); +/* eslint-disable mozilla/reject-some-requires */ const {EyeDropper, HighlighterEnvironment} = require("devtools/server/actors/highlighters"); +/* eslint-enable mozilla/reject-some-requires */ const Telemetry = require("devtools/client/shared/telemetry"); exports.items = [{ diff --git a/devtools/client/inspector/inspector-panel.js b/devtools/client/inspector/inspector-panel.js index e7973837c421..9b9b47a32e3b 100644 --- a/devtools/client/inspector/inspector-panel.js +++ b/devtools/client/inspector/inspector-panel.js @@ -8,7 +8,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Cc, Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ var Services = require("Services"); var promise = require("promise"); diff --git a/devtools/client/inspector/inspector-search.js b/devtools/client/inspector/inspector-search.js index 1c05f77d1599..23c45e8c6d75 100644 --- a/devtools/client/inspector/inspector-search.js +++ b/devtools/client/inspector/inspector-search.js @@ -4,7 +4,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const promise = require("promise"); const {Task} = require("devtools/shared/task"); 
diff --git a/devtools/client/inspector/layout/layout.js b/devtools/client/inspector/layout/layout.js index e710d19d3263..c6baddd173cc 100644 --- a/devtools/client/inspector/layout/layout.js +++ b/devtools/client/inspector/layout/layout.js @@ -6,7 +6,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Cc, Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const {Task} = require("devtools/shared/task"); const {InplaceEditor, editableItem} = require("devtools/client/shared/inplace-editor"); diff --git a/devtools/client/inspector/markup/markup.js b/devtools/client/inspector/markup/markup.js index 20df694af0cb..46a696c27589 100644 --- a/devtools/client/inspector/markup/markup.js +++ b/devtools/client/inspector/markup/markup.js @@ -7,7 +7,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Cc, Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ // Page size for pageup/pagedown const PAGE_SIZE = 10; @@ -54,7 +56,9 @@ const {KeyShortcuts} = require("devtools/client/shared/key-shortcuts"); const {template} = require("devtools/shared/gcli/templater"); const nodeConstants = require("devtools/shared/dom-node-constants"); const nodeFilterConstants = require("devtools/shared/dom-node-filter-constants"); +/* eslint-disable mozilla/reject-some-requires */ const {XPCOMUtils} = require("resource://gre/modules/XPCOMUtils.jsm"); +/* eslint-enable mozilla/reject-some-requires */ loader.lazyRequireGetter(this, "CSS", "CSS"); loader.lazyGetter(this, "AutocompletePopup", () => { diff --git a/devtools/client/inspector/rules/models/rule.js b/devtools/client/inspector/rules/models/rule.js index 5b773887ff26..a570e5521214 100644 --- a/devtools/client/inspector/rules/models/rule.js +++ b/devtools/client/inspector/rules/models/rule.js @@ -6,7 +6,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const promise = require("promise"); const CssLogic = require("devtools/shared/inspector/css-logic"); const {ELEMENT_STYLE} = require("devtools/shared/specs/styles"); diff --git a/devtools/client/inspector/rules/models/text-property.js b/devtools/client/inspector/rules/models/text-property.js index 60038065388a..cc7df18cf173 100644 --- a/devtools/client/inspector/rules/models/text-property.js +++ b/devtools/client/inspector/rules/models/text-property.js @@ -6,10 +6,14 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Cc, Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const {escapeCSSComment} = require("devtools/shared/css-parsing-utils"); const {getCssProperties} = require("devtools/shared/fronts/css-properties"); +/* eslint-disable mozilla/reject-some-requires */ const {XPCOMUtils} = require("resource://gre/modules/XPCOMUtils.jsm"); +/* eslint-enable mozilla/reject-some-requires */ XPCOMUtils.defineLazyGetter(this, "domUtils", function () { return Cc["@mozilla.org/inspector/dom-utils;1"].getService(Ci.inIDOMUtils); diff --git a/devtools/client/inspector/rules/rules.js b/devtools/client/inspector/rules/rules.js index 53cec0cab585..f08b32a34875 100644 --- a/devtools/client/inspector/rules/rules.js +++ b/devtools/client/inspector/rules/rules.js @@ -7,11 +7,15 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Cc, Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const promise = require("promise"); const defer = 
require("devtools/shared/defer"); const Services = require("Services"); +/* eslint-disable mozilla/reject-some-requires */ const {XPCOMUtils} = require("resource://gre/modules/XPCOMUtils.jsm"); +/* eslint-enable mozilla/reject-some-requires */ const {Task} = require("devtools/shared/task"); const {Tools} = require("devtools/client/definitions"); const {l10n} = require("devtools/shared/inspector/css-logic"); diff --git a/devtools/client/inspector/rules/views/rule-editor.js b/devtools/client/inspector/rules/views/rule-editor.js index 6e9dde1a884d..8cb14447b755 100644 --- a/devtools/client/inspector/rules/views/rule-editor.js +++ b/devtools/client/inspector/rules/views/rule-editor.js @@ -4,8 +4,12 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ +/* eslint-disable mozilla/reject-some-requires */ const {XPCOMUtils} = require("resource://gre/modules/XPCOMUtils.jsm"); +/* eslint-enable mozilla/reject-some-requires */ const {l10n} = require("devtools/shared/inspector/css-logic"); const {ELEMENT_STYLE} = require("devtools/shared/specs/styles"); const {PREF_ORIG_SOURCES} = require("devtools/client/styleeditor/utils"); diff --git a/devtools/client/inspector/rules/views/text-property-editor.js b/devtools/client/inspector/rules/views/text-property-editor.js index f04c58cda4d0..cfe7b16b0e57 100644 --- a/devtools/client/inspector/rules/views/text-property-editor.js +++ b/devtools/client/inspector/rules/views/text-property-editor.js @@ -4,7 +4,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const {l10n} = require("devtools/shared/inspector/css-logic"); const {getCssProperties} = require("devtools/shared/fronts/css-properties"); const {InplaceEditor, editableField} = diff --git a/devtools/client/inspector/shared/utils.js b/devtools/client/inspector/shared/utils.js index 7bf2d87cdeb4..b4c4e626bb2d 100644 --- a/devtools/client/inspector/shared/utils.js +++ b/devtools/client/inspector/shared/utils.js @@ -6,7 +6,9 @@ "use strict"; +/* eslint-disable mozilla/reject-some-requires */ const {Ci} = require("chrome"); +/* eslint-enable mozilla/reject-some-requires */ const {parseDeclarations} = require("devtools/shared/css-parsing-utils"); const promise = require("promise"); const {getCSSLexer} = require("devtools/shared/css-lexer"); diff --git a/devtools/client/inspector/toolsidebar.js b/devtools/client/inspector/toolsidebar.js index 65dde2a1409a..84f3ab2fe835 100644 --- a/devtools/client/inspector/toolsidebar.js +++ b/devtools/client/inspector/toolsidebar.js @@ -10,7 +10,9 @@ var Services = require("Services"); var EventEmitter = require("devtools/shared/event-emitter"); var Telemetry = require("devtools/client/shared/telemetry"); var { Task } = require("devtools/shared/task"); +/* eslint-disable mozilla/reject-some-requires */ var { XPCOMUtils } = require("resource://gre/modules/XPCOMUtils.jsm"); +/* eslint-enable mozilla/reject-some-requires */ /** * This object represents replacement for ToolSidebar From 8bec715c9675099ffec3ce23937e029e2efd594d Mon Sep 17 00:00:00 2001 From: Andrew Comminos Date: Thu, 21 Jul 2016 13:41:09 -0400 Subject: [PATCH 06/63] Bug 1287666 - Avoid a round trip to the X server in nsShmImage::CreateDrawTarget. 
r=lsalzman MozReview-Commit-ID: LpvsntOCMab --HG-- extra : rebase_source : e13ad6f33b875bcae59110347b696ea640085bbb --- widget/nsShmImage.cpp | 51 ++++++++++++++++++++++++------------------- widget/nsShmImage.h | 4 +++- 2 files changed, 32 insertions(+), 23 deletions(-) diff --git a/widget/nsShmImage.cpp b/widget/nsShmImage.cpp index 86b136634f58..74c3517dd66c 100644 --- a/widget/nsShmImage.cpp +++ b/widget/nsShmImage.cpp @@ -32,18 +32,14 @@ nsShmImage::nsShmImage(Display* aDisplay, , mSize(0, 0) , mPixmap(XCB_NONE) , mGC(XCB_NONE) + , mRequestPending(false) , mShmSeg(XCB_NONE) , mShmId(-1) , mShmAddr(nullptr) { mConnection = XGetXCBConnection(aDisplay); - mozilla::PodZero(&mLastRequest); - if (aDisplay == mozilla::DefaultXDisplay()) { - // If another thread spins the X event loop during a checked call, - // an error that should've been checked by XCB may be handled by the Xlib - // error handler. See bug 1287463. - NS_WARNING("Main thread X display used with nsShmImage!"); - } + mozilla::PodZero(&mPutRequest); + mozilla::PodZero(&mSyncRequest); } nsShmImage::~nsShmImage() @@ -250,13 +246,19 @@ nsShmImage::CreateDrawTarget(const mozilla::LayoutDeviceIntRegion& aRegion) // Wait for any in-flight requests to complete. // Typically X clients would wait for a XShmCompletionEvent to be received, // but this works as it's sent immediately after the request is processed. - xcb_generic_error_t* error; - if (mLastRequest.sequence != XCB_NONE && - (error = xcb_request_check(mConnection, mLastRequest))) - { - gShmAvailable = false; - free(error); - return nullptr; + if (mRequestPending) { + xcb_get_input_focus_reply_t* reply; + if ((reply = xcb_get_input_focus_reply(mConnection, mSyncRequest, nullptr))) { + free(reply); + } + mRequestPending = false; + + xcb_generic_error_t* error; + if ((error = xcb_request_check(mConnection, mPutRequest))) { + gShmAvailable = false; + free(error); + return nullptr; + } } // Due to bug 1205045, we must avoid making GTK calls off the main thread to query window size. @@ -301,17 +303,22 @@ nsShmImage::Put(const mozilla::LayoutDeviceIntRegion& aRegion) xrects.Length(), xrects.Elements()); if (mPixmap != XCB_NONE) { - mLastRequest = xcb_copy_area_checked(mConnection, mPixmap, mWindow, mGC, - 0, 0, 0, 0, mSize.width, mSize.height); + mPutRequest = xcb_copy_area_checked(mConnection, mPixmap, mWindow, mGC, + 0, 0, 0, 0, mSize.width, mSize.height); } else { - mLastRequest = xcb_shm_put_image_checked(mConnection, mWindow, mGC, - mSize.width, mSize.height, - 0, 0, mSize.width, mSize.height, - 0, 0, mDepth, - XCB_IMAGE_FORMAT_Z_PIXMAP, 0, - mShmSeg, 0); + mPutRequest = xcb_shm_put_image_checked(mConnection, mWindow, mGC, + mSize.width, mSize.height, + 0, 0, mSize.width, mSize.height, + 0, 0, mDepth, + XCB_IMAGE_FORMAT_Z_PIXMAP, 0, + mShmSeg, 0); } + // Send a request that returns a response so that we don't have to start a + // sync in nsShmImage::CreateDrawTarget to retrieve the result of mPutRequest. 
+ mSyncRequest = xcb_get_input_focus(mConnection); + mRequestPending = true; + xcb_flush(mConnection); } diff --git a/widget/nsShmImage.h b/widget/nsShmImage.h index 5be968fe1717..129a2d08c2fd 100644 --- a/widget/nsShmImage.h +++ b/widget/nsShmImage.h @@ -58,7 +58,9 @@ private: xcb_pixmap_t mPixmap; xcb_gcontext_t mGC; - xcb_void_cookie_t mLastRequest; + xcb_void_cookie_t mPutRequest; + xcb_get_input_focus_cookie_t mSyncRequest; + bool mRequestPending; xcb_shm_seg_t mShmSeg; int mShmId; From 2727c3db2a6ec33697e54682f0dc12c832eedd10 Mon Sep 17 00:00:00 2001 From: Tom Tromey Date: Thu, 21 Jul 2016 13:12:21 -0600 Subject: [PATCH 07/63] Bug 1286356 - isolate import of Console in event-emitter; r=gregtatum MozReview-Commit-ID: HIgYdui8wwO --HG-- extra : rebase_source : 3faef9161155645bbb649681826f68fe2ee3d60f --- devtools/shared/event-emitter.js | 44 +++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/devtools/shared/event-emitter.js b/devtools/shared/event-emitter.js index b258e7a99273..e676e2cdc318 100644 --- a/devtools/shared/event-emitter.js +++ b/devtools/shared/event-emitter.js @@ -5,24 +5,46 @@ "use strict"; (function (factory) { + // This file can be loaded in several different ways. It can be + // require()d, either from the main thread or from a worker thread; + // or it can be imported via Cu.import. These different forms + // explain some of the hairiness of this code. + // + // It's important for the devtools-as-html project that a require() + // on the main thread not use any chrome privileged APIs. Instead, + // the body of the main function can only require() (not Cu.import) + // modules that are available in the devtools content mode. This, + // plus the lack of |console| in workers, results in some gyrations + // in the definition of |console|. if (this.module && module.id.indexOf("event-emitter") >= 0) { + let console; + if (isWorker) { + console = { + error: () => {} + }; + } else { + console = this.console; + } // require - factory.call(this, require, exports, module); + factory.call(this, require, exports, module, console); } else { - // Cu.import + // Cu.import. This snippet implements a sort of miniature loader, + // which is responsible for appropriately translating require() + // requests from the client function. This code can use + // Cu.import, because it is never run in the devtools-in-content + // mode. this.isWorker = false; + const Cu = Components.utils; + let console = Cu.import("resource://gre/modules/Console.jsm", {}).console; // Bug 1259045: This module is loaded early in firefox startup as a JSM, // but it doesn't depends on any real module. We can save a few cycles // and bytes by not loading Loader.jsm. 
let require = function (module) { - const Cu = Components.utils; switch (module) { case "devtools/shared/defer": return Cu.import("resource://gre/modules/Promise.jsm", {}).Promise.defer; case "Services": return Cu.import("resource://gre/modules/Services.jsm", {}).Services; - case "resource://gre/modules/Console.jsm": - return Cu.import("resource://gre/modules/Console.jsm", {}); case "chrome": return { Cu, @@ -31,10 +53,13 @@ } return null; }; - factory.call(this, require, this, { exports: this }); + factory.call(this, require, this, { exports: this }, console); this.EXPORTED_SYMBOLS = ["EventEmitter"]; } -}).call(this, function (require, exports, module) { +}).call(this, function (require, exports, module, console) { + // âš âš âš âš âš âš âš âš âš âš âš âš âš âš âš âš  + // After this point the code may not use Cu.import, and should only + // require() modules that are "clean-for-content". let EventEmitter = this.EventEmitter = function () {}; module.exports = EventEmitter; @@ -44,18 +69,13 @@ const defer = require("devtools/shared/defer"); let loggingEnabled = true; - let console = {}; if (!isWorker) { - console = require("resource://gre/modules/Console.jsm").console; loggingEnabled = Services.prefs.getBoolPref("devtools.dump.emit"); Services.prefs.addObserver("devtools.dump.emit", { observe: () => { loggingEnabled = Services.prefs.getBoolPref("devtools.dump.emit"); } }, false); - } else { - // Workers can't load JSMs, so we can't import Console.jsm here. - console.error = () => {}; } /** From 6fa3ed32ec2408e11785f4bdce09b503f14b98d1 Mon Sep 17 00:00:00 2001 From: Andrew Comminos Date: Tue, 12 Jul 2016 15:01:21 -0400 Subject: [PATCH 08/63] Bug 1286847 - Remove calls to XGetGeometry from the compositor thread. r=jgilbert,jrmuizel MozReview-Commit-ID: IAd2y1FgiFn --HG-- extra : rebase_source : 48bab0df53c02a0abc8bf65927882526aa54e0bf --- gfx/gl/GLContext.h | 8 -------- gfx/gl/GLContextGLX.h | 2 -- gfx/gl/GLContextProviderGLX.cpp | 14 -------------- gfx/layers/opengl/CompositorOGL.cpp | 24 +++++------------------- 4 files changed, 5 insertions(+), 43 deletions(-) diff --git a/gfx/gl/GLContext.h b/gfx/gl/GLContext.h index ffd1094ac136..16c0f4166d69 100644 --- a/gfx/gl/GLContext.h +++ b/gfx/gl/GLContext.h @@ -29,7 +29,6 @@ #define MOZ_GL_DEBUG 1 #endif -#include "../../mfbt/Maybe.h" #include "../../mfbt/RefPtr.h" #include "../../mfbt/UniquePtr.h" @@ -3303,13 +3302,6 @@ public: GLuint GetFB(); - /* - * Retrieves the size of the native windowing system drawable. - */ - virtual Maybe GetTargetSize() { - return Maybe(); - }; - private: void GetShaderPrecisionFormatNonES2(GLenum shadertype, GLenum precisiontype, GLint* range, GLint* precision) { switch (precisiontype) { diff --git a/gfx/gl/GLContextGLX.h b/gfx/gl/GLContextGLX.h index 645b21e4cd3f..ca476baec0c4 100644 --- a/gfx/gl/GLContextGLX.h +++ b/gfx/gl/GLContextGLX.h @@ -66,8 +66,6 @@ public: // Undoes the effect of a drawable override. 
bool RestoreDrawable(); - virtual Maybe GetTargetSize() override; - private: friend class GLContextProviderGLX; diff --git a/gfx/gl/GLContextProviderGLX.cpp b/gfx/gl/GLContextProviderGLX.cpp index 0a1cdf536e78..5046570af9e3 100644 --- a/gfx/gl/GLContextProviderGLX.cpp +++ b/gfx/gl/GLContextProviderGLX.cpp @@ -987,20 +987,6 @@ GLContextGLX::SwapBuffers() return true; } -Maybe -GLContextGLX::GetTargetSize() -{ - unsigned int width = 0, height = 0; - Window root; - int x, y; - unsigned int border, depth; - XGetGeometry(mDisplay, mDrawable, &root, &x, &y, &width, &height, - &border, &depth); - Maybe size; - size.emplace(width, height); - return size; -} - bool GLContextGLX::OverrideDrawable(GLXDrawable drawable) { diff --git a/gfx/layers/opengl/CompositorOGL.cpp b/gfx/layers/opengl/CompositorOGL.cpp index f0862699382b..a43851e3d7e9 100644 --- a/gfx/layers/opengl/CompositorOGL.cpp +++ b/gfx/layers/opengl/CompositorOGL.cpp @@ -707,15 +707,9 @@ CompositorOGL::BeginFrame(const nsIntRegion& aInvalidRegion, // assumes scissor is enabled while it does clears. mGLContext->fEnable(LOCAL_GL_SCISSOR_TEST); - // Prefer the native windowing system's provided window size for the viewport. - IntSize viewportSize = - mGLContext->GetTargetSize().valueOr(mWidgetSize.ToUnknownSize()); - if (viewportSize != mWidgetSize.ToUnknownSize()) { - mGLContext->fScissor(0, 0, viewportSize.width, viewportSize.height); - } - RefPtr rt = - CompositingRenderTargetOGL::RenderTargetForWindow(this, viewportSize); + CompositingRenderTargetOGL::RenderTargetForWindow(this, + IntSize(width, height)); SetRenderTarget(rt); #ifdef DEBUG @@ -1501,21 +1495,13 @@ CompositorOGL::EndFrame() return; } + mCurrentRenderTarget = nullptr; + if (mTexturePool) { mTexturePool->EndFrame(); } - // If our window size changed during composition, we should discard the frame. - // We don't need to worry about rescheduling a composite, as widget - // implementations handle this in their expose event listeners. - // See bug 1184534. TODO: implement this for single-buffered targets? - IntSize targetSize = mGLContext->GetTargetSize().valueOr(mViewportSize); - if (!(mCurrentRenderTarget->IsWindow() && targetSize != mViewportSize)) { - mGLContext->SwapBuffers(); - } - - mCurrentRenderTarget = nullptr; - + mGLContext->SwapBuffers(); mGLContext->fBindBuffer(LOCAL_GL_ARRAY_BUFFER, 0); // Unbind all textures From 1c43032b0bf7854d368fa8134f3483c0008d887c Mon Sep 17 00:00:00 2001 From: Andrew Comminos Date: Mon, 18 Jul 2016 14:34:40 -0400 Subject: [PATCH 09/63] Bug 1286847 - Only enable scissor testing where required in the GL compositor. 
r=nical MozReview-Commit-ID: 5mxlDtY1YgK --HG-- extra : rebase_source : 9c9662fd6a43ece2b34b4468213e7f5bf98145a3 --- gfx/layers/opengl/CompositingRenderTargetOGL.cpp | 5 ++++- gfx/layers/opengl/CompositorOGL.cpp | 9 +++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/gfx/layers/opengl/CompositingRenderTargetOGL.cpp b/gfx/layers/opengl/CompositingRenderTargetOGL.cpp index b394a535f70f..c05b8edfd346 100644 --- a/gfx/layers/opengl/CompositingRenderTargetOGL.cpp +++ b/gfx/layers/opengl/CompositingRenderTargetOGL.cpp @@ -6,6 +6,7 @@ #include "CompositingRenderTargetOGL.h" #include "GLContext.h" #include "GLReadTexImageHelper.h" +#include "ScopedGLHelpers.h" #include "mozilla/gfx/2D.h" namespace mozilla { @@ -69,7 +70,9 @@ CompositingRenderTargetOGL::BindRenderTarget() } if (needsClear) { - mGL->fScissor(0, 0, mInitParams.mSize.width, mInitParams.mSize.height); + ScopedGLState scopedScissorTestState(mGL, LOCAL_GL_SCISSOR_TEST, true); + ScopedScissorRect autoScissorRect(mGL, 0, 0, mInitParams.mSize.width, + mInitParams.mSize.height); mGL->fClearColor(0.0, 0.0, 0.0, 0.0); mGL->fClearDepth(0.0); mGL->fClear(LOCAL_GL_COLOR_BUFFER_BIT | LOCAL_GL_DEPTH_BUFFER_BIT); diff --git a/gfx/layers/opengl/CompositorOGL.cpp b/gfx/layers/opengl/CompositorOGL.cpp index a43851e3d7e9..5c1e1180d102 100644 --- a/gfx/layers/opengl/CompositorOGL.cpp +++ b/gfx/layers/opengl/CompositorOGL.cpp @@ -703,10 +703,6 @@ CompositorOGL::BeginFrame(const nsIntRegion& aInvalidRegion, LOCAL_GL_ONE, LOCAL_GL_ONE_MINUS_SRC_ALPHA); mGLContext->fEnable(LOCAL_GL_BLEND); - // Make sure SCISSOR is enabled before setting the render target, since the RT - // assumes scissor is enabled while it does clears. - mGLContext->fEnable(LOCAL_GL_SCISSOR_TEST); - RefPtr rt = CompositingRenderTargetOGL::RenderTargetForWindow(this, IntSize(width, height)); @@ -1054,8 +1050,9 @@ CompositorOGL::DrawQuad(const Rect& aRect, clipRect.MoveBy(mRenderOffset.x, mRenderOffset.y); } - gl()->fScissor(clipRect.x, FlipY(clipRect.y + clipRect.height), - clipRect.width, clipRect.height); + ScopedGLState scopedScissorTestState(mGLContext, LOCAL_GL_SCISSOR_TEST, true); + ScopedScissorRect autoScissorRect(mGLContext, clipRect.x, FlipY(clipRect.y + clipRect.height), + clipRect.width, clipRect.height); MaskType maskType; EffectMask* effectMask; From 439f1b43945d9a1be8aafe16cad034ad97a666c1 Mon Sep 17 00:00:00 2001 From: Dan Minor Date: Mon, 25 Jul 2016 09:49:05 -0400 Subject: [PATCH 10/63] Bug 1289123: Remove test_zmedia_cleanup.html from Autophone WebRTC job manifest; r=drno MozReview-Commit-ID: CwC1Kr3mU9Q --HG-- extra : rebase_source : 895780ea63df7e89a2ad16ea736a73e84d2ae936 --- testing/mochitest/manifests/autophone-webrtc.ini | 1 - 1 file changed, 1 deletion(-) diff --git a/testing/mochitest/manifests/autophone-webrtc.ini b/testing/mochitest/manifests/autophone-webrtc.ini index 3f743f46cf0d..2f237c271850 100644 --- a/testing/mochitest/manifests/autophone-webrtc.ini +++ b/testing/mochitest/manifests/autophone-webrtc.ini @@ -132,7 +132,6 @@ skip-if = true # Bug 1265878 [../tests/dom/media/tests/mochitest/test_peerConnection_verifyVideoAfterRenegotiation.html] [../tests/dom/media/tests/mochitest/test_peerConnection_webAudio.html] [../tests/dom/media/tests/mochitest/test_selftest.html] -[../tests/dom/media/tests/mochitest/test_zmedia_cleanup.html] [../tests/dom/media/tests/mochitest/identity/test_fingerprints.html] skip-if = true # Bug 1200411 [../tests/dom/media/tests/mochitest/identity/test_getIdentityAssertion.html] From 
cd8204fb3b396c27281bacfe9cf62a0288f346b8 Mon Sep 17 00:00:00 2001 From: Andi-Bogdan Postelnicu Date: Mon, 25 Jul 2016 13:13:19 +0300 Subject: [PATCH 11/63] Bug 1289064 - eliminate null check on |iid| in XPCConvert::NativeInterface2JSObject. r=bholley MozReview-Commit-ID: EtiVWiHlYx5 --HG-- extra : rebase_source : 4dcd94c19f0b7f68787658e8937b2535fe37b39f --- js/xpconnect/src/XPCConvert.cpp | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/js/xpconnect/src/XPCConvert.cpp b/js/xpconnect/src/XPCConvert.cpp index 091bdf7ce556..83cfb42ab6b1 100644 --- a/js/xpconnect/src/XPCConvert.cpp +++ b/js/xpconnect/src/XPCConvert.cpp @@ -823,18 +823,16 @@ XPCConvert::NativeInterface2JSObject(MutableHandleValue d, // Go ahead and create an XPCWrappedNative for this object. AutoMarkingNativeInterfacePtr iface(cx); - if (iid) { + if (Interface) + iface = *Interface; + + if (!iface) { + iface = XPCNativeInterface::GetNewOrUsed(iid); + if (!iface) + return false; + if (Interface) - iface = *Interface; - - if (!iface) { - iface = XPCNativeInterface::GetNewOrUsed(iid); - if (!iface) - return false; - - if (Interface) - *Interface = iface; - } + *Interface = iface; } RefPtr wrapper; From 217ec4c7ccac7f570ede4eaff11d0f7fb3ff352e Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Mon, 25 Jul 2016 12:46:07 -0700 Subject: [PATCH 12/63] Bug 1288610 - Add functions for creating deterministic tar archives; r=ted I have a need to create tar archives deterministically and reproducibly. Since we already have similar functionality in mozpack for producting zip/jar archives, I figured it made sense for this functionality to live in mozpack. I made the functionality as simple as possible: we only accept files from the filesystem and the set of files must be known in advance. No class to hold/buffer state: just a simple function that takes a mapping of files and writes to a stream. MozReview-Commit-ID: If0NTcA7wpc --HG-- extra : rebase_source : 9cbea36347ba2840dd5bff9dffefd994a73a0725 --- python/mozbuild/mozpack/archive.py | 107 +++++++++++ python/mozbuild/mozpack/test/test_archive.py | 190 +++++++++++++++++++ 2 files changed, 297 insertions(+) create mode 100644 python/mozbuild/mozpack/archive.py create mode 100644 python/mozbuild/mozpack/test/test_archive.py diff --git a/python/mozbuild/mozpack/archive.py b/python/mozbuild/mozpack/archive.py new file mode 100644 index 000000000000..f3015ff21c30 --- /dev/null +++ b/python/mozbuild/mozpack/archive.py @@ -0,0 +1,107 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +import bz2 +import gzip +import stat +import tarfile + + +# 2016-01-01T00:00:00+0000 +DEFAULT_MTIME = 1451606400 + + +def create_tar_from_files(fp, files): + """Create a tar file deterministically. + + Receives a dict mapping names of files in the archive to local filesystem + paths. + + The files will be archived and written to the passed file handle opened + for writing. + + Only regular files can be written. 
+ + FUTURE accept mozpack.files classes for writing + FUTURE accept a filename argument (or create APIs to write files) + """ + with tarfile.open(name='', mode='w', fileobj=fp, dereference=True) as tf: + for archive_path, fs_path in sorted(files.items()): + ti = tf.gettarinfo(fs_path, archive_path) + + if not ti.isreg(): + raise ValueError('not a regular file: %s' % fs_path) + + # Disallow setuid and setgid bits. This is an arbitrary restriction. + # However, since we set uid/gid to root:root, setuid and setgid + # would be a glaring security hole if the archive were + # uncompressed as root. + if ti.mode & (stat.S_ISUID | stat.S_ISGID): + raise ValueError('cannot add file with setuid or setgid set: ' + '%s' % fs_path) + + # Set uid, gid, username, and group as deterministic values. + ti.uid = 0 + ti.gid = 0 + ti.uname = '' + ti.gname = '' + + # Set mtime to a constant value. + ti.mtime = DEFAULT_MTIME + + with open(fs_path, 'rb') as fh: + tf.addfile(ti, fh) + + +def create_tar_gz_from_files(fp, files, filename=None, compresslevel=9): + """Create a tar.gz file deterministically from files. + + This is a glorified wrapper around ``create_tar_from_files`` that + adds gzip compression. + + The passed file handle should be opened for writing in binary mode. + When the function returns, all data has been written to the handle. + """ + # Offset 3-7 in the gzip header contains an mtime. Pin it to a known + # value so output is deterministic. + gf = gzip.GzipFile(filename=filename or '', mode='wb', fileobj=fp, + compresslevel=compresslevel, mtime=DEFAULT_MTIME) + with gf: + create_tar_from_files(gf, files) + + +class _BZ2Proxy(object): + """File object that proxies writes to a bz2 compressor.""" + def __init__(self, fp, compresslevel=9): + self.fp = fp + self.compressor = bz2.BZ2Compressor(compresslevel=compresslevel) + self.pos = 0 + + def tell(self): + return self.pos + + def write(self, data): + data = self.compressor.compress(data) + self.pos += len(data) + self.fp.write(data) + + def close(self): + data = self.compressor.flush() + self.pos += len(data) + self.fp.write(data) + + +def create_tar_bz2_from_files(fp, files, compresslevel=9): + """Create a tar.bz2 file deterministically from files. + + This is a glorified wrapper around ``create_tar_from_files`` that + adds bzip2 compression. + + This function is similar to ``create_tar_gzip_from_files()``. + """ + proxy = _BZ2Proxy(fp, compresslevel=compresslevel) + create_tar_from_files(proxy, files) + proxy.close() diff --git a/python/mozbuild/mozpack/test/test_archive.py b/python/mozbuild/mozpack/test/test_archive.py new file mode 100644 index 000000000000..6f61f7eb7a7c --- /dev/null +++ b/python/mozbuild/mozpack/test/test_archive.py @@ -0,0 +1,190 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import + +import hashlib +import os +import shutil +import stat +import tarfile +import tempfile +import unittest + +from mozpack.archive import ( + DEFAULT_MTIME, + create_tar_from_files, + create_tar_gz_from_files, + create_tar_bz2_from_files, +) + +from mozunit import main + + +MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH + + +def file_hash(path): + h = hashlib.sha1() + with open(path, 'rb') as fh: + while True: + data = fh.read(8192) + if not data: + break + h.update(data) + + return h.hexdigest() + + +class TestArchive(unittest.TestCase): + def _create_files(self, root): + files = {} + for i in range(10): + p = os.path.join(root, b'file%d' % i) + with open(p, 'wb') as fh: + fh.write(b'file%d' % i) + # Need to set permissions or umask may influence testing. + os.chmod(p, MODE_STANDARD) + files[b'file%d' % i] = p + + return files + + def _verify_basic_tarfile(self, tf): + self.assertEqual(len(tf.getmembers()), 10) + + names = ['file%d' % i for i in range(10)] + self.assertEqual(tf.getnames(), names) + + for ti in tf.getmembers(): + self.assertEqual(ti.uid, 0) + self.assertEqual(ti.gid, 0) + self.assertEqual(ti.uname, '') + self.assertEqual(ti.gname, '') + self.assertEqual(ti.mode, MODE_STANDARD) + self.assertEqual(ti.mtime, DEFAULT_MTIME) + + def test_dirs_refused(self): + d = tempfile.mkdtemp() + try: + tp = os.path.join(d, 'test.tar') + with open(tp, 'wb') as fh: + with self.assertRaisesRegexp(ValueError, 'not a regular'): + create_tar_from_files(fh, {'test': d}) + finally: + shutil.rmtree(d) + + def test_setuid_setgid_refused(self): + d = tempfile.mkdtemp() + try: + uid = os.path.join(d, 'setuid') + gid = os.path.join(d, 'setgid') + with open(uid, 'a'): + pass + with open(gid, 'a'): + pass + + os.chmod(uid, MODE_STANDARD | stat.S_ISUID) + os.chmod(gid, MODE_STANDARD | stat.S_ISGID) + + tp = os.path.join(d, 'test.tar') + with open(tp, 'wb') as fh: + with self.assertRaisesRegexp(ValueError, 'cannot add file with setuid'): + create_tar_from_files(fh, {'test': uid}) + with self.assertRaisesRegexp(ValueError, 'cannot add file with setuid'): + create_tar_from_files(fh, {'test': gid}) + finally: + shutil.rmtree(d) + + def test_create_tar_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + tp = os.path.join(d, 'test.tar') + with open(tp, 'wb') as fh: + create_tar_from_files(fh, files) + + # Output should be deterministic. 
+ self.assertEqual(file_hash(tp), 'cd16cee6f13391abd94dfa435d2633b61ed727f1') + + with tarfile.open(tp, 'r') as tf: + self._verify_basic_tarfile(tf) + + finally: + shutil.rmtree(d) + + def test_executable_preserved(self): + d = tempfile.mkdtemp() + try: + p = os.path.join(d, 'exec') + with open(p, 'wb') as fh: + fh.write('#!/bin/bash\n') + os.chmod(p, MODE_STANDARD | stat.S_IXUSR) + + tp = os.path.join(d, 'test.tar') + with open(tp, 'wb') as fh: + create_tar_from_files(fh, {'exec': p}) + + self.assertEqual(file_hash(tp), '357e1b81c0b6cfdfa5d2d118d420025c3c76ee93') + + with tarfile.open(tp, 'r') as tf: + m = tf.getmember('exec') + self.assertEqual(m.mode, MODE_STANDARD | stat.S_IXUSR) + + finally: + shutil.rmtree(d) + + def test_create_tar_gz_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + gp = os.path.join(d, 'test.tar.gz') + with open(gp, 'wb') as fh: + create_tar_gz_from_files(fh, files) + + self.assertEqual(file_hash(gp), 'acb602239c1aeb625da5e69336775609516d60f5') + + with tarfile.open(gp, 'r:gz') as tf: + self._verify_basic_tarfile(tf) + + finally: + shutil.rmtree(d) + + def test_tar_gz_name(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + gp = os.path.join(d, 'test.tar.gz') + with open(gp, 'wb') as fh: + create_tar_gz_from_files(fh, files, filename='foobar', compresslevel=1) + + self.assertEqual(file_hash(gp), 'fd099f96480cc1100f37baa8e89a6b820dbbcbd3') + + with tarfile.open(gp, 'r:gz') as tf: + self._verify_basic_tarfile(tf) + + finally: + shutil.rmtree(d) + + def test_create_tar_bz2_basic(self): + d = tempfile.mkdtemp() + try: + files = self._create_files(d) + + bp = os.path.join(d, 'test.tar.bz2') + with open(bp, 'wb') as fh: + create_tar_bz2_from_files(fh, files) + + self.assertEqual(file_hash(bp), '1827ad00dfe7acf857b7a1c95ce100361e3f6eea') + + with tarfile.open(bp, 'r:bz2') as tf: + self._verify_basic_tarfile(tf) + finally: + shutil.rmtree(d) + + +if __name__ == '__main__': + main() From edbd669ecaf10568ea1c5dbae2f60fd2e882c47c Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Fri, 22 Jul 2016 10:20:06 -0700 Subject: [PATCH 13/63] Bug 1288567 - Extract function for creating context tars; r=dustin Upcoming commits will refactor how context tarballs are created. In preparation for this, we establish a standalone function for creating context tarballs and refactor docker_image.py to use it. MozReview-Commit-ID: KEW6ppO1vCl --HG-- extra : rebase_source : b81decf9ca14ff0216514f47419e96eb57d6f851 --- taskcluster/taskgraph/task/docker_image.py | 7 +++---- taskcluster/taskgraph/util/docker.py | 23 ++++++++++++++++++++++ 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/taskcluster/taskgraph/task/docker_image.py b/taskcluster/taskgraph/task/docker_image.py index ac74d4146445..7de56fd87754 100644 --- a/taskcluster/taskgraph/task/docker_image.py +++ b/taskcluster/taskgraph/task/docker_image.py @@ -9,13 +9,13 @@ import json import os import re import urllib2 -import tarfile import time from . 
import base from taskgraph.util.docker import ( + create_context_tar, docker_image, - generate_context_hash + generate_context_hash, ) from taskgraph.util.templates import Templates @@ -138,8 +138,7 @@ class DockerImageTask(base.Task): if not os.path.exists(os.path.dirname(destination)): os.makedirs(os.path.dirname(destination)) - with tarfile.open(destination, 'w:gz') as tar: - tar.add(context_dir, arcname=image_name) + create_context_tar(context_dir, destination, image_name) @classmethod def from_json(cls, task_dict): diff --git a/taskcluster/taskgraph/util/docker.py b/taskcluster/taskgraph/util/docker.py index 06217c5f0bbe..9f59f118be4e 100644 --- a/taskcluster/taskgraph/util/docker.py +++ b/taskcluster/taskgraph/util/docker.py @@ -6,6 +6,7 @@ from __future__ import absolute_import, print_function, unicode_literals import hashlib import os +import tarfile GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..', '..')) DOCKER_ROOT = os.path.join(GECKO, 'testing', 'docker') @@ -53,3 +54,25 @@ def generate_context_hash(image_path): context_hash.update(file_hash.hexdigest() + '\t' + relative_filename + '\n') return context_hash.hexdigest() + + +def create_context_tar(context_dir, out_path, prefix): + """Create a context tarball. + + A directory ``context_dir`` containing a Dockerfile will be assembled into + a gzipped tar file at ``out_path``. Files inside the archive will be + prefixed by directory ``prefix``. + + Returns the SHA-256 hex digest of the created archive. + """ + with tarfile.open(out_path, 'w:gz') as tar: + tar.add(context_dir, arcname=prefix) + + h = hashlib.sha256() + with open(out_path, 'rb') as fh: + while True: + data = fh.read(32768) + if not data: + break + h.update(data) + return h.hexdigest() From fd01511ec73987359b79d4557f617feab103bd7c Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Fri, 22 Jul 2016 10:29:58 -0700 Subject: [PATCH 14/63] Bug 1288567 - Use deterministic tar archive generation; r=dustin We recently implemented code in mozpack for performing deterministic tar file creation. It normalizes things like uids, gids, and mtimes that creep into archives. MozReview-Commit-ID: 1tn5eXkqACQ --HG-- extra : rebase_source : 6b069a3a50c9103ae4f6185b26d6a37658179f42 --- taskcluster/taskgraph/util/docker.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/taskcluster/taskgraph/util/docker.py b/taskcluster/taskgraph/util/docker.py index 9f59f118be4e..d191675fb3b5 100644 --- a/taskcluster/taskgraph/util/docker.py +++ b/taskcluster/taskgraph/util/docker.py @@ -6,7 +6,11 @@ from __future__ import absolute_import, print_function, unicode_literals import hashlib import os -import tarfile + +from mozpack.archive import ( + create_tar_gz_from_files, +) + GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..', '..')) DOCKER_ROOT = os.path.join(GECKO, 'testing', 'docker') @@ -65,8 +69,17 @@ def create_context_tar(context_dir, out_path, prefix): Returns the SHA-256 hex digest of the created archive. 
""" - with tarfile.open(out_path, 'w:gz') as tar: - tar.add(context_dir, arcname=prefix) + archive_files = {} + + for root, dirs, files in os.walk(context_dir): + for f in files: + source_path = os.path.join(root, f) + rel = source_path[len(context_dir) + 1:] + archive_path = os.path.join(prefix, rel) + archive_files[archive_path] = source_path + + with open(out_path, 'wb') as fh: + create_tar_gz_from_files(fh, archive_files, '%s.tar.gz' % prefix) h = hashlib.sha256() with open(out_path, 'rb') as fh: From 6cf855965ee6306bb1da6b4fbd22a26242e0a981 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Fri, 22 Jul 2016 10:24:08 -0700 Subject: [PATCH 15/63] Bug 1288567 - Use context hash of tar file; r=dustin Now that tar file generation is deterministic, we can use the hash of the created archive rather than the hash of the files that are (presumably) in the archive. This temporarily breaks consistent hashing by using independent hashing mechanisms. This will be cleaned up in a subsequent commit. MozReview-Commit-ID: CWooVGfDKZO --HG-- extra : rebase_source : 86923635f960e23f1483068bf809dec7aeacefd1 --- taskcluster/taskgraph/task/docker_image.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/taskcluster/taskgraph/task/docker_image.py b/taskcluster/taskgraph/task/docker_image.py index 7de56fd87754..8a30c526ca92 100644 --- a/taskcluster/taskgraph/task/docker_image.py +++ b/taskcluster/taskgraph/task/docker_image.py @@ -64,10 +64,8 @@ class DockerImageTask(base.Task): templates = Templates(path) for image_name in config['images']: context_path = os.path.join('testing', 'docker', image_name) - context_hash = generate_context_hash(context_path) image_parameters = dict(parameters) - image_parameters['context_hash'] = context_hash image_parameters['context_path'] = context_path image_parameters['artifact_path'] = 'public/image.tar' image_parameters['image_name'] = image_name @@ -80,12 +78,16 @@ class DockerImageTask(base.Task): "artifacts/decision_task/image_contexts/{}/context.tar.gz".format(image_name)) image_parameters['context_url'] = ARTIFACT_URL.format( os.environ['TASK_ID'], image_artifact_path) - cls.create_context_tar(context_path, destination, image_name) + context_hash = cls.create_context_tar(context_path, destination, + image_name) else: # skip context generation since this isn't a decision task # TODO: generate context tarballs using subdirectory clones in # the image-building task so we don't have to worry about this. image_parameters['context_url'] = 'file:///tmp/' + image_artifact_path + context_hash = generate_context_hash(context_path) + + image_parameters['context_hash'] = context_hash image_task = templates.load('image.yml', image_parameters) @@ -133,12 +135,15 @@ class DockerImageTask(base.Task): @classmethod def create_context_tar(cls, context_dir, destination, image_name): - 'Creates a tar file of a particular context directory.' + """Creates a tar file of a particular context directory. + + Returns the SHA-256 hex digest of the created file. 
+ """ destination = os.path.abspath(destination) if not os.path.exists(os.path.dirname(destination)): os.makedirs(os.path.dirname(destination)) - create_context_tar(context_dir, destination, image_name) + return create_context_tar(context_dir, destination, image_name) @classmethod def from_json(cls, task_dict): From e44882ed6dc7858ea600ea4ccee98c50a655bb5d Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Fri, 22 Jul 2016 10:32:58 -0700 Subject: [PATCH 16/63] Bug 1288567 - Pass topsrcdir into create_context_tar; r=dustin Relying on global variables like GECKO is a bit dangerous. To facilitate testing of archive generation in subsequent commits, let's pass an path into this function. The argument is currently unused. MozReview-Commit-ID: Et1UYraflDP --HG-- extra : rebase_source : 012095bc2450c72467f3f65f71bcb4cf6efcde66 --- taskcluster/taskgraph/task/docker_image.py | 2 +- taskcluster/taskgraph/util/docker.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/taskcluster/taskgraph/task/docker_image.py b/taskcluster/taskgraph/task/docker_image.py index 8a30c526ca92..020b409a8c41 100644 --- a/taskcluster/taskgraph/task/docker_image.py +++ b/taskcluster/taskgraph/task/docker_image.py @@ -143,7 +143,7 @@ class DockerImageTask(base.Task): if not os.path.exists(os.path.dirname(destination)): os.makedirs(os.path.dirname(destination)) - return create_context_tar(context_dir, destination, image_name) + return create_context_tar(GECKO, context_dir, destination, image_name) @classmethod def from_json(cls, task_dict): diff --git a/taskcluster/taskgraph/util/docker.py b/taskcluster/taskgraph/util/docker.py index d191675fb3b5..48cd7eeaf31f 100644 --- a/taskcluster/taskgraph/util/docker.py +++ b/taskcluster/taskgraph/util/docker.py @@ -60,7 +60,7 @@ def generate_context_hash(image_path): return context_hash.hexdigest() -def create_context_tar(context_dir, out_path, prefix): +def create_context_tar(topsrcdir, context_dir, out_path, prefix): """Create a context tarball. A directory ``context_dir`` containing a Dockerfile will be assembled into From b1bf2c70e803870fbf4137c2c22562d66bbb2365 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Fri, 22 Jul 2016 10:57:27 -0700 Subject: [PATCH 17/63] Bug 1288567 - Add basic test for context tar creation; r=dustin Now that the context tar creation function is standalone and doesn't rely on external state, we can start unit testing it easier. We establish a basic unit test that verifies the function works as advertised and that output is deterministic. 
MozReview-Commit-ID: H4MY28PiHSN --HG-- extra : rebase_source : 692a5e3d5af6edd14b3d4ceb7c90cd1e0344052f --- .../taskgraph/test/test_util_docker.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/taskcluster/taskgraph/test/test_util_docker.py b/taskcluster/taskgraph/test/test_util_docker.py index 459bb04df0e3..2c38af04c3c3 100644 --- a/taskcluster/taskgraph/test/test_util_docker.py +++ b/taskcluster/taskgraph/test/test_util_docker.py @@ -6,6 +6,8 @@ from __future__ import absolute_import, print_function, unicode_literals import os import shutil +import stat +import tarfile import tempfile import unittest @@ -13,6 +15,9 @@ from ..util import docker from mozunit import MockedOpen +MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH + + class TestDocker(unittest.TestCase): def test_generate_context_hash(self): @@ -46,3 +51,29 @@ class TestDocker(unittest.TestCase): files["{}/myimage/VERSION".format(docker.DOCKER_ROOT)] = "1.2.3" with MockedOpen(files): self.assertEqual(docker.docker_image('myimage'), "mozilla/myimage:1.2.3") + + def test_create_context_tar_basic(self): + tmp = tempfile.mkdtemp() + try: + d = os.path.join(tmp, 'test_image') + os.mkdir(d) + with open(os.path.join(d, 'Dockerfile'), 'a'): + pass + os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD) + + with open(os.path.join(d, 'extra'), 'a'): + pass + os.chmod(os.path.join(d, 'extra'), MODE_STANDARD) + + tp = os.path.join(tmp, 'tar') + h = docker.create_context_tar(tmp, d, tp, 'my_image') + self.assertEqual(h, '2a6d7f1627eba60daf85402418e041d728827d309143c6bc1c6bb3035bde6717') + + # File prefix should be "my_image" + with tarfile.open(tp, 'r:gz') as tf: + self.assertEqual(tf.getnames(), [ + 'my_image/Dockerfile', + 'my_image/extra', + ]) + finally: + shutil.rmtree(tmp) From 0c32f4dd338bf183086d5f5070a39f9fd2cc3ad3 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Fri, 22 Jul 2016 12:46:06 -0700 Subject: [PATCH 18/63] Bug 1288567 - Use create_context_tar in generate_context_hash; r=dustin This restores order to only having a single hash for a context directory. Using a tempfile here is a bit unfortunate. It can be optimized later, if needed. 
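(For reference only, not part of the patch: the "hash of the created archive" is simply a streaming SHA-256 over the generated tarball, along the lines of the sketch below, which mirrors the chunked read that create_context_tar already performs; the helper name is made up.)

    import hashlib


    def sha256_of_file(path, chunk_size=32768):
        """Stream a file through SHA-256 and return its hex digest."""
        h = hashlib.sha256()
        with open(path, 'rb') as fh:
            for chunk in iter(lambda: fh.read(chunk_size), b''):
                h.update(chunk)
        return h.hexdigest()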
MozReview-Commit-ID: LMNsvt3fDYx --HG-- extra : rebase_source : 8c2b70164aed6d744a71d170d0324797e755cbaf --- taskcluster/taskgraph/docker.py | 2 +- taskcluster/taskgraph/task/docker_image.py | 2 +- .../taskgraph/test/test_util_docker.py | 4 +-- taskcluster/taskgraph/util/docker.py | 35 ++++++------------- 4 files changed, 15 insertions(+), 28 deletions(-) diff --git a/taskcluster/taskgraph/docker.py b/taskcluster/taskgraph/docker.py index 383a95ff15f7..345a11231f78 100644 --- a/taskcluster/taskgraph/docker.py +++ b/taskcluster/taskgraph/docker.py @@ -21,7 +21,7 @@ ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}' def load_image_by_name(image_name): context_path = os.path.join(GECKO, 'testing', 'docker', image_name) - context_hash = docker.generate_context_hash(context_path) + context_hash = docker.generate_context_hash(GECKO, context_path, image_name) image_index_url = INDEX_URL.format('mozilla-central', image_name, context_hash) print("Fetching", image_index_url) diff --git a/taskcluster/taskgraph/task/docker_image.py b/taskcluster/taskgraph/task/docker_image.py index 020b409a8c41..5ef31811976e 100644 --- a/taskcluster/taskgraph/task/docker_image.py +++ b/taskcluster/taskgraph/task/docker_image.py @@ -85,7 +85,7 @@ class DockerImageTask(base.Task): # TODO: generate context tarballs using subdirectory clones in # the image-building task so we don't have to worry about this. image_parameters['context_url'] = 'file:///tmp/' + image_artifact_path - context_hash = generate_context_hash(context_path) + context_hash = generate_context_hash(GECKO, context_path, image_name) image_parameters['context_hash'] = context_hash diff --git a/taskcluster/taskgraph/test/test_util_docker.py b/taskcluster/taskgraph/test/test_util_docker.py index 2c38af04c3c3..d0dd715cd54c 100644 --- a/taskcluster/taskgraph/test/test_util_docker.py +++ b/taskcluster/taskgraph/test/test_util_docker.py @@ -31,8 +31,8 @@ class TestDocker(unittest.TestCase): with open(os.path.join(tmpdir, 'docker', 'my-image', 'a-file'), "w") as f: f.write("data\n") self.assertEqual( - docker.generate_context_hash('docker/my-image'), - '781143fcc6cc72c9024b058665265cb6bae3fb8031cad7227dd169ffbfced434' + docker.generate_context_hash(docker.GECKO, 'docker/my-image', 'my-image'), + '872d76a656f47ea17c043023ecc9ae6a222ba6d2a8df67b75498bba382e4fb07' ) finally: docker.GECKO = old_GECKO diff --git a/taskcluster/taskgraph/util/docker.py b/taskcluster/taskgraph/util/docker.py index 48cd7eeaf31f..76c3d26a6a7c 100644 --- a/taskcluster/taskgraph/util/docker.py +++ b/taskcluster/taskgraph/util/docker.py @@ -6,6 +6,7 @@ from __future__ import absolute_import, print_function, unicode_literals import hashlib import os +import tempfile from mozpack.archive import ( create_tar_gz_from_files, @@ -33,31 +34,17 @@ def docker_image(name): return '{}/{}:{}'.format(registry, name, version) -def generate_context_hash(image_path): - '''Generates a sha256 hash for context directory used to build an image. +def generate_context_hash(topsrcdir, image_path, image_name): + """Generates a sha256 hash for context directory used to build an image.""" - Contents of the directory are sorted alphabetically, contents of each file is hashed, - and then a hash is created for both the file hashs as well as their paths. - - This ensures that hashs are consistent and also change based on if file locations - within the context directory change. 
- ''' - context_hash = hashlib.sha256() - files = [] - - for dirpath, dirnames, filenames in os.walk(os.path.join(GECKO, image_path)): - for filename in filenames: - files.append(os.path.join(dirpath, filename)) - - for filename in sorted(files): - relative_filename = filename.replace(GECKO, '') - with open(filename, 'rb') as f: - file_hash = hashlib.sha256() - data = f.read() - file_hash.update(data) - context_hash.update(file_hash.hexdigest() + '\t' + relative_filename + '\n') - - return context_hash.hexdigest() + # It is a bit unfortunate we have to create a temp file here - it would + # be nicer to use an in-memory buffer. + fd, p = tempfile.mkstemp() + os.close(fd) + try: + return create_context_tar(topsrcdir, image_path, p, image_name) + finally: + os.unlink(p) def create_context_tar(topsrcdir, context_dir, out_path, prefix): From fc3f70b542d456f283315be55c9ff2a134ba1817 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Thu, 21 Jul 2016 16:51:30 -0700 Subject: [PATCH 19/63] Bug 1288567 - Add special Dockerfile syntax to add arbitrary files to context; r=dustin A limitation of traditional docker build context generation is it only includes files from the same directory as the Dockerfile. When repositories have multiple, related Dockerfiles, this limitation results file duplication or putting all Dockerfiles in the same directory (which isn't feasible for mozilla-central since they would need to be in the root directory). This commit enhances Dockerfiles to allow *any* file from the repository checkout to be ADDed to the docker build context. Using the syntax "# %include " you are able to include paths or directories (relative from the top source directory root) in the generated context archive. Files add this way are available under the "topsrcdir/" path and can be ADDed to Docker images. Since context archive generation is deterministic and the hash of the resulting archive is used to determine when images need to be rebuilt, any extra included file that changes will change the hash of the context archive and force image regeneration. Basic tests for the new feature have been added. MozReview-Commit-ID: 4hPZesJuGQV --HG-- extra : rebase_source : 99fae9fe82102126fbee879c134981047bb4a601 --- taskcluster/docs/docker-images.rst | 42 +++++++ taskcluster/docs/index.rst | 1 + .../taskgraph/test/test_util_docker.py | 109 ++++++++++++++++++ taskcluster/taskgraph/util/docker.py | 38 ++++++ 4 files changed, 190 insertions(+) create mode 100644 taskcluster/docs/docker-images.rst diff --git a/taskcluster/docs/docker-images.rst b/taskcluster/docs/docker-images.rst new file mode 100644 index 000000000000..22dea4dead06 --- /dev/null +++ b/taskcluster/docs/docker-images.rst @@ -0,0 +1,42 @@ +.. taskcluster_dockerimages: + +============= +Docker Images +============= + +TaskCluster Docker images are defined in the source directory under +``testing/docker``. Each directory therein contains the name of an +image used as part of the task graph. + +Adding Extra Files to Images +============================ + +Dockerfile syntax has been extended to allow *any* file from the +source checkout to be added to the image build *context*. (Traditionally +you can only ``ADD`` files from the same directory as the Dockerfile.) + +Simply add the following syntax as a comment in a Dockerfile:: + + # %include + +e.g. + + # %include mach + # %include testing/mozharness + +The argument to ``# %include`` is a relative path from the root level of +the source directory. It can be a file or a directory. 
If a file, only that +file will be added. If a directory, every file under that directory will be +added (even files that are untracked or ignored by version control). + +Files added using ``# %include`` syntax are available inside the build +context under the ``topsrcdir/`` path. + +Files are added as they exist on disk. e.g. executable flags should be +preserved. However, the file owner/group is changed to ``root`` and the +``mtime`` of the file is normalized. + +Here is an example Dockerfile snippet:: + + # %include mach + ADD topsrcdir/mach /home/worker/mach diff --git a/taskcluster/docs/index.rst b/taskcluster/docs/index.rst index fcc9406b6a86..6602bbab370c 100644 --- a/taskcluster/docs/index.rst +++ b/taskcluster/docs/index.rst @@ -28,3 +28,4 @@ check out the :doc:`how-to section `. transforms yaml-templates how-tos + docker-images diff --git a/taskcluster/taskgraph/test/test_util_docker.py b/taskcluster/taskgraph/test/test_util_docker.py index d0dd715cd54c..5b703a885d01 100644 --- a/taskcluster/taskgraph/test/test_util_docker.py +++ b/taskcluster/taskgraph/test/test_util_docker.py @@ -77,3 +77,112 @@ class TestDocker(unittest.TestCase): ]) finally: shutil.rmtree(tmp) + + def test_create_context_topsrcdir_files(self): + tmp = tempfile.mkdtemp() + try: + d = os.path.join(tmp, 'test-image') + os.mkdir(d) + with open(os.path.join(d, 'Dockerfile'), 'wb') as fh: + fh.write(b'# %include extra/file0\n') + os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD) + + extra = os.path.join(tmp, 'extra') + os.mkdir(extra) + with open(os.path.join(extra, 'file0'), 'a'): + pass + os.chmod(os.path.join(extra, 'file0'), MODE_STANDARD) + + tp = os.path.join(tmp, 'tar') + h = docker.create_context_tar(tmp, d, tp, 'test_image') + self.assertEqual(h, '20faeb7c134f21187b142b5fadba94ae58865dc929c6c293d8cbc0a087269338') + + with tarfile.open(tp, 'r:gz') as tf: + self.assertEqual(tf.getnames(), [ + 'test_image/Dockerfile', + 'test_image/topsrcdir/extra/file0', + ]) + finally: + shutil.rmtree(tmp) + + def test_create_context_absolute_path(self): + tmp = tempfile.mkdtemp() + try: + d = os.path.join(tmp, 'test-image') + os.mkdir(d) + + # Absolute paths in %include syntax are not allowed. 
+ with open(os.path.join(d, 'Dockerfile'), 'wb') as fh: + fh.write(b'# %include /etc/shadow\n') + + with self.assertRaisesRegexp(Exception, 'cannot be absolute'): + docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test') + finally: + shutil.rmtree(tmp) + + def test_create_context_outside_topsrcdir(self): + tmp = tempfile.mkdtemp() + try: + d = os.path.join(tmp, 'test-image') + os.mkdir(d) + + with open(os.path.join(d, 'Dockerfile'), 'wb') as fh: + fh.write(b'# %include foo/../../../etc/shadow\n') + + with self.assertRaisesRegexp(Exception, 'path outside topsrcdir'): + docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test') + finally: + shutil.rmtree(tmp) + + def test_create_context_missing_extra(self): + tmp = tempfile.mkdtemp() + try: + d = os.path.join(tmp, 'test-image') + os.mkdir(d) + + with open(os.path.join(d, 'Dockerfile'), 'wb') as fh: + fh.write(b'# %include does/not/exist\n') + + with self.assertRaisesRegexp(Exception, 'path does not exist'): + docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test') + finally: + shutil.rmtree(tmp) + + def test_create_context_extra_directory(self): + tmp = tempfile.mkdtemp() + try: + d = os.path.join(tmp, 'test-image') + os.mkdir(d) + + with open(os.path.join(d, 'Dockerfile'), 'wb') as fh: + fh.write(b'# %include extra\n') + fh.write(b'# %include file0\n') + os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD) + + extra = os.path.join(tmp, 'extra') + os.mkdir(extra) + for i in range(3): + p = os.path.join(extra, 'file%d' % i) + with open(p, 'wb') as fh: + fh.write(b'file%d' % i) + os.chmod(p, MODE_STANDARD) + + with open(os.path.join(tmp, 'file0'), 'a'): + pass + os.chmod(os.path.join(tmp, 'file0'), MODE_STANDARD) + + tp = os.path.join(tmp, 'tar') + h = docker.create_context_tar(tmp, d, tp, 'my_image') + + self.assertEqual(h, 'e5440513ab46ae4c1d056269e1c6715d5da7d4bd673719d360411e35e5b87205') + + with tarfile.open(tp, 'r:gz') as tf: + self.assertEqual(tf.getnames(), [ + 'my_image/Dockerfile', + 'my_image/topsrcdir/extra/file0', + 'my_image/topsrcdir/extra/file1', + 'my_image/topsrcdir/extra/file2', + 'my_image/topsrcdir/file0', + ]) + finally: + shutil.rmtree(tmp) diff --git a/taskcluster/taskgraph/util/docker.py b/taskcluster/taskgraph/util/docker.py index 76c3d26a6a7c..cfe9782fe59d 100644 --- a/taskcluster/taskgraph/util/docker.py +++ b/taskcluster/taskgraph/util/docker.py @@ -54,6 +54,15 @@ def create_context_tar(topsrcdir, context_dir, out_path, prefix): a gzipped tar file at ``out_path``. Files inside the archive will be prefixed by directory ``prefix``. + We also scan the source Dockerfile for special syntax that influences + context generation. + + If a line in the Dockerfile has the form ``# %include ``, + the relative path specified on that line will be matched against + files in the source repository and added to the context under the + path ``topsrcdir/``. If an entry is a directory, we add all files + under that directory. + Returns the SHA-256 hex digest of the created archive. """ archive_files = {} @@ -65,6 +74,35 @@ def create_context_tar(topsrcdir, context_dir, out_path, prefix): archive_path = os.path.join(prefix, rel) archive_files[archive_path] = source_path + # Parse Dockerfile for special syntax of extra files to include. 
+ with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh: + for line in fh: + line = line.rstrip() + if not line.startswith('# %include'): + continue + + p = line[len('# %include '):].strip() + if os.path.isabs(p): + raise Exception('extra include path cannot be absolute: %s' % p) + + fs_path = os.path.normpath(os.path.join(topsrcdir, p)) + # Check for filesystem traversal exploits. + if not fs_path.startswith(topsrcdir): + raise Exception('extra include path outside topsrcdir: %s' % p) + + if not os.path.exists(fs_path): + raise Exception('extra include path does not exist: %s' % p) + + if os.path.isdir(fs_path): + for root, dirs, files in os.walk(fs_path): + for f in files: + source_path = os.path.join(root, f) + archive_path = os.path.join(prefix, 'topsrcdir', p, f) + archive_files[archive_path] = source_path + else: + archive_path = os.path.join(prefix, 'topsrcdir', p) + archive_files[archive_path] = fs_path + with open(out_path, 'wb') as fh: create_tar_gz_from_files(fh, archive_files, '%s.tar.gz' % prefix) From 260d3ee7ed0404199a34f86cd9ae4f9c37280e85 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Mon, 25 Jul 2016 11:48:20 -0700 Subject: [PATCH 20/63] Bug 1288567 - Inline create_context_tar; r=dustin The function was only used once and was providing little to no value. A test of this function has been removed. Tests for the lower-level context creation function are sufficient. MozReview-Commit-ID: D9EhmZQlqG5 --HG-- extra : rebase_source : 4b3faa0fc5f085c1c77fe5636744946a6d442b05 --- taskcluster/taskgraph/task/docker_image.py | 21 +++++++------------ .../taskgraph/test/test_task_docker_image.py | 7 ------- 2 files changed, 7 insertions(+), 21 deletions(-) diff --git a/taskcluster/taskgraph/task/docker_image.py b/taskcluster/taskgraph/task/docker_image.py index 5ef31811976e..95fcca5d79c2 100644 --- a/taskcluster/taskgraph/task/docker_image.py +++ b/taskcluster/taskgraph/task/docker_image.py @@ -78,8 +78,13 @@ class DockerImageTask(base.Task): "artifacts/decision_task/image_contexts/{}/context.tar.gz".format(image_name)) image_parameters['context_url'] = ARTIFACT_URL.format( os.environ['TASK_ID'], image_artifact_path) - context_hash = cls.create_context_tar(context_path, destination, - image_name) + + destination = os.path.abspath(destination) + if not os.path.exists(os.path.dirname(destination)): + os.makedirs(os.path.dirname(destination)) + + context_hash = create_context_tar(GECKO, context_path, + destination, image_name) else: # skip context generation since this isn't a decision task # TODO: generate context tarballs using subdirectory clones in @@ -133,18 +138,6 @@ class DockerImageTask(base.Task): return False, None - @classmethod - def create_context_tar(cls, context_dir, destination, image_name): - """Creates a tar file of a particular context directory. - - Returns the SHA-256 hex digest of the created file. 
- """ - destination = os.path.abspath(destination) - if not os.path.exists(os.path.dirname(destination)): - os.makedirs(os.path.dirname(destination)) - - return create_context_tar(GECKO, context_dir, destination, image_name) - @classmethod def from_json(cls, task_dict): # Generating index_paths for optimization diff --git a/taskcluster/taskgraph/test/test_task_docker_image.py b/taskcluster/taskgraph/test/test_task_docker_image.py index 337681aea919..8f247db3e4ee 100644 --- a/taskcluster/taskgraph/test/test_task_docker_image.py +++ b/taskcluster/taskgraph/test/test_task_docker_image.py @@ -5,7 +5,6 @@ from __future__ import absolute_import, print_function, unicode_literals import unittest -import tempfile import os from ..task import docker_image @@ -31,12 +30,6 @@ class TestDockerImageKind(unittest.TestCase): # TODO: optimize_task - def test_create_context_tar(self): - image_dir = os.path.join(docker_image.GECKO, 'testing', 'docker', 'image_builder') - tarball = tempfile.mkstemp()[1] - self.task.create_context_tar(image_dir, tarball, 'image_builder') - self.failUnless(os.path.exists(tarball)) - os.unlink(tarball) if __name__ == '__main__': main() From 76e569b32536a3ca7c2ad00383097e83511690fe Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Thu, 21 Jul 2016 10:06:38 -0700 Subject: [PATCH 21/63] Bug 1247168 - Create worker user/group in decision image; r=dustin In preparation for running tasks as the worker user. MozReview-Commit-ID: DLgD0lh5V2C --HG-- extra : rebase_source : 1508517f9fbc986ada96cbe4ee77847ad6e1afcc extra : histedit_source : 4b2957c47fcab8704416748613e7ff5badc61897 --- testing/docker/decision/Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/testing/docker/decision/Dockerfile b/testing/docker/decision/Dockerfile index 3471f4d285a3..60c967c1238f 100644 --- a/testing/docker/decision/Dockerfile +++ b/testing/docker/decision/Dockerfile @@ -1,6 +1,10 @@ FROM ubuntu:14.04 MAINTAINER Greg Arndt +# Add worker user +RUN useradd -d /home/worker -s /bin/bash -m worker +RUN mkdir /home/worker/artifacts && chown worker:worker /home/worker/artifacts + # install non-build specific dependencies in a single layer ADD system-setup.sh /tmp/system-setup.sh RUN bash /tmp/system-setup.sh From 03efbd9159987bc9ff5b2a58895cb3ec8a745b99 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Tue, 19 Jul 2016 14:10:14 -0700 Subject: [PATCH 22/63] Bug 1247168 - Don't remove hg-shared scopes; r=dustin These scopes will be introduced in subsequent commits. We don't want them to get removed, just like we don't want the tc-vcs caches to get removed. MozReview-Commit-ID: 790SzpAUxE4 --HG-- extra : rebase_source : d8148aa38bad09468f67ded1a64f432e2e30c63a extra : source : 923b86a15d91cb22999a7c94a4a708845d23aa9d extra : histedit_source : 6d85127fdcc2e795a54eeb836ce11f158e5e88cd --- taskcluster/taskgraph/task/legacy.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/taskcluster/taskgraph/task/legacy.py b/taskcluster/taskgraph/task/legacy.py index 23bc612796f9..cca8fc6eeb01 100644 --- a/taskcluster/taskgraph/task/legacy.py +++ b/taskcluster/taskgraph/task/legacy.py @@ -145,12 +145,13 @@ def set_interactive_task(task, interactive): def remove_caches_from_task(task): - r"""Remove all caches but tc-vcs from the task. + r"""Remove all caches but vcs from the task. :param task: task definition. 
""" whitelist = [ re.compile("^level-[123]-.*-tc-vcs(-public-sources)?$"), + re.compile("^level-[123]-hg-shared"), re.compile("^tooltool-cache$"), ] try: From 3006858aaf52423c77b8cd758d3b62fec98ed589 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Wed, 20 Jul 2016 09:21:38 -0700 Subject: [PATCH 23/63] Bug 1247168 - Change decision image to Ubuntu 16.04; r=dustin We're currently running Ubuntu 14.04 in the decision image. While it is still in LTS support, 16.04 ships with a modern, properly configured Python 2.7. So we upgrade to 16.04 and drop the install of Python from source because it is no longer needed. This is part 1 of a larger refactor to this image. MozReview-Commit-ID: CTbsPmTjcgs --HG-- extra : rebase_source : eca12e98c8ff63cb302ea580da9296bd4cf31a4f extra : histedit_source : 1a40405a9360239bf95d368c43ccfd0681609500 --- testing/docker/decision/Dockerfile | 4 ++-- testing/docker/decision/system-setup.sh | 22 +--------------------- 2 files changed, 3 insertions(+), 23 deletions(-) diff --git a/testing/docker/decision/Dockerfile b/testing/docker/decision/Dockerfile index 60c967c1238f..d4eca10be372 100644 --- a/testing/docker/decision/Dockerfile +++ b/testing/docker/decision/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:14.04 +FROM ubuntu:16.04 MAINTAINER Greg Arndt # Add worker user @@ -12,7 +12,7 @@ RUN bash /tmp/system-setup.sh ADD bin /home/worker/bin RUN chmod +x /home/worker/bin/* -ENV PATH /home/worker/bin:/usr/local/lib/python2.7.10/bin:$PATH +ENV PATH /home/worker/bin:$PATH ENV SHELL /bin/bash ENV HOME /home/worker diff --git a/testing/docker/decision/system-setup.sh b/testing/docker/decision/system-setup.sh index 818c481ee2cf..c97d59c0b88a 100644 --- a/testing/docker/decision/system-setup.sh +++ b/testing/docker/decision/system-setup.sh @@ -32,6 +32,7 @@ apt-get install -y --force-yes --no-install-recommends \ mime-support \ netbase \ net-tools \ + python2.7 \ python-dev \ python-pip \ python-crypto \ @@ -55,27 +56,6 @@ tooltool_fetch() { curl https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py > ${BUILD}/tooltool.py -cd $BUILD -tooltool_fetch <<'EOF' -[ -{ - "size": 12250696, - "digest": "67615a6defbcda062f15a09f9dd3b9441afd01a8cc3255e5bc45b925378a0ddc38d468b7701176f6cc153ec52a4f21671b433780d9bde343aa9b9c1b2ae29feb", - "algorithm": "sha512", - "filename": "Python-2.7.10.tar.xz", - "unpack": true -} -] -EOF - -cd Python-2.7.10 -./configure --prefix /usr/local/lib/python2.7.10 -make -j$(nproc) -make install - -PATH=/usr/local/lib/python2.7.10/bin/:$PATH -python --version - # Enough python utilities to get "peep" working cd $BUILD tooltool_fetch <<'EOF' From 6780646c93b2424f1e8c00413604cecaf642a4b8 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Wed, 20 Jul 2016 09:31:48 -0700 Subject: [PATCH 24/63] Bug 1247168 - Don't install virtualenv in the decision image; r=dustin AFAICT this isn't needed. 
MozReview-Commit-ID: 9eG7a4yHFPw --HG-- extra : rebase_source : 023104eef3405bc749384f58cf6310051de05dc0 extra : histedit_source : cc2872f9cba231306efc3e5857f29def48c167c0 --- testing/docker/decision/system-setup.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/testing/docker/decision/system-setup.sh b/testing/docker/decision/system-setup.sh index c97d59c0b88a..7c23bc70237f 100644 --- a/testing/docker/decision/system-setup.sh +++ b/testing/docker/decision/system-setup.sh @@ -104,9 +104,6 @@ pip install peep # remaining Python utilities are installed with `peep` from upstream # repositories; peep verifies file integrity for us cat >requirements.txt <<'EOF' -# sha256: 90pZQ6kAXB6Je8-H9-ivfgDAb6l3e5rWkfafn6VKh9g -virtualenv==13.1.2 - # sha256: wJnELXTi1SC2HdNyzZlrD6dgXAZheDT9exPHm5qaWzA mercurial==3.7.3 EOF From e403ff5d178c47be5adba5bbba9c28385291b0a4 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Wed, 20 Jul 2016 09:43:01 -0700 Subject: [PATCH 25/63] Bug 1247168 - Install Mercurial 3.8.4 from .deb files; r=dustin We change the installation of Mercurial from via peep to .deb files in tooltool. The .deb files were produced by Mercurial's built-in make targets to produce .deb packages. As part of this, we upgrade to Mercurial 3.8.4. It should be a drop-in replacement. Since we no longer use peep, we stop installing it and pip/setuptools since they were only needed to run peep. It's worth noting that we choose to install from .deb files instead of pip because this keeps image creation small and simple. Otherwise we'd have to install a compiler, etc. MozReview-Commit-ID: INnKDHkX2uk --HG-- extra : rebase_source : 0c6f30ff193dba5fbb5d90603e00f8be02816f9d extra : histedit_source : 2afd18a694447bd133c26b7ccd562cdf7453b674 --- testing/docker/decision/system-setup.sh | 50 +++++-------------------- 1 file changed, 9 insertions(+), 41 deletions(-) diff --git a/testing/docker/decision/system-setup.sh b/testing/docker/decision/system-setup.sh index 7c23bc70237f..6cc1f6a8f136 100644 --- a/testing/docker/decision/system-setup.sh +++ b/testing/docker/decision/system-setup.sh @@ -56,58 +56,26 @@ tooltool_fetch() { curl https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py > ${BUILD}/tooltool.py -# Enough python utilities to get "peep" working +# Install Mercurial from custom debs since distro packages tend to lag behind. cd $BUILD -tooltool_fetch <<'EOF' +tooltool_fetch <=7.0 -# https://github.com/erikrose/peep/pull/94 - -cd $BUILD -cd pip-6.1.1 -python setup.py install - -cd $BUILD -pip install peep-2.4.1.tar.gz - -# Peep (latest) -cd $BUILD -pip install peep - -# remaining Python utilities are installed with `peep` from upstream -# repositories; peep verifies file integrity for us -cat >requirements.txt <<'EOF' -# sha256: wJnELXTi1SC2HdNyzZlrD6dgXAZheDT9exPHm5qaWzA -mercurial==3.7.3 -EOF -peep install -r requirements.txt +dpkg -i mercurial-common_3.8.4_all.deb mercurial_3.8.4_amd64.deb # Install node tooltool_fetch <<'EOF' From 2042b087831be7b979e0bf132f79ac1b11a59077 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Wed, 20 Jul 2016 11:59:22 -0700 Subject: [PATCH 26/63] Bug 1247168 - Stop installing most system packages; r=dustin I'm not sure why the decision image has so many packages installed. Most of them don't need to exist because the decision image only needs to obtain a copy of the Firefox repo and run `mach`. This doesn't require any build system per se. And all the Python dependencies are vendored in the Firefox repo. All we need is a Python 2.7 interpreter. 
This change reduces the decision image size from ~700 MB to ~300 MB. MozReview-Commit-ID: CUqc5TUVZSc --HG-- extra : rebase_source : 5a2b3888b4c54c29bc8c8b9215ce36a4340574e5 extra : histedit_source : 61e70b06b703c3262ae1bc2f527f1919a3f450ec --- testing/docker/decision/system-setup.sh | 40 ++----------------------- 1 file changed, 3 insertions(+), 37 deletions(-) diff --git a/testing/docker/decision/system-setup.sh b/testing/docker/decision/system-setup.sh index 6cc1f6a8f136..303289cda473 100644 --- a/testing/docker/decision/system-setup.sh +++ b/testing/docker/decision/system-setup.sh @@ -6,51 +6,17 @@ test `whoami` == 'root' apt-get update apt-get install -y --force-yes --no-install-recommends \ - autotools-dev \ - blt-dev \ - bzip2 \ - curl \ ca-certificates \ - dpkg-dev \ - gcc-multilib \ - g++-multilib \ + curl \ jq \ - libbluetooth-dev \ - libbz2-dev \ - libexpat1-dev \ - libffi-dev \ - libffi6 \ - libffi6-dbg \ - libgdbm-dev \ - libgpm2 \ - libncursesw5-dev \ - libreadline-dev \ - libsqlite3-dev \ - libssl-dev \ - libtinfo-dev \ - make \ - mime-support \ - netbase \ - net-tools \ - python2.7 \ - python-dev \ - python-pip \ - python-crypto \ - python-mox3 \ - python-pil \ - python-ply \ - quilt \ - tar \ - tk-dev \ - xz-utils \ - zlib1g-dev + python BUILD=/root/build mkdir $BUILD tooltool_fetch() { cat >manifest.tt - python $BUILD/tooltool.py fetch + python2.7 $BUILD/tooltool.py fetch rm manifest.tt } From 91d48d1847a3d9db93b2fecb63c23c37c8bf1860 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Wed, 20 Jul 2016 10:03:17 -0700 Subject: [PATCH 27/63] Bug 1247168 - Vendor tooltool.py; r=dustin Previously, we were downloading tooltool.py from github.com. There were a few problems with this. First, there is a dependency on a 3rd party service. While the Docker image should be cached, as a matter of principle we don't like hitting 3rd party services in our automation. The file is small enough, so we just vendor it. Second - and more importantly - we weren't validating the integrity of the downloaded file. This means that a MiTM could possibly alter the content of the file without us knowing (they would need a valid CA but since the Ubuntu trusted CA bundle contains a lot of CAs from e.g. governments, this isn't out of the question). Vendoring the file removes this risk. Third, behavior wasn't deterministic over time. We were always downloading the "master" revision of the file. I like determinism over time. Vendoring makes things deterministic. 
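(For illustration only, as a hypothetical alternative rather than what this change does, the
download could instead have been pinned and verified in system-setup.sh, roughly:

    curl -fsSL https://raw.githubusercontent.com/mozilla/build-tooltool/<revision>/tooltool.py > tooltool.py
    echo "<expected sha512>  tooltool.py" | sha512sum -c -

with <revision> and <expected sha512> filled in by hand. Vendoring the file gets the same
determinism and integrity without the network dependency.)
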
MozReview-Commit-ID: 4DdSd42BnAu --HG-- extra : rebase_source : cf73d2741fc186bebf06233efefdf85cd8cea3f2 extra : histedit_source : 76c7d81266a72010a9969ea32ac13c7bce2a0601 --- testing/docker/decision/Dockerfile | 3 +- testing/docker/decision/system-setup.sh | 4 +- testing/docker/decision/tooltool.py | 1022 +++++++++++++++++++++++ 3 files changed, 1024 insertions(+), 5 deletions(-) create mode 100755 testing/docker/decision/tooltool.py diff --git a/testing/docker/decision/Dockerfile b/testing/docker/decision/Dockerfile index d4eca10be372..49fb3ea38703 100644 --- a/testing/docker/decision/Dockerfile +++ b/testing/docker/decision/Dockerfile @@ -5,8 +5,7 @@ MAINTAINER Greg Arndt RUN useradd -d /home/worker -s /bin/bash -m worker RUN mkdir /home/worker/artifacts && chown worker:worker /home/worker/artifacts -# install non-build specific dependencies in a single layer -ADD system-setup.sh /tmp/system-setup.sh +ADD system-setup.sh tooltool.py /tmp/ RUN bash /tmp/system-setup.sh ADD bin /home/worker/bin diff --git a/testing/docker/decision/system-setup.sh b/testing/docker/decision/system-setup.sh index 303289cda473..fb96770be30c 100644 --- a/testing/docker/decision/system-setup.sh +++ b/testing/docker/decision/system-setup.sh @@ -16,12 +16,10 @@ mkdir $BUILD tooltool_fetch() { cat >manifest.tt - python2.7 $BUILD/tooltool.py fetch + python2.7 /tmp/tooltool.py fetch rm manifest.tt } -curl https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py > ${BUILD}/tooltool.py - # Install Mercurial from custom debs since distro packages tend to lag behind. cd $BUILD tooltool_fetch < +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation version 2 +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA +# 02110-1301, USA. + +# A manifest file specifies files in that directory that are stored +# elsewhere. 
This file should only list files in the same directory +# in which the manifest file resides and it should be called +# 'manifest.tt' + +import hashlib +import httplib +import json +import logging +import optparse +import os +import shutil +import sys +import tarfile +import tempfile +import threading +import time +import urllib2 +import urlparse +import zipfile + +from subprocess import PIPE +from subprocess import Popen + +__version__ = '1' + +DEFAULT_MANIFEST_NAME = 'manifest.tt' +TOOLTOOL_PACKAGE_SUFFIX = '.TOOLTOOL-PACKAGE' + + +log = logging.getLogger(__name__) + + +class FileRecordJSONEncoderException(Exception): + pass + + +class InvalidManifest(Exception): + pass + + +class ExceptionWithFilename(Exception): + + def __init__(self, filename): + Exception.__init__(self) + self.filename = filename + + +class BadFilenameException(ExceptionWithFilename): + pass + + +class DigestMismatchException(ExceptionWithFilename): + pass + + +class MissingFileException(ExceptionWithFilename): + pass + + +class FileRecord(object): + + def __init__(self, filename, size, digest, algorithm, unpack=False, + visibility=None, setup=None): + object.__init__(self) + if '/' in filename or '\\' in filename: + log.error( + "The filename provided contains path information and is, therefore, invalid.") + raise BadFilenameException(filename=filename) + self.filename = filename + self.size = size + self.digest = digest + self.algorithm = algorithm + self.unpack = unpack + self.visibility = visibility + self.setup = setup + + def __eq__(self, other): + if self is other: + return True + if self.filename == other.filename and \ + self.size == other.size and \ + self.digest == other.digest and \ + self.algorithm == other.algorithm and \ + self.visibility == other.visibility: + return True + else: + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def __str__(self): + return repr(self) + + def __repr__(self): + return "%s.%s(filename='%s', size=%s, digest='%s', algorithm='%s', visibility=%r)" % ( + __name__, self.__class__.__name__, self.filename, self.size, + self.digest, self.algorithm, self.visibility) + + def present(self): + # Doesn't check validity + return os.path.exists(self.filename) + + def validate_size(self): + if self.present(): + return self.size == os.path.getsize(self.filename) + else: + log.debug( + "trying to validate size on a missing file, %s", self.filename) + raise MissingFileException(filename=self.filename) + + def validate_digest(self): + if self.present(): + with open(self.filename, 'rb') as f: + return self.digest == digest_file(f, self.algorithm) + else: + log.debug( + "trying to validate digest on a missing file, %s', self.filename") + raise MissingFileException(filename=self.filename) + + def validate(self): + if self.validate_size(): + if self.validate_digest(): + return True + return False + + def describe(self): + if self.present() and self.validate(): + return "'%s' is present and valid" % self.filename + elif self.present(): + return "'%s' is present and invalid" % self.filename + else: + return "'%s' is absent" % self.filename + + +def create_file_record(filename, algorithm): + fo = open(filename, 'rb') + stored_filename = os.path.split(filename)[1] + fr = FileRecord(stored_filename, os.path.getsize( + filename), digest_file(fo, algorithm), algorithm) + fo.close() + return fr + + +class FileRecordJSONEncoder(json.JSONEncoder): + + def encode_file_record(self, obj): + if not issubclass(type(obj), FileRecord): + err = "FileRecordJSONEncoder is only for 
FileRecord and lists of FileRecords, " \ + "not %s" % obj.__class__.__name__ + log.warn(err) + raise FileRecordJSONEncoderException(err) + else: + rv = { + 'filename': obj.filename, + 'size': obj.size, + 'algorithm': obj.algorithm, + 'digest': obj.digest, + } + if obj.unpack: + rv['unpack'] = True + if obj.visibility is not None: + rv['visibility'] = obj.visibility + if obj.setup: + rv['setup'] = obj.setup + return rv + + def default(self, f): + if issubclass(type(f), list): + record_list = [] + for i in f: + record_list.append(self.encode_file_record(i)) + return record_list + else: + return self.encode_file_record(f) + + +class FileRecordJSONDecoder(json.JSONDecoder): + + """I help the json module materialize a FileRecord from + a JSON file. I understand FileRecords and lists of + FileRecords. I ignore things that I don't expect for now""" + # TODO: make this more explicit in what it's looking for + # and error out on unexpected things + + def process_file_records(self, obj): + if isinstance(obj, list): + record_list = [] + for i in obj: + record = self.process_file_records(i) + if issubclass(type(record), FileRecord): + record_list.append(record) + return record_list + required_fields = [ + 'filename', + 'size', + 'algorithm', + 'digest', + ] + if isinstance(obj, dict): + missing = False + for req in required_fields: + if req not in obj: + missing = True + break + + if not missing: + unpack = obj.get('unpack', False) + visibility = obj.get('visibility', None) + setup = obj.get('setup') + rv = FileRecord( + obj['filename'], obj['size'], obj['digest'], obj['algorithm'], + unpack, visibility, setup) + log.debug("materialized %s" % rv) + return rv + return obj + + def decode(self, s): + decoded = json.JSONDecoder.decode(self, s) + rv = self.process_file_records(decoded) + return rv + + +class Manifest(object): + + valid_formats = ('json',) + + def __init__(self, file_records=None): + self.file_records = file_records or [] + + def __eq__(self, other): + if self is other: + return True + if len(self.file_records) != len(other.file_records): + log.debug('Manifests differ in number of files') + return False + # sort the file records by filename before comparing + mine = sorted((fr.filename, fr) for fr in self.file_records) + theirs = sorted((fr.filename, fr) for fr in other.file_records) + return mine == theirs + + def __ne__(self, other): + return not self.__eq__(other) + + def __deepcopy__(self, memo): + # This is required for a deep copy + return Manifest(self.file_records[:]) + + def __copy__(self): + return Manifest(self.file_records) + + def copy(self): + return Manifest(self.file_records[:]) + + def present(self): + return all(i.present() for i in self.file_records) + + def validate_sizes(self): + return all(i.validate_size() for i in self.file_records) + + def validate_digests(self): + return all(i.validate_digest() for i in self.file_records) + + def validate(self): + return all(i.validate() for i in self.file_records) + + def load(self, data_file, fmt='json'): + assert fmt in self.valid_formats + if fmt == 'json': + try: + self.file_records.extend( + json.load(data_file, cls=FileRecordJSONDecoder)) + except ValueError: + raise InvalidManifest("trying to read invalid manifest file") + + def loads(self, data_string, fmt='json'): + assert fmt in self.valid_formats + if fmt == 'json': + try: + self.file_records.extend( + json.loads(data_string, cls=FileRecordJSONDecoder)) + except ValueError: + raise InvalidManifest("trying to read invalid manifest file") + + def dump(self, output_file, 
fmt='json'): + assert fmt in self.valid_formats + if fmt == 'json': + rv = json.dump( + self.file_records, output_file, indent=0, cls=FileRecordJSONEncoder, + separators=(',', ': ')) + print >> output_file, '' + return rv + + def dumps(self, fmt='json'): + assert fmt in self.valid_formats + if fmt == 'json': + return json.dumps(self.file_records, cls=FileRecordJSONEncoder) + + +def digest_file(f, a): + """I take a file like object 'f' and return a hex-string containing + of the result of the algorithm 'a' applied to 'f'.""" + h = hashlib.new(a) + chunk_size = 1024 * 10 + data = f.read(chunk_size) + while data: + h.update(data) + data = f.read(chunk_size) + name = repr(f.name) if hasattr(f, 'name') else 'a file' + log.debug('hashed %s with %s to be %s', name, a, h.hexdigest()) + return h.hexdigest() + + +def execute(cmd): + """Execute CMD, logging its stdout at the info level""" + process = Popen(cmd, shell=True, stdout=PIPE) + while True: + line = process.stdout.readline() + if not line: + break + log.info(line.replace('\n', ' ')) + return process.wait() == 0 + + +def open_manifest(manifest_file): + """I know how to take a filename and load it into a Manifest object""" + if os.path.exists(manifest_file): + manifest = Manifest() + with open(manifest_file) as f: + manifest.load(f) + log.debug("loaded manifest from file '%s'" % manifest_file) + return manifest + else: + log.debug("tried to load absent file '%s' as manifest" % manifest_file) + raise InvalidManifest( + "manifest file '%s' does not exist" % manifest_file) + + +def list_manifest(manifest_file): + """I know how print all the files in a location""" + try: + manifest = open_manifest(manifest_file) + except InvalidManifest as e: + log.error("failed to load manifest file at '%s': %s" % ( + manifest_file, + str(e), + )) + return False + for f in manifest.file_records: + print "%s\t%s\t%s" % ("P" if f.present() else "-", + "V" if f.present() and f.validate() else "-", + f.filename) + return True + + +def validate_manifest(manifest_file): + """I validate that all files in a manifest are present and valid but + don't fetch or delete them if they aren't""" + try: + manifest = open_manifest(manifest_file) + except InvalidManifest as e: + log.error("failed to load manifest file at '%s': %s" % ( + manifest_file, + str(e), + )) + return False + invalid_files = [] + absent_files = [] + for f in manifest.file_records: + if not f.present(): + absent_files.append(f) + else: + if not f.validate(): + invalid_files.append(f) + if len(invalid_files + absent_files) == 0: + return True + else: + return False + + +def add_files(manifest_file, algorithm, filenames, visibility, unpack): + # returns True if all files successfully added, False if not + # and doesn't catch library Exceptions. 
If any files are already + # tracked in the manifest, return will be False because they weren't + # added + all_files_added = True + # Create a old_manifest object to add to + if os.path.exists(manifest_file): + old_manifest = open_manifest(manifest_file) + else: + old_manifest = Manifest() + log.debug("creating a new manifest file") + new_manifest = Manifest() # use a different manifest for the output + for filename in filenames: + log.debug("adding %s" % filename) + path, name = os.path.split(filename) + new_fr = create_file_record(filename, algorithm) + new_fr.visibility = visibility + new_fr.unpack = unpack + log.debug("appending a new file record to manifest file") + add = True + for fr in old_manifest.file_records: + log.debug("manifest file has '%s'" % "', ".join( + [x.filename for x in old_manifest.file_records])) + if new_fr == fr: + log.info("file already in old_manifest") + add = False + elif filename == fr.filename: + log.error("manifest already contains a different file named %s" % filename) + add = False + if add: + new_manifest.file_records.append(new_fr) + log.debug("added '%s' to manifest" % filename) + else: + all_files_added = False + # copy any files in the old manifest that aren't in the new one + new_filenames = set(fr.filename for fr in new_manifest.file_records) + for old_fr in old_manifest.file_records: + if old_fr.filename not in new_filenames: + new_manifest.file_records.append(old_fr) + with open(manifest_file, 'wb') as output: + new_manifest.dump(output, fmt='json') + return all_files_added + + +def touch(f): + """Used to modify mtime in cached files; + mtime is used by the purge command""" + try: + os.utime(f, None) + except OSError: + log.warn('impossible to update utime of file %s' % f) + + +def fetch_file(base_urls, file_record, grabchunk=1024 * 4, auth_file=None, region=None): + # A file which is requested to be fetched that exists locally will be + # overwritten by this function + fd, temp_path = tempfile.mkstemp(dir=os.getcwd()) + os.close(fd) + fetched_path = None + for base_url in base_urls: + # Generate the URL for the file on the server side + url = urlparse.urljoin(base_url, + '%s/%s' % (file_record.algorithm, file_record.digest)) + if region is not None: + url += '?region=' + region + + log.info("Attempting to fetch from '%s'..." % base_url) + + # Well, the file doesn't exist locally. Let's fetch it. + try: + req = urllib2.Request(url) + _authorize(req, auth_file) + f = urllib2.urlopen(req) + log.debug("opened %s for reading" % url) + with open(temp_path, 'wb') as out: + k = True + size = 0 + while k: + # TODO: print statistics as file transfers happen both for info and to stop + # buildbot timeouts + indata = f.read(grabchunk) + out.write(indata) + size += len(indata) + if indata == '': + k = False + log.info("File %s fetched from %s as %s" % + (file_record.filename, base_url, temp_path)) + fetched_path = temp_path + break + except (urllib2.URLError, urllib2.HTTPError, ValueError) as e: + log.info("...failed to fetch '%s' from %s" % + (file_record.filename, base_url)) + log.debug("%s" % e) + except IOError: # pragma: no cover + log.info("failed to write to temporary file for '%s'" % + file_record.filename, exc_info=True) + + # cleanup temp file in case of issues + if fetched_path: + return os.path.split(fetched_path)[1] + else: + try: + os.remove(temp_path) + except OSError: # pragma: no cover + pass + return None + + +def clean_path(dirname): + """Remove a subtree if is exists. 
Helper for unpack_file().""" + if os.path.exists(dirname): + log.info('rm tree: %s' % dirname) + shutil.rmtree(dirname) + + +def unpack_file(filename, setup=None): + """Untar `filename`, assuming it is uncompressed or compressed with bzip2, + xz, gzip, or unzip a zip file. The file is assumed to contain a single + directory with a name matching the base of the given filename. + Xz support is handled by shelling out to 'tar'.""" + if tarfile.is_tarfile(filename): + tar_file, zip_ext = os.path.splitext(filename) + base_file, tar_ext = os.path.splitext(tar_file) + clean_path(base_file) + log.info('untarring "%s"' % filename) + tar = tarfile.open(filename) + tar.extractall() + tar.close() + elif filename.endswith('.tar.xz'): + base_file = filename.replace('.tar.xz', '') + clean_path(base_file) + log.info('untarring "%s"' % filename) + if not execute('tar -Jxf %s 2>&1' % filename): + return False + elif zipfile.is_zipfile(filename): + base_file = filename.replace('.zip', '') + clean_path(base_file) + log.info('unzipping "%s"' % filename) + z = zipfile.ZipFile(filename) + z.extractall() + z.close() + else: + log.error("Unknown archive extension for filename '%s'" % filename) + return False + + if setup and not execute(os.path.join(base_file, setup)): + return False + return True + + +def fetch_files(manifest_file, base_urls, filenames=[], cache_folder=None, + auth_file=None, region=None): + # Lets load the manifest file + try: + manifest = open_manifest(manifest_file) + except InvalidManifest as e: + log.error("failed to load manifest file at '%s': %s" % ( + manifest_file, + str(e), + )) + return False + + # we want to track files already in current working directory AND valid + # we will not need to fetch these + present_files = [] + + # We want to track files that fail to be fetched as well as + # files that are fetched + failed_files = [] + fetched_files = [] + + # Files that we want to unpack. + unpack_files = [] + + # Setup for unpacked files. + setup_files = {} + + # Lets go through the manifest and fetch the files that we want + for f in manifest.file_records: + # case 1: files are already present + if f.present(): + if f.validate(): + present_files.append(f.filename) + if f.unpack: + unpack_files.append(f.filename) + else: + # we have an invalid file here, better to cleanup! + # this invalid file needs to be replaced with a good one + # from the local cash or fetched from a tooltool server + log.info("File %s is present locally but it is invalid, so I will remove it " + "and try to fetch it" % f.filename) + os.remove(os.path.join(os.getcwd(), f.filename)) + + # check if file is already in cache + if cache_folder and f.filename not in present_files: + try: + shutil.copy(os.path.join(cache_folder, f.digest), + os.path.join(os.getcwd(), f.filename)) + log.info("File %s retrieved from local cache %s" % + (f.filename, cache_folder)) + touch(os.path.join(cache_folder, f.digest)) + + filerecord_for_validation = FileRecord( + f.filename, f.size, f.digest, f.algorithm) + if filerecord_for_validation.validate(): + present_files.append(f.filename) + if f.unpack: + unpack_files.append(f.filename) + else: + # the file copied from the cache is invalid, better to + # clean up the cache version itself as well + log.warn("File %s retrieved from cache is invalid! 
I am deleting it from the " + "cache as well" % f.filename) + os.remove(os.path.join(os.getcwd(), f.filename)) + os.remove(os.path.join(cache_folder, f.digest)) + except IOError: + log.info("File %s not present in local cache folder %s" % + (f.filename, cache_folder)) + + # now I will try to fetch all files which are not already present and + # valid, appending a suffix to avoid race conditions + temp_file_name = None + # 'filenames' is the list of filenames to be managed, if this variable + # is a non empty list it can be used to filter if filename is in + # present_files, it means that I have it already because it was already + # either in the working dir or in the cache + if (f.filename in filenames or len(filenames) == 0) and f.filename not in present_files: + log.debug("fetching %s" % f.filename) + temp_file_name = fetch_file(base_urls, f, auth_file=auth_file, region=region) + if temp_file_name: + fetched_files.append((f, temp_file_name)) + else: + failed_files.append(f.filename) + else: + log.debug("skipping %s" % f.filename) + + if f.setup: + if f.unpack: + setup_files[f.filename] = f.setup + else: + log.error("'setup' requires 'unpack' being set for %s" % f.filename) + failed_files.append(f.filename) + + # lets ensure that fetched files match what the manifest specified + for localfile, temp_file_name in fetched_files: + # since I downloaded to a temp file, I need to perform all validations on the temp file + # this is why filerecord_for_validation is created + + filerecord_for_validation = FileRecord( + temp_file_name, localfile.size, localfile.digest, localfile.algorithm) + + if filerecord_for_validation.validate(): + # great! + # I can rename the temp file + log.info("File integrity verified, renaming %s to %s" % + (temp_file_name, localfile.filename)) + os.rename(os.path.join(os.getcwd(), temp_file_name), + os.path.join(os.getcwd(), localfile.filename)) + + if localfile.unpack: + unpack_files.append(localfile.filename) + + # if I am using a cache and a new file has just been retrieved from a + # remote location, I need to update the cache as well + if cache_folder: + log.info("Updating local cache %s..." % cache_folder) + try: + if not os.path.exists(cache_folder): + log.info("Creating cache in %s..." % cache_folder) + os.makedirs(cache_folder, 0700) + shutil.copy(os.path.join(os.getcwd(), localfile.filename), + os.path.join(cache_folder, localfile.digest)) + log.info("Local cache %s updated with %s" % (cache_folder, + localfile.filename)) + touch(os.path.join(cache_folder, localfile.digest)) + except (OSError, IOError): + log.warning('Impossible to add file %s to cache folder %s' % + (localfile.filename, cache_folder), exc_info=True) + else: + failed_files.append(localfile.filename) + log.error("'%s'" % filerecord_for_validation.describe()) + os.remove(temp_file_name) + + # Unpack files that need to be unpacked. 
+ for filename in unpack_files: + if not unpack_file(filename, setup_files.get(filename)): + failed_files.append(filename) + + # If we failed to fetch or validate a file, we need to fail + if len(failed_files) > 0: + log.error("The following files failed: '%s'" % + "', ".join(failed_files)) + return False + return True + + +def freespace(p): + "Returns the number of bytes free under directory `p`" + if sys.platform == 'win32': # pragma: no cover + # os.statvfs doesn't work on Windows + import win32file + + secsPerClus, bytesPerSec, nFreeClus, totClus = win32file.GetDiskFreeSpace( + p) + return secsPerClus * bytesPerSec * nFreeClus + else: + r = os.statvfs(p) + return r.f_frsize * r.f_bavail + + +def purge(folder, gigs): + """If gigs is non 0, it deletes files in `folder` until `gigs` GB are free, + starting from older files. If gigs is 0, a full purge will be performed. + No recursive deletion of files in subfolder is performed.""" + + full_purge = bool(gigs == 0) + gigs *= 1024 * 1024 * 1024 + + if not full_purge and freespace(folder) >= gigs: + log.info("No need to cleanup") + return + + files = [] + for f in os.listdir(folder): + p = os.path.join(folder, f) + # it deletes files in folder without going into subfolders, + # assuming the cache has a flat structure + if not os.path.isfile(p): + continue + mtime = os.path.getmtime(p) + files.append((mtime, p)) + + # iterate files sorted by mtime + for _, f in sorted(files): + log.info("removing %s to free up space" % f) + try: + os.remove(f) + except OSError: + log.info("Impossible to remove %s" % f, exc_info=True) + if not full_purge and freespace(folder) >= gigs: + break + + +def _log_api_error(e): + if hasattr(e, 'hdrs') and e.hdrs['content-type'] == 'application/json': + json_resp = json.load(e.fp) + log.error("%s: %s" % (json_resp['error']['name'], + json_resp['error']['description'])) + else: + log.exception("Error making RelengAPI request:") + + +def _authorize(req, auth_file): + if auth_file: + log.debug("using bearer token in %s" % auth_file) + req.add_unredirected_header('Authorization', + 'Bearer %s' % (open(auth_file).read().strip())) + + +def _send_batch(base_url, auth_file, batch, region): + url = urlparse.urljoin(base_url, 'upload') + if region is not None: + url += "?region=" + region + req = urllib2.Request(url, json.dumps(batch), {'Content-Type': 'application/json'}) + _authorize(req, auth_file) + try: + resp = urllib2.urlopen(req) + except (urllib2.URLError, urllib2.HTTPError) as e: + _log_api_error(e) + return None + return json.load(resp)['result'] + + +def _s3_upload(filename, file): + # urllib2 does not support streaming, so we fall back to good old httplib + url = urlparse.urlparse(file['put_url']) + cls = httplib.HTTPSConnection if url.scheme == 'https' else httplib.HTTPConnection + host, port = url.netloc.split(':') if ':' in url.netloc else (url.netloc, 443) + port = int(port) + conn = cls(host, port) + try: + req_path = "%s?%s" % (url.path, url.query) if url.query else url.path + conn.request('PUT', req_path, open(filename), + {'Content-type': 'application/octet-stream'}) + resp = conn.getresponse() + resp_body = resp.read() + conn.close() + if resp.status != 200: + raise RuntimeError("Non-200 return from AWS: %s %s\n%s" % + (resp.status, resp.reason, resp_body)) + except Exception: + file['upload_exception'] = sys.exc_info() + file['upload_ok'] = False + else: + file['upload_ok'] = True + + +def _notify_upload_complete(base_url, auth_file, file): + req = urllib2.Request( + urlparse.urljoin( + base_url, + 
'upload/complete/%(algorithm)s/%(digest)s' % file)) + _authorize(req, auth_file) + try: + urllib2.urlopen(req) + except urllib2.HTTPError as e: + if e.code != 409: + _log_api_error(e) + return + # 409 indicates that the upload URL hasn't expired yet and we + # should retry after a delay + to_wait = int(e.headers.get('X-Retry-After', 60)) + log.warning("Waiting %d seconds for upload URLs to expire" % to_wait) + time.sleep(to_wait) + _notify_upload_complete(base_url, auth_file, file) + except Exception: + log.exception("While notifying server of upload completion:") + + +def upload(manifest, message, base_urls, auth_file, region): + try: + manifest = open_manifest(manifest) + except InvalidManifest: + log.exception("failed to load manifest file at '%s'") + return False + + # verify the manifest, since we'll need the files present to upload + if not manifest.validate(): + log.error('manifest is invalid') + return False + + if any(fr.visibility is None for fr in manifest.file_records): + log.error('All files in a manifest for upload must have a visibility set') + + # convert the manifest to an upload batch + batch = { + 'message': message, + 'files': {}, + } + for fr in manifest.file_records: + batch['files'][fr.filename] = { + 'size': fr.size, + 'digest': fr.digest, + 'algorithm': fr.algorithm, + 'visibility': fr.visibility, + } + + # make the upload request + resp = _send_batch(base_urls[0], auth_file, batch, region) + if not resp: + return None + files = resp['files'] + + # Upload the files, each in a thread. This allows us to start all of the + # uploads before any of the URLs expire. + threads = {} + for filename, file in files.iteritems(): + if 'put_url' in file: + log.info("%s: starting upload" % (filename,)) + thd = threading.Thread(target=_s3_upload, + args=(filename, file)) + thd.daemon = 1 + thd.start() + threads[filename] = thd + else: + log.info("%s: already exists on server" % (filename,)) + + # re-join all of those threads as they exit + success = True + while threads: + for filename, thread in threads.items(): + if not thread.is_alive(): + # _s3_upload has annotated file with result information + file = files[filename] + thread.join() + if file['upload_ok']: + log.info("%s: uploaded" % filename) + else: + log.error("%s: failed" % filename, + exc_info=file['upload_exception']) + success = False + del threads[filename] + + # notify the server that the uploads are completed. 
If the notification + # fails, we don't consider that an error (the server will notice + # eventually) + for filename, file in files.iteritems(): + if 'put_url' in file and file['upload_ok']: + log.info("notifying server of upload completion for %s" % (filename,)) + _notify_upload_complete(base_urls[0], auth_file, file) + + return success + + +def process_command(options, args): + """ I know how to take a list of program arguments and + start doing the right thing with them""" + cmd = args[0] + cmd_args = args[1:] + log.debug("processing '%s' command with args '%s'" % + (cmd, '", "'.join(cmd_args))) + log.debug("using options: %s" % options) + + if cmd == 'list': + return list_manifest(options['manifest']) + if cmd == 'validate': + return validate_manifest(options['manifest']) + elif cmd == 'add': + return add_files(options['manifest'], options['algorithm'], cmd_args, + options['visibility'], options['unpack']) + elif cmd == 'purge': + if options['cache_folder']: + purge(folder=options['cache_folder'], gigs=options['size']) + else: + log.critical('please specify the cache folder to be purged') + return False + elif cmd == 'fetch': + return fetch_files( + options['manifest'], + options['base_url'], + cmd_args, + cache_folder=options['cache_folder'], + auth_file=options.get("auth_file"), + region=options.get('region')) + elif cmd == 'upload': + if not options.get('message'): + log.critical('upload command requires a message') + return False + return upload( + options.get('manifest'), + options.get('message'), + options.get('base_url'), + options.get('auth_file'), + options.get('region')) + else: + log.critical('command "%s" is not implemented' % cmd) + return False + + +def main(argv, _skip_logging=False): + # Set up option parsing + parser = optparse.OptionParser() + parser.add_option('-q', '--quiet', default=logging.INFO, + dest='loglevel', action='store_const', const=logging.ERROR) + parser.add_option('-v', '--verbose', + dest='loglevel', action='store_const', const=logging.DEBUG) + parser.add_option('-m', '--manifest', default=DEFAULT_MANIFEST_NAME, + dest='manifest', action='store', + help='specify the manifest file to be operated on') + parser.add_option('-d', '--algorithm', default='sha512', + dest='algorithm', action='store', + help='hashing algorithm to use (only sha512 is allowed)') + parser.add_option('--visibility', default=None, + dest='visibility', choices=['internal', 'public'], + help='Visibility level of this file; "internal" is for ' + 'files that cannot be distributed out of the company ' + 'but not for secrets; "public" files are available to ' + 'anyone withou trestriction') + parser.add_option('--unpack', default=False, + dest='unpack', action='store_true', + help='Request unpacking this file after fetch.' + ' This is helpful with tarballs.') + parser.add_option('-o', '--overwrite', default=False, + dest='overwrite', action='store_true', + help='UNUSED; present for backward compatibility') + parser.add_option('--url', dest='base_url', action='append', + help='RelengAPI URL ending with /tooltool/; default ' + 'is appropriate for Mozilla') + parser.add_option('-c', '--cache-folder', dest='cache_folder', + help='Local cache folder') + parser.add_option('-s', '--size', + help='free space required (in GB)', dest='size', + type='float', default=0.) 
+ parser.add_option('-r', '--region', help='Preferred AWS region for upload or fetch; ' + 'example: --region=us-west-2') + parser.add_option('--message', + help='The "commit message" for an upload; format with a bug number ' + 'and brief comment', + dest='message') + parser.add_option('--authentication-file', + help='Use the RelengAPI token found in the given file to ' + 'authenticate to the RelengAPI server.', + dest='auth_file') + + (options_obj, args) = parser.parse_args(argv[1:]) + + # default the options list if not provided + if not options_obj.base_url: + options_obj.base_url = ['https://api.pub.build.mozilla.org/tooltool/'] + + # ensure all URLs have a trailing slash + def add_slash(url): + return url if url.endswith('/') else (url + '/') + options_obj.base_url = [add_slash(u) for u in options_obj.base_url] + + # expand ~ in --authentication-file + if options_obj.auth_file: + options_obj.auth_file = os.path.expanduser(options_obj.auth_file) + + # Dictionaries are easier to work with + options = vars(options_obj) + + log.setLevel(options['loglevel']) + + # Set up logging, for now just to the console + if not _skip_logging: # pragma: no cover + ch = logging.StreamHandler() + cf = logging.Formatter("%(levelname)s - %(message)s") + ch.setFormatter(cf) + log.addHandler(ch) + + if options['algorithm'] != 'sha512': + parser.error('only --algorithm sha512 is supported') + + if len(args) < 1: + parser.error('You must specify a command') + + return 0 if process_command(options, args) else 1 + +if __name__ == "__main__": # pragma: no cover + sys.exit(main(sys.argv)) From 1c9ec9038883a32689ed8b9887febca8ca4d740b Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Wed, 20 Jul 2016 10:22:26 -0700 Subject: [PATCH 28/63] Bug 1247168 - Configure Mercurial and install global hgrc; r=dustin web.cacerts matches what the Ubuntu package does by default. [progress] changes are to make output in TaskCluster logs less spammy (only 1 update per second instead of up to 10). The robustcheckout extension will be used in a subsequent commit to handle repository checkouts. MozReview-Commit-ID: 2PvW4wEGk2u --HG-- extra : rebase_source : 742627ba823d4f2097a4273e6cc6af8bb842c69f extra : histedit_source : d479c1923c71605e9511e877b4b90d3b4d42f542 --- testing/docker/decision/system-setup.sh | 34 +++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/testing/docker/decision/system-setup.sh b/testing/docker/decision/system-setup.sh index fb96770be30c..9b7058c2cde6 100644 --- a/testing/docker/decision/system-setup.sh +++ b/testing/docker/decision/system-setup.sh @@ -41,6 +41,40 @@ EOF dpkg -i mercurial-common_3.8.4_all.deb mercurial_3.8.4_amd64.deb +mkdir -p /usr/local/mercurial +chown 755 /usr/local/mercurial +cd /usr/local/mercurial +tooltool_fetch <<'EOF' +[ +{ + "size": 11849, + "digest": "c88d9b8afd6649bd28bbacfa654ebefec8087a01d1662004aae088d485edeb03a92df1193d1310c0369d7721f475b974fcd4a911428ec65936f7e40cf1609c49", + "algorithm": "sha512", + "filename": "robustcheckout.py" +} +] +EOF + +chmod 644 /usr/local/mercurial/robustcheckout.py + +# Install a global hgrc file with reasonable defaults. +mkdir -p /etc/mercurial +cat >/etc/mercurial/hgrc < Date: Wed, 20 Jul 2016 15:34:10 -0700 Subject: [PATCH 29/63] Bug 1247168 - Actually use workspace cache in decision task; r=dustin The decision task configures a /home/worker/workspace cache. However, the command we run in the container references a "workspace" relative path. 
From logs in automation, it appears that PWD during execution is "/" because "workspace" is being resolved to "/workspace." The net result of this is we appear to be performing a VCS clone+checkout on every single task. This commit fixes the paths so our workspace cache is actually used. MozReview-Commit-ID: Kj6REep5bSs --HG-- extra : rebase_source : 8cd8be43dfd34f2970b47721c3da8e3957a8bfed extra : histedit_source : b81dce523a88e44eb3fa8b1a68840066edca382d --- .taskcluster.yml | 4 ++-- taskcluster/taskgraph/action.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.taskcluster.yml b/.taskcluster.yml index 2dd4e5c2ce91..790b3952f864 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -89,8 +89,8 @@ tasks: - -cx - > mkdir -p /home/worker/artifacts && - checkout-gecko workspace && - cd workspace/gecko && + checkout-gecko /home/worker/workspace && + cd /home/worker/workspace/gecko && ln -s /home/worker/artifacts artifacts && ./mach taskgraph decision --pushlog-id='{{pushlog_id}}' diff --git a/taskcluster/taskgraph/action.yml b/taskcluster/taskgraph/action.yml index 67d3cab19b0f..14fa7d5b54b2 100644 --- a/taskcluster/taskgraph/action.yml +++ b/taskcluster/taskgraph/action.yml @@ -54,8 +54,8 @@ payload: - -cx - > mkdir -p /home/worker/artifacts && - checkout-gecko workspace && - cd workspace/gecko && + checkout-gecko /home/worker/workspace && + cd /home/worker/workspace/gecko && ln -s /home/worker/artifacts artifacts && ./mach taskgraph action-task --decision-id='{{decision_task_id}}' From b56b243b8147093af355d07b17927416a2fcccde Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Wed, 20 Jul 2016 12:10:52 -0700 Subject: [PATCH 30/63] Bug 1247168 - Use robustcheckout in decision image; r=dustin Now that Mercurial 3.8.4 and robustcheckout are in place, we convert checkout-gecko from tc-vcs to robustcheckout. As part of this, we remove references to tc-vcs from the Docker image. This completes our changes to the decision Docker image. Image size has been reduced from ~725 MB to ~217 MB. Not bad. MozReview-Commit-ID: Hx9d02Al1TP --HG-- extra : rebase_source : 05114e4e0e7fbbab2c89f25074abfeb7b9ba62ef extra : histedit_source : 193c0bbb64cc1e468b5d7bb969d7f74e25947bde --- testing/docker/decision/bin/checkout-gecko | 30 ++++++++++------------ testing/docker/decision/system-setup.sh | 17 ------------ 2 files changed, 14 insertions(+), 33 deletions(-) diff --git a/testing/docker/decision/bin/checkout-gecko b/testing/docker/decision/bin/checkout-gecko index 82fc2e051020..c8fb42b81698 100644 --- a/testing/docker/decision/bin/checkout-gecko +++ b/testing/docker/decision/bin/checkout-gecko @@ -1,13 +1,11 @@ #! /bin/bash -e -set -e +set -ex # Ensure we have at least enough to check gecko out... test $GECKO_BASE_REPOSITORY -# Workspace to checkout gecko into... -WORKSPACE=$1 -mkdir -p $WORKSPACE +DESTDIR=$1 res=`curl --fail --retry 5 http://taskcluster/secrets/v1/secret/project/taskcluster/gecko/hgfingerprint` FP=`echo $res | jq -r .secret.content` @@ -18,16 +16,16 @@ if [[ ! "$FP" =~ ^[a-f0-9:]+$ ]]; then exit 1 fi -mkdir /etc/mercurial -cat >/etc/mercurial/hgrc < Date: Thu, 21 Jul 2016 11:44:55 -0700 Subject: [PATCH 31/63] Bug 1247168 - Introduce scripts for running action and decision tasks; r=dustin When we switch to use robustcheckout for version control foo, we'll also be taking the opportunity to have the decision and action tasks execute as the "worker" user. 
Since caches are mounted and owned by root and since tasks initially run as root, this makes defining the container command in YAML a bit difficult because we have to do some work as root then switch users and continue executing. Rather than shoehorning all that complicated logic into YAML, we introduce bash scripts that do it. These will be plugged into the task YAML when we formally switch the tasks to use the new Docker image. We provide one script for running Gecko decision tasks. We provide another for running action tasks. These are the two consumers of the decision image we care about. We also sneak in a change to add the executable bit to checkout-gecko. MozReview-Commit-ID: CXlyHZJSHcP --HG-- extra : rebase_source : 80621d4833a9d745eaff7da4641dfd4ace8ae1db extra : histedit_source : e6ce7de5d14c8781d8dd94a8eff76c3227cd18b5 --- testing/docker/decision/Dockerfile | 1 - testing/docker/decision/bin/checkout-gecko | 0 testing/docker/decision/bin/run-action | 19 +++++++++++++++++++ testing/docker/decision/bin/run-decision | 19 +++++++++++++++++++ testing/docker/decision/system-setup.sh | 3 ++- 5 files changed, 40 insertions(+), 2 deletions(-) mode change 100644 => 100755 testing/docker/decision/bin/checkout-gecko create mode 100755 testing/docker/decision/bin/run-action create mode 100755 testing/docker/decision/bin/run-decision diff --git a/testing/docker/decision/Dockerfile b/testing/docker/decision/Dockerfile index 49fb3ea38703..1710bfa68a9a 100644 --- a/testing/docker/decision/Dockerfile +++ b/testing/docker/decision/Dockerfile @@ -9,7 +9,6 @@ ADD system-setup.sh tooltool.py /tmp/ RUN bash /tmp/system-setup.sh ADD bin /home/worker/bin -RUN chmod +x /home/worker/bin/* ENV PATH /home/worker/bin:$PATH ENV SHELL /bin/bash diff --git a/testing/docker/decision/bin/checkout-gecko b/testing/docker/decision/bin/checkout-gecko old mode 100644 new mode 100755 diff --git a/testing/docker/decision/bin/run-action b/testing/docker/decision/bin/run-action new file mode 100755 index 000000000000..3eec39eb6c18 --- /dev/null +++ b/testing/docker/decision/bin/run-action @@ -0,0 +1,19 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +set -ex + +# The script starts executing as root. We need to change ownership +# of the caches because they are initially owned by root:root. +if [ $(id -u) = 0 ]; then + chown worker:worker /home/worker/hg-shared /home/worker/workspace + + exec sudo -E -u worker /home/worker/bin/run-action "${@}" +fi + +/home/worker/bin/checkout-gecko /home/worker/workspace/gecko +cd /home/worker/workspace/gecko +ln -s /home/worker/artifacts artifacts && +eval "./mach taskgraph action-task ${ACTION_ARGS}" diff --git a/testing/docker/decision/bin/run-decision b/testing/docker/decision/bin/run-decision new file mode 100755 index 000000000000..b442ce25ccab --- /dev/null +++ b/testing/docker/decision/bin/run-decision @@ -0,0 +1,19 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +set -ex + +# The script starts executing as root. We need to change ownership +# of the caches because they are initially owned by root:root. 
+if [ $(id -u) = 0 ]; then + chown worker:worker /home/worker/hg-shared /home/worker/workspace + + exec sudo -E -u worker /home/worker/bin/run-decision "${@}" +fi + +/home/worker/bin/checkout-gecko /home/worker/workspace/gecko +cd /home/worker/workspace/gecko +ln -s /home/worker/artifacts artifacts +eval "./mach taskgraph decision ${DECISION_ARGS}" diff --git a/testing/docker/decision/system-setup.sh b/testing/docker/decision/system-setup.sh index b406def34696..893c32d3b64e 100644 --- a/testing/docker/decision/system-setup.sh +++ b/testing/docker/decision/system-setup.sh @@ -9,7 +9,8 @@ apt-get install -y --force-yes --no-install-recommends \ ca-certificates \ curl \ jq \ - python + python \ + sudo BUILD=/root/build mkdir $BUILD From 6aa899bbf44397f1f84eabbe6e9bfb31b795ce22 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Thu, 21 Jul 2016 11:32:07 -0700 Subject: [PATCH 32/63] Bug 1247168 - Tag and use version 0.1.2 of the decision image; r=dustin Changes to the decision Docker image have been compelted. We're ready to use the new image. We tag the image, update version references, change the task caches so the new Mercurial pooled storage from the robustcheckout extension is used, and convert the decision tasks to run as the "worker" user. MozReview-Commit-ID: 61v9Ivy59zG --HG-- extra : rebase_source : 640318a87660950c5e0680867a1bfdd68e35f127 extra : histedit_source : ec53fc576c00e5f2053167b37544ac7afccaecb5 --- .taskcluster.yml | 41 ++++++++++++++------------------ taskcluster/taskgraph/action.yml | 18 +++++--------- testing/docker/decision/VERSION | 2 +- 3 files changed, 25 insertions(+), 36 deletions(-) diff --git a/.taskcluster.yml b/.taskcluster.yml index 790b3952f864..ca4efcdf1ae7 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -70,29 +70,8 @@ tasks: GECKO_HEAD_REPOSITORY: '{{{url}}}' GECKO_HEAD_REF: '{{revision}}' GECKO_HEAD_REV: '{{revision}}' - - cache: - level-{{level}}-{{project}}-tc-vcs-public-sources: /home/worker/.tc-vcs/ - level-{{level}}-{{project}}-gecko-decision: /home/worker/workspace - - features: - taskclusterProxy: true - - # Note: This task is built server side without the context or tooling that - # exist in tree so we must hard code the version - image: 'taskcluster/decision:0.1.0' - - maxRunTime: 1800 - - command: - - /bin/bash - - -cx - - > - mkdir -p /home/worker/artifacts && - checkout-gecko /home/worker/workspace && - cd /home/worker/workspace/gecko && - ln -s /home/worker/artifacts artifacts && - ./mach taskgraph decision + # Arguments passed into `mach taskgraph decision` + DECISION_ARGS: > --pushlog-id='{{pushlog_id}}' --project='{{project}}' --message='{{comment}}' @@ -104,6 +83,22 @@ tasks: --head-rev='{{revision}}' --revision-hash='{{revision_hash}}' + cache: + level-{{level}}-hg-shared: /home/worker/hg-shared + level-{{level}}-{{project}}-gecko-decision: /home/worker/workspace + + features: + taskclusterProxy: true + + # Note: This task is built server side without the context or tooling that + # exist in tree so we must hard code the version + image: 'taskcluster/decision:0.1.2' + + maxRunTime: 1800 + + command: + - /home/worker/bin/run-decision + artifacts: 'public': type: 'directory' diff --git a/taskcluster/taskgraph/action.yml b/taskcluster/taskgraph/action.yml index 14fa7d5b54b2..c81fdd696c45 100644 --- a/taskcluster/taskgraph/action.yml +++ b/taskcluster/taskgraph/action.yml @@ -31,9 +31,12 @@ payload: GECKO_HEAD_REPOSITORY: '{{{head_repository}}}' GECKO_HEAD_REF: '{{head_ref}}' GECKO_HEAD_REV: '{{head_rev}}' + ACTION_ARGS: > + 
--decision-id='{{decision_task_id}}' + --task-labels='{{task_labels}}' cache: - level-{{level}}-{{project}}-tc-vcs-public-sources: /home/worker/.tc-vcs/ + level-{{level}}-hg-shared: /home/worker/hg-shared level-{{level}}-{{project}}-gecko-decision: /home/worker/workspace features: @@ -41,7 +44,7 @@ payload: # Note: This task is built server side without the context or tooling that # exist in tree so we must hard code the version - image: 'taskcluster/decision:0.1.0' + image: 'taskcluster/decision:0.1.2' # Virtually no network or other potentially risky operations happen as part # of the task timeout aside from the initial clone. We intentionally have @@ -50,16 +53,7 @@ payload: maxRunTime: 1800 command: - - /bin/bash - - -cx - - > - mkdir -p /home/worker/artifacts && - checkout-gecko /home/worker/workspace && - cd /home/worker/workspace/gecko && - ln -s /home/worker/artifacts artifacts && - ./mach taskgraph action-task - --decision-id='{{decision_task_id}}' - --task-labels='{{task_labels}}' + - /home/worker/bin/run-action artifacts: 'public': diff --git a/testing/docker/decision/VERSION b/testing/docker/decision/VERSION index 6e8bf73aa550..d917d3e26adc 100644 --- a/testing/docker/decision/VERSION +++ b/testing/docker/decision/VERSION @@ -1 +1 @@ -0.1.0 +0.1.2 From 193231d4c9040a1cfd1b3fc7b9b5609ffdef5d73 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Tue, 19 Jul 2016 13:54:20 -0700 Subject: [PATCH 33/63] Bug 1247168 - Reformat requirements.txt; r=dustin Visual aligning makes diffs harder to read. Use line continuations to avoid this. Also make the package list alphabetical. MozReview-Commit-ID: KqT4aqYyZfH --HG-- extra : rebase_source : 08d2e4f61860bf6183ec3afaf598be158cd182be extra : histedit_source : ff450a22617425214e90d42a6f1b530da8682847 --- testing/docker/lint/system-setup.sh | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/testing/docker/lint/system-setup.sh b/testing/docker/lint/system-setup.sh index 735823919143..4ac3ad1d481e 100644 --- a/testing/docker/lint/system-setup.sh +++ b/testing/docker/lint/system-setup.sh @@ -74,10 +74,14 @@ npm install -g taskcluster-vcs-v2.3.12.tar.gz ### cat >requirements.txt <<'EOF' -mccabe==0.4.0 --hash=sha256:cbc2938f6c01061bc6d21d0c838c2489664755cb18676f0734d7617f4577d09e -pep8==1.7.0 --hash=sha256:4fc2e478addcf17016657dff30b2d8d611e8341fac19ccf2768802f6635d7b8a -pyflakes==1.2.3 --hash=sha256:e87bac26c62ea5b45067cc89e4a12f56e1483f1f2cda17e7c9b375b9fd2f40da -flake8==2.5.4 --hash=sha256:fb5a67af4024622287a76abf6b7fe4fb3cfacf765a790976ce64f52c44c88e4a +flake8==2.5.4 \ + --hash=sha256:fb5a67af4024622287a76abf6b7fe4fb3cfacf765a790976ce64f52c44c88e4a +mccabe==0.4.0 \ + --hash=sha256:cbc2938f6c01061bc6d21d0c838c2489664755cb18676f0734d7617f4577d09e +pep8==1.7.0 \ + --hash=sha256:4fc2e478addcf17016657dff30b2d8d611e8341fac19ccf2768802f6635d7b8a +pyflakes==1.2.3 \ + --hash=sha256:e87bac26c62ea5b45067cc89e4a12f56e1483f1f2cda17e7c9b375b9fd2f40da EOF pip install --require-hashes -r requirements.txt From d1a2f7ba433ef78bdab7aa98d6503758e9bd0365 Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Fri, 22 Jul 2016 13:04:56 -0700 Subject: [PATCH 34/63] Bug 1247168 - Use vendored tooltool in lint image; r=dustin Using our special Dockerfile syntax to include arbitrary files, we include the previously vendored tooltool.py file in the image build context and add it directly from there. No github.com communication needed. 
MozReview-Commit-ID: J42iXj87LEu --HG-- extra : rebase_source : 90845e6793629b56998bf2fae2985913ee49c4eb extra : histedit_source : 1fd5e64e40ae700efcf78b54e2a865b0594e0955 --- testing/docker/lint/Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/testing/docker/lint/Dockerfile b/testing/docker/lint/Dockerfile index 7548d4425cec..f0b858be8ab1 100644 --- a/testing/docker/lint/Dockerfile +++ b/testing/docker/lint/Dockerfile @@ -4,10 +4,9 @@ MAINTAINER Andrew Halberstadt RUN useradd -d /home/worker -s /bin/bash -m worker WORKDIR /home/worker -# Install tooltool directly from github. RUN mkdir /build -ADD https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py /build/tooltool.py -RUN chmod +rx /build/tooltool.py +# %include testing/docker/decision/tooltool.py +ADD topsrcdir/testing/docker/decision/tooltool.py /build/tooltool.py # Install lint packages ADD system-setup.sh /tmp/system-setup.sh From 5cab45c387ead9c2b2a109396b0dec255926ae9c Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Tue, 19 Jul 2016 14:43:25 -0700 Subject: [PATCH 35/63] Bug 1247168 - Install Mercurial 3.8.4 in lint image; r=dustin Like we do for the decision image, we install Mercurial 3.8.4 from deb files hosted on tooltool. This provides more control and determinism than installing via apt. As part of this change, Mercurial is upgraded from whatever was hosted in apt to 3.8.4. Since the deb packages don't provide a global hgrc, we create one ourselves. This is effectively copied from the decision image. Most of the work is being done in a new, standalone install-mercurial.sh script. This script is part of the newly-established testing/docker/recipes directory. The intent of this directory is to hold common files referenced by multiple images. Our custom Dockerfile syntax to include files from outside the directory with the Dockerfile is used to add these files to the build context. 
MozReview-Commit-ID: K7gVm2Geihj --HG-- extra : rebase_source : 6d1089ac34e43d399c7cf608d09eaaf405df00f7 extra : histedit_source : 656a4cea33ef913102b03238475461884c2749a0 --- testing/docker/lint/Dockerfile | 7 ++-- testing/docker/lint/system-setup.sh | 3 +- testing/docker/recipes/install-mercurial.sh | 42 +++++++++++++++++++++ 3 files changed, 48 insertions(+), 4 deletions(-) create mode 100644 testing/docker/recipes/install-mercurial.sh diff --git a/testing/docker/lint/Dockerfile b/testing/docker/lint/Dockerfile index f0b858be8ab1..6f8d1e191549 100644 --- a/testing/docker/lint/Dockerfile +++ b/testing/docker/lint/Dockerfile @@ -8,9 +8,10 @@ RUN mkdir /build # %include testing/docker/decision/tooltool.py ADD topsrcdir/testing/docker/decision/tooltool.py /build/tooltool.py -# Install lint packages -ADD system-setup.sh /tmp/system-setup.sh -RUN bash /tmp/system-setup.sh +# %include testing/docker/recipes/install-mercurial.sh +ADD topsrcdir/testing/docker/recipes/install-mercurial.sh /build/install-mercurial.sh +ADD system-setup.sh /tmp/system-setup.sh +RUN bash /tmp/system-setup.sh # Set variable normally configured at login, by the shells parent process, these # are taken from GNU su manual diff --git a/testing/docker/lint/system-setup.sh b/testing/docker/lint/system-setup.sh index 4ac3ad1d481e..d2d654260669 100644 --- a/testing/docker/lint/system-setup.sh +++ b/testing/docker/lint/system-setup.sh @@ -11,7 +11,6 @@ cd /setup apt_packages=() apt_packages+=('curl') -apt_packages+=('mercurial') apt_packages+=('python') apt_packages+=('python-pip') apt_packages+=('sudo') @@ -31,6 +30,8 @@ tooltool_fetch() { rm manifest.tt } +cd /build +. install-mercurial.sh ### # ESLint Setup diff --git a/testing/docker/recipes/install-mercurial.sh b/testing/docker/recipes/install-mercurial.sh new file mode 100644 index 000000000000..be3787b0ba3b --- /dev/null +++ b/testing/docker/recipes/install-mercurial.sh @@ -0,0 +1,42 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# This script installs and configures Mercurial. + +set -e + +# ASSERTION: We are running Ubuntu 16.04. +tooltool_fetch </etc/mercurial/hgrc < Date: Tue, 19 Jul 2016 16:46:25 -0700 Subject: [PATCH 36/63] Bug 1247168 - Make mozilla-unified the base repository for Firefox; r=dustin https://hg.mozilla.org/mozilla-unified contains heads from all the major Firefox repos. In addition, it is encoded on the server in such a way that it is several hundred megabytes smaller despite containing 30,000+ more changesets. This means faster clones and faster operations. Cloning this repo does require a new version of Mercurial. So if any TC tasks are using an ancient Mercurial - one that is vulnerable to known CVE issues in fact - this will flush them out. MozReview-Commit-ID: 2VHDa6FEeeJ --HG-- extra : rebase_source : db4990bcde0503fd14d82a5d16c71adbb4f92be3 extra : histedit_source : 6c555bd3df12536d1c48b45d8cc76611e3f7032f --- .taskcluster.yml | 4 +++- taskcluster/taskgraph/action.yml | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.taskcluster.yml b/.taskcluster.yml index ca4efcdf1ae7..799d6e77f833 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -66,11 +66,13 @@ tasks: env: # checkout-gecko uses these to check out the source; the inputs # to `mach taskgraph decision` are all on the command line. 
- GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-central' + GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified' GECKO_HEAD_REPOSITORY: '{{{url}}}' GECKO_HEAD_REF: '{{revision}}' GECKO_HEAD_REV: '{{revision}}' # Arguments passed into `mach taskgraph decision` + # TODO use mozilla-unified for the base repository once the tc-vcs + # tar.gz archives are created or tc-vcs isn't being used. DECISION_ARGS: > --pushlog-id='{{pushlog_id}}' --project='{{project}}' diff --git a/taskcluster/taskgraph/action.yml b/taskcluster/taskgraph/action.yml index c81fdd696c45..cfa6b594557d 100644 --- a/taskcluster/taskgraph/action.yml +++ b/taskcluster/taskgraph/action.yml @@ -27,7 +27,7 @@ routes: payload: env: - GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-central' + GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified' GECKO_HEAD_REPOSITORY: '{{{head_repository}}}' GECKO_HEAD_REF: '{{head_ref}}' GECKO_HEAD_REV: '{{head_rev}}' From 622ed9c93cb24d57d02604463b644dea00cd761e Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Thu, 21 Jul 2016 14:57:37 -0700 Subject: [PATCH 37/63] Bug 1247168 - Add a script to perform a checkout then run a command; r=dustin The script will be used as the main command in task YAML files. It changes ownership of caches. Then switches to the "worker" user. Then performs a Gecko checkout. Then executes whatever command was requested via its arguments. The script has been added to the shared recipes directory so it can eventually be used by other Docker images. This means if we e.g. want to add Git support, we only need to update one file in the tree. MozReview-Commit-ID: Fuy1VrdSGYn --HG-- extra : rebase_source : 407b2c584d56c95e9d9b23781539f2979a775893 extra : histedit_source : bd8b7fd541ed27da31082730ad3054b68b06544b --- testing/docker/lint/Dockerfile | 4 ++ testing/docker/recipes/checkout-gecko-and-run | 37 +++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100755 testing/docker/recipes/checkout-gecko-and-run diff --git a/testing/docker/lint/Dockerfile b/testing/docker/lint/Dockerfile index 6f8d1e191549..a992995d0666 100644 --- a/testing/docker/lint/Dockerfile +++ b/testing/docker/lint/Dockerfile @@ -13,6 +13,10 @@ ADD topsrcdir/testing/docker/recipes/install-mercurial.sh /build/install-mercuri ADD system-setup.sh /tmp/system-setup.sh RUN bash /tmp/system-setup.sh +# %include testing/docker/recipes/checkout-gecko-and-run +ADD topsrcdir/testing/docker/recipes/checkout-gecko-and-run /home/worker/bin/checkout-gecko-and-run +RUN chown -R worker:worker /home/worker/bin && chmod 755 /home/worker/bin/* + # Set variable normally configured at login, by the shells parent process, these # are taken from GNU su manual ENV HOME /home/worker diff --git a/testing/docker/recipes/checkout-gecko-and-run b/testing/docker/recipes/checkout-gecko-and-run new file mode 100755 index 000000000000..4a851b06f4e5 --- /dev/null +++ b/testing/docker/recipes/checkout-gecko-and-run @@ -0,0 +1,37 @@ +#!/bin/bash +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +set -ex + +# The script starts executing as root. We need to change ownership +# of the caches because they are initially owned by root:root. There +# may not be a cache mount/directory on some instances. So create the +# directory if missing. 
+if [ $(id -u) = 0 ]; then + mkdir -p /home/worker/workspace + chown worker:worker /home/worker/hg-shared /home/worker/workspace + + exec sudo -E -u worker ${0} "${@}" +fi + +DEST=$1 +shift + +# We set the base repository to mozilla-central so tc-vcs doesn't get +# confused. Switch to mozilla-unified because robustcheckout works best +# with it. +if [ "${GECKO_BASE_REPOSITORY}" = "https://hg.mozilla.org/mozilla-central" ]; then + GECKO_BASE_REPOSITORY=https://hg.mozilla.org/mozilla-unified +fi + +/usr/bin/hg robustcheckout \ + --sharebase /home/worker/hg-shared \ + --purge \ + --upstream ${GECKO_BASE_REPOSITORY} \ + --revision ${GECKO_HEAD_REV} \ + ${GECKO_HEAD_REPOSITORY} \ + ${DEST} + +exec "${@}" From 9275cbf8aff72a97db573caba5ac995630bed45a Mon Sep 17 00:00:00 2001 From: Gregory Szorc Date: Tue, 19 Jul 2016 13:30:03 -0700 Subject: [PATCH 38/63] Bug 1247168 - Convert lint image and tasks to use robustcheckout; r=dustin The robustcheckout Mercurial extension does a clone+checkout optimally. Read the bug for more on it. robustcheckout is already used by mozharness automation. It has resulted in a significant reduction in I/O usage and utilization in automation. This commit replaces tc-vcs with the robustcheckout equivalent. We replace the existing tc-vcs scope and cache with a new one. Because Dustin and I are paranoid, we maintain separate caches per SCM level - even though we could arguably share the same cache. Defense in depth. Robustcheckout (when used with --sharebase) pools storage for related repos automatically. i.e. changesets from inbound and central will be in the same store. This means you likely only have one copy of each changeset per cache. This can result in significant space savings. And, since there are fewer copies floating around, hg.mozilla.org and various network appliances are working less too! Since tc-vcs is no longer used, we stop it from being installed. While we're here, we also change the images to execute as the "worker" user. This happens automatically as a result of using the "checkout-and-run" script. MozReview-Commit-ID: EDeebuP7TkT --HG-- extra : rebase_source : 2bec5dd9d6fe5565831bb35f195859aa12dd0bf2 extra : intermediate-source : 06481d97a485f6566554b087bc3880d76361e8ec extra : source : d368700c93ef085325a081219d7aeb8512bc54a1 extra : histedit_source : c07505273fc8f10acf8e8d3ee01e327afd0aa63d --- taskcluster/ci/legacy/tasks/lint.yml | 11 +++++++---- .../ci/legacy/tasks/tests/eslint-gecko.yml | 3 ++- .../ci/legacy/tasks/tests/mozlint-flake8.yml | 3 ++- .../ci/legacy/tasks/tests/taskgraph-tests.yml | 3 ++- testing/docker/lint/system-setup.sh | 18 +++--------------- testing/docker/recipes/install-mercurial.sh | 17 +++++++++++++++++ 6 files changed, 33 insertions(+), 22 deletions(-) diff --git a/taskcluster/ci/legacy/tasks/lint.yml b/taskcluster/ci/legacy/tasks/lint.yml index 18dbea167fd1..cdd6a51ece40 100644 --- a/taskcluster/ci/legacy/tasks/lint.yml +++ b/taskcluster/ci/legacy/tasks/lint.yml @@ -23,16 +23,19 @@ task: - 'index.gecko.v1.{{project}}.revision.linux.{{head_rev}}.{{build_name}}' - 'index.gecko.v1.{{project}}.latest.linux.{{build_name}}' scopes: - # Nearly all of our build tasks use tc-vcs so just include the scope across - # the board. 
- - 'docker-worker:cache:level-{{level}}-{{project}}-tc-vcs' + - 'docker-worker:cache:level-{{level}}-hg-shared' payload: # Thirty minutes should be enough for lint checks maxRunTime: 1800 cache: - level-{{level}}-{{project}}-tc-vcs: '/home/worker/.tc-vcs' + level-{{level}}-hg-shared: '/home/worker/hg-shared' + + env: + GECKO_BASE_REPOSITORY: '{{base_repository}}' + GECKO_HEAD_REPOSITORY: '{{head_repository}}' + GECKO_HEAD_REV: '{{head_rev}}' extra: build_product: '{{build_product}}' diff --git a/taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml b/taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml index 6eed081d984d..a140d57d2454 100644 --- a/taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml +++ b/taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml @@ -20,10 +20,11 @@ task: task-reference: "" command: + - /home/worker/bin/checkout-gecko-and-run + - gecko - bash - -cx - > - tc-vcs checkout ./gecko {{base_repository}} {{head_repository}} {{head_rev}} {{head_ref}} && cd gecko/tools/lint/eslint && /build/tooltool.py fetch -m manifest.tt && tar xvfz eslint.tar.gz && diff --git a/taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml b/taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml index b53e0a9841b2..ad0aa7051916 100644 --- a/taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml +++ b/taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml @@ -18,10 +18,11 @@ task: taskId: task-reference: "" command: + - /home/worker/bin/checkout-gecko-and-run + - gecko - bash - -cx - > - tc-vcs checkout ./gecko {{base_repository}} {{head_repository}} {{head_rev}} {{head_ref}} && cd gecko && ./mach lint -l flake8 -f treeherder extra: diff --git a/taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml b/taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml index f002b7dc5395..54e1291d6911 100644 --- a/taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml +++ b/taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml @@ -18,10 +18,11 @@ task: taskId: task-reference: "" command: + - /home/worker/bin/checkout-gecko-and-run + - gecko - bash - -cx - > - tc-vcs checkout ./gecko {{base_repository}} {{head_repository}} {{head_rev}} {{head_ref}} && cd gecko && ./mach taskgraph python-tests extra: diff --git a/testing/docker/lint/system-setup.sh b/testing/docker/lint/system-setup.sh index d2d654260669..9f8052a2d81a 100644 --- a/testing/docker/lint/system-setup.sh +++ b/testing/docker/lint/system-setup.sh @@ -41,6 +41,7 @@ cd /build # For future reference things like this don't need to be uploaded to tooltool, as long # as we verify the hash, we can download it from the external net. 
+cd /setup tooltool_fetch <<'EOF' [ { @@ -55,25 +56,11 @@ tar -C /usr/local --strip-components 1 -xJ < node-*.tar.xz node -v # verify npm -v -# install taskcluster-vcs@2.3.12 -tooltool_fetch <<'EOF' -[ -{ -"size": 6282161, -"visibility": "public", -"digest": "a781a96e596f6403eca6ec2300adb9c1a396659393e16993c66f98a658050e557bc681d521f70b50c1162aa4b435274e0098ffcbd37cbe969c0e4f69be19a1e0", -"algorithm": "sha512", -"filename": "taskcluster-vcs-v2.3.12.tar.gz" -} -] -EOF -npm install -g taskcluster-vcs-v2.3.12.tar.gz - - ### # Flake8 Setup ### +cd /setup cat >requirements.txt <<'EOF' flake8==2.5.4 \ --hash=sha256:fb5a67af4024622287a76abf6b7fe4fb3cfacf765a790976ce64f52c44c88e4a @@ -89,3 +76,4 @@ pip install --require-hashes -r requirements.txt cd / rm -rf /setup + diff --git a/testing/docker/recipes/install-mercurial.sh b/testing/docker/recipes/install-mercurial.sh index be3787b0ba3b..bc1ea620266c 100644 --- a/testing/docker/recipes/install-mercurial.sh +++ b/testing/docker/recipes/install-mercurial.sh @@ -27,6 +27,20 @@ EOF dpkg -i mercurial-common_3.8.4_all.deb mercurial_3.8.4_amd64.deb +mkdir -p /usr/local/mercurial +cd /usr/local/mercurial +tooltool_fetch <<'EOF' +[ +{ + "size": 11849, + "digest": "c88d9b8afd6649bd28bbacfa654ebefec8087a01d1662004aae088d485edeb03a92df1193d1310c0369d7721f475b974fcd4a911428ec65936f7e40cf1609c49", + "algorithm": "sha512", + "filename": "robustcheckout.py" +} +] +EOF +chmod 644 /usr/local/mercurial/robustcheckout.py + mkdir -p /etc/mercurial cat >/etc/mercurial/hgrc < Date: Wed, 20 Jul 2016 16:21:46 -0700 Subject: [PATCH 39/63] Bug 1247168 - Use a cache for repo checkout in lint tasks; r=dustin Previously, every lint task would have to create its own checkout. This was time consuming. The robustcheckout extension purges the working copy of *all* untracked and ignored files. It also restores modified files to their original state. In other words, as long as you trust Mercurial to go from revision X to revision Y, robustcheckout is as good as a fresh checkout. This commit adds a cache for the working directory checkout so lint tasks only have to effectively perform incremental `hg update` between task executions. This should make tasks spend a lot less time doing version control foo. On Try, time for flake8 tasks is currently hovering around 4 minutes. After this change, I've seen tasks finish as quickly as 11s! But that was with a hacked up legacy.py that added the workspace cache to the whitelist for Try. While I would like to see workspace reuse on Try eventually, this is not the right commit to roll that out in. 
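In effect, a task reusing the cached working directory only has to do an incremental update. Conceptually that is close to the plain Mercurial sequence below; this is only a sketch of what robustcheckout does on our behalf, leaving out its retry handling and share-pooling:

    cd /home/worker/workspace/gecko
    # fetch only the changesets not already present in the shared store
    hg pull -r $GECKO_HEAD_REV $GECKO_HEAD_REPOSITORY
    # discard local modifications and move to the requested revision
    hg update --clean -r $GECKO_HEAD_REV
    # delete untracked and ignored files (requires the purge extension)
    hg purge --all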
MozReview-Commit-ID: 66P2rt896qE --HG-- extra : rebase_source : 2a7c8d396e85ba4eae84f8843256050a2288c9d0 extra : intermediate-source : c478ecb14bf6164ef0c955acee3a0a2f18e415c5 extra : source : 63d54efdc1f8effb0370644c11014a3f0404073b extra : histedit_source : 560701ce41c870171443c78e3a9de3998fbf9306 --- taskcluster/ci/legacy/tasks/lint.yml | 2 ++ taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml | 4 ++-- taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml | 4 ++-- taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml | 4 ++-- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/taskcluster/ci/legacy/tasks/lint.yml b/taskcluster/ci/legacy/tasks/lint.yml index cdd6a51ece40..7c0d02525272 100644 --- a/taskcluster/ci/legacy/tasks/lint.yml +++ b/taskcluster/ci/legacy/tasks/lint.yml @@ -24,6 +24,7 @@ task: - 'index.gecko.v1.{{project}}.latest.linux.{{build_name}}' scopes: - 'docker-worker:cache:level-{{level}}-hg-shared' + - 'docker-worker:cache:level-{{level}}-workspace' payload: # Thirty minutes should be enough for lint checks @@ -31,6 +32,7 @@ task: cache: level-{{level}}-hg-shared: '/home/worker/hg-shared' + level-{{level}}-workspace: '/home/worker/workspace' env: GECKO_BASE_REPOSITORY: '{{base_repository}}' diff --git a/taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml b/taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml index a140d57d2454..9dea3ede0394 100644 --- a/taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml +++ b/taskcluster/ci/legacy/tasks/tests/eslint-gecko.yml @@ -21,11 +21,11 @@ task: command: - /home/worker/bin/checkout-gecko-and-run - - gecko + - /home/worker/workspace/gecko - bash - -cx - > - cd gecko/tools/lint/eslint && + cd /home/worker/workspace/gecko/tools/lint/eslint && /build/tooltool.py fetch -m manifest.tt && tar xvfz eslint.tar.gz && rm eslint.tar.gz && diff --git a/taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml b/taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml index ad0aa7051916..5632f22e3f1d 100644 --- a/taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml +++ b/taskcluster/ci/legacy/tasks/tests/mozlint-flake8.yml @@ -19,11 +19,11 @@ task: task-reference: "" command: - /home/worker/bin/checkout-gecko-and-run - - gecko + - /home/worker/workspace/gecko - bash - -cx - > - cd gecko && + cd /home/worker/workspace/gecko && ./mach lint -l flake8 -f treeherder extra: locations: diff --git a/taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml b/taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml index 54e1291d6911..2db83599f2c2 100644 --- a/taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml +++ b/taskcluster/ci/legacy/tasks/tests/taskgraph-tests.yml @@ -19,11 +19,11 @@ task: task-reference: "" command: - /home/worker/bin/checkout-gecko-and-run - - gecko + - /home/worker/workspace/gecko - bash - -cx - > - cd gecko && + cd /home/worker/workspace/gecko && ./mach taskgraph python-tests extra: locations: From 581b147a7505a2db82493da6e5b4da251198fac2 Mon Sep 17 00:00:00 2001 From: Mike Hommey Date: Fri, 22 Jul 2016 19:13:56 +0900 Subject: [PATCH 40/63] Bug 1289239 - Remove autoconf-based C++ compiler validation check. r=chmanchester Python configure is already checking that the C++ compiler is indeed a C++ compiler, no need to double check in old-configure. 
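The removed m4 block below amounts to compiling and linking a trivial C++ translation unit. As a standalone sketch (hypothetical file names, not the actual configure implementation), the same sanity check looks roughly like:

    cat > conftest.cpp <<'EOF'
    int main() { int *foo = new int; delete foo; return 0; }
    EOF
    if ! ${CXX} ${CXXFLAGS} ${LDFLAGS} conftest.cpp -o conftest; then
      echo "$CXX $CXXFLAGS $LDFLAGS failed to compile and link a simple C++ source." >&2
      exit 1
    fi
    rm -f conftest conftest.cpp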
--HG-- extra : rebase_source : fcb6fc7ac88dcf3ef172cd30e23454b654e08c03 --- build/autoconf/compiler-opts.m4 | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/build/autoconf/compiler-opts.m4 b/build/autoconf/compiler-opts.m4 index eaa4a0c67010..1e849b1cc79e 100644 --- a/build/autoconf/compiler-opts.m4 +++ b/build/autoconf/compiler-opts.m4 @@ -147,18 +147,6 @@ if test "$CLANG_CXX"; then _WARNINGS_CXXFLAGS="${_WARNINGS_CXXFLAGS} -Wno-unknown-warning-option -Wno-return-type-c-linkage" fi -AC_MSG_CHECKING([whether the C++ compiler ($CXX $CXXFLAGS $LDFLAGS) actually is a C++ compiler]) -AC_LANG_SAVE -AC_LANG_CPLUSPLUS -_SAVE_LIBS=$LIBS -LIBS= -AC_TRY_LINK([#include ], [int *foo = new int;],, - AC_MSG_RESULT([no]) - AC_MSG_ERROR([$CXX $CXXFLAGS $LDFLAGS failed to compile and link a simple C++ source.])) -LIBS=$_SAVE_LIBS -AC_LANG_RESTORE -AC_MSG_RESULT([yes]) - if test -n "$DEVELOPER_OPTIONS"; then MOZ_FORCE_GOLD=1 fi From 02e5baaf7489ba4ee309a2cf49c643629bd6beef Mon Sep 17 00:00:00 2001 From: Mike Hommey Date: Fri, 22 Jul 2016 15:56:26 +0900 Subject: [PATCH 41/63] Bug 1289246 - Do not hardcode mt.exe as the Manifest Tool. r=chmanchester Configure uses the value of the MT environment variable before falling back to mt(.exe), but the build system was completely ignoring the MT environment variable. --HG-- extra : rebase_source : 8c9b43aeb08493ae5bd6d6361f4f18f097ea0553 --- config/external/nss/Makefile.in | 1 + config/rules.mk | 16 ++++++++-------- js/src/old-configure.in | 4 +++- old-configure.in | 4 +++- 4 files changed, 15 insertions(+), 10 deletions(-) diff --git a/config/external/nss/Makefile.in b/config/external/nss/Makefile.in index b3a17bfea5fa..dc1f03424984 100644 --- a/config/external/nss/Makefile.in +++ b/config/external/nss/Makefile.in @@ -117,6 +117,7 @@ export MOZ_DEBUG_SYMBOLS DEFAULT_GMAKE_FLAGS = DEFAULT_GMAKE_FLAGS += CC='$(CC)' +DEFAULT_GMAKE_FLAGS += MT='$(MT)' DEFAULT_GMAKE_FLAGS += SOURCE_MD_DIR=$(ABS_DIST) DEFAULT_GMAKE_FLAGS += SOURCE_MDHEADERS_DIR=$(NSPR_INCLUDE_DIR) DEFAULT_GMAKE_FLAGS += DIST=$(ABS_DIST) diff --git a/config/rules.mk b/config/rules.mk index 41eb664201fd..1a1856486575 100644 --- a/config/rules.mk +++ b/config/rules.mk @@ -662,14 +662,14 @@ ifdef MSMANIFEST_TOOL @if test -f $@.manifest; then \ if test -f '$(srcdir)/$@.manifest'; then \ echo 'Embedding manifest from $(srcdir)/$@.manifest and $@.manifest'; \ - mt.exe -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' $@.manifest -OUTPUTRESOURCE:$@\;1; \ + $(MT) -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' $@.manifest -OUTPUTRESOURCE:$@\;1; \ else \ echo 'Embedding manifest from $@.manifest'; \ - mt.exe -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \ + $(MT) -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \ fi; \ elif test -f '$(srcdir)/$@.manifest'; then \ echo 'Embedding manifest from $(srcdir)/$@.manifest'; \ - mt.exe -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' -OUTPUTRESOURCE:$@\;1; \ + $(MT) -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' -OUTPUTRESOURCE:$@\;1; \ fi endif # MSVC with manifest tool ifdef MOZ_PROFILE_GENERATE @@ -697,14 +697,14 @@ ifdef MSMANIFEST_TOOL @if test -f $@.manifest; then \ if test -f '$(srcdir)/$@.manifest'; then \ echo 'Embedding manifest from $(srcdir)/$@.manifest and $@.manifest'; \ - mt.exe -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' $@.manifest -OUTPUTRESOURCE:$@\;1; \ + $(MT) -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' $@.manifest -OUTPUTRESOURCE:$@\;1; \ else \ echo 'Embedding manifest from $@.manifest'; \ - mt.exe -NOLOGO -MANIFEST $@.manifest 
-OUTPUTRESOURCE:$@\;1; \ + $(MT) -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \ fi; \ elif test -f '$(srcdir)/$@.manifest'; then \ echo 'Embedding manifest from $(srcdir)/$@.manifest'; \ - mt.exe -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' -OUTPUTRESOURCE:$@\;1; \ + $(MT) -NOLOGO -MANIFEST '$(win_srcdir)/$@.manifest' -OUTPUTRESOURCE:$@\;1; \ fi endif # MSVC with manifest tool else @@ -732,7 +732,7 @@ ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH)) $(EXPAND_LD) -nologo -out:$@ -pdb:$(LINK_PDBFILE) $< $(WIN32_EXE_LDFLAGS) $(LDFLAGS) $(MOZ_PROGRAM_LDFLAGS) $(STATIC_LIBS) $(SHARED_LIBS) $(EXTRA_LIBS) $(OS_LIBS) ifdef MSMANIFEST_TOOL @if test -f $@.manifest; then \ - mt.exe -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \ + $(MT) -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;1; \ rm -f $@.manifest; \ fi endif # MSVC with manifest tool @@ -833,7 +833,7 @@ ifeq (_WINNT,$(GNU_CC)_$(OS_ARCH)) ifdef MSMANIFEST_TOOL ifdef EMBED_MANIFEST_AT @if test -f $@.manifest; then \ - mt.exe -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;$(EMBED_MANIFEST_AT); \ + $(MT) -NOLOGO -MANIFEST $@.manifest -OUTPUTRESOURCE:$@\;$(EMBED_MANIFEST_AT); \ rm -f $@.manifest; \ fi endif # EMBED_MANIFEST_AT diff --git a/js/src/old-configure.in b/js/src/old-configure.in index 11dab45dc5f3..126c57ed7a86 100644 --- a/js/src/old-configure.in +++ b/js/src/old-configure.in @@ -241,7 +241,8 @@ case "$target" in dnl Ensure that mt.exe is 'Microsoft (R) Manifest Tool', dnl not something else like "magnetic tape manipulation utility". - MSMT_TOOL=`${MT-mt} 2>&1|grep 'Microsoft (R) Manifest Tool'` + MT=${MT-mt.exe} + MSMT_TOOL=`${MT} 2>&1|grep 'Microsoft (R) Manifest Tool'` if test -z "$MSMT_TOOL"; then AC_MSG_ERROR([Microsoft (R) Manifest Tool must be in your \$PATH.]) fi @@ -256,6 +257,7 @@ case "$target" in MSMANIFEST_TOOL=1 unset MSMT_TOOL + AC_SUBST(MT) # Check linker version _LD_FULL_VERSION=`"${LD}" -v 2>&1 | sed -nre "$_MSVC_VER_FILTER"` diff --git a/old-configure.in b/old-configure.in index bb246a1fb6bb..d2204312ba47 100644 --- a/old-configure.in +++ b/old-configure.in @@ -368,7 +368,8 @@ case "$target" in dnl Ensure that mt.exe is 'Microsoft (R) Manifest Tool', dnl not something else like "magnetic tape manipulation utility". - MSMT_TOOL=`${MT-mt} 2>&1|grep 'Microsoft (R) Manifest Tool'` + MT=${MT-mt.exe} + MSMT_TOOL=`${MT} 2>&1|grep 'Microsoft (R) Manifest Tool'` if test -z "$MSMT_TOOL"; then AC_MSG_ERROR([Microsoft (R) Manifest Tool must be in your \$PATH.]) fi @@ -383,6 +384,7 @@ case "$target" in MSMANIFEST_TOOL=1 unset MSMT_TOOL + AC_SUBST(MT) # Check linker version _LD_FULL_VERSION=`"${LD}" -v 2>&1 | sed -nre "$_MSVC_VER_FILTER"` From 237d5b4a0d872fded04839b0a9e0d5a7a1d4922b Mon Sep 17 00:00:00 2001 From: JW Wang Date: Tue, 12 Jul 2016 14:32:36 +0800 Subject: [PATCH 42/63] Bug 1288344. Part 1 - Don't change play state when entering/exiting dormant state. 
r=cpearce MozReview-Commit-ID: 24damxCvWl8 --HG-- extra : rebase_source : 31058b1ace6d1d8e462b24f96845bb81251445d7 --- dom/media/MediaDecoder.cpp | 16 ++-------------- dom/media/omx/MediaOmxCommonDecoder.cpp | 2 -- 2 files changed, 2 insertions(+), 16 deletions(-) diff --git a/dom/media/MediaDecoder.cpp b/dom/media/MediaDecoder.cpp index 435e6ad8ee70..4d1134373ce5 100644 --- a/dom/media/MediaDecoder.cpp +++ b/dom/media/MediaDecoder.cpp @@ -372,20 +372,8 @@ MediaDecoder::UpdateDormantState(bool aDormantTimeout, bool aActivity) return; } - if (mIsDormant) { - DECODER_LOG("UpdateDormantState() entering DORMANT state"); - // enter dormant state - mDecoderStateMachine->DispatchSetDormant(true); - if (IsEnded()) { - mWasEndedWhenEnteredDormant = true; - } - mNextState = mPlayState; - ChangeState(PLAY_STATE_LOADING); - } else { - DECODER_LOG("UpdateDormantState() leaving DORMANT state"); - // exit dormant state - mDecoderStateMachine->DispatchSetDormant(false); - } + DECODER_LOG("UpdateDormantState() %s DORMANT state", mIsDormant ? "entering" : "exiting"); + mDecoderStateMachine->DispatchSetDormant(mIsDormant); } void diff --git a/dom/media/omx/MediaOmxCommonDecoder.cpp b/dom/media/omx/MediaOmxCommonDecoder.cpp index 2e6841223e7c..8667e5fc96cb 100644 --- a/dom/media/omx/MediaOmxCommonDecoder.cpp +++ b/dom/media/omx/MediaOmxCommonDecoder.cpp @@ -156,8 +156,6 @@ MediaOmxCommonDecoder::ResumeStateMachine() // Call Seek of MediaDecoderStateMachine to suppress seek events. GetStateMachine()->InvokeSeek(target); - mNextState = mPlayState; - ChangeState(PLAY_STATE_LOADING); // exit dormant state GetStateMachine()->DispatchSetDormant(false); UpdateLogicalPosition(); From db58db163a727caf7abbb25eeaefacdb586b331e Mon Sep 17 00:00:00 2001 From: JW Wang Date: Tue, 12 Jul 2016 14:34:06 +0800 Subject: [PATCH 43/63] Bug 1288344. Part 2 - Remove MediaDecoder::mWasEndedWhenEnteredDormant. r=cpearce MozReview-Commit-ID: edGkP9mv6L --HG-- extra : rebase_source : 4833d10975c6dd241d41bf86f81b08a9fe76784c --- dom/media/MediaDecoder.cpp | 5 +---- dom/media/MediaDecoder.h | 6 ------ 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/dom/media/MediaDecoder.cpp b/dom/media/MediaDecoder.cpp index 4d1134373ce5..41b5b7570860 100644 --- a/dom/media/MediaDecoder.cpp +++ b/dom/media/MediaDecoder.cpp @@ -510,7 +510,6 @@ MediaDecoder::MediaDecoder(MediaDecoderOwner* aOwner) , mMediaTracksConstructed(false) , mFiredMetadataLoaded(false) , mIsDormant(false) - , mWasEndedWhenEnteredDormant(false) , mIsHeuristicDormantSupported( Preferences::GetBool("media.decoder.heuristic.dormant.enabled", false)) , mHeuristicDormantTimeout( @@ -818,7 +817,6 @@ MediaDecoder::Seek(double aTime, SeekTarget::Type aSeekType, dom::Promise* aProm int64_t timeUsecs = TimeUnit::FromSeconds(aTime).ToMicroseconds(); mLogicalPosition = aTime; - mWasEndedWhenEnteredDormant = false; mLogicallySeeking = true; SeekTarget target = SeekTarget(timeUsecs, aSeekType); @@ -1122,8 +1120,7 @@ bool MediaDecoder::IsEnded() const { MOZ_ASSERT(NS_IsMainThread()); - return mPlayState == PLAY_STATE_ENDED || - (mWasEndedWhenEnteredDormant && (mPlayState != PLAY_STATE_SHUTDOWN)); + return mPlayState == PLAY_STATE_ENDED; } void diff --git a/dom/media/MediaDecoder.h b/dom/media/MediaDecoder.h index cf22ad398d7e..4d88a1d93022 100644 --- a/dom/media/MediaDecoder.h +++ b/dom/media/MediaDecoder.h @@ -715,12 +715,6 @@ protected: // True if MediaDecoder is in dormant state. bool mIsDormant; - // True if MediaDecoder was PLAY_STATE_ENDED state, when entering to dormant. 
- // When MediaCodec is in dormant during PLAY_STATE_ENDED state, PlayState - // becomes different from PLAY_STATE_ENDED. But the MediaDecoder need to act - // as in PLAY_STATE_ENDED state to MediaDecoderOwner. - bool mWasEndedWhenEnteredDormant; - // True if heuristic dormant is supported. const bool mIsHeuristicDormantSupported; From 271ec9548b3093834fe2c57caa98b82b5c391d65 Mon Sep 17 00:00:00 2001 From: Botond Ballo Date: Fri, 8 Jul 2016 16:30:47 -0400 Subject: [PATCH 44/63] Bug 1285619 - Introduce a ResetLayerStateForRecycling() helper function. r=mstange MozReview-Commit-ID: DLktq4PYNba --HG-- extra : rebase_source : ff9445f1d2593232b30d1ef643af5dd30c7e6375 --- layout/base/FrameLayerBuilder.cpp | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/layout/base/FrameLayerBuilder.cpp b/layout/base/FrameLayerBuilder.cpp index e794d3764fd6..7f1199b2a538 100644 --- a/layout/base/FrameLayerBuilder.cpp +++ b/layout/base/FrameLayerBuilder.cpp @@ -2050,6 +2050,15 @@ FrameLayerBuilder::GetDebugSingleOldPaintedLayerForFrame(nsIFrame* aFrame) return layer->AsPaintedLayer(); } +// Reset state that should not persist when a layer is recycled. +static void +ResetLayerStateForRecycling(Layer* aLayer) { + // Currently, this clears the mask layer and ancestor mask layers. + // Other cleanup may be added here. + aLayer->SetMaskLayer(nullptr); + aLayer->SetAncestorMaskLayers({}); +} + already_AddRefed ContainerState::CreateOrRecycleColorLayer(PaintedLayer *aPainted) { @@ -2268,8 +2277,7 @@ ContainerState::RecyclePaintedLayer(PaintedLayer* aLayer, { // Clear clip rect and mask layer so we don't accidentally stay clipped. // We will reapply any necessary clipping. - aLayer->SetMaskLayer(nullptr); - aLayer->SetAncestorMaskLayers({}); + ResetLayerStateForRecycling(aLayer); aLayer->ClearExtraDumpInfo(); PaintedDisplayItemLayerUserData* data = @@ -5230,8 +5238,7 @@ FrameLayerBuilder::BuildContainerLayerFor(nsDisplayListBuilder* aBuilder, NS_ASSERTION(oldLayer->GetType() == Layer::TYPE_CONTAINER, "Wrong layer type"); containerLayer = static_cast(oldLayer); - containerLayer->SetMaskLayer(nullptr); - containerLayer->SetAncestorMaskLayers({}); + ResetLayerStateForRecycling(containerLayer); } } } From 63753c933bf1f3138c9cbc20102a9f8bcb64882b Mon Sep 17 00:00:00 2001 From: Botond Ballo Date: Fri, 8 Jul 2016 17:24:48 -0400 Subject: [PATCH 45/63] Bug 1285619 - Call ResetLayerStateForRecycling() when recycling an OwnLayer. r=mstange MozReview-Commit-ID: 66J6wBcrYDQ --HG-- extra : rebase_source : 6ce3db9d7711a833b6385f531548f660fa6997ca --- layout/base/FrameLayerBuilder.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/layout/base/FrameLayerBuilder.cpp b/layout/base/FrameLayerBuilder.cpp index 7f1199b2a538..f21b294d8e5a 100644 --- a/layout/base/FrameLayerBuilder.cpp +++ b/layout/base/FrameLayerBuilder.cpp @@ -5424,7 +5424,7 @@ FrameLayerBuilder::GetLeafLayerFor(nsDisplayListBuilder* aBuilder, // layer rendering. return nullptr; } - layer->SetMaskLayer(nullptr); + ResetLayerStateForRecycling(layer); return layer; } From 662cc63aec65693a4ff32556c8590c970970a8a4 Mon Sep 17 00:00:00 2001 From: Botond Ballo Date: Fri, 8 Jul 2016 17:25:09 -0400 Subject: [PATCH 46/63] Bug 1285619 - Call ResetLayerStateForRecycling() when recycling an image or color layer. 
r=mstange MozReview-Commit-ID: GD4NRCoJXWf --HG-- extra : rebase_source : 72d5f05d209e75335c0c2b3f2a7fde1942d553e7 --- layout/base/FrameLayerBuilder.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/layout/base/FrameLayerBuilder.cpp b/layout/base/FrameLayerBuilder.cpp index f21b294d8e5a..94d72da2a57e 100644 --- a/layout/base/FrameLayerBuilder.cpp +++ b/layout/base/FrameLayerBuilder.cpp @@ -2066,7 +2066,7 @@ ContainerState::CreateOrRecycleColorLayer(PaintedLayer *aPainted) static_cast(aPainted->GetUserData(&gPaintedDisplayItemLayerUserData)); RefPtr layer = data->mColorLayer; if (layer) { - layer->SetMaskLayer(nullptr); + ResetLayerStateForRecycling(layer); layer->ClearExtraDumpInfo(); } else { // Create a new layer @@ -2090,7 +2090,7 @@ ContainerState::CreateOrRecycleImageLayer(PaintedLayer *aPainted) static_cast(aPainted->GetUserData(&gPaintedDisplayItemLayerUserData)); RefPtr layer = data->mImageLayer; if (layer) { - layer->SetMaskLayer(nullptr); + ResetLayerStateForRecycling(layer); layer->ClearExtraDumpInfo(); } else { // Create a new layer From 694dff250f079112b134600318e9f0002dec6e77 Mon Sep 17 00:00:00 2001 From: JW Wang Date: Wed, 13 Jul 2016 16:35:37 +0800 Subject: [PATCH 47/63] Bug 1289004. Part 1 - Constify and devirtualize some functions. r=cpearce. MozReview-Commit-ID: 8f14ekpinCR --HG-- extra : rebase_source : ca1d62df789639817e4c7da01919783038db22f0 extra : source : 5dc15cac9b631fd10b03902b3ba627fd763a49aa --- dom/media/MediaDecoder.cpp | 2 +- dom/media/MediaDecoder.h | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dom/media/MediaDecoder.cpp b/dom/media/MediaDecoder.cpp index 41b5b7570860..b82fd8136e2d 100644 --- a/dom/media/MediaDecoder.cpp +++ b/dom/media/MediaDecoder.cpp @@ -482,7 +482,7 @@ MediaDecoder::SetInfinite(bool aInfinite) } bool -MediaDecoder::IsInfinite() +MediaDecoder::IsInfinite() const { MOZ_ASSERT(NS_IsMainThread()); return mInfiniteStream; diff --git a/dom/media/MediaDecoder.h b/dom/media/MediaDecoder.h index 4d88a1d93022..c144765e40ca 100644 --- a/dom/media/MediaDecoder.h +++ b/dom/media/MediaDecoder.h @@ -220,7 +220,7 @@ public: virtual double GetDuration(); // Return true if the stream is infinite (see SetInfinite). - virtual bool IsInfinite(); + bool IsInfinite() const; // Called by MediaResource when some data has been received. // Call on the main thread only. @@ -232,12 +232,12 @@ public: // Return true if we are currently seeking in the media resource. // Call on the main thread only. - virtual bool IsSeeking() const; + bool IsSeeking() const; // Return true if the decoder has reached the end of playback or the decoder // has shutdown. // Call on the main thread only. - virtual bool IsEndedOrShutdown() const; + bool IsEndedOrShutdown() const; // Return true if the MediaDecoderOwner's error attribute is not null. // If the MediaDecoder is shutting down, OwnerHasError will return true. From 12fd7814cd7ecc22568c31716afe6bca126b4c76 Mon Sep 17 00:00:00 2001 From: JW Wang Date: Wed, 13 Jul 2016 16:45:30 +0800 Subject: [PATCH 48/63] Bug 1289004. Part 2 - Add MediaDecoder::IsShutdown(). 
r=cpearce MozReview-Commit-ID: 50muOKcUqYi --HG-- extra : rebase_source : d97e968c980dcc146824bb6703747f23b0a5b651 extra : source : 2ca76a5cecbb9c6ef107794eab32bb13633880a2 --- dom/media/MediaDecoder.cpp | 71 ++++++++++++++----------- dom/media/MediaDecoder.h | 4 +- dom/media/omx/MediaOmxCommonDecoder.cpp | 2 +- 3 files changed, 43 insertions(+), 34 deletions(-) diff --git a/dom/media/MediaDecoder.cpp b/dom/media/MediaDecoder.cpp index b82fd8136e2d..87221cd96efb 100644 --- a/dom/media/MediaDecoder.cpp +++ b/dom/media/MediaDecoder.cpp @@ -293,7 +293,7 @@ MediaDecoder::NotifyOwnerActivityChanged(bool aIsVisible) { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) { + if (IsShutdown()) { return; } @@ -323,7 +323,7 @@ MediaDecoder::UpdateDormantState(bool aDormantTimeout, bool aActivity) { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown || + if (IsShutdown() || !mDecoderStateMachine || mPlayState == PLAY_STATE_SHUTDOWN || !mOwner->GetVideoFrameContainer() || @@ -395,7 +395,7 @@ MediaDecoder::StartDormantTimer() } if (mIsHeuristicDormant || - mShuttingDown || + IsShutdown() || mIsVisible || (mPlayState != PLAY_STATE_PAUSED && !IsEnded())) @@ -476,7 +476,7 @@ void MediaDecoder::SetInfinite(bool aInfinite) { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); mInfiniteStream = aInfinite; DurationChanged(); } @@ -607,7 +607,7 @@ MediaDecoder::Shutdown() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) { + if (IsShutdown()) { return; } @@ -807,7 +807,7 @@ nsresult MediaDecoder::Seek(double aTime, SeekTarget::Type aSeekType, dom::Promise* aPromise /*=nullptr*/) { MOZ_ASSERT(NS_IsMainThread()); - NS_ENSURE_TRUE(!mShuttingDown, NS_ERROR_FAILURE); + NS_ENSURE_TRUE(!IsShutdown(), NS_ERROR_FAILURE); UpdateDormantState(false /* aDormantTimeout */, true /* aActivity */); @@ -910,7 +910,7 @@ MediaDecoder::MetadataLoaded(nsAutoPtr aInfo, MediaDecoderEventVisibility aEventVisibility) { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); DECODER_LOG("MetadataLoaded, channels=%u rate=%u hasAudio=%d hasVideo=%d", aInfo->mAudio.mChannels, aInfo->mAudio.mRate, @@ -983,7 +983,7 @@ MediaDecoder::FirstFrameLoaded(nsAutoPtr aInfo, MediaDecoderEventVisibility aEventVisibility) { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); DECODER_LOG("FirstFrameLoaded, channels=%u rate=%u hasAudio=%d hasVideo=%d mPlayState=%s mIsDormant=%d", aInfo->mAudio.mChannels, aInfo->mAudio.mRate, @@ -1016,7 +1016,7 @@ nsresult MediaDecoder::FinishDecoderSetup(MediaResource* aResource) { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); HTMLMediaElement* element = mOwner->GetMediaElement(); NS_ENSURE_TRUE(element, NS_ERROR_FAILURE); element->FinishDecoderSetup(this, aResource); @@ -1027,7 +1027,7 @@ void MediaDecoder::ResetConnectionState() { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); // Notify the media element that connection gets lost. 
mOwner->ResetConnectionState(); @@ -1042,7 +1042,7 @@ void MediaDecoder::NetworkError() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) + if (IsShutdown()) return; mOwner->NetworkError(); @@ -1053,7 +1053,7 @@ void MediaDecoder::DecodeError() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) + if (IsShutdown()) return; mOwner->DecodeError(); @@ -1085,7 +1085,7 @@ bool MediaDecoder::OwnerHasError() const { MOZ_ASSERT(NS_IsMainThread()); - return mShuttingDown || mOwner->HasError(); + return IsShutdown() || mOwner->HasError(); } class MediaElementGMPCrashHelper : public GMPCrashHelper @@ -1123,12 +1123,19 @@ MediaDecoder::IsEnded() const return mPlayState == PLAY_STATE_ENDED; } +bool +MediaDecoder::IsShutdown() const +{ + MOZ_ASSERT(NS_IsMainThread()); + return mShuttingDown; +} + void MediaDecoder::PlaybackEnded() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown || + if (IsShutdown() || mLogicallySeeking || mPlayState == PLAY_STATE_LOADING) { return; @@ -1205,7 +1212,7 @@ void MediaDecoder::NotifySuspendedStatusChanged() { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); if (mResource) { bool suspended = mResource->IsSuspendedByCache(); mOwner->NotifySuspendedByCache(suspended); @@ -1216,7 +1223,7 @@ void MediaDecoder::NotifyBytesDownloaded() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) { + if (IsShutdown()) { return; } UpdatePlaybackRate(); @@ -1227,7 +1234,7 @@ void MediaDecoder::NotifyDownloadEnded(nsresult aStatus) { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); DECODER_LOG("NotifyDownloadEnded, status=%x", aStatus); @@ -1253,7 +1260,7 @@ void MediaDecoder::NotifyPrincipalChanged() { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); nsCOMPtr newPrincipal = GetCurrentPrincipal(); mMediaPrincipalHandle = MakePrincipalHandle(newPrincipal); mOwner->NotifyDecoderPrincipalChanged(); @@ -1263,7 +1270,7 @@ void MediaDecoder::NotifyBytesConsumed(int64_t aBytes, int64_t aOffset) { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); if (mIgnoreProgressData) { return; @@ -1282,7 +1289,7 @@ MediaDecoder::OnSeekResolved(SeekResolveValue aVal) MOZ_ASSERT(NS_IsMainThread()); mSeekRequest.Complete(); - if (mShuttingDown) + if (IsShutdown()) return; bool fireEnded = false; @@ -1322,7 +1329,7 @@ void MediaDecoder::SeekingStarted(MediaDecoderEventVisibility aEventVisibility) { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) + if (IsShutdown()) return; if (aEventVisibility != MediaDecoderEventVisibility::Suppressed) { @@ -1361,7 +1368,7 @@ void MediaDecoder::UpdateLogicalPositionInternal(MediaDecoderEventVisibility aEventVisibility) { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) { + if (IsShutdown()) { return; } @@ -1386,7 +1393,7 @@ MediaDecoder::DurationChanged() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) { + if (IsShutdown()) { return; } @@ -1666,7 +1673,7 @@ MediaDecoder::NotifyDataArrived() { MOZ_ASSERT(NS_IsMainThread()); // Don't publish events since task queues might be shutting down. - if (mShuttingDown) { + if (IsShutdown()) { return; } @@ -1684,7 +1691,7 @@ void MediaDecoder::FireTimeUpdate() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) { + if (IsShutdown()) { return; } mOwner->FireTimeUpdate(true); @@ -1864,7 +1871,7 @@ MediaDecoder::GetOwner() { MOZ_ASSERT(NS_IsMainThread()); // mOwner is valid until shutdown. - return !mShuttingDown ? mOwner : nullptr; + return !IsShutdown() ? 
mOwner : nullptr; } void @@ -1872,7 +1879,7 @@ MediaDecoder::ConstructMediaTracks() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown || mMediaTracksConstructed || !mInfo) { + if (IsShutdown() || mMediaTracksConstructed || !mInfo) { return; } @@ -1908,7 +1915,7 @@ MediaDecoder::RemoveMediaTracks() { MOZ_ASSERT(NS_IsMainThread()); - if (mShuttingDown) { + if (IsShutdown()) { return; } @@ -1949,16 +1956,16 @@ void MediaDecoder::DumpDebugInfo() { DUMP_LOG("metadata: channels=%u rate=%u hasAudio=%d hasVideo=%d, " - "state: mPlayState=%s mIsDormant=%d, mShuttingDown=%d", + "state: mPlayState=%s mIsDormant=%d, IsShutdown()=%d", mInfo ? mInfo->mAudio.mChannels : 0, mInfo ? mInfo->mAudio.mRate : 0, mInfo ? mInfo->HasAudio() : 0, mInfo ? mInfo->HasVideo() : 0, - PlayStateStr(), mIsDormant, mShuttingDown); + PlayStateStr(), mIsDormant, IsShutdown()); nsString str; GetMozDebugReaderData(str); DUMP_LOG("reader data:\n%s", NS_ConvertUTF16toUTF8(str).get()); - if (!mShuttingDown && GetStateMachine()) { + if (!IsShutdown() && GetStateMachine()) { GetStateMachine()->DumpDebugInfo(); } } @@ -1966,7 +1973,7 @@ MediaDecoder::DumpDebugInfo() void MediaDecoder::NotifyAudibleStateChanged() { - MOZ_ASSERT(!mShuttingDown); + MOZ_ASSERT(!IsShutdown()); mOwner->SetAudibleState(mIsAudioDataAudible); } diff --git a/dom/media/MediaDecoder.h b/dom/media/MediaDecoder.h index c144765e40ca..f70a4880ed82 100644 --- a/dom/media/MediaDecoder.h +++ b/dom/media/MediaDecoder.h @@ -493,7 +493,7 @@ private: void UpdateReadyState() { MOZ_ASSERT(NS_IsMainThread()); - if (!mShuttingDown) { + if (!IsShutdown()) { mOwner->UpdateReadyState(); } } @@ -528,6 +528,8 @@ protected: // Return true if the decoder has reached the end of playback bool IsEnded() const; + bool IsShutdown() const; + // Called by the state machine to notify the decoder that the duration // has changed. void DurationChanged(); diff --git a/dom/media/omx/MediaOmxCommonDecoder.cpp b/dom/media/omx/MediaOmxCommonDecoder.cpp index 8667e5fc96cb..8f01c6b74415 100644 --- a/dom/media/omx/MediaOmxCommonDecoder.cpp +++ b/dom/media/omx/MediaOmxCommonDecoder.cpp @@ -138,7 +138,7 @@ MediaOmxCommonDecoder::ResumeStateMachine() MOZ_ASSERT(NS_IsMainThread()); DECODER_LOG(LogLevel::Debug, ("%s current time %f", __PRETTY_FUNCTION__, mLogicalPosition)); - if (mShuttingDown) { + if (IsShutdown()) { return; } From 0b638406f0f537d1d903d251a689a4a18e9c590d Mon Sep 17 00:00:00 2001 From: JW Wang Date: Wed, 13 Jul 2016 16:48:27 +0800 Subject: [PATCH 49/63] Bug 1289004. Part 3 - Remove MediaDecoder::mShuttingDown. 
r=cpearce MozReview-Commit-ID: DoJ4UFuyz2h --HG-- extra : rebase_source : 531df3335548186338fc2eb5b5b4b0bb4cc6c101 extra : source : d4f4a98303c3ddc14986478aa44f137cce87af30 --- dom/media/MediaDecoder.cpp | 20 +++++++------------- dom/media/MediaDecoder.h | 6 ------ 2 files changed, 7 insertions(+), 19 deletions(-) diff --git a/dom/media/MediaDecoder.cpp b/dom/media/MediaDecoder.cpp index 87221cd96efb..7bc2ec15c52a 100644 --- a/dom/media/MediaDecoder.cpp +++ b/dom/media/MediaDecoder.cpp @@ -325,7 +325,6 @@ MediaDecoder::UpdateDormantState(bool aDormantTimeout, bool aActivity) if (IsShutdown() || !mDecoderStateMachine || - mPlayState == PLAY_STATE_SHUTDOWN || !mOwner->GetVideoFrameContainer() || (mOwner->GetMediaElement() && mOwner->GetMediaElement()->IsBeingDestroyed()) || !mDormantSupported) @@ -425,12 +424,13 @@ void MediaDecoder::Pause() { MOZ_ASSERT(NS_IsMainThread()); - if (mPlayState == PLAY_STATE_LOADING || - IsEnded()) { + if (IsShutdown()) { + return; + } + if (mPlayState == PLAY_STATE_LOADING || IsEnded()) { mNextState = PLAY_STATE_PAUSED; return; } - ChangeState(PLAY_STATE_PAUSED); } @@ -504,7 +504,6 @@ MediaDecoder::MediaDecoder(MediaDecoderOwner* aOwner) , mVideoFrameContainer(aOwner->GetVideoFrameContainer()) , mPlaybackStatistics(new MediaChannelStatistics()) , mPinnedForSeek(false) - , mShuttingDown(false) , mPausedForPlaybackRateNull(false) , mMinimizePreroll(false) , mMediaTracksConstructed(false) @@ -611,8 +610,6 @@ MediaDecoder::Shutdown() return; } - mShuttingDown = true; - // Unwatch all watch targets to prevent further notifications. mWatchManager.Shutdown(); @@ -1078,7 +1075,7 @@ bool MediaDecoder::IsEndedOrShutdown() const { MOZ_ASSERT(NS_IsMainThread()); - return IsEnded() || mPlayState == PLAY_STATE_SHUTDOWN; + return IsEnded() || IsShutdown(); } bool @@ -1127,7 +1124,7 @@ bool MediaDecoder::IsShutdown() const { MOZ_ASSERT(NS_IsMainThread()); - return mShuttingDown; + return mPlayState == PLAY_STATE_SHUTDOWN; } void @@ -1341,15 +1338,12 @@ void MediaDecoder::ChangeState(PlayState aState) { MOZ_ASSERT(NS_IsMainThread()); + MOZ_ASSERT(!IsShutdown(), "SHUTDOWN is the final state."); if (mNextState == aState) { mNextState = PLAY_STATE_PAUSED; } - if (mPlayState == PLAY_STATE_SHUTDOWN) { - return; - } - DECODER_LOG("ChangeState %s => %s", PlayStateStr(), ToPlayStateStr(aState)); mPlayState = aState; diff --git a/dom/media/MediaDecoder.h b/dom/media/MediaDecoder.h index f70a4880ed82..637e884cd988 100644 --- a/dom/media/MediaDecoder.h +++ b/dom/media/MediaDecoder.h @@ -684,12 +684,6 @@ protected: // while seeking. bool mPinnedForSeek; - // True if the decoder is being shutdown. At this point all events that - // are currently queued need to return immediately to prevent javascript - // being run that operates on the element and decoder during shutdown. - // Read/Write from the main thread only. - bool mShuttingDown; - // True if the playback is paused because the playback rate member is 0.0. bool mPausedForPlaybackRateNull; From 39b242ab371689f8b052716d6c4ec11b24900c3d Mon Sep 17 00:00:00 2001 From: Boris Chiou Date: Wed, 20 Jul 2016 15:14:05 +0800 Subject: [PATCH 50/63] Bug 1272475 - Part 1: Clamp max/min float value in the parser of CSS Transform function. r=heycam To avoid calculate +/-infinite function value, we clamp it in the parser level. 
Also, we use EnsureNotNan while calculating the interpolation for translate function, so it's also better to do EnsureNotNan before call SetFloatValue() while calculating the interpolation for rotate (AddCSSValueAngle) and scale functions. MozReview-Commit-ID: 1k19ytyNG1N --HG-- extra : rebase_source : 73f152b52e067d52a1925215ad78a4c5cc736fd7 --- layout/style/StyleAnimationValue.cpp | 16 +++++++++------- layout/style/nsCSSParser.cpp | 10 ++++++++++ layout/style/nsCSSValue.cpp | 4 ++-- layout/style/nsCSSValue.h | 2 ++ 4 files changed, 23 insertions(+), 9 deletions(-) diff --git a/layout/style/StyleAnimationValue.cpp b/layout/style/StyleAnimationValue.cpp index a576d987ded4..65aa0eded1ba 100644 --- a/layout/style/StyleAnimationValue.cpp +++ b/layout/style/StyleAnimationValue.cpp @@ -1082,13 +1082,15 @@ AddCSSValueAngle(double aCoeff1, const nsCSSValue &aValue1, { if (aValue1.GetUnit() == aValue2.GetUnit()) { // To avoid floating point error, if the units match, maintain the unit. - aResult.SetFloatValue(aCoeff1 * aValue1.GetFloatValue() + - aCoeff2 * aValue2.GetFloatValue(), - aValue1.GetUnit()); + aResult.SetFloatValue( + EnsureNotNan(aCoeff1 * aValue1.GetFloatValue() + + aCoeff2 * aValue2.GetFloatValue()), + aValue1.GetUnit()); } else { - aResult.SetFloatValue(aCoeff1 * aValue1.GetAngleValueInRadians() + - aCoeff2 * aValue2.GetAngleValueInRadians(), - eCSSUnit_Radian); + aResult.SetFloatValue( + EnsureNotNan(aCoeff1 * aValue1.GetAngleValueInRadians() + + aCoeff2 * aValue2.GetAngleValueInRadians()), + eCSSUnit_Radian); } } @@ -1254,7 +1256,7 @@ AddTransformScale(double aCoeff1, const nsCSSValue &aValue1, float v1 = aValue1.GetFloatValue() - 1.0f, v2 = aValue2.GetFloatValue() - 1.0f; float result = v1 * aCoeff1 + v2 * aCoeff2; - aResult.SetFloatValue(result + 1.0f, eCSSUnit_Number); + aResult.SetFloatValue(EnsureNotNan(result + 1.0f), eCSSUnit_Number); } /* static */ already_AddRefed diff --git a/layout/style/nsCSSParser.cpp b/layout/style/nsCSSParser.cpp index 1720deec83b9..ef2dbe3e4dd8 100644 --- a/layout/style/nsCSSParser.cpp +++ b/layout/style/nsCSSParser.cpp @@ -15417,6 +15417,16 @@ CSSParserImpl::ParseFunctionInternals(const uint32_t aVariantMask[], break; } + if (nsCSSValue::IsFloatUnit(newValue.GetUnit())) { + // Clamp infinity or -infinity values to max float or -max float to avoid + // calculations with infinity. 
+ newValue.SetFloatValue( + mozilla::clamped(newValue.GetFloatValue(), + -std::numeric_limits::max(), + std::numeric_limits::max()), + newValue.GetUnit()); + } + aOutput.AppendElement(newValue); if (ExpectSymbol(',', true)) { diff --git a/layout/style/nsCSSValue.cpp b/layout/style/nsCSSValue.cpp index 7240ecb9aa25..7e2f68830454 100644 --- a/layout/style/nsCSSValue.cpp +++ b/layout/style/nsCSSValue.cpp @@ -413,9 +413,9 @@ void nsCSSValue::SetPercentValue(float aValue) void nsCSSValue::SetFloatValue(float aValue, nsCSSUnit aUnit) { - MOZ_ASSERT(eCSSUnit_Number <= aUnit, "not a float value"); + MOZ_ASSERT(IsFloatUnit(aUnit), "not a float value"); Reset(); - if (eCSSUnit_Number <= aUnit) { + if (IsFloatUnit(aUnit)) { mUnit = aUnit; mValue.mFloat = aValue; MOZ_ASSERT(!mozilla::IsNaN(mValue.mFloat)); diff --git a/layout/style/nsCSSValue.h b/layout/style/nsCSSValue.h index 4a94eb697285..383582331855 100644 --- a/layout/style/nsCSSValue.h +++ b/layout/style/nsCSSValue.h @@ -516,6 +516,8 @@ public: { return eCSSUnit_Point <= aUnit && aUnit <= eCSSUnit_Pixel; } bool IsPixelLengthUnit() const { return IsPixelLengthUnit(mUnit); } + static bool IsFloatUnit(nsCSSUnit aUnit) + { return eCSSUnit_Number <= aUnit; } bool IsAngularUnit() const { return eCSSUnit_Degree <= mUnit && mUnit <= eCSSUnit_Turn; } bool IsFrequencyUnit() const From a8ff634f849f2fe2e0d2eacf02043942c759c643 Mon Sep 17 00:00:00 2001 From: Boris Chiou Date: Wed, 20 Jul 2016 16:00:36 +0800 Subject: [PATCH 51/63] Bug 1272475 - Part 2: Add crashtests and mochitests. r=heycam MozReview-Commit-ID: A8CpiMLfa7Q --HG-- extra : rebase_source : 5a9b2911c41e4c08249a9d85e95a8bb2fa906493 --- dom/animation/test/crashtests/1272475-1.html | 20 +++++++ dom/animation/test/crashtests/1272475-2.html | 20 +++++++ dom/animation/test/crashtests/crashtests.list | 2 + dom/animation/test/mochitest.ini | 2 + .../test/mozilla/file_transform_limits.html | 55 +++++++++++++++++++ .../test/mozilla/test_transform_limits.html | 14 +++++ 6 files changed, 113 insertions(+) create mode 100644 dom/animation/test/crashtests/1272475-1.html create mode 100644 dom/animation/test/crashtests/1272475-2.html create mode 100644 dom/animation/test/mozilla/file_transform_limits.html create mode 100644 dom/animation/test/mozilla/test_transform_limits.html diff --git a/dom/animation/test/crashtests/1272475-1.html b/dom/animation/test/crashtests/1272475-1.html new file mode 100644 index 000000000000..e0b04953881d --- /dev/null +++ b/dom/animation/test/crashtests/1272475-1.html @@ -0,0 +1,20 @@ + + + + Bug 1272475 - scale function with an extreme large value + + + + + diff --git a/dom/animation/test/crashtests/1272475-2.html b/dom/animation/test/crashtests/1272475-2.html new file mode 100644 index 000000000000..da0e8605bdf5 --- /dev/null +++ b/dom/animation/test/crashtests/1272475-2.html @@ -0,0 +1,20 @@ + + + + Bug 1272475 - rotate function with an extreme large value + + + + + diff --git a/dom/animation/test/crashtests/crashtests.list b/dom/animation/test/crashtests/crashtests.list index 2526ac59fc9c..4e46b2cb776e 100644 --- a/dom/animation/test/crashtests/crashtests.list +++ b/dom/animation/test/crashtests/crashtests.list @@ -6,5 +6,7 @@ pref(dom.animations-api.core.enabled,true) load 1216842-3.html pref(dom.animations-api.core.enabled,true) load 1216842-4.html pref(dom.animations-api.core.enabled,true) load 1216842-5.html pref(dom.animations-api.core.enabled,true) load 1216842-6.html +pref(dom.animations-api.core.enabled,true) load 1272475-1.html 
+pref(dom.animations-api.core.enabled,true) load 1272475-2.html pref(dom.animations-api.core.enabled,true) load 1278485-1.html pref(dom.animations-api.core.enabled,true) load 1277272-1.html diff --git a/dom/animation/test/mochitest.ini b/dom/animation/test/mochitest.ini index 73ceec023180..35c0e9a2e4e1 100644 --- a/dom/animation/test/mochitest.ini +++ b/dom/animation/test/mochitest.ini @@ -41,6 +41,7 @@ support-files = mozilla/file_document-timeline-origin-time-range.html mozilla/file_hide_and_show.html mozilla/file_partial_keyframes.html + mozilla/file_transform_limits.html style/file_animation-seeking-with-current-time.html style/file_animation-seeking-with-start-time.html testcommon.js @@ -89,5 +90,6 @@ skip-if = (toolkit == 'gonk' && debug) [mozilla/test_hide_and_show.html] [mozilla/test_partial_keyframes.html] [mozilla/test_set-easing.html] +[mozilla/test_transform_limits.html] [style/test_animation-seeking-with-current-time.html] [style/test_animation-seeking-with-start-time.html] diff --git a/dom/animation/test/mozilla/file_transform_limits.html b/dom/animation/test/mozilla/file_transform_limits.html new file mode 100644 index 000000000000..d4c813c67d7a --- /dev/null +++ b/dom/animation/test/mozilla/file_transform_limits.html @@ -0,0 +1,55 @@ + + + + + + diff --git a/dom/animation/test/mozilla/test_transform_limits.html b/dom/animation/test/mozilla/test_transform_limits.html new file mode 100644 index 000000000000..6c9b5e4fa992 --- /dev/null +++ b/dom/animation/test/mozilla/test_transform_limits.html @@ -0,0 +1,14 @@ + + + + +
+ From 8f657367f9218b2cf50775a1f8ac5c88a78ca3c4 Mon Sep 17 00:00:00 2001 From: Tim Nguyen Date: Mon, 25 Jul 2016 18:13:40 +0200 Subject: [PATCH 52/63] Bug 1288888 - Fix colouring of filter button. r=bgrins MozReview-Commit-ID: EdBAsFB2R6d --- devtools/client/themes/images/filter.svg | 15 ++++++++++++--- devtools/client/themes/toolbars.css | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/devtools/client/themes/images/filter.svg b/devtools/client/themes/images/filter.svg index 015953682f42..90bc6165f315 100644 --- a/devtools/client/themes/images/filter.svg +++ b/devtools/client/themes/images/filter.svg @@ -1,7 +1,16 @@ - - - + + + + + + + diff --git a/devtools/client/themes/toolbars.css b/devtools/client/themes/toolbars.css index 2514b2bee294..bc221521b65d 100644 --- a/devtools/client/themes/toolbars.css +++ b/devtools/client/themes/toolbars.css @@ -364,7 +364,7 @@ } .devtools-filterinput { - background-image: var(--filter-image); + background-image: url(images/filter.svg#filterinput); } .devtools-searchinput:-moz-locale-dir(rtl), From 7233b6225716b9f27e73b81fb222861dd11aa2c9 Mon Sep 17 00:00:00 2001 From: Andrzej Hunt Date: Fri, 22 Jul 2016 16:16:41 -0700 Subject: [PATCH 53/63] Bug 1288103 - Add experimental MOZ_ANDROID_ACTIVITY_STREAM build flag r=chmanchester This will be used to enable the activity stream panel in place of the HomePager. We are likely to migrate this to a switchboard flag in future once the new panel becomes shippable (we are still investigating other distribution mechanisms, so it is entirely possible this will completely change in future). MozReview-Commit-ID: I9VSliO0IXE --HG-- extra : rebase_source : 5c6578e41d7bc4849a7aa4a74c4be6cebc966231 --- mobile/android/base/AppConstants.java.in | 7 +++++++ mobile/android/base/moz.build | 3 ++- mobile/android/moz.configure | 7 +++++++ 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/mobile/android/base/AppConstants.java.in b/mobile/android/base/AppConstants.java.in index e3bcf17b4ce2..a9d2f3785bfe 100644 --- a/mobile/android/base/AppConstants.java.in +++ b/mobile/android/base/AppConstants.java.in @@ -364,4 +364,11 @@ public class AppConstants { // Do nothing. 
//#endif } + + public static final boolean MOZ_ANDROID_ACTIVITY_STREAM = +//#ifdef MOZ_ANDROID_ACTIVITY_STREAM + true; +//#else + false; +//#endif } diff --git a/mobile/android/base/moz.build b/mobile/android/base/moz.build index e4665e96f18d..50da9f9037bf 100644 --- a/mobile/android/base/moz.build +++ b/mobile/android/base/moz.build @@ -965,7 +965,8 @@ for var in ('MOZ_ANDROID_ANR_REPORTER', 'MOZ_LINKER_EXTRACT', 'MOZ_DEBUG', 'MOZ_ANDROID_DOWNLOADS_INTEGRATION', 'MOZ_INSTALL_TRACKING', 'MOZ_ANDROID_GCM', 'MOZ_ANDROID_EXCLUDE_FONTS', 'MOZ_LOCALE_SWITCHER', 'MOZ_ANDROID_BEAM', 'MOZ_ANDROID_DOWNLOAD_CONTENT_SERVICE', - 'MOZ_SWITCHBOARD', 'MOZ_ANDROID_CUSTOM_TABS'): + 'MOZ_SWITCHBOARD', 'MOZ_ANDROID_CUSTOM_TABS', + 'MOZ_ANDROID_ACTIVITY_STREAM'): if CONFIG[var]: DEFINES[var] = 1 diff --git a/mobile/android/moz.configure b/mobile/android/moz.configure index 452f49c3b928..23e6f971fa6d 100644 --- a/mobile/android/moz.configure +++ b/mobile/android/moz.configure @@ -48,6 +48,13 @@ project_flag('MOZ_SWITCHBOARD', help='Include Switchboard A/B framework on Android', default=True) +option(env='MOZ_ANDROID_ACTIVITY_STREAM', + help='Enable Activity Stream on Android (replacing the default HomePager)', + default=False) + +set_config('MOZ_ANDROID_ACTIVITY_STREAM', + depends_if('MOZ_ANDROID_ACTIVITY_STREAM')(lambda _: True)) + option('--disable-android-apz', env='MOZ_ANDROID_APZ', help='Disable the C++ async pan/zoom code and use the Java version instead') From cccfcddf71740b771afb25658144c243ee70466f Mon Sep 17 00:00:00 2001 From: Andrzej Hunt Date: Thu, 21 Jul 2016 11:31:57 -0700 Subject: [PATCH 54/63] Bug 1288124 - Add basic/blank Activity Stream panel and show if flag is enabled r=jonalmeida,sebastian MozReview-Commit-ID: JqbOywOFsoF --HG-- extra : rebase_source : 10e51f16996c347872a8319861fe13de5368ca05 --- .../java/org/mozilla/gecko/BrowserApp.java | 46 ++++++++----- .../home/activitystream/ActivityStream.java | 69 +++++++++++++++++++ mobile/android/base/moz.build | 1 + .../base/resources/layout/activity_stream.xml | 16 +++++ .../base/resources/layout/gecko_app.xml | 5 ++ 5 files changed, 118 insertions(+), 19 deletions(-) create mode 100644 mobile/android/base/java/org/mozilla/gecko/home/activitystream/ActivityStream.java create mode 100644 mobile/android/base/resources/layout/activity_stream.xml diff --git a/mobile/android/base/java/org/mozilla/gecko/BrowserApp.java b/mobile/android/base/java/org/mozilla/gecko/BrowserApp.java index 65385f7c3379..236dcfb4a0b0 100644 --- a/mobile/android/base/java/org/mozilla/gecko/BrowserApp.java +++ b/mobile/android/base/java/org/mozilla/gecko/BrowserApp.java @@ -2691,29 +2691,37 @@ public class BrowserApp extends GeckoApp } if (mHomeScreen == null) { - final ViewStub homePagerStub = (ViewStub) findViewById(R.id.home_pager_stub); - mHomeScreen = (HomePager) homePagerStub.inflate(); + if (AppConstants.MOZ_ANDROID_ACTIVITY_STREAM) { + final ViewStub asStub = (ViewStub) findViewById(R.id.activity_stream_stub); + mHomeScreen = (HomeScreen) asStub.inflate(); + } else { + final ViewStub homePagerStub = (ViewStub) findViewById(R.id.home_pager_stub); + mHomeScreen = (HomeScreen) homePagerStub.inflate(); - mHomeScreen.setOnPanelChangeListener(new HomeScreen.OnPanelChangeListener() { - @Override - public void onPanelSelected(String panelId) { - final Tab currentTab = Tabs.getInstance().getSelectedTab(); - if (currentTab != null) { - currentTab.setMostRecentHomePanel(panelId); + // For now these listeners are HomePager specific. 
In future we might want + // to have a more abstracted data storage, with one Bundle containing all + // relevant restore data. + mHomeScreen.setOnPanelChangeListener(new HomeScreen.OnPanelChangeListener() { + @Override + public void onPanelSelected(String panelId) { + final Tab currentTab = Tabs.getInstance().getSelectedTab(); + if (currentTab != null) { + currentTab.setMostRecentHomePanel(panelId); + } } - } - }); + }); - // Set this listener to persist restore data (via the Tab) every time panel state changes. - mHomeScreen.setPanelStateChangeListener(new HomeFragment.PanelStateChangeListener() { - @Override - public void onStateChanged(Bundle bundle) { - final Tab currentTab = Tabs.getInstance().getSelectedTab(); - if (currentTab != null) { - currentTab.setMostRecentHomePanelData(bundle); + // Set this listener to persist restore data (via the Tab) every time panel state changes. + mHomeScreen.setPanelStateChangeListener(new HomeFragment.PanelStateChangeListener() { + @Override + public void onStateChanged(Bundle bundle) { + final Tab currentTab = Tabs.getInstance().getSelectedTab(); + if (currentTab != null) { + currentTab.setMostRecentHomePanelData(bundle); + } } - } - }); + }); + } // Don't show the banner in guest mode. if (!Restrictions.isUserRestricted()) { diff --git a/mobile/android/base/java/org/mozilla/gecko/home/activitystream/ActivityStream.java b/mobile/android/base/java/org/mozilla/gecko/home/activitystream/ActivityStream.java new file mode 100644 index 000000000000..dd856f6f82a8 --- /dev/null +++ b/mobile/android/base/java/org/mozilla/gecko/home/activitystream/ActivityStream.java @@ -0,0 +1,69 @@ +/* -*- Mode: Java; c-basic-offset: 4; tab-width: 20; indent-tabs-mode: nil; -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + package org.mozilla.gecko.home.activitystream; + +import android.content.Context; +import android.os.Bundle; +import android.support.v4.app.FragmentManager; +import android.support.v4.app.LoaderManager; +import android.util.AttributeSet; +import android.widget.FrameLayout; +import android.widget.LinearLayout; + +import org.mozilla.gecko.animation.PropertyAnimator; +import org.mozilla.gecko.home.HomeBanner; +import org.mozilla.gecko.home.HomeFragment; +import org.mozilla.gecko.home.HomeScreen; + +public class ActivityStream extends FrameLayout implements HomeScreen { + + public ActivityStream(Context context, AttributeSet attrs) { + super(context, attrs); + } + + @Override + public boolean isVisible() { + // This is dependent on the loading state - currently we're a dumb panel so we're always + // "visible" + return true; + } + + @Override + public void onToolbarFocusChange(boolean hasFocus) { + // We don't care: this is HomePager specific + } + + @Override + public void showPanel(String panelId, Bundle restoreData) { + // We could use this to restore Panel data. In practice this isn't likely to be relevant for + // AS and can be ignore for now. + } + + @Override + public void setOnPanelChangeListener(OnPanelChangeListener listener) { + // As with showPanel: not relevant yet, could be used for persistence (scroll position?) + } + + @Override + public void setPanelStateChangeListener(HomeFragment.PanelStateChangeListener listener) { + // See setOnPanelChangeListener + } + + @Override + public void setBanner(HomeBanner banner) { + // TODO: we should probably implement this to show snippets. 
+ } + + @Override + public void load(LoaderManager lm, FragmentManager fm, String panelId, Bundle restoreData, + PropertyAnimator animator) { + // Signal to load data from storage as needed, compare with HomePager + } + + @Override + public void unload() { + // Signal to clear data that has been loaded, compare with HomePager + } +} diff --git a/mobile/android/base/moz.build b/mobile/android/base/moz.build index 50da9f9037bf..e82b322a8cf0 100644 --- a/mobile/android/base/moz.build +++ b/mobile/android/base/moz.build @@ -401,6 +401,7 @@ gbjar.sources += ['java/org/mozilla/gecko/' + x for x in [ 'health/HealthRecorder.java', 'health/SessionInformation.java', 'health/StubbedHealthRecorder.java', + 'home/activitystream/ActivityStream.java', 'home/BookmarkFolderView.java', 'home/BookmarkScreenshotRow.java', 'home/BookmarksListAdapter.java', diff --git a/mobile/android/base/resources/layout/activity_stream.xml b/mobile/android/base/resources/layout/activity_stream.xml new file mode 100644 index 000000000000..8c493847f9f1 --- /dev/null +++ b/mobile/android/base/resources/layout/activity_stream.xml @@ -0,0 +1,16 @@ + + + + + \ No newline at end of file diff --git a/mobile/android/base/resources/layout/gecko_app.xml b/mobile/android/base/resources/layout/gecko_app.xml index bfd49b9245d7..cae87e565364 100644 --- a/mobile/android/base/resources/layout/gecko_app.xml +++ b/mobile/android/base/resources/layout/gecko_app.xml @@ -66,6 +66,11 @@ android:layout_width="match_parent" android:layout_height="match_parent"/> + + Date: Fri, 22 Jul 2016 16:02:56 -0400 Subject: [PATCH 55/63] Bug 1284726 - move MediaStreamTrackEvent.webidl back out of MOZ_WEBRTC (used in gUM). r=jesup MozReview-Commit-ID: Hx2RHxxgIQR --HG-- extra : rebase_source : a5ed1c403bd92650fa4ae4f55640e97936121499 --- dom/webidl/moz.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dom/webidl/moz.build b/dom/webidl/moz.build index 38309629f349..4ddfbd5414aa 100644 --- a/dom/webidl/moz.build +++ b/dom/webidl/moz.build @@ -794,6 +794,7 @@ GENERATED_EVENTS_WEBIDL_FILES = [ 'IccChangeEvent.webidl', 'ImageCaptureErrorEvent.webidl', 'MediaStreamEvent.webidl', + 'MediaStreamTrackEvent.webidl', 'MozCellBroadcastEvent.webidl', 'MozClirModeEvent.webidl', 'MozContactChangeEvent.webidl', @@ -835,7 +836,6 @@ GENERATED_EVENTS_WEBIDL_FILES = [ if CONFIG['MOZ_WEBRTC']: GENERATED_EVENTS_WEBIDL_FILES += [ - 'MediaStreamTrackEvent.webidl', 'RTCDataChannelEvent.webidl', 'RTCPeerConnectionIceEvent.webidl', 'RTCTrackEvent.webidl', From b301ca80e65ec349ad345dc274627c08b56a654c Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Sat, 23 Jul 2016 22:44:47 -0700 Subject: [PATCH 56/63] Bug 1278261 - drop api-9 mozilla-release release support, remove all api-9 and api-11 references, r=Callek this can be uplifted through 48 mozilla-release MozReview-Commit-ID: KncTJ8hAgfO --HG-- extra : rebase_source : 18dc6c107a138317f95b433e33bf3081166c4478 --- .../releng_sub_android_configs/64_api_11.py | 8 -- .../64_api_11_debug.py | 9 -- .../releng_sub_android_configs/64_api_9.py | 8 -- .../64_api_9_debug.py | 9 -- .../configs/merge_day/beta_to_release.py | 1 - .../configs/merge_day/central_to_aurora.py | 1 - .../single_locale/ash_android-api-9.py | 97 ------------------ .../mozilla-aurora_android-api-9.py | 97 ------------------ .../mozilla-central_android-api-9.py | 97 ------------------ .../release_mozilla-beta_android_api_9.py | 95 ------------------ .../release_mozilla-release_android_api_9.py | 95 ------------------ 
...ging_release_mozilla-beta_android_api_9.py | 95 ------------------ ...g_release_mozilla-release_android_api_9.py | 98 ------------------- .../mozharness/mozilla/building/buildbase.py | 5 - 14 files changed, 715 deletions(-) delete mode 100644 testing/mozharness/configs/builds/releng_sub_android_configs/64_api_11.py delete mode 100644 testing/mozharness/configs/builds/releng_sub_android_configs/64_api_11_debug.py delete mode 100644 testing/mozharness/configs/builds/releng_sub_android_configs/64_api_9.py delete mode 100644 testing/mozharness/configs/builds/releng_sub_android_configs/64_api_9_debug.py delete mode 100644 testing/mozharness/configs/single_locale/ash_android-api-9.py delete mode 100644 testing/mozharness/configs/single_locale/mozilla-aurora_android-api-9.py delete mode 100644 testing/mozharness/configs/single_locale/mozilla-central_android-api-9.py delete mode 100644 testing/mozharness/configs/single_locale/release_mozilla-beta_android_api_9.py delete mode 100644 testing/mozharness/configs/single_locale/release_mozilla-release_android_api_9.py delete mode 100644 testing/mozharness/configs/single_locale/staging_release_mozilla-beta_android_api_9.py delete mode 100644 testing/mozharness/configs/single_locale/staging_release_mozilla-release_android_api_9.py diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_11.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_11.py deleted file mode 100644 index ef990a4bca7e..000000000000 --- a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_11.py +++ /dev/null @@ -1,8 +0,0 @@ -config = { - 'base_name': 'Android armv7 API 11+ %(branch)s', - 'stage_platform': 'android-api-11', - 'build_type': 'api-11-opt', - 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-11/nightly', - 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android/releng.manifest', - 'multi_locale_config_platform': 'android', -} diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_11_debug.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_11_debug.py deleted file mode 100644 index c7d3ba0e08b0..000000000000 --- a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_11_debug.py +++ /dev/null @@ -1,9 +0,0 @@ -config = { - 'base_name': 'Android armv7 API 11+ %(branch)s debug', - 'stage_platform': 'android-api-11-debug', - 'build_type': 'api-11-debug', - 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-11/debug', - 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android/releng.manifest', - 'multi_locale_config_platform': 'android', - 'debug_build': True, -} diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_9.py b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_9.py deleted file mode 100644 index f8f542ae8208..000000000000 --- a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_9.py +++ /dev/null @@ -1,8 +0,0 @@ -config = { - 'base_name': 'Android armv7 API 9 %(branch)s', - 'stage_platform': 'android-api-9', - 'build_type': 'api-9-opt', - 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-9-10-constrained/nightly', - 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android/releng.manifest', - 'multi_locale_config_platform': 'android', -} diff --git a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_9_debug.py 
b/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_9_debug.py deleted file mode 100644 index b3fa11f76d47..000000000000 --- a/testing/mozharness/configs/builds/releng_sub_android_configs/64_api_9_debug.py +++ /dev/null @@ -1,9 +0,0 @@ -config = { - 'base_name': 'Android armv7 API 9 %(branch)s debug', - 'stage_platform': 'android-api-9-debug', - 'build_type': 'api-9-debug', - 'src_mozconfig': 'mobile/android/config/mozconfigs/android-api-9-10-constrained/debug', - 'tooltool_manifest_src': 'mobile/android/config/tooltool-manifests/android/releng.manifest', - 'multi_locale_config_platform': 'android', - 'debug_build': True, -} diff --git a/testing/mozharness/configs/merge_day/beta_to_release.py b/testing/mozharness/configs/merge_day/beta_to_release.py index bf21f4a79616..0fe65aca1b52 100644 --- a/testing/mozharness/configs/merge_day/beta_to_release.py +++ b/testing/mozharness/configs/merge_day/beta_to_release.py @@ -16,7 +16,6 @@ config = { "ac_add_options --with-branding=mobile/android/branding/beta", "ac_add_options --with-branding=mobile/android/branding/official") for d in ["mobile/android/config/mozconfigs/android-api-15/", - "mobile/android/config/mozconfigs/android-api-9-10-constrained/", "mobile/android/config/mozconfigs/android-x86/"] for f in ["debug", "nightly", "l10n-nightly", "l10n-release", "release"] ] + [ diff --git a/testing/mozharness/configs/merge_day/central_to_aurora.py b/testing/mozharness/configs/merge_day/central_to_aurora.py index 4bb90cb75795..c362361f0f6d 100644 --- a/testing/mozharness/configs/merge_day/central_to_aurora.py +++ b/testing/mozharness/configs/merge_day/central_to_aurora.py @@ -86,7 +86,6 @@ config = { 8, # Fennec aurora channel 10, # Firefox aurora channel 18, # MetroFirefox aurora channel - 106, # Fennec api-9 aurora channel ], "balrog_credentials_file": "oauth.txt", diff --git a/testing/mozharness/configs/single_locale/ash_android-api-9.py b/testing/mozharness/configs/single_locale/ash_android-api-9.py deleted file mode 100644 index 2a80cb700f4f..000000000000 --- a/testing/mozharness/configs/single_locale/ash_android-api-9.py +++ /dev/null @@ -1,97 +0,0 @@ -BRANCH = "ash" -MOZ_UPDATE_CHANNEL = "nightly" -MOZILLA_DIR = BRANCH -OBJDIR = "obj-l10n" -EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/nightly/latest-%s-android-api-9/en-US" % BRANCH -HG_SHARE_BASE_DIR = "/builds/hg-shared" - -config = { - "branch": BRANCH, - "log_name": "single_locale", - "objdir": OBJDIR, - "is_automation": True, - "buildbot_json_path": "buildprops.json", - "force_clobber": True, - "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber", - "locales_file": "%s/mobile/android/locales/all-locales" % MOZILLA_DIR, - "locales_dir": "mobile/android/locales", - "ignore_locales": ["en-US"], - "nightly_build": True, - 'balrog_credentials_file': 'oauth.txt', - "tools_repo": "https://hg.mozilla.org/build/tools", - "tooltool_config": { - "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest", - "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR, - }, - "exes": { - 'tooltool.py': '/builds/tooltool.py', - }, - "repos": [{ - "repo": "https://hg.mozilla.org/projects/ash", - "branch": "default", - "dest": MOZILLA_DIR, - }, { - "repo": "https://hg.mozilla.org/build/buildbot-configs", - "branch": "default", - "dest": "buildbot-configs" - }, { - "repo": "https://hg.mozilla.org/build/tools", - "branch": "default", - "dest": "tools" - }], - "hg_l10n_base": "https://hg.mozilla.org/l10n-central", - "hg_l10n_tag": "default", - 
'vcs_share_base': HG_SHARE_BASE_DIR, - - "l10n_dir": "l10n-central", - "repack_env": { - # so ugly, bug 951238 - "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64", - "MOZ_OBJDIR": OBJDIR, - "EN_US_BINARY_URL": EN_US_BINARY_URL, - "LOCALE_MERGEDIR": "%(abs_merge_dir)s/", - "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL, - }, - "upload_branch": "%s-android-api-9" % BRANCH, - "ssh_key_dir": "~/.ssh", - "merge_locales": True, - "mozilla_dir": MOZILLA_DIR, - "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-9-10-constrained/l10n-nightly" % MOZILLA_DIR, - "signature_verification_script": "tools/release/signing/verify-android-signature.sh", - "stage_product": "mobile", - "platform": "android", - "build_type": "api-9-opt", - - # Balrog - "build_target": "Android_arm-eabi-gcc3", - - # Mock - "mock_target": "mozilla-centos6-x86_64-android", - "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache', - 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General', - 'gtk2-devel', 'libnotify-devel', 'yasm', - 'alsa-lib-devel', 'libcurl-devel', - 'wireless-tools-devel', 'libX11-devel', - 'libXt-devel', 'mesa-libGL-devel', - 'gnome-vfs2-devel', 'GConf2-devel', 'wget', - 'mpfr', # required for system compiler - 'xorg-x11-font*', # fonts required for PGO - 'imake', # required for makedepend!?! - 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo - 'valgrind', 'dbus-x11', - 'pulseaudio-libs-devel', - 'gstreamer-devel', 'gstreamer-plugins-base-devel', - 'freetype-2.3.11-6.el6_1.8.x86_64', - 'freetype-devel-2.3.11-6.el6_1.8.x86_64', - 'java-1.7.0-openjdk-devel', - 'openssh-clients', - 'zlib-devel-1.2.3-27.el6.i686', - ], - "mock_files": [ - ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"), - ('/home/cltbld/.hgrc', '/builds/.hgrc'), - ('/builds/relengapi.tok', '/builds/relengapi.tok'), - ('/tools/tooltool.py', '/builds/tooltool.py'), - ('/usr/local/lib/hgext', '/usr/local/lib/hgext'), - ], -} diff --git a/testing/mozharness/configs/single_locale/mozilla-aurora_android-api-9.py b/testing/mozharness/configs/single_locale/mozilla-aurora_android-api-9.py deleted file mode 100644 index 1b7bf57dad9e..000000000000 --- a/testing/mozharness/configs/single_locale/mozilla-aurora_android-api-9.py +++ /dev/null @@ -1,97 +0,0 @@ -BRANCH = "mozilla-aurora" -MOZ_UPDATE_CHANNEL = "aurora" -MOZILLA_DIR = BRANCH -OBJDIR = "obj-l10n" -EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/nightly/latest-%s-android-api-9/en-US" % BRANCH -HG_SHARE_BASE_DIR = "/builds/hg-shared" - -config = { - "branch": BRANCH, - "log_name": "single_locale", - "objdir": OBJDIR, - "is_automation": True, - "buildbot_json_path": "buildprops.json", - "force_clobber": True, - "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber", - "locales_file": "%s/mobile/android/locales/all-locales" % MOZILLA_DIR, - "locales_dir": "mobile/android/locales", - "ignore_locales": ["en-US"], - "nightly_build": True, - 'balrog_credentials_file': 'oauth.txt', - "tools_repo": "https://hg.mozilla.org/build/tools", - "tooltool_config": { - "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest", - "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR, - }, - "exes": { - 'tooltool.py': '/builds/tooltool.py', - }, - "repos": [{ - "repo": "https://hg.mozilla.org/releases/mozilla-aurora", - "branch": "default", - "dest": MOZILLA_DIR, - }, { - "repo": 
"https://hg.mozilla.org/build/buildbot-configs", - "branch": "default", - "dest": "buildbot-configs" - }, { - "repo": "https://hg.mozilla.org/build/tools", - "branch": "default", - "dest": "tools" - }], - "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/%s" % BRANCH, - "hg_l10n_tag": "default", - 'vcs_share_base': HG_SHARE_BASE_DIR, - - "l10n_dir": MOZILLA_DIR, - "repack_env": { - # so ugly, bug 951238 - "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64", - "MOZ_OBJDIR": OBJDIR, - "EN_US_BINARY_URL": EN_US_BINARY_URL, - "LOCALE_MERGEDIR": "%(abs_merge_dir)s/", - "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL, - }, - "upload_branch": "%s-android-api-9" % BRANCH, - "ssh_key_dir": "~/.ssh", - "merge_locales": True, - "mozilla_dir": MOZILLA_DIR, - "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-9-10-constrained/l10n-nightly" % MOZILLA_DIR, - "signature_verification_script": "tools/release/signing/verify-android-signature.sh", - "stage_product": "mobile", - "platform": "android", - "build_type": "api-9-opt", - - # Balrog - "build_target": "Android_arm-eabi-gcc3", - - # Mock - "mock_target": "mozilla-centos6-x86_64-android", - "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache', - 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General', - 'gtk2-devel', 'libnotify-devel', 'yasm', - 'alsa-lib-devel', 'libcurl-devel', - 'wireless-tools-devel', 'libX11-devel', - 'libXt-devel', 'mesa-libGL-devel', - 'gnome-vfs2-devel', 'GConf2-devel', 'wget', - 'mpfr', # required for system compiler - 'xorg-x11-font*', # fonts required for PGO - 'imake', # required for makedepend!?! - 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo - 'valgrind', 'dbus-x11', - 'pulseaudio-libs-devel', - 'gstreamer-devel', 'gstreamer-plugins-base-devel', - 'freetype-2.3.11-6.el6_1.8.x86_64', - 'freetype-devel-2.3.11-6.el6_1.8.x86_64', - 'java-1.7.0-openjdk-devel', - 'openssh-clients', - 'zlib-devel-1.2.3-27.el6.i686', - ], - "mock_files": [ - ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"), - ('/home/cltbld/.hgrc', '/builds/.hgrc'), - ('/builds/relengapi.tok', '/builds/relengapi.tok'), - ('/tools/tooltool.py', '/builds/tooltool.py'), - ('/usr/local/lib/hgext', '/usr/local/lib/hgext'), - ], -} diff --git a/testing/mozharness/configs/single_locale/mozilla-central_android-api-9.py b/testing/mozharness/configs/single_locale/mozilla-central_android-api-9.py deleted file mode 100644 index 4dbfce76f254..000000000000 --- a/testing/mozharness/configs/single_locale/mozilla-central_android-api-9.py +++ /dev/null @@ -1,97 +0,0 @@ -BRANCH = "mozilla-central" -MOZ_UPDATE_CHANNEL = "nightly" -MOZILLA_DIR = BRANCH -OBJDIR = "obj-l10n" -EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/nightly/latest-%s-android-api-9/en-US" % BRANCH -HG_SHARE_BASE_DIR = "/builds/hg-shared" - -config = { - "branch": BRANCH, - "log_name": "single_locale", - "objdir": OBJDIR, - "is_automation": True, - "buildbot_json_path": "buildprops.json", - "force_clobber": True, - "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber", - "locales_file": "%s/mobile/android/locales/all-locales" % MOZILLA_DIR, - "locales_dir": "mobile/android/locales", - "ignore_locales": ["en-US"], - "nightly_build": True, - 'balrog_credentials_file': 'oauth.txt', - "tools_repo": "https://hg.mozilla.org/build/tools", - "tooltool_config": { - "manifest": 
"mobile/android/config/tooltool-manifests/android/releng.manifest", - "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR, - }, - "exes": { - 'tooltool.py': '/builds/tooltool.py', - }, - "repos": [{ - "repo": "https://hg.mozilla.org/mozilla-central", - "branch": "default", - "dest": MOZILLA_DIR, - }, { - "repo": "https://hg.mozilla.org/build/buildbot-configs", - "branch": "default", - "dest": "buildbot-configs" - }, { - "repo": "https://hg.mozilla.org/build/tools", - "branch": "default", - "dest": "tools" - }], - "hg_l10n_base": "https://hg.mozilla.org/l10n-central", - "hg_l10n_tag": "default", - 'vcs_share_base': HG_SHARE_BASE_DIR, - - "l10n_dir": "l10n-central", - "repack_env": { - # so ugly, bug 951238 - "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64", - "MOZ_OBJDIR": OBJDIR, - "EN_US_BINARY_URL": EN_US_BINARY_URL, - "LOCALE_MERGEDIR": "%(abs_merge_dir)s/", - "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL, - }, - "upload_branch": "%s-android-api-9" % BRANCH, - "ssh_key_dir": "~/.ssh", - "merge_locales": True, - "mozilla_dir": MOZILLA_DIR, - "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-9-10-constrained/l10n-nightly" % MOZILLA_DIR, - "signature_verification_script": "tools/release/signing/verify-android-signature.sh", - "stage_product": "mobile", - "platform": "android", - "build_type": "api-9-opt", - - # Balrog - "build_target": "Android_arm-eabi-gcc3", - - # Mock - "mock_target": "mozilla-centos6-x86_64-android", - "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache', - 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General', - 'gtk2-devel', 'libnotify-devel', 'yasm', - 'alsa-lib-devel', 'libcurl-devel', - 'wireless-tools-devel', 'libX11-devel', - 'libXt-devel', 'mesa-libGL-devel', - 'gnome-vfs2-devel', 'GConf2-devel', 'wget', - 'mpfr', # required for system compiler - 'xorg-x11-font*', # fonts required for PGO - 'imake', # required for makedepend!?! 
- 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo - 'valgrind', 'dbus-x11', - 'pulseaudio-libs-devel', - 'gstreamer-devel', 'gstreamer-plugins-base-devel', - 'freetype-2.3.11-6.el6_1.8.x86_64', - 'freetype-devel-2.3.11-6.el6_1.8.x86_64', - 'java-1.7.0-openjdk-devel', - 'openssh-clients', - 'zlib-devel-1.2.3-27.el6.i686', - ], - "mock_files": [ - ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"), - ('/home/cltbld/.hgrc', '/builds/.hgrc'), - ('/builds/relengapi.tok', '/builds/relengapi.tok'), - ('/tools/tooltool.py', '/builds/tooltool.py'), - ('/usr/local/lib/hgext', '/usr/local/lib/hgext'), - ], -} diff --git a/testing/mozharness/configs/single_locale/release_mozilla-beta_android_api_9.py b/testing/mozharness/configs/single_locale/release_mozilla-beta_android_api_9.py deleted file mode 100644 index 668b204a429c..000000000000 --- a/testing/mozharness/configs/single_locale/release_mozilla-beta_android_api_9.py +++ /dev/null @@ -1,95 +0,0 @@ -BRANCH = "mozilla-beta" -MOZ_UPDATE_CHANNEL = "beta" -MOZILLA_DIR = BRANCH -OBJDIR = "obj-l10n" -EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/candidates/%(version)s-candidates/build%(buildnum)d/android-api-9/en-US" -HG_SHARE_BASE_DIR = "/builds/hg-shared" - -config = { - "stage_product": "mobile", - "log_name": "single_locale", - "objdir": OBJDIR, - "is_automation": True, - "buildbot_json_path": "buildprops.json", - "force_clobber": True, - "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber", - "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-beta.json", - "locales_dir": "mobile/android/locales", - "locales_platform": "android", - "ignore_locales": ["en-US"], - "balrog_credentials_file": "oauth.txt", - "tools_repo": "https://hg.mozilla.org/build/tools", - "is_release": True, - "tooltool_config": { - "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest", - "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR, - }, - "exes": { - 'tooltool.py': '/builds/tooltool.py', - }, - "repos": [{ - "repo": "https://hg.mozilla.org/releases/mozilla-beta", - "branch": "default", - "dest": MOZILLA_DIR, - }, { - "repo": "https://hg.mozilla.org/build/buildbot-configs", - "branch": "default", - "dest": "buildbot-configs" - }, { - "repo": "https://hg.mozilla.org/build/tools", - "branch": "default", - "dest": "tools" - }], - "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/%s" % BRANCH, - "hg_l10n_tag": "default", - 'vcs_share_base': HG_SHARE_BASE_DIR, - "l10n_dir": MOZILLA_DIR, - - "release_config_file": "buildbot-configs/mozilla/release-fennec-mozilla-beta.py", - "repack_env": { - # so ugly, bug 951238 - "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64", - "MOZ_PKG_VERSION": "%(version)s", - "MOZ_OBJDIR": OBJDIR, - "LOCALE_MERGEDIR": "%(abs_merge_dir)s/", - "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL, - }, - "base_en_us_binary_url": EN_US_BINARY_URL, - "upload_branch": "%s-android-api-9" % BRANCH, - "ssh_key_dir": "~/.ssh", - "base_post_upload_cmd": "post_upload.py -p mobile -n %(buildnum)s -v %(version)s --builddir android-api-9/%(locale)s --release-to-mobile-candidates-dir --nightly-dir=candidates %(post_upload_extra)s", - "merge_locales": True, - "mozilla_dir": MOZILLA_DIR, - "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-9-10-constrained/l10n-release" % MOZILLA_DIR, - "signature_verification_script": "tools/release/signing/verify-android-signature.sh", - "key_alias": "release", - # Mock - 
"mock_target": "mozilla-centos6-x86_64-android", - "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache', - 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General', - 'gtk2-devel', 'libnotify-devel', 'yasm', - 'alsa-lib-devel', 'libcurl-devel', - 'wireless-tools-devel', 'libX11-devel', - 'libXt-devel', 'mesa-libGL-devel', - 'gnome-vfs2-devel', 'GConf2-devel', 'wget', - 'mpfr', # required for system compiler - 'xorg-x11-font*', # fonts required for PGO - 'imake', # required for makedepend!?! - 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo - 'valgrind', 'dbus-x11', - 'pulseaudio-libs-devel', - 'gstreamer-devel', 'gstreamer-plugins-base-devel', - 'freetype-2.3.11-6.el6_1.8.x86_64', - 'freetype-devel-2.3.11-6.el6_1.8.x86_64', - 'java-1.7.0-openjdk-devel', - 'openssh-clients', - 'zlib-devel-1.2.3-27.el6.i686', - ], - "mock_files": [ - ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"), - ('/home/cltbld/.hgrc', '/builds/.hgrc'), - ('/builds/relengapi.tok', '/builds/relengapi.tok'), - ('/tools/tooltool.py', '/builds/tooltool.py'), - ('/usr/local/lib/hgext', '/usr/local/lib/hgext'), - ], -} diff --git a/testing/mozharness/configs/single_locale/release_mozilla-release_android_api_9.py b/testing/mozharness/configs/single_locale/release_mozilla-release_android_api_9.py deleted file mode 100644 index 487bbcce1e31..000000000000 --- a/testing/mozharness/configs/single_locale/release_mozilla-release_android_api_9.py +++ /dev/null @@ -1,95 +0,0 @@ -BRANCH = "mozilla-release" -MOZ_UPDATE_CHANNEL = "release" -MOZILLA_DIR = BRANCH -OBJDIR = "obj-l10n" -EN_US_BINARY_URL = "http://archive.mozilla.org/pub/mobile/candidates/%(version)s-candidates/build%(buildnum)d/android-api-9/en-US" -HG_SHARE_BASE_DIR = "/builds/hg-shared" - -config = { - "stage_product": "mobile", - "log_name": "single_locale", - "objdir": OBJDIR, - "is_automation": True, - "buildbot_json_path": "buildprops.json", - "force_clobber": True, - "clobberer_url": "https://api.pub.build.mozilla.org/clobberer/lastclobber", - "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-release.json", - "locales_dir": "mobile/android/locales", - "locales_platform": "android", - "ignore_locales": ["en-US"], - "balrog_credentials_file": "oauth.txt", - "tools_repo": "https://hg.mozilla.org/build/tools", - "is_release": True, - "tooltool_config": { - "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest", - "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR, - }, - "exes": { - 'tooltool.py': '/builds/tooltool.py', - }, - "repos": [{ - "repo": "https://hg.mozilla.org/releases/mozilla-release", - "branch": "default", - "dest": MOZILLA_DIR, - }, { - "repo": "https://hg.mozilla.org/build/buildbot-configs", - "branch": "default", - "dest": "buildbot-configs" - }, { - "repo": "https://hg.mozilla.org/build/tools", - "branch": "default", - "dest": "tools" - }], - "hg_l10n_base": "https://hg.mozilla.org/releases/l10n/%s" % BRANCH, - "hg_l10n_tag": "default", - 'vcs_share_base': HG_SHARE_BASE_DIR, - "l10n_dir": MOZILLA_DIR, - - "release_config_file": "buildbot-configs/mozilla/release-fennec-mozilla-release.py", - "repack_env": { - # so ugly, bug 951238 - "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64", - "MOZ_PKG_VERSION": "%(version)s", - "MOZ_OBJDIR": OBJDIR, - "LOCALE_MERGEDIR": "%(abs_merge_dir)s/", - "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL, - }, - "base_en_us_binary_url": 
EN_US_BINARY_URL, - "upload_branch": "%s-android-api-9" % BRANCH, - "ssh_key_dir": "~/.ssh", - "base_post_upload_cmd": "post_upload.py -p mobile -n %(buildnum)s -v %(version)s --builddir android-api-9/%(locale)s --release-to-mobile-candidates-dir --nightly-dir=candidates %(post_upload_extra)s", - "merge_locales": True, - "mozilla_dir": MOZILLA_DIR, - "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-9-10-constrained/l10n-release" % MOZILLA_DIR, - "signature_verification_script": "tools/release/signing/verify-android-signature.sh", - "key_alias": "release", - # Mock - "mock_target": "mozilla-centos6-x86_64-android", - "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache', - 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General', - 'gtk2-devel', 'libnotify-devel', 'yasm', - 'alsa-lib-devel', 'libcurl-devel', - 'wireless-tools-devel', 'libX11-devel', - 'libXt-devel', 'mesa-libGL-devel', - 'gnome-vfs2-devel', 'GConf2-devel', 'wget', - 'mpfr', # required for system compiler - 'xorg-x11-font*', # fonts required for PGO - 'imake', # required for makedepend!?! - 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo - 'valgrind', 'dbus-x11', - 'pulseaudio-libs-devel', - 'gstreamer-devel', 'gstreamer-plugins-base-devel', - 'freetype-2.3.11-6.el6_1.8.x86_64', - 'freetype-devel-2.3.11-6.el6_1.8.x86_64', - 'java-1.7.0-openjdk-devel', - 'openssh-clients', - 'zlib-devel-1.2.3-27.el6.i686', - ], - "mock_files": [ - ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"), - ('/home/cltbld/.hgrc', '/builds/.hgrc'), - ('/builds/relengapi.tok', '/builds/relengapi.tok'), - ('/tools/tooltool.py', '/builds/tooltool.py'), - ('/usr/local/lib/hgext', '/usr/local/lib/hgext'), - ], -} diff --git a/testing/mozharness/configs/single_locale/staging_release_mozilla-beta_android_api_9.py b/testing/mozharness/configs/single_locale/staging_release_mozilla-beta_android_api_9.py deleted file mode 100644 index a84247f07cbf..000000000000 --- a/testing/mozharness/configs/single_locale/staging_release_mozilla-beta_android_api_9.py +++ /dev/null @@ -1,95 +0,0 @@ -BRANCH = "mozilla-beta" -MOZ_UPDATE_CHANNEL = "beta" -MOZILLA_DIR = BRANCH -OBJDIR = "obj-l10n" -STAGE_SERVER = "ftp.stage.mozaws.net" -EN_US_BINARY_URL = "http://" + STAGE_SERVER + "/pub/mobile/candidates/%(version)s-candidates/build%(buildnum)d/android-api-9/en-US" -HG_SHARE_BASE_DIR = "/builds/hg-shared" - -config = { - "log_name": "single_locale", - "objdir": OBJDIR, - "is_automation": True, - "buildbot_json_path": "buildprops.json", - "force_clobber": True, - "clobberer_url": "https://api-pub-build.allizom.org/clobberer/lastclobber", - "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-beta.json", - "locales_dir": "mobile/android/locales", - "locales_platform": "android", - "ignore_locales": ["en-US"], - "balrog_credentials_file": "oauth.txt", - "tools_repo": "https://hg.mozilla.org/build/tools", - "is_release": True, - "tooltool_config": { - "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest", - "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR, - }, - "exes": { - 'tooltool.py': '/builds/tooltool.py', - }, - "repos": [{ - "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-beta", - "branch": "default", - "dest": MOZILLA_DIR, - }, { - "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs", - "branch": "default", - "dest": "buildbot-configs" - }, { - "repo": 
"https://hg.mozilla.org/%(user_repo_override)s/tools", - "branch": "default", - "dest": "tools" - }], - "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s/", - "hg_l10n_tag": "default", - 'vcs_share_base': HG_SHARE_BASE_DIR, - "l10n_dir": MOZILLA_DIR, - - "release_config_file": "buildbot-configs/mozilla/staging_release-fennec-mozilla-beta.py", - "repack_env": { - # so ugly, bug 951238 - "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64", - "MOZ_PKG_VERSION": "%(version)s", - "MOZ_OBJDIR": OBJDIR, - "LOCALE_MERGEDIR": "%(abs_merge_dir)s/", - "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL, - }, - "base_en_us_binary_url": EN_US_BINARY_URL, - "upload_branch": "%s-android-api-9" % BRANCH, - "ssh_key_dir": "~/.ssh", - "base_post_upload_cmd": "post_upload.py -p mobile -n %(buildnum)s -v %(version)s --builddir android-api-9/%(locale)s --release-to-mobile-candidates-dir --nightly-dir=candidates %(post_upload_extra)s", - "merge_locales": True, - "mozilla_dir": MOZILLA_DIR, - "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-9-10-constrained/l10n-release" % MOZILLA_DIR, - "signature_verification_script": "tools/release/signing/verify-android-signature.sh", - - # Mock - "mock_target": "mozilla-centos6-x86_64-android", - "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache', - 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General', - 'gtk2-devel', 'libnotify-devel', 'yasm', - 'alsa-lib-devel', 'libcurl-devel', - 'wireless-tools-devel', 'libX11-devel', - 'libXt-devel', 'mesa-libGL-devel', - 'gnome-vfs2-devel', 'GConf2-devel', 'wget', - 'mpfr', # required for system compiler - 'xorg-x11-font*', # fonts required for PGO - 'imake', # required for makedepend!?! 
- 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo - 'valgrind', 'dbus-x11', - 'pulseaudio-libs-devel', - 'gstreamer-devel', 'gstreamer-plugins-base-devel', - 'freetype-2.3.11-6.el6_1.8.x86_64', - 'freetype-devel-2.3.11-6.el6_1.8.x86_64', - 'java-1.7.0-openjdk-devel', - 'openssh-clients', - 'zlib-devel-1.2.3-27.el6.i686', - ], - "mock_files": [ - ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"), - ('/home/cltbld/.hgrc', '/builds/.hgrc'), - ('/builds/relengapi.tok', '/builds/relengapi.tok'), - ('/tools/tooltool.py', '/builds/tooltool.py'), - ('/usr/local/lib/hgext', '/usr/local/lib/hgext'), - ], -} diff --git a/testing/mozharness/configs/single_locale/staging_release_mozilla-release_android_api_9.py b/testing/mozharness/configs/single_locale/staging_release_mozilla-release_android_api_9.py deleted file mode 100644 index 2885e29e8cc5..000000000000 --- a/testing/mozharness/configs/single_locale/staging_release_mozilla-release_android_api_9.py +++ /dev/null @@ -1,98 +0,0 @@ -BRANCH = "mozilla-release" -MOZ_UPDATE_CHANNEL = "release" -MOZILLA_DIR = BRANCH -OBJDIR = "obj-l10n" -STAGE_SERVER = "dev-stage01.srv.releng.scl3.mozilla.com" -EN_US_BINARY_URL = "http://" + STAGE_SERVER + "/pub/mozilla.org/mobile/candidates/%(version)s-candidates/build%(buildnum)d/android-api-9/en-US" -HG_SHARE_BASE_DIR = "/builds/hg-shared" - -config = { - "log_name": "single_locale", - "objdir": OBJDIR, - "is_automation": True, - "buildbot_json_path": "buildprops.json", - "force_clobber": True, - "clobberer_url": "https://api-pub-build.allizom.org/clobberer/lastclobber", - "locales_file": "buildbot-configs/mozilla/l10n-changesets_mobile-release.json", - "locales_dir": "mobile/android/locales", - "locales_platform": "android", - "ignore_locales": ["en-US"], - "balrog_credentials_file": "oauth.txt", - "tools_repo": "https://hg.mozilla.org/build/tools", - "is_release": True, - "balrog_credentials_file": "oauth.txt", - "tools_repo": "https://hg.mozilla.org/build/tools", - "is_release": True, - "tooltool_config": { - "manifest": "mobile/android/config/tooltool-manifests/android/releng.manifest", - "output_dir": "%(abs_work_dir)s/" + MOZILLA_DIR, - }, - "exes": { - 'tooltool.py': '/builds/tooltool.py', - }, - "repos": [{ - "repo": "https://hg.mozilla.org/%(user_repo_override)s/mozilla-release", - "branch": "default", - "dest": MOZILLA_DIR, - }, { - "repo": "https://hg.mozilla.org/%(user_repo_override)s/buildbot-configs", - "branch": "default", - "dest": "buildbot-configs" - }, { - "repo": "https://hg.mozilla.org/%(user_repo_override)s/tools", - "branch": "default", - "dest": "tools" - }], - "hg_l10n_base": "https://hg.mozilla.org/%(user_repo_override)s/", - "hg_l10n_tag": "default", - 'vcs_share_base': HG_SHARE_BASE_DIR, - "l10n_dir": MOZILLA_DIR, - - "release_config_file": "buildbot-configs/mozilla/staging_release-fennec-mozilla-release.py", - "repack_env": { - # so ugly, bug 951238 - "LD_LIBRARY_PATH": "/lib:/tools/gcc-4.7.2-0moz1/lib:/tools/gcc-4.7.2-0moz1/lib64", - "MOZ_PKG_VERSION": "%(version)s", - "MOZ_OBJDIR": OBJDIR, - "LOCALE_MERGEDIR": "%(abs_merge_dir)s/", - "MOZ_UPDATE_CHANNEL": MOZ_UPDATE_CHANNEL, - }, - "base_en_us_binary_url": EN_US_BINARY_URL, - "upload_branch": "%s-android-api-9" % BRANCH, - "ssh_key_dir": "~/.ssh", - "base_post_upload_cmd": "post_upload.py -p mobile -n %(buildnum)s -v %(version)s --builddir android-api-9/%(locale)s --release-to-mobile-candidates-dir --nightly-dir=candidates %(post_upload_extra)s", - "merge_locales": True, - 
"mozilla_dir": MOZILLA_DIR, - "mozconfig": "%s/mobile/android/config/mozconfigs/android-api-9-10-constrained/l10n-release" % MOZILLA_DIR, - "signature_verification_script": "tools/release/signing/verify-android-signature.sh", - - # Mock - "mock_target": "mozilla-centos6-x86_64-android", - "mock_packages": ['autoconf213', 'python', 'zip', 'mozilla-python27-mercurial', 'git', 'ccache', - 'glibc-static', 'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General', - 'gtk2-devel', 'libnotify-devel', 'yasm', - 'alsa-lib-devel', 'libcurl-devel', - 'wireless-tools-devel', 'libX11-devel', - 'libXt-devel', 'mesa-libGL-devel', - 'gnome-vfs2-devel', 'GConf2-devel', 'wget', - 'mpfr', # required for system compiler - 'xorg-x11-font*', # fonts required for PGO - 'imake', # required for makedepend!?! - 'gcc45_0moz3', 'gcc454_0moz1', 'gcc472_0moz1', 'gcc473_0moz1', 'yasm', 'ccache', # <-- from releng repo - 'valgrind', 'dbus-x11', - 'pulseaudio-libs-devel', - 'gstreamer-devel', 'gstreamer-plugins-base-devel', - 'freetype-2.3.11-6.el6_1.8.x86_64', - 'freetype-devel-2.3.11-6.el6_1.8.x86_64', - 'java-1.7.0-openjdk-devel', - 'openssh-clients', - 'zlib-devel-1.2.3-27.el6.i686', - ], - "mock_files": [ - ("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"), - ('/home/cltbld/.hgrc', '/builds/.hgrc'), - ('/builds/relengapi.tok', '/builds/relengapi.tok'), - ('/tools/tooltool.py', '/builds/tooltool.py'), - ('/usr/local/lib/hgext', '/usr/local/lib/hgext'), - ], -} diff --git a/testing/mozharness/mozharness/mozilla/building/buildbase.py b/testing/mozharness/mozharness/mozilla/building/buildbase.py index cb8cd13a91a4..35f7924eecf3 100755 --- a/testing/mozharness/mozharness/mozilla/building/buildbase.py +++ b/testing/mozharness/mozharness/mozilla/building/buildbase.py @@ -357,15 +357,10 @@ class BuildOptionParser(object): 'graphene': 'builds/releng_sub_%s_configs/%s_graphene.py', 'horizon': 'builds/releng_sub_%s_configs/%s_horizon.py', 'source': 'builds/releng_sub_%s_configs/%s_source.py', - 'api-9': 'builds/releng_sub_%s_configs/%s_api_9.py', - 'api-11': 'builds/releng_sub_%s_configs/%s_api_11.py', 'api-15-gradle-dependencies': 'builds/releng_sub_%s_configs/%s_api_15_gradle_dependencies.py', 'api-15': 'builds/releng_sub_%s_configs/%s_api_15.py', - 'api-9-debug': 'builds/releng_sub_%s_configs/%s_api_9_debug.py', - 'api-11-debug': 'builds/releng_sub_%s_configs/%s_api_11_debug.py', 'api-15-debug': 'builds/releng_sub_%s_configs/%s_api_15_debug.py', 'x86': 'builds/releng_sub_%s_configs/%s_x86.py', - 'api-11-partner-sample1': 'builds/releng_sub_%s_configs/%s_api_11_partner_sample1.py', 'api-15-partner-sample1': 'builds/releng_sub_%s_configs/%s_api_15_partner_sample1.py', 'android-test': 'builds/releng_sub_%s_configs/%s_test.py', 'android-checkstyle': 'builds/releng_sub_%s_configs/%s_checkstyle.py', From ea27d6dd7955f26b9762b7538e483b60d7609a2c Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Fri, 22 Jul 2016 18:24:50 -0700 Subject: [PATCH 57/63] Bug 1288882 - use_vcs_unique_share defined in mozharness migration configs is ignored, r=Callek a=release DONTBUILD note: this requires a clean work dir unfortunately. 
so you have to blow away the fake build/hg-share and any repos in build/ MozReview-Commit-ID: 3TfNLdga9Dt --HG-- extra : rebase_source : 25972c5b53eb1bddd490c7aea6a085b713ff7d03 extra : amend_source : 5841fb61e94ab9c4c0f43b344f1a68d589a5c356 --- testing/mozharness/configs/merge_day/aurora_to_beta.py | 1 - testing/mozharness/configs/merge_day/beta_to_release.py | 1 - testing/mozharness/configs/merge_day/bump_esr.py | 1 - testing/mozharness/configs/merge_day/central_to_aurora.py | 1 - testing/mozharness/configs/merge_day/release_to_esr.py | 1 - .../mozharness/configs/merge_day/staging_beta_migration.py | 1 - testing/mozharness/scripts/merge_day/gecko_migration.py | 5 +++++ 7 files changed, 5 insertions(+), 6 deletions(-) diff --git a/testing/mozharness/configs/merge_day/aurora_to_beta.py b/testing/mozharness/configs/merge_day/aurora_to_beta.py index d47b6da7fd24..dc1fc4c83e95 100644 --- a/testing/mozharness/configs/merge_day/aurora_to_beta.py +++ b/testing/mozharness/configs/merge_day/aurora_to_beta.py @@ -60,7 +60,6 @@ config = { "# Enable enforcing that add-ons are signed by the trusted root") ], - "use_vcs_unique_share": True, "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'), # "hg_share_base": None, "tools_repo_url": "https://hg.mozilla.org/build/tools", diff --git a/testing/mozharness/configs/merge_day/beta_to_release.py b/testing/mozharness/configs/merge_day/beta_to_release.py index 0fe65aca1b52..0316272bf6ad 100644 --- a/testing/mozharness/configs/merge_day/beta_to_release.py +++ b/testing/mozharness/configs/merge_day/beta_to_release.py @@ -28,7 +28,6 @@ config = { "MAR_CHANNEL_ID=firefox-mozilla-release"), ], - "use_vcs_unique_share": True, "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'), # "hg_share_base": None, "tools_repo_url": "https://hg.mozilla.org/build/tools", diff --git a/testing/mozharness/configs/merge_day/bump_esr.py b/testing/mozharness/configs/merge_day/bump_esr.py index 137b132699c4..3b25abb9d45a 100644 --- a/testing/mozharness/configs/merge_day/bump_esr.py +++ b/testing/mozharness/configs/merge_day/bump_esr.py @@ -2,7 +2,6 @@ import os ABS_WORK_DIR = os.path.join(os.getcwd(), "build") config = { - "use_vcs_unique_share": True, "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'), "log_name": "bump_esr", "version_files": [ diff --git a/testing/mozharness/configs/merge_day/central_to_aurora.py b/testing/mozharness/configs/merge_day/central_to_aurora.py index c362361f0f6d..36347f667f61 100644 --- a/testing/mozharness/configs/merge_day/central_to_aurora.py +++ b/testing/mozharness/configs/merge_day/central_to_aurora.py @@ -69,7 +69,6 @@ config = { "mobile/android/locales/all-locales" ], - "use_vcs_unique_share": True, "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'), # "hg_share_base": None, "tools_repo_url": "https://hg.mozilla.org/build/tools", diff --git a/testing/mozharness/configs/merge_day/release_to_esr.py b/testing/mozharness/configs/merge_day/release_to_esr.py index e7b24cea31e8..d9f260ad5770 100644 --- a/testing/mozharness/configs/merge_day/release_to_esr.py +++ b/testing/mozharness/configs/merge_day/release_to_esr.py @@ -21,7 +21,6 @@ config = { "MAR_CHANNEL_ID=firefox-mozilla-release", "MAR_CHANNEL_ID=firefox-mozilla-esr"), ], - "use_vcs_unique_share": True, "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'), # "hg_share_base": None, "tools_repo_url": "https://hg.mozilla.org/build/tools", diff --git a/testing/mozharness/configs/merge_day/staging_beta_migration.py 
b/testing/mozharness/configs/merge_day/staging_beta_migration.py index df9c8b317e61..9b6ac198e3cc 100644 --- a/testing/mozharness/configs/merge_day/staging_beta_migration.py +++ b/testing/mozharness/configs/merge_day/staging_beta_migration.py @@ -9,7 +9,6 @@ ABS_WORK_DIR = os.path.join(os.getcwd(), "build") config = { "log_name": "staging_beta", - "use_vcs_unique_share": True, "vcs_share_base": os.path.join(ABS_WORK_DIR, 'hg-shared'), "tools_repo_url": "https://hg.mozilla.org/build/tools", "tools_repo_branch": "default", diff --git a/testing/mozharness/scripts/merge_day/gecko_migration.py b/testing/mozharness/scripts/merge_day/gecko_migration.py index df200d938cbf..523866d7a158 100755 --- a/testing/mozharness/scripts/merge_day/gecko_migration.py +++ b/testing/mozharness/scripts/merge_day/gecko_migration.py @@ -156,6 +156,11 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin, "branch": self.config.get("%s_repo_branch" % (k,), "default"), "dest": dirs['abs_%s_dir' % k], "vcs": "hg", + # "hg" vcs uses robustcheckout extension requires the use of a share + # but having a share breaks migration logic when merging repos. + # Solution: tell hg vcs to create a unique share directory for each + # gecko repo. see mozharness/base/vcs/mercurial.py for implementation + "use_vcs_unique_share": True, }) else: self.warning("Skipping %s" % repo_key) From cb74fc1327028e13bb1f66817da07ca09e4edcec Mon Sep 17 00:00:00 2001 From: Rail Aliiev Date: Mon, 25 Jul 2016 11:24:53 -0400 Subject: [PATCH 58/63] Bug 1289153 - merge scripts should pass --new-branch for m-r push r=jlund a=release DONTBUILD MozReview-Commit-ID: Ftb6NTMM5Wz --- testing/mozharness/scripts/merge_day/gecko_migration.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/testing/mozharness/scripts/merge_day/gecko_migration.py b/testing/mozharness/scripts/merge_day/gecko_migration.py index 523866d7a158..453fd2ff9c00 100755 --- a/testing/mozharness/scripts/merge_day/gecko_migration.py +++ b/testing/mozharness/scripts/merge_day/gecko_migration.py @@ -182,7 +182,11 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin, return dirs.get('abs_from_dir'), dirs.get('abs_to_dir') def query_push_args(self, cwd): - return ['-r', '.'] + if cwd == self.query_abs_dirs()['abs_to_dir'] and \ + self.config['migration_behavior'] == 'beta_to_release': + return ['--new-branch', '-r', '.'] + else: + return ['-r', '.'] def query_from_revision(self): """ Shortcut to get the revision for the from repo From 9c3e690c9a70caa46c5935cc4524fbf1e308dfad Mon Sep 17 00:00:00 2001 From: Hiroyuki Ikezoe Date: Tue, 26 Jul 2016 06:44:11 +0900 Subject: [PATCH 59/63] Bug 1287725 - Drop KeyframeEffectReadOnly::HasAnimationOfProperties and nsLayoutUtils::HasCurrentAnimationsForProperties. 
r=birtles MozReview-Commit-ID: K33ga7KpLIo --HG-- extra : rebase_source : 57bc8f2edffb7e89ea790f5cc0fa6848770d1873 --- dom/animation/KeyframeEffect.cpp | 13 ------------- dom/animation/KeyframeEffect.h | 2 -- layout/base/ActiveLayerTracker.cpp | 2 +- layout/base/nsLayoutUtils.cpp | 14 -------------- layout/base/nsLayoutUtils.h | 8 -------- 5 files changed, 1 insertion(+), 38 deletions(-) diff --git a/dom/animation/KeyframeEffect.cpp b/dom/animation/KeyframeEffect.cpp index 5de9ff52a8d1..955e4e30b566 100644 --- a/dom/animation/KeyframeEffect.cpp +++ b/dom/animation/KeyframeEffect.cpp @@ -507,19 +507,6 @@ KeyframeEffectReadOnly::GetAnimationOfProperty(nsCSSProperty aProperty) const return nullptr; } -bool -KeyframeEffectReadOnly::HasAnimationOfProperties( - const nsCSSProperty* aProperties, - size_t aPropertyCount) const -{ - for (size_t i = 0; i < aPropertyCount; i++) { - if (HasAnimationOfProperty(aProperties[i])) { - return true; - } - } - return false; -} - #ifdef DEBUG bool SpecifiedKeyframeArraysAreEqual(const nsTArray& aA, diff --git a/dom/animation/KeyframeEffect.h b/dom/animation/KeyframeEffect.h index 9eaa4d6a3f14..c21641a93f17 100644 --- a/dom/animation/KeyframeEffect.h +++ b/dom/animation/KeyframeEffect.h @@ -294,8 +294,6 @@ public: bool HasAnimationOfProperty(nsCSSProperty aProperty) const { return GetAnimationOfProperty(aProperty) != nullptr; } - bool HasAnimationOfProperties(const nsCSSProperty* aProperties, - size_t aPropertyCount) const; const InfallibleTArray& Properties() const { return mProperties; } diff --git a/layout/base/ActiveLayerTracker.cpp b/layout/base/ActiveLayerTracker.cpp index b8815b6dde0b..57773d985ae4 100644 --- a/layout/base/ActiveLayerTracker.cpp +++ b/layout/base/ActiveLayerTracker.cpp @@ -446,7 +446,7 @@ ActiveLayerTracker::IsStyleAnimated(nsDisplayListBuilder* aBuilder, if (aProperty == eCSSProperty_transform && aFrame->Combines3DTransformWithAncestors()) { return IsStyleAnimated(aBuilder, aFrame->GetParent(), aProperty); } - return nsLayoutUtils::HasCurrentAnimationsForProperties(aFrame, &aProperty, 1); + return nsLayoutUtils::HasCurrentAnimationOfProperty(aFrame, aProperty); } /* static */ bool diff --git a/layout/base/nsLayoutUtils.cpp b/layout/base/nsLayoutUtils.cpp index 0c84de124a75..4c4bb17d78b3 100644 --- a/layout/base/nsLayoutUtils.cpp +++ b/layout/base/nsLayoutUtils.cpp @@ -495,20 +495,6 @@ nsLayoutUtils::HasCurrentTransitions(const nsIFrame* aFrame) ); } -bool -nsLayoutUtils::HasCurrentAnimationsForProperties(const nsIFrame* aFrame, - const nsCSSProperty* aProperties, - size_t aPropertyCount) -{ - return HasMatchingAnimations(aFrame, - [&aProperties, &aPropertyCount](KeyframeEffectReadOnly& aEffect) - { - return aEffect.IsCurrent() && - aEffect.HasAnimationOfProperties(aProperties, aPropertyCount); - } - ); -} - bool nsLayoutUtils::HasRelevantAnimationOfProperty(const nsIFrame* aFrame, nsCSSProperty aProperty) diff --git a/layout/base/nsLayoutUtils.h b/layout/base/nsLayoutUtils.h index ec850361cd55..6420372614ce 100644 --- a/layout/base/nsLayoutUtils.h +++ b/layout/base/nsLayoutUtils.h @@ -2245,14 +2245,6 @@ public: */ static bool HasCurrentTransitions(const nsIFrame* aFrame); - /** - * Returns true if the frame has any current animations or transitions - * for any of the specified properties. - */ - static bool HasCurrentAnimationsForProperties(const nsIFrame* aFrame, - const nsCSSProperty* aProperties, - size_t aPropertyCount); - /** * Returns true if the frame has current or in-effect (i.e. 
in before phase, * running or filling) animations or transitions for the From 54dc70b4e6a59350e0b7d2bb855b6f4c2f64b078 Mon Sep 17 00:00:00 2001 From: Henrik Skupin Date: Mon, 25 Jul 2016 18:23:26 +0200 Subject: [PATCH 60/63] Bug 1284502 - Marionette harness should log exceptions from run() only once. r=maja_zf Whenever an exception is raised while tests are executed, the log error message should only be printed once. Ideally this should happen in `cli()`, so that subclasses can better set their own behavior, and it saves us from re-raising the exception. MozReview-Commit-ID: 5NLBnJAjUMQ --HG-- extra : rebase_source : 17e1574c8671037912d85c0575db493c96f972b2 --- .../marionette/harness/marionette/runtests.py | 23 ++++++++----------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/testing/marionette/harness/marionette/runtests.py b/testing/marionette/harness/marionette/runtests.py index 96ca286f80f4..aadda3c54970 100644 --- a/testing/marionette/harness/marionette/runtests.py +++ b/testing/marionette/harness/marionette/runtests.py @@ -62,18 +62,11 @@ class MarionetteHarness(object): self._testcase_class.pydebugger = __import__(self.args['pydebugger']) def run(self): - try: - self.process_args() - tests = self.args.pop('tests') - runner = self._runner_class(**self.args) - runner.run_tests(tests) - return runner.failed + runner.crashed - except Exception: - logger = self.args.get('logger') - if logger: - logger.error('Failure during test execution.', - exc_info=True) - raise + self.process_args() + tests = self.args.pop('tests') + runner = self._runner_class(**self.args) + runner.run_tests(tests) + return runner.failed + runner.crashed def cli(runner_class=MarionetteTestRunner, parser_class=MarionetteArguments, @@ -88,11 +81,13 @@ def cli(runner_class=MarionetteTestRunner, parser_class=MarionetteArguments, """ logger = mozlog.commandline.setup_logging('Marionette test runner', {}) try: - failed = harness_class(runner_class, parser_class, testcase_class, args=args).run() + harness_instance = harness_class(runner_class, parser_class, testcase_class, + args=args) + failed = harness_instance.run() if failed > 0: sys.exit(10) except Exception: - logger.error('Failure during harness setup', exc_info=True) + logger.error('Failure during harness execution', exc_info=True) sys.exit(1) sys.exit(0) From bd05d8b08bbab532b2053594c401097d2f136204 Mon Sep 17 00:00:00 2001 From: Michael Comella Date: Mon, 25 Jul 2016 13:57:45 -0700 Subject: [PATCH 61/63] Bug 1289006 - Return gracefully if listFiles returns null in telemetry store. r=grisha This changeset will correct the crash we're seeing in the bug. The docs support that File.listFiles can return null.
MozReview-Commit-ID: FHYGErshhoP --HG-- extra : rebase_source : 15d0c4a3d283924627f1f97a1f99637244c49c08 --- .../telemetry/stores/TelemetryJSONFilePingStore.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java b/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java index 7ad7df8870f5..0bc0a7f79eca 100644 --- a/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java +++ b/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java @@ -129,7 +129,15 @@ public class TelemetryJSONFilePingStore extends TelemetryPingStore { @Override public ArrayList getAllPings() { - final List files = Arrays.asList(storeDir.listFiles(uuidFilenameFilter)); + final File[] fileArray = storeDir.listFiles(uuidFilenameFilter); + if (fileArray == null) { + // Intentionally don't log all info for the store directory to prevent leaking the path. + Log.w(LOGTAG, "listFiles unexpectedly returned null - unable to retrieve pings. Debug: exists? " + + storeDir.exists() + "; directory? " + storeDir.isDirectory()); + return new ArrayList<>(1); + } + + final List files = Arrays.asList(fileArray); Collections.sort(files, fileLastModifiedComparator); // oldest to newest final ArrayList out = new ArrayList<>(files.size()); for (final File file : files) { From 7e2617e5085f44528152c5ae013d73c845da227c Mon Sep 17 00:00:00 2001 From: Michael Comella Date: Mon, 25 Jul 2016 14:24:03 -0700 Subject: [PATCH 62/63] Bug 1289006 - Add code to assert potential crashing case never happens & tests. r=grisha I expect the crashes occurred because of one of the following: * the store already existed as a file * the store was a directory that did not have the appropriate permissions In the telemetry code, none of the cases above should happen so I assert that they never do. If the crashes did occur for one of these reasons, the user will unfortunately continue to crash but at least we'll know where our assumptions are going wrong. I originally intended to write a regression test for listFiles returning null but it requires the application code to be modified in non-trivial ways (e.g. accessor methods we might forget to use) so I decided against it. MozReview-Commit-ID: 9V9H84ehbdO --HG-- extra : rebase_source : 8290e515c9010bef639e92d1b0420bebe5c7d61c --- .../stores/TelemetryJSONFilePingStore.java | 11 ++++++++ .../TestTelemetryJSONFilePingStore.java | 28 +++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java b/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java index 0bc0a7f79eca..7a499dfec997 100644 --- a/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java +++ b/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java @@ -78,9 +78,20 @@ public class TelemetryJSONFilePingStore extends TelemetryPingStore { @WorkerThread // Writes to disk public TelemetryJSONFilePingStore(final File storeDir, final String profileName) { super(profileName); + if (storeDir.exists() && !storeDir.isDirectory()) { + // An alternative is to create a new directory, but we wouldn't + // be able to access it later so it's better to throw.
+            throw new IllegalStateException("Store dir unexpectedly exists & is not a directory - cannot continue");
+        }
+
         this.storeDir = storeDir;
         this.storeDir.mkdirs();
         uuidFilenameFilter = new FilenameRegexFilter(UUIDUtil.UUID_PATTERN);
+
+        if (!this.storeDir.canRead() || !this.storeDir.canWrite() || !this.storeDir.canExecute()) {
+            throw new IllegalStateException("Cannot read, write, or execute store dir: " +
+                    this.storeDir.canRead() + " " + this.storeDir.canWrite() + " " + this.storeDir.canExecute());
+        }
     }

     @VisibleForTesting File getPingFile(final String docID) {
diff --git a/mobile/android/tests/background/junit4/src/org/mozilla/gecko/telemetry/stores/TestTelemetryJSONFilePingStore.java b/mobile/android/tests/background/junit4/src/org/mozilla/gecko/telemetry/stores/TestTelemetryJSONFilePingStore.java
index a25f8ca9b29e..a95a8b292b8d 100644
--- a/mobile/android/tests/background/junit4/src/org/mozilla/gecko/telemetry/stores/TestTelemetryJSONFilePingStore.java
+++ b/mobile/android/tests/background/junit4/src/org/mozilla/gecko/telemetry/stores/TestTelemetryJSONFilePingStore.java
@@ -19,6 +19,7 @@ import org.mozilla.gecko.util.FileUtils;

 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.FilenameFilter;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -70,6 +71,33 @@ public class TestTelemetryJSONFilePingStore {
         assertEquals("Temp dir contains one dir (the store dir)", 1, tempDir.getRoot().list().length);
     }

+    @Test(expected = IllegalStateException.class)
+    public void testConstructorStoreAlreadyExistsAsNonDirectory() throws Exception {
+        final File file = tempDir.newFile();
+        new TelemetryJSONFilePingStore(file, "profileName"); // expected to throw.
+    }
+
+    @Test(expected = IllegalStateException.class)
+    public void testConstructorDirIsNotReadable() throws Exception {
+        final File dir = tempDir.newFolder();
+        dir.setReadable(false);
+        new TelemetryJSONFilePingStore(dir, "profileName"); // expected to throw.
+    }
+
+    @Test(expected = IllegalStateException.class)
+    public void testConstructorDirIsNotWritable() throws Exception {
+        final File dir = tempDir.newFolder();
+        dir.setWritable(false);
+        new TelemetryJSONFilePingStore(dir, "profileName"); // expected to throw.
+    }
+
+    @Test(expected = IllegalStateException.class)
+    public void testConstructorDirIsNotExecutable() throws Exception {
+        final File dir = tempDir.newFolder();
+        dir.setExecutable(false);
+        new TelemetryJSONFilePingStore(dir, "profileName"); // expected to throw.
+    }
+
     @Test
     public void testStorePingStoresCorrectData() throws Exception {
         assertStoreFileCount(0);

From 574482a5c5f342892b05b410e3e95bc8a3327def Mon Sep 17 00:00:00 2001
From: Kurt Carpenter
Date: Tue, 26 Jul 2016 10:27:42 +1000
Subject: [PATCH 63/63] Bug 1251841 - switchIfURIInWindow can ignore fragments AND query strings, add tests. r=markh

MozReview-Commit-ID: DVLzg2KHQCd
---
 browser/base/content/browser.js               | 46 +++++++++++--------
 ...025195_switchToTabHavingURI_aOpenParams.js | 38 +++++++++++++--
 browser/base/content/utilityOverlay.js        |  2 +-
 3 files changed, 64 insertions(+), 22 deletions(-)

diff --git a/browser/base/content/browser.js b/browser/base/content/browser.js
index afac5b28d654..633415b92e25 100755
--- a/browser/base/content/browser.js
+++ b/browser/base/content/browser.js
@@ -7452,7 +7452,7 @@ var gRemoteTabsUI = {
  * passed via this object.
  * This object also allows:
  * - 'ignoreFragment' property to be set to true to exclude fragment-portion
- *   matching when comparing URIs.
+ * matching when comparing URIs. Fragment will be replaced. * - 'ignoreQueryString' property to be set to true to exclude query string * matching when comparing URIs. * - 'replaceQueryString' property to be set to true to exclude query string @@ -7488,31 +7488,41 @@ function switchToTabHavingURI(aURI, aOpenNew, aOpenParams={}) { return false; } + //Remove the query string, fragment, both, or neither from a given url. + function cleanURL(url, removeQuery, removeFragment) { + let ret = url; + if (removeFragment) { + ret = ret.split("#")[0]; + if (removeQuery) { + // This removes a query, if present before the fragment. + ret = ret.split("?")[0]; + } + } else if (removeQuery) { + // This is needed in case there is a fragment after the query. + let fragment = ret.split("#")[1]; + ret = ret.split("?")[0].concat( + (fragment != undefined) ? "#".concat(fragment) : ""); + } + return ret; + } + + // Need to handle nsSimpleURIs here too (e.g. about:...), which don't + // work correctly with URL objects - so treat them as strings + let requestedCompare = cleanURL( + aURI.spec, ignoreQueryString || replaceQueryString, ignoreFragment); let browsers = aWindow.gBrowser.browsers; for (let i = 0; i < browsers.length; i++) { let browser = browsers[i]; - if (ignoreFragment ? browser.currentURI.equalsExceptRef(aURI) : - browser.currentURI.equals(aURI)) { - // Focus the matching window & tab + let browserCompare = cleanURL( + browser.currentURI.spec, ignoreQueryString || replaceQueryString, ignoreFragment); + if (requestedCompare == browserCompare) { aWindow.focus(); - if (ignoreFragment) { - let spec = aURI.spec; - browser.loadURI(spec); + if (ignoreFragment || replaceQueryString) { + browser.loadURI(aURI.spec); } aWindow.gBrowser.tabContainer.selectedIndex = i; return true; } - if (ignoreQueryString || replaceQueryString) { - if (browser.currentURI.spec.split("?")[0] == aURI.spec.split("?")[0]) { - // Focus the matching window & tab - aWindow.focus(); - if (replaceQueryString) { - browser.loadURI(aURI.spec); - } - aWindow.gBrowser.tabContainer.selectedIndex = i; - return true; - } - } } return false; } diff --git a/browser/base/content/test/urlbar/browser_bug1025195_switchToTabHavingURI_aOpenParams.js b/browser/base/content/test/urlbar/browser_bug1025195_switchToTabHavingURI_aOpenParams.js index cdd2f2be4e0d..0edecbf36ce4 100644 --- a/browser/base/content/test/urlbar/browser_bug1025195_switchToTabHavingURI_aOpenParams.js +++ b/browser/base/content/test/urlbar/browser_bug1025195_switchToTabHavingURI_aOpenParams.js @@ -2,7 +2,7 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ -add_task(function test_ignoreFragment() { +add_task(function *test_ignoreFragment() { let tabRefAboutHome = gBrowser.addTab("about:home#1"); yield promiseTabLoaded(tabRefAboutHome); let tabRefAboutMozilla = gBrowser.addTab("about:mozilla"); @@ -35,7 +35,7 @@ add_task(function test_ignoreFragment() { cleanupTestTabs(); }); -add_task(function test_ignoreQueryString() { +add_task(function* test_ignoreQueryString() { let tabRefAboutHome = gBrowser.addTab("about:home?hello=firefox"); yield promiseTabLoaded(tabRefAboutHome); let tabRefAboutMozilla = gBrowser.addTab("about:mozilla"); @@ -52,7 +52,7 @@ add_task(function test_ignoreQueryString() { cleanupTestTabs(); }); -add_task(function test_replaceQueryString() { +add_task(function* test_replaceQueryString() { let tabRefAboutHome = gBrowser.addTab("about:home?hello=firefox"); yield promiseTabLoaded(tabRefAboutHome); let tabRefAboutMozilla = gBrowser.addTab("about:mozilla"); @@ -72,6 +72,38 @@ add_task(function test_replaceQueryString() { cleanupTestTabs(); }); +add_task(function* test_replaceQueryStringAndFragment() { + let tabRefAboutHome = gBrowser.addTab("about:home?hello=firefox#aaa"); + yield promiseTabLoaded(tabRefAboutHome); + let tabRefAboutMozilla = gBrowser.addTab("about:mozilla?hello=firefoxos#aaa"); + yield promiseTabLoaded(tabRefAboutMozilla); + gBrowser.selectedTab = tabRefAboutMozilla; + + switchTab("about:home", false); + gBrowser.removeCurrentTab(); + switchTab("about:home?hello=firefox#aaa", true); + is(tabRefAboutHome, gBrowser.selectedTab, "Selected tab should be the initial about:home tab"); + switchTab("about:mozilla?hello=firefox#bbb", true, { replaceQueryString: true, ignoreFragment: true }); + is(tabRefAboutMozilla, gBrowser.selectedTab, "Selected tab should be the initial about:mozilla tab"); + switchTab("about:home?hello=firefoxos#bbb", true, { ignoreQueryString: true, ignoreFragment: true }); + is(tabRefAboutHome, gBrowser.selectedTab, "Selected tab should be the initial about:home tab"); + cleanupTestTabs(); +}); + +add_task(function* test_ignoreQueryStringIgnoresFragment() { + let tabRefAboutHome = gBrowser.addTab("about:home?hello=firefox#aaa"); + yield promiseTabLoaded(tabRefAboutHome); + let tabRefAboutMozilla = gBrowser.addTab("about:mozilla?hello=firefoxos#aaa"); + yield promiseTabLoaded(tabRefAboutMozilla); + gBrowser.selectedTab = tabRefAboutMozilla; + + switchTab("about:home?hello=firefox#bbb", false, { ignoreQueryString: true }); + gBrowser.removeCurrentTab(); + switchTab("about:home?hello=firefoxos#aaa", true, { ignoreQueryString: true }); + is(tabRefAboutHome, gBrowser.selectedTab, "Selected tab should be the initial about:home tab"); + cleanupTestTabs(); +}); + // Begin helpers function cleanupTestTabs() { diff --git a/browser/base/content/utilityOverlay.js b/browser/base/content/utilityOverlay.js index e59fa56cb89d..6e2419647868 100644 --- a/browser/base/content/utilityOverlay.js +++ b/browser/base/content/utilityOverlay.js @@ -627,7 +627,7 @@ function openPreferences(paneID, extraArgs) win = Services.ww.openWindow(null, Services.prefs.getCharPref("browser.chromeURL"), "_blank", "chrome,dialog=no,all", windowArguments); } else { - newLoad = !win.switchToTabHavingURI(preferencesURL, true, {ignoreFragment: true}); + newLoad = !win.switchToTabHavingURI(preferencesURL, true, { ignoreFragment: true, replaceQueryString: true }); browser = win.gBrowser.selectedBrowser; }