diff --git a/README.md b/README.md
index b259e40..48a4119 100644
--- a/README.md
+++ b/README.md
@@ -94,7 +94,32 @@ steps:
### With Private Registry (GitHub Packages)
-When using `registry-url`, set `run-install: false` and run install manually with the auth token, otherwise the default auto-install will fail for private packages.
+If your repo has a `.npmrc` that declares the registry, pass `NODE_AUTH_TOKEN`
+via `env` and let the default `vp install` run — no `registry-url` needed.
+When `NODE_AUTH_TOKEN` is set, the action auto-generates a matching
+`_authToken` entry at `$RUNNER_TEMP/.npmrc` for each registry declared in your
+repo `.npmrc` that doesn't already have one, so your repo `.npmrc` can stay
+minimal:
+
+```yaml
+# .npmrc in the repo (auth line not required — action adds it):
+# @myorg:registry=https://npm.pkg.github.com
+
+steps:
+ - uses: actions/checkout@v6
+ - uses: voidzero-dev/setup-vp@v1
+ with:
+ node-version: "lts"
+ env:
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+```
+
+If you already have the `_authToken` line in your repo `.npmrc` (e.g. for local
+dev symmetry), that's respected as-is and the action won't overwrite it.
+
+Alternatively, pass `registry-url` explicitly to bypass the action's repo-level
+`.npmrc` detection and auth propagation logic (the package manager may still
+read the repo `.npmrc` per its own config resolution):
```yaml
steps:
diff --git a/dist/index.mjs b/dist/index.mjs
index 85b248f..a8375cf 100644
--- a/dist/index.mjs
+++ b/dist/index.mjs
@@ -208,4 +208,4 @@ More info on storage limits: https://docs.github.com/en/billing/managing-billing
`)}}})),gM=F((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.mergeRpcOptions=void 0;let t=fM();function n(e,n){if(!n)return e;let i={};r(e,i),r(n,i);for(let a of Object.keys(n)){let o=n[a];switch(a){case`jsonOptions`:i.jsonOptions=t.mergeJsonOptions(e.jsonOptions,i.jsonOptions);break;case`binaryOptions`:i.binaryOptions=t.mergeBinaryOptions(e.binaryOptions,i.binaryOptions);break;case`meta`:i.meta={},r(e.meta,i.meta),r(n.meta,i.meta);break;case`interceptors`:i.interceptors=e.interceptors?e.interceptors.concat(o):o.concat();break}}return i}e.mergeRpcOptions=n;function r(e,t){if(!e)return;let n=t;for(let[t,r]of Object.entries(e))r instanceof Date?n[t]=new Date(r.getTime()):Array.isArray(r)?n[t]=r.concat():n[t]=r}})),_M=F((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.Deferred=e.DeferredState=void 0;var t;(function(e){e[e.PENDING=0]=`PENDING`,e[e.REJECTED=1]=`REJECTED`,e[e.RESOLVED=2]=`RESOLVED`})(t=e.DeferredState||={}),e.Deferred=class{constructor(e=!0){this._state=t.PENDING,this._promise=new Promise((e,t)=>{this._resolve=e,this._reject=t}),e&&this._promise.catch(e=>{})}get state(){return this._state}get promise(){return this._promise}resolve(e){if(this.state!==t.PENDING)throw Error(`cannot resolve ${t[this.state].toLowerCase()}`);this._resolve(e),this._state=t.RESOLVED}reject(e){if(this.state!==t.PENDING)throw Error(`cannot reject ${t[this.state].toLowerCase()}`);this._reject(e),this._state=t.REJECTED}resolvePending(e){this._state===t.PENDING&&this.resolve(e)}rejectPending(e){this._state===t.PENDING&&this.reject(e)}}})),vM=F((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.RpcOutputStreamController=void 0;let t=_M(),n=fM();e.RpcOutputStreamController=class{constructor(){this._lis={nxt:[],msg:[],err:[],cmp:[]},this._closed=!1,this._itState={q:[]}}onNext(e){return this.addLis(e,this._lis.nxt)}onMessage(e){return this.addLis(e,this._lis.msg)}onError(e){return this.addLis(e,this._lis.err)}onComplete(e){return 
this.addLis(e,this._lis.cmp)}addLis(e,t){return t.push(e),()=>{let n=t.indexOf(e);n>=0&&t.splice(n,1)}}clearLis(){for(let e of Object.values(this._lis))e.splice(0,e.length)}get closed(){return this._closed!==!1}notifyNext(e,t,r){n.assert(+!!e+ +!!t+ +!!r<=1,`only one emission at a time`),e&&this.notifyMessage(e),t&&this.notifyError(t),r&&this.notifyComplete()}notifyMessage(e){n.assert(!this.closed,`stream is closed`),this.pushIt({value:e,done:!1}),this._lis.msg.forEach(t=>t(e)),this._lis.nxt.forEach(t=>t(e,void 0,!1))}notifyError(e){n.assert(!this.closed,`stream is closed`),this._closed=e,this.pushIt(e),this._lis.err.forEach(t=>t(e)),this._lis.nxt.forEach(t=>t(void 0,e,!1)),this.clearLis()}notifyComplete(){n.assert(!this.closed,`stream is closed`),this._closed=!0,this.pushIt({value:null,done:!0}),this._lis.cmp.forEach(e=>e()),this._lis.nxt.forEach(e=>e(void 0,void 0,!0)),this.clearLis()}[Symbol.asyncIterator](){return this._closed===!0?this.pushIt({value:null,done:!0}):this._closed!==!1&&this.pushIt(this._closed),{next:()=>{let e=this._itState;n.assert(e,`bad state`),n.assert(!e.p,`iterator contract broken`);let r=e.q.shift();return r?`value`in r?Promise.resolve(r):Promise.reject(r):(e.p=new t.Deferred,e.p.promise)}}}pushIt(e){let r=this._itState;if(r.p){let i=r.p;n.assert(i.state==t.DeferredState.PENDING,`iterator contract broken`),`value`in e?i.resolve(e):i.reject(e),delete r.p}else r.q.push(e)}}})),yM=F((e=>{var t=e&&e.__awaiter||function(e,t,n,r){function i(e){return e instanceof n?e:new n(function(t){t(e)})}return new(n||=Promise)(function(n,a){function o(e){try{c(r.next(e))}catch(e){a(e)}}function s(e){try{c(r.throw(e))}catch(e){a(e)}}function c(e){e.done?n(e.value):i(e.value).then(o,s)}c((r=r.apply(e,t||[])).next())})};Object.defineProperty(e,`__esModule`,{value:!0}),e.UnaryCall=void 0,e.UnaryCall=class{constructor(e,t,n,r,i,a,o){this.method=e,this.requestHeaders=t,this.request=n,this.headers=r,this.response=i,this.status=a,this.trailers=o}then(e,t){return 
this.promiseFinished().then(t=>e?Promise.resolve(e(t)):t,e=>t?Promise.resolve(t(e)):Promise.reject(e))}promiseFinished(){return t(this,void 0,void 0,function*(){let[e,t,n,r]=yield Promise.all([this.headers,this.response,this.status,this.trailers]);return{method:this.method,requestHeaders:this.requestHeaders,request:this.request,headers:e,response:t,status:n,trailers:r}})}}})),bM=F((e=>{var t=e&&e.__awaiter||function(e,t,n,r){function i(e){return e instanceof n?e:new n(function(t){t(e)})}return new(n||=Promise)(function(n,a){function o(e){try{c(r.next(e))}catch(e){a(e)}}function s(e){try{c(r.throw(e))}catch(e){a(e)}}function c(e){e.done?n(e.value):i(e.value).then(o,s)}c((r=r.apply(e,t||[])).next())})};Object.defineProperty(e,`__esModule`,{value:!0}),e.ServerStreamingCall=void 0,e.ServerStreamingCall=class{constructor(e,t,n,r,i,a,o){this.method=e,this.requestHeaders=t,this.request=n,this.headers=r,this.responses=i,this.status=a,this.trailers=o}then(e,t){return this.promiseFinished().then(t=>e?Promise.resolve(e(t)):t,e=>t?Promise.resolve(t(e)):Promise.reject(e))}promiseFinished(){return t(this,void 0,void 0,function*(){let[e,t,n]=yield Promise.all([this.headers,this.status,this.trailers]);return{method:this.method,requestHeaders:this.requestHeaders,request:this.request,headers:e,status:t,trailers:n}})}}})),xM=F((e=>{var t=e&&e.__awaiter||function(e,t,n,r){function i(e){return e instanceof n?e:new n(function(t){t(e)})}return new(n||=Promise)(function(n,a){function o(e){try{c(r.next(e))}catch(e){a(e)}}function s(e){try{c(r.throw(e))}catch(e){a(e)}}function c(e){e.done?n(e.value):i(e.value).then(o,s)}c((r=r.apply(e,t||[])).next())})};Object.defineProperty(e,`__esModule`,{value:!0}),e.ClientStreamingCall=void 0,e.ClientStreamingCall=class{constructor(e,t,n,r,i,a,o){this.method=e,this.requestHeaders=t,this.requests=n,this.headers=r,this.response=i,this.status=a,this.trailers=o}then(e,t){return 
this.promiseFinished().then(t=>e?Promise.resolve(e(t)):t,e=>t?Promise.resolve(t(e)):Promise.reject(e))}promiseFinished(){return t(this,void 0,void 0,function*(){let[e,t,n,r]=yield Promise.all([this.headers,this.response,this.status,this.trailers]);return{method:this.method,requestHeaders:this.requestHeaders,headers:e,response:t,status:n,trailers:r}})}}})),SM=F((e=>{var t=e&&e.__awaiter||function(e,t,n,r){function i(e){return e instanceof n?e:new n(function(t){t(e)})}return new(n||=Promise)(function(n,a){function o(e){try{c(r.next(e))}catch(e){a(e)}}function s(e){try{c(r.throw(e))}catch(e){a(e)}}function c(e){e.done?n(e.value):i(e.value).then(o,s)}c((r=r.apply(e,t||[])).next())})};Object.defineProperty(e,`__esModule`,{value:!0}),e.DuplexStreamingCall=void 0,e.DuplexStreamingCall=class{constructor(e,t,n,r,i,a,o){this.method=e,this.requestHeaders=t,this.requests=n,this.headers=r,this.responses=i,this.status=a,this.trailers=o}then(e,t){return this.promiseFinished().then(t=>e?Promise.resolve(e(t)):t,e=>t?Promise.resolve(t(e)):Promise.reject(e))}promiseFinished(){return t(this,void 0,void 0,function*(){let[e,t,n]=yield Promise.all([this.headers,this.status,this.trailers]);return{method:this.method,requestHeaders:this.requestHeaders,headers:e,status:t,trailers:n}})}}})),CM=F((e=>{var t=e&&e.__awaiter||function(e,t,n,r){function i(e){return e instanceof n?e:new n(function(t){t(e)})}return new(n||=Promise)(function(n,a){function o(e){try{c(r.next(e))}catch(e){a(e)}}function s(e){try{c(r.throw(e))}catch(e){a(e)}}function c(e){e.done?n(e.value):i(e.value).then(o,s)}c((r=r.apply(e,t||[])).next())})};Object.defineProperty(e,`__esModule`,{value:!0}),e.TestTransport=void 0;let n=hM(),r=fM(),i=vM(),a=gM(),o=yM(),s=bM(),c=xM(),l=SM();var u=class e{constructor(e){this.suppressUncaughtRejections=!0,this.headerDelay=10,this.responseDelay=50,this.betweenResponseDelay=10,this.afterResponseDelay=10,this.data=e??{}}get sentMessages(){return this.lastInput instanceof 
f?this.lastInput.sent:typeof this.lastInput==`object`?[this.lastInput.single]:[]}get sendComplete(){return this.lastInput instanceof f?this.lastInput.completed:typeof this.lastInput==`object`}promiseHeaders(){let t=this.data.headers??e.defaultHeaders;return t instanceof n.RpcError?Promise.reject(t):Promise.resolve(t)}promiseSingleResponse(e){if(this.data.response instanceof n.RpcError)return Promise.reject(this.data.response);let t;return Array.isArray(this.data.response)?(r.assert(this.data.response.length>0),t=this.data.response[0]):t=this.data.response===void 0?e.O.create():this.data.response,r.assert(e.O.is(t)),Promise.resolve(t)}streamResponses(e,i,a){return t(this,void 0,void 0,function*(){let t=[];if(this.data.response===void 0)t.push(e.O.create());else if(Array.isArray(this.data.response))for(let n of this.data.response)r.assert(e.O.is(n)),t.push(n);else this.data.response instanceof n.RpcError||(r.assert(e.O.is(this.data.response)),t.push(this.data.response));try{yield d(this.responseDelay,a)(void 0)}catch(e){i.notifyError(e);return}if(this.data.response instanceof n.RpcError){i.notifyError(this.data.response);return}for(let e of t){i.notifyMessage(e);try{yield d(this.betweenResponseDelay,a)(void 0)}catch(e){i.notifyError(e);return}}if(this.data.status instanceof n.RpcError){i.notifyError(this.data.status);return}if(this.data.trailers instanceof n.RpcError){i.notifyError(this.data.trailers);return}i.notifyComplete()})}promiseStatus(){let t=this.data.status??e.defaultStatus;return t instanceof n.RpcError?Promise.reject(t):Promise.resolve(t)}promiseTrailers(){let t=this.data.trailers??e.defaultTrailers;return t instanceof n.RpcError?Promise.reject(t):Promise.resolve(t)}maybeSuppressUncaught(...e){if(this.suppressUncaughtRejections)for(let t of e)t.catch(()=>{})}mergeOptions(e){return a.mergeRpcOptions({},e)}unary(e,t,n){let 
r=n.meta??{},i=this.promiseHeaders().then(d(this.headerDelay,n.abort)),a=i.catch(e=>{}).then(d(this.responseDelay,n.abort)).then(t=>this.promiseSingleResponse(e)),s=a.catch(e=>{}).then(d(this.afterResponseDelay,n.abort)).then(e=>this.promiseStatus()),c=a.catch(e=>{}).then(d(this.afterResponseDelay,n.abort)).then(e=>this.promiseTrailers());return this.maybeSuppressUncaught(s,c),this.lastInput={single:t},new o.UnaryCall(e,r,t,i,a,s,c)}serverStreaming(e,t,n){let r=n.meta??{},a=this.promiseHeaders().then(d(this.headerDelay,n.abort)),o=new i.RpcOutputStreamController,c=a.then(d(this.responseDelay,n.abort)).catch(()=>{}).then(()=>this.streamResponses(e,o,n.abort)).then(d(this.afterResponseDelay,n.abort)),l=c.then(()=>this.promiseStatus()),u=c.then(()=>this.promiseTrailers());return this.maybeSuppressUncaught(l,u),this.lastInput={single:t},new s.ServerStreamingCall(e,r,t,a,o,l,u)}clientStreaming(e,t){let n=t.meta??{},r=this.promiseHeaders().then(d(this.headerDelay,t.abort)),i=r.catch(e=>{}).then(d(this.responseDelay,t.abort)).then(t=>this.promiseSingleResponse(e)),a=i.catch(e=>{}).then(d(this.afterResponseDelay,t.abort)).then(e=>this.promiseStatus()),o=i.catch(e=>{}).then(d(this.afterResponseDelay,t.abort)).then(e=>this.promiseTrailers());return this.maybeSuppressUncaught(a,o),this.lastInput=new f(this.data,t.abort),new c.ClientStreamingCall(e,n,this.lastInput,r,i,a,o)}duplex(e,t){let n=t.meta??{},r=this.promiseHeaders().then(d(this.headerDelay,t.abort)),a=new i.RpcOutputStreamController,o=r.then(d(this.responseDelay,t.abort)).catch(()=>{}).then(()=>this.streamResponses(e,a,t.abort)).then(d(this.afterResponseDelay,t.abort)),s=o.then(()=>this.promiseStatus()),c=o.then(()=>this.promiseTrailers());return this.maybeSuppressUncaught(s,c),this.lastInput=new f(this.data,t.abort),new l.DuplexStreamingCall(e,n,this.lastInput,r,a,s,c)}};e.TestTransport=u,u.defaultHeaders={responseHeader:`test`},u.defaultStatus={code:`OK`,detail:`all 
good`},u.defaultTrailers={responseTrailer:`test`};function d(e,t){return r=>new Promise((i,a)=>{if(t?.aborted)a(new n.RpcError(`user cancel`,`CANCELLED`));else{let o=setTimeout(()=>i(r),e);t&&t.addEventListener(`abort`,e=>{clearTimeout(o),a(new n.RpcError(`user cancel`,`CANCELLED`))})}})}var f=class{constructor(e,t){this._completed=!1,this._sent=[],this.data=e,this.abort=t}get sent(){return this._sent}get completed(){return this._completed}send(e){if(this.data.inputMessage instanceof n.RpcError)return Promise.reject(this.data.inputMessage);let t=this.data.inputMessage===void 0?10:this.data.inputMessage;return Promise.resolve(void 0).then(()=>{this._sent.push(e)}).then(d(t,this.abort))}complete(){if(this.data.inputComplete instanceof n.RpcError)return Promise.reject(this.data.inputComplete);let e=this.data.inputComplete===void 0?10:this.data.inputComplete;return Promise.resolve(void 0).then(()=>{this._completed=!0}).then(d(e,this.abort))}}})),wM=F((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.stackDuplexStreamingInterceptors=e.stackClientStreamingInterceptors=e.stackServerStreamingInterceptors=e.stackUnaryInterceptors=e.stackIntercept=void 0;let t=fM();function n(e,n,r,i,a){if(e==`unary`){let e=(e,t,r)=>n.unary(e,t,r);for(let t of(i.interceptors??[]).filter(e=>e.interceptUnary).reverse()){let n=e;e=(e,r,i)=>t.interceptUnary(n,e,r,i)}return e(r,a,i)}if(e==`serverStreaming`){let e=(e,t,r)=>n.serverStreaming(e,t,r);for(let t of(i.interceptors??[]).filter(e=>e.interceptServerStreaming).reverse()){let n=e;e=(e,r,i)=>t.interceptServerStreaming(n,e,r,i)}return e(r,a,i)}if(e==`clientStreaming`){let e=(e,t)=>n.clientStreaming(e,t);for(let t of(i.interceptors??[]).filter(e=>e.interceptClientStreaming).reverse()){let n=e;e=(e,r)=>t.interceptClientStreaming(n,e,r)}return e(r,i)}if(e==`duplex`){let e=(e,t)=>n.duplex(e,t);for(let t of(i.interceptors??[]).filter(e=>e.interceptDuplex).reverse()){let n=e;e=(e,r)=>t.interceptDuplex(n,e,r)}return 
e(r,i)}t.assertNever(e)}e.stackIntercept=n;function r(e,t,r,i){return n(`unary`,e,t,i,r)}e.stackUnaryInterceptors=r;function i(e,t,r,i){return n(`serverStreaming`,e,t,i,r)}e.stackServerStreamingInterceptors=i;function a(e,t,r){return n(`clientStreaming`,e,t,r)}e.stackClientStreamingInterceptors=a;function o(e,t,r){return n(`duplex`,e,t,r)}e.stackDuplexStreamingInterceptors=o})),TM=F((e=>{Object.defineProperty(e,`__esModule`,{value:!0}),e.ServerCallContextController=void 0,e.ServerCallContextController=class{constructor(e,t,n,r,i={code:`OK`,detail:``}){this._cancelled=!1,this._listeners=[],this.method=e,this.headers=t,this.deadline=n,this.trailers={},this._sendRH=r,this.status=i}notifyCancelled(){if(!this._cancelled){this._cancelled=!0;for(let e of this._listeners)e()}}sendResponseHeaders(e){this._sendRH(e)}get cancelled(){return this._cancelled}onCancel(e){let t=this._listeners;return t.push(e),()=>{let n=t.indexOf(e);n>=0&&t.splice(n,1)}}}})),EM=F((e=>{Object.defineProperty(e,`__esModule`,{value:!0});var t=mM();Object.defineProperty(e,`ServiceType`,{enumerable:!0,get:function(){return t.ServiceType}});var n=pM();Object.defineProperty(e,`readMethodOptions`,{enumerable:!0,get:function(){return n.readMethodOptions}}),Object.defineProperty(e,`readMethodOption`,{enumerable:!0,get:function(){return n.readMethodOption}}),Object.defineProperty(e,`readServiceOption`,{enumerable:!0,get:function(){return n.readServiceOption}});var r=hM();Object.defineProperty(e,`RpcError`,{enumerable:!0,get:function(){return r.RpcError}});var i=gM();Object.defineProperty(e,`mergeRpcOptions`,{enumerable:!0,get:function(){return i.mergeRpcOptions}});var a=vM();Object.defineProperty(e,`RpcOutputStreamController`,{enumerable:!0,get:function(){return a.RpcOutputStreamController}});var o=CM();Object.defineProperty(e,`TestTransport`,{enumerable:!0,get:function(){return o.TestTransport}});var s=_M();Object.defineProperty(e,`Deferred`,{enumerable:!0,get:function(){return 
s.Deferred}}),Object.defineProperty(e,`DeferredState`,{enumerable:!0,get:function(){return s.DeferredState}});var c=SM();Object.defineProperty(e,`DuplexStreamingCall`,{enumerable:!0,get:function(){return c.DuplexStreamingCall}});var l=xM();Object.defineProperty(e,`ClientStreamingCall`,{enumerable:!0,get:function(){return l.ClientStreamingCall}});var u=bM();Object.defineProperty(e,`ServerStreamingCall`,{enumerable:!0,get:function(){return u.ServerStreamingCall}});var d=yM();Object.defineProperty(e,`UnaryCall`,{enumerable:!0,get:function(){return d.UnaryCall}});var f=wM();Object.defineProperty(e,`stackIntercept`,{enumerable:!0,get:function(){return f.stackIntercept}}),Object.defineProperty(e,`stackDuplexStreamingInterceptors`,{enumerable:!0,get:function(){return f.stackDuplexStreamingInterceptors}}),Object.defineProperty(e,`stackClientStreamingInterceptors`,{enumerable:!0,get:function(){return f.stackClientStreamingInterceptors}}),Object.defineProperty(e,`stackServerStreamingInterceptors`,{enumerable:!0,get:function(){return f.stackServerStreamingInterceptors}}),Object.defineProperty(e,`stackUnaryInterceptors`,{enumerable:!0,get:function(){return f.stackUnaryInterceptors}});var p=TM();Object.defineProperty(e,`ServerCallContextController`,{enumerable:!0,get:function(){return p.ServerCallContextController}})}))(),$=fM();const DM=new class extends $.MessageType{constructor(){super(`github.actions.results.entities.v1.CacheScope`,[{no:1,name:`scope`,kind:`scalar`,T:9},{no:2,name:`permission`,kind:`scalar`,T:3}])}create(e){let t={scope:``,permission:`0`};return globalThis.Object.defineProperty(t,$.MESSAGE_TYPE,{enumerable:!1,value:this}),e!==void 0&&(0,$.reflectionMergePartial)(this,t,e),t}internalBinaryRead(e,t,n,r){let i=r??this.create(),a=e.pos+t;for(;e.posDM}])}create(e){let t={repositoryId:`0`,scope:[]};return globalThis.Object.defineProperty(t,$.MESSAGE_TYPE,{enumerable:!1,value:this}),e!==void 
0&&(0,$.reflectionMergePartial)(this,t,e),t}internalBinaryRead(e,t,n,r){let i=r??this.create(),a=e.pos+t;for(;e.posOM},{no:2,name:`key`,kind:`scalar`,T:9},{no:3,name:`version`,kind:`scalar`,T:9}])}create(e){let t={key:``,version:``};return globalThis.Object.defineProperty(t,$.MESSAGE_TYPE,{enumerable:!1,value:this}),e!==void 0&&(0,$.reflectionMergePartial)(this,t,e),t}internalBinaryRead(e,t,n,r){let i=r??this.create(),a=e.pos+t;for(;e.posOM},{no:2,name:`key`,kind:`scalar`,T:9},{no:3,name:`size_bytes`,kind:`scalar`,T:3},{no:4,name:`version`,kind:`scalar`,T:9}])}create(e){let t={key:``,sizeBytes:`0`,version:``};return globalThis.Object.defineProperty(t,$.MESSAGE_TYPE,{enumerable:!1,value:this}),e!==void 0&&(0,$.reflectionMergePartial)(this,t,e),t}internalBinaryRead(e,t,n,r){let i=r??this.create(),a=e.pos+t;for(;e.posOM},{no:2,name:`key`,kind:`scalar`,T:9},{no:3,name:`restore_keys`,kind:`scalar`,repeat:2,T:9},{no:4,name:`version`,kind:`scalar`,T:9}])}create(e){let t={key:``,restoreKeys:[],version:``};return globalThis.Object.defineProperty(t,$.MESSAGE_TYPE,{enumerable:!1,value:this}),e!==void 0&&(0,$.reflectionMergePartial)(this,t,e),t}internalBinaryRead(e,t,n,r){let i=r??this.create(),a=e.pos+t;for(;e.posAM.fromJson(e,{ignoreUnknownFields:!0}))}FinalizeCacheEntryUpload(e){let t=jM.toJson(e,{useProtoFieldName:!0,emitDefaultValues:!1});return this.rpc.request(`github.actions.results.api.v1.CacheService`,`FinalizeCacheEntryUpload`,`application/json`,t).then(e=>MM.fromJson(e,{ignoreUnknownFields:!0}))}GetCacheEntryDownloadURL(e){let t=NM.toJson(e,{useProtoFieldName:!0,emitDefaultValues:!1});return this.rpc.request(`github.actions.results.api.v1.CacheService`,`GetCacheEntryDownloadURL`,`application/json`,t).then(e=>PM.fromJson(e,{ignoreUnknownFields:!0}))}};function IM(e){if(e)try{let t=new URL(e).searchParams.get(`sig`);t&&(kr(t),kr(encodeURIComponent(t)))}catch(t){R(`Failed to parse URL: ${e} ${t instanceof Error?t.message:String(t)}`)}}function LM(e){if(typeof 
e!=`object`||!e){R(`body is not an object or is null`);return}`signed_upload_url`in e&&typeof e.signed_upload_url==`string`&&IM(e.signed_upload_url),`signed_download_url`in e&&typeof e.signed_download_url==`string`&&IM(e.signed_download_url)}var RM=function(e,t,n,r){function i(e){return e instanceof n?e:new n(function(t){t(e)})}return new(n||=Promise)(function(n,a){function o(e){try{c(r.next(e))}catch(e){a(e)}}function s(e){try{c(r.throw(e))}catch(e){a(e)}}function c(e){e.done?n(e.value):i(e.value).then(o,s)}c((r=r.apply(e,t||[])).next())})},zM=class{constructor(e,t,n,r){this.maxAttempts=5,this.baseRetryIntervalMilliseconds=3e3,this.retryMultiplier=1.5;let i=Ap();this.baseUrl=xj(),t&&(this.maxAttempts=t),n&&(this.baseRetryIntervalMilliseconds=n),r&&(this.retryMultiplier=r),this.httpClient=new Vn(e,[new Wn(i)])}request(e,t,n,r){return RM(this,void 0,void 0,function*(){let i=new URL(`/twirp/${e}/${t}`,this.baseUrl).href;R(`[Request] ${t} ${i}`);let a={"Content-Type":n};try{let{body:e}=yield this.retryableRequest(()=>RM(this,void 0,void 0,function*(){return this.httpClient.post(i,JSON.stringify(r),a)}));return e}catch(e){throw Error(`Failed to ${t}: ${e.message}`)}})}retryableRequest(e){return RM(this,void 0,void 0,function*(){let t=0,n=``,r=``;for(;t0&&Lr(`You've hit a rate limit, your rate limit will reset in ${t} seconds`)}throw new XA(`Rate limited: ${n}`)}}catch(e){if(e instanceof SyntaxError&&R(`Raw Body: ${r}`),e instanceof YA||e instanceof XA)throw e;if(JA.isNetworkErrorCode(e?.code))throw new JA(e?.code);i=!0,n=e.message}if(!i)throw Error(`Received non-retryable error: ${n}`);if(t+1===this.maxAttempts)throw Error(`Failed to make request after ${this.maxAttempts} attempts: ${n}`);let a=this.getExponentialRetryTimeMilliseconds(t);Rr(`Attempt ${t+1} of ${this.maxAttempts} failed with error: ${n}. 
Retrying request in ${a} ms...`),yield this.sleep(a),t++}throw Error(`Request failed`)})}isSuccessStatusCode(e){return e?e>=200&&e<300:!1}isRetryableHttpStatusCode(e){return e?[Nn.BadGateway,Nn.GatewayTimeout,Nn.InternalServerError,Nn.ServiceUnavailable].includes(e):!1}sleep(e){return RM(this,void 0,void 0,function*(){return new Promise(t=>setTimeout(t,e))})}getExponentialRetryTimeMilliseconds(e){if(e<0)throw Error(`attempt should be a positive integer`);if(e===0)return this.baseRetryIntervalMilliseconds;let t=this.baseRetryIntervalMilliseconds*this.retryMultiplier**+e,n=t*this.retryMultiplier;return Math.trunc(Math.random()*(n-t)+t)}};function BM(e){return new FM(new zM(wj(),e?.maxAttempts,e?.retryIntervalMs,e?.retryMultiplier))}var VM=function(e,t,n,r){function i(e){return e instanceof n?e:new n(function(t){t(e)})}return new(n||=Promise)(function(n,a){function o(e){try{c(r.next(e))}catch(e){a(e)}}function s(e){try{c(r.throw(e))}catch(e){a(e)}}function c(e){e.done?n(e.value):i(e.value).then(o,s)}c((r=r.apply(e,t||[])).next())})};const HM=process.platform===`win32`;function UM(){return VM(this,void 0,void 0,function*(){switch(process.platform){case`win32`:{let e=yield Dp(),t=hp;if(e)return{path:e,type:dp.GNU};if(s(t))return{path:t,type:dp.BSD};break}case`darwin`:{let e=yield _r(`gtar`,!1);return e?{path:e,type:dp.GNU}:{path:yield _r(`tar`,!0),type:dp.BSD}}default:break}return{path:yield _r(`tar`,!0),type:dp.GNU}})}function WM(e,t,n){return VM(this,arguments,void 0,function*(e,t,n,r=``){let 
i=[`"${e.path}"`],a=Ep(t),o=`cache.tar`,s=KM(),c=e.type===dp.BSD&&t!==up.Gzip&&HM;switch(n){case`create`:i.push(`--posix`,`-cf`,c?o:a.replace(RegExp(`\\${u.sep}`,`g`),`/`),`--exclude`,c?o:a.replace(RegExp(`\\${u.sep}`,`g`),`/`),`-P`,`-C`,s.replace(RegExp(`\\${u.sep}`,`g`),`/`),`--files-from`,_p);break;case`extract`:i.push(`-xf`,c?o:r.replace(RegExp(`\\${u.sep}`,`g`),`/`),`-P`,`-C`,s.replace(RegExp(`\\${u.sep}`,`g`),`/`));break;case`list`:i.push(`-tf`,c?o:r.replace(RegExp(`\\${u.sep}`,`g`),`/`),`-P`);break}if(e.type===dp.GNU)switch(process.platform){case`win32`:i.push(`--force-local`);break;case`darwin`:i.push(`--delay-directory-restore`);break}return i})}function GM(e,t){return VM(this,arguments,void 0,function*(e,t,n=``){let r,i=yield UM(),a=yield WM(i,e,t,n),o=t===`create`?yield JM(i,e):yield qM(i,e,n),s=i.type===dp.BSD&&e!==up.Gzip&&HM;return r=s&&t!==`create`?[[...o].join(` `),[...a].join(` `)]:[[...a].join(` `),[...o].join(` `)],s?r:[r.join(` `)]})}function KM(){return process.env.GITHUB_WORKSPACE??process.cwd()}function qM(e,t,n){return VM(this,void 0,void 0,function*(){let r=e.type===dp.BSD&&t!==up.Gzip&&HM;switch(t){case up.Zstd:return r?[`zstd -d --long=30 --force -o`,gp,n.replace(RegExp(`\\${u.sep}`,`g`),`/`)]:[`--use-compress-program`,HM?`"zstd -d --long=30"`:`unzstd --long=30`];case up.ZstdWithoutLong:return r?[`zstd -d --force -o`,gp,n.replace(RegExp(`\\${u.sep}`,`g`),`/`)]:[`--use-compress-program`,HM?`"zstd -d"`:`unzstd`];default:return[`-z`]}})}function JM(e,t){return VM(this,void 0,void 0,function*(){let n=Ep(t),r=e.type===dp.BSD&&t!==up.Gzip&&HM;switch(t){case up.Zstd:return r?[`zstd -T0 --long=30 --force -o`,n.replace(RegExp(`\\${u.sep}`,`g`),`/`),gp]:[`--use-compress-program`,HM?`"zstd -T0 --long=30"`:`zstdmt --long=30`];case up.ZstdWithoutLong:return r?[`zstd -T0 --force -o`,n.replace(RegExp(`\\${u.sep}`,`g`),`/`),gp]:[`--use-compress-program`,HM?`"zstd -T0"`:`zstdmt`];default:return[`-z`]}})}function YM(e,t){return VM(this,void 0,void 
0,function*(){for(let n of e)try{yield Tr(n,void 0,{cwd:t,env:Object.assign(Object.assign({},process.env),{MSYS:`winsymlinks:nativestrict`})})}catch(e){throw Error(`${n.split(` `)[0]} failed with error: ${e?.message}`)}})}function XM(e,t){return VM(this,void 0,void 0,function*(){yield YM(yield GM(t,`list`,e))})}function ZM(e,t){return VM(this,void 0,void 0,function*(){yield gr(KM()),yield YM(yield GM(t,`extract`,e))})}function QM(e,t,n){return VM(this,void 0,void 0,function*(){l(u.join(e,_p),t.join(`
`)),yield YM(yield GM(n,`create`),e)})}var $M=function(e,t,n,r){function i(e){return e instanceof n?e:new n(function(t){t(e)})}return new(n||=Promise)(function(n,a){function o(e){try{c(r.next(e))}catch(e){a(e)}}function s(e){try{c(r.throw(e))}catch(e){a(e)}}function c(e){e.done?n(e.value):i(e.value).then(o,s)}c((r=r.apply(e,t||[])).next())})},eN=class e extends Error{constructor(t){super(t),this.name=`ValidationError`,Object.setPrototypeOf(this,e.prototype)}},tN=class e extends Error{constructor(t){super(t),this.name=`ReserveCacheError`,Object.setPrototypeOf(this,e.prototype)}},nN=class e extends Error{constructor(t){super(t),this.name=`FinalizeCacheError`,Object.setPrototypeOf(this,e.prototype)}};function rN(e){if(!e||e.length===0)throw new eN(`Path Validation Error: At least one directory or file path is required`)}function iN(e){if(e.length>512)throw new eN(`Key Validation Error: ${e} cannot be larger than 512 characters.`);if(!/^[^,]*$/.test(e))throw new eN(`Key Validation Error: ${e} cannot contain commas.`)}function aN(e,t,n,r){return $M(this,arguments,void 0,function*(e,t,n,r,i=!1){let a=bj();switch(R(`Cache service version: ${a}`),rN(e),a){case`v2`:return yield sN(e,t,n,r,i);default:return yield oN(e,t,n,r,i)}})}function oN(e,t,n,r){return $M(this,arguments,void 0,function*(e,t,n,r,i=!1){n||=[];let a=[t,...n];if(R(`Resolved Keys:`),R(JSON.stringify(a)),a.length>10)throw new eN(`Key Validation Error: Keys are limited to a maximum of 10.`);for(let e of a)iN(e);let o=yield Tp(),s=``;try{let t=yield Aj(a,e,{compressionMethod:o,enableCrossOsArchive:i});if(!t?.archiveLocation)return;if(r?.lookupOnly)return Rr(`Lookup only - skipping download`),t.cacheKey;s=u.join(yield bp(),Ep(o)),R(`Archive Path: ${s}`),yield Mj(t.archiveLocation,s,r),Fr()&&(yield XM(s,o));let n=xp(s);return Rr(`Cache Size: ~${Math.round(n/(1024*1024))} MB (${n} B)`),yield ZM(s,o),Rr(`Cache restored successfully`),t.cacheKey}catch(e){let t=e;if(t.name===eN.name)throw e;t instanceof zn&&typeof 
t.statusCode==`number`&&t.statusCode>=500?Ir(`Failed to restore: ${e.message}`):Lr(`Failed to restore: ${e.message}`)}finally{try{yield Cp(s)}catch(e){R(`Failed to delete archive: ${e}`)}}})}function sN(e,t,n,r){return $M(this,arguments,void 0,function*(e,t,n,r,i=!1){r=Object.assign(Object.assign({},r),{useAzureSdk:!0}),n||=[];let a=[t,...n];if(R(`Resolved Keys:`),R(JSON.stringify(a)),a.length>10)throw new eN(`Key Validation Error: Keys are limited to a maximum of 10.`);for(let e of a)iN(e);let o=``;try{let s=BM(),c=yield Tp(),l={key:t,restoreKeys:n,version:kp(e,c,i)},d=yield s.GetCacheEntryDownloadURL(l);if(!d.ok){R(`Cache not found for version ${l.version} of keys: ${a.join(`, `)}`);return}if(l.key===d.matchedKey?Rr(`Cache hit for: ${d.matchedKey}`):Rr(`Cache hit for restore-key: ${d.matchedKey}`),r?.lookupOnly)return Rr(`Lookup only - skipping download`),d.matchedKey;o=u.join(yield bp(),Ep(c)),R(`Archive path: ${o}`),R(`Starting download of archive to: ${o}`),yield Mj(d.signedDownloadUrl,o,r);let f=xp(o);return Rr(`Cache Size: ~${Math.round(f/(1024*1024))} MB (${f} B)`),Fr()&&(yield XM(o,c)),yield ZM(o,c),Rr(`Cache restored successfully`),d.matchedKey}catch(e){let t=e;if(t.name===eN.name)throw e;t instanceof zn&&typeof t.statusCode==`number`&&t.statusCode>=500?Ir(`Failed to restore: ${e.message}`):Lr(`Failed to restore: ${e.message}`)}finally{try{o&&(yield Cp(o))}catch(e){R(`Failed to delete archive: ${e}`)}}})}function cN(e,t,n){return $M(this,arguments,void 0,function*(e,t,n,r=!1){let i=bj();switch(R(`Cache service version: ${i}`),rN(e),iN(t),i){case`v2`:return yield uN(e,t,n,r);default:return yield lN(e,t,n,r)}})}function lN(e,t,n){return $M(this,arguments,void 0,function*(e,t,n,r=!1){let i=yield Tp(),a=-1,o=yield Sp(e);if(R(`Cache Paths:`),R(`${JSON.stringify(o)}`),o.length===0)throw Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);let s=yield bp(),c=u.join(s,Ep(i));R(`Archive Path: 
${c}`);try{yield QM(s,o,i),Fr()&&(yield XM(c,i));let l=xp(c);if(R(`File Size: ${l}`),l>10737418240&&!yj())throw Error(`Cache size of ~${Math.round(l/(1024*1024))} MB (${l} B) is over the 10GB limit, not saving cache.`);R(`Reserving Cache`);let u=yield Nj(t,e,{compressionMethod:i,enableCrossOsArchive:r,cacheSize:l});if(u?.result?.cacheId)a=u?.result?.cacheId;else if(u?.statusCode===400)throw Error(u?.error?.message??`Cache size of ~${Math.round(l/(1024*1024))} MB (${l} B) is over the data cap limit, not saving cache.`);else throw new tN(`Unable to reserve cache with key ${t}, another job may be creating this cache. More details: ${u?.error?.message}`);R(`Saving Cache (ID: ${a})`),yield Rj(a,c,``,n)}catch(e){let t=e;if(t.name===eN.name)throw e;t.name===tN.name?Rr(`Failed to save: ${t.message}`):t instanceof zn&&typeof t.statusCode==`number`&&t.statusCode>=500?Ir(`Failed to save: ${t.message}`):Lr(`Failed to save: ${t.message}`)}finally{try{yield Cp(c)}catch(e){R(`Failed to delete archive: ${e}`)}}return a})}function uN(e,t,n){return $M(this,arguments,void 0,function*(e,t,n,r=!1){n=Object.assign(Object.assign({},n),{uploadChunkSize:64*1024*1024,uploadConcurrency:8,useAzureSdk:!0});let i=yield Tp(),a=BM(),o=-1,s=yield Sp(e);if(R(`Cache Paths:`),R(`${JSON.stringify(s)}`),s.length===0)throw Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`);let c=yield bp(),l=u.join(c,Ep(i));R(`Archive Path: ${l}`);try{yield QM(c,s,i),Fr()&&(yield XM(l,i));let u=xp(l);R(`File Size: ${u}`),n.archiveSizeBytes=u,R(`Reserving Cache`);let d=kp(e,i,r),f={key:t,version:d},p;try{let e=yield a.CreateCacheEntry(f);if(!e.ok)throw e.message&&Lr(`Cache reservation failed: ${e.message}`),Error(e.message||`Response was not ok`);p=e.signedUploadUrl}catch(e){throw R(`Failed to reserve cache: ${e}`),new tN(`Unable to reserve cache with key ${t}, another job may be creating this cache.`)}R(`Attempting to upload cache located at: 
${l}`),yield Rj(o,l,p,n);let m={key:t,version:d,sizeBytes:`${u}`},h=yield a.FinalizeCacheEntryUpload(m);if(R(`FinalizeCacheEntryUploadResponse: ${h.ok}`),!h.ok)throw h.message?new nN(h.message):Error(`Unable to finalize cache with key ${t}, another job may be finalizing this cache.`);o=parseInt(h.entryId)}catch(e){let t=e;if(t.name===eN.name)throw e;t.name===tN.name?Rr(`Failed to save: ${t.message}`):t.name===nN.name?Lr(t.message):t instanceof zn&&typeof t.statusCode==`number`&&t.statusCode>=500?Ir(`Failed to save: ${t.message}`):Lr(`Failed to save: ${t.message}`)}finally{try{yield Cp(l)}catch(e){R(`Failed to delete archive: ${e}`)}}return o})}async function dN(e){let t=Cd(e),n=Ed(e.cacheDependencyPath,t);if(!n){Lr(e.cacheDependencyPath?`No lock file found for cache-dependency-path: ${e.cacheDependencyPath}. Skipping cache restore.`:`No lock file found in project directory: ${t}. Skipping cache restore.`),Nr(`cache-hit`,!1);return}Rr(`Using lock file: ${n.path}`);let r=M(n.path);Rr(`Resolving dependency cache directory in: ${r}`);let i=await Od(n.type,r);if(!i.length){Lr(`No cache directories found for ${n.type} in ${r}. Skipping cache restore.`),Nr(`cache-hit`,!1);return}R(`Cache paths: ${i.join(`, `)}`),Vr(`CACHE_PATHS`,JSON.stringify(i));let a=process.env.RUNNER_OS||fe(),o=ue(),s=await _f(n.path);if(!s)throw Error(`Failed to generate hash for lock file: ${n.path}`);let c=`vite-plus-${a}-${o}-${n.type}-${s}`,l=[`vite-plus-${a}-${o}-${n.type}-`,`vite-plus-${a}-${o}-`];R(`Primary key: ${c}`),R(`Restore keys: ${l.join(`, `)}`),Vr(`CACHE_PRIMARY_KEY`,c);let u=await aN(i,c,l);u?(Rr(`Cache restored from key: ${u}`),Vr(`CACHE_MATCHED_KEY`,u),Nr(`cache-hit`,!0)):(Rr(`Cache not found`),Nr(`cache-hit`,!1))}async function fN(){let e=Hr(`CACHE_PRIMARY_KEY`),t=Hr(`CACHE_MATCHED_KEY`),n=Hr(`CACHE_PATHS`);if(!e){Rr(`No cache key found. Skipping cache save.`);return}if(!n){Rr(`No cache paths found. Skipping cache save.`);return}if(e===t){Rr(`Cache hit on primary key "${e}". 
Skipping save.`);return}let r=JSON.parse(n);if(!r.length){Rr(`Empty cache paths. Skipping cache save.`);return}try{if(await cN(r,e)===-1){Lr(`Cache save failed or was skipped.`);return}Rr(`Cache saved with key: ${e}`)}catch(e){Lr(`Failed to save cache: ${String(e)}`)}}function pN(e,t){let n=Sd(e,t||xd()),r;try{r=ie(n,`utf-8`)}catch{throw Error(`node-version-file not found: ${n}`)}let i=j(n),a;if(a=i===`.tool-versions`?gN(r):i===`package.json`?vN(r):mN(r),!a)throw Error(`No Node.js version found in ${e}`);return a=a.replace(/^v/i,``),Rr(`Resolved Node.js version '${a}' from ${e}`),a}function mN(e){for(let t of e.split(`
`)){let e=(t.includes(`#`)?t.slice(0,t.indexOf(`#`)):t).trim();if(e)return hN(e)}}function hN(e){let t=e.toLowerCase();return t===`node`||t===`stable`?`latest`:e}function gN(e){for(let t of e.split(`
-`)){let e=t.trim();if(!e||e.startsWith(`#`))continue;let[n,...r]=e.split(/\s+/);if(!(n!==`nodejs`&&n!==`node`)){for(let e of r)if(_N(e))return e}}}function _N(e){return!!e&&e!==`system`&&!e.startsWith(`ref:`)&&!e.startsWith(`path:`)}function vN(e){let t;try{t=JSON.parse(e)}catch{throw Error(`Failed to parse package.json: invalid JSON`)}let n=t.devEngines;if(n?.runtime){let e=yN(n.runtime);if(e)return e}let r=t.engines;if(r?.node&&typeof r.node==`string`)return r.node}function yN(e){let t=Array.isArray(e)?e:[e];for(let e of t)if(e?.name===`node`&&typeof e.version==`string`)return e.version}function bN(e,t){let n;try{n=new URL(e)}catch{throw Error(`Invalid registry-url: "${e}". Must be a valid URL.`)}xN(n.href.endsWith(`/`)?n.href:n.href+`/`,te(process.env.RUNNER_TEMP||process.cwd(),`.npmrc`),t)}function xN(e,t,n){n||new URL(e).hostname===`npm.pkg.github.com`&&(n=process.env.GITHUB_REPOSITORY_OWNER);let r=``;n&&(r=(n.startsWith(`@`)?n:`@`+n).toLowerCase()+`:`),R(`Setting auth in ${t}`);let i=e.replace(/^\w+:/,``).toLowerCase(),a=[];if(P(t)){let e=ie(t,`utf8`);for(let t of e.split(/\r?\n/)){let e=t.toLowerCase();e.startsWith(`${r}registry`)||e.startsWith(i)&&e.includes(`_authtoken`)||a.push(t)}}let o=e.replace(/^\w+:/,``)+":_authToken=${NODE_AUTH_TOKEN}",s=`${r}registry=${e}`;a.push(o,s),se(t,a.join(le)),Or(`NPM_CONFIG_USERCONFIG`,t),Or(`NODE_AUTH_TOKEN`,process.env.NODE_AUTH_TOKEN||`XXXXX-XXXXX-XXXXX-XXXXX`)}async function SN(e){Vr(`IS_POST`,`true`);let t=Cd(e),n=e.nodeVersion;!n&&e.nodeVersionFile&&(n=pN(e.nodeVersionFile,t)),await jd(e),n&&(Rr(`Setting up Node.js ${n} via vp env use...`),await Tr(`vp`,[`env`,`use`,n])),e.registryUrl&&bN(e.registryUrl,e.scope),e.cache&&await dN(e),e.runInstall.length>0&&await Fd(e),await CN(t)}async function CN(e){try{let t=(await Er(`vp`,[`--version`],{cwd:e,silent:!0})).stdout.trim();Rr(t);let n=t.match(/Global:\s*v?([\d.]+[^\s]*)/i)?.[1]||`unknown`;Vr(`INSTALLED_VERSION`,n),Nr(`version`,n)}catch(e){Lr(`Could not get vp version: 
${String(e)}`),Nr(`version`,`unknown`)}}async function wN(e){e.cache&&await fN()}async function TN(){let e=vd();Hr(`IS_POST`)===`true`?await wN(e):await SN(e)}TN().catch(e=>{console.error(e),Pr(e instanceof Error?e.message:String(e))});export{};
\ No newline at end of file
+`)){let e=t.trim();if(!e||e.startsWith(`#`))continue;let[n,...r]=e.split(/\s+/);if(!(n!==`nodejs`&&n!==`node`)){for(let e of r)if(_N(e))return e}}}function _N(e){return!!e&&e!==`system`&&!e.startsWith(`ref:`)&&!e.startsWith(`path:`)}function vN(e){let t;try{t=JSON.parse(e)}catch{throw Error(`Failed to parse package.json: invalid JSON`)}let n=t.devEngines;if(n?.runtime){let e=yN(n.runtime);if(e)return e}let r=t.engines;if(r?.node&&typeof r.node==`string`)return r.node}function yN(e){let t=Array.isArray(e)?e:[e];for(let e of t)if(e?.name===`node`&&typeof e.version==`string`)return e.version}function bN(){return te(process.env.RUNNER_TEMP||process.cwd(),`.npmrc`)}function xN(e){return e.replace(/^\w+:/,``)}function SN(e){return(xN(e)+`:_authtoken`).toLowerCase()}function CN(e){return`${xN(e)}:_authToken=\${NODE_AUTH_TOKEN}`}function wN(e){try{return ie(e,`utf8`)}catch(e){if(e.code===`ENOENT`)return;throw e}}function TN(e,t){let n;try{n=new URL(e)}catch{throw Error(`Invalid registry-url: "${e}". 
Must be a valid URL.`)}EN(n.href.endsWith(`/`)?n.href:n.href+`/`,bN(),t)}function EN(e,t,n){n||new URL(e).hostname===`npm.pkg.github.com`&&(n=process.env.GITHUB_REPOSITORY_OWNER);let r=``;n&&(r=(n.startsWith(`@`)?n:`@`+n).toLowerCase()+`:`),R(`Setting auth in ${t}`);let i=xN(e).toLowerCase(),a=[],o=wN(t);if(o!==void 0)for(let e of o.split(/\r?\n/)){let t=e.toLowerCase();t.startsWith(`${r}registry`)||t.startsWith(i)&&t.includes(`_authtoken`)||a.push(e)}a.push(CN(e),`${r}registry=${e}`),se(t,a.join(le)),Or(`NPM_CONFIG_USERCONFIG`,t),Or(`NODE_AUTH_TOKEN`,process.env.NODE_AUTH_TOKEN||`XXXXX-XXXXX-XXXXX-XXXXX`)}const DN=new Set([`GITHUB_TOKEN`]),ON=new Set([`PATH`,`HOME`,`USERPROFILE`,`TMPDIR`,`CI`]);function kN(e){return ON.has(e)||e.startsWith(`RUNNER_`)?!0:e.startsWith(`GITHUB_`)?!DN.has(e):!1}function AN(e){let t=new Set,n=new Set,r=new Set;for(let i of e.split(/\r?\n/)){let e=i.trim();if(!e||e.startsWith(`;`)||e.startsWith(`#`))continue;let a=e.indexOf(`=`);if(a<0)continue;let o=e.slice(0,a).trim().toLowerCase(),s=e.slice(a+1).trim();(o===`registry`||o.endsWith(`:registry`))&&(s.includes("${")||t.add(s.endsWith(`/`)?s:s+`/`)),o.startsWith(`//`)&&o.endsWith(`:_authtoken`)&&n.add(o);for(let e of s.matchAll(/\$\{(\w+)\}/g))r.add(e[1])}return{registriesNeedingAuth:[...t].filter(e=>!n.has(SN(e))),envVarRefs:r}}function jN(e){let t=bN(),n=new Set(e.map(SN)),r=wN(t),i=[...(r===void 0?[]:r.split(/\r?\n/)).filter(e=>{let t=e.indexOf(`=`);return t<=0?!0:!n.has(e.slice(0,t).trim().toLowerCase())}),...e.map(CN)].join(le);if(Or(`NPM_CONFIG_USERCONFIG`,t),r===i){R(`Supplemental .npmrc at ${t} already current`);return}se(t,i),Rr(`Wrote _authToken entries to ${t} for registries: ${e.join(`, `)}`)}function MN(e){let t=N(e,`.npmrc`),n=wN(t);if(n===void 0)return;let{registriesNeedingAuth:r,envVarRefs:i}=AN(n);process.env.NODE_AUTH_TOKEN&&r.length>0&&(jN(r),i.add(`NODE_AUTH_TOKEN`));let a=[...i].filter(e=>!kN(e)&&!!process.env[e]);if(a.length===0){R(`Project .npmrc at ${t}: no auth 
env vars to propagate`);return}Rr(`Detected project .npmrc at ${t}. Propagating auth env vars: ${a.join(`, `)}`);for(let e of a)Or(e,process.env[e])}async function NN(e){Vr(`IS_POST`,`true`);let t=Cd(e),n=e.nodeVersion;!n&&e.nodeVersionFile&&(n=pN(e.nodeVersionFile,t)),await jd(e),n&&(Rr(`Setting up Node.js ${n} via vp env use...`),await Tr(`vp`,[`env`,`use`,n])),e.registryUrl?TN(e.registryUrl,e.scope):MN(t),e.cache&&await dN(e),e.runInstall.length>0&&await Fd(e),await PN(t)}async function PN(e){try{let t=(await Er(`vp`,[`--version`],{cwd:e,silent:!0})).stdout.trim();Rr(t);let n=t.match(/Global:\s*v?([\d.]+[^\s]*)/i)?.[1]||`unknown`;Vr(`INSTALLED_VERSION`,n),Nr(`version`,n)}catch(e){Lr(`Could not get vp version: ${String(e)}`),Nr(`version`,`unknown`)}}async function FN(e){e.cache&&await fN()}async function IN(){let e=vd();Hr(`IS_POST`)===`true`?await FN(e):await NN(e)}IN().catch(e=>{console.error(e),Pr(e instanceof Error?e.message:String(e))});export{};
\ No newline at end of file
diff --git a/src/auth.test.ts b/src/auth.test.ts
index 4bd7eee..d494a24 100644
--- a/src/auth.test.ts
+++ b/src/auth.test.ts
@@ -1,11 +1,12 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vite-plus/test";
-import { join } from "node:path";
import { existsSync, readFileSync, writeFileSync } from "node:fs";
-import { configAuthentication } from "./auth.js";
-import { exportVariable } from "@actions/core";
+import { join } from "node:path";
+import { configAuthentication, propagateProjectNpmrcAuth } from "./auth.js";
+import { exportVariable, info } from "@actions/core";
vi.mock("@actions/core", () => ({
debug: vi.fn(),
+ info: vi.fn(),
exportVariable: vi.fn(),
}));
@@ -177,3 +178,262 @@ describe("configAuthentication", () => {
expect(exportVariable).toHaveBeenCalledWith("NODE_AUTH_TOKEN", "my-real-token");
});
});
+
+describe("propagateProjectNpmrcAuth", () => {
+ const runnerTemp = "/tmp/runner";
+ const projectDir = "/workspace/project";
+ const npmrcPath = join(projectDir, ".npmrc");
+ const supplementalPath = join(runnerTemp, ".npmrc");
+
+ function mockNpmrc(content: string, supplemental?: string): void {
+ vi.mocked(readFileSync).mockImplementation((p) => {
+ if (p === npmrcPath) return content;
+ if (p === supplementalPath && supplemental !== undefined) return supplemental;
+ const err = Object.assign(new Error("ENOENT"), { code: "ENOENT" });
+ throw err;
+ });
+ vi.mocked(existsSync).mockImplementation(
+ (p) => p === supplementalPath && supplemental !== undefined,
+ );
+ }
+
+ function mockNoNpmrc(): void {
+ vi.mocked(readFileSync).mockImplementation(() => {
+ const err = Object.assign(new Error("ENOENT"), { code: "ENOENT" });
+ throw err;
+ });
+ vi.mocked(existsSync).mockReturnValue(false);
+ }
+
+ beforeEach(() => {
+ vi.stubEnv("RUNNER_TEMP", runnerTemp);
+ });
+
+ afterEach(() => {
+ vi.unstubAllEnvs();
+ vi.resetAllMocks();
+ });
+
+ it("does nothing when there is no project .npmrc", () => {
+ mockNoNpmrc();
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(exportVariable).not.toHaveBeenCalled();
+ expect(writeFileSync).not.toHaveBeenCalled();
+ });
+
+ it("exports referenced env vars that are set in the environment", () => {
+ mockNpmrc("//npm.pkg.github.com/:_authToken=${NODE_AUTH_TOKEN}");
+ vi.stubEnv("NODE_AUTH_TOKEN", "my-real-token");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(exportVariable).toHaveBeenCalledWith("NODE_AUTH_TOKEN", "my-real-token");
+ expect(info).toHaveBeenCalledWith(expect.stringContaining(".npmrc"));
+ });
+
+ it("skips env vars that are not set", () => {
+ mockNpmrc("//npm.pkg.github.com/:_authToken=${NODE_AUTH_TOKEN}");
+ vi.stubEnv("NODE_AUTH_TOKEN", "");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(exportVariable).not.toHaveBeenCalled();
+ });
+
+ it("does not re-export PATH or HOME even if referenced", () => {
+ mockNpmrc("cache=${HOME}/.npm-cache");
+ vi.stubEnv("HOME", "/home/runner");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(exportVariable).not.toHaveBeenCalledWith("HOME", expect.anything());
+ });
+
+ it("blocks runner-managed GITHUB_* and RUNNER_* vars by default", () => {
+ mockNpmrc(
+ [
+ "tag=${GITHUB_REF}",
+ "agent=${RUNNER_NAME}",
+ "//npm.pkg.github.com/:_authToken=${NODE_AUTH_TOKEN}",
+ ].join("\n"),
+ );
+ vi.stubEnv("GITHUB_REF", "refs/heads/main");
+ vi.stubEnv("RUNNER_NAME", "runner-1");
+ vi.stubEnv("NODE_AUTH_TOKEN", "tok");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(exportVariable).not.toHaveBeenCalledWith("GITHUB_REF", expect.anything());
+ expect(exportVariable).not.toHaveBeenCalledWith("RUNNER_NAME", expect.anything());
+ expect(exportVariable).toHaveBeenCalledWith("NODE_AUTH_TOKEN", "tok");
+ });
+
+ it("allows GITHUB_TOKEN through as an auth token", () => {
+ mockNpmrc("//npm.pkg.github.com/:_authToken=${GITHUB_TOKEN}");
+ vi.stubEnv("GITHUB_TOKEN", "gh-token");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(exportVariable).toHaveBeenCalledWith("GITHUB_TOKEN", "gh-token");
+ });
+
+ it("exports all referenced auth-like env vars, deduping repeats", () => {
+ mockNpmrc(
+ [
+ "//npm.pkg.github.com/:_authToken=${GITHUB_TOKEN}",
+ "//registry.example.com/:_authToken=${NPM_TOKEN}",
+ "//other.example.com/:_authToken=${GITHUB_TOKEN}",
+ ].join("\n"),
+ );
+ vi.stubEnv("GITHUB_TOKEN", "gh-token");
+ vi.stubEnv("NPM_TOKEN", "npm-token");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(exportVariable).toHaveBeenCalledWith("GITHUB_TOKEN", "gh-token");
+ expect(exportVariable).toHaveBeenCalledWith("NPM_TOKEN", "npm-token");
+ const ghCalls = vi.mocked(exportVariable).mock.calls.filter((c) => c[0] === "GITHUB_TOKEN");
+ expect(ghCalls).toHaveLength(1);
+ });
+
+ it("rethrows non-ENOENT read errors", () => {
+ vi.mocked(readFileSync).mockImplementation(() => {
+ throw Object.assign(new Error("EACCES"), { code: "EACCES" });
+ });
+
+ expect(() => propagateProjectNpmrcAuth(projectDir)).toThrow("EACCES");
+ });
+
+ it("auto-writes _authToken for a scoped registry when NODE_AUTH_TOKEN is set", () => {
+ mockNpmrc("@myorg:registry=https://npm.pkg.github.com");
+ vi.stubEnv("NODE_AUTH_TOKEN", "ghp_xxx");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(writeFileSync).toHaveBeenCalledWith(
+ supplementalPath,
+ expect.stringContaining("//npm.pkg.github.com/:_authToken=${NODE_AUTH_TOKEN}"),
+ );
+ expect(exportVariable).toHaveBeenCalledWith("NPM_CONFIG_USERCONFIG", supplementalPath);
+ expect(exportVariable).toHaveBeenCalledWith("NODE_AUTH_TOKEN", "ghp_xxx");
+ });
+
+ it("auto-writes _authToken for the default registry", () => {
+ mockNpmrc("registry=https://registry.example.com");
+ vi.stubEnv("NODE_AUTH_TOKEN", "tok");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(writeFileSync).toHaveBeenCalledWith(
+ supplementalPath,
+ expect.stringContaining("//registry.example.com/:_authToken=${NODE_AUTH_TOKEN}"),
+ );
+ });
+
+ it("does not overwrite existing _authToken entries in the project .npmrc", () => {
+ mockNpmrc(
+ [
+ "@myorg:registry=https://npm.pkg.github.com",
+ "//npm.pkg.github.com/:_authToken=${GITHUB_TOKEN}",
+ ].join("\n"),
+ );
+ vi.stubEnv("NODE_AUTH_TOKEN", "ghp_xxx");
+ vi.stubEnv("GITHUB_TOKEN", "gh-token");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(writeFileSync).not.toHaveBeenCalled();
+ expect(exportVariable).toHaveBeenCalledWith("GITHUB_TOKEN", "gh-token");
+ });
+
+ it("does not write supplemental .npmrc when NODE_AUTH_TOKEN is not set", () => {
+ mockNpmrc("@myorg:registry=https://npm.pkg.github.com");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(writeFileSync).not.toHaveBeenCalled();
+ expect(exportVariable).not.toHaveBeenCalledWith("NPM_CONFIG_USERCONFIG", expect.anything());
+ });
+
+ it("writes _authToken for multiple missing registries", () => {
+ mockNpmrc(
+ ["@a:registry=https://one.example.com", "@b:registry=https://two.example.com"].join("\n"),
+ );
+ vi.stubEnv("NODE_AUTH_TOKEN", "tok");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ const written = vi.mocked(writeFileSync).mock.calls[0]![1] as string;
+ expect(written).toContain("//one.example.com/:_authToken=${NODE_AUTH_TOKEN}");
+ expect(written).toContain("//two.example.com/:_authToken=${NODE_AUTH_TOKEN}");
+ });
+
+ it("preserves unrelated lines already in RUNNER_TEMP/.npmrc", () => {
+ mockNpmrc(
+ "@myorg:registry=https://npm.pkg.github.com",
+ "always-auth=true\n//other.example.com/:_authToken=preserved",
+ );
+ vi.stubEnv("NODE_AUTH_TOKEN", "tok");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ const written = vi.mocked(writeFileSync).mock.calls[0]![1] as string;
+ expect(written).toContain("always-auth=true");
+ expect(written).toContain("//other.example.com/:_authToken=preserved");
+ expect(written).toContain("//npm.pkg.github.com/:_authToken=${NODE_AUTH_TOKEN}");
+ });
+
+ it("replaces stale _authToken for the same registry in RUNNER_TEMP/.npmrc", () => {
+ mockNpmrc(
+ "@myorg:registry=https://npm.pkg.github.com",
+ "//npm.pkg.github.com/:_authToken=old-value",
+ );
+ vi.stubEnv("NODE_AUTH_TOKEN", "tok");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ const written = vi.mocked(writeFileSync).mock.calls[0]![1] as string;
+ expect(written).not.toContain("old-value");
+ expect(written).toContain("//npm.pkg.github.com/:_authToken=${NODE_AUTH_TOKEN}");
+ });
+
+ it("skips the write on re-run when RUNNER_TEMP/.npmrc already matches", () => {
+ mockNpmrc(
+ "@myorg:registry=https://npm.pkg.github.com",
+ `//npm.pkg.github.com/:_authToken=\${NODE_AUTH_TOKEN}`,
+ );
+ vi.stubEnv("NODE_AUTH_TOKEN", "tok");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(writeFileSync).not.toHaveBeenCalled();
+ expect(exportVariable).toHaveBeenCalledWith("NPM_CONFIG_USERCONFIG", supplementalPath);
+ });
+
+ it("skips registries whose value contains ${VAR} (cannot synthesize a valid auth key)", () => {
+ mockNpmrc("@myorg:registry=${CUSTOM_REGISTRY}");
+ vi.stubEnv("NODE_AUTH_TOKEN", "tok");
+ vi.stubEnv("CUSTOM_REGISTRY", "https://npm.example.com");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(writeFileSync).not.toHaveBeenCalled();
+ expect(exportVariable).toHaveBeenCalledWith("CUSTOM_REGISTRY", "https://npm.example.com");
+ });
+
+ it("treats _authToken key case-insensitively when checking project .npmrc", () => {
+ mockNpmrc(
+ [
+ "@myorg:registry=https://npm.pkg.github.com",
+ "//npm.pkg.github.com/:_AUTHTOKEN=${NODE_AUTH_TOKEN}",
+ ].join("\n"),
+ );
+ vi.stubEnv("NODE_AUTH_TOKEN", "tok");
+
+ propagateProjectNpmrcAuth(projectDir);
+
+ expect(writeFileSync).not.toHaveBeenCalled();
+ });
+});
diff --git a/src/auth.ts b/src/auth.ts
index e1b8b62..fce7908 100644
--- a/src/auth.ts
+++ b/src/auth.ts
@@ -1,14 +1,41 @@
-import { existsSync, readFileSync, writeFileSync } from "node:fs";
+import { readFileSync, writeFileSync } from "node:fs";
import { EOL } from "node:os";
-import { resolve } from "node:path";
-import { debug, exportVariable } from "@actions/core";
+import { join, resolve } from "node:path";
+import { debug, exportVariable, info } from "@actions/core";
+
+// Literal written into `.npmrc`; pnpm/npm expand it against the env at install time.
+const NODE_AUTH_TOKEN_REF = "${NODE_AUTH_TOKEN}";
+
+function getRunnerNpmrcPath(): string {
+ return resolve(process.env.RUNNER_TEMP || process.cwd(), ".npmrc");
+}
+
+function stripProtocol(url: string): string {
+ return url.replace(/^\w+:/, "");
+}
+
+function authKeyFor(registryUrl: string): string {
+ return (stripProtocol(registryUrl) + ":_authtoken").toLowerCase();
+}
+
+function buildAuthLine(registryUrl: string): string {
+ return `${stripProtocol(registryUrl)}:_authToken=${NODE_AUTH_TOKEN_REF}`;
+}
+
+function readNpmrc(path: string): string | undefined {
+ try {
+ return readFileSync(path, "utf8");
+ } catch (err) {
+ if ((err as NodeJS.ErrnoException).code === "ENOENT") return undefined;
+ throw err;
+ }
+}
/**
* Configure npm registry authentication by writing a .npmrc file.
* Ported from actions/setup-node's authutil.ts.
*/
export function configAuthentication(registryUrl: string, scope?: string): void {
- // Validate and normalize the registry URL
let url: URL;
try {
url = new URL(registryUrl);
@@ -16,11 +43,8 @@ export function configAuthentication(registryUrl: string, scope?: string): void
throw new Error(`Invalid registry-url: "${registryUrl}". Must be a valid URL.`);
}
- // Ensure trailing slash
const normalizedUrl = url.href.endsWith("/") ? url.href : url.href + "/";
- const npmrc = resolve(process.env.RUNNER_TEMP || process.cwd(), ".npmrc");
-
- writeRegistryToFile(normalizedUrl, npmrc, scope);
+ writeRegistryToFile(normalizedUrl, getRunnerNpmrcPath(), scope);
}
function writeRegistryToFile(registryUrl: string, fileLocation: string, scope?: string): void {
@@ -39,25 +63,20 @@ function writeRegistryToFile(registryUrl: string, fileLocation: string, scope?:
debug(`Setting auth in ${fileLocation}`);
- // Compute the auth line prefix for filtering existing entries
- const authPrefix = registryUrl.replace(/^\w+:/, "").toLowerCase();
+ const authPrefix = stripProtocol(registryUrl).toLowerCase();
const lines: string[] = [];
- if (existsSync(fileLocation)) {
- const curContents = readFileSync(fileLocation, "utf8");
- for (const line of curContents.split(/\r?\n/)) {
+ const existing = readNpmrc(fileLocation);
+ if (existing !== undefined) {
+ for (const line of existing.split(/\r?\n/)) {
const lower = line.toLowerCase();
- // Remove existing registry and auth token lines for this scope/registry
if (lower.startsWith(`${scopePrefix}registry`)) continue;
if (lower.startsWith(authPrefix) && lower.includes("_authtoken")) continue;
lines.push(line);
}
}
- // Auth token line: remove protocol prefix from registry URL
- const authString = registryUrl.replace(/^\w+:/, "") + ":_authToken=${NODE_AUTH_TOKEN}";
- const registryString = `${scopePrefix}registry=${registryUrl}`;
- lines.push(authString, registryString);
+ lines.push(buildAuthLine(registryUrl), `${scopePrefix}registry=${registryUrl}`);
writeFileSync(fileLocation, lines.join(EOL));
@@ -65,3 +84,121 @@ function writeRegistryToFile(registryUrl: string, fileLocation: string, scope?:
// Export placeholder if NODE_AUTH_TOKEN is not set so npm doesn't error
exportVariable("NODE_AUTH_TOKEN", process.env.NODE_AUTH_TOKEN || "XXXXX-XXXXX-XXXXX-XXXXX");
}
+
+// GitHub-Actions-managed namespaces: re-exporting any of these via GITHUB_ENV
+// could clobber runner-provided values for subsequent steps. Block the whole
+// prefix by default; allow only vars that are legitimately passed as auth tokens.
+const RUNTIME_ENV_ALLOWLIST = new Set(["GITHUB_TOKEN"]);
+const ALWAYS_RESERVED = new Set(["PATH", "HOME", "USERPROFILE", "TMPDIR", "CI"]);
+
+function isReservedEnvVar(name: string): boolean {
+ if (ALWAYS_RESERVED.has(name)) return true;
+ if (name.startsWith("RUNNER_")) return true;
+ if (name.startsWith("GITHUB_")) return !RUNTIME_ENV_ALLOWLIST.has(name);
+ return false;
+}
+
+function analyzeProjectNpmrc(content: string): {
+ registriesNeedingAuth: string[];
+  envVarRefs: Set<string>;
+} {
+  const registries = new Set<string>();
+  const authKeys = new Set<string>();
+  const envVarRefs = new Set<string>();
+
+ for (const rawLine of content.split(/\r?\n/)) {
+ const line = rawLine.trim();
+ if (!line || line.startsWith(";") || line.startsWith("#")) continue;
+
+ const eq = line.indexOf("=");
+ if (eq < 0) continue;
+ const lowerKey = line.slice(0, eq).trim().toLowerCase();
+ const value = line.slice(eq + 1).trim();
+
+ if (lowerKey === "registry" || lowerKey.endsWith(":registry")) {
+ // Skip values that rely on env-var expansion — the key for the matching
+ // `_authToken` line must be a literal URL, and `${VAR}` isn't expanded
+ // inside `.npmrc` keys by npm/pnpm.
+ if (!value.includes("${")) {
+ registries.add(value.endsWith("/") ? value : value + "/");
+ }
+ }
+ if (lowerKey.startsWith("//") && lowerKey.endsWith(":_authtoken")) {
+ authKeys.add(lowerKey);
+ }
+ for (const m of value.matchAll(/\$\{(\w+)\}/g)) {
+ envVarRefs.add(m[1]!);
+ }
+ }
+
+ return {
+ registriesNeedingAuth: [...registries].filter((url) => !authKeys.has(authKeyFor(url))),
+ envVarRefs,
+ };
+}
+
+function writeSupplementalAuth(registries: string[]): void {
+ const npmrcPath = getRunnerNpmrcPath();
+ const authKeysToReplace = new Set(registries.map(authKeyFor));
+
+ const existing = readNpmrc(npmrcPath);
+ const existingLines = existing === undefined ? [] : existing.split(/\r?\n/);
+
+ const keepLines = existingLines.filter((line) => {
+ const eq = line.indexOf("=");
+ if (eq <= 0) return true;
+ return !authKeysToReplace.has(line.slice(0, eq).trim().toLowerCase());
+ });
+
+ const nextContent = [...keepLines, ...registries.map(buildAuthLine)].join(EOL);
+ exportVariable("NPM_CONFIG_USERCONFIG", npmrcPath);
+
+ if (existing === nextContent) {
+ debug(`Supplemental .npmrc at ${npmrcPath} already current`);
+ return;
+ }
+
+ writeFileSync(npmrcPath, nextContent);
+ info(`Wrote _authToken entries to ${npmrcPath} for registries: ${registries.join(", ")}`);
+}
+
+/**
+ * Handle auth for the project's existing `.npmrc` without requiring
+ * `registry-url` in the workflow.
+ *
+ * - If `.npmrc` declares a custom registry but no matching `_authToken` entry
+ * and `NODE_AUTH_TOKEN` is set, write a supplemental `_authToken=${NODE_AUTH_TOKEN}`
+ * line to `$RUNNER_TEMP/.npmrc` and point `NPM_CONFIG_USERCONFIG` at it, so the
+ *   repo `.npmrc` can be reduced to just `@scope:registry=`.
+ * - For any `${VAR}` references already in the project `.npmrc`, re-export those
+ * env vars via `GITHUB_ENV` so they remain visible to package-manager
+ * subprocesses and subsequent steps.
+ */
+export function propagateProjectNpmrcAuth(projectDir: string): void {
+ const npmrcPath = join(projectDir, ".npmrc");
+ const content = readNpmrc(npmrcPath);
+ if (content === undefined) return;
+
+ const { registriesNeedingAuth, envVarRefs } = analyzeProjectNpmrc(content);
+
+ if (process.env.NODE_AUTH_TOKEN && registriesNeedingAuth.length > 0) {
+ writeSupplementalAuth(registriesNeedingAuth);
+ envVarRefs.add("NODE_AUTH_TOKEN");
+ }
+
+ const propagatable = [...envVarRefs].filter(
+ (name) => !isReservedEnvVar(name) && !!process.env[name],
+ );
+
+ if (propagatable.length === 0) {
+ debug(`Project .npmrc at ${npmrcPath}: no auth env vars to propagate`);
+ return;
+ }
+
+ info(
+ `Detected project .npmrc at ${npmrcPath}. Propagating auth env vars: ${propagatable.join(", ")}`,
+ );
+ for (const name of propagatable) {
+ exportVariable(name, process.env[name]!);
+ }
+}
diff --git a/src/index.ts b/src/index.ts
index 4ac4148..a14e55d 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -8,7 +8,7 @@ import { saveCache } from "./cache-save.js";
import { State, Outputs } from "./types.js";
import type { Inputs } from "./types.js";
import { resolveNodeVersionFile } from "./node-version-file.js";
-import { configAuthentication } from "./auth.js";
+import { configAuthentication, propagateProjectNpmrcAuth } from "./auth.js";
import { getConfiguredProjectDir } from "./utils.js";
async function runMain(inputs: Inputs): Promise<void> {
@@ -31,9 +31,11 @@ async function runMain(inputs: Inputs): Promise<void> {
await exec("vp", ["env", "use", nodeVersion]);
}
- // Step 4: Configure registry authentication if specified
+ // Step 4: Configure registry authentication
if (inputs.registryUrl) {
configAuthentication(inputs.registryUrl, inputs.scope);
+ } else {
+ propagateProjectNpmrcAuth(projectDir);
}
// Step 5: Restore cache if enabled