9 Commits

Author SHA1 Message Date
jiriks74
5e336bf8e3 fix(ci): Docker is missing in small/medium act runners
Some checks failed
Auto-merge Dependabot / automerge (pull_request) Has been skipped
CI / build (pull_request) Failing after 2m14s
CI / test (built) (pull_request) Has been skipped
CI / test (committed) (pull_request) Has been skipped
CI / commentTestSuiteHelp (pull_request) Has been skipped
CI / package (pull_request) Has been skipped
2025-03-25 16:41:55 -04:00
aa035fd851 adjust ERROR_PR_ALREADY_EXISTS based on updated failure message
Some checks failed
CI / build (push) Failing after 4m14s
CI / test (built) (push) Has been skipped
CI / test (committed) (push) Has been skipped
CI / commentTestSuiteHelp (push) Has been skipped
CI / package (push) Has been skipped
Rebase Upstream / sync (push) Successful in 32s
Uses the error message below as a basis:

Create or update the pull request
Attempting creation of pull request
::error::pull request already exists for these targets [id: 30, issue_id: 27, head_repo_id: 1, base_repo_id: 1, head_branch: update-flake-lock, base_branch: main]
2025-03-04 09:42:42 -05:00
6b3a86bf8b CI
Some checks failed
CI / build (push) Failing after 45s
CI / test (built) (push) Has been skipped
CI / test (committed) (push) Has been skipped
CI / commentTestSuiteHelp (push) Has been skipped
CI / package (push) Has been skipped
Rebase Upstream / sync (push) Failing after 8s
Signed-off-by: ahuston-0 <aliceghuston@gmail.com>
2025-02-01 16:27:43 -05:00
afa1190aa1 format determineApiUrl code
Some checks failed
CI / build (push) Failing after 40s
CI / test (built) (push) Has been skipped
CI / test (committed) (push) Has been skipped
CI / commentTestSuiteHelp (push) Has been skipped
CI / package (push) Has been skipped
Signed-off-by: ahuston-0 <aliceghuston@gmail.com>
2025-02-01 16:01:59 -05:00
7741a3efa3 move to working folder
Some checks failed
CI / test (built) (push) Has been skipped
CI / build (push) Failing after 1m5s
CI / test (committed) (push) Has been skipped
CI / commentTestSuiteHelp (push) Has been skipped
CI / package (push) Has been skipped
Signed-off-by: ahuston-0 <aliceghuston@gmail.com>
2025-02-01 15:55:58 -05:00
3adf2066c5 add upstream sync
Signed-off-by: ahuston-0 <aliceghuston@gmail.com>
2025-02-01 15:55:58 -05:00
jiriks74
686c3a0061 chore: Build 2025-02-01 15:55:58 -05:00
jiriks74
6218c872ac feat: Handle API URLs for Forgejo, Gitea, GitLab and GitHub 2025-02-01 15:55:58 -05:00
jiriks74
1f2b960401 fix: Use the v1 api as v3 is not available on Forgejo. 2025-02-01 15:55:58 -05:00
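The branch's headline change is multi-forge API support: commit 6218c872ac adds API-URL handling for Forgejo, Gitea, GitLab and GitHub, and 1f2b960401 switches to the /api/v1 prefix because Forgejo does not expose GitHub's /api/v3. As a rough illustration only, and not the branch's actual determineApiUrl code, a helper along these lines could probe for the Gitea/Forgejo API and fall back to the GitHub Enterprise path; GitLab's /api/v4 would need a similar branch:

// Hypothetical sketch of forge API-URL detection; the endpoint prefixes are the
// documented defaults (/api/v1 for Gitea/Forgejo, /api/v3 for GitHub Enterprise
// Server, api.github.com for github.com). Requires Node 18+ for global fetch.
export async function determineApiUrl(serverUrl: string): Promise<string> {
  const origin = new URL(serverUrl).origin
  if (origin === 'https://github.com') {
    return 'https://api.github.com'
  }
  try {
    // Gitea and Forgejo answer on /api/v1/version; prefer v1 when it responds.
    const res = await fetch(`${origin}/api/v1/version`)
    if (res.ok) {
      return `${origin}/api/v1`
    }
  } catch {
    // Network errors fall through to the GitHub Enterprise style path.
  }
  return `${origin}/api/v3`
}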
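Commit aa035fd851 adjusts the ERROR_PR_ALREADY_EXISTS constant to match the wording Forgejo/Gitea return, quoted in its commit message. A minimal sketch of the kind of check such a constant feeds; the matched substring here is taken from the quoted Forgejo log, not from the commit itself:

// Sketch only: detect the "pull request already exists" failure by substring.
// The value below is assumed from the Forgejo error quoted in the commit
// message; the constant's exact value in aa035fd851 is not shown on this page.
const ERROR_PR_ALREADY_EXISTS = 'pull request already exists for'

function isPrAlreadyExistsError(error: unknown): boolean {
  return (
    error instanceof Error &&
    error.message.toLowerCase().includes(ERROR_PR_ALREADY_EXISTS)
  )
}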
17 changed files with 827 additions and 2192 deletions


@@ -3,27 +3,18 @@ updates:
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "monthly"
+      interval: "weekly"
       day: "tuesday"
     labels:
       - "dependencies"
-    groups:
-      github-actions:
-        applies-to: version-updates
-        patterns:
-          - "*"
   - package-ecosystem: "npm"
     directory: "/"
     schedule:
-      interval: "monthly"
+      interval: "weekly"
       day: "tuesday"
     ignore:
       - dependency-name: "*"
         update-types: ["version-update:semver-major"]
     labels:
       - "dependencies"
-    groups:
-      npm:
-        applies-to: version-updates
-        patterns:
-          - "*"


@@ -19,21 +19,23 @@ jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
-      - uses: actions/setup-node@v6
+      - uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
         with:
           node-version: 20.x
           cache: npm
+      - name: Install Docker
+        run: apt update && apt install docker.io -y
       - run: npm ci
       - run: npm run build
       - run: npm run format-check
       - run: npm run lint
       - run: npm run test
-      - uses: actions/upload-artifact@v5
+      - uses: actions/upload-artifact@v4
         with:
           name: dist
           path: dist
-      - uses: actions/upload-artifact@v5
+      - uses: actions/upload-artifact@v4
         with:
           name: action.yml
           path: action.yml
@@ -46,16 +48,16 @@ jobs:
       matrix:
         target: [built, committed]
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
        with:
          ref: main
       - if: matrix.target == 'built' || github.event_name == 'pull_request'
-        uses: actions/download-artifact@v6
+        uses: actions/download-artifact@v4
         with:
           name: dist
           path: dist
       - if: matrix.target == 'built' || github.event_name == 'pull_request'
-        uses: actions/download-artifact@v6
+        uses: actions/download-artifact@v4
         with:
           name: action.yml
           path: .
@@ -92,7 +94,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Find Comment
-        uses: peter-evans/find-comment@v4
+        uses: peter-evans/find-comment@v3
         id: fc
         with:
           issue-number: ${{ github.event.number }}
@@ -101,7 +103,7 @@
       - if: steps.fc.outputs.comment-id == ''
         name: Create comment
-        uses: peter-evans/create-or-update-comment@v5
+        uses: peter-evans/create-or-update-comment@v4
         with:
           issue-number: ${{ github.event.number }}
           body: |
@@ -118,8 +120,8 @@
     needs: [test]
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
-      - uses: actions/download-artifact@v6
+      - uses: actions/checkout@v4
+      - uses: actions/download-artifact@v4
         with:
           name: dist
           path: dist


@@ -6,7 +6,7 @@ jobs:
   createPullRequest:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
       - name: Make changes to pull request
         run: date +%s > report.txt
@@ -29,8 +29,8 @@
           labels: |
             report
             automated pr
-          assignees: retepsnave
-          reviewers: retepsnave
+          assignees: peter-evans
+          reviewers: peter-evans
           milestone: 1
           draft: false
           branch: example-patches
@@ -42,7 +42,7 @@
           echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"
       - name: Add reaction
-        uses: peter-evans/create-or-update-comment@v5
+        uses: peter-evans/create-or-update-comment@v4
         with:
           repository: ${{ github.event.client_payload.github.payload.repository.full_name }}
           comment-id: ${{ github.event.client_payload.github.payload.comment.id }}


@@ -7,7 +7,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Slash Command Dispatch
-        uses: peter-evans/slash-command-dispatch@v5
+        uses: peter-evans/slash-command-dispatch@v4
         with:
           token: ${{ secrets.ACTIONS_BOT_TOKEN }}
           config: >
@@ -18,6 +18,12 @@
                 "repository": "peter-evans/create-pull-request-tests",
                 "named_args": true
               },
+              {
+                "command": "testv5",
+                "permission": "admin",
+                "repository": "peter-evans/create-pull-request-tests",
+                "named_args": true
+              },
               {
                 "command": "clean",
                 "permission": "admin",


@@ -18,7 +18,7 @@ jobs:
   tag:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
        with:
          token: ${{ secrets.ACTIONS_BOT_TOKEN }}
          fetch-depth: 0


@@ -53,7 +53,7 @@ All inputs are **optional**. If not set, sensible defaults will be used.
 | `token` | The token that the action will use to create and update the pull request. See [token](#token). | `GITHUB_TOKEN` |
 | `branch-token` | The token that the action will use to create and update the branch. See [branch-token](#branch-token). | Defaults to the value of `token` |
 | `path` | Relative path under `GITHUB_WORKSPACE` to the repository. | `GITHUB_WORKSPACE` |
-| `add-paths` | A comma or newline-separated list of file paths to commit. Paths should follow git's [pathspec](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) syntax. See [Add specific paths](#add-specific-paths). | If no paths are specified, all new and modified files are added. |
+| `add-paths` | A comma or newline-separated list of file paths to commit. Paths should follow git's [pathspec](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) syntax. If no paths are specified, all new and modified files are added. See [Add specific paths](#add-specific-paths). | |
 | `commit-message` | The message to use when committing changes. See [commit-message](#commit-message). | `[create-pull-request] automated change` |
 | `committer` | The committer name and email address in the format `Display Name <email@address.com>`. Defaults to the GitHub Actions bot user on github.com. | `github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>` |
 | `author` | The author name and email address in the format `Display Name <email@address.com>`. Defaults to the user who triggered the workflow run. | `${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>` |

dist/790.index.js (vendored, 4 changed lines)

@@ -6,8 +6,8 @@ exports.modules = {
 /***/ 790:
 /***/ ((__unused_webpack_module, exports, __webpack_require__) => {
-[minified multipart-parser bundle from the previous build]
+[minified multipart-parser bundle from the regenerated build]
 /***/ })

dist/index.js (vendored, 703 changed lines)

File diff suppressed because one or more lines are too long


@@ -25,7 +25,7 @@ This document covers terminology, how the action works, general usage guidelines
 ## Terminology
 [Pull requests](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests#about-pull-requests) are proposed changes to a repository branch that can be reviewed by a repository's collaborators before being accepted or rejected.
 A pull request references two branches:
@@ -150,7 +150,7 @@ There are a number of workarounds with different pros and cons.
 - Use the default `GITHUB_TOKEN` and allow the action to create pull requests that have no checks enabled. Manually close pull requests and immediately reopen them. This will enable `on: pull_request` workflows to run and be added as checks. To prevent merging of pull requests without checks erroneously, use [branch protection rules](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests).
-- Create draft pull requests by setting the `draft: always-true` input, and configure your workflow to trigger `ready_for_review` in `on: pull_request`. The workflow will run when users manually click the "Ready for review" button on the draft pull requests. If the pull request is updated by the action, the `always-true` mode ensures that the pull request will be converted back to a draft.
+- Create draft pull requests by setting the `draft: always-true` input, and configure your workflow to trigger `on: ready_for_review`. The workflow will run when users manually click the "Ready for review" button on the draft pull requests. If the pull request is updated by the action, the `always-true` mode ensures that the pull request will be converted back to a draft.
 - Use a [Personal Access Token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) created on an account that has write access to the repository that pull requests are being created in. This is the standard workaround and [recommended by GitHub](https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow). It's advisable to use a dedicated [machine account](https://docs.github.com/en/github/site-policy/github-terms-of-service#3-account-requirements) that has collaborator access to the repository, rather than creating a PAT on a personal user account. Also note that because the account that owns the PAT will be the creator of pull requests, that user account will be unable to perform actions such as request changes or approve the pull request.
@@ -197,9 +197,8 @@ Checking out a branch from a different repository from where the workflow is exe
 Allowing the action to push with a configured deploy key will trigger `on: push` workflows. This makes it an alternative to using a PAT to trigger checks for pull requests.
 > [!NOTE]
-> - You cannot use deploy keys alone to [create a pull request in a remote repository](#creating-pull-requests-in-a-remote-repository) because then using a PAT would become a requirement.
+> You cannot use deploy keys alone to [create a pull request in a remote repository](#creating-pull-requests-in-a-remote-repository) because then using a PAT would become a requirement.
 > This method only makes sense if creating a pull request in the repository where the workflow is running.
-> - You cannot use deploy keys with [commit signature verification for bots](#commit-signature-verification-for-bots) (`sign-commits: true`).
 How to use SSH (deploy keys) with create-pull-request action:
@@ -272,7 +271,7 @@ The `token` input will then default to the repository's `GITHUB_TOKEN`, which wi
 The following is an example of pushing to a fork using GitHub App tokens.
 ```yaml
-  - uses: actions/create-github-app-token@v2
+  - uses: actions/create-github-app-token@v1
     id: generate-token
     with:
       app-id: ${{ secrets.APP_ID }}
@@ -319,7 +318,7 @@ GitHub App generated tokens can be configured with fine-grained permissions and
 ```yaml
 steps:
-  - uses: actions/create-github-app-token@v2
+  - uses: actions/create-github-app-token@v1
     id: generate-token
     with:
      app-id: ${{ secrets.APP_ID }}
@@ -342,7 +341,7 @@ For this case a token must be generated from the GitHub App installation of the
 In the following example, a pull request is being created in remote repo `owner/repo`.
 ```yaml
 steps:
-  - uses: actions/create-github-app-token@v2
+  - uses: actions/create-github-app-token@v1
     id: generate-token
     with:
      app-id: ${{ secrets.APP_ID }}
@@ -374,7 +373,7 @@ The action supports two methods to sign commits, [commit signature verification
 The action can sign commits as `github-actions[bot]` when using the repository's default `GITHUB_TOKEN`, or your own bot when using [GitHub App tokens](#authenticating-with-github-app-generated-tokens).
 > [!IMPORTANT]
 > - When setting `sign-commits: true` the action will ignore the `committer` and `author` inputs.
 > - If you attempt to use a [Personal Access Token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) the action will create the pull request, but commits will *not* be signed. Commit signing is only supported with bot generated tokens.
 > - The GitHub API has a 40MiB limit when creating git blobs. An error will be raised if there are files in the pull request larger than this. If you hit this limit, use [GPG commit signature verification](#gpg-commit-signature-verification) instead.
@@ -397,7 +396,7 @@ In this example, the `token` input is generated using a GitHub App. This will si
 steps:
   - uses: actions/checkout@v4
-  - uses: actions/create-github-app-token@v2
+  - uses: actions/create-github-app-token@v1
    id: generate-token
    with:
      app-id: ${{ secrets.APP_ID }}

package-lock.json (generated, 1948 changed lines)

File diff suppressed because it is too large


@@ -31,33 +31,33 @@
   "dependencies": {
     "@actions/core": "^1.11.1",
     "@actions/exec": "^1.1.1",
-    "@octokit/core": "^6.1.6",
-    "@octokit/plugin-paginate-rest": "^11.6.0",
-    "@octokit/plugin-rest-endpoint-methods": "^13.5.0",
-    "@octokit/plugin-throttling": "^9.6.1",
-    "node-fetch-native": "^1.6.7",
+    "@octokit/core": "^6.1.3",
+    "@octokit/plugin-paginate-rest": "^11.4.0",
+    "@octokit/plugin-rest-endpoint-methods": "^13.3.0",
+    "@octokit/plugin-throttling": "^9.4.0",
+    "node-fetch-native": "^1.6.6",
     "p-limit": "^6.2.0",
     "uuid": "^9.0.1"
   },
   "devDependencies": {
     "@types/jest": "^29.5.14",
-    "@types/node": "^18.19.130",
+    "@types/node": "^18.19.74",
     "@typescript-eslint/eslint-plugin": "^7.18.0",
     "@typescript-eslint/parser": "^7.18.0",
-    "@vercel/ncc": "^0.38.4",
+    "@vercel/ncc": "^0.38.3",
     "eslint": "^8.57.1",
-    "eslint-import-resolver-typescript": "^3.10.1",
+    "eslint-import-resolver-typescript": "^3.7.0",
     "eslint-plugin-github": "^4.10.2",
-    "eslint-plugin-import": "^2.32.0",
+    "eslint-plugin-import": "^2.31.0",
     "eslint-plugin-jest": "^27.9.0",
-    "eslint-plugin-prettier": "^5.5.4",
+    "eslint-plugin-prettier": "^5.2.3",
     "jest": "^29.7.0",
     "jest-circus": "^29.7.0",
     "jest-environment-jsdom": "^29.7.0",
-    "js-yaml": "^4.1.1",
-    "prettier": "^3.7.3",
-    "ts-jest": "^29.4.5",
-    "typescript": "^5.9.3",
-    "undici": "^6.22.0"
+    "js-yaml": "^4.1.0",
+    "prettier": "^3.4.2",
+    "ts-jest": "^29.2.5",
+    "typescript": "^5.7.3",
+    "undici": "^6.21.1"
   }
 }


@@ -19,7 +19,7 @@ export async function getWorkingBaseAndType(
 ): Promise<[string, WorkingBaseType]> {
   const symbolicRefResult = await git.exec(
     ['symbolic-ref', 'HEAD', '--short'],
-    {allowAllExitCodes: true}
+    true
   )
   if (symbolicRefResult.exitCode == 0) {
     // A ref is checked out
@@ -200,7 +200,7 @@ export async function createOrUpdateBranch(
   } else {
     aopts.push('-A')
   }
-  await git.exec(aopts, {allowAllExitCodes: true})
+  await git.exec(aopts, true)
   const popts = ['-m', commitMessage]
   if (signoff) {
     popts.push('--signoff')


@@ -123,14 +123,12 @@ export async function createPullRequest(inputs: Inputs): Promise<void> {
       `The 'base' and 'branch' for a pull request must be different branches. Unable to continue.`
     )
   }
-  if (utils.isSelfHosted()) {
-    // For self-hosted runners the repository state persists between runs.
-    // This command prunes the stale remote ref when the pull request branch was
-    // deleted after being merged or closed. Without this the push using
-    // '--force-with-lease' fails due to "stale info."
-    // https://github.com/peter-evans/create-pull-request/issues/633
-    await git.exec(['remote', 'prune', branchRemoteName])
-  }
+  // For self-hosted runners the repository state persists between runs.
+  // This command prunes the stale remote ref when the pull request branch was
+  // deleted after being merged or closed. Without this the push using
+  // '--force-with-lease' fails due to "stale info."
+  // https://github.com/peter-evans/create-pull-request/issues/633
+  await git.exec(['remote', 'prune', branchRemoteName])
   core.endGroup()
   // Apply the branch suffix if set
@@ -215,7 +213,6 @@
   const stashed = await git.stashPush(['--include-untracked'])
   await git.checkout(inputs.branch)
   const pushSignedCommitsResult = await ghBranch.pushSignedCommits(
-    git,
     result.branchCommits,
     result.baseCommit,
     repoPath,


@@ -2,7 +2,6 @@ import * as exec from '@actions/exec'
 import * as io from '@actions/io'
 import * as utils from './utils'
 import * as path from 'path'
-import stream, {Writable} from 'stream'
 const tagsRefSpec = '+refs/tags/*:refs/tags/*'
@@ -22,12 +21,6 @@ export type Commit = {
   unparsedChanges: string[]
 }
-export type ExecOpts = {
-  allowAllExitCodes?: boolean
-  encoding?: 'utf8' | 'base64'
-  suppressGitCmdOutput?: boolean
-}
 export class GitCommandManager {
   private gitPath: string
   private workingDirectory: string
@@ -73,7 +66,7 @@
       args.push(...options)
     }
-    return await this.exec(args, {allowAllExitCodes: allowAllExitCodes})
+    return await this.exec(args, allowAllExitCodes)
   }
   async commit(
@@ -89,7 +82,7 @@
       args.push(...options)
     }
-    return await this.exec(args, {allowAllExitCodes: allowAllExitCodes})
+    return await this.exec(args, allowAllExitCodes)
   }
   async config(
@@ -120,7 +113,7 @@
         configKey,
         configValue
       ],
-      {allowAllExitCodes: true}
+      true
     )
     return output.exitCode === 0
   }
@@ -163,20 +156,17 @@
   async getCommit(ref: string): Promise<Commit> {
     const endOfBody = '###EOB###'
-    const output = await this.exec(
-      [
-        '-c',
-        'core.quotePath=false',
-        'show',
-        '--raw',
-        '--cc',
-        '--no-renames',
-        '--no-abbrev',
-        `--format=%H%n%T%n%P%n%G?%n%s%n%b%n${endOfBody}`,
-        ref
-      ],
-      {suppressGitCmdOutput: true}
-    )
+    const output = await this.exec([
+      '-c',
+      'core.quotePath=false',
+      'show',
+      '--raw',
+      '--cc',
+      '--no-renames',
+      '--no-abbrev',
+      `--format=%H%n%T%n%P%n%G?%n%s%n%b%n${endOfBody}`,
+      ref
+    ])
     const lines = output.stdout.split('\n')
     const endOfBodyIndex = lines.lastIndexOf(endOfBody)
     const detailLines = lines.slice(0, endOfBodyIndex)
@@ -232,7 +222,7 @@
     if (options) {
       args.push(...options)
     }
-    const output = await this.exec(args, {allowAllExitCodes: true})
+    const output = await this.exec(args, true)
     return output.exitCode === 1
   }
@@ -288,15 +278,6 @@
     return output.stdout.trim()
   }
-  async showFileAtRefBase64(ref: string, path: string): Promise<string> {
-    const args = ['show', `${ref}:${path}`]
-    const output = await this.exec(args, {
-      encoding: 'base64',
-      suppressGitCmdOutput: true
-    })
-    return output.stdout.trim()
-  }
   async stashPush(options?: string[]): Promise<boolean> {
     const args = ['stash', 'push']
     if (options) {
@@ -345,7 +326,7 @@
         configKey,
         configValue
       ],
-      {allowAllExitCodes: true}
+      true
     )
     return output.exitCode === 0
   }
@@ -353,7 +334,7 @@
   async tryGetRemoteUrl(): Promise<string> {
     const output = await this.exec(
       ['config', '--local', '--get', 'remote.origin.url'],
-      {allowAllExitCodes: true}
+      true
     )
     if (output.exitCode !== 0) {
@@ -368,30 +349,16 @@
     return stdout
   }
-  async exec(
-    args: string[],
-    {
-      encoding = 'utf8',
-      allowAllExitCodes = false,
-      suppressGitCmdOutput = false
-    }: ExecOpts = {}
-  ): Promise<GitOutput> {
+  async exec(args: string[], allowAllExitCodes = false): Promise<GitOutput> {
     const result = new GitOutput()
-    if (process.env['CPR_SHOW_GIT_CMD_OUTPUT']) {
-      // debug mode overrides the suppressGitCmdOutput option
-      suppressGitCmdOutput = false
-    }
     const env = {}
     for (const key of Object.keys(process.env)) {
       env[key] = process.env[key]
     }
-    const stdout: Buffer[] = []
-    let stdoutLength = 0
-    const stderr: Buffer[] = []
-    let stderrLength = 0
+    const stdout: string[] = []
+    const stderr: string[] = []
     const options = {
       cwd: this.workingDirectory,
@@ -399,21 +366,17 @@
       ignoreReturnCode: allowAllExitCodes,
       listeners: {
         stdout: (data: Buffer) => {
-          stdout.push(data)
-          stdoutLength += data.length
+          stdout.push(data.toString())
         },
         stderr: (data: Buffer) => {
-          stderr.push(data)
-          stderrLength += data.length
+          stderr.push(data.toString())
         }
-      },
-      outStream: outStreamHandler(process.stdout, suppressGitCmdOutput),
-      errStream: outStreamHandler(process.stderr, suppressGitCmdOutput)
+      }
     }
     result.exitCode = await exec.exec(`"${this.gitPath}"`, args, options)
-    result.stdout = Buffer.concat(stdout, stdoutLength).toString(encoding)
-    result.stderr = Buffer.concat(stderr, stderrLength).toString(encoding)
+    result.stdout = stdout.join('')
+    result.stderr = stderr.join('')
     return result
   }
 }
@@ -423,24 +386,3 @@
   stderr = ''
   exitCode = 0
 }
-const outStreamHandler = (
-  outStream: Writable,
-  suppressGitCmdOutput: boolean
-): Writable => {
-  return new stream.Writable({
-    write(chunk, _, next) {
-      if (suppressGitCmdOutput) {
-        const lines = chunk.toString().trimEnd().split('\n')
-        for (const line of lines) {
-          if (line.startsWith('[command]')) {
-            outStream.write(`${line}\n`)
-          }
-        }
-      } else {
-        outStream.write(chunk)
-      }
-      next()
-    }
-  })
-}


@@ -22,7 +22,6 @@ export class GitConfigHelper {
   private extraheaderConfigPlaceholderValue = 'AUTHORIZATION: basic ***'
   private extraheaderConfigValueRegex = '^AUTHORIZATION:'
   private persistedExtraheaderConfigValue = ''
-  private backedUpCredentialFiles: string[] = []
   private constructor(git: GitCommandManager) {
     this.git = git
@@ -122,15 +121,11 @@
   async savePersistedAuth(): Promise<void> {
     const serverUrl = new URL(`https://${this.getGitRemote().hostname}`)
     this.extraheaderConfigKey = `http.${serverUrl.origin}/.extraheader`
-    // Backup checkout@v6 credential files if they exist
-    await this.hideCredentialFiles()
     // Save and unset persisted extraheader credential in git config if it exists
     this.persistedExtraheaderConfigValue = await this.getAndUnset()
   }
   async restorePersistedAuth(): Promise<void> {
-    // Restore checkout@v6 credential files if they were backed up
-    await this.unhideCredentialFiles()
     if (this.persistedExtraheaderConfigValue) {
       try {
         await this.setExtraheaderConfig(this.persistedExtraheaderConfigValue)
@@ -174,51 +169,6 @@
     )
   }
-  private async hideCredentialFiles(): Promise<void> {
-    // Temporarily hide checkout@v6 credential files to avoid duplicate auth headers
-    const runnerTemp = process.env['RUNNER_TEMP']
-    if (!runnerTemp) {
-      return
-    }
-    try {
-      const files = await fs.promises.readdir(runnerTemp)
-      for (const file of files) {
-        if (file.startsWith('git-credentials-') && file.endsWith('.config')) {
-          const sourcePath = path.join(runnerTemp, file)
-          const backupPath = `${sourcePath}.bak`
-          await fs.promises.rename(sourcePath, backupPath)
-          this.backedUpCredentialFiles.push(backupPath)
-          core.info(
-            `Temporarily hiding checkout credential file: ${file} (will be restored after)`
-          )
-        }
-      }
-    } catch (e) {
-      // If directory doesn't exist or we can't read it, just continue
-      core.debug(
-        `Could not backup credential files: ${utils.getErrorMessage(e)}`
-      )
-    }
-  }
-  private async unhideCredentialFiles(): Promise<void> {
-    // Restore checkout@v6 credential files that were backed up
-    for (const backupPath of this.backedUpCredentialFiles) {
-      try {
-        const originalPath = backupPath.replace(/\.bak$/, '')
-        await fs.promises.rename(backupPath, originalPath)
-        const fileName = path.basename(originalPath)
-        core.info(`Restored checkout credential file: ${fileName}`)
-      } catch (e) {
-        core.warning(
-          `Failed to restore credential file ${backupPath}: ${utils.getErrorMessage(e)}`
-        )
-      }
-    }
-    this.backedUpCredentialFiles = []
-  }
   private async getAndUnset(): Promise<string> {
     let configValue = ''
     // Save and unset persisted extraheader credential in git config if it exists


@@ -1,6 +1,6 @@
 import * as core from '@actions/core'
 import {Inputs} from './create-pull-request'
-import {Commit, GitCommandManager} from './git-command-manager'
+import {Commit} from './git-command-manager'
 import {Octokit, OctokitOptions, throttleOptions} from './octokit-client'
 import pLimit from 'p-limit'
 import * as utils from './utils'
@@ -97,50 +97,6 @@ export class GitHubHelper {
     }
   }
-  private async getPullNumber(
-    baseRepository: string,
-    headBranch: string,
-    baseBranch: string
-  ): Promise<number> {
-    const {data: pulls} = await this.octokit.rest.pulls.list({
-      ...this.parseRepository(baseRepository),
-      state: 'open',
-      head: headBranch,
-      base: baseBranch
-    })
-    let pullNumber: number | undefined = undefined
-    if (pulls?.length === 0 || pulls === null || pulls === undefined) {
-      // This is a fallback due to a bug that affects the list endpoint when called on forks with the same owner as the repository parent.
-      core.info(
-        `Pull request not found via list endpoint; attempting fallback mechanism`
-      )
-      for await (const response of this.octokit.paginate.iterator(
-        this.octokit.rest.pulls.list,
-        {
-          ...this.parseRepository(baseRepository),
-          state: 'open',
-          base: baseBranch
-        }
-      )) {
-        const existingPull = response.data.find(
-          pull => pull.head.label === headBranch
-        )
-        if (existingPull !== undefined) {
-          pullNumber = existingPull.number
-          break
-        }
-      }
-    } else {
-      pullNumber = pulls[0].number
-    }
-    if (pullNumber === undefined) {
-      throw new Error(
-        `Failed to find pull request number for branch ${headBranch}`
-      )
-    }
-    return pullNumber
-  }
   private async createOrUpdate(
     inputs: Inputs,
     baseRepository: string,
@@ -191,15 +147,16 @@
       // Update the pull request that exists for this branch and base
       core.info(`Fetching existing pull request`)
-      const pullNumber = await this.getPullNumber(
-        baseRepository,
-        headBranch,
-        inputs.base
-      )
+      const {data: pulls} = await this.octokit.rest.pulls.list({
+        ...this.parseRepository(baseRepository),
+        state: 'open',
+        head: headBranch,
+        base: inputs.base
+      })
       core.info(`Attempting update of pull request`)
       const {data: pull} = await this.octokit.rest.pulls.update({
         ...this.parseRepository(baseRepository),
-        pull_number: pullNumber,
+        pull_number: pulls[0].number,
         title: inputs.title,
         body: inputs.body
       })
@@ -297,7 +254,6 @@
   }
   async pushSignedCommits(
-    git: GitCommandManager,
     branchCommits: Commit[],
     baseCommit: Commit,
     repoPath: string,
@@ -311,7 +267,6 @@
     }
     for (const commit of branchCommits) {
       headCommit = await this.createCommit(
-        git,
         commit,
         headCommit,
         repoPath,
@@ -323,7 +278,6 @@
   }
   private async createCommit(
-    git: GitCommandManager,
     commit: Commit,
     parentCommit: CommitResponse,
     repoPath: string,
@@ -349,10 +303,10 @@
       let sha: string | null = null
       if (status === 'A' || status === 'M') {
         try {
-          const {data: blob} = await blobCreationLimit(async () =>
+          const {data: blob} = await blobCreationLimit(() =>
             this.octokit.rest.git.createBlob({
               ...repository,
-              content: await git.showFileAtRefBase64(commit.sha, path),
+              content: utils.readFileBase64([repoPath, path]),
               encoding: 'base64'
             })
           )

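The ./utils hunk below adds the readFileBase64 helper that the github-helper change above calls in place of git.showFileAtRefBase64. It reads the file from the working tree and, for symlinks, base64-encodes the link target rather than the linked file's contents, which matches how git stores a symlink blob. A hypothetical call for a changed file under the checked-out repository path:

import * as utils from './utils'

// Hypothetical usage: encode a changed working-tree file as base64 so it can
// be passed to octokit.rest.git.createBlob with encoding: 'base64'.
// 'docs/report.txt' is an illustrative path, not one taken from this diff.
const repoPath = process.env['GITHUB_WORKSPACE'] ?? '.'
const contentBase64 = utils.readFileBase64([repoPath, 'docs/report.txt'])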

@@ -126,6 +126,16 @@ export function readFile(path: string): string {
   return fs.readFileSync(path, 'utf-8')
 }
+export function readFileBase64(pathParts: string[]): string {
+  const resolvedPath = path.resolve(...pathParts)
+  if (fs.lstatSync(resolvedPath).isSymbolicLink()) {
+    return fs
+      .readlinkSync(resolvedPath, {encoding: 'buffer'})
+      .toString('base64')
+  }
+  return fs.readFileSync(resolvedPath).toString('base64')
+}
 /* eslint-disable @typescript-eslint/no-explicit-any */
 function hasErrorCode(error: any): error is {code: string} {
   return typeof (error && error.code) === 'string'
@@ -135,8 +145,3 @@ export function getErrorMessage(error: unknown) {
   if (error instanceof Error) return error.message
   return String(error)
 }
-export const isSelfHosted = (): boolean =>
-  process.env['RUNNER_ENVIRONMENT'] !== 'github-hosted' &&
-  (process.env['AGENT_ISSELFHOSTED'] === '1' ||
-    process.env['AGENT_ISSELFHOSTED'] === undefined)