Compare commits
9 commits: main...feature/re

| Author | SHA1 | Date |
|---|---|---|
| | 5e336bf8e3 | |
| | aa035fd851 | |
| | 6b3a86bf8b | |
| | afa1190aa1 | |
| | 7741a3efa3 | |
| | 3adf2066c5 | |
| | 686c3a0061 | |
| | 6218c872ac | |
| | 1f2b960401 | |
.github/dependabot.yml (15 changes, vendored)
@@ -3,27 +3,18 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
interval: "weekly"
day: "tuesday"
labels:
- "dependencies"
groups:
github-actions:
applies-to: version-updates
patterns:
- "*"

- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "monthly"
interval: "weekly"
day: "tuesday"
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
labels:
- "dependencies"
groups:
npm:
applies-to: version-updates
patterns:
- "*"
.github/workflows/ci.yml (24 changes, vendored)
@@ -19,21 +19,23 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/setup-node@v6
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 20.x
cache: npm
- name: Install Docker
run: apt update && apt install docker.io -y
- run: npm ci
- run: npm run build
- run: npm run format-check
- run: npm run lint
- run: npm run test
- uses: actions/upload-artifact@v5
- uses: actions/upload-artifact@v4
with:
name: dist
path: dist
- uses: actions/upload-artifact@v5
- uses: actions/upload-artifact@v4
with:
name: action.yml
path: action.yml
@@ -46,16 +48,16 @@ jobs:
matrix:
target: [built, committed]
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v4
with:
ref: main
- if: matrix.target == 'built' || github.event_name == 'pull_request'
uses: actions/download-artifact@v6
uses: actions/download-artifact@v4
with:
name: dist
path: dist
- if: matrix.target == 'built' || github.event_name == 'pull_request'
uses: actions/download-artifact@v6
uses: actions/download-artifact@v4
with:
name: action.yml
path: .
@@ -92,7 +94,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Find Comment
uses: peter-evans/find-comment@v4
uses: peter-evans/find-comment@v3
id: fc
with:
issue-number: ${{ github.event.number }}
@@ -101,7 +103,7 @@ jobs:
- if: steps.fc.outputs.comment-id == ''
name: Create comment
uses: peter-evans/create-or-update-comment@v5
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ github.event.number }}
body: |
@@ -118,8 +120,8 @@ jobs:
needs: [test]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/download-artifact@v6
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
name: dist
path: dist
.github/workflows/cpr-example-command.yml (8 changes, vendored)
@@ -6,7 +6,7 @@ jobs:
createPullRequest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v4

- name: Make changes to pull request
run: date +%s > report.txt
@@ -29,8 +29,8 @@ jobs:
labels: |
report
automated pr
assignees: retepsnave
reviewers: retepsnave
assignees: peter-evans
reviewers: peter-evans
milestone: 1
draft: false
branch: example-patches
@@ -42,7 +42,7 @@ jobs:
echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}"

- name: Add reaction
uses: peter-evans/create-or-update-comment@v5
uses: peter-evans/create-or-update-comment@v4
with:
repository: ${{ github.event.client_payload.github.payload.repository.full_name }}
comment-id: ${{ github.event.client_payload.github.payload.comment.id }}
.github/workflows/slash-command-dispatch.yml (8 changes, vendored)
@@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Slash Command Dispatch
uses: peter-evans/slash-command-dispatch@v5
uses: peter-evans/slash-command-dispatch@v4
with:
token: ${{ secrets.ACTIONS_BOT_TOKEN }}
config: >
@@ -18,6 +18,12 @@ jobs:
"repository": "peter-evans/create-pull-request-tests",
"named_args": true
},
{
"command": "testv5",
"permission": "admin",
"repository": "peter-evans/create-pull-request-tests",
"named_args": true
},
{
"command": "clean",
"permission": "admin",
.github/workflows/update-major-version.yml (2 changes, vendored)
@@ -18,7 +18,7 @@ jobs:
tag:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v4
with:
token: ${{ secrets.ACTIONS_BOT_TOKEN }}
fetch-depth: 0
@@ -53,7 +53,7 @@ All inputs are **optional**. If not set, sensible defaults will be used.
| `token` | The token that the action will use to create and update the pull request. See [token](#token). | `GITHUB_TOKEN` |
| `branch-token` | The token that the action will use to create and update the branch. See [branch-token](#branch-token). | Defaults to the value of `token` |
| `path` | Relative path under `GITHUB_WORKSPACE` to the repository. | `GITHUB_WORKSPACE` |
| `add-paths` | A comma or newline-separated list of file paths to commit. Paths should follow git's [pathspec](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) syntax. See [Add specific paths](#add-specific-paths). | If no paths are specified, all new and modified files are added. |
| `add-paths` | A comma or newline-separated list of file paths to commit. Paths should follow git's [pathspec](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) syntax. If no paths are specified, all new and modified files are added. See [Add specific paths](#add-specific-paths). | |
| `commit-message` | The message to use when committing changes. See [commit-message](#commit-message). | `[create-pull-request] automated change` |
| `committer` | The committer name and email address in the format `Display Name <email@address.com>`. Defaults to the GitHub Actions bot user on github.com. | `github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>` |
| `author` | The author name and email address in the format `Display Name <email@address.com>`. Defaults to the user who triggered the workflow run. | `${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>` |
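For orientation, a minimal sketch of how the inputs listed in the table above are typically wired into a workflow step. The input names and default values come from the table; the version tag, secret name, and file paths are illustrative assumptions, not part of this diff.

```yaml
# Hedged sketch only: the @v7 tag, secrets.PAT, and the paths are placeholders.
- uses: peter-evans/create-pull-request@v7
  with:
    token: ${{ secrets.PAT }}   # omit to fall back to GITHUB_TOKEN
    add-paths: |
      docs/**
      README.md
    commit-message: '[create-pull-request] automated change'
    committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
    author: ${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>
```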
dist/790.index.js (4 changes, vendored)
@@ -6,8 +6,8 @@ exports.modules = {
/***/ 790:
/***/ ((__unused_webpack_module, exports, __webpack_require__) => {
var y=Object.defineProperty;var c=(R,o)=>y(R,"name",{value:o,configurable:!0});const node=__webpack_require__(117);__webpack_require__(7067),__webpack_require__(4708),__webpack_require__(8522),__webpack_require__(7075),__webpack_require__(4573),__webpack_require__(7975),__webpack_require__(3465),__webpack_require__(3136),__webpack_require__(7030),__webpack_require__(3024),__webpack_require__(6760);let s=0;const S={START_BOUNDARY:s++,HEADER_FIELD_START:s++,HEADER_FIELD:s++,HEADER_VALUE_START:s++,HEADER_VALUE:s++,HEADER_VALUE_ALMOST_DONE:s++,HEADERS_ALMOST_DONE:s++,PART_DATA_START:s++,PART_DATA:s++,END:s++};let f=1;const F={PART_BOUNDARY:f,LAST_BOUNDARY:f*=2},LF=10,CR=13,SPACE=32,HYPHEN=45,COLON=58,A=97,Z=122,lower=c(R=>R|32,"lower"),noop=c(()=>{},"noop"),g=class g{constructor(o){this.index=0,this.flags=0,this.onHeaderEnd=noop,this.onHeaderField=noop,this.onHeadersEnd=noop,this.onHeaderValue=noop,this.onPartBegin=noop,this.onPartData=noop,this.onPartEnd=noop,this.boundaryChars={},o=`\r
--`+o;const t=new Uint8Array(o.length);for(let n=0;n<o.length;n++)t[n]=o.charCodeAt(n),this.boundaryChars[t[n]]=!0;this.boundary=t,this.lookbehind=new Uint8Array(this.boundary.length+8),this.state=S.START_BOUNDARY}write(o){let t=0;const n=o.length;let E=this.index,{lookbehind:l,boundary:h,boundaryChars:H,index:e,state:a,flags:d}=this;const b=this.boundary.length,m=b-1,O=o.length;let r,P;const u=c(D=>{this[D+"Mark"]=t},"mark"),i=c(D=>{delete this[D+"Mark"]},"clear"),T=c((D,p,_,N)=>{(p===void 0||p!==_)&&this[D](N&&N.subarray(p,_))},"callback"),L=c((D,p)=>{const _=D+"Mark";_ in this&&(p?(T(D,this[_],t,o),delete this[_]):(T(D,this[_],o.length,o),this[_]=0))},"dataCallback");for(t=0;t<n;t++)switch(r=o[t],a){case S.START_BOUNDARY:if(e===h.length-2){if(r===HYPHEN)d|=F.LAST_BOUNDARY;else if(r!==CR)return;e++;break}else if(e-1===h.length-2){if(d&F.LAST_BOUNDARY&&r===HYPHEN)a=S.END,d=0;else if(!(d&F.LAST_BOUNDARY)&&r===LF)e=0,T("onPartBegin"),a=S.HEADER_FIELD_START;else return;break}r!==h[e+2]&&(e=-2),r===h[e+2]&&e++;break;case S.HEADER_FIELD_START:a=S.HEADER_FIELD,u("onHeaderField"),e=0;case S.HEADER_FIELD:if(r===CR){i("onHeaderField"),a=S.HEADERS_ALMOST_DONE;break}if(e++,r===HYPHEN)break;if(r===COLON){if(e===1)return;L("onHeaderField",!0),a=S.HEADER_VALUE_START;break}if(P=lower(r),P<A||P>Z)return;break;case S.HEADER_VALUE_START:if(r===SPACE)break;u("onHeaderValue"),a=S.HEADER_VALUE;case S.HEADER_VALUE:r===CR&&(L("onHeaderValue",!0),T("onHeaderEnd"),a=S.HEADER_VALUE_ALMOST_DONE);break;case S.HEADER_VALUE_ALMOST_DONE:if(r!==LF)return;a=S.HEADER_FIELD_START;break;case S.HEADERS_ALMOST_DONE:if(r!==LF)return;T("onHeadersEnd"),a=S.PART_DATA_START;break;case S.PART_DATA_START:a=S.PART_DATA,u("onPartData");case S.PART_DATA:if(E=e,e===0){for(t+=m;t<O&&!(o[t]in H);)t+=b;t-=m,r=o[t]}if(e<h.length)h[e]===r?(e===0&&L("onPartData",!0),e++):e=0;else if(e===h.length)e++,r===CR?d|=F.PART_BOUNDARY:r===HYPHEN?d|=F.LAST_BOUNDARY:e=0;else if(e-1===h.length)if(d&F.PART_BOUNDARY){if(e=0,r===LF){d&=~F.PART_BOUNDARY,T("onPartEnd"),T("onPartBegin"),a=S.HEADER_FIELD_START;break}}else d&F.LAST_BOUNDARY&&r===HYPHEN?(T("onPartEnd"),a=S.END,d=0):e=0;if(e>0)l[e-1]=r;else if(E>0){const D=new Uint8Array(l.buffer,l.byteOffset,l.byteLength);T("onPartData",0,E,D),E=0,u("onPartData"),t--}break;case S.END:break;default:throw new Error(`Unexpected state entered: ${a}`)}L("onHeaderField"),L("onHeaderValue"),L("onPartData"),this.index=e,this.state=a,this.flags=d}end(){if(this.state===S.HEADER_FIELD_START&&this.index===0||this.state===S.PART_DATA&&this.index===this.boundary.length)this.onPartEnd();else if(this.state!==S.END)throw new Error("MultipartParser.end(): stream ended unexpectedly")}};c(g,"MultipartParser");let MultipartParser=g;function _fileName(R){const o=R.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);if(!o)return;const t=o[2]||o[3]||"";let n=t.slice(t.lastIndexOf("\\")+1);return n=n.replace(/%22/g,'"'),n=n.replace(/&#(\d{4});/g,(E,l)=>String.fromCharCode(l)),n}c(_fileName,"_fileName");async function toFormData(R,o){if(!/multipart/i.test(o))throw new TypeError("Failed to fetch");const t=o.match(/boundary=(?:"([^"]+)"|([^;]+))/i);if(!t)throw new TypeError("no or bad content-type header, no multipart boundary");const n=new MultipartParser(t[1]||t[2]);let E,l,h,H,e,a;const d=[],b=new node.FormData,m=c(i=>{h+=u.decode(i,{stream:!0})},"onPartData"),O=c(i=>{d.push(i)},"appendToFile"),r=c(()=>{const i=new 
node.File(d,a,{type:e});b.append(H,i)},"appendFileToFormData"),P=c(()=>{b.append(H,h)},"appendEntryToFormData"),u=new TextDecoder("utf-8");u.decode(),n.onPartBegin=function(){n.onPartData=m,n.onPartEnd=P,E="",l="",h="",H="",e="",a=null,d.length=0},n.onHeaderField=function(i){E+=u.decode(i,{stream:!0})},n.onHeaderValue=function(i){l+=u.decode(i,{stream:!0})},n.onHeaderEnd=function(){if(l+=u.decode(),E=E.toLowerCase(),E==="content-disposition"){const i=l.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);i&&(H=i[2]||i[3]||""),a=_fileName(l),a&&(n.onPartData=O,n.onPartEnd=r)}else E==="content-type"&&(e=l);l="",E=""};for await(const i of R)n.write(i);return n.end(),b}c(toFormData,"toFormData"),exports.toFormData=toFormData;
var N=Object.defineProperty;var c=(_,a)=>N(_,"name",{value:a,configurable:!0});__webpack_require__(3024),__webpack_require__(6760);const node=__webpack_require__(117);__webpack_require__(7067),__webpack_require__(4708),__webpack_require__(8522),__webpack_require__(7075),__webpack_require__(4573),__webpack_require__(7975),__webpack_require__(3465),__webpack_require__(3136),__webpack_require__(7030);let s=0;const S={START_BOUNDARY:s++,HEADER_FIELD_START:s++,HEADER_FIELD:s++,HEADER_VALUE_START:s++,HEADER_VALUE:s++,HEADER_VALUE_ALMOST_DONE:s++,HEADERS_ALMOST_DONE:s++,PART_DATA_START:s++,PART_DATA:s++,END:s++};let f=1;const F={PART_BOUNDARY:f,LAST_BOUNDARY:f*=2},LF=10,CR=13,SPACE=32,HYPHEN=45,COLON=58,A=97,Z=122,lower=c(_=>_|32,"lower"),noop=c(()=>{},"noop");class MultipartParser{static{c(this,"MultipartParser")}constructor(a){this.index=0,this.flags=0,this.onHeaderEnd=noop,this.onHeaderField=noop,this.onHeadersEnd=noop,this.onHeaderValue=noop,this.onPartBegin=noop,this.onPartData=noop,this.onPartEnd=noop,this.boundaryChars={},a=`\r
--`+a;const t=new Uint8Array(a.length);for(let n=0;n<a.length;n++)t[n]=a.charCodeAt(n),this.boundaryChars[t[n]]=!0;this.boundary=t,this.lookbehind=new Uint8Array(this.boundary.length+8),this.state=S.START_BOUNDARY}write(a){let t=0;const n=a.length;let E=this.index,{lookbehind:d,boundary:h,boundaryChars:H,index:e,state:o,flags:l}=this;const b=this.boundary.length,m=b-1,O=a.length;let r,P;const u=c(D=>{this[D+"Mark"]=t},"mark"),i=c(D=>{delete this[D+"Mark"]},"clear"),T=c((D,p,R,g)=>{(p===void 0||p!==R)&&this[D](g&&g.subarray(p,R))},"callback"),L=c((D,p)=>{const R=D+"Mark";R in this&&(p?(T(D,this[R],t,a),delete this[R]):(T(D,this[R],a.length,a),this[R]=0))},"dataCallback");for(t=0;t<n;t++)switch(r=a[t],o){case S.START_BOUNDARY:if(e===h.length-2){if(r===HYPHEN)l|=F.LAST_BOUNDARY;else if(r!==CR)return;e++;break}else if(e-1===h.length-2){if(l&F.LAST_BOUNDARY&&r===HYPHEN)o=S.END,l=0;else if(!(l&F.LAST_BOUNDARY)&&r===LF)e=0,T("onPartBegin"),o=S.HEADER_FIELD_START;else return;break}r!==h[e+2]&&(e=-2),r===h[e+2]&&e++;break;case S.HEADER_FIELD_START:o=S.HEADER_FIELD,u("onHeaderField"),e=0;case S.HEADER_FIELD:if(r===CR){i("onHeaderField"),o=S.HEADERS_ALMOST_DONE;break}if(e++,r===HYPHEN)break;if(r===COLON){if(e===1)return;L("onHeaderField",!0),o=S.HEADER_VALUE_START;break}if(P=lower(r),P<A||P>Z)return;break;case S.HEADER_VALUE_START:if(r===SPACE)break;u("onHeaderValue"),o=S.HEADER_VALUE;case S.HEADER_VALUE:r===CR&&(L("onHeaderValue",!0),T("onHeaderEnd"),o=S.HEADER_VALUE_ALMOST_DONE);break;case S.HEADER_VALUE_ALMOST_DONE:if(r!==LF)return;o=S.HEADER_FIELD_START;break;case S.HEADERS_ALMOST_DONE:if(r!==LF)return;T("onHeadersEnd"),o=S.PART_DATA_START;break;case S.PART_DATA_START:o=S.PART_DATA,u("onPartData");case S.PART_DATA:if(E=e,e===0){for(t+=m;t<O&&!(a[t]in H);)t+=b;t-=m,r=a[t]}if(e<h.length)h[e]===r?(e===0&&L("onPartData",!0),e++):e=0;else if(e===h.length)e++,r===CR?l|=F.PART_BOUNDARY:r===HYPHEN?l|=F.LAST_BOUNDARY:e=0;else if(e-1===h.length)if(l&F.PART_BOUNDARY){if(e=0,r===LF){l&=~F.PART_BOUNDARY,T("onPartEnd"),T("onPartBegin"),o=S.HEADER_FIELD_START;break}}else l&F.LAST_BOUNDARY&&r===HYPHEN?(T("onPartEnd"),o=S.END,l=0):e=0;if(e>0)d[e-1]=r;else if(E>0){const D=new Uint8Array(d.buffer,d.byteOffset,d.byteLength);T("onPartData",0,E,D),E=0,u("onPartData"),t--}break;case S.END:break;default:throw new Error(`Unexpected state entered: ${o}`)}L("onHeaderField"),L("onHeaderValue"),L("onPartData"),this.index=e,this.state=o,this.flags=l}end(){if(this.state===S.HEADER_FIELD_START&&this.index===0||this.state===S.PART_DATA&&this.index===this.boundary.length)this.onPartEnd();else if(this.state!==S.END)throw new Error("MultipartParser.end(): stream ended unexpectedly")}}function _fileName(_){const a=_.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);if(!a)return;const t=a[2]||a[3]||"";let n=t.slice(t.lastIndexOf("\\")+1);return n=n.replace(/%22/g,'"'),n=n.replace(/&#(\d{4});/g,(E,d)=>String.fromCharCode(d)),n}c(_fileName,"_fileName");async function toFormData(_,a){if(!/multipart/i.test(a))throw new TypeError("Failed to fetch");const t=a.match(/boundary=(?:"([^"]+)"|([^;]+))/i);if(!t)throw new TypeError("no or bad content-type header, no multipart boundary");const n=new MultipartParser(t[1]||t[2]);let E,d,h,H,e,o;const l=[],b=new node.FormData,m=c(i=>{h+=u.decode(i,{stream:!0})},"onPartData"),O=c(i=>{l.push(i)},"appendToFile"),r=c(()=>{const i=new node.File(l,o,{type:e});b.append(H,i)},"appendFileToFormData"),P=c(()=>{b.append(H,h)},"appendEntryToFormData"),u=new 
TextDecoder("utf-8");u.decode(),n.onPartBegin=function(){n.onPartData=m,n.onPartEnd=P,E="",d="",h="",H="",e="",o=null,l.length=0},n.onHeaderField=function(i){E+=u.decode(i,{stream:!0})},n.onHeaderValue=function(i){d+=u.decode(i,{stream:!0})},n.onHeaderEnd=function(){if(d+=u.decode(),E=E.toLowerCase(),E==="content-disposition"){const i=d.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);i&&(H=i[2]||i[3]||""),o=_fileName(d),o&&(n.onPartData=O,n.onPartEnd=r)}else E==="content-type"&&(e=d);d="",E=""};for await(const i of _)n.write(i);return n.end(),b}c(toFormData,"toFormData"),exports.toFormData=toFormData;
/***/ })
dist/index.js (703 changes, vendored). File diff suppressed because one or more lines are too long.
@@ -25,7 +25,7 @@ This document covers terminology, how the action works, general usage guidelines
## Terminology

[Pull requests](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests#about-pull-requests) are proposed changes to a repository branch that can be reviewed by a repository's collaborators before being accepted or rejected.
[Pull requests](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests#about-pull-requests) are proposed changes to a repository branch that can be reviewed by a repository's collaborators before being accepted or rejected.

A pull request references two branches:

@@ -150,7 +150,7 @@ There are a number of workarounds with different pros and cons.

- Use the default `GITHUB_TOKEN` and allow the action to create pull requests that have no checks enabled. Manually close pull requests and immediately reopen them. This will enable `on: pull_request` workflows to run and be added as checks. To prevent merging of pull requests without checks erroneously, use [branch protection rules](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests).

- Create draft pull requests by setting the `draft: always-true` input, and configure your workflow to trigger `ready_for_review` in `on: pull_request`. The workflow will run when users manually click the "Ready for review" button on the draft pull requests. If the pull request is updated by the action, the `always-true` mode ensures that the pull request will be converted back to a draft.
- Create draft pull requests by setting the `draft: always-true` input, and configure your workflow to trigger `on: ready_for_review`. The workflow will run when users manually click the "Ready for review" button on the draft pull requests. If the pull request is updated by the action, the `always-true` mode ensures that the pull request will be converted back to a draft.

- Use a [Personal Access Token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) created on an account that has write access to the repository that pull requests are being created in. This is the standard workaround and [recommended by GitHub](https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow). It's advisable to use a dedicated [machine account](https://docs.github.com/en/github/site-policy/github-terms-of-service#3-account-requirements) that has collaborator access to the repository, rather than creating a PAT on a personal user account. Also note that because the account that owns the PAT will be the creator of pull requests, that user account will be unable to perform actions such as request changes or approve the pull request.
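As a rough illustration of the `draft: always-true` workaround described in the bullets above, a separate checks workflow can listen for the `ready_for_review` event. This is a hedged sketch under standard GitHub Actions event syntax; the job name and the check commands are placeholders, not lines from this diff.

```yaml
# Sketch: runs when a human clicks "Ready for review" on a draft pull request
# that was created with `draft: always-true`. Names below are placeholders.
on:
  pull_request:
    types: [ready_for_review]

jobs:
  checks:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - run: npm ci && npm test   # whatever checks the repository needs
```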
@@ -197,9 +197,8 @@ Checking out a branch from a different repository from where the workflow is exe
Allowing the action to push with a configured deploy key will trigger `on: push` workflows. This makes it an alternative to using a PAT to trigger checks for pull requests.

> [!NOTE]
> - You cannot use deploy keys alone to [create a pull request in a remote repository](#creating-pull-requests-in-a-remote-repository) because then using a PAT would become a requirement.
> You cannot use deploy keys alone to [create a pull request in a remote repository](#creating-pull-requests-in-a-remote-repository) because then using a PAT would become a requirement.
> This method only makes sense if creating a pull request in the repository where the workflow is running.
> - You cannot use deploy keys with [commit signature verification for bots](#commit-signature-verification-for-bots) (`sign-commits: true`).

How to use SSH (deploy keys) with create-pull-request action:

@@ -272,7 +271,7 @@ The `token` input will then default to the repository's `GITHUB_TOKEN`, which wi

The following is an example of pushing to a fork using GitHub App tokens.
```yaml
- uses: actions/create-github-app-token@v2
- uses: actions/create-github-app-token@v1
id: generate-token
with:
app-id: ${{ secrets.APP_ID }}
@@ -319,7 +318,7 @@ GitHub App generated tokens can be configured with fine-grained permissions and

```yaml
steps:
- uses: actions/create-github-app-token@v2
- uses: actions/create-github-app-token@v1
id: generate-token
with:
app-id: ${{ secrets.APP_ID }}
@@ -342,7 +341,7 @@ For this case a token must be generated from the GitHub App installation of the
In the following example, a pull request is being created in remote repo `owner/repo`.
```yaml
steps:
- uses: actions/create-github-app-token@v2
- uses: actions/create-github-app-token@v1
id: generate-token
with:
app-id: ${{ secrets.APP_ID }}
@@ -374,7 +373,7 @@ The action supports two methods to sign commits, [commit signature verification

The action can sign commits as `github-actions[bot]` when using the repository's default `GITHUB_TOKEN`, or your own bot when using [GitHub App tokens](#authenticating-with-github-app-generated-tokens).

> [!IMPORTANT]
> [!IMPORTANT]
> - When setting `sign-commits: true` the action will ignore the `committer` and `author` inputs.
> - If you attempt to use a [Personal Access Token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) the action will create the pull request, but commits will *not* be signed. Commit signing is only supported with bot generated tokens.
> - The GitHub API has a 40MiB limit when creating git blobs. An error will be raised if there are files in the pull request larger than this. If you hit this limit, use [GPG commit signature verification](#gpg-commit-signature-verification) instead.
@@ -397,7 +396,7 @@ In this example, the `token` input is generated using a GitHub App. This will si
steps:
- uses: actions/checkout@v4

- uses: actions/create-github-app-token@v2
- uses: actions/create-github-app-token@v1
id: generate-token
with:
app-id: ${{ secrets.APP_ID }}
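The yaml excerpts above are cut off at the hunk boundaries. As a hedged sketch of how the documented token-generation step usually feeds into the action: the `app-id` input, the `generate-token` step id, and the `token` input come from the excerpts and the inputs table; the `private-key` input, the `outputs.token` reference, the secret names, and the version tags are assumptions, not lines from this diff.

```yaml
# Hedged sketch: wiring a GitHub App token into create-pull-request.
# Version tags and the APP_PRIVATE_KEY secret name are assumptions.
steps:
  - uses: actions/create-github-app-token@v2
    id: generate-token
    with:
      app-id: ${{ secrets.APP_ID }}
      private-key: ${{ secrets.APP_PRIVATE_KEY }}
  - uses: peter-evans/create-pull-request@v7
    with:
      token: ${{ steps.generate-token.outputs.token }}
```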
package-lock.json (1948 changes, generated). File diff suppressed because it is too large.
package.json (30 changes)
@@ -31,33 +31,33 @@
"dependencies": {
"@actions/core": "^1.11.1",
"@actions/exec": "^1.1.1",
"@octokit/core": "^6.1.6",
"@octokit/plugin-paginate-rest": "^11.6.0",
"@octokit/plugin-rest-endpoint-methods": "^13.5.0",
"@octokit/plugin-throttling": "^9.6.1",
"node-fetch-native": "^1.6.7",
"@octokit/core": "^6.1.3",
"@octokit/plugin-paginate-rest": "^11.4.0",
"@octokit/plugin-rest-endpoint-methods": "^13.3.0",
"@octokit/plugin-throttling": "^9.4.0",
"node-fetch-native": "^1.6.6",
"p-limit": "^6.2.0",
"uuid": "^9.0.1"
},
"devDependencies": {
"@types/jest": "^29.5.14",
"@types/node": "^18.19.130",
"@types/node": "^18.19.74",
"@typescript-eslint/eslint-plugin": "^7.18.0",
"@typescript-eslint/parser": "^7.18.0",
"@vercel/ncc": "^0.38.4",
"@vercel/ncc": "^0.38.3",
"eslint": "^8.57.1",
"eslint-import-resolver-typescript": "^3.10.1",
"eslint-import-resolver-typescript": "^3.7.0",
"eslint-plugin-github": "^4.10.2",
"eslint-plugin-import": "^2.32.0",
"eslint-plugin-import": "^2.31.0",
"eslint-plugin-jest": "^27.9.0",
"eslint-plugin-prettier": "^5.5.4",
"eslint-plugin-prettier": "^5.2.3",
"jest": "^29.7.0",
"jest-circus": "^29.7.0",
"jest-environment-jsdom": "^29.7.0",
"js-yaml": "^4.1.1",
"prettier": "^3.7.3",
"ts-jest": "^29.4.5",
"typescript": "^5.9.3",
"undici": "^6.22.0"
"js-yaml": "^4.1.0",
"prettier": "^3.4.2",
"ts-jest": "^29.2.5",
"typescript": "^5.7.3",
"undici": "^6.21.1"
}
}
@@ -19,7 +19,7 @@ export async function getWorkingBaseAndType(
): Promise<[string, WorkingBaseType]> {
const symbolicRefResult = await git.exec(
['symbolic-ref', 'HEAD', '--short'],
{allowAllExitCodes: true}
true
)
if (symbolicRefResult.exitCode == 0) {
// A ref is checked out
@@ -200,7 +200,7 @@ export async function createOrUpdateBranch(
} else {
aopts.push('-A')
}
await git.exec(aopts, {allowAllExitCodes: true})
await git.exec(aopts, true)
const popts = ['-m', commitMessage]
if (signoff) {
popts.push('--signoff')
@@ -123,14 +123,12 @@ export async function createPullRequest(inputs: Inputs): Promise<void> {
`The 'base' and 'branch' for a pull request must be different branches. Unable to continue.`
)
}
if (utils.isSelfHosted()) {
// For self-hosted runners the repository state persists between runs.
// This command prunes the stale remote ref when the pull request branch was
// deleted after being merged or closed. Without this the push using
// '--force-with-lease' fails due to "stale info."
// https://github.com/peter-evans/create-pull-request/issues/633
await git.exec(['remote', 'prune', branchRemoteName])
}
// For self-hosted runners the repository state persists between runs.
// This command prunes the stale remote ref when the pull request branch was
// deleted after being merged or closed. Without this the push using
// '--force-with-lease' fails due to "stale info."
// https://github.com/peter-evans/create-pull-request/issues/633
await git.exec(['remote', 'prune', branchRemoteName])
core.endGroup()

// Apply the branch suffix if set
@@ -215,7 +213,6 @@ export async function createPullRequest(inputs: Inputs): Promise<void> {
const stashed = await git.stashPush(['--include-untracked'])
await git.checkout(inputs.branch)
const pushSignedCommitsResult = await ghBranch.pushSignedCommits(
git,
result.branchCommits,
result.baseCommit,
repoPath,
@@ -2,7 +2,6 @@ import * as exec from '@actions/exec'
import * as io from '@actions/io'
import * as utils from './utils'
import * as path from 'path'
import stream, {Writable} from 'stream'

const tagsRefSpec = '+refs/tags/*:refs/tags/*'

@@ -22,12 +21,6 @@ export type Commit = {
unparsedChanges: string[]
}

export type ExecOpts = {
allowAllExitCodes?: boolean
encoding?: 'utf8' | 'base64'
suppressGitCmdOutput?: boolean
}

export class GitCommandManager {
private gitPath: string
private workingDirectory: string
@@ -73,7 +66,7 @@ export class GitCommandManager {
args.push(...options)
}

return await this.exec(args, {allowAllExitCodes: allowAllExitCodes})
return await this.exec(args, allowAllExitCodes)
}

async commit(
@@ -89,7 +82,7 @@ export class GitCommandManager {
args.push(...options)
}

return await this.exec(args, {allowAllExitCodes: allowAllExitCodes})
return await this.exec(args, allowAllExitCodes)
}

async config(
@@ -120,7 +113,7 @@ export class GitCommandManager {
configKey,
configValue
],
{allowAllExitCodes: true}
true
)
return output.exitCode === 0
}
@@ -163,20 +156,17 @@ export class GitCommandManager {

async getCommit(ref: string): Promise<Commit> {
const endOfBody = '###EOB###'
const output = await this.exec(
[
'-c',
'core.quotePath=false',
'show',
'--raw',
'--cc',
'--no-renames',
'--no-abbrev',
`--format=%H%n%T%n%P%n%G?%n%s%n%b%n${endOfBody}`,
ref
],
{suppressGitCmdOutput: true}
)
const output = await this.exec([
'-c',
'core.quotePath=false',
'show',
'--raw',
'--cc',
'--no-renames',
'--no-abbrev',
`--format=%H%n%T%n%P%n%G?%n%s%n%b%n${endOfBody}`,
ref
])
const lines = output.stdout.split('\n')
const endOfBodyIndex = lines.lastIndexOf(endOfBody)
const detailLines = lines.slice(0, endOfBodyIndex)
@@ -232,7 +222,7 @@ export class GitCommandManager {
if (options) {
args.push(...options)
}
const output = await this.exec(args, {allowAllExitCodes: true})
const output = await this.exec(args, true)
return output.exitCode === 1
}

@@ -288,15 +278,6 @@ export class GitCommandManager {
return output.stdout.trim()
}

async showFileAtRefBase64(ref: string, path: string): Promise<string> {
const args = ['show', `${ref}:${path}`]
const output = await this.exec(args, {
encoding: 'base64',
suppressGitCmdOutput: true
})
return output.stdout.trim()
}

async stashPush(options?: string[]): Promise<boolean> {
const args = ['stash', 'push']
if (options) {
@@ -345,7 +326,7 @@ export class GitCommandManager {
configKey,
configValue
],
{allowAllExitCodes: true}
true
)
return output.exitCode === 0
}
@@ -353,7 +334,7 @@ export class GitCommandManager {
async tryGetRemoteUrl(): Promise<string> {
const output = await this.exec(
['config', '--local', '--get', 'remote.origin.url'],
{allowAllExitCodes: true}
true
)

if (output.exitCode !== 0) {
@@ -368,30 +349,16 @@ export class GitCommandManager {
return stdout
}

async exec(
args: string[],
{
encoding = 'utf8',
allowAllExitCodes = false,
suppressGitCmdOutput = false
}: ExecOpts = {}
): Promise<GitOutput> {
async exec(args: string[], allowAllExitCodes = false): Promise<GitOutput> {
const result = new GitOutput()

if (process.env['CPR_SHOW_GIT_CMD_OUTPUT']) {
// debug mode overrides the suppressGitCmdOutput option
suppressGitCmdOutput = false
}

const env = {}
for (const key of Object.keys(process.env)) {
env[key] = process.env[key]
}

const stdout: Buffer[] = []
let stdoutLength = 0
const stderr: Buffer[] = []
let stderrLength = 0
const stdout: string[] = []
const stderr: string[] = []

const options = {
cwd: this.workingDirectory,
@@ -399,21 +366,17 @@ export class GitCommandManager {
ignoreReturnCode: allowAllExitCodes,
listeners: {
stdout: (data: Buffer) => {
stdout.push(data)
stdoutLength += data.length
stdout.push(data.toString())
},
stderr: (data: Buffer) => {
stderr.push(data)
stderrLength += data.length
stderr.push(data.toString())
}
},
outStream: outStreamHandler(process.stdout, suppressGitCmdOutput),
errStream: outStreamHandler(process.stderr, suppressGitCmdOutput)
}
}

result.exitCode = await exec.exec(`"${this.gitPath}"`, args, options)
result.stdout = Buffer.concat(stdout, stdoutLength).toString(encoding)
result.stderr = Buffer.concat(stderr, stderrLength).toString(encoding)
result.stdout = stdout.join('')
result.stderr = stderr.join('')
return result
}
}
@@ -423,24 +386,3 @@ class GitOutput {
stderr = ''
exitCode = 0
}

const outStreamHandler = (
outStream: Writable,
suppressGitCmdOutput: boolean
): Writable => {
return new stream.Writable({
write(chunk, _, next) {
if (suppressGitCmdOutput) {
const lines = chunk.toString().trimEnd().split('\n')
for (const line of lines) {
if (line.startsWith('[command]')) {
outStream.write(`${line}\n`)
}
}
} else {
outStream.write(chunk)
}
next()
}
})
}
@@ -22,7 +22,6 @@ export class GitConfigHelper {
private extraheaderConfigPlaceholderValue = 'AUTHORIZATION: basic ***'
private extraheaderConfigValueRegex = '^AUTHORIZATION:'
private persistedExtraheaderConfigValue = ''
private backedUpCredentialFiles: string[] = []

private constructor(git: GitCommandManager) {
this.git = git
@@ -122,15 +121,11 @@ export class GitConfigHelper {
async savePersistedAuth(): Promise<void> {
const serverUrl = new URL(`https://${this.getGitRemote().hostname}`)
this.extraheaderConfigKey = `http.${serverUrl.origin}/.extraheader`
// Backup checkout@v6 credential files if they exist
await this.hideCredentialFiles()
// Save and unset persisted extraheader credential in git config if it exists
this.persistedExtraheaderConfigValue = await this.getAndUnset()
}

async restorePersistedAuth(): Promise<void> {
// Restore checkout@v6 credential files if they were backed up
await this.unhideCredentialFiles()
if (this.persistedExtraheaderConfigValue) {
try {
await this.setExtraheaderConfig(this.persistedExtraheaderConfigValue)
@@ -174,51 +169,6 @@ export class GitConfigHelper {
)
}

private async hideCredentialFiles(): Promise<void> {
// Temporarily hide checkout@v6 credential files to avoid duplicate auth headers
const runnerTemp = process.env['RUNNER_TEMP']
if (!runnerTemp) {
return
}

try {
const files = await fs.promises.readdir(runnerTemp)
for (const file of files) {
if (file.startsWith('git-credentials-') && file.endsWith('.config')) {
const sourcePath = path.join(runnerTemp, file)
const backupPath = `${sourcePath}.bak`
await fs.promises.rename(sourcePath, backupPath)
this.backedUpCredentialFiles.push(backupPath)
core.info(
`Temporarily hiding checkout credential file: ${file} (will be restored after)`
)
}
}
} catch (e) {
// If directory doesn't exist or we can't read it, just continue
core.debug(
`Could not backup credential files: ${utils.getErrorMessage(e)}`
)
}
}

private async unhideCredentialFiles(): Promise<void> {
// Restore checkout@v6 credential files that were backed up
for (const backupPath of this.backedUpCredentialFiles) {
try {
const originalPath = backupPath.replace(/\.bak$/, '')
await fs.promises.rename(backupPath, originalPath)
const fileName = path.basename(originalPath)
core.info(`Restored checkout credential file: ${fileName}`)
} catch (e) {
core.warning(
`Failed to restore credential file ${backupPath}: ${utils.getErrorMessage(e)}`
)
}
}
this.backedUpCredentialFiles = []
}

private async getAndUnset(): Promise<string> {
let configValue = ''
// Save and unset persisted extraheader credential in git config if it exists
@@ -1,6 +1,6 @@
import * as core from '@actions/core'
import {Inputs} from './create-pull-request'
import {Commit, GitCommandManager} from './git-command-manager'
import {Commit} from './git-command-manager'
import {Octokit, OctokitOptions, throttleOptions} from './octokit-client'
import pLimit from 'p-limit'
import * as utils from './utils'
@@ -97,50 +97,6 @@ export class GitHubHelper {
}
}

private async getPullNumber(
baseRepository: string,
headBranch: string,
baseBranch: string
): Promise<number> {
const {data: pulls} = await this.octokit.rest.pulls.list({
...this.parseRepository(baseRepository),
state: 'open',
head: headBranch,
base: baseBranch
})
let pullNumber: number | undefined = undefined
if (pulls?.length === 0 || pulls === null || pulls === undefined) {
// This is a fallback due to a bug that affects the list endpoint when called on forks with the same owner as the repository parent.
core.info(
`Pull request not found via list endpoint; attempting fallback mechanism`
)
for await (const response of this.octokit.paginate.iterator(
this.octokit.rest.pulls.list,
{
...this.parseRepository(baseRepository),
state: 'open',
base: baseBranch
}
)) {
const existingPull = response.data.find(
pull => pull.head.label === headBranch
)
if (existingPull !== undefined) {
pullNumber = existingPull.number
break
}
}
} else {
pullNumber = pulls[0].number
}
if (pullNumber === undefined) {
throw new Error(
`Failed to find pull request number for branch ${headBranch}`
)
}
return pullNumber
}

private async createOrUpdate(
inputs: Inputs,
baseRepository: string,
@@ -191,15 +147,16 @@ export class GitHubHelper {

// Update the pull request that exists for this branch and base
core.info(`Fetching existing pull request`)
const pullNumber = await this.getPullNumber(
baseRepository,
headBranch,
inputs.base
)
const {data: pulls} = await this.octokit.rest.pulls.list({
...this.parseRepository(baseRepository),
state: 'open',
head: headBranch,
base: inputs.base
})
core.info(`Attempting update of pull request`)
const {data: pull} = await this.octokit.rest.pulls.update({
...this.parseRepository(baseRepository),
pull_number: pullNumber,
pull_number: pulls[0].number,
title: inputs.title,
body: inputs.body
})
@@ -297,7 +254,6 @@ export class GitHubHelper {
}

async pushSignedCommits(
git: GitCommandManager,
branchCommits: Commit[],
baseCommit: Commit,
repoPath: string,
@@ -311,7 +267,6 @@ export class GitHubHelper {
}
for (const commit of branchCommits) {
headCommit = await this.createCommit(
git,
commit,
headCommit,
repoPath,
@@ -323,7 +278,6 @@ export class GitHubHelper {
}

private async createCommit(
git: GitCommandManager,
commit: Commit,
parentCommit: CommitResponse,
repoPath: string,
@@ -349,10 +303,10 @@ export class GitHubHelper {
let sha: string | null = null
if (status === 'A' || status === 'M') {
try {
const {data: blob} = await blobCreationLimit(async () =>
const {data: blob} = await blobCreationLimit(() =>
this.octokit.rest.git.createBlob({
...repository,
content: await git.showFileAtRefBase64(commit.sha, path),
content: utils.readFileBase64([repoPath, path]),
encoding: 'base64'
})
)
src/utils.ts (15 changes)
@@ -126,6 +126,16 @@ export function readFile(path: string): string {
return fs.readFileSync(path, 'utf-8')
}

export function readFileBase64(pathParts: string[]): string {
const resolvedPath = path.resolve(...pathParts)
if (fs.lstatSync(resolvedPath).isSymbolicLink()) {
return fs
.readlinkSync(resolvedPath, {encoding: 'buffer'})
.toString('base64')
}
return fs.readFileSync(resolvedPath).toString('base64')
}

/* eslint-disable @typescript-eslint/no-explicit-any */
function hasErrorCode(error: any): error is {code: string} {
return typeof (error && error.code) === 'string'
@@ -135,8 +145,3 @@ export function getErrorMessage(error: unknown) {
if (error instanceof Error) return error.message
return String(error)
}

export const isSelfHosted = (): boolean =>
process.env['RUNNER_ENVIRONMENT'] !== 'github-hosted' &&
(process.env['AGENT_ISSELFHOSTED'] === '1' ||
process.env['AGENT_ISSELFHOSTED'] === undefined)