Compare commits

86 Commits

- c845b6b500
- 78692d2c94
- 0ac8383f60
- 48c9c39841
- 03e1521d02
- 3909512c7f
- 3d89efcc1b
- 16eadd1e3a
- 0f2a09222f
- cbde760b07
- 88ed63ce14
- 6658b293f0
- 7884c077c7
- f689536846
- 1f1b618e8e
- 889dce9eab
- 1e36d60fe9
- 9b309f7eaa
- 3b1f4bffdc
- 26f1fcb7bd
- 799af1ac09
- 450b15d522
- 14576695c2
- e67dfa71de
- 4a56bdc321
- a7b20e1da2
- e89bd69e40
- 5bae925c86
- 7a23c9b2d1
- 88d86bdd1a
- 5ccebef7df
- 7c6dcd5e40
- b926f73506
- e468aa4ac9
- d57e551ebc
- 98106d3f2b
- 45f0e76ebd
- 5aec14b1da
- 52bf914c5c
- 2fbdcbbbe7
- e630501767
- 5ddfb02482
- 62822539b5
- a7759c6f84
- 7b02d9ab89
- 58258f452b
- c16e6f558f
- 599a7e63a6
- 271a8d0340
- 6f7efd1c24
- 13c47c5747
- 63e58290d7
- a92c90fcab
- b23b62d487
- dd2324fc52
- 367180cbdf
- 25575a12f3
- a56e7a56e9
- eac17dc6a3
- a2e685f814
- 6cfd146ec9
- b38e8d38a1
- 8a41570d99
- 2e9b4cc10e
- 1681a83d43
- ba5b4b46a5
- 479e1068be
- 15ab4d71d8
- a39470890e
- 9821aed555
- 49cf7e9854
- 176fdd231e
- 842a5fb093
- 9965b9dd5e
- e23e628200
- 16bbacf50f
- c4623efb09
- 2539354181
- c52b9e2028
- ae3093d7e8
- 8606317131
- a302671c1f
- 67ccf781d6
- bb88e27d3f
- b378ed537a
- fa9200e5b4
`.github/workflows/cpr-example-command.yml` (vendored, 4 lines changed)

```diff
@@ -29,8 +29,8 @@ jobs:
           labels: |
             report
             automated pr
-          assignees: peter-evans
-          reviewers: peter-evans
+          assignees: retepsnave
+          reviewers: retepsnave
           milestone: 1
           draft: false
           branch: example-patches
```
`.github/workflows/slash-command-dispatch.yml` (vendored, 6 lines changed)

```diff
@@ -18,12 +18,6 @@ jobs:
               "repository": "peter-evans/create-pull-request-tests",
               "named_args": true
             },
-            {
-              "command": "testv5",
-              "permission": "admin",
-              "repository": "peter-evans/create-pull-request-tests",
-              "named_args": true
-            },
             {
               "command": "clean",
               "permission": "admin",
```
`.github/workflows/upstream_sync.yml` (vendored, new file, 18 lines added)

```diff
@@ -0,0 +1,18 @@
+# .github/workflows/sync.yml
+name: Rebase Upstream
+on:
+  schedule:
+    - cron: "0 0 * * 0" # run once a week
+  workflow_dispatch: # run manually
+
+jobs:
+  sync:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@master
+        with:
+          fetch-depth: 10 # greater than the number of commits you made
+      - uses: imba-tjd/rebase-upstream-action@master
+        with: # all args are optional
+          upstream: peter-evans/create-pull-request
+          branch: main
```
```diff
@@ -53,7 +53,7 @@ All inputs are **optional**. If not set, sensible defaults will be used.
 | `token` | The token that the action will use to create and update the pull request. See [token](#token). | `GITHUB_TOKEN` |
 | `branch-token` | The token that the action will use to create and update the branch. See [branch-token](#branch-token). | Defaults to the value of `token` |
 | `path` | Relative path under `GITHUB_WORKSPACE` to the repository. | `GITHUB_WORKSPACE` |
-| `add-paths` | A comma or newline-separated list of file paths to commit. Paths should follow git's [pathspec](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) syntax. If no paths are specified, all new and modified files are added. See [Add specific paths](#add-specific-paths). | |
+| `add-paths` | A comma or newline-separated list of file paths to commit. Paths should follow git's [pathspec](https://git-scm.com/docs/gitglossary#Documentation/gitglossary.txt-aiddefpathspecapathspec) syntax. See [Add specific paths](#add-specific-paths). | If no paths are specified, all new and modified files are added. |
 | `commit-message` | The message to use when committing changes. See [commit-message](#commit-message). | `[create-pull-request] automated change` |
 | `committer` | The committer name and email address in the format `Display Name <email@address.com>`. Defaults to the GitHub Actions bot user on github.com. | `github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>` |
 | `author` | The author name and email address in the format `Display Name <email@address.com>`. Defaults to the user who triggered the workflow run. | `${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>` |
```
```diff
@@ -19,15 +19,15 @@ git clone git://127.0.0.1/repos/test-base.git /git/local/repos/test-base
 cd /git/local/repos/test-base
 git config --global user.email "you@example.com"
 git config --global user.name "Your Name"
-echo "#test-base" > README_TEMP.md
+echo "#test-base" > README→TEMP.md
 git add .
 git commit -m "initial commit"
 git commit --allow-empty -m "empty commit for tests"
-echo "#test-base :sparkles:" > README_TEMP.md
+echo "#test-base :sparkles:" > README→TEMP.md
 git add .
 git commit -m "add sparkles" -m "Change description:
-- updates README_TEMP.md to add sparkles to the title"
-mv README_TEMP.md README.md
+- updates README→TEMP.md to add sparkles to the title"
+mv README→TEMP.md README.md
 git add .
 git commit -m "rename readme"
 git push -u
```
```diff
@@ -20,7 +20,7 @@ describe('git-command-manager integration tests', () => {
     expect(initialCommit.signed).toBeFalsy()
     expect(initialCommit.changes[0].mode).toEqual('100644')
     expect(initialCommit.changes[0].status).toEqual('A')
-    expect(initialCommit.changes[0].path).toEqual('README_TEMP.md')
+    expect(initialCommit.changes[0].path).toEqual('README→TEMP.md') // filename contains unicode
 
     expect(emptyCommit.subject).toEqual('empty commit for tests')
     expect(emptyCommit.tree).toEqual(initialCommit.tree) // empty commits have no tree and reference the parent's
@@ -33,7 +33,7 @@ describe('git-command-manager integration tests', () => {
     expect(modifiedCommit.signed).toBeFalsy()
     expect(modifiedCommit.changes[0].mode).toEqual('100644')
     expect(modifiedCommit.changes[0].status).toEqual('M')
-    expect(modifiedCommit.changes[0].path).toEqual('README_TEMP.md')
+    expect(modifiedCommit.changes[0].path).toEqual('README→TEMP.md')
 
     expect(headCommit.subject).toEqual('rename readme')
     expect(headCommit.parents[0]).toEqual(modifiedCommit.sha)
@@ -43,6 +43,6 @@ describe('git-command-manager integration tests', () => {
     expect(headCommit.changes[0].path).toEqual('README.md')
     expect(headCommit.changes[1].mode).toEqual('100644')
     expect(headCommit.changes[1].status).toEqual('D')
-    expect(headCommit.changes[1].path).toEqual('README_TEMP.md')
+    expect(headCommit.changes[1].path).toEqual('README→TEMP.md')
   })
 })
```
```diff
@@ -1,5 +1,6 @@
 #!/usr/bin/env bash
 set -euo pipefail
+set -xv
 
 IMAGE="cpr-integration-tests:latest"
 ARG1=${1:-}
```
`dist/790.index.js` (vendored, 4 lines changed)

File diff suppressed because one or more lines are too long
`dist/index.js` (vendored, 1000 lines changed)

File diff suppressed because one or more lines are too long
```diff
@@ -25,7 +25,7 @@ This document covers terminology, how the action works, general usage guidelines
 
 ## Terminology
 
 [Pull requests](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests#about-pull-requests) are proposed changes to a repository branch that can be reviewed by a repository's collaborators before being accepted or rejected.
 
 A pull request references two branches:
 
@@ -150,7 +150,7 @@ There are a number of workarounds with different pros and cons.
 
 - Use the default `GITHUB_TOKEN` and allow the action to create pull requests that have no checks enabled. Manually close pull requests and immediately reopen them. This will enable `on: pull_request` workflows to run and be added as checks. To prevent merging of pull requests without checks erroneously, use [branch protection rules](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests).
 
-- Create draft pull requests by setting the `draft: always-true` input, and configure your workflow to trigger `on: ready_for_review`. The workflow will run when users manually click the "Ready for review" button on the draft pull requests. If the pull request is updated by the action, the `always-true` mode ensures that the pull request will be converted back to a draft.
+- Create draft pull requests by setting the `draft: always-true` input, and configure your workflow to trigger `ready_for_review` in `on: pull_request`. The workflow will run when users manually click the "Ready for review" button on the draft pull requests. If the pull request is updated by the action, the `always-true` mode ensures that the pull request will be converted back to a draft.
 
 - Use a [Personal Access Token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) created on an account that has write access to the repository that pull requests are being created in. This is the standard workaround and [recommended by GitHub](https://docs.github.com/en/actions/using-workflows/triggering-a-workflow#triggering-a-workflow-from-a-workflow). It's advisable to use a dedicated [machine account](https://docs.github.com/en/github/site-policy/github-terms-of-service#3-account-requirements) that has collaborator access to the repository, rather than creating a PAT on a personal user account. Also note that because the account that owns the PAT will be the creator of pull requests, that user account will be unable to perform actions such as request changes or approve the pull request.
 
@@ -197,8 +197,9 @@ Checking out a branch from a different repository from where the workflow is exe
 Allowing the action to push with a configured deploy key will trigger `on: push` workflows. This makes it an alternative to using a PAT to trigger checks for pull requests.
 
 > [!NOTE]
-> You cannot use deploy keys alone to [create a pull request in a remote repository](#creating-pull-requests-in-a-remote-repository) because then using a PAT would become a requirement.
+> - You cannot use deploy keys alone to [create a pull request in a remote repository](#creating-pull-requests-in-a-remote-repository) because then using a PAT would become a requirement.
 > This method only makes sense if creating a pull request in the repository where the workflow is running.
+> - You cannot use deploy keys with [commit signature verification for bots](#commit-signature-verification-for-bots) (`sign-commits: true`).
 
 How to use SSH (deploy keys) with create-pull-request action:
 
@@ -373,7 +374,7 @@ The action supports two methods to sign commits, [commit signature verification
 
 The action can sign commits as `github-actions[bot]` when using the repository's default `GITHUB_TOKEN`, or your own bot when using [GitHub App tokens](#authenticating-with-github-app-generated-tokens).
 
 > [!IMPORTANT]
 > - When setting `sign-commits: true` the action will ignore the `committer` and `author` inputs.
 > - If you attempt to use a [Personal Access Token (PAT)](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) the action will create the pull request, but commits will *not* be signed. Commit signing is only supported with bot generated tokens.
 > - The GitHub API has a 40MiB limit when creating git blobs. An error will be raised if there are files in the pull request larger than this. If you hit this limit, use [GPG commit signature verification](#gpg-commit-signature-verification) instead.
```
````diff
@@ -22,7 +22,7 @@
 - [Dynamic configuration using variables](#dynamic-configuration-using-variables)
 - [Using a markdown template](#using-a-markdown-template)
 - [Debugging GitHub Actions](#debugging-github-actions)
+- [Show an annotation message for a created pull request](#show-an-annotation-message-for-a-created-pull-request)
 
 ## Use case: Create a pull request to update X on push
 
@@ -612,3 +612,30 @@ To enable step debug logging set the secret `ACTIONS_STEP_DEBUG` to `true` in th
           MATRIX_CONTEXT: ${{ toJson(matrix) }}
         run: echo "$MATRIX_CONTEXT"
 ```
+
+### Show an annotation message for a created pull request
+
+Showing an annotation message for a created or updated pull request allows you to confirm the pull request easily, such as by visiting the link. This can be achieved by adding a step that uses the [`notice` workflow command](https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions?tool=bash#setting-a-notice-message).
+
+For example:
+
+```yml
+      - name: Create Pull Request
+        id: cpr
+        uses: peter-evans/create-pull-request@v7
+
+      - name: Show message for created Pull Request
+        if: ${{ steps.cpr.outputs.pull-request-url && steps.cpr.outputs.pull-request-operation != 'none' }}
+        shell: bash
+        env:
+          PR_URL: ${{ steps.cpr.outputs.pull-request-url }}
+          PR_OPERATION: ${{ steps.cpr.outputs.pull-request-operation }}
+        run: |
+          echo "::notice::${PR_URL} was ${PR_OPERATION}."
+```
+
+In this example, when a pull request is created, you will be able to see the following message on an action run page (e.g., `/actions/runs/12812393039`):
+
+```
+https://github.com/peter-evans/create-pull-request/pull/1 was created.
+```
````
`package-lock.json` (generated, 789 lines changed)

File diff suppressed because it is too large
`package.json` (26 lines changed)

```diff
@@ -31,33 +31,33 @@
   "dependencies": {
     "@actions/core": "^1.11.1",
     "@actions/exec": "^1.1.1",
-    "@octokit/core": "^6.1.2",
-    "@octokit/plugin-paginate-rest": "^11.3.6",
-    "@octokit/plugin-rest-endpoint-methods": "^13.2.6",
-    "@octokit/plugin-throttling": "^9.3.2",
-    "node-fetch-native": "^1.6.4",
-    "p-limit": "^6.1.0",
+    "@octokit/core": "^6.1.5",
+    "@octokit/plugin-paginate-rest": "^11.6.0",
+    "@octokit/plugin-rest-endpoint-methods": "^13.5.0",
+    "@octokit/plugin-throttling": "^9.6.1",
+    "node-fetch-native": "^1.6.6",
+    "p-limit": "^6.2.0",
     "uuid": "^9.0.1"
   },
   "devDependencies": {
     "@types/jest": "^29.5.14",
-    "@types/node": "^18.19.67",
+    "@types/node": "^18.19.110",
     "@typescript-eslint/eslint-plugin": "^7.18.0",
     "@typescript-eslint/parser": "^7.18.0",
     "@vercel/ncc": "^0.38.3",
     "eslint": "^8.57.1",
-    "eslint-import-resolver-typescript": "^3.7.0",
+    "eslint-import-resolver-typescript": "^3.10.1",
     "eslint-plugin-github": "^4.10.2",
     "eslint-plugin-import": "^2.31.0",
     "eslint-plugin-jest": "^27.9.0",
-    "eslint-plugin-prettier": "^5.2.1",
+    "eslint-plugin-prettier": "^5.4.1",
     "jest": "^29.7.0",
     "jest-circus": "^29.7.0",
     "jest-environment-jsdom": "^29.7.0",
     "js-yaml": "^4.1.0",
-    "prettier": "^3.4.2",
-    "ts-jest": "^29.2.5",
-    "typescript": "^5.7.2",
-    "undici": "^6.21.0"
+    "prettier": "^3.5.3",
+    "ts-jest": "^29.3.4",
+    "typescript": "^5.8.3",
+    "undici": "^6.21.3"
   }
 }
```
```diff
@@ -19,7 +19,7 @@ export async function getWorkingBaseAndType(
 ): Promise<[string, WorkingBaseType]> {
   const symbolicRefResult = await git.exec(
     ['symbolic-ref', 'HEAD', '--short'],
-    true
+    {allowAllExitCodes: true}
   )
   if (symbolicRefResult.exitCode == 0) {
     // A ref is checked out
@@ -200,7 +200,7 @@ export async function createOrUpdateBranch(
   } else {
     aopts.push('-A')
   }
-  await git.exec(aopts, true)
+  await git.exec(aopts, {allowAllExitCodes: true})
   const popts = ['-m', commitMessage]
   if (signoff) {
     popts.push('--signoff')
```
```diff
@@ -51,8 +51,8 @@ export async function createPullRequest(inputs: Inputs): Promise<void> {
     core.startGroup('Determining the base and head repositories')
     const baseRemote = gitConfigHelper.getGitRemote()
     // Init the GitHub clients
-    const apiUrl = await GitHubHelper.determineApiUrl(baseRemote.hostname);
-    core.info(`Using API base URL: ${apiUrl}`);
+    const apiUrl = await GitHubHelper.determineApiUrl(baseRemote.hostname)
+    core.info(`Using API base URL: ${apiUrl}`)
     const ghBranch = new GitHubHelper(apiUrl, inputs.branchToken)
     const ghPull = new GitHubHelper(apiUrl, inputs.token)
     // Determine the head repository; the target for the pull request branch
@@ -213,6 +213,7 @@ export async function createPullRequest(inputs: Inputs): Promise<void> {
       const stashed = await git.stashPush(['--include-untracked'])
       await git.checkout(inputs.branch)
       const pushSignedCommitsResult = await ghBranch.pushSignedCommits(
+        git,
         result.branchCommits,
         result.baseCommit,
         repoPath,
```
```diff
@@ -2,6 +2,7 @@ import * as exec from '@actions/exec'
 import * as io from '@actions/io'
 import * as utils from './utils'
 import * as path from 'path'
+import stream, {Writable} from 'stream'
 
 const tagsRefSpec = '+refs/tags/*:refs/tags/*'
 
@@ -21,6 +22,12 @@ export type Commit = {
   unparsedChanges: string[]
 }
 
+export type ExecOpts = {
+  allowAllExitCodes?: boolean
+  encoding?: 'utf8' | 'base64'
+  suppressGitCmdOutput?: boolean
+}
+
 export class GitCommandManager {
   private gitPath: string
   private workingDirectory: string
```
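For orientation, a minimal sketch of how call sites read once `exec` takes an `ExecOpts` object instead of a positional boolean. The wrapper function and the specific git arguments below are illustrative, not taken from this diff:

```ts
import {GitCommandManager} from './git-command-manager'

// Illustrative wrapper; `git` is an already-constructed GitCommandManager.
async function demo(git: GitCommandManager): Promise<void> {
  // Before this change: await git.exec(['symbolic-ref', 'HEAD', '--short'], true)
  const head = await git.exec(['symbolic-ref', 'HEAD', '--short'], {
    allowAllExitCodes: true // a non-zero exit code is returned, not thrown
  })

  // Binary-safe output, with the raw git output kept out of the workflow log
  const blob = await git.exec(['show', 'HEAD:README.md'], {
    encoding: 'base64',
    suppressGitCmdOutput: true
  })

  console.log(head.exitCode, blob.stdout.length)
}
```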
```diff
@@ -66,7 +73,7 @@ export class GitCommandManager {
       args.push(...options)
     }
 
-    return await this.exec(args, allowAllExitCodes)
+    return await this.exec(args, {allowAllExitCodes: allowAllExitCodes})
   }
 
   async commit(
@@ -82,7 +89,7 @@ export class GitCommandManager {
       args.push(...options)
     }
 
-    return await this.exec(args, allowAllExitCodes)
+    return await this.exec(args, {allowAllExitCodes: allowAllExitCodes})
   }
 
   async config(
@@ -113,7 +120,7 @@ export class GitCommandManager {
         configKey,
         configValue
       ],
-      true
+      {allowAllExitCodes: true}
     )
     return output.exitCode === 0
   }
@@ -156,15 +163,20 @@ export class GitCommandManager {
 
   async getCommit(ref: string): Promise<Commit> {
     const endOfBody = '###EOB###'
-    const output = await this.exec([
-      'show',
-      '--raw',
-      '--cc',
-      '--no-renames',
-      '--no-abbrev',
-      `--format=%H%n%T%n%P%n%G?%n%s%n%b%n${endOfBody}`,
-      ref
-    ])
+    const output = await this.exec(
+      [
+        '-c',
+        'core.quotePath=false',
+        'show',
+        '--raw',
+        '--cc',
+        '--no-renames',
+        '--no-abbrev',
+        `--format=%H%n%T%n%P%n%G?%n%s%n%b%n${endOfBody}`,
+        ref
+      ],
+      {suppressGitCmdOutput: true}
+    )
     const lines = output.stdout.split('\n')
     const endOfBodyIndex = lines.lastIndexOf(endOfBody)
     const detailLines = lines.slice(0, endOfBodyIndex)
```
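Background on the added `-c core.quotePath=false`: by default git escapes non-ASCII bytes in paths printed by `--raw` output, so a unicode filename like `README→TEMP.md` would not round-trip when the commit is parsed. A small standalone sketch of the behaviour, using plain `child_process` rather than the action's own helper:

```ts
import {execFileSync} from 'child_process'

// With the default quotePath, a file named README→TEMP.md is printed as
// "README\342\206\222TEMP.md"; with core.quotePath=false the literal UTF-8
// name is printed, so Commit.changes[].path can match the real filename.
const raw = execFileSync(
  'git',
  ['-c', 'core.quotePath=false', 'show', '--raw', '--no-renames', '--no-abbrev', 'HEAD'],
  {encoding: 'utf8'}
)
console.log(raw)
```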
```diff
@@ -220,7 +232,7 @@ export class GitCommandManager {
     if (options) {
       args.push(...options)
     }
-    const output = await this.exec(args, true)
+    const output = await this.exec(args, {allowAllExitCodes: true})
     return output.exitCode === 1
   }
 
@@ -276,6 +288,15 @@ export class GitCommandManager {
     return output.stdout.trim()
   }
 
+  async showFileAtRefBase64(ref: string, path: string): Promise<string> {
+    const args = ['show', `${ref}:${path}`]
+    const output = await this.exec(args, {
+      encoding: 'base64',
+      suppressGitCmdOutput: true
+    })
+    return output.stdout.trim()
+  }
+
   async stashPush(options?: string[]): Promise<boolean> {
     const args = ['stash', 'push']
     if (options) {
@@ -324,7 +345,7 @@ export class GitCommandManager {
         configKey,
         configValue
       ],
-      true
+      {allowAllExitCodes: true}
     )
     return output.exitCode === 0
   }
@@ -332,7 +353,7 @@ export class GitCommandManager {
   async tryGetRemoteUrl(): Promise<string> {
     const output = await this.exec(
       ['config', '--local', '--get', 'remote.origin.url'],
-      true
+      {allowAllExitCodes: true}
     )
 
     if (output.exitCode !== 0) {
@@ -347,16 +368,30 @@ export class GitCommandManager {
     return stdout
   }
 
-  async exec(args: string[], allowAllExitCodes = false): Promise<GitOutput> {
+  async exec(
+    args: string[],
+    {
+      encoding = 'utf8',
+      allowAllExitCodes = false,
+      suppressGitCmdOutput = false
+    }: ExecOpts = {}
+  ): Promise<GitOutput> {
     const result = new GitOutput()
+
+    if (process.env['CPR_SHOW_GIT_CMD_OUTPUT']) {
+      // debug mode overrides the suppressGitCmdOutput option
+      suppressGitCmdOutput = false
+    }
+
     const env = {}
     for (const key of Object.keys(process.env)) {
       env[key] = process.env[key]
     }
 
-    const stdout: string[] = []
-    const stderr: string[] = []
+    const stdout: Buffer[] = []
+    let stdoutLength = 0
+    const stderr: Buffer[] = []
+    let stderrLength = 0
 
     const options = {
       cwd: this.workingDirectory,
@@ -364,17 +399,21 @@ export class GitCommandManager {
       ignoreReturnCode: allowAllExitCodes,
       listeners: {
         stdout: (data: Buffer) => {
-          stdout.push(data.toString())
+          stdout.push(data)
+          stdoutLength += data.length
         },
         stderr: (data: Buffer) => {
-          stderr.push(data.toString())
+          stderr.push(data)
+          stderrLength += data.length
         }
-      }
+      },
+      outStream: outStreamHandler(process.stdout, suppressGitCmdOutput),
+      errStream: outStreamHandler(process.stderr, suppressGitCmdOutput)
     }
 
     result.exitCode = await exec.exec(`"${this.gitPath}"`, args, options)
-    result.stdout = stdout.join('')
-    result.stderr = stderr.join('')
+    result.stdout = Buffer.concat(stdout, stdoutLength).toString(encoding)
+    result.stderr = Buffer.concat(stderr, stderrLength).toString(encoding)
     return result
   }
 }
@@ -384,3 +423,24 @@ class GitOutput {
   stderr = ''
   exitCode = 0
 }
+
+const outStreamHandler = (
+  outStream: Writable,
+  suppressGitCmdOutput: boolean
+): Writable => {
+  return new stream.Writable({
+    write(chunk, _, next) {
+      if (suppressGitCmdOutput) {
+        const lines = chunk.toString().trimEnd().split('\n')
+        for (const line of lines) {
+          if (line.startsWith('[command]')) {
+            outStream.write(`${line}\n`)
+          }
+        }
+      } else {
+        outStream.write(chunk)
+      }
+      next()
+    }
+  })
+}
```
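Roughly what the suppressing stream does when `@actions/exec` writes to it. This standalone sketch mirrors the module-private `outStreamHandler` above and is illustrative only:

```ts
import stream, {Writable} from 'stream'

// @actions/exec echoes the invoked command line prefixed with "[command]"
// before the process output; with suppression enabled only that echo survives.
const kept: string[] = []
const target = new Writable({
  write(chunk, _enc, next) {
    kept.push(chunk.toString())
    next()
  }
})

const suppressing = new stream.Writable({
  write(chunk, _enc, next) {
    for (const line of chunk.toString().trimEnd().split('\n')) {
      if (line.startsWith('[command]')) {
        target.write(`${line}\n`)
      }
    }
    next()
  }
})

suppressing.write('[command]/usr/bin/git show --raw HEAD\nREADME→TEMP.md\n')
// kept now holds only '[command]/usr/bin/git show --raw HEAD\n'
```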
```diff
@@ -1,11 +1,11 @@
 import * as core from '@actions/core'
 import {Inputs} from './create-pull-request'
-import {Commit} from './git-command-manager'
+import {Commit, GitCommandManager} from './git-command-manager'
 import {Octokit, OctokitOptions, throttleOptions} from './octokit-client'
 import pLimit from 'p-limit'
 import * as utils from './utils'
 
-const ERROR_PR_ALREADY_EXISTS = 'A pull request already exists for'
+const ERROR_PR_ALREADY_EXISTS = 'pull request already exists for'
 const ERROR_PR_REVIEW_TOKEN_SCOPE =
   'Validation Failed: "Could not resolve to a node with the global id of'
 const ERROR_PR_FORK_COLLAB = `Fork collab can't be granted by someone without permission`
@@ -46,42 +46,47 @@ export class GitHubHelper {
     if (token) {
       options.auth = `${token}`
     }
-    options.baseUrl = apiUrl;
+    options.baseUrl = apiUrl
     options.throttle = throttleOptions
     this.octokit = new Octokit(options)
   }
 
   static async determineApiUrl(hostname: string): Promise<string> {
     if (hostname === 'github.com') {
-      return "https://api.github.com";
+      return 'https://api.github.com'
     }
 
-    const baseUrl = `https://${hostname}`;
-    const possiblePaths = ['/api/v4/version', '/api/forgejo/v1/version', '/api/v1/version'];
+    const baseUrl = `https://${hostname}`
+    const possiblePaths = [
+      '/api/v4/version',
+      '/api/forgejo/v1/version',
+      '/api/v1/version'
+    ]
 
     for (const path of possiblePaths) {
       try {
-        const url = `${baseUrl}${path}`;
-        const response = await fetch(url, { method: 'GET', redirect: 'manual' }); // GitLab redirects
+        const url = `${baseUrl}${path}`
+        const response = await fetch(url, {method: 'GET', redirect: 'manual'}) // GitLab redirects
         // invalid API paths
         // to login prompt
         // which returns 200
 
-        const contentType = response.headers.get('Content-Type') || '';
+        const contentType = response.headers.get('Content-Type') || ''
         if (
           (response.ok || [401, 403].includes(response.status)) && // We might get 401, 403
           // as we're unauthorised
           contentType.includes('application/json')
         ) {
-          return path.includes('/version') ? url.replace('/version', '') : url;
+          return path.includes('/version') ? url.replace('/version', '') : url
         }
 
       } catch (error) {
        // Ignore errors and try the next path
       }
     }
 
-    throw new Error(`Unable to determine API base URL for hostname: ${hostname}`);
+    throw new Error(
+      `Unable to determine API base URL for hostname: ${hostname}`
+    )
   }
 
   private parseRepository(repository: string): Repository {
```
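A sketch of how the probing helper is meant to be used, mirroring the call added in `create-pull-request.ts`. The hostname and the `./github-helper` module path are assumptions for illustration:

```ts
import {GitHubHelper} from './github-helper'

async function resolveClient(token: string): Promise<GitHubHelper> {
  // For github.com this short-circuits to https://api.github.com; for other
  // hosts it probes /api/v4/version, /api/forgejo/v1/version and
  // /api/v1/version and keeps the first path that answers with JSON.
  const apiUrl = await GitHubHelper.determineApiUrl('git.example.org')
  return new GitHubHelper(apiUrl, token)
}
```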
```diff
@@ -249,6 +254,7 @@ export class GitHubHelper {
   }
 
   async pushSignedCommits(
+    git: GitCommandManager,
     branchCommits: Commit[],
     baseCommit: Commit,
     repoPath: string,
@@ -262,6 +268,7 @@ export class GitHubHelper {
     }
     for (const commit of branchCommits) {
       headCommit = await this.createCommit(
+        git,
         commit,
         headCommit,
         repoPath,
@@ -273,6 +280,7 @@ export class GitHubHelper {
   }
 
   private async createCommit(
+    git: GitCommandManager,
     commit: Commit,
     parentCommit: CommitResponse,
     repoPath: string,
@@ -298,10 +306,10 @@ export class GitHubHelper {
         let sha: string | null = null
         if (status === 'A' || status === 'M') {
           try {
-            const {data: blob} = await blobCreationLimit(() =>
+            const {data: blob} = await blobCreationLimit(async () =>
               this.octokit.rest.git.createBlob({
                 ...repository,
-                content: utils.readFileBase64([repoPath, path]),
+                content: await git.showFileAtRefBase64(commit.sha, path),
                 encoding: 'base64'
               })
             )
```
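The practical effect of the last change above, sketched in isolation: blob content is now read from the commit itself via the new `showFileAtRefBase64` helper rather than from the working tree, so it matches what was actually committed. The surrounding function, parameter names, and module paths below are illustrative assumptions:

```ts
import {GitCommandManager} from './git-command-manager'
import {Octokit} from './octokit-client'

// Illustrative: create a blob for one changed file of a commit.
async function blobForChange(
  octokit: Octokit,
  git: GitCommandManager,
  owner: string,
  repo: string,
  commitSha: string,
  filePath: string
): Promise<string> {
  // Reads `git show <sha>:<path>` as base64, i.e. the file as committed,
  // not whatever currently sits in the checkout.
  const content = await git.showFileAtRefBase64(commitSha, filePath)
  const {data: blob} = await octokit.rest.git.createBlob({
    owner,
    repo,
    content,
    encoding: 'base64'
  })
  return blob.sha
}
```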
`src/utils.ts` (10 lines changed)

```diff
@@ -126,16 +126,6 @@ export function readFile(path: string): string {
   return fs.readFileSync(path, 'utf-8')
 }
 
-export function readFileBase64(pathParts: string[]): string {
-  const resolvedPath = path.resolve(...pathParts)
-  if (fs.lstatSync(resolvedPath).isSymbolicLink()) {
-    return fs
-      .readlinkSync(resolvedPath, {encoding: 'buffer'})
-      .toString('base64')
-  }
-  return fs.readFileSync(resolvedPath).toString('base64')
-}
-
 /* eslint-disable @typescript-eslint/no-explicit-any */
 function hasErrorCode(error: any): error is {code: string} {
   return typeof (error && error.code) === 'string'
```