
Compare commits


32 commits

Author SHA1 Message Date
Otto Richter
e6b84f9191 ci: move composite workflow location
otherwise, they unfortunately display in the Forgejo UI with an error message
2024-10-01 16:51:02 +02:00
Otto
de99a9c93e Merge pull request 'Update actions/setup-go action to v5 (forgejo)' (#5441) from renovate/forgejo-actions-setup-go-5.x into forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5441
Reviewed-by: Michael Kriese <michael.kriese@gmx.de>
Reviewed-by: Otto <otto@codeberg.org>
2024-10-01 14:17:19 +00:00
forgejo-renovate-action
36f2b8656e Merge pull request 'Update ghcr.io/devcontainers/features/git-lfs Docker tag to v1.2.3 (forgejo)' (#5437) from renovate/forgejo-devcontainer-packages into forgejo 2024-10-01 13:54:58 +00:00
Renovate Bot
be373e948e Update actions/setup-go action to v5 2024-10-01 10:02:16 +00:00
Renovate Bot
f79d2fba07 Update ghcr.io/devcontainers/features/git-lfs Docker tag to v1.2.3 2024-10-01 10:02:11 +00:00
forgejo-renovate-action
d2eac83f6a Merge pull request 'Update dependency eslint-plugin-sonarjs to v2.0.3 (forgejo)' (#5436) from renovate/forgejo-linters into forgejo 2024-10-01 08:53:47 +00:00
Earl Warren
df0e50a08f Merge pull request 'Update dependency @github/text-expander-element to v2.7.2 (forgejo)' (#5396) from renovate/forgejo-github-text-expander-element-2.x into forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5396
Reviewed-by: Michael Kriese <michael.kriese@gmx.de>
2024-10-01 08:38:55 +00:00
Earl Warren
d25a3709d9 Merge pull request 'chore: remove spurious comment in tests' (#5434) from earl-warren/forgejo:wip-container-cleanup into forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5434
Reviewed-by: Michael Kriese <michael.kriese@gmx.de>
2024-10-01 08:37:36 +00:00
Renovate Bot
e28a1e1d38 Update dependency eslint-plugin-sonarjs to v2.0.3 2024-10-01 08:05:28 +00:00
Earl Warren
aec55ac1b6 Merge pull request '[gitea] week 2024-40 cherry pick (gitea/main -> forgejo)' (#5416) from earl-warren/wcp/2024-40 into forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5416
Reviewed-by: Shiny Nematoda <snematoda@noreply.codeberg.org>
2024-10-01 07:52:05 +00:00
Earl Warren
9a7fc2e55e Merge pull request 'Update actions/cache action to v4 (forgejo)' (#5426) from renovate/forgejo-actions-cache-4.x into forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5426
Reviewed-by: Michael Kriese <michael.kriese@gmx.de>
2024-10-01 07:50:24 +00:00
Earl Warren
2099c2af7c
chore: remove spurious comment in tests 2024-09-30 18:47:30 +02:00
Earl Warren
c4d2635839 Merge pull request 'fix: referenced sha256:* container images may be deleted' (#5430) from earl-warren/forgejo:wip-container-cleanup into forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5430
Reviewed-by: Michael Kriese <michael.kriese@gmx.de>
2024-09-30 16:45:09 +00:00
Earl Warren
08999e8189 Merge pull request 'Lock file maintenance (forgejo)' (#5424) from renovate/forgejo-lock-file-maintenance into forgejo
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5424
Reviewed-by: Michael Kriese <michael.kriese@gmx.de>
2024-09-30 15:31:34 +00:00
Earl Warren
0a5fd7fdb8
fix: referenced sha256:* container images may be deleted
The inventory of the sha256:* images and the manifest indexes that
reference them is incomplete because it does not take into account any
image older than the expiration limit. As a result, some sha256:* images
will be considered orphaned although they are referenced from a manifest
index that was created more recently than the expiration limit.

There must not be any filtering based on the creation time when
building the inventory. The expiration limit must only be taken into
account when deleting orphaned images: those that are more recent than
the expiration limit must not be deleted.

This limit is especially important because it protects against a race
between a cleanup task and an ongoing mirroring task. A mirroring
task (such as skopeo sync) will first upload sha256:* images and then
create the corresponding manifest index. If a cleanup races against
it, the sha256:* images that are not yet referenced will be deleted
without skopeo noticing, and the manifest index published later will
contain references to non-existent images.
2024-09-30 16:56:21 +02:00
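
A minimal standalone sketch of the ordering this commit message describes (illustrative names, not Forgejo's actual types): the inventory covers every image regardless of age, and the expiration limit is applied only when selecting deletion candidates.

package cleanup

import "time"

type image struct {
	digest     string
	created    time.Time
	referenced bool // true when a manifest index points at this sha256:* image
}

// orphansToDelete inventories all images with no age filter, then applies
// the expiration limit only at deletion time, so images uploaded by an
// in-flight mirroring task survive until their manifest index appears.
func orphansToDelete(images []image, olderThan time.Duration) []string {
	cutoff := time.Now().Add(-olderThan)
	var doomed []string
	for _, img := range images {
		if !img.referenced && img.created.Before(cutoff) {
			doomed = append(doomed, img.digest)
		}
	}
	return doomed
}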
Renovate Bot
e3eaa284bb Update actions/checkout action to v4 (forgejo) (#5427)
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5427
Reviewed-by: Michael Kriese <michael.kriese@gmx.de>
Co-authored-by: Renovate Bot <forgejo-renovate-action@forgejo.org>
Co-committed-by: Renovate Bot <forgejo-renovate-action@forgejo.org>
2024-09-30 13:08:26 +00:00
Renovate Bot
92305933b1 Update actions/setup-node action to v4 (forgejo) (#5428)
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/5428
Reviewed-by: Michael Kriese <michael.kriese@gmx.de>
Co-authored-by: Renovate Bot <forgejo-renovate-action@forgejo.org>
Co-committed-by: Renovate Bot <forgejo-renovate-action@forgejo.org>
2024-09-30 12:56:08 +00:00
Renovate Bot
502a6a4461 Update actions/cache action to v4 2024-09-30 12:06:49 +00:00
forgejo-renovate-action
b2aa644859 Merge pull request 'Update renovate to v38.101.1 (forgejo)' (#5422) from renovate/forgejo-renovate into forgejo 2024-09-30 06:25:46 +00:00
Renovate Bot
0c993085e0 Lock file maintenance 2024-09-30 02:03:35 +00:00
Renovate Bot
1b06287fe3 Update renovate to v38.101.1 2024-09-30 00:08:52 +00:00
Earl Warren
c9ea78eb1c
chore(release-notes): weekly cherry-pick week 2024-40 2024-09-29 11:35:18 +02:00
Bruno Sofiato
8178d6eaba
Change the code search to sort results by relevance (#32134)
Resolves #32129

Signed-off-by: Bruno Sofiato <bruno.sofiato@gmail.com>
(cherry picked from commit 99d0510cb69c3c53cee05ef0e83ed02389925a90)
2024-09-29 11:00:25 +02:00
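
For context, a minimal sketch of what sorting a bleve search by relevance looks like (hypothetical index setup; the actual change is in the indexer diffs further down): "-_score" sorts descending by match score, with UpdatedAt as the tie-breaker.

package search

import "github.com/blevesearch/bleve/v2"

// sortedSearch runs a match query sorted by descending relevance, then by
// the UpdatedAt field; a leading "-" reverses the sort order of a field.
func sortedSearch(index bleve.Index, term string) (*bleve.SearchResult, error) {
	req := bleve.NewSearchRequest(bleve.NewMatchQuery(term))
	req.SortBy([]string{"-_score", "UpdatedAt"})
	return index.Search(req)
}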
yp05327
36af3348bc
Fix wrong status of Set up Job when first step is skipped (#32120)
Fix #32089

(cherry picked from commit 6fa962f409c84477a7a4cf35b4a38a4a93fc3224)
2024-09-29 10:38:49 +02:00
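
The gist of the fix, as a simplified sketch (the Step type here stands in for the real ActionTaskStep): the first step is now the first one that has run or is running, so a leading skipped step no longer drives the Set up Job status.

// firstRunStep returns the first step that has run or is running, or nil
// when every step was skipped or cancelled (simplified types).
func firstRunStep(steps []*Step) *Step {
	for _, s := range steps {
		if s.Status.HasRun() || s.Status.IsRunning() {
			return s
		}
	}
	return nil
}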
Lunny Xiao
b496317b5a
Fix bug when deleting a migrated branch (#32075)
After migrating a repository with pull requests, the branch record is
missing, and after a pull request is merged, its branch cannot be deleted.

(cherry picked from commit 5a8568459d22e57cac506465463660526ca6a08f)

Conflicts:
	services/repository/branch.go
  conflict because of [GITEA] Fix typo in formatting error e71b5a038e
2024-09-29 10:37:39 +02:00
Lunny Xiao
5ccf79d05c
Include collaboration repositories on dashboard source/forks/mirrors list (#31946)
Fix #13489

In the original implementation, only `All` displays both your owned and
collaborated repositories. Other filters like `Source`, `Mirrors`,
etc. only display your owned repositories.

This PR removes that limitation. Now, except for `Collaborations`, every
filter displays both your owned and collaborated repositories.

(cherry picked from commit 4947bec8360c152daca23e120eae1732d3848492)
2024-09-29 10:28:08 +02:00
Kemal Zebari
30b8b45e5e
Truncate commit message during Discord webhook push events (#31970)
Resolves #31668.

(cherry picked from commit aadbe0488f454b9f7f5a56765f4530f9d1e2c6ec)
2024-09-29 10:25:52 +02:00
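
A standalone sketch of the truncation idea (not the exact hook code, which appears in the Discord webhook diff further down): keep only the first line of the commit message and cap it at 50 runes, counting runes rather than bytes so multi-byte characters are not split.

package webhook

import "strings"

// truncateSummary keeps the first line of message and caps it at limit
// runes, appending "..." when something was cut off.
func truncateSummary(message string, limit int) string {
	summary := strings.TrimRight(strings.SplitN(message, "\n", 2)[0], "\r")
	runes := []rune(summary)
	if len(runes) <= limit {
		return summary
	}
	return string(runes[:limit-3]) + "..."
}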
Lunny Xiao
65b3136261
Fix panic when cloning with wrong ssh format. (#32076)
(cherry picked from commit 3f2d8f873035b614b4cdb447d8e16f5af82cefe8)
2024-09-29 10:22:01 +02:00
cloudchamb3r
ff65d34f03
Fix Bug in Issue/pulls list (#32081)
fix #32080

## After
### for opened issues
<img width="1199" alt="Screenshot 2024-09-19 at 6 29 31 PM"
src="https://github.com/user-attachments/assets/86cf48ad-5e4b-4dcb-8abe-4d7fd74e0aec">

### for closed issues
<img width="1208" alt="Screenshot 2024-09-19 at 6 29 37 PM"
src="https://github.com/user-attachments/assets/a16bc545-bfcf-49a4-be52-3e7334910482">

### for all issues
<img width="1340" alt="Screenshot 2024-09-20 at 12 07 12 PM"
src="https://github.com/user-attachments/assets/b2309c8f-e59d-44e9-ae3b-bf54e1196169">

(cherry picked from commit e1f0598c8f5af5ac95f5e13b74fbab99506762db)
2024-09-29 10:08:30 +02:00
Earl Warren
a226064711
Fix artifact v4 upload above 8MB (#31664) (fix lint errors) 2024-09-29 09:58:47 +02:00
ChristopherHX
8f0a05a7e4
Fix artifact v4 upload above 8MB (#31664)
Multiple chunks are uploaded with type "block" without using
"appendBlock", and possibly out of order for bigger uploads;
8MB seems to be the chunk size.

This change parses the blockList uploaded after all blocks to get the
final artifact size and to order the blocks correctly before calculating
the sha256 checksum over all of them.

Fixes #31354

(cherry picked from commit b594cec2bda6f861effedb2e8e0a7ebba191c0e9)

Conflicts:
	routers/api/actions/artifactsv4.go
  conflict because of Refactor AppURL usage (#30885) 67c1a07285008cc00036a87cef966c3bd519a50c
    that was not cherry-picked in Forgejo
    the resolution consists of removing the extra ctx argument
2024-09-29 09:24:15 +02:00
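
To illustrate the payload described above, a self-contained sketch (the sample blockIds are made up) of decoding the BlockList XML that the runner uploads after the last block:

package main

import (
	"encoding/xml"
	"fmt"
	"strings"
)

// BlockList mirrors the shape this change parses: one <Latest> entry per
// uploaded block, in the order the blocks must be reassembled.
type BlockList struct {
	Latest []string `xml:"Latest"`
}

func main() {
	payload := `<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<BlockList><Latest>blockId1</Latest><Latest>blockId2</Latest></BlockList>`
	var bl BlockList
	if err := xml.NewDecoder(strings.NewReader(payload)).Decode(&bl); err != nil {
		panic(err)
	}
	fmt.Println(bl.Latest) // [blockId1 blockId2]
}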
Renovate Bot
ed656ca0f3 Update dependency @github/text-expander-element to v2.7.2 2024-09-26 00:05:12 +00:00
35 changed files with 1049 additions and 540 deletions


@@ -6,7 +6,7 @@
     "ghcr.io/devcontainers/features/node:1": {
       "version": "20"
     },
-    "ghcr.io/devcontainers/features/git-lfs:1.2.1": {},
+    "ghcr.io/devcontainers/features/git-lfs:1.2.3": {},
     "ghcr.io/devcontainers-contrib/features/poetry:2": {},
     "ghcr.io/devcontainers/features/python:1": {
       "version": "3.12"


@@ -25,7 +25,7 @@ jobs:
     if: ${{ !startsWith(vars.ROLE, 'forgejo-') }}
     runs-on: self-hosted
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - id: forgejo
        uses: https://code.forgejo.org/actions/setup-forgejo@v1


@@ -27,7 +27,7 @@ jobs:
     # root is used for testing, allow it
     if: vars.ROLE == 'forgejo-integration' || github.repository_owner == 'root'
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
@@ -37,11 +37,11 @@ jobs:
           repository="${{ github.repository }}"
           echo "value=${repository##*/}" >> "$GITHUB_OUTPUT"
-      - uses: https://code.forgejo.org/actions/setup-node@v3
+      - uses: https://code.forgejo.org/actions/setup-node@v4
        with:
          node-version: 20
-      - uses: https://code.forgejo.org/actions/setup-go@v4
+      - uses: https://code.forgejo.org/actions/setup-go@v5
        with:
          go-version-file: "go.mod"
@@ -87,7 +87,7 @@ jobs:
       - name: cache node_modules
         id: node
-        uses: https://code.forgejo.org/actions/cache@v3
+        uses: https://code.forgejo.org/actions/cache@v4
        with:
          path: |
            node_modules


@@ -20,7 +20,7 @@ jobs:
       image: 'code.forgejo.org/oci/playwright:latest'
     steps:
       - uses: https://code.forgejo.org/actions/checkout@v4
-      - uses: https://code.forgejo.org/actions/setup-go@v4
+      - uses: https://code.forgejo.org/actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - run: |


@@ -39,7 +39,7 @@ jobs:
     runs-on: self-hosted
     if: vars.DOER != '' && vars.FORGEJO != '' && vars.TO_OWNER != '' && vars.FROM_OWNER != '' && secrets.TOKEN != ''
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: copy & sign
        uses: https://code.forgejo.org/forgejo/forgejo-build-publish/publish@v5
@@ -72,7 +72,7 @@ jobs:
       - name: set up go for the DNS update below
         if: vars.ROLE == 'forgejo-experimental' && secrets.OVH_APP_KEY != ''
-        uses: https://code.forgejo.org/actions/setup-go@v4
+        uses: https://code.forgejo.org/actions/setup-go@v5
        with:
          go-version-file: "go.mod"
      - name: update the _release.experimental DNS record


@@ -11,9 +11,9 @@ jobs:
     container:
       image: 'code.forgejo.org/oci/node:20-bookworm'
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
-      - uses: https://code.forgejo.org/actions/setup-go@v4
+      - uses: https://code.forgejo.org/actions/checkout@v4
+      - uses: https://code.forgejo.org/actions/setup-go@v5
        with:
          go-version-file: "go.mod"
          cache: false


@@ -12,7 +12,7 @@ jobs:
     container:
       image: 'code.forgejo.org/oci/node:20-bookworm'
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
+      - uses: https://code.forgejo.org/actions/checkout@v4
      - name: event
        run: |
@@ -23,7 +23,7 @@ jobs:
           ${{ toJSON(github.event) }}
           EOF
-      - uses: https://code.forgejo.org/actions/setup-go@v4
+      - uses: https://code.forgejo.org/actions/setup-go@v5
        with:
          go-version-file: "go.mod"
          cache: false


@@ -23,7 +23,7 @@ jobs:
     runs-on: docker
     container:
-      image: code.forgejo.org/forgejo-contrib/renovate:38.93.2
+      image: code.forgejo.org/forgejo-contrib/renovate:38.101.1
     steps:
       - name: Load renovate repo cache


@@ -19,18 +19,18 @@ jobs:
           cat <<'EOF'
           ${{ toJSON(github) }}
           EOF
-      - uses: https://code.forgejo.org/actions/checkout@v3
-      - uses: ./.forgejo/workflows/composite/setup-env
+      - uses: https://code.forgejo.org/actions/checkout@v4
+      - uses: ./.forgejo/workflows-composite/setup-env
       - run: su forgejo -c 'make deps-backend deps-tools'
       - run: su forgejo -c 'make --always-make -j$(nproc) lint-backend tidy-check swagger-check fmt-check swagger-validate' # ensure the "go-licenses" make target runs
-      - uses: ./.forgejo/workflows/composite/build-backend
+      - uses: ./.forgejo/workflows-composite/build-backend
   frontend-checks:
     if: ${{ !startsWith(vars.ROLE, 'forgejo-') }}
     runs-on: docker
     container:
       image: 'code.forgejo.org/oci/node:20-bookworm'
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
+      - uses: https://code.forgejo.org/actions/checkout@v4
       - run: make deps-frontend
       - run: make lint-frontend
       - run: make checks-frontend
@@ -57,17 +57,17 @@ jobs:
           MINIO_ROOT_USER: 123456
           MINIO_ROOT_PASSWORD: 12345678
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
-      - uses: ./.forgejo/workflows/composite/setup-env
+      - uses: https://code.forgejo.org/actions/checkout@v4
+      - uses: ./.forgejo/workflows-composite/setup-env
       - name: install git >= 2.42
-        uses: ./.forgejo/workflows/composite/apt-install-from
+        uses: ./.forgejo/workflows-composite/apt-install-from
        with:
          packages: git
       - name: test release-notes-assistant.sh
         run: |
           apt-get -q install -qq -y jq
           ./release-notes-assistant.sh test_main
-      - uses: ./.forgejo/workflows/composite/build-backend
+      - uses: ./.forgejo/workflows-composite/build-backend
       - run: |
           su forgejo -c 'make test-backend test-check'
         timeout-minutes: 50
@@ -101,13 +101,13 @@ jobs:
       image: ${{ matrix.cacher.image }}
       options: ${{ matrix.cacher.options }}
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
-      - uses: ./.forgejo/workflows/composite/setup-env
+      - uses: https://code.forgejo.org/actions/checkout@v4
+      - uses: ./.forgejo/workflows-composite/setup-env
       - name: install git >= 2.42
-        uses: ./.forgejo/workflows/composite/apt-install-from
+        uses: ./.forgejo/workflows-composite/apt-install-from
        with:
          packages: git
-      - uses: ./.forgejo/workflows/composite/build-backend
+      - uses: ./.forgejo/workflows-composite/build-backend
       - run: |
           su forgejo -c 'make test-remote-cacher test-check'
         timeout-minutes: 50
@@ -132,13 +132,13 @@ jobs:
       #
       MYSQL_EXTRA_FLAGS: --innodb-adaptive-flushing=OFF --innodb-buffer-pool-size=4G --innodb-log-buffer-size=128M --innodb-flush-log-at-trx-commit=0 --innodb-flush-log-at-timeout=30 --innodb-flush-method=nosync --innodb-fsync-threshold=1000000000
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
-      - uses: ./.forgejo/workflows/composite/setup-env
+      - uses: https://code.forgejo.org/actions/checkout@v4
+      - uses: ./.forgejo/workflows-composite/setup-env
       - name: install dependencies & git >= 2.42
-        uses: ./.forgejo/workflows/composite/apt-install-from
+        uses: ./.forgejo/workflows-composite/apt-install-from
        with:
          packages: git git-lfs
-      - uses: ./.forgejo/workflows/composite/build-backend
+      - uses: ./.forgejo/workflows-composite/build-backend
       - run: |
           su forgejo -c 'make test-mysql-migration test-mysql'
         timeout-minutes: 50
@@ -164,13 +164,13 @@ jobs:
           POSTGRES_DB: test
           POSTGRES_PASSWORD: postgres
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
-      - uses: ./.forgejo/workflows/composite/setup-env
+      - uses: https://code.forgejo.org/actions/checkout@v4
+      - uses: ./.forgejo/workflows-composite/setup-env
       - name: install dependencies & git >= 2.42
-        uses: ./.forgejo/workflows/composite/apt-install-from
+        uses: ./.forgejo/workflows-composite/apt-install-from
        with:
          packages: git git-lfs
-      - uses: ./.forgejo/workflows/composite/build-backend
+      - uses: ./.forgejo/workflows-composite/build-backend
       - run: |
           su forgejo -c 'make test-pgsql-migration test-pgsql'
         timeout-minutes: 50
@@ -185,13 +185,13 @@ jobs:
     container:
       image: 'code.forgejo.org/oci/node:20-bookworm'
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
-      - uses: ./.forgejo/workflows/composite/setup-env
+      - uses: https://code.forgejo.org/actions/checkout@v4
+      - uses: ./.forgejo/workflows-composite/setup-env
       - name: install dependencies & git >= 2.42
-        uses: ./.forgejo/workflows/composite/apt-install-from
+        uses: ./.forgejo/workflows-composite/apt-install-from
        with:
          packages: git git-lfs
-      - uses: ./.forgejo/workflows/composite/build-backend
+      - uses: ./.forgejo/workflows-composite/build-backend
       - run: |
           su forgejo -c 'make test-sqlite-migration test-sqlite'
         timeout-minutes: 50
@@ -212,7 +212,7 @@ jobs:
     container:
       image: 'code.forgejo.org/oci/node:20-bookworm'
     steps:
-      - uses: https://code.forgejo.org/actions/checkout@v3
-      - uses: ./.forgejo/workflows/composite/setup-env
+      - uses: https://code.forgejo.org/actions/checkout@v4
+      - uses: ./.forgejo/workflows-composite/setup-env
       - run: su forgejo -c 'make deps-backend deps-tools'
       - run: su forgejo -c 'make security-check'


@@ -39,7 +39,7 @@ GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1 # renovate: datasour
 DEADCODE_PACKAGE ?= golang.org/x/tools/cmd/deadcode@v0.25.0 # renovate: datasource=go
 GOMOCK_PACKAGE ?= go.uber.org/mock/mockgen@v0.4.0 # renovate: datasource=go
 GOPLS_PACKAGE ?= golang.org/x/tools/gopls@v0.16.2 # renovate: datasource=go
-RENOVATE_NPM_PACKAGE ?= renovate@38.93.2 # renovate: datasource=docker packageName=code.forgejo.org/forgejo-contrib/renovate
+RENOVATE_NPM_PACKAGE ?= renovate@38.101.1 # renovate: datasource=docker packageName=code.forgejo.org/forgejo-contrib/renovate
 ifeq ($(HAS_GO), yes)
 	CGO_EXTRA_CFLAGS := -DSQLITE_MAX_VARIABLE_NUMBER=32766


@@ -147,6 +147,12 @@ func runServ(c *cli.Context) error {
 		return nil
 	}
+	defer func() {
+		if err := recover(); err != nil {
+			_ = fail(ctx, "Internal Server Error", "Panic: %v\n%s", err, log.Stack(2))
+		}
+	}()
 	keys := strings.Split(c.Args().First(), "-")
 	if len(keys) != 2 || keys[0] != "key" {
 		return fail(ctx, "Key ID format error", "Invalid key argument: %s", c.Args().First())
@@ -193,10 +199,7 @@ func runServ(c *cli.Context) error {
 	}
 	verb := words[0]
-	repoPath := words[1]
-	if repoPath[0] == '/' {
-		repoPath = repoPath[1:]
-	}
+	repoPath := strings.TrimPrefix(words[1], "/")
 	var lfsVerb string
 	if verb == lfsAuthenticateVerb {


@@ -18,8 +18,32 @@ func FullSteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep {
 		return fullStepsOfEmptySteps(task)
 	}
-	firstStep := task.Steps[0]
+	// firstStep is the first step that has run or running, not include preStep.
+	// For example,
+	// 1. preStep(Success) -> step1(Success) -> step2(Running) -> step3(Waiting) -> postStep(Waiting): firstStep is step1.
+	// 2. preStep(Success) -> step1(Skipped) -> step2(Success) -> postStep(Success): firstStep is step2.
+	// 3. preStep(Success) -> step1(Running) -> step2(Waiting) -> postStep(Waiting): firstStep is step1.
+	// 4. preStep(Success) -> step1(Skipped) -> step2(Skipped) -> postStep(Skipped): firstStep is nil.
+	// 5. preStep(Success) -> step1(Cancelled) -> step2(Cancelled) -> postStep(Cancelled): firstStep is nil.
+	var firstStep *actions_model.ActionTaskStep
+	// lastHasRunStep is the last step that has run.
+	// For example,
+	// 1. preStep(Success) -> step1(Success) -> step2(Running) -> step3(Waiting) -> postStep(Waiting): lastHasRunStep is step1.
+	// 2. preStep(Success) -> step1(Success) -> step2(Success) -> step3(Success) -> postStep(Success): lastHasRunStep is step3.
+	// 3. preStep(Success) -> step1(Success) -> step2(Failure) -> step3 -> postStep(Waiting): lastHasRunStep is step2.
+	// So its Stopped is the Started of postStep when there are no more steps to run.
+	var lastHasRunStep *actions_model.ActionTaskStep
 	var logIndex int64
+	for _, step := range task.Steps {
+		if firstStep == nil && (step.Status.HasRun() || step.Status.IsRunning()) {
+			firstStep = step
+		}
+		if step.Status.HasRun() {
+			lastHasRunStep = step
+		}
+		logIndex += step.LogLength
+	}
 	preStep := &actions_model.ActionTaskStep{
 		Name: preStepName,
@@ -28,32 +52,17 @@ func FullSteps(task *actions_model.ActionTask) []*actions_model.ActionTaskStep {
 		Status:  actions_model.StatusRunning,
 	}
-	if firstStep.Status.HasRun() || firstStep.Status.IsRunning() {
+	// No step has run or is running, so preStep is equal to the task
+	if firstStep == nil {
+		preStep.Stopped = task.Stopped
+		preStep.Status = task.Status
+	} else {
 		preStep.LogLength = firstStep.LogIndex
 		preStep.Stopped = firstStep.Started
 		preStep.Status = actions_model.StatusSuccess
-	} else if task.Status.IsDone() {
-		preStep.Stopped = task.Stopped
-		preStep.Status = actions_model.StatusFailure
-		if task.Status.IsSkipped() {
-			preStep.Status = actions_model.StatusSkipped
-		}
 	}
 	logIndex += preStep.LogLength
-	// lastHasRunStep is the last step that has run.
-	// For example,
-	// 1. preStep(Success) -> step1(Success) -> step2(Running) -> step3(Waiting) -> postStep(Waiting): lastHasRunStep is step1.
-	// 2. preStep(Success) -> step1(Success) -> step2(Success) -> step3(Success) -> postStep(Success): lastHasRunStep is step3.
-	// 3. preStep(Success) -> step1(Success) -> step2(Failure) -> step3 -> postStep(Waiting): lastHasRunStep is step2.
-	// So its Stopped is the Started of postStep when there are no more steps to run.
-	var lastHasRunStep *actions_model.ActionTaskStep
-	for _, step := range task.Steps {
-		if step.Status.HasRun() {
-			lastHasRunStep = step
-		}
-		logIndex += step.LogLength
-	}
 	if lastHasRunStep == nil {
 		lastHasRunStep = preStep
 	}


@@ -137,6 +137,25 @@ func TestFullSteps(t *testing.T) {
 				{Name: postStepName, Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
 			},
 		},
+		{
+			name: "first step is skipped",
+			task: &actions_model.ActionTask{
+				Steps: []*actions_model.ActionTaskStep{
+					{Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+					{Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
+				},
+				Status:    actions_model.StatusSuccess,
+				Started:   10000,
+				Stopped:   10100,
+				LogLength: 100,
+			},
+			want: []*actions_model.ActionTaskStep{
+				{Name: preStepName, Status: actions_model.StatusSuccess, LogIndex: 0, LogLength: 10, Started: 10000, Stopped: 10010},
+				{Status: actions_model.StatusSkipped, LogIndex: 0, LogLength: 0, Started: 0, Stopped: 0},
+				{Status: actions_model.StatusSuccess, LogIndex: 10, LogLength: 80, Started: 10010, Stopped: 10090},
+				{Name: postStepName, Status: actions_model.StatusSuccess, LogIndex: 90, LogLength: 10, Started: 10090, Stopped: 10100},
+			},
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {


@@ -288,6 +288,8 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 		searchRequest.AddFacet("languages", bleve.NewFacetRequest("Language", 10))
 	}
+	searchRequest.SortBy([]string{"-_score", "UpdatedAt"})
+
 	result, err := b.inner.Indexer.SearchInContext(ctx, searchRequest)
 	if err != nil {
 		return 0, nil, nil, err


@@ -318,7 +318,8 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 			NumOfFragments(0). // return all highting content on fragments
 			HighlighterType("fvh"),
 		).
-		Sort("repo_id", true).
+		Sort("_score", false).
+		Sort("updated_at", true).
 		From(start).Size(pageSize).
 		Do(ctx)
 	if err != nil {
@@ -349,7 +350,8 @@ func (b *Indexer) Search(ctx context.Context, opts *internal.SearchOptions) (int
 			NumOfFragments(0). // return all highting content on fragments
 			HighlighterType("fvh"),
 		).
-		Sort("repo_id", true).
+		Sort("_score", false).
+		Sort("updated_at", true).
 		From(start).Size(pageSize).
 		Do(ctx)
 	if err != nil {

package-lock.json (generated): file diff suppressed because it is too large.


@@ -9,7 +9,7 @@
     "@citation-js/plugin-software-formats": "0.6.1",
     "@github/markdown-toolbar-element": "2.2.3",
     "@github/relative-time-element": "4.4.3",
-    "@github/text-expander-element": "2.7.1",
+    "@github/text-expander-element": "2.7.2",
     "@mcaptcha/vanilla-glue": "0.1.0-alpha-3",
     "@primer/octicons": "19.9.0",
     "ansi_up": "6.0.2",
@@ -77,7 +77,7 @@
     "eslint-plugin-no-use-extend-native": "0.5.0",
     "eslint-plugin-playwright": "1.6.2",
     "eslint-plugin-regexp": "2.6.0",
-    "eslint-plugin-sonarjs": "2.0.2",
+    "eslint-plugin-sonarjs": "2.0.3",
     "eslint-plugin-unicorn": "55.0.0",
     "eslint-plugin-vitest-globals": "1.5.0",
     "eslint-plugin-vue": "9.28.0",

release-notes/5416.md (new file)

@@ -0,0 +1,3 @@
+feat: [commit](https://codeberg.org/forgejo/forgejo/commit/8178d6eaba64d05799fd3b62fa889bd13bee07c7) Code search results when using the bleve indexer are sorted by relevance.
+fix: [commit](https://codeberg.org/forgejo/forgejo/commit/b496317b5a2aea970bc94ccf6fcde35cd417ec20) After migrating a repository that contains merged pull requests, the branch is missing and cannot be deleted.
+fix: [commit](https://codeberg.org/forgejo/forgejo/commit/a226064711899da07d6b1455a68ef758f2f3e7e0) Forgejo Actions artifact v4 upload above 8MB.


@@ -123,6 +123,54 @@ func listChunksByRunID(st storage.ObjectStorage, runID int64) (map[int64][]*chunk
 	return chunksMap, nil
 }
+func listChunksByRunIDV4(st storage.ObjectStorage, runID, artifactID int64, blist *BlockList) ([]*chunkFileItem, error) {
+	storageDir := fmt.Sprintf("tmpv4%d", runID)
+	var chunks []*chunkFileItem
+	chunkMap := map[string]*chunkFileItem{}
+	dummy := &chunkFileItem{}
+	for _, name := range blist.Latest {
+		chunkMap[name] = dummy
+	}
+	if err := st.IterateObjects(storageDir, func(fpath string, obj storage.Object) error {
+		baseName := filepath.Base(fpath)
+		if !strings.HasPrefix(baseName, "block-") {
+			return nil
+		}
+		// when read chunks from storage, it only contains storage dir and basename,
+		// no matter the subdirectory setting in storage config
+		item := chunkFileItem{Path: storageDir + "/" + baseName, ArtifactID: artifactID}
+		var size int64
+		var b64chunkName string
+		if _, err := fmt.Sscanf(baseName, "block-%d-%d-%s", &item.RunID, &size, &b64chunkName); err != nil {
+			return fmt.Errorf("parse content range error: %v", err)
+		}
+		rchunkName, err := base64.URLEncoding.DecodeString(b64chunkName)
+		if err != nil {
+			return fmt.Errorf("failed to parse chunkName: %v", err)
+		}
+		chunkName := string(rchunkName)
+		item.End = item.Start + size - 1
+		if _, ok := chunkMap[chunkName]; ok {
+			chunkMap[chunkName] = &item
+		}
+		return nil
+	}); err != nil {
+		return nil, err
+	}
+	for i, name := range blist.Latest {
+		chunk, ok := chunkMap[name]
+		if !ok || chunk.Path == "" {
+			return nil, fmt.Errorf("missing Chunk (%d/%d): %s", i, len(blist.Latest), name)
+		}
+		chunks = append(chunks, chunk)
+		if i > 0 {
+			chunk.Start = chunkMap[blist.Latest[i-1]].End + 1
+			chunk.End += chunk.Start
+		}
+	}
+	return chunks, nil
+}
 func mergeChunksForRun(ctx *ArtifactContext, st storage.ObjectStorage, runID int64, artifactName string) error {
 	// read all db artifacts by name
 	artifacts, err := db.Find[actions.ActionArtifact](ctx, actions.FindArtifactsOptions{
@@ -230,7 +278,7 @@ func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st st
 		rawChecksum := hash.Sum(nil)
 		actualChecksum := hex.EncodeToString(rawChecksum)
 		if !strings.HasSuffix(checksum, actualChecksum) {
-			return fmt.Errorf("update artifact error checksum is invalid")
+			return fmt.Errorf("update artifact error checksum is invalid %v vs %v", checksum, actualChecksum)
 		}
 	}


@@ -24,8 +24,15 @@ package actions
 // PUT: http://localhost:3000/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact?sig=mO7y35r4GyjN7fwg0DTv3-Fv1NDXD84KLEgLpoPOtDI=&expires=2024-01-23+21%3A48%3A37.20833956+%2B0100+CET&artifactName=test&taskID=75&comp=block
 // 1.3. Continue Upload Zip Content to Blobstorage (unauthenticated request), repeat until everything is uploaded
 // PUT: http://localhost:3000/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact?sig=mO7y35r4GyjN7fwg0DTv3-Fv1NDXD84KLEgLpoPOtDI=&expires=2024-01-23+21%3A48%3A37.20833956+%2B0100+CET&artifactName=test&taskID=75&comp=appendBlock
-// 1.4. Unknown xml payload to Blobstorage (unauthenticated request), ignored for now
+// 1.4. BlockList xml payload to Blobstorage (unauthenticated request)
+//      Files of about 800MB are uploaded in parallel and / or out of order, this file is needed to ensure the correct order
 // PUT: http://localhost:3000/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact?sig=mO7y35r4GyjN7fwg0DTv3-Fv1NDXD84KLEgLpoPOtDI=&expires=2024-01-23+21%3A48%3A37.20833956+%2B0100+CET&artifactName=test&taskID=75&comp=blockList
+// Request
+// <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+// <BlockList>
+//   <Latest>blockId1</Latest>
+//   <Latest>blockId2</Latest>
+// </BlockList>
 // 1.5. FinalizeArtifact
 // Post: /twirp/github.actions.results.api.v1.ArtifactService/FinalizeArtifact
 // Request
@@ -82,6 +89,7 @@ import (
 	"crypto/hmac"
 	"crypto/sha256"
 	"encoding/base64"
+	"encoding/xml"
 	"fmt"
 	"io"
 	"net/http"
@@ -153,31 +161,34 @@ func ArtifactsV4Routes(prefix string) *web.Route {
 	return m
 }
-func (r artifactV4Routes) buildSignature(endp, expires, artifactName string, taskID int64) []byte {
+func (r artifactV4Routes) buildSignature(endp, expires, artifactName string, taskID, artifactID int64) []byte {
 	mac := hmac.New(sha256.New, setting.GetGeneralTokenSigningSecret())
 	mac.Write([]byte(endp))
 	mac.Write([]byte(expires))
 	mac.Write([]byte(artifactName))
 	mac.Write([]byte(fmt.Sprint(taskID)))
+	mac.Write([]byte(fmt.Sprint(artifactID)))
 	return mac.Sum(nil)
 }
-func (r artifactV4Routes) buildArtifactURL(endp, artifactName string, taskID int64) string {
+func (r artifactV4Routes) buildArtifactURL(endp, artifactName string, taskID, artifactID int64) string {
 	expires := time.Now().Add(60 * time.Minute).Format("2006-01-02 15:04:05.999999999 -0700 MST")
 	uploadURL := strings.TrimSuffix(setting.AppURL, "/") + strings.TrimSuffix(r.prefix, "/") +
-		"/" + endp + "?sig=" + base64.URLEncoding.EncodeToString(r.buildSignature(endp, expires, artifactName, taskID)) + "&expires=" + url.QueryEscape(expires) + "&artifactName=" + url.QueryEscape(artifactName) + "&taskID=" + fmt.Sprint(taskID)
+		"/" + endp + "?sig=" + base64.URLEncoding.EncodeToString(r.buildSignature(endp, expires, artifactName, taskID, artifactID)) + "&expires=" + url.QueryEscape(expires) + "&artifactName=" + url.QueryEscape(artifactName) + "&taskID=" + fmt.Sprint(taskID) + "&artifactID=" + fmt.Sprint(artifactID)
 	return uploadURL
 }
 func (r artifactV4Routes) verifySignature(ctx *ArtifactContext, endp string) (*actions.ActionTask, string, bool) {
 	rawTaskID := ctx.Req.URL.Query().Get("taskID")
+	rawArtifactID := ctx.Req.URL.Query().Get("artifactID")
 	sig := ctx.Req.URL.Query().Get("sig")
 	expires := ctx.Req.URL.Query().Get("expires")
 	artifactName := ctx.Req.URL.Query().Get("artifactName")
 	dsig, _ := base64.URLEncoding.DecodeString(sig)
 	taskID, _ := strconv.ParseInt(rawTaskID, 10, 64)
+	artifactID, _ := strconv.ParseInt(rawArtifactID, 10, 64)
-	expecedsig := r.buildSignature(endp, expires, artifactName, taskID)
+	expecedsig := r.buildSignature(endp, expires, artifactName, taskID, artifactID)
 	if !hmac.Equal(dsig, expecedsig) {
 		log.Error("Error unauthorized")
 		ctx.Error(http.StatusUnauthorized, "Error unauthorized")
@@ -272,6 +283,8 @@ func (r *artifactV4Routes) createArtifact(ctx *ArtifactContext) {
 		return
 	}
 	artifact.ContentEncoding = ArtifactV4ContentEncoding
+	artifact.FileSize = 0
+	artifact.FileCompressedSize = 0
 	if err := actions.UpdateArtifactByID(ctx, artifact.ID, artifact); err != nil {
 		log.Error("Error UpdateArtifactByID: %v", err)
 		ctx.Error(http.StatusInternalServerError, "Error UpdateArtifactByID")
@@ -280,7 +293,7 @@ func (r *artifactV4Routes) createArtifact(ctx *ArtifactContext) {
 	respData := CreateArtifactResponse{
 		Ok:              true,
-		SignedUploadUrl: r.buildArtifactURL("UploadArtifact", artifactName, ctx.ActionTask.ID),
+		SignedUploadUrl: r.buildArtifactURL("UploadArtifact", artifactName, ctx.ActionTask.ID, artifact.ID),
 	}
 	r.sendProtbufBody(ctx, &respData)
 }
@@ -306,6 +319,8 @@ func (r *artifactV4Routes) uploadArtifact(ctx *ArtifactContext) {
 	comp := ctx.Req.URL.Query().Get("comp")
 	switch comp {
 	case "block", "appendBlock":
+		blockid := ctx.Req.URL.Query().Get("blockid")
+		if blockid == "" {
 			// get artifact by name
 			artifact, err := r.getArtifactByName(ctx, task.Job.RunID, artifactName)
 			if err != nil {
@@ -314,11 +329,6 @@ func (r *artifactV4Routes) uploadArtifact(ctx *ArtifactContext) {
 				return
 			}
-		if comp == "block" {
-			artifact.FileSize = 0
-			artifact.FileCompressedSize = 0
-		}
-
 			_, err = appendUploadChunk(r.fs, ctx, artifact, artifact.FileSize, ctx.Req.ContentLength, artifact.RunID)
 			if err != nil {
 				log.Error("Error runner api getting task: task is not running")
@@ -332,12 +342,54 @@ func (r *artifactV4Routes) uploadArtifact(ctx *ArtifactContext) {
 				ctx.Error(http.StatusInternalServerError, "Error UpdateArtifactByID")
 				return
 			}
+		} else {
+			_, err := r.fs.Save(fmt.Sprintf("tmpv4%d/block-%d-%d-%s", task.Job.RunID, task.Job.RunID, ctx.Req.ContentLength, base64.URLEncoding.EncodeToString([]byte(blockid))), ctx.Req.Body, -1)
+			if err != nil {
+				log.Error("Error runner api getting task: task is not running")
+				ctx.Error(http.StatusInternalServerError, "Error runner api getting task: task is not running")
+				return
+			}
+		}
 		ctx.JSON(http.StatusCreated, "appended")
 	case "blocklist":
+		rawArtifactID := ctx.Req.URL.Query().Get("artifactID")
+		artifactID, _ := strconv.ParseInt(rawArtifactID, 10, 64)
+		_, err := r.fs.Save(fmt.Sprintf("tmpv4%d/%d-%d-blocklist", task.Job.RunID, task.Job.RunID, artifactID), ctx.Req.Body, -1)
+		if err != nil {
+			log.Error("Error runner api getting task: task is not running")
+			ctx.Error(http.StatusInternalServerError, "Error runner api getting task: task is not running")
+			return
+		}
 		ctx.JSON(http.StatusCreated, "created")
 	}
 }
+type BlockList struct {
+	Latest []string `xml:"Latest"`
+}
+type Latest struct {
+	Value string `xml:",chardata"`
+}
+func (r *artifactV4Routes) readBlockList(runID, artifactID int64) (*BlockList, error) {
+	blockListName := fmt.Sprintf("tmpv4%d/%d-%d-blocklist", runID, runID, artifactID)
+	s, err := r.fs.Open(blockListName)
+	if err != nil {
+		return nil, err
+	}
+	xdec := xml.NewDecoder(s)
+	blockList := &BlockList{}
+	err = xdec.Decode(blockList)
+	delerr := r.fs.Delete(blockListName)
+	if delerr != nil {
+		log.Warn("Failed to delete blockList %s: %v", blockListName, delerr)
+	}
+	return blockList, err
+}
 func (r *artifactV4Routes) finalizeArtifact(ctx *ArtifactContext) {
 	var req FinalizeArtifactRequest
@@ -356,18 +408,34 @@ func (r *artifactV4Routes) finalizeArtifact(ctx *ArtifactContext) {
 		ctx.Error(http.StatusNotFound, "Error artifact not found")
 		return
 	}
+	var chunks []*chunkFileItem
+	blockList, err := r.readBlockList(runID, artifact.ID)
+	if err != nil {
+		log.Warn("Failed to read BlockList, fallback to old behavior: %v", err)
 		chunkMap, err := listChunksByRunID(r.fs, runID)
 		if err != nil {
 			log.Error("Error merge chunks: %v", err)
 			ctx.Error(http.StatusInternalServerError, "Error merge chunks")
 			return
 		}
-	chunks, ok := chunkMap[artifact.ID]
+		chunks, ok = chunkMap[artifact.ID]
 		if !ok {
 			log.Error("Error merge chunks")
 			ctx.Error(http.StatusInternalServerError, "Error merge chunks")
 			return
 		}
+	} else {
+		chunks, err = listChunksByRunIDV4(r.fs, runID, artifact.ID, blockList)
+		if err != nil {
+			log.Error("Error merge chunks: %v", err)
+			ctx.Error(http.StatusInternalServerError, "Error merge chunks")
+			return
+		}
+		artifact.FileSize = chunks[len(chunks)-1].End + 1
+		artifact.FileCompressedSize = chunks[len(chunks)-1].End + 1
+	}
 	checksum := ""
 	if req.Hash != nil {
 		checksum = req.Hash.Value
@@ -468,7 +536,7 @@ func (r *artifactV4Routes) getSignedArtifactURL(ctx *ArtifactContext) {
 		}
 	}
 	if respData.SignedUrl == "" {
-		respData.SignedUrl = r.buildArtifactURL("DownloadArtifact", artifactName, ctx.ActionTask.ID)
+		respData.SignedUrl = r.buildArtifactURL("DownloadArtifact", artifactName, ctx.ActionTask.ID, artifact.ID)
 	}
 	r.sendProtbufBody(ctx, &respData)
 }


@@ -476,6 +476,7 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption opt
 	ctx.Data["PosterID"] = posterID
 	ctx.Data["IsFuzzy"] = isFuzzy
 	ctx.Data["Keyword"] = keyword
+	ctx.Data["IsShowClosed"] = isShowClosed
 	switch {
 	case isShowClosed.Value():
 		ctx.Data["State"] = "closed"


@@ -0,0 +1,115 @@
+// Copyright 2024 The Forgejo Authors.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package container
+
+import (
+	"testing"
+	"time"
+
+	"code.gitea.io/gitea/models/db"
+	"code.gitea.io/gitea/models/packages"
+	"code.gitea.io/gitea/models/unittest"
+	"code.gitea.io/gitea/modules/json"
+	"code.gitea.io/gitea/modules/log"
+	container_module "code.gitea.io/gitea/modules/packages/container"
+	"code.gitea.io/gitea/modules/test"
+	"code.gitea.io/gitea/modules/timeutil"
+	container_service "code.gitea.io/gitea/services/packages/container"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestCleanupSHA256(t *testing.T) {
+	require.NoError(t, unittest.PrepareTestDatabase())
+	defer test.MockVariableValue(&container_service.SHA256BatchSize, 1)()
+
+	ctx := db.DefaultContext
+
+	createContainer := func(t *testing.T, name, version, digest string, created timeutil.TimeStamp) {
+		t.Helper()
+
+		ownerID := int64(2001)
+
+		p := packages.Package{
+			OwnerID:   ownerID,
+			LowerName: name,
+			Type:      packages.TypeContainer,
+		}
+		_, err := db.GetEngine(ctx).Insert(&p)
+		require.NoError(t, err)
+
+		var metadata string
+		if digest != "" {
+			m := container_module.Metadata{
+				Manifests: []*container_module.Manifest{
+					{
+						Digest: digest,
+					},
+				},
+			}
+			mt, err := json.Marshal(m)
+			require.NoError(t, err)
+			metadata = string(mt)
+		}
+		v := packages.PackageVersion{
+			PackageID:    p.ID,
+			LowerVersion: version,
+			MetadataJSON: metadata,
+			CreatedUnix:  created,
+		}
+		_, err = db.GetEngine(ctx).NoAutoTime().Insert(&v)
+		require.NoError(t, err)
+	}
+
+	cleanupAndCheckLogs := func(t *testing.T, olderThan time.Duration, expected ...string) {
+		t.Helper()
+		logChecker, cleanup := test.NewLogChecker(log.DEFAULT, log.TRACE)
+		logChecker.Filter(expected...)
+		logChecker.StopMark(container_service.SHA256LogFinish)
+		defer cleanup()
+
+		require.NoError(t, CleanupExpiredData(ctx, olderThan))
+
+		logFiltered, logStopped := logChecker.Check(5 * time.Second)
+		assert.True(t, logStopped)
+		filtered := make([]bool, 0, len(expected))
+		for range expected {
+			filtered = append(filtered, true)
+		}
+		assert.EqualValues(t, filtered, logFiltered, expected)
+	}
+
+	ancient := 1 * time.Hour
+
+	t.Run("no packages, cleanup nothing", func(t *testing.T) {
+		cleanupAndCheckLogs(t, ancient, "Nothing to cleanup")
+	})
+
+	orphan := "orphan"
+	createdLongAgo := timeutil.TimeStamp(time.Now().Add(-(ancient * 2)).Unix())
+	createdRecently := timeutil.TimeStamp(time.Now().Add(-(ancient / 2)).Unix())
+
+	t.Run("an orphaned package created a long time ago is removed", func(t *testing.T) {
+		createContainer(t, orphan, "sha256:"+orphan, "", createdLongAgo)
+		cleanupAndCheckLogs(t, ancient, "Removing 1 entries from `package_version`")
+		cleanupAndCheckLogs(t, ancient, "Nothing to cleanup")
+	})
+
+	t.Run("a newly created orphaned package is not cleaned up", func(t *testing.T) {
+		createContainer(t, orphan, "sha256:"+orphan, "", createdRecently)
+		cleanupAndCheckLogs(t, ancient, "1 out of 1 container image(s) are not deleted because they were created less than")
+		cleanupAndCheckLogs(t, 0, "Removing 1 entries from `package_version`")
+		cleanupAndCheckLogs(t, 0, "Nothing to cleanup")
+	})
+
+	t.Run("a referenced package is not removed", func(t *testing.T) {
+		referenced := "referenced"
+		digest := "sha256:" + referenced
+		createContainer(t, referenced, digest, "", createdRecently)
+		index := "index"
+		createContainer(t, index, index, digest, createdRecently)
+		cleanupAndCheckLogs(t, ancient, "Nothing to cleanup")
+	})
+}


@@ -0,0 +1,14 @@
+// Copyright 2024 The Forgejo Authors.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package container
+
+import (
+	"testing"
+
+	"code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+	unittest.MainTest(m)
+}


@@ -13,6 +13,7 @@ import (
 	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/log"
 	container_module "code.gitea.io/gitea/modules/packages/container"
+	"code.gitea.io/gitea/modules/timeutil"
 )
 var (
@@ -37,18 +38,24 @@ func cleanupSHA256(outerCtx context.Context, olderThan time.Duration) error {
 	defer committer.Close()
 	foundAtLeastOneSHA256 := false
-	shaToVersionID := make(map[string]int64, 100)
+	type packageVersion struct {
+		id      int64
+		created timeutil.TimeStamp
+	}
+	shaToPackageVersion := make(map[string]packageVersion, 100)
 	knownSHA := make(map[string]any, 100)
+	// compute before making the inventory to not race against ongoing
+	// image creations
+	old := timeutil.TimeStamp(time.Now().Add(-olderThan).Unix())
 	log.Debug("Look for all package_version.version that start with sha256:")
-	old := time.Now().Add(-olderThan).Unix()
 	// Iterate over all container versions in ascending order and store
-	// in shaToVersionID all versions with a sha256: prefix. If an index
+	// in shaToPackageVersion all versions with a sha256: prefix. If an index
 	// manifest is found, the sha256: digest it references are removed
-	// from shaToVersionID. If the sha256: digest found in an index
-	// manifest is not already in shaToVersionID, it is stored in
+	// from shaToPackageVersion. If the sha256: digest found in an index
+	// manifest is not already in shaToPackageVersion, it is stored in
 	// knownSHA to be dealt with later.
 	//
 	// Although it is theoretically possible that a sha256: is uploaded
@@ -56,16 +63,16 @@ func cleanupSHA256(outerCtx context.Context, olderThan time.Duration) error {
 	// normal order of operations. First the sha256: version is uploaded
 	// and then the index manifest. When the iteration completes,
 	// knownSHA will therefore be empty most of the time and
-	// shaToVersionID will only contain unreferenced sha256: versions.
+	// shaToPackageVersion will only contain unreferenced sha256: versions.
 	if err := db.GetEngine(ctx).
-		Select("`package_version`.`id`, `package_version`.`lower_version`, `package_version`.`metadata_json`").
+		Select("`package_version`.`id`, `package_version`.`created_unix`, `package_version`.`lower_version`, `package_version`.`metadata_json`").
 		Join("INNER", "`package`", "`package`.`id` = `package_version`.`package_id`").
-		Where("`package`.`type` = ? AND `package_version`.`created_unix` < ?", packages.TypeContainer, old).
+		Where("`package`.`type` = ?", packages.TypeContainer).
 		OrderBy("`package_version`.`id` ASC").
 		Iterate(new(packages.PackageVersion), func(_ int, bean any) error {
 			v := bean.(*packages.PackageVersion)
 			if strings.HasPrefix(v.LowerVersion, "sha256:") {
-				shaToVersionID[v.LowerVersion] = v.ID
+				shaToPackageVersion[v.LowerVersion] = packageVersion{id: v.ID, created: v.CreatedUnix}
 				foundAtLeastOneSHA256 = true
 			} else if strings.Contains(v.MetadataJSON, `"manifests":[{`) {
 				var metadata container_module.Metadata
@@ -74,8 +81,8 @@ func cleanupSHA256(outerCtx context.Context, olderThan time.Duration) error {
 					return nil
 				}
 				for _, manifest := range metadata.Manifests {
-					if _, ok := shaToVersionID[manifest.Digest]; ok {
-						delete(shaToVersionID, manifest.Digest)
+					if _, ok := shaToPackageVersion[manifest.Digest]; ok {
+						delete(shaToPackageVersion, manifest.Digest)
 					} else {
 						knownSHA[manifest.Digest] = true
 					}
@@ -87,10 +94,10 @@ func cleanupSHA256(outerCtx context.Context, olderThan time.Duration) error {
 	}
 	for sha := range knownSHA {
-		delete(shaToVersionID, sha)
+		delete(shaToPackageVersion, sha)
 	}
-	if len(shaToVersionID) == 0 {
+	if len(shaToPackageVersion) == 0 {
 		if foundAtLeastOneSHA256 {
 			log.Debug("All container images with a version matching sha256:* are referenced by an index manifest")
 		} else {
@@ -100,15 +107,24 @@ func cleanupSHA256(outerCtx context.Context, olderThan time.Duration) error {
 		return nil
 	}
-	found := len(shaToVersionID)
+	found := len(shaToPackageVersion)
 	log.Warn("%d container image(s) with a version matching sha256:* are not referenced by an index manifest", found)
 	log.Debug("Deleting unreferenced image versions from `package_version`, `package_file` and `package_property` (%d at a time)", SHA256BatchSize)
 	packageVersionIDs := make([]int64, 0, SHA256BatchSize)
-	for _, id := range shaToVersionID {
-		packageVersionIDs = append(packageVersionIDs, id)
+	tooYoung := 0
+	for _, p := range shaToPackageVersion {
+		if p.created < old {
+			packageVersionIDs = append(packageVersionIDs, p.id)
+		} else {
+			tooYoung++
+		}
+	}
+	if tooYoung > 0 {
+		log.Warn("%d out of %d container image(s) are not deleted because they were created less than %v ago", tooYoung, found, olderThan)
 	}
 	for len(packageVersionIDs) > 0 {


@@ -430,13 +430,12 @@ func DeleteBranch(ctx context.Context, doer *user_model.User, repo *repo_model.R
 	}
 	rawBranch, err := git_model.GetBranch(ctx, repo.ID, branchName)
-	if err != nil {
+	if err != nil && !git_model.IsErrBranchNotExist(err) {
 		return fmt.Errorf("GetBranch: %v", err)
 	}
-	if rawBranch.IsDeleted {
-		return nil
-	}
+	// database branch record not exist or it's a deleted branch
+	notExist := git_model.IsErrBranchNotExist(err) || rawBranch.IsDeleted
 	commit, err := gitRepo.GetBranchCommit(branchName)
 	if err != nil {
@@ -444,9 +443,11 @@ func DeleteBranch(ctx context.Context, doer *user_model.User, repo *repo_model.R
 	}
 	if err := db.WithTx(ctx, func(ctx context.Context) error {
+		if !notExist {
 			if err := git_model.AddDeletedBranch(ctx, repo.ID, branchName, doer.ID); err != nil {
 				return err
 			}
+		}
 		return gitRepo.DeleteBranch(branchName, git.DeleteBranchOptions{
 			Force: true,
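
The fix makes branch deletion tolerant of a missing database record: only unexpected lookup errors abort, the AddDeletedBranch bookkeeping is skipped when the record does not exist or is already marked deleted, and the git-side deletion runs either way. A minimal sketch of that control flow, with hypothetical stand-ins for the git_model calls:

package main

import (
	"errors"
	"fmt"
)

// errBranchNotExist stands in for git_model.ErrBranchNotExist.
var errBranchNotExist = errors.New("branch does not exist")

type branch struct{ IsDeleted bool }

// deleteBranch sketches the new flow: a missing or already-deleted database
// record only skips the bookkeeping step before the git branch is removed.
func deleteBranch(lookup func() (*branch, error), recordDeleted, gitDelete func() error) error {
	b, err := lookup()
	if err != nil && !errors.Is(err, errBranchNotExist) {
		return fmt.Errorf("GetBranch: %w", err) // only unexpected errors abort
	}
	// || short-circuits, so b is only dereferenced when the lookup succeeded
	notExist := errors.Is(err, errBranchNotExist) || b.IsDeleted
	if !notExist {
		if err := recordDeleted(); err != nil { // bookkeeping only for live records
			return err
		}
	}
	return gitDelete() // the branch is removed from git either way
}

func main() {
	noop := func() error { return nil }
	// a branch with no database record is now deleted from git without error
	err := deleteBranch(func() (*branch, error) { return nil, errBranchNotExist }, noop, noop)
	fmt.Println(err) // <nil>
}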


@@ -12,6 +12,7 @@ import (
 	"net/url"
 	"strconv"
 	"strings"
+	"unicode/utf8"
 
 	webhook_model "code.gitea.io/gitea/models/webhook"
 	"code.gitea.io/gitea/modules/git"
@@ -179,8 +180,14 @@ func (d discordConvertor) Push(p *api.PushPayload) (DiscordPayload, error) {
 	var text string
 	// for each commit, generate attachment text
 	for i, commit := range p.Commits {
-		text += fmt.Sprintf("[%s](%s) %s - %s", commit.ID[:7], commit.URL,
-			strings.TrimRight(commit.Message, "\r\n"), commit.Author.Name)
+		// limit the commit message display to just the summary, otherwise it would be hard to read
+		message := strings.TrimRight(strings.SplitN(commit.Message, "\n", 1)[0], "\r")
+		// a limit of 50 is set because GitHub does the same
+		if utf8.RuneCountInString(message) > 50 {
+			message = fmt.Sprintf("%.47s...", message)
+		}
+		text += fmt.Sprintf("[%s](%s) %s - %s", commit.ID[:7], commit.URL, message, commit.Author.Name)
 		// add linebreak to each commit but the last
 		if i < len(p.Commits)-1 {
 			text += "\n"


@@ -80,6 +80,20 @@ func TestDiscordPayload(t *testing.T) {
 		assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
 	})
 
+	t.Run("PushWithLongCommitMessage", func(t *testing.T) {
+		p := pushTestMultilineCommitMessagePayload()
+
+		pl, err := dc.Push(p)
+		require.NoError(t, err)
+
+		assert.Len(t, pl.Embeds, 1)
+		assert.Equal(t, "[test/repo:test] 2 new commits", pl.Embeds[0].Title)
+		assert.Equal(t, "[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) This is a commit summary ⚠️⚠️⚠️⚠️ containing 你好... - user1\n[2020558](http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778) This is a commit summary ⚠️⚠️⚠️⚠️ containing 你好... - user1", pl.Embeds[0].Description)
+		assert.Equal(t, p.Sender.UserName, pl.Embeds[0].Author.Name)
+		assert.Equal(t, setting.AppURL+p.Sender.UserName, pl.Embeds[0].Author.URL)
+		assert.Equal(t, p.Sender.AvatarURL, pl.Embeds[0].Author.IconURL)
+	})
+
 	t.Run("Issue", func(t *testing.T) {
 		p := issueTestPayload()


@@ -64,9 +64,17 @@ func forkTestPayload() *api.ForkPayload {
 }
 
 func pushTestPayload() *api.PushPayload {
+	return pushTestPayloadWithCommitMessage("commit message")
+}
+
+func pushTestMultilineCommitMessagePayload() *api.PushPayload {
+	return pushTestPayloadWithCommitMessage("This is a commit summary ⚠️⚠️⚠️⚠️ containing 你好 ⚠️⚠️️\n\nThis is the message body.")
+}
+
+func pushTestPayloadWithCommitMessage(message string) *api.PushPayload {
 	commit := &api.PayloadCommit{
 		ID:      "2020558fe2e34debb818a514715839cabd25e778",
-		Message: "commit message",
+		Message: message,
 		URL:     "http://localhost:3000/test/repo/commit/2020558fe2e34debb818a514715839cabd25e778",
 		Author: &api.PayloadUser{
 			Name:  "user1",


@@ -1,9 +1,9 @@
 <div class="ui secondary filter menu">
 	{{if not .Repository.IsArchived}}
 		<!-- Action Button -->
-		{{if .IsShowClosed}}
+		{{if and .IsShowClosed.Has .IsShowClosed.Value}}
 			<button class="ui primary basic button issue-action" data-action="open" data-url="{{$.RepoLink}}/issues/status">{{ctx.Locale.Tr "repo.issues.action_open"}}</button>
-		{{else}}
+		{{else if and .IsShowClosed.Has (not .IsShowClosed.Value)}}
 			<button class="ui red basic button issue-action" data-action="close" data-url="{{$.RepoLink}}/issues/status">{{ctx.Locale.Tr "repo.issues.action_close"}}</button>
 		{{end}}
 		{{if $.IsRepoAdmin}}
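
The template now reads .IsShowClosed as an optional value instead of a plain bool, so the three states render distinctly: explicitly closed shows the open-all button, explicitly open shows the close-all button, and an unset filter shows neither (previously it fell through to the close branch). A hand-rolled Go sketch of that three-state shape; optionBool here is a stand-in for illustration, not the project's actual optional type:

package main

import "fmt"

// optionBool is a three-state value: unset, true, or false.
// An empty slice means "not set".
type optionBool []bool

func some(v bool) optionBool { return optionBool{v} }
func none() optionBool       { return nil }

func (o optionBool) Has() bool { return len(o) > 0 }

func (o optionBool) Value() bool {
	if !o.Has() {
		return false
	}
	return o[0]
}

func main() {
	for _, isShowClosed := range []optionBool{some(true), some(false), none()} {
		switch {
		case isShowClosed.Has() && isShowClosed.Value():
			fmt.Println("render the open button") // closed issues are listed
		case isShowClosed.Has() && !isShowClosed.Value():
			fmt.Println("render the close button") // open issues are listed
		default:
			fmt.Println("render neither button") // filter state is unknown
		}
	}
}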


@@ -7,12 +7,14 @@ import (
 	"bytes"
 	"crypto/sha256"
 	"encoding/hex"
+	"encoding/xml"
 	"io"
 	"net/http"
 	"strings"
 	"testing"
 	"time"
 
+	"code.gitea.io/gitea/modules/storage"
 	"code.gitea.io/gitea/routers/api/actions"
 	actions_service "code.gitea.io/gitea/services/actions"
 	"code.gitea.io/gitea/tests"
@@ -175,6 +177,134 @@ func TestActionsArtifactV4UploadSingleFileWithRetentionDays(t *testing.T) {
 	assert.True(t, finalizeResp.Ok)
 }
 
+func TestActionsArtifactV4UploadSingleFileWithPotentialHarmfulBlockID(t *testing.T) {
+	defer tests.PrepareTestEnv(t)()
+
+	token, err := actions_service.CreateAuthorizationToken(48, 792, 193)
+	require.NoError(t, err)
+
+	// acquire artifact upload url
+	req := NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/CreateArtifact", toProtoJSON(&actions.CreateArtifactRequest{
+		Version:                 4,
+		Name:                    "artifactWithPotentialHarmfulBlockID",
+		WorkflowRunBackendId:    "792",
+		WorkflowJobRunBackendId: "193",
+	})).AddTokenAuth(token)
+	resp := MakeRequest(t, req, http.StatusOK)
+	var uploadResp actions.CreateArtifactResponse
+	protojson.Unmarshal(resp.Body.Bytes(), &uploadResp)
+	assert.True(t, uploadResp.Ok)
+	assert.Contains(t, uploadResp.SignedUploadUrl, "/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact")
+
+	// get upload urls
+	idx := strings.Index(uploadResp.SignedUploadUrl, "/twirp/")
+	url := uploadResp.SignedUploadUrl[idx:] + "&comp=block&blockid=%2f..%2fmyfile"
+	blockListURL := uploadResp.SignedUploadUrl[idx:] + "&comp=blocklist"
+
+	// upload artifact chunk
+	body := strings.Repeat("A", 1024)
+	req = NewRequestWithBody(t, "PUT", url, strings.NewReader(body))
+	MakeRequest(t, req, http.StatusCreated)
+
+	// verify that the exploit didn't work
+	_, err = storage.Actions.Stat("myfile")
+	require.Error(t, err)
+
+	// upload artifact blockList
+	blockList := &actions.BlockList{
+		Latest: []string{
+			"/../myfile",
+		},
+	}
+	rawBlockList, err := xml.Marshal(blockList)
+	require.NoError(t, err)
+	req = NewRequestWithBody(t, "PUT", blockListURL, bytes.NewReader(rawBlockList))
+	MakeRequest(t, req, http.StatusCreated)
+
+	t.Logf("Create artifact confirm")
+
+	sha := sha256.Sum256([]byte(body))
+
+	// confirm artifact upload
+	req = NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/FinalizeArtifact", toProtoJSON(&actions.FinalizeArtifactRequest{
+		Name:                    "artifactWithPotentialHarmfulBlockID",
+		Size:                    1024,
+		Hash:                    wrapperspb.String("sha256:" + hex.EncodeToString(sha[:])),
+		WorkflowRunBackendId:    "792",
+		WorkflowJobRunBackendId: "193",
+	})).
+		AddTokenAuth(token)
+	resp = MakeRequest(t, req, http.StatusOK)
+	var finalizeResp actions.FinalizeArtifactResponse
+	protojson.Unmarshal(resp.Body.Bytes(), &finalizeResp)
+	assert.True(t, finalizeResp.Ok)
+}
+
+func TestActionsArtifactV4UploadSingleFileWithChunksOutOfOrder(t *testing.T) {
+	defer tests.PrepareTestEnv(t)()
+
+	token, err := actions_service.CreateAuthorizationToken(48, 792, 193)
+	require.NoError(t, err)
+
+	// acquire artifact upload url
+	req := NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/CreateArtifact", toProtoJSON(&actions.CreateArtifactRequest{
+		Version:                 4,
+		Name:                    "artifactWithChunksOutOfOrder",
+		WorkflowRunBackendId:    "792",
+		WorkflowJobRunBackendId: "193",
+	})).AddTokenAuth(token)
+	resp := MakeRequest(t, req, http.StatusOK)
+	var uploadResp actions.CreateArtifactResponse
+	protojson.Unmarshal(resp.Body.Bytes(), &uploadResp)
+	assert.True(t, uploadResp.Ok)
+	assert.Contains(t, uploadResp.SignedUploadUrl, "/twirp/github.actions.results.api.v1.ArtifactService/UploadArtifact")
+
+	// get upload urls
+	idx := strings.Index(uploadResp.SignedUploadUrl, "/twirp/")
+	block1URL := uploadResp.SignedUploadUrl[idx:] + "&comp=block&blockid=block1"
+	block2URL := uploadResp.SignedUploadUrl[idx:] + "&comp=block&blockid=block2"
+	blockListURL := uploadResp.SignedUploadUrl[idx:] + "&comp=blocklist"
+
+	// upload artifact chunks
+	bodyb := strings.Repeat("B", 1024)
+	req = NewRequestWithBody(t, "PUT", block2URL, strings.NewReader(bodyb))
+	MakeRequest(t, req, http.StatusCreated)
+
+	bodya := strings.Repeat("A", 1024)
+	req = NewRequestWithBody(t, "PUT", block1URL, strings.NewReader(bodya))
+	MakeRequest(t, req, http.StatusCreated)
+
+	// upload artifact blockList
+	blockList := &actions.BlockList{
+		Latest: []string{
+			"block1",
+			"block2",
+		},
+	}
+	rawBlockList, err := xml.Marshal(blockList)
+	require.NoError(t, err)
+	req = NewRequestWithBody(t, "PUT", blockListURL, bytes.NewReader(rawBlockList))
+	MakeRequest(t, req, http.StatusCreated)
+
+	t.Logf("Create artifact confirm")
+
+	sha := sha256.Sum256([]byte(bodya + bodyb))
+
+	// confirm artifact upload
+	req = NewRequestWithBody(t, "POST", "/twirp/github.actions.results.api.v1.ArtifactService/FinalizeArtifact", toProtoJSON(&actions.FinalizeArtifactRequest{
+		Name:                    "artifactWithChunksOutOfOrder",
+		Size:                    2048,
+		Hash:                    wrapperspb.String("sha256:" + hex.EncodeToString(sha[:])),
+		WorkflowRunBackendId:    "792",
+		WorkflowJobRunBackendId: "193",
+	})).
+		AddTokenAuth(token)
+	resp = MakeRequest(t, req, http.StatusOK)
+	var finalizeResp actions.FinalizeArtifactResponse
+	protojson.Unmarshal(resp.Body.Bytes(), &finalizeResp)
+	assert.True(t, finalizeResp.Ok)
+}
+
 func TestActionsArtifactV4DownloadSingle(t *testing.T) {
 	defer tests.PrepareTestEnv(t)()
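
Both new tests probe the artifact v4 block upload path: the first asserts that a path-like block ID (%2f..%2fmyfile) never lands outside the artifact storage prefix, the second that the block list, not upload order, fixes the final byte order (the finalize hash covers bodya followed by bodyb even though block2 was uploaded first). A hedged sketch of the kind of allow-list check the first test exercises; validBlockID and its pattern are assumptions, not the actual Forgejo implementation:

package main

import (
	"fmt"
	"regexp"
)

// blockIDPattern is an assumed conservative allow-list: a block ID is an
// opaque token, never a path, so anything containing separators or dots
// is rejected before it can reach the storage layer.
var blockIDPattern = regexp.MustCompile(`^[A-Za-z0-9_-]{1,64}$`)

func validBlockID(id string) bool {
	return blockIDPattern.MatchString(id)
}

func main() {
	for _, id := range []string{"block1", "block2", "/../myfile"} {
		fmt.Printf("%-14q allowed=%v\n", id, validBlockID(id))
	}
}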


@@ -198,9 +198,9 @@
       }
     },
     "node_modules/@octokit/core/node_modules/@octokit/request-error": {
-      "version": "6.1.4",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.4.tgz",
-      "integrity": "sha512-VpAhIUxwhWZQImo/dWAN/NpPqqojR6PSLgLYAituLM6U+ddx9hCioFGwBr5Mi+oi5CLeJkcAs3gJ0PYYzU6wUg==",
+      "version": "6.1.5",
+      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.5.tgz",
+      "integrity": "sha512-IlBTfGX8Yn/oFPMwSfvugfncK2EwRLjzbrpifNaMY8o/HTEAFqCA1FZxjD9cWvSKBHgrIhc4CSBIzMxiLsbzFQ==",
       "license": "MIT",
       "peer": true,
       "dependencies": {
@@ -211,9 +211,9 @@
       }
     },
     "node_modules/@octokit/core/node_modules/@octokit/types": {
-      "version": "13.5.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz",
-      "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==",
+      "version": "13.6.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.0.tgz",
+      "integrity": "sha512-CrooV/vKCXqwLa+osmHLIMUb87brpgUqlqkPGc6iE2wCkUvTrHiXFMhAKoDDaAAYJrtKtrFTgSQTg5nObBEaew==",
       "license": "MIT",
       "peer": true,
       "dependencies": {
@@ -304,9 +304,9 @@
       }
     },
     "node_modules/@octokit/graphql/node_modules/@octokit/request-error": {
-      "version": "6.1.4",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.4.tgz",
-      "integrity": "sha512-VpAhIUxwhWZQImo/dWAN/NpPqqojR6PSLgLYAituLM6U+ddx9hCioFGwBr5Mi+oi5CLeJkcAs3gJ0PYYzU6wUg==",
+      "version": "6.1.5",
+      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.5.tgz",
+      "integrity": "sha512-IlBTfGX8Yn/oFPMwSfvugfncK2EwRLjzbrpifNaMY8o/HTEAFqCA1FZxjD9cWvSKBHgrIhc4CSBIzMxiLsbzFQ==",
       "license": "MIT",
       "peer": true,
       "dependencies": {
@@ -317,9 +317,9 @@
       }
     },
     "node_modules/@octokit/graphql/node_modules/@octokit/types": {
-      "version": "13.5.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz",
-      "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==",
+      "version": "13.6.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.0.tgz",
+      "integrity": "sha512-CrooV/vKCXqwLa+osmHLIMUb87brpgUqlqkPGc6iE2wCkUvTrHiXFMhAKoDDaAAYJrtKtrFTgSQTg5nObBEaew==",
       "license": "MIT",
       "peer": true,
       "dependencies": {
@@ -492,9 +492,9 @@
       "license": "MIT"
     },
     "node_modules/@types/node": {
-      "version": "22.5.5",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.5.tgz",
-      "integrity": "sha512-Xjs4y5UPO/CLdzpgR6GirZJx36yScjh73+2NlLlkFRSoQN8B0DpfXPdZGnvVmLRLOsqDpOfTNv7D9trgGhmOIA==",
+      "version": "22.7.4",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.4.tgz",
+      "integrity": "sha512-y+NPi1rFzDs1NdQHHToqeiX2TIS79SWEAw9GYhkkx8bD0ChpfqC+n2j5OXOCpzfojBEBt6DnEnnG9MY0zk1XLg==",
       "license": "MIT",
       "dependencies": {
         "undici-types": "~6.19.2"
@@ -1115,9 +1115,9 @@
       }
     },
     "node_modules/browserslist": {
-      "version": "4.23.3",
-      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz",
-      "integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==",
+      "version": "4.24.0",
+      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz",
+      "integrity": "sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==",
       "funding": [
         {
           "type": "opencollective",
@@ -1134,8 +1134,8 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "caniuse-lite": "^1.0.30001646",
-        "electron-to-chromium": "^1.5.4",
+        "caniuse-lite": "^1.0.30001663",
+        "electron-to-chromium": "^1.5.28",
         "node-releases": "^2.0.18",
         "update-browserslist-db": "^1.1.0"
       },
@@ -1219,9 +1219,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.30001660",
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001660.tgz",
-      "integrity": "sha512-GacvNTTuATm26qC74pt+ad1fW15mlQ/zuTzzY1ZoIzECTP8HURDfF43kNxPgf7H1jmelCBQTTbBNxdSXOA7Bqg==",
+      "version": "1.0.30001664",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001664.tgz",
+      "integrity": "sha512-AmE7k4dXiNKQipgn7a2xg558IRqPN3jMQY/rOsbxDhrd0tyChwbITBfiwtnqz8bi2M5mIWbxAYBvk7W7QBUS2g==",
       "funding": [
         {
           "type": "opencollective",
@@ -1961,9 +1961,9 @@
       }
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.5.23",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.23.tgz",
-      "integrity": "sha512-mBhODedOXg4v5QWwl21DjM5amzjmI1zw9EPrPK/5Wx7C8jt33bpZNrC7OhHUG3pxRtbLpr3W2dXT+Ph1SsfRZA==",
+      "version": "1.5.29",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.29.tgz",
+      "integrity": "sha512-PF8n2AlIhCKXQ+gTpiJi0VhcHDb69kYX4MtCiivctc2QD3XuNZ/XIOlbGzt7WAjjEev0TtaH6Cu3arZExm5DOw==",
       "license": "ISC"
     },
     "node_modules/emoji-regex": {
@@ -2618,7 +2618,7 @@
       "version": "1.2.13",
       "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz",
       "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==",
-      "deprecated": "The v1 package contains DANGEROUS / INSECURE binaries. Upgrade to safe fsevents v2",
+      "deprecated": "Upgrade to fsevents v2 to mitigate potential security issues",
       "hasInstallScript": true,
       "license": "MIT",
      "optional": true,
@@ -6239,9 +6239,9 @@
       }
     },
     "node_modules/package-json-from-dist": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz",
-      "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==",
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
+      "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
       "license": "BlueOak-1.0.0"
     },
     "node_modules/parse-filepath": {
@@ -8229,9 +8229,9 @@
       }
     },
     "node_modules/update-browserslist-db": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz",
-      "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==",
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz",
+      "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==",
       "funding": [
         {
           "type": "opencollective",
@@ -8248,8 +8248,8 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "escalade": "^3.1.2",
-        "picocolors": "^1.0.1"
+        "escalade": "^3.2.0",
+        "picocolors": "^1.1.0"
       },
       "bin": {
         "update-browserslist-db": "cli.js"


@@ -78,7 +78,6 @@ const sfc = {
     searchURL() {
      return `${this.subUrl}/repo/search?sort=updated&order=desc&uid=${this.uid}&team_id=${this.teamId}&q=${this.searchQuery
      }&page=${this.page}&limit=${this.searchLimit}&mode=${this.repoTypes[this.reposFilter].searchMode
-      }${this.reposFilter !== 'all' ? '&exclusive=1' : ''
      }${this.archivedFilter === 'archived' ? '&archived=true' : ''}${this.archivedFilter === 'unarchived' ? '&archived=false' : ''
      }${this.privateFilter === 'private' ? '&is_private=true' : ''}${this.privateFilter === 'public' ? '&is_private=false' : ''
      }`;